def test_bad_first_arg(self):
    try:
        compose(5, compose)
    except TypeError:
        pass
    else:
        self.fail("Failed to raise TypeError")
def test_no_args(self):
    try:
        compose()
    except TypeError:
        pass
    else:
        self.fail("Failed to raise TypeError")
def test_bad_second_arg(self):
    try:
        compose(compose, 5)
    except TypeError:
        pass
    else:
        self.fail("Failed to raise TypeError")
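# The three tests above share the same try/except/else shape; with unittest's
# assertRaises the same checks read more directly. A minimal sketch, assuming
# these live in the same unittest.TestCase subclass (the method name is new):
def test_bad_args_with_assertRaises(self):
    self.assertRaises(TypeError, compose, 5, compose)  # bad first arg
    self.assertRaises(TypeError, compose)              # no args
    self.assertRaises(TypeError, compose, compose, 5)  # bad second arg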
def load_transfer_data():
    """Loads bytes transferred data into the db.

    You will want to run this sql first:

    ALTER TABLE sync_records ADD COLUMN transfer_in int(11) DEFAULT 0;
    ALTER TABLE sync_records ADD COLUMN transfer_out int(11) DEFAULT 0;
    """
    records = Session.query(SyncRecord).filter(
        SyncRecord.type == SyncRecord.TYPE_PHOTO).filter(
        SyncRecord.transfer_in == 0).all()

    def progress(message):
        def func(processed, total):
            print "%i/%i\t%i\t%s" % (processed, total,
                                     float(processed) / total * 100, message)
        return func

    def get_size_request(record):
        return http.JsonRequest(flickr.get_url(
            record.user.flickr_token,
            method='flickr.photos.getSizes',
            photo_id=record.flickrid))

    def get_byte_size_request(request):
        try:
            url = request.read_response()['sizes']['size'][-1]['source']
            return http.Request(url, method="HEAD")
        except:
            pass

    def get_content_size(request):
        return int(request.read_headers()['Content-Length']) if request else None

    def migrate_sync_record(sync_record, content_size):
        if content_size:
            sync_record.transfer_in = content_size
            sync_record.transfer_out = content_size
            Session.commit()
        else:
            print "Couldn't get content size for", sync_record.id

    fetcher = http.Fetcher(progress_callback=progress("getting image urls"))
    map(compose(fetcher.queue, get_size_request), records)
    size_fetcher = http.Fetcher(progress_callback=progress("getting byte sizes"))
    map(compose(size_fetcher.queue, get_byte_size_request), fetcher.run())
    list(starmap(migrate_sync_record,
                 izip(records, imap(get_content_size, size_fetcher.run()))))
def __hash__(self):
    attrs = ['__class__', 'foo', 'bar', 'baz']
    self_getattr = partial(getattr, self)
    str_hash_getattr = compose(str, compose(hash, self_getattr))
    # In Haskell, we could use concatMap here, since the String type is
    # just an alias for a list of Chars. In Python, strings may obey
    # the sequence protocol, but that doesn't mean we can do
    # "abc" + []
    return hash(''.join(map(str_hash_getattr, attrs)))
def test_outer_raises_exc(self):
    from functional import compose

    def inner(*vargs):
        return sum(vargs)

    def outer(num):
        raise RuntimeError()

    try:
        compose(outer, inner)(5, 6, 7)
    except RuntimeError:
        pass
    else:
        raise AssertionError("Failed to raise RuntimeError")
def __init__(self, prefix: List[str]):
    self.prefix = prefix
    self.clean = list(map(compose(str.strip, str.lower), prefix))
    self.next = {}
    self.count = 0
    self.as_first = 0
    self.as_last = 0
def macro(self, args, **kwargs):
    verbose = kwargs.get('verbose', True)
    store = kwargs.get('store', True)
    # args is the string following the macro command, if any;
    # we use the escape split on it - honouring quotation.
    # three cases:
    #   len(parts) == 0 => just the 'macro' command, list all macros
    #   len(parts) == 1 => display the definition of that macro (if any)
    #   len(parts) == 2 => attempt to define (or redefine) a macro
    parts = hvutil.escape_split(args)
    if len(parts) > 2:
        raise RuntimeError(
            "internal error - too many arguments to 'macro' command")
    if len(parts) == 0:
        # display all macro definitions
        drap(compose(print, "{0[0]} => '{0[1]}'".format),
             iteritems(self.macros))
    elif len(parts) == 1:
        # display the definition of macro 'xxx' (if any)
        try:
            print("{0} => '{1}'".format(parts[0], self.macros[parts[0]]))
        except KeyError:
            print(parts[0] + " => no such macro defined")
    else:
        # attempt to define the macro
        # make sure no command of that name exists
        for cmd in self.commands:
            if cmd.rx.match(parts[0]):
                raise RuntimeError(
                    "'{0}' already exists as command".format(parts[0]))
        self.addMacro((parts[0], parts[1]), store=store)
        if verbose:
            print(parts[0] + " => " + parts[1])
    return None
def make_text_cleaners():
    subs = ((r'https\w+$', ' '),
            (r'https://\w+$', ' '),
            (r'http\w+\s', ' '),
            (r'@\w+', '_TN'),
            (r'[\w\-][\w\-\.]+@[\w\-][\w\-\.]+[a-zA-Z]{1,4}', '_EM'),
            (r'\w+:\/\/\S+', r'_U'),
            (r' +', ' '),
            (r'([^!\?])(\?{2,})(\Z|[^!\?])', r'\1 _BQ\n\3'),
            (r'([^\.])(\.{2,})', r'\1 _SS\n'),
            (r'([^!\?])(\?|!{2,})(\Z|[^!\?])', r'\1 _BX\n\3'),
            (r'([^!\?])\?(\Z|[^!\?])', r'\1 _Q\n\2'),
            (r'([^!\?])!(\Z|[^!\?])', r'\1 _X\n\2'),
            (r'([a-zA-Z])\1\1+(\w*)', r'\1\1\2 _EL'),
            (r'(\w+)\.(\w+)', r'\1\2'),
            (r'[^a-zA-Z\s]', ''),
            (r'([#%&\*\$]{2,})(\w*)', r'\1\2 _SW'),
            (r' [8x;:=]-?(?:\)|\}|\]|>){2,}', r' _BS'),
            (r' (?:[;:=]-?[\)\}\]d>])|(?:<3)', r' _S'),
            (r' [x:=]-?(?:\(|\[|\||\\|/|\{|<){2,}', r' _BF'),
            (r' [x:=]-?[\(\[\|\\/\{<]', r' _F'),
            (r'\s\s', r' '))
    replaces = (('"', ' '),
                ('\'', ' '),
                ('_', ' '),
                ('-', ' '),
                ('\n', ' '),
                ('\\n', ' '),
                ('RT', ''))

    def repl(original, new, word):
        return word.replace(original, new)

    substitute = reduce(compose, [partial(re.sub, x, y) for (x, y) in subs])
    replace = reduce(compose, [partial(repl, x, y) for (x, y) in replaces])
    return compose(substitute, replace)
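# Usage sketch (variable names are illustrative, not from the original code):
# build the cleaner once, then apply it to each raw tweet; markers such as
# _TN, _EM and _U stand in for mentions, e-mail addresses and urls.
#
#   clean = make_text_cleaners()
#   cleaned = [clean(text) for text in raw_tweets]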
def test_compose():
    def valueadd1(v):
        return v + 1

    def valuemultiply2(v):
        return v * 2

    assert functional.compose(valueadd1, valuemultiply2)(1) == 4
def test_weakrefs(self):
    from functional import compose
    import weakref

    f = compose(id, id)
    w = weakref.ref(f)
    assert w() is f
def __init__(self):
    params = dict()
    ddap = DefaultDataAccessParams()
    params['term_map'] = dict()
    for w in ddap.deaf_synonyms:
        params['term_map'] = dict(params['term_map'],
                                  **{w: '_' + w.replace(' ', '_')})
    self.text_processor = functional.compose(
        TermReplacer(params['term_map'], term_padding_exp=r'\b').replace_terms,
        YbossText.toascii_lower)
def test_without_unpack_2(self):
    def minus_4(a):
        return a - 4

    def mul_2(a):
        return a * 2

    f = compose(mul_2, minus_4)
    self.assertEqual(f(4), 0)
def convert_format(from_format, to_format):
    """convert_format(from_format, to_format)(timestr) -> str

    Convert between two time formats.

    >>> convert_format('%d/%m/%Y', '%Y-%m-%d')('21/12/2112')
    '2112-12-21'
    """
    return compose(strftime(to_format), strptime(from_format))
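# convert_format above relies on curried strftime/strptime helpers that take
# a format first and return a function of the remaining argument. They are
# not shown in this excerpt; a minimal sketch of what they could look like,
# assuming they wrap the standard time module (names and behaviour here are
# an assumption, not the original definitions):
import time

def strptime(fmt):
    # strptime('%d/%m/%Y')('21/12/2112') -> time.struct_time
    return lambda timestr: time.strptime(timestr, fmt)

def strftime(fmt):
    # strftime('%Y-%m-%d')(struct_time) -> '2112-12-21'
    return lambda timetuple: time.strftime(fmt, timetuple)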
def test_with_unpack(self):
    from functional import id, compose

    def minus(a, b):
        return a - b

    # functional.id just passes the tuple through
    f = compose(minus, id, unpack=True)
    self.assertEqual(f((4, 5)), -1)
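# The unpack=True behaviour exercised above amounts to
# compose(outer, inner, unpack=True)(x) == outer(*inner(x)).
# A plain-Python sketch of that semantics (not the library's actual
# implementation):
def compose_sketch(outer, inner, unpack=False):
    if unpack:
        return lambda *args, **kw: outer(*inner(*args, **kw))
    return lambda *args, **kw: outer(inner(*args, **kw))

# With functional.id as the identity pass-through:
# compose_sketch(minus, id, unpack=True)((4, 5)) == minus(4, 5) == -1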
def __init__(self, json):
    to_datetime = compose(datetime.datetime.utcfromtimestamp, float)

    self.title = json.get('resolved_title').strip()
    self.url = json.get('resolved_url')
    self.item_id = json.get('item_id')
    self.time_added = to_datetime(json.get('time_added'))
    self.time_read = to_datetime(json.get('time_read'))
    self.time_updated = to_datetime(json.get('time_updated'))
    self._status = int(json.get('status'))
def find_first(self, words):
    prefixes = self.__data.values()
    words = list(map(compose(str.lower, str.strip), words))

    def has_words(prefix: MultiDictionary.Word):
        return all([prefix.clean.count(word) > 0 for word in words])

    prefixes = list(filter(has_words, prefixes))
    return list(
        random.choices(population=[word.prefix for word in prefixes],
                       weights=[(word.as_first / 2) + 2 for word in prefixes],
                       k=min(len(prefixes), 5)))
def __init__(self, **kwargs):
    kwargs = dict({
        'key_terms': list(),
        # TODO: an empty dict() didn't work here, so {'the': 'the'} acts as
        # an essentially empty term map
        'term_map': {'the': 'the'},
        'yb_to_tc': self.tc_flat_from_yb,
        'yboss_kwargs': dict()
    }, **kwargs)
    for k, v in kwargs.iteritems():
        setattr(self, k, v)
    for w in self.key_terms:
        self.term_map = dict(self.term_map, **{w: '_' + w.replace(' ', '_')})
    self.key_terms_ts = TermStats.from_terms(self.term_map.values())
    self.text_preprocess = functional.compose(
        TermReplacer(self.term_map, term_padding_exp=r'\b').replace_terms,
        YbossText.toascii_lower)
    self.yb = Yboss(**self.yboss_kwargs)
    delattr(self, 'yboss_kwargs')
def filter_out_words(words, filtering_type):
    def word_filter(word):
        return word and (word[0] == '*' or word[0] == '.')

    def word_filter2(word):
        return not (word and (word[0] == '/' or word[0] == '\\'))

    filtr = word_filter if filtering_type == 'easy' else word_filter2
    words = filter(filtr, words)
    words = map(compose(strip_newline, strip_newline), words)
    words = map(strip_indicators, words)
    for c in FORBIDDEN_CHARACTERS:
        # bind c now, in case filter is evaluated lazily
        words = filter(lambda x, c=c: x.find(c) == -1, words)
    return words
def listCommands(self):
    s = []
    # compute maximum width of a command name
    ids = List(map(GetA('id'), self.commands))
    width = max(map(len, ids))
    fmt = "{{0:<{0}}}".format(width + 2).format

    def p(x):
        if len(s) > 0 and len(s[-1]) < (6 * width):
            s[-1] += x
        else:
            s.append(x)

    drap(compose(p, fmt), sorted(ids))
    s.append("=== Macros ===")
    s.extend(map_("{0[0]} => '{0[1]}'".format, iteritems(self.macros)))
    maybePage(s)
def __add__(self, other):
    return FunctionalFunction(compose(other, self))
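# __add__ above overloads '+' as composition: for f + g, compose(other, self)
# calls self (the left operand) first and feeds its result to other. A
# minimal, self-contained sketch of such a wrapper class (this body is an
# illustration assumed here, not the original FunctionalFunction definition):
class FunctionalFunction(object):
    def __init__(self, func):
        self.func = func

    def __call__(self, *args, **kwargs):
        return self.func(*args, **kwargs)

    def __add__(self, other):
        # mirrors compose(other, self): apply self first, then other
        return FunctionalFunction(lambda *a, **kw: other(self(*a, **kw)))

# Usage: (FunctionalFunction(str.strip) + str.upper)('  hi ') == 'HI'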
def index(request):
    d = datetime.now()
    weekday = d.weekday()
    now_hour = d.hour      # put test hour here; example test times: 745, 1249
    now_minute = d.minute  # put test minute here
    #now_hour = 14
    #now_minute = 30
    #weekday = 4

    halls = Hall.objects.all()

    # Get latitude and longitude query params
    lat_str = request.GET.get('lat')
    lon_str = request.GET.get('lon')
    filter_type = request.GET.get('type')
    prox = (lat_str and lon_str)

    # Convert lat and lon query parameters to floats.
    # If conversion fails, abort sort by proximity
    if prox:
        try:
            lat = float(lat_str)
            lon = float(lon_str)
        except ValueError:
            prox = False

    # If conversion successful, generate distances to halls
    if prox:
        current_loc_array = array((lat, lon))
        loc_assoc = map(lambda h: (h.id, (h.lat, h.lon)), halls)
        one_degree_to_mile = 64.812936859
        # Create dictionary from hall ids to distances in miles
        dist_dict = dict()
        for pair in loc_assoc:
            hall_loc_array = array(pair[1])
            dist_dict[pair[0]] = one_degree_to_mile * norm(
                current_loc_array - hall_loc_array)

    # Select hour objects from halls' previous day which might still be open
    now_hour_for_yesterday = now_hour + 24
    weekday_for_yesterday = (weekday - 1) % 7
    openFromYesterday = Hour.objects.filter(
        day=weekday_for_yesterday
    ).filter(
        Q(end_hour__gt=now_hour_for_yesterday) |
        (Q(end_hour=now_hour_for_yesterday) & Q(end_minute__gt=now_minute))
    ).filter(
        Q(start_hour__lt=now_hour_for_yesterday) |
        (Q(start_hour=now_hour_for_yesterday) & Q(start_minute__lt=now_minute))
    )

    # Select hour objects from current day which are open
    Open = Hour.objects.filter(
        day=weekday
    ).filter(
        Q(end_hour__gt=now_hour) |
        (Q(end_hour=now_hour) & Q(end_minute__gt=now_minute))
    ).filter(
        Q(start_hour__lt=now_hour) |
        (Q(start_hour=now_hour) & Q(start_minute__lt=now_minute))
    )

    open_from_yesterday_halls = openFromYesterday.values_list('host_hall', flat=True)
    Open = openFromYesterday | Open.exclude(host_hall__in=list(open_from_yesterday_halls))
    open_halls = Open.values_list('host_hall', flat=True)
    # values_list returns a list of tuples of selected values;
    # flat=True makes it a flat list instead of tuples
    #
    # This query block will return the next time that a hall opens on a day.
    #---------
    # filter for blocks of time that haven't ended yet
    Closed = Hour.objects.filter(
        Q(end_hour__gt=now_hour) |
        (Q(end_hour=now_hour) & Q(end_minute__gt=now_minute))
    ).exclude(
        Q(start_hour__lt=now_hour) |
        (Q(start_hour=now_hour) & Q(start_minute__lt=now_minute))
    ).filter(
        day=weekday
    ).distinct(
        'host_hall'
    ).exclude(
        host_hall__in=list(open_halls)
    )
    closed_halls = Closed.values_list('host_hall', flat=True)
    closed_for_day = Hall.objects.exclude(id__in=list(closed_halls)).exclude(id__in=list(open_halls))

    # get the halls/hours that you will need to get food items for
    # (those that are open or will soon be open, and are not retail)
    menuhalls = (list(Open.exclude(meal_type='RET').values_list('id', flat=True)) +
                 list(Closed.exclude(meal_type='RET').values_list('id', flat=True)))

    # create a dictionary to look up food items by hour
    food_item_dict = dict()
    for id in menuhalls:
        items = FoodItem.objects.filter(
            meal_menu__meal_time__id=id  # time matches
        ).distinct('name')
        if filter_type:  # if we're filtering by vegan/vegetarian
            items = items.filter(attributes__name=filter_type)
        food_item_dict[id] = items

    # Sort dining hall lists according to distance (if specified), or time and name
    if prox:
        Open = sorted(Open, key=compose(lambda x: dist_dict[x],
                                        operator.attrgetter('host_hall_id')))
    else:
        Open = Open.order_by('end_hour', 'end_minute', 'host_hall__name')
    if prox:
        Closed = sorted(Closed, key=compose(lambda x: dist_dict[x],
                                            operator.attrgetter('host_hall_id')))
    else:
        Closed = Closed.order_by('host_hall', 'start_hour', 'start_minute')

    t = loader.get_template('index.html')
    c = Context({
        'Halls': halls,
        'date': d,
        'day': weekday,
        'nowHr': now_hour,
        'nowMin': now_minute,
        'openHalls': Open,
        'closedHalls': Closed,
        'closedForDay': closed_for_day,
        'food_items': food_item_dict,
        #'distDict': dist_dict
    })
    return HttpResponse(t.render(c))
import functools
import operator
from functional import compose

# iteration via recursion
def fsum(xs):
    return xs[0] + fsum(xs[1:]) if xs else 0

print fsum(range(10))

# map, reduce, functools.partial
print map(functools.partial(operator.__add__, 1), [1, 2, 3])
print reduce(operator.__mul__, [1, 2, 3], 1)

factorial = lambda n: reduce(operator.__mul__, range(1, n + 1), 1)
print factorial(6)

# function composition
succ = lambda x: x + 1
square = lambda x: x**2
squaresucc = compose(square, succ)
print squaresucc(5)

# division by zero
class Maybe(object):
    @classmethod
    def ret(cls, x):
        return Just(x)

class Nothing(Maybe):
    def __init__(self):
        pass

    def __rshift__(self, f):
        return self

    def __repr__(self):
        return 'Nothing()'

class Just(Maybe):
    def __init__(self, x):
        self._x = x

    def __rshift__(self, f):
def test_try_catch(self):
    ns = compose(j.Expr_stat, n)
    self.t('try{}catch(e){}',
           j.Try((), j.TryHandler(j.Name('e'), ())))
    self.t('try{1;2;}catch(e){3;}',
           j.Try([ns(1), ns(2)], j.TryHandler(id('e'), [ns(3)])))
def test_try_catch(self):
    ns = compose(j.Expr_stat, n)
    self.t('try {\n}\ncatch (e) {\n}\n',
           j.Try((), j.TryHandler(j.Name('e'), ())))
    self.t('try {\n 1;\n 2;\n}\ncatch (e) {\n 3;\n}\n',
           j.Try([ns(1), ns(2)], j.TryHandler(id('e'), [ns(3)])))
# HV: * About time to commit - make sure all edits are safeguarded.
#      Making good progress. baselineselection, sourceselection and
#      timeselection working
#
# Revision 1.2  2013-01-29 12:23:45  jive_cc
# HV: * time to commit - added some more basic stuff
#
from six import iteritems
from functools import reduce
from functional import compose, is_not_none, map_

import jenums, hvutil, copy, operator

# how to format a time range "(start, end)" as TaQL
fmt_time_cond = "(TIME>={0[0]:.7f} && TIME<={0[1]:.7f})".format
fmt_dd_select = "{0}/{1}/{2}:{3}".format
range_repr = compose(hvutil.range_repr, hvutil.find_consecutive_ranges)


class selection:
    def __init__(self):
        self.init()

    def init(self):
        self.chanSel = None
        self.polSel = None
        self.scanSel = []
        self.plotType = None
        self.solint = None   # AIPS legacy :D
        self.solchan = None  # AIPS legacy :D
        self.averageTime = jenums.Averaging.NoAveraging
        self.averageChannel = jenums.Averaging.NoAveraging
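# Usage examples for the fmt_time_cond / fmt_dd_select formatters defined
# above (the argument values are illustrative, not taken from real data):
#
#   fmt_time_cond((1.0, 2.5))         -> '(TIME>=1.0000000 && TIME<=2.5000000)'
#   fmt_dd_select('IF0', 'RR', 0, 63) -> 'IF0/RR/0:63'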
def invalidate_template_cache(fragment_name, *variables):
    args = md5_constructor(u':'.join(map(compose(urlquote, unicode), variables)))
    cache_key = 'template.cache.%s.%s' % (fragment_name, args.hexdigest())
    cache.delete(cache_key)
        else:
            return self.left.expr_type


Assign = partial(Assign_expr, '=')
Plus_assign = partial(Assign_expr, '+=')
Sub_assign = partial(Assign_expr, '-=')
Mul_assign = partial(Assign_expr, '*=')
Div_assign = partial(Assign_expr, '/=')
Mod_assign = partial(Assign_expr, '%=')
Rshift_assign = partial(Assign_expr, '>>=')
Rshift_zero_fill_assign = partial(Assign_expr, '>>>=')
Lshift_assign = partial(Assign_expr, '<<=')
Bit_or_assign = partial(Assign_expr, '|=')
Bit_and_assign = partial(Assign_expr, '&=')
Bit_xor_assign = partial(Assign_expr, '^=')

AssignStat = compose(Expr_stat, Assign)


class If(Ast):
    def __init__(self, value, first, second):
        super(If, self).__init__()
        self.value = value
        self.first, self.second = first, second

    _fields = ('value', 'first', 'second')


class For(Ast):
    def __init__(self, init, cond, inc, stats):
        super(For, self).__init__()
        self.init, self.cond = init, cond
        self.inc, self.stats = inc, stats
def invalidate_template_cache(fragment_name, *variables):
    args = md5_constructor(u':'.join(map(compose(urlquote, unicode), variables)))
    cache_key = 'template.cache.%s.%s' % (fragment_name, args.hexdigest())
    cache.delete(cache_key)
import functools
import operator
from functional import compose

def fsum(xs):
    return xs[0] + fsum(xs[1:]) if xs else 0

print fsum(range(10))

# map, reduce, functools.partial
print map(functools.partial(operator.__add__, 1), [1, 2, 3])
print reduce(operator.__mul__, [1, 2, 3], 1)

factorial = lambda n: reduce(operator.__mul__, range(1, n + 1), 1)
print factorial(6)

# function composition
succ = lambda x: x + 1
square = lambda x: x**2
squaresucc = compose(square, succ)
print squaresucc(5)

# division by zero
class Maybe(object):
    @classmethod
    def ret(cls, x):
        return Just(x)

class Nothing(Maybe):
    def __init__(self):
        pass

    def __rshift__(self, f):
from functional import compose

lmap = compose(list, map)
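# A quick check of lmap: it forces the (lazy, in Python 3) map iterator into
# a list in one call. The values are illustrative:
assert lmap(str.upper, ['a', 'b']) == ['A', 'B']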
def invalidate_template_cache(fragment_name, *variables):
    """
    http://djangosnippets.org/snippets/1593/
    """
    args = md5(u':'.join(map(compose(urlquote, unicode), variables)))
    cache_key = 'template.cache.%s.%s' % (fragment_name, args.hexdigest())
    cache.delete(cache_key)
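# Usage sketch: invalidate_template_cache is meant to be called with the
# fragment name and the same vary-on values used in the corresponding
# {% cache %} template tag; the tag and arguments below are illustrative only:
#
#   {% cache 500 sidebar request.user.username %}
#   invalidate_template_cache('sidebar', request.user.username)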
import oauth.oauth as oauth
import logging

import functional
from lxml import objectify

from django_freshbooks.refreshbooks import client, adapters, transport


def api_url(domain):
    """Returns the Freshbooks API URL for a given domain.

    >>> api_url('billing.freshbooks.com')
    'https://billing.freshbooks.com/api/2.1/xml-in'
    """
    return "https://%s/api/2.1/xml-in" % (domain, )

default_request_encoder = adapters.xml_request
default_response_decoder = functional.compose(
    adapters.fail_to_exception_response,
    objectify.fromstring
)

def logging_request_encoder(method, **params):
    encoded = default_request_encoder(method, **params)
    logging.debug("--- Request (%r, %r) ---" % (method, params))
    logging.debug(encoded)
    return encoded

def logging_response_decoder(response):
    logging.debug("--- Response ---")
    logging.debug(response)
    return default_response_decoder(response)
def matchedpatt(self, groups):
    return (groups.map(compose(bool, compose(self.evalkw.search, str))),
            groups.map(compose(bool, compose(self.brackets.search, str))),
            groups.map(compose(bool, compose(self.quotes.search, str))))