def generate_network():  # num_parents, num_leaves, approx_num_total_nodes, approx_connectivity
    """Generates the network on page 362 of Bishop...
    Too much work to generate random networks..."""
    # Lambda to collapse the list of parents' states into a binary string
    get_parents_binary_string = lambda parents: f.foldl(
        lambda acc, element: acc << 1 | element.state, 0b0, parents)
    # Simply looks up the binary string formed from the parents in the p_map
    p_up = lambda p_map, parents: p_map[get_parents_binary_string(parents)]

    '''
    # Simple network for testing purposes
    node1 = Bayesian_node(None, lambda: .4, None, 0)
    node2_p_dict = {0b0: .2, 0b1: .5}
    node2 = Bayesian_node(None, f.partial(p_up, node2_p_dict), [node1], 1)
    node1.children = [node2]
    return Network([node1], {0: node1, 1: node2})
    '''

    node1 = Bayesian_node(None, lambda: .4, None, 0)
    node2 = Bayesian_node(None, lambda: .7, None, 1)
    node3 = Bayesian_node(None, lambda: .3, None, 2)
    node4_p_dict = {0b000: .2, 0b001: .5, 0b010: .7, 0b100: .43,
                    0b011: .15, 0b101: .35, 0b110: .6, 0b111: .25}
    node4 = Bayesian_node(None, f.partial(p_up, node4_p_dict), [node1, node2, node3], 3)
    node5_p_dict = {0b00: .9, 0b01: .5, 0b10: .2, 0b11: .1}
    node5 = Bayesian_node(None, f.partial(p_up, node5_p_dict), [node1, node3], 4)
    node6_p_dict = {0b1: .85, 0b0: .374}
    node6 = Bayesian_node(None, f.partial(p_up, node6_p_dict), [node4], 5)
    node7_p_dict = {0b00: .19, 0b01: .27, 0b10: .5, 0b11: .333}
    node7 = Bayesian_node(None, f.partial(p_up, node7_p_dict), [node4, node5], 6)
    node1.children = [node4, node5]
    node2.children = [node4]
    node3.children = [node4, node5]
    node4.children = [node6, node7]
    node5.children = [node7]
    return Network([node1, node2, node3],
                   {0: node1, 1: node2, 2: node3, 3: node4,
                    4: node5, 5: node6, 6: node7})
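
# A minimal standalone sketch of the parent-state lookup key used above,
# assuming each parent exposes a binary .state attribute. The _Parent
# stand-in below is hypothetical, and f.foldl is replaced by functools.reduce.
from functools import reduce

class _Parent(object):
    def __init__(self, state):
        self.state = state

def _parents_key(parents):
    # Shift each parent's 0/1 state into the low bit of the accumulator,
    # so states [1, 0, 1] become 0b101 == 5, a valid p_dict key.
    return reduce(lambda acc, p: acc << 1 | p.state, parents, 0b0)

assert _parents_key([_Parent(1), _Parent(0), _Parent(1)]) == 0b101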
def __init__(self, obj):
    super(Wrapper, self).__init__(obj)
    if isinstance(obj, types.ClassType):
        obj = obj()
    permitted_methods = {}
    for method in ('get', 'post', 'option', 'put', 'delete', 'head'):
        if hasattr(obj, method):
            fnc = getattr(obj, method)
            if not isinstance(fnc, types.FunctionType):
                fnc = partial(fnc, obj)
            permitted_methods[method.upper()] = fnc
        else:
            # in case we have a decorated function instead of a class..
            if callable(obj):
                permitted_methods[method.upper()] = obj
    require_http_methods_decorator = http.require_http_methods(
        request_method_list=permitted_methods.keys())
    for key, val in permitted_methods.items():
        setattr(self, key.lower(), val)
    self.permitted_methods = permitted_methods.keys()
    self.inner = reduce(lambda fnc, dec: dec(fnc),
                        (require_http_methods_decorator,) + decorators,
                        self.inner)
def updateCode(self):
    aux_code_from_array = functional.partial(util._code_from_array, self.currentLine)
    self.codeTextBuffer.set_text(self.code_from_array(aux_code_from_array))
    self.codeTextBuffer.apply_tag(self.textTag,
                                  self.codeTextBuffer.get_start_iter(),
                                  self.codeTextBuffer.get_end_iter())
    self.codeTextBuffer.apply_tag(self.selectedLine,
                                  self.codeTextBuffer.get_iter_at_line(self.currentLine),
                                  self.codeTextBuffer.get_iter_at_line(self.currentLine + 1))
    self.codeTextView.set_buffer(self.codeTextBuffer)
    print self.interpreter.scope
def main(args=sys.argv[1:], optparse=qcat_options,
         getpassword=getpass.getpass, connect=amqp.Connection):
    options, args = optparse(args)
    if options.password is None:
        options.password = getpassword()
    con = connect(host=options.host, userid=options.userid,
                  password=options.password, virtual_host=options.vhost)
    try:
        chan = con.channel()
        try:
            if options.declare_queue:
                chan.queue_declare(queue=options.queue,
                                   durable=options.durable,
                                   auto_delete=options.auto_delete)
            if options.declare_exchange:
                chan.exchange_declare(exchange=options.exchange,
                                      type=options.exchange_type,
                                      durable=options.durable,
                                      auto_delete=options.auto_delete)
            if options.bind_queue:
                chan.queue_bind(queue=options.queue,
                                exchange=options.exchange,
                                routing_key=options.routing_key)
            chan.tx_select()
            consumer_tag = chan.basic_consume(
                callback=functional.partial(echo, chan),
                queue=options.queue)
            try:
                print("Press ^C to exit...")
                while True:
                    chan.wait()
            except KeyboardInterrupt:
                print()  # A newline, so that ^C doesn't naff up readline.
            finally:
                chan.basic_cancel(consumer_tag)
            if options.drain:
                chan.tx_commit()
            else:
                chan.tx_rollback()
        finally:
            chan.close()
    finally:
        con.close()
def composeAll(*args):
    """Util for multiple function composition

    i.e. composed = composeAll([f, g, h])
         composed(x) == f(g(h(x)))
    """
    # adapted from https://docs.python.org/3.1/howto/functional.html
    return partial(functools.reduce, compose)(*args)
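
# A minimal sketch of the reduce/compose idiom above, assuming a
# two-argument compose where compose(f, g)(x) == f(g(x)), as in the
# functional package (the local _compose below is a stand-in).
import functools

def _compose(f, g):
    return lambda x: f(g(x))

_composed = functools.partial(functools.reduce, _compose)(
    [lambda x: x + 1, lambda x: x * 2, lambda x: x - 3])
# The right-most function runs first: ((10 - 3) * 2) + 1 == 15
assert _composed(10) == 15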
def show_settings():
    def bind(bind_key):
        print(f"binding : {bind_key}")

        def press(key):
            settings.binds[bind_key] = key
            print(f"bind_key = {bind_key} | new_key = {key}")
            listener.stop()

        with Listener(on_press=press) as listener:
            listener.join()
        update_binds()

    ttk.Label(root).grid(columnspan=2)
    frame = tkinter.Frame()
    frame.grid(columnspan=2)
    ttk.Label(root, text="----------Settings----------").grid(columnspan=2, in_=frame)

    label = ttk.Label(root, text="Chat Prefix")
    label.grid(sticky=tkinter.E, in_=frame)
    ttk.Entry(root, textvariable=chat_prefix).grid(
        column=1, row=label.grid_info()['row'], sticky=tkinter.W, in_=frame)

    label = ttk.Label(root, text="Number of words")
    label.grid(sticky=tkinter.E, in_=frame)
    ttk.Entry(root, textvariable=prefix_size).grid(
        column=1, row=label.grid_info()['row'], sticky=tkinter.W, in_=frame)

    label = ttk.Label(root, text="Split chance")
    label.grid(sticky=tkinter.E, in_=frame)
    ttk.Entry(root, textvariable=split_chance).grid(
        column=1, row=label.grid_info()['row'], sticky=tkinter.W, in_=frame)

    label = ttk.Label(root, text="Chat delay")
    label.grid(sticky=tkinter.E, in_=frame)
    ttk.Entry(root, textvariable=chat_delay).grid(
        column=1, row=label.grid_info()['row'], sticky=tkinter.W, in_=frame)

    update_binds()
    for i in range(4):
        texts[i].grid(sticky=tkinter.E, in_=frame)
        ttk.Button(root, text="Bind", command=partial(bind, i)).grid(
            sticky=tkinter.W, column=1, row=texts[i].grid_info()['row'], in_=frame)
def gen(filename):
    pattern = ['MMU2H', r'\|']
    fr = open(filename).readlines()

    def ismatch(p, c):
        m = re.search(p, c)
        return m

    # Keep lines that match at least one pattern. (The original relied on a
    # non-empty Python 2 filter() list being truthy; any() makes the intent
    # explicit and also works on Python 3's lazy filter.)
    new_f = filter(lambda x: any(map(partial(ismatch, c=x), pattern)), fr)
    with open('dd.txt', 'w') as fw:
        fw.writelines(new_f)
def __hash__(self):
    attrs = ['__class__', 'foo', 'bar', 'baz']
    self_getattr = partial(getattr, self)
    str_hash_getattr = compose(str, compose(hash, self_getattr))
    # In Haskell, we could use concatMap here, since the String type is
    # just an alias for a list of Chars. In Python, strings may obey
    # the sequence protocol, but that doesn't mean we can do
    # "abc" + []
    return hash(''.join(map(str_hash_getattr, attrs)))
def possible_directions(frm):
    map_size = 20
    add_points = lambda a, b: point(a._x + b._x, a._y + b._y)
    inside_map = lambda p: (0 <= p._x < map_size and 0 <= p._y < map_size)
    directions = [point(-1, 0), point(1, 0), point(0, -1), point(0, 1),
                  point(-1, -1), point(-1, 1), point(1, 1), point(1, -1)]
    return filter(inside_map, map(partial(add_points, frm), directions))
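
# A usage sketch for possible_directions above. The point class below is
# hypothetical (the original's point type is not shown), and partial is
# assumed to come from functools.
from functools import partial

class point(object):
    def __init__(self, x, y):
        self._x, self._y = x, y

# From the (0, 0) corner, only 3 of the 8 neighbours stay inside the map.
assert len(list(possible_directions(point(0, 0)))) == 3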
def openFile(self, filename):
    if filename and os.path.isfile(filename):
        self.arrayCode = util.load_file(filename)
        self.code_from_array = functional.partial(util.code_from_array,
                                                  self.arrayCode)  # Currying
        self.interpreter = None
        self.set_interpreter()
        self.currentLine = self.interpreter.position
        self.updateCode()
        self.updateVariables()
        self.updateStack()
    elif filename:
        print "Arquivo nao encontrado:", filename  # "File not found"
def test_partial():
    def add(a, b):
        return a + b

    def add10(x):
        return add(x, 10)

    assert functional.map(add10, functional.range(1, 11)) == \
        [11, 12, 13, 14, 15, 16, 17, 18, 19, 20]
    assert functional.map(functional.partial(add, 10), functional.range(1, 11)) == \
        [11, 12, 13, 14, 15, 16, 17, 18, 19, 20]
def Recursion(*args):
    """Creates a sequence of nested functions.

    - *args: a variable-length input to the function
    - functools.reduce: applies compose() to each argument in args
    - compose: strings together functions, defining a recursive function
    - partial: the returned function, which accepts a value, x, that is
      then passed through the recursive function

    >> Encoder = Recursion([f, g, z])
    >> Encoder(x) == f(g(z(x)))
    """
    return partial(functools.reduce, compose)(*args)
def without_keys(keys):
    """Return a copy of a_dict with the given keys removed.

    without_keys(keys)(a_dict) :: [K] -> {K:V} -> {K:V}

    Equivalent to the following in Python 3:
        {k: v for (k, v) in a_dict.items() if k not in keys}

    >>> a_dict = {'a': 2, 'b': 3, 'c': 4}
    >>> without_keys(['a', 'b'])(a_dict)
    {'c': 4}
    """
    keys = frozenset(keys)  # frozenset has efficient membership lookup
    return filter_keys_c(fnot(partial(operator.contains, keys)))
def _predicates(self, fields=None):
    from dataset import Variable
    from functional import partial, combinator, eq, or_, and_
    if self.opt.flowid == '3':
        fields = ('dstIPs', 'dstPorts', 'srcIPs')
    elif self.opt.flowid == '4':
        fields = ('dstIPs', 'dstPorts', 'srcIPs', 'srcPorts')
    else:
        raise Exception('not implemented')
    i = 0
    for f in self.filters:
        f.update((k, map(ip2int, v)) for k, v in f.iteritems()
                 if k in ('dstIPs', 'srcIPs'))
        variable = combinator(Variable, self.fieldmap.get)
        # variable :: key -> Variable
        variableeq = combinator(partial(partial, eq), variable)
        # variableeq :: key -> (Variable ==)
        f['predicate'] = reduce(and_, (reduce(or_, map(variableeq(k), v))
                                       for k, v in f.items()
                                       if k in fields and len(v)))
        f['idx'] = i
        i += 1
def filter(global_conf, profile_dir):
    """A Paster filter for profiling applications.

    .. highlight:: ini

    To apply this to an application, use Paster's ``filter-with``
    attribute::

        [filter:profile]
        use = egg:profileware#profile
        profile_dir = %(here)s/profile

        [app:main]
        # ...
        filter-with = profile

    For each request, the app will write a file to ``./profile/`` that
    can be read out using the included ``profcat`` command.
    """
    store = ProfileStore(profile_dir)
    return partial(ProfiledApp, store)
def define_tags():
    import functional
    for tag in html_tags:
        globals()[tag] = functional.partial(XMLNode, tag)
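
# A minimal sketch of the tag-factory trick above, assuming a hypothetical
# XMLNode(tag, *children) constructor that renders a tag around its
# children; the real XMLNode is not shown in the snippet.
from functools import partial

def XMLNode(tag, *children):
    return "<%s>%s</%s>" % (tag, "".join(children), tag)

html_tags = ["div", "span", "p"]
for tag in html_tags:
    globals()[tag] = partial(XMLNode, tag)

assert div(span("hi")) == "<div><span>hi</span></div>"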
def compose_funcs(*args):
    """Multiple function composition

    i.e. composed = compose_funcs([f, g, h])
         composed(x) == f(g(h(x)))
    """
    return partial(functools.reduce, compose)(*args)
def correlate_values(values, stop=None, resolution=None, debug=False, rule=None,
                     AUTO_SIZE=50000, MAX_SIZE=0, MIN_RESOLUTION=0.05):
    '''
    Correlates values to have all epochs in all columns.

    :param values: {curve_name: [values]}
    :param resolution: two epochs with difference smaller than resolution
        will be considered equal
    :param stop: an end date for correlation
    :param rule: a method(tupleA, tupleB, epoch), like min, max, median,
        average, last, etc., that takes the two last column (t, value)
        tuples and a time and returns the tuple to keep
    '''
    start = time.time()
    #print('correlate_values(%d x %d,resolution=%s,MAX_SIZE=%d) started at %s'
    #      % (len(values), max(len(v) for v in values.values()),
    #         resolution, MAX_SIZE, time.ctime(start)))
    stop = stop or start
    keys = sorted(values.keys())
    table = dict((k, list()) for k in keys)
    index = dict((k, 0) for k in keys)
    lasts = dict((k, (0, None)) for k in keys)
    first = min([t[0][0] if t else 1e12 for t in values.values()])
    last = max([t[-1][0] if t else 0 for t in values.values()])

    if resolution is None:
        # avg: approximated time resolution of each row
        avg = (last - first) / min((AUTO_SIZE / 6,
                                    max(len(v) for v in values.values()) or 1))
        if avg < 10:
            resolution = 1
        elif 10 <= avg < 60:
            resolution = 10
        elif 60 <= avg < 600:
            resolution = 60
        elif 600 <= avg < 3600:
            resolution = 600
        else:
            resolution = 3600  # defaults
        print('correlate_values(...) resolution set to %2.3f -> %d s'
              % (avg, resolution))
    assert resolution > MIN_RESOLUTION, 'Resolution must be > %s' % MIN_RESOLUTION
    if rule is None:
        rule = fun.partial(choose_first_value, tmin=-resolution * 10)
    #if rule is None:
    #    rule = fun.partial(choose_last_max_value, tmin=-resolution * 10)

    # Ranges in milliseconds
    epochs = range(int(first * 1000 - resolution * 1000),
                   int(last * 1000 + resolution * 1000),
                   int(resolution * 1000))
    if MAX_SIZE:
        epochs = epochs[:MAX_SIZE]

    for k, data in values.items():
        #print('Correlating %s->%s values from %s' % (len(data), len(epochs), k))
        i, v, end = (0,
                     data[0] if data else (first, None),
                     data[-1][0] if data else (last, None))
        for t in epochs:
            t = t * 1e-3  # Correcting back to seconds
            v, tt = None, t + resolution
            # Inserted value will be (<end of interval>, <correlated value>).
            # The idea is that if there's a value in the interval, it is chosen.
            # If there's no value, then it will be generated using previous/next
            # values; if there's no next or previous then the value will be None.
            # NOTE: Already tried a lot of optimization; reducing the number of
            # IFs doesn't improve it. Could only guess whether iterating through
            # values would be better than iterating over times.
            if i < len(data):
                for r in data[i:]:
                    if r[0] > tt:
                        if v is None:  # No value in the interval
                            if not table[k]:
                                v = (t, None)
                            else:
                                # Generating value from previous/next
                                v = rule(*[table[k][-1], r, tt])
                        break
                    else:  # therefore, r[0] <= (t + resolution)
                        i, v = i + 1, (t, r[1])
                    ## A more elaborate election (e.g. to maximize change)
                    #elif v is None:
                    #    i, v = i + 1, (t, r[1])
                    #else:
                    #    i, v = i + 1, rule(*[v, r, tt])
            else:
                # Filling table with Nones
                v = (t + resolution, None)
            table[k].append((tt, v[1]))
        #print('\t%s values in table' % (len(table[k])))
    #print('Values correlated in %d milliseconds' % (1000 * (time.time() - start)))
    return table
# -*- coding: utf-8 -*-
import re

from functional import partial

regex = [r"(.*)_MMU", r"is (\d*\.\d*)ns"]
pattern = map(re.compile, regex)


def get_m(p, c):
    m = re.search(p, c)
    if m:
        return m.group(1)
    return None


match = map(lambda x: partial(get_m, p=x), pattern)
file_read = open("result_hot.txt").readlines()
func_read = lambda fn: filter(lambda x: x, map(lambda x: fn(c=x), file_read))
match_list = map(func_read, match)

file_write = open("ok.txt", "w")
func_write = lambda (x, y): file_write.writelines(
    y + "\t" + match_list[1][2 * x] + "\n\t" + match_list[1][2 * x + 1] + "\n")
map(func_write, enumerate(match_list[0]))
file_write.close()
def map_values_c(fun):
    """Curried version of map_values.

    map_values_c(fun)(a_dict) = map_values(fun, a_dict)
    """
    return partial(map_values, fun)
def multi_compose(x):
    return functional.partial(functools.reduce, functional.compose)(x)
def __get__(self, instance, owner):
    # Support for being used as a bound method.
    if instance is None:
        return self
    else:
        return partial(self, instance)
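
# A minimal sketch of the descriptor protocol above: a callable class
# whose instances behave as bound methods because __get__ closes over the
# instance via partial (all names below are hypothetical).
from functools import partial

class logged(object):
    def __init__(self, func):
        self.func = func

    def __call__(self, instance, *args, **kwargs):
        print("calling %s" % self.func.__name__)
        return self.func(instance, *args, **kwargs)

    def __get__(self, instance, owner):
        if instance is None:
            return self  # accessed on the class, not an instance
        return partial(self, instance)  # bind the instance, like a method

class Greeter(object):
    @logged
    def greet(self, name):
        return "hello, " + name

assert Greeter().greet("world") == "hello, world"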
def greater_than(n):
    return matcher(partial(flip(gt), n))
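
# A minimal sketch of the flip/partial pattern above, assuming
# flip(f)(a, b) == f(b, a) as in the functional package (the local flip
# below is a stand-in; matcher is omitted).
from functools import partial
from operator import gt

def flip(f):
    return lambda a, b: f(b, a)

is_greater_than_5 = partial(flip(gt), 5)  # flip(gt)(5, x) == x > 5
assert is_greater_than_5(7) and not is_greater_than_5(3)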
        self.url = url
        self.count = count

    def __repr__(self):
        return "<Link %s (%d)>" % (self.url, self.count)

# create the table if it doesn't exist
engine = sql.create_engine('sqlite:///tweets.db')
Tweet.metadata.create_all(engine)
Link.metadata.create_all(engine)

# get the timeline and links in it
timeline = urllib.urlopen('http://www.twitter.com/statuses/public_timeline.json').read()
timeline = json.loads(timeline)

encode = partial(flip(encode), 'ascii')
find_urls = re.compile(r'http://[^\s]+[^\s\.]')

links = []
tweets = [Tweet(t['id']) for t in timeline]
for matches in [map(encode, find_urls.findall(t['text'])) for t in timeline]:
    if len(matches) > 0:
        tweet_links = []
        for url in matches:
            # find the real URL (after redirects)
            url = urllib.urlopen(url)
            url = url.geturl()
            tweet_links.append(Link(url, 1))
        links.append(tweet_links)
    else:
        links.append(None)
def filter_keys_c(func):
    """Curried filter_keys.

    filter_keys_c(f)(a_dict) = filter_keys(f, a_dict)
    """
    return partial(filter_keys, func)
import os

FLAGS = gflags.FLAGS
gflags.DEFINE_string('glob', '*.csv.bz2', 'Glob pattern to read from cache')
gflags.DEFINE_string('start', None, 'Start date formatted as year/month/day')
gflags.DEFINE_string('end', None,
                     'End date (inclusive) formatted as year/month/day')
gflags.DEFINE_multistring('filter', [], 'Add filters')
gflags.DEFINE_multistring('exclude', [], 'Add exclusions')
gflags.DEFINE_boolean(
    'details', True,
    'If set to False, only the id# of the start and ending stations will be provided')

compose_mult = partial(reduce, compose)

STATION_DETAILS = set(
    ('start station longitude', 'start station latitude',
     'end station longitude', 'end station latitude',
     'end station name', 'start station name'))


def int_or_none(value):
    try:
        return int(value)
    except:
        return None
def get_function(obj, name):
    if hasattr(obj, name):
        fnc = getattr(obj, name)
        if not isinstance(fnc, types.FunctionType):
            # we have an instance method
            fnc = partial(fnc, obj)
        return fnc
#!/usr/bin/env python3
import argparse
import sys
import functools

import wikipedia
import requests
from time import sleep
from urllib.parse import urlparse, urlunparse, unquote

from functional import compose, partial
from filecache import filecache

multi_compose = partial(functools.reduce, compose)


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        'urls',
        help='the file containing the list of url to transform. Default'
             ' to stdin if no file is given',
        nargs='?',
        type=argparse.FileType('r'),
        default=sys.stdin
    )
    parser.add_argument(
        '-i', '--inferfunc',
def checktype(t):
    return partial(flip(assert_that), instance_of(t))
def compose_all(*args):
    """Util for multiple function composition

    i.e. composed = compose_all([f, g, h])
         composed(x)  # == f(g(h(x)))
    """
    return partial(functools.reduce, compose)(*args)