def compile_file(f, name, c): base, ext = os.path.splitext(name) print 'read...' r = lisp_reader.reader(f) exp = r.read_all() if c.verbose: print '--- read ---' pp(exp) print 'transform...' t = transform.transformer(c) exp2 = t.go(exp) if c.verbose: print '--- transform ---' pp(exp2) w = nodes.walker(c) exp3 = w.go(exp2) print 'rename...' # alpha conversion c.var_dict = nodes.rename_variables(exp3, c) # find strongly connected components print 'call graph...' c.dep_graph = graph.build_dependency_graph(exp3) c.scc_graph, c.scc_map = graph.strongly(c.dep_graph) a = analyze.analyzer(c) exp4 = a.analyze(exp3) if c.verbose: print '--- analyzer ---' exp4.pprint() ic = byte_cps(c, verbose=c.verbose) exp5 = ic.go(exp4) if c.verbose: print '--- cps ---' cps.pretty_print(exp5) fo = open('%s.byc' % base, 'wb') num_regs = cps.the_register_allocator.max_reg b = compiler(fo, name, num_regs, c) b.go(exp5) fo.close()
def compile_file (f, name, c): base, ext = os.path.splitext (name) print 'read...' r = lisp_reader.reader (f) exp = r.read_all() if c.verbose: print '--- read ---' pp (exp) print 'transform...' t = transform.transformer (c) exp2 = t.go (exp) if c.verbose: print '--- transform ---' pp (exp2) w = nodes.walker (c) exp3 = w.go (exp2) print 'rename...' # alpha conversion c.var_dict = nodes.rename_variables (exp3, c) # find strongly connected components print 'call graph...' c.dep_graph = graph.build_dependency_graph (exp3) c.scc_graph, c.scc_map = graph.strongly (c.dep_graph) a = analyze.analyzer (c) exp4 = a.analyze (exp3) if c.verbose: print '--- analyzer ---' exp4.pprint() ic = byte_cps (c, verbose=c.verbose) exp5 = ic.go (exp4) if c.verbose: print '--- cps ---' cps.pretty_print (exp5) fo = open ('%s.byc' % base, 'wb') num_regs = cps.the_register_allocator.max_reg b = compiler (fo, name, num_regs, c) b.go (exp5) fo.close()
# save the data pickle.dump(data,open(args.output_file_name,"w")) if args.do_analysis: # get and configure the model import models model = getattr(models,model_name)(config=model_config) # iterate over rule data and analyze point-by-point saved_data_list = [] data = pickle.load(open(args.output_file_name)) for rule, rule_data in data.items(): logr.info(u"analyzing rule: {}".format(rule)) plotable_data = analyzer(rule_data,model,logr) # remove spaces in rule name rule_name = rule.replace(" ","-")[0:100] if args.do_plot: return_val = plotter(plotable_data,rule_name) # the plotter returns -1 if the counts data don't exist if return_val == -1: continue # save data if plotable_data != []: saved_data_list.append((rule_name,plotable_data)) def max_last_field_getter(tup): """ a function that acts on tuples like:
def analyze(self):
    """Run the SIFT analysis pipeline at 800x600, then tear this object down."""
    # Alternative feature backend, kept for reference:
    # worker = analyzer('BOW', 800, 600)
    worker = analyzer('SIFT', 800, 600)
    worker.createRawP()
    worker.processRaw()
    self.delete()
# Command-line driver: load a pickled generator of counts, run it through
# the configured model, and plot the result.
parser = argparse.ArgumentParser()
parser.add_argument("-i",dest="input_file_name",default="output.pkl")
parser.add_argument("-c",dest="config_file_name",default=None,help="get configuration from this file")
parser.add_argument("-t",dest="plot_title",default=None)
args = parser.parse_args()

plot_config = {}
if args.config_file_name is not None:
    # pull model + plot settings from the named INI-style config file
    config = ConfigParser.SafeConfigParser()
    config.read(args.config_file_name)
    model_name = config.get("analyze","model_name")
    model_config = dict(config.items(model_name + "_model"))
    if config.has_section("plot"):
        plot_config = dict(config.items("plot"))
    else:
        plot_config["plot_title"] = "output"
        plot_config["plot_dir"] = "."
else:
    # no config file: fall back to a Poisson model with default settings
    model_config = {"alpha":0.99,"mode":"lc"}
    model_name = "Poisson"
    plot_config["plot_title"] = "output"
    plot_config["plot_dir"] = "."

# a -t on the command line overrides any configured title
if args.plot_title is not None:
    plot_config["plot_title"] = args.plot_title

model = getattr(models,model_name)(config=model_config)
# FIX: close the input file instead of leaking the handle from
# pickle.load(open(...)).
# NOTE(review): pickle.load on untrusted input is unsafe — confirm the
# input file is always locally produced.
with open(args.input_file_name) as pkl_file:
    generator = pickle.load(pkl_file)
plotable_data = analyzer(generator,model,logr)
plot(plotable_data,plot_config)
# NOTE(review): this chunk begins mid-way through a parser.add_argument("-c", ...)
# call whose opening line is outside this view; the fragment below is its tail.
                    dest="config_file_name",
                    default=None,
                    help="get configuration from this file")
parser.add_argument("-t",
                    dest="plot_title",
                    default=None)
args = parser.parse_args()

plot_config = {}
if args.config_file_name is not None:
    # model + plot settings come from an INI-style config file
    config = ConfigParser.SafeConfigParser()
    config.read(args.config_file_name)
    model_name = config.get("analyze", "model_name")
    model_config = dict(config.items(model_name + "_model"))
    if config.has_section("plot"):
        plot_config = dict(config.items("plot"))
    else:
        plot_config["plot_title"] = "output"
        plot_config["plot_dir"] = "."
else:
    # no config file: default to a Poisson model
    model_config = {"alpha": 0.99, "mode": "lc"}
    model_name = "Poisson"
    plot_config["plot_title"] = "output"
    plot_config["plot_dir"] = "."

# a -t on the command line overrides any configured title
if args.plot_title is not None:
    plot_config["plot_title"] = args.plot_title

model = getattr(models, model_name)(config=model_config)
# NOTE(review): file handle from open() is never closed — relies on GC
generator = pickle.load(open(args.input_file_name))
plotable_data = analyzer(generator, model, logr)
plot(plotable_data, plot_config)
def my_handler(event, context):
    """GroupMe bot entry point: dispatch on keywords in a posted message.

    event   -- GroupMe callback payload; this code reads 'source_guid',
               'text', and 'attachments' from it
    context -- Lambda-style context object (unused here)

    Checks each message posted in the group for the bot's trigger words
    and calls the matching helper.  Messages the bot itself posted carry
    "botpost" in their source_guid and are ignored to avoid infinite loops.
    """
    # check that it's not posted by itself to avoid infinite loops
    if "botpost" not in event['source_guid']:
        # if event['sender_id'] != "45530059":
        # putting the input text to lowercase for better matches
        input_text = event['text'].lower()
        # handle attachment URLs for images
        # the value will default to 'null' if a URL attachment is not found
        # true JSON path to what we want: [event][attachments][url]
        if event['attachments']:
            for url in event['attachments']:
                input_attachment = url.get('url', 'null')
        else:
            input_attachment = 'null'
        # vision & photoboi combo handler
        if ("@photo" in input_text) and ("@analyze" in input_text):
            # gets the image URL
            url_val = photoboi.photoboi()
            # passes the value to the vision API & gets back JSON
            json_content = analyze.visionAPI(url_val)
            # parses the JSON into a usable format
            list_a, list_b, list_c, list_d, list_e, list_f = analyze.parse_vision_JSON(json_content)
            # formats the message text into a block of text
            msg_val = analyze.vision_list_handler(list_a, list_b, list_c, list_d, list_e, list_f)
            # concatenate the pic URL and the vision text
            url_plus_vision_text = url_val + "\n\n" + msg_val
            post_text(url_plus_vision_text)
        # photoboi handler
        elif "@photo" in input_text:
            # gets the image URL and posts it
            pic_url = photoboi.photoboi()
            post_text(pic_url)
        # analyze handler
        elif "@analyze" in input_text:
            # send both vals bc the pic could be either place
            url_val, success = analyze.analyzer(input_attachment, input_text)
            # if it found a valid URL, proceed
            if success:
                json_content = analyze.visionAPI(url_val)
                list_a, list_b, list_c, list_d, list_e, list_f = analyze.parse_vision_JSON(json_content)
                msg_val = analyze.vision_list_handler(list_a, list_b, list_c, list_d, list_e, list_f)
                post_text(msg_val)
            else:
                post_text(url_val)
        # videoboi handler
        elif "@video" in input_text:
            # gets the video URL and posts it
            vid_url = videoboi.videoboi()
            post_text(vid_url)
        # odds handler
        elif "@odds" in input_text:
            # generate random number and post it
            rand_num = random.randint(0, 100)
            post_text(str(rand_num) + "%")
        elif "@crypto" in input_text:
            crypto_price = crypto.get_crypto(input_text)
            post_text(crypto_price)
        elif "@news" in input_text:
            # FIX: the original assigned to a local named `news`, which
            # shadowed the `news` module for the whole function and made
            # `news.get_news(...)` raise UnboundLocalError on this branch.
            news_text = news.get_news(input_text)
            post_text(news_text)
        elif "@help" in input_text:
            intro = "I recognize the following:\n"
            options = """@photo will grab a random photo from the history \n@video will grab a random vid from the history \n@analyze (attach pic or paste URL) \n@odds \n@crypto (+ help for more info) along with the coin name \n@news \n$[stock_name] \n@help"""
            msg = intro + options
            post_text(msg)
        elif "$" in input_text:
            # create a local file with the PNG chart
            local_img_path = stonks.get_stock(input_text)
            # send the file to the groupme image upload service
            img_url = post_image(local_img_path)
            # post the message to the chat and print the status code
            print(post_img_attachment(img_url))
        elif "@pandemic" in input_text:
            # initiate boot sequence
            print("## INPUT DATA")
            print(event)
            print(booter.boot_sequence(event))
# save the data pickle.dump(data, open(args.output_file_name, "w")) if args.do_analysis: # get and configure the model import models model = getattr(models, model_name)(config=model_config) # iterate over rule data and analyze point-by-point saved_data_list = [] data = pickle.load(open(args.output_file_name)) for rule, rule_data in data.items(): logr.info(u"analyzing rule: {}".format(rule)) plotable_data = analyzer(rule_data, model, logr) # remove spaces in rule name rule_name = rule.replace(" ", "-")[0:100] if args.do_plot: return_val = plotter(plotable_data, rule_name) # the plotter returns -1 if the counts data don't exist if return_val == -1: continue # save data if plotable_data != []: saved_data_list.append((rule_name, plotable_data)) def max_last_field_getter(tup): """ a function that acts on tuples like: