def _company_research(): #TODO - check if api key is valid and increment request count #TODO - add name if name is present company_name = remove_accents(request.args['company_name']) #api_key = request.args['api_key'] api_key = "9a31a1defcdc87a618e12970435fd44741d7b88794f7396cbec486b8" qry = {'where':json.dumps({'company_name':company_name})} company = Parse().get('Company', qry).json()['results'] name = "" print company if company: q.enqueue(Webhook()._update_company_info, company[0], api_key, name) return company[0] else: q.enqueue(Companies()._research, company_name, api_key, name) return {'Research has started.': True}
def trapecio(a, b, fn, n):
    """Approximate the integral of `fn` over [a, b] with the composite
    trapezoid rule using n panels of width h = (b - a) / n."""
    h = (b - a) / n
    evaluator = Parse()
    evaluator.setEc(fn)
    # Accumulate the interior sample points f(a + h), f(a + 2h), ...
    interior = 0
    x = a + h
    while x < b:
        evaluator.addVariable("x", x)
        interior += evaluator.evaluate()
        x += h
    # Endpoint values f(a) and f(b).
    evaluator.addVariable("x", a)
    fa = evaluator.evaluate()
    evaluator.addVariable("x", b)
    fb = evaluator.evaluate()
    # Composite trapezoid formula: h * ((f(a) + f(b)) / 2 + interior sum).
    return h * (((fa + fb) / 2) + interior)
def parse_func(self):
    """Parse the user's input expression and refresh the derived labels.

    Shows an error dialog and aborts if either the parse or the Riemann
    integral evaluation fails.
    """
    try:
        self.parse = Parse(self.input.get())
    except Exception as e:
        messagebox.showerror("Error", "Somethings gone wrong, please check your input again")
        print(str(e))
        return
    self.lbl_function.set(self.parse.str_function)
    self.lbl_derivative.set(self.parse.str_function_da)
    self.lbl_derivative_dq.set(self.parse.str_function_dq)
    try:
        # Bounds come from user-entered text, so float() may raise too.
        left = float(self.left_bound.get())
        right = float(self.right_bound.get())
        self.lbl_riemann.set(self.parse.getRiemannIntegrals(left, right))
    except Exception as e:
        messagebox.showerror("Error", "Somethings gone wrong, please check your input again")
        print(str(e))
        return
def fetch_stocks(self, params): ''' if params==all fetch all stocks get_all_categories''' filter = Filter() parser = Parse() stocklist = [] if params=='all': cats = filter.get_all_categories() for cat in cats: params = [('sc', cat)] try: stocklist.extend(self.fetch_stocks(params)) except Exception, e: print cat print e #print stocklist print 'exited prematurely' exit()
def get_answer(question, relevant):
    """Extract an answer phrase from the parse tree of `relevant`.

    Prefers a leading PP, then any PP/SBAR inside a VP, skipping phrases
    already present in the question. Falls back to the whole sentence.
    """
    parsed = Parse().parse(relevant)
    tree = Tree.fromstring(parsed)
    for i, node in enumerate(tree[0]):
        # A sentence-initial PP that the question doesn't contain is the answer.
        if i == 0 and node.label() == "PP":
            phrase = " ".join(node.leaves())
            if phrase.lower() not in question.lower():
                answer = phrase + "."
                return answer[0].upper() + answer[1:]
        if node.label() == "VP":
            for sub_node in node:
                if sub_node.label() not in ("PP", "SBAR"):
                    continue
                phrase = " ".join(sub_node.leaves())
                if phrase.lower() in question.lower():
                    continue
                answer = phrase + "."
                return answer[0].upper() + answer[1:]
    return relevant
def puntofijo(xn, fn, error):
    """Fixed-point iteration on g(x) = fn(x) + x starting from xn.

    Iterates at most 100 times or until the step size drops below `error`.
    Returns the final estimate and a history of [value, step_error] pairs.
    """
    max_iterations = 100
    history = [[xn, 0]]
    xr = xn
    evaluator = Parse()
    # Solve f(x) = 0 as the fixed point of g(x) = f(x) + x.
    evaluator.setEc(fn + "+x")
    for _ in range(max_iterations):
        previous = xr
        evaluator.addVariable("x", xr)
        xr = evaluator.evaluate()
        print("xr", xr)
        step_error = abs(previous - xr)
        history.append([xr, step_error])
        if step_error < error:
            break
    return xr, history
def email_research(): #TODO - add name support website = request.args['domain'] domain = "{}.{}".format(tldextract.extract(website).domain, tldextract.extract(website).tld) name = request.args['name'] if "name" in request.args.keys() else "" pattern = Parse().get('EmailPattern', {'domain':domain}).json() try: pattern = pattern['results'] except: print pattern api_key = "9a31a1defcdc87a618e12970435fd44741d7b88794f7396cbec486b8" if pattern: pattern = {'domain':domain, 'company_email_pattern': pattern[0]['company_email_pattern']} q.enqueue(Webhook()._update_company_email_pattern, pattern) #Webhook()._post(api_key, pattern, 'email_pattern') return pattern else: q.enqueue(EmailGuess().search_sources, domain, name, api_key, timeout=6000) return {'started': True}
def __init__(self, args):
    """Wire up console logging and the processing toolchain, then defer
    to the base class constructor."""
    # Console logger: DEBUG level with a timestamped format.
    self.logger = logging.getLogger(__name__)
    self.logger.setLevel(logging.DEBUG)
    console = logging.StreamHandler()
    console.setLevel(logging.DEBUG)
    console.setFormatter(logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s'))
    self.logger.addHandler(console)
    self.logger.debug("Starting Collector process in %s" % os.getcwd())
    self.logger.debug("Gevent Version %s" % gevent.__version__)
    #TODO: move output file name to config
    #fname = "./NetFlow.%s.bin"%str(time.time()*100000)
    #WARN: might want to remove this after testing
    #self.out = open(fname,"wb")
    # One instance of each pipeline tool.
    self.interface = Interface()
    self.parse = Parse()
    self.describe = Describe()
    self.standardize = Standardize()
    self.transform = Transform()
    self.partition = Partition()
    self.q = Queue()
    self.inWindow = False
    self.score = Score()
    #TODO: move csv name to config
    self.csv = CSV("output.csv")
    return super(Collector, self).__init__(args)
def add(cls, data):
    '''
    Parse a raw log message, run it through the filters, and persist it.

    :param data: str
    :returns: boolean
    '''
    cls.check_for_heartbeat_request(data)
    parsed = Parse(data)
    if not parsed:
        logging.warning('Unable to parse log message: {}'.format(data))
        return False
    matched = Filters_checker.check(filters=Filters.get(), parsed=parsed)
    record = Store(parsed=parsed, matched_filters=matched)
    if not record.save():
        return False
    return True
def main():
    """Interactive natural-language-to-SQL prompt loop.

    Reads commands until the quit keyword, translates each to SQL via the
    grammar parser, executes it, and prints the results. An optional CLI
    argument starting with the limit keyword restricts result-set size.
    """
    constants = Constants()
    database = Database()
    parse = Parse()
    grammar_config = Grammarconfig()
    user_command = ""
    query_suffix = ''
    # Bug fix: guard the argv access -- the original `sys.argv[1][0:5]`
    # raised IndexError when the script was started with no arguments.
    if len(sys.argv) > 1 and sys.argv[1][0:5] == constants.limit:
        query_suffix = ' ' + sys.argv[1]
    # First time search tips
    print(ct.Fore.MAGENTA + ct.Formatting.BOLD + constants.search_tips_001 + ct.Formatting.RESET_ALL)
    print(ct.Fore.MAGENTA + ct.Formatting.BOLD + constants.search_tips_002 + ct.Formatting.RESET_ALL)
    while user_command != constants.quit:
        print("")
        user_command = input(ct.Fore.MAGENTA + ct.Formatting.BOLD + constants.prompt + ct.Formatting.RESET_ALL)
        logging.info("User command: {}".format(user_command))
        if user_command == constants.quit:
            break
        query = parse.nl_command(user_command, grammar_config)
        if query == constants.failcode:
            print(ct.Fore.RED + ct.Formatting.BOLD + constants.dont_understand + ct.Formatting.RESET_ALL)
            logging.info("Query parsing failed")
            continue
        if constants.limit not in query:
            # limit result set if set in runtime & not already restricted in query
            query = query + query_suffix
        try:
            database.execute_query(query)
        except mysql.connector.Error as err:
            logging.info("SQL query syntax error: {}".format(err))
        else:
            database.output_results(database.cursor)
    database.cursor_close()
    database.connection_close()
    print(ct.Fore.MAGENTA + ct.Formatting.BOLD + constants.goodbye + ct.Formatting.RESET_ALL)
def __main__():
    """Fetch, parse, clean, and publish one day of NJ municipal data.

    URL and date default to the latest Link() values when not supplied
    as CLI arguments.
    """
    nj_municipals = json.load(open('./json/nj_municipals.json'))
    counties = list(nj_municipals.keys())
    if len(sys.argv) == 1:
        url, date = Link()
    elif len(sys.argv) == 2:
        # Only the URL was given; still ask Link() for the date.
        _, date = Link()
        url = sys.argv[1]
    else:
        url, date = sys.argv[1], sys.argv[2]
    print(url)
    print(date)
    data = Parse(url, counties)
    total_df = Clean(csv_file, data, date, nj_municipals)
    Update(total_df, csv_file)
    Today(total_df, date, counties, json_file)
def simpson(a, b, fn, n):
    """Approximate the integral of `fn` over [a, b] with composite Simpson's
    rule using 2n panels of width h = (b - a) / (2n)."""
    h = (b - a) / (2 * n)
    evaluator = Parse()
    evaluator.setEc(fn)
    # Interior samples start at x = a + h, so enumerate-index 0 is the first
    # odd-numbered sample point and therefore takes Simpson coefficient 4.
    weight4_sum = 0  # originally 'imparSum'
    weight2_sum = 0  # originally 'parSum'
    samples = np.arange(a + h, b, h)
    for index, x in enumerate(samples):
        evaluator.addVariable("x", x)
        if index % 2 == 0:
            weight4_sum += evaluator.evaluate()
        else:
            weight2_sum += evaluator.evaluate()
    evaluator.addVariable("x", a)
    fa = evaluator.evaluate()
    evaluator.addVariable("x", b)
    fb = evaluator.evaluate()
    return (h / 3) * (fa + fb + (2 * weight2_sum) + (4 * weight4_sum))
def get_label(self, question):
    """Classify a question as who/what/why/where/when/other/binary by
    inspecting the WH-word subtrees of its parse."""
    tree = Tree.fromstring(str(Parse().parse(question)))
    for subtree in tree.subtrees(
            lambda t: t.label() == "WP" or t.label() == "WRB"):
        leaves = subtree.leaves()
        if "who" in leaves or "Who" in leaves:
            return "who"
        if "what" in leaves or "What" in leaves:
            return "what"
        if "why" in leaves or "Why" in leaves:
            return "why"
        if "where" in leaves or "Where" in leaves:
            return "where"
        # Mirrors the original control flow exactly: the FIRST WP/WRB subtree
        # always decides -- "when" if present, otherwise "other".
        if "when" in leaves or "When" in leaves:
            return "when"
        return "other"
    # Possessive WH ("whose") questions are labelled "other".
    for _ in tree.subtrees(lambda t: t.label() == "WP$"):
        return "other"
    # No WH word at all: assume a yes/no question.
    return "binary"
def inbound_parse():
    """Process POST from Inbound Parse and save received data."""
    parse = Parse(config, request)
    # Sample proccessing action
    updata = parse.key_values()
    payload = json.dumps(updata)
    recipient = updata['to']
    # Route the payload to the matching mailbox file.
    if "kmahyyg" in recipient:
        YYGFile.write(payload)
        YYGFile.close()
    elif "ecswvern" in recipient:
        ECSFile.write(payload)
        ECSFile.close()
    else:
        SpamFile.write(payload)
        SpamFile.close()
    # Tell SendGrid's Inbound Parse to stop sending POSTs
    # Everything is 200 OK :)
    # Now send the corresponding file to each chat id
    return jsonify({'status': 200, 'bmsg': 'OK'})
def run(file, count, minsylls, maxsylls):
    """
    Actually runs the generator.

    Note that all parameters are supplied by the CLI through click.
    Exits with status 2 when the parameters are inconsistent or out of range.
    """
    # Perform error checking
    if minsylls > maxsylls:
        click.echo("ERROR: minsylls cannot be greater than maxsylls")
        sys.exit(2)
    elif minsylls < 1:
        # Bug fix: the message previously said "must be greater than 1",
        # but the check only rejects values below 1.
        click.echo("ERROR: minsylls must be at least 1")
        sys.exit(2)
    elif count < 1:
        # Bug fix: same wording correction as above.
        click.echo("ERROR: count must be at least 1")
        sys.exit(2)
    global CATEGORIES, SYLLABLES
    parser = Parse(file)
    CATEGORIES = parser.categories
    SYLLABLES = parser.syllables
    print_words(count, minsylls, maxsylls)
def main():
    """Assemble a Hack .asm source file into a .hack binary file.

    Performs a symbol pass, a variable pass, then code generation, writing
    one binary instruction per line to the output file.
    """
    if sys.argv[1][-3:] != "asm":
        # Bug fix: error message previously said ".ams" instead of ".asm".
        print("Error: wrong file type for input, use \".asm\" file !")
        sys.exit()
    inputfile = sys.argv[1]
    #inputfile = "Max.asm"
    outputfile = inputfile[:-3] + "hack"
    #outputfile = "Max.hack"
    par = Parse(inputfile)
    code = CodeTrans()
    symbols = SymbolTable()
    symbol_search(par, symbols)
    variable_search(par, symbols)
    binary_list = assembler(par, symbols, code)
    with open(outputfile, "w") as of:
        for bcode in binary_list:
            of.write(bcode + "\n")
def createpptx(mdfile, pptxfile, template):
    """Build a .pptx from a markdown file using the given template.

    Layout 0 of the template is used for the title slide and layout 1 for
    every content slide; any slides shipped with the template are removed.
    """
    parse = Parse()
    parse.read(mdfile)
    title_name, top_subtitle, allslides = parse.get_title()
    prs = Presentation(template)
    # Strip the slides that ship with the template.
    xml_slides = prs.slides._sldIdLst
    removeslides = list(xml_slides)
    for slide in removeslides:
        xml_slides.remove(slide)
    # Title slide.
    slide = prs.slides.add_slide(prs.slide_layouts[0])
    try:
        title = slide.shapes.title
        title.text = title_name
        subtitle = slide.placeholders[1]
        subtitle.text = top_subtitle
    except Exception:
        # Bug fix: was a bare `except:`, which would also swallow
        # SystemExit / KeyboardInterrupt.
        print("No Title in template file")
        exit()
    # One content slide per parsed page.
    for slides in allslides:
        page = Page()
        page.split_elem(slides)
        sub_layout = prs.slide_layouts[1]
        slide = prs.slides.add_slide(sub_layout)
        shapes = slide.shapes
        title_shape = shapes.title
        body_shape = shapes.placeholders[1]
        title_shape.text = page.title
        tf = body_shape.text_frame
        tf.text = page.text
    prs.save(pptxfile)
def pinmuxgen(pth=None, verify=True):
    """ populating the file with the code """
    p = Parse(pth, verify)
    iocells = Interfaces()
    iocells.ifaceadd('io', p.N_IO, io_interface, 0)
    ifaces = Interfaces(pth)
    #ifaces.ifaceadd('io', p.N_IO, io_interface, 0)
    init(p, ifaces)
    # Output directories: bsv_src/ (under pth when given) and bsv_src/bsv_lib/.
    bp = os.path.join(pth, 'bsv_src') if pth else 'bsv_src'
    if not os.path.exists(bp):
        os.makedirs(bp)
    bl = os.path.join(bp, 'bsv_lib')
    if not os.path.exists(bl):
        os.makedirs(bl)
    cwd = os.path.split(__file__)[0]
    # copy over template and library files
    shutil.copyfile(os.path.join(cwd, 'Makefile.template'),
                    os.path.join(bp, 'Makefile'))
    cwd = os.path.join(cwd, 'bsv_lib')
    for fname in ('AXI4_Lite_Types.bsv', 'Semi_FIFOF.bsv'):
        shutil.copyfile(os.path.join(cwd, fname), os.path.join(bl, fname))
    # Generate the BSV sources.
    bus = os.path.join(bp, 'busenable.bsv')
    pmp = os.path.join(bp, 'pinmux.bsv')
    ptp = os.path.join(bp, 'PinTop.bsv')
    bvp = os.path.join(bp, 'bus.bsv')
    write_pmp(pmp, p, ifaces, iocells)
    write_ptp(ptp, p, ifaces)
    write_bvp(bvp, p, ifaces)
    write_bus(bus, p, ifaces)
def jacobiana(LFn, Lx, err):
    """Numerically approximate the Jacobian of the system LFn at point Lx
    using central differences with step h = err / 10.

    Returns a list of rows, one per function in LFn.
    """
    evaluator = Parse()
    jacobian = []
    h = err / 10
    for i in range(len(LFn)):
        row = []
        for j in range(len(Lx)):
            # Evaluate F_i with the j-th variable shifted forward by h ...
            shifted = list(Lx)
            shifted[j] = Lx[j] + h
            evaluator.setEc(LFn[i])
            evaluator.addVarFromList(shifted)
            forward = evaluator.evaluate()
            # ... and shifted backward by h.
            shifted = list(Lx)
            shifted[j] = Lx[j] - h
            evaluator.addVarFromList(shifted)
            backward = evaluator.evaluate()
            # NOTE(review): the denominator scales 2h by 100, matching the
            # original code; a plain central difference would divide by 2h.
            # Preserved as-is -- confirm the intended scaling with the author.
            row.append((forward - backward) / (2 * (h * 100)))
        jacobian.append(row)
    return jacobian
def write_vm1(inputfile, in_filename, ctabel, cw):
    """Translate one parsed VM file, emitting assembly through code writer `cw`."""
    par = Parse(inputfile, ctabel)
    cw.setInputname(in_filename)
    # Dispatch tables keyed by command type: label-style commands take one
    # string argument; function/call commands take a string and an int.
    one_arg = {
        "C_LABEL": cw.writeLabel,
        "C_IF": cw.writeIf,
        "C_GOTO": cw.writeGoto,
    }
    two_arg = {
        "C_FUNCTION": cw.writeFunction,
        "C_CALL": cw.writeCall,
    }
    while par.hasMoreCommands():
        par.advance()
        ctype = par.cmdType()
        if ctype == "C_ARITHMETIC":
            cw.writeArithmetic(par.arg1())
        elif ctype in ("C_PUSH", "C_POP"):
            cw.writePushPop(ctype, par.arg1(), int(par.arg2()))
        elif ctype == "C_RETURN":
            cw.writeReturn()
        elif ctype in one_arg:
            one_arg[ctype](par.arg1())
        elif ctype in two_arg:
            two_arg[ctype](par.arg1(), int(par.arg2()))
def __init__(self, baud=None, port=None, delay=None):
    """Open the modem's serial port and register for SBD automatic alerts.

    :param baud: serial baud rate; falls back to Config "modem" settings
    :param port: serial port name; falls back to Config "modem" settings
    :param delay: stored as-is on the instance (usage defined elsewhere)
    """
    if baud is None:
        baud = Config.get("modem")["baud"]
    if port is None:
        port = Config.get("modem")["port"]
    self.baud = baud
    self.port = port
    self.serialPort = serial.Serial(baudrate=baud, port=port)
    self.data = ""  # buffer for incoming serial data
    self.delay = delay
    # Bookkeeping timestamps/counters for SBD sessions.
    self.sbdring_time = 0
    self.sbdix_time = 0
    self.retry_count = 0
    # Send two commands simultaneously
    self.send_command("AT+SBDAREG=1;+SBDMTA=1;+SBDD2")
    self.ready = False
    self.Parser = Parse()
def _daily_collect(self, profile_id):
    """Build today's SignalReport for a prospect profile from its Twitter signal."""
    profile = Parse().get("ProspectProfile/" + profile_id,
                          {"include": "profiles"})
    # The prospect's Twitter press_id drives the tweet query.
    _signal = [i["press_id"] for i in profile.json()["profiles"]
               if i["className"] == "TwitterProfile"]
    d1, d2 = Helper()._timestamp()
    qry = {"signal": _signal[0], "timestamp": {"$gte": d1, "$lte": d2}}
    press = Parse().get(
        "Tweet", {
            "limit": 1000,
            "skip": 0,
            "count": True,
            "where": json.dumps(qry),
            "order": "-timestamp"
        }).json()["results"]
    profile = profile.json()
    # Create the report record and keep a pointer to it for the batch rows.
    report = {
        "user": profile["user"],
        "user_company": profile["user_company"],
        "profile": Parse()._pointer("ProspectProfile", profile["objectId"]),
    }
    _report = Parse().create("SignalReport", report).json()["objectId"]
    _report = Parse()._pointer("SignalReport", _report)
    cos = pd.DataFrame(press)
    if cos.empty:
        return
    # One row per named company, each tagged with the report pointer.
    cos = cos[cos.company_name.notnull()].drop_duplicates("company_name")
    cos["report"] = [_report] * len(cos.index)
    Parse()._batch_df_create("PeopleSignal", cos)
    # TODO - Queue ProspectTitle Search if present
    q.enqueue(PeopleSignal()._check_for_people_signal, cos, profile, _report)
# print answer # print "^^^" return answer if node.label() == "VP": for sub_node in node: if (sub_node.label() == "PP" or sub_node.label() == "SBAR") and " ".join(sub_node.leaves()).lower() not in question.lower(): answer = " ".join(sub_node.leaves()) + "." answer = answer[0].upper() + answer[1:] # print answer return answer return relevant WH = WH() P = Parse() # test_q = 'when did Clint Dempsey score against Ghana 29 seconds into the group play match ?' # test_relevant = 'On June 16, Clint Dempsey scored against Ghana 29 seconds into the group play match.' # test = When_answer() # print test_q # print test_relevant # test.get_answer(test_q, test_relevant) # test2 = Where_answer() # test_q2 = "where did he obtain his master degree" # test_relevant2 = "In England he obtained his master degree." # test_relevant3 = "the picture is above the wall" # test_q3 = "where is the picture?" # print test_q2 # print test_relevant2 # test2.get_answer(test_q2, test_relevant2)
import logging

from parse import Parse

if __name__ == '__main__':
    # Log everything at DEBUG and above to the dp_more log file.
    log_format = "%(asctime)s[%(levelname)s][%(filename)s.%(funcName)s]%(message)s"
    logging.basicConfig(
        level="DEBUG",
        filename="/var/log/dp_more.log",
        format=log_format)
    Parse().parse_all_info()
import threading

# Application imports
sys.path.append("../modules")
from config import Config
from modem import Modem
from logger import log
from queue import Queue
from parse import Parse

# Global variables
QDIR = "moqueue"

# Instantiate objects
App = Modem()
Parser = Parse()
Q = Queue(QDIR)


def _callback(data):
    """Fired when an SBD message comes in; queue the configured response
    whenever the configured match string appears in the payload."""
    print("Callback: %s" % data)
    message_response = Config.get("respond")["response"]
    string_to_match = Config.get("respond")["match"]
    if string_to_match in data:
        Q.add(message_response)


# Remember the queue directory's modification time as the baseline.
old = os.stat(QDIR).st_mtime
log.debug("Old time: %s" % repr(old))
"$": Value("builtin", lambda x, y, env: env_lookup(env, y.value[1:])), "->": Value("special", fn_), "=>": Value("special", fn_early), "fn": Value("builtin", fn), "?=": Value("builtin", lambda x, y, env: match_(x, y, env)), ":=": Value("builtin", lambda x, y, env: assign_(x, y, env)), "::=": Value("builtin", lambda x, y, env: assign_(x, y, env[0])), "ENV": Value("builtin", lambda x, y, env: Value("env", env[1])), # help "pr": Value("builtin", print_), }) ENV = (ENV0, {}) # test fns ENV[1]["foo"] = Value("fn", Fn(ENV, "a", "b", Parse(r"a+b+b"))) ENV[1]["map_"] = Value("fn", Fn(ENV, "x", "y", Parse(r"\f; \col ?= y | \a(col)\b ?= x ? (a map_(f; col)) col (b map_(f; col)) :| x f()"))) ENV[1]["map"] = Value("fn", Fn(ENV, "x", "f", Parse(r"x is_cons() ? x map_(f; x color.)"))) def env_lookup(env, key): while True: parent, env_dict = env if key in env_dict: return env_dict[key] if not parent: raise Exception(f"Can't env lookup: {key}") env = parent
if __name__ == '__main__': time1 = time.time() parser = argparse.ArgumentParser() parser.add_argument( '-u', "--url", type=str, default= 'https://www.qcsanbao.cn/webqcba/DVMProducerServlet?method=getWhereList&p=1', help="要爬取的网站") args = parser.parse_args() url = args.url base_url = configs["basic_url"] r = get_redis_connect() dl = Download() par = Parse() # 制作列表页的url_list make_url_list( base_url, par.parse_main_page_get_total_pagenum( dl.download_first_page(url, logger), configs["test"])) threading_list = [] # 列表页的解析详情页的数据url,存放在redis中,并且下载列表页html threading_list.extend([ Thread(target=download_and_parse_page, args=("url_list", r, par.parse_main_page_get_detail_page_url, dl.download_first_page, dl.download_list_page_html, lock, logger)) for _ in range(configs["thread_num"])
help='Log file name for tegrastats data') parser.add_argument( '--verbose', '-v', action='store_true', help='Prints verbose messages while running tegrastats') parser.add_argument( '--only_parse', '-p', action='store_true', help='Parse tegrastats log file without running tegrastats') parser.add_argument( '--graph', '-g', action='store_true', help='Plots some useful graphs from tegrastats data parsed') options = parser.parse_args() tegrastats = Tegrastats(options.interval, options.log_file, options.verbose) parser = Parse(options.interval, options.log_file) if not options.only_parse: status = tegrastats.run() csv_file = parser.parse_file() if options.graph: graph = Graph(csv_file) graph.plots()
def __init__(self, file_name):
    """Set up the translation helpers for the given source file.

    :param file_name: path handed to the Parse reader
    """
    # Code() and Parse are project-defined helpers -- presumably a
    # mnemonic-to-binary translator and a source-file parser; confirm
    # against their definitions.
    self.code = Code()
    self.parser = Parse(file_name)
from nltk.tree import Tree as Tree from parse import Parse from pattern.en import conjugate from pattern.en import tenses sNLP = Parse() BE_VB_LIST = [ "is", "was", "are", "am", "were", "will", "would", "could", "might", "may", "should", "can" ] DO_DID_DOES = ["do", "did", "does"] VB_LIST = ["VBZ", "VBP", "VBD"] class Binary: def convert(self, text, tree): parse_by_structure = [] NEG = 0 NP = 0 VP = 0 for t in tree[0]: if t.label() == "VP": VP = 1 if t.label() == "NP": NP = 1 if t.label() != "VP": parse_by_structure += (t.leaves()) else: for tt in t: if tt.label() != "RB":