def test_lazy(self):
    """A lazily self-referencing parser accepts zero or more 'hoge' repetitions."""
    parser = option(seq(token("hoge"), lazy(lambda: parser)))
    cases = [
        (("", 0), [True, None, 0]),
        (("hoge", 0), [True, ["hoge", None], 4]),
        (("hogehoge", 0), [True, ["hoge", ["hoge", None]], 8]),
        (("hoge", 1), [True, None, 1]),
        (("hogeh", 0), [True, ["hoge", None], 4]),
    ]
    for (text, position), expected in cases:
        self.assertEqual(parser(text, position), expected)
def test_seq(self):
    """seq() requires the first parser, then runs the second.  /hoge(hoge)*/ = /(hoge)+/"""
    parser = seq(token("hoge"), many(token("hoge")))
    source = "hogehoge"
    self.assertEqual(parser(source, 0), [True, ["hoge", ["hoge"]], 8])
    self.assertEqual(parser(source, 4), [True, ["hoge", []], 8])
    # First element missing entirely -> overall failure at the start position.
    self.assertEqual(parser("abc", 0), [False, None, 0])
def test_many(self):
    """many() collects zero or more matches and never fails.  /(hoge)*/"""
    parser = many(token("hoge"))
    source = "hogehoge"
    for position, expected in [
        (0, [True, ["hoge", "hoge"], 8]),
        (4, [True, ["hoge"], 8]),
        (1, [True, [], 1]),  # no match at all still succeeds with an empty list
    ]:
        self.assertEqual(parser(source, position), expected)
def test_choice(self):
    """choice() tries alternatives in order, here inside many().  /(hoge|fuga)*/"""
    parser = many(choice(token("hoge"), token("fuga")))
    source = "hogefugahoge"
    for position, expected in [
        (0, [True, ["hoge", "fuga", "hoge"], 12]),
        (4, [True, ["fuga", "hoge"], 12]),
        (8, [True, ["hoge"], 12]),
    ]:
        self.assertEqual(parser(source, position), expected)
    # Neither alternative matches: many() still succeeds with no results.
    self.assertEqual(parser("a", 0), [True, [], 0])
def test_bracket(self):
    """brackets(word) parses a parenthesised word, returning (value, rest).

    Whitespace around the brackets and the word is tolerated; anything
    other than a single word inside the brackets fails (returns None).
    """
    parser = brackets(word)
    # BUG FIX: the original checks were written as `assert (expr, '')`,
    # which asserts a two-element tuple -- always truthy -- so none of
    # these tests ever actually ran.  Each assertion now compares the
    # parse result against the expected (value, remainder) pair.
    assert parser('(a)') == ('a', '')
    assert parser(' ( ab )') == ('ab', '')
    assert parser(' ( a b )') is None
    assert parser('()') is None
    # Brackets nest.
    assert brackets(parser)('( (a) )') == ('a', '')
def test_token(self):
    """token("hoge") matches only where the literal occurs at the position.  /hoge/"""
    parser = token("hoge")
    self.assertEqual(parser("hoge", 0), [True, "hoge", 4])
    self.assertEqual(parser("hoge", 1), [False, None, 1])
    self.assertEqual(parser("ahoge", 0), [False, None, 0])
    self.assertEqual(parser("ahoge", 1), [True, "hoge", 5])
def run(self): os.system("cls" if os.name == "nt" else "clear") print "Acerque la targeta de la universidad al lector de la derecha." nfc = NFC.NFC() # Bucle hasta la lectura de una targeta while True: response = nfc.read(sys.argv[2]) if response != None: break parser(response) # Inicializacion del entorno grafico menu = menu2()
def main(argv):
    """Reconcile two CSV files and log the differences to an output file.

    argv: [program, CSV1, CSV2, OUTFILE]
    """
    # BUG FIX: the original checked `len(argv) < 3` (OUTFILE requires 4
    # entries) and then fell straight through after printing usage,
    # crashing with IndexError; exit explicitly instead.
    if len(argv) < 4:
        print('Usage: python3 pyrecon.py CSV1 CSV2 OUTFILE')
        sys.exit(1)
    # BUG FIX: consistently use the `argv` parameter instead of reaching
    # back into sys.argv (the two differ when main() is called directly).
    a_parser = parser(argv[1])
    b_parser = parser(argv[2])
    a = a_parser.parse()
    b = b_parser.parse()
    comparison = compdict(a, b, argv[3])
    comparison.compare()
    print("Reconciliation logged to %s." % (argv[3],))
def main():
    """Launch the julius speech engine on a plugin's grammar and stream its output.

    Parses -p/--path from the command line, builds the julius command with
    the plugin's .dfa/.dict files, and pipes julius stdout through parser().
    Python 2 code (print statements, string-typed pipe reads).
    """
    #get path to plugin directory
    plugin_path = ""
    try:
        opts, args = getopt.getopt(sys.argv[1:], "p:", ["help", "path="])
    except getopt.GetoptError:
        print "Error"
        sys.exit(2)
    for opt, arg in opts:
        if opt == "--help":
            print "Opcje:"
            print " '--help' -- help"
            print " '--path' -- path to plugin directory"
            print "Example command:"
            print "main.py --path 'plugins/my_plugin/'"
        elif opt in ("-p", "--path"):
            plugin_path = arg
    #open file to write log from julius
    # NOTE(review): `f` is never closed in this function -- confirm the
    # process exit is relied upon to flush/close the log.
    f = open(os.path.join(PROGRAM_PATH, '../output.log'), 'w')
    #open command files and return dictionary of commands
    commands_dict = read_commands(plugin_path)
    #create absolute path to julius
    julius_path = os.path.join(PROGRAM_PATH, '../julius_engine/julius')
    config_path = os.path.join(PROGRAM_PATH, '../config.jconf')
    cmd = [julius_path, '-C', config_path]
    #add dfa file of plugin
    dfa_path = '../' + plugin_path + 'sample.dfa'
    dfa_path = os.path.join(PROGRAM_PATH, dfa_path)
    cmd.extend(['-dfa', dfa_path])
    #add dict file of plugin
    dict_path = '../' + plugin_path + 'sample.dict'
    dict_path = os.path.join(PROGRAM_PATH, dict_path)
    cmd.extend(['-v', dict_path])
    #open subprocess and read stdout
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
    while True:
        #read stdout line
        out = p.stdout.readline()
        #check if line is empty or if subprocess has finished
        if out == '' and p.poll() != None:
            break
        if out != '':
            #write stdout to file
            f.write(out)
            #print stdout to terminal
            sys.stdout.write(out)
            sys.stdout.flush()
            #parse stdout line
            # NOTE(review): original indentation was lost; parser() is
            # assumed to run only on non-empty lines -- confirm.
            parser(out, commands_dict)
def test_regexp(self):
    """regexp() matches anchored at the given position, honouring re flags."""
    digits = regexp(re.compile("[1-9][0-9]*"))
    self.assertEqual(digits("2016", 0), [True, "2016", 4])
    self.assertEqual(digits("2016", 1), [False, None, 1])  # '0' can't start
    self.assertEqual(digits("2016", 2), [True, "16", 4])
    ignorecase = regexp(re.compile("abc", re.I))
    self.assertEqual(ignorecase("abc", 0), [True, "abc", 3])
    self.assertEqual(ignorecase("AbC", 0), [True, "AbC", 3])
    # Python's re takes the first alternative that matches, not the longest.
    long_first = regexp(re.compile("aa|a"))
    short_first = regexp(re.compile("a|aa"))
    self.assertEqual(long_first("aa", 0), [True, "aa", 2])
    self.assertEqual(short_first("aa", 0), [True, "a", 1])
def active_parser(self, log):
    """Run the text parser on one log entry and return its parsed fields.

    log: a sequence whose [0] element is a path fragment and whose [1]
    element selects single-file (truthy) vs directory (falsy) parsing.
    Returns (name, txt_length, cit_dict, sentence_dict, ref_file); on any
    failure the data fields degrade to 0 / 'Error File'.
    """
    try:
        if log[1]:
            text_parser = parser(self.read_path + log[0], True)
            # NOTE(review): this chained assignment binds ref_file to the
            # WHOLE 3-tuple while also unpacking it -- possibly intentional
            # (single-file mode has no separate ref file), but confirm.
            txt_length, cit_dict, sentence_dict = ref_file = text_parser.logic_control
        else:
            text_parser = parser(self.read_path + log[0] + '/')
            txt_length, cit_dict, sentence_dict, ref_file = text_parser.logic_control
    # NOTE(review): bare except swallows every error (including typos and
    # KeyboardInterrupt) and degrades to placeholder values.
    except:
        txt_length = 0
        cit_dict = 'Error File'
        ref_file = 'Error File'
        sentence_dict = 'Error File'
    return log[0], txt_length, cit_dict, sentence_dict, ref_file
def values(self, dict_):
    '''parameters:
        - @data (dict) set object properties (static and dynamic), it's possible
          to set all dynamic properties with date string with __useDate__ key
          @data need to have "type_id" key or "FK_@@tablename@@Type" key
    '''
    # NOTE(review): `self.values` is THIS method, so `.copy()` on it will
    # raise AttributeError -- presumably `self.__values__.copy()` (or a
    # `values` property elsewhere) was intended; confirm before relying on
    # previousState.
    self.previousState = self.values.copy()
    # An explicit ID must never be overwritten through this path.
    if dict_.get('ID', None):
        del dict_['ID']
    if self.fk_table_type_name not in dict_ and 'type_id' not in dict_ and not self.type_id:
        raise Exception('object type not exists')
    if 'type_name' in dict_:
        self.type_name = dict_.get('type_name')
    else:
        # Resolve the type from FK column, explicit type_id, or the current one.
        type_id = dict_.get(self.fk_table_type_name, None) or dict_.get(
            'type_id', None) or self.type_id
        self._type = self.session.query(self.TypeClass).get(type_id)
    # NOTE(review): parser() is called even when '__useDate__' is absent
    # (i.e. on None) -- confirm parser(None) is well-defined.
    useDate = parser(dict_.get('__useDate__', None) )
    for prop, value in dict_.items():
        self.setValue(prop, value, useDate)
    if self.hasLinkedField:
        useDateLinked = useDate or self.linkedFieldDate()
        self.updateLinkedField(dict_, useDate=useDateLinked)
def main():
    """Interactive REPL over the HBA database; exits when the user types "quit"."""
    connect_to_db()
    # Command-name -> handler dispatch table (unknown commands are ignored).
    handlers = {
        "student": get_student_by_github,
        "new_student": make_new_student,
        "project": get_projects_by_title,
        "create_project": make_new_project,
        "project_grade": get_project_grade,
        "assign_grade": assign_grade,
        "get_grades": get_student_grades,
    }
    command = None
    while command != "quit":
        parsed_input = parser(raw_input("HBA Database> "))
        command = parsed_input[0]
        args = parsed_input[1][:]
        handler = handlers.get(command)
        if handler is not None:
            handler(*args)
    CONN.close()
def generate_gforth_script(x):
    """Parse `x`, type-check the tree, and return the generated gforth text.

    Type-checker messages are prepended to the output; a failed parse
    returns a gforth comment line (backslash-prefixed) noting the input.
    """
    parsed = parser(x)
    if not parsed:
        return chr(92) + ' parsing failed on: ' + x + '\n'
    tree = parsed[1]
    output = ''.join(type_checker(tree))
    gforth_code = generator2(tree)
    # -1 signals generation failure; type errors alone are still returned.
    if gforth_code != -1:
        output += str(gforth_code) + ' '
    return output
def runFileAtLine(line , debugFlag , curLine): retTuple = parser(line , debugFlag) if retTuple[0]: if str(retTuple[1])[:6] == "Error:": sys.exit("Error at line " + str(curLine) + "\n\t" + line + "\n\t" + retTuple[1]) else: print retTuple[1]
def cdf(metric, log, rc=[]):
    """Return the empirical CDF of `metric` as a list of [sample, probability].

    metric must belong to CUM_METRICS, AVG_METRICS or ABS_METRICS; the
    program exits otherwise.  When `rc` is empty, `log` is parsed first
    with default Input_Options.  (The `rc=[]` default is safe here: it is
    only read, never mutated.)
    """
    if metric not in CUM_METRICS and metric not in AVG_METRICS and metric not in ABS_METRICS:
        print(BColors.WARNING + 'WARNING: "' + metric + '" is not a valid metric...' + BColors.ENDC)
        print ('\tChoose from: ' + BColors.BOLD + ', '.join(CUM_METRICS) +\
            ', '.join(AVG_METRICS) + ', '.join(ABS_METRICS) + BColors.ENDC)
        sys.exit(1)
    if len(rc) == 0:
        inopts = Input_Options()
        rc = parser(log, inopts)
    cdf_map = []  # map a metric sample to its probability
    samples = []
    if metric in ABS_METRICS:
        # Absolute metrics take one value per session.
        abs_map = absolute(metric, log)
        for session in abs_map:
            samples.append(session[1])
    else:
        for session in rc:
            for record in session:
                try:
                    samples.append(float(record[FIELDS_MAP[metric]]))
                except:
                    continue  # unparsable field: skip the record
    if not samples:
        # BUG FIX: the original divided by len(samples) unconditionally and
        # raised ZeroDivisionError on a log with no parsable values.
        return cdf_map
    samples.sort()
    ps = 1 / float(len(samples))  # probability step per sample
    p = 0.0
    for s in samples:
        p += ps
        cdf_map.append([s, p])
    return cdf_map
def average(metric, log, rc=[]):
    """Return [[sid, mean], ...]: the per-session average of `metric`.

    metric must be in AVG_METRICS (the program exits otherwise).  When
    `rc` is empty, `log` is parsed first with default Input_Options.
    Sessions with no parsable samples are omitted.
    """
    if metric not in AVG_METRICS:
        print(BColors.WARNING + 'WARNING: "' + metric + '" is not a valid metric...' + BColors.ENDC)
        print('\tChoose from: ' + BColors.BOLD + ', '.join(AVG_METRICS) + BColors.ENDC)
        sys.exit(1)
    if len(rc) == 0:
        inopts = Input_Options()
        rc = parser(log, inopts)
    metric_map = []  # maps a session id to its average metric value
    for session in rc:
        total = 0.0
        n_samples = 0
        for record in session:
            try:
                total += float(record[FIELDS_MAP[metric]])
                n_samples += 1
            except:
                continue  # unparsable field: skip the record
        if n_samples != 0:
            metric_map.append(
                [record[FIELDS_MAP['Sid']], float(total / n_samples)])
    return metric_map
def main(): debugFlag = False #Debugger initially off #Begin interactive user interface if len(sys.argv) < 2: print '\nPhotorg 1.0.0 Created by Michael duPont' print 'Type "help" to view quick language guide or "quit" to exit\n' cmdString = "" while cmdString != "quit": #Check to see of Debugger value changed if cmdString == "debug on": debugFlag = True print "Debugger on" elif cmdString == "debug off": debugFlag = False print "Debugger off" #Else, continue to parser else: if cmdString != "": retTuple = parser(cmdString , debugFlag) if retTuple[0]: print retTuple[1] cmdString = raw_input(">>> ") print 'Goodbye\n' #Read in from a .lang file elif (1 < len(sys.argv) < 4) and (sys.argv[1].endswith(".ptg")): try: cmdDebugFlag = False if len(sys.argv) == 3: if sys.argv[2][:1].lower() == "t": debugFlag = True cmdDebugFlag = True print "Debug has been enabled" curLine = 0 fin = open(sys.argv[1]) for line in fin: curLine = curLine + 1 line = line.strip() if line != "" and line != "help": if not cmdDebugFlag: if line == "debug on": debugFlag = True elif line == "debug off": debugFlag = False else: runFileAtLine(line , debugFlag , curLine) else: if line != "debug on" and line != "debug off": runFileAtLine(line , debugFlag , curLine) fin.close() except IOError: sys.exit("Error: File %s was not found!" % sys.argv[1]) else: sys.exit("Usage: %s (file-name.ptg) (debug [False]/True)" % sys.argv[0])
def dparse_expression(text, start_symbol, grammar = read_grammar(GRAMMAR)):
    """Parse `text` from `start_symbol` with a ParsePrinter and dump the trees.

    NOTE(review): the default grammar is built ONCE at import time (default
    arguments are evaluated at def time) -- presumably intentional caching,
    but confirm GRAMMAR does not change afterwards.  Python 2 code.
    """
    parser = ParsePrinter(grammar)
    print '============================================================================================'
    print 'parsing {}'.format(text)
    # , dont_merge_epsilon_trees=True, dont_compare_stacks=True
    result = parser(text, dont_use_height_for_disambiguation=False, dont_use_greediness_for_disambiguation=False, start_symbol=start_symbol, ambiguity_fn=ambiguity_function)
    # Print every parse node of the resulting structure, one per line.
    print '\n'.join([parser.pnode_to_string(n) for n in result.getStructure()])
    return result
def parseTree(self):
    """Build the disk tree via the parser and render it into a QTreeWidget."""
    print("Parsing The Tree Please Wait..")
    self.disk_tree = Tree()
    parserInstance = parser(self.disk_tree)
    parserInstance.generate(self.disk_tree.head)
    # NOTE(review): attribute name carries a typo ("Permnanet"); other code
    # may reference it by this spelling, so it is documented rather than fixed.
    self.treeViewPermnanet = QtWidgets.QTreeWidget()
    self.disk_tree.tree_print(self.disk_tree.head, self.treeViewPermnanet)
    print("Done Parsing The Tree!")
def setFileName(self, fileName):
    """Begin translating a new VM file.

    Derives the static-variable prefix from the file's base name, then
    feeds every command in the file to the matching command writer.
    """
    vm_parser = parser(fileName)
    self.staticPrefix = fileName.split('.')[0]
    while vm_parser.hasMoreCommands():
        vm_parser.advance()
        self.dispatchWriter(vm_parser.commandType(), vm_parser.currentCommand)
def setValue(self, propertyName, value, useDate=None): ''' Set object properties (static and dynamic) - value can have two forms: { value: value date: date } or value ''' # extract value and date from dict value if isinstance(value, dict) and "date" in value: useDate = parser(value.get("date")) value = value.get("value", None) HasStaticProperties.setValue(self, propertyName, value) if not hasattr(self, propertyName) and propertyName not in self.__table__.c: self.setDynamicValue(propertyName, parser(value), useDate) self.__values__[propertyName] = value
def start_parsers(self):
    """Pop the first queued task and start every registered parser on it.

    Does nothing when no task is queued.
    """
    if not self.tasks:
        return
    next_task = self.tasks.pop(0)
    self.parsers = []
    for parser_factory in parsers:
        instance = parser_factory()
        self.parsers.append(instance)
        instance.start_process(next_task, self.stop_others)
def test_huge(self):
    """A nested s-expression token stream parses into the expected tree."""
    # Token stream produced by tokenize("(defun (double x) (+ x x))").
    tokens = [
        ("(", ""),
        ("identifier", "defun"),
        ("(", ""),
        ("identifier", "double"),
        ("identifier", "x"),
        (")", ""),
        ("(", ""),
        ("identifier", "+"),
        ("identifier", "x"),
        ("identifier", "x"),
        (")", ""),
        (")", ""),
    ]
    expected = [[
        ("identifier", "defun"),
        [("identifier", "double"), ("identifier", "x")],
        [("identifier", "+"), ("identifier", "x"), ("identifier", "x")],
    ]]
    self.assertEqual(expected, parser(tokens))
def main():
    """Lex and parse the input, register functions/variables, write output.

    NOTE(review): `filename` is not defined in this function -- presumably a
    module-level global, and an open file object given the `.name` access;
    confirm at the call site.
    """
    tokens = lexer(filename, filename.name)
    # Echo the token stream for debugging.
    for i in tokens:
        print(i)
    parsed = parser(tokens)
    functions, variables = conv(parsed)
    for i in functions:
        internal_functions[i.name] = i.value
    for i in variables:
        internal_variables[i.name] = i.value
    filew(internal_functions, internal_variables)
def test(ts): test_results = [] for t in ts: scanner_out = scanner(t) parser_out = parser(t) if parser_out: parse_tree = parser_out[1] #print_tree(parse_tree) gforth_code = generator2(parse_tree) print t, ' -> ', gforth_code else: print 'parsing failed on: ',t
def New_Page_Info(new_page):
    '''Extract (title, url) pairs from a listing page via XPath.

    XPath is used rather than regex for speed; titles and hrefs are taken
    from the same //tr/td/a nodes, so the two lists must align.'''
    dom = parser(new_page)
    titles = dom.xpath('//tr/td/a/text()')
    links = dom.xpath('//tr/td/a/@href')
    assert (len(titles) == len(links))
    return zip(titles, links)
def main(args="blocks-4-0.strips"):
    """Iteratively deepen a SAT encoding of a STRIPS problem and print a plan.

    Python 2 code.  Increases the horizon `i` until pycosat finds a model,
    then prints the action literals of the solution.
    """
    pra_valer = True
    print args
    if pra_valer:
        i = 2
        solution = []
        # NOTE(review): pycosat.solve returns the string "UNSAT" on failure;
        # len("UNSAT") == 5, so `<= 5` loops while unsatisfiable -- fragile
        # (a satisfying model with <= 5 literals would also loop); confirm.
        while len(solution) <= 5:
            cnf, names, tn, actions = parser([args, i])
            print i
            solution = pycosat.solve(cnf)
            print solution
            i += 1
        # Print only positive literals that name actions.
        for i in solution:
            if i > 0 and (abs(i) % 100) in actions:
                print names[abs(i) % 100], '>', str(i)
    else:
        # Debug path: dump the clause set at a fixed horizon of 6.
        cnf, names, tn, actions = parser([args, 6])
        for i in cnf:
            print[ '~ '[neg(j)] + names[abs(j) % tn] + '--' + str(abs(j) / tn) for j in i ]
def tabulate_stats(log, rc=[]):
    """Aggregate every metric per session and print one pretty table.

    Fills the module-level `session_table` (sid -> Table_Record) from the
    average/cumulative/absolute helpers plus the resolution distribution,
    then renders all records with tabulate().  Python 2 print at the end.
    """
    global session_table
    if len(rc) == 0:
        inopts = Input_Options()
        rc = parser(log, inopts)
    # One empty record per session id, keyed by the first record's Sid.
    for session in rc:
        session_table[session[0][FIELDS_MAP['Sid']]] = Table_Record()
    rtt_map = average('Rtt', log, rc)
    jitt_map = average('Jitt', log, rc)
    tout_map = cumulative('Tout', log, rc)
    retx_map = cumulative('Retx', log, rc)
    nack_map = cumulative('Nack', log, rc)
    segm_map = cumulative('Segm', log, rc)
    strt_map = absolute('Strt', log, rc)
    reb_map = absolute('Reb', log, rc)
    res_map = video_resolution_dist(log, rc)
    # Each helper returns [sid, value] pairs; copy them into the records.
    for session in rtt_map:
        session_table[session[0]].Rtt = session[1]
    for session in jitt_map:
        session_table[session[0]].Jitt = session[1]
    for session in tout_map:
        session_table[session[0]].Tout = session[1]
    for session in retx_map:
        session_table[session[0]].Retx = session[1]
    for session in nack_map:
        session_table[session[0]].Nack = session[1]
    for session in segm_map:
        session_table[session[0]].Segm = session[1]
    for session in strt_map:
        session_table[session[0]].Strt = session[1]
    for session in reb_map:
        session_table[session[0]].Reb = session[1]
    for session in res_map:
        session_table[session[0]]._240p = session[1]['240p']
        session_table[session[0]]._360p = session[1]['360p']
        session_table[session[0]]._480p = session[1]['480p']
        session_table[session[0]]._720p = session[1]['720p']
        session_table[session[0]]._1080p = session[1]['1080p']
    trecords = []
    '''
    # Uncomment the following line to inlcude users' geographical info
    append_user_location(rc)
    '''
    for sid in session_table:
        trecord = [sid]
        trecord.extend(session_table[sid].array_format())
        trecords.append(trecord)
    table = tabulate(trecords, header, tablefmt='pretty')
    print table
def convert(infile, parser, force):
    """Stream a MediaWiki XML dump, converting each <text> element to a file.

    force=True re-parses and appends every page; otherwise pages are written
    only when need_update() says so.  Processed elements are pruned from the
    lxml tree to keep memory flat.  Python 2 code.
    """
    options = read_configuration('Parser')
    wikiparser = WikiParser(options)
    context = etree.iterparse(infile, events=("end",), tag='{http://www.mediawiki.org/xml/export-0.3/}text')
    counter, error_counter, update_counter = 0,0, 0
    for x, text in context:
        # ret[title.text] = 0
        # res.append(title.text.encode('utf-8'))
        counter +=1
        title = ''
        try:
            #p = title.getparent().getchildren()[-1].getchildren()[-1].text
            # Page id / revision / title live on the ancestor elements.
            id = text.getparent().getparent().getchildren()[1].text
            revision = text.getparent().getchildren()[0].text
            # '/' would break the output path, replace it in the title.
            title = re.sub(r'/', '|', text.getparent().getparent().getchildren()[0].text)
            file_name = create_file_name(string.zfill(id, zfill_size), revision, title)
            #update or create file
            if force:
                print "File: %s" % (title, )
                meta_data, content = wikiparser.parse(title, text.text)
                meta_data['title'].append(title.encode('utf-8'))
                # create_file(file_name, content)
                add_to_file(read_configuration('Return')['name'], meta_data, clean_blnak_lines(content))
            else:
                if need_update(file_name):
                    content = parser(text.text)
                    create_file(file_name, content)
        # NOTE(review): bare except -- any failure on a page is logged with a
        # traceback and counted, then processing continues.
        except:
            exceptionType, exceptionValue, exceptionTraceback = sys.exc_info()
            traceback.print_exception(exceptionType, exceptionValue, exceptionTraceback, limit=9, file=sys.stdout)
            error_counter +=1
            print "e -------- "
            print title
            print str(error_counter) + "/" + str(counter) + " is error"
            print "---" * 3
            continue
        # Free the processed element and all preceding siblings (iterparse
        # memory-pruning idiom).
        text.clear()
        while text.getprevious() is not None:
            del text.getparent()[0]
    print "OK: " + str(counter - error_counter)
def parse_response(self, response):
    """Route a scraped response: manage login state, then dispatch ads parsing.

    Generator (scrapy callback): yields follow-up requests while logged out
    and parsed items once logged in.
    """
    parser = None
    if self.islogged(response) is False:
        self.recursive_flag = False
        # Remember the request to replay after a successful login.
        req_once_logged = response.meta[
            'req_once_logged'] if 'req_once_logged' in response.meta else response.request
        if self.is_login_page(response) is True:
            self.logger.warning(
                "%s: On login page. Proceeding to log in. Have used %s attempts."
                % (self.login['username'], self.logintrial))
            self.logintrial += 1
            if self.logintrial > self.settings['MAX_LOGIN_RETRY']:
                self.wait_for_input("Too many login failed", req_once_logged)
                self.logintrial = 0
                return
            yield self.make_request(reqtype='dologin', response=response,
                                    dont_filter=True,
                                    req_once_logged=req_once_logged)
        elif self.islogged(response) is False and self.is_login_page(
                response) is False:
            self.logintrial = 0
            yield self.make_request(reqtype='loginpage', dont_filter=True,
                                    req_once_logged=req_once_logged)
        else:
            self.logger.warning('This is not supposed to happen.')
    elif self.islogged(response) is True:
        self.recursive_flag = True
        self.logintrial = 0
        if response.meta['reqtype'] == 'dologin':
            self.logger.info(
                "Succesfully logged in as %s! Setting parsing flag." %
                (self.login['username']))
        if self.is_ads(response):
            if self.is_multilisting(response):
                # BUG FIX: was `parse = self.parse_multiADs`, which bound a
                # dead local and left `parser` as None, so multi-listing
                # pages were silently dropped.
                parser = self.parse_multiADs
            else:
                parser = self.parse_ads
        if parser is not None:
            for x in parser(response):
                yield x
    else:
        self.logger.warning(
            'Outside blocks: This is not supposed to happen. HTML %s' %
            response.body)
def test_some_or(self):
    """someOp over '|' folds words into a list; result is (values, rest)."""
    parser = someOp(parser=word, op=or_op_parser,
                    f=(lambda xs, x: xs.append(x) or xs), z=list)
    # BUG FIX: the original checks were `assert (expr, '')` -- asserting a
    # two-element tuple, which is always truthy, so nothing was tested.
    # Each assertion now compares against the expected (values, rest) pair.
    assert parser('|a') is None
    assert parser('a') == (['a'], '')
    assert parser('a|b') == (['a', 'b'], '')
    assert parser('a|b|c') == (['a', 'b', 'c'], '')
    assert parser('a|b|c|') == (['a', 'b', 'c'], '|')
    assert parser('a|b|c&') == (['a', 'b', 'c'], '&')
    assert parser('a|b|c & (a|b)') == (['a', 'b', 'c'], ' & (a|b)')
def test_some_and(self):
    """someOp over whitespace-and folds words into a list; result is (values, rest)."""
    parser = someOp(parser=word, op=and_op_parser,
                    f=(lambda xs, x: xs.append(x) or xs), z=list)
    # BUG FIX: the original checks were `assert (expr, '')` -- asserting a
    # two-element tuple, which is always truthy, so nothing was tested.
    # Each assertion now compares against the expected (values, rest) pair.
    assert parser('|a') is None
    assert parser('a') == (['a'], '')
    assert parser('a b') == (['a', 'b'], '')
    assert parser('a b c') == (['a', 'b', 'c'], '')
    assert parser('a b c ') == (['a', 'b', 'c'], ' ')
    assert parser('a b c&') == (['a', 'b', 'c'], '&')
    assert parser('a b c & (a b)') == (['a', 'b', 'c'], ' & (a b)')
def affiche_instance_unique_enumeration(instance):
    """ int -> None
    Display the instance in a window after solving it with the
    ENUMERATION method."""
    name = str(instance)
    N, M, Seq_lignes, Seq_colonnes, max_lignes, max_colonnes = parser(
        "./instances/" + name + ".txt")
    G = init_Jeu(N, M, Seq_lignes, Seq_colonnes)
    init_interface_graphique(N, M, config.Seq_lignes, config.Seq_colonnes,
                             max_lignes, max_colonnes, G, name)
    ok, G1 = ENUMERATION(G)
    refresh_interface_graphique(G1)
    fermeture_interface_graphique()
    return None
def setValue(self, propertyName, value): ''' @propertyName :: string, @value :: string, integer, float, ... every type Database compliant ''' #check if propertyName corresponding to a column if not hasattr(self, propertyName): if propertyName in self.__table__.c: print(propertyName) print(self.__table__.c) propertyName = class_mapper(inspect(self).class_ ).get_property_by_column( self.__table__.c[propertyName] ).key else: return setattr(self, propertyName, parser(value)) self.__values__[propertyName] = value
def cumulative(metric, log, rc=[]):
    """Return [[sid, total], ...]: the per-session sum of integer `metric`.

    metric must be in CUM_METRICS (the program exits otherwise).  When
    `rc` is empty, `log` is parsed first with default Input_Options.
    """
    if metric not in CUM_METRICS:
        print(BColors.WARNING + 'WARNING: "' + metric + '" is not a valid metric...' + BColors.ENDC)
        print('\tChoose from: ' + BColors.BOLD + ', '.join(CUM_METRICS) + BColors.ENDC)
        sys.exit(1)
    if len(rc) == 0:
        inopts = Input_Options()
        rc = parser(log, inopts)
    metric_map = []  # map a session id to its enumerated metric
    for session in rc:
        if not session:
            # BUG FIX: an empty session left `record` unbound and the
            # append below raised NameError; skip such sessions.
            continue
        counter = 0
        for record in session:
            try:
                counter += int(record[FIELDS_MAP[metric]])
            except:
                continue  # unparsable field: skip the record
        metric_map.append([record[FIELDS_MAP['Sid']], counter])
    return metric_map
def absolute(metric, log, rc=[]):
    """Return [[sid, value], ...]: `metric` taken from each session's LAST record.

    metric must be in ABS_METRICS (the program exits otherwise).  When
    `rc` is empty, `log` is parsed first with default Input_Options.
    Sessions whose last record lacks a parsable value are omitted.
    """
    if metric not in ABS_METRICS:
        print(BColors.WARNING + 'WARNING: "' + metric + '" is not a valid metric...' + BColors.ENDC)
        print('\tChoose from: ' + BColors.BOLD + ', '.join(ABS_METRICS) + BColors.ENDC)
        sys.exit(1)
    if len(rc) == 0:
        inopts = Input_Options()
        rc = parser(log, inopts)
    metric_map = []  # map a session id to its absolute metric
    for session in rc:
        abs_metric = None
        try:
            abs_metric = float(session[-1][FIELDS_MAP[metric]])
        except:
            continue  # missing/malformed field: skip this session
        # Idiom fix: identity test for None (was `abs_metric != None`).
        if abs_metric is not None:
            metric_map.append([session[0][FIELDS_MAP['Sid']], abs_metric])
    return metric_map
def main(self, response):
    """Show the main menu, read the user's numeric choice, and dispatch it.

    Option 0 offers to wipe the stored cookie and re-login; non-numeric or
    unknown choices exit with an insult.  Python 2 code (raw_input).
    """
    os.system('clear')
    print(self.config.banner())
    html = parser(response, 'html.parser')
    print('_________________________________________________________')
    # NOTE(review): the next line appears scrubbed/corrupted in the source
    # (the `'******'` run is not valid Python); left verbatim -- restore the
    # original expression before running.
    print('\n(\033[0;96m•\033[0m) ACTIVE USER : '******'utf-8') + html.title.text.upper())
    print('_________________________________________________________')
    print(self.menu)
    try:
        choose = int(raw_input('Choose >> '))
    except ValueError:
        exit('\n\033[0;91mYou stuppid.\033[0m')
    if choose == 1:
        exit(friends_list.main(self, self.cookie, self.url, self.config))
    elif choose == 2:
        exit(friends.main(self, self.cookie, self.url, self.config))
    elif choose == 3:
        exit(search_name.main(self, self.cookie, self.url, self.config))
    elif choose == 4:
        exit(likes.main(self, self.cookie, self.url, self.config))
    elif choose == 5:
        exit(crack.Brute().main())
    elif choose == 0:
        ask = raw_input('\nAre you Sure? [y/N]: ')
        if ask.lower() == 'y':
            # Drop the cached cookie and force a fresh login.
            print('\nRemoving cookies...')
            time.sleep(2)
            os.remove('log/cookies.log')
            print('\n\033[0;92mSuccess removed!\033[0m')
            time.sleep(2)
            login.loginFb(self, self.url, self.config)
            self.cookie = self.config.loadCookie()
            self.start()
        else:
            self.cookie = self.config.loadCookie()
            print('\ncanceled!')
            self.start()
    else:
        exit('\n\033[0;91mYou stuppid.\033[0m')
def video_resolution_dist(log, rc=[]):
    """Per-session distribution of video resolutions inferred from Ebw.

    Returns [[sid, {resolution: fraction}], ...]; sessions with no
    parsable bandwidth sample are omitted.  When `rc` is empty, `log`
    is parsed first with default Input_Options.
    """
    res_dist_map = []  # maps a sid to its video resolution distribution
    if len(rc) == 0:
        inopts = Input_Options()
        rc = parser(log, inopts)
    for session in rc:
        dist = {'240p': 0, '360p': 0, '480p': 0, '720p': 0, '1080p': 0}
        sample_count = 0
        for record in session:
            try:
                dist = bandwidth_categorizer(int(record[FIELDS_MAP['Ebw']]), dist)
                sample_count += 1
            except Exception:
                continue  # unparsable bandwidth: skip the record
        if sample_count == 0:
            continue
        # Normalise raw counts into per-session fractions.
        dist = {resolution: float(count) / float(sample_count)
                for resolution, count in dist.items()}
        res_dist_map.append([session[0][FIELDS_MAP['Sid']], dist])
    return res_dist_map
def temps_moyen_coloration(instance, nombre_de_tests): """ int * int -> None Affiche le temps en seconde que prend la fonction COLORATION sur l'instance en effectuant une moeyenne de nombre_de_tests""" # res : int res = 0 for i in range(nombre_de_tests): str1 = str(instance) # Initialisation du chronometre time_start = time.time() N, M, Seq_lignes, Seq_colonnes, max_lignes, max_colonnes = parser( "./instances/" + str1 + ".txt") G = init_Jeu(N, M, Seq_lignes, Seq_colonnes) (ok, G1) = COLORATION(G) # Fin du chronometre time_end = time.time() res += time_end - time_start print("Instance ", str1, " \tResolution : ", ok, "\ttemps : ", round(res / nombre_de_tests, 6), " secondes") return None
def main():
    """Authorise against Google Sheets, parse an input file, and upload it."""
    # Auth setup: reuse token.json, or run the OAuth flow to create it.
    store = file.Storage('token.json')
    creds = store.get()
    if not creds or creds.invalid:
        flow = client.flow_from_clientsecrets('credentials.json', SCOPES)
        creds = tools.run_flow(flow, store)
    service = build('sheets', 'v4', http=creds.authorize(Http()))
    inputFile = input("Please enter file name...")
    values = parser(inputFile)
    # Input body with info: first column plus the tail of the second.
    updatedSheet = {
        'values': [values[0], values[1][1:]],
        'majorDimension': 'COLUMNS'
    }
    # NOTE(review): `date` is built but never sent anywhere -- presumably
    # intended for a second update call; confirm.
    date = {
        'values': [values[1][0]]
    }
    # Call the Sheets API (the returned `result` is not inspected).
    result = service.spreadsheets().values().update(spreadsheetId=SPREADSHEET_ID,range=RANGE,body=updatedSheet,valueInputOption='RAW').execute()
def generator(x): parser_out = parser(x) output = "" #output = '\n' + chr(92) + ' input content: ' + str(x) + '\n' if parser_out: parse_tree = parser_out[1] #print_tree(parse_tree) type_errors = type_checker(parse_tree) for item in type_errors: output += item #output += chr(92) + ' -------------------------------' + '\n' gforth_code = generator2(parse_tree) output += " " + gforth_code# + ' CR' print output else: output += chr(92) + ' parsing failed on: ' + x print output + '\n'
# Benchmark driver: run the parse/validate/interpret pipeline 6 times per
# input file.  (`all_files`, `data_path`, `db_name` and the helper classes
# come from earlier in the file, outside this view.)
if "__pycache__" in all_files:
    all_files.remove("__pycache__")
processing_files = all_files #random.sample(all_files, 6)
for file_name in processing_files:
    # NOTE(review): `execution_time` is initialised but never appended to
    # here, and `start_time` is never diffed -- presumably handled later in
    # code outside this view; confirm.
    execution_time = []
    total_event = 0
    for i in range(6):
        directory_prepare() # clean up target
        start_time = time.time()
        # NOTE(review): `total_event` is re-zeroed every iteration, so only
        # the last run's count survives -- confirm this is intended.
        total_event = 0
        p = parser()
        parsed_result = p.parsingFile(data_path + file_name)
        p.structurePrettyPrint(parsed_result)
        db = database(db_name)
        v = validator(p, db)
        v.validate_rule_parameters()
        v.validate_source_database()
        # NOTE(review): this rebinds the loop variable `i` to the
        # interpreter instance -- harmless for the for-loop itself, but
        # confusing; consider a different name.
        i = interpreter(p, db)
        i.field_mappings()
        i.events_generator()
        total_event += i.event_count
        sorting_csv_files()
#!/usr/bin/python import csv from operators.xor import * from operators.conditional import * from operators.biconditional import * from parser import * from permutator import * from isolate_variables import * string = input("Type in your statement: ") string = parser(string) variables = isolate_variables(string) command = "f = lambda " + variables + ": " + string exec(command) variables = variables.replace(",", "") variables = variables.replace(" ", "") truth_input = permutator(len(variables)) fields = list(variables) + [string] truth_output = [] #Not the most efficent design, but fairly minor compared to how laggy #permutator.py can be past 7 variables. Still might want to redesign it. for i in range(0, len(truth_input)):
def imp_parse(tokens):
    """Run the top-level IMP parser over `tokens`, starting at position 0."""
    return parser()(tokens, 0)
#import sqlite3 #conn = sqlite3.connect(database) # db defined in config.py #c = conn.cursor() # Start taking Logs timestamp = strftime("%Y-%m-%d-%H:%M:%S") log_name = "log-" + timestamp + ".log" if not os.path.exists("log"): os.makedirs("log") log = open("log/" + log_name, 'w') twitter_Queue = Queue.Queue(10000) exit_event = threading.Event() signature_count = mutable_int() parser_thread = parser("parser", 20, wh_url_base, wh_url_id1, wh_url_id2, twitter_Queue, signature_count, exit_event, database) tweetbot = Tweeter(consumer_key, consumer_secret, access_token, access_token_secret, msg_preamble, msg_postamble, twitter_Queue, signature_count, exit_event, 10) parser_thread.start() tweetbot.start() raw_input("Press enter to end . . .") exit_event.set() print "exiting . . ." parser_thread.join() tweetbot.join() log.close()
#!/usr/bin/python3 from parser import * #expr = "sin(cos(sin(cos(3.4*x*sin(x)))))" expr = "sin(x*x)" print(parser(expr).D().simplify())
def __init__(self, logfile, parser):
    """Build the analyzer from a log file and a parser factory.

    parser: a class/callable that takes the logfile and returns a parser
    instance (stored on self.parser).  process() runs immediately.
    """
    self.people = {}  # presumably populated by process() -- confirm
    self.logfile = logfile
    self.parser = parser(self.logfile)
    self.process()
change_comm_number(presence, time, state1, state2, tr_value, state_of_nodes,inistate,graph_oriented,community,comm_file_desc) elif tr_rule == "probability": tr_value = float(tr_value) change_comm_proba(presence, time, state1, state2, tr_value, state_of_nodes,inistate,graph_oriented,community,comm_file_desc) else : print("Accepted transition are percentage, number and probability") sys.exit() else : print("Accepted rules are none, neighborhood and community") sys.exit() parser(model_file,statelist,edgelist) #parser('si.xml',statelist,edgelist) i = 0 #for state in statelist : # print state.attributes["id"].value t_end_graph = read_graph(graph_file,t_end_graph) graph_desc_file = open(graph_file,'r') lign = graph_desc_file.readline() if comm_file == "" : comm_file_desc = None else : comm_file_desc = open(comm_file,'r') l = lign.split("=") if "no" in l[1] : graph_oriented = "no" else :
def test_option(self):
    """option() succeeds with None (no consumption) when the inner parser fails.  /(hoge)?/"""
    optional = option(token("hoge"))
    self.assertEqual(optional("hoge", 0), [True, "hoge", 4])
    self.assertEqual(optional("hoge", 1), [True, None, 1])
def test_map(self):
    """map() transforms a successful result and passes failures through."""
    mapped = map(token("hoge"), lambda value: value + "!")
    self.assertEqual(mapped("hoge", 0), [True, "hoge!", 4])
    self.assertEqual(mapped("hoge", 1), [False, None, 1])
print('= ',end='') prettyPrint(tree.right) print(';') elif tree.type == ELSE: print("else") prettyPrint(tree.left) elif tree.type == WHILE: print("while ",end='') print('(',end='') prettyPrint(tree.left) print(')',end='') print() prettyPrint(tree.right) elif tree.type == RETURN: print("return ",end='') if tree.left: prettyPrint(tree.left) print(';') elif tree.type == PRINT: print("print",end='') print("(",end='') prettyPrint(tree.left) print(")",end='') print(";") elif tree.type == EMPTY: print(end='') else: print("invalid expression!") prettyPrint(parser(sys.argv[1]))
def main():
    """Parse the source file named on the command line and evaluate it."""
    environment = newEnvironment()
    syntax_tree = parser(sys.argv[1])
    evaluate(syntax_tree, environment)
# Compiler driver: read the C- source, run the parser / semantic analysis /
# code generator pipeline, and emit assembly.
from globalTypes import *
from parser import *
from semantica import *
from cgen import *

f = open('sample.c-', 'r')
programa = f.read()
progLong = len(programa)
# Append the '$' sentinel the scanner uses as EOF.
programa = programa + '$'
posicion = 0
# Seed the module-level globals shared by the compiler phases.
globales(programa, posicion, progLong)
AST = parser(True)
semantica(AST, True)
codeGen(AST, 'ensamblador.txt')
# Run a 120-second iperf3 test against the server, then extract the JSON
# results from the log file.  Python 2 (print statement).
import calendar
import time
import os
from parser import *

start_time_offset = []
server_ip = "148.147.61.57"
LOG_FILE = server_ip+".json"
cmd = "iperf3 -c %s -t 120 -J --logfile %s" % (server_ip, LOG_FILE)
# Record the UTC epoch when the run starts, for the parser's time base.
start_time = calendar.timegm(time.localtime())
print "Start Time", start_time
os.system(cmd)
# Give iperf3 a moment to flush the log file before parsing it.
time.sleep(3)
parse = parser(start_time)
parse.extract("/Users/vsathiam/Documents/Scripts/xrp-scale-test/%s" % LOG_FILE)
# Driver: read the C- source and run the parser + semantic analysis phases.
from semantica import *
from parser import *
from globalTypes import *

f = open('sample.c-', 'r')
programa = f.read() # read the whole file to be compiled
progLong = len(programa) # original program length
programa = programa + '$' # append a '$' character representing EOF
posicion = 0 # position of the current character in the string
# Pass the initial values into the module-level globals.
globales(programa, posicion, progLong)
AST = parser(False)
semantica(AST, True)
# Bootstrap a session against the ADE timetable server and scrape the GWT
# permutation hash plus the per-service RPC keys out of the generated JS.
# NOTE(review): `headerSess`, `parserBegin` and `parser` are defined
# earlier in the file, outside this view.
import requests
import sys
import time

startTime = time.time()
# Start TCP session
sess = requests.Session()
r = sess.get('http://edt.univ-tours.fr/direct/myplanning.jsp?login=ade-etudiant&password=test', headers=headerSess)
responeHeader = r.headers
# Capture the session cookie and append the planning UI state blob.
headerSess['Cookie'] = parserBegin(r.headers['Set-Cookie'], ';') + "; " + '"{"state":{"sortField":"s:NAME", "sortDir":"s:ASC"}}"'
# Get GWT
r = sess.get('http://edt.univ-tours.fr/direct/gwtdirectplanning/gwtdirectplanning.nocache.js', headers=headerSess)
GWT = parser(r.text, "',ac='", "',bc='")
# Get js keys with GWT
r = sess.get('http://edt.univ-tours.fr/direct/gwtdirectplanning/' + GWT + '.cache.html', headers=headerSess)
headerSess['X-GWT-Permutation'] = GWT
headerSess['X-GWT-Module-Base'] = 'http://edt.univ-tours.fr/direct/gwtdirectplanning/'
# Each key is extracted by locating its obfuscated function in the JS.
# NOTE(review): these markers are tied to one compiled build of the app and
# will break when the server redeploys.
keyCorePlanningServiceProxy = parser(r.text, "function SB(){vB();xx.call(this,$moduleBase,'CorePlanningServiceProxy','", "',uB)}")
keyConfigurationServiceProxy = parser(r.text, "function by(){Cx();xx.call(this,$moduleBase,'ConfigurationServiceProxy','", "',Bx)}")
keyWebClientServiceProxy = parser(r.text, "function vF(){fF();xx.call(this,$moduleBase,'WebClientServiceProxy','", "',eF)}")
keyMyPlanningClientServiceProxy = parser(r.text, "function b3b(){_2b();xx.call(this,$moduleBase,'MyPlanningClientServiceProxy','", "',$2b)}")
keyDirectPlanningServiceProxy = parser(r.text, "function x0b(){s0b();xx.call(this,$moduleBase,'DirectPlanningPlanningServiceProxy','", "',r0b)}")
##
# @fn initAde()
# @brief Method to register the application (init the session with the cookie)
def compute_output(self):
    """Predict a category per product, scoring on validation or emitting results.

    In train mode: scores predictions against the validation labels and
    stores the percentage in self.score.  Otherwise: collects
    [id, category] rows into self.result.  Python 2 code.
    """
    result = [["Id_Produit", "Id_Categorie"]]
    id_position = ID_POSITION
    if self.train:
        file_name = VALIDATION_FILE
        brand_position = BRAND_POSITION
        # NOTE(review): typo'd name ("positin"); appears unused below.
        price_positin = PRICE_POSITION
        file_len = TRAIN_LEN
        validation_len = VALIDATION_LEN
        cat_position = C3_ID_POSITION
        cdiscount_position = CDISCOUNT_POSITION
    else:
        file_name = TEST_FILE
        brand_position = BRAND_POSITION_TEST
        file_len = TEST_LEN
        cdiscount_position = 1
    spam_reader = parser(file_name)
    self.temp_score = 0
    self.reset_count(file_len)
    print "computing output"
    # Skip the CSV header row.
    next(spam_reader)
    if not self.batch:
        # Per-item prediction path.
        for item in spam_reader:
            self.smart_count(so_far=True)
            cat = self.compute_category(item)
            if self.train:
                if self.skip_cdiscount_function(item):
                    continue
                real_cat = item[cat_position]
                self.temp_score += int(real_cat == cat)
            else:
                result.append([item[id_position], cat])
            if self.loop_break:
                break
    else:
        # Batch prediction path: build all items first, then predict at once.
        print "batch"
        items = []
        real_cats = []
        for item in spam_reader:
            self.smart_count()
            it = self.pre_build_item(item)
            # NOTE(review): `cat_position` is only assigned in train mode,
            # so this line raises NameError in batch+test mode -- confirm
            # batch is train-only.
            cat = item[cat_position]
            if self.train:
                if self.skip_cdiscount_function(item):
                    continue
                real_cats.append(cat)
                items.append((it, cat))
            else:
                items.append(it)
            if self.loop_break:
                break
        prediction = self.compute_batch_category(items)
        print "prediction done, computing score"
        for i in xrange(len(items)):
            self.temp_score += int(str(prediction[i]) == str(items[i][1]))
        print self.temp_score
    if self.train:
        self.score = self.temp_score / float(validation_len) * 100
        print "score : %s " % (self.score,)
    else:
        self.result = result
# -*- coding: utf-8 -*- """ Created on Sun Apr 24 10:39:34 2016 @author: Khalil """ import parser asm = open("parser.asm", "r") #arquivoentrada hack = open("arquivo.hack","w") #arquivosaida parser = parser() #Convertendo de .asm para um arquivo .hack for i in asm.readlines(): x = parser.readlines(i) hack.write(x) asm.close() hack.close()