def __init__(self):
    """Feed a small sample program through the parser, then pause for the user."""
    # Adjacent literals concatenate into the same single sample string.
    sample = (" var myset = set() \n"
              " var oo = set() n = 653")
    parser = Parser()
    parser.parse(sample)
    raw_input("Press Enter to continue...")
def test_one(self):
    """Lexing tests/sample.ps yields the expected tokens and parses cleanly."""
    lexer = Lexer('tests/sample.ps')
    lexer.lex()
    expected = [
        ('KEYWORD', 'program'), ('IDENTIFIER', 'hellowld'), ('SEMICOLON', ';'),
        ('KEYWORD', 'begin'), ('IDENTIFIER', 'writeln'), ('LP', '('),
        ('BASE10_NUM', '2'), ('RP', ')'), ('SEMICOLON', ';'),
        ('IDENTIFIER', 'readln'), ('SEMICOLON', ';'), ('KEYWORD', 'end'),
    ]
    self.assertEqual([(tok.name, tok.value) for tok in lexer.tokens], expected)
    parser = Parser(lexer.tokens)
    try:
        parser.parse()
    except SyntaxError:
        self.fail()
def test_two(self):
    """Lexing tests/sample2.ps yields the expected tokens and parses cleanly."""
    lexer = Lexer('tests/sample2.ps')
    lexer.lex()
    # Full expected token stream for the exFunction sample program.
    expected = [
        ('KEYWORD', 'program'), ('IDENTIFIER', 'exFunction'), ('SEMICOLON', ';'),
        ('KEYWORD', 'var'), ('IDENTIFIER', 'a'), ('COMMA', ','),
        ('IDENTIFIER', 'b'), ('COMMA', ','), ('IDENTIFIER', 'ret'),
        ('COLON', ':'), ('KEYWORD', 'integer'), ('SEMICOLON', ';'),
        ('KEYWORD', 'function'), ('IDENTIFIER', 'max'), ('LP', '('),
        ('IDENTIFIER', 'num1'), ('COMMA', ','), ('IDENTIFIER', 'num2'),
        ('COLON', ':'), ('KEYWORD', 'integer'), ('RP', ')'), ('COLON', ':'),
        ('KEYWORD', 'integer'), ('SEMICOLON', ';'),
        ('KEYWORD', 'var'), ('IDENTIFIER', 'result'), ('COLON', ':'),
        ('KEYWORD', 'integer'), ('SEMICOLON', ';'),
        ('KEYWORD', 'begin'), ('KEYWORD', 'if'), ('LP', '('),
        ('IDENTIFIER', 'num1'), ('GT', '>'), ('IDENTIFIER', 'num2'),
        ('RP', ')'), ('KEYWORD', 'then'),
        ('IDENTIFIER', 'result'), ('ATTRIB', ':='), ('IDENTIFIER', 'num1'),
        ('KEYWORD', 'else'),
        ('IDENTIFIER', 'result'), ('ATTRIB', ':='), ('IDENTIFIER', 'num2'),
        ('SEMICOLON', ';'),
        ('IDENTIFIER', 'max'), ('ATTRIB', ':='), ('IDENTIFIER', 'result'),
        ('SEMICOLON', ';'), ('KEYWORD', 'end'), ('SEMICOLON', ';'),
        ('KEYWORD', 'begin'),
        ('IDENTIFIER', 'a'), ('ATTRIB', ':='), ('BASE10_NUM', '100'),
        ('SEMICOLON', ';'),
        ('IDENTIFIER', 'b'), ('ATTRIB', ':='), ('BASE10_NUM', '200'),
        ('SEMICOLON', ';'),
        ('IDENTIFIER', 'ret'), ('ATTRIB', ':='), ('IDENTIFIER', 'max'),
        ('LP', '('), ('IDENTIFIER', 'a'), ('COMMA', ','), ('IDENTIFIER', 'b'),
        ('RP', ')'), ('SEMICOLON', ';'),
        ('IDENTIFIER', 'writeln'), ('LP', '('), ('IDENTIFIER', 'ret'),
        ('RP', ')'), ('SEMICOLON', ';'), ('KEYWORD', 'end'),
    ]
    self.assertEqual([(tok.name, tok.value) for tok in lexer.tokens], expected)
    parser = Parser(lexer.tokens)
    try:
        parser.parse()
    except SyntaxError:
        self.fail()
def process_node_create_html(self, nodeid): if self.options.verbose: print 'html', nodeid errors = 0 infile = os.path.join('src', self.book.name, self.stage_name, nodeid + '.txt') success = True try: parser = Parser(self, self.options) if not parser.parse(infile, nodeid, todo=True): success = False except Exception as e: print 'Exception:', e exc_type, exc_value, exc_traceback = sys.exc_info() traceback.print_exception(exc_type, exc_value, exc_traceback) success = False if not success: print '%s' % nodeid print 'Parse failure' errors += 1 sys.exit(0) parser.export_html( os.path.join(self.book.name, self.stage_name, nodeid + '.html')) return errors
def process_html(self, infile, outfile, options): """ Process simple (non-node) src file and generate an HTML file. """ errors = 0 name = os.path.splitext(os.path.basename(infile))[0] make_dir(os.path.dirname(outfile)) success = True if options.verbose: print ' %s -> html' % infile try: parser = Parser(None, options) if not parser.parse(infile, name): print 'Failure during parse_main' success = False except: exc_type, exc_value, exc_traceback = sys.exc_info() traceback.print_exception(exc_type, exc_value, exc_traceback) success = False if not success: print '%s' % file print 'Parse failure' errors += 1 else: parser.export_html(outfile) return errors
def test_parses_succeessfully(self):
    """Parsing 3.csv through the week formatter yields the expected row dicts."""
    reader = CsvReader(os.path.join(TEST_DIR, '3.csv'))
    parser = Parser(reader, WeekFormatter())
    expected = [
        {'square': 9, 'day': 'mon', 'value': 3, 'description': 'third_desc 9'},
        {'square': 9, 'day': 'tue', 'value': 3, 'description': 'third_desc 9'},
        {'square': 4, 'day': 'wed', 'value': 2, 'description': 'third_desc 4'},
        {'double': 4, 'day': 'thu', 'value': 2, 'description': 'third_desc 4'},
        {'double': 2, 'day': 'fri', 'value': 1, 'description': 'third_desc 2'},
    ]
    actual = parser.parse()
    for want, got in zip(expected, actual):
        self.assertDictEqual(want, got)
def test_parse_select(self):
    """A SELECT statement is parsed into type/fields/table components."""
    p = Parser()

    # Wildcard projection.
    res = p.parse("select * from me where a=1 and b=2")
    print(res)
    # FIX: assert the values directly instead of assertEqual(expr == v, True),
    # so a failure reports the actual parsed value, not "False != True".
    self.assertEqual(res["type"], "search")
    self.assertEqual(res["fields"], "*")
    self.assertEqual(res["table"], "me")

    # Explicit column list.
    res = p.parse("select a, b, c from me where a=1 and b=2")
    print(res)
    # FIX: the old index-based all(...) over len(returned) silently passed
    # when the parser returned a truncated field list.
    self.assertEqual(list(res["fields"]), ["a", "b", "c"])
def main(args):
    """Parse the interface specification and run every generator over it."""
    iface = Parser().parse(args.specification.read())
    for generator in args.generators:
        # Each generator maps relative file paths to generated content.
        for file_path, content in generator.generate(iface).items():
            write_generated_file(
                path.join(generator.name, file_path),
                content,
                args.out_directory,
                args.update,
            )
def test_parse_update(self):
    """An UPDATE statement is parsed into type/fields/table components."""
    p = Parser()
    res = p.parse("update me set a = 2, b = 3")
    print(res)
    # FIX: assert values directly instead of assertEqual(expr == v, True),
    # so a failure shows the actual parsed value.
    self.assertEqual(res["type"], "update")
    self.assertEqual(res["table"], "me")
    # Every SET pair must round-trip through the parser (extra returned
    # keys remain allowed, matching the original subset check).
    expected = {"a": "2", "b": "3"}
    for key, value in expected.items():
        self.assertEqual(res["fields"][key], value)
def test_parse_insert(self):
    """An INSERT statement is parsed into type/fields/table components."""
    p = Parser()
    res = p.parse("insert into me (a,b) values (2,3)")
    print(res)
    # Every column/value pair must round-trip through the parser (extra
    # returned keys remain allowed, matching the original subset check).
    expected = {"a": "2", "b": "3"}
    for key, value in expected.items():
        self.assertEqual(res["fields"][key], value)
    # FIX: assert values directly instead of assertEqual(expr == v, True),
    # so a failure shows the actual parsed value.
    self.assertEqual(res["type"], "insert")
    self.assertEqual(res["table"], "me")
def execute(program_container: ProgramContainer):
    """
    Executes the given program.

    :param program_container: the program container containing the program
        that shall be executed.
    """
    try:
        try:
            # Try to build the AST from already-generated intermediate code.
            parsed_program = ast_tree_of_intermediate_code(program_container)
        except IntermediateCodeError as ic_error:
            # Intermediate code unusable: log it, do a full parse, and
            # regenerate the tree-based intermediate code from the fresh AST.
            logger.error(ic_error.message())
            parsed_program = Parser.parse(program_container)
            generate_tree_based_intermediate_code(parsed_program,
                                                  program_container)
        logger.debug('Parsed AST for %s: %s',
                     program_container.origin, parsed_program)
        global_env = GlobalEnvironment(name='__main__',
                                       file_path=program_container.origin)
        parsed_program.execute(global_env, None)
        logger.debug('Environment after execution: %s', global_env)
    except (ParserErrors, ExecutionError) as error:
        print(error.message(), file=sys.stderr)
def process_node_path(self, stage_src, src, stage_dst, dst, node_path): if self.options.verbose: print 'path %s -> %s' % (src, dst) #print node_path if stage_dst != self.stage_name: print '%s -> %s' % (src, dst) print 'Unexpected stage name: %s instead of %s' % (stage_dst, self.stage_name) errors += 1 sys.exit(0) self.book.copy_snapshot_dir(stage_src, src, stage_dst, dst) errors = 0 nodeid = dst[0:3] infile = os.path.join('src', self.book.name, stage_dst, nodeid + '.txt') success = True try: parser = Parser(self, self.options, pathcheck=True) if not parser.parse(infile, nodeid, fullnodeid=dst): success = False except: exc_type, exc_value, exc_traceback = sys.exc_info() traceback.print_exception(exc_type, exc_value, exc_traceback) success = False if not success: print '%s -> %s' % (src, dst) print node_path print 'Parse failure' errors += 1 sys.exit(0) if not self.check_function_order(self.stage_name, dst): print '%s -> %s' % (src, dst) print node_path print 'Function order fail' errors += 1 sys.exit(0) return errors
def run(source: str, interpreter: Interpreter) -> None:
    """Scan, parse, resolve and interpret *source*, reporting stage errors.

    :param source: program text to execute.
    :param interpreter: interpreter instance that evaluates the statements.
    """
    scanner = Scanner(source)
    scanner.scan_tokens()
    if scanner.had_error:
        # FIX: dropped the pointless f-prefix (no placeholders, lint F541).
        print('Error scanning')
        return
    parser = Parser(scanner.tokens)
    statements = parser.parse()
    if parser.had_error:
        print('Error parsing')
        for err in parser.errors:
            parser_error_print(err, scanner.source_lines)
    else:
        resolve_statements(statements)
        try:
            interpreter.interpret(statements)
        except LoxRuntimeError as err:
            # print() stringifies the message itself; no f-string needed.
            print(err.msg)
def main(code=None):
    """Compile *code* (or the file named in sys.argv[1]) and write the
    generated assembly to a.s.

    :param code: source text to compile; read from sys.argv[1] when None.
    """
    tokenizer = Tokenizer()
    parser = Parser()
    generator = Generator()
    if code is None:
        # FIX: use a context manager (the old open/read/close leaked the
        # handle on error, and the name `file` shadowed the builtin).
        with open(sys.argv[1]) as source_file:
            code = source_file.read()
    # The generator prints the assembly, so temporarily redirect stdout.
    out = open('a.s', 'w')
    saved_stdout = sys.stdout
    sys.stdout = out
    try:
        tokens = tokenizer.tokenize(code)
        nodes = parser.parse(tokens)
        generator.generate(nodes, parser.variables)
    finally:
        # BUG FIX: restore stdout even when compilation raises, and close
        # a.s explicitly -- the old code never closed it, leaking the handle
        # and risking unflushed buffered output.
        sys.stdout = saved_stdout
        out.close()
def run(
    self,
    text: str,
    file_name: str,
    repl_mode: bool = False,
    print_tokens: bool = False,
) -> List[LangType]:
    """Lex, parse and evaluate *text*, returning one value per AST node.

    :param text: source code to run.
    :param file_name: name reported in tokens and evaluation contexts.
    :param repl_mode: forwarded to the parser.
    :param print_tokens: when True, dump the token stream to stdout.
    """
    tokens = Lexer(text, file_name).make_tokens()
    if print_tokens:
        print(tokens)
    # Each top-level AST node is evaluated in its own fresh context.
    return [
        node.visit(Context(file_name, self.syntax_table))
        for node in Parser(tokens).parse(repl_mode)
    ]
def main(): """Entry point""" input_file_name = sys.argv[1] output_file_name = sys.argv[2] input_file = open(input_file_name, "r") assembly_text = input_file.read() input_file.close() try: tokens = tokenize(assembly_text) parser = Parser(tokens) instructions = parser.parse() code_gen = CodeGenerator(instructions) mif_text = code_gen.mif_format() output_file = open(output_file_name, "w") output_file.write(mif_text) output_file.close() except RuntimeError as err: print err.message
def main():
    """CLI entry point: read a JS file and run the requested stage on it."""
    args = parse_args()
    try:
        with open(args.input, 'r') as source:
            js_code = source.read()
        if args.beautify:
            js_code = beautify(js_code)
            print("Beautified code:")
            print(js_code)
        lexer = Lexer()
        lexer.input(js_code)
        if args.lexer:
            # Dump the raw token stream.
            print("Lexer:")
            for tok in lexer:
                print(tok)
        elif args.parser:
            # Build and pretty-print the syntax tree.
            tree = Parser().parse(js_code)
            print("Parser:")
            visit(tree, 0)
    except IOError:
        errprint('no such file or directory "%s"' % args.input)
        sys.exit(1)
TCP_PORT = 50000
BUFFER_SIZE = 4096

# Create socket and bind it to TCP address & port
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind((TCP_IP, TCP_PORT))
# FIX: mark the socket as listening once -- re-calling listen(1) on every
# loop iteration (as before) was redundant.
s.listen(1)

led = CameraLED()
parser = Parser()  # FIX: stray ';' statement terminators removed throughout

# Infinite loop: accept one connection, run its command, echo it back.
while 1:
    # Connection found. Accept connection
    conn, addr = s.accept()
    data = conn.recv(BUFFER_SIZE).rstrip()
    tokens = parser.parse(data)
    #print tokens
    l = len(tokens)
    if l > 0 and tokens[0] == "led":
        if l > 1 and tokens[1] == "on":
            # print "Turn LED on"
            led.turnOn()
        elif l > 1 and tokens[1] == "off":
            # print "Turn LED off"
            led.turnOff()
    # Echo the request back as acknowledgement, then drop the connection.
    conn.send(data)
    conn.close()
from lexer.lexer import Lexer
from parser.parser import Parser

# Sample program: read two integers and count from x up to y.
# NOTE(review): reconstructed as one statement per line -- the split('\n')
# loop below shows the literal was originally multi-line; confirm layout.
text_input = """
inteiro:x;
inteiro:y;
leia(x);
leia(y);
para x ate y passo 1
imprima(x);
fim_para
"""

lexer = Lexer().get_lexer()

# Build the grammar rules, then materialize the actual parser object.
pg = Parser()
pg.parse()
parser = pg.get_parser()

# Feed the program line by line, skipping blank lines.
for line in filter(None, text_input.split('\n')):
    parser.parse(lexer.lex(line)).eval()
pass from parser.parser import Parser from logic.logic import Logic from logging.logging import Logging logic = Logic() logging = Logging(logic) parser = Parser(logic, logging) if len(sys.argv) > 1 and sys.argv[1] != "-cli": logging.start() parser.default_script() sleep(0.1) #wait for pybroker connection parser.print(">".join(sys.argv[1:])) parser.parse(" ".join(sys.argv[1:])) if sys.argv[1] == "log": print("Press enter to end logging") input() elif use_ui: ui = Ui() ui.logging = logging logging.ui = ui parser.ui = ui ui.parser = parser ui.start() else: run = True logging.start() if parser.default_script(): while run:
class Crawler:
    """
    A Crawler class for crawling GoogleMaps gas station prices.
    """

    def __init__(self, cities, gas_stations, storage,
                 min_sleep_time=15, max_sleep_time=60):
        """
        Initializes a crawler.

        Args:
            cities: a list of "city, state"
            gas_stations: a list of gas stations
            storage: storage medium used to persist crawled results
            min_sleep_time: minimum seconds to sleep after a web request
            max_sleep_time: maximum seconds to sleep after a web request
        """
        # list of cities
        self.cities = cities
        # list of gas stations
        self.gas_stations = gas_stations
        # politeness-delay bounds between requests
        self.min_sleep_time = min_sleep_time
        self.max_sleep_time = max_sleep_time
        # parser for the gas station blocks
        self.parser = Parser()
        # parameters to extract
        self.params = [
            'address', 'brand', 'lat', 'lon', 'price_1', 'price_2', 'price_3'
        ]
        # storage medium
        self.storage = storage

    def crawl(self):
        """
        Begin crawling every (city, gas_station) combination.
        """
        for city in self.cities:
            # catch error per city so one bad city doesn't stop the crawl
            try:
                # NOTE(review): `data` is accumulated but never read after
                # the loop -- results are stored one search at a time below.
                data = []
                for gas_station in self.gas_stations:
                    # search the area
                    res = self._search(city, gas_station)
                    # politeness sleeping within a random range, so we don't
                    # seem like a robot
                    time.sleep(
                        randint(self.min_sleep_time, self.max_sleep_time))
                    # append the results
                    data.append(res)
                    # store the results
                    self.storage.store_data(res)
            except Exception as e:
                print(e)

    def _search(self, city, gas_station):
        """
        Search the latitude and longitude for gas station information.

        Args:
            city: the city to search
            gas_station: the gas station to search

        Return:
            A list of gas station information for each station within the
            html from the url request, or an empty list when the request
            fails.
        """
        # print feedback
        print('Searching %s gas stations in %s --> ' % (gas_station, city),
              end='', flush=True)
        # url to search
        url = 'https://www.google.com/maps/search/%s+gas+prices+%s' % (
            gas_station.replace(' ', '+'), city.replace(' ', '+'))
        # BUG FIX: the empty-list fallback below was unreachable dead code --
        # a failed urlopen() raised straight out of the method.  Catch
        # network errors (URLError is a subclass of OSError) so the
        # documented fallback actually happens.
        try:
            with urllib.request.urlopen(url) as response:
                # parse the html for gas station information
                res = self.parser.parse(
                    html=response.read().decode('utf-8'),
                    gas_station=gas_station,
                    params=self.params)
            # print feedback
            print('Found %d' % len(res), flush=True)
            # return results
            return res
        except OSError:
            # print feedback
            print('Found %d' % 0, flush=True)
            # return empty list if couldn't open url
            return []
TCP_PORT = 50000
BUFFER_SIZE = 4096

# Create the listening TCP socket and bind it to the configured endpoint.
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind((TCP_IP, TCP_PORT))

led = CameraLED()
parser = Parser()

# Serve forever: one connection, one command, echo it back.
while True:
    # Listen for a connection
    s.listen(1)
    # Connection found. Accept connection
    conn, addr = s.accept()
    data = conn.recv(BUFFER_SIZE).rstrip()
    tokens = parser.parse(data)
    #print tokens
    # A "led on"/"led off" command needs at least two tokens.
    if len(tokens) > 1 and tokens[0] == "led":
        if tokens[1] == "on":
            led.turnOn()
        elif tokens[1] == "off":
            led.turnOff()
    # Echo the request back as acknowledgement, then drop the connection.
    conn.send(data)
    conn.close()