def main(argv):
    """Parse the Little source file named in argv[1] and visit its parse tree."""
    source = FileStream(argv[1])
    lexer = LittleLexer(source)
    # Suppress the default console error reporting on the lexer.
    lexer.removeErrorListener(ConsoleErrorListener.INSTANCE)
    tokens = CommonTokenStream(lexer)
    parser = LittleParser(tokens)
    # ...and on the parser as well.
    parser.removeErrorListener(ConsoleErrorListener.INSTANCE)
    parse_tree = parser.program()
    # Run the project's visitor over the full parse tree.
    Visitor().visit(parse_tree)
def main(argv):
    """Build the symbol table for the Little source file named in argv[1]."""
    char_stream = FileStream(argv[1])
    lexer = LittleLexer(char_stream)
    # Silence the default console error listeners on both recognizers.
    lexer.removeErrorListener(ConsoleErrorListener.INSTANCE)
    token_stream = CommonTokenStream(lexer)
    parser = LittleParser(token_stream)
    parser.removeErrorListener(ConsoleErrorListener.INSTANCE)
    parse_tree = parser.program()
    # Walk the tree with the symbol-table listener.
    ParseTreeWalker().walk(SymbolTableListener(), parse_tree)
def main(testcase_filename):
    """Parse *testcase_filename*, build its symbol table, then build and print the AST.

    Fix: the original ended with large blocks of dead commented-out "Step 2"
    and "Step 3" code held in triple-quoted strings, followed by a dangling
    ``'''`` that opened an unterminated string literal (a syntax error).
    The dead code and the stray quote have been removed.
    """
    lexer = LittleLexer(FileStream(testcase_filename))
    stream = CommonTokenStream(lexer)
    parser = LittleParser(stream)
    tree = parser.program()
    walker = ParseTreeWalker()
    # First pass: collect declarations into the symbol table.
    symbol_table_builder = SymbolTableBuilder()
    walker.walk(symbol_table_builder, tree)
    # symbol_table_builder.print_symbol_table()
    # Second pass: build the AST from the tree using the symbol table.
    a = AST(symbol_table_builder.symbol_table)
    walker.walk(a, tree)
    a.print_ast()
def main(argv):
    """Report whether the Little source file in argv[1] parses without syntax errors."""
    lexer = LittleLexer(FileStream(argv[1]))
    # Turn off the default console error output; we only report accept/reject.
    lexer.removeErrorListener(ConsoleErrorListener.INSTANCE)
    parser = LittleParser(CommonTokenStream(lexer))
    parser.removeErrorListener(ConsoleErrorListener.INSTANCE)
    parser.program()
    # Zero syntax errors means the program is accepted.
    verdict = 'Accepted' if parser.getNumberOfSyntaxErrors() == 0 else 'Not accepted'
    print(verdict)
def main(argv):
    """Compile the Little source file named in argv[1]: build the parse tree,
    derive the symbol table and AST stack, generate IR, and print Tiny code.

    Fix: the local variable ``input`` shadowed the Python builtin of the same
    name; renamed to ``input_stream``.
    """
    input_stream = FileStream(argv[1])
    lexer = LittleLexer(input_stream)
    stream = CommonTokenStream(lexer)
    parser = LittleParser(stream)
    tree = parser.prog()
    listener = MyListener()
    walker = ParseTreeWalker()
    # Creates the symbol table and the AST while walking the tree.
    walker.walk(listener, tree)
    symbol_table = listener.getTable()
    ast_stack = listener.getStack()
    # Generate IR code using the AST stack and the symbol table.
    ir = IRGenerate(ast_stack, symbol_table)
    # Print the resulting Tiny code, terminated by a halt instruction.
    print(";Printing Tiny Code from Driver")
    for tiny in ir.getTinyList():
        print(tiny)
    print("sys halt")
def __init__(self, *args, **kw):
    """Initialize the file processor.

    Required keyword arguments (read from ``kw``): 'language', 'articles',
    'offsets', 'templates', 'ignored_templates'.  Derived ``.import`` /
    ``.files`` filenames are removed from disk if they already exist, so
    each run starts from a clean slate.  Also creates the template SQLite
    database with speed-over-safety pragmas and its two tables.
    """
    super(FileProcessing, self).__init__(*args, **kw)
    # NOTE(review): kw is forwarded to the superclass unchanged — presumably
    # the base class tolerates these extra keys; confirm against its __init__.
    self.language_processor = kw['language']
    self.article_db_name = kw['articles']
    self.article_import = self.article_db_name + '.import'
    self.offset_db_name = kw['offsets']
    self.offset_import = self.offset_db_name + '.import'
    self.file_import = self.offset_db_name + '.files'
    self.template_db_name = kw['templates']
    self.ignored_templates = kw['ignored_templates']
    # Remove any leftover output files from a previous run.
    for filename in [self.article_db_name, self.article_import, self.offset_db_name, self.offset_import, self.template_db_name, self.file_import]:
        if os.path.exists(filename):
            os.remove(filename)
    # Running statistics accumulated while processing the dump.
    self.restricted_count = 0
    self.redirect_count = 0
    self.article_count = 0
    self.template_count = 0
    self.template_redirect_count = 0
    self.all_titles = []
    # Text converter provided by the project's LittleParser.
    self.convert = LittleParser().convert
    self.redirects = {}
    self.articles = {}
    self.offsets = {}
    self.total_character_count = 0
    self.time = time.time()
    # Template database: tuned for bulk-load speed at the cost of
    # durability (no journal, no fsync) — acceptable because the file is
    # deleted and rebuilt on every run.
    self.template_db = sqlite3.connect(self.template_db_name)
    self.template_db.execute('pragma synchronous = 0')
    self.template_db.execute('pragma temp_store = 2')
    self.template_db.execute('pragma read_uncommitted = true')
    self.template_db.execute('pragma cache_size = 20000000')
    self.template_db.execute('pragma default_cache_size = 20000000')
    self.template_db.execute('pragma journal_mode = off')
    self.template_db.execute(''' create table templates ( title varchar primary key, body varchar ) ''')
    self.template_db.execute(''' create table redirects ( title varchar primary key, redirect varchar ) ''')
    self.template_db.commit()
    self.template_cursor = self.template_db.cursor()
def findHost(term, id_list, out_dir, debug, verbose, seq_directory=CONF['seq_dir'], tax_directory=CONF['taxonomy_dir'], \
             improper_host_path=CONF['improper_host_path'], processed_seq_directory=CONF['processed_seq_dir'], rettype='native'):
    '''Arguments (translated from the original Polish):
    - term - what we get back from termCreation
    - ids - list of sequence identifiers from the nuccore database
    - temp_directory - folder for partial results
    - directory - the folder in which this should be saved
    - seq_directory - folder for xml files with sequences
    - tex_directory - folder for xml files from the taxonomy database

    NOTE(review): this is Python 2 code (print statements, ``except X, e``).
    The CONF[...] defaults are evaluated once at import time — confirm that
    is intended.  Downloads each id from Entrez (with retry on network
    errors), parses or unpickles it into a SequenceRepresentation, and
    removes successfully processed ids from ``id_list`` in place.'''
    seqs = []
    # createEmptyFile(improper_host_path)
    tax_directory = os.path.join(out_dir, tax_directory)
    seq_directory = os.path.join(out_dir, seq_directory)
    processed_seq_directory = os.path.join(out_dir, processed_seq_directory)
    createDirIfNotExists(tax_directory)
    createDirIfNotExists(seq_directory)
    createDirIfNotExists(processed_seq_directory)
    while id_list:
        # Progress report: always when verbose, otherwise every 20 ids.
        if verbose or not (len(id_list) % 20):
            print '%s ids left' % len(id_list)
        id_ = id_list[0]
        path = os.path.join(seq_directory, id_)
        fasta_path = path + '.fasta'
        processed_path = os.path.join(processed_seq_directory, id_)
        # Download the file (retry forever on transient network errors).
        while not os.path.exists(path):
            try:
                handle = Entrez.efetch(db="nuccore", id=id_, rettype=rettype, retmode="xml")
                handle_fasta = Entrez.efetch(db='nuccore', id=id_, rettype='fasta', retmode='text')
                if verbose:
                    print handle.geturl()
                with open(path, 'w') as file_handle:
                    file_handle.write(handle.read())
                # Be polite to the NCBI servers between the two fetches.
                time.sleep(1)
                with open(fasta_path, 'w') as fasta_write_handle:
                    fasta_write_handle.write(handle_fasta.read())
            except (urllib2.URLError, socket.timeout, socket.error), e:
                logger.error(e)
                print e
                time.sleep(1)
        # Parse the file and extract its features.
        try:
            # seq - object of type SequenceRepresentation
            if os.path.exists(processed_path):
                # Reuse a previously pickled result if one exists.
                seq = pickle.load(open(processed_path, 'rb'))
            else:
                with open(path) as handle:
                    seq = LittleParser.fromHandle(handle, fasta_path, tax_directory, debug, verbose)
                # Cache the parsed representation for future runs.
                pickle.dump(seq, open(processed_path, 'wb'),
                            pickle.HIGHEST_PROTOCOL)
            seqs.append(seq)
            id_list.remove(id_)
        except EOFError, e:
            # Truncated/corrupt pickle: log it and delete so the next pass
            # re-parses this id from scratch.
            logger.info(e)
            os.remove(processed_path)