def __init__(self, tokenizer):
    """Set up compilation state for one tokenizer: symbol tables,
    VM writer, and unique-label counters."""
    self.tokenizer = tokenizer
    # scratch text output; VM code itself goes through the writer below
    self.output_file = open('temp', 'w')
    self.CLASSES = []
    self.classTable = symbolTable.SymbolTable()       # class-scope identifiers
    self.subRoutineTable = symbolTable.SymbolTable()  # subroutine-scope identifiers
    self.vm_writer = VMWriter.VMWriter()
    self.if_label_counter = 0     # for unique if/else labels
    self.while_label_counter = 0  # for unique while labels
def __init__(self, tokenizer, output_name):
    """Wire up tokenizer, symbol tables, the VM writer, and an XML
    side-output file named after the tokenized source."""
    self.tokenizer = tokenizer
    self.out_file_name = tokenizer.getFileName()
    # parallel XML dump of the parse alongside the VM output
    self.output_file = open(self.out_file_name + '.xml', 'w')
    self.classTable = symbolTable.SymbolTable()       # class-scope identifiers
    self.subRoutineTable = symbolTable.SymbolTable()  # subroutine-scope identifiers
    self.vm_writer = VMWriter.VMWriter(output_name)
    self.CLASSES = []
    self.class_name = None        # set once the class declaration is parsed
    self.if_label_counter = 0     # for unique if/else labels
    self.while_label_counter = 0  # for unique while labels
def __init__(self, syms=None): """ initialization """ print 'tree with stub methods' self.queue = [] self.lastph = None self.symbols = syms if syms != None else symbolTable.SymbolTable()
def __init__(self, inputFile, outputFile):
    """Open the assembler's input, output, and debug files and build
    the helper tables.

    inputFile  -- path of the .asm source to read
    outputFile -- path of the .hack binary output to write
    """
    self.asmFile = open(inputFile, "r")
    self.checkIt = open("Check.txt", "w")  # debug trace output
    self.hackFile = open(outputFile, 'w')
    # Fix: the original explicitly called __init__() again on each freshly
    # constructed object (self.newTable.__init__(), self.translation.__init__());
    # the constructor has already run, so those redundant re-inits are removed.
    self.newTable = symbolTable.SymbolTable()
    self.translation = Code.Code()
def __init__(self):
    """Read the target file path from the command line and set up
    translation state for the assembler."""
    path = sys.argv[1]
    self.code = code.Code()
    self.symbolTable = symbolTable.SymbolTable()
    self.varSymIndex = 16  # first free RAM address for new variables
    self.line = 0
    self.run = 0
    self.file = open(path, 'r+')
    # base file name: strip any directory prefix, then the extension
    basename = path.split('/')[-1]
    self.name = basename.split('.')[0]
def __init__(self, generator, filename):
    """ Initialize from a token generator yielding (token_type, token)
    pairs, priming the stream with its first pair. """
    self.generator = generator
    # pull the first (type, token) pair so parsing can start immediately
    self.type, self.token = self.generator.next()
    self.className = None
    self.subroutineName = None
    self.symbolTable = symbolTable.SymbolTable()
    self.vmwriter = vmwriter.Vmwriter(filename)
    # counter used to create unique labels for flow control
    self.runningIdx = 0
def __init__(self): """ initialization """ print 'context with empty symbol table' syms = symbolTable.SymbolTable() syms.getSyntaxTypeIndexNumber( 'sent') # seed with syntactic types for test syms.getSyntaxTypeIndexNumber('end') # syms.getSyntaxTypeIndexNumber('noun') # syms.getSyntaxTypeIndexNumber('verb') # syms.getSyntaxTypeIndexNumber('unkn') # syms.getSyntaxTypeIndexNumber('date') # syms.getSyntaxTypeIndexNumber('time') # syms.getSyntaxTypeIndexNumber('num') # syms.getSyntaxTypeIndexNumber('ssn') # self.syms = syms self.wghtg = Weighting()
# Fragment (collapsed onto one line): the tail of a GrammarTable method that
# records a splitting rule and returns it, followed by the module's standalone
# unit test, which reads <name>.g.elly rule definitions, builds a GrammarTable
# and a PunctuationRecognizer over a shared SymbolTable, and dumps the grammar.
# NOTE(review): Python 2 syntax (print statement, `print >> sys.stderr`).
self.splits[nt].append(ru) # add rule to grammar table return ru # # unit test # if __name__ == '__main__': import ellyConfiguration import dumpEllyGrammar import punctuationRecognizer filn = sys.argv[1] if len(sys.argv) > 1 else 'test' sym = symbolTable.SymbolTable() # print sym base = ellyConfiguration.baseSource + '/' inp = ellyDefinitionReader.EllyDefinitionReader(base + filn + '.g.elly') if inp.error != None: print inp.error sys.exit(1) print 'reading', '[' + filn + ']', len( inp.buffer), 'lines of rule definitions' try: gtb = GrammarTable(sym, inp) pnc = punctuationRecognizer.PunctuationRecognizer(sym) # print gtb dumpEllyGrammar.dumpAll(sym, gtb, 5) except ellyException.TableFailure: print >> sys.stderr, 'exiting'
# Test-environment constructor (collapsed onto two long lines): builds an empty
# symbol table and conceptual hierarchy, an interpretive context holding two
# dummy tokens, two syntactic categories ('x', 'y'), dummy extending and
# splitting rules that carry generative and cognitive semantic procedures,
# local variables 'vl'/'vr' plus a 'do' procedure, and a three-node phrase
# (split parent with left/right extending descendants); it finishes by turning
# on the 'one' and 'two' semantic features for that phrase and exposing it as
# self.phrase.
# NOTE(review): Python 2 print statements; the line break between the two
# lines below falls inside the trailing comment "define semantic feature",
# so they form one method body.
def __init__(self): """ create environment for testing semantic procedure arguments: self """ stb = symbolTable.SymbolTable() # empty hry = conceptualHierarchy.ConceptualHierarchy() # empty ctx = interpretiveContext.InterpretiveContext(stb, {}, {}, hry) self.context = ctx # make available ptb = parseTreeBase.ParseTreeBase() # just for generating phrases self.toknL = ellyToken.EllyToken( 'uvwxxyz') # insert dummy data that might self.toknR = ellyToken.EllyToken('abcdefg') # be replaced from outside ctx.addTokenToListing(self.toknL) # put a token in first position ctx.addTokenToListing(self.toknR) # and a token in second x = ctx.syms.getSyntaxTypeIndexNumber( 'x') # for consistency, define two y = ctx.syms.getSyntaxTypeIndexNumber( 'y') # syntactic categories for rules fbs = ellyBits.EllyBits(symbolTable.FMAX) # zero feature bits exL = grammarRule.ExtendingRule(x, fbs) # dummy rules as a place for exR = grammarRule.ExtendingRule(x, fbs) # attaching semantic procedures spl = grammarRule.SplittingRule(y, fbs) # for testing # dummy semantic procedures gX = ["left", "right"] # generative gL = ["obtain"] # gR = ["obtain"] # gP = ["append did it!"] # for standalone generative subprocedure cX = [] # cognitive cL = [">> +1"] # cR = [">> -1"] # ctx.pushStack() # needed for local variables usable in testing ctx.setLocalVariable( "vl", "LLLL") # make two variables available to work with ctx.setLocalVariable("vr", "RRRR") # ctx.setProcedure('do', self._genp(gP)) # define procedure 'do' exL.gens = self._genp(gL) # assign semantic procedures to rules exL.cogs = self._cogp(cL) # exR.gens = self._genp(gR) # exR.cogs = self._cogp(cR) # spl.gens = self._genp(gX) # spl.cogs = self._cogp(cX) # phr = ptb.makePhrase(0, spl) # make phrase for splitting plus phr.krnl.lftd = ptb.makePhrase(0, exL) # left and right descendants phr.krnl.rhtd = ptb.makePhrase(1, exR) # defined by left and right # extending rules from above phr.ntok = 1 stb.getFeatureSet('!one,two', True) # define semantic 
feature print stb.smindx smx = stb.smindx['!'] # ix = smx['one'] # print 'ix=', ix phr.krnl.semf.set(ix) # turn on feature for phrase ix = smx['two'] # print 'ix=', ix phr.krnl.semf.set(ix) # turn on feature for phrase print 'semf=', phr.krnl.semf self.phrase = phr # make phrase available
""" dummy method """ self.tokns = [] self.wghtg = Wtg() name = sys.argv[1] if len(sys.argv) > 1 else 'test' deep = int(sys.argv[2]) if len(sys.argv) > 2 else 100 base = ellyConfiguration.baseSource + '/' rdr = ellyDefinitionReader.EllyDefinitionReader(base + name + '.g.elly') if rdr.error != None: print(rdr.error, file=sys.stderr) sys.exit(1) print('loading', '[' + base + name + '.g.elly]', len(rdr.buffer), 'lines') stbu = symbolTable.SymbolTable() gtbu = grammarTable.GrammarTable(stbu, rdr) ctxu = Ctx() tksu = ctxu.tokns tree = ParseTreeWithDisplay(stbu, gtbu, None, ctxu) print() print(tree) print() print(dir(tree)) print() cat = stbu.getSyntaxTypeIndexNumber('num') fbs = ellyBits.EllyBits(symbolTable.FMAX) tree.addLiteralPhrase(cat, fbs) tree.digest()
# Fragment (collapsed): the tail of a vocabulary-match display routine (prints
# each match, its span, any ending, and its generative semantic code), followed
# by a test driver that loads pickled Elly rules for a symbol table (creating
# a fresh one when no pickle exists), reports any newly added symbols, and
# opens the vocabulary definition source for reading.
print('=', str(r.vem)) # show each match print(' ', r.nspan, 'chars matched, endings included') if r.suffx != '': print(' ending=', '[' + r.suffx + ']') # print ( 'generative semantics' ) showCode(r.vem.gen.logic) print() print('--') nams = arg[0] if len(arg) > 0 else 'test' dfns = nams + source limt = sys.argv[2] if len(sys.argv) > 2 else 24 erul = load(nams + grammar) # get pickled Elly rules if erul == None: ustb = symbolTable.SymbolTable() # if none, make new symbol table else: ustb = erul.stb # else, get existing symbol table unkns = ustb.findUnknown() # check for new symbols added print("new symbols") for us in unkns: print('[' + us + ']') # show every symbol print() print('source=', dfns) inp = ellyDefinitionReader.EllyDefinitionReader(dfns) if inp.error != None: print(inp.error, file=sys.stderr) sys.exit(1)
# Grammar.__init__ (collapsed onto two lines): when `create` is set, it
# recompiles every rule table (macro, grammar, pattern, conceptual hierarchy,
# name, compound, morphology) from text sources, collecting per-table failure
# tags and raising TableFailure if any table failed, then pickles itself when
# a release id was supplied; otherwise it unpickles previously saved rules,
# exits on a PyElly release-id mismatch, and copies the saved tables into
# this instance. NOTE(review): the break between the two lines below falls
# inside the assignment `self.man = gram.man`, so they are one method body.
def __init__(self, system, create, rid=None): """ load all definitions from binary or text files arguments: self - system - which PyElly application create - whether to create new binary rid - PyElly release ID exceptions: TableFailure on error """ super(Grammar, self).__init__() self.rls = rid sysf = system + grammar if create: print("recompiling grammar rules") self.stb = symbolTable.SymbolTable() # new empty table to fill in el = [] try: self.mtb = macroTable.MacroTable(self.inpT(system, 'm')) except ellyException.TableFailure: el.append('macro') try: self.gtb = grammarTable.GrammarTable(self.stb, self.inpT(system, 'g')) self.stb.setBaseSymbols() except ellyException.TableFailure: el.append('grammar') try: self.ptb = patternTable.PatternTable(self.stb, self.inpT(system, 'p')) except ellyException.TableFailure: el.append('pattern') try: self.hry = conceptualHierarchy.ConceptualHierarchy( self.inpT(system, 'h')) except ellyException.TableFailure: el.append('concept') try: self.ntb = nameTable.NameTable(self.inpT(system, 'n')) except ellyException.TableFailure: el.append('name') try: self.ctb = compoundTable.CompoundTable(self.stb, self.inpT(system, 't')) except ellyException.TableFailure: el.append('compound') sa = self.inpT(system, 'stl') pa = self.inpT(system, 'ptl') try: self.man = morphologyAnalyzer.MorphologyAnalyzer(sa, pa) except ellyException.TableFailure: el.append('morphology') if len(el) > 0: print('rule FAILures on', el, file=sys.stderr) raise ellyException.TableFailure if self.rls != None: ellyPickle.save(self, sysf) else: print("loading saved grammar rules from", sysf) gram = ellyPickle.load(sysf) if gram == None: raise ellyException.TableFailure if gram.rls != rid: print('inconsistent PyElly version for saved rules', file=sys.stderr) sys.exit(1) self.stb = gram.stb # copy in saved language definition objects self.mtb = gram.mtb # self.gtb = gram.gtb # self.ptb = gram.ptb # self.ctb = gram.ctb # self.ntb = gram.ntb # self.hry = gram.hry # self.man = 
gram.man #
# Fragment (collapsed): the tail of a generative-semantic code lister that
# flags calls to unknown subprocedures, prints each decoded command with its
# location, and returns an error count, followed by the module's standalone
# compilation test, which reads a procedure definition file and compiles it
# against a fresh SymbolTable, exiting on read or compilation errors.
cdg == semanticCommand.Gproc and not arg[0] in pnx): print ( '>{0:3d} **** call to unknown subprocedure: {1}'.format(loc,arg[0]) ) noe += 1 else: print ( '>{0:3d} {1} {2}'.format(loc,com,arg) ) cod = cod[dl:] loc += dl return noe if __name__ == "__main__": import ellyDefinitionReader import symbolTable stbd = symbolTable.SymbolTable() print ( 'generative semantic compilation test' ) srcd = sys.argv[1] if len(sys.argv) > 1 else 'generativeDefinerTest.txt' inpd = ellyDefinitionReader.EllyDefinitionReader(srcd) if inpd.error != None: print ( "cannot read procedure definition" , file=sys.stderr ) print ( inpd.error , file=sys.stderr ) sys.exit(1) print ( 'input=' , srcd ) codg = compileDefinition(stbd,inpd) if codg == None: print ( "semantic compilation error" , file=sys.stderr ) sys.exit(1)
# Hack assembler parser module (collapsed; the Parser class may continue
# beyond this view). Module level: builds a shared SymbolTable.
# NOTE(review): the module-level `newTable.__init__()` re-runs a constructor
# that has already executed — redundant.
# NOTE(review): hasMoreCommands() consumes a line via readline() just to test
# for end of input, so the line it peeks at is no longer available to a later
# read — confirm how the caller obtains the current command.
__author__ = 'Cheech Wife' import symbolTable # Parser API: # Remove whitespace and comments # Get,read line # Evaluate line information ( A, C commands) # Based on command type returns symbol name for A, dest for C newTable = symbolTable.SymbolTable() newTable.__init__() class Parser(object): def __init__(self, inputFile, outputFile): self.asmFile = open(inputFile, "r") self.hackFile = open(outputFile, "w") self.checkIt = open("Check.txt", "w") def __del__(self): print ('end') def hasMoreCommands(self): currentCommand = self.asmFile.readline() print (currentCommand) #print (self.asmFile) if currentCommand == '': print('Empty') return False else: print ('Not Empty') return True
def __init__(self, system):
    """ initialization of processing rules

        arguments:
            system - root name of PyElly tables to load
    """
    error_count = 0      # tally of table-generation failures for reporting
    self.rul = None
    self.tks = None      # token list for output
    self.ptr = Tree()
    try:
        self.rul = ellyDefinition.Grammar(system, True, None)
    except ellyException.TableFailure:
        error_count += 1
    rules = self.rul     # language rules, still None on failure
    if rules != None:
        self.gtb = rules.gtb
        macro_tbl = rules.mtb
    else:
        self.gtb = None
        macro_tbl = None
    self.sbu = substitutionBuffer.SubstitutionBuffer(macro_tbl)
    try:
        stemmer = self.sbu.stemmer
    except AttributeError:
        stemmer = None
    if rules != None:
        rules.man.suff.infl = stemmer   # define root restoration logic
    sym_tbl = rules.stb if rules != None else symbolTable.SymbolTable()
    try:
        vocab = ellyDefinition.Vocabulary(system, True, sym_tbl)
    except ellyException.TableFailure:
        error_count += 1
    if error_count > 0:
        print('exiting: table generation FAILures', file=sys.stderr)
        sys.exit(1)
    self.vtb = vocab.vtb
    self.pnc = punctuationRecognizer.PunctuationRecognizer(sym_tbl)
    # set up extractors
    self.iex = entityExtractor.EntityExtractor(self.ptr, sym_tbl)
    self.trs = simpleTransform.SimpleTransform()
    name_tbl = rules.ntb
    if name_tbl != None and name_tbl.filled():
        nameRecognition.setUp(name_tbl)
        ellyConfiguration.extractors.append([nameRecognition.scan, 'name'])
# Fragment (collapsed): the tail of a string-representation method for a
# syntax specification (category number plus optional feature string),
# followed by the module's unit test, which seeds a SymbolTable with standard
# categories and scans example specification strings. NOTE(review): Python 2
# print statements; the final try body is cut off at the end of this view.
returns: string representation """ fs = ' +....-....' if self.synf == None else self.synf.id + ' ' + str(self.synf) return 'type=' + str(self.catg) + ' ' + fs # # unit test # if __name__ == '__main__': import symbolTable stb = symbolTable.SymbolTable() stb.getSyntaxTypeIndexNumber('sent') stb.getSyntaxTypeIndexNumber('end') stb.getSyntaxTypeIndexNumber('unkn') stb.getSyntaxTypeIndexNumber('...') # note that ... may not have syntactic features specified spcl = sys.argv[1:] if len(sys.argv) > 1 else [ '...[.0,1]' , 'unkn[:x]' ] print 'testing' , len(spcl) , 'examples' for spc in spcl: ns = scan(spc) sx = spc[:ns] print ns , 'chars in possible specification' try: ss = SyntaxSpecification(stb,sx)
# EllyBase.__init__ (collapsed onto three lines): loads or recompiles grammar
# rules (pickling them when redefined), restores a saved session or creates a
# new one, wires up the substitution buffer and stemmer for root restoration,
# rebuilds the vocabulary when its saved form is missing or out of date,
# creates the extended context (seeding global symbols), the punctuation
# recognizer, the parse tree (plain, display, or no-tree variant), optional
# name recognition, and the entity extractors, records orphan symbols after
# each stage, performs a consistency check on the syntactic-category count,
# and exits on any table-generation failure.
# NOTE(review): the break between the first two lines falls mid-expression
# (`restore + '.'` / `+ system + _session`) and between the last two falls
# mid-comment ("written out numbers"); the three lines below are one method.
def __init__(self, system, restore=None): """ initialization arguments: system - root name for PyElly tables to load restore - name of session to continue """ nfail = 0 # error count for reporting self.rul = None self.gundef = [] # record orphan symbols by module self.vundef = [] # self.pundef = [] # self.eundef = [] # # print ( 'EllyBase.__init__()' ) # aid = './' + system # try: # print ( 'a rul time=' , _timeModified(aid,rules) ) # print ( 'a voc time=' , _timeModified(aid,vocabulary) ) # except: # print ( '\n**** a rul or voc time exception' ) sysf = system + rules redefine = not _isSaved(system, rules, _rules) # print ( '0 redefine=' , redefine ) try: self.rul = ellyDefinition.Grammar(system, redefine, release) except ellyException.TableFailure: nfail += 1 if nfail == 0: self.gundef = self.rul.stb.findUnknown() if redefine: ellyPickle.save(self.rul, sysf) # try: # print ( 'b rul time=' , _timeModified(aid,rules) ) # print ( 'b voc time=' , _timeModified(aid,vocabulary) ) # except: # print ( '\n**** b rul or voc time exception' ) # print ( '1 redefine=' , redefine ) if restore != None: self.ses = ellyPickle.load(restore + '.' 
+ system + _session) else: self.ses = ellySession.EllySession() s = self.ses # session info d = self.rul # language rules # print ( '0:' , len(d.stb.ntname) , 'syntactic categories' ) # print ( 'base language=' , ellyConfiguration.language ) mtb = d.mtb if d != None else None self.sbu = substitutionBuffer.SubstitutionBuffer(mtb) try: inflx = self.sbu.stemmer # print ( 'inflx=' , inflx ) except AttributeError: inflx = None # print ( 'inflx=' , inflx ) if d != None: d.man.suff.infl = inflx # define root restoration logic # print ( '2 redefine=' , redefine ) if not redefine: if not _isSaved(system, vocabulary, _vocabulary) or _notVocabularyToDate(system): redefine = True stb = d.stb if d != None else symbolTable.SymbolTable() # print ( self.rul.stb ) # print ( stb ) if nfail > 0: print('exiting: table generation FAILures', file=sys.stderr) sys.exit(1) # print ( '1:' , len(stb.ntname) , 'syntactic categories' ) self.ctx = extendedContext.ExtendedContext(stb, d.gtb.pndx, s.globals, d.hry) for z in d.gtb.initzn: # initialize global symbols for parsing self.ctx.glbls[z[0]] = z[1] # print ( '2:' , len(stb.ntname) , 'syntactic categories' ) self.pnc = punctuationRecognizer.PunctuationRecognizer(stb) self.pundef = stb.findUnknown() # print ( '3:' , len(stb.ntname) , 'syntactic categories' ) nto = len(stb.ntname) # for consistency check if noParseTree: self.ptr = NoParseTree(stb, d.gtb, d.ptb, self.ctx) elif ellyConfiguration.treeDisplay: self.ptr = parseTreeWithDisplay.ParseTreeWithDisplay( stb, d.gtb, d.ptb, self.ctx) else: self.ptr = parseTree.ParseTree(stb, d.gtb, d.ptb, self.ctx) ntabl = d.ntb if ntabl != None and ntabl.filled(): nameRecognition.setUp(ntabl) ellyConfiguration.extractors.append([nameRecognition.scan, 'name']) self.iex = entityExtractor.EntityExtractor(self.ptr, stb) # set up extractors self.eundef = stb.findUnknown() if ellyConfiguration.rewriteNumbers: self.trs = simpleTransform.SimpleTransform() else: self.trs = None # no automatic conversion of written 
out numbers # print ( '4:' , len(stb.ntname) , 'syntactic categories' ) # print ( '3 redefine=' , redefine ) if redefine: print('recompiling vocabulary rules') try: voc = ellyDefinition.Vocabulary(system, redefine, stb) except ellyException.TableFailure: voc = None nfail += 1 if ellyConfiguration.treeDisplay: print("tree display ON") else: print("tree display OFF") # try: # print ( 'c rul time=' , _timeModified(aid,rules) ) # print ( 'c voc time=' , _timeModified(aid,vocabulary) ) # except: # print ( 'rul or voc time exception' ) # print ( 'vundef=' , self.vundef ) if voc != None: self.vtb = voc.vtb self.vundef = stb.findUnknown() # print ( 'vundef=' , self.vundef ) ntn = len( stb.ntname) # do consistency check on syntactic category count if nto != ntn: print(file=sys.stderr) print( 'WARNING: grammar rules should predefine all syntactic categories', file=sys.stderr) print(' referenced in language definition files', file=sys.stderr) for i in range(nto, ntn): print(' ', stb.ntname[i].upper(), '=', i, file=sys.stderr) print(file=sys.stderr) if nfail > 0: print('exiting: table generation FAILures', file=sys.stderr) sys.exit(1) sys.stderr.flush()