def main(argv):
    inputStr = argv[0]
    try:
        if os.path.isdir(inputStr):
            files = [
                file for file in os.listdir(inputStr) if file.endswith('.jack')
            ]
            for file in files:
                tokenizedArray = tokenizeFile(inputStr + '/' + file)
                compilerObj = Compiler.Compiler(
                    Compiler.handleTabsArray(tokenizedArray))
                compilerObj.compileEngine()
                outputFileName = file.replace(".jack", ".xml")
                outputStr = inputStr + '/' + outputFileName
                # writeArrayToFile(tokenizedArray, outputFileName, True)
                writeArrayToFile(compilerObj.compiledArray, outputStr, False)
        else:
            tokenizedArray = tokenizeFile(inputStr)
            compilerObj = Compiler.Compiler(
                Compiler.handleTabsArray(tokenizedArray))
            compilerObj.compileEngine()
            outputFileName = inputStr.replace(".jack", ".xml")
            # writeArrayToFile(tokenizedArray, outputFileName, True)
            writeArrayToFile(compilerObj.compiledArray, outputFileName, False)
    except TypeError:
        print("I Love Nand")
def slave_trigger(numSeqs):
    """
    Create slave trigger link lists.
    """
    return [[
        Compiler.create_padding_LL(1), Compiler.create_padding_LL(1, True),
        Compiler.create_padding_LL(1)
    ] for _ in range(numSeqs)], Compiler.markerWFLib
def compile_dir(dir_path):
    compiler = Compiler()
    for filePath in os.listdir(dir_path):
        filePath = dir_path + '/' + filePath
        if filePath.endswith('.h') is False:
            continue
        print(filePath)
        contents = compiler.compile(filePath)
        print(contents)
        output_file = filePath.replace('.h', '.m')
        print(output_file)
        with open(output_file, 'w+') as fw:
            fw.write(contents)
def calc_exp(exp, c):
    # Compile exps before evaluation
    exps = Compiler.compile(exp)

    # Evaluate each compiled expression
    for exp in exps:
        c.calc(exp)
def __init__(self,
             sparql=None,
             compiler=None,
             evaluator=None,
             multiline_parser=None,
             options=['time'],
             debug=False,
             load_translations=True):
    self.sparql = sparql
    if self.sparql:
        self.n = sparql.n
    else:
        self.n = Namespaces.Namespaces()
    # self.translator = translator
    if compiler:
        self.compiler = compiler
    else:
        self.compiler = Compiler.Compiler(self.n)
    if evaluator == None:
        evaluator = Evaluator.Evaluator(self.n)
    self.evaluator = evaluator
    self.parser = Parser.Parser(self.n)
    self.urigen = UniqueURIGenerator()
    if multiline_parser == None:
        multiline_parser = MultilineParser.MultilineParser(self.n, self)
    self.multiline_parser = multiline_parser
    self.options = options
    self.cum_comp_time = 0
    self.cum_eval_time = 0
    if not debug:
        self.compiler.debug_off()
    if load_translations:
        from loadTranslations import loadTranslations
        loadTranslations(self)
def __init__(self):
    self.instructionstream = []
    self.compiler = \
        Compiler.Compiler(self.instructionstream)
    self.instructionStreamOffset = 0
    self.pcTest = 0
    self.pc = 0
    self.memory = Memory.Memory()
    self.pins = {
        "Vdd": 0,
        "D7": 0,
        "D6": 0,
        "D5": 0,
        "D4": 0,
        "D3": 0,
        "D2": 0,
        "D1": 0,
        "D0": 0,
        "Vcc": 0,
        "S2": 0,
        "S1": 0,
        "S0": 0,
        "Sync": 0,
        "Phase 2": 0,
        "Phase 1": 0,
        "Ready": 0,
        "Interrupt": 0,
    }
def setup(compiler=Compiler.get()):
    if Args.ide:
        return
    Debug.info(compiler)
    Debug.info(compiler.CXX)
def compilation():
    prog = Compiler.run(args, options,
                        merge_opens=options.merge_opens,
                        debug=options.debug)
    prog.write_bytes(options.outfile)

    if options.asmoutfile:
        for tape in prog.tapes:
            tape.write_str(options.asmoutfile + '-' + tape.name)
def compilation():
    prog = Compiler.run(args, options,
                        param=int(options.param),
                        merge_opens=options.merge_opens,
                        emulate=options.emulate,
                        assemblymode=options.assemblymode,
                        debug=options.debug)
    prog.write_bytes(options.outfile)

    if options.asmoutfile:
        for tape in prog.tapes:
            tape.write_str(options.asmoutfile + '-' + tape.name)
def init():
    index()
    config = load_configuration()
    processor = Putil(config)
    processor.run_data_import()
    comp = Compiler(processor.data, config)
    comp.run_compilation()
    newFilenameSeventhTable = "DataAnalytics"
    freshfilename = newFilenameSeventhTable + '.csv'
    tablenew = "DataAnalytics"
    pathNew = './CsvData/' + freshfilename
    (ret, out, err) = run_cmd(['hdfs', 'dfs', '-rm',
                               '/tmp/Wheeltrue/CsvData/' + newFilenameSeventhTable + '.csv'])
    (ret, out, err) = run_cmd(['hdfs', 'dfs', '-copyFromLocal', pathNew,
                               '/tmp/Wheeltrue/CsvData'])
    if (err != ''):
        print "Script errored out while copying data into HWX"
        print "Original error message: " + err
        sys.exit(1)
    HwxConnection(tablenew, pathNew)
def align(linkList, mode, length):
    for miniLL in linkList:
        miniLL_length = sum([entry.totLength for entry in miniLL])
        paddingEntry = Compiler.create_padding_LL(length - miniLL_length)
        if mode == 'left':
            miniLL.append(paddingEntry)
        elif mode == 'right':
            miniLL.insert(0, paddingEntry)
        else:
            raise NameError("Unknown alignment mode")
def compilation():
    prog = Compiler.run(args, options, debug=options.debug)
    prog.write_bytes(options.outfile)

    if options.asmoutfile:
        for tape in prog.tapes:
            tape.write_str(options.asmoutfile + '-' + tape.name)

    if prog.public_input_file is not None:
        print('WARNING: %s is required to run the program' %
              prog.public_input_file.name)
def SetCompilerByName(self, cmpl, name):
    '''Set the compiler, e.g. after its configuration has been modified.
    cmpl is a Compiler instance or a dict.'''
    if isinstance(cmpl, dict):
        cmpl = Compiler.Compiler(cmpl)
    dstIdx = -1
    for idx, elm in enumerate(self.compilers):
        if elm["name"] == name:
            dstIdx = idx
            break
    if dstIdx != -1:
        self.compilers[dstIdx] = cmpl.ToDict()
def __init__(self, parser, name):
    Node.__init__(self, parser)
    self.name = name
    try:
        p = Parser.Parser(name + '.lam')
        compiler = Compiler.Compiler(p)
        compiler.compile()
    except IOError:
        pass  # No lam module found, python modules will be sought by the python code
    except (SyntacticError, SemanticError) as se:
        print "In module " + name + ": "
        raise se
def test_positive_expressions(statement):
    assert sut.is_expression(statement) == True


# @parameterized.expand([
#     'int a = 12 ;',
#     'int = 2 ;',
#     '0 = a ;',
#     '1 = 2 ;',
#     'one = two ',
# ])
# def test_negative_assignments(statement):
#     assert sut.is_assignment(statement) == False
def delay(linkList, delay, samplingRate):
    '''
    Delays a mini link list by the given amount.
    '''
    sampShift = int(round(delay * samplingRate))
    if sampShift <= 0:  # no need to inject zero delays
        return
    for miniLL in linkList:
        # loop through and look for WAIT instructions
        # use while loop because len(miniLL) will change as we inject delays
        ct = 0
        while ct < len(miniLL):
            if miniLL[ct] == ControlFlow.Wait() or miniLL[ct] == ControlFlow.Sync():
                miniLL.insert(ct + 1, Compiler.create_padding_LL(sampShift))
            ct += 1
def compile_file(input_file, output_file, run):
    print("Compiling file '%s'..." % input_file)

    with open(input_file, "rt", encoding="unicode_escape") as f:
        code = f.read()

    brainfuck_code = Compiler.compile(code)
    brainfuck_code += "\n"

    with open(output_file, "wt") as f:
        f.write(brainfuck_code)

    print("Compiled successfully to '%s'" % output_file)

    if run:
        print("Running compiled code...")
        Interpreter.brainfuck(brainfuck_code)
def compilation():
    prog = Compiler.run(args, options,
                        param=int(options.param),
                        merge_opens=options.merge_opens,
                        emulate=options.emulate,
                        assemblymode=options.assemblymode,
                        debug=options.debug)
    prog.write_bytes(options.outfile)

    if options.asmoutfile:
        for tape in prog.tapes:
            tape.write_str(options.asmoutfile + '-' + tape.name)

    if prog.use_public_input_file:
        print('WARNING: %s is required to run the program' %
              prog.public_input_file.name)
def apply_SSB(linkList, wfLib, SSBFreq, samplingRate):
    # Negative because of negative frequency qubits
    phaseStep = -2*pi*SSBFreq/samplingRate

    # Bits of phase precision
    # Choose usual DAC vertical precision arbitrarily
    phasePrecision = 2**14

    def round_phase(phase, precision):
        """
        Helper function to round a phase to a certain binary precision.
        """
        # Convert radians to portion of circle and then to integer precision, round to precision
        intPhase = round(phasePrecision*np.mod(phase/2.0/pi, 1))
        return int(intPhase), 2*pi*(intPhase/phasePrecision)

    # Keep a dictionary of pulses and phases
    pulseDict = {}
    for miniLL in linkList:
        curFrame = 0.0
        for entry in miniLL:
            # If it's a zero then just adjust the frame and move on
            if entry.key == Compiler.TAZKey:
                curFrame += phaseStep*entry.length
                continue
            # expand time-amplitude pulses in-place
            if entry.isTimeAmp:
                entry.isTimeAmp = False
                shape = wfLib[entry.key][0] * np.ones(entry.length, dtype=np.complex)
            else:
                shape = np.copy(wfLib[entry.key])
            intPhase, truncPhase = round_phase(curFrame, 14)
            pulseTuple = (entry.key, intPhase, entry.length)
            if pulseTuple in pulseDict:
                entry.key = pulseDict[pulseTuple]
            else:
                phaseRamp = phaseStep*np.arange(0.5, shape.size)
                shape *= np.exp(1j*(truncPhase + phaseRamp))
                shapeHash = Compiler.hash_pulse(shape)
                if shapeHash not in wfLib:
                    wfLib[shapeHash] = shape
                pulseDict[pulseTuple] = shapeHash
                entry.key = shapeHash
            curFrame += phaseStep*entry.length
def apply_SSB(linkList, wfLib, SSBFreq, samplingRate):
    # Negative because of negative frequency qubits
    phaseStep = -2 * pi * SSBFreq / samplingRate

    # Bits of phase precision
    # Choose usual DAC vertical precision arbitrarily
    phasePrecision = 2**14

    def round_phase(phase, precision):
        """
        Helper function to round a phase to a certain binary precision.
        """
        # Convert radians to portion of circle and then to integer precision, round to precision
        intPhase = round(phasePrecision * np.mod(phase / 2.0 / pi, 1))
        return int(intPhase), 2 * pi * (intPhase / phasePrecision)

    # Keep a dictionary of pulses and phases
    pulseDict = {}
    for miniLL in linkList:
        curFrame = 0.0
        for entry in miniLL:
            # If it's a zero then just adjust the frame and move on
            if entry.key == Compiler.TAZKey:
                curFrame += phaseStep * entry.length
            elif entry.isTimeAmp:
                raise NotImplementedError("Unable to handle SSB square pulses")
            else:
                intPhase, truncPhase = round_phase(curFrame, 14)
                pulseTuple = (entry.key, intPhase)
                if pulseTuple in pulseDict:
                    entry.key = pulseDict[pulseTuple]
                else:
                    shape = np.copy(wfLib[entry.key])
                    phaseRamp = phaseStep * np.arange(0.5, shape.size)
                    shape *= np.exp(1j * (truncPhase + phaseRamp))
                    shapeHash = Compiler.hash_pulse(shape)
                    if shapeHash not in wfLib:
                        wfLib[shapeHash] = shape
                    pulseDict[pulseTuple] = shapeHash
                    entry.key = shapeHash
                curFrame += phaseStep * entry.length
def build_waveforms(seq):
    import PulseSequencer, Compiler  # import here to avoid circular imports
    wires = Compiler.compile_sequence(seq)

    # build a concatenated waveform for each channel
    channels = wires.keys()
    concatShapes = {q: np.array([0], dtype=np.complex128) for q in channels}
    for q in channels:
        # TODO: deal with repeated sections
        frame = 0
        for pulse in wires[q]:
            if isinstance(pulse, PulseSequencer.Pulse):
                shape = np.exp(1j * (frame + pulse.phase)) * pulse.amp * pulse.shape
                frame += pulse.frameChange
                concatShapes[q] = np.append(concatShapes[q], shape)

    # add an extra zero to make things look more normal
    for q in channels:
        concatShapes[q] = np.append(concatShapes[q], 0)
    return concatShapes
def compilation():
    prog = Compiler.run(args, options)

    if prog.public_input_file is not None:
        print('WARNING: %s is required to run the program' %
              prog.public_input_file.name)
    print 'main.py help     show this message and exit'
    print 'main.py update   update the file used as a test case and comparator, please run this parameter'
    print '                 before you run the daemon'
    print 'main.py          run the daemon'
    exit()
else:
    print 'unknown argument(s)'

while 1 == 1:
    list1 = os.listdir(paths.rootCompilerPath + 'upload/')
    pat = re.compile('index.html')
    for i in list1:
        if i[0] == '.':
            continue
        if pat.search(i) != None:
            continue
        comp = Compiler(paths.rootCompilerPath + 'upload/' + i)
        if blacklist.check(paths.rootCompilerPath + 'upload/' + i):
            comp.malcode()
        else:
            comp.compile()
            g = comp.test()
        os.system('mkdir -p ' + paths.rootCompilerPath + 'backup/' + comp.soal)
        os.system('mv -f ' + paths.rootCompilerPath + 'upload/' + i + ' ' +
                  paths.rootCompilerPath + 'backup/' + comp.soal + '/')
        if comp.compiled != 1:
            continue
        os.system('rm -f ' + comp.outputPath + '*')
import sys
sys.path.append("../")
from Compiler import *

c = Compiler()
c.debugOn()
c.showMessage("message")
print "message shown"
def merge_APS_markerData(IQLL, markerLL, markerNum):
    '''
    Helper function to merge two marker channels into an IQ channel.
    '''
    if len(markerLL) == 0:
        return

    markerAttr = 'markerDelay' + str(markerNum)

    # expand link lists to the same length (copying first element of shorter one)
    for miniLL_IQ, miniLL_m in izip_longest(IQLL, markerLL):
        if not miniLL_IQ:
            IQLL.append([ControlFlow.Wait(),
                         Compiler.create_padding_LL(MIN_ENTRY_LENGTH),
                         Compiler.create_padding_LL(MIN_ENTRY_LENGTH)])
        if not miniLL_m:
            markerLL.append([Compiler.create_padding_LL(MIN_ENTRY_LENGTH)])

    # Step through all the miniLL's together
    for miniLL_IQ, miniLL_m in zip(IQLL, markerLL):
        # Find the cumulative length for each entry of IQ channel
        timePts = np.cumsum([0] + [len(entry) for entry in miniLL_IQ])

        # Find the switching points of the marker channels
        switchPts = []
        prevKey = TAZKey
        t = 0
        for entry in miniLL_m:
            if hasattr(entry, 'key') and prevKey != entry.key:
                switchPts.append(t)
                prevKey = entry.key
            t += len(entry)

        # Push on an extra switch point if we have an odd number of switches (to maintain state)
        if len(switchPts) % 2 == 1:
            switchPts.append(t)

        # Assume switch pts separated by 1 point are single trigger blips
        blipPts = (np.diff(switchPts) == 1).nonzero()[0]
        for pt in blipPts[::-1]:
            del switchPts[pt + 1]

        # Ensure the IQ LL is long enough to support the blips
        if switchPts and max(switchPts) >= timePts[-1]:
            dt = max(switchPts) - timePts[-1]
            if hasattr(miniLL_IQ[-1], 'isTimeAmp') and miniLL_IQ[-1].isTimeAmp:
                miniLL_IQ[-1].length += dt + 4
            else:
                # inject before any control flow statements at the end of the sequence
                idx = len(miniLL_IQ)
                while idx > 0 and isinstance(miniLL_IQ[idx - 1], ControlFlow.ControlInstruction):
                    idx -= 1
                miniLL_IQ.insert(idx, Compiler.create_padding_LL(max(dt + 4, MIN_ENTRY_LENGTH)))

        # Now map onto linklist elements
        curIQIdx = 0
        trigQueue = []
        for switchPt in switchPts:
            # skip if:
            #   1) control-flow instruction
            #   2) the trigger count is too long
            #   3) the previous trigger pulse extends into the current entry
            while (isinstance(miniLL_IQ[curIQIdx], ControlFlow.ControlInstruction) or
                   (switchPt - timePts[curIQIdx]) > (ADDRESS_UNIT * MAX_TRIGGER_COUNT) or
                   len(trigQueue) > 1):
                # update the trigger queue, dropping triggers that have played
                trigQueue = [t - miniLL_IQ[curIQIdx].length for t in trigQueue]
                trigQueue = [t for t in trigQueue if t >= 0]
                curIQIdx += 1
                # add padding pulses if needed
                if curIQIdx >= len(miniLL_IQ):
                    pad = max(MIN_ENTRY_LENGTH, min(trigQueue, 0))
                    miniLL_IQ.append(Compiler.create_padding_LL(pad))
            # Push on the trigger count
            # If our switch point is before the start of the LL entry then we are in trouble...
            if switchPt - timePts[curIQIdx] < 0:
                # See if the previous entry was a TA pair and whether we can split it
                needToShift = switchPt - timePts[curIQIdx - 1]
                if isinstance(miniLL_IQ[curIQIdx - 1], Compiler.LLWaveform) and \
                        miniLL_IQ[curIQIdx - 1].isTimeAmp and \
                        miniLL_IQ[curIQIdx - 1].length > (needToShift + MIN_ENTRY_LENGTH):
                    miniLL_IQ.insert(curIQIdx, deepcopy(miniLL_IQ[curIQIdx - 1]))
                    miniLL_IQ[curIQIdx - 1].length = needToShift - ADDRESS_UNIT
                    miniLL_IQ[curIQIdx].length -= needToShift - ADDRESS_UNIT
                    miniLL_IQ[curIQIdx].markerDelay1 = None
                    miniLL_IQ[curIQIdx].markerDelay2 = None
                    setattr(miniLL_IQ[curIQIdx], markerAttr, ADDRESS_UNIT)
                    # Recalculate the timePts
                    timePts = np.cumsum([0] + [len(entry) for entry in miniLL_IQ])
                else:
                    setattr(miniLL_IQ[curIQIdx], markerAttr, 0)
                    print("Had to push marker blip out to start of next entry.")
            else:
                setattr(miniLL_IQ[curIQIdx], markerAttr, switchPt - timePts[curIQIdx])
                trigQueue.insert(0, switchPt - timePts[curIQIdx])
            # update the trigger queue
            trigQueue = [t - miniLL_IQ[curIQIdx].length for t in trigQueue]
            trigQueue = [t for t in trigQueue if t >= 0]
            curIQIdx += 1

    # Replace any remaining empty entries with None
    for miniLL_IQ in IQLL:
        for entry in miniLL_IQ:
            if not hasattr(entry, markerAttr):
                setattr(entry, markerAttr, None)
def GetCompilerByName(self, name):
    '''Return a Compiler instance.'''
    for i in self.compilers:
        if i["name"] == name:
            return Compiler.Compiler(i)
    return None
def compilation():
    prog = Compiler.run(args, options,
                        merge_opens=options.merge_opens,
                        debug=options.debug)
    prog.write_bytes(options.outfile)
# Generated from Nmod.g4 by ANTLR 4.7
from antlr4 import *
if __name__ is not None and "." in __name__:
    from .NmodParser import NmodParser
else:
    from NmodParser import NmodParser

from Compiler import *

c = Compiler()


# This class defines a complete listener for a parse tree produced by NmodParser.
class NmodListener(ParseTreeListener):

    # Enter a parse tree produced by NmodParser#program.
    def enterProgram(self, ctx:NmodParser.ProgramContext):
        pass

    # Exit a parse tree produced by NmodParser#program.
    def exitProgram(self, ctx:NmodParser.ProgramContext):
        pass

    # Enter a parse tree produced by NmodParser#f_type.
    def enterF_type(self, ctx:NmodParser.F_typeContext):
        pass

    # Exit a parse tree produced by NmodParser#f_type.
    def exitF_type(self, ctx:NmodParser.F_typeContext):
        pass
def printNode(node, deep):
    if node.kind != Parser.EMPTY:
        print("-" * deep + Parser.TYPES[node.kind])
    if node.operand1:
        printNode(node.operand1, deep + 1)
    if node.operand2:
        printNode(node.operand2, deep + 1)
    if node.operand3:
        printNode(node.operand3, deep + 1)


prog = parser.parse()
printNode(prog, 0)

compiler = Compiler()
program = compiler.compile(prog)

i = 0
while i < len(program):
    operation = VMTYPES[program[i]]
    if operation in VMTYPESWithARG:
        print(str(i) + ": " + operation)
        i += 1
        print(str(i) + ": " + str(program[i]))
        i += 1
    else:
        print(str(i) + ": " + operation)
        i = i + 1

vm = VM()
def create_gate_seqs(linkList, gateBuffer=0, gateMinWidth=0, samplingRate=1.2e9):
    '''
    Helper function that takes a set of analog channel LL and creates a LL with
    appropriate blanking on a marker channel.
    '''
    # convert times into samples
    gateBuffer = int(round(gateBuffer * samplingRate))
    gateMinWidth = int(round(gateMinWidth * samplingRate))

    # Initialize list of sequences to return
    gateSeqs = []

    # Time from end of previous LL entry that trigger needs to go
    # high to gate pulse
    startDelay = gateBuffer
    for miniLL in linkList:
        # Initialize a zero-length padding sequence
        gateSeq = [Compiler.create_padding_LL(0)]
        # we need to pad the miniLL with an extra entry if the last entry is not a zero
        if not miniLL[-1].isZero:
            miniLL.append(Compiler.create_padding_LL(MIN_ENTRY_LENGTH))

        # Step through sequence changing state as necessary
        blankHigh = False
        for entry in miniLL:
            # If we are low and the current entry is high then we need to add an element
            if not blankHigh and not entry.isZero:
                gateSeq.append(Compiler.create_padding_LL(entry.totLength, high=True))
                blankHigh = True
            # If we are high and the next entry is low then we need to add an element
            elif blankHigh and entry.isZero:
                gateSeq.append(Compiler.create_padding_LL(entry.totLength, high=False))
                blankHigh = False
            # Otherwise we just continue along in the same state
            else:
                gateSeq[-1].length += entry.totLength

        # Go back through and add the gate buffer to the start of each marker high period.
        # Assume that we start low and alternate low-high-low from the construction above
        # Step through every high pulse and look at the previous one
        for entryct in range(1, len(gateSeq), 2):
            # If the previous low pulse is less than the gate buffer then we'll drop it
            # and add its length and the length of the current high entry to the previous high entry
            if gateSeq[entryct - 1].length < gateBuffer:
                # Look for the last valid previous high entry
                goodIdx = entryct - 2
                while not gateSeq[goodIdx]:
                    goodIdx -= 2
                gateSeq[goodIdx].length += \
                    gateSeq[entryct - 1].totLength + gateSeq[entryct].totLength
                # Mark the two dropped entries as removed by setting them to none
                gateSeq[entryct - 1] = None
                gateSeq[entryct] = None
            # Otherwise we subtract the gate buffer from the previous length
            else:
                gateSeq[entryct - 1].length -= gateBuffer
                gateSeq[entryct].length += gateBuffer
            entryct += 2

        # Remove dropped entries
        gateSeq = filter(lambda x: x is not None, gateSeq)

        # Loop through again and make sure that all the low points between pulses are sufficiently long
        # Given the above construction we should have the low-high-low form
        for entryct in range(2, len(gateSeq) - 1, 2):
            if gateSeq[entryct].length < gateMinWidth:
                # Consolidate this and the next entry onto the previous one
                # Look for the last valid previous high entry
                goodIdx = entryct - 1
                while not gateSeq[goodIdx]:
                    goodIdx -= 2
                gateSeq[goodIdx].length += \
                    gateSeq[entryct].totLength + gateSeq[entryct + 1].totLength
                # Mark the two dropped entries as removed by setting them to none
                gateSeq[entryct] = None
                gateSeq[entryct + 1] = None
            entryct = 2

        # Remove dropped entries
        gateSeq = filter(lambda x: x is not None, gateSeq)

        # Add it on
        gateSeqs.append(gateSeq)

    return gateSeqs
import math
import sys

import Parser as prs
import PrePro as pp
import SymbolTable as st
import Compiler as cp

file = open(sys.argv[1], "r")
code = file.read()
file.close()

filtered = pp.PrePro.filter(code)
symbolTable = st.SymbolTable()
compiler = cp.Compiler()
node = prs.Parser.run(filtered)
node.Evaluate(symbolTable, compiler)
compiler.flush()
import csv

import Compiler

with open("submissions.txt") as csv_file:
    csv_reader = csv.reader(csv_file, delimiter=',')
    result = []
    line_count = 0
    for row in csv_reader:
        if line_count == 0:
            print('Column names are ' + ", ".join(row))
            line_count += 1
        else:
            result.append(row)
            print(row[0] + " - " + row[1] + " - " + row[2] + " - " +
                  row[3] + " - " + row[4] + " - " + row[5])
            line_count += 1

for row in result:
    Compiler.my_compiler_function(row)
import Compiler

# Initial checks
if __name__ == "__main__":
    import sys
    if len(sys.argv) < 2:
        print 'Usage from the console: mj.py <File.mj>'
    elif sys.argv[1][-3:] != '.mj':
        print 'Error: invalid file'
    else:
        Compiler.empezar()
import System
import Compiler

print("OS: ", end="")
print(System.os())

print("Default compiler: ", end="")
print(Compiler.get())
#!/usr/bin/env python
import sys

from Compiler import *
from Earley_Parser import *
from VirtualMachine import *
import KeywordSub


def separate_a_section():
    print "=" * 100


if __name__ == "__main__":
    c, vm = Compiler(), VM()
    source, rules = Tokenizer.get_list(), Tokenizer.get_rules()
    s = EarleyParser(rules).parse(KeywordSub.keywords_substitution(source))
    if s is None:
        print
        print "Syntax error."
        sys.exit(1)
    # separate_a_section()
    # PrintTree(s)
    # separate_a_section()
    c.Compile(s)
    c.program += [EXIT]
    separate_a_section()
    vm.Execute(c.program)
    separate_a_section()
    print 'Values : ' + str(vm.var_values)
def verilog():
    File.cd(Git.root_dir())
    Time.stamp()
    if Args.build:
        Verilog.build()
        print("Verilog build time: " + Time.duration())
    elif Args.run:
        Verilog.run()
        print("Verilog build and flash time: " + Time.duration())
    elif Args.simulate:
        Verilog.simulate()
        print("Verilog test build time: " + Time.duration())
    elif Args.flash:
        Verilog.flash()
        print("Verilog flash time: " + Time.duration())
    else:
        print("No argument provided to build script")


if Args.verilog:
    verilog()
    exit()

if Args.compilers_info:
    Compiler.print_info()
    exit()

cpp()
#!/usr/bin/python -tt
import sys

from Compiler import *

DEBUG = False
for arg in sys.argv:
    if arg == "-d":
        DEBUG = True

doc = sys.stdin.read()
compiler = Compiler(DEBUG)
result = compiler.compile(doc)
print(result)