def pragmaIncbin(ppt, line, result):
    """Includes a binary file.

    Parses a STRING filename followed by up to two optional
    comma-separated expressions (offset, then size), then emits either a
    "Byte" node (when offset/size are hardcoded) or a "ByteRange" node
    (when they depend on label placement) into `result`.
    """
    filename = line.expect("STRING").value
    # Defaults: start of file, whole file.
    offset = IR.ConstantExpr(0)
    size = None
    if str(line.lookahead(0)) == ",":
        line.pop()
        offset = FE.parse_expr(line)
        if str(line.lookahead(0)) == ",":
            line.pop()
            size = FE.parse_expr(line)
    line.expect("EOL")
    if type(filename) == str:
        try:
            # NOTE: `file()` is the Python 2 built-in open().
            f = file(os.path.join(FE.context_directory, filename), "rb")
            if offset.hardcoded and (size is None or size.hardcoded):
                # We know how big it will be, we can just use the values.
                # First check to make sure they're sane
                if offset.value() < 0:
                    Err.log("Offset may not be negative")
                    f.close()
                    return
                f.seek(0, 2)  # Seek to end of file
                if offset.value() > f.tell():
                    Err.log("Offset runs past end of file")
                    f.close()
                    return
                if size is not None:
                    if size.value() < 0:
                        Err.log("Length may not be negative")
                        f.close()
                        return
                    if offset.value() + size.value() > f.tell():
                        Err.log(".incbin length too long")
                        f.close()
                        return
                if size is None:
                    # read(-1) reads to end-of-file.
                    size = IR.ConstantExpr(-1)
                f.seek(offset.value())
                bytes = f.read(size.value())
                bytes = [IR.ConstantExpr(ord(x)) for x in bytes]
                result.append(IR.Node(ppt, "Byte", *bytes))
            else:
                # offset or length could change based on label placement.
                # This seems like an unbelievably bad idea, but since we
                # don't have constant prop it will happen for any symbolic
                # alias. Don't use symbolic aliases when extracting tiny
                # pieces out of humongous files, I guess.
                bytes = f.read()
                bytes = [IR.ConstantExpr(ord(x)) for x in bytes]
                if size is None:
                    size = IR.SequenceExpr([IR.ConstantExpr(len(bytes)),
                                            "-", offset])
                result.append(IR.Node(ppt, "ByteRange", offset, size, *bytes))
            f.close()
        except IOError:
            Err.log("Could not read " + filename)
            return
def generatePOTCAR(theory, element_order):
    """Concatenate per-element pseudopotential files into one POTCAR.

    Looks up each element's pseudopotential name (defined in
    StaticData.py), resolves the path for the requested `theory`
    ('SCAN' or 'GGA'), and writes the concatenation to ./POTCAR.
    Any failure is re-raised as Errors.PotcarGenerationError.
    """
    try:
        potcar_paths = []
        for element in element_order:
            pseudopotential = pseudopotentials[element]
            if theory == 'SCAN':
                potcar_paths.append(
                    'POTPAW_PBE_52_SCAN/%s/POTCAR' % (pseudopotential))
            elif theory == 'GGA':
                potcar_paths.append('POT_GGA_PAW_PBE/%s' % (pseudopotential))
            else:
                raise Errors.TheoryInputError(theory)
        # Stitch the individual files together in element order.
        with open('POTCAR', 'w') as outfile:
            for path in potcar_paths:
                with open(path) as infile:
                    outfile.write(infile.read())
        print('\n')
        print('POTCAR file succesfully generated!')
        print(
            '--------------------------------------------------------------------'
        )
    except Exception as e:
        raise Errors.PotcarGenerationError(e)
def __init__(self, bins, range):
    """
    bins: A tuple each number is how many spatial bins in each dimension (up to 3)
    range: A list of [min, max] pairs; the limits of the spatial geometry in each dimension.

    Currently only 1-D geometries are accepted; anything else raises
    Errors.GeometryError.  (Note: the parameter name `range` shadows the
    builtin; it is part of the public signature so it is kept.)
    """
    # A scalar `bins` has no len() -> treat as one dimension.
    try:
        self.dimension = len(bins)
    except TypeError:
        self.dimension = 1
    if self.dimension != 1:
        raise Errors.GeometryError(
            "Geometry currently only suppors 1-D geometry")
    elif self.dimension != len(range):
        raise Errors.GeometryError("Bins and Range must have same degree")
    else:
        self.bins = bins
        self.range = range
    # edges has one more entry than the number of bins.
    self.edges = scipy.zeros(self.bins + 1)
    self.centers = scipy.zeros(self.bins)  # Bin centers
    # NOTE(review): self.max / self.min are not set in this method --
    # presumably properties or attributes defined elsewhere in the
    # class; verify before relying on them.
    width = self.max - self.min
    # Uniformly spaced bin edges across [min, max].
    for i in xrange(self.bins + 1):
        edge = self.min + i * (width / float(self.bins))
        self.edges[i] = edge
    # Each center is the midpoint of its two surrounding edges.
    for i in xrange(len(self.centers)):
        self.centers[i] = self.edges[i] + (self.edges[i + 1]
                                           - self.edges[i]) / 2.0
def assemble(self, node, mode, env):
    "A generic instruction called by the visitor methods themselves"
    # node.data carries (opcode mnemonic, first operand expr, second
    # operand expr or None).
    (opcode, expr, expr2) = node.data
    bin_op = Ops.opcodes[opcode][mode]
    if bin_op is None:
        # Opcode table has no encoding for this addressing mode.
        Err.log('%s does not have mode "%s"' % (opcode.upper(),
                                                Ops.modes[mode]))
        return
    inst_bytes = []
    # Emit the opcode byte itself first.
    self.outputbyte(IR.ConstantExpr(bin_op), env, inst_bytes)
    arglen = Ops.lengths[mode]
    # Resolved operand values, kept only for the listing output.
    val1 = None
    val2 = None
    if expr is not None:
        val1 = expr.value(env)
    if expr2 is not None:
        val2 = expr2.value(env)
    if mode == 15:
        # ZP Relative mode is wildly nonstandard: a zero-page address
        # byte followed by a relativized branch target byte.
        expr2 = self.relativize(expr2, env, arglen)
        self.outputbyte(expr, env, inst_bytes)
        self.outputbyte(expr2, env, inst_bytes)
    else:
        if mode == 14:
            # Mode 14 is a branch: convert the target to a displacement.
            expr = self.relativize(expr, env, arglen)
        if arglen == 1:
            self.outputbyte(expr, env, inst_bytes)
        elif arglen == 2:
            self.outputword(expr, env, inst_bytes)
    self.listing.listInstruction(
        self.listing_string(env.getPC(), inst_bytes, mode, opcode,
                            val1, val2))
    # Advance PC past opcode plus operand bytes; track emitted size.
    env.incPC(1 + arglen)
    self.code += 1 + arglen
def generateINCAR(system_name, theory, ordered_ion_list, type):
    """Build an INCAR string from the template for `theory`.

    Computes MAGMOM/NUPDOWN inputs from `ordered_ion_list`, picks the
    template matching `theory` ('SCAN', 'GGA-dia', 'GGA-para'), and
    substitutes the <system> and <ibrion> placeholders.  Returns the
    substituted INCAR text (previously the substitutions were silently
    discarded).  Any failure is re-raised as Errors.IncarGenerationError.
    """
    try:
        MAGMOM_input = generateMAGMOM(ordered_ion_list)
        NUPDOWN_input = str(
            sum(ion.magmom * multiplicity
                for [ion, multiplicity] in ordered_ion_list))
        print(
            'Calculated magnetic moment inputs for each ion following the order of elements in the POSCAR file:'
        )
        print(MAGMOM_input)
        print('NUPDOWN =', NUPDOWN_input)
        print(
            '--------------------------------------------------------------------'
        )
        if theory == 'SCAN':
            incar_str = IncarTemplate.SCAN_INCAR
        elif theory == 'GGA-dia':
            incar_str = IncarTemplate.GGA_DIA_INCAR
        elif theory == 'GGA-para':
            incar_str = IncarTemplate.GGA_PARA_INCAR
        else:
            raise Errors.TheoryInputError(theory)
        # BUG FIX: str.replace returns a new string; the original code
        # discarded the result, so the placeholders were never filled in.
        incar_str = incar_str.replace('<system>', system_name)
        # BUG FIX: `type == 'relax' or 'relax1'` was always truthy because
        # the non-empty string 'relax1' is true on its own.
        if type in ('relax', 'relax1'):
            incar_str = incar_str.replace('<ibrion>', '2')
        elif type == 'spe':
            incar_str = incar_str.replace('<ibrion>', '-1')
        # Backward compatible: callers that ignored the old None return
        # are unaffected; new callers get the substituted template.
        return incar_str
    except Exception as e:
        raise Errors.IncarGenerationError(e)
def CreateModuleVariable(self, variableName, variableDescription,
                         variableType, variableMode, value=None):
    """Create a module variable and persist it in the schema file.

    Raises err.Conflict when `variableMode` is "Internal" (unsupported
    for modules) or when `variableName` already exists in the module.
    Non-"Static" variables are stored without a value.  Returns a
    success message string.
    """
    self.__ValidateArgs()
    # Validating Variable Type
    if variableMode == "Internal":
        # BUG FIX: removed the unreachable `return None` that followed
        # this raise in the original.
        raise err.Conflict(
            "A Variable with the mode '{0}' is not support by Modules !".
            format(variableMode))
    # Setting Value: only Static variables carry a stored value.
    if variableMode != "Static":
        value = None
    jsonContent = self.OpenModule()
    # Validating Uniquness
    if variableName in jsonContent["ModuleVariables"]:
        # BUG FIX: removed the second unreachable `return None` as well.
        raise err.Conflict(
            "A Module Variable with the name '{0}' already exists !".
            format(variableName))
    # Re-read the full schema and write the new variable record into it.
    jsonContent = self.OpenSchema()
    jsonContent["Modules"][
        self.moduleName]["ModuleVariables"][variableName] = (
            js.VariableJSON(variableName, variableDescription,
                            variableType, variableMode, value))
    fl.Write(self.schemaMetaData, js.Dump(jsonContent), True)
    return "Variable '{0}' created successfully !".format(variableName)
def setup_errors(self, options, result):
    """Open the error listing file (or none) according to `options`.

    When `options.use_listing_file` is set, the listing path is derived
    from the source filename and recorded on `result`.
    """
    if options.use_listing_file:
        # NOTE(review): `source` is neither a parameter nor a local of
        # this method -- presumably a module-level name, otherwise this
        # branch raises NameError.  Verify against the enclosing module.
        result.listing_file = Utils.replace_suffix(source, ".lis")
        path = result.listing_file
    else:
        path = None
    Errors.open_listing_file(path=path,
                             echo_to_stderr=options.errors_to_stderr)
def run_pipeline(pipeline, source, printtree=True):
    """Feed `source` through each pipeline phase in order (Python 2).

    CompileErrors are reported and recorded; InternalErrors are
    re-raised only if no earlier compile error occurred.  PrintTree
    phases are skipped when `printtree` is false.
    """
    from Cython.Compiler.Visitor import PrintTree
    error = None
    data = source
    try:
        try:
            for phase in pipeline:
                if phase is not None:
                    if DebugFlags.debug_verbose_pipeline:
                        t = time()
                        print "Entering pipeline phase %r" % phase
                    if not printtree and isinstance(phase, PrintTree):
                        continue
                    data = phase(data)
                    if DebugFlags.debug_verbose_pipeline:
                        print "    %.3f seconds" % (time() - t)
        except CompileError, err:
            # err is set
            Errors.report_error(err)
            error = err
    except InternalError, err:
        # Only raise if there was not an earlier error
        if Errors.num_errors == 0:
            raise
        error = err
    # NOTE(review): `error` and `data` are computed but nothing is
    # returned here -- other variants of this function return them;
    # confirm whether this copy was truncated.
def deleteVid(self, id):
    """Ask for confirmation, then delete camera `id` remotely and locally."""
    idx = self.findCamera(id)
    prompt = "Do you want to delete {}?".format(
        self.cameras[idx]['video_source'])
    if not messagebox.askokcancel("Delete", prompt, parent=self):
        return
    try:
        url = 'https://iguard-backend.herokuapp.com/api/v1/KDY7AehrzAlOVJd-i09GVA/camera/{}'.format(
            self.cameras[idx]['id'])
        with requests.Session() as session:
            reply = session.delete(url=url)
    except:
        Errors.networkConnectionError()
        return
    payload = dict(reply.json())
    if 'success' not in payload:
        Errors.unknownError()
        return
    # Backend confirmed: drop the camera locally and tear down its widget.
    del self.cameras[idx]
    self.updateConfig()
    self.vid_list[idx].destroy()
    del self.vid_list[idx]
def registerUser(self):
    """Submit the registration form to the backend and show any errors."""
    form = {
        'login': self.username.get_data(),
        'name': self.first_name.get_data(),
        'surname': self.second_name.get_data(),
        'email': self.email.get_data(),
        'city': self.city.get_data(),
        'street': self.street.get_data(),
        'house': self.house.get_data(),
        'password': self.password.get_data(),
        'password_confirmation': self.password_confirmation.get_data(),
    }
    try:
        url = "https://iguard-backend.herokuapp.com/api/v1/KDY7AehrzAlOVJd-i09GVA/register/user"
        res = dict(self.session.post(url=url, data=form).json())
    except:
        Errors.networkConnectionError()
        return
    if "errors" not in res.keys():
        self.addUser(res)
        return
    # One line per backend-reported error, shown in a dismissable banner.
    message = "".join(item + "\n" for item in res["errors"])
    banner = tk.Label(self, bg='red', fg='white', text=message)
    banner.pack(side=tk.TOP, fill=tk.X)
    banner.bind("<Button-1>", lambda e: banner.destroy())
def CreateProjectVariable(self, variableName, variableDescription,
                          variableType, variableMode, value=None):
    """Create a project variable and persist it in the project file.

    Raises err.Conflict when `variableMode` is neither "Static" nor
    "Runtime", or when `variableName` already exists.  Non-"Static"
    variables are stored without a value.  Returns a success message.
    """
    self.__ValidateArgs()
    # Validating Variable Type
    if variableMode != "Static" and variableMode != "Runtime":
        # BUG FIX: removed the unreachable `return None` that followed
        # this raise in the original.
        raise err.Conflict(
            "A Variable with the mode '{0}' is not support by Projects !".
            format(variableMode))
    # Setting Value: only Static variables carry a stored value.
    if variableMode != "Static":
        value = None
    jsonContent = self.OpenProject()
    # Validating Uniquness
    if variableName in jsonContent["ProjectVariables"]:
        # BUG FIX: removed the second unreachable `return None` as well.
        raise err.Conflict(
            "A Project Variable with the name '{0}' already exists !".
            format(variableName))
    jsonContent["ProjectVariables"][variableName] = (js.VariableJSON(
        variableName, variableDescription, variableType, variableMode,
        value))
    fl.Write(self.projectMetaData, js.Dump(jsonContent), True)
    return "Variable '{0}' created successfully !".format(variableName)
def setItems(self, **items):
    # modify an old dictionary
    """Validate and assign template-declared fields onto the instance.

    Each field's template entry is a tuple used positionally:
    tmpl[1] = expected value type (or None to skip the check),
    tmpl[2] = required sequence length (or None for scalar fields),
    tmpl[3] = required element type for sequence fields,
    tmpl[4] = converter callable applied to the value (or None).
    Raises the matching Errors.Templ*Error on any violation.
    (Python 2: uses dict.iteritems.)
    """
    # NOTE(review): dummyNew tracks keys not yet consumed, but it is not
    # used after the loop in this view -- possibly vestigial; verify.
    dummyNew = dict.fromkeys(items)
    selfKeys = self.getItems()
    for key, val in items.iteritems():
        if key not in selfKeys:
            raise Errors.TemplUndefinedFieldError(key)
        tmpl = self._template[key]
        # Type check on the whole value, when declared.
        if tmpl[1] is not None:
            if not isinstance(val, tmpl[1]):
                raise Errors.TemplFieldTypeError(key)
        # Sequence fields: check length and element types.
        if tmpl[2] is not None:
            if len(val) != tmpl[2]:
                raise Errors.TemplFieldLengthError(key, tmpl[2])
            for v in val:
                if not isinstance(v, tmpl[3]):
                    raise Errors.TemplFieldContentError(key)
        del dummyNew[key]
        # Assign, applying the converter element-wise for sequences.
        if tmpl[4] is None:
            self.__dict__[key] = val
        else:
            if tmpl[2] is None:
                self.__dict__[key] = tmpl[4](val)
            else:
                self.__dict__[key] = [tmpl[4](v) for v in val]
def load(fileName=None, src=None, patchFileName=None): if not (fileName or src): raise "Catastrophic problem with load - neither a fileName nor a sourcefile" p = m3parser.Parser() p.verbose = int(Options.options.verbosity) if fileName: if not os.path.exists(fileName): error("File %s does not exist" % fileName) return f = open(fileName) txt = f.read() else: txt = src txt = compro.commentkiller(txt) txt = compro.umlautkiller(txt) if Options.options.errorTest: Errors.findDirective(txt) try: topNode = p(txt) except tpg.SyntacticError, e: error(e.msg, None, fileName or patchFileName, e.line, catastrophic=True, code="CAT002") raise
def setup_errors(self, options):
    """Open the error listing file (or none) according to `options`."""
    if options.use_listing_file:
        # NOTE(review): neither `result` nor `source` is defined in this
        # method -- in sibling variants they are parameters.  Likely a
        # latent NameError unless both are module globals; verify.
        result.listing_file = Utils.replace_suffix(source, ".lis")
        Errors.open_listing_file(result.listing_file,
                                 echo_to_stderr = options.errors_to_stderr)
    else:
        Errors.open_listing_file(None)
def addAgent(self, agent):
    """Register a unique Agent instance and refresh the visualization."""
    # NOTE: assert-based validation disappears under `python -O`.
    assert isinstance(agent, Agent), ERR.TYPE_MISMATCH(agent, Agent)
    assert agent.isUnique(self.agents.values()), ERR.AGENT_NOT_UNIQUE(agent)
    # Index the agent by its id and keep the running count in sync.
    self.agents[agent.id] = agent
    self.num_agents = self.num_agents + 1
    if self.visualize:
        self.visual.flush()
def __init__(self, libinfoFile, tablename=defaultTable, conf=None, species=defaultSpecies, verbose=False, log=sys.stderr): "create load_libinfo instance, connect to MySQL and check for existance of libinfo file" # set attributes self.verbose = verbose self.log = log self.file = libinfoFile self.tablename = tablename self.conf = None self.species = species self.__db = None # db connection self.__cursor = None # check libinfoFile if not os.path.exists(self.file): self.__inform("ERROR: library info file %s does not exist\n" % self.file) raise Errors.ObjectInitError('load_libinfo', "library info file '%s' does not exist\n" % self.file) # init configuration if conf is not None and isinstance(conf, configuration.Configuration): self.conf = conf elif conf is not None and os.path.exists(conf): self.conf = configuration.Configuration(filename=conf) else: self.conf = configuration.Configuration() # check species if not self.species in self.conf.getSpeciesList(): self.__inform("ERROR: species %s is not in configuration file %s\n" % (self.species, self.conf.configFile())) raise Errors.ObjectInitError('load_libinfo', "species '%s' is not in configuration file %s\n" % (self.species, self.conf.configFile())) # connect to MySQL self.__connect()
def getEPIRecord(rec_name, basepath=None):
    """Load one EEG record's metadata from Descriptions.xml.

    Looks up the element named `rec_name` under
    `basepath`/Descriptions.xml, converts known numeric/filter fields,
    and returns a dict that also carries 'EEG_file' and 'basepath'.
    Raises Errors.FileIOFileNotFound / FileIORecordNotFound on failure.
    """
    if basepath is None:
        basepath = '../SharedData/PublicEEG'
    infoxml = os.path.join(basepath, 'Descriptions.xml')
    if not os.path.exists(infoxml):
        raise Errors.FileIOFileNotFound(infoxml)
    from xml.etree import ElementTree
    import re
    xmldoc = ElementTree.parse(infoxml)
    record = xmldoc.find(rec_name)
    if record is None:
        raise Errors.FileIORecordNotFound(rec_name)
    # NOTE(review): Element.getchildren() was removed in Python 3.9;
    # list(record) is the forward-compatible spelling.  TODO confirm the
    # targeted Python version before changing.
    fields = record.getchildren()
    rec_dict = dict()
    for fd in fields:
        if fd.tag in ('Num_seizures', 'Age_at_recording',
                      'Age_at_surgery', 'Num_inter-ictal_events'):
            # Plain integer fields.
            val = int(fd.text)
        elif fd.tag == 'Hardware_filters':
            # e.g. "0.5-100Hz" -> [0.5, 100.0]
            ft_txt = re.split('-|Hz', fd.text)
            val = [float(ft_txt[0]), float(ft_txt[1])]
        elif fd.tag == 'Software_filters':
            # e.g. "60Hz" -> 60.0
            ft_txt = re.split('Hz', fd.text)
            val = float(ft_txt[0])
        else:
            val = fd.text
        rec_dict[fd.tag] = val
    rec_dict['EEG_file'] = os.path.join(basepath, rec_name,
                                        rec_name + '_EEG_DATA.edf')
    rec_dict['basepath'] = basepath
    return rec_dict
def __init__(self, parent, bg='#3A79D1', **kwargs):
    """Build the camera-list frame and populate it from config.json."""
    tk.Frame.__init__(self, parent, bg=bg, **kwargs)
    self.list_of_users = []
    self.list_of_ids = []
    # Scrollable container that holds one row per camera.
    self.scrollable = VerticalScrolledFrame(self, bg='white', **kwargs)
    self.scrollable.interior.configure(bg='white')
    self.scrollable.pack(side=tk.BOTTOM, fill=tk.BOTH, expand=True)
    self.vid_list = []
    self.addVidForm()
    try:
        with open('config.json') as f:
            self.config = json.load(f)
        self.cameras = self.config['cameras']
    except:
        Errors.configNotFoundError()
        return
    # One label widget per configured camera, in file order.
    for camera in self.cameras:
        self.addVideoLabel(camera['video_source'], camera['id'],
                           camera['user'])
def visitMacroEnd(self, node, env):
    """Close an open macro definition, or report a stray .macend."""
    if not self.inDef:
        # Suppress the complaint if the opening .macro already errored.
        if not self.nestedError:
            Err.log("Unmatched .macend")
        return
    Macro.endMacro()
    # Neutralize the node so it emits nothing downstream.
    node.nodetype = "None"
    node.data = []
    self.inDef = False
def pragmaIncbin(ppt, line, result):
    """Includes a binary file.

    Duplicate of the earlier pragmaIncbin in this file (wrapped
    slightly differently).  Parses STRING [, offset [, size]] and emits
    a "Byte" node for hardcoded bounds or a "ByteRange" node when the
    bounds depend on label placement.
    """
    filename = line.expect("STRING").value
    # Defaults: start of file, whole file.
    offset = IR.ConstantExpr(0)
    size = None
    if str(line.lookahead(0)) == ",":
        line.pop()
        offset = FE.parse_expr(line)
        if str(line.lookahead(0)) == ",":
            line.pop()
            size = FE.parse_expr(line)
    line.expect("EOL")
    if type(filename) == str:
        try:
            # NOTE: `file()` is the Python 2 built-in open().
            f = file(os.path.join(FE.context_directory, filename), "rb")
            if offset.hardcoded and (size is None or size.hardcoded):
                # We know how big it will be, we can just use the values.
                # First check to make sure they're sane
                if offset.value() < 0:
                    Err.log("Offset may not be negative")
                    f.close()
                    return
                f.seek(0, 2)  # Seek to end of file
                if offset.value() > f.tell():
                    Err.log("Offset runs past end of file")
                    f.close()
                    return
                if size is not None:
                    if size.value() < 0:
                        Err.log("Length may not be negative")
                        f.close()
                        return
                    if offset.value() + size.value() > f.tell():
                        Err.log(".incbin length too long")
                        f.close()
                        return
                if size is None:
                    # read(-1) reads to end-of-file.
                    size = IR.ConstantExpr(-1)
                f.seek(offset.value())
                bytes = f.read(size.value())
                bytes = [IR.ConstantExpr(ord(x)) for x in bytes]
                result.append(IR.Node(ppt, "Byte", *bytes))
            else:
                # offset or length could change based on label placement.
                # This seems like an unbelievably bad idea, but since we
                # don't have constant prop it will happen for any symbolic
                # alias. Don't use symbolic aliases when extracting tiny
                # pieces out of humongous files, I guess.
                bytes = f.read()
                bytes = [IR.ConstantExpr(ord(x)) for x in bytes]
                if size is None:
                    size = IR.SequenceExpr(
                        [IR.ConstantExpr(len(bytes)), "-", offset])
                result.append(IR.Node(ppt, "ByteRange", offset, size, *bytes))
            f.close()
        except IOError:
            Err.log("Could not read " + filename)
            return
def __call__(self, keyw):
    """Resolve this option group from the keyword dict `keyw` (mutated).

    First, an explicit `argname=value` entry is translated into the
    corresponding boolean flag; then the boolean flags are scanned,
    enforcing that at most one is set.  Returns None when no option is
    chosen and there is no default, otherwise a list of command words
    ([fixedword,] option).  Raises Errors.OptionError on conflicts.
    """
    if self.argname is not None and self.argname in keyw:
        k = keyw[self.argname]
        del keyw[self.argname]
        if k is None:
            # Explicit None means "not specified".
            pass
        elif k in self.options:
            # Make sure it isn't contradicted by the corresponding boolean:
            if k in keyw and not keyw[k]:
                raise Errors.OptionError(
                    'Arguments %s and %s are contradictory' % (
                        self.argname,
                        k,
                    ))
            else:
                # Store the option into the boolean to be processed below:
                keyw[k] = 1
        else:
            raise Errors.OptionError('Illegal option %s=%s' % (
                self.argname,
                k,
            ))
    # Now scan the booleans and make sure that at most one is set:
    option = None
    for i in range(len(self.options)):
        k = self.options[i]
        if k in keyw:
            newval = keyw[k]
            del keyw[k]
            if newval:
                if option is not None:
                    raise Errors.OptionError(
                        'Arguments %s and %s cannot both be specified' % (
                            option,
                            k,
                        ))
                else:
                    option = k
            else:
                # newval was false. This is only legal if this
                # option only has two possible values:
                if len(self.options) == 2:
                    # Negating one of two options selects the other.
                    option = self.options[1 - i]
                else:
                    pass
    if option is None:
        if self.default is None:
            return None
        else:
            option = self.default
    retval = []
    if self.fixedword is not None:
        retval.append(self.fixedword)
    retval.append(option)
    return retval
def reset(self):
    """Reset program counter, segment bookkeeping, and scope state."""
    # Back to PC 0 in the default text segment.
    self.pc = 0
    self.segment = "*text-default*"
    self.segmentdict = {}
    self.scopecount = 0
    # Anything beyond the root scope means a .scope was never closed.
    if len(self.stack) > 1:
        Err.log("Unmatched .scope")
    self.stack = [0]
def visitMacroBegin(self, node, env):
    """Open a macro definition; nested definitions are an error."""
    if self.inDef:
        # Already inside a .macro: record the error so the matching
        # .macend does not complain a second time.
        Err.log("Nested macro definition")
        self.nestedError = True
        return
    Macro.newMacro(node.data[0])
    # Neutralize the node so it emits nothing downstream.
    node.nodetype = "None"
    node.data = []
    self.inDef = True
def setup_errors(self, options, result):
    """Reset error state and open the listing file (or none)."""
    Errors.reset()  # clear any remaining error state
    if options.use_listing_file:
        # NOTE(review): `source` is not defined locally -- confirm it is
        # a module-level name before relying on this branch.
        path = result.listing_file = Utils.replace_suffix(source, ".lis")
    else:
        path = None
    Errors.open_listing_file(path=path,
                             echo_to_stderr=options.errors_to_stderr)
def parse_token_definition(self, token_spec):
    """Validate a (pattern, action) token spec and return it as a tuple.

    Raises Errors.InvalidToken when the spec is not a 2-tuple or when
    the pattern is not a Regexps.RE instance.
    """
    if type(token_spec) != types.TupleType:
        raise Errors.InvalidToken("Token definition is not a tuple")
    if len(token_spec) != 2:
        raise Errors.InvalidToken("Wrong number of items in token definition")
    regexp, handler = token_spec
    if not isinstance(regexp, Regexps.RE):
        raise Errors.InvalidToken("Pattern is not an RE instance")
    return (regexp, handler)
def relativize(self, expr, env, arglen):
    """Convert an absolute target expression into an 8-bit branch
    displacement, logging an error (and emitting 0) when out of range."""
    # Displacement is measured from the address of the *next*
    # instruction: opcode byte plus `arglen` operand bytes.
    displacement = expr.value(env) - (env.getPC() + arglen + 1)
    if not (-128 <= displacement <= 127):
        Err.log("Branch target out of bounds")
        displacement = 0
    # Two's-complement byte encoding of the signed displacement.
    return IR.ConstantExpr(displacement % 256)
def __getitem__(self, item):
    """Resolve a label: names starting with '_' are searched through the
    scope stack in order; all other names resolve in the outermost
    dictionary.  Unknown labels log an error and evaluate to 0."""
    if item[0] == '_':
        # Temporary label: first match along the scope stack wins.
        for scope_index in self.stack:
            scope = self.dicts[scope_index]
            if item in scope:
                return scope[item]
    elif item in self.dicts[0]:
        return self.dicts[0][item]
    Err.log("Unknown label '%s'" % item)
    return 0
def isConnected(self):
    """Query the hardware bridge; True/False for "true"/"false" bodies,
    error via Errors.raiseError on HTTP 400, otherwise None."""
    self.connected_URL = "{}isHardwareConnected".format(self.URL)
    reply = requests.get(self.connected_URL)
    body = reply.content.decode("utf-8")
    if reply.status_code == 400:
        Errors.raiseError(body)
    else:
        # Unrecognized bodies fall through to None, as before.
        return {"true": True, "false": False}.get(body)
def endMacro():
    """Finish the macro currently being recorded and register it."""
    global currentname
    global currentbody
    global macros
    if currentname is None:
        Err.log("Internal error! Ended a non-existent macro!")
        return
    # Commit the recorded body under the macro's name, then clear the
    # recording state.
    macros[currentname] = currentbody
    currentname = None
    currentbody = None
def run_pipeline(self, pipeline, source):
    """Feed `source` through each non-None phase in order (Python 2).

    A CompileError is reported via Errors.report_error and swallowed.
    """
    err = None
    data = source
    try:
        for phase in pipeline:
            if phase is not None:
                data = phase(data)
    except CompileError, err:
        # err is set
        Errors.report_error(err)
    # NOTE(review): `err` and `data` are not returned here -- sibling
    # variants return them; confirm whether this copy was truncated.
def reset(self, poses={}):
    """Reset the simulation, repositioning agents from `poses` (Python 2).

    poses maps agent id -> pose; agents without an entry are reset to
    their default.  (The mutable default `{}` is only read here, never
    mutated, so it is safe in practice.)
    """
    self.collisionOccured = False
    if self.visualize:
        self.visual.flush()
    # NOTE: assert-based validation disappears under `python -O`.
    assert type(poses) == dict, ERR.TYPE_MISMATCH(poses, dict)
    for i in self.agents.values():
        try:
            i.reset(poses[i.id])
        except KeyError:
            # No pose supplied for this agent: warn and use the default.
            print ERR.RESET_POSE_MISSING(i.id)
            i.reset()
def get(self, virtual_pin):
    """Read `virtual_pin` from the hardware bridge and return its value."""
    self.get_URL = "{}get/{}".format(self.URL, virtual_pin)
    reply = requests.get(self.get_URL)
    body = reply.content.decode("utf-8")
    if reply.status_code == 400:
        Errors.raiseError(body)
    # The bridge answers with a JSON array; the value is its first item.
    values = json.loads(body)
    if self.DEBUG_MODE:
        print("GET URL is:\t{}".format(self.get_URL))
        print("Got Value:\t{}".format(values[0]))
    return values[0]
def newMacro(name):
    """Start creating a new macro with the specified name."""
    global currentname
    global currentbody
    global macros
    if currentname is not None:
        # Recording is already in progress; refuse to nest.
        Err.log("Internal error! Nested macro attempt!")
        return
    if name in macros:
        # Redefinition is reported but recording proceeds anyway,
        # matching the original behavior.
        Err.log("Duplicate macro definition '%s'" % name)
    currentname = name
    currentbody = []
def add_token(token):
    "Converts a substring into a single lexeme"
    # Relies on surrounding module/closure state: `result` (output
    # list), `Lexeme`, `bases`, `punctuation`, `is_opcode`, `Err`.
    # Python 2: uses `long()` for numeric conversion.
    if token == "":
        return
    if token == "0":
        result.append(Lexeme("NUM", 0))
        return
    firstchar = token[0]
    rest = token[1:]
    if firstchar == '"':
        # Lexer encodes string literals as '"' + contents.
        result.append(Lexeme("STRING", rest))
        return
    elif firstchar in bases:
        # Radix-prefixed numeric constant; bases maps prefix ->
        # (name, radix).
        try:
            result.append(Lexeme("NUM", long(rest, bases[firstchar][1])))
            return
        except ValueError:
            Err.log('Invalid ' + bases[firstchar][0] + ' constant: ' +
                    rest)
            result.append(Lexeme("NUM", 0))
            return
    elif firstchar.isdigit():
        try:
            result.append(Lexeme("NUM", long(token)))
        except ValueError:
            Err.log('Identifiers may not begin with a number')
            result.append(Lexeme("LABEL", "ERROR"))
        return
    elif firstchar == "'":
        # Character constant: exactly one character after the quote.
        if len(rest) == 1:
            result.append(Lexeme("NUM", ord(rest)))
        else:
            Err.log("Invalid character constant '" + rest + "'")
            result.append(Lexeme("NUM", 0))
        return
    elif firstchar in punctuation:
        # Punctuation arrives pre-split as single characters.
        if rest != "":
            Err.log("Internal lexer error!  '" + token + "' can't happen!")
        result.append(Lexeme(firstchar))
        return
    else:  # Label, opcode, or index register
        id = token.lower()
        if is_opcode(id):
            result.append(Lexeme("OPCODE", id))
        elif id == "x":
            result.append(Lexeme("X"))
        elif id == "y":
            result.append(Lexeme("Y"))
        else:
            result.append(Lexeme("LABEL", id))
        return
    # should never reach here
    Err.log("Internal lexer error: add_token fall-through")
def run_pipeline(self, pipeline, source):
    """Feed `source` through each non-None phase in order (Python 2).

    Verbose phase names are printed when
    DebugFlags.debug_verbose_pipeline is set.  CompileErrors are
    reported and recorded in `error`.
    """
    error = None
    data = source
    try:
        for phase in pipeline:
            if phase is not None:
                if DebugFlags.debug_verbose_pipeline:
                    print "Entering pipeline phase %r" % phase
                data = phase(data)
    except CompileError, err:
        # err is set
        Errors.report_error(err)
        error = err
    # NOTE(review): `error` and `data` are not returned here -- sibling
    # variants return them; confirm whether this copy was truncated.
def teardown_errors(self, err, options, result):
    """Close the listing, record the error count, and invalidate the
    generated C file when compilation failed."""
    source_desc = result.compilation_source.source_desc
    if not isinstance(source_desc, FileSourceDescriptor):
        raise RuntimeError("Only file sources for code supported")
    Errors.close_listing_file()
    result.num_errors = Errors.num_errors
    failed = err or result.num_errors > 0
    if failed and result.c_file:
        # Stamp the stale C file so it cannot pass for valid output;
        # ignore filesystem errors while doing so.
        try:
            Utils.castrate_file(result.c_file,
                                os.stat(source_desc.filename))
        except EnvironmentError:
            pass
        result.c_file = None
def expect(self, *tokens):
    """Pop the next token and return it when its type is in `tokens`.

    When a LABEL is acceptable, 'X'/'Y'/'OPCODE' tokens are coerced to
    LABEL.  On any other mismatch an error is logged; the popped token
    is returned in every case."""
    lexeme = self.pop()
    if lexeme.type in tokens:
        return lexeme
    if 'LABEL' in tokens:
        if lexeme.type in ('X', 'Y'):
            # Index registers double as labels named "x"/"y".
            lexeme.value = lexeme.type.lower()
            lexeme.type = 'LABEL'
            return lexeme
        if lexeme.type == 'OPCODE':
            lexeme.type = 'LABEL'
            return lexeme
    Err.log('Expected: "' + '", "'.join(tokens) + '"')
    return lexeme
def expandMacro(ppt, name, arglist):
    """Expand macro `name` with `arglist` into an IR sequence (Python 2).

    Arguments are bound twice: first as hidden "_*N" labels, then as
    the user-visible "_N" aliases, inside two nested scopes wrapping a
    copy of the macro body.
    """
    global macros
    if name not in macros:
        Err.log("Undefined macro '%s'" % name)
        return IR.NullNode
    # zip() stops at the shorter sequence, so the unbounded
    # xrange(1, sys.maxint) just numbers the actual arguments 1..N.
    argexprs = [IR.Node(ppt, "Label", "_*%d" % i, arg)
                for (i, arg) in zip(xrange(1, sys.maxint), arglist)]
    bindexprs = [IR.Node(ppt, "Label", "_%d" % i,
                         IR.LabelExpr("_*%d" % i))
                 for i in range(1, len(arglist) + 1)]
    # Re-tag each body node with an invocation-site -> definition-site
    # program point so errors can be traced through the expansion.
    body = [IR.Node("%s->%s" % (ppt, node.ppt), node.nodetype,
                    *node.data)
            for node in macros[name]]
    invocation = ([IR.Node(ppt, "ScopeBegin")] + argexprs +
                  [IR.Node(ppt, "ScopeBegin")] + bindexprs + body +
                  [IR.Node(ppt, "ScopeEnd"), IR.Node(ppt, "ScopeEnd")])
    return IR.SequenceNode(ppt, invocation)
def pragmaCharmapbin(ppt, line, result):
    """Replace the character map with the 256-byte contents of a file."""
    global currentcharmap
    filename = line.expect("STRING").value
    line.expect("EOL")
    if type(filename) != str:
        return
    try:
        # `file()` is the Python 2 built-in open(), as used elsewhere here.
        handle = file(os.path.join(FE.context_directory, filename), "rb")
        data = handle.read()
        handle.close()
    except IOError:
        Err.log("Could not read " + filename)
        return
    if len(data) == 256:
        currentcharmap = data
    else:
        Err.log("Character map " + filename + " not 256 bytes long")
def pragmaCharmap(ppt, line, result):
    "Modify the character map."
    # With no arguments, restore the pristine base map; otherwise the
    # first data byte is the base index and the rest are spliced in
    # starting there.
    global currentcharmap, basecharmap
    if str(line.lookahead(0)) == "EOL":
        currentcharmap = basecharmap
    else:
        bytes = readData(line)
        try:
            base = bytes[0].data
            newsubstr = "".join([chr(x.data) for x in bytes[1:]])
            # Splice the replacement run into the 256-entry map.
            currentcharmap = currentcharmap[:base] + newsubstr + \
                currentcharmap[base + len(newsubstr):]
            # A splice that ran off the end changes the total length;
            # detect that and clamp back to 256 entries.
            if len(currentcharmap) != 256 or base < 0 or base > 255:
                Err.log("Charmap replacement out of range")
                currentcharmap = currentcharmap[:256]
        except ValueError:
            # chr() failed: a data item was outside 0-255.
            Err.log("Illegal character in .charmap directive")
def compile(self, source, options = None):
    # Compile a Pyrex implementation file in this context
    # and return a CompilationResult.
    """Parse and translate `source`, then optionally C-compile and link.

    The generated .c path comes from options.output_file or the source
    name; it is cleared from the result on any compile error.
    """
    if not options:
        options = default_options
    result = CompilationResult()
    cwd = os.getcwd()
    source = os.path.join(cwd, source)
    if options.use_listing_file:
        result.listing_file = replace_suffix(source, ".lis")
        Errors.open_listing_file(result.listing_file,
                                 echo_to_stderr = options.errors_to_stderr)
    else:
        Errors.open_listing_file(None)
    if options.output_file:
        result.c_file = os.path.join(cwd, options.output_file)
    else:
        result.c_file = replace_suffix(source, ".c")
    module_name = self.extract_module_name(source)
    initial_pos = (source, 1, 0)
    scope = self.find_module(module_name, pos = initial_pos, need_pxd = 0)
    try:
        tree = self.parse(source, scope.type_names, pxd = 0)
        tree.process_implementation(scope, result)
    except CompileError:
        # Parsing/translation failed: no usable C output.
        result.c_file = None
    Errors.close_listing_file()
    result.num_errors = Errors.num_errors
    if result.num_errors > 0:
        result.c_file = None
    # Only invoke the C toolchain when translation succeeded and the
    # hooks are available.
    if result.c_file and not options.c_only and c_compile:
        result.object_file = c_compile(result.c_file)
        if not options.obj_only and c_link:
            result.extension_file = c_link(result.object_file)
    return result
def parse_file(ppt, filename, load_once=False):
    "Loads an Ophis source file, and returns an IR list."
    # '-' means standard input.  context_directory is saved/restored so
    # nested .include paths resolve relative to the including file.
    # (Python 2: print>> and the `file()` builtin.)
    global context_directory, loadedfiles
    Err.currentpoint = ppt
    old_context = context_directory
    if filename != '-':
        if context_directory is not None:
            filename = os.path.abspath(os.path.join(context_directory,
                                                    filename))
        # .require-style semantics: a file already loaded is skipped.
        if load_once and filename in loadedfiles:
            if Cmd.print_loaded_files:
                print>>sys.stderr, "Skipping " + filename
            return IR.NullNode
        loadedfiles[filename] = True
    if Cmd.print_loaded_files:
        if filename != '-':
            print>>sys.stderr, "Loading " + filename
        else:
            print>>sys.stderr, "Loading from standard input"
    try:
        if filename != '-':
            # NOTE(review): filename was already made absolute above when
            # context_directory is set, so this second join looks
            # redundant (join with an absolute path is a no-op); verify.
            if context_directory is not None:
                filename = os.path.join(context_directory, filename)
            f = file(filename)
            linelist = f.readlines()
            f.close()
            context_directory = os.path.abspath(os.path.dirname(filename))
        else:
            context_directory = os.getcwd()
            linelist = sys.stdin.readlines()
        # One "file:line" program point per source line, for errors.
        pptlist = ["%s:%d" % (filename, i + 1)
                   for i in range(len(linelist))]
        lexlist = map(lex, pptlist, linelist)
        IRlist = map(parse_line, pptlist, lexlist)
        IRlist = [node for node in IRlist if node is not IR.NullNode]
        context_directory = old_context
        return IR.SequenceNode(ppt, IRlist)
    except IOError:
        Err.log("Could not read " + filename)
        context_directory = old_context
        return IR.NullNode
def run_pipeline(pipeline, source):
    """Feed `source` through each pipeline phase in order (Python 2).

    CompileErrors are reported and recorded; InternalErrors are
    re-raised only when no earlier compile error occurred.
    """
    error = None
    data = source
    try:
        try:
            for phase in pipeline:
                if phase is not None:
                    if DebugFlags.debug_verbose_pipeline:
                        t = time()
                        print "Entering pipeline phase %r" % phase
                    data = phase(data)
                    if DebugFlags.debug_verbose_pipeline:
                        print "    %.3f seconds" % (time() - t)
        except CompileError, err:
            # err is set
            Errors.report_error(err)
            error = err
    except InternalError, err:
        # Only raise if there was not an earlier error
        if Errors.num_errors == 0:
            raise
        error = err
    # NOTE(review): `error` and `data` are not returned -- confirm
    # whether this copy was truncated.
def atom():
    "Parses lowest-priority expression components."
    # Uses the enclosing scope's `line` token stream.  (`next` shadows
    # the builtin; kept as-is since only the token type string is used.)
    global templabelcount
    next = line.lookahead(0).type
    if next == "NUM":
        return IR.ConstantExpr(line.expect("NUM").value)
    elif next in ["LABEL", "X", "Y", "OPCODE"]:
        # expect("LABEL") coerces X/Y/OPCODE tokens into labels.
        return IR.LabelExpr(line.expect("LABEL").value)
    elif next == "^":
        # ^ is the current program counter.
        line.expect("^")
        return IR.PCExpr()
    elif next == "[":
        # Bracketed subexpression.
        line.expect("[")
        result = parse_expr(line)
        line.expect("]")
        return result
    elif next == "+":
        # Forward anonymous label: each extra '+' skips one more ahead.
        offset = 0
        while next == "+":
            offset += 1
            line.expect("+")
            next = line.lookahead(0).type
        return IR.LabelExpr("*" + str(templabelcount + offset))
    elif next == "-":
        # Backward anonymous label: offset starts at 1 so a single '-'
        # refers to the most recent temp label (templabelcount + 0).
        offset = 1
        while next == "-":
            offset -= 1
            line.expect("-")
            next = line.lookahead(0).type
        return IR.LabelExpr("*" + str(templabelcount + offset))
    elif next == ">":
        line.expect(">")
        return IR.HighByteExpr(atom())
    elif next == "<":
        line.expect("<")
        return IR.LowByteExpr(atom())
    else:
        Err.log('Expected: expression')
def teardown_errors(self, err, options, result):
    """Close the listing, record errors, invalidate stale C output, and
    run the C compile/link steps when translation succeeded."""
    source_desc = result.compilation_source.source_desc
    if not isinstance(source_desc, FileSourceDescriptor):
        raise RuntimeError("Only file sources for code supported")
    Errors.close_listing_file()
    result.num_errors = Errors.num_errors
    failed = err or result.num_errors > 0
    if failed and result.c_file:
        # Stamp the stale C file so it cannot pass for valid output;
        # ignore filesystem errors while doing so.
        try:
            Utils.castrate_file(result.c_file,
                                os.stat(source_desc.filename))
        except EnvironmentError:
            pass
        result.c_file = None
    # Only invoke the C toolchain when translation succeeded and the
    # compile/link hooks are available.
    if result.c_file and not options.c_only and c_compile:
        result.object_file = c_compile(result.c_file,
                                       verbose_flag = options.show_version,
                                       cplus = options.cplus)
        if not options.obj_only and c_link:
            result.extension_file = c_link(
                result.object_file,
                extra_objects = options.objects,
                verbose_flag = options.show_version,
                cplus = options.cplus)
def compile(self, source, options = None):
    # Compile a Pyrex implementation file in this context
    # and return a CompilationResult.
    """Parse and translate `source` (C or C++ output per options.cplus),
    then optionally run the C compile and link steps.

    On any error the generated file is castrated and dropped from the
    result so it cannot be mistaken for valid output.
    """
    if not options:
        options = default_options
    result = CompilationResult()
    cwd = os.getcwd()
    source = os.path.join(cwd, source)
    if options.use_listing_file:
        result.listing_file = replace_suffix(source, ".lis")
        Errors.open_listing_file(result.listing_file,
                                 echo_to_stderr = options.errors_to_stderr)
    else:
        Errors.open_listing_file(None)
    if options.output_file:
        result.c_file = os.path.join(cwd, options.output_file)
    else:
        # Output suffix depends on whether we emit C++ or C.
        if options.cplus:
            result.c_file = replace_suffix(source, cplus_suffix)
        else:
            result.c_file = map_suffix(source, pyx_to_c_suffix, ".c")
    module_name = self.extract_module_name(source)
    initial_pos = (source, 1, 0)
    def_scope = self.find_module(module_name, pos = initial_pos,
                                 need_pxd = 0)
    imp_scope = ImplementationScope(def_scope)
    errors_occurred = False
    try:
        tree = self.parse(source, imp_scope, pxd = 0)
        tree.process_implementation(imp_scope, options, result)
    except CompileError:
        errors_occurred = True
    Errors.close_listing_file()
    result.num_errors = Errors.num_errors
    if result.num_errors > 0:
        errors_occurred = True
    if errors_occurred and result.c_file:
        # Invalidate the stale output file; ignore filesystem errors.
        try:
            st = os.stat(source)
            castrate_file(result.c_file, st)
        except EnvironmentError:
            pass
        result.c_file = None
    # Only invoke the C toolchain when translation succeeded and the
    # compile/link hooks are available.
    if result.c_file and not options.c_only and c_compile:
        result.object_file = c_compile(result.c_file,
                                       verbose_flag = options.show_version,
                                       cplus = options.cplus)
        if not options.obj_only and c_link:
            result.extension_file = c_link(
                result.object_file,
                extra_objects = options.objects,
                verbose_flag = options.show_version,
                cplus = options.cplus)
    return result
def aux():
    "Accumulates all IR nodes defined by this line."
    # Uses the enclosing scope's `line`, `result`, and `ppt`; recurses
    # after labels so several items can share one source line.
    if line.lookahead(0).type == "EOL":
        # Blank line: nothing to emit.
        pass
    elif line.lookahead(1).type == ":":
        # "name:" -- a named label bound to the current PC.
        newlabel = line.expect("LABEL").value
        line.expect(":")
        result.append(IR.Node(ppt, "Label", newlabel, IR.PCExpr()))
        aux()
    elif line.lookahead(0).type == "*":
        # "*" -- an anonymous (temporary) label.
        global templabelcount
        templabelcount = templabelcount + 1
        result.append(IR.Node(ppt, "Label",
                              "*" + str(templabelcount), IR.PCExpr()))
        line.expect("*")
        aux()
    elif line.lookahead(0).type == "." or line.lookahead(0).type == "`":
        # ".name" runs a pragma; "`" is shorthand for .invoke.
        which = line.expect(".", "`").type
        if (which == "."):
            pragma = line.expect("LABEL").value
        else:
            pragma = "invoke"
        pragmaFunction = "pragma" + pragma.title()
        # Dispatch to the first registered module providing the pragma.
        for mod in pragma_modules:
            if hasattr(mod, pragmaFunction):
                getattr(mod, pragmaFunction)(ppt, line, result)
                break
        else:
            Err.log("Unknown pragma " + pragma)
    else:  # Instruction
        opcode = line.expect("OPCODE").value
        arg2 = None
        if line.lookahead(0).type == "#":
            # "#expr" -- immediate operand.
            mode = "Immediate"
            line.expect("#")
            arg = parse_expr(line)
            line.expect("EOL")
        elif line.lookahead(0).type == "(":
            # "(expr...)" -- one of the indirect (pointer) modes.
            line.expect("(")
            arg = parse_expr(line)
            if line.lookahead(0).type == ",":
                # "(expr, X)" -- pre-indexed indirect.
                mode = "PointerX"
                line.expect(",")
                line.expect("X")
                line.expect(")")
                line.expect("EOL")
            else:
                line.expect(")")
                tok = line.expect(",", "EOL").type
                if tok == "EOL":
                    # "(expr)" -- plain indirect.
                    mode = "Pointer"
                else:
                    # "(expr), Y" -- post-indexed indirect.
                    mode = "PointerY"
                    line.expect("Y")
                    line.expect("EOL")
        elif line.lookahead(0).type == "EOL":
            # Bare opcode -- implied addressing.
            mode = "Implied"
            arg = None
        else:
            # "expr [, X|Y|expr]" -- the direct memory modes.
            arg = parse_expr(line)
            tok = line.expect("EOL", ",").type
            if tok == ",":
                # Parser has to special-case the BBXn instructions,
                # Which uniquely take two addresses
                if opcode[:3] in ["bbs", "bbr"]:
                    arg2 = parse_expr(line)
                    mode = "Memory2"
                else:
                    tok = line.expect("X", "Y").type
                    if tok == "X":
                        mode = "MemoryX"
                    else:
                        mode = "MemoryY"
                line.expect("EOL")
            else:
                mode = "Memory"
        result.append(IR.Node(ppt, mode, opcode, arg, arg2))
def nonfatal_error(self, exc):
    """Report a non-fatal error through the Errors module and
    propagate whatever the reporter returns."""
    outcome = Errors.report_error(exc)
    return outcome
def lex(point, line):
    """Turns a line of source into a sequence of lexemes."""
    # point: source location used for error reporting (Err.currentpoint).
    # line:  raw source text; returns a list of Lexeme objects ending
    #        with an EOL lexeme.
    Err.currentpoint = point
    result = []

    def is_opcode(op):
        "Tests whether a string is an opcode or an identifier"
        return op in Ops.opcodes

    def add_token(token):
        "Converts a substring into a single lexeme"
        if token == "":
            return
        if token == "0":
            # Fast path: a lone zero needs no base detection.
            result.append(Lexeme("NUM", 0))
            return
        firstchar = token[0]
        rest = token[1:]
        if firstchar == '"':
            # String literal: the opening quote was kept as a marker by
            # the scanner below; the closing quote was stripped.
            result.append(Lexeme("STRING", rest))
            return
        elif firstchar in bases:
            # Radix-prefixed number ($hex, %binary, etc. — see `bases`).
            try:
                result.append(Lexeme("NUM", long(rest, bases[firstchar][1])))
                return
            except ValueError:
                Err.log('Invalid ' + bases[firstchar][0] + ' constant: ' +
                        rest)
                # Emit a dummy NUM so parsing can continue.
                result.append(Lexeme("NUM", 0))
                return
        elif firstchar.isdigit():
            # Decimal number; digits followed by letters is an error.
            try:
                result.append(Lexeme("NUM", long(token)))
            except ValueError:
                Err.log('Identifiers may not begin with a number')
                result.append(Lexeme("LABEL", "ERROR"))
            return
        elif firstchar == "'":
            # Character constant: exactly one character after the quote.
            if len(rest) == 1:
                result.append(Lexeme("NUM", ord(rest)))
            else:
                Err.log("Invalid character constant '" + rest + "'")
                result.append(Lexeme("NUM", 0))
            return
        elif firstchar in punctuation:
            # Punctuation arrives one character at a time from the
            # scanner, so anything longer indicates a lexer bug.
            if rest != "":
                Err.log("Internal lexer error! '" + token + "' can't happen!")
            result.append(Lexeme(firstchar))
            return
        else:  # Label, opcode, or index register
            id = token.lower()
            if is_opcode(id):
                result.append(Lexeme("OPCODE", id))
            elif id == "x":
                result.append(Lexeme("X"))
            elif id == "y":
                result.append(Lexeme("Y"))
            else:
                result.append(Lexeme("LABEL", id))
            return
        # should never reach here
        Err.log("Internal lexer error: add_token fall-through")

    def add_EOL():
        "Adds an end-of-line lexeme"
        result.append(Lexeme("EOL"))

    # Actual routine begins here
    # Scan character by character, accumulating the current token in
    # `value` and flushing it via add_token at token boundaries.
    value = ""
    quotemode = False       # inside a "..." string literal
    backslashmode = False   # previous char was a backslash escape
    for c in line.strip():
        if backslashmode:
            backslashmode = False
            value = value + c
        elif c == "\\":
            backslashmode = True
        elif quotemode:
            if c == '"':
                quotemode = False
            else:
                value = value + c
        elif c == ';':
            # Comment: flush the current token and ignore the rest.
            add_token(value)
            value = ""
            break
        elif c == '.' and value != "":
            # A dot inside a token (e.g. scoped label) stays in the
            # token; a leading dot is pragma punctuation.
            value = value + c
        elif c.isspace():
            add_token(value)
            value = ""
        elif c in punctuation:
            # Punctuation terminates the current token and is itself
            # a one-character token.
            add_token(value)
            add_token(c)
            value = ""
        elif c == '"':
            # Start of a string literal; keep the quote as a marker
            # so add_token recognizes the token as a STRING.
            add_token(value)
            value = '"'
            quotemode = True
        else:
            value = value + c
    if backslashmode:
        Err.log("Backslashed newline")
    if quotemode:
        Err.log("Unterminated string constant")
    add_token(value)
    add_EOL()
    return result
def run_all(): """Transforms the source infiles to a binary outfile. Returns a shell-style exit code: 1 if there were errors, 0 if there were no errors. """ Err.count = 0 Tamagotchi.process(CmdLine.infiles) z = Frontend.parse(CmdLine.infiles) env = Environment.Environment() m = Passes.ExpandMacros() i = Passes.InitLabels() l_basic = Passes.UpdateLabels() l = Passes.FixPoint("label update", [l_basic], lambda: not l_basic.changed) # The instruction selector is a bunch of fixpoints, and which # passes run depends on the command line options a bit. c_basic = Passes.Collapse() c = Passes.FixPoint("instruction selection 1", [l, c_basic], lambda: not c_basic.changed) if CmdLine.enable_branch_extend: b = Passes.ExtendBranches() instruction_select = Passes.FixPoint("instruction selection 2", [c, b], lambda: not b.changed) else: instruction_select = c a = Passes.Assembler() passes = [] passes.append(Passes.DefineMacros()) passes.append(Passes.FixPoint("macro expansion", [m], lambda: not m.changed)) passes.append(Passes.FixPoint("label initialization", [i], lambda: not i.changed)) passes.extend([Passes.CircularityCheck(), Passes.CheckExprs(), Passes.EasyModes()]) passes.append(instruction_select) passes.extend([Passes.NormalizeModes(), Passes.UpdateLabels(), a]) for p in passes: p.go(z, env) if Err.count == 0: try: outfile = CmdLine.outfile if outfile == '-': output = sys.stdout if sys.platform == "win32": # We can't dump our binary in text mode; that would be # disastrous. So, we'll do some platform-specific # things here to force our stdout to binary mode. 
import msvcrt msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY) elif outfile is None: output = file('bin', 'wb') else: output = file(outfile, 'wb') f = open("template.txt", "rb") t = f.read() head = t[:0x40000] if (len("".join(map(chr, a.output))) > 0x400): print "too large" return 1 tail = t[(0x40000 + len("".join(map(chr, a.output)))):] output.write(head + "".join(map(chr, a.output)) + tail) output.flush() if outfile != '-': output.close() return 0 except IOError: print>>sys.stderr, "Could not write to " + outfile return 1 else: Err.report() return 1
def compile(self, source, options = None, full_module_name = None):
    # Compile a Pyrex implementation file in this context
    # and return a CompilationResult.
    #
    # source:           path of the .pyx file, made absolute below.
    # options:          CompilationOptions; falls back to default_options.
    # full_module_name: dotted module name; derived from the source
    #                   path when not given.
    if not options:
        options = default_options
    result = CompilationResult()
    cwd = os.getcwd()
    if full_module_name is None:
        # Derive "pkg.mod" from the relative path: drop the extension,
        # turn path separators into dots, sanitize other characters.
        full_module_name, _ = os.path.splitext(source)
        full_module_name = re.sub(r'[\\/]', '.', full_module_name)
        full_module_name = re.sub(r'[^\w.]', '_', full_module_name)
    source = os.path.join(cwd, source)
    # Listing file echoes errors; opening with None disables it.
    if options.use_listing_file:
        result.listing_file = replace_suffix(source, ".lis")
        Errors.open_listing_file(result.listing_file,
            echo_to_stderr = options.errors_to_stderr)
    else:
        Errors.open_listing_file(None)
    # Decide where the generated C/C++ file goes.
    if options.output_file:
        result.c_file = os.path.join(cwd, options.output_file)
    else:
        if options.cplus:
            c_suffix = ".cpp"
        else:
            c_suffix = ".c"
        result.c_file = replace_suffix(source, c_suffix)
    # Remember the pre-compilation stat of any existing C file so it
    # can be invalidated consistently on error below.
    c_stat = None
    if result.c_file:
        try:
            c_stat = os.stat(result.c_file)
        except EnvironmentError:
            pass
    module_name = full_module_name # self.extract_module_name(source, options)
    initial_pos = (source, 1, 0)
    # need_pxd = 0: don't require a matching .pxd for the module itself.
    scope = self.find_module(module_name, pos = initial_pos, need_pxd = 0)
    errors_occurred = False
    try:
        tree = self.parse(source, scope.type_names, pxd = 0,
                          full_module_name = full_module_name)
        tree.process_implementation(scope, options, result)
    except CompileError:
        # Errors were already reported to the listing file/stderr.
        errors_occurred = True
    Errors.close_listing_file()
    result.num_errors = Errors.num_errors
    if result.num_errors > 0:
        errors_occurred = True
    if errors_occurred and result.c_file:
        # Invalidate the stale generated C file rather than deleting it
        # (castrate_file truncates; see commented-out unlink).
        try:
            #os.unlink(result.c_file)
            Utils.castrate_file(result.c_file, c_stat)
        except EnvironmentError:
            pass
        result.c_file = None
    # Optionally continue to C compilation and linking when the
    # toolchain hooks (c_compile / c_link) are available.
    if result.c_file and not options.c_only and c_compile:
        result.object_file = c_compile(result.c_file,
            verbose_flag = options.show_version,
            cplus = options.cplus)
        if not options.obj_only and c_link:
            result.extension_file = c_link(result.object_file,
                extra_objects = options.objects,
                verbose_flag = options.show_version,
                cplus = options.cplus)
    return result