def load_extension_module(space, path, name): if os.sep not in path: path = os.curdir + os.sep + path # force a '/' in the path state = space.fromcache(State) if state.find_extension(name, path) is not None: return old_context = state.package_context state.package_context = name, path try: from pypy.rlib import rdynload try: ll_libname = rffi.str2charp(path) try: dll = rdynload.dlopen(ll_libname) finally: lltype.free(ll_libname, flavor='raw') except rdynload.DLOpenError, e: raise operationerrfmt( space.w_ImportError, "unable to load extension module '%s': %s", path, e.msg) try: initptr = rdynload.dlsym(dll, 'init%s' % (name.split('.')[-1],)) except KeyError: raise operationerrfmt( space.w_ImportError, "function init%s not found in library %s", name, path) initfunc = rffi.cast(initfunctype, initptr) generic_cpy_call(space, initfunc) state.check_and_raise_exception()
def set_param(space, __args__): '''Configure the tunable JIT parameters. * set_param(name=value, ...) # as keyword arguments * set_param("name=value,name=value") # as a user-supplied string * set_param("off") # disable the jit * set_param("default") # restore all defaults ''' # XXXXXXXXX args_w, kwds_w = __args__.unpack() if len(args_w) > 1: msg = "set_param() takes at most 1 non-keyword argument, %d given" raise operationerrfmt(space.w_TypeError, msg, len(args_w)) if len(args_w) == 1: text = space.str_w(args_w[0]) try: jit.set_user_param(None, text) except ValueError: raise OperationError(space.w_ValueError, space.wrap("error in JIT parameters string")) for key, w_value in kwds_w.items(): if key == 'enable_opts': jit.set_param(None, 'enable_opts', space.str_w(w_value)) else: intval = space.int_w(w_value) for name, _ in unroll_parameters: if name == key and name != 'enable_opts': jit.set_param(None, name, intval) break else: raise operationerrfmt(space.w_TypeError, "no JIT parameter '%s'", key)
def readline_w(self, space, w_limit=None):
    """For backwards compatibility, a (slowish) readline().

    Reads via peek()/read() until a '\\n' is seen, EOF is reached, or
    `limit` bytes have been collected (a negative limit means unlimited).
    Returns the collected bytes as a wrapped str.
    """
    limit = convert_size(space, w_limit)
    has_peek = space.findattr(self, space.wrap("peek"))

    builder = StringBuilder()
    size = 0

    while limit < 0 or size < limit:
        nreadahead = 1

        if has_peek:
            w_readahead = space.call_method(self, "peek", space.wrap(1))
            if not space.isinstance_w(w_readahead, space.w_str):
                raise operationerrfmt(
                    space.w_IOError,
                    "peek() should have returned a bytes object, "
                    "not '%s'", space.type(w_readahead).getname(space),
                )
            length = space.len_w(w_readahead)
            if length > 0:
                # Scan the peeked data for a newline so that one read()
                # call can fetch everything up to and including it.
                n = 0
                buf = space.str_w(w_readahead)
                if limit >= 0:
                    while True:
                        if n >= length or n >= limit:
                            break
                        n += 1
                        if buf[n - 1] == "\n":
                            break
                else:
                    while True:
                        if n >= length:
                            break
                        n += 1
                        if buf[n - 1] == "\n":
                            break
                nreadahead = n

        w_read = space.call_method(self, "read", space.wrap(nreadahead))
        if not space.isinstance_w(w_read, space.w_str):
            # Bug fix: this branch checks the result of read(), not peek();
            # the message previously blamed peek().
            raise operationerrfmt(
                space.w_IOError,
                "read() should have returned a bytes object, "
                "not '%s'", space.type(w_read).getname(space),
            )
        read = space.str_w(w_read)
        if not read:
            break  # EOF

        size += len(read)
        builder.append(read)

        if read[-1] == "\n":
            break

    return space.wrap(builder.build())
def reload(space, w_module):
    """Reload the module.
    The module must have been successfully imported before."""
    # Argument must be of the module type (compared against sys's type).
    if not space.is_w(space.type(w_module), space.type(space.sys)):
        raise OperationError(
            space.w_TypeError,
            space.wrap("reload() argument must be module"))

    w_modulename = space.getattr(w_module, space.wrap("__name__"))
    modulename = space.str0_w(w_modulename)
    # reload() only works on modules that are currently in sys.modules.
    if not space.is_w(check_sys_modules(space, w_modulename), w_module):
        raise operationerrfmt(
            space.w_ImportError,
            "reload(): module %s not in sys.modules", modulename)

    try:
        w_mod = space.reloading_modules[modulename]
        # Due to a recursive reload, this module is already being reloaded.
        return w_mod
    except KeyError:
        pass
    # Mark this module as "being reloaded" for the duration of the call.
    space.reloading_modules[modulename] = w_module
    try:
        namepath = modulename.split('.')
        subname = namepath[-1]
        parent_name = '.'.join(namepath[:-1])
        parent = None
        if parent_name:
            # A dotted name: the parent package must itself be importable
            # and provides the __path__ used to locate the submodule.
            w_parent = check_sys_modules_w(space, parent_name)
            if w_parent is None:
                raise operationerrfmt(
                    space.w_ImportError,
                    "reload(): parent %s not in sys.modules", parent_name)
            w_path = space.getattr(w_parent, space.wrap("__path__"))
        else:
            w_path = None

        find_info = find_module(
            space, modulename, w_modulename, subname, w_path)

        if not find_info:
            # ImportError
            msg = "No module named %s"
            raise operationerrfmt(space.w_ImportError, msg, modulename)

        try:
            try:
                return load_module(space, w_modulename, find_info, reuse=True)
            finally:
                if find_info.stream:
                    find_info.stream.close()
        except:
            # load_module probably removed name from modules because of
            # the error.  Put back the original module object.
            space.sys.setmodule(w_module)
            raise
    finally:
        del space.reloading_modules[modulename]
def _getfunc(space, CDLL, w_name, w_argtypes, w_restype):
    # Resolve a function from an opened CDLL, either by symbol name (str)
    # or by export ordinal (int), and wrap it as a W_FuncPtr.
    # Raises AttributeError if the symbol/ordinal is absent, TypeError for
    # any other name type, and converts libffi failures to an app-level
    # error via got_libffi_error().
    argtypes_w, argtypes, w_restype, restype = unpack_argtypes(
        space, w_argtypes, w_restype)
    if space.isinstance_w(w_name, space.w_str):
        # Lookup by symbol name.
        name = space.str_w(w_name)
        try:
            func = CDLL.cdll.getpointer(name, argtypes, restype,
                                        flags = CDLL.flags)
        except KeyError:
            raise operationerrfmt(
                space.w_AttributeError,
                "No symbol %s found in library %s", name, CDLL.name)
        except LibFFIError:
            raise got_libffi_error(space)
        return W_FuncPtr(func, argtypes_w, w_restype)
    elif space.isinstance_w(w_name, space.w_int):
        # Lookup by export ordinal (Windows-style DLLs).
        ordinal = space.int_w(w_name)
        try:
            func = CDLL.cdll.getpointer_by_ordinal(
                ordinal, argtypes, restype,
                flags = CDLL.flags)
        except KeyError:
            raise operationerrfmt(
                space.w_AttributeError,
                "No ordinal %d found in library %s", ordinal, CDLL.name)
        except LibFFIError:
            raise got_libffi_error(space)
        return W_FuncPtr(func, argtypes_w, w_restype)
    else:
        raise OperationError(space.w_TypeError, space.wrap(
            'function name must be a string or integer'))
def seek_w(self, space, pos, mode=0):
    """seek(pos[, whence]) on an in-memory buffer.

    whence 0 is an absolute seek (pos must be non-negative); whence 1
    and 2 only accept pos == 0 and jump to the current position or the
    end of the buffer, respectively.  Returns the new position, wrapped.
    """
    self._check_closed(space)

    if mode < 0 or mode > 2:
        raise operationerrfmt(space.w_ValueError,
                              "Invalid whence (%d, should be 0, 1 or 2)",
                              mode)
    elif mode == 0 and pos < 0:
        raise operationerrfmt(space.w_ValueError,
                              "negative seek position: %d", pos)
    elif mode != 0 and pos != 0:
        raise OperationError(space.w_IOError,
                             space.wrap("Can't do nonzero cur-relative seeks"))

    # XXX: this makes almost no sense, but it's how CPython does it.
    if mode == 1:
        pos = self.pos
    elif mode == 2:
        pos = len(self.buf)

    assert pos >= 0
    self.pos = pos
    return space.wrap(pos)
def ParserCreate(space, w_encoding=None, w_namespace_separator=None,
                 w_intern=None):
    """ParserCreate([encoding[, namespace_separator]]) -> parser

    Return a new XML parser object."""
    # Encoding: None or a plain string.
    if space.is_none(w_encoding):
        encoding = None
    elif space.isinstance_w(w_encoding, space.w_str):
        encoding = space.str_w(w_encoding)
    else:
        raise operationerrfmt(
            space.w_TypeError,
            'ParserCreate() argument 1 must be string or None, not %T',
            w_encoding)

    # Namespace separator: None/empty disables namespace processing;
    # a 1-char string is passed to expat as a single character.
    if space.is_none(w_namespace_separator):
        namespace_separator = 0
    elif space.isinstance_w(w_namespace_separator, space.w_str):
        separator = space.str_w(w_namespace_separator)
        if len(separator) == 0:
            namespace_separator = 0
        elif len(separator) == 1:
            namespace_separator = ord(separator[0])
        else:
            raise OperationError(
                space.w_ValueError,
                space.wrap('namespace_separator must be at most one character,'
                           ' omitted, or None'))
    else:
        raise operationerrfmt(
            space.w_TypeError,
            'ParserCreate() argument 2 must be string or None, not %T',
            w_namespace_separator)

    # Explicitly passing None means no interning is desired.
    # Not passing anything means that a new dictionary is used.
    if w_intern is None:
        w_intern = space.newdict()
    elif space.is_w(w_intern, space.w_None):
        w_intern = None

    if namespace_separator:
        xmlparser = XML_ParserCreateNS(
            encoding,
            rffi.cast(rffi.CHAR, namespace_separator))
    else:
        xmlparser = XML_ParserCreate(encoding)
    # Currently this is just the size of the pointer and some estimated bytes.
    # The struct isn't actually defined in expat.h - it is in xmlparse.c
    # XXX: find a good estimate of the XML_ParserStruct
    rgc.add_memory_pressure(XML_Parser_SIZE + 300)
    if not xmlparser:
        raise OperationError(space.w_RuntimeError,
                             space.wrap('XML_ParserCreate failed'))

    parser = W_XMLParserType(space, xmlparser, w_intern)
    # Route unknown-encoding callbacks back to this parser instance.
    XML_SetUnknownEncodingHandler(
        parser.itself, UnknownEncodingHandlerData_callback,
        rffi.cast(rffi.VOIDP, parser.id))
    return space.wrap(parser)
def test_operationerrfmt_R(space):
    # %R formats the operand as repr() of the wrapped object.
    operr = operationerrfmt(space.w_ValueError,
                            "illegal newline value: %R", space.wrap('foo'))
    assert operr._compute_value(space) == "illegal newline value: 'foo'"
    # A value containing single quotes is repr'd with double quotes.
    operr = operationerrfmt(space.w_ValueError,
                            "illegal newline value: %R",
                            space.wrap("'PyLadies'"))
    expected = "illegal newline value: \"'PyLadies'\""
    assert operr._compute_value(space) == expected
def test_operationerrfmt_T(space):
    # %T formats the *type name* of the wrapped operand.
    operr = operationerrfmt(space.w_AttributeError,
                            "'%T' object has no attribute '%s'",
                            space.wrap('foo'), 'foo')
    assert operr._compute_value(space) == "'str' object has no attribute 'foo'"
    # The exception-type argument is not consulted when rendering %T, so
    # any placeholder (here a plain string) works.
    operr = operationerrfmt("w_type",
                            "'%T' object has no attribute '%s'",
                            space.wrap('foo'), 'foo')
    assert operr._compute_value(space) == "'str' object has no attribute 'foo'"
def raiseattrerror(space, w_obj, name, w_descr=None):
    """Raise the app-level AttributeError for `name` on `w_obj`.

    With a descriptor the attribute exists but is read-only; without one
    it is missing entirely.
    """
    if w_descr is not None:
        message = "'%T' object attribute '%s' is read-only"
    else:
        message = "'%T' object has no attribute '%s'"
    raise operationerrfmt(space.w_AttributeError, message, w_obj, name)
def test_operationerrfmt():
    # operationerrfmt builds a lazily-formatted OperationError: the message
    # is only rendered when _compute_value() is called.
    operr = operationerrfmt("w_type", "abc %s def %d", "foo", 42)
    assert isinstance(operr, OperationError)
    assert operr.w_type == "w_type"
    assert operr._w_value is None
    assert operr._compute_value() == "abc foo def 42"
    # The generated class is shared per format-specifier signature:
    # another (%s, %d) format reuses the same class ...
    operr2 = operationerrfmt("w_type2", "a %s b %d c", "bar", 43)
    assert operr2.__class__ is operr.__class__
    # ... while a different signature (%s, %s) gets a distinct class.
    operr3 = operationerrfmt("w_type2", "a %s b %s c", "bar", "4b")
    assert operr3.__class__ is not operr.__class__
def getindex(self, space, item):
    """Normalize `item` against self.size and return it.

    Negative indices count from the end; out-of-range indices raise
    an app-level IndexError.
    """
    size = self.size
    if item >= size:
        raise operationerrfmt(space.w_IndexError,
                              '%d above array size', item)
    if item < 0:
        item += size
        if item < 0:
            # still negative after wrapping: too far below zero
            raise operationerrfmt(space.w_IndexError,
                                  '%d below zero', item)
    return item
def descr_delattr(self, space, w_attr):
    # Delete an attribute from an old-style class.  The three special
    # attributes are structural and cannot be removed.
    name = unwrap_attr(space, w_attr)
    if name in ("__dict__", "__name__", "__bases__"):
        raise operationerrfmt(space.w_TypeError,
                              "cannot delete attribute '%s'", name)
    try:
        space.delitem(self.w_dict, w_attr)
    except OperationError, e:
        if not e.match(space, space.w_KeyError):
            raise
        # Missing attribute: surface as AttributeError, not KeyError.
        raise operationerrfmt(space.w_AttributeError,
                              "class %s has no attribute '%s'",
                              self.name, name)
def get_method(space, b_type, name, b_paramtypes):
    """Look up a .NET method on `b_type` by name and parameter types.

    Maps both failure modes (ambiguous overload set, no matching
    overload) to an app-level TypeError.
    """
    try:
        found = b_type.GetMethod(name, b_paramtypes)
    except AmbiguousMatchException:
        raise operationerrfmt(space.w_TypeError,
                              'Multiple overloads for %s could match', name)
    if found is None:
        raise operationerrfmt(space.w_TypeError,
                              'No overloads for %s could match', name)
    return found
def raiseattrerror(space, w_obj, name, w_descr=None):
    """Raise AttributeError for `name` on `w_obj`, naming its type.

    A non-None descriptor means the attribute exists but rejects writes.
    """
    typename = space.type(w_obj).getname(space)
    if w_descr is not None:
        raise operationerrfmt(space.w_AttributeError,
                              "'%s' object attribute '%s' is read-only",
                              typename, name)
    raise operationerrfmt(space.w_AttributeError,
                          "'%s' object has no attribute '%s'",
                          typename, name)
def _convert_error(self, expected, w_got):
    # Build (not raise) the TypeError for a bad initializer value.
    # cdata operands are described by their ctype name; any other object
    # is described by its app-level type via %T.
    space = self.space
    if isinstance(w_got, cdataobj.W_CData):
        return operationerrfmt(space.w_TypeError,
                               "initializer for ctype '%s' must be a %s, "
                               "not cdata '%s'",
                               self.name, expected, w_got.ctype.name)
    else:
        return operationerrfmt(space.w_TypeError,
                               "initializer for ctype '%s' must be a %s, "
                               "not %T",
                               self.name, expected, w_got)
def format(space, w_obj, w_format_spec):
    """Dispatch format(w_obj, w_format_spec) through __format__.

    Raises TypeError when the type defines no __format__ or when the
    hook returns something other than str/unicode.
    """
    w_impl = space.lookup(w_obj, '__format__')
    if w_impl is None:
        raise operationerrfmt(space.w_TypeError,
                              "'%T' object does not define __format__",
                              w_obj)
    w_result = space.get_and_call_function(w_impl, w_obj, w_format_spec)
    if space.isinstance_w(w_result, space.w_basestring):
        return w_result
    raise operationerrfmt(
        space.w_TypeError,
        "%T.__format__ must return string or unicode, not %T",
        w_obj, w_result)
def call(self, space, args_w):
    # Call the underlying C function.  Each app-level argument is either
    # a structure passed by value (must match size/alignment of the
    # declared argtype) or a length-1 array whose typecode must match the
    # declared shape (pointer typecodes are interchangeable).
    from pypy.module._rawffi.array import W_ArrayInstance
    from pypy.module._rawffi.structure import W_StructureInstance
    from pypy.module._rawffi.structure import W_Structure
    argnum = len(args_w)
    if argnum != len(self.argshapes):
        msg = "Wrong number of arguments: expected %d, got %d"
        raise operationerrfmt(space.w_TypeError, msg,
                              len(self.argshapes), argnum)
    args_ll = []
    for i in range(argnum):
        argshape = self.argshapes[i]
        w_arg = args_w[i]
        if isinstance(argshape, W_Structure):
            # argument by value
            arg = space.interp_w(W_StructureInstance, w_arg)
            xsize, xalignment = size_alignment(self.ptr.argtypes[i])
            if (arg.shape.size != xsize or
                    arg.shape.alignment != xalignment):
                msg = ("Argument %d should be a structure of size %d and "
                       "alignment %d, "
                       "got instead size %d and alignment %d")
                raise operationerrfmt(space.w_TypeError, msg, i+1,
                                      xsize, xalignment,
                                      arg.shape.size, arg.shape.alignment)
        else:
            # argument passed through a one-element array buffer
            arg = space.interp_w(W_ArrayInstance, w_arg)
            if arg.length != 1:
                msg = ("Argument %d should be an array of length 1, "
                       "got length %d")
                raise operationerrfmt(space.w_TypeError, msg,
                                      i+1, arg.length)
            argletter = argshape.itemcode
            letter = arg.shape.itemcode
            if letter != argletter:
                # Any pointer typecode is acceptable for any other.
                if not (argletter in TYPEMAP_PTR_LETTERS and
                        letter in TYPEMAP_PTR_LETTERS):
                    msg = "Argument %d should be typecode %s, got %s"
                    raise operationerrfmt(space.w_TypeError, msg,
                                          i+1, argletter, letter)
        args_ll.append(arg.ll_buffer)
        # XXX we could avoid the intermediate list args_ll
    try:
        if self.resshape is not None:
            # Result is written into a freshly-allocated buffer.
            result = self.resshape.allocate(space, 1, autofree=True)
            self.ptr.call(args_ll, result.ll_buffer)
            return space.wrap(result)
        else:
            self.ptr.call(args_ll, lltype.nullptr(rffi.VOIDP.TO))
            return space.w_None
    except StackCheckError, e:
        raise OperationError(space.w_ValueError, space.wrap(e.message))
def _missing_ffi_type(self, cifbuilder, is_result_type):
    """Raise the appropriate error for a ctype libffi cannot describe.

    Incomplete types are a TypeError; complete-but-unsupported types
    are a NotImplementedError mentioning where they were used.
    """
    space = self.space
    if self.size < 0:
        raise operationerrfmt(space.w_TypeError,
                              "ctype '%s' has incomplete type",
                              self.name)
    place = "return value" if is_result_type else "argument"
    raise operationerrfmt(space.w_NotImplementedError,
                          "ctype '%s' (size %d) not supported as %s",
                          self.name, self.size, place)
def check_user_subclass(w_self, w_subtype):
    # Validate that w_subtype can be used to build an instance of a
    # subclass of w_self; returns w_subtype on success.
    space = w_self.space
    if not isinstance(w_subtype, W_TypeObject):
        raise operationerrfmt(space.w_TypeError,
                              "X is not a type object ('%s')",
                              space.type(w_subtype).getname(space))
    if not w_subtype.issubtype(w_self):
        raise operationerrfmt(space.w_TypeError,
                              "%s.__new__(%s): %s is not a subtype of %s",
                              w_self.name, w_subtype.name,
                              w_subtype.name, w_self.name)
    if w_self.instancetypedef is not w_subtype.instancetypedef:
        # Different interp-level layouts: building w_subtype through
        # w_self.__new__ would be unsafe.
        raise operationerrfmt(space.w_TypeError,
                              "%s.__new__(%s) is not safe, use %s.__new__()",
                              w_self.name, w_subtype.name, w_subtype.name)
    return w_subtype
def format(space, w_obj, w_format_spec):
    # Dispatch format(w_obj, w_format_spec) through the type's __format__
    # slot; the hook must return a str or unicode.
    w_descr = space.lookup(w_obj, '__format__')
    if w_descr is None:
        typename = space.type(w_obj).getname(space)
        raise operationerrfmt(space.w_TypeError,
                              "'%s' object does not define __format__",
                              typename)
    w_res = space.get_and_call_function(w_descr, w_obj, w_format_spec)
    if not space.is_true(space.isinstance(w_res, space.w_basestring)):
        typename = space.type(w_obj).getname(space)
        restypename = space.type(w_res).getname(space)
        raise operationerrfmt(space.w_TypeError,
            "%s.__format__ must return string or unicode, not %s",
                              typename, restypename)
    return w_res
def check_user_subclass(w_self, w_subtype):
    # Validate that w_subtype can be used to build an instance of a
    # subclass of w_self; returns w_subtype on success.  Uses the %T/%N
    # formatters, which take the wrapped objects directly.
    space = w_self.space
    if not isinstance(w_subtype, W_TypeObject):
        raise operationerrfmt(space.w_TypeError,
                              "X is not a type object ('%T')",
                              w_subtype)
    if not w_subtype.issubtype(w_self):
        raise operationerrfmt(space.w_TypeError,
                              "%N.__new__(%N): %N is not a subtype of %N",
                              w_self, w_subtype, w_subtype, w_self)
    if w_self.instancetypedef is not w_subtype.instancetypedef:
        # Different interp-level layouts: building w_subtype through
        # w_self.__new__ would be unsafe.
        raise operationerrfmt(space.w_TypeError,
                              "%N.__new__(%N) is not safe, use %N.__new__()",
                              w_self, w_subtype, w_subtype)
    return w_subtype
def call_args(self, args):
    # Call a (possibly unbound) method object.  For an unbound method,
    # the first positional argument must be an instance of self.w_class;
    # otherwise a descriptive TypeError is raised.
    space = self.space
    if self.w_instance is not None:
        # bound method
        return space.call_obj_args(self.w_function, self.w_instance, args)

    # unbound method
    w_firstarg = args.firstarg()
    if w_firstarg is not None and (
            space.abstract_isinstance_w(w_firstarg, self.w_class)):
        pass  # ok
    else:
        # Build the error message pieces: the method name, a description
        # of the expected class, and a description of what was received.
        myname = self.getname(space, "")
        clsdescr = self.w_class.getname(space, "")
        if clsdescr:
            clsdescr += " instance"
        else:
            clsdescr = "instance"
        if w_firstarg is None:
            instdescr = "nothing"
        else:
            instname = space.abstract_getclass(w_firstarg).getname(space,
                                                                   "")
            if instname:
                instdescr = instname + " instance"
            else:
                instdescr = "instance"
        msg = ("unbound method %s() must be called with %s "
               "as first argument (got %s instead)")
        raise operationerrfmt(space.w_TypeError, msg,
                              myname, clsdescr, instdescr)
    return space.call_args(self.w_function, args)
def delitem(space, w_obj, w_key):
    """Dispatch ``del w_obj[w_key]`` through the type's __delitem__ slot."""
    w_impl = space.lookup(w_obj, '__delitem__')
    if w_impl is not None:
        return space.get_and_call_function(w_impl, w_obj, w_key)
    raise operationerrfmt(space.w_TypeError,
                          "'%T' object does not support item deletion",
                          w_obj)
def unicode_to_decimal_w(space, w_unistr):
    # Convert a unicode string into a byte string where every Unicode
    # decimal digit is replaced by its ASCII digit and Unicode spaces by
    # ' '.  Latin-1 characters pass through unchanged; anything else
    # raises UnicodeEncodeError('decimal', ...).
    if not isinstance(w_unistr, W_UnicodeObject):
        raise operationerrfmt(space.w_TypeError,
                              "expected unicode, got '%s'",
                              space.type(w_unistr).getname(space))
    unistr = w_unistr._value
    result = ['\0'] * len(unistr)
    digits = ['0', '1', '2', '3', '4',
              '5', '6', '7', '8', '9']
    for i in xrange(len(unistr)):
        uchr = ord(unistr[i])
        if unicodedb.isspace(uchr):
            result[i] = ' '
            continue
        try:
            result[i] = digits[unicodedb.decimal(uchr)]
        except KeyError:
            # Not a decimal digit according to the Unicode database.
            if 0 < uchr < 256:
                result[i] = chr(uchr)
            else:
                w_encoding = space.wrap('decimal')
                w_start = space.wrap(i)
                w_end = space.wrap(i+1)
                w_reason = space.wrap('invalid decimal Unicode string')
                raise OperationError(space.w_UnicodeEncodeError,
                                     space.newtuple([w_encoding, w_unistr,
                                                     w_start, w_end,
                                                     w_reason]))
    return ''.join(result)
def hash(space, w_obj):
    # Dispatch hash(w_obj) through the type's __hash__ slot, normalizing
    # the result to a wrapped int (hashing longs / int subclasses).
    w_hash = space.lookup(w_obj, '__hash__')
    if w_hash is None:
        # xxx there used to be logic about "do we have __eq__ or __cmp__"
        # here, but it does not really make sense, as 'object' has a
        # default __hash__.  This path should only be taken under very
        # obscure circumstances.
        return default_identity_hash(space, w_obj)
    if space.is_w(w_hash, space.w_None):
        # __hash__ = None marks the type as explicitly unhashable.
        raise operationerrfmt(space.w_TypeError,
                              "'%T' objects are unhashable", w_obj)
    w_result = space.get_and_call_function(w_hash, w_obj)
    w_resulttype = space.type(w_result)
    if space.is_w(w_resulttype, space.w_int):
        return w_result
    elif space.is_w(w_resulttype, space.w_long):
        return space.hash(w_result)
    elif space.isinstance_w(w_result, space.w_int):
        # be careful about subclasses of 'int'...
        return space.wrap(space.int_w(w_result))
    elif space.isinstance_w(w_result, space.w_long):
        # be careful about subclasses of 'long'...
        bigint = space.bigint_w(w_result)
        return space.wrap(bigint.hash())
    else:
        raise OperationError(space.w_TypeError,
                space.wrap("__hash__() should return an int or long"))
def get_type_and_offset_for_field(self, space, name):
    """Return (ffitype, byte offset) of the named field.

    Raises an app-level AttributeError whose message is the field name
    when the structure has no such field.
    """
    w_field = self.name2w_field.get(name)
    if w_field is None:
        raise operationerrfmt(space.w_AttributeError, '%s', name)
    return w_field.w_ffitype, w_field.offset
def check_and_find_best_base(space, bases_w):
    """The best base is one of the bases in the given list: the one
       whose layout a new type should use as a starting point.
       This version checks that bases_w is an acceptable tuple of bases.
    """
    w_bestbase = find_best_base(space, bases_w)
    if w_bestbase is None:
        # All bases are old-style classes; a new-style type needs at least
        # one new-style base.
        raise OperationError(space.w_TypeError,
                             space.wrap("a new-style class can't have "
                                        "only classic bases"))
    if not w_bestbase.instancetypedef.acceptable_as_base_class:
        raise operationerrfmt(space.w_TypeError,
                              "type '%s' is not an "
                              "acceptable base class",
                              w_bestbase.instancetypedef.name)

    # check that all other bases' layouts are superclasses of the bestbase
    w_bestlayout = w_bestbase.w_same_layout_as or w_bestbase
    for w_base in bases_w:
        if isinstance(w_base, W_TypeObject):
            w_layout = w_base.w_same_layout_as or w_base
            if not issublayout(w_bestlayout, w_layout):
                raise OperationError(space.w_TypeError,
                                     space.wrap("instance layout conflicts in "
                                                "multiple inheritance"))
    return w_bestbase
def is_package(self, space, fullname):
    """Return a wrapped bool: does `fullname` exist in this importer,
    and is it a package?  Raises the importer's error if no candidate
    file (any known extension) is present.
    """
    base = self.make_filename(fullname)
    for _, pkg_flag, suffix in ENUMERATE_EXTS:
        if self.have_modulefile(space, base + suffix):
            return space.wrap(pkg_flag)
    raise operationerrfmt(get_error(space),
                          "Cannot find module %s in %s",
                          base, self.name)
def _do_combine_starstarargs_wrapped(self, keys_w, w_starstararg):
    # Merge the **kwargs mapping (w_starstararg, keyed by keys_w) into
    # parallel keyword name/value lists, rejecting non-string keys and
    # duplicates of already-present keywords.
    space = self.space
    keywords_w = [None] * len(keys_w)
    keywords = [None] * len(keys_w)
    i = 0
    for w_key in keys_w:
        try:
            key = space.str_w(w_key)
        except OperationError, e:
            if e.match(space, space.w_TypeError):
                raise OperationError(
                    space.w_TypeError,
                    space.wrap("keywords must be strings"))
            if e.match(space, space.w_UnicodeEncodeError):
                # Allow this to pass through
                key = None
            else:
                raise
        else:
            # Plain-string key: guard against duplicating an explicit
            # keyword argument.
            if self.keywords and key in self.keywords:
                raise operationerrfmt(self.space.w_TypeError,
                                      "got multiple values "
                                      "for keyword argument "
                                      "'%s'", key)
        keywords[i] = key
        keywords_w[i] = space.getitem(w_starstararg, w_key)
        i += 1
def allocate_instance(self, cls, w_subtype):
    """Allocate the memory needed for an instance of an internal or
    user-defined type, without actually __init__ializing the instance."""
    w_type = self.gettypeobject(cls.typedef)
    if self.is_w(w_type, w_subtype):
        # Exact type: instantiate the interp-level class directly.
        instance = instantiate(cls)
    elif cls.typedef.acceptable_as_base_class:
        # the purpose of the above check is to avoid the code below
        # to be annotated at all for 'cls' if it is not necessary
        w_subtype = w_type.check_user_subclass(w_subtype)
        if cls.typedef.applevel_subclasses_base is not None:
            cls = cls.typedef.applevel_subclasses_base
        #
        if not we_are_translated():
            if issubclass(cls, model.W_Object):
                # If cls is missing from model.typeorder, then you
                # need to add it there (including the inheritance
                # relationship, if any)
                assert cls in self.model.typeorder, repr(cls)
        #
        if (self.config.objspace.std.withmapdict and cls is W_ObjectObject
                and not w_subtype.needsdel):
            from pypy.objspace.std.mapdict import get_subclass_of_correct_size
            subcls = get_subclass_of_correct_size(self, cls, w_subtype)
        else:
            # Pick the unique interp-level subclass matching the
            # app-level subtype's feature flags.
            subcls = get_unique_interplevel_subclass(
                    self.config, cls, w_subtype.hasdict,
                    w_subtype.nslots != 0,
                    w_subtype.needsdel, w_subtype.weakrefable)
        instance = instantiate(subcls)
        assert isinstance(instance, cls)
        instance.user_setup(self, w_subtype)
    else:
        raise operationerrfmt(self.w_TypeError,
                              "%s.__new__(%s): only for the type %s",
                              w_type.name, w_subtype.getname(self),
                              w_type.name)
    return instance
def build_cli_class(space, namespace, classname, fullname, assemblyname):
    # Reflect a .NET class and build its app-level wrapper.
    assembly_qualified_name = '%s, %s' % (fullname, assemblyname)
    b_type = System.Type.GetType(assembly_qualified_name)
    if b_type is None:
        raise operationerrfmt(space.w_ImportError,
                              "Cannot load .NET type: %s", fullname)

    # this is where we locate the interfaces inherited by the class
    # set the flag hasIEnumerable if IEnumerable interface has been by the class
    hasIEnumerable = b_type.GetInterface(
        "System.Collections.IEnumerable") is not None

    # this is where we test if the class is Generic
    # set the flag isClassGeneric
    isClassGeneric = False
    if b_type.get_IsGenericType():
        isClassGeneric = True

    w_staticmethods, w_methods = get_methods(space, b_type)
    w_properties, w_indexers = get_properties(space, b_type)
    return build_wrapper(space,
                         space.wrap(namespace),
                         space.wrap(classname),
                         space.wrap(assemblyname),
                         w_staticmethods,
                         w_methods,
                         w_properties,
                         w_indexers,
                         space.wrap(hasIEnumerable),
                         space.wrap(isClassGeneric))
def _str_join_many_items(space, w_self, list_w, size):
    # str.join() fast path for size >= 2 items.  Falls back to
    # unicode.join() if any item is unicode; raises TypeError for any
    # other non-string item.  Pre-computes the result length so the
    # StringBuilder never reallocates.
    self = w_self._value
    reslen = len(self) * (size - 1)
    for i in range(size):
        w_s = list_w[i]
        if not space.isinstance_w(w_s, space.w_str):
            if space.isinstance_w(w_s, space.w_unicode):
                # we need to rebuild w_list here, because the original
                # w_list might be an iterable which we already consumed
                w_list = space.newlist(list_w)
                w_u = space.call_function(space.w_unicode, w_self)
                return space.call_method(w_u, "join", w_list)
            raise operationerrfmt(
                space.w_TypeError,
                "sequence item %d: expected string, %s "
                "found", i, space.type(w_s).getname(space))
        reslen += len(space.str_w(w_s))

    sb = StringBuilder(reslen)
    for i in range(size):
        if self and i != 0:
            sb.append(self)
        sb.append(space.str_w(list_w[i]))

    return space.wrap(sb.build())
def _index_of_single_item(self, space, w_idx):
    """Translate `w_idx` (an int or a tuple of ints, one per dimension)
    into a flat offset into the underlying storage.

    Negative indices count from the end of the corresponding dimension;
    out-of-range indices raise an app-level IndexError.
    """
    if space.isinstance_w(w_idx, space.w_int):
        # Single integer: index into the first dimension only.
        idx = space.int_w(w_idx)
        if idx < 0:
            idx = self.shape[0] + idx
        if idx < 0 or idx >= self.shape[0]:
            raise OperationError(space.w_IndexError,
                                 space.wrap("index out of range"))
        return self.start + idx * self.strides[0]
    index = [space.int_w(w_item)
             for w_item in space.fixedview(w_idx)]
    item = self.start
    for i in range(len(index)):
        v = index[i]
        if v < 0:
            v += self.shape[i]
        if v < 0 or v >= self.shape[i]:
            # Bug fix: the format string previously lacked the closing
            # parenthesis after '%d'.
            raise operationerrfmt(
                space.w_IndexError,
                "index (%d) out of range (0<=index<%d)",
                i, self.shape[i],
            )
        item += v * self.strides[i]
    return item
def descr_set__name__(space, w_type, w_value):
    """Assign __name__ on a heap type; built-in types are immutable."""
    w_type = _check(space, w_type)
    if w_type.is_heaptype():
        w_type.name = space.str_w(w_value)
    else:
        raise operationerrfmt(space.w_TypeError,
                              "can't set %s.__name__", w_type.name)
def descr_set__bases__(space, w_type, w_value):
    # this assumes all app-level type objects are W_TypeObject
    # Reassign __bases__ on a heap type: validate the new bases, check
    # layout compatibility, swap the base lists, and recompute the MROs
    # of the type and all its subclasses -- rolling everything back if
    # any MRO recomputation fails.
    from pypy.objspace.std.typeobject import W_TypeObject
    from pypy.objspace.std.typeobject import check_and_find_best_base
    from pypy.objspace.std.typeobject import get_parent_layout
    from pypy.objspace.std.typeobject import is_mro_purely_of_types
    w_type = _check(space, w_type)
    if not w_type.is_heaptype():
        raise operationerrfmt(space.w_TypeError,
                              "can't set %s.__bases__", w_type.name)
    if not space.isinstance_w(w_value, space.w_tuple):
        raise operationerrfmt(space.w_TypeError,
                              "can only assign tuple to %s.__bases__, not %s",
                              w_type.name,
                              space.type(w_value).getname(space))
    newbases_w = space.fixedview(w_value)
    if len(newbases_w) == 0:
        raise operationerrfmt(
            space.w_TypeError,
            "can only assign non-empty tuple to %s.__bases__, not ()",
            w_type.name)

    for w_newbase in newbases_w:
        if isinstance(w_newbase, W_TypeObject):
            if w_type in w_newbase.compute_default_mro():
                raise OperationError(
                    space.w_TypeError,
                    space.wrap("a __bases__ item causes"
                               " an inheritance cycle"))

    # The new best base must provide the same instance layout as the old
    # one, otherwise existing instances would become invalid.
    w_oldbestbase = check_and_find_best_base(space, w_type.bases_w)
    w_newbestbase = check_and_find_best_base(space, newbases_w)
    oldlayout = w_oldbestbase.get_full_instance_layout()
    newlayout = w_newbestbase.get_full_instance_layout()

    if oldlayout != newlayout:
        raise operationerrfmt(
            space.w_TypeError,
            "__bases__ assignment: '%s' object layout"
            " differs from '%s'",
            w_newbestbase.getname(space),
            w_oldbestbase.getname(space))

    # invalidate the version_tag of all the current subclasses
    w_type.mutated(None)

    # now we can go ahead and change 'w_type.bases_w'
    saved_bases_w = w_type.bases_w
    temp = []
    try:
        for w_oldbase in saved_bases_w:
            if isinstance(w_oldbase, W_TypeObject):
                w_oldbase.remove_subclass(w_type)
        w_type.bases_w = newbases_w
        for w_newbase in newbases_w:
            if isinstance(w_newbase, W_TypeObject):
                w_newbase.add_subclass(w_type)
        # try to recompute all MROs
        mro_subclasses(space, w_type, temp)
    except:
        # Roll back: restore every saved MRO and the old base list.
        for cls, old_mro in temp:
            cls.mro_w = old_mro
        w_type.bases_w = saved_bases_w
        raise
    if (space.config.objspace.std.withtypeversion and
            w_type.version_tag() is not None and
            not is_mro_purely_of_types(w_type.mro_w)):
        # Disable method cache if the hierarchy isn't pure.
        w_type._version_tag = None
        for w_subclass in w_type.get_subclasses():
            if isinstance(w_subclass, W_TypeObject):
                w_subclass._version_tag = None
    assert w_type.w_same_layout_as is get_parent_layout(w_type)  # invariant
def raise_type_err(space, argument, expected, w_obj):
    """Raise a uniform TypeError: argument <argument> must be <expected>."""
    got = space.type(w_obj).getname(space, '?')
    raise operationerrfmt(space.w_TypeError,
                          "argument %s must be %s, not %s",
                          argument, expected, got)
def check_mode_ok(self, mode):
    """Reject file modes that are empty or do not start with one of
    'r', 'w', 'a' or 'U'."""
    if mode and mode[0] in ('r', 'w', 'a', 'U'):
        return
    raise operationerrfmt(self.space.w_ValueError,
                          "invalid mode: '%s'", mode)
def descr_new_cdll(space, w_type, name):
    # App-level CDLL constructor: open the named shared library, mapping
    # dlopen failures to OSError.
    try:
        cdll = CDLL(name)
    except DLOpenError, e:
        raise operationerrfmt(space.w_OSError, '%s: %s',
                              name, e.msg or 'unspecified error')
    # NOTE(review): on success nothing is returned, so the app-level
    # constructor would yield None.  Presumably a wrapped CDLL object
    # should be returned here -- verify against the complete original
    # file (this chunk may be truncated).
def gettypeerror(space, operatorsymbol, *args_w):
    """Build (without raising) the TypeError for an unsupported operation.

    The message template is chosen by operand count and filled with the
    operator symbol plus each operand's type name.
    """
    template = _gettypeerrormsg(len(args_w))
    names = _gettypenames(space, *args_w)
    return operationerrfmt(space.w_TypeError, template,
                           operatorsymbol, *names)
def error(self, w_ffitype):
    # Report that this converter cannot handle the given ffi type.
    raise operationerrfmt(self.space.w_TypeError,
                          'Unsupported ffi type to convert: %s',
                          w_ffitype.name)
def getindex(self, space, attr):
    """Map a structure field name to its positional index.

    Raises an app-level AttributeError for unknown field names.
    """
    index = self.name_to_index.get(attr)
    if index is None:
        raise operationerrfmt(space.w_AttributeError,
                              "C Structure has no attribute %s", attr)
    return index
def _get_type_(space, key):
    """Translate a one-letter typecode into its low-level type via TYPEMAP.

    Unknown letters raise an app-level ValueError.
    """
    if key in TYPEMAP:
        return TYPEMAP[key]
    raise operationerrfmt(space.w_ValueError,
                          "Unknown type letter %s", key)
def descr_control(self, space, w_changelist, max_events, w_timeout=None):
    # kqueue.control(changelist, max_events[, timeout]) -> list of kevents.
    # Copies the app-level change list into a raw kevent array, invokes
    # the kevent() syscall, and wraps any returned events.
    self.check_closed(space)

    if max_events < 0:
        raise operationerrfmt(
            space.w_ValueError,
            "Length of eventlist must be 0 or positive, got %d",
            max_events)

    if space.is_w(w_changelist, space.w_None):
        changelist_len = 0
    else:
        changelist_len = space.len_w(w_changelist)

    with lltype.scoped_alloc(rffi.CArray(kevent),
                             changelist_len) as changelist:
        with lltype.scoped_alloc(rffi.CArray(kevent),
                                 max_events) as eventlist:
            with lltype.scoped_alloc(timespec) as timeout:

                # Convert the float timeout (seconds) into a timespec;
                # None means block indefinitely (null pointer).
                if not space.is_w(w_timeout, space.w_None):
                    _timeout = space.float_w(w_timeout)
                    if _timeout < 0:
                        raise operationerrfmt(
                            space.w_ValueError,
                            "Timeout must be None or >= 0, got %s",
                            str(_timeout))
                    sec = int(_timeout)
                    nsec = int(1e9 * (_timeout - sec))
                    rffi.setintfield(timeout, 'c_tv_sec', sec)
                    rffi.setintfield(timeout, 'c_tv_nsec', nsec)
                    ptimeout = timeout
                else:
                    ptimeout = lltype.nullptr(timespec)

                # Copy each W_Kevent's raw fields into the C array.
                if not space.is_w(w_changelist, space.w_None):
                    i = 0
                    for w_ev in space.listview(w_changelist):
                        ev = space.interp_w(W_Kevent, w_ev)
                        changelist[i].c_ident = ev.event.c_ident
                        changelist[i].c_filter = ev.event.c_filter
                        changelist[i].c_flags = ev.event.c_flags
                        changelist[i].c_fflags = ev.event.c_fflags
                        changelist[i].c_data = ev.event.c_data
                        changelist[i].c_udata = ev.event.c_udata
                        i += 1
                    pchangelist = changelist
                else:
                    pchangelist = lltype.nullptr(rffi.CArray(kevent))

                nfds = syscall_kevent(self.kqfd,
                                      pchangelist,
                                      changelist_len,
                                      eventlist,
                                      max_events,
                                      ptimeout)
                if nfds < 0:
                    raise exception_from_errno(space, space.w_IOError)
                else:
                    # Wrap each returned raw kevent in a fresh W_Kevent.
                    elist_w = [None] * nfds
                    for i in xrange(nfds):
                        evt = eventlist[i]

                        w_event = W_Kevent(space)
                        w_event.event = lltype.malloc(kevent, flavor="raw")
                        w_event.event.c_ident = evt.c_ident
                        w_event.event.c_filter = evt.c_filter
                        w_event.event.c_flags = evt.c_flags
                        w_event.event.c_fflags = evt.c_fflags
                        w_event.event.c_data = evt.c_data
                        w_event.event.c_udata = evt.c_udata

                        elist_w[i] = w_event

                    return space.newlist(elist_w)
except OperationError, e: if e.match(space, space.w_AttributeError): raise OperationError( space.w_TypeError, space.wrap( "argument must be an int, or have a fileno() method.")) raise w_fd = space.call_function(w_fileno) if not space.is_true(space.isinstance(w_fd, space.w_int)): raise OperationError(space.w_TypeError, space.wrap('filneo() return a non-integer')) fd = space.int_w(w_fd) if fd < 0: raise operationerrfmt( space.w_ValueError, "file descriptor cannot be a negative integer (%d)", fd) return fd class Poll(Wrappable): def __init__(self): self.fddict = {} def register(self, space, w_fd, events=defaultevents): fd = as_fd_w(space, w_fd) self.fddict[fd] = events register.unwrap_spec = ['self', ObjSpace, W_Root, int] def unregister(self, space, w_fd):
def descr__new__(space, w_subtype, __args__):
    # Generic __new__ for types that cannot be instantiated from
    # application-level code.
    raise operationerrfmt(space.w_TypeError,
                          "cannot create '%s' instances",
                          w_subtype.getname(space, '?'))
def descr_classmethod__new__(space, w_type, w_function):
    """classmethod(callable): wrap w_function after verifying that it is
    callable, raising TypeError otherwise."""
    if space.is_true(space.callable(w_function)):
        return space.wrap(ClassMethod(w_function))
    typename = space.type(w_function).getname(space, '?')
    raise operationerrfmt(space.w_TypeError,
                          "'%s' object is not callable", typename)
def letter2tp(space, key):
    """Resolve a typecode letter through UNPACKED_TYPECODES.

    Unknown letters raise an app-level ValueError.
    """
    if key in UNPACKED_TYPECODES:
        return UNPACKED_TYPECODES[key]
    raise operationerrfmt(space.w_ValueError,
                          "Unknown type letter %s", key)
def ord__Unicode(space, w_uni):
    """ord() for unicode objects: the argument must be exactly one
    character long, otherwise TypeError reports the actual length."""
    value = w_uni._value
    if len(value) == 1:
        return space.wrap(ord(value[0]))
    raise operationerrfmt(space.w_TypeError,
        "ord() expected a character, got a unicode of length %d",
        len(value))
def open(space, w_file, mode="r", buffering=-1, encoding=None, errors=None,
         newline=None, closefd=True):
    """Applevel io.open(): build the raw/buffered/text stack for 'w_file'.

    'w_file' is a path (str/unicode) or an existing file descriptor (int).
    Returns the raw FileIO for unbuffered binary mode, a buffered object
    for buffered binary mode, or a TextIOWrapper for text mode.

    Raises applevel TypeError for an invalid file argument and ValueError
    for inconsistent mode/buffering/encoding/errors/newline combinations.

    BUGFIX: the 'binary and encoding is not None' check used to raise
    "binary mode doesn't take an errors argument" (wrong message), and the
    matching check for 'errors' was missing altogether; both now follow
    CPython's io.open() semantics.
    """
    from pypy.module._io.interp_bufferedio import (W_BufferedRandom,
        W_BufferedWriter, W_BufferedReader)

    if not (space.isinstance_w(w_file, space.w_basestring) or
            space.isinstance_w(w_file, space.w_int) or
            space.isinstance_w(w_file, space.w_long)):
        raise operationerrfmt(space.w_TypeError, "invalid file: %s",
            space.str_w(space.repr(w_file)))

    reading = writing = appending = updating = text = binary = universal = False

    # Each mode character may appear at most once.
    uniq_mode = {}
    for flag in mode:
        uniq_mode[flag] = None
    if len(uniq_mode) != len(mode):
        raise operationerrfmt(space.w_ValueError, "invalid mode: %s", mode)
    for flag in mode:
        if flag == "r":
            reading = True
        elif flag == "w":
            writing = True
        elif flag == "a":
            appending = True
        elif flag == "+":
            updating = True
        elif flag == "t":
            text = True
        elif flag == "b":
            binary = True
        elif flag == "U":
            # universal newlines implies reading
            universal = True
            reading = True
        else:
            raise operationerrfmt(space.w_ValueError, "invalid mode: %s", mode)

    # Mode string passed down to the raw FileIO layer.
    rawmode = ""
    if reading:
        rawmode += "r"
    if writing:
        rawmode += "w"
    if appending:
        rawmode += "a"
    if updating:
        rawmode += "+"

    if universal and (writing or appending):
        raise OperationError(space.w_ValueError,
            space.wrap("can't use U and writing mode at once"))
    if text and binary:
        raise OperationError(space.w_ValueError,
            space.wrap("can't have text and binary mode at once"))
    if reading + writing + appending > 1:
        raise OperationError(space.w_ValueError,
            space.wrap("must have exactly one of read/write/append mode"))
    # Binary streams never decode, so text-layer arguments are rejected.
    if binary and encoding is not None:
        raise OperationError(space.w_ValueError,
            space.wrap("binary mode doesn't take an encoding argument"))
    if binary and errors is not None:
        raise OperationError(space.w_ValueError,
            space.wrap("binary mode doesn't take an errors argument"))
    if binary and newline is not None:
        raise OperationError(space.w_ValueError,
            space.wrap("binary mode doesn't take a newline argument"))

    w_raw = space.call_function(
        space.gettypefor(W_FileIO), w_file, space.wrap(rawmode),
        space.wrap(closefd))

    isatty = space.is_true(space.call_method(w_raw, "isatty"))
    # buffering == 1 means line buffering; ttys are line-buffered by default.
    line_buffering = buffering == 1 or (buffering < 0 and isatty)
    if line_buffering:
        buffering = -1

    if buffering < 0:
        buffering = DEFAULT_BUFFER_SIZE
        if (space.config.translation.type_system == 'lltype' and
                'st_blksize' in STAT_FIELD_TYPES):
            # Prefer the filesystem's preferred block size, when available.
            fileno = space.int_w(space.call_method(w_raw, "fileno"))
            try:
                st = os.fstat(fileno)
            except OSError:
                # Errors should never pass silently, except this one time.
                pass
            else:
                if st.st_blksize > 1:
                    buffering = st.st_blksize

    if buffering < 0:
        raise OperationError(space.w_ValueError,
            space.wrap("invalid buffering size"))

    if buffering == 0:
        if not binary:
            raise OperationError(space.w_ValueError,
                space.wrap("can't have unbuffered text I/O"))
        return w_raw

    if updating:
        buffer_cls = W_BufferedRandom
    elif writing or appending:
        buffer_cls = W_BufferedWriter
    elif reading:
        buffer_cls = W_BufferedReader
    else:
        raise operationerrfmt(space.w_ValueError, "unknown mode: '%s'", mode)
    w_buffer = space.call_function(
        space.gettypefor(buffer_cls), w_raw, space.wrap(buffering))
    if binary:
        return w_buffer

    w_wrapper = space.call_function(space.gettypefor(W_TextIOWrapper),
        w_buffer,
        space.wrap(encoding),
        space.wrap(errors),
        space.wrap(newline),
        space.wrap(line_buffering))
    space.setattr(w_wrapper, space.wrap("mode"), space.wrap(mode))
    return w_wrapper
def descr__hash__unhashable(space, w_obj):
    """__hash__ implementation for unhashable types: always raises
    TypeError naming the object's type."""
    raise operationerrfmt(space.w_TypeError,
                          "'%s' objects are unhashable",
                          space.type(w_obj).getname(space, '?'))
def check_complete(self, space):
    """Raise applevel ValueError if this structure type was never given
    its fields (fields_w is still None)."""
    if self.fields_w is not None:
        return
    raise operationerrfmt(space.w_ValueError,
                          "%s has an incomplete type",
                          self.w_ffitype.name)
def _gettmarg(space, w_tup, allowNone=True):
    """Convert an applevel time-tuple (or None) into a C 'struct tm'.

    With w_tup None (and allowNone), returns localtime(now) from the C
    library.  Otherwise fills the module-global 'glob_buf' struct from the
    9-element sequence and returns it.  Raises TypeError for a wrong-length
    sequence and ValueError for out-of-range fields.
    """
    if allowNone and space.is_w(w_tup, space.w_None):
        # default to the current local time
        tt = rffi.r_time_t(int(pytime.time()))
        t_ref = lltype.malloc(rffi.TIME_TP.TO, 1, flavor='raw')
        t_ref[0] = tt
        pbuf = c_localtime(t_ref)
        lltype.free(t_ref, flavor='raw')
        if not pbuf:
            raise OperationError(space.w_ValueError,
                space.wrap(_get_error_msg()))
        return pbuf
    tup_w = space.fixedview(w_tup)
    if len(tup_w) != 9:
        raise operationerrfmt(space.w_TypeError,
                              "argument must be sequence of "
                              "length 9, not %d", len(tup_w))
    y = space.int_w(tup_w[0])
    # Zero month/day/yday are normalized to 1 (mirrors CPython's leniency).
    tm_mon = space.int_w(tup_w[1])
    if tm_mon == 0:
        tm_mon = 1
    tm_mday = space.int_w(tup_w[2])
    if tm_mday == 0:
        tm_mday = 1
    tm_yday = space.int_w(tup_w[7])
    if tm_yday == 0:
        tm_yday = 1
    rffi.setintfield(glob_buf, 'c_tm_mon', tm_mon)
    rffi.setintfield(glob_buf, 'c_tm_mday', tm_mday)
    rffi.setintfield(glob_buf, 'c_tm_hour', space.int_w(tup_w[3]))
    rffi.setintfield(glob_buf, 'c_tm_min', space.int_w(tup_w[4]))
    rffi.setintfield(glob_buf, 'c_tm_sec', space.int_w(tup_w[5]))
    rffi.setintfield(glob_buf, 'c_tm_wday', space.int_w(tup_w[6]))
    rffi.setintfield(glob_buf, 'c_tm_yday', tm_yday)
    rffi.setintfield(glob_buf, 'c_tm_isdst', space.int_w(tup_w[8]))
    if _POSIX:
        if _CYGWIN:
            pass
        else:
            # actually never happens, but makes annotator happy
            glob_buf.c_tm_zone = lltype.nullptr(rffi.CCHARP.TO)
            rffi.setintfield(glob_buf, 'c_tm_gmtoff', 0)
    # Two-digit years are accepted only if time.accept2dyear is set.
    w_accept2dyear = _get_module_object(space, "accept2dyear")
    accept2dyear = space.int_w(w_accept2dyear)
    if y < 1900:
        if not accept2dyear:
            raise OperationError(space.w_ValueError,
                space.wrap("year >= 1900 required"))
        if 69 <= y <= 99:
            y += 1900
        elif 0 <= y <= 68:
            y += 2000
        else:
            raise OperationError(space.w_ValueError,
                space.wrap("year out of range"))
    if rffi.getintfield(glob_buf, 'c_tm_wday') < 0:
        raise OperationError(space.w_ValueError,
            space.wrap("day of week out of range"))
    # Convert from Python conventions to C struct tm conventions:
    # year is offset from 1900, month is 0-based, weekday rotates so
    # Monday=0 becomes Sunday=0, yday becomes 0-based.
    rffi.setintfield(glob_buf, 'c_tm_year', y - 1900)
    rffi.setintfield(glob_buf, 'c_tm_mon',
                     rffi.getintfield(glob_buf, 'c_tm_mon') - 1)
    rffi.setintfield(glob_buf, 'c_tm_wday',
                     (rffi.getintfield(glob_buf, 'c_tm_wday') + 1) % 7)
    rffi.setintfield(glob_buf, 'c_tm_yday',
                     rffi.getintfield(glob_buf, 'c_tm_yday') - 1)
    return glob_buf
def unaryop_impl(space, w_obj):
    """Look up the unary special method (closed-over 'specialname') on
    w_obj's type and call it, raising TypeError ('errormsg') when the
    type does not define it."""
    w_impl = space.lookup(w_obj, specialname)
    if w_impl is not None:
        return space.get_and_call_function(w_impl, w_obj)
    typename = space.type(w_obj).getname(space)
    raise operationerrfmt(space.w_TypeError, errormsg, typename)
class W_CDLL(Wrappable):
    """Applevel wrapper around an opened C dynamic library."""

    def __init__(self, space, name, cdll):
        # cdll: the low-level opened-library handle; name: library path/name
        self.cdll = cdll
        self.name = name
        # cache: (name, argtypes, resshape) -> W_FuncPtr, so repeated ptr()
        # calls with the same signature return the same wrapper
        self.w_cache = space.newdict()
        self.space = space

    @unwrap_spec(flags=int)
    def ptr(self, space, w_name, w_argtypes, w_restype, flags=FUNCFLAG_CDECL):
        """ Get a pointer for function name with provided argtypes
        and restype
        """
        resshape = unpack_resshape(space, w_restype)
        w = space.wrap
        argtypes_w = space.fixedview(w_argtypes)
        w_argtypes = space.newtuple(argtypes_w)
        w_key = space.newtuple([w_name, w_argtypes, w(resshape)])
        # Fast path: return the cached W_FuncPtr for this exact signature.
        try:
            return space.getitem(self.w_cache, w_key)
        except OperationError, e:
            if e.match(space, space.w_KeyError):
                pass
            else:
                raise
        # Array arguments not supported directly (in C, an array argument
        # will be just a pointer).  And the result cannot be an array (at all).
        argshapes = unpack_argshapes(space, w_argtypes)
        ffi_argtypes = [shape.get_basic_ffi_type() for shape in argshapes]
        if resshape is not None:
            ffi_restype = resshape.get_basic_ffi_type()
        else:
            ffi_restype = ffi_type_void
        if space.is_true(space.isinstance(w_name, space.w_str)):
            # Lookup by symbol name.
            name = space.str_w(w_name)
            try:
                ptr = self.cdll.getrawpointer(name, ffi_argtypes,
                                              ffi_restype, flags)
            except KeyError:
                raise operationerrfmt(space.w_AttributeError,
                    "No symbol %s found in library %s", name, self.name)
        elif (_MS_WINDOWS and
              space.is_true(space.isinstance(w_name, space.w_int))):
            # Windows only: lookup by export ordinal.
            ordinal = space.int_w(w_name)
            try:
                ptr = self.cdll.getrawpointer_byordinal(ordinal, ffi_argtypes,
                                                        ffi_restype, flags)
            except KeyError:
                raise operationerrfmt(space.w_AttributeError,
                    "No symbol %d found in library %s", ordinal, self.name)
        else:
            raise OperationError(space.w_TypeError,
                space.wrap("function name must be string or integer"))
        w_funcptr = W_FuncPtr(space, ptr, argshapes, resshape)
        space.setitem(self.w_cache, w_key, w_funcptr)
        return w_funcptr
def _precheck_for_new(space, w_type):
    """Ensure the first argument of a __new__ is really a type object;
    return it unchanged, or raise TypeError naming what was passed."""
    from pypy.objspace.std.typeobject import W_TypeObject
    if isinstance(w_type, W_TypeObject):
        return w_type
    raise operationerrfmt(space.w_TypeError,
                          "X is not a type object (%s)",
                          space.type(w_type).getname(space))
class PyFrame(eval.Frame):
    """Represents a frame for a regular Python function
    that needs to be interpreted.

    See also pyopcode.PyStandardFrame and nestedscope.PyNestedScopeFrame.

    Public fields:
     * 'space' is the object space this frame is running in
     * 'code' is the PyCode object this frame runs
     * 'w_locals' is the locals dictionary to use
     * 'w_globals' is the attached globals dictionary
     * 'builtin' is the attached built-in module
     * 'valuestack_w', 'blockstack', control the interpretation

    The locals and the value stack share one list, 'locals_stack_w':
    indices [0:nlocals] are the fast locals, [nlocals:valuestackdepth]
    the value stack.
    """

    __metaclass__ = extendabletype

    frame_finished_execution = False
    last_instr = -1
    last_exception = None
    f_backref = jit.vref_None
    w_f_trace = None
    # For tracing
    instr_lb = 0
    instr_ub = 0
    instr_prev_plus_one = 0
    is_being_profiled = False
    escaped = False  # see mark_as_escaped()

    def __init__(self, space, code, w_globals, outer_func):
        if not we_are_translated():
            assert type(self) in (space.FrameClass, CPythonFrame), (
                "use space.FrameClass(), not directly PyFrame()")
        # JIT hint: this frame is freshly created and accessed directly.
        self = hint(self, access_directly=True, fresh_virtualizable=True)
        assert isinstance(code, pycode.PyCode)
        self.pycode = code
        eval.Frame.__init__(self, space, w_globals)
        # one list for both fast locals and the value stack (see class doc)
        self.locals_stack_w = [None] * (code.co_nlocals + code.co_stacksize)
        self.nlocals = code.co_nlocals
        self.valuestackdepth = code.co_nlocals
        self.lastblock = None
        make_sure_not_resized(self.locals_stack_w)
        check_nonneg(self.nlocals)
        #
        if space.config.objspace.honor__builtins__:
            self.builtin = space.builtin.pick_builtin(w_globals)
        # regular functions always have CO_OPTIMIZED and CO_NEWLOCALS.
        # class bodies only have CO_NEWLOCALS.
        self.initialize_frame_scopes(outer_func, code)
        self.f_lineno = code.co_firstlineno

    def mark_as_escaped(self):
        """
        Must be called on frames that are exposed to applevel, e.g. by
        sys._getframe().  This ensures that the virtualref holding the frame
        is properly forced by ec.leave(), and thus the frame will be still
        accessible even after the corresponding C stack died.
        """
        self.escaped = True

    def append_block(self, block):
        assert block.previous is self.lastblock
        self.lastblock = block

    def pop_block(self):
        block = self.lastblock
        self.lastblock = block.previous
        return block

    def blockstack_non_empty(self):
        return self.lastblock is not None

    def get_blocklist(self):
        """Returns a list containing all the blocks in the frame"""
        lst = []
        block = self.lastblock
        while block is not None:
            lst.append(block)
            block = block.previous
        return lst

    def set_blocklist(self, lst):
        # Rebuild the singly-linked block stack from a list; iterating the
        # list backwards restores the original chaining order.
        self.lastblock = None
        i = len(lst) - 1
        while i >= 0:
            block = lst[i]
            i -= 1
            block.previous = self.lastblock
            self.lastblock = block

    def get_builtin(self):
        if self.space.config.objspace.honor__builtins__:
            return self.builtin
        else:
            return self.space.builtin

    def initialize_frame_scopes(self, outer_func, code):
        # regular functions always have CO_OPTIMIZED and CO_NEWLOCALS.
        # class bodies only have CO_NEWLOCALS.
        # CO_NEWLOCALS: make a locals dict unless optimized is also set
        # CO_OPTIMIZED: no locals dict needed at all
        # NB: this method is overridden in nestedscope.py
        flags = code.co_flags
        if flags & pycode.CO_OPTIMIZED:
            return
        if flags & pycode.CO_NEWLOCALS:
            self.w_locals = self.space.newdict(module=True)
        else:
            assert self.w_globals is not None
            self.w_locals = self.w_globals

    def run(self):
        """Start this frame's execution."""
        if self.getcode().co_flags & pycode.CO_GENERATOR:
            from pypy.interpreter.generator import GeneratorIterator
            return self.space.wrap(GeneratorIterator(self))
        else:
            return self.execute_frame()

    def execute_frame(self, w_inputvalue=None, operr=None):
        """Execute this frame.  Main entry point to the interpreter.
        The optional arguments are there to handle a generator's frame:
        w_inputvalue is for generator.send()) and operr is for
        generator.throw()).
        """
        # the following 'assert' is an annotation hint: it hides from
        # the annotator all methods that are defined in PyFrame but
        # overridden in the {,Host}FrameClass subclasses of PyFrame.
        assert (isinstance(self, self.space.FrameClass) or
                not self.space.config.translating)
        executioncontext = self.space.getexecutioncontext()
        executioncontext.enter(self)
        got_exception = True
        w_exitvalue = self.space.w_None
        try:
            executioncontext.call_trace(self)
            #
            if operr is not None:
                # generator.throw(): inject the exception before dispatching
                ec = self.space.getexecutioncontext()
                next_instr = self.handle_operation_error(ec, operr)
                self.last_instr = intmask(next_instr - 1)
            else:
                # Execution starts just after the last_instr.  Initially,
                # last_instr is -1.  After a generator suspends it points to
                # the YIELD_VALUE instruction.
                next_instr = r_uint(self.last_instr + 1)
                if next_instr != 0:
                    # resuming a generator: push the value sent in
                    self.pushvalue(w_inputvalue)
            #
            try:
                w_exitvalue = self.dispatch(self.pycode, next_instr,
                                            executioncontext)
            except Exception:
                executioncontext.return_trace(self, self.space.w_None)
                raise
            executioncontext.return_trace(self, w_exitvalue)
            # clean up the exception, might be useful for not
            # allocating exception objects in some cases
            self.last_exception = None
            got_exception = False
        finally:
            executioncontext.leave(self, w_exitvalue, got_exception)
        return w_exitvalue

    execute_frame.insert_stack_check_here = True

    # stack manipulation helpers
    def pushvalue(self, w_object):
        depth = self.valuestackdepth
        self.locals_stack_w[depth] = w_object
        self.valuestackdepth = depth + 1

    def popvalue(self):
        depth = self.valuestackdepth - 1
        assert depth >= self.nlocals, "pop from empty value stack"
        w_object = self.locals_stack_w[depth]
        self.locals_stack_w[depth] = None
        self.valuestackdepth = depth
        return w_object

    # we need two popvalues that return different data types:
    # one in case we want list another in case of tuple
    def _new_popvalues():
        # factory producing independent copies of the popvalues function,
        # so the annotator keeps the two call sites' result types separate
        @jit.unroll_safe
        def popvalues(self, n):
            values_w = [None] * n
            while True:
                n -= 1
                if n < 0:
                    break
                values_w[n] = self.popvalue()
            return values_w
        return popvalues
    popvalues = _new_popvalues()
    popvalues_mutable = _new_popvalues()
    del _new_popvalues

    @jit.unroll_safe
    def peekvalues(self, n):
        # Return the top n stack values without popping them.
        values_w = [None] * n
        base = self.valuestackdepth - n
        assert base >= self.nlocals
        while True:
            n -= 1
            if n < 0:
                break
            values_w[n] = self.locals_stack_w[base + n]
        return values_w

    @jit.unroll_safe
    def dropvalues(self, n):
        n = hint(n, promote=True)
        finaldepth = self.valuestackdepth - n
        assert finaldepth >= self.nlocals, "stack underflow in dropvalues()"
        while True:
            n -= 1
            if n < 0:
                break
            self.locals_stack_w[finaldepth + n] = None
        self.valuestackdepth = finaldepth

    @jit.unroll_safe
    def pushrevvalues(self, n, values_w):  # n should be len(values_w)
        make_sure_not_resized(values_w)
        while True:
            n -= 1
            if n < 0:
                break
            self.pushvalue(values_w[n])

    @jit.unroll_safe
    def dupvalues(self, n):
        # Duplicate the top n values on the stack, preserving their order.
        delta = n - 1
        while True:
            n -= 1
            if n < 0:
                break
            w_value = self.peekvalue(delta)
            self.pushvalue(w_value)

    def peekvalue(self, index_from_top=0):
        # NOTE: top of the stack is peekvalue(0).
        # Contrast this with CPython where it's PEEK(-1).
        index_from_top = hint(index_from_top, promote=True)
        index = self.valuestackdepth + ~index_from_top
        assert index >= self.nlocals, "peek past the bottom of the stack"
        return self.locals_stack_w[index]

    def settopvalue(self, w_object, index_from_top=0):
        index_from_top = hint(index_from_top, promote=True)
        index = self.valuestackdepth + ~index_from_top
        assert index >= self.nlocals, "settop past the bottom of the stack"
        self.locals_stack_w[index] = w_object

    @jit.unroll_safe
    def dropvaluesuntil(self, finaldepth):
        depth = self.valuestackdepth - 1
        finaldepth = hint(finaldepth, promote=True)
        while depth >= finaldepth:
            self.locals_stack_w[depth] = None
            depth -= 1
        self.valuestackdepth = finaldepth

    def save_locals_stack(self):
        return self.locals_stack_w[:self.valuestackdepth]

    def restore_locals_stack(self, items_w):
        self.locals_stack_w[:len(items_w)] = items_w
        self.init_cells()
        self.dropvaluesuntil(len(items_w))

    def make_arguments(self, nargs):
        return Arguments(self.space, self.peekvalues(nargs))

    def argument_factory(self, arguments, keywords, keywords_w, w_star,
                         w_starstar):
        return Arguments(self.space, arguments, keywords, keywords_w,
                         w_star, w_starstar)

    @jit.dont_look_inside
    def descr__reduce__(self, space):
        # Pickle support: flatten the entire frame state into a tuple
        # understood by _pickle_support.frame_new / descr__setstate__.
        from pypy.interpreter.mixedmodule import MixedModule
        from pypy.module._pickle_support import maker  # helper fns
        w_mod = space.getbuiltinmodule('_pickle_support')
        mod = space.interp_w(MixedModule, w_mod)
        new_inst = mod.get('frame_new')
        w = space.wrap
        nt = space.newtuple
        cells = self._getcells()
        if cells is None:
            w_cells = space.w_None
        else:
            w_cells = space.newlist([space.wrap(cell) for cell in cells])
        if self.w_f_trace is None:
            f_lineno = self.get_last_lineno()
        else:
            f_lineno = self.f_lineno
        # value stack and fast locals may contain holes (NULLs), hence the
        # slp_into_tuple_with_nulls encoding
        values_w = self.locals_stack_w[self.nlocals:self.valuestackdepth]
        w_valuestack = maker.slp_into_tuple_with_nulls(space, values_w)
        w_blockstack = nt([block._get_state_(space)
                           for block in self.get_blocklist()])
        w_fastlocals = maker.slp_into_tuple_with_nulls(
            space, self.locals_stack_w[:self.nlocals])
        if self.last_exception is None:
            w_exc_value = space.w_None
            w_tb = space.w_None
        else:
            w_exc_value = self.last_exception.get_w_value(space)
            w_tb = w(self.last_exception.get_traceback())
        tup_state = [
            w(self.f_backref()),
            w(self.get_builtin()),
            w(self.pycode),
            w_valuestack,
            w_blockstack,
            w_exc_value,  # last_exception
            w_tb,         #
            self.w_globals,
            w(self.last_instr),
            w(self.frame_finished_execution),
            w(f_lineno),
            w_fastlocals,
            space.w_None,  #XXX placeholder for f_locals
            #f_restricted requires no additional data!
            space.w_None,  ## self.w_f_trace,  ignore for now
            w(self.instr_lb),  #do we need these three (that are for tracing)
            w(self.instr_ub),
            w(self.instr_prev_plus_one),
            w_cells,
            ]
        return nt([new_inst, nt([]), nt(tup_state)])

    @jit.dont_look_inside
    def descr__setstate__(self, space, w_args):
        # Unpickle support: the exact inverse of descr__reduce__ above.
        from pypy.module._pickle_support import maker  # helper fns
        from pypy.interpreter.pycode import PyCode
        from pypy.interpreter.module import Module
        args_w = space.unpackiterable(w_args)
        w_f_back, w_builtin, w_pycode, w_valuestack, w_blockstack, \
            w_exc_value, w_tb, w_globals, w_last_instr, w_finished, \
            w_f_lineno, w_fastlocals, w_f_locals, w_f_trace, w_instr_lb, \
            w_instr_ub, w_instr_prev_plus_one, w_cells = args_w
        new_frame = self
        pycode = space.interp_w(PyCode, w_pycode)
        if space.is_w(w_cells, space.w_None):
            closure = None
            cellvars = []
        else:
            from pypy.interpreter.nestedscope import Cell
            cells_w = space.unpackiterable(w_cells)
            cells = [space.interp_w(Cell, w_cell) for w_cell in cells_w]
            # the first co_cellvars entries are this frame's own cells,
            # the rest form the closure
            ncellvars = len(pycode.co_cellvars)
            cellvars = cells[:ncellvars]
            closure = cells[ncellvars:]
        # do not use the instance's __init__ but the base's, because we set
        # everything like cells from here
        # XXX hack
        from pypy.interpreter.function import Function
        outer_func = Function(space, None, closure=closure,
                              forcename="fake")
        PyFrame.__init__(self, space, pycode, w_globals, outer_func)
        f_back = space.interp_w(PyFrame, w_f_back, can_be_None=True)
        new_frame.f_backref = jit.non_virtual_ref(f_back)
        new_frame.builtin = space.interp_w(Module, w_builtin)
        new_frame.set_blocklist([
            unpickle_block(space, w_blk)
            for w_blk in space.unpackiterable(w_blockstack)
        ])
        values_w = maker.slp_from_tuple_with_nulls(space, w_valuestack)
        for w_value in values_w:
            new_frame.pushvalue(w_value)
        if space.is_w(w_exc_value, space.w_None):
            new_frame.last_exception = None
        else:
            from pypy.interpreter.pytraceback import PyTraceback
            tb = space.interp_w(PyTraceback, w_tb)
            new_frame.last_exception = OperationError(space.type(w_exc_value),
                                                      w_exc_value, tb)
        new_frame.last_instr = space.int_w(w_last_instr)
        new_frame.frame_finished_execution = space.is_true(w_finished)
        new_frame.f_lineno = space.int_w(w_f_lineno)
        fastlocals_w = maker.slp_from_tuple_with_nulls(space, w_fastlocals)
        new_frame.locals_stack_w[:len(fastlocals_w)] = fastlocals_w
        if space.is_w(w_f_trace, space.w_None):
            new_frame.w_f_trace = None
        else:
            new_frame.w_f_trace = w_f_trace
        new_frame.instr_lb = space.int_w(w_instr_lb)  #the three for tracing
        new_frame.instr_ub = space.int_w(w_instr_ub)
        new_frame.instr_prev_plus_one = space.int_w(w_instr_prev_plus_one)
        self._setcellvars(cellvars)
        # XXX what if the frame is in another thread??
        space.frame_trace_action.fire()

    def hide(self):
        return self.pycode.hidden_applevel

    def getcode(self):
        return hint(self.pycode, promote=True)

    @jit.dont_look_inside
    def getfastscope(self):
        "Get the fast locals as a list."
        return self.locals_stack_w

    @jit.dont_look_inside
    def setfastscope(self, scope_w):
        """Initialize the fast locals from a list of values,
        where the order is according to self.pycode.signature()."""
        scope_len = len(scope_w)
        if scope_len > self.nlocals:
            raise ValueError, "new fastscope is longer than the allocated area"
        # don't assign directly to 'locals_stack_w[:scope_len]' to be
        # virtualizable-friendly
        for i in range(scope_len):
            self.locals_stack_w[i] = scope_w[i]
        self.init_cells()

    def init_cells(self):
        """Initialize cellvars from self.locals_stack_w.
        This is overridden in nestedscope.py"""
        pass

    def getfastscopelength(self):
        return self.nlocals

    def getclosure(self):
        return None

    def _getcells(self):
        return None

    def _setcellvars(self, cellvars):
        pass

    ### line numbers ###

    def fget_f_lineno(self, space):
        "Returns the line number of the instruction currently being executed."
        if self.w_f_trace is None:
            return space.wrap(self.get_last_lineno())
        else:
            return space.wrap(self.f_lineno)

    def fset_f_lineno(self, space, w_new_lineno):
        """Set f_lineno, i.e. perform a jump to the given line.  Only legal
        from a trace function, and only to targets that do not enter or
        leave a 'finally' block or jump into the middle of a block."""
        try:
            new_lineno = space.int_w(w_new_lineno)
        except OperationError, e:
            raise OperationError(space.w_ValueError,
                space.wrap("lineno must be an integer"))
        if self.w_f_trace is None:
            raise OperationError(space.w_ValueError,
                space.wrap("f_lineno can only be set by a trace function."))
        line = self.pycode.co_firstlineno
        if new_lineno < line:
            raise operationerrfmt(space.w_ValueError,
                "line %d comes before the current code.", new_lineno)
        elif new_lineno == line:
            new_lasti = 0
        else:
            # walk co_lnotab to find the bytecode offset of new_lineno
            new_lasti = -1
            addr = 0
            lnotab = self.pycode.co_lnotab
            for offset in xrange(0, len(lnotab), 2):
                addr += ord(lnotab[offset])
                line += ord(lnotab[offset + 1])
                if line >= new_lineno:
                    new_lasti = addr
                    new_lineno = line
                    break
        if new_lasti == -1:
            raise operationerrfmt(space.w_ValueError,
                "line %d comes after the current code.", new_lineno)
        # Don't jump to a line with an except in it.
        code = self.pycode.co_code
        if ord(code[new_lasti]) in (DUP_TOP, POP_TOP):
            raise OperationError(space.w_ValueError,
                space.wrap(
                    "can't jump to 'except' line as there's no exception"))
        # Don't jump into or out of a finally block.
        # Scan the whole bytecode, tracking the static block stack, and
        # remember which SETUP_FINALLY (if any) encloses the source and the
        # target offsets.
        f_lasti_setup_addr = -1
        new_lasti_setup_addr = -1
        blockstack = []
        addr = 0
        while addr < len(code):
            op = ord(code[addr])
            if op in (SETUP_LOOP, SETUP_EXCEPT, SETUP_FINALLY):
                blockstack.append([addr, False])
            elif op == POP_BLOCK:
                setup_op = ord(code[blockstack[-1][0]])
                if setup_op == SETUP_FINALLY:
                    # mark as "inside the finally body" from here on
                    blockstack[-1][1] = True
                else:
                    blockstack.pop()
            elif op == END_FINALLY:
                if len(blockstack) > 0:
                    setup_op = ord(code[blockstack[-1][0]])
                    if setup_op == SETUP_FINALLY:
                        blockstack.pop()
            if addr == new_lasti or addr == self.last_instr:
                # find the innermost enclosing 'finally' for this offset
                for ii in range(len(blockstack)):
                    setup_addr, in_finally = blockstack[~ii]
                    if in_finally:
                        if addr == new_lasti:
                            new_lasti_setup_addr = setup_addr
                        if addr == self.last_instr:
                            f_lasti_setup_addr = setup_addr
                        break
            if op >= HAVE_ARGUMENT:
                addr += 3
            else:
                addr += 1
        assert len(blockstack) == 0
        if new_lasti_setup_addr != f_lasti_setup_addr:
            raise operationerrfmt(space.w_ValueError,
                "can't jump into or out of a 'finally' block %d -> %d",
                f_lasti_setup_addr, new_lasti_setup_addr)
        if new_lasti < self.last_instr:
            min_addr = new_lasti
            max_addr = self.last_instr
        else:
            min_addr = self.last_instr
            max_addr = new_lasti
        # Count how many blocks are entered/left between the two offsets,
        # to know how many frame blocks must be popped before jumping.
        delta_iblock = min_delta_iblock = 0
        addr = min_addr
        while addr < max_addr:
            op = ord(code[addr])
            if op in (SETUP_LOOP, SETUP_EXCEPT, SETUP_FINALLY):
                delta_iblock += 1
            elif op == POP_BLOCK:
                delta_iblock -= 1
                if delta_iblock < min_delta_iblock:
                    min_delta_iblock = delta_iblock
            if op >= stdlib_opcode.HAVE_ARGUMENT:
                addr += 3
            else:
                addr += 1
        f_iblock = 0
        block = self.lastblock
        while block:
            f_iblock += 1
            block = block.previous
        min_iblock = f_iblock + min_delta_iblock
        if new_lasti > self.last_instr:
            new_iblock = f_iblock + delta_iblock
        else:
            new_iblock = f_iblock - delta_iblock
        if new_iblock > min_iblock:
            raise OperationError(space.w_ValueError,
                space.wrap("can't jump into the middle of a block"))
        # unwind the frame's block stack down to the target's depth
        while f_iblock > new_iblock:
            block = self.pop_block()
            block.cleanup(self)
            f_iblock -= 1
        self.f_lineno = new_lineno
        self.last_instr = new_lasti
def reduce(self, space, w_obj, multidim, promote_to_largest, axis,
           keepdims=False, out=None):
    """ufunc.reduce(): fold a binary ufunc over 'axis' of 'w_obj'.

    With a multi-dimensional input and axis >= 0, performs an axis reduce
    producing an array of rank-1 lower (or same rank with a size-1 axis when
    keepdims); otherwise reduces the flattened array to a scalar.  'out', if
    given, receives the result and its shape is validated first.
    """
    from pypy.module.micronumpy.interp_numarray import convert_to_array, \
        Scalar, ReduceArray, W_NDimArray
    if self.argcount != 2:
        raise OperationError(space.w_ValueError, space.wrap("reduce only "
            "supported for binary functions"))
    assert isinstance(self, W_Ufunc2)
    obj = convert_to_array(space, w_obj)
    if axis >= len(obj.shape):
        raise OperationError(space.w_ValueError,
                             space.wrap("axis(=%d) out of bounds" % axis))
    if isinstance(obj, Scalar):
        raise OperationError(space.w_TypeError, space.wrap("cannot reduce "
            "on a scalar"))
    size = obj.size
    if self.comparison_func:
        # comparison ufuncs (e.g. logical ops) always reduce to bool
        dtype = interp_dtype.get_dtype_cache(space).w_booldtype
    else:
        dtype = find_unaryop_result_dtype(
            space, obj.find_dtype(),
            promote_to_float=self.promote_to_float,
            promote_to_largest=promote_to_largest,
            promote_bools=True
        )
    shapelen = len(obj.shape)
    if self.identity is None and size == 0:
        # no identity element -> empty reduce is undefined
        raise operationerrfmt(space.w_ValueError, "zero-size array to "
            "%s.reduce without identity", self.name)
    if shapelen > 1 and axis >= 0:
        # axis reduce: result shape drops (or keeps as 1) the reduced axis
        if keepdims:
            shape = obj.shape[:axis] + [1] + obj.shape[axis + 1:]
        else:
            shape = obj.shape[:axis] + obj.shape[axis + 1:]
        if out:
            # Test for shape agreement
            if len(out.shape) > len(shape):
                raise operationerrfmt(space.w_ValueError,
                    'output parameter for reduction operation %s' +
                    ' has too many dimensions', self.name)
            elif len(out.shape) < len(shape):
                raise operationerrfmt(space.w_ValueError,
                    'output parameter for reduction operation %s' +
                    ' does not have enough dimensions', self.name)
            elif out.shape != shape:
                raise operationerrfmt(space.w_ValueError,
                    'output parameter shape mismatch, expecting [%s]' +
                    ' , got [%s]',
                    ",".join([str(x) for x in shape]),
                    ",".join([str(x) for x in out.shape]),
                    )
            # Test for dtype agreement, perhaps create an itermediate
            #if out.dtype != dtype:
            #    raise OperationError(space.w_TypeError, space.wrap(
            #        "mismatched dtypes"))
            return self.do_axis_reduce(obj, out.find_dtype(), axis, out)
        else:
            result = W_NDimArray(shape, dtype)
            return self.do_axis_reduce(obj, dtype, axis, result)
    # full reduce down to a single scalar value
    if out:
        if len(out.shape) > 0:
            raise operationerrfmt(space.w_ValueError, "output parameter "
                "for reduction operation %s has too many"
                " dimensions", self.name)
        arr = ReduceArray(self.func, self.name, self.identity, obj,
                          out.find_dtype())
        val = loop.compute(arr)
        assert isinstance(out, Scalar)
        out.value = val
    else:
        arr = ReduceArray(self.func, self.name, self.identity, obj, dtype)
        val = loop.compute(arr)
    return val
def descr__new__(space, w_subtype, w_dtype, w_align=None, w_copy=None,
                 w_shape=None):
    """numpy.dtype(obj) constructor.

    Accepts: None (-> float64), an existing dtype, a type-spec string, a
    list/tuple/dict record description, or a registered scalar/box type.
    A non-empty 'w_shape' produces a void sub-array dtype by recursing on
    the element dtype first.
    """
    # w_align and w_copy are necessary for pickling
    cache = get_dtype_cache(space)
    if w_shape is not None and (space.isinstance_w(w_shape, space.w_int) or
                                space.len_w(w_shape) > 0):
        # sub-array dtype: build the element dtype, then wrap it in a void
        # dtype sized element_size * prod(shape)
        subdtype = descr__new__(space, w_subtype, w_dtype, w_align, w_copy)
        assert isinstance(subdtype, W_Dtype)
        size = 1
        if space.isinstance_w(w_shape, space.w_int):
            w_shape = space.newtuple([w_shape])
        shape = []
        for w_dim in space.fixedview(w_shape):
            dim = space.int_w(w_dim)
            shape.append(dim)
            size *= dim
        return W_Dtype(
            types.VoidType(subdtype.itemtype.get_element_size() * size),
            NPY_VOID, NPY_VOIDLTR,
            "void" + str(8 * subdtype.itemtype.get_element_size() * size),
            NPY_VOIDLTR, space.gettypefor(interp_boxes.W_VoidBox),
            shape=shape, subdtype=subdtype)
    if space.is_none(w_dtype):
        return cache.w_float64dtype
    elif space.isinstance_w(w_dtype, w_subtype):
        # already a dtype instance: return it unchanged
        return w_dtype
    elif space.isinstance_w(w_dtype, space.w_str):
        name = space.str_w(w_dtype)
        if ',' in name:
            # comma-separated field spec, e.g. "i4,f8"
            return dtype_from_spec(space, name)
        try:
            return cache.dtypes_by_name[name]
        except KeyError:
            pass
        # variable-size types ('V', 'S', 'U', 'c'), optionally preceded by a
        # byte-order character; note 'or' binds looser than 'and' here
        if name[0] in 'VSUc' or name[0] in '<>=' and name[1] in 'VSUc':
            return variable_dtype(space, name)
        raise OperationError(space.w_TypeError, space.wrap(
            "data type %s not understood" % name))
    elif space.isinstance_w(w_dtype, space.w_list):
        return dtype_from_list(space, w_dtype)
    elif space.isinstance_w(w_dtype, space.w_tuple):
        # (base_dtype, extra) where 'extra' is either an itemsize for
        # zero-size base types, or a sub-array shape
        w_dtype0 = space.getitem(w_dtype, space.wrap(0))
        w_dtype1 = space.getitem(w_dtype, space.wrap(1))
        subdtype = descr__new__(space, w_subtype, w_dtype0, w_align, w_copy)
        assert isinstance(subdtype, W_Dtype)
        if subdtype.get_size() == 0:
            name = "%s%d" % (subdtype.kind, space.int_w(w_dtype1))
            return descr__new__(space, w_subtype, space.wrap(name),
                                w_align, w_copy)
        return descr__new__(space, w_subtype, w_dtype0, w_align, w_copy,
                            w_shape=w_dtype1)
    elif space.isinstance_w(w_dtype, space.w_dict):
        return dtype_from_dict(space, w_dtype)
    # finally, try registered alternate constructors and box types
    for dtype in cache.builtin_dtypes:
        if w_dtype in dtype.alternate_constructors:
            return dtype
        if w_dtype is dtype.w_box_type:
            return dtype
    msg = "data type not understood (value of type %T not expected here)"
    raise operationerrfmt(space.w_TypeError, msg, w_dtype)
def reduce(self, space, w_obj, promote_to_largest, w_axis,
           keepdims=False, out=None, dtype=None, cumulative=False):
    """ufunc.reduce()/accumulate(): fold a binary ufunc over 'w_axis'.

    With cumulative=True behaves like accumulate (result has the input's
    full shape).  'out' receives the result when given (shape-checked
    first); 'dtype' overrides the inferred result dtype.
    """
    if self.argcount != 2:
        raise OperationError(space.w_ValueError, space.wrap("reduce only "
            "supported for binary functions"))
    assert isinstance(self, W_Ufunc2)
    obj = convert_to_array(space, w_obj)
    if obj.get_dtype().is_flexible_type():
        raise OperationError(space.w_TypeError,
            space.wrap('cannot perform reduce for flexible type'))
    obj_shape = obj.get_shape()
    if obj.is_scalar():
        return obj.get_scalar_value()
    shapelen = len(obj_shape)
    axis = unwrap_axis_arg(space, shapelen, w_axis)
    assert axis >= 0
    dtype = interp_dtype.decode_w_dtype(space, dtype)
    if dtype is None:
        if self.comparison_func:
            # comparison ufuncs always reduce to bool
            dtype = interp_dtype.get_dtype_cache(space).w_booldtype
        else:
            dtype = find_unaryop_result_dtype(
                space, obj.get_dtype(),
                promote_to_float=self.promote_to_float,
                promote_to_largest=promote_to_largest,
                promote_bools=True)
    if self.identity is None:
        # without an identity element, any reduced-over axis must be
        # non-empty
        for i in range(shapelen):
            if space.is_none(w_axis) or i == axis:
                if obj_shape[i] == 0:
                    raise operationerrfmt(space.w_ValueError,
                        "zero-size array to "
                        "%s.reduce without identity", self.name)
    if shapelen > 1 and axis < shapelen:
        temp = None
        if cumulative:
            # accumulate keeps the full shape; 'temp' holds the running
            # partial results along the reduced axis
            shape = obj_shape[:]
            temp_shape = obj_shape[:axis] + obj_shape[axis + 1:]
            if out:
                dtype = out.get_dtype()
            temp = W_NDimArray.from_shape(space, temp_shape, dtype,
                                          w_instance=obj)
        elif keepdims:
            shape = obj_shape[:axis] + [1] + obj_shape[axis + 1:]
        else:
            shape = obj_shape[:axis] + obj_shape[axis + 1:]
        if out:
            # Test for shape agreement
            # XXX maybe we need to do broadcasting here, although I must
            # say I don't understand the details for axis reduce
            if len(out.get_shape()) > len(shape):
                raise operationerrfmt(space.w_ValueError,
                    'output parameter for reduction operation %s' +
                    ' has too many dimensions', self.name)
            elif len(out.get_shape()) < len(shape):
                raise operationerrfmt(space.w_ValueError,
                    'output parameter for reduction operation %s' +
                    ' does not have enough dimensions', self.name)
            elif out.get_shape() != shape:
                raise operationerrfmt(space.w_ValueError,
                    'output parameter shape mismatch, expecting [%s]' +
                    ' , got [%s]',
                    ",".join([str(x) for x in shape]),
                    ",".join([str(x) for x in out.get_shape()]),
                    )
            dtype = out.get_dtype()
        else:
            out = W_NDimArray.from_shape(space, shape, dtype,
                                         w_instance=obj)
        return loop.do_axis_reduce(shape, self.func, obj, dtype, axis, out,
                                   self.identity, cumulative, temp)
    if cumulative:
        # 1-d (or flattened) accumulate
        if out:
            if out.get_shape() != [obj.get_size()]:
                raise OperationError(space.w_ValueError, space.wrap(
                    "out of incompatible size"))
        else:
            out = W_NDimArray.from_shape(space, [obj.get_size()], dtype,
                                         w_instance=obj)
        loop.compute_reduce_cumulative(obj, out, dtype, self.func,
                                       self.identity)
        return out
    if out:
        if len(out.get_shape()) > 0:
            raise operationerrfmt(space.w_ValueError, "output parameter "
                "for reduction operation %s has too many"
                " dimensions", self.name)
        dtype = out.get_dtype()
    res = loop.compute_reduce(obj, dtype, self.func, self.done_func,
                              self.identity)
    if out:
        out.set_scalar_value(res)
        return out
    return res