def descr_init(self, space, __args__):
    """Initialize a bytearray from (source, encoding, errors) arguments,
    mirroring CPython's bytearray(...) constructor semantics."""
    # this is on the silly side
    w_source, w_encoding, w_errors = __args__.parse_obj(
        None, 'bytearray', init_signature, init_defaults)
    if w_source is None:
        w_source = space.wrap('')
    # Normalize omitted arguments to w_None so that an explicitly passed
    # None (which arrives wrapped as space.w_None, not the unwrapped None)
    # and an omitted argument take the same path below.  Testing
    # `w_encoding is not None` would wrongly enter the encode branch for
    # bytearray(source, None).
    if w_encoding is None:
        w_encoding = space.w_None
    if w_errors is None:
        w_errors = space.w_None

    # Unicode argument
    if not space.is_w(w_encoding, space.w_None):
        from pypy.objspace.std.unicodeobject import (
            _get_encoding_and_errors, encode_object)
        encoding, errors = _get_encoding_and_errors(space, w_encoding,
                                                    w_errors)
        # if w_source is an integer this correctly raises a
        # TypeError the CPython error message is: "encoding or
        # errors without a string argument" ours is: "expected
        # unicode, got int object"
        w_source = encode_object(space, w_source, encoding, errors)

    # Is it an int?  bytearray(n) preallocates n zero bytes.
    try:
        count = space.int_w(w_source)
    except OperationError as e:
        if not e.match(space, space.w_TypeError):
            raise
    else:
        if count < 0:
            raise oefmt(space.w_ValueError,
                        "bytearray negative count")
        self.data = ['\0'] * count
        return

    # Fallback: treat w_source as a byte-sequence-like object.
    data = makebytearraydata_w(space, w_source)
    self.data = data
def descr_decode(self, space, w_encoding=None, w_errors=None):
    """Decode the bytes of self into a unicode object."""
    from pypy.objspace.std.unicodeobject import (
        _get_encoding_and_errors, decode_object, unicode_from_string)
    encoding, errors = _get_encoding_and_errors(space, w_encoding,
                                                w_errors)
    # An explicit encoding or error handler forces the generic path.
    if encoding is not None or errors is not None:
        return decode_object(space, self, encoding, errors)
    # Fast path: default encoding with default error handling.
    return unicode_from_string(space, self)
def unicode_w(self, space):
    """Return self decoded with the interpreter's default encoding,
    as an unwrapped unicode value."""
    # Use the default encoding.
    w_defaultencoding = space.call_function(
        space.sys.get('getdefaultencoding'))
    encoding, errors = _get_encoding_and_errors(
        space, w_defaultencoding, space.w_None)
    if encoding is None and errors is None:
        w_decoded = unicode_from_string(space, self)
    else:
        w_decoded = decode_object(space, self, encoding, errors)
    return space.unicode_w(w_decoded)
def descr_decode(self, space, w_encoding=None, w_errors=None):
    """Decode self into a unicode object; bytearray instances never take
    the unicode_from_string fast path."""
    from pypy.objspace.std.unicodeobject import (
        _get_encoding_and_errors, decode_object, unicode_from_string)
    encoding, errors = _get_encoding_and_errors(space, w_encoding,
                                                w_errors)
    from pypy.objspace.std.bytearrayobject import W_BytearrayObject
    fast_path = (encoding is None and errors is None
                 and not isinstance(self, W_BytearrayObject))
    if fast_path:
        return unicode_from_string(space, self)
    return decode_object(space, self, encoding, errors)
def descr_init(self, space, __args__):
    """Initialize a bytearray from (source, encoding, errors) arguments,
    mirroring CPython's bytearray(...) constructor semantics."""
    # this is on the silly side
    w_source, w_encoding, w_errors = __args__.parse_obj(
        None, 'bytearray', init_signature, init_defaults)
    # Normalize omitted arguments: missing source becomes the empty
    # string, missing encoding/errors become wrapped None.
    w_source = space.wrap('') if w_source is None else w_source
    w_encoding = space.w_None if w_encoding is None else w_encoding
    w_errors = space.w_None if w_errors is None else w_errors

    # Unicode argument
    if not space.is_w(w_encoding, space.w_None):
        from pypy.objspace.std.unicodeobject import (
            _get_encoding_and_errors, encode_object)
        encoding, errors = _get_encoding_and_errors(space, w_encoding,
                                                    w_errors)
        # if w_source is an integer this correctly raises a
        # TypeError the CPython error message is: "encoding or
        # errors without a string argument" ours is: "expected
        # unicode, got int object"
        w_source = encode_object(space, w_source, encoding, errors)

    # Is it an int?  bytearray(n) preallocates n zero bytes.
    try:
        count = space.int_w(w_source)
    except OperationError as e:
        if not e.match(space, space.w_TypeError):
            raise
        # Not an int: treat w_source as a byte-sequence-like object.
        self.data = makebytearraydata_w(space, w_source)
    else:
        if count < 0:
            raise oefmt(space.w_ValueError,
                        "bytearray negative count")
        self.data = ['\0'] * count
def descr_encode(self, space, w_encoding=None, w_errors=None):
    """Encode self using the resolved encoding and error handler."""
    from pypy.objspace.std.unicodeobject import _get_encoding_and_errors
    from pypy.objspace.std.unicodeobject import encode_object
    encoding, errors = _get_encoding_and_errors(space, w_encoding,
                                                w_errors)
    return encode_object(space, self, encoding, errors)
def descr_encode(self, space, w_encoding=None, w_errors=None):
    """Encode self with the encoding/errors unwrapped from the args."""
    from pypy.objspace.std.unicodeobject import (
        _get_encoding_and_errors, encode_object)
    enc, err = _get_encoding_and_errors(space, w_encoding, w_errors)
    return encode_object(space, self, enc, err)