def process_response(self, request, response):
    """Return an HTTP 422 response if a JSON response was requested and an
    HTML page with form errors is returned.
    """
    if is_json_requested(request) and response._headers["content-type"][1].startswith("text/html") and \
            response.status_code == 200:
        user = request.user
        if not user.is_authenticated():
            # Login view was returned
            return HttpResponseUnauthorised()
        hash_ = hashlib.sha1()
        hash_.update(six.binary_type(random.random()))
        # For some very obscure reason, a random number was not enough --
        # it led to collisions time after time.
        hash_.update(six.binary_type(time.time()))
        hash_value = hash_.hexdigest()
        ErrorPage.objects.create(hash_value=hash_value, user=user,
                                 requested_url=request.get_full_path(),
                                 html=response.content)
        return HttpResponseUnprocessableEntity(
            json.dumps((1, request.build_absolute_uri(
                django.core.urlresolvers.reverse(
                    "jb_common.views.show_error_page",
                    kwargs={"hash_value": hash_value})))),
            content_type="application/json")
    return response
def unquoting_view(request, *args, **kwargs):
    if six.PY2:
        return view(request,
                    *[urllib.parse.unquote(six.binary_type(value)).decode("utf-8")
                      for value in args],
                    **dict((key, urllib.parse.unquote(six.binary_type(value)).decode("utf-8"))
                           for key, value in kwargs.items()))
    return view(request,
                *[urllib.parse.unquote(value) for value in args],
                **dict((key, urllib.parse.unquote(value)) for key, value in kwargs.items()))
def test_binary_1_max_length(self):
    # Okay
    m = SizeFieldModel(binary1=six.binary_type(1) * (2 ** 8 - 1))
    m.save()

    # Bad - Data too long
    m = SizeFieldModel(binary1=six.binary_type(1) * (2 ** 8))
    with pytest.raises(DataError) as excinfo:
        m.save()
    assert excinfo.value.args[0] == 1406
def get_interface_ip(ifname):
    scket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        try:
            if_name_short = six.binary_type(ifname[:15], 'utf-8')
        except TypeError:
            if_name_short = six.binary_type(ifname[:15])
        return socket.inet_ntoa(fcntl.ioctl(scket.fileno(), 0x8915,
                                            struct.pack('256s', if_name_short))[20:24])
    finally:
        scket.close()
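# Hedged usage sketch for get_interface_ip above (assumes the socket, fcntl,
# struct and six imports it relies on are in scope). The interface name
# 'eth0' is a made-up example; 0x8915 is the Linux SIOCGIFADDR ioctl, so this
# only works on Linux and only for interfaces that currently hold an IPv4
# address.
try:
    print(get_interface_ip('eth0'))
except (OSError, IOError):
    # Interface missing or without an address; handle as appropriate.
    pass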
def encode_response(cls, response):
    """
    Encodes a Django HTTP response into ASGI http.response message(s).
    """
    # Collect cookies into headers.
    # Note that we have to preserve header case as there are some non-RFC
    # compliant clients that want things like Content-Type correct. Ugh.
    response_headers = []
    for header, value in response.items():
        if isinstance(header, six.text_type):
            header = header.encode("ascii")
        if isinstance(value, six.text_type):
            value = value.encode("latin1")
        response_headers.append(
            (
                six.binary_type(header),
                six.binary_type(value),
            )
        )
    for c in response.cookies.values():
        response_headers.append(
            (
                b'Set-Cookie',
                c.output(header='').encode("ascii"),
            )
        )
    # Make initial response message
    message = {
        "status": response.status_code,
        "headers": response_headers,
    }
    # Streaming responses need to be pinned to their iterator
    if response.streaming:
        # Access `__iter__` and not `streaming_content` directly in case
        # it has been overridden in a subclass.
        for part in response:
            for chunk, more in cls.chunk_bytes(part):
                message['content'] = chunk
                # We ignore "more" as there may be more parts; instead,
                # we use an empty final closing message with False.
                message['more_content'] = True
                yield message
                message = {}
        # Final closing message
        yield {
            "more_content": False,
        }
    # Other responses just need chunking
    else:
        # Yield chunks of response
        for chunk, last in cls.chunk_bytes(response.content):
            message['content'] = chunk
            message['more_content'] = not last
            yield message
            message = {}
def encode_response(cls, response):
    """
    Encodes a Django HTTP response into ASGI http.response message(s).
    """
    # Collect cookies into headers.
    # Note that we have to preserve header case as there are some non-RFC
    # compliant clients that want things like Content-Type correct. Ugh.
    response_headers = []
    for header, value in response.items():
        if isinstance(header, six.text_type):
            header = header.encode("ascii")
        if isinstance(value, six.text_type):
            value = value.encode("latin1")
        response_headers.append(
            (
                six.binary_type(header),
                six.binary_type(value),
            )
        )
    for c in response.cookies.values():
        response_headers.append(
            (
                b'Set-Cookie',
                c.output(header='').encode("ascii"),
            )
        )
    # Make initial response message
    message = {
        "status": response.status_code,
        "headers": response_headers,
    }
    # Streaming responses need to be pinned to their iterator
    if response.streaming:
        # Access `__iter__` and not `streaming_content` directly in case
        # it has been overridden in a subclass.
        for part in response:
            for chunk, more in cls.chunk_bytes(part):
                message['content'] = chunk
                # We ignore "more" as there may be more parts; instead,
                # we use an empty final closing message with False.
                message['more_content'] = True
                yield message
                message = {}
        # Final closing message
        message["more_content"] = False
        yield message
    # Other responses just need chunking
    else:
        # Yield chunks of response
        for chunk, last in cls.chunk_bytes(response.content):
            message['content'] = chunk
            message['more_content'] = not last
            yield message
            message = {}
def encode_response(self, response):
    """
    Encodes a Django HTTP response into an ASGI http.response message(s).
    """
    # Collect cookies into headers.
    # Note that we have to preserve header case as there are some non-RFC
    # compliant clients that want things like Content-Type correct. Ugh.
    response_headers = []
    for header, value in response.items():
        if isinstance(header, six.binary_type):
            header = header.decode("latin1")
        if isinstance(value, six.text_type):
            value = value.encode("latin1")
        response_headers.append(
            (
                six.text_type(header),
                six.binary_type(value),
            )
        )
    for c in response.cookies.values():
        response_headers.append(
            (
                'Set-Cookie',
                six.binary_type(c.output(header='')),
            )
        )
    # Make initial response message
    message = {
        "status": response.status_code,
        "status_text": response.reason_phrase,
        "headers": response_headers,
    }
    # Streaming responses need to be pinned to their iterator
    if response.streaming:
        for part in response.streaming_content:
            for chunk in self.chunk_bytes(part):
                message['content'] = chunk
                message['more_content'] = True
                yield message
                message = {}
        # Final closing message
        yield {
            "more_content": False,
        }
    # Other responses just need chunking
    else:
        # Yield chunks of response
        for chunk, last in self.chunk_bytes(response.content):
            message['content'] = chunk
            message['more_content'] = not last
            yield message
            message = {}
def write_output(self):
    """ Write the stdout/stderror we got from MCP Client out to files if necessary. """
    for task in self.groupTasks:
        with task.outputLock:
            self._write_file_to_disk(
                task.standardOutputFile, six.binary_type(task.results['stdout']))
            self._write_file_to_disk(
                task.standardErrorFile, six.binary_type(task.results['stderror']))
def unquoting_view(request, *args, **kwargs):
    if six.PY2:
        return view(
            request,
            *[
                urllib_parse.unquote(
                    six.binary_type(value)).decode("utf-8") for value in args
            ],
            **dict((key, urllib_parse.unquote(six.binary_type(
                value)).decode("utf-8")) for key, value in kwargs.items()))
    return view(
        request,
        *[urllib_parse.unquote(value) for value in args],
        **dict((key, urllib_parse.unquote(value))
               for key, value in kwargs.items()))
def setup_from_none():
    lang = os.environ['LANG'][:2] if 'LANG' in os.environ else 'fr'
    if six.PY3:
        clear_modules()
    from lucterios.framework.settings import fill_appli_settings
    import types
    import gc
    gc.collect()
    lct_modules = []
    glob = LucteriosGlobal()
    _, mod_applis, mod_modules = glob.installed()
    for mod_item in mod_applis:
        lct_modules.append(mod_item[0])
    for mod_item in mod_modules:
        lct_modules.append(mod_item[0])
    try:
        module = types.ModuleType("default_setting")
    except TypeError:
        module = types.ModuleType(six.binary_type("default_setting"))
    setattr(module, '__file__', ".")
    setattr(module, 'SECRET_KEY', "default_setting")
    setattr(module, 'LANGUAGE_CODE', lang)
    setattr(module, 'DATABASES', {'default': {
        'ENGINE': 'django.db.backends.dummy'
    }})
    fill_appli_settings("lucterios.standard", lct_modules, module)
    sys.modules["default_setting"] = module
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "default_setting")
    import django
    from django import db
    django.setup()
    set_locale_lang(lang)
    db.close_old_connections()
def setup_from_none():
    if six.PY3:
        clear_modules()
    from lucterios.framework.settings import fill_appli_settings
    import types
    import gc
    gc.collect()
    lct_modules = []
    glob = LucteriosGlobal()
    _, mod_applis, mod_modules = glob.installed()
    for mod_item in mod_applis:
        lct_modules.append(mod_item[0])
    for mod_item in mod_modules:
        lct_modules.append(mod_item[0])
    try:
        module = types.ModuleType("default_setting")
    except TypeError:
        module = types.ModuleType(six.binary_type("default_setting"))
    setattr(module, '__file__', "")
    setattr(module, 'SECRET_KEY', "default_setting")
    setattr(
        module, 'DATABASES',
        {'default': {'ENGINE': 'django.db.backends.dummy'}})
    fill_appli_settings("lucterios.standard", lct_modules, module)
    sys.modules["default_setting"] = module
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "default_setting")
    import django
    from django import db
    django.setup()
    db.close_old_connections()
def message(self):
    """Construct an outgoing message for the e-mail.

    This will construct a message based on the data provided to the
    constructor. This represents the e-mail that will later be sent using
    :py:meth:`send`.

    After calling this method, the message's ID will be stored in the
    :py:attr:`message_id` attribute for later reference.

    This does not need to be called manually. It's called by
    :py:meth:`send`.

    Returns:
        django.core.mail.message.SafeMIMEText:
        The resulting message.
    """
    msg = super(EmailMessage, self).message()
    self.message_id = msg["Message-ID"]

    for name, value_list in self._headers.iterlists():
        for value in value_list:
            msg.add_header(six.binary_type(name), value)

    return msg
def effective_default(self, field):
    """
    Returns a field's effective database default value
    """
    if field.has_default():
        # If the field has a default value, use it.
        default = field.get_default()
    elif not field.null and field.blank and field.empty_strings_allowed:
        # The field may not be NULL, but Django allows it to be submitted
        # empty (the corresponding input is simply not posted back to the
        # server), so fall back to an empty string/bytes value.
        if field.get_internal_type() == "BinaryField":
            default = six.binary_type()
        else:
            default = six.text_type()
    else:
        default = None
    # If it's a callable, call it
    # (note that the default itself may be a callable).
    if six.callable(default):
        default = default()
    # Run it through the field's get_db_prep_save method so we can send it
    # to the database.
    default = field.get_db_prep_save(default, self.connection)
    return default
def effective_default(self, field):
    """
    Returns a field's effective database default value
    """
    if field.has_default():
        default = field.get_default()
    elif not field.null and field.blank and field.empty_strings_allowed:
        if field.get_internal_type() == "BinaryField":
            default = six.binary_type()
        else:
            default = six.text_type()
    elif getattr(field, 'auto_now', False) or getattr(field, 'auto_now_add', False):
        default = datetime.now()
        internal_type = field.get_internal_type()
        if internal_type == 'DateField':
            default = default.date
        elif internal_type == 'TimeField':
            default = default.time
        elif internal_type == 'DateTimeField':
            default = timezone.now
    else:
        default = None
    # If it's a callable, call it
    if callable(default):
        default = default()
    # Run it through the field's get_db_prep_save method so we can send it
    # to the database.
    default = field.get_db_prep_save(default, self.connection)
    return default
def get_scmtool_class(self):
    if not hasattr(self, "_scmtool_class"):
        path = self.class_name
        i = path.rfind(".")
        module, attr = path[:i], path[i + 1:]

        try:
            mod = __import__(six.binary_type(module), {}, {},
                             [six.binary_type(attr)])
        except ImportError as e:
            raise ImproperlyConfigured(
                'Error importing SCM Tool %s: "%s"' % (module, e))

        try:
            self._scmtool_class = getattr(mod, attr)
        except AttributeError:
            raise ImproperlyConfigured(
                'Module "%s" does not define a "%s" SCM Tool' % (module, attr))

    return self._scmtool_class
def get_policy(self):
    policy_object = {
        "expiration": self.get_expiration_date(),
        "conditions": self.get_conditions(),
    }
    policy_json = json.dumps(policy_object)
    policy_json = policy_json.replace('\n', '').replace('\r', '')
    policy = b64encode(binary_type(policy_json.encode('utf-8')))
    return policy
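# Standalone sketch of the same idea as get_policy above: serialize a policy
# document, strip newlines, and base64-encode the UTF-8 bytes. The expiration
# date and bucket condition are made-up placeholder values, not taken from the
# original class.
import json
from base64 import b64encode

policy_object = {
    "expiration": "2030-01-01T00:00:00Z",          # hypothetical value
    "conditions": [{"bucket": "example-bucket"}],  # hypothetical value
}
policy_json = json.dumps(policy_object).replace('\n', '').replace('\r', '')
policy = b64encode(policy_json.encode('utf-8'))
print(policy.decode('ascii'))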
def message(self):
    msg = super(EmailMessage, self).message()
    self.message_id = msg['Message-ID']

    for name, value_list in self._headers.iterlists():
        for value in value_list:
            msg.add_header(six.binary_type(name), value)

    return msg
def test_streaming_response(self):
    r = StreamingHttpResponse(iter(['hello', 'world']))

    # iterating over the response itself yields bytestring chunks.
    chunks = list(r)
    self.assertEqual(chunks, [b'hello', b'world'])
    for chunk in chunks:
        self.assertIsInstance(chunk, six.binary_type)

    # and the response can only be iterated once.
    self.assertEqual(list(r), [])

    # even when a sequence that can be iterated many times, like a list,
    # is given as content.
    r = StreamingHttpResponse(['abc', 'def'])
    self.assertEqual(list(r), [b'abc', b'def'])
    self.assertEqual(list(r), [])

    # streaming responses don't have a `content` attribute.
    self.assertFalse(hasattr(r, 'content'))

    # and you can't accidentally assign to a `content` attribute.
    with self.assertRaises(AttributeError):
        r.content = 'xyz'

    # but they do have a `streaming_content` attribute.
    self.assertTrue(hasattr(r, 'streaming_content'))

    # that exists so we can check if a response is streaming, and wrap or
    # replace the content iterator.
    r.streaming_content = iter(['abc', 'def'])
    r.streaming_content = (chunk.upper() for chunk in r.streaming_content)
    self.assertEqual(list(r), [b'ABC', b'DEF'])

    # coercing a streaming response to bytes doesn't return a complete HTTP
    # message like a regular response does. it only gives us the headers.
    r = StreamingHttpResponse(iter(['hello', 'world']))
    self.assertEqual(
        six.binary_type(r), b'Content-Type: text/html; charset=utf-8')

    # and this won't consume its content.
    self.assertEqual(list(r), [b'hello', b'world'])

    # additional content cannot be written to the response.
    r = StreamingHttpResponse(iter(['hello', 'world']))
    with self.assertRaises(Exception):
        r.write('!')

    # and we can't tell the current position.
    with self.assertRaises(Exception):
        r.tell()

    r = StreamingHttpResponse(iter(['hello', 'world']))
    self.assertEqual(r.getvalue(), b'helloworld')
def submit(self, client, job):
    # Log tasks to DB, before submitting the batch, as mcpclient then updates them
    Task.bulk_log(self.tasks, job)

    data = {"tasks": {}}
    for task in self.tasks:
        task_uuid = six.text_type(task.uuid)
        data["tasks"][task_uuid] = self.serialize_task(task)

    pickled_data = cPickle.dumps(data)

    self.pending = client.submit_job(
        task=six.binary_type(job.name),
        data=pickled_data,
        unique=six.binary_type(self.uuid),
        wait_until_complete=False,
        background=False,
        max_retries=0,
    )
    logger.debug("Submitted gearman job %s (%s)", self.uuid, job.name)
def to_python(self, value):
    if isinstance(value, six.binary_type):
        return self.decompress(value)
    elif isinstance(value, buffer if six.PY2 else memoryview):
        return self.decompress(six.binary_type(value))
    elif (isinstance(value, six.text_type) and value.startswith(self._prefix)
          and value.endswith(self._suffix)):
        return self.value_from_string(value)
    else:
        return value
def get_pseudo(self, message):
    # remove the context separator (added when using pgettext).
    if CONTEXT_SEPARATOR in message:
        message = message.split(CONTEXT_SEPARATOR)[1]
    # on python 2 we get a unicode unconditionally due to the
    # magic character. We need to cast it back to a
    # Python 2 string (bytes).
    if six.PY2:
        message = six.binary_type(message)
    return self.make_pseudo(message)
def sync_users_remote():
    from storages.backends.azure_storage import AzureStorage
    storage = AzureStorage()
    user_sync = UserMapper()
    with storage.open('saml/etools.dat') as csvfile:
        reader = csv.DictReader(csvfile, delimiter=six.binary_type('|'))
        for row in reader:
            uni_row = {
                six.text_type(key, 'latin-1'): six.text_type(value, 'latin-1')
                for key, value in six.iteritems(row)
            }
            user_sync.create_or_update_user(uni_row)
def respond_with_exception(self, e):
    """
    :param e: Exception object.
    :type e: Exception
    :return: Response with status code of 404 if e is ``Http404`` object,
             else 400.
    :rtype: HttpResponse
    """
    if isinstance(e, Http404):
        status = 404
    else:
        status = getattr(e, "status_code", 400)

    return self.render_to_response(
        self._results_to_context((binary_type(e), False, [])),
        status=status)
def parse_signed_data(cls, signed_request,
                      secret=facebook_settings.FACEBOOK_APP_SECRET):
    '''
    Thanks to
    http://stackoverflow.com/questions/3302946/how-to-base64-url-decode-in-python
    and http://sunilarora.org/parsing-signedrequest-parameter-in-python-bas
    '''
    from open_facebook.utils import base64_url_decode_php_style
    l = signed_request.split('.', 2)
    encoded_sig = l[0]
    payload = l[1]
    from open_facebook.utils import json
    sig = base64_url_decode_php_style(encoded_sig)
    import hmac
    import hashlib
    data = json.loads(smart_str(base64_url_decode_php_style(payload)))

    algo = data.get('algorithm').upper()
    if algo != 'HMAC-SHA256':
        error_format = 'Unknown algorithm we only support HMAC-SHA256 user asked for %s'
        error_message = error_format % algo
        send_warning(error_message)
        logger.error('Unknown algorithm')
        return None
    else:
        expected_sig = hmac.new(six.binary_type(secret, 'utf8'),
                                msg=six.binary_type(payload, 'utf8'),
                                digestmod=hashlib.sha256).digest()

        if sig != expected_sig:
            error_format = 'Signature %s didnt match the expected signature %s'
            error_message = error_format % (sig, expected_sig)
            send_warning(error_message)
            return None
        else:
            logger.debug('valid signed request received..')
            return data
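# Hedged, self-contained sketch of the signature check performed by
# parse_signed_data above, without the project-specific helpers
# (base64_url_decode_php_style, send_warning, smart_str). The secret and
# signed_request values you would pass in are assumed test data, not real
# Facebook credentials.
import base64
import hashlib
import hmac
import json


def b64url_decode(data):
    # PHP-style base64url decoding: restore stripped '=' padding, then decode.
    padding = '=' * (-len(data) % 4)
    return base64.urlsafe_b64decode(data + padding)


def check_signed_request(signed_request, secret):
    encoded_sig, payload = signed_request.split('.', 1)
    expected_sig = hmac.new(secret.encode('utf-8'),
                            msg=payload.encode('utf-8'),
                            digestmod=hashlib.sha256).digest()
    if not hmac.compare_digest(b64url_decode(encoded_sig), expected_sig):
        return None
    return json.loads(b64url_decode(payload).decode('utf-8'))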
def test_update_binary(self):
    CaseTestModel.objects.update(
        binary=Case(
            # fails on postgresql on Python 2.7 if output_field is not
            # set explicitly
            When(integer=1, then=Value(b'one', output_field=models.BinaryField())),
            When(integer=2, then=Value(b'two', output_field=models.BinaryField())),
            default=Value(b'', output_field=models.BinaryField()),
        ),
    )
    self.assertQuerysetEqual(
        CaseTestModel.objects.all().order_by('pk'),
        [(1, b'one'), (2, b'two'), (3, b''), (2, b'two'), (3, b''), (3, b''), (4, b'')],
        transform=lambda o: (o.integer, six.binary_type(o.binary))
    )
def render(self, context):
    page = self.page.resolve(context)
    kwargs = {}

    # Retrieve variable instances from context where necessary
    for argname, argvalue in self.kwargs.items():
        try:
            kwargs[argname] = argvalue.resolve(context)
        except AttributeError:
            kwargs[argname] = argvalue
        except VariableDoesNotExist:
            kwargs[argname] = None

    previous_label = six.binary_type(
        kwargs.get("previous_label", "Previous Page"))
    next_label = six.binary_type(kwargs.get("next_label", "Next Page"))
    previous_title = six.binary_type(
        kwargs.get("previous_title", "Previous Page"))
    next_title = six.binary_type(kwargs.get("next_title", "Next Page"))

    url_view_name = kwargs.get("url_view_name", None)
    if url_view_name is not None:
        url_view_name = six.binary_type(url_view_name)

    url_param_name = six.binary_type(kwargs.get("url_param_name", "page"))
    url_extra_args = kwargs.get("url_extra_args", [])
    url_extra_kwargs = kwargs.get("url_extra_kwargs", {})
    url_get_params = kwargs.get("url_get_params", context['request'].GET)

    previous_page_url = None
    if page.has_previous():
        previous_page_url = get_page_url(page.previous_page_number(),
                                         context.current_app, url_view_name,
                                         url_extra_args, url_extra_kwargs,
                                         url_param_name, url_get_params)

    next_page_url = None
    if page.has_next():
        next_page_url = get_page_url(page.next_page_number(),
                                     context.current_app, url_view_name,
                                     url_extra_args, url_extra_kwargs,
                                     url_param_name, url_get_params)

    return get_template("bootstrap_pagination/pager.html").render(
        Context({
            'page': page,
            'previous_label': previous_label,
            'next_label': next_label,
            'previous_title': previous_title,
            'next_title': next_title,
            'previous_page_url': previous_page_url,
            'next_page_url': next_page_url
        }, autoescape=False))
def get_saml_login_request(binding=BINDING_HTTP_REDIRECT):
    conf = SPConfig()
    conf.load(copy.deepcopy(sp_conf_dict))
    client = Saml2Client(conf)
    if binding == BINDING_HTTP_REDIRECT:
        session_id, result = client.prepare_for_authenticate(
            entityid="test_generic_idp",
            relay_state="",
            binding=binding,
        )
        return parse.parse_qs(
            parse.urlparse(result['headers'][0][1]).query)['SAMLRequest'][0]
    elif binding == BINDING_HTTP_POST:
        session_id, request_xml = client.create_authn_request(
            "http://localhost:9000/idp/sso/post", binding=binding)
        return base64.b64encode(binary_type(request_xml, 'UTF-8'))
def get_db_prep_save(self, value):
    "Returns a value suitable for storage into a CharField"
    if not value:
        return None

    hashed_name = md5(six.binary_type(time.time())).hexdigest() + value.name[-4:]
    image_path = pjoin(self._upload_to, hashed_name)
    dest_name = pjoin(settings.MEDIA_ROOT, image_path)

    directory = pjoin(settings.MEDIA_ROOT, self._upload_to)
    if not os.path.exists(directory):
        os.makedirs(directory)

    with open(dest_name, 'wb+') as dest_file:
        for chunk in value.chunks():
            dest_file.write(chunk)

    return six.text_type(image_path)
def to_python(self, value):
    if value:
        if isinstance(value, dict):
            return value
        try:
            # Needs str
            return json.loads(
                six.text_type(value, encoding="utf-8")
                if isinstance(value, six.binary_type) else value)
        except ValueError:
            try:
                # Needs bytes
                return pickle.loads(
                    six.binary_type(value, encoding="utf-8")
                    if isinstance(value, six.text_type) else value)
            except ValueError:
                return value
    return {}
def test_lazy_object_to_string(self):

    class Klazz(object):
        if six.PY3:
            def __str__(self):
                return "Î am ā Ǩlâzz."

            def __bytes__(self):
                return b"\xc3\x8e am \xc4\x81 binary \xc7\xa8l\xc3\xa2zz."
        else:
            def __unicode__(self):
                return "Î am ā Ǩlâzz."

            def __str__(self):
                return b"\xc3\x8e am \xc4\x81 binary \xc7\xa8l\xc3\xa2zz."

    t = lazy(lambda: Klazz(), Klazz)()
    self.assertEqual(six.text_type(t), "Î am ā Ǩlâzz.")
    self.assertEqual(six.binary_type(t),
                     b"\xc3\x8e am \xc4\x81 binary \xc7\xa8l\xc3\xa2zz.")
def test_middleware_with_rest_framework(self):
    # Test anonymous user.
    self.assertEqual(get_current_user(), None)
    url = reverse('test_app:api_index')
    response = self.client.get(url)
    response_content = json.loads(response.content.decode('utf-8'))
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response_content, six.text_type('AnonymousUser'))
    self.assertEqual(get_current_user(), None)

    # Test logged in user (session auth).
    self.client.login(username=self.user.username, password=self.user_password)
    response = self.client.get(url)
    response_content = json.loads(response.content.decode('utf-8'))
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response_content, six.text_type(self.user))
    self.assertEqual(get_current_user(), None)

    # Test logged in user (basic auth).
    basic_auth = '{0}:{1}'.format(self.user.username, self.user_password)
    basic_auth = six.binary_type(basic_auth.encode('utf-8'))
    basic_auth = base64.b64encode(basic_auth).decode('ascii')
    client_kwargs = {'HTTP_AUTHORIZATION': 'Basic %s' % basic_auth}
    client = Client(**client_kwargs)
    response = client.get(url)
    response_content = json.loads(response.content.decode('utf-8'))
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response_content, six.text_type(self.user))
    self.assertEqual(get_current_user(), None)

    # Test impersonate(None) within view requested by logged in user.
    self.client.login(username=self.user.username, password=self.user_password)
    response = self.client.get(url + '?impersonate=1')
    response_content = json.loads(response.content.decode('utf-8'))
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response_content, six.text_type(None))
    self.assertEqual(get_current_user(), None)

    # Test when request raises exception.
    try:
        response = self.client.get(url + '?raise=1')
    except RuntimeError:
        response = None
    self.assertEqual(response, None)
    self.assertEqual(get_current_user(), None)
def effective_default(self, field):
    """
    Returns a field's effective database default value
    """
    if field.has_default():
        default = field.get_default()
    elif not field.null and field.blank and field.empty_strings_allowed:
        if field.get_internal_type() == "BinaryField":
            default = six.binary_type()
        else:
            default = six.text_type()
    else:
        default = None
    # If it's a callable, call it
    if six.callable(default):
        default = default()
    # Run it through the field's get_db_prep_save method so we can send it
    # to the database.
    default = field.get_db_prep_save(default, self.connection)
    return default
def recompute_svn_backend():
    """Recomputes the SVNTool client backend to use.

    Normally, this is only called once, but it may be used to reset the
    backend for use in testing.
    """
    global Client
    global has_svn_backend

    Client = None
    has_svn_backend = False
    required_module = None

    for backend_path in settings.SVNTOOL_BACKENDS:
        try:
            mod = __import__(six.binary_type(backend_path),
                             fromlist=['Client', 'has_svn_backend'])

            # Check that this is a valid SVN backend.
            if (not hasattr(mod, 'has_svn_backend') or
                    not hasattr(mod, 'Client')):
                logging.error('Attempted to load invalid SVN backend %s',
                              backend_path)
                continue

            has_svn_backend = mod.has_svn_backend

            # We want either the winning SVN backend or the first one to show
            # up in the required module dependencies list.
            if has_svn_backend or not required_module:
                SVNTool.dependencies['modules'] = [mod.Client.required_module]

            if has_svn_backend:
                # We found a suitable backend.
                logging.info('Using %s backend for SVN', backend_path)
                Client = mod.Client
                break
        except ImportError:
            logging.error('Unable to load SVN backend %s',
                          backend_path, exc_info=1)
def process_message(self, msg):
    data = json.loads(six.binary_type(msg.body))
    ts = data.pop('@timestamp')
    data.pop("@version")
    msg = data.pop('message')
    seq = data.pop('seq', 0)
    dt = datetime.strptime(ts, "%Y-%m-%dT%H:%M:%S.%fZ")
    result = {
        'ts': time.mktime(dt.timetuple()),
        'ms': dt.microsecond,
        'seq': seq,
        'message': msg,
        'data': data
    }
    self.messages.append(result)
    d = dt.date()
    if not self.current_date:
        self.current_date = d
    if d != self.current_date:
        self.current_date = d
        self.push_messages()
    if len(self.messages) >= self.block_size:
        self.push_messages()
def test_gearman_task_submission(simple_job, simple_task, mocker):
    # Mock to avoid db writes
    mocker.patch("server.tasks.backends.gearman_backend.Task.bulk_log")
    mocker.patch.object(GearmanTaskBackend, "TASK_BATCH_SIZE", 1)
    mock_client = mocker.patch(
        "server.tasks.backends.gearman_backend.MCPGearmanClient")

    backend = GearmanTaskBackend()
    backend.submit_task(simple_job, simple_task)

    task_data = format_gearman_request([simple_task])

    submit_job_kwargs = mock_client.return_value.submit_job.call_args[1]

    assert submit_job_kwargs["task"] == six.binary_type(simple_job.name)
    # Comparing pickled strings is fragile, so compare the python version
    assert cPickle.loads(submit_job_kwargs["data"]) == cPickle.loads(task_data)
    try:
        uuid.UUID(submit_job_kwargs["unique"])
    except ValueError:
        pytest.fail("Expected unique to be a valid UUID.")
    assert submit_job_kwargs["wait_until_complete"] is False
    assert submit_job_kwargs["background"] is False
    assert submit_job_kwargs["max_retries"] == 0
def get_binay(text):
    if six.PY2:
        return six.binary_type(text)
    else:
        return six.binary_type(text, 'ascii')
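# Minimal usage sketch for the get_binay helper above (assumes it is in scope
# and six is installed). Either branch yields the bytes type that
# six.binary_type refers to; note that on Python 3 the helper encodes as
# ASCII, so non-ASCII text would raise UnicodeEncodeError.
import six

assert get_binay("hello") == b"hello"
assert isinstance(get_binay("hello"), six.binary_type)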
def get_secret_access_key(self):
    return binary_type(self.secret_access_key.encode('utf-8'))
def test_streaming_response(self):
    r = StreamingHttpResponse(iter(["hello", "world"]))

    # iterating over the response itself yields bytestring chunks.
    chunks = list(r)
    self.assertEqual(chunks, [b"hello", b"world"])
    for chunk in chunks:
        self.assertIsInstance(chunk, six.binary_type)

    # and the response can only be iterated once.
    self.assertEqual(list(r), [])

    # even when a sequence that can be iterated many times, like a list,
    # is given as content.
    r = StreamingHttpResponse(["abc", "def"])
    self.assertEqual(list(r), [b"abc", b"def"])
    self.assertEqual(list(r), [])

    # iterating over Unicode strings still yields bytestring chunks.
    r.streaming_content = iter(["hello", "café"])
    chunks = list(r)
    # '\xc3\xa9' == unichr(233).encode('utf-8')
    self.assertEqual(chunks, [b"hello", b"caf\xc3\xa9"])
    for chunk in chunks:
        self.assertIsInstance(chunk, six.binary_type)

    # streaming responses don't have a `content` attribute.
    self.assertFalse(hasattr(r, "content"))

    # and you can't accidentally assign to a `content` attribute.
    with self.assertRaises(AttributeError):
        r.content = "xyz"

    # but they do have a `streaming_content` attribute.
    self.assertTrue(hasattr(r, "streaming_content"))

    # that exists so we can check if a response is streaming, and wrap or
    # replace the content iterator.
    r.streaming_content = iter(["abc", "def"])
    r.streaming_content = (chunk.upper() for chunk in r.streaming_content)
    self.assertEqual(list(r), [b"ABC", b"DEF"])

    # coercing a streaming response to bytes doesn't return a complete HTTP
    # message like a regular response does. it only gives us the headers.
    r = StreamingHttpResponse(iter(["hello", "world"]))
    self.assertEqual(six.binary_type(r),
                     b"Content-Type: text/html; charset=utf-8")

    # and this won't consume its content.
    self.assertEqual(list(r), [b"hello", b"world"])

    # additional content cannot be written to the response.
    r = StreamingHttpResponse(iter(["hello", "world"]))
    with self.assertRaises(Exception):
        r.write("!")

    # and we can't tell the current position.
    with self.assertRaises(Exception):
        r.tell()

    r = StreamingHttpResponse(iter(["hello", "world"]))
    self.assertEqual(r.getvalue(), b"helloworld")
def get_binay(text):
    return six.binary_type(text, 'ascii')
def writedata(self, text):
    try:
        self.viewfile.write(six.binary_type(text, 'UTF-8'))
    except Exception:
        self.viewfile.write(six.binary_type(text))
def generic_relation_factory(
        ct_field='content_type',
        fk_field='object_pk',
        gr_field='content_object',
        manager_attr='objects',
        class_name='GenericRelationModel',
        class_name_blank=None,
        blank=False,
        fk_field_type=None):
    """
    Creates an abstract model with a generic relation key.

    The factory function produces abstract base model classes whose **only**
    relation is a generic foreign key. It's probably not the most flexible
    approach, but it just works in many products, so it should be left here.

    :param ct_field: Name of the ForeignKey field to the
        ``contenttypes.ContentType`` model. Default value is ``content_type``.
    :type ct_field: str
    :param fk_field: Name of the ``object_id`` field. Could be of different
        types. Default value is ``object_pk``.
    :type fk_field: str
    :param gr_field: Name of the ``generic foreign key`` virtual field that
        adds some useful managers. Default is ``content_object``.
    :type gr_field: str
    :param manager_attr: Name of the manager with the generic relations
        feature. Default value is ``objects``, so by default it also replaces
        the default manager with the generic one.
    :type manager_attr: str
    :param class_name: Name of the dynamically created abstract model class
        with required ``content_type`` and ``object_id`` fields. If empty or
        omitted, the name will be generated automatically. Default value is
        ``None``.
    :type class_name: str|NoneType
    :param class_name_blank: Name of the dynamically created abstract model
        class with optional (i.e. blank and nullable) content_type and
        object_id fields. If empty or omitted, the name will be generated
        automatically. Default value is ``None``.
    :type class_name_blank: str|NoneType
    :param blank: Whether the generic foreign key should be an optional field
        (i.e. nullable and blank=True). Default value is ``False``.
    :type blank: bool
    :param fk_field_type:
    :type fk_field_type: models.Field|NoneType
    :return:
    """
    fk_field_type = fk_field_type or models.CharField(max_length=255)

    class Meta(object):
        abstract = True
        index_together = [
            (ct_field, fk_field)
        ]

    if blank:
        class_name = class_name_blank or 'Blank{0}'.format(class_name)

    if not six.PY3:
        class_name = six.binary_type(class_name)

    docstring = DOCSTRING.format(ct_field=ct_field, fk_field=fk_field,
                                 gr_field=gr_field, class_name=class_name)

    return type(class_name, (models.Model, ), {
        gr_field: GenericRelationField(
            replace_manager=True,
            ct_field=ct_field,
            fk_field=fk_field,
            fk_field_type=fk_field_type,
            manager_attr_name='objects',
            blank=blank,
        ),
        manager_attr: GenericRelationManager(),
        'Meta': Meta,
        '__module__': __name__,
        '__doc__': docstring
    })
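# Hedged usage sketch for generic_relation_factory above, following only what
# its docstring documents. The Comment model and its text field are made-up
# examples, and this assumes a configured Django project where the factory and
# its GenericRelationField/GenericRelationManager dependencies are importable.
from django.db import models

CommentBase = generic_relation_factory(
    ct_field='content_type',
    fk_field='object_pk',
    gr_field='content_object',
    class_name='CommentBase',
)


class Comment(CommentBase):
    # Concrete model inheriting the generic foreign key machinery from the
    # dynamically built abstract base class.
    text = models.TextField()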