def itob(i):
    """Pack an integer in 0..65535 into two bytes, little endian.

    :param i: value to pack (raises ``struct.error`` if out of range)
    :rtype: bytes
    """
    # struct splits the value into low/high bytes directly.  The previous
    # implementation used ``int(i / 256)``: true (float) division, which
    # silently loses precision for very large integers.
    import struct
    return struct.pack('<H', i)
def get_graphics_control_ext(self, duration=0.1, dispose=2,
                             transparent_flag=0, transparency_index=0):
    """ Graphics Control Extension. A sort of header at the start of
    each image. Specifies duration and transparency.

    Dispose
    -------
      * 0 - No disposal specified.
      * 1 - Do not dispose. The graphic is to be left in place.
      * 2 - Restore to background color. The area used by the graphic
        must be restored to the background color.
      * 3 - Restore to previous. The decoder is required to restore the
        area overwritten by the graphic with what was there prior to
        rendering the graphic.
      * 4-7 - To be defined.
    """
    # FIX: the block was previously built as a text ``str`` and then had
    # ``bytes`` appended to it, which raises TypeError on Python 3.  All
    # literals are now bytes, which also works on Python 2.
    bb = b'\x21\xF9\x04'
    # Packed fields byte: bits 2-4 hold the disposal method (low two bits
    # of ``dispose`` used), bit 0 is the transparency flag.
    bb += six.int2byte(((dispose & 3) << 2) | (transparent_flag & 1))
    bb += itob(int(duration * 100))          # delay in 1/100ths of a second
    bb += six.int2byte(transparency_index)   # transparent color index
    bb += b'\x00'                            # block terminator
    return bb
class FernetSigner(Signer):
    """Sign and verify values in a Fernet-like binary format.

    A signed blob is laid out as: 1 version byte (0x80), an 8-byte
    big-endian integer timestamp, the payload, and an HMAC digest of
    everything preceding it (see :meth:`sign`).
    """
    # Version marker prepended to every payload; checked on unsign.
    version = six.int2byte(0x80)

    def __init__(self, key=None):
        """
        :type key: any
        :rtype: None
        """
        # HMAC-SHA256 is used for the signature.
        self.digest = hashes.SHA256()
        # Fall back to the project's SECRET_KEY when no key is given.
        self.key = force_bytes(key or settings.SECRET_KEY)

    def signature(self, value):
        """
        Build (but do not finalize) the HMAC over ``value``.

        :type value: any
        :rtype: HMAC
        """
        h = HMAC(self.key, self.digest, backend=settings.CRYPTOGRAPHY_BACKEND)
        h.update(force_bytes(value))
        return h

    def sign(self, value):
        """
        Produce ``version + timestamp + value + HMAC(header + value)``.

        :type value: any
        :rtype: bytes
        """
        # '>cQ' = 1 version byte followed by an 8-byte unsigned timestamp.
        payload = struct.pack('>cQ', self.version, int(time.time()))
        payload += force_bytes(value)
        return payload + self.signature(payload).finalize()

    def unsign(self, signed_value, ttl=None):
        """
        Retrieve original value and check it wasn't signed more
        than max_age seconds ago.

        :type signed_value: bytes
        :type ttl: int | datetime.timedelta
        """
        # Split off the fixed-size header and trailing digest; whatever
        # remains in the middle is the original value.
        h_size, d_size = struct.calcsize('>cQ'), self.digest.digest_size
        fmt = '>cQ%ds%ds' % (len(signed_value) - h_size - d_size, d_size)
        try:
            version, timestamp, value, sig = struct.unpack(fmt, signed_value)
        except struct.error:
            raise BadSignature('Signature is not valid')
        if version != self.version:
            raise BadSignature('Signature version not supported')
        if ttl is not None:
            if isinstance(ttl, datetime.timedelta):
                ttl = ttl.total_seconds()
            # Check timestamp is not older than ttl.  ``abs`` also rejects
            # timestamps from the future beyond the allowed clock skew.
            age = abs(time.time() - timestamp)
            if age > ttl + _MAX_CLOCK_SKEW:
                raise SignatureExpired('Signature age %s > %s seconds' % (age, ttl))
        try:
            # Recompute the HMAC over everything except the digest itself.
            self.signature(signed_value[:-d_size]).verify(sig)
        except InvalidSignature:
            raise BadSignature('Signature "%s" does not match' %
                               binascii.b2a_base64(sig))
        return value
def generate_key():
    """Generate a random API key.

    :rtype: str
    :returns: a 43-character string -- the SHA-256 digest of 32 random
        bytes, base64-encoded with a randomly chosen alternative-character
        pair and the trailing ``=`` padding stripped.
    """
    # 32 * 8 = 256 random bits.  Use ``secrets`` (a CSPRNG) instead of the
    # predictable ``random`` module: this value becomes a credential.
    random_bytes = secrets.token_bytes(32)
    random_hash = hashlib.sha256(random_bytes).digest()
    # Alternative characters substituted for '+' and '/' in the b64 output.
    replacements = [b'rA', b'aZ', b'gQ', b'hH', b'hG', b'aR', b'DD']
    random_repl = secrets.choice(replacements)
    return base64.b64encode(random_hash, random_repl).rstrip(b'=').decode('utf-8')
def from_hex(cls, hexstring):
    """Create a BitVector instance from a hexstring.

    Characters not matched by ``cls._hexdigits`` (e.g. separators) are
    stripped before decoding.

    :raises ValueError: if an odd number of hex digits remains.
    """
    hexstring = cls._hexdigits.sub("", hexstring)
    if len(hexstring) % 2 != 0:
        raise ValueError("hexstring must contain an even number of digits")
    # binascii converts the whole string in one C-level pass, replacing
    # the manual per-octet int()/int2byte() loop.
    import binascii
    octetstring = binascii.unhexlify(hexstring)
    return cls(octetstring)
def generate_key():
    """Generate a random API key.

    :rtype: str
    :returns: 43-character key: base64 (with random alt-chars, padding
        stripped) of the SHA-256 digest of 32 random bytes.
    """
    # 32 * 8 = 256 random bits from the OS CSPRNG.  ``random`` was used
    # before, but it is predictable and unsuitable for credentials.
    random_bytes = secrets.token_bytes(32)
    random_hash = hashlib.sha256(random_bytes).digest()
    # Substitutes for '+' and '/' in the base64 alphabet.
    replacements = [b'rA', b'aZ', b'gQ', b'hH', b'hG', b'aR', b'DD']
    random_repl = secrets.choice(replacements)
    return base64.b64encode(random_hash, random_repl).rstrip(b'=').decode('utf-8')
class StreamingTest(SimpleTestCase):
    """
    Tests streaming.
    """
    short_string = b"This string is too short to be worth compressing."
    compressible_string = b'a' * 500
    # 500 uniformly random bytes: a statistically incompressible fixture.
    incompressible_string = b''.join(
        six.int2byte(random.randint(0, 255)) for _ in range(500))
    sequence = [b'a' * 500, b'b' * 200, b'a' * 300]
    sequence_unicode = [u'a' * 500, u'é' * 200, u'a' * 300]
    request_factory = RequestFactory()

    def setUp(self):
        # Request advertising Brotli support ('br' in Accept-Encoding).
        self.req = self.request_factory.get('/')
        self.req.META['HTTP_ACCEPT_ENCODING'] = 'gzip, deflate, br'
        self.req.META[
            'HTTP_USER_AGENT'] = 'Mozilla/5.0 (Windows NT 5.1; rv:9.0.1) Gecko/20100101 Firefox/9.0.1'
        # Plain (non-streaming) compressible response.
        self.resp = HttpResponse()
        self.resp.status_code = 200
        self.resp.content = self.compressible_string
        self.resp['Content-Type'] = 'text/html; charset=UTF-8'
        # Streaming responses: one with bytes, one with unicode chunks.
        self.stream_resp = StreamingHttpResponse(self.sequence)
        self.stream_resp['Content-Type'] = 'text/html; charset=UTF-8'
        self.stream_resp_unicode = StreamingHttpResponse(self.sequence_unicode)
        self.stream_resp_unicode['Content-Type'] = 'text/html; charset=UTF-8'

    def test_compress_streaming_response(self):
        """
        Compression is performed on responses with streaming content.
        """
        r = CompressionMiddleware().process_response(self.req, self.stream_resp)
        # The joined stream must brotli-decompress back to the original data.
        self.assertEqual(brotli.decompress(b''.join(r)), b''.join(self.sequence))
        self.assertEqual(r.get('Content-Encoding'), 'br')
        # Streaming responses have no known length up front.
        self.assertFalse(r.has_header('Content-Length'))
        self.assertEqual(r.get('Vary'), 'Accept-Encoding')

    def test_compress_streaming_response_unicode(self):
        """
        Compression is performed on responses with streaming Unicode content.
        """
        r = CompressionMiddleware().process_response(self.req,
                                                     self.stream_resp_unicode)
        # Unicode chunks are compared against their UTF-8 encoding.
        self.assertEqual(
            brotli.decompress(b''.join(r)),
            b''.join(x.encode('utf-8') for x in self.sequence_unicode))
        self.assertEqual(r.get('Content-Encoding'), 'br')
        self.assertFalse(r.has_header('Content-Length'))
        self.assertEqual(r.get('Vary'), 'Accept-Encoding')
def b32_encode(ref, length=5):
    """
    Encode a number as a b32 string.

    :param ref: Number to encode
    :type ref: int
    :param length: Number of bytes to encode
    :type length: int
    :return: Base32 encoded string
    :rtype: str
    """
    # Serialize ``ref`` little-endian into ``length`` bytes in one pass
    # (bits above ``length`` bytes are silently truncated, as before).
    # bytearray avoids the repeated bytes-concatenation of the old loop.
    part = bytes(bytearray((ref >> (8 * i)) & 0xFF for i in range(length)))
    # Encode the byte string in base32, dropping the '=' padding.
    return base64.b32encode(part).decode().rstrip("=")
class GZipMiddlewareTest(TestCase):
    """
    Tests the GZip middleware.
    """
    short_string = b"This string is too short to be worth compressing."
    compressible_string = b'a' * 500
    # 500 uniformly random bytes -- gzip cannot shrink this, so it
    # exercises the "leave the body uncompressed" path.
    uncompressible_string = b''.join(
        six.int2byte(random.randint(0, 255)) for _ in xrange(500))
    sequence = [b'a' * 500, b'b' * 200, b'a' * 300]

    def setUp(self):
        # Minimal hand-built GET request that advertises gzip support.
        self.req = HttpRequest()
        self.req.META = {
            'SERVER_NAME': 'testserver',
            'SERVER_PORT': 80,
        }
        self.req.path = self.req.path_info = "/"
        self.req.META['HTTP_ACCEPT_ENCODING'] = 'gzip, deflate'
        self.req.META[
            'HTTP_USER_AGENT'] = 'Mozilla/5.0 (Windows NT 5.1; rv:9.0.1) Gecko/20100101 Firefox/9.0.1'
        # A plain compressible response plus a streaming one.
        self.resp = HttpResponse()
        self.resp.status_code = 200
        self.resp.content = self.compressible_string
        self.resp['Content-Type'] = 'text/html; charset=UTF-8'
        self.stream_resp = StreamingHttpResponse(self.sequence)
        self.stream_resp['Content-Type'] = 'text/html; charset=UTF-8'

    @staticmethod
    def decompress(gzipped_string):
        # Helper: gunzip a byte string produced by the middleware.
        return gzip.GzipFile(mode='rb', fileobj=BytesIO(gzipped_string)).read()

    def test_compress_response(self):
        """
        Tests that compression is performed on responses with compressible
        content.
        """
        r = GZipMiddleware().process_response(self.req, self.resp)
        self.assertEqual(self.decompress(r.content), self.compressible_string)
        self.assertEqual(r.get('Content-Encoding'), 'gzip')
        # Content-Length must reflect the compressed body.
        self.assertEqual(r.get('Content-Length'), str(len(r.content)))

    def test_compress_streaming_response(self):
        """
        Tests that compression is performed on responses with streaming
        content.
        """
        r = GZipMiddleware().process_response(self.req, self.stream_resp)
        self.assertEqual(self.decompress(b''.join(r)), b''.join(self.sequence))
        self.assertEqual(r.get('Content-Encoding'), 'gzip')
        # Streaming responses have no known length up front.
        self.assertFalse(r.has_header('Content-Length'))

    def test_compress_non_200_response(self):
        """
        Tests that compression is performed on responses with a status other
        than 200. See #10762.
        """
        self.resp.status_code = 404
        r = GZipMiddleware().process_response(self.req, self.resp)
        self.assertEqual(self.decompress(r.content), self.compressible_string)
        self.assertEqual(r.get('Content-Encoding'), 'gzip')

    def test_no_compress_short_response(self):
        """
        Tests that compression isn't performed on responses with short
        content.
        """
        self.resp.content = self.short_string
        r = GZipMiddleware().process_response(self.req, self.resp)
        self.assertEqual(r.content, self.short_string)
        self.assertEqual(r.get('Content-Encoding'), None)

    def test_no_compress_compressed_response(self):
        """
        Tests that compression isn't performed on responses that are
        already compressed.
        """
        self.resp['Content-Encoding'] = 'deflate'
        r = GZipMiddleware().process_response(self.req, self.resp)
        self.assertEqual(r.content, self.compressible_string)
        self.assertEqual(r.get('Content-Encoding'), 'deflate')

    def test_no_compress_ie_js_requests(self):
        """
        Tests that compression isn't performed on JavaScript requests from
        Internet Explorer.
        """
        self.req.META[
            'HTTP_USER_AGENT'] = 'Mozilla/4.0 (compatible; MSIE 5.00; Windows 98)'
        self.resp['Content-Type'] = 'application/javascript; charset=UTF-8'
        r = GZipMiddleware().process_response(self.req, self.resp)
        self.assertEqual(r.content, self.compressible_string)
        self.assertEqual(r.get('Content-Encoding'), None)

    def test_no_compress_uncompressible_response(self):
        """
        Tests that compression isn't performed on responses with
        uncompressible content.
        """
        self.resp.content = self.uncompressible_string
        r = GZipMiddleware().process_response(self.req, self.resp)
        self.assertEqual(r.content, self.uncompressible_string)
        self.assertEqual(r.get('Content-Encoding'), None)
class GZipMiddlewareTest(SimpleTestCase):
    """
    Tests the GZipMiddleware.
    """
    short_string = b"This string is too short to be worth compressing."
    compressible_string = b'a' * 500
    # 500 uniformly random bytes -- statistically incompressible, used to
    # exercise the "leave the body uncompressed" path.
    incompressible_string = b''.join(
        six.int2byte(random.randint(0, 255)) for _ in range(500))
    sequence = [b'a' * 500, b'b' * 200, b'a' * 300]
    sequence_unicode = ['a' * 500, 'é' * 200, 'a' * 300]
    request_factory = RequestFactory()

    def setUp(self):
        # GET request that advertises gzip support.
        self.req = self.request_factory.get('/')
        self.req.META['HTTP_ACCEPT_ENCODING'] = 'gzip, deflate'
        self.req.META[
            'HTTP_USER_AGENT'] = 'Mozilla/5.0 (Windows NT 5.1; rv:9.0.1) Gecko/20100101 Firefox/9.0.1'
        # Plain compressible response.
        self.resp = HttpResponse()
        self.resp.status_code = 200
        self.resp.content = self.compressible_string
        self.resp['Content-Type'] = 'text/html; charset=UTF-8'
        # Streaming responses: bytes chunks and unicode chunks.
        self.stream_resp = StreamingHttpResponse(self.sequence)
        self.stream_resp['Content-Type'] = 'text/html; charset=UTF-8'
        self.stream_resp_unicode = StreamingHttpResponse(self.sequence_unicode)
        self.stream_resp_unicode['Content-Type'] = 'text/html; charset=UTF-8'

    @staticmethod
    def decompress(gzipped_string):
        # Helper: gunzip a byte string produced by the middleware.
        with gzip.GzipFile(mode='rb', fileobj=BytesIO(gzipped_string)) as f:
            return f.read()

    @staticmethod
    def get_mtime(gzipped_string):
        # Extract the mtime field from a gzip header.
        with gzip.GzipFile(mode='rb', fileobj=BytesIO(gzipped_string)) as f:
            f.read()  # must read the data before accessing the header
            return f.mtime

    def test_compress_response(self):
        """
        Compression is performed on responses with compressible content.
        """
        r = GZipMiddleware().process_response(self.req, self.resp)
        self.assertEqual(self.decompress(r.content), self.compressible_string)
        self.assertEqual(r.get('Content-Encoding'), 'gzip')
        # Content-Length must reflect the compressed body.
        self.assertEqual(r.get('Content-Length'), str(len(r.content)))

    def test_compress_streaming_response(self):
        """
        Compression is performed on responses with streaming content.
        """
        r = GZipMiddleware().process_response(self.req, self.stream_resp)
        self.assertEqual(self.decompress(b''.join(r)), b''.join(self.sequence))
        self.assertEqual(r.get('Content-Encoding'), 'gzip')
        # Streaming responses have no known length up front.
        self.assertFalse(r.has_header('Content-Length'))

    def test_compress_streaming_response_unicode(self):
        """
        Compression is performed on responses with streaming Unicode content.
        """
        r = GZipMiddleware().process_response(self.req,
                                              self.stream_resp_unicode)
        # Unicode chunks are compared against their UTF-8 encoding.
        self.assertEqual(
            self.decompress(b''.join(r)),
            b''.join(x.encode('utf-8') for x in self.sequence_unicode))
        self.assertEqual(r.get('Content-Encoding'), 'gzip')
        self.assertFalse(r.has_header('Content-Length'))

    def test_compress_file_response(self):
        """
        Compression is performed on FileResponse.
        """
        # This test file itself is used as the served payload.
        with open(__file__, 'rb') as file1:
            file_resp = FileResponse(file1)
            file_resp['Content-Type'] = 'text/html; charset=UTF-8'
            r = GZipMiddleware().process_response(self.req, file_resp)
            with open(__file__, 'rb') as file2:
                self.assertEqual(self.decompress(b''.join(r)), file2.read())
            self.assertEqual(r.get('Content-Encoding'), 'gzip')
            # The middleware must wrap, not reuse, the original file object.
            self.assertIsNot(r.file_to_stream, file1)

    def test_compress_non_200_response(self):
        """
        Compression is performed on responses with a status other than 200
        (#10762).
        """
        self.resp.status_code = 404
        r = GZipMiddleware().process_response(self.req, self.resp)
        self.assertEqual(self.decompress(r.content), self.compressible_string)
        self.assertEqual(r.get('Content-Encoding'), 'gzip')

    def test_no_compress_short_response(self):
        """
        Compression isn't performed on responses with short content.
        """
        self.resp.content = self.short_string
        r = GZipMiddleware().process_response(self.req, self.resp)
        self.assertEqual(r.content, self.short_string)
        self.assertIsNone(r.get('Content-Encoding'))

    def test_no_compress_compressed_response(self):
        """
        Compression isn't performed on responses that are already compressed.
        """
        self.resp['Content-Encoding'] = 'deflate'
        r = GZipMiddleware().process_response(self.req, self.resp)
        self.assertEqual(r.content, self.compressible_string)
        self.assertEqual(r.get('Content-Encoding'), 'deflate')

    def test_no_compress_incompressible_response(self):
        """
        Compression isn't performed on responses with incompressible content.
        """
        self.resp.content = self.incompressible_string
        r = GZipMiddleware().process_response(self.req, self.resp)
        self.assertEqual(r.content, self.incompressible_string)
        self.assertIsNone(r.get('Content-Encoding'))

    def test_compress_deterministic(self):
        """
        Compression results are the same for the same content and don't
        include a modification time (since that would make the results
        of compression non-deterministic and prevent
        ConditionalGetMiddleware from recognizing conditional matches
        on gzipped content).
        """
        r1 = GZipMiddleware().process_response(self.req, self.resp)
        r2 = GZipMiddleware().process_response(self.req, self.resp)
        self.assertEqual(r1.content, r2.content)
        # mtime must be zeroed in the gzip header for determinism.
        self.assertEqual(self.get_mtime(r1.content), 0)
        self.assertEqual(self.get_mtime(r2.content), 0)
class GZipMiddlewareTest(SimpleTestCase):
    """
    Tests the GZip middleware.
    """
    short_string = b"This string is too short to be worth compressing."
    compressible_string = b'a' * 500
    # 500 uniformly random bytes -- gzip cannot shrink this, so it
    # exercises the "leave the body uncompressed" path.
    uncompressible_string = b''.join(
        six.int2byte(random.randint(0, 255)) for _ in range(500))
    sequence = [b'a' * 500, b'b' * 200, b'a' * 300]
    sequence_unicode = ['a' * 500, 'é' * 200, 'a' * 300]

    def setUp(self):
        # GET request that advertises gzip support.
        self.req = RequestFactory().get('/')
        self.req.META['HTTP_ACCEPT_ENCODING'] = 'gzip, deflate'
        self.req.META[
            'HTTP_USER_AGENT'] = 'Mozilla/5.0 (Windows NT 5.1; rv:9.0.1) Gecko/20100101 Firefox/9.0.1'
        # Plain compressible response.
        self.resp = HttpResponse()
        self.resp.status_code = 200
        self.resp.content = self.compressible_string
        self.resp['Content-Type'] = 'text/html; charset=UTF-8'
        # Streaming responses: bytes chunks and unicode chunks.
        self.stream_resp = StreamingHttpResponse(self.sequence)
        self.stream_resp['Content-Type'] = 'text/html; charset=UTF-8'
        self.stream_resp_unicode = StreamingHttpResponse(self.sequence_unicode)
        self.stream_resp_unicode['Content-Type'] = 'text/html; charset=UTF-8'

    @staticmethod
    def decompress(gzipped_string):
        # Helper: gunzip a byte string produced by the middleware.
        with gzip.GzipFile(mode='rb', fileobj=BytesIO(gzipped_string)) as f:
            return f.read()

    def test_compress_response(self):
        """
        Tests that compression is performed on responses with compressible
        content.
        """
        r = GZipMiddleware().process_response(self.req, self.resp)
        self.assertEqual(self.decompress(r.content), self.compressible_string)
        self.assertEqual(r.get('Content-Encoding'), 'gzip')
        # Content-Length must reflect the compressed body.
        self.assertEqual(r.get('Content-Length'), str(len(r.content)))

    def test_compress_streaming_response(self):
        """
        Tests that compression is performed on responses with streaming
        content.
        """
        r = GZipMiddleware().process_response(self.req, self.stream_resp)
        self.assertEqual(self.decompress(b''.join(r)), b''.join(self.sequence))
        self.assertEqual(r.get('Content-Encoding'), 'gzip')
        # Streaming responses have no known length up front.
        self.assertFalse(r.has_header('Content-Length'))

    def test_compress_streaming_response_unicode(self):
        """
        Tests that compression is performed on responses with streaming
        Unicode content.
        """
        r = GZipMiddleware().process_response(self.req,
                                              self.stream_resp_unicode)
        # Unicode chunks are compared against their UTF-8 encoding.
        self.assertEqual(
            self.decompress(b''.join(r)),
            b''.join(x.encode('utf-8') for x in self.sequence_unicode))
        self.assertEqual(r.get('Content-Encoding'), 'gzip')
        self.assertFalse(r.has_header('Content-Length'))

    def test_compress_file_response(self):
        """
        Tests that compression is performed on FileResponse.
        """
        # This test file itself is used as the served payload.
        open_file = lambda: open(__file__, 'rb')
        with open_file() as file1:
            file_resp = FileResponse(file1)
            file_resp['Content-Type'] = 'text/html; charset=UTF-8'
            r = GZipMiddleware().process_response(self.req, file_resp)
            with open_file() as file2:
                self.assertEqual(self.decompress(b''.join(r)), file2.read())
            self.assertEqual(r.get('Content-Encoding'), 'gzip')
            # The middleware must wrap, not reuse, the original file object.
            self.assertIsNot(r.file_to_stream, file1)

    def test_compress_non_200_response(self):
        """
        Tests that compression is performed on responses with a status other
        than 200. See #10762.
        """
        self.resp.status_code = 404
        r = GZipMiddleware().process_response(self.req, self.resp)
        self.assertEqual(self.decompress(r.content), self.compressible_string)
        self.assertEqual(r.get('Content-Encoding'), 'gzip')

    def test_no_compress_short_response(self):
        """
        Tests that compression isn't performed on responses with short
        content.
        """
        self.resp.content = self.short_string
        r = GZipMiddleware().process_response(self.req, self.resp)
        self.assertEqual(r.content, self.short_string)
        self.assertEqual(r.get('Content-Encoding'), None)

    def test_no_compress_compressed_response(self):
        """
        Tests that compression isn't performed on responses that are
        already compressed.
        """
        self.resp['Content-Encoding'] = 'deflate'
        r = GZipMiddleware().process_response(self.req, self.resp)
        self.assertEqual(r.content, self.compressible_string)
        self.assertEqual(r.get('Content-Encoding'), 'deflate')

    def test_no_compress_uncompressible_response(self):
        """
        Tests that compression isn't performed on responses with
        uncompressible content.
        """
        self.resp.content = self.uncompressible_string
        r = GZipMiddleware().process_response(self.req, self.resp)
        self.assertEqual(r.content, self.uncompressible_string)
        self.assertEqual(r.get('Content-Encoding'), None)
class EncryptedTextField(CalculatedField, models.BinaryField):
    """Text field stored encrypted as binary data.

    Plaintext is UTF-8 encoded, left-padded with NUL bytes to a multiple
    of 16, and encrypted with the configured cipher (AES by default).
    Serialized values are base64 strings wrapped in ``_prefix``/``_suffix``
    markers so they can be recognized in fixtures.
    """
    description = _('Encrypted text')

    # NUL byte used to pad plaintext to the cipher block size.
    _pad = six.int2byte(0)
    # Markers that identify a serialized (string) encrypted value.
    _prefix = '[{(#'
    _suffix = '#)}]'

    @cached_property
    def _cipher(self):
        # NOTE(review): ``cipher.new(key)`` with no mode/IV uses the
        # library default mode (ECB for PyCrypto), so equal plaintexts
        # give equal ciphertexts -- confirm this is acceptable.
        key = self.get_secret_key()
        return self.cipher.new(key)

    @cached_property
    def _prefix_len(self):
        return len(self._prefix)

    @cached_property
    def _suffix_len(self):
        return len(self._suffix)

    def __init__(self, *args, **kwargs):
        self.cipher = kwargs.pop('cipher', AES)
        editable = kwargs.pop('editable', True)
        self.min_length = kwargs.pop('min_length', None)
        self.secret_key = kwargs.pop('secret_key', None)
        super(EncryptedTextField, self).__init__(*args, **kwargs)
        self.editable = editable  # BinaryField sets editable == False
        if self.min_length is not None:
            self.validators.append(MinLengthValidator(self.min_length))

    def check(self, **kwargs):
        errors = super(EncryptedTextField, self).check(**kwargs)
        errors.extend(self._check_max_length_attribute(**kwargs))
        errors.extend(self._check_min_length_attribute(**kwargs))
        return errors

    _check_max_length_attribute = check_max_length_attribute
    _check_min_length_attribute = check_min_length_attribute

    def deconstruct(self):
        # Skip BinaryField.deconstruct so our own kwargs survive migrations.
        name, path, args, kwargs = super(models.BinaryField, self).deconstruct()
        path = path.replace('yepes.fields.encrypted', 'yepes.fields')
        clean_keywords(self, kwargs, variables={
            'calculated': False,
            'cipher': AES,
            'editable': True,
            'min_length': None,
            'secret_key': None,
        })
        return name, path, args, kwargs

    def decrypt(self, encrypted):
        """Decrypt ``encrypted`` and return the unicode plaintext."""
        decrypted = self._cipher.decrypt(force_bytes(encrypted))
        # lstrip removes the NUL padding; a plaintext that itself starts
        # with NUL bytes would lose them here.
        return decrypted.lstrip(self._pad).decode('utf-8')

    def encrypt(self, text):
        """UTF-8 encode ``text``, pad to 16 bytes and encrypt it."""
        # Renamed from ``bytes`` to avoid shadowing the builtin.
        data = force_text(text).encode('utf-8')
        # Pad up to the next multiple of 16 (0 when already aligned).
        up_to_sixteen = 16 - ((len(data) % 16) or 16)
        data = (self._pad * up_to_sixteen) + data
        return self._cipher.encrypt(data)

    def formfield(self, **kwargs):
        kwargs.setdefault('form_class', forms.CharField)
        kwargs.setdefault('widget', forms.Textarea)
        kwargs.setdefault('min_length', self.min_length)
        kwargs.setdefault('max_length', self.max_length)
        return super(EncryptedTextField, self).formfield(**kwargs)

    def from_db_value(self, value, expression, connection, context):
        if value is None:
            return value
        else:
            return self.decrypt(value)

    def get_lookup(self, lookup_name):
        # Only exact/in lookups make sense on opaque ciphertext.
        if lookup_name == 'exact':
            return Exact
        elif lookup_name == 'in':
            return In
        else:
            return None

    def get_prep_value(self, value):
        value = models.Field.get_prep_value(self, value)
        if value is None:
            return value
        else:
            return self.encrypt(value)

    def get_secret_key(self):
        """Return a cipher key of a valid length for ``self.cipher``."""
        if self.secret_key:
            key = force_bytes(self.secret_key)
            if self.cipher in CIPHER_KEY_LENGTHS:
                valid_lengths = CIPHER_KEY_LENGTHS[self.cipher]
                if len(key) not in valid_lengths:
                    raise InvalidLengthError(reversed(valid_lengths))
            return key
        else:
            key = force_bytes(settings.SECRET_KEY)
            key_length = len(key)
            if self.cipher in CIPHER_KEY_LENGTHS:
                # Try to return the longest valid key.
                for length in CIPHER_KEY_LENGTHS[self.cipher]:
                    if key_length >= length:
                        return key[-length:]
                else:
                    raise TooShortError()
            else:
                return key[0:16]  # Cut key to most common length.

    def to_python(self, value):
        if isinstance(value, six.binary_type):
            return self.decrypt(value)
        elif isinstance(value, buffer if six.PY2 else memoryview):
            return self.decrypt(six.binary_type(value))
        elif (isinstance(value, six.text_type)
                and value.startswith(self._prefix)
                and value.endswith(self._suffix)):
            # FIX: ``value_from_string()`` was called without its required
            # ``string`` argument, raising TypeError for serialized values.
            return self.value_from_string(value)
        else:
            return value

    def value_from_string(self, string):
        """Decode a ``_prefix``/``_suffix``-wrapped base64 serialization."""
        encrypted_value = string[self._prefix_len:-self._suffix_len]
        return self.decrypt(b64decode(encrypted_value.encode('ascii')))

    def value_to_string(self, obj):
        """Serialize the field value as a wrapped base64 string."""
        value = self._get_val_from_obj(obj)
        encrypted_value = b64encode(self.encrypt(value)).decode('ascii')
        # FIX: ``str.join`` takes a single iterable; passing three
        # separate arguments raised TypeError.
        return ''.join((self._prefix, encrypted_value, self._suffix))