def store_group(self, url, group):
    """Associate *url* with *group* in the backing store.

    Writes the group hash onto the URL record, then the group name and
    the URL (under both its own hash and a generic ``url`` key) onto
    the group record.
    """
    url_hash = generate_hash_for('url', url)
    group_hash = generate_hash_for('group', group)
    hset = self.database.hset
    hset(url_hash, 'group', str_to_bytes(group_hash))
    hset(group_hash, 'name', str_to_bytes(group))
    # NOTE(review): the URL is stored twice on the group record (keyed by
    # hash and by 'url') — presumably intentional; confirm against readers.
    hset(group_hash, url_hash, str_to_bytes(url))
    hset(group_hash, 'url', str_to_bytes(url))
def store_content_type(self, url_hash, value):
    """Split a ``Content-Type`` header *value* and store its parts.

    Stores the lower-cased media type under ``content-type`` and the
    charset (if any) under ``charset`` on the URL record.

    Handles any number of ``;``-separated parameters — e.g. the weird
    ``text/html;h5ai=0.20;charset=UTF-8`` — by taking the first item as
    the media type and the last one as the charset parameter.  The
    original two-way/three-way unpacking raised ``ValueError`` for any
    other parameter count.
    """
    parts = value.split(';')
    content_type = parts[0]
    charset = parts[-1]
    value = content_type.strip().lower()
    # Parameters usually look like ``charset=UTF-8``; keep only the value.
    if '=' in charset:
        _, charset = charset.split('=', 1)
    self.database.hset(url_hash, 'charset',
                       str_to_bytes(charset.strip().lower()))
    self.database.hset(url_hash, 'content-type', str_to_bytes(value))
def _unpack(self, payload, sep=str_to_bytes('\x00\x01')):
    """Decode a base64 signed message into its named fields.

    Layout after decoding: ``signer <sep> signature <sep> content_type
    <sep> content_encoding <sep> body``.
    """
    decoded = b64decode(ensure_bytes(payload))
    sep_len = len(sep)
    signer_end = decoded.find(sep)
    signer = decoded[:signer_end]
    cert = self._cert_store[signer]
    # shift 3 bits right to get signature length:
    # a 2048-bit RSA key signs 256 bytes, a 4096-bit key 512 bytes.
    sig_len = cert.get_pubkey().key_size >> 3
    sig_start = signer_end + sep_len
    sig_end = sig_start + sig_len
    signature = decoded[sig_start:sig_end]
    fields = decoded[sig_end + sep_len:].split(sep)
    return {
        'signer': signer,
        'signature': signature,
        'content_type': bytes_to_str(fields[0]),
        'content_encoding': bytes_to_str(fields[1]),
        'body': bytes_to_str(fields[2]),
    }
def make_request(self, url, method, params):
    """Perform HTTP request and return the response body.

    :param url: target URL.
    :param method: HTTP method (not used here — the request carries a
        body, so ``urlopen`` issues a POST).
    :param params: encoded request body, coerced to bytes.

    Errors raised by ``urlopen`` propagate to the caller ("user catches
    errors").  The response object is now always closed, fixing a
    socket leak in the original.
    """
    request = Request(url, str_to_bytes(params))
    for key, val in items(self.http_headers):
        request.add_header(key, val)
    response = urlopen(request)  # user catches errors.
    try:
        return response.read()
    finally:
        # Release the underlying socket even if read() fails.
        response.close()
def _unpack(self, payload, sep=str_to_bytes('\x00\x01')):
    """Decode a base64 signed message; the body is left as raw bytes."""
    raw = b64decode(ensure_bytes(payload))
    sep_len = len(sep)
    boundary = raw.find(sep)
    signer = raw[:boundary]
    signer_cert = self._cert_store[signer]
    # key_size is in bits; >> 3 yields the signature length in bytes
    # (2048-bit RSA -> 256, 4096-bit RSA -> 512).
    sig_len = signer_cert.get_pubkey().key_size >> 3
    sig_start = boundary + sep_len
    sig_end = sig_start + sig_len
    signature = raw[sig_start:sig_end]
    tail = raw[sig_end + sep_len:].split(sep)
    return {
        'signer': signer,
        'signature': signature,
        'content_type': bytes_to_str(tail[0]),
        'content_encoding': bytes_to_str(tail[1]),
        'body': tail[2],
    }
def _pack(self, body, content_type, content_encoding, signer, signature,
          sep=str_to_bytes('\x00\x01')):
    """Join the message fields with *sep* and base64-encode the result."""
    parts = [signer, signature, content_type, content_encoding, body]
    joined = sep.join(map(ensure_bytes, parts))
    return b64encode(joined)
def store_webhook(self, url, callback_url):
    """Register *callback_url* to be called when *url* is checked.

    Duplicate registrations for the same URL are ignored.
    """
    if callback_url in self.get_webhooks_for_url(url):
        return
    w_hash = generate_hash_for('webhook', url)
    self.database.rpush(w_hash, str_to_bytes(callback_url))
def __init__(self, key=None, cert=None, cert_store=None, digest='sha1',
             serializer='json'):
    """Set up signing configuration.

    :param key: private key used for signing.
    :param cert: certificate of the signer.
    :param cert_store: store used to look up signer certificates.
    :param digest: digest algorithm name.
    :param serializer: serializer name used for message bodies.
    """
    self._key = key
    self._cert = cert
    self._cert_store = cert_store
    # On Python 2 the digest name must be a byte string.
    self._digest = digest if PY3 else str_to_bytes(digest)
    self._serializer = serializer
def _unpack(self, payload, sep=str_to_bytes('\x00\x01')):
    """Split a base64-encoded signed message into its parts (all str)."""
    raw_payload = b64decode(ensure_bytes(payload))
    first_sep = raw_payload.find(sep)
    signer = raw_payload[:first_sep]
    signer_cert = self._cert_store[signer]
    # Signature byte length == key size in bits >> 3.
    sig_len = signer_cert._cert.get_pubkey().bits() >> 3
    sig_start = first_sep + len(sep)
    sig_end = sig_start + sig_len
    signature = raw_payload[sig_start:sig_end]
    rest = raw_payload[sig_end + len(sep):].split(sep)
    keys = ('signer', 'signature', 'content_type', 'content_encoding', 'body')
    parts = (signer, signature, rest[0], rest[1], rest[2])
    return dict(zip(keys, (bytes_to_str(p) for p in parts)))
def _queue_bind(self, exchange, routing_key, pattern, queue):
    """Bind *queue* to *exchange*, persisting the binding table on disk.

    The table is a serialized list of ``(routing_key, pattern, queue)``
    tuples stored in ``<exchange>.exchange`` inside the control folder,
    which is locked for the whole read-modify-write cycle.

    Fixes over the original: ``d``/``f`` could be unbound in the
    ``finally`` clause (raising ``NameError`` and masking the real
    error), the read handle was left open if parsing failed, and the
    directory file descriptor was never closed.
    """
    filename = '{}.exchange'.format(exchange)
    filename = os.path.join(self.control_folder, filename)
    d_fileno = None
    f = None
    try:
        d_fileno = os.open(self.control_folder, os.O_RDONLY)
        # lock()/unlock() expect an object exposing fileno().
        d = namedtuple('Directory', [])
        d.fileno = lambda: d_fileno
        lock(d, LOCK_EX)
        try:
            if os.path.isfile(filename):
                with open(filename, 'r') as existing:
                    exchange_table = loads(bytes_to_str(existing.read()))
            else:
                exchange_table = []
            queues = [tuple(q) for q in exchange_table]
            queue_val = (routing_key or '', pattern or '', queue or '')
            if queue_val not in queues:
                exchange_table = [queue_val] + queues
                f = open(filename, 'wb')
                f.write(str_to_bytes(dumps(exchange_table)))
        finally:
            unlock(d)
    except OSError:
        raise ChannelError(
            f'Cannot open {filename!r}')
    finally:
        if f is not None:
            f.close()
        if d_fileno is not None:
            os.close(d_fileno)
def _unpack(self, payload, sep=str_to_bytes('\x00\x01')):
    """Decode a signed payload, returning every field as text."""
    raw = b64decode(ensure_bytes(payload))
    boundary = raw.find(sep)
    signer = raw[:boundary]
    cert = self._cert_store[signer]
    # bits() >> 3 converts the key size to the signature byte length.
    sig_len = cert._cert.get_pubkey().bits() >> 3
    start = boundary + len(sep)
    signature = raw[start:start + sig_len]
    tail = raw[start + sig_len + len(sep):].split(sep)
    return {
        'signer': bytes_to_str(signer),
        'signature': bytes_to_str(signature),
        'content_type': bytes_to_str(tail[0]),
        'content_encoding': bytes_to_str(tail[1]),
        'body': bytes_to_str(tail[2]),
    }
def _unpack(self, payload, sep=str_to_bytes('\x00\x01')):
    """Split a base64-encoded payload on *sep* into its five fields."""
    fields = b64decode(ensure_bytes(payload)).split(sep)
    # Text fields are decoded to str; signature and body stay as bytes.
    keys = ('signer', 'signature', 'content_type', 'content_encoding', 'body')
    converters = (bytes_to_str, ensure_bytes, bytes_to_str,
                  bytes_to_str, ensure_bytes)
    return {
        key: convert(fields[i])
        for i, (key, convert) in enumerate(zip(keys, converters))
    }
def _queue_bind(self, exchange, routing_key, pattern, queue):
    """Add a binding for *queue* on *exchange* and persist the table."""
    table = self.get_table(exchange)
    binding = exchange_queue_t(routing_key or "",
                               pattern or "",
                               queue or "")
    if binding in table:
        return
    table.insert(0, binding)
    with self._get_exchange_file_obj(exchange, "wb") as f_obj:
        f_obj.write(str_to_bytes(dumps(table)))
def test_pylibmc_bytes_key(self):
    """Byte-string task ids must round-trip through the cache backend."""
    with mock.reset_modules('celery.backends.cache'):
        with self.mock_pylibmc():
            from celery.backends import cache
            cache._imp = [None]
            task_id = str_to_bytes(uuid())
            expected = 42
            backend = cache.CacheBackend(backend='memcache', app=self.app)
            backend.store_result(task_id, expected, state=states.SUCCESS)
            assert backend.get_result(task_id) == expected
def test_pylibmc_bytes_key(self):
    """Byte-string task ids must round-trip through the cache backend."""
    with reset_modules("celery.backends.cache"):
        with self.mock_pylibmc():
            from celery.backends import cache
            cache._imp = [None]
            task_id = str_to_bytes(uuid())
            expected = 42
            backend = cache.CacheBackend(backend='memcache')
            backend.store_result(task_id, expected, status=states.SUCCESS)
            self.assertEqual(backend.get_result(task_id), expected)
def test_pylibmc_bytes_key(self):
    """Byte-string task ids must round-trip through the cache backend."""
    with reset_modules('celery.backends.cache'):
        with self.mock_pylibmc():
            from celery.backends import cache
            cache._imp = [None]
            task_id, expected = str_to_bytes(uuid()), 42
            backend = cache.CacheBackend(backend='memcache', app=self.app)
            backend.store_result(task_id, expected, state=states.SUCCESS)
            self.assertEqual(backend.get_result(task_id), expected)
def test_memcache_bytes_key(self):
    """Byte task ids work with the plain memcache client (no pylibmc)."""
    with self.mock_memcache():
        with reset_modules("celery.backends.cache"):
            with mask_modules("pylibmc"):
                from celery.backends import cache
                cache._imp = [None]
                task_id = str_to_bytes(uuid())
                expected = 42
                backend = cache.CacheBackend(backend="memcache")
                backend.store_result(task_id, expected,
                                     status=states.SUCCESS)
                self.assertEqual(backend.get_result(task_id), expected)
def _put(self, queue, payload, **kwargs):
    """Put `message` onto `queue`.

    Serializes *payload* into a uniquely-named, exclusively-locked file
    in the outgoing data folder.

    Fix: ``f`` is pre-initialized so the ``finally`` clause no longer
    raises ``NameError`` (masking the real ``ChannelError``) when
    ``open()`` itself fails.
    """
    filename = '{}_{}.{}.msg'.format(int(round(monotonic() * 1000)),
                                     uuid.uuid4(), queue)
    filename = os.path.join(self.data_folder_out, filename)
    f = None
    try:
        f = open(filename, 'wb')
        lock(f, LOCK_EX)
        f.write(str_to_bytes(dumps(payload)))
    except OSError:
        raise ChannelError(f'Cannot add file {filename!r} to directory')
    finally:
        if f is not None:
            unlock(f)
            f.close()
def _put(self, queue, payload, **kwargs):
    """Put `message` onto `queue`.

    Writes the serialized *payload* into a uniquely-named locked file
    in the outgoing data folder.

    Fix: guard ``f`` so the ``finally`` clause is safe when ``open()``
    fails before ``f`` is bound (previously a ``NameError`` masked the
    ``ChannelError``).
    """
    filename = "%s_%s.%s.msg" % (int(round(monotonic() * 1000)),
                                 uuid.uuid4(), queue)
    filename = os.path.join(self.data_folder_out, filename)
    f = None
    try:
        f = open(filename, "wb")
        lock(f, LOCK_EX)
        f.write(str_to_bytes(dumps(payload)))
    except (IOError, OSError):
        raise ChannelError("Cannot add file {0!r} to directory".format(filename))
    finally:
        if f is not None:
            unlock(f)
            f.close()
def _put(self, queue, payload, **kwargs):
    """Put `message` onto `queue`.

    Fix: ``f`` is pre-initialized so the ``finally`` clause no longer
    raises ``NameError`` when ``open()`` fails before ``f`` is bound.
    """
    filename = '%s_%s.%s.msg' % (int(round(
        time.time() * 1000)), uuid.uuid4(), queue)
    filename = os.path.join(self.data_folder_out, filename)
    f = None
    try:
        f = open(filename, 'wb')
        lock(f, LOCK_EX)
        f.write(str_to_bytes(dumps(payload)))
    except (IOError, OSError):
        raise StdChannelError(
            'Filename [%s] could not be placed into folder.' % filename)
    finally:
        if f is not None:
            unlock(f)
            f.close()
def _put(self, queue, payload, **kwargs):
    """Put `message` onto `queue`.

    Each queue gets its own sub-folder (created on demand); the
    serialized payload is written to a uniquely-named locked file.

    Fix: the error message previously contained the literal placeholder
    "(unknown)" instead of the offending path.
    """
    queue_folder = os.path.join(self.data_folder_out, queue)
    filename = '{}_{}.msg'.format(int(round(monotonic() * 1000)),
                                  uuid.uuid4())
    filename = os.path.join(queue_folder, filename)
    f = None  # define file descriptor so the finally clause is safe
    try:
        os.makedirs(queue_folder, exist_ok=True)
        f = open(filename, 'wb')
        lock(f, LOCK_EX)
        f.write(str_to_bytes(dumps(payload)))
    except OSError:
        raise ChannelError(f'Cannot add file {filename!r} to directory')
    finally:
        if f:
            unlock(f)
            f.close()
def _put(self, queue, payload, **kwargs):
    """Put `message` onto `queue`.

    Fix: guard ``f`` so a failed ``open()`` no longer triggers a
    ``NameError`` in the ``finally`` clause that masks the
    ``StdChannelError``.
    """
    filename = '%s_%s.%s.msg' % (int(round(time.time() * 1000)),
                                 uuid.uuid4(), queue)
    filename = os.path.join(self.data_folder_out, filename)
    f = None
    try:
        f = open(filename, 'wb')
        lock(f, LOCK_EX)
        f.write(str_to_bytes(dumps(payload)))
    except (IOError, OSError):
        raise StdChannelError(
            'Filename [%s] could not be placed into folder.' % filename)
    finally:
        if f is not None:
            unlock(f)
            f.close()
def _put(self, queue, payload, **kwargs):
    """Put `message` onto `queue`.

    Fixes: removed a redundant ``dumps(payload)`` call whose result was
    discarded, and pre-initialized ``f`` so the ``finally`` clause is
    safe when ``open()`` fails before ``f`` is bound.
    """
    filename = '%s_%s.%s.msg' % (int(round(
        time.time() * 1000)), uuid.uuid4(), queue)
    filename = os.path.join(self.data_folder_out, filename)
    f = None
    try:
        f = open(filename, 'wb')
        lock(f, LOCK_EX)
        f.write(str_to_bytes(dumps(payload)))
    except (IOError, OSError):
        raise StdChannelError(
            'Cannot add file {0!r} to directory'.format(filename))
    finally:
        if f is not None:
            unlock(f)
            f.close()
def _unpack(self, payload, sep=str_to_bytes("\x00\x01")):
    """Reverse of ``_pack``: base64-decode and split a signed payload."""
    data = b64decode(ensure_bytes(payload))
    sep_len = len(sep)
    end_of_signer = data.find(sep)
    signer = data[:end_of_signer]
    # The signature length in bytes equals the key size in bits >> 3.
    sig_len = self._cert_store[signer]._cert.get_pubkey().bits() >> 3
    sig_start = end_of_signer + sep_len
    sig_end = sig_start + sig_len
    signature = data[sig_start:sig_end]
    remainder = data[sig_end + sep_len:].split(sep)
    return {
        "signer": signer,
        "signature": signature,
        "content_type": bytes_to_str(remainder[0]),
        "content_encoding": bytes_to_str(remainder[1]),
        "body": bytes_to_str(remainder[2]),
    }
def store_metadata(self, url, response):
    """Persist response metadata (final URL, status, headers, redirects)
    for *url* and return the stored record."""
    url_hash = generate_hash_for('url', url)
    hset = self.database.hset
    hset(url_hash, 'final-url', str_to_bytes(response.url))
    hset(url_hash, 'final-status-code', str_to_bytes(response.status_code))
    hset(url_hash, 'updated', str_to_bytes(datetime.now().isoformat()))
    if response.headers:
        for header in HEADERS:
            value = response.headers.get(header, '')
            if header == 'content-type' and ';' in value:
                # Special treatment: content type may carry a charset.
                self.store_content_type(url_hash, value)
            else:
                hset(url_hash, header, str_to_bytes(value))
    # Record the first hop of a redirect chain, if any.
    if len(response.history):
        first_hop = response.history[0]
        hset(url_hash, 'redirect-url', str_to_bytes(first_hop.url))
        hset(url_hash, 'redirect-status-code',
             str_to_bytes(first_hop.status_code))
    return self.get_url(url_hash)
def test_dumps__no_serializer(self):
    """Raw bytes are passed through as binary application/data."""
    content_type, encoding, _ = dumps(str_to_bytes('foo'))
    self.assertEqual(content_type, 'application/data')
    self.assertEqual(encoding, 'binary')
def setenv(k, v):  # noqa
    # Set environment variable *k* to *v*, coercing both to byte strings.
    # NOTE(review): byte-string keys/values are a Python 2 convention;
    # on Python 3 ``os.environ`` expects ``str`` — confirm this helper
    # only runs on Python 2 code paths (or that str_to_bytes is a no-op
    # there).
    os.environ[str_to_bytes(k)] = str_to_bytes(v)
def _digest(self, data):
    # Hash *data* (coerced to bytes) with the module-level ``_digest``
    # factory this method shadows, and return the hex digest string.
    return _digest(str_to_bytes(data)).hexdigest()
def test_dumps__no_serializer(self):
    """Byte input is labeled binary application/data by dumps()."""
    content_type, encoding, _ = dumps(str_to_bytes('foo'))
    assert content_type == 'application/data'
    assert encoding == 'binary'
def test_loads_bytes(self):
    """A dumped mapping survives a bytes round-trip with decode_bytes."""
    payload = str_to_bytes(dumps({'x': 'z'}))
    self.assertEqual(loads(payload, decode_bytes=True), {'x': 'z'})
# Pickle serialization tests pickle_data = pickle.dumps(py_data, protocol=pickle_protocol) # YAML serialization tests yaml_data = """\ float: 3.1415926500000002 int: 10 list: [george, jerry, elaine, cosmo] string: The quick brown fox jumps over the lazy dog unicode: "Th\\xE9 quick brown fox jumps over th\\xE9 lazy dog" """ msgpack_py_data = dict(py_data) msgpack_py_data['unicode'] = b'Th quick brown fox jumps over th lazy dog' msgpack_py_data['list'] = [str_to_bytes(x) for x in msgpack_py_data['list']] msgpack_py_data = dict( (str_to_bytes(k), str_to_bytes(v) if isinstance(v, text_t) else v) for k, v in items(msgpack_py_data) ) # Unicode chars are lost in transmit :( msgpack_data = b64decode(str_to_bytes("""\ haNpbnQKpWZsb2F0y0AJIftTyNTxpGxpc3SUpmdlb3JnZaVqZXJyeaZlbGFpbmWlY29zbW+mc3Rya\ W5n2gArVGhlIHF1aWNrIGJyb3duIGZveCBqdW1wcyBvdmVyIHRoZSBsYXp5IGRvZ6d1bmljb2Rl2g\ ApVGggcXVpY2sgYnJvd24gZm94IGp1bXBzIG92ZXIgdGggbGF6eSBkb2c=\ """)) def say(m): sys.stderr.write('%s\n' % (m, ))
def b64encode(s):
    """Base64-encode *s* (str or bytes) and return the result as text."""
    encoded = base64.b64encode(str_to_bytes(s))
    return bytes_to_str(encoded)
# YAML serialization tests yaml_data = """\ float: 3.1415926500000002 int: 10 list: [george, jerry, elaine, cosmo] string: The quick brown fox jumps over the lazy dog unicode: "Th\\xE9 quick brown fox jumps over th\\xE9 lazy dog" """ msgpack_py_data = dict(py_data) # Unicode chars are lost in transmit :( msgpack_py_data['unicode'] = 'Th quick brown fox jumps over th lazy dog' msgpack_data = b64decode(str_to_bytes("""\ haNpbnQKpWZsb2F0y0AJIftTyNTxpGxpc3SUpmdlb3JnZaVqZXJyeaZlbGFpbmWlY29zbW+mc3Rya\ W5n2gArVGhlIHF1aWNrIGJyb3duIGZveCBqdW1wcyBvdmVyIHRoZSBsYXp5IGRvZ6d1bmljb2Rl2g\ ApVGggcXVpY2sgYnJvd24gZm94IGp1bXBzIG92ZXIgdGggbGF6eSBkb2c=\ """)) def say(m): sys.stderr.write('%s\n' % (m, )) registry.register('testS', lambda s: s, lambda s: 'decoded', 'application/testS', 'utf-8') class test_Serialization(TestCase): def test_disable(self):
def deserialize(data):
    """Unpickle *data*, coercing it to bytes first.

    NOTE: pickle must never be fed untrusted input — it can execute
    arbitrary code on load.
    """
    raw = str_to_bytes(data)
    return pickle.loads(raw)
def test_encode__no_serializer(self):
    """Encoding raw bytes yields binary application/data."""
    content_type, encoding, _ = registry.encode(str_to_bytes('foo'))
    self.assertEqual(content_type, 'application/data')
    self.assertEqual(encoding, 'binary')
def decode(s):
    """Unpickle *s* (coerced to bytes), using dill as the load function."""
    payload = str_to_bytes(s)
    return pickle_loads(payload, load=dill.load)
def test_loads_bytes(self):
    """dumps/loads round-trips a mapping through bytes with decode_bytes."""
    encoded = str_to_bytes(dumps({'x': 'z'}))
    decoded = loads(encoded, decode_bytes=True)
    assert decoded == {'x': 'z'}
def encode(self, value):
    """Encode/decode the value using Base64 encoding."""
    raw = base64.b64encode(str_to_bytes(value))
    return raw.decode()
AMQPConnectionException = AMQPChannelException = NA # noqa from kombu.five import items from kombu.utils.encoding import str_to_bytes from kombu.utils.amq_manager import get_manager from . import base DEFAULT_PORT = 5672 HAS_MSG_PEEK = hasattr(socket, 'MSG_PEEK') # amqplib's handshake mistakenly identifies as protocol version 1191, # this breaks in RabbitMQ tip, which no longer falls back to # 0-8 for unknown ids. transport.AMQP_PROTOCOL_HEADER = str_to_bytes('AMQP\x01\x01\x08\x00') # - fixes warnings when socket is not connected. class TCPTransport(transport.TCPTransport): def read_frame(self): frame_type, channel, size = unpack('>BHI', self._read(7, True)) payload = self._read(size) ch = ord(self._read(1)) if ch == 206: # '\xce' return frame_type, channel, payload else: raise Exception( 'Framing Error, received 0x%02x while expecting 0xce' % ch)
# Pickle serialization tests pickle_data = pickle.dumps(py_data, protocol=pickle_protocol) # YAML serialization tests yaml_data = """\ float: 3.1415926500000002 int: 10 list: [george, jerry, elaine, cosmo] string: The quick brown fox jumps over the lazy dog unicode: "Th\\xE9 quick brown fox jumps over th\\xE9 lazy dog" """ msgpack_py_data = dict(py_data) msgpack_py_data['unicode'] = b'Th quick brown fox jumps over th lazy dog' msgpack_py_data['list'] = [str_to_bytes(x) for x in msgpack_py_data['list']] msgpack_py_data = dict( (str_to_bytes(k), str_to_bytes(v) if isinstance(v, text_t) else v) for k, v in items(msgpack_py_data)) # Unicode chars are lost in transmit :( msgpack_data = b64decode( str_to_bytes("""\ haNpbnQKpWZsb2F0y0AJIftTyNTxpGxpc3SUpmdlb3JnZaVqZXJyeaZlbGFpbmWlY29zbW+mc3Rya\ W5n2gArVGhlIHF1aWNrIGJyb3duIGZveCBqdW1wcyBvdmVyIHRoZSBsYXp5IGRvZ6d1bmljb2Rl2g\ ApVGggcXVpY2sgYnJvd24gZm94IGp1bXBzIG92ZXIgdGggbGF6eSBkb2c=\ """)) def say(m): sys.stderr.write('%s\n' % (m, ))
def b64decode(s):
    """Decode base64 input *s*, accepting either str or bytes."""
    raw = str_to_bytes(s)
    return base64.b64decode(raw)
def maybe_compress(value, do_compress=False):
    """Compress *value* when *do_compress* is true; else return it as-is."""
    if not do_compress:
        return value
    return compress(str_to_bytes(value))
def maybe_decompress(value, do_decompress=False):
    """Decompress *value* when requested, unless it carries the
    no-decompress header; otherwise return it unchanged."""
    if do_decompress:
        header = str_to_bytes(value[:15])
        if header != NO_DECOMPRESS_HEADER:
            return decompress(str_to_bytes(value))
    return value
# YAML serialization tests yaml_data = """\ float: 3.1415926500000002 int: 10 list: [george, jerry, elaine, cosmo] string: The quick brown fox jumps over the lazy dog unicode: "Th\\xE9 quick brown fox jumps over th\\xE9 lazy dog" """ msgpack_py_data = dict(py_data) msgpack_py_data['unicode'] = 'Th quick brown fox jumps over th lazy dog' # Unicode chars are lost in transmit :( msgpack_data = b64decode(str_to_bytes("""\ haNpbnQKpWZsb2F0y0AJIftTyNTxpGxpc3SUpmdlb3JnZaVqZXJyeaZlbGFpbmWlY29zbW+mc3Rya\ W5n2gArVGhlIHF1aWNrIGJyb3duIGZveCBqdW1wcyBvdmVyIHRoZSBsYXp5IGRvZ6d1bmljb2Rl2g\ ApVGggcXVpY2sgYnJvd24gZm94IGp1bXBzIG92ZXIgdGggbGF6eSBkb2c=\ """)) registry.register('testS', lambda s: s, lambda s: 'decoded', 'application/testS', 'utf-8') class test_Serialization(Case): def test_disable(self): disabled = registry._disabled_content_types try: registry.disable('testS') self.assertIn('application/testS', disabled)
pass amqp = NA amqp.Connection = NA transport = _Channel = NA # noqa # Sphinx crashes if this is NA, must be different class transport.TCPTransport = transport.SSLTransport = NAx AMQPConnectionException = AMQPChannelException = NA # noqa DEFAULT_PORT = 5672 HAS_MSG_PEEK = hasattr(socket, 'MSG_PEEK') # amqplib's handshake mistakenly identifies as protocol version 1191, # this breaks in RabbitMQ tip, which no longer falls back to # 0-8 for unknown ids. transport.AMQP_PROTOCOL_HEADER = str_to_bytes('AMQP\x01\x01\x08\x00') # - fixes warnings when socket is not connected. class TCPTransport(transport.TCPTransport): def read_frame(self): frame_type, channel, size = unpack('>BHI', self._read(7, True)) payload = self._read(size) ch = ord(self._read(1)) if ch == 206: # '\xce' return frame_type, channel, payload else: raise Exception( 'Framing Error, received 0x%02x while expecting 0xce' % ch) def _read(self, n, initial=False):