def test_infinite_loop_exceeds_max_recursion_depth(self):
    """A fallback encoder that re-creates a custom type must recurse forever."""
    # fallback_encoder_A2B turns TypeA into TypeB, and the B2A codec turns
    # it straight back — the encoder ping-pongs until the recursion limit.
    opts = CodecOptions(
        type_registry=TypeRegistry(
            [self.B2A()], fallback_encoder=self.fallback_encoder_A2B))
    # Exceeding the maximum recursion depth surfaces as RuntimeError.
    with self.assertRaises(RuntimeError):
        encode({'x': self.TypeA(100)}, codec_options=opts)
def post_items(body: dict):
    """Store *body* in the app's store under a fresh UUID and echo metadata."""
    # connexion cannot validate this kind of entries, so probe
    # BSON-serializability ourselves and reject bad payloads with a 422.
    try:
        bson.encode(body)
    except (bson.InvalidDocument, OverflowError) as exc:
        return problem(status=422, title="Unprocessable Entity",
                       detail=exc.args[0])

    item_id = str(uuid4())
    created = datetime.now().isoformat()
    current_app.config["store"].add(
        item_id, {"id": item_id, "timestamp": created, "item": body})
    return {
        "id": item_id,
        "timestamp": created,
        "status": "success",
        "url": request.base_url + "/" + item_id,
        "debug": current_app.config["store"].list(),
    }
def test_utf8(self):
    """Unicode round-trips; non-UTF-8 bytes behave per Python version."""
    w = {u"aéあ": u"aéあ"}
    self.assertEqual(w, decode(encode(w)))

    # b'a\xe9' == u"aé".encode("iso-8859-1") — not valid UTF-8.
    iso8859_bytes = b'a\xe9'
    y = {"hello": iso8859_bytes}
    if PY3:
        # Python 3: stored as BSON binary subtype 0, returned as bytes.
        roundtripped = decode(encode(y))
        self.assertIsInstance(roundtripped['hello'], bytes)
        self.assertEqual(roundtripped['hello'], iso8859_bytes)
    else:
        # Python 2: invalid UTF-8 str values are rejected, and the error
        # message names the offending bytes.
        try:
            encode(y)
        except InvalidStringData as exc:
            self.assertIn(repr(iso8859_bytes), str(exc))
        # The next two tests only make sense in python 2.x since
        # you can't use `bytes` type as document keys in python 3.x.
        x = {u"aéあ".encode("utf-8"): u"aéあ".encode("utf-8")}
        self.assertEqual(w, decode(encode(x)))
        z = {iso8859_bytes: "hello"}
        self.assertRaises(InvalidStringData, encode, z)
def test_bad_dbref(self):
    """Partial DBRef-shaped documents decode predictably."""
    # A $ref without $id still decodes as a DBRef, with id=None.
    ref_only = {'ref': {'$ref': 'collection'}}
    self.assertEqual(DBRef('collection', id=None),
                     decode(encode(ref_only))['ref'])
    # A bare $id is not a DBRef and round-trips as a plain subdocument.
    id_only = {'ref': {'$id': ObjectId()}}
    self.assertEqual(id_only, decode(encode(id_only)))
def test_bson_regex(self):
    """Regex wrappers and native patterns share one BSON encoding."""
    # Invalid as a Python regex, though valid PCRE; Regex stores it raw.
    pcre_only = Regex(r'[\w-\.]')
    self.assertEqual(r'[\w-\.]', pcre_only.pattern)
    self.assertEqual(0, pcre_only.flags)
    pcre_bson = (
        b'\x11\x00\x00\x00'            # document length
        b'\x0br\x00[\\w-\\.]\x00\x00'  # r: regex
        b'\x00')                       # document terminator
    self.assertEqual(pcre_bson, encode({'r': pcre_only}))
    self.assertEqual({'r': pcre_only}, decode(pcre_bson))

    # Valid Python regex with flags: a compiled pattern and its Regex
    # wrapper must encode identically and decode with flags intact.
    all_flags = re.I | re.M | re.S | re.U | re.X
    compiled = re.compile('.*', all_flags)
    wrapped = Regex('.*', all_flags)
    flagged_bson = (
        b"\x11\x00\x00\x00"          # document length
        b"\x0br\x00.*\x00imsux\x00"  # r: regex
        b"\x00")                     # document terminator
    self.assertEqual(flagged_bson, encode({'r': compiled}))
    self.assertEqual(flagged_bson, encode({'r': wrapped}))
    self.assertEqual(compiled.pattern, decode(flagged_bson)['r'].pattern)
    self.assertEqual(compiled.flags, decode(flagged_bson)['r'].flags)
def test_overflow(self):
    """Integer encoding is bounded by the signed 64-bit range (Py2 long)."""
    int64_max = long(9223372036854775807)
    int64_min = long(-9223372036854775808)
    # The extremes themselves encode fine.
    self.assertTrue(encode({"x": int64_max}))
    self.assertTrue(encode({"x": int64_min}))
    # One past either extreme must overflow.
    self.assertRaises(OverflowError, encode, {"x": int64_max + 1})
    self.assertRaises(OverflowError, encode, {"x": int64_min - 1})
def test_overflow(self):
    """Integer encoding is bounded by the signed 64-bit range."""
    int64_max = 9223372036854775807
    int64_min = -9223372036854775808
    # The extremes themselves encode fine.
    self.assertTrue(encode({"x": int64_max}))
    self.assertTrue(encode({"x": int64_min}))
    # One past either extreme must overflow.
    self.assertRaises(OverflowError, encode, {"x": int64_max + 1})
    self.assertRaises(OverflowError, encode, {"x": int64_min - 1})
def test_simple(self):
    """Fallback-encoded Decimal matches a directly-encoded Decimal128."""
    opts = self._get_codec_options(lambda dec: Decimal128(dec))
    got = encode({'average': Decimal('56.47')}, codec_options=opts)
    want = encode({'average': Decimal128('56.47')})
    self.assertEqual(got, want)
def test_encode_custom_then_fallback(self):
    """TypeB -> TypeA via the codec, then TypeA -> int via the fallback."""
    opts = CodecOptions(type_registry=TypeRegistry(
        [self.B2A()], fallback_encoder=self.fallback_encoder_A2BSON))
    doc = {'x': self.TypeB(123)}
    # The chained conversion must produce the same bytes as a plain int.
    self.assertEqual(encode(doc, codec_options=opts), encode({'x': 123}))
def test_bson_encode_decode(self) -> None:
    """Decoded documents are re-encodable and mutable."""
    original = {'_id': ObjectId()}
    # Round-trip twice: decode() output must itself be encodable.
    roundtripped = bson.decode(bson.encode(original))
    roundtripped = bson.decode(bson.encode(roundtripped))
    # Documents returned from decode are mutable.
    roundtripped['new_field'] = 1
    self.assertTrue(roundtripped['_id'].generation_time)
def test_bad_id_keys(self):
    """check_keys=True rejects $-prefixed keys inside _id subdocuments."""
    for bad_id in ({"$bad": 123}, {'$oid': "52d0b971b3ba219fdeb4170e"}):
        # Third positional argument is check_keys=True.
        self.assertRaises(InvalidDocument, encode, {"_id": bad_id}, True)
    # Without key checking the same document encodes fine.
    encode({"_id": {'$oid': "52d0b971b3ba219fdeb4170e"}})
def test_type_unencodable_by_fallback_encoder(self):
    """Errors raised by a fallback encoder must propagate to the caller."""
    def fallback_encoder(value):
        try:
            return Decimal128(value)
        # Narrowed from a bare ``except:``, which would also have
        # swallowed SystemExit/KeyboardInterrupt.
        except Exception:
            raise TypeError("cannot encode type %s" % (type(value)))

    codecopts = self._get_codec_options(fallback_encoder)
    # The Decimal *class* (not an instance) cannot be converted, so the
    # fallback's TypeError must surface from encode().
    document = {'average': Decimal}
    with self.assertRaises(TypeError):
        encode(document, codec_options=codecopts)
def test_utf8(self):
    """Unicode text round-trips; non-UTF-8 bytes become binary subtype 0."""
    w = {"aéあ": "aéあ"}
    self.assertEqual(w, decode(encode(w)))
    # b'a\xe9' == "aé".encode("iso-8859-1") — not valid UTF-8, so it is
    # stored as BSON binary subtype 0 and comes back as bytes.
    iso8859_bytes = b'a\xe9'
    roundtripped = decode(encode({"hello": iso8859_bytes}))
    self.assertIsInstance(roundtripped['hello'], bytes)
    self.assertEqual(roundtripped['hello'], iso8859_bytes)
def test_erroring_fallback_encoder(self):
    """The fallback runs only for otherwise-unencodable values."""
    opts = self._get_codec_options(lambda _: 1/0)
    # fallback converter should not be invoked when encoding known types.
    encode({'a': 1, 'b': Decimal128('1.01'), 'c': {'arr': ['abc', 3.678]}},
           codec_options=opts)
    # Encoding a custom type triggers the (failing) fallback, and its
    # exception propagates unchanged.
    with self.assertRaises(ZeroDivisionError):
        encode({'average': Decimal('56.47')}, codec_options=opts)
def test_custom_class(self):
    """document_class controls the mapping type decode() returns."""
    # Default document_class is dict, not SON.
    self.assertIsInstance(decode(encode({})), dict)
    self.assertNotIsInstance(decode(encode({})), SON)
    son_opts = CodecOptions(document_class=SON)
    self.assertIsInstance(decode(encode({}), son_opts), SON)
    self.assertEqual(1, decode(encode({"x": 1}), son_opts)["x"])
    # document_class applies recursively to embedded documents.
    nested = encode({"x": [{"y": 1}]})
    self.assertIsInstance(decode(nested, son_opts)["x"][0], SON)
def test_uuid(self):
    """Native UUIDs need an explicit representation to encode."""
    native = uuid.uuid4()
    # The default uuid_representation is UNSPECIFIED, which refuses to
    # encode native uuid.UUID values.
    with self.assertRaisesRegex(ValueError, 'cannot encode native uuid'):
        bson.decode_all(encode({'uuid': native}))
    # With STANDARD representation the UUID round-trips intact.
    opts = CodecOptions(uuid_representation=UuidRepresentation.STANDARD)
    roundtripped = decode(encode({"id": native}, codec_options=opts),
                          codec_options=opts)["id"]
    self.assertIsInstance(roundtripped, uuid.UUID)
    self.assertEqual(native, roundtripped)
    self.assertNotEqual(uuid.uuid4(), roundtripped)
def test_decode_all_defaults(self):
    """decode_all() defaults: document_class=dict, tz_aware=False."""
    doc = {'sub_document': {}, 'dt': datetime.datetime.utcnow()}
    first = bson.decode_all(bson.encode(doc))[0]
    self.assertIsInstance(first['sub_document'], dict)
    # tz_aware=False yields naive datetimes.
    self.assertIsNone(first['dt'].tzinfo)
    # The default uuid_representation is UNSPECIFIED, which refuses to
    # encode native uuid.UUID values.
    with self.assertRaisesRegex(ValueError, 'cannot encode native uuid'):
        bson.decode_all(bson.encode({'uuid': uuid.uuid4()}))
def test_raise_invalid_document(self):
    """Unencodable objects raise InvalidDocument naming value and type."""
    class Wrapper(object):
        def __init__(self, val):
            self.val = val

        def __repr__(self):
            return repr(self.val)

    # Sanity-check the repr embedded in the expected error message.
    self.assertEqual('1', repr(Wrapper(1)))
    expected = "cannot encode object: 1, of type: " + repr(Wrapper)
    with self.assertRaisesRegex(InvalidDocument, expected):
        encode({'t': Wrapper(1)})
def test_small_long_encode_decode(self):
    """int and Int64 round-trip as distinct types for the same value."""
    plain = decode(encode({'x': 256}))['x']
    self.assertEqual(256, plain)
    self.assertEqual(type(256), type(plain))

    boxed = decode(encode({'x': Int64(256)}))['x']
    expected = Int64(256)
    self.assertEqual(expected, boxed)
    self.assertEqual(type(expected), type(boxed))

    # Equal values, but different wire/Python types.
    self.assertNotEqual(type(plain), type(boxed))
def test_move_id(self):
    """_id is serialized first at the top level only."""
    # Top level: _id moves to the front regardless of insertion order.
    top = SON([("a", "a"), ("_id", "a")])
    self.assertEqual(
        b"\x19\x00\x00\x00\x02_id\x00\x02\x00\x00\x00a\x00"
        b"\x02a\x00\x02\x00\x00\x00a\x00\x00",
        encode(top))

    # Embedded documents keep their _id where it was inserted.
    nested = SON([("b", SON([("a", "a"), ("_id", "a")])), ("_id", "b")])
    self.assertEqual(
        b"\x2c\x00\x00\x00"
        b"\x02_id\x00\x02\x00\x00\x00b\x00"
        b"\x03b\x00"
        b"\x19\x00\x00\x00\x02a\x00\x02\x00\x00\x00a\x00"
        b"\x02_id\x00\x02\x00\x00\x00a\x00\x00\x00",
        encode(nested))
def test_null_character(self):
    """NUL is legal in string values but never in keys or regexes."""
    # NUL bytes inside values round-trip intact.
    for doc in ({"a": "\x00"}, {"a": "\x00"}):
        self.assertEqual(doc, decode(encode(doc)))
    # Keys and regex patterns are NUL-terminated C strings on the wire,
    # so embedded NULs must be rejected.
    for bad in ({b"\x00": "a"},
                {"\x00": "a"},
                {"a": re.compile(b"ab\x00c")},
                {"a": re.compile("ab\x00c")}):
        self.assertRaises(InvalidDocument, encode, bad)
def _assert_same_utf8_validation(self, data):
    """Assert BSON's UTF-8 validator agrees with Python's decoder on *data*."""
    try:
        data.decode('utf-8')
    except UnicodeDecodeError:
        py_is_legal = False
    else:
        py_is_legal = True
    try:
        encode({'x': data})
    except InvalidStringData:
        bson_is_legal = False
    else:
        bson_is_legal = True
    # On disagreement, show the offending bytes in the failure message.
    self.assertEqual(py_is_legal, bson_is_legal, data)
def set_value(self, key, value, expiretime=None):
    """Upsert *value* under *key*, pickling it when required.

    :param key: cache key within this namespace.
    :param value: an arbitrary object (pickled whole when
        ``self._pickle`` is set or the key is ``'session'``), otherwise
        a ``(stored, expires, value)`` triple wrapped into a dict.
    :param expiretime: optional expiry stored as ``valid_until``.
        TODO - What is the datatype of this? it should be instantiated
        as a datetime instance.
    """
    # Lazy %-args: no string formatting unless debug logging is enabled.
    log.debug("[MongoDB %s] Set Key: %s (Expiry: %s) ... ",
              self.mongo, key, expiretime)
    _id = {}
    doc = {}
    if self._pickle or key == 'session':
        try:
            value = pickle.dumps(value)
        # Narrowed from a bare ``except:`` (which would also swallow
        # SystemExit/KeyboardInterrupt); still best-effort by design.
        except Exception:
            log.exception("Failed to pickle value.")
    else:
        value = {
            'stored': value[0],
            'expires': value[1],
            'value': value[2],
            'pickled': False
        }
        # Probe BSON-serializability; fall back to pickling the inner
        # value when bson cannot encode it.
        try:
            bson.encode(value)
        except Exception:  # narrowed from bare except
            log.warning(
                "Value is not bson serializable, pickling inner value.")
            value['value'] = pickle.dumps(value['value'])
            value['pickled'] = True
    if self._sparse:
        # Sparse layout: one document per (namespace, key).
        _id = {
            'namespace': self.namespace,
            'key': key
        }
        doc['data'] = value
        doc['_id'] = _id
        if expiretime:
            doc['valid_until'] = expiretime
    else:
        # Dense layout: one document per namespace, keys nested in 'data'.
        _id = self.namespace
        doc['$set'] = {'data.' + key: value}
        if expiretime:
            doc['$set']['valid_until'] = expiretime
    log.debug("Upserting Doc '%s' to _id '%s'", doc, _id)
    self.mongo.update({"_id": _id}, doc, upsert=True, safe=True)
def __same_doc(self, a, b, ignore=None):
    """Compare two documents by BSON encoding, optionally dropping the
    keys listed in *ignore* from both before comparing."""
    if ignore:
        a = {k: v for k, v in a.items() if k not in ignore}
        b = {k: v for k, v in b.items() if k not in ignore}
    return bson.encode(a) == bson.encode(b)
def _make_idx_key(idx_key):
    """
    MongoDB is very liberal when it comes to what keys it can compare
    on. When we get something weird, it makes sense to just store it as
    a hashable key

    :param idx_key value:
    :rtype: hashable value
    """
    # Hashable values pass straight through to _sort_tup.
    if isinstance(idx_key, collections.abc.Hashable):
        return _sort_tup(idx_key)
    # Unhashable: use the stringified BSON encoding as a stable key.
    try:
        return _sort_tup(str(bson.encode(idx_key)))
    # bson.encode raises TypeError for non-mapping input; wrap the value
    # in a single-key document so it becomes encodable.
    except TypeError:
        return _sort_tup(str(bson.encode({'idx_key': idx_key})))
def set_value(self, key, value, expiretime=None):
    """Upsert *value* under *key*, pickling it when required.

    :param key: cache key within this namespace.
    :param value: an arbitrary object (pickled whole when
        ``self._pickle`` is set or the key is ``'session'``), otherwise
        a ``(stored, expires, value)`` triple wrapped into a dict.
    :param expiretime: optional expiry stored as ``valid_until``.
        TODO - What is the datatype of this? it should be instantiated
        as a datetime instance.
    """
    # Lazy %-args: no string formatting unless debug logging is enabled.
    log.debug("[MongoDB %s] Set Key: %s (Expiry: %s) ... ",
              self.mongo, key, expiretime)
    _id = {}
    doc = {}
    if self._pickle or key == 'session':
        try:
            value = pickle.dumps(value)
        # Narrowed from a bare ``except:`` (which would also swallow
        # SystemExit/KeyboardInterrupt); still best-effort by design.
        except Exception:
            log.exception("Failed to pickle value.")
    else:
        value = {
            'stored': value[0],
            'expires': value[1],
            'value': value[2],
            'pickled': False
        }
        # Probe BSON-serializability; fall back to pickling the inner
        # value when bson cannot encode it.
        try:
            bson.encode(value)
        except Exception:  # narrowed from bare except
            log.warning("Value is not bson serializable, pickling inner value.")
            value['value'] = pickle.dumps(value['value'])
            value['pickled'] = True
    if self._sparse:
        # Sparse layout: one document per (namespace, key).
        _id = {
            'namespace': self.namespace,
            'key': key
        }
        doc['data'] = value
        doc['_id'] = _id
        if expiretime:
            doc['valid_until'] = expiretime
    else:
        # Dense layout: one document per namespace, keys nested in 'data'.
        _id = self.namespace
        doc['$set'] = {'data.' + key: value}
        if expiretime:
            doc['$set']['valid_until'] = expiretime
    log.debug("Upserting Doc '%s' to _id '%s'", doc, _id)
    self.mongo.update({"_id": _id}, doc, upsert=True)
def getCheckInData(host: str, port: int):
    """Perform the LOCO handshake + CHECKIN exchange against *host*:*port*
    and return the decrypted response packet.

    :param host: checkin server hostname or IP.
    :param port: checkin server TCP port.
    :returns: the decoded response ``Packet``.
    """
    crypto = CryptoManager()
    # Context manager closes the socket even on error — the original
    # leaked the file descriptor on every call.
    with socket() as sock:
        sock.connect((host, port))
        sock.send(crypto.getHandshakePacket())
        checkin = Packet(
            1,
            0,
            "CHECKIN",
            0,
            bson.encode({
                "userId": 0,
                "os": "win32",
                "ntype": 0,
                "appVer": "3.14",
                "MCCMNC": "999",
                "lang": "ko",
            }),
        )
        sock.send(checkin.toEncryptedLocoPacket(crypto))
        data = sock.recv(2048)
    recvPacket = Packet()
    recvPacket.readEncryptedLocoPacket(data, crypto)
    return recvPacket
def encrypt(self, value, algorithm, key_id=None, key_alt_name=None):
    """Encrypt a BSON value with a given key and algorithm.

    Note that exactly one of ``key_id`` or ``key_alt_name`` must be
    provided.

    :Parameters:
      - `value`: The BSON value to encrypt.
      - `algorithm` (string): The encryption algorithm to use. See
        :class:`Algorithm` for some valid options.
      - `key_id`: Identifies a data key by ``_id`` which must be a
        :class:`~bson.binary.Binary` with subtype 4 (
        :attr:`~bson.binary.UUID_SUBTYPE`).
      - `key_alt_name`: Identifies a key vault document by 'keyAltName'.

    :Returns:
      The encrypted value, a :class:`~bson.binary.Binary` with subtype 6.
    """
    self._check_closed()
    # Validate key_id only when given; guard-clause style.
    if key_id is not None:
        key_id_ok = (isinstance(key_id, Binary)
                     and key_id.subtype == UUID_SUBTYPE)
        if not key_id_ok:
            raise TypeError(
                'key_id must be a bson.binary.Binary with subtype 4')
    # Wrap the value in a document because libmongocrypt operates on
    # whole BSON documents, then unwrap the encrypted result.
    wrapped = encode({'v': value}, codec_options=self._codec_options)
    with _wrap_encryption_errors():
        encrypted_doc = self._encryption.encrypt(
            wrapped, algorithm, key_id=key_id, key_alt_name=key_alt_name)
    return decode(encrypted_doc)['v']
def test_uuid(self):
    """A native UUID value round-trips through encode/decode."""
    native = uuid.uuid4()
    roundtripped = decode(encode({"id": native}))["id"]
    self.assertIsInstance(roundtripped, uuid.UUID)
    self.assertEqual(native, roundtripped)
    # A fresh UUID must not compare equal to the round-tripped one.
    self.assertNotEqual(uuid.uuid4(), roundtripped)
def stream_files(*files):
    """Concatenate audio chunk files into a compressed BSON blob and
    return it as a FileResponse.

    Files are ordered numerically by their (integer) basenames and each
    one is deleted after it is read.  If there are no chunks, or more
    than 10% of them are empty, the speech process is considered dead
    and ``SpeechProcessDeadError`` is returned instead.
    """
    # Numeric sort: "2" must come before "10".
    ordered = sorted(files, key=lambda f: int(os.path.basename(f)))
    contents = []
    for audio_file in ordered:
        # Context manager replaces the original unclosed-handle pattern.
        with open(audio_file, "rb") as f:
            contents.append(f.read())
        os.remove(audio_file)
    empty = sum(1 for chunk in contents if not chunk)
    # Guard the empty case: the original raised ZeroDivisionError when
    # called with no files.  "No chunks" counts as dead too.
    if not contents or empty / len(contents) > 0.1:
        return SpeechProcessDeadError
    data = zlib.compress(bson.encode({"data": contents}))
    # delete=False: the file must outlive this function for FileResponse.
    with tempfile.NamedTemporaryFile(delete=False, suffix='.dat') as f:
        f.write(data)
    return FileResponse(f.name)
def test_timestamp_highorder_bits(self):
    """Timestamps with all 64 bits set survive encode/decode."""
    doc = {'a': Timestamp(0xFFFFFFFF, 0xFFFFFFFF)}
    raw = (b'\x10\x00\x00\x00'                            # document length
           b'\x11a\x00\xff\xff\xff\xff\xff\xff\xff\xff'   # a: timestamp
           b'\x00')                                       # terminator
    self.assertEqual(raw, encode(doc))
    self.assertEqual(doc, decode(raw))