Example #1
    def test_utf8(self):
        w = {u("aéあ"): u("aéあ")}
        self.assertEqual(w, BSON.encode(w).decode())

        # b'a\xe9' == u"aé".encode("iso-8859-1")
        iso8859_bytes = b'a\xe9'
        y = {"hello": iso8859_bytes}
        if PY3:
            # Stored as BSON binary subtype 0.
            out = BSON.encode(y).decode()
            self.assertTrue(isinstance(out['hello'], bytes))
            self.assertEqual(out['hello'], iso8859_bytes)
        else:
            # Python 2.
            try:
                BSON.encode(y)
            except InvalidStringData as e:
                self.assertTrue(repr(iso8859_bytes) in str(e))

            # The next two tests only make sense in python 2.x since
            # you can't use `bytes` type as document keys in python 3.x.
            x = {u("aéあ").encode("utf-8"): u("aéあ").encode("utf-8")}
            self.assertEqual(w, BSON.encode(x).decode())

            z = {iso8859_bytes: "hello"}
            self.assertRaises(InvalidStringData, BSON.encode, z)
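A related pattern, shown as a minimal sketch (the helper name is illustrative): wrapping arbitrary, possibly non-UTF-8 bytes in bson.binary.Binary makes the round-trip explicit on both Python versions, since Binary values skip the UTF-8 string validation that raises InvalidStringData on Python 2.

from bson import BSON
from bson.binary import Binary

def encode_raw_bytes(key, raw):
    # Binary round-trips as BSON binary regardless of its encoding, so
    # ISO-8859-1 (or any other) byte strings never hit the UTF-8 check.
    return BSON.encode({key: Binary(raw)})

decoded = BSON(encode_raw_bytes("hello", b'a\xe9')).decode()
assert bytes(decoded["hello"]) == b'a\xe9'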
Example #2
    def test_bson_regex(self):
        # Invalid Python regex, though valid PCRE.
        bson_re1 = Regex(r'[\w-\.]')
        self.assertEqual(r'[\w-\.]', bson_re1.pattern)
        self.assertEqual(0, bson_re1.flags)

        doc1 = {'r': bson_re1}
        doc1_bson = (
            b'\x11\x00\x00\x00'              # document length
            b'\x0br\x00[\\w-\\.]\x00\x00'    # r: regex
            b'\x00')                         # document terminator

        self.assertEqual(doc1_bson, BSON.encode(doc1))
        self.assertEqual(doc1, BSON(doc1_bson).decode())

        # Valid Python regex, with flags.
        re2 = re.compile('.*', re.I | re.L | re.M | re.S | re.U | re.X)
        bson_re2 = Regex('.*', re.I | re.L | re.M | re.S | re.U | re.X)

        doc2_with_re = {'r': re2}
        doc2_with_bson_re = {'r': bson_re2}
        doc2_bson = (
            b"\x12\x00\x00\x00"           # document length
            b"\x0br\x00.*\x00ilmsux\x00"  # r: regex
            b"\x00")                      # document terminator

        self.assertEqual(doc2_bson, BSON.encode(doc2_with_re))
        self.assertEqual(doc2_bson, BSON.encode(doc2_with_bson_re))

        self.assertEqual(re2.pattern, BSON(doc2_bson).decode()['r'].pattern)
        self.assertEqual(re2.flags, BSON(doc2_bson).decode()['r'].flags)
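As a small complementary sketch, bson.regex.Regex also offers conversion helpers: Regex.from_native() builds a Regex from a compiled Python pattern, and try_compile() attempts the reverse (which can raise re.error for PCRE-only syntax such as r'[\w-\.]' above).

import re
from bson.regex import Regex

# Native pattern -> BSON Regex; the pattern and flags are carried over.
native = re.compile(r'\d+', re.IGNORECASE)
bson_regex = Regex.from_native(native)
assert bson_regex.pattern == r'\d+'

# BSON Regex -> native pattern; PCRE-only patterns would fail to compile here.
compiled = bson_regex.try_compile()
assert compiled.match('42')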
Example #3
    def update(self, spec, document, upsert=False, multi=False, safe=True, **kwargs):
        if not isinstance(spec, types.DictType):
            raise TypeError("spec must be an instance of dict")
        if not isinstance(document, types.DictType):
            raise TypeError("document must be an instance of dict")
        if not isinstance(upsert, types.BooleanType):
            raise TypeError("upsert must be an instance of bool")

        flags = kwargs.get("flags", 0)

        if multi:
            flags |= UPDATE_MULTI
        if upsert:
            flags |= UPDATE_UPSERT

        spec = BSON.encode(spec)
        document = BSON.encode(document)
        update = Update(flags=flags, collection=str(self),
                        selector=spec, update=document)
        proto = yield self._database.connection.getprotocol()

        proto.send_UPDATE(update)

        if safe:
            ret = yield proto.get_last_error(str(self._database))
            defer.returnValue(ret)
Example #4
	def work(self, body, msg, *args, **kargs):
		## Sanity Checks
		rk = msg.delivery_info['routing_key']
		if not rk:
			raise Exception("Invalid routing-key '%s' (%s)" % (rk, body))
		
		#self.logger.info( body ) 	
		## Try to decode event
		if isinstance(body, dict):
			event = body
		else:
			self.logger.debug(" + Decode JSON")
			try:
				if isinstance(body, str) or isinstance(body, unicode):
					try:
						event = json.loads(body)
						self.logger.debug("   + Ok")
					except Exception, err:
						try:
							self.logger.debug(" + Try hack for windows string")
							# Hack for windows FS -_-
							event = json.loads(body.replace('\\', '\\\\'))
							self.logger.debug("   + Ok")
						except Exception, err :
							try:
								self.logger.debug(" + Decode BSON")
								bson = BSON (body)
								event = bson.decode()
								self.logger.debug("   + Ok")
							except Exception, err:
								raise Exception(err)
Example #5
    def test_bad_dbref(self):
        ref_only = {'ref': {'$ref': 'collection'}}
        id_only = {'ref': {'$id': ObjectId()}}

        self.assertEqual(DBRef('collection', id=None),
                         BSON.encode(ref_only).decode()['ref'])
        self.assertEqual(id_only, BSON.encode(id_only).decode())
Example #6
File: a2mxc.py Project: p1tt/A2MX
def send(request):
	rid = random.randint(0, 0xFFFFFF).to_bytes(4, byteorder='big')
	data = BSON.encode(request)
	s = rid + data
	p = struct.pack('>BLH', 2, random.randint(0, 0xFFFFFFFF), len(s))
	sock.sendall(p + s)

	data = bytearray()
	exp_len = 7
	got_len = False
	while True:
		data += sock.recv(4096)
		if len(data) < exp_len:
			continue
		if not got_len:
			d, r, l = struct.unpack('>BLH', data[:7])
			assert d == 2
			data = data[7:]
			got_len = True
		if len(data) < l:
			continue
		assert l > 4
		assert len(data) == l
		got_rid = data[:4]
		assert got_rid == rid
		bs = BSON.decode(bytes(data[4:]), tz_aware=True)
		if 'error' in bs:
			raise RemoteException(bs['error'])
		if len(bs) == 1 and 'data' in bs:
			return bs['data']
		return bs
Example #7
    def store_task_result(self,posted_task,result):
        """
        Store result of a 'successful' task run.
        The task is removed from the original collection (tasks), and put in the
        results collection.
        """
        posted_task.remove()

        # TODO: Need special serialization?
        try:
            # Can the result be safely BSON-encoded?
            if type(result) != dict:
                safe_result = BSON.encode({'_':result})
            else:
                safe_result = BSON.encode(result)
        except:
            safe_result = str(result)
            self.logger.warning("%s: result of %s cannot be BSON-encoded: %s: %s" %
                                (self.worker_name,posted_task.name,safe_result,
                                 sys.exc_info()[1]))
            result = safe_result

        posted_task.doc['result'] = result
        posted_task.doc['status'] = 'completed'
        posted_task.doc['completed_at'] = datetime.datetime.utcnow()

        posted_task.save_into(self.results_collection)

        posted_task.notify_results(self.activity_log)
Example #8
def clean_message(body, msg):
    ## Sanity Checks
    rk = msg.delivery_info["routing_key"]
    if not rk:
        raise Exception("Invalid routing-key '%s' (%s)" % (rk, body))

        # logger.debug("Event: %s" % rk)
        # logger.info( body )
        ## Try to decode event
    if isinstance(body, dict):
        event = body
    else:
        logger.debug(" + Decode JSON")
        try:
            if isinstance(body, str) or isinstance(body, unicode):
                try:
                    event = json.loads(body)
                    logger.debug("   + Ok")
                except Exception, err:
                    try:
                        logger.debug(" + Try hack for windows string")
                        # Hack for windows FS -_-
                        event = json.loads(body.replace("\\", "\\\\"))
                        logger.debug("   + Ok")
                    except Exception, err:
                        try:
                            logger.debug(" + Decode BSON")
                            bson = BSON(body)
                            event = bson.decode()
                            logger.debug("   + Ok")
                        except Exception, err:
                            raise Exception(err)
Example #9
def tile_socket(ws):
    tilestore = None
    while True:
        message = ws.receive()
        # Wraps entire websocket response, any errors will be reported back
        with app.app_context():
            try:
                req = BSON(message).decode()
                if "init" in req:
                    """
                    Initialization request
                    """
                    tilestore = models.ImageStore.objects.get(id=req["init"]["db"])
                    if tilestore == None:
                        raise Exception("Tile Store %s not found"%(req["init"]["db"]))
                    resp = BSON.encode({"request" : req, "success" : True})

                elif "tile" in req:
                    """
                    Regular request
                    """
                    if tilestore == None:
                        raise Exception("Tile Store not initialized")

                    imgdata = tilestore.get_tile(req["tile"]["image"], req["tile"]["name"])
                    resp = BSON.encode({"request" : req, "image" : Binary(imgdata), "success" : True})
#                    resp = BSON.encode({"request" : req, "image" : b64encode(imgdata), "success" : True})
                else:
                    raise Exception("Unknown request")
                ws.send(resp, True)

            except Exception as e:
                resp = BSON.encode({"request" : req, "error" : e.message})
                ws.send(resp, True)
Example #10
    def _dorequest(self,msg,timeout=5):
        """
        _dorequest: Set up a BSON string and send zmq REQ to ZMQRPC target
        """
        # Set up bson message
        bson = BSON.encode(msg)
        
        # Send...
        try:
            self._pollout.poll(timeout=timeout*1000) # Poll for outbound send, then send
            self._zmqsocket.send(bson,flags=zmq.NOBLOCK)
        except:
            raise ZMQRPCError('Request failure')

        # Poll for inbound then rx
        try:        
            for i in range(0,timeout*100):
                if len(self._pollin.poll(timeout=1)) > 0:
                    break
                time.sleep(0.01)
            msg_in = self._zmqsocket.recv(flags=zmq.NOBLOCK)
        
        except:
            raise ZMQRPCError('Response timeout')

        
        if msg_in == None:
            raise ZMQRPCError('No response')
    
        result = BSON(msg_in).decode()
        
        self._lastrun = result.get('runner')
        
        return result
Example #11
def split_bson(path):
    bsonfile_path = os.path.abspath(path)
    splitsfile_out = os.path.join(os.path.dirname(bsonfile_path),  "." + os.path.basename(bsonfile_path) + ".splits")
    bsonfile = open(bsonfile_path, 'r')
    splitsfile = open(splitsfile_out, 'w')

    file_position = 0
    cur_split_start = 0
    cur_split_size = 0
    while True:
        size_bits = bsonfile.read(4)
        if len(size_bits) < 4:
            if cur_split_size > 0:
                #print {"start":cur_split_start, "length": bsonfile.tell() - cur_split_start}
                splitsfile.write(BSON.encode({"s":long(cur_split_start), "l": long(bsonfile.tell() - cur_split_start)}))
            break
        size = struct.unpack("<i", size_bits)[0] - 4 # BSON size byte includes itself
        file_position += 4
        if cur_split_size + 4 + size > SPLIT_SIZE:
            #print {"start":cur_split_start, "length": bsonfile.tell() - 4 - cur_split_start}
            splitsfile.write(BSON.encode({"s":long(cur_split_start), "l": long(bsonfile.tell() - 4 - cur_split_start)}))
            cur_split_start = bsonfile.tell() - 4
            cur_split_size = 0

        bsonfile.seek(file_position + size)
        file_position += size
        cur_split_size += 4 + size
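The manual length-prefix walk above relies on every BSON document starting with a little-endian int32 that counts the whole document, including the four length bytes and the trailing NUL. For simply iterating the documents of a .bson dump (rather than computing split offsets), bson.decode_file_iter does the same walk; a rough sketch, with an illustrative file name:

from bson import decode_file_iter

with open('dump.bson', 'rb') as bsonfile:
    # Yields one decoded document at a time without loading the whole file.
    for doc in decode_file_iter(bsonfile):
        print(doc)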
Example #12
    def test_infinite_loop_exceeds_max_recursion_depth(self):
        codecopts = CodecOptions(type_registry=TypeRegistry(
            [self.B2A()], fallback_encoder=self.fallback_encoder_A2B))

        # Raises max recursion depth exceeded error
        with self.assertRaises(RuntimeError):
            BSON.encode({'x': self.TypeA(100)}, codec_options=codecopts)
Example #13
    def test_bson_regex(self):
        # Invalid Python regex, though valid PCRE.
        bson_re1 = Regex(r'[\w-\.]')
        self.assertEqual(r'[\w-\.]', bson_re1.pattern)
        self.assertEqual(0, bson_re1.flags)

        doc1 = {'r': bson_re1}
        doc1_bson = b(
            '\x11\x00\x00\x00'              # document length
            '\x0br\x00[\\w-\\.]\x00\x00'    # r: regex
            '\x00')                         # document terminator

        self.assertEqual(doc1_bson, BSON.encode(doc1))
        self.assertEqual(doc1, BSON(doc1_bson).decode(compile_re=False))

        # Valid Python regex, with flags.
        re2 = re.compile('.*', re.I | re.L | re.M | re.S | re.U | re.X)
        bson_re2 = Regex('.*', re.I | re.L | re.M | re.S | re.U | re.X)

        doc2_with_re = {'r': re2}
        doc2_with_bson_re = {'r': bson_re2}
        doc2_bson = b(
            "\x12\x00\x00\x00"           # document length
            "\x0br\x00.*\x00ilmsux\x00"  # r: regex
            "\x00")                      # document terminator

        self.assertEqual(doc2_bson, BSON.encode(doc2_with_re))
        self.assertEqual(doc2_bson, BSON.encode(doc2_with_bson_re))

        # Built-in re objects don't support ==. Compare pattern and flags.
        self.assertEqual(re2.pattern, BSON(doc2_bson).decode()['r'].pattern)
        self.assertEqual(re2.flags, BSON(doc2_bson).decode()['r'].flags)

        self.assertEqual(
            doc2_with_bson_re, BSON(doc2_bson).decode(compile_re=False))
Example #14
    def update(self, spec, document, upsert=False, multi=False, safe=None, flags=0, **kwargs):
        if not isinstance(spec, dict):
            raise TypeError("TxMongo: spec must be an instance of dict.")
        if not isinstance(document, dict):
            raise TypeError("TxMongo: document must be an instance of dict.")
        if not isinstance(upsert, bool):
            raise TypeError("TxMongo: upsert must be an instance of bool.")

        if multi:
            flags |= UPDATE_MULTI
        if upsert:
            flags |= UPDATE_UPSERT

        spec = BSON.encode(spec)
        document = BSON.encode(document)
        update = Update(flags=flags, collection=str(self),
                        selector=spec, update=document)

        proto = yield self._database.connection.getprotocol()
        check_deadline(kwargs.pop("_deadline", None))
        proto.send_UPDATE(update)

        write_concern = self._get_write_concern(safe, **kwargs)
        if write_concern.acknowledged:
            ret = yield proto.get_last_error(str(self._database), **write_concern.document)
            defer.returnValue(ret)
Example #15
def read_gcfile(path):
    """
    Read a gc in_file and decode the encoded scene object.
    Uses the decoder object specified in the gazer.settings.

    Parameters
    ----------
    path : in_file like stream
        File that contains an encoded scene.

    Returns
    -------
    gazer.scene.Scene
        Scene object encoded in the in_file or None if no valid Scene
        was encoded.
    """
    logger.debug('Reading file as gcfile {}'.format(path))

    with open(path, 'rb') as in_file:
        try:
            contents = in_file.read()
            bson_obj = BSON(contents)
            wrapper = bson_obj.decode()
            from gazer.settings import DECODERS
            wrapper_type = wrapper['type']
            decoder = DECODERS.get(wrapper_type)
            if decoder is None:
                raise ValueError('Decoder {} not found'.format(wrapper_type))
            body = wrapper['data']
            if wrapper['compression'] == 'bz2':
                body = bz2.decompress(body)
            scene = decoder.scene_from_data(body)
            return scene
        except RuntimeError:
            logger.exception('Failed to read file.')
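A hypothetical write-side counterpart, sketched only from the wrapper fields read above ('type', 'compression', 'data'); the function and its interface are assumptions, not part of the original project:

import bz2
from bson import BSON
from bson.binary import Binary

def write_gcfile(path, wrapper_type, body):
    # Mirror of read_gcfile: bz2-compress the payload and wrap it in the
    # same {'type', 'compression', 'data'} document before BSON-encoding.
    wrapper = {
        'type': wrapper_type,
        'compression': 'bz2',
        'data': Binary(bz2.compress(body)),
    }
    with open(path, 'wb') as out_file:
        out_file.write(BSON.encode(wrapper))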
Example #16
def db_collection_insert(res_id, collection_name):
    # TODO: Ensure request.json is not None.
    if 'document' in request.json:
        document = request.json['document']
    else:
        error = '\'document\' argument not found in the insert request.'
        raise MWSServerError(400, error)

    # Check quota
    size = get_collection_size(res_id, collection_name)

    # Handle inserting either a list of docs or a single doc
    if isinstance(document, list):
        req_size = 0
        for d in document:
            req_size += len(BSON.encode(d))
    else:
        req_size = len(BSON.encode(document))

    if size + req_size > current_app.config['QUOTA_COLLECTION_SIZE']:
        raise MWSServerError(403, 'Collection size exceeded')

    # Insert document
    with UseResId(res_id):
        get_db()[collection_name].insert(document)
        return empty_success()
Example #17
	def __setitem__(self, name, value):
		if hasattr(value, 'mongo_data_manager'):
			if value.has_key('_id'):
				self.uncommitted[name] = DBRef(value.collection.name,
				                               value['_id'])
			else:
				txn = transaction.get()
				if value in txn._resources:
					# this document is part of the current transaction and
					# doesn't have a mongo _id yet; queue it and trigger
					# adding the reference at the end of the transaction
					self.queued[name] = value
				else:
					# this document is not part of the current transaction,
					# so treat it as a regular python dict and make it an
					# embedded document inside the current doc
					logger.warn('mongo document does not exist in mongodb and'
					            ' is not part of current transaction - saving'
								' as embedded instead of a reference')
					self.uncommitted[name] = value.copy()
		else:
			try:
				BSON.encode({name:value})
				self.uncommitted[name] = value
			except:
				self.uncommitted[name] = jsonpickle.encode(value)
Example #18
    def test_backports(self):
        doc = BSON.encode({"tuple": (1, 2)})
        exp = {"tuple": [1, 2]}
        options = CodecOptions(uuid_representation=ALL_UUID_REPRESENTATIONS[0],
                               tz_aware=False, document_class=dict)

        self.assertEqual(
            {"tuple": [1, 2]},
            BSON.encode(
                {"tuple": (1, 2)}, codec_options=options,
                uuid_subtype=ALL_UUID_REPRESENTATIONS[1]).decode())
        self.assertEqual(exp, doc.decode(
            as_class=SON,
            tz_aware=True,
            uuid_subtype=ALL_UUID_REPRESENTATIONS[1],
            codec_options=options))
        self.assertEqual([exp], list(decode_iter(
            doc,
            as_class=SON,
            tz_aware=True,
            uuid_subtype=ALL_UUID_REPRESENTATIONS[1],
            codec_options=options)))
        self.assertEqual([exp], list(decode_file_iter(
            StringIO(doc),
            as_class=SON,
            tz_aware=True,
            uuid_subtype=ALL_UUID_REPRESENTATIONS[1],
            codec_options=options)))
        self.assertEqual([exp], decode_all(
            doc, SON, True, ALL_UUID_REPRESENTATIONS[1], True, options))
Example #19
 def test_EncodeDecodeUpdate(self):
     request = Update(
         flags=UPDATE_MULTI | UPDATE_UPSERT,
         collection="coll",
         selector=BSON.encode({"x": 42}),
         update=BSON.encode({"$set": {"y": 123}}),
     )
     self.__test_encode_decode(request)
Example #20
    def test_overflow(self):
        self.assertTrue(BSON.encode({"x": long(9223372036854775807)}))
        self.assertRaises(OverflowError, BSON.encode,
                          {"x": long(9223372036854775808)})

        self.assertTrue(BSON.encode({"x": long(-9223372036854775808)}))
        self.assertRaises(OverflowError, BSON.encode,
                          {"x": long(-9223372036854775809)})
Example #21
    def test_encode_custom_then_fallback(self):
        codecopts = CodecOptions(type_registry=TypeRegistry(
            [self.B2A()], fallback_encoder=self.fallback_encoder_A2BSON))
        testdoc = {'x': self.TypeB(123)}
        expected_bytes = BSON.encode({'x': 123})

        self.assertEqual(BSON.encode(testdoc, codec_options=codecopts),
                         expected_bytes)
Example #22
    def test_overflow(self):
        self.assertTrue(BSON.encode({"x": 9223372036854775807L}))
        self.assertRaises(OverflowError, BSON.encode,
                          {"x": 9223372036854775808L})

        self.assertTrue(BSON.encode({"x": -9223372036854775808L}))
        self.assertRaises(OverflowError, BSON.encode,
                          {"x": -9223372036854775809L})
Example #23
    def _generate_batch_commands(collname, command, docs_field, documents, ordered,
                                 write_concern, max_bson, max_count):
        # Takes a list of documents and generates one or more `insert` commands,
        # keeping the documents list in each command at or below max_bson bytes
        # and at most max_count documents long.

        # Manually composing the command in BSON form because this way the
        # costly document serialization is performed only once.

        msg = SON([(command, collname),
                   ("ordered", ordered),
                   ("writeConcern", write_concern.document)])

        buf = io.BytesIO()
        buf.write(BSON.encode(msg))
        buf.seek(-1, io.SEEK_END)  # -1 because we don't need final NUL from partial command
        buf.write(docs_field)  # type, name and length placeholder of 'documents' array
        docs_start = buf.tell() - 4

        def prepare_command():
            docs_end = buf.tell() + 1  # +1 for final NUL for 'documents'
            buf.write(b'\x00\x00')  # final NULs for 'documents' and the command itself
            total_length = buf.tell()

            # writing 'documents' length
            buf.seek(docs_start)
            buf.write(struct.pack('<i', docs_end - docs_start))

            # writing total message length
            buf.seek(0)
            buf.write(struct.pack('<i', total_length))

            return BSON(buf.getvalue())

        idx = 0
        idx_offset = 0
        for doc in documents:
            key = str(idx).encode('ascii')
            value = BSON.encode(doc)

            enough_size = buf.tell() + len(key)+2 + len(value) - docs_start > max_bson
            enough_count = idx >= max_count
            if enough_size or enough_count:
                yield idx_offset, prepare_command()

                buf.seek(docs_start + 4)
                buf.truncate()

                idx_offset += idx
                idx = 0
                key = b'0'

            buf.write(b'\x03' + key + b'\x00')  # type and key of document
            buf.write(value)

            idx += 1

        yield idx_offset, prepare_command()
Example #24
    def test_custom_class(self):
        self.assertTrue(isinstance(BSON.encode({}).decode(), dict))
        self.assertFalse(isinstance(BSON.encode({}).decode(), SON))
        self.assertTrue(isinstance(BSON.encode({}).decode(SON), SON))

        self.assertEqual(1, BSON.encode({"x": 1}).decode(SON)["x"])

        x = BSON.encode({"x": [{"y": 1}]})
        self.assertTrue(isinstance(x.decode(SON)["x"][0], SON))
Example #25
def debug(req):
    if req.method == 'GET':
        print (BSON.encode({'hello': 'world'}))
        return HttpResponse(BSON.encode({'hello': 'world'}))
    else:
        try:
            return HttpResponse(dumps(BSON(req.body).decode()))
        except:
            return HttpResponse('{"error": "decode error"}')
Example #26
 def test_EncodeDecodeQuery(self):
     request = Query(
         collection="coll",
         n_to_skip=123,
         n_to_return=456,
         query=BSON.encode({"x": 42}),
         fields=BSON.encode({"y": 1}),
     )
     self.__test_encode_decode(request)
Example #27
 def update(col, spec, doc, upsert, multi):
     colname = _make_c_string(col)
     flags = 0
     if upsert:
         flags |= 1 << 0
     if multi:
         flags |= 1 << 1
     fmt = '<i%dsi' % len(colname)
     part = struct.pack(fmt, 0, colname, flags)
     return "%s%s%s" % (part, BSON.encode(spec), BSON.encode(doc))
Example #28
 def query(col, spec, fields, skip, limit):
     data = [
         _ZERO, 
         _make_c_string(col), 
         struct.pack('<ii', skip, limit),
         BSON.encode(spec or {}),
     ]
     if fields:
         data.append(BSON.encode(dict.fromkeys(fields, 1)))
     return "".join(data)
Example #29
 def scene_from_data(self, data):
     bson_data = BSON(data)
     data_dict = bson_data.decode()
     decoded_array = self._decode_array(data_dict[u'lookup_table'])
     lut = ArrayLookupTable(decoded_array)
     frames = [self._decode_array(value) for key, value
               in
               sorted(data_dict['frames'].items(), key=lambda x: int(x[0]))]
     image_manager = ArrayStackImageManager(frames)
     scene = ImageStackScene(image_manager, lut)
     return scene
Example #30
 def get_size(self):
     """
     return the size of the underlying bson object
     """
     try:
         size = len(BSON.encode(self))
     except:
         self._process_custom_type('bson', self, self.structure)
         size = len(BSON.encode(self))
         self._process_custom_type('python', self, self.structure)
     return size
Example #31
    def test_encode_then_decode(self):
        def helper(dict):
            self.assertEqual(dict, (BSON.encode(dict)).decode())

        helper({})
        helper({"test": u"hello"})
        self.assertTrue(
            isinstance(
                BSON.encode({
                    "hello": "world"
                }).decode()["hello"], unicode))
        helper({"mike": -10120})
        helper({"long": long(10)})
        helper({"really big long": 2147483648})
        helper({u"hello": 0.0013109})
        helper({"something": True})
        helper({"false": False})
        helper({"an array": [1, True, 3.8, u"world"]})
        helper({"an object": {"test": u"something"}})
        helper({"a binary": Binary(b("test"), 100)})
        helper({"a binary": Binary(b("test"), 128)})
        helper({"a binary": Binary(b("test"), 254)})
        helper({"another binary": Binary(b("test"), 2)})
        helper(SON([(u'test dst', datetime.datetime(1993, 4, 4, 2))]))
        helper(
            SON([(u'test negative dst', datetime.datetime(1, 1, 1, 1, 1, 1))]))
        helper({"big float": float(10000000000)})
        helper({"ref": DBRef("coll", 5)})
        helper({"ref": DBRef("coll", 5, foo="bar", bar=4)})
        helper({"ref": DBRef("coll", 5, "foo")})
        helper({"ref": DBRef("coll", 5, "foo", foo="bar")})
        helper({"ref": Timestamp(1, 2)})
        helper({"foo": MinKey()})
        helper({"foo": MaxKey()})
        helper({"$field": Code("function(){ return true; }")})
        helper({
            "$field":
            Code("return function(){ return x; }", scope={'x': False})
        })

        doc_class = dict
        # Work around http://bugs.jython.org/issue1728
        if (sys.platform.startswith('java') and sys.version_info[:3] >=
            (2, 5, 2)):
            doc_class = SON

        def encode_then_decode(doc):
            return doc == (BSON.encode(doc)).decode(as_class=doc_class)

        qcheck.check_unittest(self, encode_then_decode,
                              qcheck.gen_mongo_dict(3))
Example #32
    def control_loop(self, stop_event):
        while not stop_event.is_set():
            try:
                data = self.recv_line()
                if data is not None:
                    if self.bson_protocol:
                        data_json = BSON.decode(BSON(data), codec_options=CodecOptions(uuid_representation=STANDARD))
                        if data_json.get('ping', None):
                            t = data_json.get('ping').get('timestamp')
                            self.send(AgentServerCommandPong(timestamp=long(t)))
                    elif data.startswith('ping'):
                        self.send(AgentServerCommandPong())
                else:
                    self.close()
            except Exception as e:
                if not stop_event.is_set():
                    import traceback
                    error('Unexpected exception receiving from IDM server: {0}'.format(e))

            time.sleep(1)

        info('Ends IDM control message thread..')
        self.control_thread_running = False
Example #33
 def add(self, doc, autoflush=True):
     try:
         self._oplog.write(BSON.encode(doc))
         self._writes_unflushed += 1
         self._count += 1
         if not self._first_ts:
             self._first_ts = doc['ts']
         self._last_ts = doc['ts']
         if autoflush:
             self.autoflush()
     except Exception, e:
         logging.fatal("Cannot write to oplog file %s! Error: %s" %
                       (self.oplog_file, e))
         raise OperationError(e)
Example #34
 def test_aware_datetime(self):
     aware = datetime.datetime(1993,
                               4,
                               4,
                               2,
                               tzinfo=FixedOffset(555, "SomeZone"))
     as_utc = (aware - aware.utcoffset()).replace(tzinfo=utc)
     self.assertEqual(datetime.datetime(1993, 4, 3, 16, 45, tzinfo=utc),
                      as_utc)
     after = BSON.encode({
         "date": aware
     }).decode(CodecOptions(tz_aware=True))["date"]
     self.assertEqual(utc, after.tzinfo)
     self.assertEqual(as_utc, after)
Example #35
 def test_raw(self):
     """Test with RawBSONDocument."""
     raw_coll = self.coll.with_options(
         codec_options=DEFAULT_RAW_BSON_OPTIONS)
     with raw_coll.watch() as change_stream:
         raw_doc = RawBSONDocument(BSON.encode({'_id': 1}))
         self.coll.insert_one(raw_doc)
         change = next(change_stream)
         self.assertIsInstance(change, RawBSONDocument)
         self.assertEqual(change['operationType'], 'insert')
         self.assertEqual(change['ns']['db'], self.coll.database.name)
         self.assertEqual(change['ns']['coll'], self.coll.name)
         self.assertEqual(change['fullDocument'], raw_doc)
         self.assertEqual(change['_id'], change_stream._resume_token)
Example #36
    def get_response(self, func, *args, **kwargs):
        """
        Run a Python function, returning the result in BSON-serializable form.

        The behaviour of this function is to capture either a successful return
        value or exception in a BSON-serializable form (a dictionary with
        `result` and `error` keys).
        """

        result, error = None, None
        try:
            result = func(*args, **kwargs)
        except Exception, exc:
            exc_type = "%s.%s" % (type(exc).__module__, type(exc).__name__)
            exc_message = traceback.format_exception_only(type(exc),
                                                          exc)[-1].strip()
            error = {"type": exc_type, "message": exc_message}
            try:
                BSON.encode({'args': exc.args})
            except TypeError:
                pass
            else:
                error["args"] = exc.args
Example #37
        def prepare_command():
            docs_end = buf.tell() + 1  # +1 for final NUL for 'documents'
            buf.write(b'\x00\x00')  # final NULs for 'documents' and the command itself
            total_length = buf.tell()

            # writing 'documents' length
            buf.seek(docs_start)
            buf.write(struct.pack('<i', docs_end - docs_start))

            # writing total message length
            buf.seek(0)
            buf.write(struct.pack('<i', total_length))

            return BSON(buf.getvalue())
Example #38
 def test_basic_decode(self):
     self.assertEqual({"test": u"hello world"},
                      BSON(
                          b("\x1B\x00\x00\x00\x0E\x74\x65\x73\x74\x00\x0C"
                            "\x00\x00\x00\x68\x65\x6C\x6C\x6F\x20\x77\x6F"
                            "\x72\x6C\x64\x00\x00")).decode())
     self.assertEqual([{
         "test": u"hello world"
     }, {}],
                      decode_all(
                          b("\x1B\x00\x00\x00\x0E\x74\x65\x73\x74"
                            "\x00\x0C\x00\x00\x00\x68\x65\x6C\x6C"
                            "\x6f\x20\x77\x6F\x72\x6C\x64\x00\x00"
                            "\x05\x00\x00\x00\x00")))
Example #39
 def restore(self, dump_path, exclude=None):
     """ 恢复数据库 """
     for fp in glob.glob(path.join(dump_path, '*')):
         n = path.basename(fp)
         if exclude and n in exclude:
             continue
         with open(fp, 'rb') as f:
             d = BSON(f.read()).decode()
         table = self.get_table(n)
         table.drop()
         docs = d['data']
         if docs:
             table.insert(docs)
             print('restore:' + n)
Example #40
    def test_basic_validation(self):
        self.assertRaises(TypeError, is_valid, 100)
        self.assertRaises(TypeError, is_valid, u"test")
        self.assertRaises(TypeError, is_valid, 10.4)

        self.assertFalse(is_valid("test"))

        # the simplest valid BSON document
        self.assert_(is_valid("\x05\x00\x00\x00\x00"))
        self.assert_(is_valid(BSON("\x05\x00\x00\x00\x00")))
        self.assertFalse(is_valid("\x04\x00\x00\x00\x00"))
        self.assertFalse(is_valid("\x05\x00\x00\x00\x01"))
        self.assertFalse(is_valid("\x05\x00\x00\x00"))
        self.assertFalse(is_valid("\x05\x00\x00\x00\x00\x00"))
Example #41
def test_utils_montyimport(monty_client, tmp_monty_utils_repo):
    database = "dump_db_JSON"
    collection = "dump_col_JSON"

    with open_repo(tmp_monty_utils_repo):
        with open(JSON_DUMP, "w") as dump:
            dump.write(SERIALIZED)

        montyimport(database, collection, JSON_DUMP)

        col = monty_client[database][collection]
        for i, doc in enumerate(col.find(sort=[("_id", 1)])):
            assert doc == BSON.encode(DOCUMENTS[i]).decode()

        os.remove(JSON_DUMP)
Example #42
def test_utils_montyrestore(monty_client, tmp_monty_utils_repo):
    database = "dump_db_BSON"
    collection = "dump_col_BSON"

    with open_repo(tmp_monty_utils_repo):
        with open(BSON_DUMP, "wb") as dump:
            dump.write(base64.b64decode(BINARY))

        montyrestore(database, collection, BSON_DUMP)

        col = monty_client[database][collection]
        for i, doc in enumerate(col.find(sort=[("_id", 1)])):
            assert doc == BSON.encode(DOCUMENTS[i]).decode()

        os.remove(BSON_DUMP)
Example #43
def chaton(s, chatId):
    print " [*] CHATON from " + str(chatId)

    data = '\x07\x00\x00\x00'  # Packet ID
    data += '\x00\x00'  # Status Code : when sending command -> 0
    data += 'CHATON\x00\x00\x00\x00\x00'  # Method
    data += '\x00'  # Body Type : when sending command -> 0

    body = BSON.encode({u'chatId': chatId})

    data += body[:4]
    data += body

    succ = command_send(s, data)
    return succ
Example #44
def leave(s, chatId):
    print " [*] LEAVE from " + str(chatId)

    data = '\x06\x00\x00\x00'  # Packet ID
    data += '\x00\x00'  # Status Code : when sending command -> 0
    data += 'LEAVE\x00\x00\x00\x00\x00\x00'  # Method
    data += '\x00'  # Body Type : when sending command -> 0

    body = BSON.encode({u'chatId': chatId})

    data += body[:4]
    data += body

    succ = command_send(s, data)
    return succ
Example #45
    def test_CursorClosing(self):
        # Calculate number of objects in 4mb batch
        obj_count_4mb = 4 * 1024**2 // len(
            BSON.encode(self.__make_big_object())) + 1

        first_batch = 5
        yield self.coll.insert([
            self.__make_big_object()
            for _ in range(first_batch + obj_count_4mb)
        ])
        result = yield self.coll.find(limit=first_batch)

        self.assertEqual(len(result), 5)

        yield self.__check_no_open_cursors()
Example #46
def cwrite(s, memId = [], msg = u'test'):
    print " [*] CWRITE to " + str(memId) + " : " + msg

    data = '\x06\x00\x00\x00' # Packet ID
    data += '\x00\x00' # Status Code : when sending command -> 0
    data += 'CWRITE\x00\x00\x00\x00\x00' # Method
    data += '\x00' # Body Type : when sending command -> 0

    body = BSON.encode({u'memberIds': memId, u'msg': msg, u'extra': None, u'pushAlert': True})

    data += body[:4]
    data += body

    succ = send(s, data)
    return succ
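Examples #43, #44 and #46 all build the same frame by hand: a 4-byte packet id, a 2-byte status, a method name NUL-padded to 11 bytes, a body-type byte, then the BSON body prefixed by its own 4-byte length (body[:4] is exactly that length field). A generic builder in that spirit, offered as a hedged sketch rather than part of the original client:

import struct
from bson import BSON

def build_packet(packet_id, method, payload, status=0, body_type=0):
    # '<iH11sB' = packet id, status, method padded to 11 bytes, body type.
    # The body-length field reuses the little-endian int32 that BSON already
    # places at the start of the encoded document.
    body = BSON.encode(payload)
    header = struct.pack('<iH11sB', packet_id, status, method.encode('ascii'), body_type)
    return header + body[:4] + body

# e.g. build_packet(7, 'CHATON', {u'chatId': 123})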
Example #47
def reload_geo():
    # check to see if there is an existing file. If so, then do not reload
    col = db.fs.files.find_one()

    if col == None:
        # if the geojson file is not stored, call the API.
        response = requests.get("https://opendata.arcgis.com/datasets/d192da4d0ac249fa9584109b1d626286_0.geojson")

        # GridFS stores BSON binary files; the function to do that is BSON.encode
        geojson = BSON.encode(response.json())

        # then we store it with the put()
        fs.put(geojson)

    return get_geo()
Example #48
 def write(self, do_merge=False):
     f = None
     try:
         self.lock.acquire()
         if do_merge and os.path.isfile(self.state_file):
             curr = self.load(True)
             data = self.merge(self.state, curr)
         f = open(self.state_file, 'w+')
         logging.debug("Writing %s state file: %s" % (self.__class__.__name__, self.state_file))
         self.state['updated_at'] = int(time())
         f.write(BSON.encode(self.state))
     finally:
         if f:
             f.close()
         self.lock.release()
Example #49
 def _read(self):
     try:
         size_bits = self.fh.read(4)
         size = struct.unpack(
             "<i", size_bits)[0] - 4  # BSON size byte includes itself
         data = size_bits + self.fh.read(size)
         if len(data) != size + 4:
             raise struct.error(
                 "Unable to cleanly read expected BSON Chunk; EOF, underful buffer or invalid object size."
             )
         if data[size + 4 - 1] != "\x00":
             raise InvalidBSON("Bad EOO in BSON Data")
         if self.fast_string_prematch in data:
             if self.decode:
                 try:
                     return BSON(data).decode(tz_aware=True)
                 except TypeError:
                     return BSON(data).decode()
             else:
                 return data
         raise ValueError("Unknown Error")
     except struct.error, e:
         self.eof = True
         raise StopIteration(e)
Example #50
    def __process_connection_message_bson(self, response):
        message_ok = False
        try:
            bson_msg = BSON.decode(BSON(response), codec_options=CodecOptions(uuid_representation=STANDARD))
            print bson_msg
            if bson_msg.get('ok') is not None:
                if bson_msg['ok'].get('id', None):
                    message_ok = True
                    print bson_msg['ok'].get('id', None)
            elif bson_msg.get('noack') is not None:
                your_sensor_id = bson_msg['noack'].get('your_sensor_id', None)
                if your_sensor_id is not None:
                    self.__sensor_id_change_request_received = True
                    debug('UUID change request from :%s ' % self.__nameServer)
                    lock_uuid.acquire()
                    self.__write_new_system_id(str(your_sensor_id))
                    self.__set_sensor_id(str(your_sensor_id))
                    lock_uuid.release()
                else:
                    error('Bad response from server')
        except InvalidBSON:
            error('Bad response from server {0}'.format(response))

        return message_ok
Example #51
    def test_decrypt(self):
        mc = self.create_mongocrypt()
        self.addCleanup(mc.close)
        with mc.decryption_context(
                bson_data('encrypted-command-reply.json')) as ctx:
            self.assertEqual(ctx.state, lib.MONGOCRYPT_CTX_NEED_MONGO_KEYS)

            self._test_kms_context(ctx)

            self.assertEqual(ctx.state, lib.MONGOCRYPT_CTX_READY)

            encrypted = ctx.finish()
            self.assertEqual(
                BSON(encrypted).decode(), json_data('command-reply.json'))
            self.assertEqual(encrypted, bson_data('command-reply.json'))
            self.assertEqual(ctx.state, lib.MONGOCRYPT_CTX_DONE)
Example #52
        def post(self):
            """
            Receive a snapshot and pass the result either to the queue or to the publish function
            """
            data = BSON.decode(request.get_data())
            user_id = data[consts.USER_ID]
            date = datetime.fromtimestamp(int(data[consts.DATETIME]) / 1000)
            context = Context(base_save_path, user_id, date)

            dump_binary_data(data, context)

            if queue is not None:
                queue.publish(consts.PARSER_INPUT_EXCHANGE_NAME,
                              json.dumps(data))
            else:
                publish(json.dumps(data))
Example #53
    def test_basic_validation(self):
        self.assertRaises(TypeError, is_valid, 100)
        self.assertRaises(TypeError, is_valid, u"test")
        self.assertRaises(TypeError, is_valid, 10.4)

        self.assertFalse(is_valid(b("test")))

        # the simplest valid BSON document
        self.assertTrue(is_valid(b("\x05\x00\x00\x00\x00")))
        self.assertTrue(is_valid(BSON(b("\x05\x00\x00\x00\x00"))))
        self.assertFalse(is_valid(b("\x04\x00\x00\x00\x00")))
        self.assertFalse(is_valid(b("\x05\x00\x00\x00\x01")))
        self.assertFalse(is_valid(b("\x05\x00\x00\x00")))
        self.assertFalse(is_valid(b("\x05\x00\x00\x00\x00\x00")))
        self.assertFalse(is_valid(b("\x07\x00\x00\x00\x02a\x00\x78\x56\x34\x12")))
        self.assertFalse(is_valid(b("\x09\x00\x00\x00\x10a\x00\x05\x00")))
Example #54
    def _PostRequest(self, oparms):
        oparms['_asyncrequest'] = True

        if self.Binary:
            data = BSON.encode(oparms)
        else:
            data = json.dumps(oparms, sort_keys=True)

        try:
            self.MessagesSent += 1
            self.Socket.sendto(data, (self.Address, self.Port))
        except:
            print 'send failed; ', sys.exc_info()[0]
            return json.loads('{"_Success" : 0, "_Message" : "unknown error"}')

        return json.loads('{"_Success" : 2}')
Example #55
 def test_round_trip_codec_options(self):
     doc = {
         'date': datetime.datetime(2015, 6, 3, 18, 40, 50, 826000),
         '_id': uuid.UUID('026fab8f-975f-4965-9fbf-85ad874c60ff')
     }
     db = self.client.pymongo_test
     coll = db.get_collection(
         'test_raw',
         codec_options=CodecOptions(uuid_representation=JAVA_LEGACY))
     coll.insert_one(doc)
     raw_java_legacy = CodecOptions(uuid_representation=JAVA_LEGACY,
                                    document_class=RawBSONDocument)
     coll = db.get_collection('test_raw', codec_options=raw_java_legacy)
     self.assertEqual(
         RawBSONDocument(BSON.encode(doc, codec_options=raw_java_legacy)),
         coll.find_one())
Example #56
    def test_all_bson_types(self):
        # Because we can't round-trip all BSON types (see _DEPRECATED_BSON_TYPES
        # above for how these are handled), make this test a special case,
        # instead of mangling our create_test function below.
        with open(os.path.join(_TEST_PATH, 'multi-type.json')) as spec_file:
            case_spec = json.load(spec_file)
        for valid_case in case_spec.get('valid', []):
            B = binascii.unhexlify(b(valid_case['bson']))
            E = valid_case['extjson']

            # Make sure that the BSON and JSON decode to the same document.
            self.assertEqual(
                json_util.loads(E,
                                json_options=json_util.CANONICAL_JSON_OPTIONS),
                BSON(B).decode(codec_options=CodecOptions(document_class=SON,
                                                          tz_aware=True)))
Example #57
def get_geo():

    # to access the data, first we need to get the collection where the files are stored
    col = db.fs.files.find_one()

    if col == None:
        print("GeoJSON is still loading please wait and reselect year")
        data = {}
    else:
    # once we have the object storing the file information, we can get the data and read it
        bsdata = fs.get(col["_id"]).read()

        # since the data was encoded, we need to decode it back
        data = BSON.decode(bsdata)

    return jsonify(data)
Example #58
    def test_CursorClosingWithCursor(self):
        # Calculate number of objects in 4mb batch
        obj_count_4mb = 4 * 1024**2 // len(BSON.encode(self.__make_big_object())) + 1

        first_batch = 5
        yield self.coll.insert(
            [self.__make_big_object() for _ in range(first_batch + obj_count_4mb)])

        result = []
        docs, dfr = yield self.coll.find_with_cursor({}, limit=first_batch)
        while docs:
            result.extend(docs)
            docs, dfr = yield dfr

        self.assertEqual(len(result), 5)

        yield self.__check_no_open_cursors()
Example #59
    def check_encode_then_decode(self, doc_class=dict):

        # Work around http://bugs.jython.org/issue1728
        if sys.platform.startswith('java'):
            doc_class = SON

        def helper(doc):
            self.assertEqual(doc, (BSON.encode(doc_class(doc))).decode())
        helper({})
        helper({"test": u("hello")})
        self.assertTrue(isinstance(BSON.encode({"hello": "world"})
                                   .decode()["hello"],
                                   text_type))
        helper({"mike": -10120})
        helper({"long": Int64(10)})
        helper({"really big long": 2147483648})
        helper({u("hello"): 0.0013109})
        helper({"something": True})
        helper({"false": False})
        helper({"an array": [1, True, 3.8, u("world")]})
        helper({"an object": doc_class({"test": u("something")})})
        helper({"a binary": Binary(b"test", 100)})
        helper({"a binary": Binary(b"test", 128)})
        helper({"a binary": Binary(b"test", 254)})
        helper({"another binary": Binary(b"test", 2)})
        helper(SON([(u('test dst'), datetime.datetime(1993, 4, 4, 2))]))
        helper(SON([(u('test negative dst'),
                     datetime.datetime(1, 1, 1, 1, 1, 1))]))
        helper({"big float": float(10000000000)})
        helper({"ref": DBRef("coll", 5)})
        helper({"ref": DBRef("coll", 5, foo="bar", bar=4)})
        helper({"ref": DBRef("coll", 5, "foo")})
        helper({"ref": DBRef("coll", 5, "foo", foo="bar")})
        helper({"ref": Timestamp(1, 2)})
        helper({"foo": MinKey()})
        helper({"foo": MaxKey()})
        helper({"$field": Code("function(){ return true; }")})
        helper({"$field": Code("return function(){ return x; }", scope={'x': False})})

        def encode_then_decode(doc):
            return doc_class(doc) == BSON.encode(doc).decode(
                CodecOptions(document_class=doc_class))

        qcheck.check_unittest(self, encode_then_decode,
                              qcheck.gen_mongo_dict(3))
Example #60
def test_utils_montyrestore(tmp_monty_repo):
    database = "dump_db_BSON"
    collection = "dump_col_BSON"

    if not os.path.isdir(tmp_monty_repo):
        os.makedirs(tmp_monty_repo)

    with open_repo(tmp_monty_repo):
        with open(BSON_DUMP, "wb") as dump:
            dump.write(base64.b64decode(BINARY))

        montyrestore(database, collection, BSON_DUMP)

        client = MontyClient()
        for i, doc in enumerate(client[database][collection].find()):
            assert doc == BSON.encode(DOCUMENTS[i]).decode()

        os.remove(BSON_DUMP)