Exemplo n.º 1
0
    def tearDown(self):
        """Predefined tearDown function for cleaning up after tests,
        in our case deleting any generated db files.

        Stops the MongoDB instance started by setUp, removes the
        on-disk database directory, and asserts that the directory
        is actually gone.
        """
        import os
        import shutil
        from ezdb.mongo import Mongo

        db_path = "./unit_test_db"
        db = Mongo({
            "pylog": null_printer,
            "db_path": db_path,
            "db_log_path": db_path
        })
        db.stop()
        # db_path is a constant string assigned above, so the original
        # `if (db_path is not None)` guard was always true — dead check
        # removed; behavior is unchanged.
        shutil.rmtree(db_path)

        # for tests only to check db directory has been removed
        self.assertFalse(os.path.isdir(db_path))
Exemplo n.º 2
0
    def test_dump(self):
        """Test/ example of dump and retrieve from a MongoDB database."""
        from ezdb.mongo import Mongo

        # connect with a default configuration and a silent logger
        mongo = Mongo({"pylog": null_printer})
        self.assertIsInstance(mongo, Mongo)
        mongo.connect()
        # write a single document, then read it back batch by batch
        mongo.dump(db_collection_name="test", data={"success": 1})
        test_cursor = mongo.getCursor(db_collection_name="test")
        for document_batch in mongo.getBatches(db_data_cursor=test_cursor):
            self.assertEqual(len(document_batch), 1)
            for document in document_batch:
                self.assertEqual(document["success"], 1)
Exemplo n.º 3
0
    def single_gridfs_data_stream(self, graphical=None):
        """Dump data to database sequentially.

        Iterates every gridfs file in the configured ``.files``
        collection and writes each one out as a local ``.jpeg`` file,
        named ``<datetime>_<id>.jpeg`` when datetime metadata exists,
        ``<id>.jpeg`` otherwise. Files already present on disk are
        skipped (their name is printed with a trailing ``*``).

        :param graphical: unused here; kept for interface compatibility
            with callers that toggle progress display. TODO confirm.
        """
        db = Mongo(self.args)
        db.connect()
        cursor = db.getCursor(
            db_collection_name=self.args["db_collection_name"]+".files")
        for batch in tqdm(db.getFiles(db_data_cursor=cursor)):
            for grid in batch:
                print(grid["_id"])
                data = io.BytesIO(grid["gridout"].read())

                try:
                    filename = "{}_{}.jpeg".format(
                        grid["metadata"]["datetime"].strftime(
                            "%Y-%m-%dT%H:%M:%S"),
                        grid["_id"])
                except KeyError:
                    # no datetime in metadata; fall back to the id alone
                    filename = "{}.jpeg".format(grid["_id"])

                # idiomatic truthiness instead of `... is not True`
                if not os.path.isfile(filename):
                    print(filename)
                    with open(filename, "wb") as f:
                        f.write(data.getbuffer())
                else:
                    print(filename, "*")
Exemplo n.º 4
0
    def test_gridfs(self):
        """Test/ example of gridfs dump and retrieve from MongoDB."""
        from ezdb.mongo import Mongo

        mongo = Mongo({"pylog": null_printer})
        self.assertIsInstance(mongo, Mongo)
        mongo.connect()
        # store a (metadata, binary blob) pair via gridfs
        mongo.dump(db_collection_name="test", data=({"success": 1}, b'success'))
        files_cursor = mongo.getCursor(db_collection_name="test.files")
        for file_batch in mongo.getFiles(db_data_cursor=files_cursor):
            for grid_file in file_batch:
                # check ids match
                self.assertEqual(grid_file["_id"],
                                 grid_file["metadata"]["_id"])
                # read file and check is equal to what we put in
                self.assertEqual(grid_file["gridout"].read(), b'success')
Exemplo n.º 5
0
    def test_userAdd(self):
        """Test/ example of adding a database user and looking it up."""
        from ezdb.mongo import Mongo

        mongo = Mongo({"pylog": null_printer})
        self.assertIsInstance(mongo, Mongo)
        mongo.connect()
        # create a readWrite user, then query its info back
        added = mongo.userAdd(username="******", password="******",
                              roles=["readWrite"])
        info = mongo.userInfo(username="******")
        print("TEST USER ADD: {}, {}".format(added, info))
Exemplo n.º 6
0
 def single_gridfs_data_stream(self, graphical=None):
     """Dump data to database sequentially.

     Performs ``self.args["iterations"]`` passes; on each pass every
     item of ``self.data`` is dumped to the configured collection as a
     ({"iteration": i}, data) pair. When ``graphical`` is not None the
     iteration loop is wrapped in a tqdm progress bar.

     :param graphical: if not None, show a tqdm progress bar.
     """
     db = Mongo(self.args)
     db.connect()
     # loop n many iterations
     for i in range(self.args["iterations"]) if graphical is None else tqdm(
             range(self.args["iterations"])):
         # dump all data associated with a single iteration
         for data in self.data:
             print("submitting: ", i)
             db.dump(
                 db_collection_name=self.args["db_collection_name"], data=(
                     {"iteration": i}, data))
Exemplo n.º 7
0
    def setUp(self):
        """Predefined setUp function for preparing tests, in our case
        creating the database."""
        import os
        from ezdb.mongo import Mongo

        # the path and directory we want to use to store the database files
        storage_path = "./unit_test_db"
        config = {
            "pylog": null_printer,
            "db_path": storage_path,
            "db_log_path": storage_path
        }
        db = Mongo(config)
        # initialise the database files and create a basic user
        db.init()
        # start the database with authentication
        db.start()

        # for tests only to check db directory is created
        self.assertTrue(os.path.isdir(storage_path))
Exemplo n.º 8
0
    def test_donate(self):
        """Test/ example of donating data to another collection."""
        from ezdb.mongo import Mongo

        mongo = Mongo({"pylog": null_printer})
        self.assertIsInstance(mongo, Mongo)
        mongo.connect()

        # insert data in the donor collection
        mongo.dump(db_collection_name="donor", data={"success": 1})

        # donate data
        mongo.getCursor(db_collection_name="rec")
        mongo.donate(other=mongo, other_collection="rec",
                     db_collection_name="donor")

        # check donated data is correct
        recipient_cursor = mongo.getCursor(db_collection_name="rec")
        for doc_batch in mongo.getBatches(db_data_cursor=recipient_cursor):
            self.assertEqual(len(doc_batch), 1)
            for document in doc_batch:
                self.assertEqual(document["success"], 1)
Exemplo n.º 9
0
    def test_delete(self):
        """Test that we can delete items from db correctly by id."""
        from ezdb.mongo import Mongo

        mongo = Mongo({"pylog": null_printer})
        self.assertIsInstance(mongo, Mongo)
        mongo.connect()
        mongo.dump(db_collection_name="test", data={"success": 1})
        read_cursor = mongo.getCursor(db_collection_name="test")
        for doc_batch in mongo.getBatches(db_data_cursor=read_cursor):
            self.assertEqual(len(doc_batch), 1)
            for document in doc_batch:
                self.assertEqual(document["success"], 1)
                # remove the document we just verified, by its id
                mongo.deleteId(db_collection_name="test", id=document["_id"])

        # after deletion the collection should yield no batches at all
        check_cursor = mongo.getCursor(db_collection_name="test")
        remaining = list(mongo.getBatches(db_data_cursor=check_cursor))
        self.assertEqual(remaining, [])
Exemplo n.º 10
0
    "db_tls": True,  # secure communication with tls
    "db_tls_ca_file": "CA.cert",  # path to tls certificate
    "db_name": "ff_rasberry",  # database of concern in mongodb
    "db_collection_name": str(top_level_collection_name) +
    ".files",  # unused as we set it explicitly in mapping
    "pylog": logging.debug,  # what logger to use
}

#######################
# JPEG DOWNLOAD FROM DB
#######################

db_connect_args = db_config
projection = [{'$project': {'_id': 1}}]

db = Mongo(db_connect_args)
db.connect()
# BUG FIX: list.extend() mutates in place and returns None, so the
# original `db["db_pipeline"].extend(projection)` passed
# db_pipeline=None to getCursor. Concatenate instead so the combined
# pipeline is actually passed.
# (assumes db["db_pipeline"] is a list — it was the target of
# .extend() in the original, so list-like; TODO confirm.)
db.getCursor(db_collection_name=db_connect_args["db_collection_name"],
             db_pipeline=db["db_pipeline"] + projection)
# db.debug()

every_id = []
# check verify if files exist already
for batch in db.getBatches():
    # collect the string form of every _id present in the database
    every_id.extend(list(map(lambda doc: str(doc["_id"]), batch)))

print(every_id)
file_names = [f for f in glob.glob("*.jpeg")]
print(file_names)

# get data that does not already exist from database
Exemplo n.º 11
0
    "db_collection_name": "test",  # unused as we set it explicitly in mapping
    "pylog": logging.debug,  # what logger to use
}

local_config = {
    # populate the database credentials being moved FROM on the LOCAL system
    # for possible options see:
    # https://python-ezdb.readthedocs.io/en/latest/mongo.html#ezdb.mongo.Mongo.connect
    "db_ip": "127.0.0.1",  # where db is locally accessible
    "db_port": 65530,  # db local port
    "db_name": "local_store",  # db local name
    "db_authentication": None,  # whatever authentication you use see link abv
    "pylog": logging.debug,  # whatever logger you want
}

# one handle per end of the transfer.
# NOTE(review): lcas_config is defined earlier in the file (only its
# tail is visible above) — presumably the remote/source credentials;
# confirm against the full file.
lcas = Mongo(lcas_config)
local_db = Mongo(local_config)

# print both configurations for inspection before connecting
local_db.debug()
lcas.debug()

# establish connections to both databases
local_db.connect()
lcas.connect()

collection_mapping = {
    # A collection map, that lists which collection goes where.
    # source-collection -> destination-collection
    "source1": "destination1",
    "source2": "destination2",
}