def testDb(self): from Db import Db for db_path in [os.path.abspath("%s/test/zeronet.db" % config.data_dir), "%s/test/zeronet.db" % config.data_dir]: print "Creating db using %s..." % db_path, schema = { "db_name": "TestDb", "db_file": "%s/test/zeronet.db" % config.data_dir, "map": { "data.json": { "to_table": { "test": "test" } } }, "tables": { "test": { "cols": [ ["test_id", "INTEGER"], ["title", "TEXT"], ], "indexes": ["CREATE UNIQUE INDEX test_id ON test(test_id)"], "schema_changed": 1426195822 } } } if os.path.isfile("%s/test/zeronet.db" % config.data_dir): os.unlink("%s/test/zeronet.db" % config.data_dir) db = Db(schema, "%s/test/zeronet.db" % config.data_dir) db.checkTables() db.close() # Cleanup os.unlink("%s/test/zeronet.db" % config.data_dir) os.rmdir("%s/test/" % config.data_dir)
def main():
    """Re-scrape every stored product URL and refresh its row in website_lazadatb.

    Reads the URL list from the db, downloads each page over https, parses it
    and writes the parsed item back, pausing between requests.
    """
    db = Db(dbconfig)
    for row in get_urls(sql, dbconfig):
        url = row[0]
        item = parse_page(get_page('https:' + url))
        print(item)
        db.update('website_lazadatb', item, 'product_url="{0}"'.format(url))
        time.sleep(2)  # throttle: one request every 2 seconds
    db.close()
def testQueries(self):
    """Exercise Db query building: bulk insert, "?" WHERE expansion for scalar,
    list and multi-key values, and named-parameter escaping."""
    db_path = "%s/zeronet.db" % config.data_dir
    schema = {
        "db_name": "TestDb",
        "db_file": "%s/zeronet.db" % config.data_dir,
        "map": {
            "data.json": {
                "to_table": {"test": "test"}
            }
        },
        "tables": {
            "test": {
                "cols": [
                    ["test_id", "INTEGER"],
                    ["title", "TEXT"],
                ],
                "indexes": ["CREATE UNIQUE INDEX test_id ON test(test_id)"],
                "schema_changed": 1426195822
            }
        }
    }
    # Start from a fresh db file
    if os.path.isfile(db_path):
        os.unlink(db_path)
    db = Db(schema, db_path)
    db.checkTables()

    # Insert 100 numbered rows
    for row_id in range(100):
        db.execute("INSERT INTO test ?", {"test_id": row_id, "title": "Test #%s" % row_id})
    assert db.execute("SELECT COUNT(*) AS num FROM test").fetchone()["num"] == 100

    # Scalar WHERE value matches exactly one row
    assert db.execute("SELECT COUNT(*) AS num FROM test WHERE ?", {"test_id": 1}).fetchone()["num"] == 1

    # A list of values matches any of them; multiple keys are combined
    assert db.execute("SELECT COUNT(*) AS num FROM test WHERE ?", {"test_id": [1,2,3]}).fetchone()["num"] == 3
    assert db.execute("SELECT COUNT(*) AS num FROM test WHERE ?", {"test_id": [1,2,3], "title": "Test #2"}).fetchone()["num"] == 1
    assert db.execute("SELECT COUNT(*) AS num FROM test WHERE ?", {"test_id": [1,2,3], "title": ["Test #2", "Test #3", "Test #4"]}).fetchone()["num"] == 2

    # Named parameter escaping
    row = db.execute(
        "SELECT COUNT(*) AS num FROM test WHERE test_id = :test_id AND title LIKE :titlelike",
        {"test_id": 1, "titlelike": "Test%"}
    ).fetchone()
    assert row["num"] == 1

    db.close()
    # Cleanup
    os.unlink(db_path)
def testCheckTables(self):
    """Verify checkTables() creates the built-in tables (keyvalue, json) plus
    the table declared in the schema, with the declared columns."""
    db_path = "%s/zeronet.db" % config.data_dir
    schema = {
        "db_name": "TestDb",
        "db_file": "%s/zeronet.db" % config.data_dir,
        "map": {
            "data.json": {
                "to_table": {"test": "test"}
            }
        },
        "tables": {
            "test": {
                "cols": [
                    ["test_id", "INTEGER"],
                    ["title", "TEXT"],
                ],
                "indexes": ["CREATE UNIQUE INDEX test_id ON test(test_id)"],
                "schema_changed": 1426195822
            }
        }
    }
    # Start from a fresh db file
    if os.path.isfile(db_path):
        os.unlink(db_path)
    db = Db(schema, db_path)
    db.checkTables()
    db.close()

    # Reopen and verify the tables exist
    assert os.path.isfile(db_path)
    db = Db(schema, db_path)
    table_names = [row["name"] for row in db.execute("SELECT name FROM sqlite_master WHERE type='table'")]
    assert "keyvalue" in table_names  # To store simple key -> value
    assert "json" in table_names  # Json file path registry
    assert "test" in table_names  # The table defined in dbschema.json

    # Verify the test table's columns
    col_names = [col["name"] for col in db.execute("PRAGMA table_info(test)")]
    assert "test_id" in col_names
    assert "title" in col_names
    db.close()

    # Cleanup
    os.unlink(db_path)
def testDb(self): print "Importing db..." from Db import Db for db_path in [ os.path.abspath("data/test/zeronet.db"), "data/test/zeronet.db" ]: print "Creating db using %s..." % db_path, schema = { "db_name": "TestDb", "db_file": "data/test/zeronet.db", "map": { "data.json": { "to_table": { "test": "test" } } }, "tables": { "test": { "cols": [ ["test_id", "INTEGER"], ["title", "TEXT"], ], "indexes": ["CREATE UNIQUE INDEX test_id ON test(test_id)"], "schema_changed": 1426195822 } } } if os.path.isfile("data/test/zeronet.db"): os.unlink("data/test/zeronet.db") db = Db(schema, "data/test/zeronet.db") db.checkTables() db.close() # Cleanup os.unlink("data/test/zeronet.db") os.rmdir("data/test/") print "ok"
class SiteStorage:
    """File and sqlite storage for one site's data/ directory, including
    building and rebuilding the site database from its json files."""

    def __init__(self, site, allow_create=True):
        # site: the Site object this storage belongs to
        # allow_create: create the site's data directory if missing (else raise)
        self.site = site
        self.directory = "data/%s" % self.site.address  # Site data directory
        self.log = site.log
        self.db = None  # Db class
        self.db_checked = False  # Checked db tables since startup
        self.event_db_busy = None  # Gevent AsyncResult if db is working on rebuild
        self.has_db = self.isFile("dbschema.json")  # The site has a db schema
        if not os.path.isdir(self.directory):
            if allow_create:
                os.mkdir(self.directory)  # Create directory if not found
            else:
                raise Exception("Directory not exists: %s" % self.directory)

    # Load db from dbschema.json
    def openDb(self, check=True):
        """Open the site db; with check=True, rebuild it first if the file is
        missing/empty, and rebuild table data if checkTables() reports changes."""
        schema = self.loadJson("dbschema.json")
        db_path = self.getPath(schema["db_file"])
        if check:
            if not os.path.isfile(db_path) or os.path.getsize(db_path) == 0:  # Not exists or empty file
                self.rebuildDb()
        self.db = Db(schema, db_path)
        if check and not self.db_checked:
            changed_tables = self.db.checkTables()
            if changed_tables:
                self.rebuildDb(delete_db=False)  # TODO: only update the changed table datas

    def closeDb(self):
        """Close the db if it is open."""
        if self.db:
            self.db.close()

    # Return db class
    def getDb(self):
        """Lazily open and return the Db instance (None if the site has no schema)."""
        if not self.db and self.has_db:
            self.openDb()
        return self.db

    # Rebuild sql cache
    def rebuildDb(self, delete_db=True):
        """Rebuild the site db by re-importing every mapped json file.

        delete_db: when True, delete the existing db file and recreate it from
        scratch; when False, keep the file and re-import the data only.
        Waiters can block on event_db_busy while the rebuild is in progress.
        """
        self.event_db_busy = gevent.event.AsyncResult()
        schema = self.loadJson("dbschema.json")
        db_path = self.getPath(schema["db_file"])
        if os.path.isfile(db_path) and delete_db:
            if self.db:
                self.db.close()  # Close db if open
            self.log.info("Deleting %s" % db_path)
            try:
                os.unlink(db_path)
            except Exception, err:
                # Best-effort delete: log and continue with the old file in place
                self.log.error("Delete error: %s" % err)
        self.openDb(check=False)  # check=False to avoid recursing back into rebuildDb
        self.log.info("Creating tables...")
        self.db.checkTables()
        self.log.info("Importing data...")
        cur = self.db.getCursor()
        cur.execute("BEGIN")  # One transaction for the whole import
        cur.logging = False  # Avoid per-statement log spam during bulk import
        found = 0  # Number of json files successfully imported
        s = time.time()
        for content_inner_path, content in self.site.content_manager.contents.items():
            content_path = self.getPath(content_inner_path)
            if os.path.isfile(content_path):
                if self.db.loadJson(content_path, cur=cur):
                    found += 1
            else:
                self.log.error("[MISSING] %s" % content_inner_path)  # Missing content.json file
            for file_relative_path in content["files"].keys():
                if not file_relative_path.endswith(".json"):
                    continue  # We are only interested in json files
                file_inner_path = self.site.content_manager.toDir(content_inner_path)+file_relative_path  # Relative to content.json
                file_inner_path = file_inner_path.strip("/")  # Strip leading /
                file_path = self.getPath(file_inner_path)
                if os.path.isfile(file_path):
                    if self.db.loadJson(file_path, cur=cur):
                        found += 1
                else:
                    self.log.error("[MISSING] %s" % file_inner_path)
        cur.execute("END")
        self.log.info("Imported %s data file in %ss" % (found, time.time()-s))
        self.event_db_busy.set(True)  # Event done, notify waiters
        self.event_db_busy = None  # Clear event
["title", "TEXT"], ["json_id", "INTEGER REFERENCES json (json_id)"] ], "indexes": ["CREATE UNIQUE INDEX test_key ON test(test_id, json_id)"], "schema_changed": 1426195822 } } } if os.path.isfile("%s/benchmark.db" % config.data_dir): os.unlink("%s/benchmark.db" % config.data_dir) with benchmark("Open x 10", 0.13): for i in range(10): db = Db(schema, "%s/benchmark.db" % config.data_dir) db.checkTables() db.close() yield "." db = Db(schema, "%s/benchmark.db" % config.data_dir) db.checkTables() import json with benchmark("Insert x 10 x 1000", 1.0): for u in range(10): # 10 user data = {"test": []} for i in range(1000): # 1000 line of data data["test"].append({"test_id": i, "title": "Testdata for %s message %s" % (u, i)}) json.dump(data, open("%s/test_%s.json" % (config.data_dir, u), "w")) db.loadJson("%s/test_%s.json" % (config.data_dir, u)) os.unlink("%s/test_%s.json" % (config.data_dir, u))
["json_id", "INTEGER REFERENCES json (json_id)"]], "indexes": ["CREATE UNIQUE INDEX test_key ON test(test_id, json_id)"], "schema_changed": 1426195822 } } } if os.path.isfile("data/benchmark.db"): os.unlink("data/benchmark.db") with benchmark("Open x 10", 0.13): for i in range(10): db = Db(schema, "data/benchmark.db") db.checkTables() db.close() yield "." db = Db(schema, "data/benchmark.db") db.checkTables() import json with benchmark("Insert x 10 x 1000", 1.0): for u in range(10): # 10 user data = {"test": []} for i in range(1000): # 1000 line of data data["test"].append({ "test_id": i, "title": "Testdata for %s message %s" % (u, i)