def init():
    """Initialise the standalone user-management databases.

    Creates (or opens) the two file-backed pydblite databases whose paths are
    built from ``UM_WORKING_DIR_VOLUME`` plus the per-db config entries, and
    stores them in the module globals ``DB_SHARING_MODEL`` and
    ``DB_USER_PROFILE``. Any failure is logged and swallowed, leaving the
    globals unset.
    """
    global DB_SHARING_MODEL
    global DB_USER_PROFILE
    try:
        # DB_SHARING_MODEL:
        LOG.info(
            "[usermgnt.data.standalone.db] [init] Initializing DB_SHARING_MODEL ["
            + config.dic['UM_WORKING_DIR_VOLUME'] + config.dic['DB_SHARING_MODEL'] + "] ...")
        DB_SHARING_MODEL = Base(config.dic['UM_WORKING_DIR_VOLUME'] + config.dic['DB_SHARING_MODEL'])
        if not DB_SHARING_MODEL.exists():
            # create new base with field names
            DB_SHARING_MODEL.create('id', 'user_id', 'device_id', 'max_apps', 'battery_limit')
        else:
            DB_SHARING_MODEL.open()
        # DB_USER_PROFILE:
        LOG.info(
            "[usermgnt.data.standalone.db] [init] Initializing DB_USER_PROFILE ["
            + config.dic['UM_WORKING_DIR_VOLUME'] + config.dic['DB_USER_PROFILE'] + "] ...")
        DB_USER_PROFILE = Base(config.dic['UM_WORKING_DIR_VOLUME'] + config.dic['DB_USER_PROFILE'])
        if not DB_USER_PROFILE.exists():
            # create new base with field names
            DB_USER_PROFILE.create('id', 'user_id', 'device_id', 'service_consumer', 'resource_contributor')
        else:
            DB_USER_PROFILE.open()
    # Narrowed from a bare ``except:`` so SystemExit/KeyboardInterrupt propagate.
    except Exception:
        LOG.exception(
            '[usermgnt.data.standalone.db] [init] Exception: Error while initializing db components'
        )
def test_open_existing(self):
    """Exercise create() modes (open/override/None/invalid) on an existing db file."""
    db = Base(test_db_name, save_to_file=True)
    db.create('unique_id', 'name', "active", mode="open")
    db.insert("123", "N", True)
    db.commit()
    # Just verify that it works to open an existing db.
    # The column names are ignored, therefore they should
    # equal the old column names
    db = Base(test_db_name, save_to_file=True)
    db.create('unique_id2', 'name2', "active2", mode="open")
    rec = db.insert("123", "N", True)
    db.commit()
    self.assertEqual(db.fields, ['unique_id', 'name', "active"])
    # mode="override" will overwrite existing db
    db = Base(test_db_name, save_to_file=True)
    db.create('unique_id', 'name', "active", mode="override")
    db.commit()
    # NOTE(review): this asserts on self.filter_db (from setUp), not the freshly
    # overridden ``db`` — presumably intentional because both share
    # test_db_name, but confirm it should not be ``len(db)``.
    self.assertEqual(len(self.filter_db), 0)
    # Equals passing mode=None
    self.assertRaises(IOError, db.create, 'unique_id', 'name', "active")
    self.assertRaises(ValueError, db.create, 'unique_id', 'name', "active",
                      mode="invalidmode")
def setupdb(ndbl='ndb.pd1', hdbl='hdb.pd1'):
    """Bind the module-level node (``ndb``) and host (``hdb``) databases.

    :param ndbl: file name of the node database
    :param hdbl: file name of the host database
    """
    global ndb
    global hdb
    ndb = Base(ndbl)
    ndb.create('NID', 'Range', mode="open")
    hdb = Base(hdbl)
    # NOTE(review): no ``mode`` here — pydblite raises IOError if the hdb file
    # already exists; confirm that is the intended behaviour.
    hdb.create('UID', 'HID', 'NID', 'ip', 'bridge')
def test_open_memory_with_existing_filename(self):
    """An in-memory Base is independent of a same-named database file on disk."""
    # Persist a database to disk under test_db_name.
    self.filter_db = Base(test_db_name, save_to_file=True)
    self.filter_db.create('unique_id', 'name', "active", mode="override")
    self.filter_db.commit()
    # Opening by the same name without save_to_file still reads the file...
    mem_db = Base(test_db_name, save_to_file=False)
    mem_db.open()
    self.assertEqual(mem_db.fields, ['unique_id', 'name', "active"])
    # ...but an in-memory override replaces the schema without touching disk.
    mem_db = Base(test_db_name, save_to_file=False)
    mem_db.create('unique_id2', 'name2', "active2", mode="override")
    self.assertEqual(mem_db.fields, ['unique_id2', 'name2', "active2"])
def init():
    """Initialise the Lifecycle data structures.

    Sets the module globals ``SERVICE_INSTANCES_LIST`` (in-memory list) and
    ``DB_DOCKER_PORTS`` (file-backed pydblite db), creating the ports db on
    first use. Failures are logged and swallowed.
    """
    global DB_DOCKER_PORTS
    global SERVICE_INSTANCES_LIST
    try:
        # SERVICE_INSTANCES_LIST => "MEMORY DB"
        LOG.info(
            '[lifecycle.data.app.db] [init] Initializing SERVICE_INSTANCES_LIST ...'
        )
        SERVICE_INSTANCES_LIST = []
        # DB_DOCKER_PORTS: PORTS DATABASE for each of the Lifecycles / agents => "PHYSICAL DB"
        LOG.info(
            '[lifecycle.data.app.db] [init] Initializing DB_DOCKER_PORTS ...')
        DB_DOCKER_PORTS = Base(config.dic['LM_WORKING_DIR_VOLUME']
                               + config.dic['DB_DOCKER_PORTS'])
        # create new base with field names
        if not DB_DOCKER_PORTS.exists():
            DB_DOCKER_PORTS.create('port', 'mapped_to')
        else:
            DB_DOCKER_PORTS.open()
        # (dropped ``records = DB_DOCKER_PORTS()`` — the result was never used)
    # Narrowed from a bare ``except:`` so SystemExit/KeyboardInterrupt propagate.
    except Exception:
        LOG.exception(
            '[lifecycle.data.app.db] [init] Exception: Error while initializing db components'
        )
def inventoryshape(classdir):
    """Tally the (height, width) shapes of all top-level *.jpg files in *classdir*.

    Builds an in-memory pydblite database with one row per distinct image
    shape and a running ``count``, printing progress while scanning.

    :param classdir: directory whose top-level ``*.jpg`` files are inspected
    """
    filecount = len(glob(classdir + "/*.jpg"))
    print('determining optimal image resolution...')
    db = Base('shape', save_to_file=False)
    db.create('filename', 'height', 'width', 'count')
    index = 0
    for imagePath in glob(classdir + "/*.jpg"):
        index = index + 1
        try:
            img = cv2.imread(imagePath)
            filename = os.path.basename(imagePath)
            shape = img.shape
            h = shape[0]
            w = shape[1]
            pre = db(height=h, width=w)
            # see if there is already an image of this shape in the DB...
            if pre:
                # ...if so - update the count
                rec_id = pre[0]['__id__']
                counter = int(pre[0]['count'])
                counter = counter + 1
                record = db[rec_id]
                db.update(record, count=counter)
            else:
                # ...if not - insert the new shape
                db.insert(filename=filename, height=h, width=w, count=1)
            sys.stdout.write("reading shape for image #{} of {} \r".format(
                index, filecount))
            sys.stdout.flush()
            # NOTE(review): 0.1 s sleep per image slows large scans —
            # presumably kept for readable progress output; confirm.
            time.sleep(0.1)
        # FIX: was the Python-2-only ``except Exception, e`` syntax, which is a
        # SyntaxError on Python 3; ``as`` works on both 2.6+ and 3.
        except Exception as e:
            print('error processing image {}: {}'.format(imagePath, e))
            continue
def detectduplicates(classdir):
    """Remove duplicate *.jpg files in *classdir* using dhash fingerprints.

    Each image's perceptual hash is recorded in an in-memory pydblite db;
    a file whose hash was already seen is deleted from disk.

    :param classdir: directory whose top-level ``*.jpg`` files are de-duplicated
    """
    # Create an in-memory database
    db = Base('fingerprinter', save_to_file=False)
    db.create('filename', 'hash')
    filecount = len(glob(classdir + "/*.jpg"))
    duplicatecount = 0
    print("creating image fingerprints for de-duplication ...")
    index = 0
    for imagePath in glob(classdir + "/*.jpg"):
        index = index + 1
        try:
            if os.path.exists(imagePath):
                image = Image.open(imagePath)
                h = str(imagehash.dhash(image))
                filename = os.path.basename(imagePath)
                sys.stdout.write(
                    "fingerprint created for image # {} of {} \r".format(
                        index, filecount))
                sys.stdout.flush()
                time.sleep(0.1)
                pre = db(hash=h)
                if pre:
                    # This image is a duplicate - delete it
                    duplicatecount = duplicatecount + 1
                    os.remove(classdir + "/" + filename)
                else:
                    db.insert(filename=filename, hash=h)
        # FIX: was the Python-2-only ``except Exception, e`` syntax, which is a
        # SyntaxError on Python 3; ``as`` works on both 2.6+ and 3.
        except Exception as e:
            print('Error in detectduplicates() function: {}'.format(e))
            continue
def __init__(self, name="", key="", value=None):
    """Create an in-memory pydblite db keyed and indexed on *key*.

    :param name: db name passed to pydblite (in-memory, never saved)
    :param key: the indexed key field name
    :param value: list of additional field names; defaults to ``['col']``
    """
    # FIX: the original default was the mutable ``value=['col']`` — a single
    # list object shared by every call; a None sentinel preserves behaviour.
    if value is None:
        value = ['col']
    self.db = Base(name, save_to_file=False)
    self.db.create(key, *value)
    self.db.create_index(key)
    self.key = key
    self.value = value
def __init__(self):
    """Open the ``rjdb.pdl`` database, creating it with FIELDS on first use."""
    base = Base('rjdb.pdl')
    if not base.exists():
        base.create(*FIELDS)
    else:
        base.open()
    self.db = base
def _setup_db(self):  # pragma: no cover
    """(Re)create the in-memory segment database and its lookup indexes."""
    columns = ('record', 'id', 'first_isd', 'first_as',
               'last_isd', 'last_as', 'sibra')
    with self._lock:
        self._db = Base("", save_to_file=False)
        self._db.create(*columns, mode='override')
        for indexed_field in ('id', 'last_isd', 'last_as'):
            self._db.create_index(indexed_field)
def __init__(self, pydblite_db_file=None):
    """Open the zipcode pydblite database, falling back to ``None`` on failure.

    :param pydblite_db_file: optional db path; defaults to
        ``ZipcodeDB.PYDBLITE_DB_FILE``
    """
    path = (ZipcodeDB.PYDBLITE_DB_FILE
            if pydblite_db_file is None else pydblite_db_file)
    try:
        self._db = Base(path)
        self._db.open()
    except Exception:
        # Best-effort: mark the db unavailable rather than propagate.
        self._db = None
def test_open_file_with_existing_dir(self):
    """create(mode='open') must raise IOError when the path is a directory."""
    os.mkdir(test_db_name)
    db = Base(test_db_name, save_to_file=True)
    # A directory already occupies the database's file name.
    with self.assertRaises(IOError):
        db.create('unique_id', 'name', "active", mode="open")
def __init__(self):
    """Ensure the JSON output file exists and open/create the thread database."""
    # Touch the output file so later appends find it; keep the (closed)
    # handle on the instance as the original interface exposed it.
    self.out_file = open("scnscraper/abap.json", "a")
    self.out_file.close()
    self.db = Base("scnscraper/abap.pydb")
    if not self.db.exists():
        self.db.create('url', 'uid', 'type', 'author', 'title', 'date_time',
                       'tags', 'views', 'answers', 'resolve', 'upvotes',
                       'text')
    else:
        self.db.open()
def create():
    """Build and return an in-memory pydblite database.

    The database has the two fields ``hash`` and ``path`` and is never
    persisted to disk.

    :return: the initialised :class:`pydblite.pydblite.Base` instance
    """
    from pydblite.pydblite import Base
    memory_db = Base("temp-db", save_to_file=False)
    memory_db.create("hash", "path")
    return memory_db
def test_sqlite_compat_insert_list(self):
    """In sqlite_compat mode insert() accepts a row list and returns None."""
    self.filter_db = Base(test_db_name, save_to_file=False,
                          sqlite_compat=True)
    self.filter_db.create('unique_id', 'name', "active", mode="override")
    rows = [(8, "testname", 0)]
    # Insert a single entry via the sqlite-style list API.
    self.assertIsNone(self.filter_db.insert(rows))
    self.assertEqual(len(self.filter_db), 1)
    # The tuple's positions map onto the declared fields in order.
    first = self.filter_db[0]
    self.assertEqual(first["unique_id"], 8)
    self.assertEqual(first["name"], "testname")
    self.assertEqual(first["active"], 0)
def state_extraction():
    """Print summary statistics about the scraped question threads.

    Opens ``scnscraper/abap.pydb`` and prints the number of scraped
    questions broken down by their ``resolve`` status.
    """
    db = Base("scnscraper/abap.pydb")
    # FIX: the original only opened the db when it existed but then queried it
    # unconditionally, raising NameError/IndexError when the file was missing;
    # bail out early instead.
    if not db.exists():
        return
    db.open()
    record = db(type="Question")
    # Use len() instead of calling .__len__() directly (same output).
    print("# discussion scraped: " + str(len(record)))
    print("Answered: " + str(len(db(resolve="Answered."))))
    print("Answered with solution: " + str(len(db(resolve="solution"))))
    print("Not Answered: " + str(len(db(resolve="Not Answered."))))
    print("Assumed Answered: " + str(len(db(resolve="Assumed Answered."))))
def pydblite():
    """Demo walkthrough of the pure-Python pydblite backend's main features."""
    from pydblite.pydblite import Base
    db = Base('dummy', save_to_file=False)
    # create new base with field names
    db.create('name', 'age', 'size')
    # insert new record
    db.insert(name='homer', age=23, size=1.84)
    # records are dictionaries with a unique integer key __id__
    # simple selection by field value
    records = db(name="homer")
    # complex selection by list comprehension
    res = [r for r in db if 30 > r['age'] >= 18 and r['size'] < 2]
    print("res:", res)
    # delete a record or a list of records
    r = records[0]
    db.delete(r)
    list_of_records = []
    r = db.insert(name='homer', age=23, size=1.84)
    list_of_records.append(db[r])
    r = db.insert(name='marge', age=36, size=1.94)
    list_of_records.append(db[r])
    # or generator expression
    for r in (r for r in db if r['name'] in ('homer', 'marge')):
        # print "record:", r
        pass
    # bulk delete of the collected records
    db.delete(list_of_records)
    rec_id = db.insert(name='Bart', age=15, size=1.34)
    # the record such that record['__id__'] == rec_id
    record = db[rec_id]
    # delete a record by its id
    del db[rec_id]
    # create an index on a field
    db.create_index('age')
    # update
    rec_id = db.insert(name='Lisa', age=13, size=1.24)
    # direct access by id
    record = db[rec_id]
    db.update(record, age=24)
    # add and drop fields
    db.add_field('new_field', default=0)
    db.drop_field('name')
    # save changes on disk
    db.commit()
def test_sqlite_compat(self):
    """In sqlite_compat mode insert() takes lists of rows and returns None."""
    db = Base(test_db_name, save_to_file=False, sqlite_compat=True)
    db.create('unique_id', 'name', "active", mode="open")
    self.reset_status_values_for_filter()
    # Bulk-insert the 7 prepared status rows.
    self.assertIsNone(db.insert(self.status))
    self.assertEqual(len(db), 7)
    # One more row through the same list-based API.
    self.assertIsNone(db.insert([(8, "testname", 0)]))
    self.assertEqual(len(db), 8)
def load_db(self, check=True, fix=False, save_to_file=False):
    """Open every configured database, register and index it, then optionally check it.

    :param check: when True, run :meth:`check_db` after loading
    :param fix: forwarded to :meth:`check_db`
    :param save_to_file: forwarded to pydblite ``Base`` (in-memory by default)
    """
    self.log('Opening {0} database(s)'.format(len(self.db_files)),
             logging.INFO)
    for db_name, db_file in self.db_files.items():
        _db = Base(db_file, save_to_file=save_to_file)
        _db.open()
        self.log(
            'Database {0} opened, records #: {1}'.format(
                db_name, len(_db)), logging.DEBUG)
        # register the opened db under its logical name
        self.db.update({db_name: _db})
        _db.create_index('uid')
        _db.create_index('type')
    # NOTE(review): source formatting was ambiguous — check_db is assumed to
    # run once after all databases load; confirm against the original file.
    if check is True:
        self.check_db(fix)
def from_json_to_db(self):
    """Rebuild the thread database from the scraped JSON dump.

    Reads ``scnscraper/threads.json`` line by line, accumulates each
    thread's text, parses its fields with a regex and inserts a record per
    thread into ``scnscraper/abap.pydb`` (overriding any existing db).
    """
    thread = ''
    db = Base("scnscraper/abap.pydb", save_to_file=True)
    # create new base with field names
    db.create('url', 'uid', 'type', 'author', 'title', 'date_time', 'tags',
              'views', 'answers', 'resolve', 'upvotes', 'text',
              mode='override')
    i = 0
    # FIX: path was misspelled 'scnsraper/threads.json'; every other block
    # uses the 'scnscraper/' directory.
    with open('scnscraper/threads.json', 'r') as file:
        for line in file:
            if (line.endswith(" }\n")):
                thread += line
                tokens = re.search(
                    r"url:\s'(.*?)',\suid:\s'(.*?)',\stype:\s'(.*?)',\sauthor:\s'(.*?)',\stitle:\s'(.*?)',\sdate_time:\s'(.*?)',\stags:\s'(.*?)',\sviews:\s'(.*?)',\sanswers:\s'(.*?)',\sresolve:\s'(.*?)',\supvotes:\s'(.*?)', text:\s'((.|\n)*)'\s}",
                    str(thread))
                if tokens is not None:
                    db.insert(url=tokens.group(1),
                              uid=tokens.group(2),
                              type=tokens.group(3),
                              author=tokens.group(4),
                              title=tokens.group(5),
                              date_time=tokens.group(6),
                              tags=tokens.group(7),
                              views=tokens.group(8),
                              answers=tokens.group(9),
                              resolve=tokens.group(10),
                              upvotes=tokens.group(11),
                              text=tokens.group(12))
                    db.commit()
                    print('\n--------------------------------------------\n')
                thread = ''
            if (line.startswith(" ]")):
                # page boundary in the dump: reset the accumulator
                print("new page")
                thread = ''
            if (line.endswith('\n') and (not line.startswith(" ]\n\n"))
                    and (not line.endswith(" }\n"))):
                thread += line
def init():
    """Initialise the standalone Lifecycle service-instances database.

    Creates (or opens) the file-backed pydblite database and stores it in
    the module global ``DB_LM_SERVICE_INSTANCES``. Failures are logged and
    swallowed.
    """
    global DB_LM_SERVICE_INSTANCES
    try:
        # DB_LM: LM DATABASE ("PHYSICAL DB")
        LOG.info('[lifecycle.data.app.lm_db] [init] Initializing DB_LM ...')
        DB_LM_SERVICE_INSTANCES = Base(config.dic['LM_WORKING_DIR_VOLUME']
                                       + config.dic['DB_STANDALONE_MODE']
                                       + "_service_instances")
        # create new base with field names
        if not DB_LM_SERVICE_INSTANCES.exists():
            DB_LM_SERVICE_INSTANCES.create('id', 'service_instance')
        else:
            DB_LM_SERVICE_INSTANCES.open()
    # Narrowed from a bare ``except:`` so SystemExit/KeyboardInterrupt propagate.
    except Exception:
        LOG.exception(
            '[lifecycle.data.app.lm_db] [init] Exception: Error while initializing db components'
        )
def load_db(self, check=True, fix=False, save_to_file=False):
    """Open every configured database, index it, report strawman accuracy, then optionally check.

    :param check: when True, run :meth:`check_db` after loading
    :param fix: forwarded to :meth:`check_db`
    :param save_to_file: forwarded to pydblite ``Base`` (in-memory by default)
    """
    self.log('Opening {0} database(s)'.format(len(self.db_files)),
             logging.INFO)
    for db_name, db_file in self.db_files.items():
        _db = Base(db_file, save_to_file=save_to_file)
        _db.open()
        self.log(
            'Database {0} opened, records #: {1}'.format(
                db_name, len(_db)), logging.DEBUG)
        # register the opened db under its logical name
        self.db.update({db_name: _db})
        _db.create_index('uid')
        _db.create_index('type')
        self.log(
            "Db {0}: printing simple strawman prediction accuracy for answers with max upvotes as best answer:"
            .format(db_name), logging.INFO)
        self._strawman(_db)
    # NOTE(review): source formatting was ambiguous — check_db is assumed to
    # run once after all databases load; confirm against the original file.
    if check is True:
        self.check_db(fix)
def __init__(self, segment_ttl=None, max_res_no=None):  # pragma: no cover
    """Set up the in-memory segment database, its indexes and query limits.

    :param int segment_ttl: per-record TTL in seconds, or None to rely on
        each segment's own expiration time.
    :param int max_res_no: maximum number of results a query returns.
    """
    self._db = Base("", save_to_file=False)
    self._db.create('record', 'id', 'first_isd', 'first_as',
                    'last_isd', 'last_as', 'sibra', mode='override')
    for indexed_field in ('id', 'last_isd', 'last_as'):
        self._db.create_index(indexed_field)
    self._lock = threading.Lock()
    self._segment_ttl = segment_ttl
    self._max_res_no = max_res_no
def test_open_memory(self):
    """A Base named ':memory:' must never be backed by a file."""
    memory_db = Base(":memory:")
    self.assertFalse(memory_db.save_to_file)
def test_open(self):
    """Creating an in-memory db and inserting one record must not raise."""
    scratch = Base('dummy', save_to_file=False)
    scratch.create('name', 'age', 'size')
    scratch.insert(name='homer', age=23, size=1.84)
def setUp(self):  # NOQA
    """Give every test a fresh in-memory database and reset the id counter."""
    self.first_record_id = 0
    self.filter_db = Base(test_db_name, save_to_file=False)
    self.filter_db.create('unique_id', 'name', "active", mode="override")
import cProfile

from pydblite.pydblite import Base
from workalendar.core import Calendar, MON, TUE, WED, THU, FRI, SAT, SUN

# FIX: the original import list was missing the comma after
# ``retrieve_nth_weekday_map`` — a SyntaxError.
from ..retrieve_data import (retrieve_nth_weekday_loop,
                             retrieve_nth_weekday_map,
                             retrieve_nth_weekday_list_comp,
                             retrieve_nth_weekday_all_fields)

# Open the pre-built temporal data table (create() with mode="open" reuses it).
date_table = Base('temporal_data.pdl')
date_table = date_table.create(mode="open")

# nth_weekday = input("Give the year, month, weekday, nth number(2010, 2, 2, 3): ")
nth_weekday = (2010, 2, 2, 3)

# Profile each retrieval strategy plus the workalendar baseline.
cProfile.run('retrieve_nth_weekday_map(date_table, nth_weekday)')
cProfile.run('retrieve_nth_weekday_loop(date_table, nth_weekday)')
cProfile.run('retrieve_nth_weekday_all_fields(date_table, nth_weekday)')
cProfile.run('retrieve_nth_weekday_list_comp(date_table, nth_weekday)')
cProfile.run('Calendar.get_nth_weekday_in_month(2010, 2, FRI, 3)')
def test_open_wrong_mode(self):
    """create() must reject an unknown mode value with ValueError."""
    db = Base(test_db_name, save_to_file=True)
    with self.assertRaises(ValueError):
        db.create('name', mode="fancy")
def create_db():
    """Return a fresh in-memory db with sid/channel/pid fields.

    Indexes are created on ``sid`` and ``channel`` for fast lookups.
    """
    db = Base('dummy', save_to_file=False)
    db.create('sid', 'channel', 'pid')
    for indexed_field in ('sid', 'channel'):
        db.create_index(indexed_field)
    return db
def __init__(self, name="", col_names=None):
    """Create an in-memory pydblite db with the given column names.

    :param name: db name passed to pydblite (in-memory, never saved)
    :param col_names: list of field names; defaults to ``['col']``
    """
    # FIX: the original default was the mutable ``col_names=['col']`` — a
    # single list shared by every call; a None sentinel preserves behaviour.
    if col_names is None:
        col_names = ['col']
    self.db = Base(name, save_to_file=False)
    self.db.create(*col_names)
    self.st = 0
    self.en = 0