def recreate_db(chat_history_dir):
    """
    Rebuild the local chat-history database from scratch.

    Copies every document from the (possibly broken) source database into a
    freshly created one under ``tmp``, then swaps the directories: the old
    data is renamed to ``orig`` and the new database takes its place.  The
    ``orig`` backup is removed only after the new database passes an index
    check.  Replaces the module-global ``_LocalStorage`` handle.
    """
    global _LocalStorage
    # best-effort close of the currently open storage before rebuilding
    try:
        _LocalStorage.close()
    except Exception as exc:
        lg.warn('failed closing local storage : %r' % exc)
    _LocalStorage = None
    # source database: the existing (suspect) data
    dbs = Database(chat_history_dir)
    dbs.custom_header = message_index.make_custom_header()
    # start from clean 'tmp' and 'orig' work directories
    temp_dir = os.path.join(settings.ChatHistoryDir(), 'tmp')
    if os.path.isdir(temp_dir):
        bpio._dir_remove(temp_dir)
    orig_dir = os.path.join(settings.ChatHistoryDir(), 'orig')
    if os.path.isdir(orig_dir):
        bpio._dir_remove(orig_dir)
    # target database: rebuilt copy living in 'tmp' until the final rename
    dbt = Database(temp_dir)
    dbt.custom_header = message_index.make_custom_header()
    source_opened = False
    try:
        dbs.open()
        source_opened = True
    except Exception as exc:
        # source may be unreadable; we still rebuild an empty database below
        lg.warn('failed open local storage : %r' % exc)
    # patch_flush_fsync(dbs)
    dbt.create()
    dbt.close()
    refresh_indexes(dbt, reindex=False)
    dbt.open()
    # patch_flush_fsync(dbt)
    if source_opened:
        # copy every document; '_rev' is dropped so the new db assigns
        # fresh revisions on insert
        for c in dbs.all('id'):
            del c['_rev']
            dbt.insert(c)
    dbt.close()
    if source_opened:
        dbs.close()
    # swap: old data becomes 'orig' backup, rebuilt db takes its place
    os.rename(dbs.path, orig_dir)
    os.rename(dbt.path, dbs.path)
    _LocalStorage = Database(chat_history_dir)
    _LocalStorage.custom_header = message_index.make_custom_header()
    db().open()
    # patch_flush_fsync(db())
    if refresh_indexes(db(), rewrite=False, reindex=False):
        # rebuilt db is healthy: the backup is no longer needed
        bpio._dir_remove(orig_dir)
        lg.info('local DB re-created in %r' % chat_history_dir)
    else:
        lg.err('local DB is broken !!!')
def __init__(self, passkey):
    """
    Locate (or create) today's day row and make sure the config file's
    'daykey' entry points at it.

    Fixes: replaced non-short-circuit bitwise ``&`` on booleans with
    ``and``, and ``== False`` comparisons with ``not`` (PEP 8 / E712);
    dropped the trailing no-op ``if daystatus == True: pass``.
    """
    self.todayDate = str(getDayStart())
    self.key = passkey
    self.DBConfig = AppConfig()
    self.dayKey = None  # set up before checking, avoids AttributeError
    self.dbName = self.DBConfig.mapget('databaseinfo')['databasename']
    self.db = Database(self.dbName)
    try:
        self.dayKey = self.DBConfig.mapget('databaseinfo')['daykey']
    except KeyError:
        # nothing in config: scan the db for today's row
        daystatus = self.checkfordate()
        if not daystatus:
            self.makeDayRow()
        self.DBConfig.putmap('databaseinfo', 'daykey', self.dayKey)
    else:
        # config had a key: verify it; if False, checkfordate scans for
        # the right one and fixes the config
        daystatus = self.checkfordate()
        oldcompare = self.dayKey
        self.dayKey = self.DBConfig.mapget('databaseinfo')['daykey']
        # create a fresh row only when the scan failed AND the config
        # entry was not corrected by the scan
        if (not daystatus) and (oldcompare == self.dayKey):
            self.makeDayRow()
            self.DBConfig.putmap('databaseinfo', 'daykey', self.dayKey)
        # daystatus True: everything already consistent, nothing created
def init(reindex=True, recreate=True):
    """
    Initialize the module-global chat-history database.

    Opens the existing database under ``<ChatHistoryDir>/current`` or
    creates a fresh one.  When opening or index refresh fails and
    *recreate* is True, the database is rebuilt via ``recreate_db()``;
    otherwise an Exception is raised.  No-op if already initialized.
    """
    global _LocalStorage
    if _LocalStorage is not None:
        lg.warn('local storage already initialized')
        return
    chat_history_dir = os.path.join(settings.ChatHistoryDir(), 'current')
    _LocalStorage = Database(chat_history_dir)
    _LocalStorage.custom_header = message_index.make_custom_header()
    if _Debug:
        lg.out(_DebugLevel, 'message_db.init in %s' % chat_history_dir)
    if db().exists():
        try:
            db().open()
            # patch_flush_fsync(db())
        except Exception as exc:
            lg.err('failed to open local database : %r' % exc)
            if not recreate:
                raise Exception('failed to open database')
            lg.info('local DB will be recreated now')
            recreate_db(chat_history_dir)
    else:
        lg.info('create fresh local DB')
        db().create()
    if reindex:
        # a failed refresh also triggers a full rebuild (when allowed)
        if not refresh_indexes(db(), rewrite=False, reindex=True):
            lg.err('failed to refresh indexes')
            if not recreate:
                raise Exception('failed to refresh indexes')
            lg.info('local DB will be recreated now')
            recreate_db(chat_history_dir)
def __init__(self, passkey): self.key = passkey self.DBConfig = AppConfig() #check for self.db stuff #IF IT DOESNT PASS THESE TESTS #warn before deletion self.dbName = self.DBConfig.mapget('databaseinfo')['databasename'] self.masterIndex = self.DBConfig.mapget('databaseinfo')['indexkey'] self.db = Database(self.dbName) if (self.db.exists()): self.db.open() self.db.id_ind.enc_key = self.key try: self.Qindex = self.QindexGet() #question index key self.Tindex = self.TindexGet() #table index key self.IndexedTable = self.tableGet( ) #regular 'table' index. list of hash pointers in order except: print 'bad index' self.db.close() self.sanitycheck = False else: self.db.close() self.sanitycheck = True
def main(): db = Database('/tmp/tut_update') db.create() x_ind = WithXIndex(db.path, 'x') db.add_index(x_ind) # full examples so we had to add first the data # the same code as in previous step for x in xrange(100): db.insert(dict(x=x)) for y in xrange(100): db.insert(dict(y=y)) # end of insert part print db.count(db.all, 'x') for curr in db.all('x', with_doc=True): doc = curr['doc'] if curr['key'] % 7 == 0: db.delete(doc) elif curr['key'] % 5 == 0: doc['updated'] = True db.update(doc) print db.count(db.all, 'x') for curr in db.all('x', with_doc=True): print curr
def init():
    """
    Initialize the module-global contract-chain database.

    Opens the existing database under ``<ContractChainDir>/current``; when
    opening fails, rebuilds its indexes from a temp directory and reindexes.
    Creates a fresh database on first run.  No-op if already initialized.

    Fix: bare ``except:`` narrowed to ``except Exception:`` so
    KeyboardInterrupt/SystemExit are no longer swallowed by the recovery
    path.
    """
    global _LocalStorage
    if _LocalStorage is not None:
        lg.warn('local storage already initialized')
        return
    contract_chain_dir = os.path.join(settings.ContractChainDir(), 'current')
    _LocalStorage = Database(contract_chain_dir)
    _LocalStorage.custom_header = coins_index.make_custom_header()
    if _Debug:
        lg.out(_DebugLevel, 'coins_db.init in %s' % contract_chain_dir)
    if db().exists():
        try:
            db().open()
        except Exception:
            # broken indexes: regenerate them in a temp dir, copy them
            # over, then reopen and reindex the data
            temp_dir = os.path.join(settings.ContractChainDir(), 'tmp')
            if os.path.isdir(temp_dir):
                bpio._dir_remove(temp_dir)
            tmpdb = regenerate_indexes(temp_dir)
            rewrite_indexes(db(), tmpdb)
            bpio._dir_remove(temp_dir)
            db().open()
            db().reindex()
    else:
        db().create()
    refresh_indexes(db())
def __init__(self, passkey, xtraDB):
    """Bind to the extra database *xtraDB* under *passkey* and scan it for import."""
    self.dbName = xtraDB
    self.key = passkey
    self.db = Database(self.dbName)
    self.importScan()
def recreate_db(chat_history_dir):
    """
    Last-resort repair of the chat-history database.

    Regenerates index definitions in a temp directory, copies them over the
    live database and reindexes.  If even that fails, the database is
    destroyed and re-created empty (all data is lost).  The bare excepts
    are deliberate best-effort steps in a recovery path.
    """
    global _LocalStorage
    temp_dir = os.path.join(settings.ChatHistoryDir(), 'tmp')
    if os.path.isdir(temp_dir):
        bpio._dir_remove(temp_dir)
    # build fresh index definitions in a scratch database
    tmpdb = regenerate_indexes(temp_dir)
    # best effort: the db may already be closed/broken
    try:
        db().close()
    except:
        pass
    rewrite_indexes(db(), tmpdb)
    bpio._dir_remove(temp_dir)
    try:
        db().open()
        db().reindex()
    except:
        # really bad... we will lose whole data
        _LocalStorage = Database(chat_history_dir)
        _LocalStorage.custom_header = message_index.make_custom_header()
        try:
            _LocalStorage.destroy()
        except:
            pass
        try:
            _LocalStorage.create()
        except Exception as exc:
            lg.warn('failed to create local storage: %r' % exc)
def createTindex(self):
    """
    Insert the table-index ('Tindex') row into the database and record its
    passkey in the master index row.

    NOTE(review): the original was a single collapsed line; the guard below
    only wraps open()/enc_key, while the rest of the body assumes the
    database exists (per the original "this function assumes database"
    comment) — confirm against the surrounding class.
    """
    self.dbName = self.DBConfig.mapget('databaseinfo')['databasename']
    self.masterIndex = self.DBConfig.mapget('databaseinfo')['indexkey']
    self.db = Database(self.dbName)
    if (self.db.exists()):
        self.db.open()
        self.db.id_ind.enc_key = self.key
    #this function assumes database
    #insert question index
    self.db.insert(dict(t='Tindex', table=[]))
    #get question index passkey, form type qintex (t=xxxx)
    # since the first passkey in self.db should be the only one there,
    # this scan is performed only once
    for curr in self.db.all('id'):
        if curr['t'] == 'Tindex':
            self.Tindexkey = ''.join(curr['_id'])
            break
    #add else statement for errors if couldnt be written for found
    #write Qindex passkey to master index
    indexRow = self.db.get('id', self.masterIndex, with_doc=True)
    #write question index passkey to master index
    indexRow['Tindex'] = self.Tindexkey
    self.db.update(indexRow)
    self.db.close()
def test_to_many_shards(self, tmpdir):
    """A sharded id index must reject shard counts above the supported limit."""
    database = Database(str(tmpdir) + '/db')
    database.create(with_id_index=False)
    # it's ok to use sharded directly there
    for shard_count in (300, 256):
        with pytest.raises(IndexPreconditionsException):
            database.add_index(
                ShardedUniqueHashIndex(database.path, 'id', sh_nums=shard_count))
def init_store_db(self):
    """Open the store database, creating it with its hash/pointer indexes on first use."""
    self.db = Database(os.path.join(self.store_path, "store.db"))
    if self.db.exists():
        self.db.open()
    else:
        # first run: create the db and declare both custom indexes
        self.db.create()
        self.db.add_index(WithHashIndex(self.db.path, "hash"))
        self.db.add_index(WithPointerIndex(self.db.path, "pointer"))
def __init__(self, passkey, dbname=None):
    """
    Bind to a database under *passkey*.

    When *dbname* is omitted, the database name is read from the app
    config ('databaseinfo'/'databasename'); otherwise *dbname* is used
    directly.

    Fix: ``dbname == None`` replaced with the idiomatic identity test
    ``dbname is None`` (PEP 8 / E711).
    """
    self.key = passkey
    if dbname is None:
        # fall back to the database name recorded in the app config
        self.DBConfig = AppConfig()
        self.dbName = self.DBConfig.mapget('databaseinfo')['databasename']
    else:
        self.dbName = dbname
    self.db = Database(self.dbName)
def setup(feature):
    """Point the app at a throwaway CodernityDB living in a fresh temp directory."""
    global patcher
    app.config['CODERNITY_DATABASE_PATH'] = mkdtemp()
    replacement = {
        "codernity_db": Database(app.config['CODERNITY_DATABASE_PATH']),
    }
    patcher = mock.patch.dict(lite_mms.database.__dict__, replacement)
    patcher.start()
    lite_mms.database.codernity_db.create()
def init_db():
    """Return a ready-to-use handle to OUTPUT_DB, creating it on first run."""
    database = Database(OUTPUT_DB)
    try:
        database.create()
    except IndexConflict:
        # already created on a previous run: just open it
        database.open()
    return database
def main(): db = Database('/tmp/tut1') db.create() for x in xrange(100): print db.insert(dict(x=x)) for curr in db.all('id'): print curr
def regenerate_indexes(temp_dir):
    """
    Build a scratch database in *temp_dir* with freshly generated coin
    indexes and return it (closed).
    """
    scratch = Database(temp_dir)
    scratch.custom_header = coins_index.make_custom_header()
    scratch.create()
    refresh_indexes(scratch)
    scratch.close()
    return scratch
def main(): db = Database('/tmp/demo_secure') key = 'abcdefgh' id_ind = EncUniqueHashIndex(db.path, 'id', storage_class='Salsa20Storage') db.set_indexes([id_ind]) db.create() db.id_ind.enc_key = key for x in xrange(100): db.insert(dict(x=x, data='testing')) db.close() dbr = Database('/tmp/demo_secure') dbr.open() dbr.id_ind.enc_key = key for curr in dbr.all('id', limit=5): print curr
def migrate(source, destination):
    """
    Copy all documents and custom (non-id) indexes from the database at
    *source* into a freshly created database at *destination*.

    Very basic for now: index definition files are copied verbatim and
    every document is re-inserted with its ``_rev`` dropped so the target
    assigns fresh revisions.

    Returns True on success.

    Fix: both database handles are now closed before returning instead of
    being leaked.
    """
    dbs = Database(source)
    dbt = Database(destination)
    dbs.open()
    dbt.create()
    dbt.close()
    # copy custom index definitions; 00id.py is the built-in id index the
    # freshly created target already has
    for curr in os.listdir(os.path.join(dbs.path, '_indexes')):
        if curr != '00id.py':
            shutil.copyfile(os.path.join(dbs.path, '_indexes', curr),
                            os.path.join(dbt.path, '_indexes', curr))
    dbt.open()
    for c in dbs.all('id'):
        del c['_rev']  # let the target assign a fresh revision
        dbt.insert(c)
    # fix: release both handles instead of leaking them
    dbs.close()
    dbt.close()
    return True
def test_compact_shards(self, tmpdir):
    """Compacting a sharded database must not lose any records."""
    database = Database(str(tmpdir) + '/db')
    database.create(with_id_index=False)
    database.add_index(ShardedUniqueHashIndex5(database.path, 'id'))
    for value in xrange(100):
        database.insert({'x': value})
    database.compact()
    assert database.count(database.all, 'id') == 100
def main(): db = Database('/tmp/tut5_2') db.create() x_ind = WithXIndex(db.path, 'x') db.add_index(x_ind) for x in xrange(100): db.insert(dict(x=x, t=random.random())) print db.run('x', 'avg', start=10, end=30)
def setup(self, eventbus):
    """Create a clean mavlink database and remember the event bus and time origin."""
    dbpath = join(dric.datadir, 'data', 'mavlink', 'database')
    # start from scratch on every setup
    if exists(dbpath):
        rmtree(dbpath)
    self.db = Database(dbpath)
    self.db.create()
    self.db.add_index(MavlinkIndex(self.db.path, 'key'))
    self.bus = eventbus
    self.timeref = time()
def test_insert_get(self, tmpdir, sh_nums):
    """Every _id inserted through a sharded id index must be retrievable."""
    database = Database(str(tmpdir) + '/db')
    database.create(with_id_index=False)
    # pick the sharded index class matching the parametrized shard count
    index_cls = globals()['ShardedUniqueHashIndex%d' % sh_nums]
    database.add_index(index_cls(database.path, 'id'))
    inserted_ids = [database.insert(dict(x=value))['_id']
                    for value in xrange(10000)]
    for doc_id in inserted_ids:
        assert database.get('id', doc_id)['_id'] == doc_id
def read_samples(db_filename, test_name):
    """Yield every sample stored in *db_filename* under *test_name*."""
    database = Database(db_filename)
    database.open()
    index = WithTestNameIndex(database.path, 'test_name')
    # make sure the test_name index exists and is up to date
    try:
        database.edit_index(index)
    except (IndexConflict, PreconditionsException):
        database.add_index(index)
    for record in database.get_many('test_name', test_name, limit=-1):
        yield record
def main(): db = Database('/tmp/tut2') db.create() x_ind = WithXIndex(db.path, 'x') db.add_index(x_ind) for x in xrange(100): db.insert(dict(x=x)) for y in xrange(100): db.insert(dict(y=y)) print db.get('x', 10, with_doc=True)
def main(): db = Database('/tmp/tut5_1') db.create() x_ind = WithXIndex(db.path, 'x') db.add_index(x_ind) for x in xrange(100): db.insert(dict(x=x, t=random.random())) l = [] for curr in db.get_many('x', start=10, end=30, limit=-1, with_doc=True): l.append(curr['doc']['t']) print sum(l) / len(l)
def __init__(self, passkey):
    """Bind to the configured database and resolve today's day key."""
    self.key = passkey
    self.initQuestions = SecuQ(self.key)
    self.DBConfig = AppConfig()
    self.dbName = self.DBConfig.mapget('databaseinfo')['databasename']
    self.db = Database(self.dbName)
    # checks day hash or creates a new one
    day_row = DayEntry(self.key)
    self.dayKey = day_row.dayKey
def init():
    """Open (or create with its indexes) the block-chain local storage database."""
    global _LocalStorage
    if _LocalStorage is not None:
        lg.warn('local storage already initialized')
        return
    _LocalStorage = Database(os.path.join(settings.BlockChainDir(), 'current'))
    if db().exists():
        db().open()
        return
    # first run: declare all indexes before creating the database
    db().set_indexes([
        UniqueHashIndex(db().path, 'id'),
        IndexByIDURL(db().path, 'idurl'),
        IndexByHash(db().path, 'hash'),
    ])
    db().create()
def __init__(self):
    """
    Create Analex Cache
    """
    # use this dictionary as a local cache,
    # The global db will be updated on destructing object
    self.cache = {}
    self.db = Database('~/tmp/thaalibCache')
    if self.db.exists():
        self.db.open()
    else:
        # first run: create the db and add the 'a' index
        self.db.create()
        self.db.add_index(WithAIndex(self.db.path, 'a'))
def __init__(self):
    """
    Create Analex Cache
    """
    # in-memory cache, flushed to the on-disk db elsewhere
    self.cache = {
        'checkedWords': {},
        'FreqWords': {'noun': {}, 'verb': {}, 'stopword': {}},
    }
    self.db = Database('~/tmp/qalsadiCache')
    if self.db.exists():
        self.db.open()
    else:
        # first run: create the db and add the 'a' index
        self.db.create()
        self.db.add_index(WithAIndex(self.db.path, 'a'))
def __init__(self, app):
    """Open the on-disk database, creating it (plus the md5 'key' index) on first run."""
    super(CodernityDB, self).__init__(app)
    self.dbfile = os.path.join(self.app.config['app']['dir'],
                               self.app.config['db']['path'])
    self.db = None  # placeholder until the Database object is built below
    self.uncommitted = dict()
    self.stop_event = Event()
    self.db = Database(self.dbfile)
    try:
        log.info('opening db', path=self.dbfile)
        self.db.open()
    except DatabasePathException:
        log.info('db does not exist, creating it', path=self.dbfile)
        self.db.create()
        self.db.add_index(MD5Index(self.dbfile, 'key'))