Example #1
    def __init__(self, passkey):
        self.todayDate = str(getDayStart())
        self.key = passkey

        self.DBConfig = AppConfig()

        self.dayKey = None  # set up before checking, to avoid an AttributeError
        self.dbName = self.DBConfig.mapget('databaseinfo')['databasename']
        self.db = Database(self.dbName)

        try:
            self.dayKey = self.DBConfig.mapget('databaseinfo')['daykey']
        except KeyError:  # nothing in config, check self.db for an entry
            daystatus = self.checkfordate()
            if daystatus == False:
                self.makeDayRow()
                self.DBConfig.putmap('databaseinfo', 'daykey', self.dayKey)
            # if True, do nothing: the config file is already correct
        else:
            # if False, this scans for the right row and fixes the config
            daystatus = self.checkfordate()

            oldcompare = self.dayKey
            self.dayKey = self.DBConfig.mapget('databaseinfo')['daykey']
            if daystatus == False and oldcompare == self.dayKey:
                self.makeDayRow()
                self.DBConfig.putmap('databaseinfo', 'daykey', self.dayKey)
            if daystatus == True:  # everything is fine
                pass  # nothing created, just a check
Example #2
    def createTindex(self):

        self.dbName = self.DBConfig.mapget('databaseinfo')['databasename']
        self.masterIndex = self.DBConfig.mapget('databaseinfo')['indexkey']

        self.db = Database(self.dbName)
        if (self.db.exists()):
            self.db.open()
            self.db.id_ind.enc_key = self.key
        # this function assumes the database exists; insert the table index row
        self.db.insert(dict(t='Tindex', table=[]))
        # get the table index passkey, a row of type Tindex (t=xxxx);
        # the first passkey in self.db should be the only one there,
        # so this loop is effectively performed once
        for curr in self.db.all('id'):
            if curr['t'] == 'Tindex':
                self.Tindexkey = ''.join(curr['_id'])
                break
        # TODO: report an error if the key couldn't be written or found

        # write the table index passkey to the master index
        indexRow = self.db.get('id', self.masterIndex, with_doc=True)
        indexRow['Tindex'] = self.Tindexkey
        self.db.update(indexRow)
        self.db.close()
Example #3
    def __init__(self, passkey):
        self.key = passkey

        self.DBConfig = AppConfig()
        # check the database configuration; if it doesn't pass these
        # tests, warn before deletion
        self.dbName = self.DBConfig.mapget('databaseinfo')['databasename']

        self.masterIndex = self.DBConfig.mapget('databaseinfo')['indexkey']

        self.db = Database(self.dbName)

        if (self.db.exists()):
            self.db.open()
            self.db.id_ind.enc_key = self.key
            try:
                self.Qindex = self.QindexGet()  # question index key
                self.Tindex = self.TindexGet()  # table index key
                # regular 'table' index: a list of hash pointers, in order
                self.IndexedTable = self.tableGet()
            except Exception:
                print 'bad index'
                self.db.close()
                self.sanitycheck = False

            else:
                self.db.close()
                self.sanitycheck = True
Example #4
def init(reindex=True, recreate=True):
    global _LocalStorage
    if _LocalStorage is not None:
        lg.warn('local storage already initialized')
        return
    chat_history_dir = os.path.join(settings.ChatHistoryDir(), 'current')
    _LocalStorage = Database(chat_history_dir)
    _LocalStorage.custom_header = message_index.make_custom_header()
    if _Debug:
        lg.out(_DebugLevel, 'message_db.init in %s' % chat_history_dir)
    if db().exists():
        try:
            db().open()
            # patch_flush_fsync(db())
        except Exception as exc:
            lg.err('failed to open local database : %r' % exc)
            if not recreate:
                raise Exception('failed to open database')
            lg.info('local DB will be recreated now')
            recreate_db(chat_history_dir)
    else:
        lg.info('create fresh local DB')
        db().create()
    if reindex:
        if not refresh_indexes(db(), rewrite=False, reindex=True):
            lg.err('failed to refresh indexes')
            if not recreate:
                raise Exception('failed to refresh indexes')
            lg.info('local DB will be recreated now')
            recreate_db(chat_history_dir)
Example #5
    def __init__(self, passkey, xtraDB):
        self.key = passkey

        self.dbName = xtraDB
        self.db = Database(self.dbName)

        self.importScan()
Example #6
def init():
    global _LocalStorage
    if _LocalStorage is not None:
        lg.warn('local storage already initialized')
        return
    contract_chain_dir = os.path.join(settings.ContractChainDir(), 'current')
    _LocalStorage = Database(contract_chain_dir)
    _LocalStorage.custom_header = coins_index.make_custom_header()
    if _Debug:
        lg.out(_DebugLevel, 'coins_db.init in %s' % contract_chain_dir)
    if db().exists():
        try:
            db().open()
        except Exception:
            temp_dir = os.path.join(settings.ContractChainDir(), 'tmp')
            if os.path.isdir(temp_dir):
                bpio._dir_remove(temp_dir)
            tmpdb = regenerate_indexes(temp_dir)
            rewrite_indexes(db(), tmpdb)
            bpio._dir_remove(temp_dir)
            db().open()
            db().reindex()
    else:
        db().create()
    refresh_indexes(db())
Example #7
class BenchCodernityDB(BenchBase):
    
    ID_FIELD = "_id"
    
    def __init__(self, *args, **kwargs):
        super(BenchCodernityDB, self).__init__(*args, **kwargs)
    
    def create_database(self):
        self.db = Database(self.db_name)
        self.db.create()
        self.db.add_index(WithSmallNumberIndex(self.db.path, "small_number"))

    def delete_database(self):
        self.db.close()
        shutil.rmtree(self.db_name)
            
    def create(self, record):
        self.db.insert(record)
    
    def get(self, key):
        return self.db.get("id", key, with_doc=True)
        
    def query(self, **kwargs):
        key, val = kwargs.items()[0]
        return list(self.db.get_many(key, val, limit=-1, with_doc=True))
Example #8
 def __init__(self, passkey, dbname=None):
     self.key = passkey
     if dbname is None:
         self.DBConfig = AppConfig()
         self.dbName = self.DBConfig.mapget('databaseinfo')['databasename']
     else:
         self.dbName = dbname
     self.db = Database(self.dbName)
Example #9
 def init_store_db(self):
     self.db = Database(os.path.join(self.store_path, "store.db"))
     if not self.db.exists():
         self.db.create()
         self.db.add_index(WithHashIndex(self.db.path, "hash"))
         self.db.add_index(WithPointerIndex(self.db.path, "pointer"))
     else:
         self.db.open()
Example #10
def init_db():
    db = Database(OUTPUT_DB)

    try:
        db.create()
    except IndexConflict:
        db.open()

    return db
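
This create-then-fallback appears to rely on create() raising IndexConflict when the database files already exist; most snippets on this page check exists() first instead. A minimal helper in that style (the function name is mine, only public Database calls are used):

from CodernityDB.database import Database

def open_or_create(path):
    # open the database if its files already exist, create it otherwise
    db = Database(path)
    if db.exists():
        db.open()
    else:
        db.create()
    return db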
Example #11
def main():
    db = Database('/tmp/tut5_2')
    db.create()
    x_ind = WithXIndex(db.path, 'x')
    db.add_index(x_ind)

    for x in xrange(100):
        db.insert(dict(x=x, t=random.random()))

    print db.run('x', 'avg', start=10, end=30)
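
WithXIndex is not defined in this snippet; db.run('x', 'avg', start=10, end=30) dispatches to a run_avg method on the index registered under the name 'x'. A sketch of such an index, modeled on the CodernityDB tutorial (the key_format and node_capacity values are assumptions):

from CodernityDB.tree_index import TreeBasedIndex

class WithXIndex(TreeBasedIndex):

    def __init__(self, *args, **kwargs):
        kwargs['node_capacity'] = 10
        kwargs['key_format'] = 'I'
        super(WithXIndex, self).__init__(*args, **kwargs)

    def make_key_value(self, data):
        # index documents by their integer 'x' field
        t_val = data.get('x')
        if t_val is not None:
            return t_val, None
        return None

    def make_key(self, key):
        return key

    def run_avg(self, db_obj, start, end):
        # invoked via db.run('x', 'avg', start=..., end=...)
        l = []
        for curr in db_obj.get_many(index_name=self.name, start=start,
                                    end=end, limit=-1, with_doc=True):
            l.append(curr['doc']['t'])
        return sum(l) / len(l)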
Example #12
    def test_compact_shards(self, tmpdir):
        db = Database(str(tmpdir) + '/db')
        db.create(with_id_index=False)
        db.add_index(ShardedUniqueHashIndex5(db.path, 'id'))

        for x in xrange(100):
            db.insert({'x': x})

        db.compact()
        assert db.count(db.all, 'id') == 100
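
ShardedUniqueHashIndex5 is a test helper rather than part of CodernityDB itself: a ShardedUniqueHashIndex subclass pinned to five shards. A plausible definition, mirroring the library's own test suite (the custom_header detail is an assumption):

from CodernityDB.sharded_hash import ShardedUniqueHashIndex

class ShardedUniqueHashIndex5(ShardedUniqueHashIndex):

    # custom_header is copied into the generated index module,
    # so the import has to be repeated there
    custom_header = 'from CodernityDB.sharded_hash import ShardedUniqueHashIndex'

    def __init__(self, *args, **kwargs):
        kwargs['sh_nums'] = 5
        super(ShardedUniqueHashIndex5, self).__init__(*args, **kwargs)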
Example #13
    def setup(self, eventbus):
        dbpath = join(dric.datadir, 'data', 'mavlink', 'database')
        if exists(dbpath):
            rmtree(dbpath)
        self.db = Database(dbpath)
        self.db.create()
        key_ind = MavlinkIndex(self.db.path, 'key')
        self.db.add_index(key_ind)
        self.bus = eventbus

        self.timeref = time()
Example #14
def main():
    db = Database("/tmp/tut5_2")
    db.create()
    x_ind = WithXIndex(db.path, "x")
    db.add_index(x_ind)

    for x in xrange(100):
        db.insert(dict(x=x, t=random.random()))

    print db.run("x", "avg", start=10, end=30)
Example #15
	def __init__(self, passkey):
		self.key = passkey

		self.initQuestions = SecuQ(self.key)

		self.DBConfig = AppConfig()
		self.dbName = self.DBConfig.mapget('databaseinfo')['databasename']

		self.db = Database(self.dbName)

		initDay = DayEntry(self.key)  # checks day hash or creates a new one
		self.dayKey = initDay.dayKey
Example #16
 def __init__(self):
     """
     Create Analex Cache
     """
     # use this dictionary as a local cache;
     # the global db will be updated when the object is destroyed
     self.cache = {}
     self.db = Database('~/tmp/thaalibCache')
     if not self.db.exists():
         self.db.create()
         x_ind = WithAIndex(self.db.path, 'a')
         self.db.add_index(x_ind)
     else:
         self.db.open()
Example #17
 def __init__(self):
     """
     Create Analex Cache
     """
     self.cache = {
         'checkedWords': {},
         'FreqWords': {'noun': {}, 'verb': {}, 'stopword': {}},
     }
     self.db = Database('~/tmp/qalsadiCache')
     if not self.db.exists():
         self.db.create()
         x_ind = WithAIndex(self.db.path, 'a')
         self.db.add_index(x_ind)
     else:
         self.db.open()
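
WithAIndex is not shown in these cache snippets. A plausible reconstruction, assuming it is a HashIndex keyed on an md5 of the document's 'a' field (the hashing scheme and key_format are assumptions, not the verified original):

from hashlib import md5
from CodernityDB.hash_index import HashIndex

class WithAIndex(HashIndex):

    def __init__(self, *args, **kwargs):
        kwargs['key_format'] = '32s'  # md5 hexdigest is 32 characters
        super(WithAIndex, self).__init__(*args, **kwargs)

    def make_key_value(self, data):
        a_val = data.get('a')
        if a_val:
            if isinstance(a_val, unicode):
                a_val = a_val.encode('utf8')
            return md5(a_val).hexdigest(), {}
        return None

    def make_key(self, key):
        if isinstance(key, unicode):
            key = key.encode('utf8')
        return md5(key).hexdigest()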
Example #18
 def __init__(self, app):
     super(CodernityDB, self).__init__(app)
     self.dbfile = os.path.join(self.app.config['app']['dir'],
                                self.app.config['db']['path'])
     self.db = None
     self.uncommitted = dict()
     self.stop_event = Event()
     self.db = Database(self.dbfile)
     try:
         log.info('opening db', path=self.dbfile)
         self.db.open()
     except DatabasePathException:
         log.info('db does not exist, creating it', path=self.dbfile)
         self.db.create()
         self.db.add_index(MD5Index(self.dbfile, 'key'))
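
MD5Index here maps arbitrary string keys onto fixed-width hash keys. A minimal sketch of such an index (assuming it hashes the document's 'key' field; '16s' matches a raw md5 digest of 16 bytes):

from hashlib import md5
from CodernityDB.hash_index import HashIndex

class MD5Index(HashIndex):

    def __init__(self, *args, **kwargs):
        kwargs['key_format'] = '16s'  # raw md5 digest is 16 bytes
        super(MD5Index, self).__init__(*args, **kwargs)

    def make_key_value(self, data):
        return md5(data['key']).digest(), None

    def make_key(self, key):
        return md5(key).digest()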
Example #19
 def test_to_many_shards(self, tmpdir):
     db = Database(str(tmpdir) + '/db')
     db.create(with_id_index=False)
     # it's OK to use the sharded index directly here
     with pytest.raises(IndexPreconditionsException):
         db.add_index(ShardedUniqueHashIndex(db.path, 'id', sh_nums=300))
     with pytest.raises(IndexPreconditionsException):
         db.add_index(ShardedUniqueHashIndex(db.path, 'id', sh_nums=256))
Example #20
 def __init__(self):
     db = Database('db')
     if db.exists():
         db.open()
     else:
         db.create()
         index = UrlIndex(db.path, 'urlidx')
         db.add_index(index)
     self._db = db
Example #21
def main():
    db = Database('/tmp/tut5_1')
    db.create()
    x_ind = WithXIndex(db.path, 'x')
    db.add_index(x_ind)

    for x in xrange(100):
        db.insert(dict(x=x, t=random.random()))

    l = []
    for curr in db.get_many('x', start=10, end=30, limit=-1, with_doc=True):
        l.append(curr['doc']['t'])
    print sum(l) / len(l)
Example #22
def read_samples(db_filename, test_name):
    db = Database(db_filename)
    db.open()

    test_name_ind = WithTestNameIndex(db.path, 'test_name')

    try:
        db.edit_index(test_name_ind)
    except (IndexConflict, PreconditionsException):
        db.add_index(test_name_ind)

    for data in db.get_many('test_name', test_name, limit=-1):
        yield data
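
The edit_index/add_index dance above makes the generator safe to run against databases that may or may not have the index yet. Hypothetical usage (the path and test name are made up):

for sample in read_samples('/tmp/perf_samples', 'insert_throughput'):
    print sample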
Example #23
class BaseTest(object):
    def setup(self):
        self.db = Database(tempfile.mkdtemp())
        self.db.create()
        add_index(self.db)
        self.task_flow_engine = TaskFlowEngine(self.db)

    def teardown(self):
        shutil.rmtree(self.db.path)

    def run_plainly(self, tests=None):
        self.setup()
        for k, v in self.__class__.__dict__.items():
            if k.startswith("test") and isinstance(v, types.FunctionType):
                if not tests or (k in tests):
                    v(self)
        self.teardown()
Example #24
def main():

    db = Database('/tmp/trafficDB')

    if db.exists():

        db.open()
        database = DB(db)

        # Create a velocity map with:
        # time resolution: 300 s (288 intervals per day)
        # spatial resolution: 1 sq km
        v_map = VelocityMap(288, 10)

        # Get vehicles id list
        vid_list = database.get_all_id()

        # Get data for specific vehicle id
        veh_data = database.get_data_by_id(vid_list[0])

        # TODO: add loop through all vehicle IDs
        # for x in xrange(len(vid_list)):
        #    veh_data = database.get_data_by_id(vid_list[x])
        #    av_vel_map = v_map.vel_map_calc(veh_data)

        av_vel_map = v_map.vel_map_calc(veh_data)

        f = open('results.txt', 'w')

        t_res = 300
        for t in xrange(288):
            for x in xrange(80):
                for y in xrange(60):
                    if av_vel_map[t, x, y] != 0:
                        f.write(
                            str(t * t_res) + ' ' + str((t + 1) * t_res) + ' ' +
                            str(x) + ' ' + str(y) + ' ' +
                            str(av_vel_map[t, x, y]) + '\n')
            # f.write('\n')

        f.close()

    else:
        print "Database not found"

    print("--- %s seconds ---" % (time.time() - start_time))
Example #25
class CodernityStore(DataStore):
    def __init__(self, redis_server_ip):
        self.db = Database('/tmp/db_a')
        self.db.create()
        x_ind = WithXIndex(self.db.path, 'x')
        self.db.add_index(x_ind)

    def put(self, key, value):
        self.db.insert(dict(x=key, chunk=value))

    def get(self, key):
        return self.db.get('x', key, with_doc=True)['doc']['chunk']

    def exists(self, key):
        # the original referenced self.r, a leftover Redis handle that is
        # never set up here; query the CodernityDB index instead
        # (RecordNotFound comes from CodernityDB.database)
        try:
            self.db.get('x', key)
            return True
        except RecordNotFound:
            return False

    def persist(self):
        pass

    def close(self):
        pass

    def used_memory(self):
        return 0

    def dump(self):
        return "dbsize: %d \n info: %r" % (0, 0)

    def reset(self):
        pass
Example #26
def recreate_db(chat_history_dir):
    """
    """
    global _LocalStorage
    temp_dir = os.path.join(settings.ChatHistoryDir(), 'tmp')
    if os.path.isdir(temp_dir):
        bpio._dir_remove(temp_dir)
    tmpdb = regenerate_indexes(temp_dir)
    try:
        db().close()
    except:
        pass
    rewrite_indexes(db(), tmpdb)
    bpio._dir_remove(temp_dir)
    try:
        db().open()
        db().reindex()
    except:
        # really bad... we will lose whole data
        _LocalStorage = Database(chat_history_dir)
        _LocalStorage.custom_header = message_index.make_custom_header()
        try:
            _LocalStorage.destroy()
        except:
            pass
        try:
            _LocalStorage.create()
        except Exception as exc:
            lg.warn('failed to create local storage: %r' % exc)
Example #27
 def setup(feature):
     app.config['CODERNITY_DATABASE_PATH'] = mkdtemp()
     global patcher
     patcher = mock.patch.dict(lite_mms.database.__dict__, {
         "codernity_db": Database(app.config['CODERNITY_DATABASE_PATH'])
     })
     patcher.start()
     lite_mms.database.codernity_db.create()
Example #28
class BaseTest(object):

    def setup(self):
        self.db = Database(tempfile.mkdtemp()) 
        self.db.create()
        add_index(self.db)
        self.task_flow_engine = TaskFlowEngine(self.db) 

    def teardown(self):
        shutil.rmtree(self.db.path)

    def run_plainly(self, tests=None):
        self.setup()
        for k, v in self.__class__.__dict__.items():
            if k.startswith("test") and isinstance(v, types.FunctionType):
                if not tests or (k in tests):
                    v(self)
        self.teardown()
Example #29
class CDBBase(object):
    def __init__(self, path, logger=None):
        self.logger = logger or logging.getLogger(__name__)
        self._db = Database(path)

        if self._db.exists():
            self._db.open()
            self.logger.debug("Successfully opened DB '%s'", path)
        else:
            self.logger.debug("Creating DB '%s'", path)
            self._db.create()
            self._initialitate(self._db)

    def _initialitate(self, db):
        pass

    def get_db(self):
        return self._db
Example #30
    def test_insert_get(self, tmpdir, sh_nums):
        db = Database(str(tmpdir) + '/db')
        db.create(with_id_index=False)
        n = globals()['ShardedUniqueHashIndex%d' % sh_nums]
        db.add_index(n(db.path, 'id'))
        l = []
        for x in xrange(10000):
            l.append(db.insert(dict(x=x))['_id'])

        for curr in l:
            assert db.get('id', curr)['_id'] == curr
Example #31
 def test_to_many_shards(self, tmpdir):
     db = Database(str(tmpdir) + '/db')
     db.create(with_id_index=False)
     # it's OK to use the sharded index directly here
     with pytest.raises(IndexPreconditionsException):
         db.add_index(ShardedUniqueHashIndex(db.path, 'id', sh_nums=300))
     with pytest.raises(IndexPreconditionsException):
         db.add_index(ShardedUniqueHashIndex(db.path, 'id', sh_nums=256))
Example #32
 def __init__(self, db_path):
     self.db = Database(db_path)
     if self.db.exists():
         self.db.open()
     else:
         self.db.create()
         path_index = PathIndex(self.db.path, 'path')
         self.db.add_index(path_index)
         path_added_index = PathAddedIndex(self.db.path, 'path_added')
         self.db.add_index(path_added_index)
Example #33
    def buildDB(self):

        from _dbindex import EncUniqueHashIndex
        self.dbName = self.DBConfig.mapget('databaseinfo')['databasename']

        self.db = Database(self.dbName)
        id_ind = EncUniqueHashIndex(self.db.path, 'id')
        self.db.set_indexes([id_ind])
        self.db.create()
        self.db.id_ind.enc_key = self.key
        self.db.close()

        self.createMasterindex()  #create master index passkey, only once
        self.createQindex()
        self.createTindex()

        # TODO: add error handling
        return True
Example #34
def regenerate_indexes(temp_dir):
    """
    """
    tmpdb = Database(temp_dir)
    tmpdb.custom_header = coins_index.make_custom_header()
    tmpdb.create()
    refresh_indexes(tmpdb)
    tmpdb.close()
    return tmpdb
Example #35
def main():
    db = Database('/tmp/tut1')
    db.create()

    for x in xrange(100):
        print db.insert(dict(x=x))

    for curr in db.all('id'):
        print curr
Example #36
class DBImport:
	'''
	import scan: scans an existing db and rebuilds the config file
	create db: creates the db file, master index, question index and table index
	'''

	def __init__(self, passkey, xtraDB):
		self.key = passkey
		self.dbName = xtraDB
		self.db = Database(self.dbName)

		self.importScan()

	def __del__(self):
		if self.db.opened:
			self.db.close()

	# TODO: add a rebuild option

	def importScan(self):
		# read from config, as a check
		self.db = Database(self.dbName)
		if self.db.exists():
			self.db.open()
			self.db.id_ind.enc_key = self.key

			# the first passkey in self.db should be the only one there,
			# so this loop is effectively performed once
			for curr in self.db.all('id'):
				if curr['t'] == 'master':
					masterKey = ''.join(curr['_id'])
					self.DBConfig = AppConfig()
					self.DBConfig.putmap('databaseinfo', 'indexkey', masterKey)  # masterkey=value
					self.DBConfig.putmap('databaseinfo', 'databasename', self.dbName)
					break
				# TODO: report an error if the key couldn't be written or found
			self.db.close()
		return True
Example #37
	def __init__(self, passkey, xtraDB):
		self.key = passkey
		self.dbName = xtraDB
		self.db = Database(self.dbName)

		self.importScan()
Example #38
def main():
    db = Database('/tmp/tut5_1')
    db.create()
    x_ind = WithXIndex(db.path, 'x')
    db.add_index(x_ind)

    for x in xrange(100):
        db.insert(dict(x=x, t=random.random()))

    l = []
    for curr in db.get_many('x', start=10, end=30, limit=-1, with_doc=True):
        l.append(curr['doc']['t'])
    print sum(l) / len(l)
Example #39
    def OpenDb(self):
        '''
        Open the current database.
        :return: (success, message)
        '''
        self.__DBHandle = Database(os.path.join(self.__dbRoot, self.__dbName))

        # check for existence before opening; the original opened the
        # database unconditionally first, which fails for a missing DB
        if not self.__DBHandle.exists():
            self.__DBHandle = None
            return False, "DB Not Exist"

        try:
            self.__DBHandle.open()
        except Exception:
            return False, "Open DB Failed"

        return True, "Success"
Example #40
    def __init__(self, path, logger=None):
        self.logger = logger or logging.getLogger(__name__)
        self._db = Database(path)

        if self._db.exists():
            self._db.open()
            self.logger.debug("Successfully opened DB '%s'", path)
        else:
            self.logger.debug("Creating DB '%s'", path)
            self._db.create()
            self._initialitate(self._db)
Example #41
    def __init__(self, passkey, date_range='all'):
        self.key = passkey
        self.Qeng = SecuQ(self.key)

        self.indexdb = DBIndexSystem(self.key)
        #self.indexdb.masterIndex
        #self.indexdb.Qindex
        #self.indexdb.Tindex
        #self.indexdb.IndexedTable
        #self.indexdb.dbName

        self.dayindex = DayEntry(self.key)
        #self.dayindex.dayKey

        self.DBConfig = AppConfig()

        self.dbName = self.DBConfig.mapget('databaseinfo')['databasename']

        self.db = Database(self.dbName)

        self.dbparseable = self.db2json(daterange=date_range, clean=True)
Example #42
    def test_insert_get(self, tmpdir, sh_nums):
        db = Database(str(tmpdir) + '/db')
        db.create(with_id_index=False)
        n = globals()['ShardedUniqueHashIndex%d' % sh_nums]
        db.add_index(n(db.path, 'id'))
        l = []
        for x in xrange(10000):
            l.append(db.insert(dict(x=x))['_id'])

        for curr in l:
            assert db.get('id', curr)['_id'] == curr
Example #43
def init():
    global _LocalStorage
    if _LocalStorage is not None:
        lg.warn('local storage already initialized')
        return
    chat_history_dir = os.path.join(settings.ChatHistoryDir(), 'current')
    _LocalStorage = Database(chat_history_dir)
    _LocalStorage.custom_header = message_index.make_custom_header()
    if _Debug:
        lg.out(_DebugLevel, 'message_db.init in %s' % chat_history_dir)
    if db().exists():
        try:
            db().open()
        except Exception:
            lg.err('failed to open database, local DB will be recreated')
            recreate_db(chat_history_dir)
    else:
        db().create()
    if not refresh_indexes(db()):
        lg.err('failed to refresh indexes, local DB will be recreated')
        recreate_db(chat_history_dir)
        refresh_indexes(db())
Example #44
    def __init__(self, passkey):
        self.key = passkey
        self.indexdb = DBIndexSystem(self.key)
        #self.indexdb.masterIndex
        #self.indexdb.Qindex
        #self.indexdb.Tindex
        #self.indexdb.IndexedTable
        #self.indexdb.dbName

        self.db = Database(self.indexdb.dbName)

        #init variables blank, avoid key errors
        self.all = {}
        self.valid = {}
        self.active = {}
        self.notactive = {}
        self.unInit = {}
        self.typ = {}
        self.aggregate = {}
        self.multipoint = {}

        self.questionGet()  #populate variables
Example #45
	def __init__(self):
		"""
		Create Analex Cache
		"""
		self.cache = {
			'checkedWords': {},
			'FreqWords': {'noun': {}, 'verb': {}, 'stopword': {}},
		}
		self.db = Database('/tmp/qalsadiCache')
		if not self.db.exists():
			self.db.create()
			x_ind = WithAIndex(self.db.path, 'a')
			self.db.add_index(x_ind)
		else:
			self.db.open()
Example #46
def main():
    db = Database('/tmp/tut1')
    db.create()

    for x in xrange(100):
        print db.insert(dict(x=x))

    for curr in db.all('id'):
        print curr
Example #47
	def importScan(self):
		# read from config, as a check
		self.db = Database(self.dbName)
		if self.db.exists():
			self.db.open()
			self.db.id_ind.enc_key = self.key

			# the first passkey in self.db should be the only one there,
			# so this loop is effectively performed once
			for curr in self.db.all('id'):
				if curr['t'] == 'master':
					masterKey = ''.join(curr['_id'])
					self.DBConfig = AppConfig()
					self.DBConfig.putmap('databaseinfo', 'indexkey', masterKey)  # masterkey=value
					self.DBConfig.putmap('databaseinfo', 'databasename', self.dbName)
					break
				# TODO: report an error if the key couldn't be written or found
			self.db.close()
		return True
Example #48
	def __init__(self,passkey,date_range='all'):
		self.key = passkey
		self.Qeng = SecuQ(self.key)

		self.indexdb = DBIndexSystem(self.key)
		#self.indexdb.masterIndex
		#self.indexdb.Qindex
		#self.indexdb.Tindex
		#self.indexdb.IndexedTable
		#self.indexdb.dbName

		self.dayindex = DayEntry(self.key)
		#self.dayindex.dayKey

		self.DBConfig = AppConfig()

		self.dbName = self.DBConfig.mapget('databaseinfo')['databasename']

		self.db = Database(self.dbName)

		self.dbparseable = self.db2json(daterange=date_range, clean=True)
Example #49
def main():
    db = Database('/tmp/tut4')
    db.open()
    #db.create()
    #x_ind = WithXIndex(db.path, 'x')
    #db.add_index(x_ind)
    '''
    for x in xrange(100):
        db.insert(dict(x=x))

    for y in xrange(100):
        db.insert(dict(y=y))
    '''
    print db.get('x', 10, with_doc=True)

    for curr in db.get_many('x', start=15, end=25, limit=-1, with_doc=False):
        print curr
Example #50
	def __init__(self, passkey):
		self.key = passkey
		self.indexdb = DBIndexSystem(self.key)
		#self.indexdb.masterIndex
		#self.indexdb.Qindex
		#self.indexdb.Tindex
		#self.indexdb.IndexedTable
		#self.indexdb.dbName

		self.db = Database(self.indexdb.dbName)

		# init variables blank, to avoid key errors
		self.all = {}
		self.valid = {}
		self.active = {}
		self.notactive = {}
		self.unInit = {}
		self.typ = {}
		self.aggregate = {}
		self.multipoint = {}

		self.questionGet()  # populate variables
Example #51
 def __init__(self, cache_path=False):
     """
     Create Analex Cache
     """
     # use this dictionary as a local cache,
     # The global db will be updated on destructing object
     # get the database path
     if hasattr(sys, 'frozen'):  # this only exists when running under py2exe
         base = sys.prefix
     else:  # otherwise this is a regular Python script
         base = os.path.dirname(os.path.realpath(__file__))
     if not cache_path:
         file_path = self.DB_PATH
     else:
         file_path = os.path.join(os.path.dirname(cache_path), '.thaalabCache')
     
     self.cache = {}
     self.db = Database(file_path)
     if not self.db.exists():
         self.db.create()
         x_ind = WithAIndex(self.db.path, 'a')
         self.db.add_index(x_ind)
     else:
         self.db.open()
Example #52
def main():
    db = Database('db/tut2')
    if db.exists():
        db.open()
    else:
        db.create()
        # add the index only when the database is first created; adding it
        # again on reopen would raise IndexConflict. The index name must
        # match the 'x' used by db.get below.
        x_ind = WithXIndex(db.path, 'x')
        db.add_index(x_ind)

    for x in xrange(100):
        db.insert(dict(x=x))

    for y in xrange(100):
        db.insert(dict(y=y))

    print db.get('x', 10, with_doc=True)
Example #53
class cache:
	"""
	cache for word morphological analysis
	"""
	def __init__(self):
		"""
		Create Analex Cache
		"""
		self.cache = {
			'checkedWords': {},
			'FreqWords': {'noun': {}, 'verb': {}, 'stopword': {}},
		}
		self.db = Database('/tmp/qalsadiCache')
		if not self.db.exists():
			self.db.create()
			x_ind = WithAIndex(self.db.path, 'a')
			self.db.add_index(x_ind)
		else:
			self.db.open()

	def __del__(self):
		"""
		Delete instance and clear cache
		"""
		self.cache = None
		self.db.close()

	def isAlreadyChecked(self, word):
		try:
			return bool(self.db.get('a', word))
		except Exception:
			return False

	def getChecked(self, word):
		x = self.db.get('a', word, with_doc=True)
		y = x.get('doc', False)
		if y:
			return y.get('d', [])
		else:
			return []

	def addChecked(self, word, data):
		idata = {"a": word, 'd': data}
		self.db.insert(idata)

	def existsCacheFreq(self, word, wordtype):
		# look the word up under its word type, not at the top level
		return word in self.cache['FreqWords'][wordtype]

	def getFreq(self, originalword, wordtype):
		return self.cache['FreqWords'][wordtype].get(originalword, 0)

	def addFreq(self, original, wordtype, freq):
		self.cache['FreqWords'][wordtype][original] = freq
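
A hypothetical round trip through the cache above (the word and analysis data are made up):

c = cache()
c.addChecked(u'kataba', [{'lemma': u'kataba', 'pos': u'verb'}])
print c.isAlreadyChecked(u'kataba')  # True
print c.getChecked(u'kataba')        # the stored analysis list
del c  # __del__ closes the underlying CodernityDB database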
Example #54
# -*- coding: utf-8 -*-
from lite_mms.basemain import app
app.config["SQLALCHEMY_DATABASE_URI"] = app.config["DBSTR"]
from flask.ext.sqlalchemy import SQLAlchemy
db = SQLAlchemy(app)

from CodernityDB.database import Database

codernity_db = Database(app.config['CODERNITY_DATABASE_PATH'])
if codernity_db.exists():
    codernity_db.open()
    codernity_db.reindex()
else:
    codernity_db.create()

app.config["MONGODB_DB"] = "localhost"

def init_db():
    # models must be imported here, otherwise the tables won't be created
    from lite_mms import models
    db.create_all()

Example #55
    custom_header = """from CodernityDB.tree_index import MultiTreeBasedIndex
from itertools import izip"""

    def __init__(self, *args, **kwargs):
        kwargs['key_format'] = '16s'
        super(TreeMultiTest, self).__init__(*args, **kwargs)
        self.__l = kwargs.get('w_len', 2)

    def make_key_value(self, data):
        name = data['w']
        l = self.__l
        max_l = len(name)
        out = set()
        # emit every substring of `name` with length >= l,
        # right-padded to 16 characters and lowercased
        for x in xrange(l - 1, max_l):
            m = (name, )
            for y in xrange(0, x):
                m += (name[y + 1:],)
            out.update(set(''.join(x).rjust(16, '_').lower() for x in izip(*m)))  # izip comes from custom_header
        return out, dict(w=name)

    def make_key(self, key):
        return key.rjust(16, '_').lower()

db = Database('./tmp/multi')
db.create()
db.add_index(TreeMultiTest(db.path, "words"))

db.insert(dict(w='Codernity'))

print db.get('words', 'dern')['w']  # "Codernity"
print db.get('words', 'cod')['w']  # "Codernity"
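
Since make_key_value emits every substring of w of length >= 2 (padded and lowercased), any infix of at least two characters resolves back to the stored document. A couple more hypothetical lookups against the same database:

print db.get('words', 'ity')['w']        # "Codernity": any infix works
print db.get('words', 'codernity')['w']  # lookups are case-insensitive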
Example #56
class Store():

    def __init__(self, pathname):
        self.store_path = os.path.join(pathname, "store")
        self.objects_counter = {}
        self.init_store_db()
        if not os.path.exists(self.store_path):
            os.mkdir(self.store_path)

    def init_store_dir(self):
        if not os.path.exists(self.store_path):
            os.mkdir(self.store_path)
        objects_path = os.path.join(self.store_path, "objects")
        if not os.path.exists(objects_path):
            os.mkdir(objects_path)
        backups_path = os.path.join(self.store_path, "backups")
        if not os.path.exists(backups_path):
            os.mkdir(backups_path)
        journal_path = os.path.join(self.store_path, "journal")
        if not os.path.exists(journal_path):
            os.mkdir(journal_path)
        journal_objects_path = os.path.join(self.store_path, "journal/objects")
        if not os.path.exists(journal_objects_path):
            os.mkdir(journal_objects_path)
        journal_backups_path = os.path.join(self.store_path, "journal/backups")
        if not os.path.exists(journal_backups_path):
            os.mkdir(journal_backups_path)

    def init_store_db(self):
        self.db = Database(os.path.join(self.store_path, "store.db"))
        if not self.db.exists():
            self.db.create()
            self.db.add_index(WithHashIndex(self.db.path, "hash"))
            self.db.add_index(WithPointerIndex(self.db.path, "pointer"))
        else:
            self.db.open()

    def get_path(self):
        return self.store_path  # called e.g. as BackupObject.new...(..., target.get_path())

    def get_backup_path(self, backup_name):
        backup_path = os.path.join(self.store_path, "backups")
        return os.path.join(backup_path, backup_name)

    def get_journal_backup_path(self, backup_name):
        backup_path = os.path.join(self.get_journal_path(), "backups")
        return os.path.join(backup_path, backup_name)

    def get_objet_dir_path(self, hash):
        return os.path.join(self.store_path, "objects", hash[:2])

    def get_object_path(self, hash):
        object_path = os.path.join(self.store_path, "objects", hash[:2])
        return os.path.join(object_path, hash + ".data")

    def get_journal_object_path(self, hash):
        object_path = os.path.join(self.get_journal_path(), "objects", hash[:2])
        if not os.path.exists(object_path):
            os.mkdir(object_path)
        return os.path.join(object_path, hash + ".data")

    def get_journal_tmp_object_path(self, hash):
        object_path = os.path.join(self.get_journal_path(), "objects")
        return os.path.join(object_path, hash + ".data")

    def get_object_header_path(self, hash):
        object_header_path = os.path.join(self.store_path, "objects", hash[:2])
        return os.path.join(object_header_path, hash + ".meta")

    def get_journal_object_header_path(self, hash):
        object_header_path = os.path.join(self.get_journal_path(), "objects", hash[:2])
        if not os.path.exists(object_header_path):
            os.mkdir(object_header_path)
        return os.path.join(object_header_path, hash + ".meta")

    def get_journal_tmp_object_header_path(self, hash):
        object_header_path = os.path.join(self.get_journal_path(), "objects")
        return os.path.join(object_header_path, hash + ".meta")

    def get_backups_path(self):
        return os.path.join(self.store_path, "backups")

    def get_latest_path(self):
        latest_tmp_path = os.path.join(self.store_path, "backups")
        return os.path.join(latest_tmp_path, "latest")

    def get_journal_latest_path(self):
        latest_tmp_path = os.path.join(self.get_journal_path(), "backups")
        return os.path.join(latest_tmp_path, "latest")

    def get_journal_path(self):
        return os.path.join(self.store_path, "journal")

    def get_all_backups(self):
        backups_path = os.path.join(self.store_path, "backups")
        backups = os.listdir(backups_path)
        if "latest" in backups:
            backups.remove("latest")
        return backups

    def is_journal_complete(self):
        journal_path = self.get_journal_path()
        if (os.path.exists(journal_path)):
            if (os.path.isfile(os.path.join(journal_path, "journal_complete"))):
                return True
            elif (os.path.isfile(os.path.join(journal_path, "journal_incomplete"))):
                print("Clearing Journal")
                self.remove_incomplete_journal()
                os.remove(os.path.join(journal_path, "journal_incomplete"))
                self.rebuildDB()
                return False
        return False

    def remove_incomplete_journal(self):
        journal_path = self.get_journal_path()
        for file_object in os.listdir(os.path.join(journal_path, "objects")):
            os.remove(os.path.join(journal_path, "objects", file_object))
        for file_object in os.listdir(os.path.join(journal_path, "backups")):
            os.remove(os.path.join(journal_path, "backups", file_object))

    def write_to_journal(self, command):
        journal_path = self.get_journal_path()
        with open(os.path.join(journal_path, "journal_incomplete"), "a") as TF:
            TF.write(command + "\n")
            TF.close()

    def finish_journal(self):
        for key, value in self.objects_counter.iteritems():
            if value["operation"] == "update" and value["value"] == 0:
                self.removeObject(key)
            else:
                self.write_to_journal(value["operation"] + " " + key + " " + str(value["value"]))
        if os.path.exists(os.path.join(self.get_journal_path(), "journal_incomplete")):
            journal_file = open(os.path.join(self.get_journal_path(), "journal_incomplete"), "r+")
            uniqlines = set(journal_file.readlines())
            journal_file.close()
            journal_file = open(os.path.join(self.get_journal_path(), "journal_incomplete"), "w")
            journal_file.writelines(uniqlines)
            journal_file.close()
            self.file_rename(os.path.join(self.get_journal_path(), "journal_incomplete"), "journal_complete")

    def commit(self):
        print("Committing Journal")
        journal_path = self.get_journal_path()
        if (os.path.exists(self.get_latest_path())):
            os.remove(self.get_latest_path())
        if (self.is_journal_complete()):
            with open(os.path.join(journal_path, "journal_complete"), "rb") as TF:
                for command in TF:
                    words = command.split()
                    if (words[0] == "move"):
                        file_path, file_name = os.path.split(words[2])
                        if not os.path.exists(file_path):
                            os.mkdir(file_path)
                        shutil.move(words[1], words[2])
                        #os.rename(words[1], words[2])
                    elif (words[0] == "remove"):
                        os.remove(words[1])
                    elif (words[0] == "rmdir"):
                        shutil.rmtree(words[1])
                    elif (words[0] == "insert"):
                        self.db.insert({'hash':words[1], 'pointer':int(words[2])})
                    elif (words[0] == "update"):
                        element = self.db.get('hash', words[1], with_doc=True)
                        element = element['doc']
                        element['pointer'] = int(words[2])
                        self.db.update(element)
                    elif (words[0] == "delete"):
                        element = self.db.get('hash', words[1], with_doc=True)
                        element = element['doc']
                        self.db.delete(element)
                TF.close()
            os.remove(os.path.join(journal_path, "journal_complete"))
            journal_objects_path = os.path.join(journal_path, "objects")
            shutil.rmtree(journal_objects_path)
            os.mkdir(journal_objects_path)

    @staticmethod
    def file_rename(old_name, new_name):
        new_file_name = os.path.join(os.path.dirname(old_name), new_name)
        os.rename(old_name, new_file_name)

    def file_move(self, old_name, new_name):
        tmp = os.path.join(self.get_journal_path(), "objects", new_name[:2])
        if (not os.path.exists(tmp)):
            os.mkdir(tmp)
        os.rename(old_name, os.path.join(tmp, new_name))

    def save_file(self, source_path, name, previous_hash = None, block_size = constants.CONST_BLOCK_SIZE):
        file_hash = hashlib.sha1()
        store_file = self.get_journal_tmp_object_path(name)
        store_file_header = self.get_journal_tmp_object_header_path(name)
        if previous_hash is not None:
            previous_type = self.get_object_type(previous_hash)
            if previous_type == "gz\n" or previous_type == "delta\n":
                previous_file = self.get_object_file_header(previous_hash, "rb")
                previous_file.readline()
                previous_file.readline()
                sig_size = previous_file.readline()
                sig_data = previous_file.read(int(sig_size))
                deltaProcess = subprocess.Popen(['rdiff', 'delta', '-', source_path], stdout=subprocess.PIPE, stdin=subprocess.PIPE)
                deltaProcess.stdin.write(sig_data)
                deltaProcess.stdin.close()
                with gzip.open(store_file, "wb") as TF:  # gzip-compressed
                    while True:
                        deltaData = deltaProcess.stdout.read(16)
                        if deltaData:
                            file_hash.update(deltaData)
                            TF.write(deltaData)
                        else:
                            with open(store_file_header, "wb") as THF:
                                THF.write("delta\n")
                                THF.write("signature\n")
                                sigProcess = subprocess.Popen(['rdiff', 'signature', source_path], stdout=subprocess.PIPE)
                                signature, signatureErr = sigProcess.communicate()
                                if (signatureErr is None):
                                    THF.write(str(len(signature)))
                                    THF.write("\n")
                                    THF.write(signature)
                                else:
                                    THF.write(str(0))
                                THF.write("\n")
                                THF.write("previous\n")
                                THF.write(previous_hash)
                                THF.close()
                                self.file_move(store_file, file_hash.hexdigest() + ".data")
                                self.file_move(store_file_header, file_hash.hexdigest() + ".meta")
                                break
                    TF.close()
                    self.write_to_journal("move " + self.get_journal_object_path(file_hash.hexdigest()) + " " + os.path.join(self.store_path, "objects", file_hash.hexdigest()[:2], file_hash.hexdigest() + ".data"))
                    self.write_to_journal("move " + self.get_journal_object_header_path(file_hash.hexdigest()) + " " + os.path.join(self.store_path, "objects", file_hash.hexdigest()[:2], file_hash.hexdigest() + ".meta"))
                return file_hash.hexdigest()
        else:
            with open(source_path, "rb") as SF:
                with gzip.open(store_file, "wb") as TF:  # gzip-compressed
                    while True:
                        block = SF.read(block_size)
                        file_hash.update(block)
                        TF.write(block)
                        if not block:
                            self.file_move(store_file, file_hash.hexdigest() + ".data")
                            with open(store_file_header, "wb") as THF:
                                THF.write("gz\n")
                                THF.write("signature\n")
                                sigProcess = subprocess.Popen(['rdiff', 'signature', source_path], stdout=subprocess.PIPE)
                                signature, signatureErr = sigProcess.communicate()
                                if (signatureErr is None):
                                    THF.write(str(len(signature)))
                                    THF.write("\n")
                                    THF.write(signature)
                                else:
                                    THF.write(str(0))
                                self.file_move(store_file_header, file_hash.hexdigest() + ".meta")
                                THF.close()
                            break
                    TF.close()
                    self.write_to_journal("move " + self.get_journal_object_path(file_hash.hexdigest()) + " " + os.path.join(self.store_path, "objects", file_hash.hexdigest()[:2], file_hash.hexdigest() + ".data"))
                    self.write_to_journal("move " + self.get_journal_object_header_path(file_hash.hexdigest()) + " " + os.path.join(self.store_path, "objects", file_hash.hexdigest()[:2], file_hash.hexdigest() + ".meta"))
                SF.close()
            return file_hash.hexdigest()

    def save_directory(self, pi, hash_name):
        with self.get_journal_object_file(hash_name, "wb") as DF:
            with self.get_journal_object_file_header(hash_name, "wb") as DHF:
                DHF.write("directory\n")
                DF.write(pi)
                DF.close()
                DHF.close()
        self.write_to_journal("move " + DF.name + " " + os.path.join(self.store_path, "objects", hash_name[:2], hash_name + ".data"))
        self.write_to_journal("move " + DHF.name + " " + os.path.join(self.store_path, "objects", hash_name[:2], hash_name + ".meta"))

    def save_link(self, link, hash_name):
        with self.get_journal_object_file(hash_name.hexdigest(), "wb") as DF:
            with self.get_journal_object_file_header(hash_name.hexdigest(), "wb") as DHF:
                DHF.write("link\n")
                DHF.write("signature\n")
                DHF.write(str(0))
                DHF.write("\n")
                DF.write(link)
                DHF.close()
            DF.close()
        self.write_to_journal("move " + DF.name + " " + os.path.join(self.store_path, "objects", hash_name.hexdigest()[:2], hash_name.hexdigest() + ".data"))
        self.write_to_journal("move " + DHF.name + " " + os.path.join(self.store_path, "objects", hash_name.hexdigest()[:2], hash_name.hexdigest() + ".meta"))

    def save_data(self, file_name, data):
        with open(file_name, "wb") as BF:
            BF.write(data)
            BF.close()
        self.write_to_journal("move " + BF.name + " " + os.path.join(self.store_path, "backups"))

    def get_object_file(self, hash, mode):
        type = self.get_object_type(hash)
        if type == "gz\n" or type == "delta\n":
            return gzip.open(self.get_object_path(hash), mode)
        return open(self.get_object_path(hash), mode)

    def get_journal_object_file(self, hash, mode):
        return open(self.get_journal_object_path(hash), mode)

    def get_object_file_header(self, hash, mode):
        return open(self.get_object_header_path(hash), mode)

    def get_journal_object_file_header(self, hash, mode):
        return open(self.get_journal_object_header_path(hash), mode)

    def get_object_type(self, hash):
        with self.get_object_file_header(hash, "rb") as HF:
            object_type = HF.readline()
            HF.close()
            return object_type

    def get_object(self, source_path, hash, side_dict):
        return StoreObject.create(source_path, self, side_dict)

    def get_unzipped_tempFile(self, hash, tempFile):
        gzipFile = gzip.open(self.get_object_path(hash))
        temp = open(tempFile.name, "w+")
        while True:
            block = gzipFile.read()
            temp.write(block)
            if not block:
                break
        temp.seek(0)
        gzipFile.close()
        return temp

    def get_hash(self, src_file, block_size = constants.CONST_BLOCK_SIZE):
        file_hash = hashlib.sha1()
        with open(src_file, "rb") as SF:
            while True:
                block = SF.read(block_size)
                file_hash.update(block)
                if not block : break
            SF.close()
        return file_hash.hexdigest()

    def incIndex(self, hash):
        if hash in self.objects_counter:
            self.objects_counter[hash]["value"] = self.objects_counter[hash]["value"] + 1
            return self.objects_counter[hash]["value"]
        else:
            try:
                element = self.db.get('hash', hash, with_doc=True)
                element = element['doc']
                self.objects_counter[hash] = {"value":element['pointer'] + 1, "operation":"update"}
                return element['pointer'] + 1
            except RecordNotFound:
                self.objects_counter[hash] = {"value":1, "operation":"insert"}
                return 1

    def decIndex(self, hash):
        if hash in self.objects_counter:
            self.objects_counter[hash]["value"] = self.objects_counter[hash]["value"] - 1
            return self.objects_counter[hash]["value"]
        else:
            try:
                element = self.db.get('hash', hash, with_doc=True)
                element = element['doc']
                self.objects_counter[hash] = {"value":element['pointer'] - 1, "operation":"update"}
                return element['pointer'] - 1
            except RecordNotFound:
                return

    def getIndex(self, hash):
        if hash in self.objects_counter:
            return self.objects_counter[hash]["value"]
        else:
            try:
                element = self.db.get('hash', hash, with_doc=True)
                element = element['doc']
                return element['pointer']
            except RecordNotFound:
                return 0


    def rebuildDB(self):
        self.db.destroy()
        self.init_store_db()
        backups = self.get_all_backups()
        for backup in backups:
            tmp = ExistingBackup('', self, backup)
            tmp.recovery_backup(True)

    def removeObject(self, hash):
        if len(os.listdir(self.get_objet_dir_path(hash))) == 2:
            self.write_to_journal("rmdir " + self.get_objet_dir_path(hash))
        else:
            self.write_to_journal("remove " + self.get_object_path(hash))
            self.write_to_journal("remove " + self.get_object_header_path(hash))
        self.write_to_journal("delete " + hash)

    def removeBackup(self, time):
        backup = ExistingBackup("", self, time).get_root_object()
        self.is_journal_complete()
        backup.remove()
        os.remove(self.get_backup_path(time))
        newest = self.getNewestBackupTime()
        if newest is not None:
            self.save_data(self.get_journal_latest_path(), newest)
        self.finish_journal()
        self.commit()

    def getNewestBackupTime(self):
        backups_path = self.get_backups_path()
        backups = sorted(os.listdir(backups_path))
        backups.remove("latest")
        if len(backups) > 0:
            return backups[len(backups) - 1]
        return None
Example #57
class CodernityDB(BaseService):

    """A service providing a codernity db interface."""

    name = 'db'
    default_config = dict(db=dict(path=''), app=dict(dir=''))

    def __init__(self, app):
        super(CodernityDB, self).__init__(app)
        self.dbfile = os.path.join(self.app.config['app']['dir'],
                                   self.app.config['db']['path'])
        self.db = None
        self.uncommitted = dict()
        self.stop_event = Event()
        self.db = Database(self.dbfile)
        try:
            log.info('opening db', path=self.dbfile)
            self.db.open()
        except DatabasePathException:
            log.info('db does not exist, creating it', path=self.dbfile)
            self.db.create()
            self.db.add_index(MD5Index(self.dbfile, 'key'))

    def _run(self):
        self.stop_event.wait()

    def stop(self):
        # commit?
        log.info('closing db')
        if self.started:
            self.db.close()
            self.stop_event.set()

    def get(self, key):
        log.debug('getting entry', key=key)
        if key in self.uncommitted:
            if self.uncommitted[key] is None:
                raise KeyError("key not in db")
            return self.uncommitted[key]
        try:
            value = self.db.get('key', key, with_doc=True)['doc']['value']
        except RecordNotFound:
            raise KeyError("key not in db")
        return compress.decompress(value)

    def put(self, key, value):
        log.debug('putting entry', key=key, value=value)
        self.uncommitted[key] = value

    def commit(self):
        log.debug('committing', db=self)
        for k, v in self.uncommitted.items():
            if v is None:
                doc = self.db.get('key', k, with_doc=True)['doc']
                self.db.delete(doc)
            else:
                self.db.insert({'key': k, 'value': compress.compress(v)})
        self.uncommitted.clear()

    def delete(self, key):
        log.debug('deleting entry', key=key)
        self.uncommitted[key] = None

    def __contains__(self, key):
        try:
            self.get(key)
        except KeyError:
            return False
        return True

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.db == other.db

    def __repr__(self):
        return '<DB at %d uncommitted=%d>' % (id(self.db), len(self.uncommitted))

    def inc_refcount(self, key, value):
        self.put(key, value)

    def dec_refcount(self, key):
        pass

    def revert_refcount_changes(self, epoch):
        pass

    def commit_refcount_changes(self, epoch):
        pass

    def cleanup(self, epoch):
        pass

    def put_temporarily(self, key, value):
        self.inc_refcount(key, value)
        self.dec_refcount(key)
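
A hypothetical session with this service's write-back cache, assuming an app object carrying the expected 'app' and 'db' config keys:

db = CodernityDB(app)
db.put('answer', '42')
assert 'answer' in db      # served from the uncommitted dict
db.commit()                # flushes buffered writes into CodernityDB
assert db.get('answer') == '42'
db.delete('answer')
db.commit()                # the delete is applied on commit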