Example #1
class BenchCodernityDB(BenchBase):
    
    ID_FIELD = "_id"
    
    def __init__(self, *args, **kwargs):
        super(BenchCodernityDB, self).__init__(*args, **kwargs)
    
    def create_database(self):
        self.db = Database(self.db_name)
        self.db.create()
        self.db.add_index(WithSmallNumberIndex(self.db.path, "small_number"))

    def delete_database(self):
        self.db.close()
        shutil.rmtree(self.db_name)
            
    def create(self, record):
        self.db.insert(record)
    
    def get(self, key):
        return self.db.get("id", key, with_doc=True)
        
    def query(self, **kwargs):
        key, val = kwargs.items()[0]
        return list(self.db.get_many(key, val, limit=-1, with_doc=True))
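The WithSmallNumberIndex used above is defined elsewhere in the benchmark. A minimal sketch of what it plausibly looks like, assuming CodernityDB's standard HashIndex API (make_key_value/make_key) and that small_number fits an unsigned int:

from CodernityDB.hash_index import HashIndex

class WithSmallNumberIndex(HashIndex):

    def __init__(self, *args, **kwargs):
        kwargs['key_format'] = 'I'  # assumed: small_number fits an unsigned int
        super(WithSmallNumberIndex, self).__init__(*args, **kwargs)

    def make_key_value(self, data):
        # index only the records that carry the field
        if 'small_number' in data:
            return data['small_number'], None
        return None

    def make_key(self, key):
        return key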
Example #2
def regenerate_indexes(temp_dir):
    """
    """
    tmpdb = Database(temp_dir)
    tmpdb.custom_header = coins_index.make_custom_header()
    tmpdb.create()
    refresh_indexes(tmpdb)
    tmpdb.close()
    return tmpdb
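refresh_indexes (also used in Example #14, with rewrite/reindex flags) is not shown here. A very rough sketch of the general shape, assuming a hypothetical `indexes` mapping of index name to Index instance; db.indexes_names and db.add_index are real CodernityDB APIs:

def refresh_indexes(db, indexes=None, **kwargs):
    # hypothetical: `indexes` maps index name -> Index instance
    for name, index in (indexes or {}).items():
        if name not in db.indexes_names:
            db.add_index(index)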
Example #3
class cache:
    """
		cache for word morphological analysis
	"""
    def __init__(self):
        """
		Create Analex Cache
		"""
        self.cache = {
            'checkedWords': {},
            'FreqWords': {
                'noun': {},
                'verb': {},
                'stopword': {}
            },
        }
        self.db = Database('/tmp/qalsadiCache')
        if not self.db.exists():
            self.db.create()
            x_ind = WithAIndex(self.db.path, 'a')
            self.db.add_index(x_ind)
        else:
            self.db.open()

    def __del__(self):
        """
		Delete instance and clear cache
		
		"""
        self.cache = None
        self.db.close()

    def isAlreadyChecked(self, word):
        try:
            return bool(self.db.get('a', word))
        except Exception:
            return False

    def getChecked(self, word):
        x = self.db.get('a', word, with_doc=True)
        y = x.get('doc', False)
        if y: return y.get('d', [])
        else: return []

    def addChecked(self, word, data):
        idata = {"a": word, 'd': data}
        self.db.insert(idata)

    def existsCacheFreq(self, word, wordtype):
        return word in self.cache['FreqWords'][wordtype]

    def getFreq(self, originalword, wordtype):
        return self.cache['FreqWords'][wordtype].get(originalword, 0)

    def addFreq(self, original, wordtype, freq):
        self.cache['FreqWords'][wordtype][original] = freq
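The WithAIndex used by this cache (and by the variants below) is not shown. A minimal sketch, assuming the usual CodernityDB HashIndex pattern of hashing the 'a' field:

from hashlib import md5

from CodernityDB.hash_index import HashIndex

class WithAIndex(HashIndex):

    def __init__(self, *args, **kwargs):
        kwargs['key_format'] = '32s'
        super(WithAIndex, self).__init__(*args, **kwargs)

    def make_key_value(self, data):
        a_val = data.get('a')
        if a_val is None:
            return None
        if not isinstance(a_val, basestring):
            a_val = str(a_val)
        return md5(a_val.encode('utf8')).hexdigest(), None

    def make_key(self, key):
        if not isinstance(key, basestring):
            key = str(key)
        return md5(key.encode('utf8')).hexdigest()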
Example #4
class DBImport:
	'''
	import scan: scans the existing self.db and rebuilds the config file
	create self.db: creates the self.db file, master index, question index and table index
	'''

	def __init__(self, passkey, xtraDB):
		self.key = passkey
		self.dbName = xtraDB
		self.db = Database(self.dbName)
		self.importScan()

	def __del__(self):
		if (self.db.opened):
			self.db.close()

# ADD REBUILD OPTION

	def importScan(self):
		# read from config, as a check
		self.db = Database(self.dbName)
		if(self.db.exists()):
			self.db.open()
			self.db.id_ind.enc_key = self.key
	
			for curr in self.db.all('id'): # since the first passkey in self.db should be the only one there, this runs only once
				if curr['t'] == 'master':
					masterKey=''.join(curr['_id'])
					self.DBConfig = AppConfig()
					self.DBConfig.putmap('databaseinfo','indexkey',masterKey)#masterkey=value
					self.DBConfig.putmap('databaseinfo','databasename',self.dbName)
					break
					# add an else branch to report an error if the key couldn't be written or found
			self.db.close()
		return True
Example #5
File: cache.py  Project: ATouhou/mishkal
class cache :
	"""
		cache for word morphological analysis
	"""
	def __init__(self):
		"""
		Create Analex Cache
		"""
		self.cache = {'checkedWords': {},
			      'FreqWords': {'noun': {}, 'verb': {}, 'stopword': {}},
			      }
		self.db = Database('/tmp/qalsadiCache')
		if not self.db.exists():
			self.db.create()
			x_ind = WithAIndex(self.db.path, 'a')
			self.db.add_index(x_ind)
		else:
			self.db.open()

	def __del__(self):
		"""
		Delete instance and clear cache
		
		"""
		self.cache = None
		self.db.close()

	def isAlreadyChecked(self, word):
		try:
			return bool(self.db.get('a', word))
		except Exception:
			return False

	def getChecked(self, word):
		x = self.db.get('a', word, with_doc=True)
		y = x.get('doc', False)
		if y:
			return y.get('d', [])
		return []
	
	def addChecked(self, word, data):
		idata = {"a":word,'d':data}
		self.db.insert(idata)

	
	def existsCacheFreq(self, word, wordtype):
		return word in self.cache['FreqWords'][wordtype]
	
	def getFreq(self, originalword, wordtype):
		return self.cache['FreqWords'][wordtype].get(originalword, 0)

	def addFreq(self, original, wordtype, freq):
		self.cache['FreqWords'][wordtype][original] = freq
Example #6
def migrate(source, destination):
    """
    Very basic for now
    """
    dbs = Database(source)
    dbt = Database(destination)
    dbs.open()
    dbt.create()
    dbt.close()
    for curr in os.listdir(os.path.join(dbs.path, "_indexes")):
        if curr != "00id.py":
            shutil.copyfile(os.path.join(dbs.path, "_indexes", curr), os.path.join(dbt.path, "_indexes", curr))
    dbt.open()
    for c in dbs.all("id"):
        del c["_rev"]
        dbt.insert(c)
    return True
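A usage sketch (paths hypothetical): migrate copies every custom index file and every record from source to destination. The destination must not already exist (dbt.create() would fail), and _rev is dropped so the target database can assign its own revisions.

migrate('/path/to/old_db', '/path/to/new_db')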
Example #7
class DBImport:
    '''
	import scan: scans existing self.db and rebuilds config file 
	create self.db: creates self.db file, master index, question index and table index



	'''
    def __init__(self, passkey, xtraDB):
        self.key = passkey

        self.dbName = xtraDB
        self.db = Database(self.dbName)

        self.importScan()

    def __del__(self):
        if (self.db.opened):
            self.db.close()

# ADD REBUILD OPTION

    def importScan(self):

        #read from config, as a check

        self.db = Database(self.dbName)
        if (self.db.exists()):
            self.db.open()
            self.db.id_ind.enc_key = self.key

            for curr in self.db.all(
                    'id'
            ):  # since the first passkey in self.db should be the only one there, this runs only once
                if curr['t'] == 'master':
                    masterKey = ''.join(curr['_id'])
                    self.DBConfig = AppConfig()
                    self.DBConfig.putmap('databaseinfo', 'indexkey',
                                         masterKey)  #masterkey=value
                    self.DBConfig.putmap('databaseinfo', 'databasename',
                                         self.dbName)
                    break
                    # add an else branch to report an error if the key couldn't be written or found
            self.db.close()
        return True
Example #8
def main():
    db = Database('/tmp/demo_secure')
    key = 'abcdefgh'
    id_ind = EncUniqueHashIndex(db.path, 'id', storage_class='Salsa20Storage')
    db.set_indexes([id_ind])
    db.create()
    db.id_ind.enc_key = key

    for x in xrange(100):
        db.insert(dict(x=x, data='testing'))

    db.close()
    dbr = Database('/tmp/demo_secure')
    dbr.open()
    dbr.id_ind.enc_key = key

    for curr in dbr.all('id', limit=5):
        print curr
Example #9
def main():
    db = Database('/tmp/demo_secure')
    key = 'abcdefgh'
    id_ind = EncUniqueHashIndex(db.path, 'id', storage_class='Salsa20Storage')
    db.set_indexes([id_ind])
    db.create()
    db.id_ind.enc_key = key

    for x in xrange(100):
        db.insert(dict(x=x, data='testing'))

    db.close()
    dbr = Database('/tmp/demo_secure')
    dbr.open()
    dbr.id_ind.enc_key = key

    for curr in dbr.all('id', limit=5):
        print curr
Example #10
class Developer:
    def __init__(self, passkey, dbname=None):
        self.key = passkey
        if dbname is None:
            self.DBConfig = AppConfig()
            self.dbName = self.DBConfig.mapget('databaseinfo')['databasename']
        else:
            self.dbName = dbname
        self.db = Database(self.dbName)

    def dump(self):
        if (self.db.exists()):
            self.db.open()
            self.db.id_ind.enc_key = self.key
            for curr in self.db.all('id'):
                print curr

            self.db.close()
Example #11
def migrate(source, destination):
    """
    Very basic for now
    """
    dbs = Database(source)
    dbt = Database(destination)
    dbs.open()
    dbt.create()
    dbt.close()
    for curr in os.listdir(os.path.join(dbs.path, '_indexes')):
        if curr != '00id.py':
            shutil.copyfile(os.path.join(dbs.path, '_indexes', curr),
                            os.path.join(dbt.path, '_indexes', curr))
    dbt.open()
    for c in dbs.all('id'):
        del c['_rev']
        dbt.insert(c)
    return True
Example #12
def main():
    db = Database("/tmp/demo_secure")
    key = "abcdefgh"
    id_ind = EncUniqueHashIndex(db.path, "id")
    db.set_indexes([id_ind])
    db.create()
    db.id_ind.enc_key = key
    print db.id_ind.storage

    for x in xrange(100):
        db.insert(dict(x=x, data="testing"))

    db.close()
    dbr = Database("/tmp/demo_secure")
    dbr.open()
    dbr.id_ind.enc_key = key

    for curr in dbr.all("id", limit=5):
        print curr
Example #13
    def CreateDB(self):
        '''
        Create the NoSQL database.
        :param dbName:
        :return:
        '''

        db = Database(os.path.join(self.__dbRoot,self.__dbName))

        if db.exists():
            return True,"DB Exist"
        try:
            # OutPutHelper.consolePrint("Create DB=%s, dbpath=%s"% (self.__dbName ,db.create()))
            db.create()
            # if indexname:
            #     x_ind = WithXIndex(db.path, indexname)
            #     db.add_index(x_ind)
        except Exception:
            return False, "Create DB Failed"
        db.close()

        return True, "Success"
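The commented-out WithXIndex above matches the index from CodernityDB's tutorial; for reference, a sketch of it:

from CodernityDB.hash_index import HashIndex

class WithXIndex(HashIndex):

    def __init__(self, *args, **kwargs):
        kwargs['key_format'] = 'I'
        super(WithXIndex, self).__init__(*args, **kwargs)

    def make_key_value(self, data):
        x_val = data.get('x')
        if x_val is not None:
            return x_val, None
        return None

    def make_key(self, key):
        return key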
Example #14
def recreate_db(chat_history_dir):
    """
    """
    global _LocalStorage
    try:
        _LocalStorage.close()
    except Exception as exc:
        lg.warn('failed closing local storage : %r' % exc)
    _LocalStorage = None
    dbs = Database(chat_history_dir)
    dbs.custom_header = message_index.make_custom_header()
    temp_dir = os.path.join(settings.ChatHistoryDir(), 'tmp')
    if os.path.isdir(temp_dir):
        bpio._dir_remove(temp_dir)
    orig_dir = os.path.join(settings.ChatHistoryDir(), 'orig')
    if os.path.isdir(orig_dir):
        bpio._dir_remove(orig_dir)
    dbt = Database(temp_dir)
    dbt.custom_header = message_index.make_custom_header()
    source_opened = False
    try:
        dbs.open()
        source_opened = True
    except Exception as exc:
        lg.warn('failed open local storage : %r' % exc)
    # patch_flush_fsync(dbs)
    dbt.create()
    dbt.close()
    refresh_indexes(dbt, reindex=False)
    dbt.open()
    # patch_flush_fsync(dbt)
    if source_opened:
        for c in dbs.all('id'):
            del c['_rev']
            dbt.insert(c)
    dbt.close()
    if source_opened:
        dbs.close()
    os.rename(dbs.path, orig_dir)
    os.rename(dbt.path, dbs.path)
    _LocalStorage = Database(chat_history_dir)
    _LocalStorage.custom_header = message_index.make_custom_header()
    db().open()
    # patch_flush_fsync(db())
    if refresh_indexes(db(), rewrite=False, reindex=False):
        bpio._dir_remove(orig_dir)
        lg.info('local DB re-created in %r' % chat_history_dir)
    else:
        lg.err('local DB is broken !!!')
Example #15
class Cache(object):
    """
        cache for word morphological analysis
    """
    DB_PATH = os.path.join(os.path.expanduser('~'), '.qalsadiCache')

    def __init__(self, dp_path=False):
        """
        Create Analex Cache
        """
        self.cache = {
            'checkedWords': {},
            'FreqWords': {
                'noun': {},
                'verb': {},
                'stopword': {}
            },
        }
        if not dp_path:
            dp_path = self.DB_PATH
        else:
            dp_path = os.path.join(os.path.dirname(dp_path), '.qalsadiCache')
        self.db = Database(dp_path)
        if not self.db.exists():
            self.db.create()
            x_ind = WithAIndex(self.db.path, 'a')
            self.db.add_index(x_ind)
        else:
            self.db.open()

    def __del__(self):
        """
        Delete instance and clear cache

        """
        self.cache = None
        self.db.close()

    def is_already_checked(self, word):
        """ return if ``word`` is already cached"""
        try:
            return bool(self.db.get('a', word))
        except Exception:
            return False

    def get_checked(self, word):
        """ return checked ``word`` form cache"""
        xxx = self.db.get('a', word, with_doc=True)
        yyy = xxx.get('doc', False)
        if yyy:
            return yyy.get('d', [])
        return []

    def add_checked(self, word, data):
        """ add checked ``word`` form cache"""
        idata = {"a": word, 'd': data}
        self.db.insert(idata)

    def exists_cache_freq(self, word, wordtype):
        """ return if word exists in freq cache"""
        return word in self.cache['FreqWords']

    def get_freq(self, originalword, wordtype):
        """ return  ``word`` frequency form cache"""
        return self.cache['FreqWords'][wordtype].get(originalword, 0)

    def add_freq(self, original, wordtype, freq):
        """ add   ``original`` frequency ``freq`` to cache"""
        self.cache['FreqWords'][wordtype][original] = freq
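A usage sketch (paths hypothetical). Note that a non-default dp_path is only used for its directory; the cache file is always named .qalsadiCache:

cache_home = Cache()                            # opens ~/.qalsadiCache
cache_local = Cache('/some/project/words.txt')  # opens /some/project/.qalsadiCache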
Example #16
class SecuIn:
	'''
	Handles all data input into the database

	'''
	def __init__(self, passkey):
		self.key = passkey

		self.initQuestions = SecuQ(self.key)

		self.DBConfig = AppConfig()
		self.dbName = self.DBConfig.mapget('databaseinfo')['databasename']

		self.db = Database(self.dbName)

		initDay = DayEntry(self.key)  # checks the day hash or creates a new one
		self.dayKey = initDay.dayKey

	def questionDataIN(self,data):
	
		'''
		Data IN:
		{'a' : 2, 'b': 14 , 'c': 11, 'd': 43, 'note' : 'hello'}
	
		or
	
		{ 'b': 14 , 'c': 11, 'd': 43, 'note' : 'hello'} 
		 some entries may be missing
	
	
		Data OUT: (NEVER DELETE ANYTHING :) )
	
		{'date' : xx , _id: ###date2### , 'a':{'xxdate3xx':2},
						'b':{'xxdate3xx':14},
						'c':{'xxdate3xx':11},
						'note':{'xxdate3xx':'you'}}
	
	
		{'date' : xx , _id: ###date1### , 'a':{'xxdate1xx':1,'xxdate2xx':2},
						'b':{'xxdate1xx':14,'xxdate2xx':14},
						'c':{'xxdate1xx':11,'xxdate2xx':11},
						'note':{'xxdate2xx':'hello','xxdate3xx':'you'}
	
	
		'''


		timeIN = getTimeStamp()  # get the current time
		# initialize new questions
		# get data as a doc {'date':'xx/xx/xxTxx:xx:xxxx','question1':'x','question2':'x'}, same as dict format

		if(self.db.exists()):
			self.db.open()
			self.db.id_ind.enc_key = self.key
			dayrow = self.db.get('id', self.dayKey, with_doc=True)
	
			# this function assumes the database is already opened
			# this is going to be a tuple that is inserted directly

			# convert data from javascript to a python dict/json
			# if (type(data) is str):
			dataIN = eval(data)  # { 'b': 14, 'c': 11, 'd': 43, 'note': 'hello' }
			datachanged = dataIN.keys()

			for question in datachanged:
				try:
					dayrow[question][timeIN] = dataIN[question]
				except KeyError:  # first write to this key; initialize it
					dayrow[question] = {}
					dayrow[question][timeIN] = dataIN[question]

			self.db.update(dayrow)
			self.db.close()
			self.initQuestions.questionsValidate(datachanged)  # insert questions whose data changed

			#if all ok!
			return True
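questionDataIN deserializes its payload with eval(), which will execute arbitrary code if the input string is untrusted. A safer drop-in, assuming the payload is a plain Python-literal dict:

import ast

dataIN = ast.literal_eval(data)  # parses literals only; raises ValueError instead of executing code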
Example #17
class cache:
    """
        cache for word morphological analysis
    """
    DB_PATH = os.path.join(os.path.expanduser('~'), '.thaalabCache')
    def __init__(self, cache_path=False):
        """
        Create Analex Cache
        """
        # use this dictionary as a local cache,
        # The global db will be updated on destructing object
        # get the database path
        if hasattr(sys, 'frozen'): # only when running in py2exe this exists
            base = sys.prefix
        else: # otherwise this is a regular python script
            base = os.path.dirname(os.path.realpath(__file__))
        if not cache_path:
            file_path = self.DB_PATH
        else:
            file_path = os.path.join(os.path.dirname(cache_path), '.thaalabCache')
        
        self.cache = {}
        self.db = Database(file_path)
        if not self.db.exists():
            self.db.create()
            x_ind = WithAIndex(self.db.path, 'a')
            self.db.add_index(x_ind)
        else:
            self.db.open()

    def __del__(self):
        """
        Delete instance and clear cache
        
        """
        self.cache = None
        self.db.close()

    def update(self):
        """update data base """
        for word in self.cache:
            self.add_checked(word, self.cache[word])

    def is_already_checked(self, word):
        try:
            return bool(self.db.get('a', word))
        except Exception:
            return False

    def get_checked(self, word):
        try:
            x = self.db.get('a', word, with_doc=True)
            y = x.get('doc', False)
            if y:
                return y.get('d', [])
            return []
        except Exception:
            return []
    
    def add_checked(self, word, data):
        idata = {"a": word, 'd': data}
        try:
            saved = self.db.get('a', word, with_doc=True)
        except Exception:
            saved = False
        if saved:
            saved['doc']['d'] = data
            doc  = saved['doc']
            doc['update'] = True
            self.db.update(doc)
        else:
            self.db.insert(idata)

    
    def exists_cache_word(self, word):
        """ test if word exists in cache"""
        #if exists in cache dictionary
        if word in self.cache:
            return True
        else: # test in database
            if self.is_already_checked(word):
                stored_data = self.get_checked(word)
                self.cache[word] = stored_data
                return bool(self.cache[word])
            else:
                # add null dict to the word index to avoid multiple database check
                self.cache[word] = {}
                return {}            

    
    def get_relation_freq(self, word_prev, word_cur, relation):
        self.exists_cache_word(word_prev)
        return self.cache.get(word_prev, {}).get(word_cur, {}).get(relation, 0)
    
    def is_related(self, word_prev, word_cur):
        """ test if two words are related"""
        # search in cache
        self.exists_cache_word(word_prev)
        # if exists in cache or database
        return self.cache.get(word_prev, {}).get(word_cur, {})

    def add_relation(self, word_prev, word_cur, relation):
        

        if word_prev not in self.cache:
            # test first that is in db cache
            if self.is_already_checked(word_prev):
                stored_data = self.get_checked(word_prev)
                self.cache[word_prev] = stored_data
            else: # create a new entry
                self.cache[word_prev] = {word_cur:{relation:1, }, }

        # word_prev exists
        # add word_cur to previous dict
        elif word_cur not in self.cache[word_prev]:
            self.cache[word_prev][word_cur] = {relation:1,}
                
        elif relation not in self.cache[word_prev][word_cur]:
            self.cache[word_prev][word_cur][relation] = 1
        else:
            self.cache[word_prev][word_cur][relation] += 1

    def display_all(self):
        """ display all contents of data base """
        #~ pass
        print "aranasyn.cache: dislay all records in Thaalib Database """
        for curr in self.db.all('a', with_doc=True):
            print curr['doc']['a'], arepr(curr['doc']['d'])
Example #18
class SecuQ:
    '''
	Abstract:
	Handles all questions: input and output into the database





	def questionGet(self):

		- reloads all question database attributes



	def questionInsert(self,data,descriptor='inclusive'):
		-> data = input question dict
				ex: 

				'a':{'active':'True','typ':'slider','range':'0-100','aggregate':True, 'multipoint':True}, ,,,,.................

		- inclusive = updates entries (overwrites) (safe)
		- exclusive = deletes all entries not in input (dangerous)

	def questionsValidate(self,data):
		-> data = input list of questions [(keys)]

		each question is verified; if not initiated, it will be initiated afterwards


	'''
    def __init__(self, passkey):
        self.key = passkey
        self.indexdb = DBIndexSystem(self.key)
        #self.indexdb.masterIndex
        #self.indexdb.Qindex
        #self.indexdb.Tindex
        #self.indexdb.IndexedTable
        #self.indexdb.dbName

        self.db = Database(self.indexdb.dbName)

        #init variables blank, avoid key errors
        self.all = {}
        self.valid = {}
        self.active = {}
        self.notactive = {}
        self.unInit = {}
        self.typ = {}
        self.aggregate = {}
        self.multipoint = {}

        self.questionGet()  #populate variables

    #query: all , valid, true,unInit, false, inline
    def questionGet(
        self
    ):  #the passkey in question should be loaded from config or index passkey

        if (self.db.exists()):
            self.db.open()
            self.db.id_ind.enc_key = self.key

            #select Qindex
            Qindex = self.db.get('id', self.indexdb.Qindex, with_doc=True)

            oQ = Qindex.copy()
            # drop keys that are unnecessary to modify
            # using del <key> would mutate every variable that shares a reference to this dict
            oQ.pop('_rev', None)
            oQ.pop('_id', None)
            oQ.pop('t', None)
            #oQ.pop('questions', None)
            # returns a list of all keys, i.e. the questions and metadata

            #step through and assign

            questionsSet = oQ.keys()

            for question in questionsSet:

                self.all[question] = oQ[question]

                if oQ[question]['active'] == 'True':
                    self.active[question] = oQ[question]

                if oQ[question]['active'] == 'unInit':
                    self.unInit[question] = oQ[question]

                if (oQ[question]['active']
                        == 'unInit') | (oQ[question]['active'] == 'True'):
                    self.valid[question] = oQ[question]

                if oQ[question]['active'] == 'False':
                    self.notactive[question] = oQ[question]

                if oQ[question]['typ'] == 'note':
                    self.typ[question] = oQ[question]

                try:
                    if oQ[question]['aggregate'] == 'True':
                        self.aggregate[question] = oQ[question]
                except KeyError:
                    pass

                try:

                    if oQ[question]['multipoint'] == 'True':
                        self.multipoint[question] = oQ[question]
                except KeyError:
                    pass

            self.db.close()

            return True
            '''
			Qinfo=
			{
			'a':{'active':'True','typ':'slider','range':'0-100','aggregate':True, 'multipoint':True},
			
			'b':{'active':'True','typ':'slider','range':'0-100','aggregate':True, 'multipoint':False},
			
			'c':{'active':'True','typ':'slider','range':'0-100','aggregate':False, 'multipoint':True},
			
			'd':{'active':'True','typ':'slider','range':'0-100','aggregate':False, 'multipoint':False},
			
			'note':{'active':'True','typ':'note', 'multipoint':"False"}
			}
			'''

    def questionInsert(self,
                       data,
                       descriptor='inclusive'
                       ):  # this will be a class later for ... infinite data

        if (self.db.exists()):
            self.db.open()
            self.db.id_ind.enc_key = self.key
            #select Qindex
            Qindex = self.db.get('id', self.indexdb.Qindex, with_doc=True)

            #must copy in this way for dictionaries or else all references are affected
            oQ = Qindex.copy()
            # drop keys that are unnecessary to modify
            # using del <key> would mutate every variable that shares a reference to this dict
            oQ.pop('_rev', None)
            oQ.pop('_id', None)
            oQ.pop('t', None)
            oQ.pop('questions', None)

            # if (type(data) is str):
            nQL = eval(str(data))

            if (descriptor == "exclusive"):
                #exclusive, new data always overwrites old data, deletes any data that is not new
                # remove old keys from the row
                for key in oQ.keys(
                ):  #removes keys not in entry and overwrites everything
                    if key not in nQL.keys():
                        Qindex.pop(key, None)

            if (descriptor == "inclusive"):
                #only overwrites data, keeps old data that is unaffected
                pass

            #oQ.update(nQL) # update existing keys to be written

            Qindex.update(nQL)  #updates existing keys in row

            self.db.update(Qindex)  #updates NoSQL
            self.db.close()
            self.questionGet()
            return True
        else:
            print('CANNOT LOAD self.db')
            return False

    def questionsValidate(
            self,
            data):  # turns all used questions true; insert a list of questions

        for question in data:
            if question in self.unInit.keys():
                updated = {}
                updated[question] = self.unInit[question]
                updated[question]['active'] = "True"
                self.questionInsert(str(updated), "inclusive")

        #update class variables
        self.questionGet()

        return True
Example #19
class CodernityDataStore(object):
    PATH_TYPE = 'path'

    def __init__(self, db_path):
        self.db = Database(db_path)
        if self.db.exists():
            self.db.open()
        else:
            self.db.create()
            path_index = PathIndex(self.db.path, 'path')
            self.db.add_index(path_index)
            path_added_index = PathAddedIndex(self.db.path, 'path_added')
            self.db.add_index(path_added_index)

    @classmethod
    def dt_str(cls, datetime):
        return datetime.isoformat()[0:19]

    def add_video(self, path, video, added=None):
        logger.debug("add_video(%s, %s, %s)", path, video, added)
        added = added or datetime.utcnow()

        existing = list(self.db.get_many('path', path, with_doc=True))

        video_data, video_type = Serializer.serialize_video(video)
        data = dict(_t=self.PATH_TYPE, path=path, video_data=video_data, video_type=video_type,
                    downloads=dict(), added=self.dt_str(added))
        self.db.insert(data)

        for existing_path in existing:
            self.db.delete(existing_path['doc'])

    def add_download(self, path, provider, sub_id, language, score):
        logger.debug("add_download(%s, %s, %s, %s, %d)", path, provider, sub_id, language, score)
        data = self.db.get('path', path, with_doc=True)
        path = data['doc']
        download = dict(provider=provider, sub_id=sub_id, lang=str(language), score=score)
        if str(language) in path['downloads']:
            path['downloads'][str(language)].append(download)
        else:
            path['downloads'][str(language)] = [download]
        self.db.update(path)

    def get_downloads_for_video(self, path):
        logger.debug("get_downloads_for_video(%s)", path)
        data = self.db.get('path', path, with_doc=True)
        return data['doc']['downloads']

    @staticmethod
    def exceeds_desired_score(video, score, desired_movie_score, desired_episode_score):
        if isinstance(video, Episode):
            return score >= desired_episode_score
        elif isinstance(video, Movie):
            return score >= desired_movie_score

    def get_incomplete_videos(self, languages, desired_movie_score, desired_episode_score, ignore_older_than):
        logger.debug("get_incomplete_videos(%s, %d, %d, %s)", languages, desired_movie_score, desired_episode_score, ignore_older_than)
        within_date = self.db.get_many('path_added', start=self.dt_str(ignore_older_than), with_doc=True)
        results = []
        for path in (data['doc'] for data in within_date):
            video = Serializer.deserialize_video(path['video_type'], path['video_data'])
            needs = []
            for lang in languages:
                if str(lang) in path['downloads']:
                    current_score = max(download['score'] for download in path['downloads'][str(lang)])
                    if not self.exceeds_desired_score(video, current_score, desired_movie_score, desired_episode_score):
                        needs.append(dict(lang=lang, current_score=current_score))
                else:
                    needs.append(dict(lang=lang, current_score=0))
            if needs:
                results.append(dict(path=path['path'], video=video, needs=needs))

        logger.debug("found %d incomplete videos: %s", len(results), results)
        return results

    def close(self):
        self.db.close()
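PathIndex and PathAddedIndex are not shown. Plausible sketches, assuming a HashIndex keyed on the md5 of path for exact lookups and a TreeBasedIndex keyed on the 19-character added timestamp (matching dt_str) so get_many(start=...) can range-scan:

from hashlib import md5

from CodernityDB.hash_index import HashIndex
from CodernityDB.tree_index import TreeBasedIndex

class PathIndex(HashIndex):

    def __init__(self, *args, **kwargs):
        kwargs['key_format'] = '32s'
        super(PathIndex, self).__init__(*args, **kwargs)

    def make_key_value(self, data):
        if data.get('_t') == 'path':
            return md5(data['path'].encode('utf8')).hexdigest(), None
        return None

    def make_key(self, key):
        return md5(key.encode('utf8')).hexdigest()

class PathAddedIndex(TreeBasedIndex):

    def __init__(self, *args, **kwargs):
        kwargs['node_capacity'] = 10
        kwargs['key_format'] = '19s'   # matches dt_str: isoformat truncated to 19 chars
        super(PathAddedIndex, self).__init__(*args, **kwargs)

    def make_key_value(self, data):
        if data.get('_t') == 'path':
            return data['added'], None
        return None

    def make_key(self, key):
        return key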
Example #20
class SecuQ:
	'''
	Abstract:
	Handles all questions: input and output into the database





	def questionGet(self):

		- reloads all question database attributes



	def questionInsert(self,data,descriptor='inclusive'):
		-> data = input question dict
				ex: 

				'a':{'active':'True','typ':'slider','range':'0-100','aggregate':True, 'multipoint':True}, ,,,,.................

		- inclusive = updates entries (overwrites) (safe)
		- exclusive = deletes all entries not in input (dangerous)

	def questionsValidate(self,data):
		-> data = input list of questions [(keys)]

		each question is verified; if not initiated, it will be initiated afterwards


	'''
	def __init__(self,passkey):
		self.key = passkey
		self.indexdb = DBIndexSystem(self.key)
		#self.indexdb.masterIndex
		#self.indexdb.Qindex
		#self.indexdb.Tindex
		#self.indexdb.IndexedTable
		#self.indexdb.dbName
		

		self.db = Database(self.indexdb.dbName)

		#init variables blank, avoid key errors
		self.all = {}
		self.valid = {}
		self.active = {}
		self.notactive = {}
		self.unInit = {}
		self.typ = {}
		self.aggregate = {}
		self.multipoint = {}

		self.questionGet() #populate variables



	#query: all , valid, true,unInit, false, inline
	def questionGet(self): #the passkey in question should be loaded from config or index passkey


		if(self.db.exists()):
			self.db.open()
			self.db.id_ind.enc_key = self.key

			#select Qindex
			Qindex = self.db.get('id', self.indexdb.Qindex, with_doc=True)
	
	
			oQ = Qindex.copy()
			# drop keys that are unnecessary to modify
			# using del <key> would mutate every variable that shares a reference to this dict
			oQ.pop('_rev', None)
			oQ.pop('_id', None)
			oQ.pop('t', None)
			#oQ.pop('questions', None)
			#returns list in string for of all keys aka the question and metadata
	
			#step through and assign

			questionsSet = oQ.keys()

			for question in questionsSet:

				self.all[question] = oQ[question]

				if oQ[question]['active'] == 'True':
					self.active[question] = oQ[question]

				if oQ[question]['active'] == 'unInit':
					self.unInit[question] = oQ[question]

				if (oQ[question]['active'] == 'unInit') | (oQ[question]['active'] == 'True'):
					self.valid[question] = oQ[question]

				if oQ[question]['active'] == 'False':
					self.notactive[question] = oQ[question]

				if oQ[question]['typ'] == 'note':
					self.typ[question] = oQ[question]


				try:
					if oQ[question]['aggregate'] == 'True':
						self.aggregate[question] = oQ[question]
				except KeyError:
					pass
	
				try:
					
					if oQ[question]['multipoint'] == 'True':
						self.multipoint[question] = oQ[question]
				except KeyError:
					pass

			self.db.close()

			return True
			'''
			Qinfo=
			{
			'a':{'active':'True','typ':'slider','range':'0-100','aggregate':True, 'multipoint':True},
			
			'b':{'active':'True','typ':'slider','range':'0-100','aggregate':True, 'multipoint':False},
			
			'c':{'active':'True','typ':'slider','range':'0-100','aggregate':False, 'multipoint':True},
			
			'd':{'active':'True','typ':'slider','range':'0-100','aggregate':False, 'multipoint':False},
			
			'note':{'active':'True','typ':'note', 'multipoint':"False"}
			}
			'''


	def questionInsert(self, data, descriptor='inclusive'):  # this will be a class later for ... infinite data

		if (self.db.exists()):
			self.db.open()
			self.db.id_ind.enc_key = self.key
			#select Qindex
			Qindex = self.db.get('id', self.indexdb.Qindex, with_doc=True)
	
			#must copy in this way for dictionaries or else all references are affected
			oQ= Qindex.copy()
			# drop keys that are unnecessary to modify
			# using del <key> would mutate every variable that shares a reference to this dict
			oQ.pop('_rev', None)
			oQ.pop('_id', None)
			oQ.pop('t', None)
			oQ.pop('questions', None)
	
			# if (type(data) is str):
			nQL = eval(str(data))

			if (descriptor == "exclusive"):
				#exclusive, new data always overwrites old data, deletes any data that is not new
				# remove old keys from the row
				for key in oQ.keys(): #removes keys not in entry and overwrites everything
					if key not in nQL.keys():
						Qindex.pop(key,None)

	
			if (descriptor == "inclusive"): 
				#only overwrites data, keeps old data that is unaffected
				pass
	
	
			#oQ.update(nQL) # update existing keys to be written

			Qindex.update(nQL)  # updates existing keys in the row

			self.db.update(Qindex)  # updates NoSQL
			self.db.close()
			self.questionGet()
			return True
		else:
			print ('CANNOT LOAD self.db')
			return False
	
	
	
	def questionsValidate(self, data):  # turns all used questions true; insert a list of questions

		for question in data:
			if question in self.unInit.keys():
				updated = {}
				updated[question] = self.unInit[question]
				updated[question]['active'] = "True"
				self.questionInsert(str(updated),"inclusive")

		#update class variables
		self.questionGet()
	
		return True
Example #21
class cache:
    """
        cache for word morphological analysis
    """
    def __init__(self):
        """
        Create Analex Cache
        """
        # use this dictionary as a local cache,
        # The global db will be updated on destructing object
        self.cache = {}
        self.db = Database('~/tmp/thaalibCache')  # note: '~' is not expanded here; consider os.path.expanduser
        if not self.db.exists():
            self.db.create()
            x_ind = WithAIndex(self.db.path, 'a')
            self.db.add_index(x_ind)
        else:
            self.db.open()

    def __del__(self):
        """
        Delete instance and clear cache
        
        """
        self.cache = None
        self.db.close()

    def update(self):
        """update data base """
        for word in self.cache:
            self.add_checked(word, self.cache[word])

    def is_already_checked(self, word):
        try:
            return bool(self.db.get('a', word))
        except Exception:
            return False

    def get_checked(self, word):
        try:
            x = self.db.get('a', word, with_doc=True)
            y = x.get('doc', False)
            if y:
                return y.get('d', [])
            else:
                return []
        except Exception:
            return []

    def add_checked(self, word, data):
        idata = {"a": word, 'd': data}
        try:
            saved = self.db.get('a', word, with_doc=True)
        except Exception:
            saved = False
        if saved:
            saved['doc']['d'] = data
            doc = saved['doc']
            doc['update'] = True
            self.db.update(doc)
        else:
            self.db.insert(idata)

    def exists_cache_word(self, word):
        """ test if word exists in cache"""
        #if exists in cache dictionary
        if word in self.cache:
            return True
        else:  # test in database
            if self.is_already_checked(word):
                stored_data = self.get_checked(word)
                self.cache[word] = stored_data
                return bool(self.cache[word])
            else:
                # add null dict to the word index to avoid multiple database check
                self.cache[word] = {}
                return {}

    def get_relation_freq(self, word_prev, word_cur, relation):
        self.exists_cache_word(word_prev)
        return self.cache.get(word_prev, {}).get(word_cur,
                                                 {}).get(relation, 0)

    def is_related(self, word_prev, word_cur):
        """ test if two words are related"""
        # search in cache
        self.exists_cache_word(word_prev)
        # if exists in cache or database
        return self.cache.get(word_prev, {}).get(word_cur, {})

    def add_relation(self, word_prev, word_cur, relation):

        if word_prev not in self.cache:
            # test first that is in db cache
            if self.is_already_checked(word_prev):
                stored_data = self.get_checked(word_prev)
                self.cache[word_prev] = stored_data
            else:  # create a new entry
                self.cache[word_prev] = {
                    word_cur: {
                        relation: 1,
                    },
                }

        # word_prev exists
        # add word_cur to previous dict
        elif word_cur not in self.cache[word_prev]:
            self.cache[word_prev][word_cur] = {
                relation: 1,
            }

        elif relation not in self.cache[word_prev][word_cur]:
            self.cache[word_prev][word_cur][relation] = 1
        else:
            self.cache[word_prev][word_cur][relation] += 1

    def display_all(self):
        """ display all contents of data base """
        print "aranasyn.cache: dislay all records in Thaalib Database " ""
        for curr in self.db.all('a', with_doc=True):
            print curr['doc']['a'], arepr(curr['doc']['d'])
Example #22
class SecuFrame:  # in production, a key must be specified
    def __init__(self, passkey, date_range='all'):
        self.key = passkey
        self.Qeng = SecuQ(self.key)

        self.indexdb = DBIndexSystem(self.key)
        #self.indexdb.masterIndex
        #self.indexdb.Qindex
        #self.indexdb.Tindex
        #self.indexdb.IndexedTable
        #self.indexdb.dbName

        self.dayindex = DayEntry(self.key)
        #self.dayindex.dayKey

        self.DBConfig = AppConfig()

        self.dbName = self.DBConfig.mapget('databaseinfo')['databasename']

        self.db = Database(self.dbName)

        self.dbparseable = self.db2json(daterange=date_range, clean=True)

    def __del__(self):
        if (self.db.opened):
            self.db.close()

    def db2json(self, daterange, clean=True):
        '''
        > daterange
        - tuple of datetime objects to specify a range: (dateObj, dateObj)
        '''

        dfJSON = []

        if (self.db.exists()):
            self.db.open()
            self.db.id_ind.enc_key = self.key
            if daterange == "all":
                if clean == True:

                    for currHash in self.indexdb.IndexedTable:  #get row
                        curr = self.db.get('id', currHash, with_doc=True)

                        curr.pop('_id')
                        curr.pop('_rev')
                        dfJSON.append(curr)

                    self.db.close()
                    return dfJSON

                if clean == False:
                    for currHash in self.indexdb.IndexedTable:  #get row
                        curr = self.db.get('id', currHash, with_doc=True)
                        dfJSON.append(curr)

                    self.db.close()
                    return dfJSON

            if daterange == "today":
                if clean == True:
                    curr = self.db.get('id',
                                       self.dayindex.dayKey,
                                       with_doc=True)
                    curr.pop('_id')
                    curr.pop('_rev')
                    dfJSON.append(curr)
                    self.db.close()
                    return dfJSON
        '''
		#@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@  

			if ((type(daterange) == tuple) & (len(daterange)<=2) & (daterange[0]<daterange[1]) &(type(daterange[0])==datetime.datetime) & (type(daterange[1])==datetime.datetime): #if it's a valid daterange
				if clean == True
					
					for curr in db.all('id'): #get row   
						currdto=dt.datetime.strptime(curr['date'],"%Y-%m-%d %H:%M:%S.%f")
						if ( daterange[0] <= currdto <= daterange[1]):







							curr.pop('_id')
							curr.pop('_rev')
							dfJSON.append(curr)
					db.close()
					return dfJSON
			
				if clean == False:
					for curr in db.all('id'): #get row		  
						dfJSON.append(curr)
					db.close()
					return dfJSON





			else: # raise some kindof exception
				return False
			
	
		#@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
		'''

    def iffo(self, daterange="all", obey=True):
        self.dbfiltered = []
        self.dbdatemapped = {}
        self.infoIndex = self.Qeng.valid  # {'a':{'active':'True','typ':'slider','range':'0-100','aggregate':'True', 'multipoint':'True'}}
        if obey == True:
            for curr in self.dbparseable:  #get row
                '''
				{'date' : xx , 'a':{'xxdate1xx':1,'xxdate2xx':2},
					'b':{'xxdate1xx':14,'xxdate2xx':14},
					'c':{'xxdate1xx':11,'xxdate2xx':11},
					'note':{'xxdate2xx':'hello','xxdate3xx':'you'}
				}
				'''

                tmp = {}  #holder that is constructed

                rowDate = curr["date"]  #'date' : xx
                '''
				date : xx
				'''

                questionsData = curr.keys()  # returns a list
                questionsData.remove('date')
                '''
				['a','b','c','note']
				'''
                #questionsData.remove('note')

                for question in questionsData:  #get question from list

                    try:
                        if (
                                self.infoIndex[question]['multipoint'] ==
                                "True"
                        ):  # & (self.infoIndex[question]['aggregate']== "False"): #display all points

                            multiP = curr[question].keys()
                            '''
							in 'a'
							['xxdate1xx','xxdate2xx']
							'''

                            for point in multiP:  #points are dates
                                try:
                                    tmp[question][point] = curr[question][
                                        point]
                                except KeyError:
                                    tmp[question] = {}
                                    tmp[question][point] = curr[question][
                                        point]

                                try:
                                    self.dbdatemapped[point][question] = curr[
                                        question][point]
                                except KeyError:  #avoid overwriting
                                    self.dbdatemapped[point] = {}
                                    self.dbdatemapped[point][question] = curr[
                                        question][point]

                        if (self.infoIndex[question]['multipoint'] == "True"
                            ) & (
                                self.infoIndex[question]['aggregate'] == "True"
                            ):  #display only one aggregate in it's own column
                            '''
							creates unique key for aggregate
							'''
                            datelist = curr[question].keys(
                            )  #gets all dates within the question
                            datelist.sort()  #ensure earliest to latest
                            aggregate_key_name = str(question) + "_aggregate"
                            tmp[aggregate_key_name] = {}

                            try:  # as integers
                                tmp[aggregate_key_name][rowDate] = 0
                                aggregate_sum = 0
                                for point in datelist:
                                    aggregate_sum += curr[question][point]
                            except TypeError:  #process aggregate function as concatenated strings
                                tmp[aggregate_key_name][rowDate] = ""
                                aggregate_sum = ""
                                for point in datelist:
                                    aggregate_sum += curr[question][
                                        point] + "\n"

                            try:
                                self.dbdatemapped[rowDate][
                                    aggregate_key_name] = aggregate_sum
                            except KeyError:
                                self.dbdatemapped[rowDate] = {}
                                self.dbdatemapped[rowDate][
                                    aggregate_key_name] = aggregate_sum

                            tmp[aggregate_key_name] = {}
                            tmp[aggregate_key_name][
                                rowDate] = aggregate_sum  # replaces with single

                        if ((self.infoIndex[question]['multipoint'] == "False")
                                &
                            (self.infoIndex[question]['aggregate']
                             == "False")) | (self.infoIndex[question]['typ']
                                             == "note"):  #display only one
                            '''
							Puts last entry under rowdate 
							'''
                            ''' 
							NOTE HANDLING
							in future this should select the most positive note based on sentiment analysis
	
							- For now it will select the last note typed in
							'''

                            datelist = curr[question].keys(
                            )  #gets all dates within the question

                            pointKey = self.getLastDate(
                                datelist
                            )  #selects most recent date from list (keys)
                            try:
                                tmp[question][rowDate] = curr[question][
                                    pointKey]  # replaces with single, most recent, point only
                            except KeyError:
                                tmp[question] = {}
                                tmp[question][rowDate] = curr[question][
                                    pointKey]  # replaces with single, most recent, point only
                            try:
                                self.dbdatemapped[rowDate][question] = curr[
                                    question][pointKey]
                            except KeyError:
                                self.dbdatemapped[rowDate] = {}
                                self.dbdatemapped[rowDate][question] = curr[
                                    question][pointKey]

                        if (self.infoIndex[question]['multipoint'] == "False"
                            ) & (
                                self.infoIndex[question]['aggregate'] == "True"
                            ):  #display only one aggregate in it's own column
                            datelist = curr[question].keys(
                            )  #gets all dates within the question
                            datelist.sort()  #ensure earliest to latest

                            tmp[question] = {}

                            try:  # as integers
                                tmp[question][rowDate] = 0
                                aggregate_sum = 0
                                for point in datelist:
                                    aggregate_sum += curr[question][point]
                            except TypeError:  #process aggregate function as concatenated strings
                                tmp[question][rowDate] = ""
                                aggregate_sum = ""
                                for point in datelist:
                                    aggregate_sum += curr[question][
                                        point] + "\n"

                            #output
                            tmp[question][rowDate] = aggregate_sum
                            #remapping is additive
                            try:
                                self.dbdatemapped[rowDate][
                                    question] = aggregate_sum
                            except KeyError:
                                self.dbdatemapped[rowDate] = {}
                                self.dbdatemapped[rowDate][
                                    question] = aggregate_sum
                    except KeyError:
                        continue

                self.dbfiltered.append(tmp)

        return self

    def igraph(self):
        import datetime as dt
        self.graphFrame = []

        graphpoints = self.dbdatemapped.keys()
        graphdates = []

        for date in graphpoints:
            try:
                graphdates.append(
                    dt.datetime.strptime(date, "%Y-%m-%d %H:%M:%S.%f"))
            except ValueError:
                graphdates.append(
                    dt.datetime.strptime(date, "%Y-%m-%d %H:%M:%S"))

        sortkeydto, pointerdts = zip(*sorted(zip(graphdates, graphpoints)))

        for i in xrange(
                0,
                len(pointerdts)):  # want {date: xxxISOxxx , a:x ,b:x ,note:x}

            tmpRow = {}
            tmpRow['date'] = sortkeydto[i].isoformat() + "Z"
            for question in self.dbdatemapped[pointerdts[i]]:
                tmpRow[question] = self.dbdatemapped[pointerdts[i]][question]

            self.graphFrame.append(tmpRow)
        return self

        #map accordingly with date to iso format

    def Agraph(self, neuroOutput):
        import datetime as dt
        self.neuroOutput = neuroOutput  # [(dto,dto),(dto,dto),,,,]
        self.AgraphFrame = []

        graphpoints = self.dbdatemapped.keys()
        graphdates = []

        self.last_date = None
        self.curr_date = None

        self.neuro_scan_count = 0
        self.neuro_highlight_complete = False

        for date in graphpoints:
            try:
                graphdates.append(
                    dt.datetime.strptime(date, "%Y-%m-%d %H:%M:%S.%f"))
            except ValueError:
                graphdates.append(
                    dt.datetime.strptime(date, "%Y-%m-%d %H:%M:%S"))

        sortkeydto, pointerdts = zip(*sorted(zip(graphdates, graphpoints)))

        for i in xrange(
                0,
                len(pointerdts)):  # want {date: xxxISOxxx , a:x ,b:x ,note:x}
            tmpRow = {}
            # set to white /  transparent first
            self.curr_date = sortkeydto[i]

            if (self.neuro_highlight_complete == False):
                tmpScanPos = divmod(self.neuro_scan_count,
                                    2)  # (quotient, remainder)
                #print "tmpScanPos: " +str(tmpScanPos) + " self.neuro_scan_count:  " + str(self.neuro_scan_count)
                tmpNeuroDate = self.neuroOutput[tmpScanPos[0]][tmpScanPos[1]]

                if (self.last_date == None): tmpRow["lineColor"] = "#FFFFFF"
                elif (self.curr_date == tmpNeuroDate):
                    if (tmpScanPos[1] == 0):
                        tmpRow["lineColor"] = "#CC0000"  #if start of range
                    if (tmpScanPos[1] == 1):
                        tmpRow["lineColor"] = "#FFFFFF"  # if end of range
                    self.neuro_scan_count += 1

                elif (self.last_date < tmpNeuroDate < self.curr_date):
                    if (tmpScanPos[1] == 0):
                        tmpRow["lineColor"] = "#CC0000"  #if start of range
                    if (tmpScanPos[1] == 1):
                        tmpRow["lineColor"] = "#FFFFFF"  # if end of range
                    self.neuro_scan_count += 1

                if ((tmpScanPos[0] + tmpScanPos[0]) == len(neuroOutput)):
                    self.neuro_highlight_complete = True  #checks if this should be the last iteration

            tmpRow['date'] = sortkeydto[i].isoformat() + "Z"
            for question in self.dbdatemapped[pointerdts[i]]:
                tmpRow[question] = self.dbdatemapped[pointerdts[i]][question]

            self.AgraphFrame.append(tmpRow)
            self.last_date = sortkeydto[i]

        #map accordingly with date to iso format

    def dayresponse(self):
        self.responseFrame = {}
        try:
            tmp = self.dbdatemapped[self.dayindex.todayDate]
        except KeyError:  #means there is no information for the daykey
            return self
        # remove aggregate keyword, json handles association

        for question in tmp.keys():
            cleankey = question.replace('_aggregate', '')
            self.responseFrame[cleankey] = tmp[question]

        return self

    def orderedmap(self):
        import datetime as dt
        self.processFrameList = []
        self.processFrameDict = {}

        graphpoints = self.dbdatemapped.keys()
        graphdates = []

        for date in graphpoints:
            try:
                graphdates.append(
                    dt.datetime.strptime(date, "%Y-%m-%d %H:%M:%S.%f"))
            except ValueError:
                graphdates.append(
                    dt.datetime.strptime(date, "%Y-%m-%d %H:%M:%S"))

        sortkeydto, pointerdts = zip(*sorted(zip(graphdates, graphpoints)))

        for i in xrange(
                0,
                len(pointerdts)):  # want {date: xxxISOxxx , a:x ,b:x ,note:x}

            tmpRow = {}
            tmpRow[sortkeydto[i]] = {}
            self.processFrameDict[sortkeydto[i]] = {}

            for question in self.dbdatemapped[pointerdts[i]]:
                tmpRow[sortkeydto[i]][question] = self.dbdatemapped[
                    pointerdts[i]][question]
                self.processFrameDict[sortkeydto[i]][
                    question] = self.dbdatemapped[pointerdts[i]][question]

            self.processFrameList.append(tmpRow)
        return self

    def getLastDate(self, dates):  #input a list of dates
        dates.sort(reverse=True)
        return dates[0]  #output most recent date in subset
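getLastDate relies on lexicographic sort matching chronological order, which holds for the zero-padded "%Y-%m-%d %H:%M:%S[.%f]" strings used throughout. The repeated two-format strptime dance in igraph/Agraph/orderedmap could be factored into a helper along these lines (a sketch, not part of the original):

import datetime as dt

def parse_ts(stamp):
    # the database stores timestamps with or without fractional seconds
    for fmt in ("%Y-%m-%d %H:%M:%S.%f", "%Y-%m-%d %H:%M:%S"):
        try:
            return dt.datetime.strptime(stamp, fmt)
        except ValueError:
            continue
    raise ValueError("unrecognized timestamp: %r" % stamp)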
Example #23
class CodernityDB(BaseService):

    """A service providing a codernity db interface."""

    name = 'db'
    default_config = dict(db=dict(path=''), app=dict(dir=''))

    def __init__(self, app):
        super(CodernityDB, self).__init__(app)
        self.dbfile = os.path.join(self.app.config['app']['dir'],
                                   self.app.config['db']['path'])
        self.db = None
        self.uncommitted = dict()
        self.stop_event = Event()
        self.db = Database(self.dbfile)
        try:
            log.info('opening db', path=self.dbfile)
            self.db.open()
        except DatabasePathException:
            log.info('db does not exist, creating it', path=self.dbfile)
            self.db.create()
            self.db.add_index(MD5Index(self.dbfile, 'key'))

    def _run(self):
        self.stop_event.wait()

    def stop(self):
        # commit?
        log.info('closing db')
        if self.started:
            self.db.close()
            self.stop_event.set()

    def get(self, key):
        log.debug('getting entry', key=key)
        if key in self.uncommitted:
            if self.uncommitted[key] is None:
                raise KeyError("key not in db")
            return self.uncommitted[key]
        try:
            value = self.db.get('key', key, with_doc=True)['doc']['value']
        except RecordNotFound:
            raise KeyError("key not in db")
        return compress.decompress(value)

    def put(self, key, value):
        log.debug('putting entry', key=key, value=value)
        self.uncommitted[key] = value

    def commit(self):
        log.debug('committing', db=self)
        for k, v in self.uncommitted.items():
            if v is None:
                doc = self.db.get('key', k, with_doc=True)['doc']
                self.db.delete(doc)
            else:
                self.db.insert({'key': k, 'value': compress.compress(v)})
        self.uncommitted.clear()

    def delete(self, key):
        log.debug('deleting entry', key=key)
        self.uncommitted[key] = None

    def __contains__(self, key):
        try:
            self.get(key)
        except KeyError:
            return False
        return True

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.db == other.db

    def __repr__(self):
        return '<DB at %d uncommitted=%d>' % (id(self.db), len(self.uncommitted))

    def inc_refcount(self, key, value):
        self.put(key, value)

    def dec_refcount(self, key):
        pass

    def revert_refcount_changes(self, epoch):
        pass

    def commit_refcount_changes(self, epoch):
        pass

    def cleanup(self, epoch):
        pass

    def put_temporarily(self, key, value):
        self.inc_refcount(key, value)
        self.dec_refcount(key)
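
A hedged usage sketch for the service above; `app` is an assumed object carrying config['app']['dir'] and config['db']['path'], and the key/value strings are illustrative:

service = CodernityDB(app)
service.put('greeting', 'hello')  # staged in the uncommitted buffer only
assert 'greeting' in service      # __contains__ sees uncommitted entries
service.commit()                  # inserts compressed values into the db
print(service.get('greeting'))    # reads back and decompresses the value
service.delete('greeting')        # stages a tombstone (None)
service.commit()                  # removes the record from the index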
Example #24
class DBIndexSystem:
    '''
    QindexGet: get the question index key.

    Allows simple access to database index variables, e.g.:

        self.indexdb = DBIndexSystem(passkey)
        # self.indexdb.masterIndex
        # self.indexdb.Qindex
        # self.indexdb.Tindex
        # self.indexdb.IndexedTable
        # self.indexdb.dbName
    '''
    def __init__(self, passkey):
        self.key = passkey

        self.DBConfig = AppConfig()
        # sanity-check the db configuration; if these lookups fail, warn before any deletion
        self.dbName = self.DBConfig.mapget('databaseinfo')['databasename']

        self.masterIndex = self.DBConfig.mapget('databaseinfo')['indexkey']

        self.db = Database(self.dbName)

        if (self.db.exists()):
            self.db.open()
            self.db.id_ind.enc_key = self.key
            try:
                self.Qindex = self.QindexGet()  # question index key
                self.Tindex = self.TindexGet()  # table index key
                self.IndexedTable = self.tableGet()  # regular 'table' index: ordered list of hash pointers
            except Exception:
                print 'bad index'
                self.db.close()
                self.sanitycheck = False
            else:
                self.db.close()
                self.sanitycheck = True

    def __del__(self):
        if (self.db.opened):
            self.db.close()

    def QindexGet(self):
        masterRow = self.db.get('id', self.masterIndex, with_doc=True)
        Qindexkey = masterRow['Qindex']
        return Qindexkey  # key of the row holding questions as [a,b,c,d,e,f,g,h,i]
        # TODO: handle the row not being found; if it is missing, the db is corrupt

    def TindexGet(self, tableName='Tindex'):
        masterRow = self.db.get('id', self.masterIndex, with_doc=True)
        Tindexkey = masterRow[tableName]
        return Tindexkey  # key of the table-index row
        # TODO: handle the row not being found; if it is missing, the db is corrupt

    def tableGet(self, tableName='Tindex'):
        # the key should come from the config file or the master index
        Tindexkey = self.TindexGet(tableName)  # not stored on self: this may be a custom table
        Tindex = self.db.get('id', Tindexkey, with_doc=True)
        return Tindex['table']  # table entries as [######, ######, ######, ...]

    def TindexPut(self, data, tableName='Tindex'):  # append ordered hash data to the table
        if (self.db.exists()):
            self.db.open()
            self.db.id_ind.enc_key = self.key
            Tindexkey = self.TindexGet(tableName)
            TindexRow = self.db.get('id', Tindexkey, with_doc=True)
            try:
                TindexRow['table'].append(data)  # append the new entry to the current table array
            except KeyError:
                TindexRow['table'] = []
                TindexRow['table'].append(data)
            self.db.update(TindexRow)  # write the table array back under the same index key
            self.db.close()
            return True

    def selectrow(self, idIN):
        #check if already open
        if (self.db.exists()):
            if (self.db.opened == False):
                self.db.open()
                self.db.id_ind.enc_key = self.key
                data = self.db.get('id', idIN, with_doc=True)  # fetch the row by id
                self.db.close()
                return data
            else:
                data = self.db.get('id', idIN, with_doc=True)
                return data

    def updaterow(self, data):

        if (self.db.exists()):
            if (self.db.opened == False):
                self.db.open()
                self.db.id_ind.enc_key = self.key
                self.db.update(data)  #must include _id, must be dict/json
                self.db.close()
                return True
            else:
                self.db.update(data)  #must include _id, must be dict/json
                return True
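
A hedged usage sketch, assuming a database and config previously created by DBSubsystem (Example #27) and a passkey matching the encryption key; the row edit is illustrative:

indexdb = DBIndexSystem(passkey)
if indexdb.sanitycheck:
    print(indexdb.IndexedTable)  # ordered list of day-row hashes
    row = indexdb.selectrow(indexdb.IndexedTable[0])
    row['note'] = 'edited'
    indexdb.updaterow(row)       # the dict must still carry its _id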
Example #25
class DayEntry:  #checker class
    '''
    Checks the day hash or creates a new one.

    Once instantiated, it checks:
        - whether the day key in config coincides with today's date
        - if there is no date in config, it scans the database for one matching today's
        - if there is no date in config, or it is the wrong one, a new row is made
          (only if no row with a matching date exists anywhere in the db)
    '''
    def __init__(self, passkey):
        self.todayDate = str(getDayStart())
        self.key = passkey

        self.DBConfig = AppConfig()

        self.dayKey = None  # set up before checking, to avoid an AttributeError
        self.dbName = self.DBConfig.mapget('databaseinfo')['databasename']
        self.db = Database(self.dbName)

        try:
            self.dayKey = self.DBConfig.mapget('databaseinfo')['daykey']
        except KeyError:  # nothing in config, so check the db for an entry
            daystatus = self.checkfordate()
            if (daystatus == False):
                self.makeDayRow()
                self.DBConfig.putmap('databaseinfo', 'daykey', self.dayKey)
            # if True, do nothing; the config file is fixed
        else:

            daystatus = self.checkfordate()  # if False, scans for the right row and fixes config

            oldcompare = self.dayKey
            self.dayKey = self.DBConfig.mapget('databaseinfo')['daykey']
            if (daystatus == False) & (oldcompare == self.dayKey):
                self.makeDayRow()
                self.DBConfig.putmap('databaseinfo', 'daykey', self.dayKey)
            if (daystatus == True):  # everything is fine
                pass  # nothing created; just a check

    def __del__(self):
        if (self.db.opened):
            self.db.close()

    def makeDayRow(self):

        if (self.checkfordate() == True):  # already exists; no need to write
            return False

        dbindex = DBIndexSystem(self.key)

        dayrow = {}
        dayrow["date"] = self.todayDate
        if (self.db.exists() == True):
            self.db.open()
            self.db.id_ind.enc_key = self.key
            self.db.insert(dayrow)
            self.db.close()  # must close first; no double opens
            self.getDayRowID()  # refresh the day key
            dbindex.TindexPut(self.dayKey)
            # would normally also write the key to the config file
            return True

    def getDayRowID(self):  #gets row id by date
        if (self.db.exists()):
            self.db.open()
            self.db.id_ind.enc_key = self.key
            for curr in self.db.all('id'):
                try:
                    if curr['date'] == str(self.todayDate):
                        dataop = "".join(curr['_id'])  # _id comes back as a list of characters; join to a string
                        self.db.close()
                        self.dayKey = dataop
                        return dataop  #returns datestring
                except KeyError:
                    continue
            # if it makes it here, the entry doesn't exist
            self.db.close()
            return False  # there is a problem

    def checkfordate(self):  # checks for the existence of today's date in the db
        if (self.db.exists()):
            self.db.open()
            self.db.id_ind.enc_key = self.key
            if (self.dayKey != None):
                dayrow = self.db.get('id', self.dayKey, with_doc=True)
                # does not handle a config entry that no longer exists in the db
                if dayrow['date'] == str(self.todayDate):
                    self.db.close()
                    return True
            for curr in self.db.all('id'):  #try to search
                try:
                    if curr['date'] == str(self.todayDate):
                        self.DBConfig.putmap('databaseinfo', 'daykey', "".join(
                            curr['_id']))  #fix lost entry
                        self.db.close()
                        return False
                except KeyError:
                    continue
            # if it makes it here, the entry doesn't exist and nothing was remapped
            self.db.close()
            return False
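
A short, hedged sketch of the checker in use, assuming the same passkey and an existing config/db pair:

day = DayEntry(passkey)  # runs the date check; creates today's row if missing
print(day.dayKey)        # hash id of today's row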
Example #26
class SecuFrame: # in production, the key must be specified
	def __init__(self,passkey,date_range='all'):
		self.key = passkey
		self.Qeng = SecuQ(self.key)

		self.indexdb = DBIndexSystem(self.key)
		#self.indexdb.masterIndex
		#self.indexdb.Qindex
		#self.indexdb.Tindex
		#self.indexdb.IndexedTable
		#self.indexdb.dbName

		self.dayindex = DayEntry(self.key)
		#self.dayindex.dayKey

		self.DBConfig = AppConfig()

		self.dbName = self.DBConfig.mapget('databaseinfo')['databasename']

		self.db = Database(self.dbName)


		self.dbparseable = self.db2json(daterange=date_range,clean=True)
		
	def __del__(self):
		if (self.db.opened):
			self.db.close()

		
		
	def db2json(self,daterange,clean=True):
		'''
		daterange: "all", "today", or a tuple of datetime objects
		specifying an inclusive range: (startDate, endDate)
		'''
		
		dfJSON = []
		
		if(self.db.exists()):
			self.db.open()
			self.db.id_ind.enc_key = self.key
			if daterange == "all":
				if clean == True:


					for currHash in self.indexdb.IndexedTable: #get row  
						curr = self.db.get('id', currHash, with_doc=True)

						curr.pop('_id')
						curr.pop('_rev')
						dfJSON.append(curr)

					self.db.close()
					return dfJSON
			
				if clean == False:
					for currHash in self.indexdb.IndexedTable: #get row  
						curr = self.db.get('id', currHash, with_doc=True)
						dfJSON.append(curr)

					self.db.close()
					return dfJSON

			if daterange == "today":
				if clean == True:
					curr = self.db.get('id', self.dayindex.dayKey, with_doc=True)
					curr.pop('_id')
					curr.pop('_rev')
					dfJSON.append(curr)
					self.db.close()
					return dfJSON
				
		'''
		Draft (not yet enabled): arbitrary (startDate, endDate) tuple handling.

			if ((type(daterange) == tuple) & (len(daterange) <= 2)
					& (daterange[0] < daterange[1])
					& (type(daterange[0]) == datetime.datetime)
					& (type(daterange[1]) == datetime.datetime)):  # a valid date range
				if clean == True:
					for curr in db.all('id'):  # get row
						currdto = dt.datetime.strptime(curr['date'], "%Y-%m-%d %H:%M:%S.%f")
						if daterange[0] <= currdto <= daterange[1]:
							curr.pop('_id')
							curr.pop('_rev')
							dfJSON.append(curr)
					db.close()
					return dfJSON
				if clean == False:
					for curr in db.all('id'):  # get row
						dfJSON.append(curr)
					db.close()
					return dfJSON
			else:  # raise some kind of exception
				return False
		'''

	def iffo(self, daterange = "all", obey = True):
		self.dbfiltered = []
		self.dbdatemapped = {}
		self.infoIndex = self.Qeng.valid # {'a':{'active':'True','typ':'slider','range':'0-100','aggregate':'True', 'multipoint':'True'}}
		if obey == True :
			for curr in self.dbparseable: #get row
				'''
				{'date' : xx , 'a':{'xxdate1xx':1,'xxdate2xx':2},
					'b':{'xxdate1xx':14,'xxdate2xx':14},
					'c':{'xxdate1xx':11,'xxdate2xx':11},
					'note':{'xxdate2xx':'hello','xxdate3xx':'you'}
				}
				'''

				tmp = {}  # holder for the reshaped row

				rowDate = curr["date"] #'date' : xx
				'''
				date : xx
				'''

				questionsData = curr.keys() # returns a list
				questionsData.remove('date')
				'''
				['a','b','c','note']
				'''
				#questionsData.remove('note')

				for question in questionsData: #get question from list	

					try:
						if (self.infoIndex[question]['multipoint']== "True"): # & (self.infoIndex[question]['aggregate']== "False"): #display all points
		
		
							multiP = curr[question].keys()
							'''
							in 'a'
							['xxdate1xx','xxdate2xx']
							'''
							
							for point in multiP: #points are dates
								try:
									tmp[question][point] = curr[question][point]
								except KeyError:
									tmp[question]={}
									tmp[question][point] = curr[question][point]
	
								try:
									self.dbdatemapped[point][question] = curr[question][point]
								except KeyError: #avoid overwriting
									self.dbdatemapped[point] = {}
									self.dbdatemapped[point][question] = curr[question][point]			
	
						
						if (self.infoIndex[question]['multipoint']== "True") & (self.infoIndex[question]['aggregate']== "True"): #display only one aggregate in its own column
	
							'''
							creates unique key for aggregate
							'''
							datelist = curr[question].keys() #gets all dates within the question 
							datelist.sort()	 #ensure earliest to latest
							aggregate_key_name = str(question)+"_aggregate"
							tmp[aggregate_key_name]={}
	
	
							try: #as integers
								tmp[aggregate_key_name][rowDate] = 0
								aggregate_sum = 0
								for point in datelist:
									aggregate_sum += curr[question][point]
							except TypeError: #process aggregate function as concatenated strings
								tmp[aggregate_key_name][rowDate] = ""
								aggregate_sum = ""
								for point in datelist:
									aggregate_sum += curr[question][point] + "\n"
								
	
	
							try:
								self.dbdatemapped[rowDate][aggregate_key_name] = aggregate_sum
							except KeyError: 
								self.dbdatemapped[rowDate] = {}
								self.dbdatemapped[rowDate][aggregate_key_name] = aggregate_sum
		
							tmp[aggregate_key_name] = {}
							tmp[aggregate_key_name][rowDate] = aggregate_sum # replaces with single 
	
	
	
						if ((self.infoIndex[question]['multipoint']== "False") & (self.infoIndex[question]['aggregate']== "False")) | (self.infoIndex[question]['typ']== "note"): #display only one
							'''
							Puts last entry under rowdate 
							'''
	
	
	
							''' 
							NOTE HANDLING
							in future this should select the most positive note based on sentiment analysis
	
							- For now it will select the last note typed in
							'''
	
	
							datelist = curr[question].keys() #gets all dates within the question
		
							pointKey = self.getLastDate(datelist) #selects most recent date from list (keys)
							try:
								tmp[question][rowDate] = curr[question][pointKey] # replaces with single, most recent, point only
							except KeyError:
								tmp[question]={}
								tmp[question][rowDate] = curr[question][pointKey] # replaces with single, most recent, point only
							try:
								self.dbdatemapped[rowDate][question]  = curr[question][pointKey]
							except KeyError:
								self.dbdatemapped[rowDate] = {}
								self.dbdatemapped[rowDate][question]  = curr[question][pointKey]
	
	
		
						if (self.infoIndex[question]['multipoint']== "False") & (self.infoIndex[question]['aggregate']== "True"): #display only one aggregate in its own column
							datelist = curr[question].keys() #gets all dates within the question 
							datelist.sort()	 #ensure earliest to latest
		
							tmp[question]={}
							
							try: #as integers
								tmp[question][rowDate] = 0
								aggregate_sum = 0
								for point in datelist:
									aggregate_sum += curr[question][point]
							except TypeError: #process aggregate function as concatenated strings
								tmp[question][rowDate] = ""
								aggregate_sum = ""
								for point in datelist:
									aggregate_sum += curr[question][point] + "\n"
		
							#output	
							tmp[question][rowDate] = aggregate_sum
							#remapping is additive
							try:
								self.dbdatemapped[rowDate][question]  = aggregate_sum
							except KeyError:
								self.dbdatemapped[rowDate] = {}
								self.dbdatemapped[rowDate][question]  = aggregate_sum
					except KeyError:
						continue

				self.dbfiltered.append(tmp)

		return self

	def igraph(self):
		import datetime as dt
		self.graphFrame = []

		graphpoints = self.dbdatemapped.keys()
		graphdates = []

		
		for date in graphpoints:
			try:
				graphdates.append(dt.datetime.strptime(date,"%Y-%m-%d %H:%M:%S.%f"))
			except ValueError:
				graphdates.append(dt.datetime.strptime(date,"%Y-%m-%d %H:%M:%S"))

		sortkeydto, pointerdts = zip(*sorted(zip(graphdates, graphpoints)))

		for i in xrange(0,len(pointerdts)): # want {date: xxxISOxxx , a:x ,b:x ,note:x}

			tmpRow = {}
			tmpRow['date'] = sortkeydto[i].isoformat() + "Z"
			for question in self.dbdatemapped[pointerdts[i]]:
				tmpRow[question] = self.dbdatemapped[pointerdts[i]][question]
				
			self.graphFrame.append(tmpRow)
		return self
			 
		#map accordingly with date to iso format



	def Agraph(self,neuroOutput):
		import datetime as dt
		self.neuroOutput = neuroOutput # [(dto,dto),(dto,dto),,,,]
		self.AgraphFrame = []

		graphpoints = self.dbdatemapped.keys()
		graphdates = []

		self.last_date = None
		self.curr_date = None

		self.neuro_scan_count = 0 
		self.neuro_highlight_complete = False

		for date in graphpoints:
			try:
				graphdates.append(dt.datetime.strptime(date,"%Y-%m-%d %H:%M:%S.%f"))
			except ValueError:
				graphdates.append(dt.datetime.strptime(date,"%Y-%m-%d %H:%M:%S"))

		sortkeydto, pointerdts = zip(*sorted(zip(graphdates, graphpoints)))

		for i in xrange(0,len(pointerdts)): # want {date: xxxISOxxx , a:x ,b:x ,note:x}
			tmpRow = {}
			# set to white /  transparent first
			self.curr_date = sortkeydto[i]

			if (self.neuro_highlight_complete == False):
				tmpScanPos = divmod(self.neuro_scan_count,2) # (quotient, remainder)
				#print "tmpScanPos: " +str(tmpScanPos) + " self.neuro_scan_count:  " + str(self.neuro_scan_count)
				tmpNeuroDate = self.neuroOutput[tmpScanPos[0]][tmpScanPos[1]]

				if ( self.last_date == None): tmpRow["lineColor"] = "#FFFFFF"
				elif (self.curr_date == tmpNeuroDate):
					if (tmpScanPos[1] == 0 ): tmpRow["lineColor"] = "#CC0000" #if start of range
					if (tmpScanPos[1] == 1 ): tmpRow["lineColor"] = "#FFFFFF" # if end of range
					self.neuro_scan_count +=1

				elif(self.last_date < tmpNeuroDate < self.curr_date):
					if (tmpScanPos[1] == 0 ): tmpRow["lineColor"] = "#CC0000" #if start of range
					if (tmpScanPos[1] == 1 ): tmpRow["lineColor"] = "#FFFFFF" # if end of range
					self.neuro_scan_count +=1

				if self.neuro_scan_count == 2 * len(self.neuroOutput): self.neuro_highlight_complete = True # all (start, end) range pairs consumed; stop scanning

				

			
			tmpRow['date'] = sortkeydto[i].isoformat() + "Z"
			for question in self.dbdatemapped[pointerdts[i]]:
				tmpRow[question] = self.dbdatemapped[pointerdts[i]][question]
				
			self.AgraphFrame.append(tmpRow)
			self.last_date = sortkeydto[i]
			 
		#map accordingly with date to iso format


	def dayresponse(self):
		self.responseFrame = {}
		try:
			tmp = self.dbdatemapped[self.dayindex.todayDate]
		except KeyError: # no information recorded for today's day key
			return self
		# strip the aggregate suffix; the JSON consumer handles the association
		
		for question in tmp.keys():
			cleankey = question.replace('_aggregate', '')
			self.responseFrame[cleankey] = tmp[question]
		
		return self

	def orderedmap(self):
		import datetime as dt
		self.processFrameList = []
		self.processFrameDict = {}

		graphpoints = self.dbdatemapped.keys()
		graphdates = []

		
		for date in graphpoints:
			try:
				graphdates.append(dt.datetime.strptime(date,"%Y-%m-%d %H:%M:%S.%f"))
			except ValueError:
				graphdates.append(dt.datetime.strptime(date,"%Y-%m-%d %H:%M:%S"))

		sortkeydto, pointerdts = zip(*sorted(zip(graphdates, graphpoints)))

		for i in xrange(len(pointerdts)): # build {datetime: {question: value}} rows in order

			tmpRow = {}
			tmpRow[sortkeydto[i]] = {}
			self.processFrameDict[sortkeydto[i]] = {}

			for question in self.dbdatemapped[pointerdts[i]]:
				tmpRow[sortkeydto[i]][question] = self.dbdatemapped[pointerdts[i]][question]
				self.processFrameDict[sortkeydto[i]][question] = self.dbdatemapped[pointerdts[i]][question]
				
			self.processFrameList.append(tmpRow)
		return self


	def getLastDate(self,dates): #input a list of dates
		dates.sort(reverse=True)
		return dates[0] #output most recent date in subset
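
A hedged end-to-end sketch of the frame pipeline above, assuming a populated database and a valid passkey; chaining works because each step returns self:

frame = SecuFrame(passkey, date_range='all')
frame.iffo(obey=True).igraph()  # filter rows, then build the graph frame
print(frame.graphFrame[:1])     # date-sorted rows with ISO-format 'date' keys
frame.dayresponse()             # today's answers, aggregate suffix stripped
print(frame.responseFrame)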
Example #27
class DBSubsystem:
    '''
    import scan: scans an existing db and rebuilds the config file.
    create db: creates the db file, master index, question index and table index.
    '''
    def __init__(self, passkey, xtraDB=None):
        self.DATABASE_SOFTWARE_VERSION = "0.3.1a"
        self.key = passkey
        self.DBConfig = AppConfig()
        self.dbval = xtraDB

    def __del__(self):
        if (self.db.opened):
            self.db.close()


# ADD REBUILD OPTION

    def createDB(self):
        if (self.creationCheck()):
            self.buildDB()
            return True
        else:
            return False

    def creationCheck(self):
        if (Integrity().checkExists() == False):
            if (self.dbval != None):
                self.DBConfig.createConfig()
                self.DBConfig.putmap('databaseinfo', 'databasename',
                                     self.dbval)

                self.dbName = self.dbval

                return True
            else:
                return False

        else:  # the integrity check found an existing database
            return False

    def buildDB(self):

        from _dbindex import EncUniqueHashIndex
        self.dbName = self.DBConfig.mapget('databaseinfo')['databasename']

        self.db = Database(self.dbName)
        id_ind = EncUniqueHashIndex(self.db.path, 'id')
        self.db.set_indexes([id_ind])
        self.db.create()
        self.db.id_ind.enc_key = self.key
        self.db.close()

        self.createMasterindex()  #create master index passkey, only once
        self.createQindex()
        self.createTindex()

        #add error handling
        return True
        '''
        Index creation
        '''

    def createMasterindex(self):
        if (self.db.exists()):
            self.db.open()
            self.db.id_ind.enc_key = self.key

        # the insert below assumes the database is open
        self.db.insert(
            dict(t='master',
                 Qindex=None,
                 Tindex=None,
                 DBVersion=self.DATABASE_SOFTWARE_VERSION))

        for curr in self.db.all('id'):  # the master row should be the only one here; runs once
            if curr['t'] == 'master':
                self.masterIndex = ''.join(curr['_id'])
                self.DBConfig.putmap('databaseinfo', 'indexkey',
                                     self.masterIndex)  #masterkey=value
                break
        # TODO: handle errors if the master row couldn't be written or found

        self.db.close()
        return self.masterIndex

    def createQindex(self):
        if (self.db.exists()):
            self.db.open()
            self.db.id_ind.enc_key = self.key
        # assumes the database is open; insert the question index row

        self.db.insert(dict(t='Qindex'))
        # find the question index key by its row type (t='Qindex')
        for curr in self.db.all('id'):  # the Qindex row should be the only one here; runs once
            if curr['t'] == 'Qindex':
                self.Qindexkey = ''.join(curr['_id'])
                break
        # TODO: handle errors if the row couldn't be written or found

        indexRow = self.db.get('id', self.masterIndex, with_doc=True)

        # write the question index key into the master row

        indexRow['Qindex'] = self.Qindexkey
        self.db.update(indexRow)
        self.db.close()
        # the new Qindex key is now stored in the master index row

    def createTindex(self):

        self.dbName = self.DBConfig.mapget('databaseinfo')['databasename']
        self.masterIndex = self.DBConfig.mapget('databaseinfo')['indexkey']

        self.db = Database(self.dbName)
        if (self.db.exists()):
            self.db.open()
            self.db.id_ind.enc_key = self.key
        # assumes the database is open; insert the table index row

        self.db.insert(dict(t='Tindex', table=[]))
        # find the table index key by its row type (t='Tindex')
        for curr in self.db.all('id'):  # the Tindex row should be the only one here; runs once
            if curr['t'] == 'Tindex':
                self.Tindexkey = ''.join(curr['_id'])
                break
        # TODO: handle errors if the row couldn't be written or found

        indexRow = self.db.get('id', self.masterIndex, with_doc=True)

        # write the table index key into the master row

        indexRow['Tindex'] = self.Tindexkey
        self.db.update(indexRow)
        self.db.close()

        # the new Tindex key is now stored in the master index row
    '''
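
A hedged usage sketch for DBSubsystem, assuming no database exists yet and AppConfig can write its config file; 'diary.db' is a hypothetical name:

dbsys = DBSubsystem(passkey, xtraDB='diary.db')
if dbsys.createDB():
    print('created db with master, Qindex and Tindex rows')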