class DBImport:
    """Rebuild the application config file from an existing encrypted database.

    Scans the database named *xtraDB* for its 'master' row and writes that
    row's id (the master index key) plus the database name back into the
    config file.
    """

    def __init__(self, passkey, xtraDB):
        # passkey: encryption key for the id index; xtraDB: database name/path
        self.key = passkey
        self.dbName = xtraDB
        self.db = Database(self.dbName)
        self.importScan()

    def __del__(self):
        # BUGFIX: guard with getattr — if __init__ raised before self.db was
        # assigned, the destructor used to raise AttributeError.
        db = getattr(self, 'db', None)
        if db is not None and db.opened:
            db.close()

    # ADD REBUILD OPTION
    def importScan(self):
        """Scan the database for the master row and rewrite the config map.

        Returns True after the scan; the config mappings are only written
        when a row with t == 'master' is found.
        """
        # Re-open a fresh handle as a consistency check against the config.
        self.db = Database(self.dbName)
        if self.db.exists():
            self.db.open()
            self.db.id_ind.enc_key = self.key
            # The first 'master' row should be the only one; stop at it.
            for curr in self.db.all('id'):
                if curr['t'] == 'master':
                    masterKey = ''.join(curr['_id'])
                    self.DBConfig = AppConfig()
                    self.DBConfig.putmap('databaseinfo', 'indexkey', masterKey)  # masterkey=value
                    self.DBConfig.putmap('databaseinfo', 'databasename', self.dbName)
                    break
                # TODO: add else/error handling when no master row is found
            self.db.close()
        return True
class DBImport:
    """Import scan: reads an existing database and rebuilds the config file
    with the master index key and the database name."""

    def __init__(self, passkey, xtraDB):
        self.key = passkey
        self.dbName = xtraDB
        self.db = Database(self.dbName)
        self.importScan()

    def __del__(self):
        if (self.db.opened):
            self.db.close()

    # ADD REBUILD OPTION
    def importScan(self):
        """Locate the 'master' row and store its id in the config file.

        Always returns True; the config is only rewritten when the master
        row is actually found.
        """
        # Fresh handle, used as a sanity check against the config file.
        self.db = Database(self.dbName)
        if not self.db.exists():
            return True
        self.db.open()
        self.db.id_ind.enc_key = self.key
        # Only one master row is expected, so the loop stops at the first hit.
        for row in self.db.all('id'):
            if row['t'] != 'master':
                continue
            self.DBConfig = AppConfig()
            # _id comes back as a list of characters; join to a string key.
            self.DBConfig.putmap('databaseinfo', 'indexkey', ''.join(row['_id']))
            self.DBConfig.putmap('databaseinfo', 'databasename', self.dbName)
            break
        self.db.close()
        return True
class Sync:
    """Cloud synchronisation: packages the encrypted database and uploads it
    to one of the supported backends (ftp, googledrive, icloud, dropbox,
    skydrive), driven by an action dict parsed in parseconfigjson()."""

    def __init__(self):
        self.FH = FileHandler()
        self.DBConfig = AppConfig()

    def ssl_seed(self, size=24, chars=string.ascii_uppercase + string.digits):
        """Generate a random string used to seed the SSL RNG; returns self
        so callers can chain .randomString."""
        self.randomString = ''.join(random.choice(chars) for _ in range(size))
        return self

    def getcloudselection(self):
        """Load the configured backup location into self.backup.

        Returns True when a valid location is configured, False otherwise
        (self.backup is then the string "False").
        """
        try:
            backupLocation = self.DBConfig.mapget('cloudinfo')['location']
            validlocation = ['ftp', 'googledrive', 'icloud', 'dropbox', 'skydrive']
            if backupLocation in validlocation:
                self.backup = backupLocation
                return True
            self.backup = "False"
            return False
        except KeyError:
            self.backup = "False"
            return False

    def parseconfigjson(self, json):
        """Dispatch a serialized action dict to the matching handler."""
        # SECURITY: eval() executes arbitrary code — the payload must come
        # from a trusted source. Prefer ast.literal_eval / json.loads.
        self.uploadconfig = eval(json)
        print("----------- VALUE: " + str(self.uploadconfig['action']))
        status = None  # BUGFIX: avoid UnboundLocalError on unknown actions
        if self.uploadconfig['action'] == 'upload':
            status = self.upload()
        if self.uploadconfig['action'] == 'authenticate':
            status = self.authenticate()
        if self.uploadconfig['action'] == 'authorize':
            status = self.authorize()
        if self.uploadconfig['action'] == 'save':
            status = self.setbackuplocation()
        if self.uploadconfig['action'] == 'import':
            #status = self.importDB()
            pass
        return status

    def setbackuplocation(self):
        # data = {location: xxxx , username: xxxxx , password: xxxxx}
        backupconfig = self.uploadconfig.copy()
        backupconfig.pop('action')
        try:
            backupconfig.pop('dbpassword')  # never persist the db password
        except KeyError:
            pass
        # only whitelisted settings are written to the config file
        for setting in backupconfig.keys():
            if setting in ("location", "ftphost", "ftpuser", "gmail",
                           "appleid", "dropboxid", "livemail"):
                self.DBConfig.putmap('cloudinfo', setting, backupconfig[setting])

    def getconfig(self, location=None):
        """Return the stored settings for *location* as a str(dict).

        Falls back to self.backup when no location is given; returns False
        when nothing usable is configured.
        """
        self.loc = location
        if self.loc is None:
            self.loc = self.backup
        if self.loc == "False":
            print("need location")
            return False
        try:
            if self.loc == 'ftp':
                Host = self.DBConfig.mapget('cloudinfo')['ftphost']
                User = self.DBConfig.mapget('cloudinfo')['ftpuser']
                return str(dict(ftphost=Host, ftpuser=User))
            if self.loc == 'googledrive':
                return True
            if self.loc == 'icloud':
                Email = self.DBConfig.mapget('cloudinfo')['appleid']
                return str(dict(appleid=Email))
            if self.loc == 'dropbox':
                Email = self.DBConfig.mapget('cloudinfo')['dropboxid']
                return str(dict(dropboxid=Email))
            if self.loc == 'skydrive':
                Email = self.DBConfig.mapget('cloudinfo')['livemail']
                return str(dict(livemail=Email))
            return False
        except KeyError:
            return False

    def authenticate(self):
        """Start the OAuth handshake for the configured backend.

        Returns the authorization url the user must visit, or True when
        stored credentials are already valid.
        """
        socket.RAND_add(self.ssl_seed().randomString, 75.0)  # pre-seed generator
        if self.uploadconfig['location'] == 'dropbox':
            url = CloudHandler().authenticatedropbox()
            print("============= authenticate dropbox" + str(url))
            return url
        if self.uploadconfig['location'] == 'googledrive':
            url = CloudHandler().authenticategoogle()
            print("============= authenticate google" + str(url))
            if url != True:
                return url
            return True  # then upload

    def authorize(self):
        """Complete the OAuth flow with the user-supplied auth code."""
        if self.uploadconfig['location'] == 'googledrive':
            socket.RAND_add(self.ssl_seed().randomString, 75.0)  # pre-seed generator
            authcode = self.uploadconfig['authcode']
            status = CloudHandler().googleauthorize(authcode)
            if status:  # True
                return True
            return False

    def upload(self):
        """Package the database and push it to the configured cloud backend.

        Returns True on success, or a diagnostic string when the local
        package could not be cleaned up afterwards.
        """
        self.key = self.uploadconfig["dbpassword"]
        socket.RAND_add(self.ssl_seed().randomString, 75.0)  # pre-seed generator
        self.FH.genpack(self.key)  # package database
        self.packname = self.FH.finalpackname  # from config
        status = False  # BUGFIX: was unbound when no location matched
        if self.uploadconfig['location'] == 'ftp':
            host = self.uploadconfig['ftphost']
            user = self.uploadconfig['ftpuser']
            password = self.uploadconfig['password']
            self.DBConfig.putmap('cloudinfo', 'location', 'ftp')
            self.DBConfig.putmap('cloudinfo', 'ftphost', host)
            self.DBConfig.putmap('cloudinfo', 'ftpuser', user)
            status = CloudHandler().uploadftp(self.packname, host, user, password)
        if self.uploadconfig['location'] == 'googledrive':
            self.DBConfig.putmap('cloudinfo', 'location', 'googledrive')
            status = CloudHandler().uploadgoogledrive(self.packname)
        if self.uploadconfig['location'] == 'icloud':
            email = self.uploadconfig['appleid']
            password = self.uploadconfig['password']
            self.DBConfig.putmap('cloudinfo', 'location', 'icloud')
            self.DBConfig.putmap('cloudinfo', 'appleid', email)
            # BUGFIX: was a bare uploadicloud(...) call (undefined name) with
            # a misspelled 'pasword' argument.
            status = CloudHandler().uploadicloud(self.packname, email, password)
        if self.uploadconfig['location'] == 'dropbox':
            authcode = self.uploadconfig['authcode']
            self.DBConfig.putmap('cloudinfo', 'location', 'dropbox')
            # self.DBConfig.putmap('cloudinfo','dropboxid',authcode)
            status = CloudHandler().uploaddropbox(self.packname, authcode)
        if self.uploadconfig['location'] == 'skydrive':
            email = self.uploadconfig['livemail']
            password = self.uploadconfig['password']
            # BUGFIX: location was wrongly recorded as 'googledrive'.
            self.DBConfig.putmap('cloudinfo', 'location', 'skydrive')
            self.DBConfig.putmap('cloudinfo', 'livemail', email)
            # BUGFIX: was a bare uploadskydrive(...) call (undefined name).
            status = CloudHandler().uploadskydrive(self.packname, email, password)
        #print self.FH.deletefile(str(self.FH.finalpackname)) # clean-up
        try:
            import os
            os.remove(self.FH.finalpackname)
        #except OSError:
        except Exception as e:
            print(e)
            ret = "upload success: " + str(status) + " [ERROR, Clean-up]: " + str(e)
            return ret
        else:
            return True
class Sync:
    """Cloud synchronisation: packages the encrypted database and uploads it
    to one of the supported backends (ftp, googledrive, icloud, dropbox,
    skydrive), driven by an action dict parsed in parseconfigjson()."""

    def __init__(self):
        self.FH = FileHandler()
        self.DBConfig = AppConfig()

    def ssl_seed(self, size=24, chars=string.ascii_uppercase + string.digits):
        """Generate a random string used to seed the SSL RNG; returns self
        so callers can chain .randomString."""
        self.randomString = ''.join(random.choice(chars) for _ in range(size))
        return self

    def getcloudselection(self):
        """Load the configured backup location into self.backup.

        Returns True when a valid location is configured, False otherwise
        (self.backup is then the string "False").
        """
        try:
            backupLocation = self.DBConfig.mapget('cloudinfo')['location']
            validlocation = ['ftp', 'googledrive', 'icloud', 'dropbox', 'skydrive']
            if backupLocation in validlocation:
                self.backup = backupLocation
                return True
            self.backup = "False"
            return False
        except KeyError:
            self.backup = "False"
            return False

    def parseconfigjson(self, json):
        """Dispatch a serialized action dict to the matching handler."""
        # SECURITY: eval() executes arbitrary code — the payload must come
        # from a trusted source. Prefer ast.literal_eval / json.loads.
        self.uploadconfig = eval(json)
        status = None  # BUGFIX: avoid UnboundLocalError on unknown actions
        action = self.uploadconfig['action']
        if action == 'upload':
            status = self.upload()
        if action == 'authenticate':
            status = self.authenticate()
        # BUGFIX: this branch tested 'authenticate' a second time, making
        # authorize() unreachable from here.
        if action == 'authorize':
            status = self.authorize()
        if action == 'save':
            status = self.setbackuplocation()
        if action == 'import':
            #status = self.importDB()
            pass
        return status

    def setbackuplocation(self):
        # data = {location: xxxx , username: xxxxx , password: xxxxx}
        backupconfig = self.uploadconfig.copy()
        backupconfig.pop('action')
        try:
            backupconfig.pop('dbpassword')  # never persist the db password
        except KeyError:
            pass
        # only whitelisted settings are written to the config file
        for setting in backupconfig.keys():
            if setting in ("location", "ftphost", "ftpuser", "gmail",
                           "appleid", "dropboxid", "livemail"):
                self.DBConfig.putmap('cloudinfo', setting, backupconfig[setting])

    def getconfig(self, location=None):
        """Return the stored settings for *location* as a str(dict).

        Falls back to self.backup when no location is given; returns False
        when nothing usable is configured.
        """
        self.loc = location
        if self.loc is None:
            self.loc = self.backup
        if self.loc == "False":
            print("need location")
            return False
        try:
            if self.loc == 'ftp':
                Host = self.DBConfig.mapget('cloudinfo')['ftphost']
                User = self.DBConfig.mapget('cloudinfo')['ftpuser']
                return str(dict(ftphost=Host, ftpuser=User))
            if self.loc == 'googledrive':
                return True
            if self.loc == 'icloud':
                Email = self.DBConfig.mapget('cloudinfo')['appleid']
                return str(dict(appleid=Email))
            if self.loc == 'dropbox':
                Email = self.DBConfig.mapget('cloudinfo')['dropboxid']
                return str(dict(dropboxid=Email))
            if self.loc == 'skydrive':
                Email = self.DBConfig.mapget('cloudinfo')['livemail']
                return str(dict(livemail=Email))
            return False
        except KeyError:
            return False

    def authenticate(self):
        """Start the OAuth handshake for the configured backend.

        Returns the authorization url the user must visit, or True when
        stored credentials are already valid.
        """
        socket.RAND_add(self.ssl_seed().randomString, 75.0)  # pre-seed generator
        if self.uploadconfig['location'] == 'dropbox':
            url = CloudHandler().authenticatedropbox()
            print("============= authenticate dropbox" + str(url))
            return url
        if self.uploadconfig['location'] == 'googledrive':
            url = CloudHandler().authenticategoogle()
            print("============= authenticate google" + str(url))
            if url != True:
                return url
            return True  # then upload

    def authorize(self):
        """Complete the OAuth flow with the user-supplied auth code."""
        if self.uploadconfig['location'] == 'googledrive':
            socket.RAND_add(self.ssl_seed().randomString, 75.0)  # pre-seed generator
            authcode = self.uploadconfig['authcode']
            status = CloudHandler().googleauthorize(authcode)
            if status:  # True
                return True
            return False

    def upload(self):
        """Package the database and push it to the configured cloud backend.

        Returns True on success, or a diagnostic string when the local
        package could not be cleaned up afterwards.
        """
        self.key = self.uploadconfig["dbpassword"]
        socket.RAND_add(self.ssl_seed().randomString, 75.0)  # pre-seed generator
        self.FH.genpack(self.key)  # package database
        self.packname = self.FH.finalpackname  # from config
        status = False  # BUGFIX: was unbound when no location matched
        if self.uploadconfig['location'] == 'ftp':
            host = self.uploadconfig['ftphost']
            user = self.uploadconfig['ftpuser']
            password = self.uploadconfig['password']
            self.DBConfig.putmap('cloudinfo', 'location', 'ftp')
            self.DBConfig.putmap('cloudinfo', 'ftphost', host)
            self.DBConfig.putmap('cloudinfo', 'ftpuser', user)
            status = CloudHandler().uploadftp(self.packname, host, user, password)
        if self.uploadconfig['location'] == 'googledrive':
            self.DBConfig.putmap('cloudinfo', 'location', 'googledrive')
            status = CloudHandler().uploadgoogledrive(self.packname)
        if self.uploadconfig['location'] == 'icloud':
            email = self.uploadconfig['appleid']
            password = self.uploadconfig['password']
            self.DBConfig.putmap('cloudinfo', 'location', 'icloud')
            self.DBConfig.putmap('cloudinfo', 'appleid', email)
            # BUGFIX: was a bare uploadicloud(...) call (undefined name) with
            # a misspelled 'pasword' argument.
            status = CloudHandler().uploadicloud(self.packname, email, password)
        if self.uploadconfig['location'] == 'dropbox':
            authcode = self.uploadconfig['authcode']
            self.DBConfig.putmap('cloudinfo', 'location', 'dropbox')
            # self.DBConfig.putmap('cloudinfo','dropboxid',authcode)
            status = CloudHandler().uploaddropbox(self.packname, authcode)
        if self.uploadconfig['location'] == 'skydrive':
            email = self.uploadconfig['livemail']
            password = self.uploadconfig['password']
            # BUGFIX: location was wrongly recorded as 'googledrive'.
            self.DBConfig.putmap('cloudinfo', 'location', 'skydrive')
            self.DBConfig.putmap('cloudinfo', 'livemail', email)
            # BUGFIX: was a bare uploadskydrive(...) call (undefined name).
            status = CloudHandler().uploadskydrive(self.packname, email, password)
        #print self.FH.deletefile(str(self.FH.finalpackname)) # clean-up
        try:
            import os
            os.remove(self.FH.finalpackname)
        #except OSError:
        except Exception as e:
            print(e)
            ret = "upload success: " + str(status) + " [ERROR, Clean-up]: " + str(e)
            return ret
        else:
            return True
class CloudHandler:
    """Low-level cloud backend operations: OAuth handshakes and file
    upload/download for Google Drive, Dropbox, FTP, iCloud and SkyDrive."""

    def __init__(self):
        self.DBConfig = AppConfig()
        self.FH = FileHandler()
        self.googleConfig = 'configs/gdrive.yaml'

    def ssl_seed(self, size=24, chars=string.ascii_uppercase + string.digits):
        """Generate a random string used to seed the SSL RNG; returns self."""
        self.randomString = ''.join(random.choice(chars) for _ in range(size))
        return self

    def authenticategoogle(self):
        """Try the saved Google credentials.

        Returns True when they load and authorize cleanly, otherwise the
        auth url the user must visit.
        """
        self.ga = GoogleAuth(self.googleConfig)
        try:
            self.ga.LoadClientConfig()
            self.ga.LoadCredentials()
            self.ga.Authorize()
        except Exception:
            # BUGFIX: was a bare except: which also swallowed
            # KeyboardInterrupt/SystemExit.
            return self.ga.GetAuthUrl()
            #code = raw_input("code: ").strip()
        else:
            return True  # auth loaded ok

    def googleauthorize(self, authCode):
        """Exchange the pasted auth code for credentials and persist them."""
        code = authCode.strip()
        self.ga.Auth(code)
        self.ga.SaveCredentials()
        return True

    def googleupload(self, filename):
        """Upload *filename* into the 'SeccuDB' Drive folder.

        Creates the folder when missing; when a file of the same name
        already exists, its most recently modified copy is overwritten.
        """
        packageName = filename
        self.ga = GoogleAuth(self.googleConfig)
        self.ga.LoadClientConfig()
        self.ga.LoadCredentials()
        self.ga.Authorize()
        drive = GoogleDrive(self.ga)
        #flist = drive.ListFile({'q': "title contains '.crypt' and trashed = false"})
        folderlistquery = drive.ListFile({'q': "title = 'SeccuDB' and mimeType = 'application/vnd.google-apps.folder' and trashed = false"})
        cloudfolder = folderlistquery.GetList()
        if len(cloudfolder) == 0:
            # folder missing: create it, then re-query
            folder = drive.CreateFile()
            folder['title'] = "SeccuDB"
            folder['mimeType'] = "application/vnd.google-apps.folder"
            folder['parents'] = "root"
            folder.Upload()
            cloudfolder = folderlistquery.GetList()
            if len(cloudfolder) == 0:
                print("error")
                # BUGFIX: raised undefined name 'error' (NameError) which
                # masked the real failure.
                raise RuntimeError('GooglePermissionsError')
        cloudfolderid = cloudfolder[0]['id']
        print(cloudfolderid)
        databaseListquery = drive.ListFile({'q': "'%s' in parents and trashed = false" % (cloudfolderid)})
        databaseList = databaseListquery.GetList()
        database_file = drive.CreateFile()
        database_file['title'] = packageName
        database_file['parents'] = [{"kind": "drive#fileLink", 'id': str(cloudfolderid)}]
        # check if it already exists; if so, get its id and update in place
        databasenamelist = []
        for databaseAvaliable in databaseList:
            databasenamelist.append(databaseAvaliable['title'])
        if packageName in databasenamelist:
            cloudPackageQuery = drive.ListFile({'q': "title = '%s' and trashed = false" % (packageName)})
            cloudPackage = cloudPackageQuery.GetList()
            if len(cloudPackage) > 1:
                # if there's more than one, go for the most recent
                packdates = []
                for everypack in cloudPackage:
                    packdates.append((everypack['modifiedByMeDate'], everypack['id']))
                database_file['id'] = sorted(packdates, reverse=True)[0][1]
            else:
                database_file['id'] = cloudPackage[0]['id']
        database_file.Upload()
        return True

    def getgooglefileid(self, title):
        """Return the Drive file id matching *title*, or False when absent."""
        print(os.getcwd())
        self.ga = GoogleAuth(self.DBConfig.googleyaml)
        self.ga.LocalWebserverAuth()
        drive = GoogleDrive(self.ga)
        flist = drive.ListFile({'q': "title = '%s' and trashed = false" % title})
        files = flist.GetList()
        if len(files) == 0:
            return False
        else:
            return files[0]['id']

    def simpleUploadGoogle(self, filename):
        """Insert *filename* as a brand-new Drive file."""
        #test_01_Files_Insert
        drive = GoogleDrive(self.ga)
        #f = drive.CreateFile({'fluidSecurity': parent_id})
        file1 = drive.CreateFile()
        file1.SetContentFile(filename)  # Read local file
        file1.Upload()  # Upload it
        return True
        ##save file ID to config

    def simpleUpdateGoogle(self, filename, dbID):
        """Overwrite the existing Drive file *dbID* with the local file."""
        drive = GoogleDrive(self.ga)
        file1 = drive.CreateFile({'id': dbID})  # overwrite by ID
        file1['title'] = filename
        file1.FetchContent()
        file1.SetContentFile(filename)
        file1.Upload()
        return True

    def simpleDownloadGoogle(self, filename):
        """Replace the local copy of *filename* with the Drive version."""
        # BUGFIX: was DeleteOldFile(self.filename) — self.filename never
        # exists (AttributeError); the parameter was intended.
        DeleteOldFile(filename)  # delete local version, not that safe
        drive = GoogleDrive(self.ga)
        file1 = drive.CreateFile()
        file1['title'] = filename
        file1.FetchContent()  # Force download and double check content
        return True

    # # # def ftpDown(filename,host,user,ftpass):
    # import ftplib
    # session = ftplib.FTP(host,user,ftpass)
    # file = open(filename, 'wb')
    # session.retrbinary('RETR %s' % filename, file.write)
    # file.close()
    # session.quit()
    #

    def uploadftp(self, filename, host, user, password):
        """Upload *filename* to an FTP server in binary mode."""
        # upload logic
        import ftplib
        session = ftplib.FTP(host, user, password)
        file = open(filename, 'rb')  # file to send
        session.storbinary('STOR ' + filename, file)  # send the file
        file.close()  # close file and FTP
        session.quit()
        return True

    def authenticatedropbox(self):
        """Start the Dropbox OAuth2 flow; returns the authorize url."""
        import dropbox
        # Get your app key and secret from the Dropbox developer website
        app_key = 'p2iu4n7f9yegl3u'
        app_secret = '0903whnau7p2zde'
        flow = dropbox.client.DropboxOAuth2FlowNoRedirect(app_key, app_secret)
        authorize_url = flow.start()
        print("============= authenticate dropbox" + str(authorize_url))
        # persist the flow object so the authorize step could resume it
        pickled = pickle.dumps(flow)
        encodedPickle = base64.b64encode(pickled)
        self.DBConfig.putmap('cloudinfo', 'dropboxobject', str(encodedPickle))
        return authorize_url

    def uploaddropbox(self, filename, authcode):
        """Finish the Dropbox OAuth2 flow with *authcode* and upload the file.

        Returns True on success, False on any failure (logged).
        """
        import socket
        socket.RAND_add(self.ssl_seed().randomString, 75.0)
        import dropbox
        try:
            # Get your app key and secret from the Dropbox developer website
            # encodedPickle = self.DBConfig.mapget('cloudinfo')['dropboxobject']
            # decodedPickle = base64.b64decode(encodedPickle)
            # flow = pickle.loads(decodedPickle)
            app_key = 'p2iu4n7f9yegl3u'
            app_secret = '0903whnau7p2zde'
            flow = dropbox.client.DropboxOAuth2FlowNoRedirect(app_key, app_secret)
            authorize_url = flow.start()
            print("============= authenticate dropbox" + str(authorize_url))
            print("loaded ok")
            print("------ authcode: " + str(authcode))
            access_token, user_id = flow.finish(authcode)
            print("finidhed oko")
            client = dropbox.client.DropboxClient(access_token)
            print("finidhed client")
            print('linked account: ' + str(client.account_info()))
            # Uploading files
            f = open(filename, 'rb')
            response = client.put_file('/' + str(filename), f)
            print("uploaded: " + str(response))
        except Exception as e:
            print("Upload failed: " + str(e))
            return False
        return True

    def uploadgoogledrive(self, filename):
        """Insert or update *filename* on Google Drive."""
        googlefileid = self.getgooglefileid(filename)
        if googlefileid == False:
            self.simpleUploadGoogle(filename)
        else:
            self.simpleUpdateGoogle(filename, googlefileid)
        return True

    def uploadicloud(self, filename, email, password):
        # TODO: iCloud upload not implemented yet
        return True

    def uploadskydrive(self, filename, email, password):
        # TODO: SkyDrive upload not implemented yet
        return True
class DayEntry:  # checker class
    """Checks today's day row or creates a new one.

    On instantiation it verifies that:
      - the day key stored in the config matches today's date;
      - if the config has no date, the database is scanned for one matching
        today and the config is repaired;
      - a new day row is only created when no row for today exists at all.
    """

    def __init__(self, passkey):
        self.todayDate = str(getDayStart())
        self.key = passkey
        self.DBConfig = AppConfig()
        self.dayKey = None  # set up before checking, avoid attribute error
        self.dbName = self.DBConfig.mapget('databaseinfo')['databasename']
        self.db = Database(self.dbName)
        try:
            self.dayKey = self.DBConfig.mapget('databaseinfo')['daykey']
        except KeyError:
            # nothing in config: check the db for an existing entry
            daystatus = self.checkfordate()
            if daystatus == False:
                self.makeDayRow()
                self.DBConfig.putmap('databaseinfo', 'daykey', self.dayKey)
            # if True do nothing, config file fixed
        else:
            # if False, checkfordate scans for the right row and fixes config
            daystatus = self.checkfordate()
            oldcompare = self.dayKey
            self.dayKey = self.DBConfig.mapget('databaseinfo')['daykey']
            # only create a new row when the scan did NOT remap the config
            # (was `&`; `and` short-circuits and reads as intended)
            if (daystatus == False) and (oldcompare == self.dayKey):
                self.makeDayRow()
                self.DBConfig.putmap('databaseinfo', 'daykey', self.dayKey)
            if daystatus == True:
                pass  # everything all good, nothing created, just a check

    def __del__(self):
        # BUGFIX: __init__ may raise before self.db is assigned; guard so
        # the destructor does not raise AttributeError on top of it.
        db = getattr(self, 'db', None)
        if db is not None and db.opened:
            db.close()

    def makeDayRow(self):
        """Insert a row for today and register it in the table index."""
        if self.checkfordate() == True:
            return False  # already exists, no need to write
        dbindex = DBIndexSystem(self.key)
        dayrow = {}
        dayrow["date"] = self.todayDate
        if self.db.exists() == True:
            self.db.open()
            self.db.id_ind.enc_key = self.key
            self.db.insert(dayrow)
            self.db.close()  # must close first, no double opens
        self.getDayRowID()  # refresh day key
        dbindex.TindexPut(self.dayKey)  # would normally write to config file
        return True

    def getDayRowID(self):
        """Find today's row id by date, cache it in self.dayKey and return it.

        Returns False when no row for today exists.
        """
        if self.db.exists():
            self.db.open()
            self.db.id_ind.enc_key = self.key
            for curr in self.db.all('id'):
                try:
                    if curr['date'] == str(self.todayDate):
                        dataop = curr['_id']
                        # _id is returned as a list of characters; join to str
                        dataop = "".join(dataop)
                        self.db.close()
                        self.dayKey = dataop
                        return dataop  # returns datestring
                except KeyError:
                    continue
            # if it makes it here, the entry doesn't exist
            self.db.close()
        return False  # there is a problem

    def checkfordate(self):
        """Check whether a row for today's date exists in the database.

        Returns True when the configured day key points at today's row;
        False otherwise. When a matching row is found during the scan the
        config is repaired but False is still returned, so the caller can
        detect the remap by comparing keys.
        """
        if self.db.exists():
            self.db.open()
            self.db.id_ind.enc_key = self.key
            if self.dayKey != None:
                try:
                    # BUGFIX: a config day key with no matching db row used
                    # to raise out of here (the original comment noted this
                    # gap); fall through to the scan instead.
                    dayrow = self.db.get('id', self.dayKey, with_doc=True)
                    if dayrow['date'] == str(self.todayDate):
                        self.db.close()
                        return True
                except Exception:
                    pass  # stale key: scan for the right row below
            for curr in self.db.all('id'):  # try to search
                try:
                    if curr['date'] == str(self.todayDate):
                        # fix lost entry
                        self.DBConfig.putmap('databaseinfo', 'daykey', "".join(curr['_id']))
                        self.db.close()
                        return False
                except KeyError:
                    continue
            # entry doesn't exist and nothing was remapped
            self.db.close()
        return False
class DBSubsystem: ''' import scan: scans existing self.db and rebuilds config file create self.db: creates self.db file, master index, question index and table index ''' def __init__(self, passkey, xtraDB=None): self.DATABASE_SOFTWARE_VERSION = "0.3.1a" self.key = passkey self.DBConfig = AppConfig() self.dbval = xtraDB def __del__(self): if (self.db.opened): self.db.close() # ADD REBUILD OPTION def createDB(self): if (self.creationCheck()): self.buildDB() return True else: return False def creationCheck(self): if (Integrity().checkExists() == False): if (self.dbval != None): self.DBConfig.createConfig() self.DBConfig.putmap('databaseinfo', 'databasename', self.dbval) self.dbName = self.dbval return True else: return False else: #if integrity passed as ok existing return False def buildDB(self): from _dbindex import EncUniqueHashIndex self.dbName = self.DBConfig.mapget('databaseinfo')['databasename'] self.db = Database(self.dbName) id_ind = EncUniqueHashIndex(self.db.path, 'id') self.db.set_indexes([id_ind]) self.db.create() self.db.id_ind.enc_key = self.key self.db.close() self.createMasterindex() #create master index passkey, only once self.createQindex() self.createTindex() #add error handling return True ''' @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ Index Creation @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ ''' def createMasterindex(self): if (self.db.exists()): self.db.open() self.db.id_ind.enc_key = self.key #this function assumes database self.db.insert( dict(t='master', Qindex=None, Tindex=None, DBVersion=self.DATABASE_SOFTWARE_VERSION)) for curr in self.db.all( 'id' ): #since first passkey in self.db should be only one there, function only perfomed once if curr['t'] == 'master': self.masterIndex = ''.join(curr['_id']) self.DBConfig.putmap('databaseinfo', 'indexkey', self.masterIndex) #masterkey=value break #add else statement for errors if couldnt be 
written for found self.db.close() return self.masterIndex def createQindex(self): if (self.db.exists()): self.db.open() self.db.id_ind.enc_key = self.key #this function assumes database #insert question index self.db.insert(dict(t='Qindex')) #get question index passkey, form type qintex (t=xxxx) for curr in self.db.all( 'id' ): #since first passkey in self.db should be only one there, function only perfomed once if curr['t'] == 'Qindex': self.Qindexkey = ''.join(curr['_id']) break #add else statement for errors if couldnt be written for found #write Qindex passkey to master index indexRow = self.db.get('id', self.masterIndex, with_doc=True) #write question index passkey to master index indexRow['Qindex'] = self.Qindexkey self.db.update(indexRow) self.db.close() #wrote new Qindex passkey to master index passkey def createTindex(self): self.dbName = self.DBConfig.mapget('databaseinfo')['databasename'] self.masterIndex = self.DBConfig.mapget('databaseinfo')['indexkey'] self.db = Database(self.dbName) if (self.db.exists()): self.db.open() self.db.id_ind.enc_key = self.key #this function assumes database #insert question index self.db.insert(dict(t='Tindex', table=[])) #get question index passkey, form type qintex (t=xxxx) for curr in self.db.all( 'id' ): #since first passkey in self.db should be only one there, function only perfomed once if curr['t'] == 'Tindex': self.Tindexkey = ''.join(curr['_id']) break #add else statement for errors if couldnt be written for found #write Qindex passkey to master index indexRow = self.db.get('id', self.masterIndex, with_doc=True) #write question index passkey to master index indexRow['Tindex'] = self.Tindexkey self.db.update(indexRow) self.db.close() #wrote new Qindex passkey to master index passkey '''