Example 1
class Developer:
    def __init__(self, passkey, dbname=None):
        self.key = passkey
        if (dbname == None):
            self.DBConfig = AppConfig()
            self.dbName = self.DBConfig.mapget('databaseinfo')['databasename']
        else:
            self.dbName = dbname
        self.db = Database(self.dbName)

    def dump(self):
        if (self.db.exists()):
            self.db.open()
            self.db.id_ind.enc_key = self.key
            for curr in self.db.all('id'):
                print curr

            self.db.close()
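
A minimal usage sketch, assuming the surrounding AppConfig and Database classes are importable and the passkey matches the one the database was encrypted with ('secret-passkey' is a placeholder):

dev = Developer('secret-passkey')  # database name is read from the config file
dev.dump()                         # prints every decrypted row in the 'id' index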
Example 2
class Sync:
	'''
	import scan: scans existing self.db and rebuilds config file 
	create self.db: creates self.db file, master index, question index and table index
	'''

	def __init__(self):
		self.FH = FileHandler()
		self.DBConfig = AppConfig()
		
	def ssl_seed(self,size=24, chars=string.ascii_uppercase + string.digits):
		self.randomString = ''.join(random.choice(chars) for _ in range(size))
		self.randomString = ''.join(random.choice(chars) for _ in range(size))
		return self

	def getcloudselection(self):
		try:
			
			backupLocation = self.DBConfig.mapget('cloudinfo')['location']
			validlocation = ['ftp' , 'googledrive' , 'icloud' , 'dropbox' , 'skydrive']
			if (backupLocation in validlocation):
				self.backup = backupLocation
				return True
			else:
				self.backup = "False"
				return False
		except KeyError:

			self.backup = "False"
			self.backup = "False"
			return False

	def parseconfigjson(self,json):
		self.uploadconfig = eval(json) # NOTE: eval of an untrusted string is unsafe; kept to match the stored dict-string format

		print "----------- VALUE: " + str(self.uploadconfig['action'])
		status = None # avoid NameError when no action below matches
		if (self.uploadconfig['action'] == 'upload'):
			status = self.upload()

		if (self.uploadconfig['action'] == 'authenticate'):
			status = self.authenticate()

		if (self.uploadconfig['action'] == 'authorize'):
			status = self.authorize()

		if (self.uploadconfig['action'] == 'save'):
			status = self.setbackuplocation()

		if (self.uploadconfig['action'] == 'import'):
			#status = self.importDB()
			pass
		return status

	def setbackuplocation(self): # data = {location: xxxx , username: xxxxx , password: xxxxx}
		backupconfig = self.uploadconfig.copy()
		backupconfig.pop('action')
		try:
			backupconfig.pop('dbpassword')
		except KeyError:
			pass

		for setting in backupconfig.keys():
			if (setting in ("location","ftphost","ftpuser","gmail","appleid","dropboxid","livemail")):
				self.DBConfig.putmap('cloudinfo', setting , backupconfig[setting])

	def getconfig(self,location=None):
		self.loc = location

		if (self.loc==None):
			self.loc = self.backup
			if (self.loc=="False"):
				print "need location"
				return False


		try:
			if (self.loc == 'ftp'):
				Host=self.DBConfig.mapget('cloudinfo')['ftphost']
				User=self.DBConfig.mapget('cloudinfo')['ftpuser']
				return str(dict(ftphost=Host,ftpuser=User))
	
			if (self.loc == 'googledrive'):

				return True
	
			if (self.loc == 'icloud'):
				Email=self.DBConfig.mapget('cloudinfo')['appleid']
				return str(dict(appleid=Email))
	
			if (self.loc == 'dropbox'):
				Email=self.DBConfig.mapget('cloudinfo')['dropboxid']
				return str(dict(dropboxid=Email))
	
			if (self.loc == 'skydrive'):
				Email=self.DBConfig.mapget('cloudinfo')['livemail']
				return str(dict(livemail=Email))

			else:
				return False
		except KeyError: 
			return False

	def authenticate(self):
		socket.RAND_add(self.ssl_seed().randomString, 75.0) # pre-seed generator
		if (self.uploadconfig['location'] == 'dropbox'):

			url = CloudHandler().authenticatedropbox()
			print "============= authenticate dropbox" + str(url)
			return url
		if (self.uploadconfig['location'] == 'googledrive'):

			url = CloudHandler().authenticategoogle()
			print "============= authenticate google" + str(url)
			if (url != True):
				return url
			else:
				return True # then upload

	def authorize(self):
		if (self.uploadconfig['location'] == 'googledrive'):
			socket.RAND_add(self.ssl_seed().randomString, 75.0) # pre-seed generator
			authcode = self.uploadconfig['authcode']
			status = CloudHandler().googleauthorize(authcode)
			if (status): # True
				return True
			else:
				return False


	def upload(self):
		
		self.key = self.uploadconfig["dbpassword"]

		socket.RAND_add(self.ssl_seed().randomString, 75.0) # pre-seed generator
		self.FH.genpack(self.key) #package database

		self.packname = self.FH.finalpackname #from config


		if (self.uploadconfig['location'] == 'ftp'):
			host = self.uploadconfig['ftphost']
			user = self.uploadconfig['ftpuser']
			password = self.uploadconfig['password']

			self.DBConfig.putmap('cloudinfo','location','ftp')
			self.DBConfig.putmap('cloudinfo','ftphost', host )
			self.DBConfig.putmap('cloudinfo','ftpuser', user )
			status = CloudHandler().uploadftp(self.packname, host, user, password)

		if (self.uploadconfig['location'] == 'googledrive'):
			self.DBConfig.putmap('cloudinfo','location','googledrive')
			status = CloudHandler().uploadgoogledrive(self.packname)

		if (self.uploadconfig['location'] == 'icloud'):
			email = self.uploadconfig['appleid']
			password = self.uploadconfig['password']

			self.DBConfig.putmap('cloudinfo','location','icloud')
			self.DBConfig.putmap('cloudinfo','appleid',email)
			status = uploadicloud(self.packname, email, password)

		if (self.uploadconfig['location'] == 'dropbox'):
			authcode = self.uploadconfig['authcode']

			self.DBConfig.putmap('cloudinfo','location','dropbox')
			# self.DBConfig.putmap('cloudinfo','dropboxid',authcode)
			status = CloudHandler().uploaddropbox(self.packname, authcode)

		if (self.uploadconfig['location'] == 'skydrive'):
			email = self.uploadconfig['livemail']
			password = self.uploadconfig['password']

			self.DBConfig.putmap('cloudinfo','location','skydrive')
			self.DBConfig.putmap('cloudinfo','livemail',email)
			status = uploadskydrive(self.packname, email, password)
		#print self.FH.deletefile(str(self.FH.finalpackname)) # clean-up
		try:
			import os
			os.remove(self.FH.finalpackname)
		except Exception as e: # ideally except OSError, but catch everything during clean-up
			print e
			ret =  "upload success: " + str(status) + " [ERROR, Clean-up]: " + str(e)
			return ret

		else:
			return True
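
A sketch of driving the dispatcher above; every field value is a placeholder, and the dict-string format is the one parseconfigjson() eval()s:

s = Sync()
cfg = "{'action': 'upload', 'location': 'ftp', 'ftphost': 'ftp.example.com', 'ftpuser': 'me', 'password': 'pw', 'dbpassword': 'dbkey'}"
print s.parseconfigjson(cfg)  # True on success, or an error string from clean-up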
Example 3
class SecuFrame:  # in production, the key must be specified
    def __init__(self, passkey, date_range='all'):
        self.key = passkey
        self.Qeng = SecuQ(self.key)

        self.indexdb = DBIndexSystem(self.key)
        #self.indexdb.masterIndex
        #self.indexdb.Qindex
        #self.indexdb.Tindex
        #self.indexdb.IndexedTable
        #self.indexdb.dbName

        self.dayindex = DayEntry(self.key)
        #self.dayindex.dayKey

        self.DBConfig = AppConfig()

        self.dbName = self.DBConfig.mapget('databaseinfo')['databasename']

        self.db = Database(self.dbName)

        self.dbparseable = self.db2json(daterange=date_range, clean=True)

    def __del__(self):
        if (self.db.opened):
            self.db.close()

    def db2json(self, daterange, clean=True):
        '''
        > daterange
        - "all", "today", or a tuple of datetime objects to specify a range:
          (dateObj, dateObj)
        '''

        dfJSON = []

        if (self.db.exists()):
            self.db.open()
            self.db.id_ind.enc_key = self.key
            if daterange == "all":
                if clean == True:

                    for currHash in self.indexdb.IndexedTable:  #get row
                        curr = self.db.get('id', currHash, with_doc=True)

                        curr.pop('_id')
                        curr.pop('_rev')
                        dfJSON.append(curr)

                    self.db.close()
                    return dfJSON

                if clean == False:
                    for currHash in self.indexdb.IndexedTable:  #get row
                        curr = self.db.get('id', currHash, with_doc=True)
                        dfJSON.append(curr)

                    self.db.close()
                    return dfJSON

            if daterange == "today":
                if clean == True:
                    curr = self.db.get('id',
                                       self.dayindex.dayKey,
                                       with_doc=True)
                    curr.pop('_id')
                    curr.pop('_rev')
                    dfJSON.append(curr)
                    self.db.close()
                    return dfJSON
        '''
        Disabled draft: filter by an explicit (start, end) datetime range.

        if ((type(daterange) == tuple) and (len(daterange) <= 2)
                and (daterange[0] < daterange[1])
                and (type(daterange[0]) == datetime.datetime)
                and (type(daterange[1]) == datetime.datetime)):  # a valid daterange
            if clean == True:
                for curr in db.all('id'):  # get row
                    currdto = dt.datetime.strptime(curr['date'], "%Y-%m-%d %H:%M:%S.%f")
                    if (daterange[0] <= currdto <= daterange[1]):
                        curr.pop('_id')
                        curr.pop('_rev')
                        dfJSON.append(curr)
                db.close()
                return dfJSON

            if clean == False:
                for curr in db.all('id'):  # get row
                    dfJSON.append(curr)
                db.close()
                return dfJSON

        else:  # raise some kind of exception
            return False
        '''

    def iffo(self, daterange="all", obey=True):
        self.dbfiltered = []
        self.dbdatemapped = {}
        self.infoIndex = self.Qeng.valid  # {'a':{'active':'True','typ':'slider','range':'0-100','aggregate':'True', 'multipoint':'True'}}
        if obey == True:
            for curr in self.dbparseable:  #get row
                '''
				{'date' : xx , 'a':{'xxdate1xx':1,'xxdate2xx':2},
					'b':{'xxdate1xx':14,'xxdate2xx':14},
					'c':{'xxdate1xx':11,'xxdate2xx':11},
					'note':{'xxdate2xx':'hello','xxdate3xx':'you'}
				}
				'''

                tmp = {}  #holder that is constructed

                rowDate = curr["date"]  #'date' : xx
                '''
				date : xx
				'''

                questionsData = curr.keys()  # returns a list
                questionsData.remove('date')
                '''
				['a','b','c','note']
				'''
                #questionsData.remove('note')

                for question in questionsData:  #get question from list

                    try:
                        if (
                                self.infoIndex[question]['multipoint'] ==
                                "True"
                        ):  # & (self.infoIndex[question]['aggregate']== "False"): #display all points

                            multiP = curr[question].keys()
                            '''
							in 'a'
							['xxdate1xx','xxdate2xx']
							'''

                            for point in multiP:  #points are dates
                                try:
                                    tmp[question][point] = curr[question][
                                        point]
                                except KeyError:
                                    tmp[question] = {}
                                    tmp[question][point] = curr[question][
                                        point]

                                try:
                                    self.dbdatemapped[point][question] = curr[
                                        question][point]
                                except KeyError:  #avoid overwriting
                                    self.dbdatemapped[point] = {}
                                    self.dbdatemapped[point][question] = curr[
                                        question][point]

                        if (self.infoIndex[question]['multipoint'] == "True"
                            ) & (
                                self.infoIndex[question]['aggregate'] == "True"
                            ):  #display only one aggregate in its own column
                            '''
							creates unique key for aggregate
							'''
                            datelist = curr[question].keys(
                            )  #gets all dates within the question
                            datelist.sort()  #ensure earliest to latest
                            aggregate_key_name = str(question) + "_aggregate"
                            tmp[aggregate_key_name] = {}

                            try:  #as integers
                                tmp[aggregate_key_name][rowDate] = 0
                                aggregate_sum = 0
                                for point in datelist:
                                    aggregate_sum += curr[question][point]
                            except TypeError:  #process aggregate function as concatenated strings
                                tmp[aggregate_key_name][rowDate] = ""
                                aggregate_sum = ""
                                for point in datelist:
                                    aggregate_sum += curr[question][
                                        point] + "\n"

                            try:
                                self.dbdatemapped[rowDate][
                                    aggregate_key_name] = aggregate_sum
                            except KeyError:
                                self.dbdatemapped[rowDate] = {}
                                self.dbdatemapped[rowDate][
                                    aggregate_key_name] = aggregate_sum

                            tmp[aggregate_key_name] = {}
                            tmp[aggregate_key_name][
                                rowDate] = aggregate_sum  # replaces with single

                        if ((self.infoIndex[question]['multipoint'] == "False")
                                &
                            (self.infoIndex[question]['aggregate']
                             == "False")) | (self.infoIndex[question]['typ']
                                             == "note"):  #display only one
                            '''
							Puts last entry under rowdate 
							'''
                            ''' 
							NOTE HANDLING
							in future this should select the most positive note based on sentiment analysis
	
							- For now it will select the last note typed in
							'''

                            datelist = curr[question].keys(
                            )  #gets all dates within the question

                            pointKey = self.getLastDate(
                                datelist
                            )  #selects most recent date from list (keys)
                            try:
                                tmp[question][rowDate] = curr[question][
                                    pointKey]  # replaces with single, most recent, point only
                            except KeyError:
                                tmp[question] = {}
                                tmp[question][rowDate] = curr[question][
                                    pointKey]  # replaces with single, most recent, point only
                            try:
                                self.dbdatemapped[rowDate][question] = curr[
                                    question][pointKey]
                            except KeyError:
                                self.dbdatemapped[rowDate] = {}
                                self.dbdatemapped[rowDate][question] = curr[
                                    question][pointKey]

                        if (self.infoIndex[question]['multipoint'] == "False"
                            ) & (
                                self.infoIndex[question]['aggregate'] == "True"
                            ):  #display only one aggregate in its own column
                            datelist = curr[question].keys(
                            )  #gets all dates within the question
                            datelist.sort()  #ensure earliest to latest

                            tmp[question] = {}

                            try:  #as integers
                                tmp[question][rowDate] = 0
                                aggregate_sum = 0
                                for point in datelist:
                                    aggregate_sum += curr[question][point]
                            except TypeError:  #process aggregate function as concatenated strings
                                tmp[question][rowDate] = ""
                                aggregate_sum = ""
                                for point in datelist:
                                    aggregate_sum += curr[question][
                                        point] + "\n"

                            #output
                            tmp[question][rowDate] = aggregate_sum
                            #remapping is additive
                            try:
                                self.dbdatemapped[rowDate][
                                    question] = aggregate_sum
                            except KeyError:
                                self.dbdatemapped[rowDate] = {}
                                self.dbdatemapped[rowDate][
                                    question] = aggregate_sum
                    except KeyError:
                        continue

                self.dbfiltered.append(tmp)

        return self

    def igraph(self):
        import datetime as dt
        self.graphFrame = []

        graphpoints = self.dbdatemapped.keys()
        graphdates = []

        for date in graphpoints:
            try:
                graphdates.append(
                    dt.datetime.strptime(date, "%Y-%m-%d %H:%M:%S.%f"))
            except ValueError:
                graphdates.append(
                    dt.datetime.strptime(date, "%Y-%m-%d %H:%M:%S"))

        sortkeydto, pointerdts = zip(*sorted(zip(graphdates, graphpoints)))

        for i in xrange(
                0,
                len(pointerdts)):  # want {date: xxxISOxxx , a:x ,b:x ,note:x}

            tmpRow = {}
            tmpRow['date'] = sortkeydto[i].isoformat() + "Z"
            for question in self.dbdatemapped[pointerdts[i]]:
                tmpRow[question] = self.dbdatemapped[pointerdts[i]][question]

            self.graphFrame.append(tmpRow)
        return self

        #map accordingly with date to iso format

    def Agraph(self, neuroOutput):
        import datetime as dt
        self.neuroOutput = neuroOutput  # [(dto,dto),(dto,dto),,,,]
        self.AgraphFrame = []

        graphpoints = self.dbdatemapped.keys()
        graphdates = []

        self.last_date = None
        self.curr_date = None

        self.neuro_scan_count = 0
        self.neuro_highlight_complete = False

        for date in graphpoints:
            try:
                graphdates.append(
                    dt.datetime.strptime(date, "%Y-%m-%d %H:%M:%S.%f"))
            except ValueError:
                graphdates.append(
                    dt.datetime.strptime(date, "%Y-%m-%d %H:%M:%S"))

        sortkeydto, pointerdts = zip(*sorted(zip(graphdates, graphpoints)))

        for i in xrange(
                0,
                len(pointerdts)):  # want {date: xxxISOxxx , a:x ,b:x ,note:x}
            tmpRow = {}
            # set to white /  transparent first
            self.curr_date = sortkeydto[i]

            if (self.neuro_highlight_complete == False):
                tmpScanPos = divmod(self.neuro_scan_count,
                                    2)  # divisor answer, remainder
                #print "tmpScanPos: " +str(tmpScanPos) + " self.neuro_scan_count:  " + str(self.neuro_scan_count)
                tmpNeuroDate = self.neuroOutput[tmpScanPos[0]][tmpScanPos[1]]

                if (self.last_date == None): tmpRow["lineColor"] = "#FFFFFF"
                elif (self.curr_date == tmpNeuroDate):
                    if (tmpScanPos[1] == 0):
                        tmpRow["lineColor"] = "#CC0000"  #if start of range
                    if (tmpScanPos[1] == 1):
                        tmpRow["lineColor"] = "#FFFFFF"  # if end of range
                    self.neuro_scan_count += 1

                elif (self.last_date < tmpNeuroDate < self.curr_date):
                    if (tmpScanPos[1] == 0):
                        tmpRow["lineColor"] = "#CC0000"  #if start of range
                    if (tmpScanPos[1] == 1):
                        tmpRow["lineColor"] = "#FFFFFF"  # if end of range
                    self.neuro_scan_count += 1

                if ((tmpScanPos[0] + tmpScanPos[0]) == len(neuroOutput)):
                    self.neuro_highlight_complete = True  #checks if this should be the last iteration

            tmpRow['date'] = sortkeydto[i].isoformat() + "Z"
            for question in self.dbdatemapped[pointerdts[i]]:
                tmpRow[question] = self.dbdatemapped[pointerdts[i]][question]

            self.AgraphFrame.append(tmpRow)
            self.last_date = sortkeydto[i]

        #map accordingly with date to iso format

    def dayresponse(self):
        self.responseFrame = {}
        try:
            tmp = self.dbdatemapped[self.dayindex.todayDate]
        except KeyError:  #means there is no information for the daykey
            return self
        # remove aggregate keyword, json handles association

        for question in tmp.keys():
            cleankey = question.replace('_aggregate', '')
            self.responseFrame[cleankey] = tmp[question]

        return self

    def orderedmap(self):
        import datetime as dt
        self.processFrameList = []
        self.processFrameDict = {}

        graphpoints = self.dbdatemapped.keys()
        graphdates = []

        for date in graphpoints:
            try:
                graphdates.append(
                    dt.datetime.strptime(date, "%Y-%m-%d %H:%M:%S.%f"))
            except ValueError:
                graphdates.append(
                    dt.datetime.strptime(date, "%Y-%m-%d %H:%M:%S"))

        sortkeydto, pointerdts = zip(*sorted(zip(graphdates, graphpoints)))

        for i in xrange(
                0,
                len(pointerdts)):  # want {date: xxxISOxxx , a:x ,b:x ,note:x}

            tmpRow = {}
            tmpRow[sortkeydto[i]] = {}
            self.processFrameDict[sortkeydto[i]] = {}

            for question in self.dbdatemapped[pointerdts[i]]:
                tmpRow[sortkeydto[i]][question] = self.dbdatemapped[
                    pointerdts[i]][question]
                self.processFrameDict[sortkeydto[i]][
                    question] = self.dbdatemapped[pointerdts[i]][question]

            self.processFrameList.append(tmpRow)
        return self

    def getLastDate(self, dates):  #input a list of dates
        dates.sort(reverse=True)
        return dates[0]  #output most recent date in subset
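
A sketch chaining the frame builders above into a graph payload (placeholder passkey; an existing database is assumed):

frame = SecuFrame('secret-passkey', date_range='all')
rows = frame.iffo().igraph().graphFrame  # date-sorted rows shaped like {'date': isoString, 'a': value, 'note': text}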
Example 4
class SecuIn:
	'''
	Handles all data input into the database

	'''
	def __init__(self,passkey):
		self.key = passkey


		self.initQuestions = SecuQ(self.key)

		self.DBConfig = AppConfig()
		self.dbName = self.DBConfig.mapget('databaseinfo')['databasename']

		self.db = Database(self.dbName)


		initDay = DayEntry(self.key) # checks day hash or creates a new one
		self.dayKey = initDay.dayKey



	def questionDataIN(self,data):
	
		'''
		Data IN:
		{'a' : 2, 'b': 14 , 'c': 11, 'd': 43, 'note' : 'hello'}
	
		or
	
		{ 'b': 14 , 'c': 11, 'd': 43, 'note' : 'hello'} 
		 some entries may be missing
	
	
	Data OUT: (NEVER DELETE ANYTHING :) )
	
		{'date' : xx , _id: ###date2### , 'a':{'xxdate3xx':2},
						'b':{'xxdate3xx':14},
						'c':{'xxdate3xx':11},
						'note':{'xxdate3xx':'you'}}
	
	
		{'date' : xx , _id: ###date1### , 'a':{'xxdate1xx':1,'xxdate2xx':2},
						'b':{'xxdate1xx':14,'xxdate2xx':14},
						'c':{'xxdate1xx':11,'xxdate2xx':11},
						'note':{'xxdate2xx':'hello','xxdate3xx':'you'}
	
	
		'''


		timeIN = getTimeStamp() #get the current timestamp
		#initialize new questions
		
	
		# get data, as doc {'date':'xx/xx/xxTxx:xx:xxxx','question1':'x','question2':'x'}, same as dic format

		if(self.db.exists()):
			self.db.open()
			self.db.id_ind.enc_key = self.key
			dayrow = self.db.get('id', self.dayKey, with_doc=True)
	
			#this function assumes database already opened
			# this is gonna be a tuple that is inserted directly

	
			#convert data from javascript to python dict/json
			# if (type(data) is str):
			dataIN=eval(data) #{ 'b': 14 , 'c': 11, 'd': 43, 'note' : 'hello'}
			datachanged = dataIN.keys()





			for question in datachanged:
				try:
					dayrow[question][timeIN] = dataIN[question]
				except KeyError: #first write to key, initiate
					dayrow[question] = {}
					dayrow[question][timeIN] = dataIN[question]

			

			self.db.update(dayrow) 
			self.db.close()
			self.initQuestions.questionsValidate(datachanged) #insert questions whose data has changed

			#if all ok!
			return True
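
A sketch of writing one batch of answers, matching the Data IN format documented above (keys must be configured question names; the string is eval()'d by questionDataIN):

secu = SecuIn('secret-passkey')
secu.questionDataIN("{'b': 14, 'c': 11, 'note': 'hello'}")  # returns True when the row is updated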
Example 5
class FileHandler:
    '''
	- performs file operations
	- gets file version info
	- verify hashes
	- encrypt
	- decrypt
	'''
    def __init__(self):
        self.DBConfig = AppConfig()
        #self.DBConfig.mapget('section')['key']
        self.packtype = self.DBConfig.mapget('databaseinfo')['packtype']
        self.dbName = self.DBConfig.mapget('databaseinfo')['databasename']
        self.zipname = self.dbName + ".z"
        self.encryptedzipname = self.dbName + ".z" + ".crypt"

        if (DeviceDetection().embeddedapp):  # =True
            import sys
            sys.path = [
                '/pythonHome/Lib', '/pythonHome/', '/pythonHome', '.', '/Lib',
                '/pythonHome/system'
            ]

        #if database detected get version info

    def keygen(self, password, salt):
        dig = hmac.new(salt, msg=password, digestmod=hashlib.sha256)
        fkey = dig.hexdigest()
        skey = fkey[0:32]
        return skey

    def encrypt_file(self, key, in_filename, out_filename):
        with open(in_filename, 'rb') as infile:
            with open(out_filename, 'wb') as outfile:
                outfile.write(aes.encryptData(key, infile.read()))

    def decrypt_file(self, key, in_filename, out_filename):
        #out_filename = os.path.splitext(in_filename)[0]
        with open(in_filename, 'rb') as infile:
            with open(out_filename, 'wb') as outfile:
                outfile.write(aes.decryptData(key, infile.read()))

    def getfilesize(self, path):
        filesize = os.path.getsize(path)  #returns float
        return filesize

    def moddate(self, path):
        t = os.path.getmtime(path)
        return datetime.datetime.fromtimestamp(t)

    def deletefile(self, file_name):  # deletes a single file
        if (os.path.isfile(file_name)):
            try:
                os.remove(file_name)
            #except OSError:
            except Exception as e:
                print e
                print 'a'
                return False
            else:
                return True
        else:
            print "b"
            return False

    def checkfile(self, path):
        return os.path.isfile(path)

    def deletepath(self, path):  #deletes entire path and everything inside
        import shutil
        shutil.rmtree(path)

    def zipit(self, path, zip):  #zip an entire path into a file
        zipf = zipfile.ZipFile(zip, 'w', zipfile.ZIP_DEFLATED)
        self.zipdir(path, zipf)
        zipf.close()

    def unzipit(self, path, zip):  #unzip function
        with zipfile.ZipFile(zip, "r") as z:
            z.extractall(path)

    def zipdir(self, path, zip):  # directory stepping function; use zipit
        for root, dirs, files in os.walk(path):
            for file in files:
                zip.write(os.path.join(root, file))

    def eraseALL(self):
        dbName = self.DBConfig.mapget('databaseinfo')['databasename']
        self.deletepath(dbName)
        self.deletefile("configs/seccuConfig.ini")

    def md5_for_file(self, path, block_size=256 * 128, hr=True):
        '''
		Block size directly depends on the block size of your filesystem
		to avoid performance issues
		Here the default is 256*128 = 32768 octets
		'''
        md5 = hashlib.md5()
        with open(path, 'rb') as f:
            for chunk in iter(lambda: f.read(block_size), b''):
                md5.update(chunk)
        if hr:
            return md5.hexdigest()
        return md5.digest()

    def compareVersions(self):
        return True
        #this should be a separate module

    def localVersion(self):
        return True

    def cloudVersion(self):
        return True

    def packdbsecure(self, password):

        #zip path
        self.zipit(self.dbName, self.zipname)
        #key
        salt = self.dbName
        key32 = self.keygen(password, salt)
        #encrypt
        self.encrypt_file(key32, self.zipname, self.encryptedzipname)

    def packdb(self):
        #zip path
        self.zipit(
            self.dbName,
            self.zipname)  # self.dbName corresponds to (currDir)./dbname/xxxxx
        return True

    def unpackdbsecure(self):

        #key
        salt = self.dbName
        key32 = self.keygen(self.key, salt)  # assumes self.key was set by the caller
        #decrypt
        self.decrypt_file(key32, self.encryptedzipname, self.zipname)
        #unzip path
        self.unzipit(self.dbName, self.zipname)  # unzipit(path, zip)

    def unpackdb(self):
        pass

    def parsepacktype(self):
        #if regular zip
        #return False

        #if secure
        return False

    def cleanpacked(self):
        self.deletefile(str(self.finalpackname))

    def genpack(self, password=None):

        #check pack type

        #pack

        #get file hash
        #get version info
        if (self.parsepacktype()):  #if secure or not

            #if it's true (secure)
            self.packdbsecure(password)
            self.finalpackname = self.encryptedzipname
        else:  #regular zip
            self.packdb()
            self.finalpackname = self.zipname

        return True

    def genunpack(self):

        #check pack type
        # get verison info
        #verify file hash

        #(decrypt)
        #unpack

        #delete garbage

        pass
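
A pack/clean-up sketch mirroring what Sync.upload() does with these helpers. The password is a placeholder; keygen() keeps the first 32 hex characters of an HMAC-SHA256 digest, which is the key size the AES helpers expect:

fh = FileHandler()
key32 = fh.keygen('my-password', fh.dbName)  # salt is the database name, as in packdbsecure()
fh.genpack('my-password')  # zips (and, for secure packs, encrypts) the database
print fh.finalpackname     # e.g. 'mydbname.z' or 'mydbname.z.crypt'
fh.cleanpacked()           # deletes the packed artifact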
Example 6
class SecuFrame: # in production, the key must be specified
	def __init__(self,passkey,date_range='all'):
		self.key = passkey
		self.Qeng = SecuQ(self.key)

		self.indexdb = DBIndexSystem(self.key)
		#self.indexdb.masterIndex
		#self.indexdb.Qindex
		#self.indexdb.Tindex
		#self.indexdb.IndexedTable
		#self.indexdb.dbName

		self.dayindex = DayEntry(self.key)
		#self.dayindex.dayKey

		self.DBConfig = AppConfig()

		self.dbName = self.DBConfig.mapget('databaseinfo')['databasename']

		self.db = Database(self.dbName)


		self.dbparseable = self.db2json(daterange=date_range,clean=True)
		
	def __del__(self):
		if (self.db.opened):
			self.db.close()

		
		
	def db2json(self,daterange,clean=True):
		'''
		> daterange
		- "all", "today", or a tuple of datetime objects to specify a range:
		  (dateObj, dateObj)
		'''
		
		dfJSON = []
		
		if(self.db.exists()):
			self.db.open()
			self.db.id_ind.enc_key = self.key
			if daterange == "all":
				if clean == True:


					for currHash in self.indexdb.IndexedTable: #get row  
						curr = self.db.get('id', currHash, with_doc=True)

						curr.pop('_id')
						curr.pop('_rev')
						dfJSON.append(curr)

					self.db.close()
					return dfJSON
			
				if clean == False:
					for currHash in self.indexdb.IndexedTable: #get row  
						curr = self.db.get('id', currHash, with_doc=True)
						dfJSON.append(curr)

					self.db.close()
					return dfJSON

			if daterange == "today":
				if clean == True:
					curr = self.db.get('id', self.dayindex.dayKey, with_doc=True)
					curr.pop('_id')
					curr.pop('_rev')
					dfJSON.append(curr)
					self.db.close()
					return dfJSON
				
		'''
		Disabled draft: filter by an explicit (start, end) datetime range.

		if ((type(daterange) == tuple) and (len(daterange) <= 2)
				and (daterange[0] < daterange[1])
				and (type(daterange[0]) == datetime.datetime)
				and (type(daterange[1]) == datetime.datetime)):  # a valid daterange
			if clean == True:
				for curr in db.all('id'):  # get row
					currdto = dt.datetime.strptime(curr['date'], "%Y-%m-%d %H:%M:%S.%f")
					if (daterange[0] <= currdto <= daterange[1]):
						curr.pop('_id')
						curr.pop('_rev')
						dfJSON.append(curr)
				db.close()
				return dfJSON

			if clean == False:
				for curr in db.all('id'):  # get row
					dfJSON.append(curr)
				db.close()
				return dfJSON

		else:  # raise some kind of exception
			return False
		'''

	def iffo(self, daterange = "all", obey = True):
		self.dbfiltered = []
		self.dbdatemapped = {}
		self.infoIndex = self.Qeng.valid # {'a':{'active':'True','typ':'slider','range':'0-100','aggregate':'True', 'multipoint':'True'}}
		if obey == True :
			for curr in self.dbparseable: #get row
				'''
				{'date' : xx , 'a':{'xxdate1xx':1,'xxdate2xx':2},
					'b':{'xxdate1xx':14,'xxdate2xx':14},
					'c':{'xxdate1xx':11,'xxdate2xx':11},
					'note':{'xxdate2xx':'hello','xxdate3xx':'you'}
				}
				'''

				tmp={} #holder that is constructed

				rowDate = curr["date"] #'date' : xx
				'''
				date : xx
				'''

				questionsData = curr.keys() # returns a list
				questionsData.remove('date')
				'''
				['a','b','c','note']
				'''
				#questionsData.remove('note')

				for question in questionsData: #get question from list	

					try:
						if (self.infoIndex[question]['multipoint']== "True"): # & (self.infoIndex[question]['aggregate']== "False"): #display all points
		
		
							multiP = curr[question].keys()
							'''
							in 'a'
							['xxdate1xx','xxdate2xx']
							'''
							
							for point in multiP: #points are dates
								try:
									tmp[question][point] = curr[question][point]
								except KeyError:
									tmp[question]={}
									tmp[question][point] = curr[question][point]
	
								try:
									self.dbdatemapped[point][question] = curr[question][point]
								except KeyError: #avoid overwriting
									self.dbdatemapped[point] = {}
									self.dbdatemapped[point][question] = curr[question][point]			
	
						
						if (self.infoIndex[question]['multipoint']== "True") & (self.infoIndex[question]['aggregate']== "True"): #display only one aggregate in it's own column
	
							'''
							creates unique key for aggregate
							'''
							datelist = curr[question].keys() #gets all dates within the question 
							datelist.sort()	 #ensure earliest to latest
							aggregate_key_name = str(question)+"_aggregate"
							tmp[aggregate_key_name]={}
	
	
							try: #as integers
								tmp[aggregate_key_name][rowDate] = 0
								aggregate_sum = 0
								for point in datelist:
									aggregate_sum += curr[question][point]
							except TypeError: #process aggregate function as concatenated strings
								tmp[aggregate_key_name][rowDate] = ""
								aggregate_sum = ""
								for point in datelist:
									aggregate_sum += curr[question][point] + "\n"
								
	
	
							try:
								self.dbdatemapped[rowDate][aggregate_key_name] = aggregate_sum
							except KeyError: 
								self.dbdatemapped[rowDate] = {}
								self.dbdatemapped[rowDate][aggregate_key_name] = aggregate_sum
		
							tmp[aggregate_key_name] = {}
							tmp[aggregate_key_name][rowDate] = aggregate_sum # replaces with single 
	
	
	
						if ((self.infoIndex[question]['multipoint']== "False") & (self.infoIndex[question]['aggregate']== "False")) | (self.infoIndex[question]['typ']== "note"): #display only one
							'''
							Puts last entry under rowdate 
							'''
	
	
	
							''' 
							NOTE HANDLING
							in future this should select the most positive note based on sentiment analysis
	
							- For now it will select the last note typed in
							'''
	
	
							datelist = curr[question].keys() #gets all dates within the question
		
							pointKey = self.getLastDate(datelist) #selects most recent date from list (keys)
							try:
								tmp[question][rowDate] = curr[question][pointKey] # replaces with single, most recent, point only
							except KeyError:
								tmp[question]={}
								tmp[question][rowDate] = curr[question][pointKey] # replaces with single, most recent, point only
							try:
								self.dbdatemapped[rowDate][question]  = curr[question][pointKey]
							except KeyError:
								self.dbdatemapped[rowDate] = {}
								self.dbdatemapped[rowDate][question]  = curr[question][pointKey]
	
	
		
						if (self.infoIndex[question]['multipoint']== "False") & (self.infoIndex[question]['aggregate']== "True"): #display only one aggregate in it's own column
							datelist = curr[question].keys() #gets all dates within the question 
							datelist.sort()	 #ensure earliest to latest
		
							tmp[question]={}
							
							try: #as integers
								tmp[question][rowDate] = 0
								aggregate_sum = 0
								for point in datelist:
									aggregate_sum += curr[question][point]
							except TypeError: #process aggregate function as concatenated strings
								tmp[question][rowDate] = ""
								aggregate_sum = ""
								for point in datelist:
									aggregate_sum += curr[question][point] + "\n"
		
							#output	
							tmp[question][rowDate] = aggregate_sum
							#remapping is additive
							try:
								self.dbdatemapped[rowDate][question]  = aggregate_sum
							except KeyError:
								self.dbdatemapped[rowDate] = {}
								self.dbdatemapped[rowDate][question]  = aggregate_sum
					except KeyError:
						continue

				self.dbfiltered.append(tmp)

		return self

	def igraph(self):
		import datetime as dt
		self.graphFrame = []

		graphpoints = self.dbdatemapped.keys()
		graphdates = []

		
		for date in graphpoints:
			try:
				graphdates.append(dt.datetime.strptime(date,"%Y-%m-%d %H:%M:%S.%f"))
			except ValueError:
				graphdates.append(dt.datetime.strptime(date,"%Y-%m-%d %H:%M:%S"))

		sortkeydto, pointerdts = zip(*sorted(zip(graphdates, graphpoints)))

		for i in xrange(0,len(pointerdts)): # want {date: xxxISOxxx , a:x ,b:x ,note:x}

			tmpRow = {}
			tmpRow['date'] = sortkeydto[i].isoformat() + "Z"
			for question in self.dbdatemapped[pointerdts[i]]:
				tmpRow[question] = self.dbdatemapped[pointerdts[i]][question]
				
			self.graphFrame.append(tmpRow)
		return self
			 
		#map accordingly with date to iso format



	def Agraph(self,neuroOutput):
		import datetime as dt
		self.neuroOutput = neuroOutput # [(dto,dto),(dto,dto),,,,]
		self.AgraphFrame = []

		graphpoints = self.dbdatemapped.keys()
		graphdates = []

		self.last_date = None
		self.curr_date = None

		self.neuro_scan_count = 0 
		self.neuro_highlight_complete = False

		for date in graphpoints:
			try:
				graphdates.append(dt.datetime.strptime(date,"%Y-%m-%d %H:%M:%S.%f"))
			except ValueError:
				graphdates.append(dt.datetime.strptime(date,"%Y-%m-%d %H:%M:%S"))

		sortkeydto, pointerdts = zip(*sorted(zip(graphdates, graphpoints)))

		for i in xrange(0,len(pointerdts)): # want {date: xxxISOxxx , a:x ,b:x ,note:x}
			tmpRow = {}
			# set to white /  transparent first
			self.curr_date = sortkeydto[i]

			if (self.neuro_highlight_complete == False):
				tmpScanPos = divmod(self.neuro_scan_count,2) # divisor answer, remainder
				#print "tmpScanPos: " +str(tmpScanPos) + " self.neuro_scan_count:  " + str(self.neuro_scan_count)
				tmpNeuroDate = self.neuroOutput[tmpScanPos[0]][tmpScanPos[1]]

				if ( self.last_date == None): tmpRow["lineColor"] = "#FFFFFF"
				elif (self.curr_date == tmpNeuroDate):
					if (tmpScanPos[1] == 0 ): tmpRow["lineColor"] = "#CC0000" #if start of range
					if (tmpScanPos[1] == 1 ): tmpRow["lineColor"] = "#FFFFFF" # if end of range
					self.neuro_scan_count +=1

				elif(self.last_date < tmpNeuroDate < self.curr_date):
					if (tmpScanPos[1] == 0 ): tmpRow["lineColor"] = "#CC0000" #if start of range
					if (tmpScanPos[1] == 1 ): tmpRow["lineColor"] = "#FFFFFF" # if end of range
					self.neuro_scan_count +=1

				if ((tmpScanPos[0] + tmpScanPos[0]) == len(neuroOutput)): self.neuro_highlight_complete = True #checks if this should be the last iteration

				

			
			tmpRow['date'] = sortkeydto[i].isoformat() + "Z"
			for question in self.dbdatemapped[pointerdts[i]]:
				tmpRow[question] = self.dbdatemapped[pointerdts[i]][question]
				
			self.AgraphFrame.append(tmpRow)
			self.last_date = sortkeydto[i]
			 
		#map accordingly with date to iso format


	def dayresponse(self):
		self.responseFrame = {}
		try:
			tmp = self.dbdatemapped[self.dayindex.todayDate]
		except KeyError: #means there is no information for the daykey
			return self
		# remove aggregate keyword, json handles association
		
		for question in tmp.keys():
			cleankey = question.replace('_aggregate', '')
			self.responseFrame[cleankey] = tmp[question]
		
		return self

	def orderedmap(self):
		import datetime as dt
		self.processFrameList = []
		self.processFrameDict = {}

		graphpoints = self.dbdatemapped.keys()
		graphdates = []

		
		for date in graphpoints:
			try:
				graphdates.append(dt.datetime.strptime(date,"%Y-%m-%d %H:%M:%S.%f"))
			except ValueError:
				graphdates.append(dt.datetime.strptime(date,"%Y-%m-%d %H:%M:%S"))

		sortkeydto, pointerdts = zip(*sorted(zip(graphdates, graphpoints)))

		for i in xrange(0,len(pointerdts)): # want {date: xxxISOxxx , a:x ,b:x ,note:x}

			tmpRow = {}
			tmpRow[sortkeydto[i]] = {}
			self.processFrameDict[sortkeydto[i]] = {}

			for question in self.dbdatemapped[pointerdts[i]]:
				tmpRow[sortkeydto[i]][question] = self.dbdatemapped[pointerdts[i]][question]
				self.processFrameDict[sortkeydto[i]][question] = self.dbdatemapped[pointerdts[i]][question]
				
			self.processFrameList.append(tmpRow)
		return self


	def getLastDate(self,dates): #input a list of dates
		dates.sort(reverse=True)
		return dates[0] #output most recent date in subset
Example 7
class Integrity:

	def __init__(self): #if you want to add analysis database

		self.configFileName='configs/seccuConfig.ini'
		self.DBConfig = AppConfig()

		if (DeviceDetection().embeddedapp): #if embedded
			import sys
			sys.path = ['/pythonHome/Lib','/pythonHome/','/pythonHome','.','/Lib','/pythonHome/system']


	#def detectdb

	def checkReal(self):
		if(os.path.isfile(self.configFileName)):
			try:
				self.dbName = self.DBConfig.mapget('databaseinfo')['databasename']
				#then try and read master index and compare against version in config file
			except ConfigParser.NoSectionError:
				AppIO('False')
				return False

			if(self.dbName==None): #empty value in config
				AppIO('False')
				return False
			try:
				if(os.path.exists(self.dbName)):
					AppIO('True')
					return True
				AppIO('False')
				return False
			except UnboundLocalError:
				AppIO('False')
				return False

		else:
			AppIO('False')
			return False

	def checkExists(self):
		if(os.path.isfile(self.configFileName)):
			try:
				self.dbName = self.DBConfig.mapget('databaseinfo')['databasename']
				#then try and read master index and compare against version in config file
			except ConfigParser.NoSectionError:
				return False

			if(self.dbName==None): #empty value in config
				return False

			try:
				if(os.path.exists(self.dbName)):
					return True
				return False
			except UnboundLocalError:
				return False

		else:
			return False
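
A sketch of using the silent check as a startup guard, with Database and AppConfig as elsewhere in this listing:

if Integrity().checkExists():
	db = Database(AppConfig().mapget('databaseinfo')['databasename'])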
Example 8
class Sync:
    '''
	import scan: scans existing self.db and rebuilds config file 
	create self.db: creates self.db file, master index, question index and table index
	'''
    def __init__(self):
        self.FH = FileHandler()
        self.DBConfig = AppConfig()

    def ssl_seed(self, size=24, chars=string.ascii_uppercase + string.digits):
        self.randomString = ''.join(random.choice(chars) for _ in range(size))
        return self

    def getcloudselection(self):
        try:

            backupLocation = self.DBConfig.mapget('cloudinfo')['location']
            validlocation = [
                'ftp', 'googledrive', 'icloud', 'dropbox', 'skydrive'
            ]
            if (backupLocation in validlocation):
                self.backup = backupLocation
                return True
            else:
                self.backup = "False"
                return False
        except KeyError:

            self.backup = "False"
            return False

    def parseconfigjson(self, json):
        self.uploadconfig = eval(json)  # NOTE: eval of an untrusted string is unsafe
        status = None  # avoid NameError when no action below matches
        if (self.uploadconfig['action'] == 'upload'):
            status = self.upload()

        if (self.uploadconfig['action'] == 'authenticate'):
            status = self.authenticate()

        if (self.uploadconfig['action'] == 'authorize'):
            status = self.authorize()

        if (self.uploadconfig['action'] == 'save'):
            status = self.setbackuplocation()

        if (self.uploadconfig['action'] == 'import'):
            #status = self.importDB()
            pass
        return status

    def setbackuplocation(
            self
    ):  # data = {location: xxxx , username: xxxxx , password: xxxxx}
        backupconfig = self.uploadconfig.copy()
        backupconfig.pop('action')
        try:
            backupconfig.pop('dbpassword')
        except KeyError:
            pass

        for setting in backupconfig.keys():
            if (setting in ("location", "ftphost", "ftpuser", "gmail",
                            "appleid", "dropboxid", "livemail")):
                self.DBConfig.putmap('cloudinfo', setting,
                                     backupconfig[setting])

    def getconfig(self, location=None):
        self.loc = location

        if (self.loc == None):
            self.loc = self.backup
            if (self.loc == "False"):
                print "need location"
                return False

        try:
            if (self.loc == 'ftp'):
                Host = self.DBConfig.mapget('cloudinfo')['ftphost']
                User = self.DBConfig.mapget('cloudinfo')['ftpuser']
                return str(dict(ftphost=Host, ftpuser=User))

            if (self.loc == 'googledrive'):

                return True

            if (self.loc == 'icloud'):
                Email = self.DBConfig.mapget('cloudinfo')['appleid']
                return str(dict(appleid=Email))

            if (self.loc == 'dropbox'):
                Email = self.DBConfig.mapget('cloudinfo')['dropboxid']
                return str(dict(dropboxid=Email))

            if (self.loc == 'skydrive'):
                Email = self.DBConfig.mapget('cloudinfo')['livemail']
                return str(dict(livemail=Email))

            else:
                return False
        except KeyError:
            return False

    def authenticate(self):
        socket.RAND_add(self.ssl_seed().randomString,
                        75.0)  # pre-seed generator
        if (self.uploadconfig['location'] == 'dropbox'):

            url = CloudHandler().authenticatedropbox()
            print "============= authenticate dropbox" + str(url)
            return url
        if (self.uploadconfig['location'] == 'googledrive'):

            url = CloudHandler().authenticategoogle()
            print "============= authenticate google" + str(url)
            if (url != True):
                return url
            else:
                return True  # then upload

    def authorize(self):
        if (self.uploadconfig['location'] == 'googledrive'):
            socket.RAND_add(self.ssl_seed().randomString,
                            75.0)  # pre-seed generator
            authcode = self.uploadconfig['authcode']
            status = CloudHandler().googleauthorize(authcode)
            if (status):  # True
                return True
            else:
                return False

    def upload(self):

        self.key = self.uploadconfig["dbpassword"]

        socket.RAND_add(self.ssl_seed().randomString,
                        75.0)  # pre-seed generator
        self.FH.genpack(self.key)  #package database

        self.packname = self.FH.finalpackname  #from config

        if (self.uploadconfig['location'] == 'ftp'):
            host = self.uploadconfig['ftphost']
            user = self.uploadconfig['ftpuser']
            password = self.uploadconfig['password']

            self.DBConfig.putmap('cloudinfo', 'location', 'ftp')
            self.DBConfig.putmap('cloudinfo', 'ftphost', host)
            self.DBConfig.putmap('cloudinfo', 'ftpuser', user)
            status = CloudHandler().uploadftp(self.packname, host, user,
                                              password)

        if (self.uploadconfig['location'] == 'googledrive'):
            self.DBConfig.putmap('cloudinfo', 'location', 'googledrive')
            status = CloudHandler().uploadgoogledrive(self.packname)

        if (self.uploadconfig['location'] == 'icloud'):
            email = self.uploadconfig['appleid']
            password = self.uploadconfig['password']

            self.DBConfig.putmap('cloudinfo', 'location', 'icloud')
            self.DBConfig.putmap('cloudinfo', 'appleid', email)
            status = uploadicloud(self.packname, email, password)

        if (self.uploadconfig['location'] == 'dropbox'):
            authcode = self.uploadconfig['authcode']

            self.DBConfig.putmap('cloudinfo', 'location', 'dropbox')
            # self.DBConfig.putmap('cloudinfo','dropboxid',authcode)
            status = CloudHandler().uploaddropbox(self.packname, authcode)

        if (self.uploadconfig['location'] == 'skydrive'):
            email = self.uploadconfig['livemail']
            password = self.uploadconfig['password']

            self.DBConfig.putmap('cloudinfo', 'location', 'skydrive')
            self.DBConfig.putmap('cloudinfo', 'livemail', email)
            status = uploadskydrive(self.packname, email, password)
        #print self.FH.deletefile(str(self.FH.finalpackname)) # clean-up
        try:
            import os
            os.remove(self.FH.finalpackname)
        except Exception as e:  # ideally except OSError, but catch everything during clean-up
            print e
            ret = "upload success: " + str(
                status) + " [ERROR, Clean-up]: " + str(e)
            return ret

        else:
            return True
Example 9
class DayEntry:  #checker class
    '''
	checks day hash or creates a new one

	once instantiated, it checks for:
		- if the day key in config coincides with today's date
		- if there isn't a date in config, it scans the database for the one matching today's
		- if no date in config, or it's the wrong date, a new row is made (only if there isn't one with a matching date in the entire self.db)
	'''
    def __init__(self, passkey):
        self.todayDate = str(getDayStart())
        self.key = passkey

        self.DBConfig = AppConfig()

        self.dayKey = None  # set up before checking, avoids AttributeError
        self.dbName = self.DBConfig.mapget('databaseinfo')['databasename']
        self.db = Database(self.dbName)

        try:
            self.dayKey = self.DBConfig.mapget('databaseinfo')['daykey']
        except KeyError:  # if nothing in config, check self.db for an entry
            daystatus = self.checkfordate()
            if (daystatus == False):
                self.makeDayRow()
                self.DBConfig.putmap('databaseinfo', 'daykey', self.dayKey)
            #if true do nothing, config file fixed
        else:

            daystatus = self.checkfordate(
            )  #if false, scans for right one, and fixes config

            oldcompare = self.dayKey
            self.dayKey = self.DBConfig.mapget('databaseinfo')['daykey']
            if (daystatus == False) & (oldcompare == self.dayKey):
                self.makeDayRow()
                self.DBConfig.putmap('databaseinfo', 'daykey', self.dayKey)
            if (daystatus == True):  #everything all good
                pass  #nothing created just a check

    def __del__(self):
        if (self.db.opened):
            self.db.close()

    def makeDayRow(self):

        if (self.checkfordate() == True):  #already exists no need to write
            return False

        dbindex = DBIndexSystem(self.key)

        dayrow = {}
        dayrow["date"] = self.todayDate
        if (self.db.exists() == True):
            self.db.open()
            self.db.id_ind.enc_key = self.key
            self.db.insert(dayrow)
            self.db.close()  #must close first , no double opens
            self.getDayRowID()  # refresh day key
            dbindex.TindexPut(self.dayKey)
            ## would normally write to config file
            return True

    def getDayRowID(self):  #gets row id by date
        if (self.db.exists()):
            self.db.open()
            self.db.id_ind.enc_key = self.key
            for curr in self.db.all('id'):
                try:
                    if curr['date'] == str(self.todayDate):
                        dataop = curr['_id']
                        dataop = "".join(
                            dataop
                        )  #_id is returned as a list of charaters, must be concatenated to string
                        self.db.close()
                        self.dayKey = dataop
                        return dataop  #returns datestring
                except KeyError:
                    continue

                    #break
            #if it makes it here, the entry doesn't exist
            self.db.close()
            return False  #there is a problem

    def checkfordate(self):  #checks for existance of that date in self.db
        if (self.db.exists()):
            self.db.open()
            self.db.id_ind.enc_key = self.key
            if (self.dayKey != None):
                dayrow = self.db.get('id', self.dayKey, with_doc=True)
                #doesn't account for a config entry that no longer exists in the db
                if dayrow['date'] == str(self.todayDate):
                    self.db.close()
                    return True
            for curr in self.db.all('id'):  #try to search
                try:
                    if curr['date'] == str(self.todayDate):
                        self.DBConfig.putmap('databaseinfo', 'daykey', "".join(
                            curr['_id']))  #fix lost entry
                        self.db.close()
                        return False
                except KeyError:
                    continue

                    #break
            #if it makes it here, the entry doesn't exist and nothing was remapped
            self.db.close()
            return False
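
DayEntry is a checker: constructing it guarantees a row exists for today and that the config points at it. A sketch (placeholder passkey):

day = DayEntry('secret-passkey')  # creates today's row if it is missing
print day.dayKey                  # document _id consumed by SecuIn and SecuFrame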
Example 10
class DBIndexSystem:
    '''
	QindexGet: 
		-- Get the question index key


		Allows simple access to database variables

		ex:

	self.indexdb = DBIndexSystem()
		#self.indexdb.masterIndex
		#self.indexdb.Qindex
		#self.indexdb.Tindex
		#self.indexdb.IndexedTable
		#self.indexdb.dbName

	'''
    def __init__(self, passkey):
        self.key = passkey

        self.DBConfig = AppConfig()
        #check for self.db stuff
        #IF IT DOESN'T PASS THESE TESTS
        #warn before deletion
        self.dbName = self.DBConfig.mapget('databaseinfo')['databasename']

        self.masterIndex = self.DBConfig.mapget('databaseinfo')['indexkey']

        self.db = Database(self.dbName)

        if (self.db.exists()):
            self.db.open()
            self.db.id_ind.enc_key = self.key
            try:
                self.Qindex = self.QindexGet()  #question index key
                self.Tindex = self.TindexGet()  #table index key
                self.IndexedTable = self.tableGet(
                )  #regular 'table' index. list of hash pointers in order

            except Exception:  # a bare except would also swallow KeyboardInterrupt
                print 'bad index'
                self.db.close()
                self.sanitycheck = False

            else:
                self.db.close()
                self.sanitycheck = True

    def __del__(self):
        if (self.db.opened):
            self.db.close()

    def QindexGet(self):
        masterRow = self.db.get('id', self.masterIndex, with_doc=True)
        Qindexkey = masterRow['Qindex']
        return Qindexkey  # questions as [a,b,c,d,e,f,g,h,i]
        #add try if line not found, your self.db is totally f****d eh

    def TindexGet(self, tableName='Tindex'):
        masterRow = self.db.get('id', self.masterIndex, with_doc=True)
        Tindexkey = masterRow[tableName]
        return Tindexkey  # questions as [a,b,c,d,e,f,g,h,i]
        #add try if line not found, your self.db is totally f****d eh

    def tableGet(self, tableName='Tindex'):
        #the key in question should be loaded from config or the index key
        Tindexkey = self.TindexGet(tableName)  #not a self attribute; may be a custom table
        Tindex = self.db.get('id', Tindexkey, with_doc=True)
        return Tindex['table']  # table entries as [######,######,######,...]

    def TindexPut(self, data, tableName='Tindex'):  #append ordered hash data to a table

        if (self.db.exists()):
            self.db.open()
            self.db.id_ind.enc_key = self.key
            Tindexkey = self.TindexGet(tableName)
            TindexRow = self.db.get('id', Tindexkey, with_doc=True)
            try:
                #append the new entry to the current table array
                TindexRow['table'].append(data)
            except KeyError:
                TindexRow['table'] = []
                TindexRow['table'].append(data)
            #write the table array back to the same index key
            self.db.update(TindexRow)
            self.db.close()
            return True
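    # a hedged sketch of the intended call pattern (assumed, not shown in
    # this excerpt): append a freshly inserted row's id to the default table
    #
    #   idx = DBIndexSystem(passkey)
    #   idx.TindexPut("".join(new_row['_id']))  #new_row from a prior db.insert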

    def selectrow(self, idIN):
        #check if already open
        if (self.db.exists()):
            if (self.db.opened == False):
                self.db.open()
                self.db.id_ind.enc_key = self.key
                #fetch the requested row by id
                data = self.db.get('id', idIN, with_doc=True)
                self.db.close()
                return data
            else:
                data = self.db.get('id', idIN, with_doc=True)
                return data

    def updaterow(self, data):

        if (self.db.exists()):
            if (self.db.opened == False):
                self.db.open()
                self.db.id_ind.enc_key = self.key
                self.db.update(data)  #must include _id, must be dict/json
                self.db.close()
                return True
            else:
                self.db.update(data)  #must include _id, must be dict/json
                return True
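
A minimal usage sketch for DBIndexSystem as documented above, assuming the database already exists; the passkey value and the 'touched' field are illustrative assumptions, not a confirmed API:

indexdb = DBIndexSystem(passkey)  #passkey: the encryption key used throughout
if indexdb.sanitycheck:
    print indexdb.masterIndex     #master index key read from the config file
    print indexdb.IndexedTable    #ordered list of row-hash pointers
    row = indexdb.selectrow(indexdb.IndexedTable[0])
    row['touched'] = True         #illustrative field, not part of the schema
    indexdb.updaterow(row)        #row must still contain its _id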
Esempio n. 11
0
class DBSubsystem:
    '''
	import scan: scans the existing self.db and rebuilds the config file
	create self.db: creates the self.db file, master index, question index and table index
	'''
    def __init__(self, passkey, xtraDB=None):
        self.DATABASE_SOFTWARE_VERSION = "0.3.1a"
        self.key = passkey
        self.DBConfig = AppConfig()
        self.dbval = xtraDB

    def __del__(self):
        if (hasattr(self, 'db') and self.db.opened):  #self.db only exists after buildDB()
            self.db.close()


# ADD REBUILD OPTION

    def createDB(self):
        if (self.creationCheck()):
            self.buildDB()
            return True
        else:
            return False

    def creationCheck(self):
        if (Integrity().checkExists() == False):
            if (self.dbval is not None):
                self.DBConfig.createConfig()
                self.DBConfig.putmap('databaseinfo', 'databasename',
                                     self.dbval)

                self.dbName = self.dbval

                return True
            else:
                return False

        else:  #integrity check found an existing database
            return False

    def buildDB(self):

        from _dbindex import EncUniqueHashIndex
        self.dbName = self.DBConfig.mapget('databaseinfo')['databasename']

        self.db = Database(self.dbName)
        id_ind = EncUniqueHashIndex(self.db.path, 'id')
        self.db.set_indexes([id_ind])
        self.db.create()
        self.db.id_ind.enc_key = self.key
        self.db.close()

        self.createMasterindex()  #create master index passkey, only once
        self.createQindex()
        self.createTindex()

        #add error handling
        return True
        '''
		@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
	
		Index Creation
	
		@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
	
		'''

    def createMasterindex(self):
        if (self.db.exists()):
            self.db.open()
            self.db.id_ind.enc_key = self.key

    #this function assumes the database exists and is open
        self.db.insert(
            dict(t='master',
                 Qindex=None,
                 Tindex=None,
                 DBVersion=self.DATABASE_SOFTWARE_VERSION))

        for curr in self.db.all('id'):  #the master row is the only row at this point
            if curr['t'] == 'master':
                self.masterIndex = ''.join(curr['_id'])
                self.DBConfig.putmap('databaseinfo', 'indexkey',
                                     self.masterIndex)  #masterkey=value
                break

                #TODO: raise an error if the key couldn't be written or found

        self.db.close()
        return self.masterIndex

    def createQindex(self):
        if (self.db.exists()):
            self.db.open()
            self.db.id_ind.enc_key = self.key
    #this function assumes the database exists and is open
    #insert the question index row

        self.db.insert(dict(t='Qindex'))
        #get the question index key back via its type tag (t='Qindex')
        for curr in self.db.all('id'):  #scanned once, right after the insert
            if curr['t'] == 'Qindex':
                self.Qindexkey = ''.join(curr['_id'])
                break

                #TODO: raise an error if the key couldn't be written or found

        #write the question index key into the master index row
        indexRow = self.db.get('id', self.masterIndex, with_doc=True)

        indexRow['Qindex'] = self.Qindexkey
        self.db.update(indexRow)
        self.db.close()

    #the new Qindex key is now stored in the master index row

    def createTindex(self):

        self.dbName = self.DBConfig.mapget('databaseinfo')['databasename']
        self.masterIndex = self.DBConfig.mapget('databaseinfo')['indexkey']

        self.db = Database(self.dbName)
        if (self.db.exists()):
            self.db.open()
            self.db.id_ind.enc_key = self.key
    #this function assumes the database exists and is open
    #insert the table index row

        self.db.insert(dict(t='Tindex', table=[]))
        #get the table index key back via its type tag (t='Tindex')
        for curr in self.db.all('id'):  #scanned once, right after the insert
            if curr['t'] == 'Tindex':
                self.Tindexkey = ''.join(curr['_id'])
                break

                #TODO: raise an error if the key couldn't be written or found

        #write the table index key into the master index row
        indexRow = self.db.get('id', self.masterIndex, with_doc=True)

        indexRow['Tindex'] = self.Tindexkey
        self.db.update(indexRow)
        self.db.close()

    #the new Tindex key is now stored in the master index row
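
A hedged end-to-end sketch of first-run creation with DBSubsystem; the database name is illustrative, and AppConfig/Integrity are assumed to behave as in the surrounding examples:

dbsys = DBSubsystem(passkey, xtraDB='mydatabase')  #name used only if no config exists
if dbsys.createDB():
    print "created db; master index:", dbsys.masterIndex
else:
    print "config or database already exists; nothing created"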
Esempio n. 12
0
class FileHandler:
	'''
	- performs file operations
	- gets file version info
	- verify hashes
	- encrypt 
	- decrypt


	'''


	def __init__(self):
		self.DBConfig = AppConfig()
		#self.DBConfig.mapget('section')['key']
		self.packtype = self.DBConfig.mapget('databaseinfo')['packtype']
		self.dbName = self.DBConfig.mapget('databaseinfo')['databasename']
		self.zipname = self.dbName + ".z"
		self.encryptedzipname = self.dbName + ".z" + ".crypt"

		if (DeviceDetection().embeddedapp):  #True when running inside an embedded app
			import sys
			sys.path = ['/pythonHome/Lib','/pythonHome/','/pythonHome','.','/Lib','/pythonHome/system']

		#if database detected get version info


	def keygen(self,password,salt):
		#derive a key from password+salt via HMAC-SHA256
		dig = hmac.new(salt, msg=password, digestmod=hashlib.sha256)
		fkey = dig.hexdigest()
		skey = fkey[0:32]  #first 32 hex characters become the cipher key
		return skey
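
	# a hedged sketch of how keygen feeds the file crypto below; the literal
	# password is illustrative only:
	#
	#   fh = FileHandler()
	#   key32 = fh.keygen('secret password', fh.dbName)  #db name doubles as the salt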


	def encrypt_file(self,key, in_filename, out_filename):
		with open(in_filename, 'rb') as infile:
			with open(out_filename, 'wb') as outfile:
				outfile.write(aes.encryptData(key,infile.read()))

	def decrypt_file(self,key, in_filename, out_filename):
		#out_filename = os.path.splitext(in_filename)[0]
		with open(in_filename, 'rb') as infile:
			with open(out_filename, 'wb') as outfile:
				outfile.write(aes.decryptData(key,infile.read()))
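
	# hedged round-trip sketch; aes.encryptData/aes.decryptData are the helpers
	# this sample already relies on (their import is not shown in this excerpt):
	#
	#   fh.encrypt_file(key32, fh.zipname, fh.encryptedzipname)
	#   fh.decrypt_file(key32, fh.encryptedzipname, fh.zipname)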


	def getfilesize(self,path):
		filesize=os.path.getsize(path) #returns float
		return filesize
	
	def moddate(self,path):
		t = os.path.getmtime(path)
		return datetime.datetime.fromtimestamp(t)
	
	
	def deletefile(self,file_name): # deletes a single file
		if (os.path.isfile(file_name)):
			try:
				os.remove(file_name)
			except Exception as e:  #OSError and friends
				print e
				print 'could not delete file'
				return False
			else:
				return True
		else:
			print "file does not exist"
			return False

	def checkfile(self,path):
		return os.path.isfile(path)
		
	def deletepath(self,path): #deletes an entire path and everything inside it
		import shutil
		shutil.rmtree(path)


	
	def zipit(self,path,zip): #zip an entire path into a file
		zipf = zipfile.ZipFile(zip, 'w', zipfile.ZIP_DEFLATED)
		self.zipdir(path, zipf)
		zipf.close()

	def unzipit(self,path,zip): #extract a zip archive into path
		with zipfile.ZipFile(zip, "r") as z:
			z.extractall(path)

	def zipdir(self,path, zip): #directory-walking helper used by zipit()
		for root, dirs, files in os.walk(path):
			for file in files:
				zip.write(os.path.join(root, file))
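
	# hedged usage sketch (paths are illustrative):
	#
	#   fh.zipit(fh.dbName, fh.zipname)    #pack the db directory
	#   fh.unzipit('.', fh.zipname)        #restore it; members carry the db dir prefix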
	

	
	
	def eraseALL(self):
		dbName=self.DBConfig.mapget('databaseinfo')['databasename']
		self.deletepath(dbName)
		self.deletefile("configs/seccuConfig.ini")
	
	
	
	
	
	def md5_for_file(self,path, block_size=256*128, hr=True):
		'''
		Read the file in chunks so large files are never loaded into memory
		at once. block_size (here 256*128 = 32768 bytes) should be a multiple
		of the filesystem block size to avoid performance issues.
		'''
		md5 = hashlib.md5()
		with open(path,'rb') as f:
			for chunk in iter(lambda: f.read(block_size), b''):
				md5.update(chunk)
		if hr:
			return md5.hexdigest()  #human-readable hex string
		return md5.digest()
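
	# sketch: checksum a packed archive before upload (file name assumed):
	#
	#   digest = fh.md5_for_file(fh.zipname)
	#   print digest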
	
	
	
	def compareVersions(self):
		return True  #stub; version comparison should live in a separate module
	
	
	def localVersion(self):
		return True  #stub

	def cloudVersion(self):
		return True  #stub


	def packdbsecure(self,password):
		#zip the db path
		self.zipit(self.dbName, self.zipname)
		#derive the key; the db name doubles as the salt
		salt = self.dbName
		key32 = self.keygen(password,salt)
		#encrypt the archive
		self.encrypt_file(key32, self.zipname,self.encryptedzipname)

	def packdb(self):
		#zip the db path; self.dbName corresponds to ./dbname/xxxxx
		self.zipit(self.dbName, self.zipname)
		return True



	def unpackdbsecure(self,password):
		#derive the key; the db name doubles as the salt
		salt = self.dbName
		key32 = self.keygen(password, salt)
		#decrypt the archive
		self.decrypt_file(key32, self.encryptedzipname, self.zipname)
		#unzip the archive; members already carry the db dir prefix
		self.unzipit('.', self.zipname)
	def unpackdb(self):
		pass  #plain-zip unpack not implemented in this sample




	def parsepacktype(self):
		#True for a secure (encrypted) pack, False for a plain zip;
		#only the plain-zip path is wired up in this sample
		return False



	def cleanpacked(self):
		self.deletefile(str(self.finalpackname))


	def genpack(self,password=None):
		#check the pack type, pack accordingly, then record the result;
		#TODO: also store the file hash and version info
		if (self.parsepacktype()): #secure (encrypted) pack
			self.packdbsecure(password)
			self.finalpackname = self.encryptedzipname
		else: #regular zip
			self.packdb()
			self.finalpackname = self.zipname

		return True


	def genunpack(self):
		#check the pack type and version info, verify the file hash,
		#(decrypt,) unpack, then delete the garbage
		pass
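
A hedged end-to-end sketch of the pack cycle above; FileHandler reads every name from the config file, so the config must already exist, and the password only matters once parsepacktype() reports a secure pack:

fh = FileHandler()
fh.genpack(password='secret password')    #writes fh.finalpackname next to the db
print fh.md5_for_file(fh.finalpackname)   #checksum to verify after transfer
fh.cleanpacked()                          #remove the packed file once uploaded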