Example #1
0
def acquirefile(dbPath):
	'''
	This process examines the database pointed to by dbPath. It looks for any records
	with status 0 and checks each record's dateModified time. If the elapsed time since
	the record was modified is greater than the verification wait time, the record's
	file size is checked. If the file the record points to no longer exists, the status
	is changed to a negative value such as -1. If the file size is unchanged, the record
	is marked as verified and ready to hash. If the file size has changed, the record's
	dateModified is updated and the status is left unchanged, so the file is re-examined
	on the next loop. Once the file is verified, it is moved to the path defined by
	workingPath (provided that path is different).
	'''

	logging = DefaultLogger()
	loopcount = 0

	while True:
		sleep(5)
		if loopcount % 10 == 0:
			logging.debug('acquire loop is active...')
		loopcount += 1

		checkSingleFiles(dbPath)
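
# A minimal sketch (not part of the original source) of one way the acquire loop
# could be launched in its own process; the database path below is a hypothetical
# placeholder, and multiprocessing is only an assumption about how the daemon runs.
if __name__ == '__main__':
	import multiprocessing

	dbPath = '/path/to/jobs.db'  # hypothetical path
	acquireWorker = multiprocessing.Process(target=acquirefile, args=(dbPath,))
	acquireWorker.daemon = True
	acquireWorker.start()
	acquireWorker.join()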
Example #2
0
class DataStore():

    def __init__(self, storePath):
        self.debugLog = DefaultLogger()
        self.storePath = storePath
        self.createJobsTable(storePath)

    def fileHasNoDaisyNumber(self):
        return -70

    def errorAnalyzingFile(self):
        return -60

    def errorFileExistsInPathChain(self):
        return -50

    def errorMovingFileStatusCode(self):
        return -40

    def errorPathDoesntExistStatusCode(self):
        return -30       

    def operationFailedStatusCode(self):
        return -20

    def missingRecordStatusCode(self):
        return -10        

    def addedStatusCode(self):
        return 0

    def verifyStatusCode(self):
        return 10

    def inProcessStatusCode(self):
        return 15

    def fileHasBeenAnalyzedStatusCode(self):
        return 20

    def fileHasBeenMovedToFinalLocation(self):
        return 30

    def dbConnection(self):
        db = None
        try:
            db = sqlite3.connect(self.storePath)
        except Exception as e:
            DefaultLogger().debug(e.message)
        return db

    def createJobsTable(self, pathToDBFolder):
        try:
            db = self.dbConnection()
            cursor = db.cursor()
            cursor.execute('''CREATE TABLE IF NOT EXISTS 
            jobs(id INTEGER PRIMARY KEY, 
            fileName TEXT, 
            fileSize INTEGER,
            dateAdded DATETIME, 
            dateModified DATETIME, 
            status INTEGER,  
            analysis TEXT,
            vendor TEXT,
            daisyComments TEXT,
            commentsUpdatedToDaisy INTEGER,
            daisyStatus TEXT,
            statusUpdatedToDaisy INTEGER)''')
            db.commit()
        except Exception as e:
            info = 'Error: Unable to call createJobsTable: ' + e.message
            self.debugLog.debug(info)

        db.close()
   
    def doesTheFilePathExistElseWhereInThePathStructure(self, filePath, operationType, pathStructureName):
        '''
        Checks whether the file already exists elsewhere in the path structure (excluding
        the inBox). Returns True if it does, so the caller can divert it to a duplicates folder.
        '''
        result = 0

        currentPathStructure = configurationOptions().pathStructureWithName(pathStructureName)
        #exclude the inBox
        for path in configurationOptions().pathStructurePathsToCheckForDuplicates():
            if os.path.exists(os.path.join(currentPathStructure[path], os.path.basename(filePath))):
                result += 1

        if result == 0:
            return False

        return True

    def addFileToDatabase(self, filePath):
        '''
        add a file to the database and mark its status as zero; if the file doesn't exist
        (which is unlikely) then return without inserting, though this case should be logged
        '''
        if not os.path.exists(filePath):
            return

        fileSize = os.path.getsize(filePath)
        db = self.dbConnection()
        try:
            cursor = db.cursor()
            cursor.execute('''INSERT INTO jobs(
                fileName, 
                fileSize, 
                dateAdded,
                dateModified,  
                status, 
                analysis,
                vendor,
                daisyComments,
                commentsUpdatedToDaisy,
                daisyStatus,
                statusUpdatedToDaisy) 
            VALUES (?,?,?,?,?,?,?,?,?,?,?)''', (filePath, fileSize, datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"), datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"),0,'','UNKNOWN', '', '0','','0'))
            db.commit()
        except Exception as e:
            print 'addFileToDatabase Error'
            self.debugLog.debug(e.message)
            db.rollback()
        db.close()
     
    def updateModificationDateForFilePath(self, filePath):
        db = self.dbConnection()
        cursor = db.cursor()
        try:
            cursor.execute('''SELECT * FROM jobs WHERE fileName=? AND status=?''',(filePath,0))
            data = cursor.fetchall()
        except Exception as e:
            self.debugLog.debug(e.message)
            return

        if len(data) > 1:
            self.debugLog.debug('Error: record collision')
        elif len(data) == 0:
            self.debugLog.debug('Error: no record found to update for ' + filePath)
        else:
            try:
                key_id = data[0][0]
                cursor.execute('''UPDATE jobs SET dateModified=? WHERE id=?;''',(datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"), key_id))
                db.commit()
            except Exception as e:
                self.debugLog.debug('Error: unable to update dateModified: ' + e.message)
                db.rollback()

        db.close()

    def recordsForVerifying(self):
        return self.recordsForStatus(self.addedStatusCode())

    def recordsReadyForProcessing(self):
        return self.recordsForStatus(self.verifyStatusCode())

    def oneRecordReadyForProcessing(self):
        return self.oneRecordForStatus(self.verifyStatusCode())

    def recordsForStatus(self, status):
        try:
            db = self.dbConnection()
            cursor = db.cursor()
            cursor.execute('''SELECT * FROM jobs WHERE status=?''', (status,))
            dbRecords = cursor.fetchall()
            records = [DataStoreRecord(record) for record in dbRecords]
            db.close()
            return records
        except Exception as e:
            self.debugLog.debug(e.message)
            return []

    def oneRecordForStatus(self, status):
        try:
            db = self.dbConnection()
            cursor = db.cursor()
            cursor.execute('''SELECT * FROM jobs WHERE status=? LIMIT 1''', (status,))
            dbRecords = cursor.fetchall()
            records = [DataStoreRecord(record) for record in dbRecords]
            db.close()
            if len(records) == 0:
                return None
            return records[0]
        except Exception as e:
            self.debugLog.debug(e.message)
            return None

    def updateRecordStatusWithID(self, status, key_id):
        try:
            db = self.dbConnection()
            cursor = db.cursor()
            cursor.execute('''UPDATE jobs SET status=? WHERE id=?;''',(status, key_id))
            db.commit()
            db.close()
        except Exception as e:
            self.debugLog.debug(e.message)
            db.rollback()

    def updateRecordAsStaticWithNewPath(self, newPath, key_id):
        try:
            db = self.dbConnection()
            cursor = db.cursor()
            cursor.execute('''UPDATE jobs SET fileName=?, status=? WHERE id=?;''',(newPath, self.verifyStatusCode(), key_id))
            db.commit()
            db.close()
        except Exception as e:
            self.debugLog.debug(e.message)
            db.rollback()

    def updateRecordAsCompleteWithFinalPath(self, newPath, key_id):
        try:
            db = self.dbConnection()
            cursor = db.cursor()
            cursor.execute('''UPDATE jobs SET fileName=?, status=?, dateModified=? WHERE id=?;''',(newPath, self.fileHasBeenMovedToFinalLocation(), datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"), key_id))
            db.commit()
            db.close()
        except Exception as e:
            self.debugLog.debug(e.message)
            db.rollback()

    def updateRecordAsMissingWithID(self, key_id):
        self.updateRecordStatusWithID(self.missingRecordStatusCode(), key_id)

    def updateRecordAsInProcess(self, key_id):
        self.updateRecordStatusWithID(self.inProcessStatusCode(), key_id)

    def updateRecordWithAnalysisError(self, key_id):
        self.updateRecordStatusWithID(self.errorAnalyzingFile(), key_id)
    
    def updateRecordAsNotHavingADaisyNumber(self, key_id):
        self.updateRecordStatusWithID(self.fileHasNoDaisyNumber(), key_id)
    
    def updateRecordWithAnalysisData(self, analysisData, key_id):
        try:
            db = self.dbConnection()
            cursor = db.cursor()
            cursor.execute('''UPDATE jobs SET analysis=?, status=? WHERE id=?;''', (analysisData, self.fileHasBeenAnalyzedStatusCode(), key_id))
            db.commit()
            db.close()
            return True
        except Exception as e:
            self.debugLog.debug(e.message)
            db.rollback()

        return False

    def updateRecordAsMissingWithFileNameAndID(self, filePath, key_id):
        #we update the name in case any source file that gets moved collides with another file
        try:
            db = self.dbConnection()
            cursor = db.cursor()
            cursor.execute('''UPDATE jobs SET status=?, fileName=? WHERE id=?;''',(self.missingRecordStatusCode(), filePath,key_id))
            db.commit()
            db.close()
        except Exception as e:
            self.debugLog.debug(e.message)
            db.rollback()       
                    
    def updateRecordWithCurrentSizeAndDateModifiedWithID(self, currentSize, dateModified, key_id):
        try:
            db = self.dbConnection()
            cursor = db.cursor()
            cursor.execute('''UPDATE jobs SET fileSize=?, dateModified=? WHERE id=?;''', (currentSize, dateModified, key_id))
            db.commit()
            db.close()
        except Exception as e:
            self.debugLog.debug(e.message)
            db.rollback()

    def updateRecordWithVendor(self, vendor, key_id):
        try:
            db = self.dbConnection()
            cursor = db.cursor()
            cursor.execute('''UPDATE jobs SET vendor=? WHERE id=?;''', (vendor, key_id))
            db.commit()
            db.close()
        except Exception as e:
            self.debugLog.debug(e.message)
            db.rollback()

    def updateRecordWithComments(self, nComments, key_id):
        try:
            db = self.dbConnection()
            cursor = db.cursor()
            cursor.execute('''UPDATE jobs SET daisyComments=? WHERE id=?;''', (nComments, key_id))
            db.commit()
            db.close()
        except Exception as e:
            self.debugLog.debug(e.message)
            db.rollback()

    def updateRecordWithDaisyStatus(self, nStatus, key_id):
        try:
            db = self.dbConnection()
            cursor = db.cursor()
            cursor.execute('''UPDATE jobs SET daisyStatus=? WHERE id=?;''', (nStatus, key_id))
            db.commit()
            db.close()
        except Exception as e:
            self.debugLog.debug(e.message)
            db.rollback()
 
    def successfullyUpdatedDaisyComments(self, key_id):
        self.setDaisyCommentsPosted(1, key_id)

    def failedToUpdateDaisyComments(self, key_id):
        self.setDaisyCommentsPosted(-1, key_id)

    def setDaisyCommentsPosted(self, posted, key_id):
        try:
            db = self.dbConnection()
            cursor = db.cursor()
            cursor.execute('''UPDATE jobs SET commentsUpdatedToDaisy=? WHERE id=?;''', (posted, key_id))
            db.commit()
            db.close()
        except Exception as e:
            self.debugLog.debug(e.message)
            db.rollback()

    def successfullyUpdatedDaisyStatus(self, key_id):
        self.setDaisyStatusPosted(1, key_id)

    def failedToUpdateDaisyStatus(self, key_id):
        self.setDaisyStatusPosted(-1, key_id)

    def setDaisyStatusPosted(self, posted, key_id):
        try:
            db = self.dbConnection()
            cursor = db.cursor()
            cursor.execute('''UPDATE jobs SET statusUpdatedToDaisy=? WHERE id=?;''', (posted, key_id))
            db.commit()
            db.close()
        except Exception as e:
            self.debugLog.debug(e.message)
            db.rollback()
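
# A minimal sketch (not part of the original source) of how a record's status is
# expected to move through the codes defined above; the database path and the
# analysis JSON string are hypothetical placeholders.
if __name__ == '__main__':
    store = DataStore('/path/to/jobs.db')  # hypothetical path
    record = store.oneRecordReadyForProcessing()  # a record at verifyStatusCode() == 10
    if record is not None:
        store.updateRecordAsInProcess(record.id)                                # 15
        store.updateRecordWithAnalysisData('{"result": "success"}', record.id)  # 20
        store.updateRecordAsCompleteWithFinalPath('/path/to/outBox/file.wav', record.id)  # 30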
Example #3
0
def checkSingleFiles(dbPath):
	logging = DefaultLogger()

	if not os.path.exists(dbPath):
		logging.debug('Acquire File: can\'t find database at path')
		return
	
	datastore = DataStore(dbPath)
	data = datastore.recordsForVerifying()

	for record in data:

		key_id 				= record.id
		filePath 			= record.fileName
		recordSize 			= int(record.fileSize)
		dateModifiedString 	= record.dateModified

		dateLastModified = datetime.datetime.strptime(dateModifiedString, '%Y-%m-%d %H:%M:%S')
		timeDifference = datetime.datetime.now() - dateLastModified

		#This could become an if/else if temp files to be decrypted ever need to go to a different place
		sourcePath = configurationOptions().defaultPathStructure()['inBox']
		workingPath = configurationOptions().defaultPathStructure()['workingBox']

		if timeDifference.total_seconds() < verificationWaitTime:
			continue

		lastSize = recordSize
		currentSize = 0

		if not os.path.exists(filePath):
			logging.debug('Acquire File: Will update record status as the file no longer exists')
			datastore.updateRecordAsMissingWithID(key_id)
			continue

		currentSize = os.path.getsize(filePath)

		if lastSize != currentSize:
			logging.debug(record)
			logging.debug('Acquire File: attempting db modify as file size has changed...')
			datastore.updateRecordWithCurrentSizeAndDateModifiedWithID(currentSize, datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"), key_id)
			continue

		if currentSize == 0:
			# if the current size is zero, then keep waiting until it isn't (or never will be)
			# it's likely the file has been queued to copy but no data has been moved yet (actual OSX case)
			continue
			
		logging.debug('Acquire File: attempting to lock the file to see if I own the file yet...')

		try:
			fileToCheck = open(filePath, 'rb')
			portalocker.lock(fileToCheck, portalocker.LOCK_EX)
			fileToCheck.close()
			logging.debug('Acquire File: proceeding to update the file status knowing that no one else is using it...')
		except Exception as e:
			logging.debug('Acquire File: unable to lock file as it is likely in use')
			continue

		#must test that file doesn't exist elsewhere in the path

		newPath = filePath
		try:
			newPath = pathAfterSafelyMovingFileToDestinationFolder(filePath, workingPath)
		except Exception as e:
			info = '''This shouldn't happen, as pathAfterSafelyMovingFileToDestinationFolder should create a unique name that avoids any collisions; otherwise the file has already been moved.'''
			logging.debug(info)
			logging.debug('Acquire File: Error moving file')
			info = 'There was a problem moving the file into the queue for: ' + os.path.basename(filePath)
			info = info + '\n' + 'This will require manual intervention as the occurrence is unique.'
			#SEND FAILURE EMAIL
			continue

		logging.debug('Acquire File: updating record file status and path....')
		datastore.updateRecordAsStaticWithNewPath(newPath, key_id)
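
# A minimal sketch (an assumption, not the original implementation) of what
# pathAfterSafelyMovingFileToDestinationFolder is expected to do: move filePath into
# destinationFolder, appending a counter to the name to avoid collisions, and return
# the resulting path. The helper name below is hypothetical.
import shutil

def _exampleSafeMove(filePath, destinationFolder):
	baseName = os.path.basename(filePath)
	name, extension = os.path.splitext(baseName)
	candidate = os.path.join(destinationFolder, baseName)
	counter = 1
	while os.path.exists(candidate):
		candidate = os.path.join(destinationFolder, '%s_%d%s' % (name, counter, extension))
		counter += 1
	shutil.move(filePath, candidate)
	return candidate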
Example #4
0
def analyzeBWFFile(dbPath, identifier=1):

    logging = DefaultLogger()
    loopcount = 0
    datastore = DataStore(dbPath)

    try:

        while True:

            sleep(60 + random.randint(1, 10))

            if loopcount % 20 == 0:
                logging.debug(
                    'bwf analyzer loop {} is active...'.format(identifier))
            loopcount += 1

            if not os.path.exists(dbPath):
                logging.debug('Analyze File: cannot find database at path')
                return

            record = None

            #if daisy is not up then just wait until it is
            if isDaisyUp() == False:
                logging.debug('Daisy does not appear to be up')
                continue

            #get a lock on the file
            lock = lockWithFile()
            try:
                lock.acquire(timeout=-1)
                if lock.i_am_locking():
                    record = datastore.oneRecordReadyForProcessing()
                    if record != None:
                        logging.debug(
                            'process {} is acquiring the lock'.format(
                                identifier))
                        datastore.updateRecordAsInProcess(record.id)
                lock.release()
            except Exception as e:
                pass

            if record == None:
                continue

            filePath = record.fileName

            #lets check that it has a genuine Daisy Number
            if getDaisyNumber(os.path.basename(filePath)) == None:
                errorBox = configurationOptions().defaultPathStructure(
                )['errorBox']
                errorBox = os.path.expanduser(errorBox)
                sendProcessFailureMessage({
                    'subject':
                    'BWF Error: file added that has no DANumber',
                    'message':
                    'A file, %s, was deposited that does not have a Daisy Number'
                    % (os.path.basename(filePath))
                })

                #move to errorBox
                try:
                    print "Moving file %s into %s" % (filePath, errorBox)
                    newPath = pathAfterSafelyMovingFileToDestinationFolder(
                        filePath, errorBox)
                except Exception as e:
                    logging.debug('Analyze File: Error moving file')
                    info = '''This should not happen, as pathAfterSafelyMovingFileToDestinationFolder should create a unique name that avoids any collisions; otherwise the file has already been moved.'''
                    logging.debug(info)
                    info = 'There was a problem moving the file into the errorBox for: ' + os.path.basename(
                        filePath)
                    info = info + '\n' + 'This will require manual intervention as the occurrence is unique.'
                    sendProcessFailureMessage({
                        'subject': 'BWF Error',
                        'message': info
                    })
                    logging.debug(info)

                datastore.updateRecordAsNotHavingADaisyNumber(record.id)
                continue

            #lets look up metadata before we even proceed; if we can't get the metadata we don't want to analyze the file
            dataTuple = retrieveDataForDANumber(os.path.basename(filePath),
                                                identifier)
            logging.debug('Data for {} Before: {}'.format(
                os.path.basename(filePath), dataTuple))

            if dataTuple == None:
                #ok, lets send an email that will be sent at most once per 4 hours
                result = "Process Error: Daisy Information not Available for: " + os.path.basename(filePath)
                sendPeriodicFailureMessage(result)
                logging.debug('A Periodic Failure Message attempt was made.')
                continue

            result = None
            resultObject = None
            vendor = dataTuple[0]
            comments = dataTuple[1]
            status = dataTuple[2]

            #once we have the metadata, lets examine the file
            try:
                logging.debug('Will examine %s in loop %s' %
                              (filePath, str(identifier)))
                resultObject = multiChannelBWFFileAnalysis(filePath)
                result = json.dumps(resultObject)
                if resultObject == None:
                    logging.debug(
                        'The analysis of the file %s is "None". This should not occur.'
                        % (filePath))
                    raise Exception(
                        'The analysis of the file %s is "None". This should not occur.'
                        % (filePath))
            except Exception as e:
                logging.debug(
                    'An exception occurred with %s in identifier %s.' %
                    (filePath, str(identifier)))
                #mark as error
                datastore.updateRecordWithAnalysisError(record.id)
                errorBox = configurationOptions().defaultPathStructure(
                )['errorBox']
                errorBox = os.path.expanduser(errorBox)

                #send email
                result = "Process Error: An Exception occurred when processing the file: %s. The file will be moved to %s" % (
                    e.message, errorBox)
                logging.debug(result)
                sendProcessFailureMessage({
                    'subject': 'Process Error',
                    'message': result
                })

                #move to errorBox
                try:
                    print "Moving file %s into %s" % (filePath, errorBox)
                    logging.debug("Moving file %s into %s" %
                                  (filePath, errorBox))
                    newPath = pathAfterSafelyMovingFileToDestinationFolder(
                        filePath, errorBox)
                except Exception as e:
                    logging.debug('Analyze File: Error moving file')
                    info = '''This should not happen, as pathAfterSafelyMovingFileToDestinationFolder should create a unique name that avoids any collisions; otherwise the file has already been moved.'''
                    logging.debug(info)
                    info = 'There was a problem moving the file into the errorBox for: ' + os.path.basename(
                        filePath)
                    info = info + '\n' + 'This will require manual intervention as the occurrence is unique.'
                    sendProcessFailureMessage({
                        'subject': 'Process Error Moving File',
                        'message': info
                    })
                    logging.debug(info)
                continue

            info = 'PreExisting Data for the following file %s: %s %s %s' % (
                os.path.basename(filePath), comments, vendor, status)
            logging.debug(info)

            resultObject['vendor'] = vendor

            #The result object is not None as we would have bailed otherwise

            if resultObject['result'] == 'success':
                if comments == None:
                    comments = ''

                #update Comments
                comments = stringMinusBWFAnalyzerInfo(comments)
                if comments != '':
                    comments += " "
                comments += BWFAnalyzerInfoForSuccess(
                    os.path.basename(filePath))
                success = setComments(
                    getDaisyNumber(os.path.basename(filePath)), comments)
                #update local datastore
                datastore.updateRecordWithComments(comments, record.id)

                #did we successfully update the comments?
                if success == True:
                    #update comments field in db and set to success
                    logging.debug('updating comments successfully')
                    datastore.successfullyUpdatedDaisyComments(record.id)
                else:
                    #put info in db that you couldn't update Daisy
                    logging.debug('not updating comments successfully')
                    datastore.failedToUpdateDaisyComments(record.id)

                #update the status to pending fix
                #only if the status is Needs Attention, otherwise we don't have any further knowledge of what is going on
                nextStatus = 'NO CHANGE'
                success = True
                if status == "Needs Attention":
                    #ok to update status
                    success = setStatusAsPendingFix(
                        getDaisyNumber(os.path.basename(filePath)))
                    nextStatus = 'Pending Fix'

                if status in ['Being Made', 'Ordered']:
                    #ok to update status
                    success = setStatusAsPendingArchive(
                        getDaisyNumber(os.path.basename(filePath)))
                    nextStatus = 'Pending Archive'

                datastore.updateRecordWithDaisyStatus(nextStatus, record.id)
                if success == True:
                    #update status field in db and set to success
                    logging.debug('updating status successfully')
                    datastore.successfullyUpdatedDaisyStatus(record.id)
                else:
                    #put info in db that you couldn't update status in Daisy
                    logging.debug('not updating status successfully')
                    datastore.failedToUpdateDaisyStatus(record.id)

            else:
                sendAnalysisFailure(resultObject)

                if comments == None:
                    comments = ''

                #update Comments
                comments = stringMinusBWFAnalyzerInfo(comments)
                if comments != '':
                    comments += " "
                comments += BWFAnalyzerInfoForErrors(resultObject['errors'])
                success = setComments(
                    getDaisyNumber(os.path.basename(filePath)), comments)

                #update local datastore
                datastore.updateRecordWithComments(comments, record.id)

                if success == True:
                    #update comments field in db and set to success
                    logging.debug('updating comments successfully')
                    datastore.successfullyUpdatedDaisyComments(record.id)
                else:
                    #put info in db that you couldn't update Daisy
                    logging.debug('not updating comments successfully')
                    datastore.failedToUpdateDaisyComments(record.id)

                #update Status
                if status not in ['Being Made', 'Ordered', 'Pending Archive']:
                    #ok to update status
                    success = setStatusAsNeedsAttention(
                        getDaisyNumber(os.path.basename(filePath)))
                    datastore.updateRecordWithDaisyStatus(
                        'Needs Attention', record.id)
                    if success == True:
                        #update status field in db and set to success
                        logging.debug('updating status successfully')
                        datastore.successfullyUpdatedDaisyStatus(record.id)
                    else:
                        #put info in db that you couldn't update status in Daisy
                        logging.debug('not updating status successfully')
                        datastore.failedToUpdateDaisyStatus(record.id)
                else:
                    success = setStatusAsPendingArchive(
                        getDaisyNumber(os.path.basename(filePath)))
                    datastore.updateRecordWithDaisyStatus(
                        'Pending Archive', record.id)
                    if success == True:
                        #update status field in db and set to success
                        logging.debug('updating status successfully')
                        datastore.successfullyUpdatedDaisyStatus(record.id)
                    else:
                        #put info in db that you couldn't update status in Daisy
                        logging.debug('not updating status successfully')
                        datastore.failedToUpdateDaisyStatus(record.id)

            if datastore.updateRecordWithAnalysisData(result,
                                                      record.id) == False:
                info = 'Unable to save record %d %s' % (record.id, result)
                sendProcessFailureMessage({
                    'subject': 'Process Error Unable To Save Record',
                    'message': info
                })
                continue

            #update vendor info
            datastore.updateRecordWithVendor(vendor, record.id)

            dataTuple = retrieveDataForDANumber(os.path.basename(filePath),
                                                identifier)
            logging.debug('Data for {} After: {}'.format(
                os.path.basename(filePath), dataTuple))

            #now that we have saved the data, we are ready to move the file
            nextBox = configurationOptions().defaultPathStructure()['outBox']
            if resultObject['result'] != 'success':
                nextBox = configurationOptions().defaultPathStructure(
                )['failBox']
            nextBox = os.path.expanduser(nextBox)

            newPath = filePath

            try:
                newPath = pathAfterSafelyMovingFileToDestinationFolder(
                    filePath, nextBox)
            except Exception as e:
                logging.debug('Analyze File: Error moving file')
                info = '''This should not happen, as pathAfterSafelyMovingFileToDestinationFolder should create a unique name that avoids any collisions; otherwise the file has already been moved.'''
                logging.debug(info)
                info = 'There was a problem moving the file into ' + nextBox + ' for: ' + os.path.basename(
                    filePath)
                info = info + '\n' + 'This will require manual intervention as the occurrence is unique.'
                sendProcessFailureMessage({
                    'subject': 'Process Error Moving File',
                    'message': info
                })
                continue

            logging.debug(
                'Analyze File: preparing to move file to final path...')
            datastore.updateRecordAsCompleteWithFinalPath(newPath, record.id)

    except Exception as e:
        info = 'Exception in analyzeBWFFile: ' + e.message
        logging.debug(info)
        sendProcessFailureMessage({'subject': 'Exception!', 'message': info})
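
# A minimal sketch (an assumption, not the original implementation) of the
# lockWithFile helper used above. The acquire(timeout=-1) / i_am_locking() /
# release() calls match the lockfile package's FileLock interface, so this
# sketch assumes that package; the lock path is a hypothetical placeholder.
def _exampleLockWithFile():
    from lockfile import FileLock
    return FileLock('/tmp/bwf_analyzer.lock')  # hypothetical lock path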
Example #5
0
class DataStore():
    def __init__(self, storePath):
        self.debugLog = DefaultLogger()
        self.storePath = storePath
        self.createJobsTable(storePath)

    def fileHasNoDaisyNumber(self):
        return -70

    def errorAnalyzingFile(self):
        return -60

    def errorFileExistsInPathChain(self):
        return -50

    def errorMovingFileStatusCode(self):
        return -40

    def errorPathDoesntExistStatusCode(self):
        return -30

    def operationFailedStatusCode(self):
        return -20

    def missingRecordStatusCode(self):
        return -10

    def addedStatusCode(self):
        return 0

    def verifyStatusCode(self):
        return 10

    def inProcessStatusCode(self):
        return 15

    def fileHasBeenAnalyzedStatusCode(self):
        return 20

    def fileHasBeenMovedToFinalLocation(self):
        return 30

    def dbConnection(self):
        db = None
        try:
            db = sqlite3.connect(self.storePath)
        except Exception as e:
            DefaultLogger().debug(e.message)
        return db

    def createJobsTable(self, pathToDBFolder):
        try:
            db = self.dbConnection()
            cursor = db.cursor()
            cursor.execute('''CREATE TABLE IF NOT EXISTS 
            jobs(id INTEGER PRIMARY KEY, 
            fileName TEXT, 
            fileSize INTEGER,
            dateAdded DATETIME, 
            dateModified DATETIME, 
            status INTEGER,  
            analysis TEXT,
            vendor TEXT,
            daisyComments TEXT,
            commentsUpdatedToDaisy INTEGER,
            daisyStatus TEXT,
            statusUpdatedToDaisy INTEGER)''')
            db.commit()
        except Exception as e:
            info = 'Error: Unable to call createJobsTable: ' + e.message
            self.debugLog.debug(info)

        db.close()

    def doesTheFilePathExistElseWhereInThePathStructure(
            self, filePath, operationType, pathStructureName):
        '''
        Checks whether the file already exists elsewhere in the path structure (excluding
        the inBox). Returns True if it does, so the caller can divert it to a duplicates folder.
        '''
        result = 0

        currentPathStructure = configurationOptions().pathStructureWithName(
            pathStructureName)
        #exclude the inBox
        for path in configurationOptions(
        ).pathStructurePathsToCheckForDuplicates():
            if os.path.exists(
                    os.path.join(currentPathStructure[path],
                                 os.path.basename(filePath))):
                result += 1

        if result == 0:
            return False

        return True

    def addFileToDatabase(self, filePath):
        '''
        add a file to the database and mark its status as zero; if the file doesn't exist
        (which is unlikely) then return without inserting, though this case should be logged
        '''
        if not os.path.exists(filePath):
            return

        fileSize = os.path.getsize(filePath)
        db = self.dbConnection()
        try:
            cursor = db.cursor()
            cursor.execute(
                '''INSERT INTO jobs(
                fileName, 
                fileSize, 
                dateAdded,
                dateModified,  
                status, 
                analysis,
                vendor,
                daisyComments,
                commentsUpdatedToDaisy,
                daisyStatus,
                statusUpdatedToDaisy) 
            VALUES (?,?,?,?,?,?,?,?,?,?,?)''',
                (filePath, fileSize,
                 datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
                 datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"), 0, '',
                 'UNKNOWN', '', '0', '', '0'))
            db.commit()
        except Exception as e:
            print 'addFileToDatabase Error'
            self.debugLog.debug(e.message)
            db.rollback()
        db.close()

    def updateModificationDateForFilePath(self, filePath):
        db = self.dbConnection()
        cursor = db.cursor()
        try:
            cursor.execute(
                '''SELECT * FROM jobs WHERE fileName=? AND status=?''',
                (filePath, 0))
            data = cursor.fetchall()
        except Exception as e:
            self.debugLog.debug(e.message)
            return

        if len(data) > 1:
            self.debugLog.debug('Error: record collision')
        elif len(data) == 0:
            self.debugLog.debug('Error: no record found to update for ' + filePath)
        else:
            try:
                key_id = data[0][0]
                cursor.execute(
                    '''UPDATE jobs SET dateModified=? WHERE id=?;''',
                    (datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
                     key_id))
                db.commit()
            except Exception as e:
                self.debugLog.debug('Error: unable to update dateModified: ' + e.message)
                db.rollback()

        db.close()

    def recordsForVerifying(self):
        return self.recordsForStatus(self.addedStatusCode())

    def recordsReadyForProcessing(self):
        return self.recordsForStatus(self.verifyStatusCode())

    def oneRecordReadyForProcessing(self):
        return self.oneRecordForStatus(self.verifyStatusCode())

    def recordsForStatus(self, status):
        try:
            db = self.dbConnection()
            cursor = db.cursor()
            cursor.execute('''SELECT * FROM jobs WHERE status=?''', (status, ))
            dbRecords = cursor.fetchall()
            records = [DataStoreRecord(record) for record in dbRecords]
            db.close()
            return records
        except Exception as e:
            self.debugLog.debug(e.message)
            return []

    def oneRecordForStatus(self, status):
        try:
            db = self.dbConnection()
            cursor = db.cursor()
            cursor.execute('''SELECT * FROM jobs WHERE status=? LIMIT 1''',
                           (status, ))
            dbRecords = cursor.fetchall()
            records = [DataStoreRecord(record) for record in dbRecords]
            db.close()
            if len(records) == 0:
                return None
            return records[0]
        except Exception as e:
            self.debugLog.debug(e.message)
            return None

    def updateRecordStatusWithID(self, status, key_id):
        try:
            db = self.dbConnection()
            cursor = db.cursor()
            cursor.execute('''UPDATE jobs SET status=? WHERE id=?;''',
                           (status, key_id))
            db.commit()
            db.close()
        except Exception as e:
            self.debugLog.debug(e.message)
            db.rollback()

    def updateRecordAsStaticWithNewPath(self, newPath, key_id):
        try:
            db = self.dbConnection()
            cursor = db.cursor()
            cursor.execute(
                '''UPDATE jobs SET fileName=?, status=? WHERE id=?;''',
                (newPath, self.verifyStatusCode(), key_id))
            db.commit()
            db.close()
        except Exception as e:
            self.debugLog.debug(e.message)
            db.rollback()

    def updateRecordAsCompleteWithFinalPath(self, newPath, key_id):
        try:
            db = self.dbConnection()
            cursor = db.cursor()
            cursor.execute(
                '''UPDATE jobs SET fileName=?, status=?, dateModified=? WHERE id=?;''',
                (newPath, self.fileHasBeenMovedToFinalLocation(),
                 datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
                 key_id))
            db.commit()
            db.close()
        except Exception as e:
            self.debugLog.debug(e.message)
            db.rollback()

    def updateRecordAsMissingWithID(self, key_id):
        self.updateRecordStatusWithID(self.missingRecordStatusCode(), key_id)

    def updateRecordAsInProcess(self, key_id):
        self.updateRecordStatusWithID(self.inProcessStatusCode(), key_id)

    def updateRecordWithAnalysisError(self, key_id):
        self.updateRecordStatusWithID(self.errorAnalyzingFile(), key_id)

    def updateRecordAsNotHavingADaisyNumber(self, key_id):
        self.updateRecordStatusWithID(self.fileHasNoDaisyNumber(), key_id)

    def updateRecordWithAnalysisData(self, analysisData, key_id):
        try:
            db = self.dbConnection()
            cursor = db.cursor()
            cursor.execute(
                '''UPDATE jobs SET analysis=?, status=? WHERE id=?;''',
                (analysisData, self.fileHasBeenAnalyzedStatusCode(), key_id))
            db.commit()
            db.close()
            return True
        except Exception as e:
            self.debugLog.debug(e.message)
            db.rollback()

        return False

    def updateRecordAsMissingWithFileNameAndID(self, filePath, key_id):
        #we update the name in case any source file that gets moved collides with another file
        try:
            db = self.dbConnection()
            cursor = db.cursor()
            cursor.execute(
                '''UPDATE jobs SET status=?, fileName=? WHERE id=?;''',
                (self.missingRecordStatusCode(), filePath, key_id))
            db.commit()
            db.close()
        except Exception as e:
            self.debugLog.debug(e.message)
            db.rollback()

    def updateRecordWithCurrentSizeAndDateModifiedWithID(
            self, currentSize, dateModified, key_id):
        try:
            db = self.dbConnection()
            cursor = db.cursor()
            cursor.execute(
                '''UPDATE jobs SET fileSize=?, dateModified=? WHERE id=?;''',
                (currentSize, dateModified, key_id))
            db.commit()
            db.close()
        except Exception as e:
            self.debugLog.debug(e.message)
            db.rollback()

    def updateRecordWithVendor(self, vendor, key_id):
        try:
            db = self.dbConnection()
            cursor = db.cursor()
            cursor.execute('''UPDATE jobs SET vendor=? WHERE id=?;''',
                           (vendor, key_id))
            db.commit()
            db.close()
        except Exception as e:
            self.debugLog.debug(e.message)
            db.rollback()

    def updateRecordWithComments(self, nComments, key_id):
        try:
            db = self.dbConnection()
            cursor = db.cursor()
            cursor.execute('''UPDATE jobs SET daisyComments=? WHERE id=?;''',
                           (nComments, key_id))
            db.commit()
            db.close()
        except Exception as e:
            self.debugLog.debug(e.message)
            db.rollback()

    def updateRecordWithDaisyStatus(self, nStatus, key_id):
        try:
            db = self.dbConnection()
            cursor = db.cursor()
            cursor.execute('''UPDATE jobs SET daisyStatus=? WHERE id=?;''',
                           (nStatus, key_id))
            db.commit()
            db.close()
        except Exception as e:
            self.debugLog.debug(e.message)
            db.rollback()

    def successfullyUpdatedDaisyComments(self, key_id):
        self.setDaisyCommentsPosted(1, key_id)

    def failedToUpdateDaisyComments(self, key_id):
        self.setDaisyCommentsPosted(-1, key_id)

    def setDaisyCommentsPosted(self, posted, key_id):
        try:
            db = self.dbConnection()
            cursor = db.cursor()
            cursor.execute(
                '''UPDATE jobs SET commentsUpdatedToDaisy=? WHERE id=?;''',
                (posted, key_id))
            db.commit()
            db.close()
        except Exception as e:
            self.debugLog.debug(e.message)
            db.rollback()

    def successfullyUpdatedDaisyStatus(self, key_id):
        self.setDaisyStatusPosted(1, key_id)

    def failedToUpdateDaisyStatus(self, key_id):
        self.setDaisyStatusPosted(-1, key_id)

    def setDaisyStatusPosted(self, posted, key_id):
        try:
            db = self.dbConnection()
            cursor = db.cursor()
            cursor.execute(
                '''UPDATE jobs SET statusUpdatedToDaisy=? WHERE id=?;''',
                (posted, key_id))
            db.commit()
            db.close()
        except Exception as e:
            self.debugLog.debug(e.message)
            db.rollback()
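
# A minimal sketch (an assumption, not the original class) of the DataStoreRecord
# wrapper used by recordsForStatus and oneRecordForStatus above: it maps a row from
# the jobs table onto the attributes the rest of the code reads (id, fileName,
# fileSize, dateModified, ...), in the column order defined by createJobsTable.
class _ExampleDataStoreRecord(object):
    def __init__(self, row):
        (self.id, self.fileName, self.fileSize, self.dateAdded,
         self.dateModified, self.status, self.analysis, self.vendor,
         self.daisyComments, self.commentsUpdatedToDaisy, self.daisyStatus,
         self.statusUpdatedToDaisy) = row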
Example #6
0
def analyzeBWFFile(dbPath, identifier = 1):
	
	logging = DefaultLogger()
	loopcount = 0
	datastore = DataStore(dbPath)

	try:

		while True:

			sleep(60+random.randint(1,10))

			if loopcount % 20 == 0:
				logging.debug('bwf analyzer loop {} is active...'.format(identifier))
			loopcount += 1

			if not os.path.exists(dbPath):
				logging.debug('Analyze File: cannot find database at path')
				return
		
			record =  None

			#if daisy is not up then just wait until it is
			if isDaisyUp() == False:
				logging.debug('Daisy does not appear to be up')
				continue

			#get a lock on the file
			lock = lockWithFile()
			try:
				lock.acquire(timeout=-1)
				if lock.i_am_locking():  
					record = datastore.oneRecordReadyForProcessing()
					if record != None:
						logging.debug('process {} is acquiring the lock'.format(identifier))
						datastore.updateRecordAsInProcess(record.id)
				lock.release()
			except Exception as e:
				pass

			if record == None:
				continue

			filePath = record.fileName

			#lets check that it has a genuine Daisy Number
			if getDaisyNumber(os.path.basename(filePath)) == None:
				errorBox = configurationOptions().defaultPathStructure()['errorBox']
				errorBox = os.path.expanduser(errorBox)	
				sendProcessFailureMessage({'subject':'BWF Error: file added that has no DANumber', 'message':'A file, %s, was deposited that does not have a Daisy Number' % (os.path.basename(filePath))})
				
				#move to errorBox
				try:
					print "Moving file %s into %s" % (filePath, errorBox)
					newPath = pathAfterSafelyMovingFileToDestinationFolder(filePath, errorBox)
				except Exception as e:
					logging.debug('Analyze File: Error moving file')
					info = '''This should not happen, as pathAfterSafelyMovingFileToDestinationFolder should create a unique name that avoids any collisions; otherwise the file has already been moved.'''
					logging.debug(info)
					info = 'There was a problem moving the file into the errorBox for: ' + os.path.basename(filePath)
					info = info + '\n' + 'This will require manual intervention as the occurrence is unique.'
					sendProcessFailureMessage({'subject':'BWF Error', 'message':info})
					logging.debug(info)

				datastore.updateRecordAsNotHavingADaisyNumber(record.id)
				continue

			#lets look up metadata before we even proceed; if we can't get the metadata we don't want to analyze the file
			dataTuple = retrieveDataForDANumber(os.path.basename(filePath), identifier)
			logging.debug('Data for {} Before: {}'.format(os.path.basename(filePath), dataTuple))

			if dataTuple == None:
				#ok, lets send an email that will be sent at most once per 4 hours
				result = "Process Error: Daisy Information not Available for: " + os.path.basename(filePath)
				sendPeriodicFailureMessage(result)
				logging.debug('A Periodic Failure Message attempt was made.')
				continue

			result = None
			resultObject = None
			vendor = dataTuple[0]
			comments = dataTuple[1]
			status = dataTuple[2]
			
			#once we have the metadata, lets examine the file
			try:
				logging.debug('Will examine %s in loop %s' % (filePath, str(identifier)))
				resultObject = multiChannelBWFFileAnalysis(filePath)
				result = json.dumps(resultObject)
				if resultObject == None:
					logging.debug('The analysis of the file %s is "None". This should not occur.' % (filePath))
					raise Exception('The analysis of the file %s is "None". This should not occur.' % (filePath))
			except Exception as e:
				logging.debug('An exception occurred with %s in identifier %s.' % (filePath, str(identifier)))
				#mark as error
				datastore.updateRecordWithAnalysisError(record.id)
				errorBox = configurationOptions().defaultPathStructure()['errorBox']
				errorBox = os.path.expanduser(errorBox)
				
				#send email
				result = "Process Error: An Exception occurred when processing the file: %s. The file will be moved to %s" % (e.message, errorBox)
				logging.debug(result)
				sendProcessFailureMessage({'subject':'Process Error', 'message':result})

				#move to errorBox
				try:
					print "Moving file %s into %s" % (filePath, errorBox)
					logging.debug("Moving file %s into %s" % (filePath, errorBox))
					newPath = pathAfterSafelyMovingFileToDestinationFolder(filePath, errorBox)
				except Exception as e:
					logging.debug('Analyze File: Error moving file')
					info = '''This should not happen, as pathAfterSafelyMovingFileToDestinationFolder should create a unique name that avoids any collisions; otherwise the file has already been moved.'''
					logging.debug(info)
					info = 'There was a problem moving the file into the errorBox for: ' + os.path.basename(filePath)
					info = info + '\n' + 'This will require manual intervention as the occurrence is unique.'
					sendProcessFailureMessage({'subject':'Process Error Moving File', 'message':info})
					logging.debug(info)
				continue

			info = 'PreExisting Data for the following file %s: %s %s %s' % (os.path.basename(filePath), comments, vendor, status)
			logging.debug(info)

			resultObject['vendor'] = vendor

			#The result object is not None as we would have bailed otherwise

			if resultObject['result'] == 'success':
				if comments == None:
					comments = ''

				#update Comments 
				comments = stringMinusBWFAnalyzerInfo(comments)
				if comments != '':
					comments += " "
				comments += BWFAnalyzerInfoForSuccess(os.path.basename(filePath))
				success = setComments(getDaisyNumber(os.path.basename(filePath)), comments)
				#update local datastore
				datastore.updateRecordWithComments(comments, record.id)

				#did we successfully update the comments?
				if success == True:
					#update comments field in db and set to success
					logging.debug('updating comments successfully')
					datastore.successfullyUpdatedDaisyComments(record.id)
				else:
					#put info in db that you couldn't update Daisy
					logging.debug('not updating comments successfully')
					datastore.failedToUpdateDaisyComments(record.id)

				#update the status to pending fix
				#only if the status is Needs Attention, otherwise we don't have any further knowledge of what is going on
				nextStatus = 'NO CHANGE'
				success = True
				if status == "Needs Attention":
					#ok to update status
					success = setStatusAsPendingFix(getDaisyNumber(os.path.basename(filePath)))
					nextStatus = 'Pending Fix'

				if status in ['Being Made', 'Ordered']:
					#ok to update status
					success = setStatusAsPendingArchive(getDaisyNumber(os.path.basename(filePath)))
					nextStatus = 'Pending Archive'
					
				datastore.updateRecordWithDaisyStatus(nextStatus, record.id)
				if success == True:
					#update status field in db and set to success
					logging.debug('updating status successfully')
					datastore.successfullyUpdatedDaisyStatus(record.id)
				else:
					#put info in db that you couldn't update status in Daisy
					logging.debug('not updating status successfully')
					datastore.failedToUpdateDaisyStatus(record.id)

			else:
				sendAnalysisFailure(resultObject)

				if comments == None:
					comments = ''

				#update Comments 
				comments = stringMinusBWFAnalyzerInfo(comments)
				if comments != '':
					comments += " "
				comments += BWFAnalyzerInfoForErrors(resultObject['errors'])
				success = setComments(getDaisyNumber(os.path.basename(filePath)), comments)
				
				#update local datastore
				datastore.updateRecordWithComments(comments, record.id)

				if success == True:
					#update comments field in db and set to success
					logging.debug('updating comments successfully')
					datastore.successfullyUpdatedDaisyComments(record.id)
				else:
					#put infor in db that you couldn't update Daisy
					logging.debug('not updating comments successfully')
					datastore.failedToUpdateDaisyComments(record.id)

				#update Status
				if status not in ['Being Made', 'Ordered', 'Pending Archive']:
					#ok to update status
					success = setStatusAsNeedsAttention(getDaisyNumber(os.path.basename(filePath)))
					datastore.updateRecordWithDaisyStatus('Needs Attention', record.id)
					if success == True:
						#update status field in db and set to success
						logging.debug('updating status successfully')
						datastore.successfullyUpdatedDaisyStatus(record.id)
					else:
						#put info in db that you couldn't update status in Daisy
						logging.debug('not updating status successfully')
						datastore.failedToUpdateDaisyStatus(record.id)
				else:
					success = setStatusAsPendingArchive(getDaisyNumber(os.path.basename(filePath)))
					datastore.updateRecordWithDaisyStatus('Pending Archive', record.id)
					if success == True:
						#update status field in db and set to success
						logging.debug('updating status successfully')
						datastore.successfullyUpdatedDaisyStatus(record.id)
					else:
						#put info in db that you couldn't update status in Daisy
						logging.debug('not updating status successfully')
						datastore.failedToUpdateDaisyStatus(record.id)

			if datastore.updateRecordWithAnalysisData(result, record.id) == False:
				info = 'Unable to save record %d %s' % (record.id, result) 
				sendProcessFailureMessage({'subject':'Process Error Unable To Save Record', 'message':info})
				continue

			#update vendor info
			datastore.updateRecordWithVendor(vendor, record.id)

			dataTuple = retrieveDataForDANumber(os.path.basename(filePath), identifier)
			logging.debug('Data for {} After: {}'.format(os.path.basename(filePath),dataTuple))

			#now that we have saved the data, we are ready to move the file
			nextBox = configurationOptions().defaultPathStructure()['outBox']
			if resultObject['result'] != 'success':
				nextBox = configurationOptions().defaultPathStructure()['failBox']
			nextBox = os.path.expanduser(nextBox)

			newPath = filePath

			try:
				newPath = pathAfterSafelyMovingFileToDestinationFolder(filePath, nextBox)
			except Exception as e:
				logging.debug('Analyze File: Error moving file')
				info = '''This should not happen, as pathAfterSafelyMovingFileToDestinationFolder should create a unique name that avoids any collisions; otherwise the file has already been moved.'''
				logging.debug(info)
				info = 'There was a problem moving the file into ' + nextBox + ' for: ' + os.path.basename(filePath)
				info = info + '\n' + 'This will require manual intervention as the occurrence is unique.'
				sendProcessFailureMessage({'subject':'Process Error Moving File', 'message':info})
				continue

			logging.debug('Analyze File: preparing to move file to final path...')
			datastore.updateRecordAsCompleteWithFinalPath(newPath, record.id)

	except Exception as e:
		info = 'Exception in analyzeBWFFile: ' + e.message
		logging.debug(info)
		sendProcessFailureMessage({'subject':'Exception!', 'message':info})
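
# A minimal sketch (an assumption, not the original implementation) of the
# throttling implied by sendPeriodicFailureMessage: the comment above says the
# failure email goes out at most once every four hours, so this wrapper remembers
# when it last sent one. sendProcessFailureMessage comes from the surrounding
# code; the module-level timestamp and the subject line are hypothetical.
_lastPeriodicFailureTime = None

def _exampleSendPeriodicFailureMessage(message):
	global _lastPeriodicFailureTime
	now = datetime.datetime.now()
	if _lastPeriodicFailureTime is None or (now - _lastPeriodicFailureTime) >= datetime.timedelta(hours=4):
		sendProcessFailureMessage({'subject': 'Periodic Process Error', 'message': message})
		_lastPeriodicFailureTime = now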
Example #7
0
class DataStore():

    def __init__(self, storePath):
        self.storePath = storePath
        self.createJobsTable(storePath)
        self.createArchiveManagerJobsTable(storePath)
        self.debugLog = DefaultLogger()

    def dbConnection(self):
        db = None
        try:
            db = sqlite3.connect(self.storePath)
        except Exception as e:
            self.debugLog.debug(e.message)
        return db

    def createJobsTable(self, pathToDBFolder):
        try:
            db = self.dbConnection()
            cursor = db.cursor()
            cursor.execute('''CREATE TABLE IF NOT EXISTS 
            jobs(id INTEGER PRIMARY KEY, 
            fileName TEXT, 
            fileSize INTEGER, 
            fileHash TEXT, 
            operationFileHash TEXT, 
            dateAdded DATETIME, 
            dateModified DATETIME, 
            dateOperationStart DATETIME, 
            dateOperationEnd DATETIME,
            dateHashStart DATETIME, 
            dateHashEnd DATETIME,
            dateOperationHashStart DATETIME, 
            dateOperationHashEnd DATETIME,
            operationFileName TEXT, 
            operationFileSize INTEGER, 
            status INTEGER, 
            processComplete INTEGER, 
            operationType TEXT, 
            pathStructureName TEXT, 
            isBatch INTEGER,
            batchName TEXT,
            batchUUID TEXT)''')
            db.commit()
        except Exception as e:
            print 'Error: Unable to call createJobsTable'
            self.debugLog.debug(e.message)

        db.close()

    def createArchiveManagerJobsTable(self, pathToDBFolder):
        try:
            db = self.dbConnection()
            cursor = db.cursor()
            cursor.execute('''CREATE TABLE IF NOT EXISTS 
            amjobs(id INTEGER PRIMARY KEY, 
            amNumber TEXT, 
            amData TEXT,
            amPath TEXT, 
            complete INTEGER,
            errorString TEXT,
            uuid TEXT)''')
            db.commit()
        except Exception as e:
            self.debugLog.debug(e.message)
        db.close()

    def addArchiveManagerJobToDataBaseWithUUID(self, amNumber, dataString, amPath, uuid):
        '''
        add an archive manager job to the database and mark its completion status as zero
        '''
        db = self.dbConnection()
        try:
            cursor = db.cursor()
            cursor.execute('''INSERT INTO amjobs(
                amNumber, 
                amData,
                amPath, 
                complete,
                errorString,
                uuid) 
            VALUES (?,?,?,?,?,?)''', (amNumber, dataString, amPath, 0, '', uuid))
            db.commit()
        except Exception as e:
            self.debugLog.debug(e.message)
            db.rollback()
        db.close()

    def addAndMarkArchiveManagerJobToDataBaseAsUnkown(self, amNumber, amPath):
        '''
        add an archive manager job to the database; since we can't retrieve any information
        about the job, mark its completion status as -1 and its data as unknown
        '''
        db = self.dbConnection()
        try:
            cursor = db.cursor()
            cursor.execute('''INSERT INTO amjobs(
                amNumber, 
                amData,
                amPath, 
                complete,
                errorString,
                uuid) 
            VALUES (?,?,?,?,?,?)''', (amNumber, 'unknown', amPath, -1, '', ''))
            db.commit()
        except Exception as e:
            self.debugLog.debug(e.message)
            db.rollback()
        db.close()

    def updateArchiveManagerJobAsErrored(self, amRecord):
        '''
        update an archive manager job in the database and mark its completion status as
        errored (-2), since it has error strings attached
        '''
        key_id = amRecord.id
        status = False
        db = self.dbConnection()
        try:
            cursor = db.cursor()
            cursor.execute('''UPDATE amJobs SET complete=? WHERE id=?;''',(-2, key_id))
            db.commit()
            status = True
        except Exception as e:
            self.debugLog.debug(e.message)
            db.rollback()
        db.close()
        return status

    def updateArchiveManagerJobAsReadyToComplete(self, amRecord):
        '''
        update an archive manager job in the database and mark its completion status as ready to finish
        '''
        key_id = amRecord.id
        status = False
        db = self.dbConnection()
        try:
            cursor = db.cursor()
            cursor.execute('''UPDATE amJobs SET complete=? WHERE id=?;''',(2, key_id))
            db.commit()
            status = True
        except Exception as e:
            self.debugLog.debug(e.message)
            db.rollback()
        db.close()
        return status

    def updateArchiveManagerJobAsComplete(self, amRecord):
        '''
        update an archive manager job in the database and mark its completion status as finished
        '''
        key_id = amRecord.id
        status = False
        db = self.dbConnection()
        try:
            cursor = db.cursor()
            cursor.execute('''UPDATE amJobs SET complete=? WHERE id=?;''',(1, key_id))
            db.commit()
            status = True
        except Exception as e:
            self.debugLog.debug(e.message)
            db.rollback()
        db.close()
        return status

    def updateArchiveManagerJobErrorString(self, amRecord, errorString):
        '''
        update an archive manager job's error string in the database
        '''
        key_id = amRecord.id
        status = False
        db = self.dbConnection()
        try:
            cursor = db.cursor()
            cursor.execute('''UPDATE amJobs SET errorString=? WHERE id=?;''',(errorString, key_id))
            db.commit()
            status = True
        except Exception as e:
            self.debugLog.debug(e.message)
            db.rollback()
        db.close()
        return status       

    def doesTheFilePathExistElseWhereInThePathStructure(self, filePath, operationType, pathStructureName):
        '''
        Checks whether the file already exists elsewhere in the path structure, i.e. is already in the queue; if so, the caller moves it to a duplicate folder
        '''
        result = 0

        currentPathStructure = configurationOptions().pathStructureWithName(pathStructureName)
        # exclude the inBox
        for path in configurationOptions().pathStructurePathsToCheckForDuplicates():
            if os.path.exists(os.path.join(currentPathStructure[path], os.path.basename(filePath))):
                result += 1

        if result == 0:
            return False

        return True
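    # Illustrative sketch of how this check is typically used before queuing a file
    # (the filePath and pathStructureName values here are hypothetical):
    #
    #   store = DataStore('/path/to/queue.db')
    #   if not store.doesTheFilePathExistElseWhereInThePathStructure(filePath, 'Encrypt', pathStructureName):
    #       store.addFilePathToDataBaseStoreWithType(filePath, 'Encrypt', pathStructureName)
    #   else:
    #       # the file already exists somewhere in the path chain, treat it as a duplicate
    #       ...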

    def addBatchFilePathToDataBaseStoreWithType(self, filePath, operationType, pathStructureName, batchName):
        '''
        add a batch file to the database with its status set to zero by delegating to
        addFilePathToDataBaseStoreWithType; if the file doesn't exist (which is unlikely)
        nothing is inserted, a case that should probably be logged
        '''
        self.addFilePathToDataBaseStoreWithType(filePath, operationType, pathStructureName, isBatch=1, batchName=batchName)

    def addFilePathToDataBaseStoreWithType(self, filePath, operationType, pathStructureName, isBatch=0, batchName=''):
        '''
        add a file to the database with its status set to zero; if the file doesn't exist
        (which is unlikely) the method returns without inserting, a case that should
        probably be logged
        '''
        if not os.path.exists(filePath):
            return

        fileSize = os.path.getsize(filePath)
        db = self.dbConnection()
        try:
            cursor = db.cursor()
            cursor.execute('''INSERT INTO jobs(
                fileName, 
                fileSize, 
                fileHash, 
                operationFileHash, 
                dateAdded, 
                dateModified, 
                dateOperationStart, 
                dateOperationEnd, 
                dateHashStart, 
                dateHashEnd, 
                dateOperationHashStart, 
                dateOperationHashEnd, 
                operationFileName, 
                operationFileSize, 
                status, 
                processComplete, 
                operationType, 
                pathStructureName,
                isBatch,
                batchName,
                batchUUID) 
            VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)''', (filePath, fileSize, 'HASH','OPER_HASH', datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"), datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"), '0', '0', '0', '0', '0','0', 'operationFilePath', 0, 0, 0, operationType, pathStructureName,isBatch, batchName, 'NO_UUID'))
            db.commit()
        except Exception as e:
            self.debugLog.debug('Error: addFilePathToDataBaseStoreWithType failed')
            self.debugLog.debug(e.message)
            db.rollback()
        db.close()
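    # Example call (a sketch; the path and path structure name are hypothetical):
    #
    #   store.addFilePathToDataBaseStoreWithType('/path/to/inBox/clip.mov', 'Encrypt', 'defaultStructure')
    #
    # The row is inserted with placeholder values ('HASH', 'OPER_HASH', 'operationFilePath',
    # 'NO_UUID') that later stages overwrite once the file is verified, hashed and processed.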
     

    def updateModificationDateForFilePath(self, filePath):
        db = self.dbConnection()
        cursor = db.cursor()
        try:
            cursor.execute('''SELECT * FROM jobs WHERE fileName=? AND status=?''',(filePath,0))
            data = cursor.fetchall()
        except Exception as e:
            self.debugLog.debug(e.message)
            db.close()
            return

        if len(data) > 1:
            self.debugLog.debug('Error: record collision')
        elif len(data) == 1:
            try:
                key_id = data[0][0]
                cursor.execute('''UPDATE jobs SET dateModified=? WHERE id=?;''',(datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"), key_id))
                db.commit()
            except Exception as e:
                self.debugLog.debug('Error: unable to update dateModified for ' + filePath)
                db.rollback()

        db.close()

    def dataStoreRecordsForDataBaseRecords(self, records):
        dataStoreRecords = []
        for record in records:
            dataStoreRecords.append(DataStoreRecord(record))
        return dataStoreRecords

    def displayRecordForFile(self, filePath):
        db = self.dbConnection()
        cursor = db.cursor()
        cursor.execute('''SELECT * FROM jobs WHERE fileName=?''',(filePath,))
        data = cursor.fetchall()
        db.close()
        for record in data:
            print record

    def noArchiveManagerDataExistsForRecord(self):
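        # note: shares the -90 value with errorGeneratingHash below, so the two
        # conditions cannot be distinguished by status value alone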
        return -90

    def errorGeneratingHash(self):
        return -90

    def daisyEntryNotFoundStatusCode(self):
        return -80

    def checksumLookupFailedStatusCode(self):
        return -70

    def checksumComparisonFailedStatusCode(self):
        return -60

    def errorFileExistsInPathChain(self):
        return -50

    def errorMovingFileStatusCode(self):
        return -40

    def errorPathDoesntExistStatusCode(self):
        return -30       

    def operationFailedStatusCode(self):
        return -20

    def missingRecordStatusCode(self):
        return -10        

    def addedStatusCode(self):
        return 0

    def verifyStatusCode(self):
        return 10

    def hashStartStatusCode(self):
        return 15

    def hashStatusCode(self):
        return 20

    def operationStartedStatusCode(self):
        return 25

    def operationCompleteStatusCode(self):
        return 30

    def reHashStartStatusCode(self):
        return 35

    def reHashStatusCode(self):
        return 40            
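    # Typical status progression for a job, as implied by the record/update helpers below:
    #   0 (added) -> 10 (verified) -> 15 (hash started) -> 20 (hashed) ->
    #   25 (operation started) -> 30 (operation complete) -> 35 (re-hash started) -> 40 (re-hashed),
    #   with the negative codes above marking the various error states.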

    def recordsForHashing(self):
        return self.recordsForStatus(self.verifyStatusCode())

    def recordsForReHashing(self):
        return self.recordsForStatus(self.operationCompleteStatusCode())    

    def recordsForVerifying(self):
        return self.recordsForStatus(self.addedStatusCode())

    def recordsReadyToEncrypt(self):
        return self.recordsForEncryptionStatus(self.hashStatusCode())

    def recordsReadyToDecrypt(self):
        return self.recordsForDecryptionStatus(self.hashStatusCode())

    def recordWithNumberFromAMJobsTable(self, amNumber):
        try:
            db = self.dbConnection()
            cursor = db.cursor()
            cursor.execute('''SELECT * FROM amjobs WHERE complete=? AND amNumber=?''', (0,amNumber))
            dbRecords = cursor.fetchall()
            amRecord = None
            if len(dbRecords) > 1:
                self.debugLog.debug('More than one record found for amNumber ' + str(amNumber))
            if len(dbRecords) > 0:
                amRecord = ArchiveManagerRecord(dbRecords[0])
            db.close()
            return amRecord
        except Exception as e:
            self.debugLog.debug('Error recordWithNumberFromAMJobsTable')
            self.debugLog.debug(e.message)
            return None

    def archiveManagerJobsTableRecordWithUUID(self, uuidString):
        try:
            db = self.dbConnection()
            cursor = db.cursor()
            cursor.execute('''SELECT * FROM amjobs WHERE complete=? AND uuid=?''', (0, uuidString))
            dbRecords = cursor.fetchall()
            amRecord = None
            if len(dbRecords) > 0:
                amRecord = ArchiveManagerRecord(dbRecords[0])
            if len(dbRecords) > 1:
                self.debugLog.debug('found more than one record for uuid request')
            db.close()
            return amRecord
        except Exception as e:
            self.debugLog.debug(e.message)
            return None

    def archiveManagerJobsReadyToStart(self):
        try:
            db = self.dbConnection()
            cursor = db.cursor()
            cursor.execute('''SELECT * FROM amjobs WHERE complete=?''', (0,))
            dbRecords = cursor.fetchall()
            records = [ArchiveManagerRecord(record) for record in dbRecords]
            db.close()
            return records
        except Exception as e:
            self.debugLog.debug(e.message)
            return []

    def archiveManagerJobsReadyToComplete(self):
        try:
            db = self.dbConnection()
            cursor = db.cursor()
            cursor.execute('''SELECT * FROM amjobs WHERE complete=?''', (2,))
            dbRecords = cursor.fetchall()
            records = [ArchiveManagerRecord(record) for record in dbRecords]
            db.close()
            return records
        except Exception as e:
            self.debugLog.debug(e.message)
            return []

    def archiveManagerJobsThatErrored(self):
        try:
            db = self.dbConnection()
            cursor = db.cursor()
            cursor.execute('''SELECT * FROM amjobs WHERE complete=?''', (-2,))
            dbRecords = cursor.fetchall()
            records = [ArchiveManagerRecord(record) for record in dbRecords]
            db.close()
            return records
        except Exception as e:
            self.debugLog.debug(e.message)
            return []            

    def recordsForStatus(self, status):
        try:
            db = self.dbConnection()
            cursor = db.cursor()
            cursor.execute('''SELECT * FROM jobs WHERE status=?''', (status,))
            dbRecords = cursor.fetchall()
            records = [DataStoreRecord(record) for record in dbRecords]
            db.close()
            return records
        except Exception as e:
            self.debugLog.debug(e.message)
            return []

    def recordsForUUID(self, uuid):
        try:
            db = self.dbConnection()
            cursor = db.cursor()
            cursor.execute('''SELECT * FROM jobs WHERE batchUUID=?''', (uuid,))
            dbRecords = cursor.fetchall()
            records = [DataStoreRecord(record) for record in dbRecords]
            db.close()
            return records
        except Exception as e:
            self.debugLog.debug(e.message)
            return []   

    def recordsForEncryptionStatus(self, status):
        try:
            db = self.dbConnection()
            cursor = db.cursor()
            cursor.execute('''SELECT * FROM jobs WHERE status=? AND operationType=?''', (status,'Encrypt'))
            dbRecords = cursor.fetchall()
            records = [DataStoreRecord(record) for record in dbRecords]
            db.close()
            return records
        except Exception as e:
            self.debugLog.debug(e.message)
            return []   

    def recordsForDecryptionStatus(self, status):
        try:
            db = self.dbConnection()
            cursor = db.cursor()
            cursor.execute('''SELECT * FROM jobs WHERE status=? AND operationType=?''', (status,'Decrypt'))
            dbRecords = cursor.fetchall()
            records = [DataStoreRecord(record) for record in dbRecords]
            db.close()
            return records
        except Exception as e:
            self.debugLog.debug(e.message)
            return []                

    def updateRecordStatusWithID(self, status, key_id):
        try:
            db = self.dbConnection()
            cursor = db.cursor()
            cursor.execute('''UPDATE jobs SET status=? WHERE id=?;''',(status, key_id))
            db.commit()
            db.close()
        except Exception as e:
            self.debugLog.debug(e.message)
            db.rollback()

    def updateRecordAsMissingWithID(self, key_id):
        try:
            db = self.dbConnection()
            cursor = db.cursor()
            cursor.execute('''UPDATE jobs SET status=? WHERE id=?;''',(self.missingRecordStatusCode(), key_id))
            db.commit()
            db.close()
        except Exception as e:
            self.debugLog.debug(e.message)
            db.rollback()

    def updateRecordAsMissingWithFileNameAndID(self, filePath, key_id):
        # we also update the fileName, in case the moved source file collides with another file
        try:
            db = self.dbConnection()
            cursor = db.cursor()
            cursor.execute('''UPDATE jobs SET status=?, fileName=? WHERE id=?;''',(self.missingRecordStatusCode(), filePath,key_id))
            db.commit()
            db.close()
        except Exception as e:
            self.debugLog.debug(e.message)
            db.rollback()       
                    
    def updateRecordWithCurrentSizeAndDateModifiedWithID(self, currentSize, dateModified, key_id):
        try:
            db = self.dbConnection()
            cursor = db.cursor()
            cursor.execute('''UPDATE jobs SET fileSize=?, dateModified=? WHERE id=?;''', (currentSize, dateModified, key_id))
            db.commit()
            db.close()
        except Exception as e:
            self.debugLog.debug(e.message)
            db.rollback()

    def updateRecordAsStaticWithNewPath(self, newPath, key_id):
        try:
            db = self.dbConnection()
            cursor = db.cursor()
            cursor.execute('''UPDATE jobs SET fileName=?, status=? WHERE id=?;''',(newPath, self.verifyStatusCode(), key_id))
            db.commit()
            db.close()
        except Exception as e:
            self.debugLog.debug(e.message)
            db.rollback()

    def updateRecordAWithBatchUUIDReference(self, uuidReference, key_id):
        try:
            db = self.dbConnection()
            cursor = db.cursor()
            cursor.execute('''UPDATE jobs SET batchUUID=? WHERE id=?;''',(uuidReference, key_id))
            db.commit()
            db.close()
        except Exception as e:
            self.debugLog.debug('Error in updateRecordAWithBatchUUIDReference')
            self.debugLog.debug(e.message)
            db.rollback()            

    def updateRecordAsDuplicateWithNewPath(self, newPath, key_id):
        try:
            db = self.dbConnection()
            cursor = db.cursor()
            cursor.execute('''UPDATE jobs SET fileName=?, status=? WHERE id=?;''',(newPath, self.errorFileExistsInPathChain(), key_id))
            db.commit()
            db.close()
        except Exception as e:
            self.debugLog.debug(e.message)
            db.rollback()

    def updateRecordWithHashStart(self, startTime, key_id):
        try:
            db = self.dbConnection()
            cursor = db.cursor()
            cursor.execute('''UPDATE jobs SET dateHashStart=?, status=? WHERE id=?;''',
                ( startTime, self.hashStartStatusCode(), key_id))
            db.commit()
            db.close()
        except Exception as e:
            self.debugLog.debug(e.message)
            db.rollback()

    def updateRecordWithReHashStart(self, startTime, key_id):
        try:
            db = self.dbConnection()
            cursor = db.cursor()
            cursor.execute('''UPDATE jobs SET dateOperationHashStart=?, status=? WHERE id=?;''',
                ( startTime, self.reHashStartStatusCode(), key_id))
            db.commit()
            db.close()
        except Exception as e:
            self.debugLog.debug(e.message)
            db.rollback()        

    def updateRecordWithHashForStartTimeAndEndTime(self, hashString, startTime, endTime, key_id):
        try:
            db = self.dbConnection()
            cursor = db.cursor()
            cursor.execute('''UPDATE jobs SET fileHash=?, dateHashStart=?, dateHashEnd=?, status=? WHERE id=?;''',(hashString, startTime, endTime, self.hashStatusCode(), key_id))
            db.commit()
            db.close()
        except Exception as e:
            self.debugLog.debug(e.message)
            db.rollback()

    def updateRecordWithFinalEncryptedPathAndHashForStartTimeAndEndTime(self, newPath, hashString, startTime, endTime, key_id):
        try:
            db = self.dbConnection()
            cursor = db.cursor()
            cursor.execute('''UPDATE jobs SET operationFileName=?, operationFileHash=?, dateOperationHashStart=?, dateOperationHashEnd=?, status=? WHERE id=?;''',
                (newPath, hashString, startTime, endTime, self.reHashStatusCode(), key_id))
            db.commit()
            db.close()
            return True
        except Exception as e:
            self.debugLog.debug(e.message)
            db.rollback()  
            return False                     

    def updateRecordStatusWithOperationStart(self, startTime, key_id):
        try:
            db = self.dbConnection()
            cursor = db.cursor()
            cursor.execute('''UPDATE jobs SET status=?, dateOperationStart=? WHERE id=?;''', 
                            (self.operationStartedStatusCode(), startTime, key_id))
            db.commit()
            db.close()
        except Exception as e:
            self.debugLog.debug(e.message)
            db.rollback()

    def updateRecordStatusWithEncryptedFileNameAndStartAndEndTime(self, statusValue, encryptedFilePath, startTime, endTime, key_id):
        try:
            db = self.dbConnection()
            cursor = db.cursor()
            cursor.execute('''UPDATE jobs SET status=?, operationFileName=?, dateOperationStart=?, dateOperationEnd=?  WHERE id=?;''', 
                            (statusValue, encryptedFilePath, startTime, endTime, key_id))
            db.commit()
            db.close()
        except Exception as e:
            self.debugLog.debug(e.message)
            db.rollback()   

    def updateRecordStatusWithDecryptedFileNameAndStartAndEndTime(self, statusValue, decryptedFilePath, startTime, endTime, key_id):
        try:
            db = self.dbConnection()
            cursor = db.cursor()
            cursor.execute('''UPDATE jobs SET status=?, operationFileName=?, dateOperationStart=?, dateOperationEnd=?  WHERE id=?;''', 
                            (statusValue, decryptedFilePath, startTime, endTime, key_id))
            db.commit()
            db.close()
        except Exception as e:
            self.debugLog.debug(e.message)
            db.rollback()
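    # Sketch of a worker loop that drives the hashing stage with these helpers
    # (illustrative only: hashForFile and the DataStoreRecord attribute names
    # used here are assumptions about the surrounding project):
    #
    #   store = DataStore(dbPath)
    #   for record in store.recordsForHashing():
    #       startTime = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    #       store.updateRecordWithHashStart(startTime, record.id)
    #       hashString = hashForFile(record.fileName)
    #       endTime = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    #       store.updateRecordWithHashForStartTimeAndEndTime(hashString, startTime, endTime, record.id)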