Example #1
def sendAnalysisFailure(resultObject):
    errorDict = failureHTMLText(resultObject)
    text = errorDict['text']
    html = errorDict['html']
    sendEmailNotification(
        configurationOptions().smtpServer, "*****@*****.**",
        configurationOptions().emailRecipients,
        "BWF Analysis Error: {}".format(resultObject['fileName']), text, html)
Example #2
def sendProcessFailureMessage(infoDict):
    subject = infoDict['subject']
    message = infoDict['message']

    errorMessage = generalfailureHTMLTextMessage(message)
    recipients = configurationOptions().serviceErrorRecipients

    text = errorMessage['text']
    html = errorMessage['html']
    sendEmailNotification(configurationOptions().smtpServer,
                          "*****@*****.**", recipients, subject,
                          text, html)
Example #3
def sendPeriodicFailureMessage(message):
    '''send an email with a periodic frequency'''

    errorMessage = generalfailureHTMLTextMessage(message)
    text = errorMessage['text']
    html = errorMessage['html']
    recipients = configurationOptions().serviceErrorRecipients

    pathToEmailFile = os.path.join(os.path.dirname(__file__), 'emailtimestamp')
    emailfile = None
    tempFile = None

    #does the email file exist? if not, create it and send the email immediately
    if not os.path.exists(pathToEmailFile):
        sendEmailNotification(
            configurationOptions().smtpServer, "*****@*****.**",
            recipients,
            "Periodic Message: There was a process error: %s" % (message),
            text, html)
        try:
            emailfile = open(pathToEmailFile, 'w+')
            emailfile.close()
        except Exception as e:
            pass
        return

    tempEmailFile = os.path.join(os.path.dirname(__file__), 'tempFile')
    try:
        tempFile = open(tempEmailFile, 'w+')
        tempFile.close()
    except Exception as e:
        sendEmailNotification(configurationOptions().smtpServer,
                              "*****@*****.**", recipients,
                              "Periodic Process Error Message: %s" % (message),
                              text, html)
        return

    #compare the fresh temp file's mtime against the timestamp file's mtime
    difference = os.stat(tempEmailFile).st_mtime - os.stat(pathToEmailFile).st_mtime
    print(difference)

    #send email every 4 hours
    if difference > (60 * 60 * 4):
        print('sending email')
        sendEmailNotification(configurationOptions().smtpServer,
                              "*****@*****.**", recipients,
                              "Periodic Process Error Message: %s" % (message),
                              text, html)
        #touch the timestamp file so the four hour window restarts
        emailfile = open(pathToEmailFile, 'w+')
        emailfile.close()
    else:
        print('not sending email')
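The function above throttles email by comparing the modification times of two marker files. The same idea can be expressed more directly with a single timestamp file and os.path.getmtime; the helper below is a hypothetical sketch of that pattern, not code from this project.

# Hypothetical sketch of the throttle pattern used above.
import os
import time

def shouldSendThrottledEmail(timestampPath, intervalSeconds=60 * 60 * 4):
    # send only if the marker file is missing or older than the interval
    if os.path.exists(timestampPath):
        if time.time() - os.path.getmtime(timestampPath) < intervalSeconds:
            return False
    # touch the marker so the next interval starts now
    open(timestampPath, 'w').close()
    return True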
Example #4
    def doesTheFilePathExistElseWhereInThePathStructure(self, filePath, operationType, pathStructureName):
        '''
        Checks that the file isn't already in the queue; if it is, it is moved to a duplicate folder
        '''
        result = 0

        currentPathStructure = configurationOptions().pathStructureWithName(pathStructureName)
        #exclude the inBox
        for path in configurationOptions().pathStructurePathsToCheckForDuplicates():
            if os.path.exists(os.path.join(currentPathStructure[path], os.path.basename(filePath))):
                result += 1

        return result > 0
Example #5
def DefaultDatabasePath():

    options = configurationOptions()
    dataBasePath = options.dataBasePath

    if not os.path.exists(dataBasePath):
        os.makedirs(dataBasePath)

    return os.path.join(dataBasePath, 'bwf_database.db')
Example #6
def DefaultLogger():
    options = configurationOptions()
    logPath = options.logPath

    if not os.path.exists(logPath):
        os.makedirs(logPath)

    logPath = os.path.join(logPath, 'bwflog.log')
    logging.basicConfig(filename=logPath,
                        level=logging.DEBUG,
                        format='%(asctime)s - %(message)s',
                        datefmt='%Y-%m-%d %H:%M:%S')
    return logging
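DefaultLogger configures the root logger via logging.basicConfig (repeat calls are no-ops once handlers exist) and returns the logging module itself, which is why the other examples can use it directly:

# Usage as seen in the other examples; DefaultLogger() hands back the logging module.
logging = DefaultLogger()
logging.debug('worker started')   # written to bwflog.log with the timestamped format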
Example #7
def main():

    options = configurationOptions()
    if not options.isValid():
        return

    logging = DefaultLogger()
    dbPath = DefaultDatabasePath()
    cleanUpLockFiles()

    pathToWatch = options.defaultPathStructure()

    processObjects = []
    #paths
    processObjects.append({"target": folderObserver,
                           "args": (pathToWatch, dbPath),
                           "info": 'recreating folder observer process...'})

    #Operations
    processObjects.append({"target": acquirefile,
                           "args": (dbPath,),
                           "info": 'recreating verifier process...'})

    #Processors
    for x in range(8):
        processObjects.append({"target": analyzeBWFFile,
                               "args": (dbPath, 'process ' + str(x)),
                               "info": 'recreating analyzer process %s...' % (str(x),)})

    for processObject in processObjects:
        processObject["process"] = Process(target=processObject['target'],
                                           args=processObject['args'])

    for processObject in processObjects:
        processObject["process"].start()

    try:
        while True:
            sleep(2)

            #restart any worker that has died or exited
            for processObject in processObjects:
                if not processObject['process'].is_alive() or processObject['process'].exitcode is not None:
                    logging.debug(processObject['info'])
                    processObject['process'].terminate()
                    processObject['process'] = Process(target=processObject['target'],
                                                       args=processObject['args'])
                    processObject['process'].start()

    except KeyboardInterrupt:
        #shut down the workers on Ctrl-C
        for processObject in processObjects:
            processObject['process'].terminate()

    for processObject in processObjects:
        processObject['process'].join()
Example #8
def checkSingleFiles(dbPath):
    logging = DefaultLogger()

    if not os.path.exists(dbPath):
        logging.debug('Acquire File: can\'t find database at path')
        return

    datastore = DataStore(dbPath)
    data = datastore.recordsForVerifying()

    for record in data:

        key_id = record.id
        filePath = record.fileName
        recordSize = int(record.fileSize)
        dateModifiedString = record.dateModified

        dateLastModified = datetime.datetime.strptime(dateModifiedString, '%Y-%m-%d %H:%M:%S')
        timeDifference = datetime.datetime.now() - dateLastModified

        #This can change with an if/else should I decide I want to put temp files to be decrypted in another place
        sourcePath = configurationOptions().defaultPathStructure()['inBox']
        workingPath = configurationOptions().defaultPathStructure()['workingBox']

        if timeDifference.seconds < verificationWaitTime:
            continue

        lastSize = recordSize
        currentSize = 0

        if not os.path.exists(filePath):
            logging.debug('Acquire File: Will update record status as the file no longer exists')
            datastore.updateRecordAsMissingWithID(key_id)
            continue

        currentSize = os.path.getsize(filePath)

        if lastSize != currentSize:
            logging.debug(record)
            logging.debug('Acquire File: attempting db modify as file size has changed...')
            datastore.updateRecordWithCurrentSizeAndDateModifiedWithID(currentSize, datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"), key_id)
            continue

        if currentSize == 0:
            # if the current size is zero, continue until it isn't or never will be;
            # it's likely the file has been queued to copy but no data has been moved yet (actual OSX case)
            continue

        logging.debug('Acquire File: attempting to lock the file to see if I own the file yet...')

        try:
            fileToCheck = open(filePath, 'rb')
            portalocker.lock(fileToCheck, portalocker.LOCK_EX)
            fileToCheck.close()
            logging.debug('Acquire File: proceeding to update the file status knowing that no one else is using it...')
        except Exception as e:
            logging.debug('Acquire File: unable to lock file as it is likely in use')
            continue

        #must test that the file doesn't exist elsewhere in the path

        newPath = filePath
        try:
            newPath = pathAfterSafelyMovingFileToDestinationFolder(filePath, workingPath)
        except Exception as e:
            info = '''This shouldn\'t happen as pathAfterSafelyMovingFileToDestinationFolder should create a unique name that avoids any collisions, otherwise the file has been moved'''
            logging.debug(info)
            logging.debug('Acquire File: Error moving file')
            info = 'There was a problem moving the file into the queue for: ' + os.path.basename(filePath)
            info = info + '\n' + 'This will require manual intervention as the occurrence is unique.'
            #SEND FAILURE EMAIL
            continue

        logging.debug('Acquire File: updating record file status and path....')
        datastore.updateRecordAsStaticWithNewPath(newPath, key_id)
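The portalocker call above is a probe for whether anyone else still holds the file: if the exclusive lock cannot be taken, the record is skipped until a later pass. A hypothetical standalone version of that probe, using a non-blocking flag so the check returns immediately (not code from this project):

# Hypothetical probe, mirroring the locking check used above.
import portalocker

def canLockFile(path):
    try:
        with open(path, 'rb') as handle:
            portalocker.lock(handle, portalocker.LOCK_EX | portalocker.LOCK_NB)
            portalocker.unlock(handle)
        return True
    except Exception:
        return False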
Example #9
def analyzeBWFFile(dbPath, identifier=1):

    logging = DefaultLogger()
    loopcount = 0
    datastore = DataStore(dbPath)

    try:

        while True:

            sleep(60 + random.randint(1, 10))

            if loopcount % 20 == 0:
                logging.debug(
                    'bwf analyzer loop {} is active...'.format(identifier))
            loopcount += 1

            if not os.path.exists(dbPath):
                logging.debug('Acquire File: can not find database at path')
                return

            record = None

            #if daisy is not up then just wait until it is
            if isDaisyUp() == False:
                logging.debug('Daisy does not appear to be up')
                continue

            #get a lock on the file
            lock = lockWithFile()
            try:
                lock.acquire(timeout=-1)
                if lock.i_am_locking():
                    record = datastore.oneRecordReadyForProcessing()
                    if record != None:
                        logging.debug(
                            'process {} is acquiring the lock'.format(
                                identifier))
                        datastore.updateRecordAsInProcess(record.id)
                lock.release()
            except Exception as e:
                pass

            if record == None:
                continue

            filePath = record.fileName

            #let's check that it has a genuine Daisy Number
            if getDaisyNumber(os.path.basename(filePath)) == None:
                errorBox = configurationOptions().defaultPathStructure(
                )['errorBox']
                errorBox = os.path.expanduser(errorBox)
                sendProcessFailureMessage({
                    'subject':
                    'BWF Error: file added that has no DANumber',
                    'message':
                    'A file, %s, was deposited that does not have a Daisy Number'
                    % (os.path.basename(filePath))
                })

                #move to errorBox
                try:
                    print "Moving file %s into %s" % (filePath, errorBox)
                    newPath = pathAfterSafelyMovingFileToDestinationFolder(
                        filePath, errorBox)
                except Exception as e:
                    logging.debug('Analyze File: Error moving file')
                    info = '''This should not happen as pathAfterSafelyMovingFileToDestinationFolder should create a unique name that avoids any collisions, otherwise the file has been moved'''
                    logging.debug(info)
                    info = 'There was a problem moving the file into the errorBox for: ' + os.path.basename(filePath)
                    info = info + '\n' + 'This will require manual intervention as the occurrence is unique.'
                    sendProcessFailureMessage({
                        'subject': 'BWF Error',
                        'message': info
                    })
                    logging.debug(info)

                datastore.updateRecordAsNotHavingADaisyNumber(record.id)
                continue

            #let's look up the metadata before we proceed; if we can't get it, we don't want to analyze the file
            dataTuple = retrieveDataForDANumber(os.path.basename(filePath),
                                                identifier)
            logging.debug('Data for {} Before: {}'.format(
                os.path.basename(filePath), dataTuple))

            if dataTuple is None:
                #ok, let's send an email that is throttled to at most one per four hours
                result = "Process Error: Daisy Information not Available for: " + os.path.basename(filePath)
                sendPeriodicFailureMessage(result)
                logging.debug('A Periodic Failure Message attempt was made.')
                continue

            result = None
            resultObject = None
            vendor = dataTuple[0]
            comments = dataTuple[1]
            status = dataTuple[2]

            #once we have the metadata, lets examine the file
            try:
                logging.debug('Will examine %s in loop %s' %
                              (filePath, str(identifier)))
                resultObject = multiChannelBWFFileAnalysis(filePath)
                result = json.dumps(resultObject)
                if resultObject == None:
                    logging.debug(
                        'The analysis of the file %s is "None". This should not occur.'
                        % (filePath))
                    raise Exception(
                        'The analysis of the file %s is "None". This should not occur.'
                        % (filePath))
            except Exception as e:
                logging.debug(
                    'An exception occurred with %s in identifier %s.' %
                    (filePath, str(identifier)))
                #mark as error
                datastore.updateRecordWithAnalysisError(record.id)
                errorBox = configurationOptions().defaultPathStructure(
                )['errorBox']
                errorBox = os.path.expanduser(errorBox)

                #send email
                result = "Process Error: An Exception occurred when processing the file: %s. The file will be moved to %s" % (
                    e.message, errorBox)
                logging.debug(result)
                sendProcessFailureMessage({
                    'subject': 'Process Error',
                    'message': result
                })

                #move to errorBox
                try:
                    print "Moving file %s into %s" % (filePath, errorBox)
                    logging.debug("Moving file %s into %s" %
                                  (filePath, errorBox))
                    newPath = pathAfterSafelyMovingFileToDestinationFolder(
                        filePath, errorBox)
                except Exception as e:
                    logging.debug('Analyze File: Error moving file')
                    info = '''This should not happen as pathAfterSafelyMovingFileToDestinationFolder should create a unique name that avoids any collisions, otherwise the file has been moved'''
                    logging.debug(info)
                    info = 'There was a problem moving the file into the errorBox for: ' + os.path.basename(filePath)
                    info = info + '\n' + 'This will require manual intervention as the occurrence is unique.'
                    sendProcessFailureMessage({
                        'subject': 'Process Error Moving File',
                        'message': info
                    })
                    logging.debug(info)
                continue

            info = 'PreExisting Data for the following file %s: %s %s %s' % (
                os.path.basename(filePath), comments, vendor, status)
            logging.debug(info)

            resultObject['vendor'] = vendor

            #The result object is not None as we would have bailed otherwise

            if resultObject['result'] == 'success':
                if comments == None:
                    comments = ''

                #update Comments
                comments = stringMinusBWFAnalyzerInfo(comments)
                if comments != '':
                    comments += " "
                comments += BWFAnalyzerInfoForSuccess(
                    os.path.basename(filePath))
                success = setComments(
                    getDaisyNumber(os.path.basename(filePath)), comments)
                #update local datastore
                datastore.updateRecordWithComments(comments, record.id)

                #did we successfully update the comments?
                if success == True:
                    #update comments field in db and set to success
                    logging.debug('updating comments successfully')
                    datastore.successfullyUpdatedDaisyComments(record.id)
                else:
                    #put info in db that you couldn't update Daisy
                    logging.debug('not updating comments successfully')
                    datastore.failedToUpdateDaisyComments(record.id)

                #update the status to pending fix
                #only if the status is Needs Attention, otherwise we don't have any further knowledge of what is going on
                nextStatus = 'NO CHANGE'
                success = True
                if status == "Needs Attention":
                    #ok to update status
                    success = setStatusAsPendingFix(
                        getDaisyNumber(os.path.basename(filePath)))
                    nextStatus = 'Pending Fix'

                if status in ['Being Made', 'Ordered']:
                    #ok to update status
                    success = setStatusAsPendingArchive(
                        getDaisyNumber(os.path.basename(filePath)))
                    nextStatus = 'Pending Archive'

                datastore.updateRecordWithDaisyStatus(nextStatus, record.id)
                if success == True:
                    #update status field in db and set to success
                    logging.debug('updating status successfully')
                    datastore.successfullyUpdatedDaisyStatus(record.id)
                else:
                    #put info in db that you couldn't update status in Daisy
                    logging.debug('not updating status successfully')
                    datastore.failedToUpdateDaisyStatus(record.id)

            else:
                sendAnalysisFailure(resultObject)

                if comments == None:
                    comments = ''

                #update Comments
                comments = stringMinusBWFAnalyzerInfo(comments)
                if comments != '':
                    comments += " "
                comments += BWFAnalyzerInfoForErrors(resultObject['errors'])
                success = setComments(
                    getDaisyNumber(os.path.basename(filePath)), comments)

                #update local datastore
                datastore.updateRecordWithComments(comments, record.id)

                if success == True:
                    #update comments field in db and set to success
                    logging.debug('updating comments successfully')
                    datastore.successfullyUpdatedDaisyComments(record.id)
                else:
                    #put info in db that you couldn't update Daisy
                    logging.debug('not updating comments successfully')
                    datastore.failedToUpdateDaisyComments(record.id)

                #update Status
                if status not in ['Being Made', 'Ordered', 'Pending Archive']:
                    #ok to update status
                    success = setStatusAsNeedsAttention(
                        getDaisyNumber(os.path.basename(filePath)))
                    datastore.updateRecordWithDaisyStatus(
                        'Needs Attention', record.id)
                    if success == True:
                        #update status field in db and set to success
                        logging.debug('updating status successfully')
                        datastore.successfullyUpdatedDaisyStatus(record.id)
                    else:
                        #put info in db that you couldn't update status in Daisy
                        logging.debug('not updating status successfully')
                        datastore.failedToUpdateDaisyStatus(record.id)
                else:
                    success = setStatusAsPendingArchive(
                        getDaisyNumber(os.path.basename(filePath)))
                    datastore.updateRecordWithDaisyStatus(
                        'Pending Archive', record.id)
                    if success == True:
                        #update status field in db and set to success
                        logging.debug('updating status successfully')
                        datastore.successfullyUpdatedDaisyStatus(record.id)
                    else:
                        #put info in db that you couldn't update status in Daisy
                        logging.debug('not updating status successfully')
                        datastore.failedToUpdateDaisyStatus(record.id)

            if datastore.updateRecordWithAnalysisData(result,
                                                      record.id) == False:
                info = 'Unable to save record %d %s' % (record.id, result)
                sendProcessFailureMessage({
                    'subject': 'Process Error Unable To Save Record',
                    'message': info
                })
                continue

            #update vendor info
            datastore.updateRecordWithVendor(vendor, record.id)

            dataTuple = retrieveDataForDANumber(os.path.basename(filePath),
                                                identifier)
            logging.debug('Data for {} After: {}'.format(
                os.path.basename(filePath), dataTuple))

            #now that we have saved the data, we are ready to move the file
            nextBox = configurationOptions().defaultPathStructure()['outBox']
            if resultObject['result'] != 'success':
                nextBox = configurationOptions().defaultPathStructure(
                )['failBox']
            nextBox = os.path.expanduser(nextBox)

            newPath = filePath

            try:
                newPath = pathAfterSafelyMovingFileToDestinationFolder(
                    filePath, nextBox)
            except Exception as e:
                logging.debug('Analyze File: Error moving file')
                info = '''This should not happen as pathAfterSafelyMovingFileToDestinationFolder should create a unique name that avoids any collisions, otherwise the file has been moved'''
                logging.debug(info)
                info = 'There was a problem moving the file into ' + nextBox + ' for: ' + os.path.basename(filePath)
                info = info + '\n' + 'This will require manual intervention as the occurrence is unique.'
                sendProcessFailureMessage({
                    'subject': 'Process Error Moving File',
                    'message': info
                })
                continue

            logging.debug(
                'Analyze File: preparing to move file to final path...')
            datastore.updateRecordAsCompleteWithFinalPath(newPath, record.id)

    except Exception as e:
        info = 'Exception in analyzeBWFFile: ' + str(e)
        logging.debug(info)
        sendProcessFailureMessage({'subject': 'Exception!', 'message': info})