Code example #1
File: db.py  Project: helio4k/dupReport
    def rollback(self, datespec):
        globs.log.write(globs.SEV_NOTICE, function='Database', action='rollback', msg='Rolling back database: spec={}'.format(datespec))

        # See if we're using a delta-based time spec (Issue #131)
        deltaParts = drdatetime.timeDeltaSpec(datespec)
        if deltaParts != False:
            today = datetime.now()
            globs.log.write(globs.SEV_DEBUG, function='Database', action='rollback', msg='Using delta timespec. Today={}'.format(today))
            for i in range(len(deltaParts)):
                tval = int(deltaParts[i][:-1])
                tspec = deltaParts[i][-1:]
                if tspec == 's': # Subtract seconds
                    today -= timedelta(seconds=tval)
                elif tspec == 'm':
                    today -= timedelta(minutes=tval)
                elif tspec == 'h':
                    today -= timedelta(hours=tval)
                elif tspec == 'd':
                    today -= timedelta(days=tval)
                elif tspec == 'w':
                    today -= timedelta(weeks=tval)
                globs.log.write(globs.SEV_DEBUG, function='Database', action='rollback', msg='Rolled back {}{}. Today now={}'.format(tval,tspec, today))
            newTimeStamp = today.timestamp()
        else:
            # Get timestamp for input date/time
            newTimeStamp = drdatetime.toTimestamp(datespec)

        # Delete all email records that happened after input datetime
        sqlStmt = 'DELETE FROM emails WHERE emailtimestamp > {}'.format(newTimeStamp)
        dbCursor = self.execSqlStmt(sqlStmt)

        # Delete all backup set records that happened after input datetime
        sqlStmt = 'SELECT source, destination FROM backupsets WHERE lastTimestamp > {}'.format(newTimeStamp)
        dbCursor = self.execSqlStmt(sqlStmt)
        setRows = dbCursor.fetchall()
        for source, destination in setRows:
            # Select largest timestamp from remaining data for that source/destination
            sqlStmt = 'select max(endTimeStamp), examinedFiles, sizeOfExaminedFiles, dupversion from emails where sourceComp = \'{}\' and destComp= \'{}\''.format(source, destination)
            dbCursor = self.execSqlStmt(sqlStmt)
            emailTimestamp, examinedFiles, sizeOfExaminedFiles, dupversion = dbCursor.fetchone()
            if emailTimestamp is None:
                # After the rollback, some srcdest pairs may have no corresponding entries in the database, meaning they were not seen until after the rollback period
                # We should remove these from the database, to return it to the state it was in before the rollback.
                globs.log.write(globs.SEV_NOTICE, function='Database', action='rollback', msg='Deleting {}{}{} from backupsets. Not seen until after rollback.'.format(source, globs.opts['srcdestdelimiter'], destination))
                sqlStmt = 'DELETE FROM backupsets WHERE source = \"{}\" AND destination = \"{}\"'.format(source, destination)
                dbCursor = self.execSqlStmt(sqlStmt)
            else:
                globs.log.write(globs.SEV_NOTICE, function='Database', action='rollback', msg='Resetting {}{}{} to {}'.format(source, globs.opts['srcdestdelimiter'], destination, drdatetime.fromTimestamp(emailTimestamp)))
                # Update backupset table to reflect rolled-back date
                sqlStmt = 'update backupsets set lastFileCount={}, lastFileSize={}, lastTimestamp={}, dupversion=\'{}\' where source = \'{}\' and destination = \'{}\''.format(examinedFiles, sizeOfExaminedFiles, emailTimestamp, dupversion, source, destination)
                dbCursor = self.execSqlStmt(sqlStmt)
            
        self.dbCommit()
        return None
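
A note on the delta form handled above: the branch subtracts each <number><unit> token (s, m, h, d, w) from the current time before converting to a timestamp. The sketch below reproduces just that arithmetic with the standard library; the comma-separated token format and the rollback_point helper are assumptions for illustration, not part of dupReport.

# Minimal sketch of the delta-timespec arithmetic, assuming the spec is a
# comma-separated list of <number><unit> tokens (s/m/h/d/w), which is what
# the loop over deltaParts above appears to consume.
from datetime import datetime, timedelta

UNITS = {'s': 'seconds', 'm': 'minutes', 'h': 'hours', 'd': 'days', 'w': 'weeks'}

def rollback_point(datespec, now=None):
    # Subtract each token in the spec from 'now' and return the resulting datetime
    now = now or datetime.now()
    for token in datespec.split(','):
        value, unit = int(token[:-1]), token[-1]
        now -= timedelta(**{UNITS[unit]: value})
    return now

print(rollback_point('1w,2d,3h'))  # roughly nine days and three hours ago
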
Code example #2
File: db.py  Project: jfparis/dupReport
    def rollback(self, datespec):
        globs.log.write(1, 'db.rollback({})'.format(datespec))

        # Get timestamp for input date/time
        newTimeStamp = drdatetime.toTimestamp(datespec)

        # Delete all email records that happened after input datetime
        sqlStmt = 'DELETE FROM emails WHERE emailtimestamp > {}'.format(
            newTimeStamp)
        dbCursor = self.execSqlStmt(sqlStmt)

        # Delete all backup set records that happened after input datetime
        sqlStmt = 'SELECT source, destination FROM backupsets WHERE lastTimestamp > {}'.format(
            newTimeStamp)
        dbCursor = self.execSqlStmt(sqlStmt)
        setRows = dbCursor.fetchall()
        for source, destination in setRows:
            # Select largest timestamp from remaining data for that source/destination
            sqlStmt = 'select max(endTimeStamp), examinedFiles, sizeOfExaminedFiles from emails where sourceComp = \'{}\' and destComp= \'{}\''.format(
                source, destination)
            dbCursor = self.execSqlStmt(sqlStmt)
            emailTimestamp, examinedFiles, sizeOfExaminedFiles = dbCursor.fetchone()
            if emailTimestamp is None:
                # After the rollback, some srcdest pairs may have no corresponding entries in the database, meaning they were not seen until after the rollback period
                # We should remove these from the database, to return it to the state it was in before the rollback.
                globs.log.write(
                    2,
                    'Deleting {}{}{} from backupsets. Not seen until after rollback.'
                    .format(source, globs.opts['srcdestdelimiter'],
                            destination))
                sqlStmt = 'DELETE FROM backupsets WHERE source = \"{}\" AND destination = \"{}\"'.format(
                    source, destination)
                dbCursor = self.execSqlStmt(sqlStmt)
            else:
                globs.log.write(
                    2, 'Resetting {}{}{} to {}'.format(
                        source, globs.opts['srcdestdelimiter'], destination,
                        drdatetime.fromTimestamp(emailTimestamp)))
                # Update backupset table to reflect rolled-back date
                sqlStmt = 'update backupsets set lastFileCount={}, lastFileSize={}, lastTimestamp={} where source = \'{}\' and destination = \'{}\''.format(
                    examinedFiles, sizeOfExaminedFiles, emailTimestamp, source,
                    destination)
                dbCursor = self.execSqlStmt(sqlStmt)

        self.dbCommit()
        return None
Code example #3
    def parenOrRaw(self, val, df=None, tf=None, tz=None):
        globs.log.write(
            1, 'dremail.parenOrRaw({}, {}, {}, {})'.format(val, df, tf, tz))

        retval = val  # Set default return as input value

        # Search for '(XXX)' in value
        pat = re.compile(r'\(.*\)')
        match = re.search(pat, val)
        if match:  # value found in parentheses
            retval = val[match.regs[0][0] + 1:match.regs[0][1] - 1]
        else:  # No parens found
            if df != None:  # Looking for date/time
                retval = drdatetime.toTimestamp(val,
                                                dfmt=df,
                                                tfmt=tf,
                                                utcOffset=tz)

        globs.log.write(1, 'retval=[{}]'.format(retval))
        return retval
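
parenOrRaw() returns the text inside '(…)' when it is present and otherwise hands the raw string to drdatetime.toTimestamp(). Below is a standalone sketch of just the extraction half, using a sample value like those quoted in the comments of the later processNextMessage example (e.g. 'SizeOfExaminedFiles: 44.42 GB (47695243956)'); the paren_or_raw name is illustrative.

# Minimal sketch of the parenthesis-extraction half of parenOrRaw(); the
# date/time fallback through drdatetime.toTimestamp() is omitted here.
import re

PAREN = re.compile(r'\((.*)\)')

def paren_or_raw(val):
    match = PAREN.search(val)
    return match.group(1) if match else val

print(paren_or_raw('44.42 GB (47695243956)'))  # -> '47695243956'
print(paren_or_raw('47695243956'))             # -> raw value, unchanged
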
Code example #4
File: dremail.py  Project: DocFraggle/dupReport
    def processMessage(self, msg):

        globs.log.write(1, 'EmailServer.processMessage()')

        # msgParts items:
        #    'messageID' - the message ID
        #    'subject' - the message subject
        #    'date'
        #    'time'
        #    'body' - Payload of message (i.e., not the Header)
        msgParts = {}

        # statusParts contains the individual lines from the Duplicati status emails
        statusParts = {}

        # dateParts contains the date & time strings for the SQL Query
        dateParts = {}

        # Check all the vital parts to see if they're there
        # If any of these are missing it means:
        #   (1) they are not from Duplicati, and
        #   (2) if we keep processing things will blow up down the line
        # To be safe, we'll just skip the message
        if msg['Message-Id'] is None or msg['Message-Id'] == '':
            globs.log.write(1, 'No message-Id. Abandoning processMessage()')
            return None, None
        if msg['Subject'] is None or msg['Subject'] == '':
            globs.log.write(1, 'No Subject. Abandoning processMessage()')
            return None, None
        if msg['Date'] is None or msg['Date'] == '':
            globs.log.write(1, 'No Date. Abandoning processMessage()')
            return None, None

        # get Subject
        decode = email.header.decode_header(msg['Subject'])[0]
        msgParts['subject'] = decode[0]
        if (type(msgParts['subject'])
                is not str):  # Email encoded as a byte object - See Issue #14
            msgParts['subject'] = msgParts['subject'].decode('utf-8')
        globs.log.write(3, 'Subject=[{}]'.format(msgParts['subject']))

        # See if it's a message of interest
        # Match subject field against 'subjectregex' parameter from RC file (Default: 'Duplicati Backup report for...')
        if re.search(globs.opts['subjectregex'], msgParts['subject']) == None:
            globs.log.write(
                1,
                'Message [{}] is not a Message of Interest. Skipping message.'.
                format(msg['Message-Id']))
            return None, None  # Not a message of Interest

        # Last chance to kick out bad messages
        # Get source & destination computers from email subject
        srcRegex = '{}{}'.format(globs.opts['srcregex'],
                                 re.escape(globs.opts['srcdestdelimiter']))
        destRegex = '{}{}'.format(re.escape(globs.opts['srcdestdelimiter']),
                                  globs.opts['destregex'])
        globs.log.write(
            3, 'srcregex=[{}]  destRegex=[{}]'.format(srcRegex, destRegex))
        # Does the Subject have a proper source/destination pair?
        partsSrc = re.search(srcRegex, msgParts['subject'])
        partsDest = re.search(destRegex, msgParts['subject'])
        if (partsSrc is None) or (partsDest is None):  # Correct subject but delimiter not found. Something is wrong.
            globs.log.write(
                2,
                'srcdestdelimiter [{}] not found in subject. Skipping message.'
                .format(globs.opts['srcdestdelimiter']))
            return None, None

        # Get Message ID
        globs.log.write(3, 'msg[Message-Id]=[{}]'.format(msg['Message-Id']))
        msgParts['messageId'] = email.header.decode_header(
            msg['Message-Id'])[0][0]
        globs.log.write(
            3, 'msgParts[messageId]=[{}]'.format(msgParts['messageId']))
        if (type(msgParts['messageId'])
                is not str):  # Email encoded as a byte object - See Issue #14
            msgParts['messageId'] = msgParts['messageId'].decode('utf-8')
            globs.log.write(
                3, 'Revised messageId=[{}]'.format(msgParts['messageId']))

        # See if the record is already in the database, meaning we've seen it before
        if globs.db.searchForMessage(
                msgParts['messageId']):  # Message is already in database
            # Mark the email as being seen in the database
            globs.db.execSqlStmt(
                'UPDATE emails SET dbSeen = 1 WHERE messageId = \"{}\"'.format(
                    msgParts['messageId']))
            globs.db.dbCommit()
            return None, None

        # Message not yet in database. Proceed.
        globs.log.write(
            1, 'Message ID [{}] does not exist. Adding to DB'.format(
                msgParts['messageId']))

        dTup = email.utils.parsedate_tz(msg['Date'])
        if dTup:
            # See if there's timezone info in the email header data. May be 'None' if no TZ info in the date line
            # TZ info is represented by seconds offset from UTC
            # We don't need to adjust the email date for TimeZone info now, since date line in email already accounts for TZ.
            # All other calls to toTimestamp() should include timezone info
            msgParts['timezone'] = dTup[9]

            # Set date into a parseable string
            # It doesn't matter what date/time format we pass in (as long as it's valid)
            # When it comes back out, it'll be parsed into the user-defined format from the .rc file
            # For now, we'll use YYYY/MM/DD HH:MM:SS
            xDate = '{:04d}/{:02d}/{:02d} {:02d}:{:02d}:{:02d}'.format(
                dTup[0], dTup[1], dTup[2], dTup[3], dTup[4], dTup[5])
            dtTimStmp = drdatetime.toTimestamp(
                xDate, dfmt='YYYY/MM/DD',
                tfmt='HH:MM:SS')  # Convert the string into a timestamp
            msgParts['emailTimestamp'] = dtTimStmp
            globs.log.write(
                3, 'emailDate=[{}]-[{}]'.format(
                    dtTimStmp, drdatetime.fromTimestamp(dtTimStmp)))

        msgParts['sourceComp'] = re.search(
            srcRegex, msgParts['subject']).group().split(
                globs.opts['srcdestdelimiter'])[0]
        msgParts['destComp'] = re.search(destRegex,
                                         msgParts['subject']).group().split(
                                             globs.opts['srcdestdelimiter'])[1]
        globs.log.write(3, 'sourceComp=[{}] destComp=[{}] emailTimestamp=[{}] subject=[{}]'.format(msgParts['sourceComp'], \
            msgParts['destComp'], msgParts['emailTimestamp'], msgParts['subject']))

        # Search for source/destination pair in database. Add if not already there
        retVal = globs.db.searchSrcDestPair(msgParts['sourceComp'],
                                            msgParts['destComp'])

        # Extract the body (payload) from the email
        msgParts['body'] = msg.get_payload()
        globs.log.write(3, 'Body=[{}]'.format(msgParts['body']))

        # Go through each element in lineParts{}, get the value from the body, and assign it to the corresponding element in statusParts{}
        for section, regex, flag, typ in lineParts:
            statusParts[section] = self.searchMessagePart(
                msgParts['body'], regex, flag, typ)  # Get the field parts

        # Adjust fields if not a clean run
        globs.log.write(
            3, "statusParts['failed']=[{}]".format(statusParts['failed']))
        if statusParts['failed'] == '':  # Looks like a good run
            # See if there's a timestamp (xxxx.xxxx) already in the EndTime field
            # If so, use that, else calculate timestamp
            pat = re.compile(r'\(.*\)')

            match = re.search(pat, statusParts['endTimeStr'])
            if match:  # Timestamp found in line
                dateParts['endTimestamp'] = statusParts['endTimeStr'][
                    match.regs[0][0] + 1:match.regs[0][1] - 1]
            else:  # No timestamp found. Calculate timestamp
                #dt, tm = drdatetime.getDateTimeFmt(msgParts['sourceComp'], msgParts['destComp'])
                dt, tm = globs.optionManager.getRcSectionDateTimeFmt(
                    msgParts['sourceComp'], msgParts['destComp'])
                dateParts['endTimestamp'] = drdatetime.toTimestamp(
                    statusParts['endTimeStr'],
                    dfmt=dt,
                    tfmt=tm,
                    utcOffset=msgParts['timezone'])

            match = re.search(pat, statusParts['beginTimeStr'])
            if match:  # Timestamp found in line
                dateParts['beginTimestamp'] = statusParts['beginTimeStr'][
                    match.regs[0][0] + 1:match.regs[0][1] - 1]
            else:  # No timestamp found. Calculate timestamp
                dateParts['beginTimestamp'] = drdatetime.toTimestamp(
                    statusParts['beginTimeStr'],
                    utcOffset=msgParts['timezone'])
        else:  # Something went wrong. Let's gather the details.
            statusParts['errors'] = statusParts['failed']
            statusParts['parsedResult'] = 'Failure'
            statusParts['warnings'] = statusParts['details']
            globs.log.write(2, 'Errors=[{}]'.format(statusParts['errors']))
            globs.log.write(2, 'Warnings=[{}]'.format(statusParts['warnings']))

            # Since the backup job report never ran, we'll use the email date/time as the report date/time
            dateParts['endTimestamp'] = msgParts['emailTimestamp']
            dateParts['beginTimestamp'] = msgParts['emailTimestamp']
            globs.log.write(
                3, 'Failure message. Replaced date/time: end=[{}]  begin=[{}]'.
                format(dateParts['endTimestamp'],
                       dateParts['beginTimestamp']))

        # Replace commas (,) with newlines (\n) in message fields. Sqlite really doesn't like commas in SQL statements!
        for part in ['messages', 'warnings', 'errors']:
            if statusParts[part] != '':
                statusParts[part] = statusParts[part].replace(',', '\n')

        # If we're just collecting and get a warning/error, we may need to send an email to the admin
        if (globs.opts['collect'] is
                True) and (globs.opts['warnoncollect'] is True) and (
                    (statusParts['warnings'] != '') or
                    (statusParts['errors'] != '')):
            errMsg = 'Duplicati error(s) on backup job\n'
            errMsg += 'Message ID {} on {}\n'.format(msgParts['messageId'],
                                                     msg['date'])
            errMsg += 'Subject: {}\n\n'.format(msgParts['subject'])
            if statusParts['warnings'] != '':
                errMsg += 'Warnings:' + statusParts['warnings'] + '\n\n'
            if statusParts['errors'] != '':
                errMsg += 'Errors:' + statusParts['errors'] + '\n\n'

            globs.outServer.sendErrorEmail(errMsg)

        globs.log.write(
            3, 'endTimeStamp=[{}] beginTimeStamp=[{}]'.format(
                drdatetime.fromTimestamp(dateParts['endTimestamp']),
                drdatetime.fromTimestamp(dateParts['beginTimestamp'])))

        sqlStmt = self.buildEmailSql(msgParts, statusParts, dateParts)
        globs.db.execSqlStmt(sqlStmt)
        globs.db.dbCommit()

        return msgParts, statusParts
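
The subject parsing above builds srcRegex and destRegex around the srcdestdelimiter option and then splits the matched text on that delimiter. Here is a minimal sketch of that step on a made-up subject line; the regex and delimiter values are assumptions, the real ones come from the .rc file.

# Minimal sketch of the source/destination extraction from the subject line.
# The opts values below are illustrative assumptions; the real ones come from
# the .rc options srcregex, destregex and srcdestdelimiter.
import re

opts = {'srcregex': r'\w+', 'destregex': r'\w+', 'srcdestdelimiter': '-'}
subject = 'Duplicati Backup report for Desktop-NAS'

srcRegex = '{}{}'.format(opts['srcregex'], re.escape(opts['srcdestdelimiter']))
destRegex = '{}{}'.format(re.escape(opts['srcdestdelimiter']), opts['destregex'])

sourceComp = re.search(srcRegex, subject).group().split(opts['srcdestdelimiter'])[0]
destComp = re.search(destRegex, subject).group().split(opts['srcdestdelimiter'])[1]
print(sourceComp, destComp)  # -> Desktop NAS
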
Code example #5
File: convert.py  Project: helio4k/dupReport
def doConvertDb(fromVersion):
    globs.log.write(
        globs.SEV_NOTICE,
        function='Convert',
        action='doConvertDb',
        msg='Converting database from version {} to version {}.{}.{}'.format(
            fromVersion, globs.dbVersion[0], globs.dbVersion[1],
            globs.dbVersion[2]))

    # Database version history
    # 1.0.1 - Convert from character-based date/time to unix timestamp format.
    # 1.0.2 - Calculate & store duration of backup
    # 1.0.3 - Store new logdata field and Duplicati version numbers (per backup)
    # 3.0.0 - changes to report table for dupReport 3.0.0
    # 3.0.1 - Add bytesUploaded & bytesDownloaded fields to email & reports

    # Update DB version number
    if fromVersion < 101:  # Upgrade from DB version 100 (original format).
        globs.log.write(
            globs.SEV_NOTICE,
            function='Convert',
            action='doConvertDb',
            msg='Converting database from version {} to version 101'.format(
                fromVersion))
        sqlStmt = "create table report (source varchar(20), destination varchar(20), timestamp real, duration real, examinedFiles int, examinedFilesDelta int, \
        sizeOfExaminedFiles int, fileSizeDelta int, addedFiles int, deletedFiles int, modifiedFiles int, filesWithError int, parsedResult varchar(30), messages varchar(255), \
        warnings varchar(255), errors varchar(255), failedMsg varchar(100), dupversion varchar(100), logdata varchar(255))"

        globs.db.execSqlStmt(sqlStmt)

        # Clean up bad data in emails table left from older versions. Not sure how this happened, but it really screws things up
        globs.db.execSqlStmt(
            "DELETE FROM emails WHERE beginTime > '23:59:59' or endTime > '23:59:59'"
        )

        # In SQLite you can't just drop and add a column (of course :-(
        # You need to recreate the table with the new column & copy the data
        globs.db.execSqlStmt("ALTER TABLE emails RENAME TO _emails_old_")
        globs.db.execSqlStmt(
            "CREATE TABLE emails (messageId varchar(50), sourceComp varchar(50), destComp varchar(50), emailTimestamp real, deletedFiles int, deletedFolders int, modifiedFiles int, \
            examinedFiles int, openedFiles int, addedFiles int, sizeOfModifiedFiles int, sizeOfAddedFiles int, sizeOfExaminedFiles int, sizeOfOpenedFiles int, notProcessedFiles int, addedFolders int, \
            tooLargeFiles int, filesWithError int, modifiedFolders int, modifiedSymlinks int, addedSymlinks int, deletedSymlinks int, partialBackup varchar(30), dryRun varchar(30), mainOperation varchar(30), \
            parsedResult varchar(30), verboseOutput varchar(30), verboseErrors varchar(30), endTimestamp real, beginTimestamp real, duration real, messages varchar(255), warnings varchar(255), errors varchar(255), \
            failedMsg varchar(100), dbSeen int, dupversion varchar(100), logdata varchar(255))"
        )
        globs.db.execSqlStmt(
            "INSERT INTO emails (messageId, sourceComp, destComp, deletedFiles, deletedFolders, modifiedFiles, examinedFiles, openedFiles, addedFiles, sizeOfModifiedFiles, sizeOfAddedFiles, \
            sizeOfExaminedFiles, sizeOfOpenedFiles, notProcessedFiles, addedFolders, tooLargeFiles, filesWithError, modifiedFolders, modifiedSymlinks, addedSymlinks, deletedSymlinks, partialBackup, dryRun, mainOperation, \
            parsedResult, verboseOutput, verboseErrors, messages, warnings, errors, failedMsg) SELECT messageId, sourceComp, destComp, deletedFiles, deletedFolders, \
            modifiedFiles, examinedFiles, openedFiles, addedFiles, sizeOfModifiedFiles, sizeOfAddedFiles, sizeOfExaminedFiles, sizeOfOpenedFiles, notProcessedFiles, addedFolders, tooLargeFiles, filesWithError, modifiedFolders, \
            modifiedSymlinks, addedSymlinks, deletedSymlinks, partialBackup, dryRun, mainOperation, parsedResult, verboseOutput, verboseErrors, messages, warnings, errors, failedMsg FROM _emails_old_"
        )

        # Loop through emails table to update old char-based times to timestamps
        dbCursor = globs.db.execSqlStmt(
            "SELECT messageId, emailDate, emailTime, endDate, endTime, beginDate, beginTime FROM _emails_old_"
        )
        emailRows = dbCursor.fetchall()
        for messageId, emailDate, emailTime, endDate, endTime, beginDate, beginTime in emailRows:
            # Create email timestamp
            dateStr = '{} {}'.format(emailDate, emailTime)
            emailTimestamp = drdatetime.toTimestamp(dateStr, 'YYYY-MM-DD',
                                                    'HH:MM:SS')

            # Create endTime timestamp
            dateStr = '{} {}'.format(endDate, endTime)
            endTimestamp = drdatetime.toTimestamp(dateStr, 'YYYY/MM/DD',
                                                  'HH:MM:SS')

            # Create beginTime timestamp
            dateStr = '{} {}'.format(beginDate, beginTime)
            beginTimestamp = drdatetime.toTimestamp(dateStr, 'YYYY/MM/DD',
                                                    'HH:MM:SS')

            # Update emails table with new data
            # Compute duration up front; the log call below references it even when the row can't be updated
            duration = (endTimestamp - beginTimestamp) if (endTimestamp is not None and beginTimestamp is not None) else None
            if duration is not None:
                sqlStmt = "UPDATE emails SET emailTimestamp = {}, endTimestamp = {}, beginTimestamp = {}, duration = {} WHERE messageId = \'{}\'".format(
                    emailTimestamp, endTimestamp, beginTimestamp,
                    duration, messageId)
                globs.log.write(globs.SEV_NOTICE,
                                function='Convert',
                                action='doConvertDb',
                                msg=sqlStmt)
                globs.db.execSqlStmt(sqlStmt)

            globs.log.write(globs.SEV_NOTICE, function='Convert', action='doConvertDb', msg='messageId:{}  emailDate={} emailTime={} emailTimestamp={} endDate={} endTime={} endTimestamp={} beginDate={} beginTime={} beginTimestamp={} duration={}'.format(messageId, emailDate, emailTime, emailTimestamp,\
                endDate, endTime, endTimestamp, beginDate, beginTime, beginTimestamp, duration))
        globs.db.execSqlStmt("DROP TABLE _emails_old_")

        # Convert date/time to timestamps in backupsets table
        globs.db.execSqlStmt(
            "ALTER TABLE backupsets ADD COLUMN lastTimestamp real")
        dbCursor = globs.db.execSqlStmt(
            "SELECT source, destination, lastDate, lastTime from backupsets")
        setRows = dbCursor.fetchall()
        for source, destination, lastDate, lastTime in setRows:
            dateStr = '{} {}'.format(lastDate, lastTime)
            lastTimestamp = drdatetime.toTimestamp(dateStr, 'YYYY/MM/DD',
                                                   'HH:MM:SS')

            sqlStmt = "UPDATE backupsets SET lastTimestamp = {} WHERE source = \'{}\' AND destination = \'{}\'".format(
                lastTimestamp, source, destination)
            globs.db.execSqlStmt(sqlStmt)
            globs.log.write(
                globs.SEV_NOTICE,
                function='Convert',
                action='doConvertDb',
                msg=
                'Updating backupsets: Source={} destination={} lastDate={} lastTime={} lastTimestamp={}'
                .format(source, destination, lastDate, lastTime,
                        lastTimestamp))
        doConvertDb(101)
    elif fromVersion < 102:  # Upgrade from version 101
        globs.log.write(
            globs.SEV_NOTICE,
            function='Convert',
            action='doConvertDb',
            msg='Converting database from version {} to version 102'.format(
                fromVersion))
        globs.db.execSqlStmt("ALTER TABLE report ADD COLUMN duration real")
        globs.db.execSqlStmt(
            "ALTER TABLE report ADD COLUMN dupversion varchar(100)")
        globs.db.execSqlStmt(
            "ALTER TABLE report ADD COLUMN logdata varchar(255)")
        globs.db.execSqlStmt("UPDATE report SET duration = 0")
        globs.db.execSqlStmt("UPDATE report SET dupversion = ''")
        globs.db.execSqlStmt("UPDATE report SET logdata = ''")

        # Need to change duration column from varchar to real
        # In SQLite you can't just drop and add a column (of course :-(
        # You need to recreate the table with the new column & copy the data
        globs.db.execSqlStmt("ALTER TABLE emails RENAME TO _emails_old_")
        globs.db.execSqlStmt(
            "CREATE TABLE emails (messageId varchar(50), sourceComp varchar(50), destComp varchar(50), emailTimestamp real, deletedFiles int, deletedFolders int, modifiedFiles int, \
            examinedFiles int, openedFiles int, addedFiles int, sizeOfModifiedFiles int, sizeOfAddedFiles int, sizeOfExaminedFiles int, sizeOfOpenedFiles int, notProcessedFiles int, addedFolders int, \
            tooLargeFiles int, filesWithError int, modifiedFolders int, modifiedSymlinks int, addedSymlinks int, deletedSymlinks int, partialBackup varchar(30), dryRun varchar(30), mainOperation varchar(30), \
            parsedResult varchar(30), verboseOutput varchar(30), verboseErrors varchar(30), endTimestamp real, beginTimestamp real, duration real, messages varchar(255), warnings varchar(255), errors varchar(255), \
            failedMsg varchar(100), dbSeen int, dupversion varchar(100), logdata varchar(255))"
        )
        globs.db.execSqlStmt(
            "INSERT INTO emails (messageId, sourceComp, destComp, emailTimestamp, deletedFiles, deletedFolders, modifiedFiles, examinedFiles, openedFiles, addedFiles, sizeOfModifiedFiles, sizeOfAddedFiles, \
            sizeOfExaminedFiles, sizeOfOpenedFiles, notProcessedFiles, addedFolders, tooLargeFiles, filesWithError, modifiedFolders, modifiedSymlinks, addedSymlinks, deletedSymlinks, partialBackup, dryRun, mainOperation, \
            parsedResult, verboseOutput, verboseErrors, endTimestamp, beginTimestamp, messages, warnings, errors, failedMsg, dbSeen) SELECT messageId, sourceComp, destComp, emailTimestamp, deletedFiles, deletedFolders, \
            modifiedFiles, examinedFiles, openedFiles, addedFiles, sizeOfModifiedFiles, sizeOfAddedFiles, sizeOfExaminedFiles, sizeOfOpenedFiles, notProcessedFiles, addedFolders, tooLargeFiles, filesWithError, modifiedFolders, \
            modifiedSymlinks, addedSymlinks, deletedSymlinks, partialBackup, dryRun, mainOperation, parsedResult, verboseOutput, verboseErrors, endTimestamp, beginTimestamp, messages, warnings, errors, failedMsg, dbSeen FROM _emails_old_"
        )

        # Loop through new emails table and set duration field
        dbCursor = globs.db.execSqlStmt(
            "SELECT messageId, beginTimeStamp, endTimeStamp FROM emails")
        emailRows = dbCursor.fetchall()
        for messageId, beginTimeStamp, endTimeStamp in emailRows:
            # Update emails table with new data
            if endTimeStamp is not None and beginTimeStamp is not None:
                sqlStmt = "UPDATE emails SET duration = {} WHERE messageId = \'{}\'".format(
                    (endTimeStamp - beginTimeStamp), messageId)
                globs.db.execSqlStmt(sqlStmt)
        globs.db.execSqlStmt("DROP TABLE _emails_old_")
        doConvertDb(102)
    elif fromVersion < 103:  # Upgrade from version 102
        globs.log.write(
            globs.SEV_NOTICE,
            function='Convert',
            action='doConvertDb',
            msg='Converting database from version {} to version 103'.format(
                fromVersion))
        # Add dupversion & logdata fields to emails table
        globs.db.execSqlStmt(
            "ALTER TABLE emails ADD COLUMN dupversion varchar(100)")
        globs.db.execSqlStmt(
            "ALTER TABLE emails ADD COLUMN logdata varchar(255)")
        globs.db.execSqlStmt("UPDATE emails SET dupversion = ''")
        globs.db.execSqlStmt("UPDATE emails SET logdata = ''")

        # Add dupversion & logdata fields to report table
        globs.db.execSqlStmt(
            "ALTER TABLE report ADD COLUMN dupversion varchar(100)")
        globs.db.execSqlStmt(
            "ALTER TABLE report ADD COLUMN logdata varchar(255)")
        globs.db.execSqlStmt("UPDATE report SET dupversion = ''")
        globs.db.execSqlStmt("UPDATE report SET logdata = ''")
        doConvertDb(103)
    elif fromVersion < 300:  # Upgrade from version 103
        globs.log.write(
            globs.SEV_NOTICE,
            function='Convert',
            action='doConvertDb',
            msg='Converting database from version {} to version 300'.format(
                fromVersion))
        # Add date & time fields to reports table
        globs.db.execSqlStmt("ALTER TABLE report ADD COLUMN date real")
        globs.db.execSqlStmt("ALTER TABLE report ADD COLUMN time real")
        globs.db.execSqlStmt(
            "ALTER TABLE backupsets ADD COLUMN dupversion varchar(100)")

        # Insert last dupversion for all existing backupset rows
        globs.db.execSqlStmt(
            "UPDATE backupsets SET dupversion = (SELECT emails.dupversion FROM emails WHERE backupsets.source = emails.sourceComp and backupsets.destination = emails.destComp)"
        )
        doConvertDb(300)
        pass
    elif fromVersion < 301:  # Upgrade from version 300
        globs.log.write(
            globs.SEV_NOTICE,
            function='Convert',
            action='doConvertDb',
            msg='Converting database from version {} to version 301'.format(
                fromVersion))
        # Add bytesUploaded & bytesDownloaded fields to the emails and report tables
        globs.db.execSqlStmt("ALTER TABLE emails ADD COLUMN bytesUploaded int")
        globs.db.execSqlStmt(
            "ALTER TABLE emails ADD COLUMN bytesDownloaded int")
        globs.db.execSqlStmt("ALTER TABLE report ADD COLUMN bytesUploaded int")
        globs.db.execSqlStmt(
            "ALTER TABLE report ADD COLUMN bytesDownloaded int")

        # Set default values for bytes Up/Downloaded
        globs.db.execSqlStmt(
            "UPDATE emails SET bytesUploaded=0, bytesDownloaded=0")

        doConvertDb(301)
        pass
    else:
        pass

    return None
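
The conversion leans on the rename/recreate/copy/drop pattern because, as the comments note, SQLite cannot drop or retype a column in place. Below is a compact sketch of the same pattern on a toy table; the table and column names are illustrative only.

# Minimal sketch of the SQLite rename/recreate/copy/drop pattern on a toy table.
import sqlite3

conn = sqlite3.connect(':memory:')
conn.execute("CREATE TABLE emails (messageId TEXT, duration TEXT)")
conn.execute("INSERT INTO emails VALUES ('id-1', '42')")

conn.execute("ALTER TABLE emails RENAME TO _emails_old_")
conn.execute("CREATE TABLE emails (messageId TEXT, duration REAL)")  # column retyped
conn.execute("INSERT INTO emails (messageId, duration) "
             "SELECT messageId, duration FROM _emails_old_")
conn.execute("DROP TABLE _emails_old_")
conn.commit()
print(conn.execute("SELECT * FROM emails").fetchall())  # -> [('id-1', 42.0)]
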
Code example #6
File: convert.py  Project: DocFraggle/dupReport
def convertDb(fromVersion):
    globs.log.write(1, 'Converting database to version 1.0.1')

    # Update DB version number
    globs.db.execSqlStmt(
        "UPDATE version SET major = 1, minor = 0, subminor = 1 WHERE desc = 'database'"
    )

    sqlStmt = "create table report (source varchar(20), destination varchar(20), timestamp real, examinedFiles int, examinedFilesDelta int, \
    sizeOfExaminedFiles int, fileSizeDelta int, addedFiles int, deletedFiles int, modifiedFiles int, filesWithError int, parsedResult varchar(30), messages varchar(255), \
    warnings varchar(255), errors varchar(255), failedMsg varchar(100))"

    globs.db.execSqlStmt(sqlStmt)

    # Add timestamp fields to tables
    globs.db.execSqlStmt("ALTER TABLE emails ADD COLUMN emailTimestamp real")
    globs.db.execSqlStmt("ALTER TABLE emails ADD COLUMN endTimestamp real")
    globs.db.execSqlStmt("ALTER TABLE emails ADD COLUMN beginTimestamp real")
    globs.db.execSqlStmt("ALTER TABLE emails ADD COLUMN dbSeen int")

    # Clean up bad data left from older versions. Not sure how this happened, but it really screws things up
    globs.db.execSqlStmt(
        "DELETE FROM emails WHERE beginTime > '23:59:59' or endTime > '23:59:59'"
    )

    # Loop through emails table
    dbCursor = globs.db.execSqlStmt(
        "SELECT messageId, emailDate, emailTime, endDate, endTime, beginDate, beginTime FROM emails"
    )
    emailRows = dbCursor.fetchall()
    for messageId, emailDate, emailTime, endDate, endTime, beginDate, beginTime in emailRows:
        # Create email timestamp
        dateStr = '{} {}'.format(emailDate, emailTime)
        emailTimestamp = drdatetime.toTimestamp(dateStr, 'YYYY-MM-DD',
                                                'HH:MM:SS')

        # Create endTime timestamp
        dateStr = '{} {}'.format(endDate, endTime)
        endTimestamp = drdatetime.toTimestamp(dateStr, 'YYYY/MM/DD',
                                              'HH:MM:SS')

        # Create beginTime timestamp
        dateStr = '{} {}'.format(beginDate, beginTime)
        beginTimestamp = drdatetime.toTimestamp(dateStr, 'YYYY/MM/DD',
                                                'HH:MM:SS')

        # Update emails table with new data
        if endTimestamp is not None and beginTimestamp is not None:
            sqlStmt = "UPDATE emails SET emailTimestamp = {}, endTimestamp = {}, beginTimestamp = {} WHERE messageId = \'{}\'".format(
                emailTimestamp, endTimestamp, beginTimestamp, messageId)
            globs.log.write(1, sqlStmt)
            globs.db.execSqlStmt(sqlStmt)

        globs.log.write(1, 'messageId:{}  emailDate={} emailTime={} emailTimestamp={} endDate={} endTime={} endTimestamp={} beginDate={} beginTime={} beginTimestamp={}'.format(messageId, emailDate, emailTime, emailTimestamp,\
            endDate, endTime, endTimestamp, beginDate, beginTime, beginTimestamp))

    globs.db.execSqlStmt(
        "ALTER TABLE backupsets ADD COLUMN lastTimestamp real")
    dbCursor = globs.db.execSqlStmt(
        "SELECT source, destination, lastDate, lastTime from backupsets")
    setRows = dbCursor.fetchall()
    for source, destination, lastDate, lastTime in setRows:
        dateStr = '{} {}'.format(lastDate, lastTime)
        lastTimestamp = drdatetime.toTimestamp(dateStr, 'YYYY/MM/DD',
                                               'HH:MM:SS')

        sqlStmt = "UPDATE backupsets SET lastTimestamp = {} WHERE source = \'{}\' AND destination = \'{}\'".format(
            lastTimestamp, source, destination)
        globs.db.execSqlStmt(sqlStmt)
        globs.log.write(
            1,
            'Source={}  destination={} lastDate={} lastTime={} lastTimestamp={}'
            .format(source, destination, lastDate, lastTime, lastTimestamp))

    globs.db.dbCommit()
    return None
Code example #7
File: options.py  Project: DocFraggle/dupReport
    def readRcOptions(self):
        restart = False

        globs.log.write(1, 'options.readRcOptions({})'.format(self.rcFileName))
    
        # Extract sections and options from .rc file
        # Only need [main], [incoming], and [outgoing] sections
        # [report] and [headings] sections will be parsed when report object is initiated (report.py)
        for section in ('main', 'incoming', 'outgoing'):
            for name, value in self.parser.items(section):
                self.options[name] = value

        # Fix some of the datatypes
        self.options['verbose'] = int(self.options['verbose'])  # integer
        self.options['inport'] = int(self.options['inport'])    # integer
        self.options['outport'] = int(self.options['outport'])  # integer
        self.options['logappend'] = self.options['logappend'].lower() == 'true'   # boolean
        self.options['warnoncollect'] = self.options['warnoncollect'].lower() == 'true'   # boolean
        self.options['applyutcoffset'] = self.options['applyutcoffset'].lower() == 'true'   # boolean
        self.options['show24hourtime'] = self.options['show24hourtime'].lower() == 'true'   # boolean
        self.options['purgedb'] = self.options['purgedb'].lower() == 'true'   # boolean

        # Check for valid date format
        if self.options['dateformat'] not in drdatetime.dtFmtDefs:
            globs.log.err('RC file error: Invalid date format: [{}]'.format(self.options['dateformat']))
            restart = True

        # Check for valid time format
        if self.options['timeformat'] not in drdatetime.dtFmtDefs:
            globs.log.err('RC file error: Invalid time format [{}]'.format(self.options['timeformat']))
            restart = True

        # Now, override with command line options
        #
        # Database Path - default stored in globs.dbName
        if self.cmdLineArgs.dbpath != None:  # dbPath specified on command line
            self.options['dbpath'] = '{}/{}'.format(self.processPath(self.cmdLineArgs.dbpath), globs.dbName) 
        elif self.options['dbpath'] == '':  # No command line & not specified in RC file
            self.options['dbpath'] = '{}/{}'.format(globs.progPath, globs.dbName)
        else:  # Path specified in rc file. Add dbname for full path
            self.options['dbpath'] = '{}/{}'.format(self.processPath(self.options['dbpath']), globs.dbName)

        # Log file path
        if self.cmdLineArgs.logpath != None:  #logPath specified on command line
            self.options['logpath'] = '{}/{}'.format(self.processPath(self.cmdLineArgs.logpath), globs.logName)
        elif self.options['logpath'] == '':  # No command line & not specified in RC file
            self.options['logpath'] = '{}/{}'.format(globs.progPath, globs.logName)
        else:  # Path specified in rc file. Add logname for full path
            self.options['logpath'] = '{}/{}'.format(self.processPath(self.options['logpath']), globs.logName)

        self.options['version'] = self.cmdLineArgs.Version
        self.options['collect'] = self.cmdLineArgs.collect
        self.options['report'] = self.cmdLineArgs.report
        self.options['nomail'] = self.cmdLineArgs.nomail
        self.options['remove'] = self.cmdLineArgs.remove
        self.options['stopbackupwarn'] = self.cmdLineArgs.stopbackupwarn

        # Check rollback specifications
        self.options['rollback'] = self.cmdLineArgs.rollback
        self.options['rollbackx'] = self.cmdLineArgs.rollbackx
        if self.options['rollback']: # Roll back and continue
            ret = drdatetime.toTimestamp(self.options['rollback'], self.options['dateformat'], self.options['timeformat'])
            if not ret:
                globs.log.err('Invalid rollback date specification: {}.'.format(self.options['rollback']))
                restart = True
        elif self.options['rollbackx']:  # Roll back and stop
            ret = drdatetime.toTimestamp(self.options['rollbackx'], self.options['dateformat'], self.options['timeformat'])
            if not ret:
                globs.log.err('Invalid rollback date specification: {}.'.format(self.options['rollbackx']))
                restart = True

        if self.cmdLineArgs.verbose != None:
            self.options['verbose'] = self.cmdLineArgs.verbose
        if self.cmdLineArgs.purgedb == True:
            self.options['purgedb'] = self.cmdLineArgs.purgedb
        self.options['logappend'] = self.cmdLineArgs.append
        self.options['initdb'] = self.cmdLineArgs.initdb
        
        # Store output files for later use
        self.options['file'] = self.cmdLineArgs.file
        if self.options['file']:
            globs.ofileList = self.options['file']

        globs.log.write(3, 'Parsed config options=[{}]'.format(self.options))
        globs.log.write(1, 'Need to restart? {}'.format(restart))

        return restart
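
readRcOptions() flattens the [main], [incoming] and [outgoing] sections of the .rc file into a single options dict and then coerces the numeric and boolean entries by hand. Here is a minimal configparser sketch of that flattening; the option values in the sample text are illustrative.

# Minimal sketch of flattening .rc sections into one dict, as readRcOptions()
# does via self.parser.items(section). Sample option values are illustrative.
import configparser

rc_text = """
[main]
dateformat = YYYY/MM/DD
verbose = 1

[incoming]
inport = 995

[outgoing]
outport = 587
"""

parser = configparser.ConfigParser()
parser.read_string(rc_text)

options = {}
for section in ('main', 'incoming', 'outgoing'):
    for name, value in parser.items(section):
        options[name] = value

options['verbose'] = int(options['verbose'])   # fix datatypes afterwards
options['inport'] = int(options['inport'])
options['outport'] = int(options['outport'])
print(options)
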
Code example #8
def runReport(startTime):
    globs.log.write(1, 'rpt_bydate()')

    # Get header and column info
    nFields, fldDefs, reportOpts, rptCols, rptTits = report.initReportVars()

    # Print the report title
    msgHtml, msgText, msgCsv = report.rptTop(reportOpts, nFields)

    # Remove columns we don't need for this report
    # These are already part of the report logic processing & subheaders
    # We won't need to loop through them for the report fields
    rptCols.remove('date')

    # Print column titles if not printing for each section
    if reportOpts['repeatheaders'] is False:
        msgHtml, msgText, msgCsv = report.rptPrintTitles(
            msgHtml, msgText, msgCsv, rptCols)

    # Get earliest & latest timestamps in the report table
    dbCursor = globs.db.execSqlStmt("SELECT min(timestamp) FROM report"
                                    )  # Smallest timestamp in the report table
    currentTs = dbCursor.fetchone()[0]
    dbCursor = globs.db.execSqlStmt("SELECT max(timestamp) FROM report"
                                    )  # Largest timestamp in the report table
    highestTs = dbCursor.fetchone()[0]

    while currentTs <= highestTs:
        currentDate, currentTime = drdatetime.fromTimestamp(
            currentTs,
            dfmt=globs.opts['dateformat'],
            tfmt=globs.opts['timeformat'])
        currentDateBeginTs = drdatetime.toTimestamp(
            currentDate + ' 00:00:00',
            dfmt=globs.opts['dateformat'],
            tfmt=globs.opts['timeformat']
        )  # Convert the string into a timestamp
        currentDateEndTs = drdatetime.toTimestamp(
            currentDate + ' 23:59:59',
            dfmt=globs.opts['dateformat'],
            tfmt=globs.opts['timeformat']
        )  # Convert the string into a timestamp

        sqlStmt = "SELECT source, destination, timestamp, examinedFiles, examinedFilesDelta, sizeOfExaminedFiles, fileSizeDelta, \
            addedFiles, deletedFiles, modifiedFiles, filesWithError, parsedResult, messages, warnings, errors \
            FROM report WHERE timestamp >= {} AND timestamp <= {}".format(
            currentDateBeginTs, currentDateEndTs)
        if reportOpts['sortby'] == 'source':
            sqlStmt += ' ORDER BY source, destination'
        elif reportOpts['sortby'] == 'destination':
            sqlStmt += ' ORDER BY destination, source'
        else:
            sqlStmt += ' ORDER BY timestamp'

        dbCursor = globs.db.execSqlStmt(sqlStmt)
        reportRows = dbCursor.fetchall()
        globs.log.write(3, 'reportRows=[{}]'.format(reportRows))

        if len(reportRows) != 0:
            subHead = globs.optionManager.getRcOption('report', 'subheading')
            if subHead is not None:
                # Substitute subheading keywords
                subHead = subHead.replace('#DATE#', currentDate)
            if subHead is None or subHead == '':
                msgHtml += '<tr><td colspan="{}" align="center" bgcolor="{}"><b>{}:</b> {}</td></tr>\n'.format(
                    nFields, reportOpts['subheadbg'], rptTits['date'],
                    currentDate)
                msgText += '***** {}: {} *****\n'.format(
                    rptTits['date'], currentDate)
                msgCsv += '\"***** {}: {} *****\"\n'.format(
                    rptTits['date'], currentDate)
            else:
                msgHtml += '<tr><td colspan="{}" align="center" bgcolor="{}">{}</td></tr>\n'.format(
                    nFields, reportOpts['subheadbg'], subHead)
                msgText += '***** {} *****\n'.format(subHead)
                msgCsv += '\"***** {} *****\"\n'.format(subHead)

            # Print column titles if printing for each section
            if reportOpts['repeatheaders'] is True:
                msgHtml, msgText, msgCsv = report.rptPrintTitles(
                    msgHtml, msgText, msgCsv, rptCols)


        for source, destination, timestamp, examinedFiles, examinedFilesDelta, sizeOfExaminedFiles, fileSizeDelta, \
            addedFiles, deletedFiles, modifiedFiles, filesWithError, parsedResult, messages, \
            warnings, errors in reportRows:

            # Get date and time from timestamp
            dateStr, timeStr = drdatetime.fromTimestamp(timestamp)

            # Print report fields
            # Each field takes up one column/cell in the table
            msgHtml += '<tr>'

            # The full list of possible fields in the report. printField() below will skip a field if it is removed in the .rc file.
            titles = [
                'source', 'destination', 'time', 'files', 'filesplusminus',
                'size', 'sizeplusminus', 'added', 'deleted', 'modified',
                'errors', 'result'
            ]
            fields = [
                source, destination, timeStr, examinedFiles,
                examinedFilesDelta, sizeOfExaminedFiles, fileSizeDelta,
                addedFiles, deletedFiles, modifiedFiles, filesWithError,
                parsedResult
            ]

            for ttl, fld in zip(titles, fields):
                msgHtml += report.printField(ttl, fld, 'html')
                msgText += report.printField(ttl, fld, 'text')
                msgCsv += report.printField(ttl, fld, 'csv')

            msgHtml += '</tr>\n'
            msgText += '\n'
            msgCsv += '\n'

            fields = [messages, warnings, errors]
            options = ['displaymessages', 'displaywarnings', 'displayerrors']
            backgrounds = ['jobmessagebg', 'jobwarningbg', 'joberrorbg']
            titles = ['jobmessages', 'jobwarnings', 'joberrors']
            # Print message/warning/error fields
            # Each of these spans all the table columns
            for fld, opt, bg, tit in zip(fields, options, backgrounds, titles):
                if ((fld != '') and (reportOpts[opt] == True)):
                    msgHtml += '<tr><td colspan="{}" align="center" bgcolor="{}"><details><summary>{}</summary>{}</details></td></tr>\n'.format(
                        nFields, reportOpts[bg], rptTits[tit], fld)
                    msgText += '{}: {}\n'.format(rptTits[tit], fld)
                    msgCsv += '\"{}: {}\"\n'.format(rptTits[tit], fld)

        # Move current timestamp ahead 1 second
        currentTs = currentDateEndTs + 1

    # Now see which systems didn't report in
    dbCursor = globs.db.execSqlStmt(
        "SELECT source, destination, lastTimestamp FROM backupsets ORDER BY source, destination"
    )
    setRows = dbCursor.fetchall()
    globs.log.write(3, 'setRows=[{}]'.format(setRows))

    # Flag to let us know if we need to print a header for missing backupsets
    hdrFlag = 0
    for source, destination, lastTimestamp in setRows:
        dbCursor = globs.db.execSqlStmt(
            "SELECT count(*) FROM report WHERE source = \'{}\' AND destination = \'{}\'"
            .format(source, destination))
        seenRows = dbCursor.fetchone()[0]
        globs.log.write(3, 'seenRows=[{}]'.format(seenRows))
        if seenRows == 0:  # Didn't get any rows for source/Destination pair. Add to report
            if hdrFlag == 0:
                msgHtml += '<tr><td colspan="{}" align="center" bgcolor="{}"><b>Missing Backup Sets</b></td></tr>\n'.format(
                    nFields, reportOpts['subheadbg'])
                msgText += 'Missing Backup Sets\n'
                msgCsv += '\"Missing Backup Sets\"\n'
                hdrFlag = 1

            diff = drdatetime.daysSince(lastTimestamp)
            lastDateStr, lastTimeStr = drdatetime.fromTimestamp(lastTimestamp)
            msgHtml += '<tr><td colspan="{}" align="center" bgcolor="{}">{} to {}: <i>No new activity. Last activity on {} at {} ({} days ago)</i></td></tr>\n'.format(
                nFields, reportOpts['noactivitybg'], source, destination,
                lastDateStr, lastTimeStr, diff)
            msgText += '{} to {}: No new activity. Last activity on {} at {} ({} days ago)\n'.format(
                source, destination, lastDateStr, lastTimeStr, diff)
            msgCsv += '\"{} to {}: No new activity. Last activity on {} at {} ({} days ago)\"\n'.format(
                source, destination, lastDateStr, lastTimeStr, diff)

    # Add report footer
    msgHtml, msgText, msgCsv = report.rptBottom(msgHtml, msgText, msgCsv,
                                                startTime, nFields)

    # Return text & HTML messages to main program. It can decide which one it wants to use.
    return msgHtml, msgText, msgCsv
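
The report loop walks one calendar day at a time: the current timestamp is turned back into a date, bracketed with 00:00:00 and 23:59:59 timestamps for the SQL range query, and then advanced to end-of-day plus one second. Below is a sketch of that bucketing with plain datetime standing in for drdatetime; the dates are made up.

# Minimal sketch of the day-by-day bucketing loop, with plain datetime standing
# in for drdatetime.fromTimestamp()/toTimestamp(). Dates are made up.
from datetime import datetime, timedelta

def day_buckets(lowest_ts, highest_ts):
    # Yield (begin_ts, end_ts) pairs covering each day between the two timestamps
    current = lowest_ts
    while current <= highest_ts:
        day = datetime.fromtimestamp(current)
        begin = datetime(day.year, day.month, day.day)               # 00:00:00
        end = begin + timedelta(hours=23, minutes=59, seconds=59)    # 23:59:59
        yield begin.timestamp(), end.timestamp()
        current = end.timestamp() + 1  # move to the start of the next day

for b, e in day_buckets(datetime(2024, 1, 1, 8).timestamp(),
                        datetime(2024, 1, 3, 20).timestamp()):
    print(datetime.fromtimestamp(b), '->', datetime.fromtimestamp(e))
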
Code example #9
    def processNextMessage(self):
        globs.log.write(1, 'dremail.processNextMessage()')
        self.connect()

        # Increment message counter to the next message.
        # Skip for message #0 because we haven't read any messages yet
        self.nextEmail += 1

        msgParts = {}  # msgParts contains extracts of message elements
        statusParts = {}  # statusParts contains the individual lines from the Duplicati status emails
        dateParts = {}  # dateParts contains the date & time strings for the SQL Query

        # Check no-more-mail conditions. Either no new emails to get or gone past the last email on list
        if (self.newEmails == None) or (self.nextEmail == self.numEmails):
            return None

        if self.protocol == 'pop3':
            # Get message header
            server_msg, body, octets = self.server.top(
                (self.newEmails[self.nextEmail]) + 1, 0)
            globs.log.write(
                3, 'server_msg=[{}]  body=[{}]  octets=[{}]'.format(
                    server_msg, body, octets))
            if server_msg[:3].decode() != '+OK':
                globs.log.write(
                    1, 'ERROR getting message: {}'.format(self.nextEmail))
                return '<INVALID>'

            # Get date, subject, and message ID from headers
            msgParts['date'], msgParts['subject'], msgParts[
                'messageId'] = self.extractHeaders(body.decode('utf-8'))

        elif self.protocol == 'imap':
            # Get message header
            retVal, data = self.server.fetch(
                self.newEmails[self.nextEmail],
                '(BODY.PEEK[HEADER.FIELDS (DATE SUBJECT MESSAGE-ID)])')
            if retVal != 'OK':
                globs.log.write(
                    1, 'ERROR getting message: {}'.format(self.nextEmail))
                return '<INVALID>'
            globs.log.write(
                3,
                'Server.fetch(): retVal=[{}] data=[{}]'.format(retVal, data))

            msgParts['date'], msgParts['subject'], msgParts[
                'messageId'] = self.extractHeaders(data[0][1].decode('utf-8'))

        else:  # Invalid protocol spec
            globs.log.err('Invalid protocol specification: {}.'.format(
                self.protocol))
            return None

        # Log message basics
        globs.log.write(
            1, '\n*****\nNext Message: Date=[{}] Subject=[{}] Message-Id=[{}]'.
            format(msgParts['date'], msgParts['subject'],
                   msgParts['messageId']))

        # Check if any of the vital parts are missing
        if msgParts['messageId'] is None or msgParts['messageId'] == '':
            globs.log.write(1,
                            'No message-Id. Abandoning processNextMessage()')
            return '<INVALID>'
        if msgParts['date'] is None or msgParts['date'] == '':
            globs.log.write(1, 'No Date. Abandoning processNextMessage()')
            return msgParts['messageId']
        if msgParts['subject'] is None or msgParts['subject'] == '':
            globs.log.write(1, 'No Subject. Abandoning processNextMessage()')
            return msgParts['messageId']

        # See if it's a message of interest
        # Match subject field against 'subjectregex' parameter from RC file (Default: 'Duplicati Backup report for...')
        if re.search(globs.opts['subjectregex'], msgParts['subject']) == None:
            globs.log.write(
                1,
                'Message [{}] is not a Message of Interest. Can\'t match subjectregex from .rc file. Skipping message.'
                .format(msgParts['messageId']))
            return msgParts['messageId']  # Not a message of Interest

        # Get source & destination computers from email subject
        srcRegex = '{}{}'.format(globs.opts['srcregex'],
                                 re.escape(globs.opts['srcdestdelimiter']))
        destRegex = '{}{}'.format(re.escape(globs.opts['srcdestdelimiter']),
                                  globs.opts['destregex'])
        globs.log.write(
            3, 'srcregex=[{}]  destRegex=[{}]'.format(srcRegex, destRegex))
        partsSrc = re.search(srcRegex, msgParts['subject'])
        partsDest = re.search(destRegex, msgParts['subject'])
        if (partsSrc is None) or (partsDest is None):  # Correct subject but delimiter not found. Something is wrong.
            globs.log.write(
                2,
                'SrcDestDelimiter [{}] not found in subject line. Skipping message.'
                .format(globs.opts['srcdestdelimiter']))
            return msgParts['messageId']

        # See if the record is already in the database, meaning we've seen it before
        if globs.db.searchForMessage(
                msgParts['messageId']):  # Is the message already in the database?
            # Mark the email as being seen in the database
            globs.db.execSqlStmt(
                'UPDATE emails SET dbSeen = 1 WHERE messageId = \"{}\"'.format(
                    msgParts['messageId']))
            globs.db.dbCommit()
            return msgParts['messageId']
        # Message not yet in database. Proceed.
        globs.log.write(
            1, 'Message ID [{}] does not yet exist in DB.'.format(
                msgParts['messageId']))

        dTup = email.utils.parsedate_tz(msgParts['date'])
        if dTup:
            # See if there's timezone info in the email header data. May be 'None' if no TZ info in the date line
            # TZ info is represented by seconds offset from UTC
            # We don't need to adjust the email date for TimeZone info now, since date line in email already accounts for TZ.
            # All other calls to toTimestamp() should include timezone info
            msgParts['timezone'] = dTup[9]

            # Set date into a parseable string
            # It doesn't matter what date/time format we pass in (as long as it's valid)
            # When it comes back out later, it'll be parsed into the user-defined format from the .rc file
            # For now, we'll use YYYY/MM/DD HH:MM:SS
            xDate = '{:04d}/{:02d}/{:02d} {:02d}:{:02d}:{:02d}'.format(
                dTup[0], dTup[1], dTup[2], dTup[3], dTup[4], dTup[5])
            dtTimStmp = drdatetime.toTimestamp(
                xDate, dfmt='YYYY/MM/DD',
                tfmt='HH:MM:SS')  # Convert the string into a timestamp
            msgParts['emailTimestamp'] = dtTimStmp
            globs.log.write(
                3, 'emailDate=[{}]-[{}]'.format(
                    dtTimStmp, drdatetime.fromTimestamp(dtTimStmp)))

        msgParts['sourceComp'] = re.search(
            srcRegex, msgParts['subject']).group().split(
                globs.opts['srcdestdelimiter'])[0]
        msgParts['destComp'] = re.search(destRegex,
                                         msgParts['subject']).group().split(
                                             globs.opts['srcdestdelimiter'])[1]
        globs.log.write(3, 'sourceComp=[{}] destComp=[{}] emailTimestamp=[{}] subject=[{}]'.format(msgParts['sourceComp'], \
            msgParts['destComp'], msgParts['emailTimestamp'], msgParts['subject']))

        # Search for source/destination pair in database. Add if not already there
        retVal = globs.db.searchSrcDestPair(msgParts['sourceComp'],
                                            msgParts['destComp'])

        # Extract the body (payload) from the email
        if self.protocol == 'pop3':
            # Retrieve the whole message. This is redundant with the previous .top() call and results in extra data downloads.
            # In cases where there is a mix of Duplicati and non-Duplicati emails to read, this actually saves time at scale.
            # In cases where all the emails on the server are Duplicati emails, this does, in fact, slow things down a bit
            # POP3 is a stupid protocol. Use IMAP if at all possible.
            server_msg, body, octets = self.server.retr(
                (self.newEmails[self.nextEmail]) + 1)
            msgTmp = ''
            for j in body:
                msgTmp += '{}\n'.format(j.decode("utf-8"))
            msgBody = email.message_from_string(
                msgTmp)._payload  # Get message body
        elif self.protocol == 'imap':
            # Retrieve just the body text of the message.
            retVal, data = self.server.fetch(self.newEmails[self.nextEmail],
                                             '(BODY.PEEK[TEXT])')

            # Fix issue #71
            # From https://stackoverflow.com/questions/2230037/how-to-fetch-an-email-body-using-imaplib-in-python
            # "...usually the data format is [(bytes, bytes), bytes] but when the message is marked as unseen manually,
            # the format is [bytes, (bytes, bytes), bytes] – Niklas R Sep 8 '15 at 23:29
            # Need to check if len(data)==2 (normally unread) or ==3 (manually set unread)
            globs.log.write(3, 'dataLen={}'.format(len(data)))
            if len(data) == 2:
                msgBody = data[0][1].decode('utf-8')  # Get message body
            else:
                msgBody = data[1][1].decode('utf-8')  # Get message body

        globs.log.write(3, 'Message Body=[{}]'.format(msgBody))

        # Go through each element in lineParts{}, get the value from the body, and assign it to the corresponding element in statusParts{}
        for section, regex, flag, typ in lineParts:
            statusParts[section] = self.searchMessagePart(
                msgBody, regex, flag, typ)  # Get the field parts

        # Adjust fields if not a clean run
        globs.log.write(
            3, "statusParts['failed']=[{}]".format(statusParts['failed']))
        if statusParts['failed'] == '':  # Looks like a good run
            # These fields can be included in parentheses in later versions of Duplicati
            # For example:
            #   SizeOfModifiedFiles: 23 KB (23556)
            #   SizeOfAddedFiles: 10.12 KB (10364)
            #   SizeOfExaminedFiles: 44.42 GB (47695243956)
            #   SizeOfOpenedFiles: 33.16 KB (33954)
            # Extract the parenthesized value (if present) or the raw value (if not)
            dt, tm = globs.optionManager.getRcSectionDateTimeFmt(
                msgParts['sourceComp'], msgParts['destComp'])
            dateParts['endTimestamp'] = self.parenOrRaw(
                statusParts['endTimeStr'],
                df=dt,
                tf=tm,
                tz=msgParts['timezone'])
            dateParts['beginTimestamp'] = self.parenOrRaw(
                statusParts['beginTimeStr'],
                df=dt,
                tf=tm,
                tz=msgParts['timezone'])
            globs.log.write(
                3,
                'Email indicates a successful backup. Date/time is: end=[{}]  begin=[{}]'
                .format(dateParts['endTimestamp'],
                        dateParts['beginTimestamp']))

            statusParts['sizeOfModifiedFiles'] = self.parenOrRaw(
                statusParts['sizeOfModifiedFiles'])
            statusParts['sizeOfAddedFiles'] = self.parenOrRaw(
                statusParts['sizeOfAddedFiles'])
            statusParts['sizeOfExaminedFiles'] = self.parenOrRaw(
                statusParts['sizeOfExaminedFiles'])
            statusParts['sizeOfOpenedFiles'] = self.parenOrRaw(
                statusParts['sizeOfOpenedFiles'])

        else:  # Something went wrong. Let's gather the details.
            statusParts['errors'] = statusParts['failed']
            statusParts['parsedResult'] = 'Failure'
            statusParts['warnings'] = statusParts['details']
            globs.log.write(2, 'Errors=[{}]'.format(statusParts['errors']))
            globs.log.write(2, 'Warnings=[{}]'.format(statusParts['warnings']))

            # Since the backup job report never ran, we'll use the email date/time as the report date/time
            dateParts['endTimestamp'] = msgParts['emailTimestamp']
            dateParts['beginTimestamp'] = msgParts['emailTimestamp']
            globs.log.write(
                3,
                'Email indicates a failed backup. Replacing date/time with: end=[{}]  begin=[{}]'
                .format(dateParts['endTimestamp'],
                        dateParts['beginTimestamp']))

        # Replace commas (,) with newlines (\n) in message fields. Sqlite really doesn't like commas in SQL statements!
        for part in ['messages', 'warnings', 'errors']:
            if statusParts[part] != '':
                statusParts[part] = statusParts[part].replace(',', '\n')

        # If we're just collecting and get a warning/error, we may need to send an email to the admin
        if (globs.opts['collect'] is
                True) and (globs.opts['warnoncollect'] is True) and (
                    (statusParts['warnings'] != '') or
                    (statusParts['errors'] != '')):
            errMsg = 'Duplicati error(s) on backup job\n'
            errMsg += 'Message ID {} on {}\n'.format(msgParts['messageId'],
                                                     msgParts['date'])
            errMsg += 'Subject: {}\n\n'.format(msgParts['subject'])
            if statusParts['warnings'] != '':
                errMsg += 'Warnings:' + statusParts['warnings'] + '\n\n'
            if statusParts['errors'] != '':
                errMsg += 'Errors:' + statusParts['errors'] + '\n\n'

            globs.outServer.sendErrorEmail(errMsg)

        globs.log.write(
            3, 'Resulting timestamps: endTimeStamp=[{}] beginTimeStamp=[{}]'.
            format(drdatetime.fromTimestamp(dateParts['endTimestamp']),
                   drdatetime.fromTimestamp(dateParts['beginTimestamp'])))

        sqlStmt = self.buildEmailSql(msgParts, statusParts, dateParts)
        globs.db.execSqlStmt(sqlStmt)
        globs.db.dbCommit()

        return msgParts['messageId']
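
The IMAP branch above carries the Issue #71 workaround: fetch() responses are usually shaped [(header_bytes, body_bytes), closing_bytes], but they gain a leading element when a message was manually flagged unseen, so the code checks len(data) before picking the payload. A small sketch of that branch on simulated response shapes follows; the sample data is made up.

# Minimal sketch of the Issue #71 length check; the response tuples are made up.
def extract_body(data):
    if len(data) == 2:
        return data[0][1].decode('utf-8')   # normal shape: [(hdr, body), b')']
    return data[1][1].decode('utf-8')       # extra leading element present

normal = [(b'1 (BODY[TEXT]', b'ParsedResult: Success\n'), b')']
flagged = [b'1 (FLAGS (\\Seen))', (b'1 (BODY[TEXT]', b'ParsedResult: Success\n'), b')']
print(extract_body(normal))
print(extract_body(flagged))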