Code Example #1
File: report.py Project: jfparis/dupReport
def buildWarningMessage(source, destination, nDays, lastTimestamp, opts):
    globs.log.write(1,'buildWarningMessage({}, {}, {}, {})'.format(source, destination, nDays, lastTimestamp))
    lastDateStr, lastTimeStr = drdatetime.fromTimestamp(lastTimestamp)
    srcDest = '{}{}{}'.format(source, globs.opts['srcdestdelimiter'], destination)

    subj = globs.optionManager.getRcOption(srcDest, 'nbwsubject')
    if subj is None:
        subj = opts['nbwsubject']
    globs.log.write(3,'subj(original)={}'.format(subj))
    subj = subj.replace('#SOURCE#',source).replace('#DESTINATION#', destination).replace('#DELIMITER#', globs.opts['srcdestdelimiter']).replace('#DAYS#', str(nDays)).replace('#DATE#', lastDateStr).replace('#TIME#', lastTimeStr)
    globs.log.write(3,'subj(modified)={}'.format(subj))

    warnHtml='<html><head></head><body><table border=\"{}\" cellpadding=\"{}\">\n'.format(opts['border'],opts['padding'])
    warnHtml += '<tr><td bgcolor="#FF0000" align="center"><b>Backup Warning for {}{}{}</b></td></tr>\n'.format(source, globs.opts['srcdestdelimiter'], destination)
    warnHtml += '<tr><td bgcolor="#E6E6E6" align="center">Your last backup from {} to {} was on {} at {} - {} days ago.</td></tr>\n'.format(source, destination, lastDateStr,lastTimeStr, nDays)
    warnHtml += '<tr><td align="center"> {} has not been backed up in the last {} days!<br>'.format(source, nDays)
    warnHtml += "If {} has been powered off or has been offline for the last {} days, no further action is required.<br>\n".format(source, nDays)
    warnHtml += 'Your backups will resume the next time {} is brought back online.<br>'.format(source)
    warnHtml += 'Otherwise, please make sure your Duplicati service is running and/or manually run a backup as soon as possible!</td></tr>\n'
    warnHtml += '</table></body></html>'

    warnText = 'Backup Warning for {}{}{}!\n\n'.format(source,globs.opts['srcdestdelimiter'],destination)
    warnText += 'Your last backup from {} to {} was on {} at {} - {} days ago.\n\n'.format(source, destination, lastDateStr,lastTimeStr, nDays)
    warnText += "If {} has been powered off or has been offline for the last {} days, no further action is required.\n".format(source, nDays)
    warnText += 'Your backups will resume the next time {} is brought back online.\n'.format(source)
    warnText += 'Otherwise, please make sure your Duplicati service is running and/or manually run a backup as soon as possible!\n'
    
    sender = globs.opts['outsender']
    receiver = globs.optionManager.getRcOption(srcDest, 'receiver')
    if receiver is None:
        receiver = globs.opts['outreceiver']

    globs.log.write(3, 'Sending message to {}'.format(receiver))
    return warnHtml, warnText, subj, sender, receiver
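
A minimal, self-contained sketch of the keyword substitution used for the warning subject above. The template string and values here are hypothetical; in the real function they come from the .rc file (nbwsubject) and globs.opts.

# Hypothetical template and values; the real ones come from the .rc file and globs.opts
template = 'Backup Warning: #SOURCE##DELIMITER##DESTINATION# unseen for #DAYS# days (last: #DATE# #TIME#)'
values = {'#SOURCE#': 'Desktop', '#DESTINATION#': 'B2', '#DELIMITER#': '-',
          '#DAYS#': '5', '#DATE#': '2021/03/01', '#TIME#': '02:00:00'}

subject = template
for keyword, value in values.items():
    subject = subject.replace(keyword, value)  # same effect as the chained .replace() calls above
print(subject)
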
Code Example #2
File: db.py Project: DocFraggle/dupReport
    def rollback(self, datespec):
        globs.log.write(1, 'db.rollback({})'.format(datespec))

        newTimeStamp = drdatetime.toTimestamp(datespec)
        tsCheck = drdatetime.fromTimestamp(newTimeStamp)

        sqlStmt = 'DELETE FROM emails WHERE emailtimestamp > {}'.format(
            newTimeStamp)
        dbCursor = self.execSqlStmt(sqlStmt)

        sqlStmt = 'SELECT source, destination FROM backupsets WHERE lastTimestamp > {}'.format(
            newTimeStamp)
        dbCursor = self.execSqlStmt(sqlStmt)
        setRows = dbCursor.fetchall()
        for source, destination in setRows:
            # Select largest timestamp from remaining data
            sqlStmt = 'select max(endTimeStamp), examinedFiles, sizeOfExaminedFiles from emails where sourceComp = \'{}\' and destComp= \'{}\''.format(
                source, destination)
            dbCursor = self.execSqlStmt(sqlStmt)
            emailTimestamp, examinedFiles, sizeOfExaminedFiles = dbCursor.fetchone()
            if emailTimestamp is None:
                # After the rollback, some srcdest pairs may have no corresponding entries in the database, meaning they were not seen until after the rollback period
                # We should remove these from the database, to return it to the state it was in before the rollback.
                globs.log.write(
                    2, 'Deleting {}{}{} from backupsets.'.format(
                        source, globs.opts['srcdestdelimiter'], destination))
                sqlStmt = 'DELETE FROM backupsets WHERE source = \"{}\" AND destination = \"{}\"'.format(
                    source, destination)
                dbCursor = self.execSqlStmt(sqlStmt)
            else:
                globs.log.write(
                    2, 'Resetting {}{}{} to {}'.format(
                        source, globs.opts['srcdestdelimiter'], destination,
                        drdatetime.fromTimestamp(emailTimestamp)))
                # Update backupset table to reflect rolled-back date
                sqlStmt = 'update backupsets set lastFileCount={}, lastFileSize={}, lastTimestamp={} where source = \'{}\' and destination = \'{}\''.format(
                    examinedFiles, sizeOfExaminedFiles, emailTimestamp, source,
                    destination)
                dbCursor = self.execSqlStmt(sqlStmt)

        self.dbCommit()
        return None
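
The rollback above builds its SQL with str.format(). Below is a short sketch of the parameterized equivalent for one of the statements, using a plain sqlite3 cursor since it is not clear whether the project's execSqlStmt() wrapper accepts bind parameters.

import sqlite3

conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE emails (emailtimestamp REAL)')
newTimeStamp = 1614556800.0  # hypothetical rollback timestamp

# Parameterized form of: 'DELETE FROM emails WHERE emailtimestamp > {}'.format(newTimeStamp)
conn.execute('DELETE FROM emails WHERE emailtimestamp > ?', (newTimeStamp,))
conn.commit()
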
Code Example #3
File: report.py Project: jfparis/dupReport
    def extractReportData(self):
        globs.log.write(1, 'extractReportData()')

        # Initialize report table. Delete all existing rows
        dbCursor = globs.db.execSqlStmt("DELETE FROM report")

        # Select source/destination pairs from database
        sqlStmt = "SELECT source, destination, lastTimestamp, lastFileCount, lastFileSize FROM backupsets ORDER BY source, destination"

        # Loop through backupsets table and then get latest activity for each src/dest pair
        dbCursor = globs.db.execSqlStmt(sqlStmt)
        bkSetRows = dbCursor.fetchall()
        globs.log.write(2, 'bkSetRows=[{}]'.format(bkSetRows))
        for source, destination, lastTimestamp, lastFileCount, lastFileSize in bkSetRows:
            globs.log.write(3, 'Src=[{}] Dest=[{}] lastTimestamp=[{}] lastFileCount=[{}] lastFileSize=[{}]'.format(source, 
                destination, lastTimestamp, lastFileCount, lastFileSize))

            # Select all activity for src/dest pair since last report run
            sqlStmt = 'SELECT endTimestamp, examinedFiles, sizeOfExaminedFiles, addedFiles, deletedFiles, modifiedFiles, \
                filesWithError, parsedResult, warnings, errors, messages FROM emails WHERE sourceComp=\'{}\' AND destComp=\'{}\' \
                AND  endTimestamp > {} order by endTimestamp'.format(source, destination, lastTimestamp)
            dbCursor = globs.db.execSqlStmt(sqlStmt)

            emailRows = dbCursor.fetchall()
            globs.log.write(3, 'emailRows=[{}]'.format(emailRows))
            if emailRows: 
                # Loop through each new activity and report
                for endTimeStamp, examinedFiles, sizeOfExaminedFiles, addedFiles, deletedFiles, modifiedFiles, \
                    filesWithError, parsedResult, warnings, errors, messages in emailRows:
            
                    # Determine file count & size difference from last run
                    examinedFilesDelta = examinedFiles - lastFileCount
                    globs.log.write(3, 'examinedFilesDelta = {} - {} = {}'.format(examinedFiles, lastFileCount, examinedFilesDelta))
                    fileSizeDelta = sizeOfExaminedFiles - lastFileSize
                    globs.log.write(3, 'fileSizeDelta = {} - {} = {}'.format(sizeOfExaminedFiles, lastFileSize, fileSizeDelta))

                    # Convert from timestamp to date & time strings
                    dateStr, timeStr = drdatetime.fromTimestamp(endTimeStamp)

                    sqlStmt = "INSERT INTO report (source, destination, timestamp, examinedFiles, examinedFilesDelta, sizeOfExaminedFiles, fileSizeDelta, \
                        addedFiles, deletedFiles, modifiedFiles, filesWithError, parsedResult, messages, warnings, errors) \
                        VALUES ('{}', '{}', {}, {}, {}, {}, {}, {}, {}, {}, {}, \"{}\", \"{}\", \"{}\", \"{}\")".format(source, destination, endTimeStamp, examinedFiles, \
                        examinedFilesDelta, sizeOfExaminedFiles, fileSizeDelta, addedFiles, deletedFiles, modifiedFiles, filesWithError, parsedResult, messages, warnings, errors)
                    globs.db.execSqlStmt(sqlStmt)

                    # Update latest activity into backupsets
                    sqlStmt = 'UPDATE backupsets SET lastFileCount={}, lastFileSize={}, \
                        lasttimestamp=\'{}\' WHERE source=\'{}\' AND destination=\'{}\''.format(examinedFiles, sizeOfExaminedFiles, \
                        endTimeStamp, source, destination)
                    globs.db.execSqlStmt(sqlStmt)
                    globs.db.dbCommit()

                    # Set last file count & size to the latest information
                    lastFileCount = examinedFiles
                    lastFileSize = sizeOfExaminedFiles
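
A stripped-down sketch of the rolling delta calculation in the loop above: each row's file count and size are compared against the values carried over from the previous run, then become the new baseline. The sample rows and starting values are made up.

# Hypothetical (examinedFiles, sizeOfExaminedFiles) rows, ordered by endTimeStamp
rows = [(100, 5000), (110, 5200), (108, 5150)]
lastFileCount, lastFileSize = 95, 4800  # values carried over from backupsets

for examinedFiles, sizeOfExaminedFiles in rows:
    examinedFilesDelta = examinedFiles - lastFileCount
    fileSizeDelta = sizeOfExaminedFiles - lastFileSize
    print(examinedFilesDelta, fileSizeDelta)
    # Carry the latest values forward so the next row's delta is relative to this run
    lastFileCount, lastFileSize = examinedFiles, sizeOfExaminedFiles
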
Code Example #4
File: report.py Project: jfparis/dupReport
def lastSeenTable(opts):
    globs.log.write(1, 'report.lastSeenTable()')

    msgHtml = '<table border={} cellpadding="{}"><td align=\"center\" colspan = \"3\"><b>{}</b></td>\n'.format(opts['border'],opts['padding'], opts['lastseensummarytitle'])
    msgHtml += '<tr><td><b>Source</b></td><td><b>Destination</b></td><td><b>Last Seen</b></td></tr>\n'
    msgText = '***** {} *****\n(Source-Destination-Last Seen)\n'.format(opts['lastseensummarytitle'])
    msgCsv = '\"{}\",\n\"Source\",\"Destination\",\"Last Seen\"\n'.format(opts['lastseensummarytitle'])

    dbCursor = globs.db.execSqlStmt("SELECT source, destination, lastTimestamp FROM backupsets ORDER BY source, destination")
    sdSets = dbCursor.fetchall()
    globs.log.write(3,'sdSets=[{}]'.format(sdSets))
    for source, destination, lastTimestamp in sdSets:
        lastDate = drdatetime.fromTimestamp(lastTimestamp)
        days = drdatetime.daysSince(lastTimestamp)
        globs.log.write(3,'source=[{}] destination=[{}] lastTimestamp=[{}] lastDate=[{}] days=[{}]'.format(source, destination, lastTimestamp, lastDate, days))
        msgHtml += '<tr><td>{}</td><td>{}</td><td bgcolor=\"{}\">{} {} ({} days ago)</td></tr>\n'.format(source, destination, getLastSeenColor(opts, days), lastDate[0], lastDate[1], days)
        msgText += '{}{}{}: Last seen on {} {} ({} days ago)\n'.format(source, globs.opts['srcdestdelimiter'], destination, lastDate[0], lastDate[1], days)
        msgCsv += '\"{}\",\"{}\",\"{} {} ({} days ago)\"\n'.format(source, destination, lastDate[0], lastDate[1], days)

    msgHtml += '</table>'

    return msgHtml, msgText, msgCsv
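
Rough stand-ins for the drdatetime helpers used above, assuming Unix timestamps; the real module formats dates and times according to the .rc file settings, so these are approximations only.

from datetime import datetime

def fromTimestamp(ts, dfmt='%Y/%m/%d', tfmt='%H:%M:%S'):
    dt = datetime.fromtimestamp(ts)
    return dt.strftime(dfmt), dt.strftime(tfmt)  # (dateStr, timeStr) pair, as used above

def daysSince(ts):
    return (datetime.now() - datetime.fromtimestamp(ts)).days

print(fromTimestamp(1614556800), daysSince(1614556800))
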
Code Example #5
File: rpt_srcdest.py Project: jfparis/dupReport
def runReport(startTime):
    globs.log.write(1, 'rpt_srcdest()')

    # Get header and column info
    nFields, fldDefs, reportOpts, rptCols, rptTits = report.initReportVars()

    # Print the report title
    msgHtml, msgText, msgCsv = report.rptTop(reportOpts, nFields)

    # Remove columns we don't need for this report
    # These are already part of the report logic processing & subheaders
    # we won't need to loop through them for the report fields
    rptCols.remove('source')
    rptCols.remove('destination')

    # Print column titles if not printing for each section
    if reportOpts['repeatheaders'] is False:
        msgHtml, msgText, msgCsv = report.rptPrintTitles(
            msgHtml, msgText, msgCsv, rptCols)

    # Select source/destination pairs from database
    sqlStmt = "SELECT source, destination, lastTimestamp, lastFileCount, lastFileSize from backupsets"

    # How should report be sorted?
    # Options are source & destination
    if reportOpts['sortby'] == 'source':
        sqlStmt = sqlStmt + " ORDER BY source, destination"
    else:
        sqlStmt = sqlStmt + " ORDER BY destination, source"
    dbCursor = globs.db.execSqlStmt(sqlStmt)
    bkSetRows = dbCursor.fetchall()
    globs.log.write(2, 'bkSetRows=[{}]'.format(bkSetRows))

    # Loop through backupsets table and then get latest activity for each src/dest pair
    for source, destination, lastTimestamp, lastFileCount, lastFileSize in bkSetRows:
        globs.log.write(
            3,
            'Src=[{}] Dest=[{}] lastTimestamp=[{}] lastFileCount=[{}] lastFileSize=[{}]'
            .format(source, destination, lastTimestamp, lastFileCount,
                    lastFileSize))

        # Add title for source/dest pair
        subHead = globs.optionManager.getRcOption('report', 'subheading')
        if subHead is not None:
            # Substitute subheading keywords
            subHead = subHead.replace('#SOURCE#',
                                      source).replace('#DESTINATION#',
                                                      destination)
        if subHead is None or subHead == '':
            msgHtml += '<tr><td colspan="{}" align="center" bgcolor="{}"><b>{}:</b> {} <b>{}:</b> {}</td></tr>\n'.format(nFields, reportOpts['subheadbg'], \
                rptTits['source'], source, rptTits['destination'], destination)
            msgText += '***** {}: {}    {}: {} *****\n'.format(
                rptTits['source'], source, rptTits['destination'], destination)
            msgCsv += '\"***** {}: {}    {}: {} *****\"\n'.format(
                rptTits['source'], source, rptTits['destination'], destination)
        else:
            msgHtml += '<tr><td colspan="{}" align="center" bgcolor="{}">{}</td></tr>\n'.format(
                nFields, reportOpts['subheadbg'], subHead)
            msgText += '***** {} *****\n'.format(subHead)
            msgCsv += '\"***** {} *****\"\n'.format(subHead)

        # Print column titles if printing for each section
        if reportOpts['repeatheaders'] is True:
            msgHtml, msgText, msgCsv = report.rptPrintTitles(
                msgHtml, msgText, msgCsv, rptCols)

        # Select all activity for src/dest pair since last report run
        sqlStmt = "SELECT timestamp, examinedFiles, examinedFilesDelta, sizeOfExaminedFiles, fileSizeDelta, \
            addedFiles, deletedFiles, modifiedFiles, filesWithError, parsedResult, messages, warnings, errors \
            FROM report WHERE source=\'{}\' AND destination=\'{}\' order by timestamp".format(
            source, destination)
        dbCursor = globs.db.execSqlStmt(sqlStmt)
        reportRows = dbCursor.fetchall()
        globs.log.write(3, 'reportRows=[{}]'.format(reportRows))
        if not reportRows:  # No rows found = no recent activity
            # Calculate days since last activity
            diff = drdatetime.daysSince(lastTimestamp)

            lastDateStr, lastTimeStr = drdatetime.fromTimestamp(lastTimestamp)
            msgHtml += '<tr><td colspan="{}" align="center" bgcolor="{}"><i>No new activity. Last activity on {} at {} ({} days ago)</i></td></tr>\n'.format(
                nFields, report.getLastSeenColor(reportOpts, diff),
                lastDateStr, lastTimeStr, diff)
            msgText += 'No new activity. Last activity on {} at {} ({} days ago)\n'.format(
                lastDateStr, lastTimeStr, diff)
            msgCsv += '\"No new activity. Last activity on {} at {} ({} days ago)\"\n'.format(
                lastDateStr, lastTimeStr, diff)
        else:
            # Loop through each new job email and report
            for timestamp, examinedFiles, examinedFilesDelta, sizeOfExaminedFiles, fileSizeDelta, \
                    addedFiles, deletedFiles, modifiedFiles, filesWithError, parsedResult, messages, \
                    warnings, errors in reportRows:

                # Get date and time from timestamp
                dateStr, timeStr = drdatetime.fromTimestamp(timestamp)

                # Print report fields
                # Each field takes up one column/cell in the table
                msgHtml += '<tr>'

                # The full list of possible fields in the report. printField() below will skip a field if it has been removed in the .rc file.
                titles = [
                    'date', 'time', 'files', 'filesplusminus', 'size',
                    'sizeplusminus', 'added', 'deleted', 'modified', 'errors',
                    'result'
                ]
                fields = [
                    dateStr, timeStr, examinedFiles, examinedFilesDelta,
                    sizeOfExaminedFiles, fileSizeDelta, addedFiles,
                    deletedFiles, modifiedFiles, filesWithError, parsedResult
                ]

                for ttl, fld in zip(titles, fields):
                    msgHtml += report.printField(ttl, fld, 'html')
                    msgText += report.printField(ttl, fld, 'text')
                    msgCsv += report.printField(ttl, fld, 'csv')

                msgHtml += '</tr>\n'
                msgText += '\n'
                msgCsv += '\n'

                fields = [messages, warnings, errors]
                options = [
                    'displaymessages', 'displaywarnings', 'displayerrors'
                ]
                backgrounds = ['jobmessagebg', 'jobwarningbg', 'joberrorbg']
                titles = ['jobmessages', 'jobwarnings', 'joberrors']
                # Print message/warning/error fields
                # Each of these spans all the table columns
                for fld, opt, bg, tit in zip(fields, options, backgrounds,
                                             titles):
                    if ((fld != '') and (reportOpts[opt] == True)):
                        msgHtml += '<tr><td colspan="{}" align="center" bgcolor="{}"><details><summary>{}</summary><p>{}</details></td></tr>\n'.format(
                            nFields, reportOpts[bg], rptTits[tit], fld)
                        msgText += '{}: {}\n'.format(rptTits[tit], fld)
                        csvLine = '\"{}: {}\"\n'.format(
                            rptTits[tit], fld
                        ).replace('\n', ' ').replace(
                            '\r', ''
                        )  # Need to remove \n & \r because csv truncates after these characters
                        msgCsv += csvLine

    # Add report footer
    msgHtml, msgText, msgCsv = report.rptBottom(msgHtml, msgText, msgCsv,
                                                startTime, nFields)

    # Return text & HTML messages to main program. It can decide which one(s) it wants to use.
    return msgHtml, msgText, msgCsv
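
The zip(titles, fields) loop above renders each report cell in three formats at once. Below is a simplified stand-in for report.printField() to show the idea; the real function consults fldDefs and the .rc file to honor column removal and per-field formatting.

def printField(title, value, fmt):
    # Simplified stand-in; the real printField() skips fields removed in the .rc file
    if fmt == 'html':
        return '<td>{}</td>'.format(value)
    if fmt == 'csv':
        return '"{}",'.format(value)
    return '{}\t'.format(value)  # 'text'

titles = ['date', 'time', 'files']
fields = ['2021/03/01', '02:00:00', 1234]
row = ''.join(printField(t, f, 'html') for t, f in zip(titles, fields))
print('<tr>' + row + '</tr>')
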
Code Example #6
File: dremail.py Project: DocFraggle/dupReport
    def processMessage(self, msg):

        globs.log.write(1, 'EmailServer.processMessage()')

        # msgParts items:
        #    'messageID' - the message ID
        #    'subject' - the message subject
        #    'date'
        #    'time'
        #    'body' - Payload of message (i.e., not the Header)
        msgParts = {}

        # statusParts contains the individual lines from the Duplicati status emails
        statusParts = {}

        # dateParts contains the date & time strings for the SQL Query
        dateParts = {}

        # Check all the vital parts to see if they're there
        # If any of these are missing it means:
        #   (1) they are not from Duplicati, and
        #   (2) if we keep processing things will blow up down the line
        # To be safe, we'll just skip the message
        if msg['Message-Id'] is None or msg['Message-Id'] == '':
            globs.log.write(1, 'No message-Id. Abandoning processMessage()')
            return None, None
        if msg['Subject'] is None or msg['Subject'] == '':
            globs.log.write(1, 'No Subject. Abandoning processMessage()')
            return None, None
        if msg['Date'] is None or msg['Date'] == '':
            globs.log.write(1, 'No Date. Abandoning processMessage()')
            return None, None

        # get Subject
        decode = email.header.decode_header(msg['Subject'])[0]
        msgParts['subject'] = decode[0]
        if (type(msgParts['subject'])
                is not str):  # Email encoded as a byte object - See Issue #14
            msgParts['subject'] = msgParts['subject'].decode('utf-8')
        globs.log.write(3, 'Subject=[{}]'.format(msgParts['subject']))

        # See if it's a message of interest
        # Match subject field against 'subjectregex' parameter from RC file (Default: 'Duplicati Backup report for...')
        if re.search(globs.opts['subjectregex'], msgParts['subject']) == None:
            globs.log.write(
                1,
                'Message [{}] is not a Message of Interest. Skipping message.'.
                format(msg['Message-Id']))
            return None, None  # Not a message of Interest

        # Last chance to kick out bad messages
        # Get source & destination computers from email subject
        srcRegex = '{}{}'.format(globs.opts['srcregex'],
                                 re.escape(globs.opts['srcdestdelimiter']))
        destRegex = '{}{}'.format(re.escape(globs.opts['srcdestdelimiter']),
                                  globs.opts['destregex'])
        globs.log.write(
            3, 'srcregex=[{}]  destRegex=[{}]'.format(srcRegex, destRegex))
        # Does the Subject have a proper source/destination pair?
        partsSrc = re.search(srcRegex, msgParts['subject'])
        partsDest = re.search(destRegex, msgParts['subject'])
        if (partsSrc is None) or (
                partsDest is None
        ):  # Correct subject but delimiter not found. Something is wrong.
            globs.log.write(
                2,
                'srcdestdelimiter [{}] not found in subject. Skipping message.'
                .format(globs.opts['srcdestdelimiter']))
            return None, None

        # Get Message ID
        globs.log.write(3, 'msg[Message-Id]=[{}]'.format(msg['Message-Id']))
        msgParts['messageId'] = email.header.decode_header(
            msg['Message-Id'])[0][0]
        globs.log.write(
            3, 'msgParts[messageId]=[{}]'.format(msgParts['messageId']))
        if (type(msgParts['messageId'])
                is not str):  # Email encoded as a byte object - See Issue #14
            msgParts['messageId'] = msgParts['messageId'].decode('utf-8')
            globs.log.write(
                3, 'Revised messageId=[{}]'.format(msgParts['messageId']))

        # See if the record is already in the database, meaning we've seen it before
        if globs.db.searchForMessage(
                msgParts['messageId']):  # Message is already in database
            # Mark the email as being seen in the database
            globs.db.execSqlStmt(
                'UPDATE emails SET dbSeen = 1 WHERE messageId = \"{}\"'.format(
                    msgParts['messageId']))
            globs.db.dbCommit()
            return None, None

        # Message not yet in database. Proceed.
        globs.log.write(
            1, 'Message ID [{}] does not exist. Adding to DB'.format(
                msgParts['messageId']))

        dTup = email.utils.parsedate_tz(msg['Date'])
        if dTup:
            # See if there's timezone info in the email header data. May be 'None' if no TZ info in the date line
            # TZ info is represented by seconds offset from UTC
            # We don't need to adjust the email date for TimeZone info now, since date line in email already accounts for TZ.
            # All other calls to toTimestamp() should include timezone info
            msgParts['timezone'] = dTup[9]

            # Set date into a parseable string
            # It doesn't matter what date/time format we pass in (as long as it's valid)
            # When it comes back out, it'll be parsed into the user-defined format from the .rc file
            # For now, we'll use YYYY/MM/DD HH:MM:SS
            xDate = '{:04d}/{:02d}/{:02d} {:02d}:{:02d}:{:02d}'.format(
                dTup[0], dTup[1], dTup[2], dTup[3], dTup[4], dTup[5])
            dtTimStmp = drdatetime.toTimestamp(
                xDate, dfmt='YYYY/MM/DD',
                tfmt='HH:MM:SS')  # Convert the string into a timestamp
            msgParts['emailTimestamp'] = dtTimStmp
            globs.log.write(
                3, 'emailDate=[{}]-[{}]'.format(
                    dtTimStmp, drdatetime.fromTimestamp(dtTimStmp)))

        msgParts['sourceComp'] = re.search(
            srcRegex, msgParts['subject']).group().split(
                globs.opts['srcdestdelimiter'])[0]
        msgParts['destComp'] = re.search(destRegex,
                                         msgParts['subject']).group().split(
                                             globs.opts['srcdestdelimiter'])[1]
        globs.log.write(3, 'sourceComp=[{}] destComp=[{}] emailTimestamp=[{}] subject=[{}]'.format(msgParts['sourceComp'], \
            msgParts['destComp'], msgParts['emailTimestamp'], msgParts['subject']))

        # Search for source/destination pair in database. Add if not already there
        retVal = globs.db.searchSrcDestPair(msgParts['sourceComp'],
                                            msgParts['destComp'])

        # Extract the body (payload) from the email
        msgParts['body'] = msg.get_payload()
        globs.log.write(3, 'Body=[{}]'.format(msgParts['body']))

        # Go through each element in lineParts{}, get the value from the body, and assign it to the corresponding element in statusParts{}
        for section, regex, flag, typ in lineParts:
            statusParts[section] = self.searchMessagePart(
                msgParts['body'], regex, flag, typ)  # Get the field parts

        # Adjust fields if not a clean run
        globs.log.write(
            3, "statusParts['failed']=[{}]".format(statusParts['failed']))
        if statusParts['failed'] == '':  # Looks like a good run
            # See if there's a timestamp (xxxx.xxxx) already in the EndTime field
            # If so, use that, else calculate timestamp
            pat = re.compile(r'\(.*\)')

            match = re.search(pat, statusParts['endTimeStr'])
            if match:  # Timestamp found in line
                dateParts['endTimestamp'] = statusParts['endTimeStr'][
                    match.regs[0][0] + 1:match.regs[0][1] - 1]
            else:  # No timestamp found. Calculate timestamp
                #dt, tm = drdatetime.getDateTimeFmt(msgParts['sourceComp'], msgParts['destComp'])
                dt, tm = globs.optionManager.getRcSectionDateTimeFmt(
                    msgParts['sourceComp'], msgParts['destComp'])
                dateParts['endTimestamp'] = drdatetime.toTimestamp(
                    statusParts['endTimeStr'],
                    dfmt=dt,
                    tfmt=tm,
                    utcOffset=msgParts['timezone'])

            match = re.search(pat, statusParts['beginTimeStr'])
            if match:  # Timestamp found in line
                dateParts['beginTimestamp'] = statusParts['beginTimeStr'][
                    match.regs[0][0] + 1:match.regs[0][1] - 1]
            else:  # No timestamp found. Calculate timestamp
                dateParts['beginTimestamp'] = drdatetime.toTimestamp(
                    statusParts['beginTimeStr'],
                    utcOffset=msgParts['timezone'])
        else:  # Something went wrong. Let's gather the details.
            statusParts['errors'] = statusParts['failed']
            statusParts['parsedResult'] = 'Failure'
            statusParts['warnings'] = statusParts['details']
            globs.log.write(2, 'Errors=[{}]'.format(statusParts['errors']))
            globs.log.write(2, 'Warnings=[{}]'.format(statusParts['warnings']))

            # Since the backup job report never ran, we'll use the email date/time as the report date/time
            dateParts['endTimestamp'] = msgParts['emailTimestamp']
            dateParts['beginTimestamp'] = msgParts['emailTimestamp']
            globs.log.write(
                3, 'Failure message. Replaced date/time: end=[{}]  begin=[{}]'.
                format(dateParts['endTimestamp'],
                       dateParts['beginTimestamp']))

        # Replace commas (,) with newlines (\n) in message fields. Sqlite really doesn't like commas in SQL statements!
        for part in ['messages', 'warnings', 'errors']:
            if statusParts[part] != '':
                statusParts[part] = statusParts[part].replace(',', '\n')

        # If we're just collecting and get a warning/error, we may need to send an email to the admin
        if (globs.opts['collect'] is
                True) and (globs.opts['warnoncollect'] is True) and (
                    (statusParts['warnings'] != '') or
                    (statusParts['errors'] != '')):
            errMsg = 'Duplicati error(s) on backup job\n'
            errMsg += 'Message ID {} on {}\n'.format(msgParts['messageId'],
                                                     msg['date'])
            errMsg += 'Subject: {}\n\n'.format(msgParts['subject'])
            if statusParts['warnings'] != '':
                errMsg += 'Warnings:' + statusParts['warnings'] + '\n\n'
            if statusParts['errors'] != '':
                errMsg += 'Errors:' + statusParts['errors'] + '\n\n'

            globs.outServer.sendErrorEmail(errMsg)

        globs.log.write(
            3, 'endTimeStamp=[{}] beginTimeStamp=[{}]'.format(
                drdatetime.fromTimestamp(dateParts['endTimestamp']),
                drdatetime.fromTimestamp(dateParts['beginTimestamp'])))

        sqlStmt = self.buildEmailSql(msgParts, statusParts, dateParts)
        globs.db.execSqlStmt(sqlStmt)
        globs.db.dbCommit()

        return msgParts, statusParts
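
A self-contained sketch of the source/destination extraction performed on the subject line above. The srcregex, destregex, and delimiter values are hypothetical; the real ones come from the .rc file via globs.opts.

import re

srcregex, destregex, delimiter = r'\w+', r'\w+', '-'  # hypothetical .rc values
subject = 'Duplicati Backup report for Desktop-B2'

srcRegex = '{}{}'.format(srcregex, re.escape(delimiter))
destRegex = '{}{}'.format(re.escape(delimiter), destregex)

sourceComp = re.search(srcRegex, subject).group().split(delimiter)[0]  # 'Desktop'
destComp = re.search(destRegex, subject).group().split(delimiter)[1]   # 'B2'
print(sourceComp, destComp)
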
Code Example #7
File: rpt_bysource.py Project: jfparis/dupReport
def runReport(startTime):
    globs.log.write(1, 'rpt_bysource()')

    # Get header and column info
    nFields, fldDefs, reportOpts, rptCols, rptTits = report.initReportVars()

    # Print the report title
    msgHtml, msgText, msgCsv = report.rptTop(reportOpts, nFields)

    # Remove columns we don't need for this report
    # These are already part of the report logic processing & subheaders
    # We won't need to loop through them for the report fields
    rptCols.remove('source')

    # Print column titles if not printing for each section
    if reportOpts['repeatheaders'] is False:
        msgHtml, msgText, msgCsv = report.rptPrintTitles(
            msgHtml, msgText, msgCsv, rptCols)

    # Select sources from database
    dbCursor = globs.db.execSqlStmt(
        "SELECT DISTINCT source FROM backupsets ORDER BY source")
    srcSet = dbCursor.fetchall()
    globs.log.write(2, 'srcSet=[{}]'.format(srcSet))

    # Loop through backupsets table and get all the potential destinations
    for srcKey in srcSet:

        # Add Source title
        subHead = globs.optionManager.getRcOption('report', 'subheading')
        if subHead is not None:
            # Substitute subheading keywords
            subHead = subHead.replace('#SOURCE#', srcKey[0])
        if subHead is None or subHead == '':
            msgHtml += '<tr><td colspan="{}" align="center" bgcolor="{}"><b>{}:</b> {}</td></tr>\n'.format(
                nFields, reportOpts['subheadbg'], rptTits['source'], srcKey[0])
            msgText += '***** {}: {}*****\n'.format(rptTits['source'],
                                                    srcKey[0])
            msgCsv += '\"***** {}: {}*****\",\n'.format(
                rptTits['source'], srcKey[0])
        else:
            msgHtml += '<tr><td colspan="{}" align="center" bgcolor="{}">{}</td></tr>\n'.format(
                nFields, reportOpts['subheadbg'], subHead)
            msgText += '***** {} *****\n'.format(subHead)
            msgCsv += '\"***** {} *****\"\n'.format(subHead)

        # Print column titles if printing for each section
        if reportOpts['repeatheaders'] is True:
            msgHtml, msgText, msgCsv = report.rptPrintTitles(
                msgHtml, msgText, msgCsv, rptCols)

        sqlStmt = "SELECT destination, timestamp, examinedFiles, examinedFilesDelta, sizeOfExaminedFiles, fileSizeDelta, addedFiles, deletedFiles, modifiedFiles, filesWithError, \
            parsedResult, messages, warnings, errors FROM report WHERE source=\'{}\'".format(
            srcKey[0])
        if reportOpts['sortby'] == 'destination':
            sqlStmt += ' ORDER BY destination'
        else:
            sqlStmt += ' ORDER BY timestamp'

        dbCursor = globs.db.execSqlStmt(sqlStmt)
        reportRows = dbCursor.fetchall()
        globs.log.write(3, 'reportRows=[{}]'.format(reportRows))

        # Loop through each new activity for the source/destination and add to report
        for destination, timestamp, examinedFiles, examinedFilesDelta, sizeOfExaminedFiles, fileSizeDelta, \
            addedFiles, deletedFiles, modifiedFiles, filesWithError, parsedResult, messages, \
            warnings, errors in reportRows:

            # Get date and time from timestamp
            dateStr, timeStr = drdatetime.fromTimestamp(timestamp)

            # Print report fields
            # Each field takes up one column/cell in the table
            msgHtml += '<tr>'

            # The full list of possible fields in the report. printField() below will skip a field if it has been removed in the .rc file.
            titles = [
                'destination', 'date', 'time', 'files', 'filesplusminus',
                'size', 'sizeplusminus', 'added', 'deleted', 'modified',
                'errors', 'result'
            ]
            fields = [
                destination, dateStr, timeStr, examinedFiles,
                examinedFilesDelta, sizeOfExaminedFiles, fileSizeDelta,
                addedFiles, deletedFiles, modifiedFiles, filesWithError,
                parsedResult
            ]

            for ttl, fld in zip(titles, fields):
                msgHtml += report.printField(ttl, fld, 'html')
                msgText += report.printField(ttl, fld, 'text')
                msgCsv += report.printField(ttl, fld, 'csv')

            msgHtml += '</tr>\n'
            msgText += '\n'
            msgCsv += '\n'

            fields = [messages, warnings, errors]
            options = ['displaymessages', 'displaywarnings', 'displayerrors']
            backgrounds = ['jobmessagebg', 'jobwarningbg', 'joberrorbg']
            titles = ['jobmessages', 'jobwarnings', 'joberrors']
            # Print message/warning/error fields
            # Each of these spans all the table columns
            for fld, opt, bg, tit in zip(fields, options, backgrounds, titles):
                if ((fld != '') and (reportOpts[opt] == True)):
                    msgHtml += '<tr><td colspan="{}" align="center" bgcolor="{}"><details><summary>{}</summary>{}</details></td></tr>\n'.format(
                        nFields, reportOpts[bg], rptTits[tit], fld)
                    msgText += '{}: {}\n'.format(rptTits[tit], fld)
                    msgCsv += '\"{}: {}\",\n'.format(rptTits[tit], fld)

        # Show inactivity - Look for missing source/dest pairs in report
        dbCursor = globs.db.execSqlStmt(
            "SELECT destination, lastTimestamp, lastFileCount, lastFileSize FROM backupsets WHERE source = '{}' ORDER BY source"
            .format(srcKey[0]))
        missingRows = dbCursor.fetchall()
        for destination, lastTimestamp, lastFileCount, lastFileSize in missingRows:
            dbCursor = globs.db.execSqlStmt(
                'SELECT count(*) FROM report WHERE source=\"{}\" AND destination=\"{}\"'
                .format(srcKey[0], destination))
            countRows = dbCursor.fetchone()
            if countRows[0] == 0:
                # Calculate days since last activity
                diff = drdatetime.daysSince(lastTimestamp)
                lastDateStr, lastTimeStr = drdatetime.fromTimestamp(
                    lastTimestamp)
                msgHtml += '<tr>'
                msgHtml += report.printField('destination', destination,
                                             'html')
                msgHtml += '<td colspan="{}" align="center" bgcolor="{}"><i>No new activity. Last activity on {} at {} ({} days ago)</i></td>'.format(
                    nFields - 1, report.getLastSeenColor(reportOpts, diff),
                    lastDateStr, lastTimeStr, diff)
                msgHtml += '</tr>\n'

                msgText += report.printField('destination', destination,
                                             'text')
                msgText += '{}: No new activity. Last activity on {} at {} ({} days ago)\n'.format(
                    destination, lastDateStr, lastTimeStr, diff)

                msgCsv += report.printField('destination', destination, 'csv')
                msgCsv += '\"{}: No new activity. Last activity on {} at {} ({} days ago)\"\n'.format(
                    destination, lastDateStr, lastTimeStr, diff)

    # Add report footer
    msgHtml, msgText, msgCsv = report.rptBottom(msgHtml, msgText, msgCsv,
                                                startTime, nFields)

    # Return text & HTML messages to main program. It can decide which one it wants to use.
    return msgHtml, msgText, msgCsv
Code Example #8
File: dupapprise.py Project: helio4k/dupReport
    def sendNotifications(self):
        sqlStmt = "SELECT source, destination, parsedResult, messages, warnings, errors, timestamp FROM report ORDER BY source"
        dbCursor = globs.db.execSqlStmt(sqlStmt)
        reportRows = dbCursor.fetchall()

        for source, destination, parsedResult, messages, warnings, errors, timestamp in reportRows:
            globs.log.write(
                globs.SEV_NOTICE,
                function='Apprise',
                action='sendNotifications',
                msg=
                'Preparing Apprise message for {}-{}, parsedResult={} msglevel={}'
                .format(source, destination, parsedResult,
                        self.appriseOpts['msglevel']))

            # See if we need to send a notification based on the result status
            if self.appriseOpts['msglevel'] == 'warning':
                if parsedResult.lower() not in ('warning', 'failure'):
                    globs.log.write(
                        globs.SEV_NOTICE,
                        function='Apprise',
                        action='sendNotifications',
                        msg='Msglevel mismatch at warning level - skipping')
                    continue
            elif self.appriseOpts['msglevel'] == 'failure':
                if parsedResult.lower() != 'failure':
                    globs.log.write(
                        globs.SEV_NOTICE,
                        function='Apprise',
                        action='sendNotifications',
                        msg='Msglevel mismatch at failure level - skipping')
                    continue

            globs.log.write(globs.SEV_DEBUG,
                            function='Apprise',
                            action='sendNotifications',
                            msg='Apprise message is sendable.')

            newTitle = self.parseMessage(self.appriseOpts['title'], source,
                                         destination, parsedResult, messages,
                                         warnings, errors,
                                         drdatetime.fromTimestamp(timestamp))
            newBody = self.parseMessage(self.appriseOpts['body'], source,
                                        destination, parsedResult, messages,
                                        warnings, errors,
                                        drdatetime.fromTimestamp(timestamp))

            tLen = self.appriseOpts['titletruncate']
            if tLen != 0:
                newTitle = (
                    newTitle[:tLen]) if len(newTitle) > tLen else newTitle
            bLen = self.appriseOpts['bodytruncate']
            if bLen != 0:
                newBody = (newBody[:bLen]) if len(newBody) > bLen else newBody

            globs.log.write(
                globs.SEV_DEBUG,
                function='Apprise',
                action='sendNotifications',
                msg='Sending notification: Title=[{}] Body=[{}]'.format(
                    newTitle, newBody))
            result = self.appriseConn.notify(title=newTitle, body=newBody)
            globs.log.write(globs.SEV_NOTICE,
                            function='Apprise',
                            action='sendNotifications',
                            msg='Apprise sent. Result={}.'.format(result))

        return
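
A guess at how a connection like self.appriseConn might be built with the apprise library; the service URL and message text here are hypothetical, and in practice they would come from the Apprise-related settings in the .rc file.

import apprise

conn = apprise.Apprise()
conn.add('mailto://user:password@example.com')  # any Apprise-supported service URL (hypothetical)
result = conn.notify(title='dupReport', body='Backup Desktop-B2 completed with warnings')
print(result)  # True if at least one notification was delivered
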
Code Example #9
File: db.py Project: helio4k/dupReport
    def rollback(self, datespec):
        globs.log.write(globs.SEV_NOTICE, function='Database', action='rollback', msg='Rolling back database: spec={}'.format(datespec))

        # See if we're using a delta-based time spec (Issue #131)
        deltaParts = drdatetime.timeDeltaSpec(datespec)
        if deltaParts != False:
            today = datetime.now()
            globs.log.write(globs.SEV_DEBUG, function='Database', action='rollback', msg='Using delta timespec. Today={}'.format(today))
            for i in range(len(deltaParts)):
                tval = int(deltaParts[i][:-1])
                tspec = deltaParts[i][-1:]
                if tspec == 's': # Subtract seconds
                    today -= timedelta(seconds=tval)
                elif tspec == 'm':
                    today -= timedelta(minutes=tval)
                elif tspec == 'h':
                    today -= timedelta(hours=tval)
                elif tspec == 'd':
                    today -= timedelta(days=tval)
                elif tspec == 'w':
                    today -= timedelta(weeks=tval)
                globs.log.write(globs.SEV_DEBUG, function='Database', action='rollback', msg='Rolled back {}{}. Today now={}'.format(tval,tspec, today))
            newTimeStamp = today.timestamp()
        else:
            # Get timestamp for input date/time
            newTimeStamp = drdatetime.toTimestamp(datespec)

        # Delete all email records that happened after input datetime
        sqlStmt = 'DELETE FROM emails WHERE emailtimestamp > {}'.format(newTimeStamp)
        dbCursor = self.execSqlStmt(sqlStmt)

        # Delete all backup set records that happened after input datetime
        sqlStmt = 'SELECT source, destination FROM backupsets WHERE lastTimestamp > {}'.format(newTimeStamp)
        dbCursor = self.execSqlStmt(sqlStmt)
        setRows= dbCursor.fetchall()
        for source, destination in setRows:
            # Select largest timestamp from remaining data for that source/destination
            sqlStmt = 'select max(endTimeStamp), examinedFiles, sizeOfExaminedFiles, dupversion from emails where sourceComp = \'{}\' and destComp= \'{}\''.format(source, destination)
            dbCursor = self.execSqlStmt(sqlStmt)
            emailTimestamp, examinedFiles, sizeOfExaminedFiles, dupversion = dbCursor.fetchone()
            if emailTimestamp is None:
                # After the rollback, some srcdest pairs may have no corresponding entries in the database, meaning they were not seen until after the rollback period
                # We should remove these from the database, to return it to the state it was in before the rollback.
                globs.log.write(globs.SEV_NOTICE, function='Database', action='rollback', msg='Deleting {}{}{} from backupsets. Not seen until after rollback.'.format(source, globs.opts['srcdestdelimiter'], destination))
                sqlStmt = 'DELETE FROM backupsets WHERE source = \"{}\" AND destination = \"{}\"'.format(source, destination)
                dbCursor = self.execSqlStmt(sqlStmt)
            else:
                globs.log.write(globs.SEV_NOTICE, function='Database', action='rollback', msg='Resetting {}{}{} to {}'.format(source, globs.opts['srcdestdelimiter'], destination, drdatetime.fromTimestamp(emailTimestamp)))
                # Update backupset table to reflect rolled-back date
                sqlStmt = 'update backupsets set lastFileCount={}, lastFileSize={}, lastTimestamp={}, dupversion=\'{}\' where source = \'{}\' and destination = \'{}\''.format(examinedFiles, sizeOfExaminedFiles, emailTimestamp, dupversion, source, destination)
                dbCursor = self.execSqlStmt(sqlStmt)
            
        self.dbCommit()
        return None
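
A minimal sketch of the delta-based rollback spec handled above (Issue #131), assuming drdatetime.timeDeltaSpec() yields parts like ['1w', '2d', '3h'] from a string such as '1w2d3h'; the parsed list here is hypothetical.

from datetime import datetime, timedelta

units = {'s': 'seconds', 'm': 'minutes', 'h': 'hours', 'd': 'days', 'w': 'weeks'}
deltaParts = ['1w', '2d', '3h']  # hypothetical parsed spec

rollbackPoint = datetime.now()
for part in deltaParts:
    tval, tspec = int(part[:-1]), part[-1]
    rollbackPoint -= timedelta(**{units[tspec]: tval})  # subtract each component in turn

newTimeStamp = rollbackPoint.timestamp()
print(rollbackPoint, newTimeStamp)
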
Code Example #10
def runReport(startTime):
    globs.log.write(1, 'rpt_bydate()')

    # Get header and column info
    nFields, fldDefs, reportOpts, rptCols, rptTits = report.initReportVars()

    # Print the report title
    msgHtml, msgText, msgCsv = report.rptTop(reportOpts, nFields)

    # Remove columns we don't need for this report
    # These are already part of the report logic processing & subheaders
    # We won't need to loop through them for the report fields
    rptCols.remove('date')

    # Print column titles if not printing for each section
    if reportOpts['repeatheaders'] is False:
        msgHtml, msgText, msgCsv = report.rptPrintTitles(
            msgHtml, msgText, msgCsv, rptCols)

    # Get earliest & latest timestamps in the report table
    dbCursor = globs.db.execSqlStmt("SELECT min(timestamp) FROM report"
                                    )  # Smallest timestamp in the report table
    currentTs = dbCursor.fetchone()[0]
    dbCursor = globs.db.execSqlStmt("SELECT max(timestamp) FROM report"
                                    )  # Largest timestamp in the report table
    highestTs = dbCursor.fetchone()[0]

    while currentTs <= highestTs:
        currentDate, currentTime = drdatetime.fromTimestamp(
            currentTs,
            dfmt=globs.opts['dateformat'],
            tfmt=globs.opts['timeformat'])
        currentDateBeginTs = drdatetime.toTimestamp(
            currentDate + ' 00:00:00',
            dfmt=globs.opts['dateformat'],
            tfmt=globs.opts['timeformat']
        )  # Convert the string into a timestamp
        currentDateEndTs = drdatetime.toTimestamp(
            currentDate + ' 23:59:59',
            dfmt=globs.opts['dateformat'],
            tfmt=globs.opts['timeformat']
        )  # Convert the string into a timestamp

        sqlStmt = "SELECT source, destination, timestamp, examinedFiles, examinedFilesDelta, sizeOfExaminedFiles, fileSizeDelta, \
            addedFiles, deletedFiles, modifiedFiles, filesWithError, parsedResult, messages, warnings, errors \
            FROM report WHERE timestamp >= {} AND timestamp <= {}".format(
            currentDateBeginTs, currentDateEndTs)
        if reportOpts['sortby'] == 'source':
            sqlStmt += ' ORDER BY source, destination'
        elif reportOpts['sortby'] == 'destination':
            sqlStmt += ' ORDER BY destination, source'
        else:
            sqlStmt += ' ORDER BY timestamp'

        dbCursor = globs.db.execSqlStmt(sqlStmt)
        reportRows = dbCursor.fetchall()
        globs.log.write(3, 'reportRows=[{}]'.format(reportRows))

        if len(reportRows) != 0:
            subHead = globs.optionManager.getRcOption('report', 'subheading')
            if subHead is not None:
                # Substitute subheading keywords
                subHead = subHead.replace('#DATE#', currentDate)
            if subHead is None or subHead == '':
                msgHtml += '<tr><td colspan="{}" align="center" bgcolor="{}"><b>{}:</b> {}</td></tr>\n'.format(
                    nFields, reportOpts['subheadbg'], rptTits['date'],
                    currentDate)
                msgText += '***** {}: {} *****\n'.format(
                    rptTits['date'], currentDate)
                msgCsv += '\"***** {}: {} *****\"\n'.format(
                    rptTits['date'], currentDate)
            else:
                msgHtml += '<tr><td colspan="{}" align="center" bgcolor="{}">{}</td></tr>\n'.format(
                    nFields, reportOpts['subheadbg'], subHead)
                msgText += '***** {} *****\n'.format(subHead)
                msgCsv += '\"***** {} *****\"\n'.format(subHead)

            # Print column titles if printing for each section
            if reportOpts['repeatheaders'] is True:
                msgHtml, msgText, msgCsv = report.rptPrintTitles(
                    msgHtml, msgText, msgCsv, rptCols)


        for source, destination, timestamp, examinedFiles, examinedFilesDelta, sizeOfExaminedFiles, fileSizeDelta, \
            addedFiles, deletedFiles, modifiedFiles, filesWithError, parsedResult, messages, \
            warnings, errors in reportRows:

            # Get date and time from timestamp
            dateStr, timeStr = drdatetime.fromTimestamp(timestamp)

            # Print report fields
            # Each field takes up one column/cell in the table
            msgHtml += '<tr>'

            # The full list of possible fields in the report. printField() below will skip a field if it has been removed in the .rc file.
            titles = [
                'source', 'destination', 'time', 'files', 'filesplusminus',
                'size', 'sizeplusminus', 'added', 'deleted', 'modified',
                'errors', 'result'
            ]
            fields = [
                source, destination, timeStr, examinedFiles,
                examinedFilesDelta, sizeOfExaminedFiles, fileSizeDelta,
                addedFiles, deletedFiles, modifiedFiles, filesWithError,
                parsedResult
            ]

            for ttl, fld in zip(titles, fields):
                msgHtml += report.printField(ttl, fld, 'html')
                msgText += report.printField(ttl, fld, 'text')
                msgCsv += report.printField(ttl, fld, 'csv')

            msgHtml += '</tr>\n'
            msgText += '\n'
            msgCsv += '\n'

            fields = [messages, warnings, errors]
            options = ['displaymessages', 'displaywarnings', 'displayerrors']
            backgrounds = ['jobmessagebg', 'jobwarningbg', 'joberrorbg']
            titles = ['jobmessages', 'jobwarnings', 'joberrors']
            # Print message/warning/error fields
            # Each of these spans all the table columns
            for fld, opt, bg, tit in zip(fields, options, backgrounds, titles):
                if ((fld != '') and (reportOpts[opt] == True)):
                    msgHtml += '<tr><td colspan="{}" align="center" bgcolor="{}"><details><summary>{}</summary>{}</details></td></tr>\n'.format(
                        nFields, reportOpts[bg], rptTits[tit], fld)
                    msgText += '{}: {}\n'.format(rptTits[tit], fld)
                    msgCsv += '\"{}: {}\"\n'.format(rptTits[tit], fld)

        # Move current timestamp ahead 1 second
        currentTs = currentDateEndTs + 1

    # Now see which systems didn't report in
    dbCursor = globs.db.execSqlStmt(
        "SELECT source, destination, lastTimestamp FROM backupsets ORDER BY source, destination"
    )
    setRows = dbCursor.fetchall()
    globs.log.write(3, 'setRows=[{}]'.format(setRows))

    # Flag to let us know if we need to print a header for missing backupsets
    hdrFlag = 0
    for source, destination, lastTimestamp in setRows:
        dbCursor = globs.db.execSqlStmt(
            "SELECT count(*) FROM report WHERE source = \'{}\' AND destination = \'{}\'"
            .format(source, destination))
        seenRows = dbCursor.fetchone()[0]
        globs.log.write(3, 'seenRows=[{}]'.format(seenRows))
        if seenRows == 0:  # Didn't get any rows for source/Destination pair. Add to report
            if hdrFlag == 0:
                msgHtml += '<tr><td colspan="{}" align="center" bgcolor="{}"><b>Missing Backup Sets</b></td></tr>\n'.format(
                    nFields, reportOpts['subheadbg'])
                msgText += 'Missing Backup Sets\n'
                msgCsv += '\"Missing Backup Sets\"\n'
                hdrFlag = 1

            diff = drdatetime.daysSince(lastTimestamp)
            lastDateStr, lastTimeStr = drdatetime.fromTimestamp(lastTimestamp)
            msgHtml += '<tr><td colspan="{}" align="center" bgcolor="{}">{} to {}: <i>No new activity. Last activity on {} at {} ({} days ago)</i></td></tr>\n'.format(
                nFields, reportOpts['noactivitybg'], source, destination,
                lastDateStr, lastTimeStr, diff)
            msgText += '{} to {}: No new activity. Last activity on {} at {} ({} days ago)\n'.format(
                source, destination, lastDateStr, lastTimeStr, diff)
            msgCsv += '\"{} to {}: No new activity. Last activity on {} at {} ({} days ago)\"\n'.format(
                source, destination, lastDateStr, lastTimeStr, diff)

    # Add report footer
    msgHtml, msgText, msgCsv = report.rptBottom(msgHtml, msgText, msgCsv,
                                                startTime, nFields)

    # Return text & HTML messages to main program. It can decide which one it wants to use.
    return msgHtml, msgText, msgCsv
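
A self-contained sketch of the day-by-day window walk used in rpt_bydate above, using plain datetime instead of the drdatetime helpers and the .rc date/time formats; the start and end timestamps are made up.

from datetime import datetime, timedelta

currentTs = datetime(2021, 3, 1, 14, 30).timestamp()  # hypothetical min(timestamp)
highestTs = datetime(2021, 3, 3, 9, 15).timestamp()   # hypothetical max(timestamp)

while currentTs <= highestTs:
    day = datetime.fromtimestamp(currentTs).date()
    begin = datetime.combine(day, datetime.min.time())         # 00:00:00
    end = begin + timedelta(hours=23, minutes=59, seconds=59)  # 23:59:59
    print('report window:', begin, '->', end)
    currentTs = end.timestamp() + 1  # move one second past the end of the day
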
Code Example #11
    def processNextMessage(self):
        globs.log.write(1, 'dremail.processNextMessage()')
        self.connect()

        # Increment message counter to the next message.
        # Skip for message #0 because we haven't read any messages yet
        self.nextEmail += 1

        msgParts = {}     # msgParts contains extracts of message elements
        statusParts = {}  # statusParts contains the individual lines from the Duplicati status emails
        dateParts = {}    # dateParts contains the date & time strings for the SQL Query

        # Check no-more-mail conditions. Either no new emails to get or gone past the last email on list
        if (self.newEmails == None) or (self.nextEmail == self.numEmails):
            return None

        if self.protocol == 'pop3':
            # Get message header
            server_msg, body, octets = self.server.top(
                (self.newEmails[self.nextEmail]) + 1, 0)
            globs.log.write(
                3, 'server_msg=[{}]  body=[{}]  octets=[{}]'.format(
                    server_msg, body, octets))
            if server_msg[:3].decode() != '+OK':
                globs.log.write(
                    1, 'ERROR getting message: {}'.format(self.nextEmail))
                return '<INVALID>'

            # Get date, subject, and message ID from headers
            msgParts['date'], msgParts['subject'], msgParts[
                'messageId'] = self.extractHeaders(body.decode('utf-8'))

        elif self.protocol == 'imap':
            # Get message header
            retVal, data = self.server.fetch(
                self.newEmails[self.nextEmail],
                '(BODY.PEEK[HEADER.FIELDS (DATE SUBJECT MESSAGE-ID)])')
            if retVal != 'OK':
                globs.log.write(
                    1, 'ERROR getting message: {}'.format(self.nextEmail))
                return '<INVALID>'
            globs.log.write(
                3,
                'Server.fetch(): retVal=[{}] data=[{}]'.format(retVal, data))

            msgParts['date'], msgParts['subject'], msgParts[
                'messageId'] = self.extractHeaders(data[0][1].decode('utf-8'))

        else:  # Invalid protocol spec
            globs.log.err('Invalid protocol specification: {}.'.format(
                self.protocol))
            return None

        # Log message basics
        globs.log.write(
            1, '\n*****\nNext Message: Date=[{}] Subject=[{}] Message-Id=[{}]'.
            format(msgParts['date'], msgParts['subject'],
                   msgParts['messageId']))

        # Check if any of the vital parts are missing
        if msgParts['messageId'] is None or msgParts['messageId'] == '':
            globs.log.write(1,
                            'No message-Id. Abandoning processNextMessage()')
            return '<INVALID>'
        if msgParts['date'] is None or msgParts['date'] == '':
            globs.log.write(1, 'No Date. Abandoning processNextMessage()')
            return msgParts['messageId']
        if msgParts['subject'] is None or msgParts['subject'] == '':
            globs.log.write(1, 'No Subject. Abandoning processNextMessage()')
            return msgParts['messageId']

        # See if it's a message of interest
        # Match subject field against 'subjectregex' parameter from RC file (Default: 'Duplicati Backup report for...')
        if re.search(globs.opts['subjectregex'], msgParts['subject']) == None:
            globs.log.write(
                1,
                'Message [{}] is not a Message of Interest. Can\'t match subjectregex from .rc file. Skipping message.'
                .format(msgParts['messageId']))
            return msgParts['messageId']  # Not a message of Interest

        # Get source & destination computers from email subject
        srcRegex = '{}{}'.format(globs.opts['srcregex'],
                                 re.escape(globs.opts['srcdestdelimiter']))
        destRegex = '{}{}'.format(re.escape(globs.opts['srcdestdelimiter']),
                                  globs.opts['destregex'])
        globs.log.write(
            3, 'srcregex=[{}]  destRegex=[{}]'.format(srcRegex, destRegex))
        partsSrc = re.search(srcRegex, msgParts['subject'])
        partsDest = re.search(destRegex, msgParts['subject'])
        if (partsSrc is None) or (
                partsDest is None
        ):  # Correct subject but delimiter not found. Something is wrong.
            globs.log.write(
                2,
                'SrcDestDelimiter [{}] not found in subject line. Skipping message.'
                .format(globs.opts['srcdestdelimiter']))
            return msgParts['messageId']

        # See if the record is already in the database, meaning we've seen it before
        if globs.db.searchForMessage(
                msgParts['messageId']):  # Is the message already in the database?
            # Mark the email as being seen in the database
            globs.db.execSqlStmt(
                'UPDATE emails SET dbSeen = 1 WHERE messageId = \"{}\"'.format(
                    msgParts['messageId']))
            globs.db.dbCommit()
            return msgParts['messageId']
        # Message not yet in database. Proceed.
        globs.log.write(
            1, 'Message ID [{}] does not yet exist in DB.'.format(
                msgParts['messageId']))

        dTup = email.utils.parsedate_tz(msgParts['date'])
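        # parsedate_tz() returns a 10-tuple (year, month, day, hour, minute, second, ...,
        # utc-offset-in-seconds) on success, or None if the Date: header can't be parsed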
        if dTup:
            # See if there's timezone info in the email header data. May be 'None' if no TZ info in the date line
            # TZ info is represented by seconds offset from UTC
            # We don't need to adjust the email date for time zone info now, since the date line in the email already accounts for the TZ.
            # All other calls to toTimestamp() should include timezone info
            msgParts['timezone'] = dTup[9]

            # Set date into a parseable string
            # It doesn't matter what date/time format we pass in (as long as it's valid)
            # When it comes back out later, it'll be parsed into the user-defined format from the .rc file
            # For now, we'll use YYYY/MM/DD HH:MM:SS
            xDate = '{:04d}/{:02d}/{:02d} {:02d}:{:02d}:{:02d}'.format(
                dTup[0], dTup[1], dTup[2], dTup[3], dTup[4], dTup[5])
            dtTimStmp = drdatetime.toTimestamp(
                xDate, dfmt='YYYY/MM/DD',
                tfmt='HH:MM:SS')  # Convert the string into a timestamp
            msgParts['emailTimestamp'] = dtTimStmp
            globs.log.write(
                3, 'emailDate=[{}]-[{}]'.format(
                    dtTimStmp, drdatetime.fromTimestamp(dtTimStmp)))

        msgParts['sourceComp'] = re.search(
            srcRegex, msgParts['subject']).group().split(
                globs.opts['srcdestdelimiter'])[0]
        msgParts['destComp'] = re.search(destRegex,
                                         msgParts['subject']).group().split(
                                             globs.opts['srcdestdelimiter'])[1]
        globs.log.write(3, 'sourceComp=[{}] destComp=[{}] emailTimestamp=[{}] subject=[{}]'.format(msgParts['sourceComp'], \
            msgParts['destComp'], msgParts['emailTimestamp'], msgParts['subject']))

        # Search for source/destination pair in database. Add if not already there
        retVal = globs.db.searchSrcDestPair(msgParts['sourceComp'],
                                            msgParts['destComp'])

        # Extract the body (payload) from the email
        if self.protocol == 'pop3':
            # Retrieve the whole message. This is redundant with the previous .top() call and results in extra data downloads.
            # In cases where there is a mix of Duplicati and non-Duplicati emails to read, this actually saves time at scale.
            # In cases where all the emails on the server are Duplicati emails, it does, in fact, slow things down a bit.
            # POP3 is a stupid protocol. Use IMAP if at all possible.
            server_msg, body, octets = self.server.retr(
                (self.newEmails[self.nextEmail]) + 1)
            msgTmp = ''
            for j in body:
                msgTmp += '{}\n'.format(j.decode("utf-8"))
            msgBody = email.message_from_string(
                msgTmp)._payload  # Get message body
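            # note: ._payload is the private attribute behind Message.get_payload()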
        elif self.protocol == 'imap':
            # Retrieve just the body text of the message.
            retVal, data = self.server.fetch(self.newEmails[self.nextEmail],
                                             '(BODY.PEEK[TEXT])')

            # Fix issue #71
            # From https://stackoverflow.com/questions/2230037/how-to-fetch-an-email-body-using-imaplib-in-python
            # "...usually the data format is [(bytes, bytes), bytes] but when the message is marked as unseen manually,
            # the format is [bytes, (bytes, bytes), bytes] – Niklas R Sep 8 '15 at 23:29
            # Need to check if len(data)==2 (normally unread) or ==3 (manually set unread)
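            # Sketch of the two shapes (illustrative only):
            #   len(data) == 2 : [(header_bytes, body_bytes), b')']
            #   len(data) == 3 : [extra_bytes, (header_bytes, body_bytes), b')']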
            globs.log.write(3, 'dataLen={}'.format(len(data)))
            if len(data) == 2:
                msgBody = data[0][1].decode('utf-8')  # Get message body
            else:
                msgBody = data[1][1].decode('utf-8')  # Get message body

        globs.log.write(3, 'Message Body=[{}]'.format(msgBody))

        # Go through each element in lineParts{}, get the value from the body, and assign it to the corresponding element in statusParts{}
        for section, regex, flag, typ in lineParts:
            statusParts[section] = self.searchMessagePart(
                msgBody, regex, flag, typ)  # Get the field parts

        # Adjust fields if not a clean run
        globs.log.write(
            3, "statusParts['failed']=[{}]".format(statusParts['failed']))
        if statusParts['failed'] == '':  # Looks like a good run
            # These fields can be included in parentheses in later versions of Duplicati
            # For example:
            #   SizeOfModifiedFiles: 23 KB (23556)
            #   SizeOfAddedFiles: 10.12 KB (10364)
            #   SizeOfExaminedFiles: 44.42 GB (47695243956)
            #   SizeOfOpenedFiles: 33.16 KB (33954)
            # Extract the parenthesized value (if present) or the raw value (if not)
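            # e.g. parenOrRaw('44.42 GB (47695243956)') yields the parenthesized '47695243956',
            # while parenOrRaw('47695243956') just returns the value as-is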
            dt, tm = globs.optionManager.getRcSectionDateTimeFmt(
                msgParts['sourceComp'], msgParts['destComp'])
            dateParts['endTimestamp'] = self.parenOrRaw(
                statusParts['endTimeStr'],
                df=dt,
                tf=tm,
                tz=msgParts['timezone'])
            dateParts['beginTimestamp'] = self.parenOrRaw(
                statusParts['beginTimeStr'],
                df=dt,
                tf=tm,
                tz=msgParts['timezone'])
            globs.log.write(
                3,
                'Email indicates a successful backup. Date/time is: end=[{}]  begin=[{}]'
                .format(dateParts['endTimestamp'],
                        dateParts['beginTimestamp']))

            statusParts['sizeOfModifiedFiles'] = self.parenOrRaw(
                statusParts['sizeOfModifiedFiles'])
            statusParts['sizeOfAddedFiles'] = self.parenOrRaw(
                statusParts['sizeOfAddedFiles'])
            statusParts['sizeOfExaminedFiles'] = self.parenOrRaw(
                statusParts['sizeOfExaminedFiles'])
            statusParts['sizeOfOpenedFiles'] = self.parenOrRaw(
                statusParts['sizeOfOpenedFiles'])

        else:  # Something went wrong. Let's gather the details.
            statusParts['errors'] = statusParts['failed']
            statusParts['parsedResult'] = 'Failure'
            statusParts['warnings'] = statusParts['details']
            globs.log.write(2, 'Errors=[{}]'.format(statusParts['errors']))
            globs.log.write(2, 'Warnings=[{}]'.format(statusParts['warnings']))

            # Since the backup job report never ran, we'll use the email date/time as the report date/time
            dateParts['endTimestamp'] = msgParts['emailTimestamp']
            dateParts['beginTimestamp'] = msgParts['emailTimestamp']
            globs.log.write(
                3,
                'Email indicates a failed backup. Replacing date/time with: end=[{}]  begin=[{}]'
                .format(dateParts['endTimestamp'],
                        dateParts['beginTimestamp']))

        # Replace commas (,) with newlines (\n) in message fields. Sqlite really doesn't like commas in SQL statements!
        for part in ['messages', 'warnings', 'errors']:
            if statusParts[part] != '':
                statusParts[part] = statusParts[part].replace(',', '\n')

        # If we're just collecting and get a warning/error, we may need to send an email to the admin
        if (globs.opts['collect'] is
                True) and (globs.opts['warnoncollect'] is True) and (
                    (statusParts['warnings'] != '') or
                    (statusParts['errors'] != '')):
            errMsg = 'Duplicati error(s) on backup job\n'
            errMsg += 'Message ID {} on {}\n'.format(msgParts['messageId'],
                                                     msgParts['date'])
            errMsg += 'Subject: {}\n\n'.format(msgParts['subject'])
            if statusParts['warnings'] != '':
                errMsg += 'Warnings:' + statusParts['warnings'] + '\n\n'
            if statusParts['errors'] != '':
                errMsg += 'Errors:' + statusParts['errors'] + '\n\n'

            globs.outServer.sendErrorEmail(errMsg)

        globs.log.write(
            3, 'Resulting timestamps: endTimeStamp=[{}] beginTimeStamp=[{}]'.
            format(drdatetime.fromTimestamp(dateParts['endTimestamp']),
                   drdatetime.fromTimestamp(dateParts['beginTimestamp'])))

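        # Build the INSERT statement for this email from the parsed fields and commit it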
        sqlStmt = self.buildEmailSql(msgParts, statusParts, dateParts)
        globs.db.execSqlStmt(sqlStmt)
        globs.db.dbCommit()

        return msgParts['messageId']