Example no. 1
0
def main(filterOnTime=True):
    '''Run all subroutines necessary to process the console log.
    params:
    <filterOnTime>: if False, the timestamp of the last run in the
    config file is ignored, i.e. all entries will be processed
    regardless of age.
    '''

    # prepare limits for time filter
    period = wdlib.set_timefilter('last_consolelog', filterOnTime)

    # locate logfile
    conspath = get_consolelog_path()
    if not conspath:
        err('No console logfile specified. Aborted.\n')
        return False

    # wake up operator
    print wdlib.version_header(__file__, conspath, period)

    # main processing step:
    # parse console log and print results
    ntotal, nskipped = parse_consolelog(period, conspath)
    if not ntotal:
        return False

    # log status at end of processing
    wdlib.log_status('last_consolelog', is_OK=True)

    wdlib.info(
        'console log: lines read/skipped/parsed: %s %s %s' %
        (wdlib.pr(ntotal), wdlib.pr(nskipped), wdlib.pr(ntotal - nskipped)))

    return True  # main()
Example no. 2
0
def main(filterOnTime=True):
    ''' Check /etc/passwd for anomalies.
    file is parsed, results written to output file, printed.
    '''

    # prepare limit for time filter
    period = wdlib.set_timefilter('last_passwd', filterOnTime)

    # get filename
    pwdpath = ''
    conf = wdlib.check_conf()
    if conf:
        pwdpath = wdlib.abspath(conf.get('PATHS', 'passwd'))

    # error checking:
    if not pwdpath:
        err('check_passwd: Cannot find passwd file "%s". Aborted.\n' % pwdpath)
        return False

    if wdlib.has_wildcards(pwdpath):
        err('check_passwd: No wildcards allowed ("%s")! Aborted.\n' % pwdpath)
        return False

    print wdlib.version_header(__file__, pwdpath, period)

    if parse_passwd(pwdpath, period):
        if write_results(pwdpath, outputname):
            reportOn_passwd(outputname)
        else:
            wdlib.info('write_results fails!')
    else:
        wdlib.info('parse_passwd: nothing found!')

    # log status at end of processing
    wdlib.log_status('last_passwd', is_OK=True)
Example no. 3
0
def cleanup(failed=True):
    ''' Log the run status, restore stdout if necessary, then exit.

    params:
    <failed>: True when the run is aborting because of an error.

    Exits the interpreter: status 1 on failure, 0 on success.
    '''

    # log status at end of processing
    wdlib.log_status('daily_run', is_OK=not failed)

    wdlib.info('')

    # restore output if redirected
    wdlib.redirect_stdout('')

    # BUG FIX: the previous `sys.exit(not failed)` inverted the shell
    # convention -- a successful run (failed=False) exited with status 1
    # and a failed run exited with status 0. Exit 1 on failure, 0 on
    # success.
    sys.exit(1 if failed else 0)
Example no. 4
0
def main(filterOnTime=True, sourcesToDo=all_sources):
    '''Run all subroutines necessary to process the syslog.
    params:
    <filterOnTime>: if False, the timestamp of the last run in the
    config file is ignored, i.e. all entries will be processed
    regardless of age.
    <sourcesToDo>: list of sources to process; defaults to all
    '''

    # prepare limit for time filter
    period = wdlib.set_timefilter('last_syslog', filterOnTime)

    # locate logfile
    slpath = get_syslog_path()
    if not slpath:
        err('scan_syslog: Cannot find syslog file "%s". Aborted.\n' % slpath)
        return False

    # wake up operator
    print wdlib.version_header(__file__, slpath, period)

    # split syslog into tmp files, one for each source
    skipIfNotParsed = wdlib.boolfromconf('CHECKVALUES', 'SYSLOG_SKIP_UNKNOWN')

    ntotal, nskipped = split_syslog(period, slpath, skipIfNotParsed,
                                    sourcesToDo)
    wdlib.info(
        'syslog: lines read/skipped/parsed: %s %s %s' %
        (wdlib.pr(ntotal), wdlib.pr(nskipped), wdlib.pr(ntotal - nskipped)))

    if not ntotal:
        return False

    # parse the data files, write output files, delete input tmp files
    if not parse_allsources():  # exec parser
        return False

    # read output files and create report
    reportOn_allsources()

    # log status at end of processing
    wdlib.log_status('last_syslog', is_OK=True)

    return True  # main()
Example no. 5
0
def reportOn_allsources():
    ''' Report routine wrapper.

    Open files, call the specific report routine for each source
    (reportOn_<source>), close files.
    '''

    global sources

    outpath = wdlib.out_path()
    # see which output files exist
    for source in sources:
        d = sources[source]
        for part in ('_FAIL', '_OK'):
            d['fname'] = fname = os.path.join(outpath, d['name'] + part)
            # missing files are skipped; empty ones are presumed to have
            # been deleted by the parsing step already
            if not os.path.isfile(fname):
                continue
            d['handles'] = fh = wdlib.openfile(fname)
            if not fh:
                wdlib.info('report: cannot open %s\n' % fname)
                continue
            # dispatch to the source-specific routine via a direct
            # globals() lookup instead of eval(): same effect, but no
            # string evaluation, and a typo fails loudly with KeyError
            reporter = globals()['reportOn_%s' % source]
            reporter(d)
            fh.close()
Example no. 6
0
def parse_consolelog(period, conspath=''):
    ''' Find logfile, parse content, write results to files.

    <period>: (start, end) pair of serial times (seconds since epoch);
              only events with start <= logtime < end are processed.
    <conspath>: path of the console logfile to read.

    Returns (total_lines_read, lines_skipped); (0, 0) signals an error
    (logfile unreadable or an output file could not be created).
    '''

    # list of counting dicts, descriptions and filenames
    global _events

    # REs
    # NOTE(review): the verbose patterns below place (?ix) mid-pattern;
    # that is accepted here (Python 2) but is an error on Python 3.11+ --
    # keep in mind if this code is ever ported.
    isatime = re.compile(r'^\d?\d[:]\d\d$')  # contains a time [H]H:mm
    isadate = re.compile(r'''               # contains a date YYYY-[M]M-[D]D
        (?ix)                # ignore case, allow verbose RE
        ^
        (?P<year>\d{4,4})
        [-]
        (?P<month>\d{1,2})
        [-]
        (?P<day>\d{1,2})
        $
    ''')

    islogon = re.compile(r'''
        (?ix)                # ignore case, allow verbose RE
        ^\s*                 # skip w.sp. from BOL
        LOGON\s+for          # marker w/ embedded white space
        [^"]*["]             # until quote
        (?P<user>[^,]*)      # group 'user' not containg comma
        [,]                  # one comma
        (?P<queue>[^"]*)     # up to quote
        ["]
        \s+on\s+LDEV\s+      # marker w/ embedded white spacemarker
        (?P<ldev>\d+)        # group 'ldev': 1... numbers only
        \D*$                 # any non-numeric rest to EOL
    ''')

    islogoff = re.compile(r'''
        (?ix)                # ignore case, allow verbose RE
        ^\s*                 # skip w.sp. from BOL
        LOGOFF\s+on\s+       # marker w/ trailing w.sp.
        LDEV\s+[#]           # marker w/ embedded white space
        (?P<ldev>[\d]+)      # group 'ldev': 1... numbers only
        \D*$                 # any non-numeric rest to EOL
    ''')

    iswarning = re.compile(r'''
        (?ix)                # ignore case, allow verbose RE
        ^.*                  # skip junk from BOL
        WARNING:\s+          # marker w/ trailing w.sp.
        (?P<notice>.+[^\s])\s+
        USER\s+              # marker w/ trailing w.sp.
        (?P<user>[^\s]+)
        .*$
    ''')

    # one dict per jobtype (sessions, jobs), keyed by PID, holding the
    # logon info until the matching logoff arrives
    logons = [{}, {}]  # for collecting jobs/sessions:
    #     {logtm,PID,user,queue,LDEV}

    _events = [  # indices: [jobtype=session|job|warning][eventtype]
        [  # sessions
            {
                'name': 'sessions_OK',
                'desc': 'sessions completed',
                'out': []
            }, {
                'name': 'sessions_nologon',
                'desc': 'sessions without logon',
                'out': []
            }, {
                'name': 'sessions_nologoff',
                'desc': 'sessions without logoff',
                'out': []
            }
        ],
        [  # jobs
            {
                'name': 'jobs_OK',
                'desc': 'jobs completed',
                'out': []
            }, {
                'name': 'jobs_nologon',
                'desc': 'jobs without logon',
                'out': []
            }, {
                'name': 'jobs_nologoff',
                'desc': 'jobs without logoff',
                'out': []
            }
        ],
        [  # warnings
            # d is a list, not a dict!
            {
                'name': 'console_warnings',
                'desc': 'console warnings',
                'out': []
            }
        ]
    ]
    # jobtypes
    jt_session = 0
    jt_job = 1
    jt_warn = 2
    jobtypes = (jt_session, jt_job, jt_warn)
    # eventtypes
    ev_completed = 0
    ev_nologon = 1
    ev_nologoff = 2
    ev_warn = 0
    eventtypes = (ev_completed, ev_nologon, ev_nologoff)
    # column headers written into each output file, indexed by eventtype
    ev_legends = [
        '%-20s %10s %-10s %-15s %-4s %s' %
        ('login', 'duration', 'user', 'queue', 'LDEV', 'PID'),
        '%-20s %-4s %s' % ('logoff', 'LDEV', 'PID'),
        '%-20s %-10s %-15s %-4s %s' %
        ('login', 'user', 'queue', 'LDEV', 'PID'),
    ]

    # as the console.log records times only, not dates,
    # we'll append the (creation) date of the file itself
    # approximation: use the modified time minus some margin
    # get the date before opening the file!
    if not wdlib.has_wildcards(conspath):
        sdate = os.path.getmtime(conspath) - (3 * 3600.)
    else:
        sdate = wdlib._curserial - (3 * 3600.)
    # truncate to midnight; logline's hh:mm will be added then
    startdate = wdlib.day_at_midnight(sdate)  # is a serial (sec since epoch)
    wdlib.info('\nparse_consolelog: assumed starting date of log entries: %s\n' \
        % wdlib.datestr(startdate,'%Y-%m-%d')) # debug

    log = wdlib.openfile(conspath)
    if not log:
        err('Cannot open %s. Aborted.\n' % conspath)
        return 0, 0  # error abort

    # count line total for displaying percentage during scan
    # (reads the file twice in verbose mode: once to count, once to parse)
    if wdlib.verbose:
        linescounted = 0
        for line in log:
            linescounted += 1
        log.close()
        log = wdlib.openfile(conspath)
        step = linescounted // 10
        if not step: step = 2

    # create one output file per (jobtype, eventtype) pair and write its
    # header; filenames and handles are stored back into _events
    outpath = wdlib.out_path()
    for jt in jobtypes:
        for et in eventtypes:
            if et < len(_events[jt]):
                this = _events[jt][et]
                this['fname'] = fname = os.path.join(outpath, this['name'])
                this['fh'] = fh = wdlib.createfile(fname)
                if not fh:
                    err('parse_consolelog: cannot create %s. Aborted.\n' %
                        fname)
                    return 0, 0  # error abort

                # write header
                legend = ev_legends[et]
                if jt == jt_warn:
                    legend = '%-20s %-30s %s' % ('date', 'warning', 'user')
                wdlib.write_header(fh, this['desc'], legend)

    # -----------
    # start processing

    lasttm = 0.  # for detecting day rollover
    one_day = wdlib.one_day  # increment at day rollover
    nlines = ntotal = 0

    # NOTE(review): <timefmt> used below is presumably a module-level
    # time-format string defined elsewhere in this file -- confirm.
    for line in log:  # read including EOL char
        ntotal += 1

        # display count up
        # can't use info() here because of \r usage
        if wdlib.verbose:
            if (ntotal % step) == 0:
                print >> sys.stderr, '%8s\r' % wdlib.pr(ntotal),

        if line.isspace():  # ...if only whitespace...
            continue

        part = line.split('/', 3)  # need time, job ID, ID2, msg
        part = [x.strip() for x in part]

        # more checks on validity

        timestamp = part[0]
        # timestamp valid?
        if not isatime.match(timestamp):
            continue

        # recognize day rollovers before skipping lines

        # artificial timestamps might be inserted to get the 'real'
        # starting date, look for these:
        # format: "HH:mm/YYYY-MM-DD/current timestamp"
        if len(part) >= 3 and part[2] == "current timestamp":
            tm = isadate.match(part[1])
            if tm:
                # tm=(tm_year,tm_mon,tm_day,tm_hour,tm_min,tm_sec,tm_wday,tm_yday,tm_isdst)
                current = time.localtime()  # need some values for fill-in...
                yr = int(tm.group('year'))
                mo = int(tm.group('month'))
                dy = int(tm.group('day'))
                startdate = time.mktime((
                    yr,
                    mo,
                    dy,
                    0,
                    0,
                    0,  # at midnight
                    current[6],
                    current[7],
                    current[8]  # wd, yd, isdst
                ))
                wdlib.info(
                    'parse_consolelog: NEW assumed starting date of log \
entries: %s 00:00' % wdlib.datestr(startdate, '%Y-%m-%d'))

            # put this timestamp into lasttm to catch a possible rollover!!
            lasttm = startdate + timeserial(timestamp)

            # now discard this line
            continue

        # timestamp contains hour and minute only
        # and might repeat within a logfile
        # append a day,month,year from the starting date
        logtm = startdate + timeserial(timestamp)
        # increment day if this time is earlier than previous
        # last minute might be wrong, tolerate jitter
        # (120 s of tolerance so slightly out-of-order entries do not
        # trigger a false rollover)
        if logtm < (lasttm - 120.):
            startdate += one_day
            logtm += one_day
        lasttm = logtm  # keep for next loop

        # -----------
        # time filter
        if not period[0] <= logtm < period[1]:
            continue

        # skip line if this is not a valid entry for a job or a session
        if len(part) < 4:
            continue
        ID1 = part[1]

        # strip a leading backslash from the job/session ID
        if ID1[0] == '\\':
            ID1 = ID1[1:]

        # classify the line by its ID prefix:
        # '#S...' = session, '#J...' = job, '#????' = warning
        jobtype = jt_session
        if not ID1.startswith('#S'):
            jobtype = jt_job
            if not ID1.startswith('#J'):
                jobtype = jt_warn
                if not ID1.startswith('#????'):
                    continue

        # line OK, read contents
        nlines += 1

        # ----------- next part
        # part[2]: second process ID
        PID = ID1 + '/' + part[2]

        # ----------- next part
        # evaluate REs only if necessary
        msg = part[3]
        a_logon = islogon.match(msg)
        if not a_logon:
            a_logoff = islogoff.match(msg)
            if not a_logoff:
                a_warning = iswarning.match(msg)
                if not a_warning:
                    # not of interest here, skip
                    nlines -= 1
                    continue
        if a_logon:
            # process LOGON
            user = a_logon.group('user')
            queue = a_logon.group('queue')
            ldev = a_logon.group('ldev')
            # just save infos and wait for logoff event
            logons[jobtype][PID] = (logtm, user, queue, ldev, PID)
        elif a_logoff:
            # process LOGOFF
            if PID in logons[jobtype]:
                logtm_in, user, queue, ldev, dummy = logons[jobtype][PID]
                del logons[jobtype][PID]
                # output
                out = _events[jobtype][ev_completed]['out']
                out.append('%-20s %-10s %-10s %-15s %-4s %s\n' %
                           (wdlib.datestr(logtm_in, timefmt),
                            wdlib.elapsedstr(logtm - logtm_in), user, queue,
                            ldev, PID))
            else:
                # logoff without logon
                ldev = a_logoff.group('ldev')
                # output
                out = _events[jobtype][ev_nologon]['out']
                out.append('%-20s %-4s %s\n' %
                           (wdlib.datestr(logtm, timefmt), ldev, PID))
        elif a_warning:
            # record logtm, user, notice
            notice = a_warning.group('notice')
            user = a_warning.group('user')
            # output
            out = _events[jobtype][ev_warn]['out']
            out.append('%-20s %-30s %s\n' %
                       (wdlib.datestr(logtm, timefmt), notice, user))

    log.close()

    # remaining logons are non-logoff events
    for jobtype in (jt_session, jt_job):
        inlist = logons[jobtype].values()
        out = _events[jobtype][ev_nologoff]['out']
        for x in inlist:
            (logtm, user, queue, ldev, PID) = x
            out.append('%-20s %-10s %-15s %-4s %s\n' %
                       (wdlib.datestr(logtm, timefmt), user, queue, ldev, PID))

    # actually write the files; delete if empty
    for e in _events:
        for j in e:
            fh = j['fh']
            _list = j['out']
            if not len(_list):
                fh.close()
                wdlib.rm(j['fname'])
            else:
                _list.sort()
                for line in _list:
                    fh.write(line)
                fh.close()

    # output: counts and file references
    for e in _events:
        for j in e:
            n = len(j['out'])
            if n:
                print '%-30s: %7d (see file %s)' % (j['desc'], n, j['name'])
            else:  # file empty
                print '%-30s: %7d (%s)' % (j['desc'], 0, 'no output')

    # ----------------
    # done processing CONSOLE.LOG
    return (ntotal, ntotal - nlines)
Example no. 7
0
def main():
    ''' Top-level driver.

    Prepare the folder structure, call the scanning modules selected on
    the command line (all of them by default), create the report, make
    copies and mail the report.

    Exits via sys.exit() or cleanup() on fatal configuration errors;
    otherwise terminates through cleanup(failed=False).
    '''

    # ----------------
    # process command line
    # all options except 'help' handled later
    # 'help' will display usage and exit silently

    # when adding modules, modify list modules[] below!

    usage = __doc__ + '\nusage: %prog [options] file(s), -h for help on options'
    p = optparse.OptionParser(usage=usage)
    add = p.add_option  # =a shortcut
    add('-s',
        '--syslog',
        action='store_true',
        dest='opt_s',
        default=False,
        help='scan syslog.log ONLY')
    add('-c',
        '--consolelog',
        action='store_true',
        dest='opt_c',
        default=False,
        help='scan console.log ONLY')
    add('-l',
        '--last',
        action='store_true',
        dest='opt_l',
        default=False,
        help='scan login records ONLY')
    add('-p',
        '--passwd',
        action='store_true',
        dest='opt_p',
        default=False,
        help='scan /etc/passwd ONLY')
    add('-!',
        '--all',
        action='store_false',
        dest='filterOnTime',
        default=True,
        help='do NOT filter on timestamp')
    add('-v',
        '--verbose',
        action='store_true',
        dest='verbose',
        default=False,
        help='enable some more info on stderr')
    add('-o',
        '--output',
        action='store',
        dest='file',
        default='',
        type='string',
        help='redirect output to FILE')
    add('-f',
        '--conf',
        action='store',
        dest='confpath',
        default='',
        type='string',
        help='use as configuration file')
    add('-D',
        '--DEBUG',
        action='store_true',
        dest='DEBUG',
        default=False,
        help='enable debug msgs')

    # parse command line
    (options, args) = p.parse_args()

    # ----------------
    # verbosity: explicit flag wins; otherwise verbose only when stderr
    # is a terminal (i.e. not redirected)
    if options.verbose:
        wdlib.verbose = True
    else:
        # if stderr is redirected, verbose if False
        try:
            wdlib.verbose = sys.stderr.isatty()
        except AttributeError:
            pass  # leave it at False

    debugging = options.DEBUG

    # ----------------
    # process run_module boolean options
    # (module entry point, was-it-selected flag) pairs
    modules = [
        (scan_syslog.main, options.opt_s),
        (scan_consolelog.main, options.opt_c),
        (scan_last.main, options.opt_l),
        (check_passwd.main, options.opt_p),
    ]
    # default action w/o run_options is to run all scans
    mods = [m[0] for m in modules if m[1]]
    # if all switches are False, mods is now empty;
    # that means to run them all:
    if not mods:
        mods = [m[0] for m in modules]

    # ----------------
    # determine the configuration file path (path + filename)
    # MANDATORY! or processing will be aborted soon.
    # if empty, some default will be used, see wdlib.py
    # returns the current valid confpath
    confpath = wdlib.set_confpath(options.confpath)

    # ----------------
    # check config file <confpath> for existence
    if not os.path.isfile(confpath) or not wdlib.check_conf():
        err('\n\n')
        err('%s\n' % sep)
        err('%s\n' % __version__)
        err('    WDUX configuration file is missing!\n')
        err('    specified: %s\n' % (confpath))
        err('    Aborted.\n')
        err('%s\n' % sep)
        err('\n\n')
        sys.exit(1)

    # ----------------
    # convert conf file
    convert_conf()

    # ----------------
    # check all folders and create if missing
    # (cleanup() logs the failure and exits; it does not return)
    if not create_folder_structure():
        cleanup()

    # check mandatory conf options
    if not has_all_conf_options(confpath):
        sys.exit(1)

    # ----------------
    # get cluster status
    get_clusterstatus()

    # ----------------
    # for the operator
    if wdlib.verbose:
        wdux_header(confpath)
        print_overview(mods, wdlib.verbose)

    # ----------------
    # set or get the reportname
    reportname = options.file or get_reportname()

    # ----------------
    # from here on, all print statements print to file <reportname>
    # sys.stderr still goes to sys.stderr
    if not debugging:
        if not wdlib.redirect_stdout(reportname):
            err('wdux: cannot redirect output to report file "%s"! Aborted.\n'
                % reportname)
            sys.exit(1)

    # ----------------
    # here: for the report
    wdux_header(confpath)
    print_overview(mods, verbose=True)

    # ----------------
    # run the modules
    # each entry point takes the filterOnTime flag as its argument
    for module in mods:
        module(options.filterOnTime)

    # ----------------
    wdux_endheader()

    # ----------------
    # undo redirection; otherwise os.system() call fails!
    if not debugging:
        wdlib.redirect_stdout('')

    # ----------------
    # duplicate the files
    # this doesn't make sense if the filename is specified for this run
    if not options.file and not debugging:
        make_copies(reportname)

    # ----------------
    # mail the report
    if wdlib.verbose:
        wdlib.info(
            'hint: mail is not sent if verbose or stderr is not redirected!')
    else:
        sendmail(reportname)

    # ----------------
    # done
    cleanup(failed=False)  # successful exit