Exemplo n.º 1
0
    def fdparse(cls, value):
        """
        Parse a file or directory name and type ('f' or 'd') from hsi output.
        Return a Checkable built from the parsed values if successful, or
        None if the line matches neither recognized format.

        In "ls -P" output, directory lines look like

            DIRECTORY       /home/tpb/bearcat

        File lines look like (cos is '5081')

            FILE    /home/tpb/halloy_test   111670  111670  3962+300150
                X0352700        5081    0       1  01/29/2004       15:25:02
                03/19/2012      13:09:50

        If the file is empty, it looks like:

            FILE    /home/tpb/hic_test/empty     0       0            0
                                6056    0       1  07/10/2014       14:30:40
                07/10/2014      14:30:40

        The fields are separated by tab characters, which is probably the
        easiest way to parse the line, especially when some values are
        missing.
        """
        # Lazily build (once per class) the regex for "ls -l"-style lines
        # and the map from hsi type labels to single-letter type codes.
        # Raw strings keep \s / \d from being treated as (deprecated)
        # string escape sequences.
        if not hasattr(cls, 'rgxl'):
            cls.rgxl = re.compile(r"(.)([r-][w-][x-]){3}(\s+\S+){3}"
                                  r"(\s+\d+)(\s+\w{3}\s+\d+\s+[\d:]+)"
                                  r"\s+(\S+)")
            cls.map = {'DIRECTORY': 'd',
                       'd': 'd',
                       'FILE': 'f',
                       '-': 'f'}

        if value.startswith(("FILE", "DIRECTORY")):
            fields = value.split('\t')
            ptype = cls.map[util.pop0(fields)]
            pname = util.pop0(fields).strip()
            # Skip the three size/segment columns preceding the cart field.
            util.pop0(fields)
            util.pop0(fields)
            util.pop0(fields)
            cart = util.pop0(fields)
            if cart is not None:
                cart = cart.strip()
            # cos may be absent (e.g. for directories); normalize to ''.
            cos = util.pop0(fields)
            cos = cos.strip() if cos is not None else ''
            return Checkable(path=pname, type=ptype, cos=cos, cart=cart)

        # Fall back to "ls -l"-style output.
        ltup = re.findall(cls.rgxl, value)
        if ltup:
            (ftype, ign1, ign2, ign3, ign4, fname) = ltup[0]
            return Checkable(path=fname, type=cls.map[ftype])
        return None
Exemplo n.º 2
0
    def fdparse(cls, value):
        """
        Parse a file or directory name and type ('f' or 'd') from hsi output.
        Return a Checkable built from the parsed values if successful, or
        None if the line matches neither recognized format.

        In "ls -P" output, directory lines look like

            DIRECTORY       /home/tpb/bearcat

        File lines look like (cos is '5081')

            FILE    /home/tpb/halloy_test   111670  111670  3962+300150
                X0352700        5081    0       1  01/29/2004       15:25:02
                03/19/2012      13:09:50

        If the file is empty, it looks like:

            FILE    /home/tpb/hic_test/empty     0       0            0
                                6056    0       1  07/10/2014       14:30:40
                07/10/2014      14:30:40

        The fields are separated by tab characters, which is probably the
        easiest way to parse the line, especially when some values are
        missing.
        """
        # One-time class-level setup: compile the "ls -l"-style regex (raw
        # strings, so \s / \d are not deprecated string escapes) and map the
        # hsi type labels to single-letter type codes.
        if not hasattr(cls, 'rgxl'):
            cls.rgxl = re.compile(r"(.)([r-][w-][x-]){3}(\s+\S+){3}"
                                  r"(\s+\d+)(\s+\w{3}\s+\d+\s+[\d:]+)"
                                  r"\s+(\S+)")
            cls.map = {'DIRECTORY': 'd', 'd': 'd', 'FILE': 'f', '-': 'f'}

        if value.startswith(("FILE", "DIRECTORY")):
            fields = value.split('\t')
            ptype = cls.map[util.pop0(fields)]
            pname = util.pop0(fields).strip()
            # Discard the three size/segment columns before the cart field.
            util.pop0(fields)
            util.pop0(fields)
            util.pop0(fields)
            cart = util.pop0(fields)
            if cart is not None:
                cart = cart.strip()
            # cos may be missing; normalize missing to the empty string.
            cos = util.pop0(fields)
            cos = cos.strip() if cos is not None else ''
            return Checkable(path=pname, type=ptype, cos=cos, cart=cart)

        # Otherwise try to match an "ls -l"-style line.
        ltup = re.findall(cls.rgxl, value)
        if ltup:
            (ftype, ign1, ign2, ign3, ign4, fname) = ltup[0]
            return Checkable(path=fname, type=cls.map[ftype])
        return None
Exemplo n.º 3
0
def check_path(path, verbose=False, plugin=True, xof=True):
    """
    Compare the storage-class count for *path* against the count expected
    for its COS and report any discrepancy.

    If plugin is True, we want to log and store, which tcc_report does by
    default so we leave those flags alone.

    If plugin is False, we're interactive and we want to write any report to
    stdout. However, we only make a report if 1) verbose is True, or 2) the
    counts don't match.
    """
    cosinfo = get_cos_info()
    nsobj = path_nsobject(path)
    try:
        bfl = get_bitfile_set(int(nsobj), 1)
    except U.HpssicError as e:
        # In plugin mode, log the failure and give up quietly; otherwise
        # either exit (xof) or re-raise for the interactive caller.
        if plugin:
            CrawlConfig.log(e.value)
            return
        if xof:
            raise SystemExit(e.value)
        raise U.HpssicError(e.value)

    bf = U.pop0(bfl)
    sc_count = int(bf['SC_COUNT'])
    cos_count = int(cosinfo[bf['BFATTR_COS_ID']])
    mismatch = sc_count != cos_count

    if plugin:
        if mismatch:
            tcc_report(bf, path=path)
    elif verbose or mismatch:
        print(tcc_report(bf, path=path, log=False, store=False))
Exemplo n.º 4
0
def check_path(path, verbose=False, plugin=True, xof=True):
    """
    Verify that *path*'s storage-class count matches the count expected for
    its COS, reporting via tcc_report when appropriate.

    If plugin is True, we want to log and store, which tcc_report does by
    default so we leave those flags alone.

    If plugin is False, we're interactive and we want to write any report to
    stdout. However, we only make a report if 1) verbose is True, or 2) the
    counts don't match.
    """
    cosinfo = get_cos_info()
    nsobj = path_nsobject(path)
    try:
        bfl = get_bitfile_set(int(nsobj), 1)
    except U.HpssicError as err:
        if plugin:
            # Plugin mode: record the problem and stop here.
            CrawlConfig.log(err.value)
            return
        elif xof:
            raise SystemExit(err.value)
        else:
            raise U.HpssicError(err.value)

    bitfile = U.pop0(bfl)
    sc_count = int(bitfile['SC_COUNT'])
    cos_count = int(cosinfo[bitfile['BFATTR_COS_ID']])

    counts_differ = (sc_count != cos_count)
    if plugin and counts_differ:
        tcc_report(bitfile, path=path)
    elif not plugin and (verbose or counts_differ):
        print(tcc_report(bitfile, path=path, log=False, store=False))
Exemplo n.º 5
0
def log(*args, **kwargs):
    """
    Manage a singleton logging object. If we already have one, we use it.

    Recognized keyword arguments:
      logpath - path for the log file (None is treated as '')
      cfg     - configuration object, passed through to new_logger
      close   - if True, close and discard the current logger first

    NOTE(review): the original docstring also described a *reopen* flag,
    but no such keyword is read anywhere in this body -- confirm against
    callers whether *reopen* was ever honored. TODO

    If there's anything in *args*, we expect args[0] to be a format string
    with subsequent elements matching format specifiers.

    Returns the current logger object, or None if none exists.
    """
    logpath = kwargs.get('logpath', '')
    cfg = kwargs.get('cfg')
    close = kwargs.get('close', False)
    if logpath is None:
        logpath = ''

    # On *close*, tear down the singleton, closing each handler so its
    # file descriptor is released.
    if close and hasattr(log, '_logger'):
        while log._logger.handlers:
            h = U.pop0(log._logger.handlers)
            h.close()
            del h
        del log._logger

    # (Re)create the singleton only when we have somewhere to log to.
    if not hasattr(log, '_logger') and (logpath or cfg):
        log._logger = new_logger(logpath=logpath, cfg=cfg)

    if args:
        # Prefix the message with the caller's function, file, and line,
        # recovered from one frame up the stack.
        cframe = sys._getframe(1)
        fmt = ("%s(%s:%d): " % (cframe.f_code.co_name,
                                cframe.f_code.co_filename,
                                cframe.f_lineno) +
               args[0])
        nargs = (fmt,) + args[1:]
        try:
            fixup_log_handlers(log._logger)
            log._logger.info(*nargs)
        except AttributeError:
            # No logger yet -- build one and retry the message.
            log._logger = new_logger(logpath=logpath, cfg=cfg)
            log._logger.info(*nargs)

    return getattr(log, '_logger', None)
Exemplo n.º 6
0
def new_logger(logpath='', cfg=None):
    """
    Return a new logging object for this process. The log file path is derived
    from (in order):

     - logpath if set
     - environment ($CRAWL_LOG)
     - cfg
     - default (/var/log/hpssic.log if writable, else /tmp/hpssic.log)
    """
    rval = logging.getLogger('hpssic')
    logging.raiseExceptions = True
    rval.setLevel(logging.INFO)

    # Discard handlers left over from a previous logger, closing each one
    # so its file descriptor is released (popping without close() leaks it).
    while rval.handlers:
        h = U.pop0(rval.handlers)
        h.close()
        del h
    rval.addHandler(get_log_handler(logpath=logpath, cfg=cfg))

    # Separator line; width shrinks with the hostname length -- presumably
    # to keep the formatted line (which embeds the host) a constant width.
    rval.info('-' * (55 - len(U.hostname())))

    return rval
Exemplo n.º 7
0
def new_logger(logpath='', cfg=None):
    """
    Return a new logging object for this process. The log file path is derived
    from (in order):

     - logpath if set
     - environment ($CRAWL_LOG)
     - cfg
     - default (/var/log/hpssic.log if writable, else /tmp/hpssic.log)
    """
    # -------------------------------------------------------------------------
    def raiseError(record=None):
        # Installed as the handler's handleError hook: re-raise the active
        # exception so logging failures surface instead of being swallowed.
        raise

    envname = os.getenv('CRAWL_LOG')
    try:
        dcfg = get_config()
    except Exception:
        # Best effort: with no loadable default config, fall through the
        # logpath search below without it.
        dcfg = None

    # Resolve the log file path per the precedence in the docstring.
    if logpath != '':
        final_logpath = logpath
    elif envname:
        final_logpath = envname
    elif cfg:
        try:
            final_logpath = cfg.get('crawler', 'logpath')
        except (NoOptionError, NoSectionError):
            final_logpath = U.default_logpath()
    elif dcfg:
        try:
            final_logpath = dcfg.get('crawler', 'logpath')
        except (NoOptionError, NoSectionError):
            final_logpath = U.default_logpath()
    else:
        final_logpath = U.default_logpath()

    rval = logging.getLogger('hpssic')
    rval.setLevel(logging.INFO)
    host = U.hostname()

    # Close and remove any stale handlers in one pass. (The original closed
    # them in a for loop whose `del h` only unbound the loop variable, then
    # popped them -- without closing -- in a second loop later on.)
    while rval.handlers:
        h = U.pop0(rval.handlers)
        h.close()
        del h

    # Rotation/archive parameters come from cfg when provided, else defaults.
    if cfg:
        maxBytes = cfg.get_size('crawler', 'logsize', 10 * 1024 * 1024)
        backupCount = cfg.get_size('crawler', 'logmax', 5)
        archdir = cfg.get_d(
            'crawler', 'archive_dir',
            U.pathjoin(U.dirname(final_logpath), 'hpss_log_archive'))
    else:
        maxBytes = 10 * 1024 * 1024
        backupCount = 5
        archdir = U.pathjoin(U.dirname(final_logpath), 'hpss_log_archive')

    fh = U.ArchiveLogfileHandler(final_logpath,
                                 maxBytes=maxBytes,
                                 backupCount=backupCount,
                                 archdir=archdir)

    # Build "%(asctime)s [host] %(message)s" without the %-specifiers being
    # consumed by the host interpolation.
    strfmt = "%" + "(asctime)s [%s] " % host + '%' + "(message)s"
    fmt = logging.Formatter(strfmt, datefmt="%Y.%m%d %H:%M:%S")
    fh.setFormatter(fmt)
    fh.handleError = raiseError

    rval.addHandler(fh)

    # Separator line; width shrinks with the hostname length, presumably to
    # keep the formatted line (which embeds the host) a constant width.
    rval.info('-' * (55 - len(host)))

    return rval