Exemplo n.º 1
0
 def CheckXmlDoc(self, xmlDoc, external, resourceType=None):
     """Run every registered XML record checker over xmlDoc.

     Each checker in self.XmlRecordCheckers returns an integer content
     count; the sum over all checkers is returned (0 if none found any
     content).
     """
     DebugPrint(4, 'DEBUG: In CheckXmlDoc')
     total_content = 0
     for record_checker in self.XmlRecordCheckers:
         DebugPrint(3, 'Running : ' + str(record_checker) + str(xmlDoc) + str(external) + str(resourceType))
         total_content += record_checker(xmlDoc, external, resourceType)
     return total_content
Exemplo n.º 2
0
def disconnect():
    """Disconnect the module-level connection object.

    Author - Tim Byrne

    Closes the connection when connected over SSL.  A failure to close is
    logged (unless we have already reported a connection error) rather than
    raised.  Always clears the module-level 'connected' flag on the normal
    path.
    """

    global connected

    try:
        if connected and Config.get_UseSSL() != 0:
            connection.close()
            DebugPrint(1, 'Disconnected from ' + Config.get_SSLHost())
    except KeyboardInterrupt:
        raise
    except SystemExit:
        raise
    except Exception:
        # Fix: narrowed from a bare 'except:' so that interrupts and exits
        # propagate, matching the re-raise pattern used elsewhere in this file.
        if not connectionError:  # We've already complained, so shut up
            DebugPrint(
                0,
                'Failed to disconnect from ' + Config.get_SSLHost() + ': ',
                sys.exc_info(),
                '--',
                sys.exc_info()[0],
                '++',
                sys.exc_info()[1],
            )

    connected = False
Exemplo n.º 3
0
def Reprocess():
    """Repeatedly attempt to resend outstanding (previously failed) records.

    Loops as long as: the connection has not errored out, the initial
    ReprocessList() pass reported success, and sandbox_mgmt reports more
    outstanding records.  The loop also exits when the outstanding list
    becomes empty or when a full pass makes no progress on any counter
    (stalled), to avoid spinning on records that cannot be sent.
    """
    _, result = ReprocessList()
    while not connect_utils.connectionError and result and sandbox_mgmt.hasMoreOutstandingRecord:
        # This is decreased in SearchOutstanding

        # Snapshot the staged counters so we can detect progress after the
        # reprocess pass below.
        tarcount = sandbox_mgmt.outstandingStagedTarCount
        scount = sandbox_mgmt.outstandingStagedRecordCount

        # Need to look for left over files
        sandbox_mgmt.SearchOutstandingRecord()

        if len(sandbox_mgmt.outstandingRecord) == 0:
            DebugPrint(4, 'DEBUG: quit reprocessing loop due empty list')
            break

        # This is potentially decreased in ReprocessList
        rcount = sandbox_mgmt.outstandingRecordCount

        # Attempt to reprocess any outstanding records
        ReprocessList()
        # If none of the three counters moved, the pass accomplished nothing.
        if rcount == sandbox_mgmt.outstandingRecordCount and scount == sandbox_mgmt.outstandingStagedRecordCount and tarcount \
            == sandbox_mgmt.outstandingStagedTarCount:
            DebugPrint(
                3,
                'Reprocessing seems stalled, stopping it until next successful send'
            )
            # We are not making progress
            break
def _checkSingletonNode(xmlDoc, recordElement, namespace, prefix, tagName, defaultValue):
    """Ensure recordElement carries exactly one <tagName> element.

    If the element is missing, append one whose text is defaultValue; if it
    appears more than once, log a warning that identifies the record via its
    best job id.  Helper for StandardCheckXmldoc (the ProbeName and SiteName
    handling was previously duplicated verbatim).
    """
    nodes = recordElement.getElementsByTagNameNS(namespace, tagName)
    if not nodes:
        node = xmlDoc.createElementNS(namespace, prefix + tagName)
        textNode = xmlDoc.createTextNode(defaultValue)
        node.appendChild(textNode)
        recordElement.appendChild(node)
    elif nodes.length > 1:
        [jobIdType, jobId] = FindBestJobId(recordElement, namespace)
        DebugPrint(
            0, 'Warning: too many ' + tagName + ' entities in ' + jobIdType +
            ' ' + jobId)


def StandardCheckXmldoc(xmlDoc, recordElement, external, prefix):
    '''Check for and fill in suitable values for important attributes'''

    if not xmlDoc.documentElement:  # Major problem
        return

    if external:

        # Local namespace
        namespace = xmlDoc.documentElement.namespaceURI

        # ProbeName / SiteName: add the configured value if missing, warn on
        # duplicates (shared logic factored into _checkSingletonNode).
        _checkSingletonNode(xmlDoc, recordElement, namespace, prefix,
                            'ProbeName', Config.get_ProbeName())
        _checkSingletonNode(xmlDoc, recordElement, namespace, prefix,
                            'SiteName', Config.get_SiteName())

        # grid -- similar, but an existing single entry may be overwritten
        # with the configured value, or removed entirely if it ends up empty.
        gridNodes = recordElement.getElementsByTagNameNS(namespace, 'Grid')
        if not gridNodes:
            node = xmlDoc.createElementNS(namespace, prefix + 'Grid')
            textNode = xmlDoc.createTextNode(Config.get_Grid())
            node.appendChild(textNode)
            recordElement.appendChild(node)
        elif gridNodes.length == 1:
            grid = gridNodes[0].firstChild.data
            grid_info = Config.get_Grid()
            if grid_info and (not grid or grid == 'Unknown'):
                gridNodes[0].firstChild.data = grid_info
            if not gridNodes[0].firstChild.data:  # Remove null entry
                recordElement.removeChild(gridNodes[0])
                gridNodes[0].unlink()
        else:
            # Too many entries
            (jobIdType, jobId) = FindBestJobId(recordElement, namespace)
            DebugPrint(
                0, 'Warning: too many grid entities in ' + jobIdType + ' ' +
                jobId)
Exemplo n.º 5
0
def __InitializeDictionary__():
    """
    For internal use only.  Parse the user-vo-map file and initialize the
    module-internal data structures with the contents.  From now on, VO
    information lookup in this module will be done via an in-memory lookup.

    Will only be attempted once
    """

    # Check if there was previously an error
    # If so, do not retry initialization
    global __dictionaryErrorStatus, __voiToVOcDictionary, __UserVODictionary
    if __dictionaryErrorStatus:
        return

    __voiToVOcDictionary = {}
    __UserVODictionary = {}
    # Pessimistically mark as failed now; cleared on success below, so a
    # failure partway through is never retried.
    __dictionaryErrorStatus = True

    mapfile = config.Config.get_UserVOMapFile()
    if mapfile is None:  # idiom: identity comparison with None
        DebugPrint(2, "WARNING: No mapfile specified; not using VO mapping.")
        return

    try:
        __InitializeDictionary_internal(mapfile)
        __dictionaryErrorStatus = False
    except IOError as e:  # 'as' syntax, consistent with the rest of the file
        DebugPrint(
            0,
            'WARNING: IO error exception initializing user-vo-map mapfile %s: %s'
            % (mapfile, str(e)))
        DebugPrintTraceback()
Exemplo n.º 6
0
def processHistoryDir():
    """
    Condor schedd will write one file per finished job into this directory.
    We must convert it from a Condor ClassAd to a certinfo file and rename it
    based on the routed job's name.
    """
    history_dir = Config.get_CondorCEHistoryFolder()
    output_dir = Config.get_DataFolder()
    if not history_dir:
        DebugPrint(3, "No Condor-CE history specified; will not process for"
                   " certinfo.")
        # Fix: return early.  Previously execution fell through, logged the
        # "does not exist" message as well, and globbed the current directory
        # (os.path.join('', 'history.*')).
        return
    if not os.path.exists(history_dir):
        DebugPrint(3, "Condor-CE history directory %s does not exist."
                   % history_dir)
        return
    for full_filename in glob.glob(os.path.join(history_dir, "history.*")):
        _, filename = os.path.split(full_filename)
        if not historyRe.match(filename):
            DebugPrint(
                3, "Ignoring history file %s as it does not match "
                "the regular expression" % filename)
            continue
        try:
            classadToCertinfo(full_filename, output_dir)
        except KeyboardInterrupt:
            raise
        except SystemExit:
            raise
        except Exception as e:  # 'as' syntax, consistent with the rest of the file
            DebugPrint(0, "Failure when trying to process Condor-CE history %s"
                       " into a certinfo file: %s" % (filename, str(e)))
            DebugPrintTraceback(e)
Exemplo n.º 7
0
def CompressOutbox(probe_dir, outbox, outfiles):
    """Open a tar.bz2 archive in probe_dir/staged/store for the outbox files.

    Returns False if the archive cannot be created.
    """

    # Compress the probe_dir/outbox and stored the resulting tar.gz file
    # in probe_dir/staged

    global outstandingStagedTarCount

    staged_store = os.path.join(probe_dir, 'staged', 'store')
    Mkdir(staged_store)

    staging_name = GenerateFilename('tz.', staged_store)
    DebugPrint(1, 'Compressing outbox in tar.bz2 file: ' + staging_name)

    try:
        tar = tarfile.open(staging_name, 'w:bz2')
    except KeyboardInterrupt:
        raise
    except SystemExit:
        raise
    except Exception as e:  # 'as' syntax, consistent with the rest of the file
        DebugPrint(
            0, 'Warning: Exception caught while opening tar.bz2 file: ' +
            staging_name + ':')
        DebugPrint(0, 'Caught exception: ', e)
        DebugPrintTraceback()
        return False
Exemplo n.º 8
0
def RemoveOldQuarantine(nDays=31, maxSize=200):
    """Prune quarantined data files older than nDays or beyond maxSize.

    Cleans the main data-folder quarantine (including its immediate
    subdirectories) and every per-backup-directory quarantine.  The size /
    age policy itself is applied by RemoveOldFiles.
    """

    # Default to 31 days or 200Mb whichever is lower.
    # Fix: dropped a redundant nested os.path.join() wrapper.
    quarantine = os.path.join(Config.get_DataFolder(), "quarantine")
    if os.path.exists(quarantine):
        DebugPrint(1, 'Removing quarantines data files older than ', nDays,
                   ' days from ', quarantine)
        RemoveOldFiles(nDays, os.path.join(quarantine, '*'), maxSize)
        #quarantine files are under subdirectory
        subpath = os.listdir(quarantine)
        for dir_quar in subpath:
            if not os.path.isdir(os.path.join(quarantine, dir_quar)):
                continue
            DebugPrint(1, 'Removing quarantines data files older than ', nDays,
                       ' days from ', os.path.join(quarantine, dir_quar))
            RemoveOldFiles(nDays, os.path.join(quarantine, dir_quar, '*'),
                           maxSize)
    fragment = Config.getFilenameFragment()
    for current_dir in backupDirList:
        gratiapath = os.path.join(current_dir, 'gratiafiles')
        subpath = os.path.join(gratiapath, 'subdir.' + fragment)
        quarantine = os.path.join(subpath, 'quarantine')
        if os.path.exists(quarantine):
            DebugPrint(1, 'Removing quarantines data files older than ', nDays,
                       ' days from ', quarantine)
            RemoveOldFiles(nDays, os.path.join(quarantine, '*'), maxSize)
Exemplo n.º 9
0
 def __init__(self, target=None, min_val=None, default_val=None):
     """
     Open or Create a checkpoint file
     target - checkpoint filename (optionally null)
     min_val - checkpoint cannot be less than it
     default_val - checkpoint will assume this value if no checkpoint value is available
     """
     self._val = default_val
     self._fp = None
     self._target = None
     if target:
         try:
             # Open read/write, creating the file if needed.  The handle is
             # kept in self._fp -- presumably for later checkpoint writes
             # (the rest of the class is not visible here; confirm).
             fd = os.open(target, os.O_RDWR | os.O_CREAT)
             self._fp = os.fdopen(fd, 'r+')
             # The first line holds the stored value (Python 2 long).
             # long('') on an empty file raises ValueError, handled below.
             self._val = long(self._fp.readline())
             if self._val < min_val:
                 self._val = min_val
             DebugPrint(3, "Resuming from checkpoint in %s" % target)
         except IOError:
             #raise IOError("Could not open checkpoint file %s" % target)
             DebugPrint(1, "Could not open checkpoint file %s" % target)
         except EOFError:
             DebugPrint(1, "Empty or truncated checkpoint file %s" % target)
         except ValueError:
             DebugPrint(1, "Failed to read checkpoint file %s" % target)
     # Final clamp also covers the no-target and read-failure paths.
     if self._val < min_val:
         # None can be compared with integers (it is smaller)
         DebugPrint(3, "Checkpoint adjusted to %s" % min_val)
         self._val = min_val
Exemplo n.º 10
0
def __disconnect_at_exit__():
    """
    Insure that we properly shutdown the connection at the end of the process.

    This includes sending any outstanding records and printing the statistics
    """

    # Flush any partially-filled bundle before disconnecting.
    if global_state.bundle_size > 1 and global_state.CurrentBundle.nItems > 0:
        responseString, _ = bundle.ProcessBundle(global_state.CurrentBundle)
        DebugPrint(0, responseString)
        DebugPrint(
            0, '***********************************************************')
    connect_utils.disconnect()
    if config.Config:
        try:
            # Routine housekeeping: trim old logs, stale job data, and old or
            # oversized quarantine files.
            sandbox_mgmt.RemoveOldLogs(Config.get_LogRotate())
            sandbox_mgmt.RemoveOldJobData(Config.get_DataFileExpiration())
            sandbox_mgmt.RemoveOldQuarantine(Config.get_DataFileExpiration(),
                                             Config.get_QuarantineSize())
        except KeyboardInterrupt:
            raise
        except SystemExit:
            raise
        except Exception as exception:  # 'as' syntax, consistent with the rest of the file
            DebugPrint(0, 'Exception caught at top level: ' + str(exception))
            DebugPrintTraceback()
Exemplo n.º 11
0
def classadToCertinfo(filename, output_dir):
    """
    Process the classad of the finished job into a certinfo file in the same
    directory.  On failure, do not throw an exception, but quarantine the
    classad file.
    On success, the classad history file is deleted.

    This function should not throw and does not return anything
    """
    DebugPrint(4, "Converting ClassAd %s to certinfo file." % filename)
    try:
        classad_fd = open(filename)
    except IOError as io_err:
        DebugPrint(1, "Unable to open ClassAd %s for certinfo conversion" \
            ": %s" % (filename, str(io_err)))
        return

    for classad in fdToClassad(classad_fd):

        if createCertinfoFile(classad, output_dir):
            # Conversion succeeded; the history file is no longer needed.
            file_utils.RemoveFile(filename)
        else:
            DebugPrint(0, "Failed to convert certinfo file %s; sending to " \
                "quarantine." % filename)
            sandbox_mgmt.QuarantineFile(filename, False)
Exemplo n.º 12
0
def parse_datetime(date_string_in, return_seconds=False, assume_local=False):
    """Parse date/time string and return datetime object.

    This function provides only limited support of the iso8601 format
    e.g. Time zone specifications (different from Z for UTC) are not supported
    Can raise ValueError is the format is not valid

    :param date_string_in: date/time string in iso8601 (%Y-%m-%dT%H:%M:%S[Z]) format
        Other formats are accepted: %Y-%m-%d, %Y%m%d[T%H:%M:%S[Z]], %Y-%m-%d %H:%M:%S
    :param return_seconds: return seconds form the Epoch instead of a datetime object
    :param assume_local: assume that a naive time is local when returning seconds (cannot express naive time)
    :return: datetime, None if errors occur
    """
    # from python 2.5 datetime.strptime(date_string, format)
    # previous: datetime(*(time.strptime(date_string, format)[0:6]))
    # These external modules provide robust parsing/formatting:
    #  pyiso8601 - https://pypi.python.org/pypi/iso8601
    #  isodate - https://pypi.python.org/pypi/isodate
    #  dateutil -
    #  ciso8601 - https://github.com/elasticsales/ciso8601
    date_string = date_string_in.strip()
    is_utc = None
    # TODO: add support for timezones different form Z
    if date_string[-1] == 'Z':
        is_utc = True
        date_string = date_string[:-1]
    try:
        # fast track for the most likely format
        result = time.strptime(date_string, "%Y-%m-%dT%H:%M:%S")
    except ValueError:
        # normalize the string to %Y%m%d[ %H:%M:%S]
        dt_arr = date_string.split('T')
        if not len(dt_arr) == 2:
            dt_arr = date_string.split()
            if not len(dt_arr) == 2:
                dt_arr.append('')
        date_string = ("%s %s" %
                       (dt_arr[0].replace('-', ''), dt_arr[1])).strip()
        try:
            result = time.strptime(date_string, "%Y%m%d %H:%M:%S")
        except ValueError:
            # Wrong format, try the next
            try:
                # try second string format
                result = time.strptime(date_string, "%Y%m%d")
            except ValueError as e:  # 'as' syntax, consistent with the rest of the file
                # No valid format
                DebugPrint(
                    2, "Wrong format, Date parsing failed for %s: %s" %
                    (date_string_in, e))
                #return None
                raise
            except Exception as e:
                DebugPrint(
                    2, "Date parsing failed for %s: %s" % (date_string_in, e))
                #return None
                raise
Exemplo n.º 13
0
def readCertInfoLog(localJobId):
    ''' Look for and read contents of certificate log if present'''

    DebugPrint(4, 'readCertInfoLog: received (' + str(localJobId) + r')')

    # First get the list of accounting log file
    pattern = Config.get_CertInfoLogPattern()

    if pattern == r'':
        return None
    logs = glob.glob(pattern)
    if not logs:
        return None

    # Sort from newest first
    logs_sorting = [(-os.path.getmtime(filename), filename)
                    for filename in logs]
    logs_sorting.sort()
    logs = [filename for (_, filename) in logs_sorting]

    # Search in each log
    what = "lrmsID=" + str(localJobId)
    for myfile in logs:
        # Fix: the original 'open(myfile).readlines()' leaked a file handle
        # per log file; close explicitly, even when returning mid-loop.
        log_fd = open(myfile)
        try:
            for line in log_fd:
                if what in line:
                    # If we could use a newer version of python (we have to work with 1.4), we could use
                    # shlex:
                    # res = dict(item.split('=',1) for item in shlex.split(line))
                    # Newer version of python support this one line creation of the dictionary by not 1.3.4 (SL4 :()
                    # res = dict(item.split('=',1) for item in __quoteSplit.findall(line))
                    res = {}
                    for item in __quoteSplit.findall(line):
                        split_item = item.split('=', 1)
                        res[split_item[0]] = split_item[1]
                    if 'lrmsID' in res and res['lrmsID'] == str(localJobId):
                        if 'userDN' in res:
                            res['DN'] = res['userDN']
                        else:
                            res['DN'] = None
                        if 'userFQAN' in res:
                            res['FQAN'] = res['userFQAN']
                        else:
                            res['FQAN'] = None
                        res['VO'] = None
                        DebugPrint(
                            0, 'Warning: found valid certinfo file for ' +
                            str(localJobId) + ' in the log files: ' + pattern +
                            ' with ' + str(res))
                        return res
        finally:
            log_fd.close()
    DebugPrint(
        0, 'Warning: unable to find valid certinfo file for ' +
        str(localJobId) + ' in the log files: ' + pattern)
    return None
Exemplo n.º 14
0
def verifyFromCertInfo(xmlDoc, userIdentityNode, namespace):
    ''' Use localJobID and probeName to find cert info file and insert info into XML record'''

    # Gather the identifiers the certinfo reader needs from the record.
    DebugPrint(4, 'DEBUG: Get JobIdentity')
    job_identity = GetNode(
        xmlDoc.getElementsByTagNameNS(namespace, 'JobIdentity'))
    if job_identity is None:
        return
    DebugPrint(4, 'DEBUG: Get JobIdentity: OK')
    local_job_id = GetNodeData(
        job_identity.getElementsByTagNameNS(namespace, 'LocalJobId'))
    DebugPrint(4, 'DEBUG: Get localJobId: ', local_job_id)
    usage_record = userIdentityNode.parentNode
    probe_name = GetNodeData(
        usage_record.getElementsByTagNameNS(namespace, 'ProbeName'))
    DebugPrint(4, 'DEBUG: Get probeName: ', probe_name)

    # Look up the matching certinfo record; bail out if there is none.
    DebugPrint(
        4, 'DEBUG: call readCertInfo(' + str(local_job_id) + r', ' +
        str(probe_name) + ')')
    cert_info = readCertInfo(local_job_id, probe_name)
    DebugPrint(4, 'DEBUG: call readCertInfo: OK')
    DebugPrint(4, 'DEBUG: certInfo: ' + str(cert_info))
    if cert_info is None:
        DebugPrint(4, 'DEBUG: Returning without processing certInfo')
        return

    return populateFromCertInfo(cert_info, xmlDoc, userIdentityNode, namespace)
Exemplo n.º 15
0
def ListOutstandingRecord(dirname, isstaged):
    """Register every file under dirname as an outstanding record.

    Bumps the staged or unstaged outstanding counter by the number of
    entries found, then adds each file via AddOutstandingRecord.  Returns
    True as soon as the global outstanding list reaches
    __maxFilesToReprocess__, False otherwise (including when dirname does
    not exist).
    """

    global outstandingStagedRecordCount
    global outstandingRecordCount

    if not os.path.exists(dirname):
        return False

    entries = os.listdir(dirname)
    entry_count = len(entries)
    DebugPrint(
        4, 'DEBUG: ListOutstanding for ' + dirname + ' adding ' + str(entry_count))
    if isstaged:
        outstandingStagedRecordCount += entry_count
    else:
        outstandingRecordCount += entry_count
    for entry in entries:
        AddOutstandingRecord(os.path.join(dirname, entry))
        if len(outstandingRecord) >= __maxFilesToReprocess__:
            return True
    return False
Exemplo n.º 16
0
def InitDirList():
    """Initialize the backup directory list with the working folder."""

    working_folder = Config.get_WorkingFolder()
    Mkdir(working_folder)
    DirListAdd(working_folder)
    DebugPrint(1, 'List of backup directories: ', backupDirList)
Exemplo n.º 17
0
def RemoveOldJobData(nDays=31):
    """Purge stale per-job working files from the configured data folder."""
    data_folder = Config.get_DataFolder()
    DebugPrint(1, 'Removing incomplete data files older than ', nDays,
               ' days from ', data_folder)
    for stale_pattern in ('gratia_certinfo_*', 'gratia_condor_log*',
                          'gram_condor_log*'):
        RemoveOldFiles(nDays, os.path.join(data_folder, stale_pattern))
Exemplo n.º 18
0
    def RemoveTransientInputFiles(self):
        """Delete every registered transient input file, then clear the list."""

        for transient in self.TransientInputFiles:
            DebugPrint(1, 'Deleting transient input file: ' + transient)
            RemoveFile(transient)
        # Forget the files we just removed.
        self.TransientInputFiles = []
Exemplo n.º 19
0
def RemoveRecordFile(filename):
    """Remove a record file and decrement the matching outstanding counter.

    A file living under .../staged/outbox counts against the staged record
    counter; anything else counts against the plain record counter.  The
    counters are only touched when the file was actually removed.
    """

    global outstandingRecordCount
    global outstandingStagedRecordCount

    if not RemoveFile(filename):
        # Nothing was deleted, so leave the counters untouched.
        return

    parent = os.path.dirname(filename)
    is_staged = (os.path.basename(parent) == 'outbox'
                 and os.path.basename(os.path.dirname(parent)) == 'staged')
    if is_staged:
        DebugPrint(3, 'Remove the staged record: ' + filename)
        outstandingStagedRecordCount -= 1
    else:
        outstandingRecordCount -= 1
        DebugPrint(3, 'Remove the record: ' + filename)
Exemplo n.º 20
0
def __handle_timeout__(signum, _):
    """Signal handler: abort a collector connection that exceeded 'timeout'.

    Raises GratiaTimeout so the in-flight connection attempt is unwound
    cleanly.
    """
    DebugPrint(3, 'Signal handler "handle_timeout" called with signal', signum)
    message = "Connection to Collector lasted more than: " + str(timeout) + " second"
    raise GratiaTimeout(message)
Exemplo n.º 21
0
def queryJob(jobid):
    """
    Query the Condor-CE directly for the equivalent of the certinfo.

    This is only done in the case where we couldn't determine the info from
    the files on disk.  While we're at it, we pull the data for all jobs and
    subsequent lookups will perform admirably.
    """
    global _queryCache
    if _queryCache is None:
        directory = Config.get_DataFolder()
        job_info = queryAllJobs()
        for classad in job_info.values():
            # On failure, there is not much to do - ignore
            # Fix: the original call omitted DebugPrint's level argument and
            # passed the message string as the level.  Level 4 matches the
            # other certinfo debug messages in this file -- confirm.
            DebugPrint(4, "Creating certinfo file for %s." %
                       classad['GlobalJobId'])
            createCertinfoFile(classad, directory)
        _queryCache = job_info
    info = _queryCache.get(jobid, {})
    certinfo = {}
    if 'x509UserProxyVOName' in info:
        certinfo["VO"] = info['x509UserProxyVOName']
    if 'x509userproxysubject' in info:
        certinfo['DN'] = info['x509userproxysubject']
    if 'x509UserProxyFirstFQAN' in info:
        certinfo['FQAN'] = info['x509UserProxyFirstFQAN']
    return certinfo
Exemplo n.º 22
0
def classadToCertinfo(filename, output_dir):
    """
    Process the classad of the finished job into a certinfo file in the same
    directory.  On failure, do not throw an exception, but quarantine the
    classad file.
    On success, the classad history file is deleted.

    This function should not throw and does not return anything
    """
    DebugPrint(4, "Converting ClassAd %s to certinfo file." % filename)
    try:
        fd = open(filename)
    except IOError as ie:  # 'as' syntax, consistent with the other copy of this function
        DebugPrint(1, "Unable to open ClassAd %s for certinfo conversion" \
            ": %s" % (filename, str(ie)))
        return
Exemplo n.º 23
0
def Initialize(customConfig='ProbeConfig'):
    '''This function initializes the Gratia metering engine.

    Reads the probe configuration, registers the at-exit disconnect hook,
    performs the collector handshake, and reprocesses any records left over
    from previous runs.  The whole body is skipped when backupDirList is
    already populated (i.e. initialization appears to have happened).
    '''

    if len(sandbox_mgmt.backupDirList) == 0:

        # This has to be the first thing done (DebugPrint uses
        # the information

        config.Config = probe_config.ProbeConfiguration(customConfig)

        DebugPrint(0, 'Initializing Gratia with ' + customConfig)

        # Initialize cleanup function.
        atexit.register(__disconnect_at_exit__)

        global_state.bundle_size = Config.get_BundleSize()
        connect_utils.timeout = Config.get_ConnectionTimeout()

        global_state.CurrentBundle = bundle.Bundle()

        send.Handshake()

        # Need to initialize the list of possible directories
        sandbox_mgmt.InitDirList()

        # Need to look for left over files
        sandbox_mgmt.SearchOutstandingRecord()

        # Process the Condor-CE history directory.
        condor_ce.processHistoryDir()

        # Attempt to reprocess any outstanding records

        reprocess.Reprocess()
Exemplo n.º 24
0
def QuarantineFile(filename, isempty):
    """Move a problematic file into a 'quarantine' directory.

    If the only problem is that the file is empty (isempty), the filename is
    appended to the quarantine's 'emptyfile' list instead of being copied.
    """

    # If we have trouble with a file, let's quarantine it
    # If the quarantine reason is 'only' that the file is empty,
    # list the file as such.

    # Find the top of the directory tree owning this file so the quarantine
    # directory lands beside (not inside) the outbox/staged areas.
    dirname = os.path.dirname(filename)
    pardirname = os.path.dirname(dirname)
    if os.path.basename(dirname) != 'outbox':
        toppath = dirname
    else:
        if os.path.basename(pardirname) == 'staged':
            toppath = os.path.dirname(pardirname)
        else:
            toppath = pardirname
    quarantine = os.path.join(toppath, 'quarantine')
    Mkdir(quarantine)
    DebugPrint(0, 'Putting a quarantine file in: ' + quarantine)
    DebugPrint(3,
               'Putting a file in quarantine: ' + os.path.basename(filename))
    if isempty:
        try:
            emptyfiles = open(os.path.join(quarantine, 'emptyfile'), 'a')
            emptyfiles.write(filename + '\n')
            emptyfiles.close()
        except KeyboardInterrupt:
            raise
        except SystemExit:
            raise
        except Exception:
            # Fix: narrowed from a bare 'except:' so interrupts propagate,
            # matching the re-raise pattern used elsewhere in this file.
            DebugPrint(
                0,
                'failed to record that file was empty: ',
                filename,
                '--',
                sys.exc_info(),
                '--',
                sys.exc_info()[0],
                '++',
                sys.exc_info()[1],
            )
    else:
        dest = os.path.join(quarantine, os.path.basename(filename))
        try:
            shutil.copy2(filename, dest)
        except IOError as ie:  # 'as' syntax, consistent with the rest of the file
            DebugPrint(
                1,
                "Unable to copy file %s to dest %s due to error: %s; ignoring"
                % (filename, dest, ie.strerror))
            return
Exemplo n.º 25
0
def Maintenance():
    '''This perform routine maintenance that is usually done at'''

    send.Handshake()

    # Pick up any files left over from previous runs...
    sandbox_mgmt.SearchOutstandingRecord()

    # ...and try to send them again.
    reprocess.Reprocess()

    # Flush a partially-filled bundle, if bundling is enabled.
    bundling_enabled = global_state.bundle_size > 1
    if bundling_enabled and global_state.CurrentBundle.nItems > 0:
        response, _ = bundle.ProcessBundle(global_state.CurrentBundle)
        DebugPrint(0, response)
        DebugPrint(0, '***********************************************************')
Exemplo n.º 26
0
 def run_command(cmd, cmd_filter=None, timeout=None, get_stderr=False):
     """Run a shell command and return its (optionally filtered) output.

     When get_stderr is set, stderr is folded into the captured output.
     Returns None when the command exits with a failure status.
     """
     # TODO: better, more robust and with timeout
     # timeout ignored for now
     DebugPrint(5, "Invoking: %s" % cmd)
     if get_stderr:
         cmd = "%s 2>&1" % cmd
     pipe = os.popen(cmd)
     output = pipe.read()
     if pipe.close():
         # A non-None close() status means the command failed.
         DebugPrint(4, "Unable to invoke '%s'" % cmd)
         #raise Exception("Unable to invoke command")
     else:
         if cmd_filter:
             output = cmd_filter(output.strip())
         if not output:
             DebugPrint(4, "Unable to parse the command output (filter %s): %s" % (cmd_filter, cmd))
         return output
Exemplo n.º 27
0
def removeCertInfoFile(xmlDoc, userIdentityNode, namespace):
    ''' Use localJobID and probeName to find cert info file and remove file'''

    # Pull the identifiers needed to locate the certinfo file.
    DebugPrint(4, 'DEBUG: Get JobIdentity')
    job_identity = GetNode(
        xmlDoc.getElementsByTagNameNS(namespace, 'JobIdentity'))
    if job_identity is None:
        return
    DebugPrint(4, 'DEBUG: Get JobIdentity: OK')
    local_job_id = GetNodeData(
        job_identity.getElementsByTagNameNS(namespace, 'LocalJobId'))
    DebugPrint(4, 'DEBUG: Get localJobId: ', local_job_id)
    usage_record = userIdentityNode.parentNode
    probe_name = GetNodeData(
        usage_record.getElementsByTagNameNS(namespace, 'ProbeName'))
    DebugPrint(4, 'DEBUG: Get probeName: ', probe_name)

    # Use _findCertinfoFile to find and remove the file, XML is ignored
    # Looking only for exact match, globbing is disabled.
    # Use _findCertinfoFile(localJobId, probeName) to look for more files with globbing if the exact matck
    # is not found (gratia_certinfo_*_localJobId*)
    DebugPrint(
        4, 'DEBUG: call _findCertinfoFile(' + str(local_job_id) + r', ' +
        str(probe_name) + ')')
    found = _findCertinfoFile(local_job_id, probe_name)
    if not found:
        # matching certinfo file not found
        DebugPrint(4, 'DEBUG: unable to find and remove  certinfo file')
        return None
    # Take the filename from the (filename, ...) tuple and delete the file.
    certinfo_fname = found[0]
    DebugPrint(4, 'DEBUG: removing certinfo file' + str(certinfo_fname))
    file_utils.RemoveFile(certinfo_fname)  # Clean up.
    return certinfo_fname
Exemplo n.º 28
0
def DebugPrintLevel(level, *args):
    """Log via DebugPrint with a severity label prefixed to the message.

    Levels at or below 0 map to CRITICAL, 4 and above to DEBUG, and 1-3 to
    ERROR/WARNING/INFO respectively.
    """
    labels = ["CRITICAL", "ERROR", "WARNING", "INFO", "DEBUG"]
    clamped = min(max(level, 0), 4)
    level_str = "%s - EnstoreStorage: " % labels[clamped]
    DebugPrint(level, level_str, *args)
Exemplo n.º 29
0
 def get_opts(self, option=None):
     """Return all parsed command line options, or one option by name.

     With no argument (or a falsy one) the whole option mapping is returned.
     Returns None when the requested option is absent or no options exist.
     """
     if not option:
         return self._opts
     try:
         value = self._opts[option]
     except (TypeError, KeyError):
         # _opts may be None (TypeError) or simply lack the key (KeyError).
         DebugPrint(5, "No option %s, returning None" % option)
         return None
     return value
Exemplo n.º 30
0
 def __init__(self, customConfig='ProbeConfig'):
     """Remember the configuration file path, failing fast if it is missing.

     Raises utils.InternalError (after logging) when the file does not exist.
     """
     if not os.path.exists(customConfig):
         message = ("Error: configuration file %s doesn't exist" %
                    (customConfig, ))
         DebugPrint(0, message)
         raise utils.InternalError(message)
     self.__configname = customConfig