Example #1
def get_output(jobid, directory, wms_proxy=False):
    """Retrieve the output of a job on the grid"""

    cmd = 'glite-wms-job-output'
    exec_bin = True
    # general WMS options (somehow used by the glite-wms-job-output
    # command)
    if config['Config']:
        cmd += ' --config %s' % config['Config']

    if not check_proxy():
        logger.warning('LCG plugin is not active.')
        return False, None
    if not credential().isValid('01:00'):
        logger.warning('GRID proxy lifetime shorter than 1 hour')
        return False, None

    cmd = '%s --noint --dir %s %s' % (cmd, directory, jobid)

    logger.debug('job get output command: %s' % cmd)

    rc, output, m = getShell().cmd1(
        '%s%s' % (__get_cmd_prefix_hack__(binary=exec_bin), cmd),
        allowed_exit=[0, 255])

    match = re.search('directory:\n\s*([^\t\n\r\f\v]+)\s*\n', output)

    if not match:
        logger.warning('Job output fetch failed.')
        __print_gridcmd_log__('(.*-output.*\.log)', output)
        return False, 'cannot fetch job output'

    # job output fetching succeeded, try to remove the glite command
    # logfile if it exists
    __clean_gridcmd_log__('(.*-output.*\.log)', output)

    outdir = match.group(1)

    #       some versions of LCG middleware create an extra output directory (named <uid>_<jid_hash>)
    #       inside the job.outputdir. Try to match the jid_hash in the outdir. Do output movement
    #       if the <jid_hash> is found in the path of outdir.
    import urlparse
    jid_hash = urlparse.urlparse(jobid)[2][1:]

    if outdir.count(jid_hash):
        if getShell().system('mv "%s"/* "%s"' % (outdir, directory)) == 0:
            try:
                os.rmdir(outdir)
            except Exception as msg:
                logger.warning(
                    "Error trying to remove the empty directory %s:\n%s" %
                    (outdir, msg))
        else:
            logger.warning(
                "Error moving output from %s to %s.\nOutput is left in %s." %
                (outdir, directory, outdir))

    return __get_app_exitcode__(directory)
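A minimal usage sketch for this helper (the job id and output directory below are hypothetical; it assumes the module-level config, logger and getShell objects used above are already initialised, that a valid proxy exists, and that __get_app_exitcode__ keeps the same two-element (flag, detail) return convention as the early exits):

# hypothetical WMS job id and local output directory
wms_jobid = 'https://lb.example.org:9000/AbCdEfGh1234567890'
ok, info = get_output(wms_jobid, '/tmp/myjob_output')
if not ok:
    logger.error('could not retrieve output: %s' % info)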
Example #2
def get_output(jobid, directory, wms_proxy=False):
    """Retrieve the output of a job on the grid"""

    cmd = "glite-wms-job-output"
    exec_bin = True
    # general WMS options (somehow used by the glite-wms-job-output
    # command)
    if config["Config"]:
        cmd += " --config %s" % config["Config"]

    if not check_proxy():
        logger.warning("LCG plugin is not active.")
        return False, None
    if not credential().isValid("01:00"):
        logger.warning("GRID proxy lifetime shorter than 1 hour")
        return False, None

    cmd = "%s --noint --dir %s %s" % (cmd, directory, jobid)

    logger.debug("job get output command: %s" % cmd)

    rc, output, m = getShell().cmd1("%s%s" % (__get_cmd_prefix_hack__(binary=exec_bin), cmd), allowed_exit=[0, 255])

    match = re.search("directory:\n\s*([^\t\n\r\f\v]+)\s*\n", output)

    if not match:
        logger.warning("Job output fetch failed.")
        __print_gridcmd_log__("(.*-output.*\.log)", output)
        return False, "cannot fetch job output"

    # job output fetching succeeded, try to remove the glite command
    # logfile if it exists
    __clean_gridcmd_log__("(.*-output.*\.log)", output)

    outdir = match.group(1)

    #       some versions of LCG middleware create an extra output directory (named <uid>_<jid_hash>)
    #       inside the job.outputdir. Try to match the jid_hash in the outdir. Do output movement
    #       if the <jid_hash> is found in the path of outdir.
    import urlparse

    jid_hash = urlparse.urlparse(jobid)[2][1:]

    if outdir.count(jid_hash):
        if getShell().system('mv "%s"/* "%s"' % (outdir, directory)) == 0:
            try:
                os.rmdir(outdir)
            except Exception as msg:
                logger.warning("Error trying to remove the empty directory %s:\n%s" % (outdir, msg))
        else:
            logger.warning("Error moving output from %s to %s.\nOutput is left in %s." % (outdir, directory, outdir))

    return __get_app_exitcode__(directory)
Example #3
def __get_lfc_host__():
    """Gets the LFC_HOST: from current shell or querying BDII on demand"""
    lfc_host = None

    if 'LFC_HOST' in getShell().env:
        lfc_host = getShell().env['LFC_HOST']

    if not lfc_host:
        lfc_host = __get_default_lfc__()

    return lfc_host
Example #4
def __get_lfc_host__():
    """Gets the LFC_HOST: from current shell or querying BDII on demand"""
    lfc_host = None

    if 'LFC_HOST' in getShell().env:
        lfc_host = getShell().env['LFC_HOST']

    if not lfc_host:
        lfc_host = __get_default_lfc__()

    return lfc_host
Example #5
def submit(jdlpath, ce=None, perusable=False):
    """Submit a JDL file to LCG"""

    # doing job submission
    cmd = "glite-wms-job-submit -a"
    exec_bin = True

    if not check_proxy():
        logger.warning("LCG plugin not active.")
        return

    if not credential().isValid("01:00"):
        logger.warning("GRID proxy lifetime shorter than 1 hour")
        return

    submit_opt = __set_submit_option__()

    if not submit_opt:
        return
    else:
        cmd += submit_opt

    if ce:
        cmd += " -r %s" % ce

    cmd = '%s --nomsg "%s" < /dev/null' % (cmd, jdlpath)

    logger.debug("job submit command: %s" % cmd)

    rc, output, m = getShell().cmd1(
        "%s%s" % (__get_cmd_prefix_hack__(binary=exec_bin), cmd),
        allowed_exit=[0, 255],
        timeout=config["SubmissionTimeout"],
    )

    if output:
        output = "%s" % output.strip()

    match = re.search(".*(https://\S+:9000/[0-9A-Za-z_\.\-]+)", output)

    if match:
        logger.debug("job id: %s" % match.group(1))
        if perusable:
            logger.info("Enabling perusal")
            getShell().cmd1("glite-wms-job-perusal --set -f stdout %s" % match.group(1))

        # remove the glite command log if it exists
        __clean_gridcmd_log__("(.*-job-submit.*\.log)", output)
        return match.group(1)

    else:
        logger.warning("Job submission failed.")
        __print_gridcmd_log__("(.*-job-submit.*\.log)", output)
        return
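A hedged sketch of the usual submit/cancel round trip built from this function and the cancel() helper shown further down this page (the JDL path is hypothetical; config, logger and a valid proxy are assumed to be set up as above):

jobid = submit('/tmp/myjob.jdl', ce=None, perusable=False)
if jobid:
    logger.info('submitted as %s' % jobid)
    # ... later, if the job is no longer needed
    cancel(jobid)
else:
    logger.error('submission failed')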
Example #6
def submit(jdlpath, ce=None, perusable=False):
    """Submit a JDL file to LCG"""

    # doing job submission
    cmd = 'glite-wms-job-submit -a'
    exec_bin = True

    if not check_proxy():
        logger.warning('LCG plugin not active.')
        return

    if not credential().isValid('01:00'):
        logger.warning('GRID proxy lifetime shorter than 1 hour')
        return

    submit_opt = __set_submit_option__()

    if not submit_opt:
        return
    else:
        cmd += submit_opt

    if ce:
        cmd += ' -r %s' % ce

    cmd = '%s --nomsg "%s" < /dev/null' % (cmd, jdlpath)

    logger.debug('job submit command: %s' % cmd)

    rc, output, m = getShell().cmd1(
        '%s%s' % (__get_cmd_prefix_hack__(binary=exec_bin), cmd),
        allowed_exit=[0, 255],
        timeout=config['SubmissionTimeout'])

    if output:
        output = "%s" % output.strip()

    match = re.search('.*(https://\S+:9000/[0-9A-Za-z_\.\-]+)', output)

    if match:
        logger.debug('job id: %s' % match.group(1))
        if perusable:
            logger.info("Enabling perusal")
            getShell().cmd1("glite-wms-job-perusal --set -f stdout %s" %
                            match.group(1))

        # remove the glite command log if it exists
        __clean_gridcmd_log__('(.*-job-submit.*\.log)', output)
        return match.group(1)

    else:
        logger.warning('Job submission failed.')
        __print_gridcmd_log__('(.*-job-submit.*\.log)', output)
        return
Example #7
def arc_submit(jdlpath, ce, verbose):
    """ARC CE direct job submission"""

    # use the CREAM UI check as it's the same
    if not __cream_ui_check__():
        return

    # No longer need to specify CE if available in client.conf
    # if not ce:
    #    logger.warning('No CREAM CE endpoint specified')
    #    return

    # write to a temporary XML file as otherwise can't submit in parallel
    tmpstr = "/tmp/" + randomString() + ".arcsub.xml"
    cmd = "arcsub %s -S org.nordugridftpjob -j %s" % (__arc_get_config_file_arg__(), tmpstr)
    exec_bin = True

    if verbose:
        cmd += " -d DEBUG "

    if ce:
        cmd += " -c %s" % ce

    cmd = '%s "%s" < /dev/null' % (cmd, jdlpath)

    logger.debug("job submit command: %s" % cmd)

    rc, output, m = getShell().cmd1(
        "%s%s" % (__get_cmd_prefix_hack__(binary=exec_bin), cmd),
        allowed_exit=[0, 255],
        timeout=config["SubmissionTimeout"],
    )

    if output:
        output = "%s" % output.strip()
    getShell().system("rm " + tmpstr)

    # Job submitted with jobid:
    # gsiftp://lcgce01.phy.bris.ac.uk:2811/jobs/vSoLDmvvEljnvnizHq7yZUKmABFKDmABFKDmCTGKDmABFKDmfN955m
    match = re.search("(gsiftp://\S+:2811/jobs/[0-9A-Za-z_\.\-]+)$", output)

    # Job submitted with jobid: https://ce2.dur.scotgrid.ac.uk:8443/arex/..
    if not match:
        match = re.search("(https://\S+:8443/arex/[0-9A-Za-z_\.\-]+)$", output)

    if match:
        logger.debug("job id: %s" % match.group(1))
        return match.group(1)
    else:
        logger.warning("Job submission failed.")
        return
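A small sketch chaining this with the arc_get_output(jid, directory) variant shown elsewhere on this page (CE endpoint and directories are hypothetical; a configured ARC client and valid proxy are assumed):

jid = arc_submit('/tmp/myjob.jdl', 'ce01.example.org', verbose=False)
if jid:
    # retrieve the sandbox once the job has finished
    arc_get_output(jid, '/tmp/myjob_output')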
Example #8
def arc_submit(jdlpath, ce, verbose):
    """ARC CE direct job submission"""

    # use the CREAM UI check as it's the same
    if not __cream_ui_check__():
        return

    # No longer need to specify CE if available in client.conf
    # if not ce:
    #    logger.warning('No CREAM CE endpoint specified')
    #    return

    # write to a temporary XML file as otherwise can't submit in parallel
    tmpstr = '/tmp/' + randomString() + '.arcsub.xml'
    cmd = 'arcsub %s -S org.nordugrid.gridftpjob -j %s' % (
        __arc_get_config_file_arg__(), tmpstr)
    exec_bin = True

    if verbose:
        cmd += ' -d DEBUG '

    if ce:
        cmd += ' -c %s' % ce

    cmd = '%s "%s" < /dev/null' % (cmd, jdlpath)

    logger.debug('job submit command: %s' % cmd)

    rc, output, m = getShell().cmd1(
        '%s%s' % (__get_cmd_prefix_hack__(binary=exec_bin), cmd),
        allowed_exit=[0, 255],
        timeout=config['SubmissionTimeout'])

    if output:
        output = "%s" % output.strip()
    getShell().system('rm ' + tmpstr)

    # Job submitted with jobid:
    # gsiftp://lcgce01.phy.bris.ac.uk:2811/jobs/vSoLDmvvEljnvnizHq7yZUKmABFKDmABFKDmCTGKDmABFKDmfN955m
    match = re.search('(gsiftp://\S+:2811/jobs/[0-9A-Za-z_\.\-]+)$', output)

    # Job submitted with jobid: https://ce2.dur.scotgrid.ac.uk:8443/arex/..
    if not match:
        match = re.search('(https://\S+:8443/arex/[0-9A-Za-z_\.\-]+)$', output)

    if match:
        logger.debug('job id: %s' % match.group(1))
        return match.group(1)
    else:
        logger.warning('Job submission failed.')
        return
Example #9
def get_output(jobid, directory, cred_req):
    """Retrieve the output of a job on the grid"""

    cmd = 'glite-wms-job-output'
    # general WMS options (somehow used by the glite-wms-job-output
    # command)
    if config['Config']:
        cmd += ' --config %s' % config['Config']

    cmd = '%s --noint --dir %s %s' % (cmd, directory, jobid)

    logger.debug('job get output command: %s' % cmd)

    rc, output, m = getShell(cred_req).cmd1(cmd, allowed_exit=[0, 255])

    match = re.search('directory:\n\s*([^\t\n\r\f\v]+)\s*\n', output)

    if not match:
        logger.warning('Job output fetch failed.')
        __print_gridcmd_log__('(.*-output.*\.log)', output)
        return False, 'cannot fetch job output'

    # job output fetching succeeded, try to remove the glite command
    # logfile if it exists
    __clean_gridcmd_log__('(.*-output.*\.log)', output)

    outdir = match.group(1)

#       some versions of LCG middleware create an extra output directory (named <uid>_<jid_hash>)
#       inside the job.outputdir. Try to match the jid_hash in the outdir. Do output movement
#       if the <jid_hash> is found in the path of outdir.
    import urlparse
    jid_hash = urlparse.urlparse(jobid)[2][1:]

    if outdir.count(jid_hash):
        if getShell(cred_req).system('mv "%s"/* "%s"' % (outdir, directory)) == 0:
            try:
                os.rmdir(outdir)
            except Exception as msg:
                logger.warning(
                    "Error trying to remove the empty directory %s:\n%s" % (outdir, msg))
        else:
            logger.warning("Error moving output from %s to %s.\nOutput is left in %s." % (
                outdir, directory, outdir))

    return __get_app_exitcode__(directory)
Example #10
def arc_purgeMultiple(jobids):
    """ARC CE job purging"""

    if not __cream_ui_check__():
        return False

    idsfile = tempfile.mktemp('.jids')
    with open(idsfile, 'w') as ids_file:
        ids_file.write('\n'.join(jobids) + '\n')

    cmd = 'arcclean'
    exec_bin = True

    cmd = '%s %s -i %s -j %s' % (cmd, __arc_get_config_file_arg__(), idsfile,
                                 config["ArcJobListFile"])

    logger.debug('job purge command: %s' % cmd)

    rc, output, m = getShell().cmd1(
        '%s%s' % (__get_cmd_prefix_hack__(binary=exec_bin), cmd),
        allowed_exit=[0, 255])

    logger.debug(output)

    # clean up tempfile
    if os.path.exists(idsfile):
        os.remove(idsfile)

    if rc == 0:
        return True
    else:
        return False
Example #11
def arc_get_output(jid, directory):
    """ARC CE job output retrieval"""

    if not __cream_ui_check__():
        return (False, None)

    # construct URI list from ID and output from arcls
    cmd = 'arcls %s %s' % (__arc_get_config_file_arg__(), jid)
    exec_bin = True
    logger.debug('arcls command: %s' % cmd)
    rc, output, m = getShell().cmd1(
        '%s%s' % (__get_cmd_prefix_hack__(binary=exec_bin), cmd),
        allowed_exit=[0, 255],
        timeout=config['SubmissionTimeout'])
    if rc:
        logger.error(
            "Could not find directory associated with ARC job ID '%s'" % jid)
        return False

    # URI is JID + filename
    gfiles = []
    for uri in output.split("\n"):
        if len(uri) == 0:
            continue
        uri = jid + "/" + uri
        gf = GridftpFileIndex()
        gf.id = uri
        gfiles.append(gf)

    cache = GridftpSandboxCache()
    cache.vo = config['VirtualOrganisation']
    cache.uploaded_files = gfiles
    return cache.download(files=map(lambda x: x.id, gfiles),
                          dest_dir=directory)
Example #12
def __arc_sync__(cedict):
    """Collect jobs to jobs.xml"""

    if cedict[0]:
        cmd = 'arcsync %s -j %s -f -c %s' % (__arc_get_config_file_arg__(),
                                             config["ArcJobListFile"],
                                             ' -c '.join(cedict))
    else:
        cmd = 'arcsync %s -j %s -f ' % (__arc_get_config_file_arg__(),
                                        config["ArcJobListFile"])

    if not check_proxy():
        logger.warning('LCG plugin is not active.')
        return False
    if not credential().isValid('01:00'):
        logger.warning('GRID proxy lifetime shorter than 1 hour')
        return False

    logger.debug('sync ARC jobs list with: %s' % cmd)
    rc, output, m = getShell().cmd1(
        '%s%s' % (__get_cmd_prefix_hack__(binary=True), cmd),
        allowed_exit=[0, 255],
        timeout=config['StatusPollingTimeout'])
    if rc != 0:
        logger.error('Unable to sync ARC jobs. Error: %s' % output)
Example #13
def cancel(jobid):
    """Cancel a job"""

    cmd = "glite-wms-job-cancel"
    exec_bin = True

    if not check_proxy():
        logger.warning("LCG plugin is not active.")
        return False
    if not credential().isValid("01:00"):
        logger.warning("GRID proxy lifetime shorter than 1 hour")
        return False

    cmd = "%s --noint %s" % (cmd, jobid)

    logger.debug("job cancel command: %s" % cmd)

    rc, output, m = getShell().cmd1("%s%s" % (__get_cmd_prefix_hack__(binary=exec_bin), cmd), allowed_exit=[0, 255])

    if rc == 0:
        # job cancelling succeeded, try to remove the glite command logfile
        # if it exists
        __clean_gridcmd_log__("(.*-job-cancel.*\.log)", output)
        return True
    else:
        logger.warning("Failed to cancel job %s.\n%s" % (jobid, output))
        __print_gridcmd_log__("(.*-job-cancel.*\.log)", output)
        return False
Example #14
def cancel_multiple(jobids, cred_req):
    """Cancel multiple jobs in one LCG job cancellation call"""

    # compose a temporary file with job ids in it
    if not jobids:
        return True

    # do the cancellation using a proper LCG command
    cmd = 'glite-wms-job-cancel'

    idsfile = tempfile.mktemp('.jids')
    with open(idsfile, 'w') as ids_file:
        ids_file.write('\n'.join(jobids) + '\n')

    # compose the cancel command
    cmd = '%s --noint -i %s' % (cmd, idsfile)

    logger.debug('job cancel command: %s' % cmd)

    rc, output, m = getShell(cred_req).cmd1(cmd, allowed_exit=[0, 255])

    # clean up tempfile
    if os.path.exists(idsfile):
        os.remove(idsfile)

    if rc == 0:
        # job cancelling succeeded, try to remove the glite command logfile
        # if it exists
        __clean_gridcmd_log__('(.*-job-cancel.*\.log)', output)
        return True
    else:
        logger.warning("Failed to cancel jobs.\n%s" % output)
        __print_gridcmd_log__('(.*-job-cancel.*\.log)', output)
        return False
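A sketch of a bulk cancellation call (the job ids are hypothetical, and cred_req stands for whatever credential-requirement object the caller's getShell() machinery expects):

jobids = ['https://lb.example.org:9000/JobOne',
          'https://lb.example.org:9000/JobTwo']
if not cancel_multiple(jobids, cred_req):
    logger.warning('bulk cancellation did not succeed for all jobs')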
Example #15
    def __init__(self, middleware="EDG"):
        super(GridProxy, self).__init__()
        self.middleware = middleware
        if self.middleware:
            self.shell = getShell(self.middleware)
        self.chooseCommandSet()
        return
Example #16
def __arc_sync__(cedict):
    """Collect jobs to jobs.xml"""

    if cedict[0]:
        cmd = "arcsync %s -j %s -f -c %s" % (
            __arc_get_config_file_arg__(),
            config["ArcJobListFile"],
            " -c ".join(cedict),
        )
    else:
        cmd = "arcsync %s -j %s -f " % (__arc_get_config_file_arg__(), config["ArcJobListFile"])

    if not check_proxy():
        logger.warning("LCG plugin is not active.")
        return False
    if not credential().isValid("01:00"):
        logger.warning("GRID proxy lifetime shorter than 1 hour")
        return False

    logger.debug("sync ARC jobs list with: %s" % cmd)
    rc, output, m = getShell().cmd1(
        "%s%s" % (__get_cmd_prefix_hack__(binary=True), cmd),
        allowed_exit=[0, 255],
        timeout=config["StatusPollingTimeout"],
    )
    if rc != 0:
        logger.error("Unable to sync ARC jobs. Error: %s" % output)
Example #17
def arc_get_output(jid, directory):
    """ARC CE job output retrieval"""

    if not __cream_ui_check__():
        return (False, None)

    # construct URI list from ID and output from arcls
    cmd = "arcls %s %s" % (__arc_get_config_file_arg__(), jid)
    exec_bin = True
    logger.debug("arcls command: %s" % cmd)
    rc, output, m = getShell().cmd1(
        "%s%s" % (__get_cmd_prefix_hack__(binary=exec_bin), cmd),
        allowed_exit=[0, 255],
        timeout=config["SubmissionTimeout"],
    )
    if rc:
        logger.error("Could not find directory associated with ARC job ID '%s'" % jid)
        return False

    # URI is JID + filename
    gfiles = []
    for uri in output.split("\n"):
        if len(uri) == 0:
            continue
        uri = jid + "/" + uri
        gf = GridftpFileIndex()
        gf.id = uri
        gfiles.append(gf)

    cache = GridftpSandboxCache()
    cache.vo = config["VirtualOrganisation"]
    cache.uploaded_files = gfiles
    return cache.download(files=map(lambda x: x.id, gfiles), dest_dir=directory)
Example #18
def cream_status(jobids):
    """CREAM CE job status query"""

    if not __cream_ui_check__():
        return [], []

    if not jobids:
        return [], []

    idsfile = tempfile.mktemp('.jids')
    with open(idsfile, 'w') as ids_file:
        ids_file.write('##CREAMJOBS##\n' + '\n'.join(jobids) + '\n')

    cmd = 'glite-ce-job-status'
    exec_bin = True

    cmd = '%s -L 2 -n -i %s' % (cmd, idsfile)
    logger.debug('job status command: %s' % cmd)

    rc, output, m = getShell().cmd1(
        '%s%s' % (__get_cmd_prefix_hack__(binary=exec_bin), cmd),
        allowed_exit=[0, 255],
        timeout=config['StatusPollingTimeout'])
    jobInfoDict = {}
    if rc == 0 and output:
        jobInfoDict = __cream_parse_job_status__(output)

    # clean up tempfile
    if os.path.exists(idsfile):
        os.remove(idsfile)

    return jobInfoDict
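A minimal sketch of consuming the result (the job id is hypothetical; the per-job keys depend on __cream_parse_job_status__, and the function returns a pair of empty lists rather than a dict when the UI check fails, hence the isinstance guard):

info = cream_status(['https://cream01.example.org:8443/CREAM123456'])
if isinstance(info, dict):
    for cream_id, details in info.items():
        logger.debug('job %s -> %s' % (cream_id, details))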
Example #19
    def __init__(self, middleware='EDG'):

        self.active = False

        self.re_token = re.compile('^token:(.*):(.*)$')

        self.credential = None

        self.middleware = middleware.upper()

        self.perusable = False

        self.config = getConfig('LCG')

#       check that UI has been set up
#       start up a shell object specific to the middleware
        self.shell = getShell(self.middleware)

        self.proxy_id = {}

        if not self.shell:
            logger.warning('LCG-%s UI has not been configured. The plugin has been disabled.' % self.middleware)
            return
        
#       create credential for this Grid object
        self.active = self.check_proxy()
Example #20
def cream_cancelMultiple(jobids):
    """CREAM CE job cancelling"""

    if not __cream_ui_check__():
        return False

    idsfile = tempfile.mktemp(".jids")
    with open(idsfile, "w") as ids_file:
        ids_file.write("##CREAMJOBS##\n" + "\n".join(jobids) + "\n")

    cmd = "glite-ce-job-cancel"
    exec_bin = True

    cmd = "%s -n -N -i %s" % (cmd, idsfile)

    logger.debug("job cancel command: %s" % cmd)

    rc, output, m = getShell().cmd1("%s%s" % (__get_cmd_prefix_hack__(binary=exec_bin), cmd), allowed_exit=[0, 255])

    logger.debug(output)

    # clean up tempfile
    if os.path.exists(idsfile):
        os.remove(idsfile)

    if rc == 0:
        return True
    else:
        return False
Example #21
def cream_status(jobids):
    """CREAM CE job status query"""

    if not __cream_ui_check__():
        return [], []

    if not jobids:
        return [], []

    idsfile = tempfile.mktemp(".jids")
    with open(idsfile, "w") as ids_file:
        ids_file.write("##CREAMJOBS##\n" + "\n".join(jobids) + "\n")

    cmd = "glite-ce-job-status"
    exec_bin = True

    cmd = "%s -L 2 -n -i %s" % (cmd, idsfile)
    logger.debug("job status command: %s" % cmd)

    rc, output, m = getShell().cmd1(
        "%s%s" % (__get_cmd_prefix_hack__(binary=exec_bin), cmd),
        allowed_exit=[0, 255],
        timeout=config["StatusPollingTimeout"],
    )
    jobInfoDict = {}
    if rc == 0 and output:
        jobInfoDict = __cream_parse_job_status__(output)

    # clean up tempfile
    if os.path.exists(idsfile):
        os.remove(idsfile)

    return jobInfoDict
Example #22
def get_loginfo(jobids, directory, cred_req, verbosity=1):
    """Fetch the logging info of the given job and save the output in the job's outputdir"""

    cmd = 'glite-wms-job-logging-info -v %d' % verbosity

    log_output = directory + '/__jobloginfo__.log'

    idsfile = tempfile.mktemp('.jids')
    with open(idsfile, 'w') as ids_file:
        ids_file.write('\n'.join(jobids) + '\n')

    cmd = '%s --noint -o %s -i %s' % (cmd, log_output, idsfile)

    logger.debug('job logging info command: %s' % cmd)

    rc, output, m = getShell(cred_req).cmd1(cmd, allowed_exit=[0, 255])
    os.remove(idsfile)

    if rc != 0:
        __print_gridcmd_log__('(.*-logging-info.*\.log)', output)
        return False
    else:
        # logging-info checking succeeded, try to remove the glite command
        # logfile if it exists
        __clean_gridcmd_log__('(.*-logging-info.*\.log)', output)
        # return the path to the saved logging info on success
        return log_output
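A short usage sketch (job id, directory and cred_req are hypothetical; the function returns either False or the path of the file it wrote, so the return value doubles as a success flag):

logpath = get_loginfo(['https://lb.example.org:9000/AbCdEf'],
                      '/tmp/myjob_output', cred_req, verbosity=2)
if logpath:
    with open(logpath) as f:
        logger.debug(f.read())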
Example #23
def cream_proxy_delegation(ce, delid, cred_req):
    """CREAM CE proxy delegation"""

    if not ce:
        logger.warning('No CREAM CE endpoint specified')
        return

    if not delid:

        logger.debug('making new proxy delegation to %s' % ce)

        cmd = 'glite-ce-delegate-proxy'

        cmd += ' -e %s' % ce.split('/cream')[0]

        delid = '%s_%s' % (credential_store[cred_req].identity, get_uuid())

        cmd = '%s "%s"' % (cmd, delid)

        logger.debug('proxy delegation command: %s' % cmd)

        rc, output, m = getShell(cred_req).cmd1(
            cmd, allowed_exit=[0, 255], timeout=config['SubmissionTimeout'])
        if rc != 0:
            # failed to delegate proxy
            logger.error('proxy delegation error: %s' % output)
            delid = ''
        else:
            # proxy delegated successfully
            t_expire = datetime.datetime.now(
            ) + credential_store[cred_req].time_left()

            logger.debug('new proxy at %s valid until %s' % (ce, t_expire))

    return delid
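A sketch of how the delegation id is typically threaded through repeated calls: pass an empty id the first time and reuse whatever comes back (the CE endpoint and cred_req are hypothetical):

delid = ''
delid = cream_proxy_delegation('cream01.example.org:8443/ce-cream/services/CREAM2',
                               delid, cred_req)
if delid:
    logger.debug('reusing delegation id %s for later submissions to this CE' % delid)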
Example #24
def arc_status(jobids, ce_list, cred_req):
    """ARC CE job status query"""

    if not jobids:
        return [], []

    idsfile = tempfile.mktemp('.jids')
    with open(idsfile, 'w') as ids_file:
        ids_file.write('\n'.join(jobids) + '\n')

    cmd = 'arcstat'

    cmd += ' %s -i %s -j %s' % (__arc_get_config_file_arg__(), idsfile,
                                config["ArcJobListFile"])
    logger.debug('job status command: %s' % cmd)

    rc, output, m = getShell(cred_req).cmd1(
        cmd, allowed_exit=[0, 1, 255], timeout=config['StatusPollingTimeout'])
    job_info_dict = {}

    if rc != 0:
        logger.warning(
            'jobs not found in XML file: arcsync will be executed to update the job information'
        )
        __arc_sync__(ce_list, cred_req)

    if rc == 0 and output:
        job_info_dict = __arc_parse_job_status__(output)

    return job_info_dict
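A sketch of a status poll with this function (job ids, CE list and cred_req are hypothetical); note that on a non-zero arcstat exit code it triggers an arcsync itself and returns an empty dictionary:

stats = arc_status(['gsiftp://ce01.example.org:2811/jobs/abc123'],
                   ['ce01.example.org'], cred_req)
for arc_jid, details in stats.items():
    logger.debug('ARC job %s -> %s' % (arc_jid, details))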
Example #25
def arc_purgeMultiple(jobids):
    """ARC CE job purging"""

    if not __cream_ui_check__():
        return False

    idsfile = tempfile.mktemp(".jids")
    with open(idsfile, "w") as ids_file:
        ids_file.write("\n".join(jobids) + "\n")

    cmd = "arcclean"
    exec_bin = True

    cmd = "%s %s -i %s -j %s" % (cmd, __arc_get_config_file_arg__(), idsfile, config["ArcJobListFile"])

    logger.debug("job purge command: %s" % cmd)

    rc, output, m = getShell().cmd1("%s%s" % (__get_cmd_prefix_hack__(binary=exec_bin), cmd), allowed_exit=[0, 255])

    logger.debug(output)

    # clean up tempfile
    if os.path.exists(idsfile):
        os.remove(idsfile)

    if rc == 0:
        return True
    else:
        return False
Example #26
def cancel(jobid):
    """Cancel a job"""

    cmd = 'glite-wms-job-cancel'
    exec_bin = True

    if not check_proxy():
        logger.warning('LCG plugin is not active.')
        return False
    if not credential().isValid('01:00'):
        logger.warning('GRID proxy lifetime shorter than 1 hour')
        return False

    cmd = '%s --noint %s' % (cmd, jobid)

    logger.debug('job cancel command: %s' % cmd)

    rc, output, m = getShell().cmd1(
        '%s%s' % (__get_cmd_prefix_hack__(binary=exec_bin), cmd),
        allowed_exit=[0, 255])

    if rc == 0:
        # job cancelling succeeded, try to remove the glite command logfile
        # if it exists
        __clean_gridcmd_log__('(.*-job-cancel.*\.log)', output)
        return True
    else:
        logger.warning("Failed to cancel job %s.\n%s" % (jobid, output))
        __print_gridcmd_log__('(.*-job-cancel.*\.log)', output)
        return False
Example #27
def cream_cancel_multiple(jobids, cred_req):
    """CREAM CE job cancelling"""

    idsfile = tempfile.mktemp('.jids')
    with open(idsfile, 'w') as ids_file:
        ids_file.write('##CREAMJOBS##\n' + '\n'.join(jobids) + '\n')

    cmd = 'glite-ce-job-cancel'

    cmd = '%s -n -N -i %s' % (cmd, idsfile)

    logger.debug('job cancel command: %s' % cmd)

    rc, output, m = getShell(cred_req).cmd1(cmd, allowed_exit=[0, 255])

    logger.debug(output)

    # clean up tempfile
    if os.path.exists(idsfile):
        os.remove(idsfile)

    if rc == 0:
        return True
    else:
        return False
Example #28
def cream_cancelMultiple(jobids):
    """CREAM CE job cancelling"""

    if not __cream_ui_check__():
        return False

    idsfile = tempfile.mktemp('.jids')
    with open(idsfile, 'w') as ids_file:
        ids_file.write('##CREAMJOBS##\n' + '\n'.join(jobids) + '\n')

    cmd = 'glite-ce-job-cancel'
    exec_bin = True

    cmd = '%s -n -N -i %s' % (cmd, idsfile)

    logger.debug('job cancel command: %s' % cmd)

    rc, output, m = getShell().cmd1(
        '%s%s' % (__get_cmd_prefix_hack__(binary=exec_bin), cmd),
        allowed_exit=[0, 255])

    logger.debug(output)

    # clean up tempfile
    if os.path.exists(idsfile):
        os.remove(idsfile)

    if rc == 0:
        return True
    else:
        return False
Example #29
def cream_status(jobids, cred_req):
    """CREAM CE job status query"""

    if not jobids:
        return [], []

    idsfile = tempfile.mktemp('.jids')
    with open(idsfile, 'w') as ids_file:
        ids_file.write('##CREAMJOBS##\n' + '\n'.join(jobids) + '\n')

    cmd = 'glite-ce-job-status'

    cmd = '%s -L 2 -n -i %s' % (cmd, idsfile)
    logger.debug('job status command: %s' % cmd)

    rc, output, m = getShell(cred_req).cmd1(
        cmd, allowed_exit=[0, 255], timeout=config['StatusPollingTimeout'])
    job_info_dict = {}
    if rc == 0 and output:
        job_info_dict = __cream_parse_job_status__(output)

    # clean up tempfile
    if os.path.exists(idsfile):
        os.remove(idsfile)

    return job_info_dict
Example #30
def arc_get_output(jid, directory, cred_req):
    """ARC CE job output retrieval"""

    # construct URI list from ID and output from arcls
    cmd = 'arcls %s %s' % (__arc_get_config_file_arg__(), jid)
    logger.debug('arcls command: %s' % cmd)
    rc, output, m = getShell(cred_req).cmd1(cmd,
                                            allowed_exit=[0, 255],
                                            timeout=config['SubmissionTimeout'])
    if rc:
        logger.error(
            "Could not find directory associated with ARC job ID '%s'" % jid)
        return False

    # URI is JID + filename
    gfiles = []
    for uri in output.split("\n"):
        if len(uri) == 0:
            continue
        uri = jid + "/" + uri
        gf = GridftpFileIndex()
        gf.id = uri
        gfiles.append(gf)

    cache = GridftpSandboxCache()
    cache.uploaded_files = gfiles
    return cache.download(cred_req=cred_req, files=map(lambda x: x.id, gfiles), dest_dir=directory)
Example #31
def arc_cancel_multiple(jobids, cred_req):
    """Cancel multiple jobs in one LCG job cancellation call"""

    # compose a temporary file with job ids in it
    if not jobids:
        return True

    cmd = 'arckill'

    idsfile = tempfile.mktemp('.jids')
    with open(idsfile, 'w') as ids_file:
        ids_file.write('\n'.join(jobids) + '\n')

    # compose the cancel command
    cmd = '%s %s -i %s -j %s' % (
        cmd, __arc_get_config_file_arg__(), idsfile, config["ArcJobListFile"])

    logger.debug('job cancel command: %s' % cmd)

    rc, output, m = getShell(cred_req).cmd1(cmd, allowed_exit=[0, 255])

    if rc == 0:
        # job cancelling succeeded, try to remove the glite command logfile
        # if it exists
        __clean_gridcmd_log__('(.*-job-cancel.*\.log)', output)
        return True
    else:
        logger.warning("Failed to cancel jobs.\n%s" % output)
        __print_gridcmd_log__('(.*-job-cancel.*\.log)', output)
        return False
Example #32
def cream_cancel_multiple(jobids, cred_req):
    """CREAM CE job cancelling"""

    idsfile = tempfile.mktemp('.jids')
    with open(idsfile, 'w') as ids_file:
        ids_file.write('##CREAMJOBS##\n' + '\n'.join(jobids) + '\n')

    cmd = 'glite-ce-job-cancel'

    cmd = '%s -n -N -i %s' % (cmd, idsfile)

    logger.debug('job cancel command: %s' % cmd)

    rc, output, m = getShell(cred_req).cmd1(cmd, allowed_exit=[0, 255])

    logger.debug(output)

    # clean up tempfile
    if os.path.exists(idsfile):
        os.remove(idsfile)

    if rc == 0:
        return True
    else:
        return False
Example #33
def cancel_multiple(jobids, cred_req):
    """Cancel multiple jobs in one LCG job cancellation call"""

    # compose a temporary file with job ids in it
    if not jobids:
        return True

    # do the cancellation using a proper LCG command
    cmd = 'glite-wms-job-cancel'

    idsfile = tempfile.mktemp('.jids')
    with open(idsfile, 'w') as ids_file:
        ids_file.write('\n'.join(jobids) + '\n')

    # compose the cancel command
    cmd = '%s --noint -i %s' % (cmd, idsfile)

    logger.debug('job cancel command: %s' % cmd)

    rc, output, m = getShell(cred_req).cmd1(cmd, allowed_exit=[0, 255])

    # clean up tempfile
    if os.path.exists(idsfile):
        os.remove(idsfile)

    if rc == 0:
        # job cancelling succeeded, try to remove the glite command logfile
        # if it exists
        __clean_gridcmd_log__('(.*-job-cancel.*\.log)', output)
        return True
    else:
        logger.warning("Failed to cancel jobs.\n%s" % output)
        __print_gridcmd_log__('(.*-job-cancel.*\.log)', output)
        return False
Example #34
def cream_proxy_delegation(ce, delid, cred_req):
    """CREAM CE proxy delegation"""

    if not ce:
        logger.warning('No CREAM CE endpoint specified')
        return

    if not delid:

        logger.debug('making new proxy delegation to %s' % ce)

        cmd = 'glite-ce-delegate-proxy'

        cmd += ' -e %s' % ce.split('/cream')[0]

        delid = '%s_%s' % (credential_store[cred_req].identity, get_uuid())

        cmd = '%s "%s"' % (cmd, delid)

        logger.debug('proxy delegation command: %s' % cmd)

        rc, output, m = getShell(cred_req).cmd1(cmd,
                                                allowed_exit=[0, 255],
                                                timeout=config['SubmissionTimeout'])
        if rc != 0:
            # failed to delegate proxy
            logger.error('proxy delegation error: %s' % output)
            delid = ''
        else:
            # proxy delegated successfully
            t_expire = datetime.datetime.now() + credential_store[cred_req].time_left()

            logger.debug('new proxy at %s valid until %s' % (ce, t_expire))

    return delid
Example #35
def get_loginfo(jobids, directory, cred_req, verbosity=1):
    """Fetch the logging info of the given job and save the output in the job's outputdir"""

    cmd = 'glite-wms-job-logging-info -v %d' % verbosity

    log_output = directory + '/__jobloginfo__.log'

    idsfile = tempfile.mktemp('.jids')
    with open(idsfile, 'w') as ids_file:
        ids_file.write('\n'.join(jobids) + '\n')

    cmd = '%s --noint -o %s -i %s' % (cmd, log_output, idsfile)

    logger.debug('job logging info command: %s' % cmd)

    rc, output, m = getShell(cred_req).cmd1(cmd, allowed_exit=[0, 255])
    os.remove(idsfile)

    if rc != 0:
        __print_gridcmd_log__('(.*-logging-info.*\.log)', output)
        return False
    else:
        # logging-info checking succeeded, try to remove the glite command
        # logfile if it exists
        __clean_gridcmd_log__('(.*-logging-info.*\.log)', output)
        # return the path to the saved logging info on success
        return log_output
Example #36
def arc_status(jobids, ce_list, cred_req):
    """ARC CE job status query"""

    if not jobids:
        return [], []

    idsfile = tempfile.mktemp('.jids')
    with open(idsfile, 'w') as ids_file:
        ids_file.write('\n'.join(jobids) + '\n')

    cmd = 'arcstat'

    cmd += ' %s -i %s -j %s' % (__arc_get_config_file_arg__(), idsfile, config["ArcJobListFile"])
    logger.debug('job status command: %s' % cmd)

    rc, output, m = getShell(cred_req).cmd1(cmd,
                                            allowed_exit=[0, 1, 255],
                                            timeout=config['StatusPollingTimeout'])
    job_info_dict = {}

    if rc != 0:
        logger.warning('jobs not found in XML file: arcsync will be executed to update the job information')
        __arc_sync__(ce_list, cred_req)

    if rc == 0 and output:
        job_info_dict = __arc_parse_job_status__(output)

    return job_info_dict
Example #37
def native_master_cancel(jobids, cred_req):
    """Native bulk cancellation supported by GLITE middleware."""

    cmd = 'glite-wms-job-cancel'

    if not __set_submit_option__():
        return False

    idsfile = tempfile.mktemp('.jids')
    with open(idsfile, 'w') as ids_file:
        ids_file.write('\n'.join(jobids) + '\n')

    cmd = '%s --noint -i %s' % (cmd, idsfile)

    logger.debug('job cancel command: %s' % cmd)

    rc, output, m = getShell(cred_req).cmd1(cmd, allowed_exit=[0, 255])

    # clean up tempfile
    if os.path.exists(idsfile):
        os.remove(idsfile)

    if rc != 0:
        logger.warning('Job cancellation failed.')
        __print_gridcmd_log__('(.*-job-cancel.*\.log)', output)
        return False
    else:
        # job cancellation succeeded, try to remove the glite command
        # logfile if it exists
        __clean_gridcmd_log__('(.*-job-cancel.*\.log)', output)
        return True
Example #38
def arc_get_output(jid, directory, cred_req):
    """ARC CE job output retrieval"""

    # construct URI list from ID and output from arcls
    cmd = 'arcls %s %s' % (__arc_get_config_file_arg__(), jid)
    logger.debug('arcls command: %s' % cmd)
    rc, output, m = getShell(cred_req).cmd1(
        cmd, allowed_exit=[0, 255], timeout=config['SubmissionTimeout'])
    if rc:
        logger.error(
            "Could not find directory associated with ARC job ID '%s'" % jid)
        return False

    # URI is JID + filename
    gfiles = []
    for uri in output.split("\n"):
        if len(uri) == 0:
            continue
        uri = jid + "/" + uri
        gf = GridftpFileIndex()
        gf.id = uri
        gfiles.append(gf)

    cache = GridftpSandboxCache()
    cache.uploaded_files = gfiles
    return cache.download(cred_req=cred_req,
                          files=map(lambda x: x.id, gfiles),
                          dest_dir=directory)
Example #39
def arc_cancel_multiple(jobids, cred_req):
    """Cancel multiple jobs in one LCG job cancellation call"""

    # compose a temporary file with job ids in it
    if not jobids:
        return True

    cmd = 'arckill'

    idsfile = tempfile.mktemp('.jids')
    with open(idsfile, 'w') as ids_file:
        ids_file.write('\n'.join(jobids) + '\n')

    # compose the cancel command
    cmd = '%s %s -i %s -j %s' % (cmd, __arc_get_config_file_arg__(), idsfile,
                                 config["ArcJobListFile"])

    logger.debug('job cancel command: %s' % cmd)

    rc, output, m = getShell(cred_req).cmd1(cmd, allowed_exit=[0, 255])

    if rc == 0:
        # job cancelling succeeded, try to remove the glite command logfile
        # if it exists
        __clean_gridcmd_log__('(.*-job-cancel.*\.log)', output)
        return True
    else:
        logger.warning("Failed to cancel jobs.\n%s" % output)
        __print_gridcmd_log__('(.*-job-cancel.*\.log)', output)
        return False
Example #40
def native_master_cancel(jobids, cred_req):
    """Native bulk cancellation supported by GLITE middleware."""

    cmd = 'glite-wms-job-cancel'

    if not __set_submit_option__():
        return False

    idsfile = tempfile.mktemp('.jids')
    with open(idsfile, 'w') as ids_file:
        ids_file.write('\n'.join(jobids) + '\n')

    cmd = '%s --noint -i %s' % (cmd, idsfile)

    logger.debug('job cancel command: %s' % cmd)

    rc, output, m = getShell(cred_req).cmd1(cmd, allowed_exit=[0, 255])

    # clean up tempfile
    if os.path.exists(idsfile):
        os.remove(idsfile)

    if rc != 0:
        logger.warning('Job cancellation failed.')
        __print_gridcmd_log__('(.*-job-cancel.*\.log)', output)
        return False
    else:
        # job cancellation succeeded, try to remove the glite command
        # logfile if it exists
        __clean_gridcmd_log__('(.*-job-cancel.*\.log)', output)
        return True
Example #41
def cream_status(jobids, cred_req):
    """CREAM CE job status query"""

    if not jobids:
        return [], []

    idsfile = tempfile.mktemp('.jids')
    with open(idsfile, 'w') as ids_file:
        ids_file.write('##CREAMJOBS##\n' + '\n'.join(jobids) + '\n')

    cmd = 'glite-ce-job-status'

    cmd = '%s -L 2 -n -i %s' % (cmd, idsfile)
    logger.debug('job status command: %s' % cmd)

    rc, output, m = getShell(cred_req).cmd1(cmd,
                                            allowed_exit=[0, 255],
                                            timeout=config['StatusPollingTimeout'])
    job_info_dict = {}
    if rc == 0 and output:
        job_info_dict = __cream_parse_job_status__(output)

    # clean up tempfile
    if os.path.exists(idsfile):
        os.remove(idsfile)

    return job_info_dict
Example #42
def arc_info(cred_req):
    """Run the arcinfo command"""

    cmd = 'arcinfo %s > /dev/null' % __arc_get_config_file_arg__()
    logger.debug("Running arcinfo command '%s'" % cmd)

    rc, output, m = getShell(cred_req).cmd1(
        cmd, allowed_exit=[0, 1, 255], timeout=config['StatusPollingTimeout'])
    return rc, output
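A small sketch using the returned pair as a health check before attempting ARC submission (cred_req is hypothetical):

rc, out = arc_info(cred_req)
if rc != 0:
    logger.warning('arcinfo failed, ARC endpoints may be unreachable:\n%s' % out)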
Example #43
    def __init__(self, middleware="EDG"):
        super(GridProxy, self).__init__()
        self.middleware = middleware
        if self.middleware:
            self.shell = getShell(self.middleware)
        self.gridCommand = GridCommand()
        self.vomsCommand = VomsCommand()
        self.chooseCommandSet()
        return
Example #44
def arc_submit(jdlpath, ce, verbose, cred_req):
    """ARC CE direct job submission"""

    # No longer need to specify CE if available in client.conf
    # if not ce:
    #    logger.warning('No CREAM CE endpoint specified')
    #    return

    # write to a temporary XML file as otherwise can't submit in parallel
    tmpstr = '/tmp/' + randomString() + '.arcsub.xml'
    cmd = 'arcsub %s -S org.nordugrid.gridftpjob -j %s' % (__arc_get_config_file_arg__(), tmpstr)

    if verbose:
        cmd += ' -d DEBUG '

    if ce:
        cmd += ' -c %s' % ce

    cmd = '%s "%s" < /dev/null' % (cmd, jdlpath)

    logger.debug('job submit command: %s' % cmd)

    rc, output, m = getShell(cred_req).cmd1(cmd,
                                            allowed_exit=[0, 255],
                                            timeout=config['SubmissionTimeout'])

    if output:
        output = "%s" % output.strip()
    getShell().system('rm ' + tmpstr)

    # Job submitted with jobid:
    # gsiftp://lcgce01.phy.bris.ac.uk:2811/jobs/vSoLDmvvEljnvnizHq7yZUKmABFKDmABFKDmCTGKDmABFKDmfN955m
    match = re.search(r'(gsiftp://\S+:2811/jobs/[0-9A-Za-z_\.\-]+)$', output)

    # Job submitted with jobid: https://ce2.dur.scotgrid.ac.uk:8443/arex/..
    if not match:
        match = re.search(r'(https://\S+:8443/arex/[0-9A-Za-z_\.\-]+)$', output)

    if match:
        logger.debug('job id: %s' % match.group(1))
        return match.group(1)
    else:
        logger.warning('Job submission failed.')
        return
Example #45
def submit(jdlpath, cred_req, ce=None, perusable=False):
    """Submit a JDL file to LCG"""

    # doing job submission
    cmd = 'glite-wms-job-submit -a'

    submit_opt = __set_submit_option__()

    if not submit_opt:
        return
    else:
        cmd += submit_opt

    if ce:
        cmd += ' -r %s' % ce

    cmd = '%s --nomsg "%s" < /dev/null' % (cmd, jdlpath)

    logger.debug('job submit command: %s' % cmd)

    rc, output, m = getShell(cred_req).cmd1(
        cmd, allowed_exit=[0, 255], timeout=config['SubmissionTimeout'])

    if output:
        output = "%s" % output.strip()

    match = re.search('.*(https://\S+:9000/[0-9A-Za-z_\.\-]+)', output)

    if match:
        logger.debug('job id: %s' % match.group(1))
        if perusable:
            logger.info("Enabling perusal")
            getShell(cred_req).cmd1(
                "glite-wms-job-perusal --set -f stdout %s" % match.group(1))

        # remove the glite command log if it exists
        __clean_gridcmd_log__('(.*-job-submit.*\.log)', output)
        return match.group(1)

    else:
        logger.warning('Job submission failed.')
        __print_gridcmd_log__('(.*-job-submit.*\.log)', output)
        return
Example #46
def arc_info(cred_req):
    """Run the arcinfo command"""

    cmd = 'arcinfo %s > /dev/null' % __arc_get_config_file_arg__()
    logger.debug("Running arcinfo command '%s'" % cmd)

    rc, output, m = getShell(cred_req).cmd1(cmd,
                                            allowed_exit=[0, 1, 255],
                                            timeout=config['StatusPollingTimeout'])
    return rc, output
Example #47
def submit(jdlpath, cred_req, ce=None, perusable=False):
    """Submit a JDL file to LCG"""

    # doing job submission
    cmd = 'glite-wms-job-submit -a'

    submit_opt = __set_submit_option__()

    if not submit_opt:
        return
    else:
        cmd += submit_opt

    if ce:
        cmd += ' -r %s' % ce

    cmd = '%s --nomsg "%s" < /dev/null' % (cmd, jdlpath)

    logger.debug('job submit command: %s' % cmd)

    rc, output, m = getShell(cred_req).cmd1(cmd,
                                            allowed_exit=[0, 255],
                                            timeout=config['SubmissionTimeout'])

    if output:
        output = "%s" % output.strip()

    match = re.search('.*(https://\S+:9000/[0-9A-Za-z_\.\-]+)', output)

    if match:
        logger.debug('job id: %s' % match.group(1))
        if perusable:
            logger.info("Enabling perusal")
            getShell(cred_req).cmd1("glite-wms-job-perusal --set -f stdout %s" % match.group(1))

        # remove the glite command log if it exists
        __clean_gridcmd_log__('(.*-job-submit.*\.log)', output)
        return match.group(1)

    else:
        logger.warning('Job submission failed.')
        __print_gridcmd_log__('(.*-job-submit.*\.log)', output)
        return
Example #48
def arc_info():
    """Run the arcinfo command"""

    cmd = 'arcinfo %s > /dev/null' % __arc_get_config_file_arg__()
    logger.debug("Running arcinfo command '%s'" % cmd)

    rc, output, m = getShell().cmd1(
        '%s%s' % (__get_cmd_prefix_hack__(binary=True), cmd),
        allowed_exit=[0, 1, 255],
        timeout=config['StatusPollingTimeout'])
    return rc, output
Example #49
def arc_info():
    """Run the arcinfo command"""

    cmd = "arcinfo %s > /dev/null" % __arc_get_config_file_arg__()
    logger.debug("Running arcinfo command '%s'" % cmd)

    rc, output, m = getShell().cmd1(
        "%s%s" % (__get_cmd_prefix_hack__(binary=True), cmd),
        allowed_exit=[0, 1, 255],
        timeout=config["StatusPollingTimeout"],
    )
    return rc, output
Example #50
def getEnvironment(c):
    import PACKAGE
    PACKAGE.standardSetup()

    # set up X509_CERT_DIR for DQ2
    from Ganga.Utility.GridShell import getShell
    gshell = getShell()
    if gshell:
        try:
            return {'X509_CERT_DIR': gshell.env['X509_CERT_DIR'],
                    'X509_USER_PROXY': gshell.env['X509_USER_PROXY']}
        except KeyError:
            return {'X509_CERT_DIR': '/etc/grid-security/certificates'}
Example #51
    def impl_delete(self, files=[], opts=''):
        """
        Deletes multiple files from remote gridftp server
        """

        shell = getShell(self.middleware)

        # the algorithm of downloading one file to a local directory
        class MyAlgorithm(Algorithm):
            def __init__(self, cacheObj):
                Algorithm.__init__(self)
                self.cacheObj = cacheObj

            def process(self, file):

                destURI = file.id

                uri_info = urisplit(destURI)

                cmd = 'uberftp %s "rm %s"' % (uri_info[1], uri_info[2])

                rc, output, m = self.cacheObj.__cmd_retry_loop__(
                    shell, cmd, self.cacheObj.max_try)

                if rc != 0:
                    self.cacheObj.logger.error(output)
                    return False
                else:
                    self.__appendResult__(file.id, file)
                    return True

        myAlg = MyAlgorithm(cacheObj=self)
        myData = Data(collection=files)

        runner = MTRunner(name='sandboxcache_lcgdel',
                          algorithm=myAlg,
                          data=myData)
        runner.start()
        runner.join(-1)

        # update the local index file
        del_files = runner.getResults().values()
        all_files = self.get_cached_files()

        left_files = []
        for f in all_files:
            if f not in del_files:
                left_files.append(f)

        self.impl_bookkeepUploadedFiles(left_files, append=False)

        return del_files
Example #52
    def impl_delete(self, files=[], opts=''):
        """
        Deletes multiple files from remote gridftp server
        """

        shell = getShell(self.middleware)

        # the algorithm of downloading one file to a local directory
        class MyAlgorithm(Algorithm):

            def __init__(self, cacheObj):
                Algorithm.__init__(self)
                self.cacheObj = cacheObj

            def process(self, file):

                destURI = file.id

                uri_info = urisplit(destURI)

                cmd = 'uberftp %s "rm %s"' % (uri_info[1], uri_info[2])

                rc, output, m = self.cacheObj.__cmd_retry_loop__(
                    shell, cmd, self.cacheObj.max_try)

                if rc != 0:
                    self.cacheObj.logger.error(output)
                    return False
                else:
                    self.__appendResult__(file.id, file)
                    return True

        myAlg = MyAlgorithm(cacheObj=self)
        myData = Data(collection=files)

        runner = MTRunner(
            name='sandboxcache_lcgdel', algorithm=myAlg, data=myData)
        runner.start()
        runner.join(-1)

        # update the local index file
        del_files = runner.getResults().values()
        all_files = self.get_cached_files()

        left_files = []
        for f in all_files:
            if f not in del_files:
                left_files.append(f)

        self.impl_bookkeepUploadedFiles(left_files, append=False)

        return del_files
Example #53
def list_match(jdlpath, ce=None):
    """Returns a list of computing elements can run the job"""

    re_ce = re.compile('^\s*\-\s*(\S+:(2119|8443)/\S+)\s*$')

    matched_ces = []

    cmd = 'glite-wms-job-list-match -a'
    exec_bin = True

    if not check_proxy():
        logger.warning('LCG plugin not active.')
        return

    if not credential().isValid('01:00'):
        logger.warning('GRID proxy lifetime shorter than 1 hour')
        return

    submit_opt = __set_submit_option__()

    if not submit_opt:
        return matched_ces
    else:
        cmd += submit_opt

    cmd = '%s --noint "%s"' % (cmd, jdlpath)

    logger.debug('job list match command: %s' % cmd)

    rc, output, m = getShell().cmd1(
        '%s%s' % (__get_cmd_prefix_hack__(binary=exec_bin), cmd),
        allowed_exit=[0, 255])

    for l in output.split('\n'):

        matches = re_ce.match(l)

        if matches:
            matched_ces.append(matches.group(1))

    if ce:
        if matched_ces.count(ce) > 0:
            matched_ces = [ce]
        else:
            matched_ces = []

    logger.debug('== matched CEs ==')
    for myce in matched_ces:
        logger.debug(myce)
    logger.debug('== matched CEs ==')

    return matched_ces
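A sketch combining this with submit(): match the JDL first, then steer the job to the first advertised CE (the JDL path is hypothetical; proxy and WMS configuration are assumed as above):

ces = list_match('/tmp/myjob.jdl')
if ces:
    jobid = submit('/tmp/myjob.jdl', ce=ces[0])
else:
    logger.warning('no CE matches this JDL')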
Example #54
def wrap_lcg_infosites(opts=""):
    """Wrap the lcg-infosites command"""

    cmd = 'lcg-infosites --vo %s %s' % (config['VirtualOrganisation'], opts)

    logger.debug('lcg-infosites command: %s' % cmd)

    rc, output, m = getShell().cmd1('%s' % cmd, allowed_exit=[0, 255])

    if rc != 0:
        return ""
    else:
        return output
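A minimal sketch (passing 'ce', one of the standard lcg-infosites topics; an empty string comes back on failure):

ce_table = wrap_lcg_infosites('ce')
if ce_table:
    logger.debug(ce_table)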
Example #55
        def run(self):
            gridshell = getShell(middleware=Download.prefix_hack)
            gridshell.env['DQ2_URL_SERVER'] = configDQ2['DQ2_URL_SERVER']
            gridshell.env['DQ2_URL_SERVER_SSL'] = configDQ2[
                'DQ2_URL_SERVER_SSL']
            gridshell.env['DQ2_LOCAL_ID'] = ''

            ## Don't set up from the included DQ2 package as this will fail, either because
            # of python version (2.5+ required) or LFC python bindings missing

            #import GangaAtlas.PACKAGE
            #try:
            #    pythonpath=GangaAtlas.PACKAGE.setup.getPackagePath2('DQ2Clients','PYTHONPATH',force=False)
            #except:
            #    pythonpath = ''
            #gridshell.env['PYTHONPATH'] = gridshell.env['PYTHONPATH']+':'+pythonpath

            ## exclude the Ganga-owned external package for LFC python binding
            pythonpaths = []
            for path in gridshell.env['PYTHONPATH'].split(':'):
                if not re.match('.*\/external\/lfc\/.*', path):
                    pythonpaths.append(path)
            gridshell.env['PYTHONPATH'] = ':'.join(pythonpaths)

            ## exclude any rubbish from Athena
            ld_lib_paths = []
            for path in gridshell.env['LD_LIBRARY_PATH'].split(':'):
                if not re.match('.*\/external\/lfc\/.*',
                                path) and not re.match(
                                    '.*\/sw\/lcg\/external\/.*', path):
                    ld_lib_paths.append(path)
            gridshell.env['LD_LIBRARY_PATH'] = ':'.join(ld_lib_paths)

            paths = []
            for path in gridshell.env['PATH'].split(':'):
                if not re.match('.*\/external\/lfc\/.*',
                                path) and not re.match(
                                    '.*\/sw\/lcg\/external\/.*', path):
                    paths.append(path)
            gridshell.env['PATH'] = ':'.join(paths)

            rc, out, m = gridshell.cmd1("source " + configDQ2['setupScript'] +
                                        " && " + self.cmd,
                                        allowed_exit=[0, 255])

            if (rc == 0):
                logger.debug("dq2-get finished: %s", self.cmd)
                logger.debug("dq2-get output: %s %s %s" % (rc, out, m))
                logger.warning("dq2-get finished")
            else:
                logger.error("Error occured during %s %s", self.cmd, out)
Example #56
def standardSetup():

    import PACKAGE
    PACKAGE.standardSetup()

    # set up X509_CERT_DIR for DQ2
    from Ganga.Utility.GridShell import getShell
    gshell = getShell()
    if gshell:
        try:
            os.environ.update({'X509_CERT_DIR':gshell.env['X509_CERT_DIR'],
                               'X509_USER_PROXY':gshell.env['X509_USER_PROXY']})
        except KeyError:
            os.environ.update({'X509_CERT_DIR':'/etc/grid-security/certificates'})
Example #57
    def run(self):
        gridshell = getShell()
        gridshell.env['LFC_HOST'] = config['ATLASOutputDatasetLFC']
        gridshell.env['LCG_CATALOG_TYPE'] = 'lfc'
        rc, out, m = gridshell.cmd1(self.cmd, allowed_exit=[0, 255])
        # rc, out = getstatusoutput(self.cmd)
        if rc == 0:
            logger.debug("lcg-cp finished: %s", self.cmd)
            logger.info("lcg-cp of %s finished", self.pfn)
            # Download.lock.acquire()
            # Download.rootfile[self.ifile].append(self.pfn)
            # Download.lock.release()
        else:
            logger.error("Error occurred during %s %s", self.cmd, out)
Example #58
    def run(self):
        gridshell = getShell(middleware=Download.prefix_hack)
        gridshell.env['LFC_HOST'] = config['ATLASOutputDatasetLFC']
        gridshell.env['LCG_CATALOG_TYPE'] = 'lfc'
        rc, out, m = gridshell.cmd1(self.cmd, allowed_exit=[0, 255])
        # rc, out = getstatusoutput(self.cmd)
        if rc == 0:
            logger.debug("lcg-cp finished: %s", self.cmd)
            logger.info("lcg-cp of %s finished", self.pfn)
            # Download.lock.acquire()
            # Download.rootfile[self.ifile].append(self.pfn)
            # Download.lock.release()
        else:
            logger.error("Error occurred during %s %s", self.cmd, out)
Example #59
def wrap_lcg_infosites(opts=""):
    """Wrap the lcg-infosites command"""

    cmd = 'lcg-infosites --vo %s %s' % (
        config['VirtualOrganisation'], opts)

    logger.debug('lcg-infosites command: %s' % cmd)

    rc, output, m = getShell().cmd1('%s' % cmd, allowed_exit=[0, 255])

    if rc != 0:
        return ""
    else:
        return output
Example #60
        def run(self):
            gridshell = getShell(middleware=Download.prefix_hack)

            gridshell.env['LFC_HOST'] = config['ATLASOutputDatasetLFC']
            gridshell.env['LCG_CATALOG_TYPE'] = 'lfc'

            rc, out, m = gridshell.cmd1(self.cmd, allowed_exit=[0, 255])
            #rc, out = getstatusoutput(self.cmd)
            if (rc == 0):
                logger.debug("lcglr: %s", self.cmd)
                Download.lock.acquire()
                Download.lfns.append(out.strip())
                Download.lock.release()
            else:
                logger.error("Error occured during %s %s", self.cmd, out)