Example #1
def clean():
    """
    Clean jobs that match the filters in the current request context.

    Parameters are given in the request URL:
        'id': a list of job IDs
        'name': a substring that has to be present in job names
        'state': the state that jobs have to be in

    Returns:
        status 200: A string with the number of cleaned jobs.
        status 4**: A string with an error message.
    """
    try:
        proxyid = getProxyId()
    except errors.NoSuchProxyError:
        return 'Wrong or no client certificate', 401

    try:
        jobids = getIDs()
    except Exception:
        return 'Invalid id parameter', 400

    name_filter = request.args.get('name', default='')
    state_filter = request.args.get('state', default='')

    jmgr = jobmgr.JobManager()
    numDeleted = jmgr.cleanJobs(proxyid, jobids, state_filter, name_filter)
    return json.dumps(numDeleted)
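For context, here is a minimal client-side sketch of how an endpoint like this might be called with the requests library. The base URL, route, HTTP method (DELETE) and certificate paths are assumptions, since the blueprint routing and deployment setup are not shown in these examples.

import requests

BASE_URL = 'https://act.example.com/jobs'                 # hypothetical endpoint
PROXY_CERT = ('/tmp/x509up_u1000', '/tmp/x509up_u1000')   # hypothetical (cert, key) pair

# clean jobs whose name contains 'test' and that are in state 'done'
response = requests.delete(
    BASE_URL,
    params={'id': '1,2,3', 'name': 'test', 'state': 'done'},
    cert=PROXY_CERT,
    verify='/etc/grid-security/certificates',             # typical grid CA directory
)
print(response.status_code, response.text)                # 200 and the number of cleaned jobs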
Example #2
def getResults():
    """
    Return a .zip archive of the job results folder.

    The request potentially accepts a list of IDs, but only the first one
    is processed because it is simpler to respond for a single job.

    The function creates a .zip archive of the job results. The archive
    could simply be sent in the response, but since the response is the last
    thing the function does, there would be no simple way to delete the
    archive afterwards and clean the job. The solution is therefore to
    create the archive, read its binary representation (read it as a binary
    file) into memory, remove the archive and send the byte stream to the
    client as a file.

    Returns:
        status 200: A .zip archive of job results.
        status 4** or 500: A string with an error message.
    """
    try:
        proxyid = getProxyId()
    except errors.NoSuchProxyError:
        return 'Wrong or no client certificate', 401

    try:
        jobids = getIDs()
    except Exception:
        return 'Invalid id parameter', 400
    if not jobids:
        return 'No job ID given', 400
    jobid = [jobids[0]]  # only take first job

    # get job results
    jmgr = jobmgr.JobManager()
    results = jmgr.getJobs(proxyid, jobid)
    if not results.jobdicts:
        return 'Results for job not found', 404
    resultDir = results.jobdicts[0]['dir']

    # create archive and read bytes
    # TODO: exception handling is not optimal; there should be a finally
    # clause that closes the file, but that would require knowing exactly
    # which exceptions the different calls can raise, which is hard to
    # find in the Python documentation.
    # TODO: this should probably be split up for more systematic error handling
    try:
        archivePath = shutil.make_archive(resultDir, 'zip', resultDir)
        archive = open(archivePath, 'rb')
        byteStream = io.BytesIO(archive.read())
        archive.close()
        os.remove(archivePath)
    except Exception as e:
        return 'Server error: {}'.format(str(e)), 500

    return send_file(byteStream,
                     mimetype='application/zip',
                     as_attachment=True,
                     attachment_filename=archivePath.split('/')[-1])
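One possible way to address the TODO above, sketched as a small helper with explicit cleanup. This is an illustration under the same assumptions as the code (shutil, io and os available), not the project's actual implementation.

import io
import os
import shutil


def archiveResultsToMemory(resultDir):
    """Zip resultDir, load the archive into memory and remove it from disk.

    Illustrative sketch only; the caller is assumed to turn any exception
    into a 500 response, as getResults() does above.
    """
    archivePath = shutil.make_archive(resultDir, 'zip', resultDir)
    try:
        # the with block guarantees the file handle is closed even if read() fails
        with open(archivePath, 'rb') as archive:
            byteStream = io.BytesIO(archive.read())
    finally:
        # remove the on-disk archive whether or not reading succeeded
        os.remove(archivePath)
    return byteStream, os.path.basename(archivePath)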
Example #3
def patch():
    """
    Set jobs' state based on request parameters.

    The parameter that defines the operation is passed in the request body
    in JSON format. It is a JSON object with a single property "arcstate",
    which has to be one of the settable states
    (for instance {"arcstate": "tofetch"}).

    Other parameters are passed in the URL:
        'id': a list of job IDs
        'name': a substring that has to be present in job names
        'state': the state that jobs have to be in

    Returns:
        status 200: A string with the number of affected jobs.
        status 4**: A string with an error message.
    """
    try:
        proxyid = getProxyId()
    except errors.NoSuchProxyError:
        return 'Wrong or no client certificate', 401

    try:
        jobids = getIDs()
    except Exception:
        return 'Invalid id parameter', 400

    name_filter = request.args.get('name', default='')
    state_filter = request.args.get('state', default='')

    jmgr = jobmgr.JobManager()

    # force ignores an incompatible mimetype; silent returns None instead
    # of calling on_json_loading_failed() on the request object
    patch = request.get_json(force=True, silent=True)
    if not patch:
        return 'Request data is not valid JSON', 400

    try:
        arcstate = patch['arcstate']
    except KeyError:
        return "Request data has no 'arcstate' property", 400

    if arcstate == 'tofetch':
        num = jmgr.fetchJobs(proxyid, jobids, name_filter)
    elif arcstate == 'tocancel':
        num = jmgr.killJobs(proxyid, jobids, state_filter, name_filter)
    elif arcstate == 'toresubmit':
        num = jmgr.resubmitJobs(proxyid, jobids, name_filter)
    else:
        return "'arcstate' should be either 'tofetch' or 'tocancel' or 'toresubmit'", 400
    return json.dumps(num)
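A matching client sketch for this endpoint, under the same hypothetical base URL and certificate setup as in the earlier sketch; it shows the JSON body with the single "arcstate" property described in the docstring.

import requests

BASE_URL = 'https://act.example.com/jobs'                 # hypothetical endpoint
PROXY_CERT = ('/tmp/x509up_u1000', '/tmp/x509up_u1000')   # hypothetical (cert, key) pair

# ask aCT to fetch the results of jobs 1 and 2 that are in state 'done'
response = requests.patch(
    BASE_URL,
    params={'id': '1,2', 'state': 'done'},                # URL filters
    json={'arcstate': 'tofetch'},                         # body parameter selecting the operation
    cert=PROXY_CERT,
    verify='/etc/grid-security/certificates',
)
print(response.status_code, response.text)                # 200 and the number of affected jobs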
Example #4
def stat():
    """
    Return status info for jobs in JSON format.

    Several parameters can be given in the URL. The possible filtering
    parameters are:
        'id': a list of job IDs
        'name': a substring that has to be present in job names
        'state': the state that jobs have to be in

    There are also two parameters that define which attributes should be
    returned:
        'client': a list of column names from client table
        'arc': a list of column names from arc table

    Returns:
        status 200: A JSON list of objects with the jobs' status info.
        status 4**: A string with an error message.
    """
    try:
        proxyid = getProxyId()
    except errors.NoSuchProxyError:
        return 'Wrong or no client certificate', 401

    try:
        jobids = getIDs()
    except Exception:
        return 'Invalid id parameter', 400

    name_filter = request.args.get('name', default='')
    state_filter = request.args.get('state', default='')

    clicols = request.args.get('client', default=[])
    if clicols:
        clicols = clicols.split(',')

    arccols = request.args.get('arc', default=[])
    if arccols:
        arccols = arccols.split(',')

    jmgr = jobmgr.JobManager()
    try:
        jobdicts = jmgr.getJobStats(proxyid, jobids, state_filter, name_filter,
                                    clicols, arccols)
    except Exception as e:
        # TODO: could also be server error
        return str(e), 400
    else:
        return json.dumps(jobdicts)
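A client sketch for the status endpoint under the same hypothetical setup; note that the column lists are sent as comma-separated strings, matching the split(',') calls above.

import requests

BASE_URL = 'https://act.example.com/jobs'                 # hypothetical endpoint
PROXY_CERT = ('/tmp/x509up_u1000', '/tmp/x509up_u1000')   # hypothetical (cert, key) pair

response = requests.get(
    BASE_URL,
    params={
        'state': 'done',                                  # filter on job state
        'client': 'id,jobname',                           # columns from the client table
        'arc': 'JobID,State,arcstate',                    # columns from the arc table
    },
    cert=PROXY_CERT,
    verify='/etc/grid-security/certificates',
)
for job in response.json():                               # a JSON list of per-job objects
    print(job)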
Example #5
def submit():
    """
    Submit a job from the current request context.

    Jobs are submitted with a multipart/form-data POST request. The job is
    submitted as a form containing an xRSL file (name="xrsl") and a site
    name (name="site").

    Returns:
        status 200: A string with the ID of the submitted job.
        status 4** or 500: A string with an error message.
    """
    try:
        proxyid = getProxyId()
    except errors.NoSuchProxyError:
        return 'Wrong or no client certificate', 401

    jmgr = jobmgr.JobManager()

    site = request.form.get('site', None)
    if not site:
        return 'No site given', 400
    xrsl_file = request.files.get('xrsl', None)
    if not xrsl_file:
        return 'No job description file given', 400
    jobdesc = xrsl_file.read()
    try:
        jobmgr.checkJobDesc(jobdesc)
        jobmgr.checkSite(site)
    except errors.InvalidJobDescriptionError as e:
        return 'Invalid job description', 400
    except errors.NoSuchSiteError as e:
        return 'Invalid site', 400
    else:
        try:
            jobid = jmgr.clidb.insertJobAndDescription(jobdesc, proxyid, site)
        except Exception as e:
            return 'Server error: {}'.format(str(e)), 500
        else:
            return str(jobid)
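A client sketch for the submission endpoint, showing the multipart form described in the docstring; the base URL, certificate paths, site name and xRSL file name are all hypothetical.

import requests

BASE_URL = 'https://act.example.com/jobs'                 # hypothetical endpoint
PROXY_CERT = ('/tmp/x509up_u1000', '/tmp/x509up_u1000')   # hypothetical (cert, key) pair

with open('job.xrsl', 'rb') as xrsl:                      # hypothetical xRSL job description
    response = requests.post(
        BASE_URL,
        data={'site': 'default'},                         # form field name="site"
        files={'xrsl': xrsl},                             # form field name="xrsl"
        cert=PROXY_CERT,
        verify='/etc/grid-security/certificates',
    )
print(response.status_code, response.text)                # 200 and the ID of the submitted job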
Example #6
def deleteProxies():
    """
    Delete proxies from the database.

    One parameter has to be given in the URL: 'id', a list of proxy IDs
    that should be deleted.

    The function first fetches all proxies that match the DN of the
    certificate from the request, then deletes those whose IDs are in the
    'id' parameter. This ensures that users cannot delete any proxies but
    their own.

    Returns:
        status 200: A string with the number of deleted proxies.
        status 4**: A string with an error message.
    """
    dn = getCertDN()
    pmgr = proxymgr.ProxyManager()
    jmgr = jobmgr.JobManager()
    proxies = pmgr.getProxiesWithDN(dn, columns=['id'])

    try:
        proxyids = getIDs()
    except Exception:
        return 'Invalid id parameter', 400
    if not proxyids:
        return 'No proxy ID given', 400

    numDeleted = 0
    for proxy in proxies:
        if proxy['id'] in proxyids:
            # do not remove a proxy on which jobs depend
            if not jmgr.getJobStats(proxy['id'], [], '', '', clicols=['id']):
                pmgr.arcdb.deleteProxy(proxy['id'])
                proxyids.remove(proxy['id'])  # optimize a little bit
                numDeleted += 1
    return json.dumps(numDeleted)
Example #7
def main():
    # parse arguments
    parser = argparse.ArgumentParser(description='Get jobs from aCT')
    parser.add_argument('-a',
                        '--all',
                        action='store_true',
                        help='all jobs that match other criteria')
    parser.add_argument('-j',
                        '--jobs',
                        default='',
                        help='comma separated list of job IDs or ranges')
    parser.add_argument(
        '-f',
        '--find',
        default='',
        help='get only jobs with matching (sub)string in their name')
    parser.add_argument('-s',
                        '--state',
                        default='',
                        help='get only jobs with certain state')
    parser.add_argument('-v',
                        '--verbose',
                        action='store_true',
                        help='show more information')
    parser.add_argument('-p',
                        '--proxy',
                        default=None,
                        help='custom path to proxy certificate')
    parser.add_argument('-n',
                        '--no-clean',
                        action='store_true',
                        help='do not clean jobs')

    clicommon.showHelpOnCommandOnly(parser)

    args = parser.parse_args()

    # logging
    logFormat = "[%(asctime)s] [%(filename)s:%(lineno)d] [%(levelname)s] - %(message)s"
    if args.verbose:
        logging.basicConfig(format=logFormat,
                            level=logging.DEBUG,
                            stream=sys.stdout)
    else:
        logging.basicConfig(format=logFormat,
                            level=logging.DEBUG,
                            filename=os.devnull)

    # create a list of jobs to work on
    if args.all:
        jobs = []  # empty means all jobs
    elif args.jobs:
        try:
            jobs = jobmgr.getIDsFromList(args.jobs)
        except InvalidJobRangeError as e:
            print("error: range '{}' is not a valid range".format(e.jobRange))
            sys.exit(2)
        except InvalidJobIDError as e:
            print("error: ID '{}' is not a valid ID".format(e.jobid))
            sys.exit(3)
    else:
        print("error: no jobs specified (use -a or -j)")
        sys.exit(10)

    # get proxy ID given proxy
    proxyid = clicommon.getProxyIdFromProxy(args.proxy)

    # get job info
    manager = jobmgr.JobManager()
    try:
        results = manager.getJobs(proxyid, jobs, args.state, args.find)
    except TmpConfigurationError:
        print('error: tmp directory not configured')
        sys.exit(5)

    if not results.jobdicts:
        print('no jobs to get')
        sys.exit(0)

    # copy job results
    dontRemove = []
    for result in results.jobdicts:
        try:
            if result['dir']:  # if there are job results in tmp
                dst_dirname = os.path.basename(os.path.normpath(
                    result['name']))
                dstdir = getLocalDir(dst_dirname)
                shutil.copytree(result['dir'], dstdir)
                print('Results stored at: {}'.format(dstdir))
            else:
                raise NoJobDirectoryError(result['dir'])

        except NoJobDirectoryError as e:
            print('error: tmp results directory {} does not exist'.format(
                e.jobdir))
        except TargetDirExistsError as e:
            print('error: job destination {} already exists'.format(e.dstdir))
            # do not clean a job whose results could not be copied
            dontRemove.append(result['id'])

    # remove jobs that should not be cleaned from the results object
    for jobid in dontRemove:
        for result in results.jobdicts:
            if result['id'] == jobid:
                jobix = results.clientIDs.index(result['id'])
                del results.clientIDs[jobix]
                del results.arcIDs[jobix]
                del results.jobdicts[jobix]
                break  # do not keep iterating over the list we just mutated

    # clean jobs
    if not args.no_clean:
        manager.forceCleanJobs(results)
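For illustration only: a sketch of the kind of parsing that jobmgr.getIDsFromList presumably performs on the -j argument (for example '1,3-5,7'), based on the help text and the two exceptions handled above. The real implementation is not shown in these examples and may differ; the sketch raises ValueError where the real code raises InvalidJobRangeError or InvalidJobIDError.

def parseJobList(jobList):
    """Parse a string like '1,3-5,7' into a list of integer job IDs (sketch only)."""
    ids = []
    for token in jobList.split(','):
        if '-' in token:             # a range such as '3-5'; id1 < id2 per the help text
            first, _, last = token.partition('-')
            if not first.isdigit() or not last.isdigit() or int(first) >= int(last):
                raise ValueError("range '{}' is not a valid range".format(token))
            ids.extend(range(int(first), int(last) + 1))
        elif token.isdigit():        # a single ID such as '7'
            ids.append(int(token))
        else:
            raise ValueError("ID '{}' is not a valid ID".format(token))
    return ids

# Example: parseJobList('1,3-5,7') -> [1, 3, 4, 5, 7]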
Example #8
def main():
    # parse arguments
    parser = argparse.ArgumentParser(description='Kill jobs')
    parser.add_argument('-a',
                        '--all',
                        action='store_true',
                        help='all jobs that match other criteria')
    parser.add_argument('-j',
                        '--jobs',
                        default='',
                        help='comma separated list of job IDs or ranges')
    parser.add_argument(
        '-f',
        '--find',
        default='',
        help='get only jobs with matching (sub)string in their name')
    parser.add_argument('-s',
                        '--state',
                        default='',
                        help='get only jobs with certain state')
    parser.add_argument('-v',
                        '--verbose',
                        action='store_true',
                        help='show more information')
    parser.add_argument('-p',
                        '--proxy',
                        default=None,
                        help='custom path to proxy certificate')

    clicommon.showHelpOnCommandOnly(parser)

    args = parser.parse_args()

    # logging
    logFormat = "[%(asctime)s] [%(filename)s:%(lineno)d] [%(levelname)s] - %(message)s"
    if args.verbose:
        logging.basicConfig(format=logFormat,
                            level=logging.DEBUG,
                            stream=sys.stdout)
    else:
        logging.basicConfig(format=logFormat,
                            level=logging.DEBUG,
                            filename=os.devnull)

    # create a list of jobs to work on
    if args.all:
        jobs = []  # empty means all jobs
    elif args.jobs:
        try:
            jobs = jobmgr.getIDsFromList(args.jobs)
        except InvalidJobRangeError as e:
            print("error: range '{}' is not a valid range".format(e.jobRange))
            sys.exit(2)
        except InvalidJobIDError as e:
            print("error: ID '{}' is not a valid ID".format(e.jobid))
            sys.exit(3)
    else:
        print("error: no jobs specified (use -a or -j)")
        sys.exit(10)

    # get proxy ID given proxy
    proxyid = clicommon.getProxyIdFromProxy(args.proxy)

    # kill jobs
    manager = jobmgr.JobManager()
    numKilled = manager.killJobs(proxyid, jobs, args.state, args.find)
    print('Jobs killed: {}'.format(numKilled))
Example #9
def main():
    # parse arguments
    parser = argparse.ArgumentParser(description='Get job info from aCT')
    parser.add_argument('-a',
                        '--all',
                        action='store_true',
                        help='all jobs that match other criteria')
    parser.add_argument(
        '-j',
        '--jobs',
        default='',
        help='ID/range(id1-id2;id1<id2)/comma separated list of IDs/ranges')
    parser.add_argument(
        '-f',
        '--find',
        default='',
        help='get only jobs with matching (sub)string in their name')
    parser.add_argument('-s',
                        '--state',
                        default='',
                        help='get only jobs with certain state')
    parser.add_argument('-n',
                        '--name',
                        default='',
                        help='get only jobs with given name')
    parser.add_argument('-v',
                        '--verbose',
                        action='store_true',
                        help='show more information')
    parser.add_argument('-p',
                        '--proxy',
                        default=None,
                        help='custom path to proxy certificate')

    # arguments passed directly to arccat
    parser.add_argument('-o',
                        '--stdout',
                        action='store_true',
                        help='show the stdout of the job (default)',
                        default=True)
    parser.add_argument('-e',
                        '--stderr',
                        action='store_true',
                        help='show the stderr of the job')
    #parser.add_argument('-l', '--joblog', action='store_true',
    #        help='show A-REX\'s error log of the job')
    #parser.add_argument('-P', '--listplugins', action='store_true',
    #        help='list the available plugins')
    #parser.add_argument('-t', '--timeout', type=int, nargs=1,
    #        help='timeout in seconds (default 20)', default=20)

    clicommon.showHelpOnCommandOnly(parser)

    args = parser.parse_args()

    # logging
    logFormat = "[%(asctime)s] [%(filename)s:%(lineno)d] [%(levelname)s] - %(message)s"
    if args.verbose:
        logging.basicConfig(format=logFormat,
                            level=logging.DEBUG,
                            stream=sys.stdout)
    else:
        logging.basicConfig(format=logFormat,
                            level=logging.DEBUG,
                            filename=os.devnull)

    # create the job manager
    manager = jobmgr.JobManager()

    # create a list of jobs to work on
    if args.all:
        jobs = []  # empty means all jobs
    elif args.jobs:
        try:
            jobs = jobmgr.getIDsFromList(args.jobs)
        except InvalidJobRangeError as e:
            print("error: range '{}' is not a valid range".format(e.jobRange))
            sys.exit(2)
        except InvalidJobIDError as e:
            print("error: ID '{}' is not a valid ID".format(e.jobid))
            sys.exit(3)
    else:
        print("error: no jobs specified (use -a or -j)")
        sys.exit(10)

    proxyid = clicommon.getProxyIdFromProxy(args.proxy)

    # get ARC job IDs of jobs that match filters
    try:
        jobdicts = manager.getJobStats(proxyid,
                                       jobs,
                                       args.state,
                                       args.find,
                                       clicols=[],
                                       arccols=["JobID", "StdOut", "StdErr"],
                                       jobname=args.name)
    except Exception as e:
        print('error: {}'.format(str(e)))
        sys.exit(9)

    if not jobdicts:  # no jobs so just exit
        print('no jobs found that fit given filters')
        sys.exit(0)

    for job in jobdicts:
        url = job["a_JobID"] + "/"
        if args.stderr:
            url += job["a_StdErr"]
        elif args.stdout:
            url += job["a_StdOut"]
        subprocess.run(["arccp", url, "-"])
Example #10
def main():
    # parse arguments
    parser = argparse.ArgumentParser(description='Get job info from aCT')
    parser.add_argument('-a',
                        '--all',
                        action='store_true',
                        help='all jobs that match other criteria')
    parser.add_argument(
        '-j',
        '--jobs',
        default='',
        help='ID/range(id1-id2;id1<id2)/comma separated list of IDs/ranges')
    parser.add_argument(
        '-f',
        '--find',
        default='',
        help='get only jobs with matching (sub)string in their name')
    parser.add_argument('-s',
                        '--state',
                        default='',
                        help='get only jobs with certain state')
    parser.add_argument('-v',
                        '--verbose',
                        action='store_true',
                        help='show more information')
    parser.add_argument('-p',
                        '--proxy',
                        default=None,
                        help='custom path to proxy certificate')
    parser.add_argument('--arc-cols',
                        default='JobID,State,arcstate',
                        help='columns from ARC table that should be fetched')
    parser.add_argument(
        '--client-cols',
        default='id,jobname',
        help='columns from client table that should be fetched')
    parser.add_argument('--get-cols',
                        action='store_true',
                        help='print all available column names')

    clicommon.showHelpOnCommandOnly(parser)

    args = parser.parse_args()

    # logging
    logFormat = "[%(asctime)s] [%(filename)s:%(lineno)d] [%(levelname)s] - %(message)s"
    if args.verbose:
        logging.basicConfig(format=logFormat,
                            level=logging.DEBUG,
                            stream=sys.stdout)
    else:
        logging.basicConfig(format=logFormat,
                            level=logging.DEBUG,
                            filename=os.devnull)

    # get column names from database
    manager = jobmgr.JobManager()
    if args.get_cols:
        clientCols = manager.getClientColumns()
        arcCols = manager.getArcColumns()
        print('client cols:', end=' ')
        for col in clientCols:
            print(col, end=' ')
        print()
        print('arc cols:', end=' ')
        for col in arcCols:
            print(col, end=' ')
        print()
        sys.exit(0)

    # create a list of jobs to work on
    if args.all:
        jobs = []  # empty means all jobs
    elif args.jobs:
        try:
            jobs = jobmgr.getIDsFromList(args.jobs)
        except InvalidJobRangeError as e:
            print("error: range '{}' is not a valid range".format(e.jobRange))
            sys.exit(2)
        except InvalidJobIDError as e:
            print("error: ID '{}' is not a valid ID".format(e.jobid))
            sys.exit(3)
    else:
        print("error: no jobs specified (use -a or -j)")
        sys.exit(10)

    # create column lists
    if not args.client_cols:
        clicols = []
    else:
        clicols = args.client_cols.split(',')
    if not args.arc_cols:
        arccols = []
    else:
        arccols = args.arc_cols.split(',')

    # get proxy ID given proxy
    proxyid = clicommon.getProxyIdFromProxy(args.proxy)

    # get information
    try:
        jobdicts = manager.getJobStats(proxyid,
                                       jobs,
                                       args.state,
                                       args.find,
                                       clicols=clicols,
                                       arccols=arccols)
    except Exception as e:
        print('error: {}'.format(str(e)))
        sys.exit(9)

    if not jobdicts:  # no jobs so just exit
        sys.exit(0)

    # For each column, determine biggest sized value so that output can
    # be nicely formatted.
    colsizes = {}
    for job in jobdicts:
        for key, value in job.items():
            # All keys have a letter and underscore prepended, which is not
            # used when printing
            colsize = max(len(str(key[2:])), len(str(value)))
            try:
                if colsize > colsizes[key]:
                    colsizes[key] = colsize
            except KeyError:
                colsizes[key] = colsize

    # Print table header
    for col in clicols:
        print('{:<{width}}'.format(col, width=colsizes['c_' + col]), end=' ')
    for col in arccols:
        print('{:<{width}}'.format(col, width=colsizes['a_' + col]), end=' ')
    print()
    line = ''
    for value in colsizes.values():
        line += '-' * value
    line += '-' * (len(colsizes) - 1)
    print(line)

    # Print jobs
    for job in jobdicts:
        for col in clicols:
            fullKey = 'c_' + col
            txt = job.get(fullKey)
            # just in case the value is a bunch of whitespace
            # TODO: str(txt) might not be a general fix; it is a direct fix
            #       for the problem encountered with the datetime.datetime
            #       object of the 'created' field, which has to be converted
            #       to a string. The same conversion is used for arccols below.
            # TODO: this fix assumes that all job fields are properly
            #       convertible to strings. Is that really so?
            if not txt or str(txt).strip() == '':  # short circuit important!
                txt = "''"
            print('{:<{width}}'.format(str(txt), width=colsizes[fullKey]),
                  end=' ')
        for col in arccols:
            fullKey = 'a_' + col
            txt = job.get(fullKey)
            # just in case the value is a bunch of whitespace
            if not txt or str(txt).strip() == '':  # short circuit important!
                txt = "''"
            print('{:<{width}}'.format(str(txt), width=colsizes[fullKey]),
                  end=' ')
        print()