Example #1
import os
import sys
import traceback

# Note: config, pipeline_utils, SPAN512_job, jobtracker, queue_managers,
# datafile, mailer, jobpool_cout and FailedPreCheckError are module-level
# names provided by the surrounding pipeline code (not shown in this excerpt).


def submit(job_row):
    """
    Submit a job to the QueueManager; if successful, store the returned queue ID.

    Input:
        job_row: A row from the jobs table. The data files associated
            with this job will be submitted for processing.
    Output:
        None
    """
    fns = pipeline_utils.get_fns_for_jobid(job_row["id"])

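    # Full path to the survey-specific search script in the pipeline's bin/ directory.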
    script = os.path.join(config.basic.pipelinedir, "bin", "%s_search.py" % config.basic.survey)

    # Specify requested resources for job submission
    if job_row["task"] == "rfifind":
        res = [4 * 60 * 60, 1024, 25]
    elif "search" in job_row["task"]:
        res = [165240, 1024, 28]  # 45.9 hrs
    elif job_row["task"] == "sifting":  # Sifting should be quick
        res = [30 * 60, 256, 5]
    elif "folding" in job_row["task"]:
        res = [96 * 60 * 60, 3000, 28]
    # elif job_row['task']=='tidyup':
    #    res = [30*60, 256, 5]
    else:
        # No resource specification for this task; fail early with a clear message.
        raise ValueError("Unrecognised task '%s': no resources defined" % job_row["task"])
    options = job_row["task"]

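    # Run a sanity check on the data files and determine the output directory,
    # then hand the job to the queue manager; each failure mode is handled below.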
    try:
        SPAN512_job.presubmission_check(fns)
        outdir = SPAN512_job.get_output_dir(fns)
        # Attempt to submit the job
        queue_id = config.jobpooler.queue_manager.submit(
            fns, outdir, job_row["id"], resources=res, script=script, opts=options
        )
    except FailedPreCheckError:
        # Error caught during presubmission check.
        exceptionmsgs = traceback.format_exception(*sys.exc_info())
        errormsg = "Job ID: %d " % job_row["id"]
        errormsg += "failed presubmission check!\n\n"
        errormsg += "".join(exceptionmsgs)

        jobpool_cout.outs("Job ID: %d failed presubmission check!\n\t%s\n" % (job_row["id"], exceptionmsgs[-1]))

        if config.email.send_on_terminal_failures:
            # Send error email
            msg = "Presubmission check failed!\n"
            msg += "Job ID: %d\n\n" % (job_row["id"])
            msg += errormsg
            msg += "\n*** Job has been terminally failed. ***\n"
            msg += "*** Job will NOT be re-submitted! ***\n"
            if config.basic.delete_rawdata:
                jobpool_cout.outs("Job #%d will NOT be retried. " "Data files will be deleted." % job_row["id"])
                msg += "*** Raw data files will be deleted. ***\n"
            else:
                jobpool_cout.outs("Job #%d will NOT be retried. " % job_row["id"])
            notification = mailer.ErrorMailer(msg, subject="Job failed presubmission check - Terminal")
            notification.send()

        if config.basic.delete_rawdata:
            pipeline_utils.clean_up(job_row["id"])

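        # Record the failed attempt and mark the job as terminally failed
        # in the tracker database.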
        queries = []
        queries.append(
            "INSERT INTO job_submits ("
            "job_id, "
            "status, "
            "created_at, "
            "updated_at, "
            "details) "
            "VALUES (%d, '%s', '%s', '%s', '%s')"
            % (job_row["id"], "precheck_failed", jobtracker.nowstr(), jobtracker.nowstr(), errormsg)
        )
        queries.append(
            "UPDATE jobs "
            "SET status='terminal_failure', "
            "details='Failed presubmission check', "
            "updated_at='%s'"
            "WHERE id=%d" % (jobtracker.nowstr(), job_row["id"])
        )
        jobtracker.query(queries)

    except (queue_managers.QueueManagerJobFatalError, datafile.DataFileError):
        # Error caught during job submission.
        exceptionmsgs = traceback.format_exception(*sys.exc_info())
        errormsg = "Error while submitting job!\n"
        errormsg += "\tJob ID: %d\n\n" % job_row["id"]
        errormsg += "".join(exceptionmsgs)

        jobpool_cout.outs("Error while submitting job!\n" "\tJob ID: %d\n\t%s\n" % (job_row["id"], exceptionmsgs[-1]))

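        # Record the failed submission attempt and mark the job as 'failed'
        # in the tracker database.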
        queries = []
        queries.append(
            "INSERT INTO job_submits ("
            "job_id, "
            "status, "
            "created_at, "
            "updated_at, "
            "details) "
            "VALUES (%d, '%s', '%s', '%s', '%s')"
            % (job_row["id"], "submission_failed", jobtracker.nowstr(), jobtracker.nowstr(), errormsg)
        )
        queries.append(
            "UPDATE jobs "
            "SET status='failed', "
            "details='Error while submitting job', "
            "updated_at='%s' "
            "WHERE id=%d" % (jobtracker.nowstr(), job_row["id"])
        )
        jobtracker.query(queries)
    except queue_managers.QueueManagerNonFatalError:
        # Do nothing. Don't submit the job. Don't mark the job as 'submitted'.
        # Don't mark the job as 'failed'. The job submission will be retried.
        pass
    except queue_managers.QueueManagerFatalError:
        # A fatal error occurred. Re-raise!
        raise
    else:
        # No error occurred
        msg = "Submitted job to process:\n"
        msg += "\tJob ID: %d, Queue ID: %s\n" % (job_row["id"], queue_id)
        msg += "\tData file(s):\n"
        for fn in fns:
            msg += "\t%s\n" % fn
        jobpool_cout.outs(msg)
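        # Record the queue ID and mark the job as 'submitted' in the tracker database.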
        queries = []
        queries.append(
            "INSERT INTO job_submits ("
            "job_id, "
            "queue_id, "
            "output_dir, "
            "status, "
            "created_at, "
            "updated_at, "
            "details) "
            "VALUES (%d,'%s','%s','%s','%s','%s','%s')"
            % (
                job_row["id"],
                queue_id,
                outdir,
                "running",
                jobtracker.nowstr(),
                jobtracker.nowstr(),
                "Job submitted to queue",
            )
        )
        queries.append(
            "UPDATE jobs "
            "SET status='submitted', "
            "details='Job submitted to queue', "
            "updated_at='%s' "
            "WHERE id=%d" % (jobtracker.nowstr(), job_row["id"])
        )
        jobtracker.query(queries)
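
A minimal usage sketch (not part of the original excerpt): how a job-pool loop might call submit() for each job the tracker reports as new. The exact jobtracker.query() return format and the jobs-table schema are assumptions here.

def submit_new_jobs():
    """Submit every job currently marked 'new' in the tracker database."""
    # Assumes jobtracker.query() on a single SELECT returns an iterable of rows.
    new_jobs = jobtracker.query("SELECT * FROM jobs WHERE status='new'")
    for job_row in new_jobs:
        try:
            submit(job_row)
        except queue_managers.QueueManagerFatalError:
            # submit() re-raises fatal queue-manager errors; stop submitting.
            raise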