Example no. 1
0
def job_status(job_id):
    """Report the state of a submitted motif-analysis job as JSON.

    Looks the job up in the database and answers with one of:
      - 'pending': job is still running
      - 'success': job finished; payload includes enriched motifs,
        their plot image URLs, occurrence counts and metadata
      - 'failure': job not found, or it finished with an error
        (the recorded exception log is returned as the message)

    Args:
        job_id: identifier of the job to query.

    Returns:
        A Flask JSON response (or a json.dumps string for 'pending').
    """
    if not job_exists(job_id):
        return jsonify(status='failure', message='Unable to locate job')

    async_id = get_async_id(job_id)
    dataset_id = None
    peakfile_id = None
    # ENCODE jobs carry an associated dataset/peakfile pair used to
    # locate their result files and metadata.
    if is_job_type_encode(job_id):
        data = get_encode_from_jobid(job_id)
        dataset_id = data['dataset_id']
        peakfile_id = data['peakfile_id']
    job_db = get_job_status(job_id)
    status = job_db['status']
    job = run_job.AsyncResult(async_id)
    # Call form works identically on Python 2 and 3.
    print('JOB STATE: {}'.format(job.state))

    if status == 'pending':
        return json.dumps({'status': 'pending', 'job_id': job_id})
    elif status == 'success':
        if dataset_id:
            metadata = json.loads(get_encode_metadata(peakfile_id))
            summary = read_summary('encode/{}/{}'.format(dataset_id, peakfile_id))
        else:
            summary = read_summary(job_id)
            metadata = {'filename': get_filename(async_id)}
        motif_occurrences = summary['motif_occurrences']
        peaks = summary['peaks']
        sorted_mo = sorted(motif_occurrences.items(),
                           key=operator.itemgetter(1), reverse=True)
        # Keep only motifs present in more than 10% of peaks, most
        # frequent first.  NOTE(review): assumes peaks > 0 — a zero
        # would raise ZeroDivisionError; confirm upstream guarantees.
        motifs = [i for i, j in sorted_mo if float(j) / peaks > 0.1]
        if dataset_id:
            # ENCODE results live under encode/<dataset>/<peakfile>/.
            # This branch historically returns lists while plain jobs
            # return dicts; preserved so existing consumers keep working.
            prefix = '/static/jobs/encode/{}/{}'.format(dataset_id, peakfile_id)
            images = ['{}/{}Combined_plots.png'.format(prefix, i) for i in motifs]
            rcimages = ['{}/{}Combined_plots_rc.png'.format(prefix, i) for i in motifs]
        else:
            prefix = '/static/jobs/{}'.format(job_id)
            images = {i: '{}/{}Combined_plots.png'.format(prefix, i) for i in motifs}
            rcimages = {i: '{}/{}Combined_plots_rc.png'.format(prefix, i) for i in motifs}
        return jsonify(status=job.status,
                       job_id=job_id,
                       motifs=motifs,
                       images=images,
                       rcimages=rcimages,
                       motif_occurrences=dict(sorted_mo),
                       metadata=metadata,
                       peaks=peaks)
    else:
        exception_log = job_db['exception_log']
        if exception_log:
            message = json.loads(exception_log)
        else:
            # BUG FIX: the old fallback was a single-quoted pseudo-JSON
            # string that made json.loads raise ValueError whenever a
            # failed job had no recorded log; use a real dict instead.
            message = {'stderr': 'none', 'stdlog': 'none', 'output': 'none'}
        return jsonify(status='failure', message=message)
Example no. 2
0
def encodejobs(dataset_id, peakfile_id):
    """Serve results for an ENCODE peak-file job, submitting it if needed.

    Behaviour depends on the job's recorded status:
      - 'success':    return the enriched-motif results
      - 'pending':    return a placeholder carrying the job id
      - 'inexistent': submit a new job, then behave like 'pending'
      - 'error':      return an error payload with the peakfile metadata

    GET requests receive a rendered 'encoderesults.html' page; other
    methods (POST) receive a JSON payload.

    Args:
        dataset_id: ENCODE dataset identifier.
        peakfile_id: ENCODE peak-file identifier within the dataset.

    Returns:
        A Flask response (HTML page or JSON), or a json.dumps string
        for the 'error' case.
    """
    job_status = encode_job_status(peakfile_id)
    if job_status == 'success':
        summary = read_summary('encode/{}/{}'.format(dataset_id, peakfile_id))
        metadata = json.loads(get_encode_metadata(peakfile_id))
        motif_occurrences = summary['motif_occurrences']
        peaks = summary['peaks']
        sorted_mo = sorted(motif_occurrences.items(),
                           key=operator.itemgetter(1), reverse=True)
        # Keep only motifs present in more than 10% of peaks.
        images = {i: '/static/jobs/encode/{}/{}/{}Combined_plots.png'.format(dataset_id, peakfile_id, i)
                  for i, j in sorted_mo if float(j) / peaks > 0.1}
        rcimages = {i: '/static/jobs/encode/{}/{}/{}Combined_plots_rc.png'.format(dataset_id, peakfile_id, i)
                    for i, j in sorted_mo if float(j) / peaks > 0.1}
        data = {'motifs': images,
                'motif_occurrences': summary['motif_occurrences'],
                'peaks': summary['peaks'],
                'rcimages': rcimages}
        if request.method == 'POST':
            job_id = get_encode_jobid(peakfile_id)
            return jsonify(job_id=job_id,
                           dataset_id=dataset_id,
                           peakfile_id=peakfile_id,
                           data=data)
        return render_template('encoderesults.html', job_id='none', data=data, metadata=metadata)
    elif job_status == 'pending':
        metadata = get_metadata_for_peakfile(dataset_id, peakfile_id)
        job_id = get_encode_jobid(peakfile_id)
        if request.method == 'GET':
            return render_template('encoderesults.html', job_id=job_id,
                                   dataset_id=dataset_id, peakfile_id=peakfile_id,
                                   data=json.dumps({}), metadata=json.dumps(metadata))
        return jsonify(job_id=job_id,
                       dataset_id=dataset_id,
                       peakfile_id=peakfile_id)
    elif job_status == 'inexistent':
        # No job yet: submit one, then respond exactly like 'pending'.
        metadata = get_metadata_for_peakfile(dataset_id, peakfile_id)
        job = process_job(request, metadata)
        if request.method == 'GET':
            return render_template('encoderesults.html', job_id=job.job_id,
                                   dataset_id=dataset_id, peakfile_id=peakfile_id,
                                   data=json.dumps({}), metadata=json.dumps(metadata))
        return jsonify(job_id=job.job_id,
                       dataset_id=dataset_id,
                       peakfile_id=peakfile_id)
    elif job_status == 'error':
        # BUG FIX: `metadata` was never assigned on this path in the
        # original code, so it raised NameError; fetch it like the
        # sibling branches before building the response.
        metadata = get_metadata_for_peakfile(dataset_id, peakfile_id)
        return json.dumps({'status': 'error', 'response': metadata})