Example 1
def abort(code):
    """Return the aiohttp client-error response object for an HTTP status code.

    Parameters:
        code: integer HTTP 4xx status code.

    Returns:
        An instance of the matching ``web.HTTPClientError`` subclass;
        unknown codes fall back to ``web.HTTPBadRequest()`` (same as the
        original ``else`` branch).
    """
    # Dict dispatch instead of a 27-branch if/elif chain: one O(1) lookup,
    # and the code->exception mapping is readable at a glance. Classes are
    # stored uninstantiated so only the selected response is constructed.
    responses = {
        400: web.HTTPBadRequest,
        401: web.HTTPUnauthorized,
        402: web.HTTPPaymentRequired,
        403: web.HTTPForbidden,
        404: web.HTTPNotFound,
        405: web.HTTPMethodNotAllowed,
        406: web.HTTPNotAcceptable,
        407: web.HTTPProxyAuthenticationRequired,
        408: web.HTTPRequestTimeout,
        409: web.HTTPConflict,
        410: web.HTTPGone,
        411: web.HTTPLengthRequired,
        412: web.HTTPPreconditionFailed,
        413: web.HTTPRequestEntityTooLarge,
        414: web.HTTPRequestURITooLong,
        415: web.HTTPUnsupportedMediaType,
        416: web.HTTPRequestRangeNotSatisfiable,
        417: web.HTTPExpectationFailed,
        421: web.HTTPMisdirectedRequest,
        422: web.HTTPUnprocessableEntity,
        424: web.HTTPFailedDependency,
        426: web.HTTPUpgradeRequired,
        428: web.HTTPPreconditionRequired,
        429: web.HTTPTooManyRequests,
        431: web.HTTPRequestHeaderFieldsTooLarge,
        451: web.HTTPUnavailableForLegalReasons,
    }
    return responses.get(code, web.HTTPBadRequest)()
Example 2
async def handle_kkdcp(request):
    """aiohttp handler for a KKDCP proxy request.

    Validates the request body size, decodes the wrapped Kerberos message,
    forwards it to the KDC, and returns the encoded KDC reply.

    Raises:
        web.HTTPLengthRequired: no Content-Length header.
        web.HTTPRequestEntityTooLarge: body exceeds ``MAX_LENGTH``.
        web.HTTPBadRequest: body could not be parsed by ``codec``.
        web.HTTPServiceUnavailable: KDC did not answer within 15 seconds.
    """
    length = request.content_length
    if length is None:
        raise web.HTTPLengthRequired(text="Length is required.")
    if length > MAX_LENGTH:
        raise web.HTTPRequestEntityTooLarge(text="Request is too large.")

    try:
        data = await request.read()
        proxy_request = codec.decode(data)
    except codec.ParserError as e:
        # Chain the cause so the original parser traceback is preserved.
        raise web.HTTPBadRequest(text=str(e)) from e

    loop = asyncio.get_event_loop()

    # TODO: Change this to look up the KDC to talk to
    try:
        # BUG FIX: the `loop` kwarg to asyncio.wait_for was deprecated in
        # Python 3.8 and removed in 3.10 (it raised TypeError there);
        # wait_for always uses the running event loop. forward_kerberos
        # still receives the loop explicitly, as before.
        krb5_response = await asyncio.wait_for(
            forward_kerberos(proxy_request.message, loop=loop), timeout=15)
    except asyncio.TimeoutError:
        raise web.HTTPServiceUnavailable(text="Timeout waiting for Kerberos server")

    return web.Response(body=codec.encode(krb5_response), content_type="application/kerberos")
Example 3
async def submit(request):
    """aiohttp handler that accepts a multipart job submission.

    Saves the uploaded input files into a fresh job directory, builds the
    ``oc run`` command line from the submitted options, enqueues the job on
    ``job_queue``, and writes a temporary ``<run_name>.status.json``.

    Returns:
        web.json_response with the job's info dict, or an HTTP error
        response for missing/oversized bodies or unauthenticated users.
    """
    global filerouter
    global servermode
    sysconf = au.get_system_conf()
    size_cutoff = sysconf['gui_input_size_limit']
    if request.content_length is None:
        return web.HTTPLengthRequired(
            text=json.dumps({
                'status': 'fail',
                'msg': 'Content-Length header required'
            }))
    if request.content_length > size_cutoff * 1024 * 1024:
        return web.HTTPRequestEntityTooLarge(text=json.dumps(
            {
                'status': 'fail',
                'msg': f'Input is too big. Limit is {size_cutoff}MB.'
            }))
    if servermode and server_ready:
        r = await cravat_multiuser.is_loggedin(request)
        if r == False:
            return web.json_response({'status': 'notloggedin'})
    jobs_dirs = await filerouter.get_jobs_dirs(request)
    jobs_dir = jobs_dirs[0]
    job_id = get_next_job_id()
    job_dir = os.path.join(jobs_dir, job_id)
    os.makedirs(job_dir, exist_ok=True)
    reader = await request.multipart()
    job_options = {}
    input_files = []
    while True:
        part = await reader.next()
        if not part:
            break
        if part.name.startswith('file_'):
            input_files.append(part)
            # Have to write to disk here
            wfname = part.filename
            wpath = os.path.join(job_dir, wfname)
            with open(wpath, 'wb') as wf:
                wf.write(await part.read())
        elif part.name == 'options':
            job_options = await part.json()
    # NOTE(review): assumes at least one 'file_' part was uploaded;
    # input_fnames[0] below raises IndexError otherwise — confirm the
    # client always sends one before hardening here.
    input_fnames = [fp.filename for fp in input_files]
    run_name = input_fnames[0]
    if len(input_fnames) > 1:
        run_name += '_and_' + str(len(input_fnames) - 1) + '_files'
    info_fname = '{}.status.json'.format(run_name)
    job_info_fpath = os.path.join(job_dir, info_fname)
    job = WebJob(job_dir, job_info_fpath)
    job.save_job_options(job_options)
    job.set_info_values(orig_input_fname=input_fnames,
                        run_name=run_name,
                        submission_time=datetime.datetime.now().isoformat(),
                        viewable=False)
    # Subprocess arguments
    input_fpaths = [os.path.join(job_dir, fn) for fn in input_fnames]
    run_args = ['oc', 'run']
    for fn in input_fnames:
        run_args.append(os.path.join(job_dir, fn))
    # Annotators
    if 'annotators' in job_options and len(
            job_options['annotators']
    ) > 0 and job_options['annotators'][0] != '':
        annotators = job_options['annotators']
        annotators.sort()
        run_args.append('-a')
        run_args.extend(annotators)
    else:
        # '' (not []) is deliberate: status_json['annotators'] historically
        # serializes as an empty string when no annotators are selected.
        annotators = ''
        run_args.append('-e')
        run_args.append('*')
    # Liftover assembly
    run_args.append('-l')
    if 'assembly' in job_options:
        assembly = job_options['assembly']
    else:
        assembly = constants.default_assembly
    run_args.append(assembly)
    if servermode and server_ready:
        await cravat_multiuser.update_user_settings(request,
                                                    {'lastAssembly': assembly})
    else:
        au.set_cravat_conf_prop('last_assembly', assembly)
    # Reports
    if 'reports' in job_options and len(job_options['reports']) > 0:
        run_args.append('-t')
        run_args.extend(job_options['reports'])
    else:
        run_args.extend(['--skip', 'reporter'])
    # Note
    # BUG FIX: `note` was previously assigned only when 'note' was present
    # in job_options, but it is read unconditionally when building
    # status_json below — submissions without a note raised NameError.
    # Default to '' so behavior with a note is unchanged.
    note = job_options.get('note', '')
    if note != '':
        run_args.append('--note')
        run_args.append(note)
    # Forced input format
    if 'forcedinputformat' in job_options and job_options['forcedinputformat']:
        run_args.append('--input-format')
        run_args.append(job_options['forcedinputformat'])
    if servermode:
        run_args.append('--writeadmindb')
        run_args.extend(['--jobid', job_id])
    run_args.append('--temp-files')
    global job_queue
    global run_jobs_info
    job_ids = run_jobs_info['job_ids']
    job_ids.append(job_id)
    run_jobs_info['job_ids'] = job_ids
    qitem = {'cmd': 'submit', 'job_id': job_id, 'run_args': run_args}
    job_queue.put(qitem)
    status = {'status': 'Submitted'}
    job.set_info_values(status=status)
    if servermode and server_ready:
        await cravat_multiuser.add_job_info(request, job)
    # makes temporary status.json
    status_json = {}
    status_json['job_dir'] = job_dir
    status_json['id'] = job_id
    status_json['run_name'] = run_name
    status_json['assembly'] = assembly
    status_json['db_path'] = ''
    status_json['orig_input_fname'] = input_fnames
    status_json['orig_input_path'] = input_fpaths
    status_json['submission_time'] = datetime.datetime.now().isoformat()
    status_json['viewable'] = False
    status_json['note'] = note
    status_json['status'] = 'Submitted'
    status_json['reports'] = []
    pkg_ver = au.get_current_package_version()
    status_json['open_cravat_version'] = pkg_ver
    status_json['annotators'] = annotators
    with open(os.path.join(job_dir, run_name + '.status.json'), 'w') as wf:
        json.dump(status_json, wf, indent=2, sort_keys=True)
    return web.json_response(job.get_info_dict())