# --- Example #1 ---
def main(*args, **kwargs):
    """Build and return the WSGI application.

    Ensures the working directory exists, validates (and if needed
    rebuilds) the BLAST protein database indexes, starts the background
    job manager, then wires up all Pyramid routes, views and static
    assets.

    Returns:
        The configured Pyramid WSGI application.
    """
    # Check to make sure everything is set up properly.
    if not os.path.exists(converted):
        os.makedirs(converted)
    if parameters.PROTEIN_DATABASES:
        blastdbcmd = parameters.HOME + "/bin/blastdbcmd"
        makeblastdb = parameters.HOME + "/bin/makeblastdb"
        if os.path.exists(blastdbcmd) and os.path.exists(makeblastdb):
            print(" + Validating protein databases indexes")
            for f in parameters.PROTEIN_DATABASES:
                # "blastdbcmd -info" exits non-zero when the index is
                # missing or broken; rebuild it in that case.
                p = subprocess.Popen([blastdbcmd, "-db", f, "-info"],
                                     stderr=subprocess.PIPE, stdout=subprocess.PIPE)
                # communicate() waits for the process and closes both
                # pipes, so no explicit close()/wait() is needed.
                p.communicate()
                if p.returncode != 0:
                    subprocess.call([makeblastdb, "-in", f, "-parse_seqids", "-dbtype", "prot"])
    Jobs.start()

    session_factory = UnencryptedCookieSessionFactoryConfig(parameters.SECRET_KEY)
    config = Configurator(renderer_globals_factory=RendererGlobals, session_factory=session_factory)
    # Routes: give each URL a name.
    config.add_route("index", "/")
    config.add_route("upload", "/init")
    config.add_route("convert", "/init_local")
    config.add_route("convert_url", "/init_url")
    config.add_route("query_init", "/query_init")
    config.add_route("add", "/add")
    config.add_route("merge", "/merge")
    config.add_route("view", "/view")
    config.add_route("results", "/results")
    config.add_route("peptide", "/peptide")
    config.add_route("select", "/select")
    config.add_route("spectrum", "/spectrum")
    config.add_route("lc", "/lc")
    config.add_route("tooltip", "/tooltip/{type}")
    config.add_route("gpmdb_peptide", "/gpmdb_peptide")
    # Views: now associate a function with each route name.
    #config.add_view(SearchHit, route_name="search_hit")
    config.add_view(Index, route_name="index")
    config.add_view(Upload, route_name="upload")
    config.add_view(Convert, route_name="convert")
    config.add_view(ConvertUrl, route_name="convert_url")
    config.add_view(QueryInitStatus, route_name="query_init")
    config.add_view(AddFile, route_name="add")
    config.add_view(MergeFile, route_name="merge")
    config.add_view(View, route_name="view")
    config.add_view(ListResults, route_name="results")
    config.add_view(ListPeptide, route_name="peptide")
    config.add_view(SelectInfo, route_name="select")
    config.add_view(Spectrum, route_name="spectrum")
    config.add_view(SpectumLC, route_name="lc")
    config.add_view(Tooltip, route_name="tooltip")
    config.add_view(GPMDB.GetObservationsForPeptide, route_name="gpmdb_peptide", renderer="json")
    # These views provide content which does not change, such as images
    # and javascript; cache for a week (except the test tree).
    config.add_static_view("/favicon.ico", parameters.HOME + "/res/favicon.ico", cache_max_age=3600 * 24 * 7)
    config.add_static_view("res", parameters.HOME + "/res", cache_max_age=3600 * 24 * 7)
    config.add_static_view("test", parameters.HOME + "/test", cache_max_age=0)
    return config.make_wsgi_app()
# --- Example #2 ---
def Convert(req):
    """Handle the /init_local route: index a file already on this server.

    For when this is running on the same server as Galaxy — the local
    files are used directly instead of being uploaded.  The file path
    arrives hex-encoded in the "file" query parameter.

    Returns an upload-progress page, or HTTPBadRequest_Param when a
    required parameter is missing or malformed.
    """
    try:
        fs = binascii.unhexlify(req.GET["file"])
    except (KeyError, TypeError, ValueError):
        # Missing parameter, or not valid hex (TypeError on Python 2,
        # binascii.Error/ValueError on Python 3).
        return HTTPBadRequest_Param("file")
    try:
        ref = Referencers[req.GET["type"]]
    except KeyError:
        return HTTPBadRequest_Param("type")
    # Build the index file; make sure the output directory exists first.
    if not os.path.exists(converted):
        os.makedirs(converted)
    # Local jobs are kept for one week before cleanup.
    (jobid, f) = Jobs.add_job("local", fs, 7 * 24 * 60 * 60, ref=ref)
    resp = render_to_response(templates + "upload.pt", {"file": f, "jobid": str(jobid)}, request=req)
    resp.cache_expires(0)
    return resp
# --- Example #3 ---
def Upload(req):
    """Handle the /init route: accept file uploads from a remote client.

    Registers the uploaded files as a "remote" job and returns a small
    JSON body with the merged file name and the job id.
    """
    import json  # stdlib; used for safe string escaping below
    fs = req.POST.getall("uploadedfiles[]")
    if platform.system() == "Windows":  # Windows has an extra .file in here for some reason
        for f in fs:
            if hasattr(f.file, "file"):
                f.file = f.file.file
    # Rewind every stream so the whole content is read.
    for f in fs:
        f.file.seek(0)
    # Build the index file; make sure the output directory exists first.
    if not os.path.exists(converted):
        os.makedirs(converted)
    try:
        # POST values are strings: coerce to an int number of days.
        # (The original multiplied the raw string, producing string
        # repetition instead of a retention time in seconds.)
        cleanup = int(req.POST["delete"])
    except (KeyError, ValueError):
        cleanup = 7  # default retention: one week
    (jobid, f) = Jobs.add_job("remote", fs, cleanup * 24 * 60 * 60)
    # json.dumps escapes any special characters in the file name;
    # jobid keeps its original unquoted-number formatting.
    json_response = '{"file":' + json.dumps(f) + ',"jobid":' + str(jobid) + '}\r\n'
    resp = Response(json_response)
    resp.cache_expires(0)
    return resp
# --- Example #4 ---
def ConvertUrl(req):
    """Handle the /init_url route: fetch and index a file from a URL.

    An optional "scan" query parameter pre-selects a scan in the viewer
    via the page's URL hash.  Returns an upload-progress page, or
    HTTPBadRequest_Param when a required parameter is missing.
    """
    _read_shortcut_params(req)
    default_hash = ""
    try:
        req_scan = req.GET["scan"]
        default_hash = "#S0;None;%s" % req_scan
    except KeyError:
        pass  # "scan" is optional

    try:
        url = req.GET["url"]
    except KeyError:
        return HTTPBadRequest_Param("url")
    try:
        ref = Referencers[req.GET["type"]]
    except KeyError:
        return HTTPBadRequest_Param("type")
    # Build the index file; make sure the output directory exists first.
    if not os.path.exists(converted):
        os.makedirs(converted)
    # URL jobs are kept for one week before cleanup.
    (jobid, f) = Jobs.add_job("url", str(url), 7 * 24 * 60 * 60, ref=ref)
    resp = render_to_response(templates + "upload.pt", {"file": f, "jobid": str(jobid), "default_hash": default_hash}, request=req)
    resp.cache_expires(0)
    return resp
def run(argv):
    """Parse the command line, merge it into the YAML config, and launch jobs.

    Command-line values override config-file values when explicitly
    given.  With --kill, running jobs are terminated instead of
    launching new ones.

    Args:
        argv: full argv list; argv[0] (the program name) is skipped.
    """
    parser = getParser()
    args = parser.parse_args(argv[1:])
    # safe_load: the config is plain data; yaml.load would execute
    # arbitrary Python tags.  "with" guarantees the file is closed
    # (the original leaked the open handle).
    with open(args.config, 'r') as config_file:
        config = yaml.safe_load(config_file)
    for ky in ['force', 'rootdir', 'rundir', 'verbose', 'flush_interval',
               'writers_hang', 'masters_hang', 'num_samples']:
        val = getattr(args, ky)
        if val in [None, False]:
            continue  # flag not given on the command line
        print("replacing config[%s] with %s (from command line)" % (ky, val))
        config[ky] = val

    jobs = Jobs(config)

    if args.kill:
        jobs.kill_all()
        return

    prepare_output_directory(config)
    copy_config_to_rundir(config)  # return value was unused

    daq_writer_hosts = assign_hosts('daq_writer', config)
    daq_master_hosts = assign_hosts('daq_master', config)
    # BUG FIX: the readers previously requested 'daq_master' hosts
    # (copy-paste); they are launched as 'ana_reader_master' below, so
    # assign their own hosts — TODO confirm this matches the config keys.
    ana_reader_hosts = assign_hosts('ana_reader_master', config)

    # Stagger the launches so each stage can connect to the previous one.
    jobs.launch('daq_writer', daq_writer_hosts)
    time.sleep(2)
    jobs.launch('daq_master', daq_master_hosts)
    time.sleep(2)
    jobs.launch('ana_reader_master', ana_reader_hosts)
    jobs.wait()