Example #1
# `hdf` and `sql` are assumed to come from the atlas package, as in
# Example #2 below; this snippet does not show its own imports.
from atlas import hdf, sql

def open_databases( conf ):
    """
    Returns open handles to the HDF and SQL databases named in the configuration.
    """
    db = hdf.hdf_open( conf.HDF_DATABASE, mode='r' )
    session = sql.get_session( conf.SQL_URI )
    return db, session
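
A minimal usage sketch for the helper above; the Conf class and both paths are hypothetical stand-ins for the real configuration object (Example #3 builds one with BaseConf):

class Conf(object):
    HDF_DATABASE = "mod454.h5"           # assumed HDF file path
    SQL_URI = "sqlite:///mod454.sqlite"  # assumed SQLite URI

db, session = open_databases( Conf() )
try:
    pass  # query the HDF handle and the SQL session here
finally:
    db.close()  # the HDF handle supports close(), as Examples #2 and #3 show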
Example #2
def loader( conf ):
    """
    Streams a tab-delimited fit file into per-chromosome HDF tables.
    """
    from atlas import hdf
    from mod454.schema import Mod454Schema as Schema
    last_chrom = table = None
    db = hdf.hdf_open( conf.HDF_DATABASE, mode='a', title='HDF database')
    gp = hdf.create_group( db=db, name=conf.DATA_LABEL, desc='data group', clobber=conf.CLOBBER ) 
    fit_meta = conf.fit[2]
    # iterate over the file and insert into table
    for line in open( conf.fit[1], "r" ):
        if line.startswith("chrom"): continue  #Skip possible header
        if line.startswith("#"): continue
        fields = line.rstrip('\r\n').split('\t')
        chrom = fields[fit_meta.chromCol]
        if chrom != last_chrom:
            if table: table.flush()
            table = hdf.create_table( db=db, name=chrom, where=gp, schema=Schema, clobber=False )
            last_chrom = chrom
        try:
            position = int(fields[fit_meta.positionCol])
            forward = float(fields[fit_meta.forwardCol])
            reverse = float(fields[fit_meta.reverseCol]) if fit_meta.reverseCol > -1 else 0.0
            row = ( position, forward, reverse, forward+reverse, )
            table.append( [ row ] )
        except ValueError:
            # Ignore bad lines
            pass
    if table: table.flush()  # table is None when the file had no data rows
    db.close()
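
A minimal driver for loader(), assuming only the configuration fields the function actually reads; FitMeta, the column indices, and the file names are hypothetical:

class FitMeta(object):
    # zero-based column indices into each tab-separated line
    chromCol, positionCol, forwardCol, reverseCol = 0, 1, 2, 3

class Conf(object):
    HDF_DATABASE = "mod454.h5"            # assumed output database
    DATA_LABEL = "MOD454"                 # name of the HDF group to create
    CLOBBER = True                        # overwrite an existing group
    fit = ( None, "fit.txt", FitMeta() )  # loader() reads only fit[1] and fit[2]

loader( Conf() )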
Example #3
    def index(self, trans, dataset_id=None, **kwds):
        """
        Main request handler
        """
        color = cycle( [LIGHT, WHITE] )
        data = trans.app.model.HistoryDatasetAssociation.get( dataset_id )
        if not data:
            raise paste.httpexceptions.HTTPRequestRangeNotSatisfiable( "Invalid reference dataset id: %s." % str( dataset_id ) )
        # the main configuration file
        conf = BaseConf(
            TITLE = "<i>%s</i>: %s" % (data.metadata.dbkey, data.metadata.label),
            HDF_DATABASE = os.path.join( data.extra_files_path, data.metadata.hdf ),
            SQL_URI = "sqlite:///%s" % os.path.join( data.extra_files_path, data.metadata.sqlite ),
            LABEL = data.metadata.label,
            FIT_LABEL = "%s-SIGMA-%d" % (data.metadata.label, 20),
            PRED_LABEL = "PRED-%s-SIGMA-%d" % (data.metadata.label, 20),
            )

        try:
            session = sql.get_session( conf.SQL_URI )
        except Exception:
            return trans.fill_template_mako('genetrack/invalid.html', dataset_id=dataset_id)

        if os.path.exists( conf.HDF_DATABASE ):
            db = hdf.hdf_open( conf.HDF_DATABASE, mode='r' )
            conf.CHROM_FIELDS = [(x,x) for x in hdf.GroupData(db=db, name=conf.LABEL).labels]
            db.close()
        else:
            query = session.execute(sql.select([sql.feature_table.c.chrom]).distinct())
            conf.CHROM_FIELDS = [(x.chrom,x.chrom) for x in query]

        # generate a new form based on the configuration
        form = formlib.main_form( conf )
        
        # clear the tempdir every once in a while
        atlas_utils.clear_tempdir( dir=conf.IMAGE_DIR, days=1, chance=10)

        incoming = form.defaults()
        incoming.update( kwds )
        
        # manage the zoom and pan requests
        incoming = formlib.zoom_change( kdict=incoming, levels=conf.LEVELS)
        incoming = formlib.pan_view( kdict=incoming )
        
        # process the form
        param = atlas.Param( **incoming )
        form.process( incoming )

        if kwds and form.isSuccessful():
            # merge in the successfully validated form values
            param.update( form.values() )

        # if the feature is a search word rather than a coordinate, go to the search page
        try:
            center = int( param.feature )
        except ValueError:
            # go and search for these
            return trans.response.send_redirect( web.url_for( controller='genetrack', action='search', word=param.feature, dataset_id=dataset_id ) )

        param.width  = min( [2000, int(param.img_size)] )
        param.xscale = [ param.start, param.end ] 
        param.show_labels = ( param.end - param.start ) <= SHOW_LABEL_LIMIT    
        
        # get the template and the function used to generate the tracks
        tmpl_name, track_maker  = conf.PLOT_MAPPER[param.plot]
        
        # Check the request against a hash; if the image was created before,
        # reuse it. hashlib.sha1 replaces the deprecated `sha` module used in
        # the original (requires `import hashlib` at module level).
        digest = hashlib.sha1()
        digest.update(str(dataset_id))
        for key in sorted(kwds.keys()):
            digest.update(str(kwds[key]))
        fname = "%s.png" % digest.hexdigest()
        fpath = os.path.join(conf.IMAGE_DIR, fname)

        charts = []
        param.fname  = fname
        
        # The SHA1 hash should uniquely identify the query string that created the plot.
        if os.path.exists(fpath):
            os.utime(fpath, (time.time(), time.time()))
            return trans.fill_template_mako(tmpl_name, conf=conf, form=form, param=param, dataset_id=dataset_id)
        
        # If the hashed filename doesn't exist, create it.
        if track_maker is not None and os.path.exists( conf.HDF_DATABASE ):
            # generate the fit track
            charts = track_maker( param=param, conf=conf )
            
        for label in list_labels( session ):
            charts.extend( feature_chart(param=param, session=session, label=label.name, label_dict={label.name:label.id}, color=color))

        track_chart = consolidate_charts( charts, param )
        track_chart.save(fname=fpath)

        return trans.fill_template_mako(tmpl_name, conf=conf, form=form, param=param, dataset_id=dataset_id)
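
The caching scheme in Example #3 is worth isolating: the handler derives a deterministic filename from the dataset id plus the sorted request parameters, so repeated identical requests hit the same image on disk. A standalone sketch of that idea, with hypothetical names:

import hashlib
import os

def cached_image_path( image_dir, dataset_id, kwds ):
    """
    Deterministic .png path for a (dataset id, request parameters) pair.
    """
    digest = hashlib.sha1()
    digest.update( str(dataset_id).encode("utf-8") )
    # sorting the keys makes the digest independent of parameter order
    for key in sorted(kwds):
        digest.update( str(kwds[key]).encode("utf-8") )
    return os.path.join( image_dir, "%s.png" % digest.hexdigest() )

# e.g. cached_image_path( "/tmp/images", 42, dict(start=0, end=5000) )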