Example 1
def getTrackData(user_name, root):
    #start from root and recursively makeItems
    #if they have a key, ie are queries/FeatureTracks turn only the relevant bits into a json string suitable for trackInfo.
    #Make a big list of all these and print to the server
    #important to read in SequenceTrack data (and other "default" tracks, like one with gene annotations or something) here
    print "inside getTrackData"
    trackData = []

    #manually add the sequence trackdata
    #TODO
    #will have to add "seq/{assembly}/{refseq}" and change directory structure
    #accordingly.  Remember to modify the prepare-refseq script to not overwrite
    #hg18 that's already there.  Also, have to give each donor genome an
    #assembly field to be able to pass correctly to Christos's stuff
    seqtrack = {"args" : {"chunkSize" : 20000},
                "url" : "seq/{assembly}/{refseq}/",
                "type" : "SequenceTrack",
                "label" : "DNA",
                "key" : "DNA"}
    trackData.append(seqtrack)

    #TODO:
    #there has to be a way to make this less gross
    #generate all the query trackdata
    for project in os.listdir(root):
        project_path = "%s/%s" % (root, project)
        if project.startswith( PROJECT_PREFIX ) and \
           os.path.isdir( project_path ) :
            for donor in os.listdir(project_path):
                donor_path = "%s/%s" % (project_path, donor)
                if donor.startswith( DONOR_PREFIX ) and \
                   os.path.isdir(donor_path) :
                    for query in os.listdir(donor_path):
                        query_path = "%s/%s" % (donor_path, query)
                        if query.startswith( QUERY_PREFIX ) and \
                           os.path.isdir(query_path) :

                            item = filestore_dojotree.makeItem(
                                user_name, query_path)
                            properties = ["url", "label", "key", "type"]
                            if len(item["sub_results"]) > 0:
                                for sr in item["sub_results"]:
                                    entry = {}
                                    for p in properties:
                                        entry[p] = item[p]
                                    entry["key"] = item["key"] + "/" + sr
                                    entry["url"] = item["url"] % sr
                                    trackData.append(entry)
                            else:
                                entry = {}
                                for p in properties:
                                    entry[p] = item[p]
                                trackData.append(entry)
                            print entry

    utils.printToServer(json.dumps(trackData))
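
# Every script in these examples replies through utils.printToServer while
# sys.stdout is rebound to a debug log.  A minimal sketch of what that helper is
# assumed to do (write to the real CGI stdout); the actual ../lib/utils.py may
# differ:
import sys

def printToServer(text):
    # write straight to the original stdout so the reply reaches the client
    sys.__stdout__.write(text)
    sys.__stdout__.flush()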
Example 2
def getTrackData( user_name, root ) :
    #start from root and recursively makeItems
    #if they have a key, ie are queries/FeatureTracks turn only the relevant bits into a json string suitable for trackInfo.  
    #Make a big list of all these and print to the server
    #important to read in SequenceTrack data (and other "default" tracks, like one with gene annotations or something) here
    print "inside getTrackData"
    trackData = []
    
    #manually add the sequence trackdata
    #TODO
    #will have to add "seq/{assembly}/{refseq}" and change directory structure
    #accordingly.  Remember to modify the prepare-refseq script to not overwrite
    #hg18 that's already there.  Also, have to give each donor genome an
    #assembly field to be able to pass correctly to Christos's stuff
    seqtrack = {"args" : {"chunkSize" : 20000},
                "url" : "seq/{assembly}/{refseq}/",
                "type" : "SequenceTrack",
                "label" : "DNA",
                "key" : "DNA"}
    trackData.append( seqtrack )

    #TODO:
    #there has to be a way to make this less gross
    #generate all the query trackdata
    for project in os.listdir(root) :
        project_path = "%s/%s" % (root,project)
        if project.startswith( PROJECT_PREFIX ) and \
           os.path.isdir( project_path ) :
            for donor in os.listdir(project_path) :
                donor_path = "%s/%s" % (project_path,donor)
                if donor.startswith( DONOR_PREFIX ) and \
                   os.path.isdir(donor_path) :
                    for query in os.listdir( donor_path ) :
                        query_path = "%s/%s" % (donor_path, query)
                        if query.startswith( QUERY_PREFIX ) and \
                           os.path.isdir(query_path) :

                            item = filestore_dojotree.makeItem( user_name, query_path )
                            properties = ["url","label","key","type"]
                            if len(item["sub_results"]) > 0 :
                                for sr in item["sub_results"] :
                                    entry = {}
                                    for p in properties :
                                        entry[p] = item[p]
                                    entry["key"] = item["key"]+"/"+sr
                                    entry["url"] = item["url"] % sr
                                    trackData.append(entry)
                            else :
                                entry = {}
                                for p in properties :
                                    entry[p] = item[p]
                                trackData.append(entry)
                            print entry

    utils.printToServer( json.dumps(trackData) )
def handleQuery( user_name, path ) :
    print "handleQuery,", path
    total = 0
    items = []
    things = os.listdir( path )
    things = sorted( things, key=utils.chromKeyer )
    for thing in things :
        total += 1
        fullpath = "%s/%s" % (path,thing)
        item = makeItem( user_name, fullpath )
        if item :
            items.append( item )

    jresponse = {'total' : total, 'items' : items}
    response = json.dumps( jresponse )
    print response
    utils.printToServer( response )
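
# handleQuery sorts directory entries with utils.chromKeyer.  A plausible sketch
# of such a sort key, assuming names like "chr1" ... "chr22", "chrX", "chrY"
# should come out in numeric order with X/Y after the autosomes; the project's
# real chromKeyer may differ:
import re

def chromKeyer(name):
    m = re.search(r'chr(\w+)', name)
    if not m:
        return (2, 0, name)           # non-chromosome entries sort last
    chrom = m.group(1)
    if chrom.isdigit():
        return (0, int(chrom), name)  # autosomes in numeric order
    return (1, 0, chrom)              # X, Y, M after the numbers

# usage, as in handleQuery above: sorted(os.listdir(path), key=chromKeyer)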
Example 4
def handleQuery(user_name, path):
    print "handleQuery,", path
    total = 0
    items = []
    things = os.listdir(path)
    things = sorted(things, key=utils.chromKeyer)
    for thing in things:
        total += 1
        fullpath = "%s/%s" % (path, thing)
        item = makeItem(user_name, fullpath)
        if item:
            items.append(item)

    jresponse = {'total': total, 'items': items}
    response = json.dumps(jresponse)
    print response
    utils.printToServer(response)
Example 5
importre = re.compile(r'\s*import\s+(.*)\s*;')

debugging = False 

def moveIfExists( source, dest ) :
    if os.path.exists( source ) :
        shutil.move( source, dest )
        return True
    else :
        print "\n%s doesn't exist!\n" % source
        return False

def validateGenomes( query, perms ) :
    pass

utils.printToServer( 'Content-type: text/json\n\n' )
#utils.printToServer( utils.textarea_opener )

if debugging :
    query_name = 'debug'
    query = '''use parsed_tables
genome NA18507;
import READS;
import genes;

H1=select interval_creation() from READS using intervals(location,
                                                         mate_loc, both_mates) where location>=0 and mate_loc>=0 and
((mate_loc-location>1000 and mate_loc-location<2000000) or
 (location-mate_loc>1000 and location-mate_loc<2000000))

select * from H1 where interval_coverage>5'''
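
# The importre pattern above pulls the table name out of "import NAME;" lines
# such as the ones in the debug query.  A small hypothetical helper (listImports
# is not part of the original code):
import re

importre = re.compile(r'\s*import\s+(.*)\s*;')

def listImports(query_text):
    # collect the names referenced by "import ...;" lines of a query
    names = []
    for line in query_text.splitlines():
        m = importre.match(line)
        if m:
            names.append(m.group(1).strip())
    return names

# on the debug query above this returns ['READS', 'genes']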
Example 6
import utils
import time
import hashlib
from subprocess import Popen, PIPE

err_filename = "%s/login_error.txt" % (GlobalConfig.DEBUG_DIR)
sys.stderr = open(err_filename, 'w')
out_filename = "%s/login_output.txt" % (GlobalConfig.DEBUG_DIR)
sys.stdout = open(out_filename, 'w')

fields = cgi.FieldStorage()
user_name = fields.getvalue("user_name")
passwd = fields.getvalue("passwd")
is_registering = fields.getvalue("is_registering")

utils.printToServer('Content-type: text/json\n\n')
status, message = "ok", "ok"

print fields

if is_registering == 'true':
    #add permissions
    passwd_file = "../lib/passwds.json"
    perm_file = "../lib/permissions.json"
    jperms = utils.fileToJson(perm_file)
    jpasswds = utils.fileToJson(passwd_file)
    if user_name in jpasswds:
        status, message = "bad", "A user by the name '%s' already exists" % user_name
    else:
        m = hashlib.md5()
        m.update(passwd)
import sys
sys.path.append("../lib")
import GlobalConfig
import utils
import time
import re
from subprocess import Popen, PIPE

err_filename = "%s/delete_table_error.txt" % (GlobalConfig.DEBUG_DIR)
sys.stderr = open( err_filename,'w')
out_filename = "%s/delete_table_output.txt" % (GlobalConfig.DEBUG_DIR)
sys.stdout = open( out_filename,'w')

fields = cgi.FieldStorage()

utils.printToServer( 'Content-type: text/html\n\n' )

def validateSchema( first_line, filename ) :

    #name check
    m = rename.match( first_line )
    linename = m.group(1)
    name_check = linename.lower() == filename.lower()

    #TODO:
    #column type checking?

    if name_check :
        return (True,"good to go")
    else :
        return (False, "%s != %s" % (linename, filename))
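
# validateSchema relies on a module-level regex named rename that is not shown
# in this fragment; a later example defines it as below.  Two hypothetical calls
# illustrate the outcomes:
import re

rename = re.compile(r'#?table\s+(\w+)\s+(.*);', re.I)

# validateSchema("#table genes chrom string, start int;", "genes")
#   -> (True, "good to go")
# validateSchema("#table genes chrom string, start int;", "reads")
#   -> (False, "genes != reads")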
Example 8

def moveIfExists(source, dest):
    if os.path.exists(source):
        shutil.move(source, dest)
        return True
    else:
        print "\n%s doesn't exist!\n" % source
        return False


def validateGenomes(query, perms):
    pass


utils.printToServer('Content-type: text/json\n\n')
#utils.printToServer( utils.textarea_opener )

if debugging:
    query_name = 'debug'
    query = '''use parsed_tables
genome NA18507;
import READS;
import genes;

H1=select interval_creation() from READS using intervals(location,
                                                         mate_loc, both_mates) where location>=0 and mate_loc>=0 and
((mate_loc-location>1000 and mate_loc-location<2000000) or
 (location-mate_loc>1000 and location-mate_loc<2000000))

select * from H1 where interval_coverage>5'''
Example 9
    os.mkdir("%s/%s" % (user_upload_root, refinfo_name))
    fout = open(
        "%s/%s/%s" % (user_upload_root, refinfo_name, refinfo_file.filename),
        'w')
    handle = refinfo_file.file
    lines = handle.read()
    fout.write(lines)
    fout.close()
    refinfo = refinfo_name
else:
    refinfo = fields.getvalue("refinfo")

#project_names = fields.getvalue("project_names")
#project_names = project_names.split(",")

utils.printToServer('Content-type: text/html\n\n')

status, messages = "notcool", []

upload_dir = "/home/uploader/%s/%s" % (user_name, donor_name)
#if not os.path.exists( upload_dir ) :
#messages.append( "No folder named: %s exists" % upload_dir )
#else :
root = "%s/%s/%s" % (user_upload_root, refinfo, donor_name)
#shutil.move( upload_dir, root )

#TODO
#index this using the ref file (maybe not all chroms are there, or are named differently)
#also, run_biosql will need to be changed
#run script to index
for chrom in range(1, 23) + ['X', 'Y']:
import sys
sys.path.append("../lib")
import GlobalConfig
import utils
import time
import re
from subprocess import Popen, PIPE

err_filename = "%s/upload_error.txt" % (GlobalConfig.DEBUG_DIR)
sys.stderr = open( err_filename,'w')
out_filename = "%s/upload_output.txt" % (GlobalConfig.DEBUG_DIR)
sys.stdout = open( out_filename,'w')

fields = cgi.FieldStorage()
print fields
utils.printToServer( 'Content-type: text/html\n\n' )

#TODO: ownership and permission for inteval files
rename = re.compile(r'#?table\s+(\w+)\s+(.*);', re.I)
def validateSchema( first_line, filename ) :

    #name check
    m = rename.match( first_line )
    linename = m.group(1)
    name_check = linename.lower() == filename.lower()

    #TODO:
    #column type checking?

    if name_check :
        return (True,"good to go")
Example 11
def handlePath(user_name, path):
    print "handlePath,", path
    item = makeItem(user_name, path)
    utils.printToServer(json.dumps(item))
def handlePath( user_name, path ) :
    print "handlePath,", path
    item = makeItem( user_name, path )
    utils.printToServer( json.dumps( item ) )
                        if ext == "txt" :
                            item["txts"].append( thing )

            if len(item['sub_results']) > 0 :
                item['url'] = "%s/%%s/trackData.json" % tt
            else :
                item['url'] = "%s/trackData.json" % tt
        if is_dir :
            item["children"] = getChildren( user_name, path )

    return item


if __name__ == '__main__' :

    utils.printToServer( "Content-type: text/json\n\n" )

    projects_root = "%s/data/tracks" % ROOT_DIR

    sys.stderr = open("%s/filestore_err.txt" % DEBUG_DIR,'w')
    sys.stdout = open("%s/filestore_out.txt" % DEBUG_DIR,'w')

    user_name = "generic"
    print os.environ["HTTP_COOKIE"]
    for kvpair in os.environ["HTTP_COOKIE"].split(";") :
        [k,v] = kvpair.split("=",1)
        if k.endswith("user_name") :
            user_name = v
        elif k.endswith("passwd") :
            passwd = v
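
# The __main__ block above splits HTTP_COOKIE by hand.  An alternative sketch
# using the standard library cookie parser (module Cookie in Python 2); the
# endswith() test is kept so prefixed names such as "foo_user_name" still match:
import os
from Cookie import SimpleCookie

def cookieUser(default="generic"):
    jar = SimpleCookie(os.environ.get("HTTP_COOKIE", ""))
    for name, morsel in jar.items():
        if name.endswith("user_name"):
            return morsel.value
    return default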
Example 14
                            if len(item["sub_results"]) > 0:
                                for sr in item["sub_results"]:
                                    entry = {}
                                    for p in properties:
                                        entry[p] = item[p]
                                    entry["key"] = item["key"] + "/" + sr
                                    entry["url"] = item["url"] % sr
                                    trackData.append(entry)
                            else:
                                entry = {}
                                for p in properties:
                                    entry[p] = item[p]
                                trackData.append(entry)
                            print entry

    utils.printToServer(json.dumps(trackData))


if __name__ == '__main__':

    utils.printToServer("Content-type: text/json\n\n")

    project_root = "%s/data/tracks" % ROOT_DIR
    sys.stderr = open("%s/trackdata_err.txt" % DEBUG_DIR, 'w')
    sys.stdout = open("%s/trackdata_out.txt" % DEBUG_DIR, 'w')

    dparams = cgi.parse()
    print dparams

    # cgi.parse() returns a dict of lists, so take the first value
    getTrackData(dparams["user_name"][0], project_root)
Example 15
import GlobalConfig
import utils
import time

err_filename = "%s/create_new_project_error.txt" % (GlobalConfig.DEBUG_DIR)
sys.stderr = open(err_filename, 'w')
out_filename = "%s/create_new_project_output.txt" % (GlobalConfig.DEBUG_DIR)
sys.stdout = open(out_filename, 'w')

fields = cgi.FieldStorage()
user_name = fields.getvalue("user_name")
project_name = fields.getvalue("project_name")
assembly = fields.getvalue("assembly")
#fileitem = field['reference_file']

utils.printToServer('Content-type: text/json\n\n')

print fields
project_dir = "%s/data/tracks/%s%s" % \
               (GlobalConfig.ROOT_DIR,
                GlobalConfig.PROJECT_PREFIX,
                project_name)

src_table_dir = "%s/%s" % (GlobalConfig.SRC_TABLE_DIR, project_name)
#setup directory for the explorer tree to find
if os.path.exists(project_dir) or os.path.exists(src_table_dir):
    print "project_dir", project_dir
    #TODO:
    #name conflicts between users?
    status, message = "bad", "The project name '%s' is already taken" % project_name
    print status, message
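
# The snippet stops inside the name-conflict branch.  A hypothetical sketch of
# the success path, assuming the script simply creates both directories and
# reports a JSON status (the real continuation is not shown here):
import os
import json

def createProjectDirs(project_dir, src_table_dir):
    os.makedirs(project_dir)
    os.makedirs(src_table_dir)
    return json.dumps({"status": "ok", "message": "project created"})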
Example 16
    #if len(retained_tracks) == len(json_tracks) :
        #raise Exception("No tracks were removed. Check to make sure the supplied track names are correct")
#
    #tifile = open( filename, 'w')
    #tifile.write( "%s = \n%s" % (js_var_name, json.dumps( retained_tracks, indent=4 )))
    #tifile.close()


if __name__ == '__main__' :
    root_dir = GlobalConfig.ROOT_DIR 

    sys.stderr = open("%s/delete_error.txt" % GlobalConfig.DEBUG_DIR,'w')
    sys.stdout = open("%s/delete_output.txt" % GlobalConfig.DEBUG_DIR,'w')
    
    fields = cgi.FieldStorage()
    utils.printToServer( 'Content-type: text/json\n\n' )
    print 'fields: ', fields

    if not fields :
        donor = sys.argv[1]
        query_name = sys.argv[2]
        removeQuery( donor, query_name, delete=True )
    else :
        fields = cgi.parse()
        #utils.printToServer('<html><body><textarea>')
        #try :
        (status, message) = removeQuery( fields["project"][0], fields["donor"][0], fields["query_name"][0], delete=True )
        utils.printToServer( '{"status":"%s", "message":"%s"}' % (status,message) )
        #except Exception :
            #utils.printToServer( '{"status":"error", "message":"Something went wrong, check the logs"}' )
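
# The JSON reply above is assembled with string formatting, so a quote inside
# message would produce invalid JSON.  A safer equivalent using json.dumps:
import json

def statusResponse(status, message):
    # json.dumps escapes quotes and backslashes that manual interpolation misses
    return json.dumps({"status": status, "message": message})

# e.g. utils.printToServer(statusResponse(status, message))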
Example 17
import sys
sys.path.append("../lib")
import GlobalConfig
import utils
import time
from subprocess import Popen, PIPE

err_filename = "%s/list_project_donors_error.txt" % (GlobalConfig.DEBUG_DIR)
sys.stderr = open( err_filename,'w')
out_filename = "%s/list_project_donors_output.txt" % (GlobalConfig.DEBUG_DIR)
sys.stdout = open( out_filename,'w')

fields = cgi.FieldStorage()
project_name = fields.getvalue("project_name")

utils.printToServer( 'Content-type: text/json\n\n' )

project_dir = "%s/data/tracks/%s%s/" \
        % (GlobalConfig.ROOT_DIR, GlobalConfig.PROJECT_PREFIX, project_name)

donors = []
for donor in os.listdir( project_dir ) :
    if donor.startswith( GlobalConfig.DONOR_PREFIX ) :
        donors.append( donor[len(GlobalConfig.DONOR_PREFIX):] )

if len(donors) == 0 :
    status = "empty"
    message = "\"No genomes associated with this project\""
else :
    status = "ok"
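
# An equivalent, more compact way to build the donors list above, shown only as
# a sketch (listDonors is not part of the original code):
import os

def listDonors(project_dir, prefix):
    # strip the donor prefix from matching directory names
    return [d[len(prefix):] for d in os.listdir(project_dir) if d.startswith(prefix)]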