Code example #1
def get(self, request, *args, **kwargs):
    # Turn the query string into a filter dict (this rebinds the **kwargs parameter)
    # and return every matching dataset row as a list of lists, with the column
    # names as the first row.
    kwargs = dict(request.GET)
    queryset = internal.GetDataset(request, kwargs)
    header = [
        'project', 'sample', 'dataset', 'method', 'category', 'entity',
        'numreads', 'profile', 'avgscore'
    ]
    array = []
    array.append(header)
    for q in queryset:
        tmp = [
            q.project.pk, q.sample, q.dataset, q.method, q.category,
            q.entity, q.numreads, q.profile, q.avgscore
        ]
        array.append(tmp)
    return Response(array)
Code example #2
def main(request):
    params = {}
    params['queries'] = internal.ListQueries(
        request, {'projectID': [request.session['projectID']]})
    params['attributes'] = internal.ListAttributes(
        request, {'projectID': [request.session['projectID']]})
    queryname = request.GET.get('query') or None
    inputdataset = request.GET.get('dataset') or None
    method = request.GET.get('method') or None
    category = request.GET.get('category') or None
    attribute = request.GET.get('attribute') or None
    mydata = {}
    sa = {}
    if queryname:
        if not inputdataset or not method or not category or not attribute:
            return HttpResponse(
                "required GET parameters: query, dataset, method, category, attribute",
                content_type='text/plain')
        dataset = internal.GetDataset(request,
                                      params={
                                          'queryname': [queryname],
                                          'projectID':
                                          [request.session['projectID']],
                                          'dataset': [inputdataset],
                                          'category': [category],
                                          'method': [method]
                                      })
        attributes = internal.GetData(
            request, {
                'queryname': [queryname],
                'projectID': [request.session['projectID']],
                'attribute': [
                    attribute,
                ],
            })
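        # GetData appears to return a header row first; drop it, then map sample -> attribute value.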
        attributes.pop(0)
        for a in attributes:
            sa[a[0]] = a[1]
        samples = list(set(dataset.values_list('sample', flat=True)))
        for row in dataset:
            if row.entity in mydata:
                mydata[row.entity][row.sample] = row.profile / 100
            else:
                mydata[row.entity] = {row.sample: row.profile / 100}

        inputfile = "/tmp/%d-%d-%s.txt" % (request.user.pk, int(
            time.time()), attribute.replace(' ', '_'))
        formatfile = inputfile + ".format"
        resultfile = inputfile + ".result"
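        # LEfSe input layout: first line is the class (attribute value) per sample,
        # second line is subject_id per sample, and each remaining line is one
        # entity followed by its per-sample profile (0 when the sample is missing).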
        with open(inputfile, 'w') as f:
            f.write(attribute)
            for s in samples:
                if s in sa: f.write("\t" + str(sa[s]).lower())
                else: f.write("\tNA")
            f.write("\nsubject_id")
            for s in samples:
                f.write("\t" + str(s))
            for e in sorted(mydata):
                f.write("\n" + e)
                for s in samples:
                    if s in mydata[e]: f.write("\t" + str(mydata[e][s]))
                    else: f.write("\t0")
        formatresp = myutils.runPython("lefse/format_input.py", inputfile,
                                       [formatfile, "-u2", "-o1000000"])
        lefseresp = myutils.runPython("lefse/run_lefse.py", formatfile,
                                      [resultfile]).strip()
        lefseresp = lefseresp.replace('\n', '<br />')
        lefseresp = "<strong>Query:</strong> " + queryname + "<br><strong>Attribute:</strong> " + attribute + "<hr>" + lefseresp
        #lefseresp = myutils.runCmd("python /home/orean/OREAN/scripts/misc/lefse/run_lefse.py "+ formatfile+" "+resultfile)
        rows = ""
        with open(resultfile, 'r') as f:
            rows = f.read()
        os.remove(inputfile)
        os.remove(formatfile)
        os.remove(resultfile)
        return HttpResponse(json.dumps({
            'msg': lefseresp,
            'data': rows
        }),
                            content_type='application/json')
    return render(request, "lefse.html", params)
Code example #3
def main(request):
    params = {}
    params['queries'] = internal.ListQueries(request, {'projectID': [request.session['projectID']]})
    querynames = request.GET.getlist('query') or None
    inputdataset = request.GET.get('dataset') or None
    method = request.GET.get('method') or None
    category = request.GET.get('category') or None
    if querynames:
        if not querynames or not inputdataset or not method or not category: return render(request, 'alpha.html', params)
        entities = []
        samples = []
        datahash = {}
        filename = "/tmp/%s-%d.txt" %(request.user.username, int(time.time()))
        for query in querynames:
            dataset =  internal.GetDataset(request, params={'queryname': [query], 'projectID': [request.session['projectID']], 'dataset': [inputdataset], 'category': [category], 'method': [method]})
            for d in dataset:
                if d.entity not in entities:
                    entities.append(d.entity)
                if d.sample not in samples:
                    samples.append(d.sample)
                    datahash[d.sample] = {'query' : [query]}
                datahash[d.sample][d.entity] = d.profile
                if query not in datahash[d.sample]['query']: datahash[d.sample]['query'].append(query)
        with open(filename, 'w') as f:
            f.write('Rank,Taxa,')
            for sample in samples: f.write(str(sample)+',')
            f.write('\n')
            for entity in entities:
                mycontent = category+','+entity+','
                for sample in samples:
                    if entity in datahash[sample]: mycontent+=str(datahash[sample][entity])
                    else: mycontent += str('0')
                    mycontent += ','
                mycontent+='\n'
                f.write(mycontent)
        pca = myutils.runRscript('pca.R', filename)
        pca = pca.split('\n')
        pca.pop(0)  # discard the first line of the R output
        pca.pop()   # discard the trailing empty line
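        # NOTE: the series below assume exactly two queries were selected;
        # points from samples matching both queries go into the third ("Both") series.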
        finaldata = [{'name' : querynames[0], 'color': '#ff0000', 'data': []}, {'name': querynames[1], 'color': '#0000ff', 'data': []}, {'name': 'Both', 'color': '#00ff00', 'data' : []}]
        i = 0
        for i in range(len(pca)):
            row = pca[i]
            if row.startswith('-----'): break
            cols = row.split(',')
            sample = cols[0]
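            # R prefixes column names that start with a digit with 'X'; strip it
            # when the prefixed name is not a known sample.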
            if sample[0] == 'X' and sample not in datahash: sample = sample[1:]
            xy = [float(cols[1]), float(cols[2])]
            if len(datahash[sample]['query']) > 1: finaldata[2]['data'].append(xy)
            elif querynames[0] in datahash[sample]['query']: finaldata[0]['data'].append(xy)
            else: finaldata[1]['data'].append(xy)
        i+=1
        cutoff = i
        variances = []
        for i in range(cutoff, len(pca)):
            row = pca[i]
            if row.startswith('-----'): break
            cols = row.split(',')	
            variances.append(cols)
        i+=1
        cutoff = i
        finaldata.append(variances)
        keytaxa = []
        for i in range(cutoff, len(pca)):
            row = pca[i]
            cols = row.split(',')       
            keytaxa.append(cols)
        finaldata.append(keytaxa)
        #os.remove(filename)
        return HttpResponse(json.dumps(finaldata), content_type="application/json")
    return render(request, 'pca.html', params)
Code example #4
File: area.py  Project: iojas/OREAN
def main(request):
    params = {}
    params['time'] = []
    params['queries'] = internal.ListQueries(
        request, {'projectID': [request.session['projectID']]})
    if request.method == 'POST':
        start = time.time()
        queryname = request.POST.get('query') or None
        dataset = request.POST.get('dataset') or None
        method = request.POST.get('method') or None
        category = request.POST.get('category') or None
        if not queryname or not dataset or not method or not category:
            return render(request, 'profile.html', params)
        params['feedback'] = 'Query selected: "%s"' % queryname
        query = internal.ListQueries(
            request, {
                'projectID': [request.session['projectID']],
                'full': [True],
                'queryname': [queryname]
            })
        if not query or not len(query):
            return render(request, 'profile.html', params)
        else:
            query = query[0]
        samplelist = myutils.fieldexpand(query['results'])
        mark = time.time()
        dataset = internal.GetDataset(request,
                                      params={
                                          'queryname': [queryname],
                                          'projectID': [query['project_id']],
                                          'dataset': [dataset],
                                          'category': [category],
                                          'method': [method]
                                      })
        jsondata = []
        magichash = {}
        maxhash = {}
        sorterhash = {}
        entityorder = []
        first = True
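        # Build magichash (entity -> sample -> profile) and track each sample's
        # most abundant entity in maxhash, skipping rows with profile < 1.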
        for d in dataset:
            if first:
                params['time'].append(
                    'parsed input and fetched API data: %.02f' %
                    (time.time() - start))
                first = False
            if d.profile < 1: continue
            if d.sample not in maxhash or d.profile > maxhash[d.sample]['val']:
                maxhash[d.sample] = {'entity': d.entity, 'val': d.profile}
            if d.entity in magichash: magichash[d.entity][d.sample] = d.profile
            else: magichash[d.entity] = {d.sample: d.profile}
        params['time'].append('computed max profile values: %.02f ' %
                              (time.time() - start))
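        # Order samples by their dominant entity (highest profile first), grouping
        # together samples that share the same top taxon; ignore maxima below 5.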
        for sample in sorted(maxhash,
                             key=lambda x: maxhash[x]['val'],
                             reverse=True):
            if maxhash[sample]['val'] < 5: continue
            if maxhash[sample]['entity'] not in sorterhash:
                sorterhash[maxhash[sample]['entity']] = [sample]
                entityorder.append(maxhash[sample]['entity'])
            else:
                sorterhash[maxhash[sample]['entity']].append(sample)
        params['time'].append('sorted taxa: %.02f' % (time.time() - start))
        samplelist = []
        for element in entityorder:
            for x in sorterhash[element]:
                samplelist.append(x)
        params['time'].append('sorted samples: %.02f' % (time.time() - start))
        for m in magichash:
            tmp = {'name': m, 'data': []}
            for i, s in enumerate(samplelist):
                if s in magichash[m]: tmp['data'].append(magichash[m][s])
                else: tmp['data'].append(0)
            jsondata.append(tmp)
        params['time'].append('formatted data for JSON conversion: %.02f' %
                              (time.time() - start))
        params['json'] = json.dumps([samplelist, jsondata, params['time']])
        return HttpResponse(params['json'], content_type="application/json")
    return render(request, 'profile.html', params)
Code example #5
File: 16sProfileBoxplot.py  Project: iojas/OREAN
def main(request):
    CUTOFF = 20
    params = {}
    params['queries'] = internal.ListQueries(
        request, {'projectID': [request.session['projectID']]})
    querynames = request.GET.getlist('query') or None
    inputdataset = request.GET.get('dataset') or None
    category = request.GET.get('category') or None
    method = request.GET.get('method') or None
    print querynames
    if querynames:
        if not querynames or not inputdataset or not method or not category:
            return render(request, 'alpha.html', params)
        #entities = []
        entities = myutils.topTaxa(request.session['projectID'], querynames,
                                   inputdataset, method, category, CUTOFF)
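        # elementOfGlory[entity][query] will collect the sample count ('n') and the
        # boxplot statistics ('stats') for every entity/query pair.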
        elementOfGlory = {en: {q: {} for q in querynames} for en in entities}
        for query in querynames:
            filename = "/tmp/%s-%d.txt" % (request.user.pk, int(time.time()))
            datahash = {}
            for ent in entities:
                datahash[ent] = []
            longest_list = -1
            print "Query:", query
            dataset = internal.GetDataset(request,
                                          params={
                                              'queryname': [query],
                                              'projectID':
                                              [request.session['projectID']],
                                              'dataset': [inputdataset],
                                              'category': [category],
                                              'method': [method],
                                              'entity':
                                              entities
                                          })
            for d in dataset:
                taxa = d.entity
                datahash[taxa].append(d.profile)
                if len(datahash[taxa]) > longest_list:
                    longest_list = len(datahash[taxa])
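            # Write one comma-separated row per taxon, padding with zeros so every
            # row has longest_list values for the R boxplot script.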
            with open(filename, 'a') as f:
                for taxa in datahash:
                    f.write(str(taxa))
                    count = 0
                    for profile in datahash[taxa]:
                        count += 1
                        f.write(',' + str(profile))
                    elementOfGlory[taxa][query]['n'] = str(count)
                    while count < longest_list:
                        f.write(',' + str('0'))
                        count += 1
                    f.write('\n')
            boxplot = myutils.runRscript('16sProfilesBoxplot.R', filename)
            os.remove(filename)
            tmp = boxplot.split('\n')
            boxplot = []
            for bp in tmp:
                if bp == "": continue
                tmp2 = bp.split(',')
                tmp2 = [float(s) if isfloat(s) else s for s in tmp2]
                if tmp2[0] in elementOfGlory:
                    elementOfGlory[tmp2[0]][query]['stats'] = tmp2
                else:
                    elementOfGlory[tmp2[0]] = {query: {'stats': tmp2}}
        finaldata = [[], [], [], len(querynames)]
        i = 0
        for en in entities:
            for query in querynames:
                bp = elementOfGlory[en][query]['stats']
                bp.pop(0)
                if len(bp[5:]):
                    # values beyond the first five boxplot statistics are plotted
                    # as individual outlier points
                    for x in bp[5:]:
                        if x != '':
                            finaldata[2].append([i, float(x)])
                finaldata[0].append(en + ' (' +
                                    elementOfGlory[en][query].get('n', '?') +
                                    ')')
                finaldata[1].append(bp[:5])
                i += 1
        return HttpResponse(json.dumps(finaldata),
                            content_type="application/json")
    return render(request, '16sProfileBoxplot.html', params)
Code example #6
File: stackedbars.py  Project: iojas/OREAN
def main(request):
    params = {}
    params['queries'] = internal.ListQueries(
        request, {'projectID': [request.session['projectID']]})
    if request.method == 'POST':
        start = time.time()
        queryname = request.POST.get('query') or None
        datasetname = request.POST.get('dataset') or None
        method = request.POST.get('method') or None
        category = request.POST.get('category') or None
        if not queryname or not datasetname or not method or not category:
            return render(request, 'profile.html', params)
        params['feedback'] = 'Query selected: "%s"' % queryname

        query = internal.ListQueries(
            request, {
                'projectID': [request.session['projectID']],
                'full': [True],
                'queryname': [queryname]
            })
        if not query or not len(query):
            return render(request, 'profile.html', params)
        else:
            query = query[0]
        filename = 'stackedbars.%d.%d.%s.%s.%s.csv' % (
            query['id'], query['project_id'], datasetname, method, category)
        furl = murl + filename
        fpath = mroot + filename

        if not os.path.isfile(fpath):
            samplelist = myutils.fieldexpand(query['results'])
            dataset = internal.GetDataset(request,
                                          params={
                                              'queryname': [queryname],
                                              'projectID':
                                              [query['project_id']],
                                              'dataset': [datasetname],
                                              'category': [category],
                                              'method': [method]
                                          })
            with open(fpath, 'w') as f:
                f.write('sample,taxa,profile\n')
                #for d in dataset.order_by('sample'):
                # if d.profile > 0.1: f.write("%s,%s,%f\n" % ( d.sample, d.entity, d.profile ))

                taxahash = dict()
                samplemax = dict()
                datahash = dict()
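                # Group every sample's profiles under that sample's current dominant
                # entity; when a more abundant entity shows up, move the sample's
                # accumulated data under the new dominant entity.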
                for d in dataset:
                    if d.profile < 0.1: continue
                    if d.entity not in taxahash or d.profile > taxahash[
                            d.entity]:
                        taxahash[d.entity] = d.profile
                    if d.entity not in datahash: datahash[d.entity] = dict()
                    if d.sample not in samplemax:
                        samplemax[d.sample] = {'e': d.entity, 'p': d.profile}
                        datahash[d.entity][d.sample] = {d.entity: d.profile}
                    elif d.profile > samplemax[d.sample]['p']:
                        current = samplemax[d.sample]
                        samplemax[d.sample] = {'e': d.entity, 'p': d.profile}
                        sampledata = datahash[current['e']][d.sample]
                        del datahash[current['e']][d.sample]
                        sampledata[d.entity] = d.profile
                        datahash[samplemax[d.sample]['e']][
                            d.sample] = sampledata
                    else:
                        datahash[samplemax[d.sample]['e']][d.sample][
                            d.entity] = d.profile
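                # Emit CSV rows ordered by each taxon's peak profile, then by how
                # abundant that taxon is in each sample, then by ascending profile
                # within the sample.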
                for t, tmax in sorted(taxahash.items(), key=lambda x: -x[1]):
                    for sample, data in sorted(datahash[t].items(),
                                               key=lambda x: -x[1][t]):
                        for entity, profile in sorted(data.items(),
                                                      key=lambda x: x[1]):
                            f.write("%s,%s,%s\n" % (sample, entity, profile))
        return HttpResponse(json.dumps(furl), content_type="application/json")
    return render(request, 'stackedbars.html', params)
Code example #7
File: diversity.py  Project: iojas/OREAN
def alpha(request):
    params = {}
    params['queries'] = internal.ListQueries(
        request, {'projectID': [request.session['projectID']]})
    querynames = request.GET.getlist('query') or None
    inputdataset = request.GET.get('dataset') or None
    method = request.GET.get('method') or None
    category = request.GET.get('category') or None
    if querynames:
        if not querynames or not inputdataset or not method or not category:
            return render(request, 'alpha.html', params)
        datapoints = []
        outlierpoints = []
        count = 0
        for query in querynames:
            entities = []
            datahash = {}
            dataset = internal.GetDataset(request,
                                          params={
                                              'queryname': [query],
                                              'projectID':
                                              [request.session['projectID']],
                                              'dataset': [inputdataset],
                                              'category': [category],
                                              'method': [method]
                                          })
            for d in dataset:
                if d.entity not in entities:
                    entities.append(d.entity)
                if d.sample not in datahash: datahash[d.sample] = {}
                datahash[d.sample][d.entity] = d.numreads
            filename = "/tmp/%s-%d.txt" % (request.user.username,
                                           int(time.time()))
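            # Write a sample-by-taxon read-count matrix (taxa as columns) for the
            # alpha-diversity R script, filling missing counts with 0.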
            with open(filename, 'w') as f:
                for taxa in entities:
                    f.write(',' + str(taxa))
                f.write('\n')
                for sample in datahash:
                    mycontent = sample
                    for taxa in entities:
                        mycontent += ','
                        if taxa in datahash[sample]:
                            mycontent += str(datahash[sample][taxa])
                        else:
                            mycontent += str('0')
                    mycontent += '\n'
                    f.write(mycontent)
            boxplot = myutils.runRscript('alphaDiversity.R', filename)
            boxplot, outliers = boxplot.split('\n')[:2]
            boxplot = boxplot.split(',')
            outliers = outliers.split(',')
            if len(boxplot): boxplot = [float(x) for x in boxplot if x != '']
            if len(outliers):
                outliers = [[count, float(x)] for x in outliers if x != '']
            os.remove(filename)
            datapoints.append(boxplot)
            outlierpoints.append(outliers)
            count += 1
        finaldata = [querynames, datapoints, outlierpoints]
        return HttpResponse(json.dumps(finaldata),
                            content_type="application/json")
    return render(request, 'alpha.html', params)
Code example #8
def main(request):
    params = {}
    params['queries'] = internal.ListQueries(request, {'projectID': [request.session['projectID']]})
    params['attributes'] = internal.ListAttributes(request, {'projectID': [request.session['projectID']]})
    querynamelist = request.GET.getlist('query') or None
    inputdataset = request.GET.get('dataset') or None
    method = request.GET.get('method') or None
    category = request.GET.get('category') or None
    bothcount = 0
    mydata = {}
    sa = {}
    if querynamelist:
        if not len(querynamelist) == 2 or not inputdataset or not method or not category: return HttpResponse("required GET parameters: query, dataset, method, category, query2" , content_type='text/plain')
        queryname, queryname2 = querynamelist
        query1dataset =  internal.GetDataset(request, params={'queryname': [queryname], 'projectID': [request.session['projectID']], 'dataset': [inputdataset], 'category': [category], 'method': [method]})
        query2dataset =  internal.GetDataset(request, params={'queryname': [queryname2], 'projectID': [request.session['projectID']], 'dataset': [inputdataset], 'category': [category], 'method': [method]})
        #query1dataset =  internal.GetDataset(request, params={'queryname': [queryname], 'projectID': [request.session['projectID']], 'dataset': [inputdataset], 'method': [method]})
        #query2dataset =  internal.GetDataset(request, params={'queryname': [queryname2], 'projectID': [request.session['projectID']], 'dataset': [inputdataset], 'method': [method]})
        query1samples = internal.ListQueries(request, {'projectID': [request.session['projectID']], 'full': [True], 'queryname': [queryname]})[0]['results'].split(',')
        query2samples = internal.ListQueries(request, {'projectID': [request.session['projectID']], 'full': [True], 'queryname': [queryname2]})[0]['results'].split(',')
        totalsamples = list(set(query1samples+query2samples))
        dataset = query1dataset | query2dataset
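        # Label every sample with the query it came from ("Both" if it matched both
        # queries); this labelling becomes the LEfSe class row written below.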
        for s in totalsamples:
            if s in query1samples and s in query2samples: 
                sa[s] = "Both"
                bothcount+=1
            elif s in query1samples and s not in query2samples:
                sa[s] = queryname
            elif s not in query1samples and s in query2samples:
                sa[s] = queryname2
            else: 
                return HttpResponse(json.dumps({'msg': "Could not determine how to group sample '%s'. Aborting LefSE analysis." % s, 'data': False}), content_type='application/json')
        if bothcount == len(totalsamples): return HttpResponse(json.dumps({'msg': "Queries have the same sample composition. Cannot perform an analysis", 'data': False}), content_type='application/json')
        samples = list(set(dataset.values_list('sample', flat=True)))
        for row in dataset:
            #if row.entity in mydata: mydata[row.entity][row.sample] = row.profile/100
            #else: mydata[row.entity] = {row.sample: row.profile/100}
            taxaName = row.entity
            if row.taxatree is not None: taxaName = row.taxatree.full_tree
            if taxaName in mydata: mydata[taxaName][row.sample] = row.profile/100
            else: mydata[taxaName] = {row.sample: row.profile/100}
        inputfile = "/tmp/%d-%d-lefse.txt" %(request.user.pk, int(time.time()))
        formatfile = inputfile+".format"
        resultfile = inputfile+".result"
        with open(inputfile, 'w') as f:
            f.write("QueryStatus")
            for s in samples: 
                if s in sa: f.write("\t"+str(sa[s]).lower())
                else: f.write("\tNA")
            f.write("\nsubject_id")
            for s in samples: f.write("\t"+str(s))
            for e in sorted(mydata):
                f.write("\n"+e)
                for s in samples:
                    if s in mydata[e]: f.write("\t"+str(mydata[e][s]))
                    else: f.write("\t0")
        formatresp = myutils.runPython("lefse/format_input.py", inputfile, [formatfile, "-u2", "-o1000000"])
        lefseresp = myutils.runPython("lefse/run_lefse.py", formatfile, [resultfile]).strip()
        lefseresp = lefseresp.replace('\n', '<br />')
        lefseresp = "<strong>Query 1:</strong> "+queryname+"<br><strong>Query 2:</strong> "+queryname2+"<hr>"+lefseresp
        #lefseresp = myutils.runCmd("python /home/orean/OREAN/scripts/misc/lefse/run_lefse.py "+ formatfile+" "+resultfile)
        rows = ""
        with open(resultfile, 'r') as f:
            rows = f.read()
        #os.remove(inputfile)
        #os.remove(formatfile)
        #os.remove(resultfile)
        return HttpResponse(json.dumps({'msg': lefseresp, 'data': rows}), content_type='application/json')
    return render(request, "lefse2.html", params)