Example #1
0
    def test_dtafile_not_found_gives_empty_json_obj(self):
        """Verify that requesting a nonexistent DTAFile (id=2) yields
        an empty JSON object ('{}') from the quickinfo endpoint.
        """
        response = views_helpers.get_json_response('api.dtafile_quickinfo', 2)
        self.assertEqual(response, '{}')
def sqtfile_info(sqtfile_pk):
    """Render the detail page for the SQTFile with id=sqtfile_pk.

    Resolves the parent DBSearch and Dataset plus the sibling DTA files
    via the associated tables, then hands everything to the template,
    which allows editing/deleting of associated files (or the entire
    dataset).
    """
    sqtfile = models.SQTFile.query.get_or_404(sqtfile_pk)

    # NOTE(review): the API endpoint repeats the DB lookup done just
    # above... not ideal
    quickinfo = json.loads(
        views_helpers.get_json_response('api.sqtfile_quickinfo', sqtfile_pk))

    dbsearch = models.DBSearch.query.get_or_404(quickinfo['parent_dbsearch'])
    dta_files = dbsearch.dtafiles.all()
    dataset = models.Dataset.query.get_or_404(dbsearch.dataset_id)

    return render_template('data/sqtfile.html',
                           current_sqtfile=sqtfile,
                           parent_dbsearch=dbsearch,
                           dta_files=dta_files,
                           parent_dataset=dataset,
                           sqtfile_quickinfo_dict=quickinfo)
Example #3
0
def test_API():
    """Round-trip demo: fetch JSON from 'api.json_api', mutate the decoded
    dict, and return it re-serialized as a JSON string.
    """
    parsed = json.loads(views_helpers.get_json_response('api.json_api'))

    # example of modifying the freshly-decoded dict
    parsed['new'] = 3

    return json.dumps(parsed)
def delete_dataset(dataset_pk):

    ''' "Deletes" dataset (id=dataset_pk) by setting Dataset.deleted=True

        "Recovers" dataset if url is accessed with argument recover=True like this:
        /<dataset_pk>/delete?recover=True

        Associated MS1/MS2 files, DB searches, and their SQT/DTA files are
        all flipped to the same deleted/recovered state, then committed in
        a single transaction.
    '''

    # new_status flag sets [model_instance].deleted to True or False
    # depending on whether it should be "deleted" or "recovered"
    new_status = not request.args.get('recover', None)

    dataset_quickinfo_dict = json.loads(
        views_helpers.get_json_response('api.dataset_quickinfo', dataset_pk))

    # "delete" dataset -- get_or_404 (consistent with the other views here)
    # gives a clean 404 for an unknown id instead of AttributeError on None
    current_dataset = models.Dataset.query.get_or_404(dataset_pk)
    current_dataset.deleted = new_status

    # "delete" associated MS1 and MS2 files
    for ms1_file_id in dataset_quickinfo_dict['ms1_files']:
        models.MS1File.query.get(ms1_file_id).deleted = new_status

    for ms2_file_id in dataset_quickinfo_dict['ms2_files']:
        models.MS2File.query.get(ms2_file_id).deleted = new_status

    # "delete" associated dbsearches and their SQT/DTA files
    # ('or []' preserves the original no-op when 'dbsearches' is falsy)
    for dbsearch_pk in dataset_quickinfo_dict['dbsearches'] or []:
        current_dbsearch = models.DBSearch.query.get(dbsearch_pk)
        current_dbsearch.deleted = new_status

        for sqt_file in current_dbsearch.sqtfiles.all():
            sqt_file.deleted = new_status

        for dta_file in current_dbsearch.dtafiles.all():
            dta_file.deleted = new_status

    db.session.commit()

    # log the action actually performed (the old message said "Deleted"
    # even when the dataset was being recovered)
    action = 'Deleted' if new_status else 'Recovered'
    app.logger.info('{} Dataset "{}" (Dataset ID {}) and associated files in database'.format(
        action, current_dataset.name, current_dataset.id))

    return redirect(url_for('data.document_index')) # pass a message here confirming delete
Example #5
0
    def test_dtafile_JSON_parser_returns_correct_data(self):
        """Check the DTASelect-file -> JSON parser output against the
        mock DTAFile (id=1).
        """
        try:
            parsed = json.loads(
                views_helpers.get_json_response('api.dtafile_json', 1))
        except (ValueError, TypeError):
            self.fail('Invalid JSON')

        # payload must live under the 'data' key
        self.assertIn('data', parsed)

        # the mock "file" contains exactly 9 loci
        self.assertEqual(len(parsed['data']), 9)
Example #6
0
    def test_dtafile_found_gives_correct_json_output(self):
        """For an existing DTAFile (id=1), the quickinfo endpoint must
        return valid JSON containing an 'id' key whose value is 1.
        """
        try:
            parsed = json.loads(
                views_helpers.get_json_response('api.dtafile_quickinfo', 1))
        except (ValueError, TypeError):
            self.fail('Invalid JSON')

        self.assertIn('id', parsed)
        self.assertEqual(parsed['id'], 1)
def dataset_info(dataset_pk):
    """Render the detail page for the Dataset with id=dataset_pk.

    Looks up associated files via the API quickinfo endpoint and hands
    the result to the template, which allows editing/deleting of
    associated files (or the entire dataset).
    """
    dataset = models.Dataset.query.get_or_404(dataset_pk)

    # NOTE(review): the API endpoint repeats the DB lookup done just
    # above... not ideal
    quickinfo = json.loads(
        views_helpers.get_json_response('api.dataset_quickinfo', dataset_pk))

    return render_template('data/dataset.html',
                           dataset_id=dataset_pk,
                           current_dataset=dataset,
                           dataset_quickinfo_dict=quickinfo)
def salt_step_peptide_analysis(dtafile_pk):

    ''' Determines how many filtered peptides are present 
        per chromatography step and draws a plot

        A distinct PSM is identified by 'LCStep_Scan_ChargeState'; the
        plot shows the count of distinct PSMs per LC step for the
        DTAFile with id=dtafile_pk.
    '''

    current_dtafile = models.DTAFile.query.get_or_404(dtafile_pk)

    dtafile_quickinfo_dict = json.loads(
        views_helpers.get_json_response('api.dtafile_quickinfo', dtafile_pk))

    dtafile_json = json.loads(
        views_helpers.get_json_response('api.dtafile_json', dtafile_pk))

    parent_dbsearch = models.DBSearch.query.get_or_404(dtafile_quickinfo_dict['parent_dbsearch'])
    sqt_files = parent_dbsearch.sqtfiles.all()
    parent_dataset = models.Dataset.query.get_or_404(parent_dbsearch.dataset_id)

    def get_distinct_psm_ids(dtaselect_parser):
        # one id per distinct (LCStep, Scan, ChargeState) across all loci
        return {str(peptide['LCStep']) + '_' + str(peptide['Scan']) + '_' + str(peptide['ChargeState'])
                for locus in dtaselect_parser
                for peptide in locus['peptides']}

    def make_LCStep_histogram(psm_ids_set):
        # count PSMs per LC step (the leading component of each PSM id)
        return Counter(psm.split('_')[0] for psm in psm_ids_set)

    psm_ids = get_distinct_psm_ids(dtafile_json['data'])
    hist = make_LCStep_histogram(psm_ids)
    # sort numerically by LC step so the x-axis is ordered
    labels, values = zip(*sorted(hist.items(), key=lambda x: int(x[0])))
    labels = np.array(labels)
    values = np.array(values)

    fig = figure(title="Peptides per LC Step", y_range=[0, max(values)*1.25], plot_height=400, plot_width=700)
    # bars drawn as rects centered at half-height
    fig.rect(x=labels, y=values/2, width=0.8, height=values)
    fig.xaxis.axis_label = 'chromatography step'
    fig.yaxis.axis_label = '# peptides identified'
    # (removed unused 'from bokeh.models import FixedTicker' import)
    fig.xaxis[0].ticker.desired_num_ticks = len(labels)

    plot_resources = RESOURCES.render(
        js_raw=INLINE.js_raw,
        css_raw=INLINE.css_raw,
        js_files=INLINE.js_files,
        css_files=INLINE.css_files,
    )

    script, div = components(fig, INLINE)

    return render_template( 'plots/empty.html', 
                            dtafile_quickinfo_dict=dtafile_quickinfo_dict, 
                            current_dtafile=current_dtafile, 
                            parent_dbsearch=parent_dbsearch, 
                            sqt_files=sqt_files, 
                            parent_dataset=parent_dataset, 
                            plot_script=script, 
                            plot_div=div, 
                            plot_resources=plot_resources, 
                            )