Exemplo n.º 1
0
def update_analysis(limit=None):
    """Compute and store first-pulse features for pairs that lack them.

    Queries every Pair with no associated FirstPulseFeatures row, runs the
    feature extraction on each, and commits the new rows to the database
    in batches.

    Parameters
    ----------
    limit : int | None
        Maximum number of pairs to process; None processes all of them.
    """
    session = db.session()
    query = (session.query(db.Pair, FirstPulseFeatures)
             .outerjoin(FirstPulseFeatures)
             .filter(FirstPulseFeatures.pair_id == None))
    if limit is not None:
        query = query.limit(limit)
    print("Updating %d pairs.." % query.count())
    all_records = query.all()
    n_records = len(all_records)
    for idx, rec in enumerate(all_records):
        pair = rec[0]
        responses, ids, response_amps = filter_pulse_responses(pair)
        if len(responses) == 0:
            # nothing usable for this pair; leave it without a features row
            continue
        feature_values = first_pulse_features(pair, responses, response_amps)
        row = FirstPulseFeatures(pair=pair,
                                 n_sweeps=len(ids),
                                 pulse_ids=ids,
                                 **feature_values)
        session.add(row)
        # periodic commit so long runs make incremental, durable progress
        if idx % 10 == 0:
            session.commit()
            print("%d pairs added to the DB of %d" % (idx, n_records))
    session.commit()
    session.close()
Exemplo n.º 2
0
def build_detection_limits():
    """Measure synaptic detection limits for all qualifying pair recordings.

    Queries every pair from the database, filters out recordings with
    non-finite deconvolved amplitudes, gain errors, high crosstalk, low
    sample counts, or mismatched / unselected cre types, then runs
    measure_limit() on each remaining pair that does not already have a
    detection limit stored.  Calls sys.exit(0) after processing 100 pairs
    to avoid unbounded memory growth; rerun the script to continue.
    """
    # silence warnings about fp issues
    np.seterr(all='ignore')

    # read all pair records from DB
    classifier = strength_analysis.get_pair_classifier(seed=0,
                                                       use_vc_features=False)
    conns = strength_analysis.query_all_pairs(classifier)

    # keep only records with a finite deconvolved amplitude
    filtered = conns[np.isfinite(conns['ic_deconv_amp_mean'])]

    # remove recordings with gain errors
    mask = filtered['ic_deconv_amp_mean'] < 0.02

    # remove recordings with high crosstalk
    mask &= abs(filtered['ic_crosstalk_mean']) < 60e-6

    # remove recordings with low sample count
    mask &= filtered['ic_n_samples'] > 50

    # restrict to pairs where pre and post cre types match, and to the
    # selected driver lines only
    typs = filtered['pre_cre_type']
    mask &= typs == filtered['post_cre_type']
    mask &= ((typs == 'sim1') | (typs == 'tlx3') | (typs == 'unknown') |
             (typs == 'rorb') | (typs == 'ntsr1'))

    filtered = filtered[mask]

    # NOTE(review): the original also computed c_mask/u_mask and
    # signal/background here, but none of them were used — removed.

    session = db.session()

    count = 0
    for i, rec in enumerate(filtered):
        print("================== %d/%d ===================== " %
              (i, len(filtered)))
        pair = session.query(
            db.Pair).filter(db.Pair.id == rec['pair_id']).all()[0]
        if pair.detection_limit is not None:
            # limit already measured on a previous run; skip
            print("    skip!")
            continue
        try:
            measure_limit(pair, session, classifier)
        except Exception:
            # best effort: report the traceback and continue with next pair
            sys.excepthook(*sys.exc_info())

        count += 1
        if count > 100:
            print("Bailing out before memory fills up.")
            sys.exit(0)
Exemplo n.º 3
0
def plot_features(organism=None,
                  conn_type=None,
                  calcium=None,
                  age=None,
                  sweep_thresh=None,
                  fit_thresh=None):
    """Plot grand-average first-pulse responses for each requested selection.

    Each filter argument may be a comma-separated string of values; the
    cartesian product of all supplied values defines the selections, and
    one response-plot row is produced per selection.

    Parameters
    ----------
    organism : str | None
        Comma-separated species names to filter on.
    conn_type : str | None
        Comma-separated connection types, each formatted
        'layer;cre-layer;cre' (the literal string 'None' leaves the layer
        unconstrained).
    calcium : str | None
        Comma-separated ACSF calcium-concentration prefixes.
    age : str | None
        Comma-separated age ranges, each formatted 'lower-upper'.
    sweep_thresh : int | None
        Minimum number of sweeps required per pair.
    fit_thresh :
        Currently unused; retained for interface compatibility.

    Returns
    -------
    (PlotGrid, PlotGrid)
        The response grid and the (currently unpopulated) feature grid.
    """
    s = db.session()

    filters = {
        'organism': organism,
        'conn_type': conn_type,
        'calcium': calcium,
        'age': age
    }

    # Expand comma-separated filter values into the cartesian product of
    # all selections (starting from the single empty selection).
    selection = [{}]
    # Fix: .iteritems() is Python-2-only; .items() works on both.
    for key, value in filters.items():
        if value is not None:
            temp_list = []
            value_list = value.split(',')
            for v in value_list:
                temp = [s1.copy() for s1 in selection]
                for t in temp:
                    t[key] = v
                temp_list = temp_list + temp
            selection = list(temp_list)

    if len(selection) > 0:

        response_grid = PlotGrid()
        response_grid.set_shape(len(selection), 1)
        response_grid.show()
        feature_grid = PlotGrid()
        feature_grid.set_shape(6, 1)
        feature_grid.show()

        for i, select in enumerate(selection):
            pre_cell = aliased(db.Cell)
            post_cell = aliased(db.Cell)
            q_filter = []
            # Fix: default these so the plot title below is well-defined
            # even when no conn_type filter was supplied (previously a
            # NameError).
            pre_layer = pre_cre = post_layer = post_cre = None
            if sweep_thresh is not None:
                q_filter.append(FirstPulseFeatures.n_sweeps >= sweep_thresh)
            species = select.get('organism')
            if species is not None:
                q_filter.append(db.Slice.species == species)
            c_type = select.get('conn_type')
            if c_type is not None:
                pre_type, post_type = c_type.split('-')
                pre_layer, pre_cre = pre_type.split(';')
                if pre_layer == 'None':
                    pre_layer = None
                post_layer, post_cre = post_type.split(';')
                if post_layer == 'None':
                    post_layer = None
                q_filter.extend([
                    pre_cell.cre_type == pre_cre,
                    pre_cell.target_layer == pre_layer,
                    post_cell.cre_type == post_cre,
                    post_cell.target_layer == post_layer
                ])
            calc_conc = select.get('calcium')
            if calc_conc is not None:
                q_filter.append(db.Experiment.acsf.like(calc_conc + '%'))
            age_range = select.get('age')
            if age_range is not None:
                age_lower, age_upper = age_range.split('-')
                q_filter.append(
                    db.Slice.age.between(int(age_lower), int(age_upper)))

            q = s.query(FirstPulseFeatures).join(db.Pair, FirstPulseFeatures.pair_id==db.Pair.id)\
                .join(pre_cell, db.Pair.pre_cell_id==pre_cell.id)\
                .join(post_cell, db.Pair.post_cell_id==post_cell.id)\
                .join(db.Experiment, db.Experiment.id==db.Pair.expt_id)\
                .join(db.Slice, db.Slice.id==db.Experiment.slice_id)

            for filter_arg in q_filter:
                q = q.filter(filter_arg)

            results = q.all()

            trace_list = []
            for pair in results:
                #TODO set t0 to latency to align to foot of PSP
                trace = TSeries(data=pair.avg_psp,
                                sample_rate=db.default_sample_rate)
                trace_list.append(trace)
                response_grid[i, 0].plot(trace.time_values, trace.data)
            if len(trace_list) > 0:
                grand_trace = TSeriesList(trace_list).mean()
                response_grid[i, 0].plot(grand_trace.time_values,
                                         grand_trace.data,
                                         pen='b')
                response_grid[i, 0].setTitle(
                    'layer %s, %s-> layer %s, %s; n_synapses = %d' %
                    (pre_layer, pre_cre, post_layer, post_cre,
                     len(trace_list)))
            else:
                print('No synapses for layer %s, %s-> layer %s, %s' %
                      (pre_layer, pre_cre, post_layer, post_cre))

    return response_grid, feature_grid
Exemplo n.º 4
0
                      brush=(120, 120, 120),
                      pen='k').setZValue(-10)
    write_csv(csv_file, x,
              "Figure 3E estimated PSP amplitude distribution bin edges (V)")
    write_csv(
        csv_file, y,
        "Figure 3E estimated PSP amplitude distribution connections per bin")

    print("Global connectivity correction factor:",
          corrected_prof.sum() / amp_hist[0].sum())

    trace_plots = []
    deconv_plots = []
    hist_plots = []

    session = db.session()

    def add_connection_plots(i, name, timestamp, pre_id, post_id):
        global session, win, filtered
        p = pg.debug.Profiler(disabled=True, delayed=False)
        trace_plot = win.addPlot(i, 1)
        trace_plots.append(trace_plot)
        trace_plot.setYRange(-1.4e-3, 2.1e-3)
        # deconv_plot = win.addPlot(i, 2)
        # deconv_plots.append(deconv_plot)
        # deconv_plot.hide()

        hist_plot = win.addPlot(i, 2)
        hist_plots.append(hist_plot)
        limit_plot = win.addPlot(i, 3)
        limit_plot.addLegend()
from __future__ import division
import time, datetime
import multipatch_analysis.database.database as db
from neuroanalysis.ui.plot_grid import PlotGrid

# Script: pull baseline RMS noise for every current-clamp recording,
# together with one character of the experiment's original path
# (presumably a rig/site identifier — TODO confirm), the recording's
# device key, and its start time.
s = db.session()

# Raw SQL joining patch_clamp_recording -> recording -> sync_rec ->
# experiment, restricted to 'ic' clamp mode with all needed columns
# non-null.
q = """
    select 
        pcrec.baseline_rms_noise,
        substring(experiment.original_path from 36 for 1),
        recording.device_key,
        recording.start_time
    from 
        patch_clamp_recording pcrec
        join recording on pcrec.recording_id=recording.id
        join sync_rec on recording.sync_rec_id=sync_rec.id
        join experiment on sync_rec.experiment_id=experiment.id
    where
        pcrec.clamp_mode='ic'
        and pcrec.baseline_rms_noise is not null
        and recording.device_key is not null
        and experiment.original_path is not null
"""

# Execute the raw SQL and materialize all result rows in memory.
rec = s.execute(q)
rows = rec.fetchall()

import pyqtgraph as pg
import numpy as np

# Column 0 of each row is baseline_rms_noise.
rms = np.array([row[0] for row in rows])