Code example #1
def update_analysis(limit=None):
    s = db.session()
    # select all pairs that do not yet have a FirstPulseFeatures record
    q = s.query(db.Pair, FirstPulseFeatures).outerjoin(FirstPulseFeatures).filter(
        FirstPulseFeatures.pair_id == None)
    if limit is not None:
        q = q.limit(limit)
    print("Updating %d pairs.." % q.count())
    records = q.all()
    for i, record in enumerate(records):
        pair = record[0]
        pulse_responses, pulse_ids, pulse_response_amps = filter_pulse_responses(
            pair)
        if len(pulse_responses) > 0:
            results = first_pulse_features(pair, pulse_responses,
                                           pulse_response_amps)
            fpf = FirstPulseFeatures(pair=pair,
                                     n_sweeps=len(pulse_ids),
                                     pulse_ids=pulse_ids,
                                     **results)
            s.add(fpf)
            if i % 10 == 0:
                # commit in batches of 10 so progress is saved incrementally
                s.commit()
                print("%d/%d pairs added to the DB" % (i, len(records)))
    s.commit()
    s.close()
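A minimal driver for update_analysis (a sketch, not part of the original; it assumes the database connection and the FirstPulseFeatures table are already set up) might process a small batch first and then sweep the rest:

if __name__ == '__main__':
    # hypothetical driver: run a small batch as a sanity check,
    # then update the remaining un-analyzed pairs
    update_analysis(limit=20)
    update_analysis()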
Code example #2
def build_detection_limits():
    # silence warnings about fp issues
    np.seterr(all='ignore')

    # read all pair records from DB
    classifier = strength_analysis.get_pair_classifier(seed=0,
                                                       use_vc_features=False)
    conns = strength_analysis.query_all_pairs(classifier)

    # filter
    mask = np.isfinite(conns['ic_deconv_amp_mean'])
    filtered = conns[mask]

    # start a fresh boolean mask over the filtered records;
    # remove recordings with gain errors
    mask = filtered['ic_deconv_amp_mean'] < 0.02

    # remove recordings with high crosstalk
    mask &= abs(filtered['ic_crosstalk_mean']) < 60e-6

    # remove recordings with low sample count
    mask &= filtered['ic_n_samples'] > 50

    # keep only pairs whose pre- and postsynaptic cre types match
    typs = filtered['pre_cre_type']
    mask &= typs == filtered['post_cre_type']

    typ_mask = ((typs == 'sim1') | (typs == 'tlx3') | (typs == 'unknown') |
                (typs == 'rorb') | (typs == 'ntsr1'))
    mask &= typ_mask

    filtered = filtered[mask]

    # split into connected (synapse) and unconnected pairs
    c_mask = filtered['synapse'] == True
    u_mask = ~c_mask

    signal = filtered['confidence']
    background = filtered['ic_base_deconv_amp_mean']

    session = db.session()

    # do selected connections first
    count = 0
    for i, rec in enumerate(filtered):
        print("================== %d/%d ===================== " %
              (i, len(filtered)))
        pair = session.query(
            db.Pair).filter(db.Pair.id == rec['pair_id']).all()[0]
        if pair.detection_limit is not None:
            print("    skip!")
            continue
        try:
            measure_limit(pair, session, classifier)
        except Exception:
            sys.excepthook(*sys.exc_info())

        count += 1
        if count > 100:
            print("Bailing out before memory fills up.")
            sys.exit(0)
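build_detection_limits skips pairs that already have a detection_limit and exits after 100 new measurements, so it is intended to be re-run until no work remains. A sketch of a driver (the __main__ guard is an assumption, not part of the original script):

if __name__ == '__main__':
    # each run measures up to 100 more pairs, then bails out to limit memory use
    build_detection_limits()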
Code example #3
"""Queries and plots voltage and current clamp data fits from avg_first_pulse_fit table.
"""

import numpy as np
import matplotlib
import matplotlib.pyplot as plt
from sqlalchemy.orm import aliased
from aisynphys.database import database as db
from aisynphys import fit_average_first_pulse as fafp

#----------------------------------------------------------------
#-------------------------- queries -----------------------------
#----------------------------------------------------------------
session = db.session()

# ---- Query all---------------
'''Note that this query can take several minutes'''
#data = session.query(fafp.AvgFirstPulseFit, db.Pair).join(db.Pair).all()  # this needs to correspond to the imports above

# --- Query some good PV examples--------
pv = [(1533244490.755, 6, 4), (1530559621.966, 7, 6), (1527020350.517, 6, 7),
      (1525903868.115, 6, 5), (1525903868.115, 7, 6), (1525812130.134, 7, 6),
      (1523398687.484, 2, 1), (1523398687.484, 2, 3), (1523398687.484, 3, 2),
      (1521667891.153, 3, 4), (1519425770.224, 6, 7), (1519425770.224, 7, 2),
      (1518564082.242, 4, 6), (1517356361.727, 7, 6), (1517356063.393, 3, 4),
      (1517348193.989, 1, 6), (1517348193.989, 6, 8), (1517266234.424, 5, 4),
      (1517266234.424, 6, 5), (1517269966.858, 2, 4), (1517269966.858, 2, 8),
      (1517269966.858, 7, 8), (1516820219.855, 3, 4), (1516744107.347, 3, 6),
      (1513976168.029, 2, 1), (1511913615.871, 5, 4), (1510268339.831, 4, 5),
      (1510268339.831, 4, 7), (1510268339.831, 5, 4), (1510268339.831, 7, 4),
      (1508189280.042, 4, 8), (1507235159.776, 5, 6), (1505421643.243, 6, 7),
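Since the "query all" above can take several minutes, one way to spot-check the table is to cap the same query with .limit() (a minimal sketch reusing the session, db, and fafp names from this example; the limit of 10 is arbitrary):

# fetch a handful of fits joined to their pairs, just to inspect the records
sample = session.query(fafp.AvgFirstPulseFit, db.Pair).join(db.Pair).limit(10).all()
for fit, pair in sample:
    print(pair.id, fit)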
Code example #4
from __future__ import division
import time, datetime
import aisynphys.database.database as db
from neuroanalysis.ui.plot_grid import PlotGrid
s = db.session()

q = """
    select 
        pcrec.baseline_rms_noise,
        substring(experiment.original_path from 36 for 1),
        recording.device_key,
        recording.start_time
    from 
        patch_clamp_recording pcrec
        join recording on pcrec.recording_id=recording.id
        join sync_rec on recording.sync_rec_id=sync_rec.id
        join experiment on sync_rec.experiment_id=experiment.id
    where
        pcrec.clamp_mode='ic'
        and pcrec.baseline_rms_noise is not null
        and recording.device_key is not null
        and experiment.original_path is not null
"""

rec = s.execute(q)
rows = rec.fetchall()

import pyqtgraph as pg
import numpy as np

rms = np.array([row[0] for row in rows])
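A possible continuation (not part of the original script) is to histogram the baseline RMS noise using the pg and np imports above; stepMode=True with one extra bin edge is the usual pyqtgraph histogram pattern:

# histogram of baseline RMS noise across all current-clamp recordings
y, x = np.histogram(rms, bins=np.linspace(0, rms.max(), 100))
win = pg.plot(x, y, stepMode=True, fillLevel=0)
win.setLabel('bottom', 'baseline RMS noise')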
Code example #5
def plot_features(organism=None,
                  conn_type=None,
                  calcium=None,
                  age=None,
                  sweep_thresh=None,
                  fit_thresh=None):
    s = db.session()

    filters = {
        'organism': organism,
        'conn_type': conn_type,
        'calcium': calcium,
        'age': age
    }

    # build the cross product of all provided filter values
    # (each filter may list several comma-separated alternatives)
    selection = [{}]
    for key, value in filters.items():
        if value is not None:
            temp_list = []
            value_list = value.split(',')
            for v in value_list:
                temp = [s1.copy() for s1 in selection]
                for t in temp:
                    t[key] = v
                temp_list = temp_list + temp
            selection = list(temp_list)

    if len(selection) > 0:

        response_grid = PlotGrid()
        response_grid.set_shape(len(selection), 1)
        response_grid.show()
        feature_grid = PlotGrid()
        feature_grid.set_shape(6, 1)
        feature_grid.show()

        for i, select in enumerate(selection):
            pre_cell = aliased(db.Cell)
            post_cell = aliased(db.Cell)
            q_filter = []
            if sweep_thresh is not None:
                q_filter.append(FirstPulseFeatures.n_sweeps >= sweep_thresh)
            species = select.get('organism')
            if species is not None:
                q_filter.append(db.Slice.species == species)
            c_type = select.get('conn_type')
            if c_type is not None:
                pre_type, post_type = c_type.split('-')
                pre_layer, pre_cre = pre_type.split(';')
                if pre_layer == 'None':
                    pre_layer = None
                post_layer, post_cre = post_type.split(';')
                if post_layer == 'None':
                    post_layer = None
                q_filter.extend([
                    pre_cell.cre_type == pre_cre,
                    pre_cell.target_layer == pre_layer,
                    post_cell.cre_type == post_cre,
                    post_cell.target_layer == post_layer
                ])
            else:
                # keep placeholders defined so the plot titles below can
                # still be formatted when no conn_type filter is given
                pre_layer = pre_cre = post_layer = post_cre = None
            calc_conc = select.get('calcium')
            if calc_conc is not None:
                q_filter.append(db.Experiment.acsf.like(calc_conc + '%'))
            age_range = select.get('age')
            if age_range is not None:
                age_lower, age_upper = age_range.split('-')
                q_filter.append(
                    db.Slice.age.between(int(age_lower), int(age_upper)))

            q = s.query(FirstPulseFeatures).join(db.Pair, FirstPulseFeatures.pair_id==db.Pair.id)\
                .join(pre_cell, db.Pair.pre_cell_id==pre_cell.id)\
                .join(post_cell, db.Pair.post_cell_id==post_cell.id)\
                .join(db.Experiment, db.Experiment.id==db.Pair.expt_id)\
                .join(db.Slice, db.Slice.id==db.Experiment.slice_id)

            for filter_arg in q_filter:
                q = q.filter(filter_arg)

            results = q.all()

            trace_list = []
            for pair in results:
                #TODO set t0 to latency to align to foot of PSP
                trace = TSeries(data=pair.avg_psp,
                                sample_rate=db.default_sample_rate)
                trace_list.append(trace)
                response_grid[i, 0].plot(trace.time_values, trace.data)
            if len(trace_list) > 0:
                grand_trace = TSeriesList(trace_list).mean()
                response_grid[i, 0].plot(grand_trace.time_values,
                                         grand_trace.data,
                                         pen='b')
                response_grid[i, 0].setTitle(
                    'layer %s, %s-> layer %s, %s; n_synapses = %d' %
                    (pre_layer, pre_cre, post_layer, post_cre,
                     len(trace_list)))
            else:
                print('No synapses for layer %s, %s-> layer %s, %s' %
                      (pre_layer, pre_cre, post_layer, post_cre))

    return response_grid, feature_grid
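A call might look like the following; the argument formats are inferred from the parsing inside plot_features (comma-separated alternatives, conn_type as 'layer;cre-layer;cre', age as 'low-high'), and the concrete values are placeholders only:

# hypothetical invocation; values are chosen to match the string formats
# parsed inside plot_features, not known-good database entries
response_grid, feature_grid = plot_features(organism='mouse',
                                            conn_type='5;sim1-5;sim1',
                                            calcium='2.0',
                                            age='40-60',
                                            sweep_thresh=5)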