Exemplo n.º 1
0
 def selected_pair(self):
     """Load and analyze the pair currently selected in the experiment browser.

     Hides any previously shown comparison views, then checks the notes
     database for an existing record of this pair: if one exists, the pair is
     re-analyzed and the saved fit restored; otherwise the responses are
     analyzed fresh.
     """
     with pg.BusyCursor():
         self.fit_compare.hide()
         self.meta_compare.hide()
         # only act on a single selected item that actually carries a pair
         items = self.experiment_browser.selectedItems()
         if len(items) != 1:
             return
         item = items[0]
         if not hasattr(item, 'pair'):
             return
         pair = item.pair
         # check whether this pair has already been analyzed (notes-DB record)
         record = notes_db.get_pair_notes_record(
             pair.experiment.ext_id,
             pair.pre_cell.ext_id,
             pair.post_cell.ext_id,
             session=self.notes_session,
         )

         self.pair_param.setValue(pair)
         if record is None:
             # not previously analyzed: run a fresh analysis
             self.pair_analyzer.load_pair(pair, self.default_session)
             self.pair_analyzer.analyze_responses()
         else:
             # previously analyzed: re-analyze and restore the saved fit
             self.pair_analyzer.load_pair(pair, self.default_session, record=record)
             self.pair_analyzer.analyze_responses()
             self.pair_analyzer.load_saved_fit(record)
    def save_to_db(self):
        """Save the current fit parameters, QC calls, and metadata for this
        pair to the notes database.

        If a record for this pair already exists, the differences are printed
        (highlighted for review) and a dialog asks the user whether to
        overwrite.

        Raises
        ------
        Exception
            'Save Cancelled' if a conflicting record exists and the user
            declines to overwrite it.
        """
        # collect the user's fit pass/fail call for every mode/holding combo
        fit_pass = {}
        for mode in modes:
            fit_pass[mode] = {}
            for holding in holdings:
                fit_pass[mode][str(holding)] = self.ctrl_panel.output_params[
                    'Fit parameters',
                    str(holding) + ' ' + mode.upper(), 'Fit Pass']

        expt_id = self.pair.experiment.ext_id
        pre_cell_id = self.pair.pre_cell.ext_id
        post_cell_id = self.pair.post_cell.ext_id
        meta = {
            'expt_id': expt_id,
            'pre_cell_id': pre_cell_id,
            'post_cell_id': post_cell_id,
            'synapse_type': self.ctrl_panel.user_params['Synapse call'],
            'gap_junction': self.ctrl_panel.user_params['Gap junction call'],
            'fit_parameters': self.fit_params,
            'fit_pass': fit_pass,
            'fit_warnings': self.warnings,
            'comments': self.ctrl_panel.output_params['Comments', ''],
        }

        session = notes_db.db.session(readonly=False)
        try:
            record = notes_db.get_pair_notes_record(expt_id,
                                                    pre_cell_id,
                                                    post_cell_id,
                                                    session=session)

            if record is None:
                # no existing record -- create a new one
                entry = notes_db.PairNotes(
                    expt_id=expt_id,
                    pre_cell_id=pre_cell_id,
                    post_cell_id=post_cell_id,
                    notes=meta,
                    modification_time=datetime.datetime.now(),
                )
                session.add(entry)
                session.commit()
            else:
                # conflicting record exists -- show differences and confirm overwrite
                self.print_pair_notes(meta, record)
                msg = pg.QtGui.QMessageBox.question(
                    None, "Pair Analysis",
                    "The record you are about to save conflicts with what is in the Pair Notes database.\nYou can see the differences highlighted in red.\nWould you like to overwrite?",
                    pg.QtGui.QMessageBox.Yes | pg.QtGui.QMessageBox.No)
                if msg == pg.QtGui.QMessageBox.Yes:
                    record.notes = meta
                    record.modification_time = datetime.datetime.now()
                    session.commit()
                else:
                    raise Exception('Save Cancelled')
        finally:
            # BUG FIX: previously the session was only closed on the success
            # path; a cancelled save (or any error) leaked the open session.
            session.close()
Exemplo n.º 3
0
def get_pair_avg_fits(pair, session, notes_session=None, ui=None, max_ind_freq=50):
    """Return PSP fits to averaged responses for this pair.

    Fits are performed against average PSPs in 4 different categories: 
    IC -70mV, IC -55mV, VC -70mV, and VC -55mV. All PSPs in these categories are averaged together
    regardless of their position in a pulse train, so we expect the amplitudes of these averages to
    be affected by any short-term depression/facilitation present at the synapse. As such, these fits
    are not ideal for measuring the amplitude of the synapse; however, they do provide good estimates
    of rise time and decay tau.
    
    Operations are:
    
    - Query all pulse responses for this pair, where the pulse train frequency was 
      no faster than max_ind_freq
    - Sort responses by clamp mode and holding potential, with the latter in two bins: -80 to -60 mV and -60 to -45 mV.
      Responses are further separated into qc pass/fail for each bin. QC pass criteria:
        - PR must have exactly one presynaptic spike with detectable latency
        - Either PR.ex_qc_pass or .in_qc_pass must be True, depending on clamp mode / holding
    - Generate average response for qc-passed pulses responses in each mode/holding combination
    - Fit averages to PSP curve. If the latency was manually annotated for this synapse, then the curve
      fit will have its latency constrained within ±100 μs.
    - Compare to manually verified fit parameters; if these are not a close match OR if the 
      manual fits were already failed, then *fit_qc_pass* will be False.

    Parameters
    ----------
    pair :
        Pair record; its experiment/pre_cell/post_cell ext_ids are used to
        look up the manually-verified notes record.
    session :
        Database session passed to response_query().
    notes_session : optional
        Session used for the notes-database lookup.
    ui : optional
        If given, receives show_pulse_responses / show_data_notes /
        show_fit_results calls for display.
    max_ind_freq : float
        Maximum pulse-train induction frequency (Hz) of responses to include.
       
    
    Returns
    -------
    results : dict
        {(mode, holding): {
            'responses': ..,
            'average': ..,
            'initial_latency': ..,
            'fit_result': ..,
            'fit_qc_pass': ..,
            'fit_qc_pass_reasons': ..,
            'expected_fit_params': ..,
            'expected_fit_pass': ..,
            'avg_baseline_noise': ..,
        }, ...}
    
    """
    # profiler is disabled by default; flip disabled=False to collect timings
    prof = pg.debug.Profiler(disabled=True, delayed=False)
    prof(str(pair))
    results = {}
    
    # query and sort pulse responses with induction frequency 50Hz or slower
    records = response_query(session=session, pair=pair, max_ind_freq=max_ind_freq).all()
    prof('query prs')
    pulse_responses = [rec[0] for rec in records]

    # sort into clamp mode / holding bins
    sorted_responses = sort_responses(pulse_responses)
    prof('sort prs')

    # load expected PSP curve fit parameters from notes DB
    notes_rec = notes_db.get_pair_notes_record(pair.experiment.ext_id, pair.pre_cell.ext_id, pair.post_cell.ext_id, session=notes_session)
    prof('get pair notes')

    if ui is not None:
        ui.show_pulse_responses(sorted_responses)
        ui.show_data_notes(notes_rec)
        prof('update ui')

    for (clamp_mode, holding), responses in sorted_responses.items():
        # nothing to fit if no responses passed QC in this category
        if len(responses['qc_pass']) == 0:
            results[clamp_mode, holding] = None
            continue
            
        if notes_rec is None:
            # no manual annotation: fit with a wide latency window, no expected sign
            notes = None
            sign = 0
            init_latency = None
            latency_window = (0.5e-3, 10e-3)
        else:
            notes = notes_rec.notes
            if notes.get('fit_parameters') is None:
                init_latency = None
                latency_window = (0.5e-3, 10e-3)
            else:
                # constrain latency to ±100 us around the manually-fit value
                init_latency = notes['fit_parameters']['initial'][clamp_mode][str(holding)]['xoffset']
                latency_window = (init_latency - 100e-6, init_latency + 100e-6)
            
            # Expected response sign depends on synapse type, clamp mode, and holding:
            sign = 0
            if notes['synapse_type'] == 'ex':
                sign = -1 if clamp_mode == 'vc' else 1
            elif notes['synapse_type'] == 'in' and holding == -55:
                sign = 1 if clamp_mode == 'vc' else -1

        prof('prepare %s %s' % (clamp_mode, holding))
        fit_result, avg_response = fit_avg_pulse_response(responses['qc_pass'], latency_window, sign)
        prof('fit avg')

        # measure baseline noise over the first 7 ms of the average trace
        avg_baseline_noise = avg_response.time_slice(avg_response.t0, avg_response.t0+7e-3).data.std()

        # compare to manually-verified results
        if notes is None:
            qc_pass = False
            reasons = ['no data notes entry']
            expected_fit_params = None
            expected_fit_pass = None
        elif notes['fit_pass'][clamp_mode][str(holding)] is not True:
            qc_pass = False
            reasons = ['data notes fit failed qc']
            expected_fit_params = None
            expected_fit_pass = False
        else:
            expected_fit_params = notes['fit_parameters']['fit'][clamp_mode][str(holding)]
            expected_fit_pass = True
            qc_pass, reasons = check_fit_qc_pass(fit_result, expected_fit_params, clamp_mode)
            if not qc_pass:
                print("%s %s %s: %s" % (str(pair), clamp_mode, holding,  '; '.join(reasons)))

        if ui is not None:
            ui.show_fit_results(clamp_mode, holding, fit_result, avg_response, qc_pass)

        results[clamp_mode, holding] = {
            'responses': responses,
            'average': avg_response,
            'initial_latency': init_latency,
            'fit_result': fit_result,
            'fit_qc_pass': qc_pass,
            'fit_qc_pass_reasons': reasons,
            'expected_fit_params': expected_fit_params,
            'expected_fit_pass': expected_fit_pass,
            'avg_baseline_noise': avg_baseline_noise,
        }

    return results
Exemplo n.º 4
0
def get_pair_avg_fits(pair, session, notes_session=None, ui=None):
    """Return PSP fits to averaged responses for this pair.
    
    Operations are:
    - query all pulse responses for this pair
    - sort responses by clamp mode and holding potential
    - generate average response for each mode/holding combination
    - fit averages to PSP curve
    
    Returns
    -------
    results : dict
        {(mode, holding): {
            'responses': ..,
            'average': ..,
            'initial_latency': ..,
            'fit_result': ..,
            'fit_qc_pass': ..,
            'fit_qc_pass_reasons': ..,
            'expected_fit_params': ..,
            'expected_fit_pass': ..,
            'avg_baseline_noise': ..,
        }, ...}

        Categories with no qc-passed responses map to None.
    """
    # profiler is disabled by default; flip disabled=False to collect timings
    prof = pg.debug.Profiler(disabled=True, delayed=False)
    prof(str(pair))
    results = {}
    
    # query and sort pulse responses
    records = response_query(session=session, pair=pair).all()
    prof('query prs')
    pulse_responses = [rec[0] for rec in records]
    sorted_responses = sort_responses(pulse_responses)
    prof('sort prs')

    # manually-verified fit parameters / qc calls for this pair
    notes_rec = notes_db.get_pair_notes_record(pair.experiment.ext_id, pair.pre_cell.ext_id, pair.post_cell.ext_id, session=notes_session)
    prof('get pair notes')

    if ui is not None:
        ui.show_pulse_responses(sorted_responses)
        ui.show_data_notes(notes_rec)
        prof('update ui')

    for (clamp_mode, holding), responses in sorted_responses.items():
        # nothing to fit if no responses passed QC in this category
        if len(responses['qc_pass']) == 0:
            results[clamp_mode, holding] = None
            continue
            
        if notes_rec is None:
            # no manual annotation: fit with a wide latency window, no expected sign
            notes = None
            sign = 0
            init_latency = None
            latency_window = (0.5e-3, 8e-3)
        else:
            notes = notes_rec.notes
            if notes.get('fit_parameters') is None:
                init_latency = None
                latency_window = (0.5e-3, 8e-3)
            else:
                # constrain latency to ±100 us around the manually-fit value
                init_latency = notes['fit_parameters']['initial'][clamp_mode][str(holding)]['xoffset']
                latency_window = (init_latency - 100e-6, init_latency + 100e-6)
            
            # Expected response sign depends on synapse type, clamp mode, and holding:
            sign = 0
            if notes['synapse_type'] == 'ex':
                sign = -1 if clamp_mode == 'vc' else 1
            elif notes['synapse_type'] == 'in' and holding == -55:
                sign = 1 if clamp_mode == 'vc' else -1

        prof('prepare %s %s' % (clamp_mode, holding))
        fit_result, avg_response = fit_avg_pulse_response(responses['qc_pass'], latency_window, sign)
        prof('fit avg')

        # measure baseline noise over the first 7 ms of the average trace
        avg_baseline_noise = avg_response.time_slice(avg_response.t0, avg_response.t0+7e-3).data.std()

        # load up expected fit results and compare to manually-verified
        # results
        if notes is None:
            qc_pass = False
            reasons = ['no data notes entry']
            expected_fit_params = None
            expected_fit_pass = None
        elif notes['fit_pass'][clamp_mode][str(holding)] is not True:
            qc_pass = False
            reasons = ['data notes fit failed qc']
            expected_fit_params = None
            expected_fit_pass = False
        else:
            expected_fit_params = notes['fit_parameters']['fit'][clamp_mode][str(holding)]
            expected_fit_pass = True
            qc_pass, reasons = check_fit_qc_pass(fit_result, expected_fit_params, clamp_mode)
            if not qc_pass:
                print("%s %s %s: %s" % (str(pair), clamp_mode, holding,  '; '.join(reasons)))

        if ui is not None:
            ui.show_fit_results(clamp_mode, holding, fit_result, avg_response, qc_pass)

        results[clamp_mode, holding] = {
            'responses': responses,
            'average': avg_response,
            'initial_latency': init_latency,
            'fit_result': fit_result,
            'fit_qc_pass': qc_pass,
            'fit_qc_pass_reasons': reasons,
            'expected_fit_params': expected_fit_params,
            'expected_fit_pass': expected_fit_pass,
            'avg_baseline_noise': avg_baseline_noise,
        }

    return results
Exemplo n.º 5
0
    def create_db_entries(cls, job, session):
        """Create Synapse and AvgResponseFit records for one experiment.

        For every pair in the experiment that has a manually-annotated
        synapse (per the pair-notes DB), the average-response fits from
        get_pair_avg_fits are written to the avg_response_fit table and a
        Synapse record is created with weighted-average kinetic parameters.
        Cell classes are then updated from the signs of each cell's synaptic
        projections.

        Parameters
        ----------
        job : dict
            Must contain 'database' (DB accessor) and 'job_id' (experiment
            ext_id).
        session :
            Database session used for queries and for adding new records.

        Returns
        -------
        errors : list of str
            Human-readable descriptions of fit / latency problems encountered.
        """
        errors = []
        db = job['database']
        expt_id = job['job_id']

        expt = db.experiment_from_ext_id(expt_id, session=session)

        # keep track of whether cells look like they should be inhibitory or excitatory based on synaptic projections
        synaptic_cell_class = {}

        for pair in expt.pair_list:
            # look up synapse type from notes db
            notes_rec = notes_db.get_pair_notes_record(pair.experiment.ext_id,
                                                       pair.pre_cell.ext_id,
                                                       pair.post_cell.ext_id)
            if notes_rec is None:
                continue

            # update upstream pair record
            pair.has_synapse = notes_rec.notes['synapse_type'] in ('ex', 'in')
            pair.has_electrical = notes_rec.notes['gap_junction']

            # only proceed if we have a synapse here
            if not pair.has_synapse:
                continue

            # fit PSP shape against averaged PSPs/PCSs at -70 and -55 mV
            #   - selected from <= 50Hz trains
            #   - must pass ex_qc_pass or in_qc_pass
            #   - must have exactly 1 pre spike with onset time
            fits = get_pair_avg_fits(pair, session)

            # collect values with which to decide on the "correct" kinetic values to report
            latency_vals = []
            rise_vals = {'ic': [], 'vc': []}
            decay_vals = {'ic': [], 'vc': []}

            for (mode, holding), fit in fits.items():
                if fit is None:
                    continue

                if fit['fit_qc_pass']:
                    # user says this is a good fit; write down the kinetic parameters and number of responses that went into the average
                    latency_vals.append(
                        (fit['fit_result'].best_values['xoffset'],
                         len(fit['responses']['qc_pass'])))
                    rise_vals[mode].append(
                        (fit['fit_result'].best_values['rise_time'],
                         len(fit['responses']['qc_pass'])))
                    decay_vals[mode].append(
                        (fit['fit_result'].best_values['decay_tau'],
                         len(fit['responses']['qc_pass'])))

                # Assemble all fit metadata before constructing the record.
                # BUG FIX: the previous code assigned rec.meta a second time
                # when qc-fail reasons existed, silently discarding the
                # expected_fit_params / expected_fit_pass entries.
                fit_meta = {
                    'expected_fit_params': fit['expected_fit_params'],
                    'expected_fit_pass': fit['expected_fit_pass'],
                }
                reasons = fit['fit_qc_pass_reasons']
                if len(reasons) > 0:
                    fit_meta['fit_qc_pass_reasons'] = reasons
                    errors.append("Fit errors for %s %s %s: %s" %
                                  (expt_id, pair.pre_cell.ext_id,
                                   pair.post_cell.ext_id, '\n'.join(reasons)))

                # record this fit in the avg_response_fit table
                rec = db.AvgResponseFit(
                    pair_id=pair.id,
                    clamp_mode=mode,
                    holding=holding,
                    nrmse=fit['fit_result'].nrmse(),
                    initial_xoffset=fit['initial_latency'],
                    manual_qc_pass=fit['fit_qc_pass'],
                    avg_data=fit['average'].data,
                    avg_data_start_time=fit['average'].t0,
                    n_averaged_responses=len(fit['responses']),
                    avg_baseline_noise=fit['avg_baseline_noise'],
                    meta=fit_meta,
                )

                # copy each fitted parameter onto its fit_* column
                for k in [
                        'xoffset', 'yoffset', 'amp', 'rise_time', 'decay_tau',
                        'exp_amp', 'exp_tau'
                ]:
                    setattr(rec, 'fit_' + k, fit['fit_result'].best_values[k])

                session.add(rec)

            # create a DB record for this synapse
            syn = db.Synapse(
                pair_id=pair.id,
                synapse_type=notes_rec.notes['synapse_type'],
            )
            print("add synapse:", pair, pair.id)

            pre_cell_class = notes_rec.notes['synapse_type']
            if pre_cell_class is not None:
                synaptic_cell_class.setdefault(pair.pre_cell,
                                               []).append(pre_cell_class)

            # compute weighted average of latency values
            lvals = np.array([lv[0] for lv in latency_vals])
            nvals = np.array([lv[1] for lv in latency_vals])
            if nvals.sum() != 0:
                latency = (lvals * nvals).sum() / nvals.sum()
                dist = np.abs(lvals - latency)
                # only set latency if the averaged values agree (within 200 us)
                if np.all(dist < 200e-6):
                    syn.latency = latency
                else:
                    errors.append(
                        "latency mismatch on %s %s %s" %
                        (expt_id, pair.pre_cell.ext_id, pair.post_cell.ext_id))
            else:
                errors.append(
                    "%s %s: No latency values available for this synapse" %
                    (pair.pre_cell.ext_id, pair.post_cell.ext_id))

            # compute weighted averages of kinetic parameters
            for mode, pfx in [('ic', 'psp_'), ('vc', 'psc_')]:
                for param, fit_vals in [('rise_time', rise_vals[mode]),
                                        ('decay_tau', decay_vals[mode])]:
                    vals = np.array([v[0] for v in fit_vals])
                    nvals = np.array([v[1] for v in fit_vals])
                    if nvals.sum() == 0:
                        errors.append(
                            "%s %s: No %s %s values available for this synapse"
                            % (pair.pre_cell.ext_id, pair.post_cell.ext_id,
                               mode, param))
                        avg = None
                    else:
                        avg = (vals * nvals).sum() / nvals.sum()
                    setattr(syn, pfx + param, avg)

            session.add(syn)

        # update cell_class:
        for cell, cell_classes in synaptic_cell_class.items():
            if len(set(cell_classes)) == 1:
                # all synaptic projections agree on sign
                syn_class = cell_classes[0]
            else:
                # mismatched synaptic sign
                syn_class = None

            # previously generated nonsynaptic cell class -- based only on transgenic markers and morphology
            cell_class_ns = cell.cell_class_nonsynaptic

            if cell_class_ns is None or syn_class == cell_class_ns:
                # if cell class was not called previously, or if the synaptic class
                # matches the previous nonsynaptic class
                cell.cell_class = syn_class
            elif syn_class is None:
                cell.cell_class = cell_class_ns
            cell_meta = cell.meta.copy()
            cell_meta['synaptic_cell_class'] = syn_class
            cell.meta = cell_meta
            # NOTE(review): this recomputation overwrites cell_class set just
            # above -- presumably _infer_cell_classes() reads the meta we just
            # updated; confirm the direct assignments above are still needed.
            cell.cell_class, cell.cell_class_nonsynaptic = cell._infer_cell_classes(
            )

        return errors
Exemplo n.º 6
0
    def create_db_entries(cls, job, session):
        """Create Synapse and AvgResponseFit records for one experiment.

        For every pair in the experiment with a manually-annotated synapse
        (per the pair-notes DB), average-response fits from
        get_pair_avg_fits are written to the avg_response_fit table and a
        Synapse record is created with weighted-average kinetic parameters.

        Parameters
        ----------
        job : dict
            Must contain 'database' (DB accessor) and 'job_id' (experiment
            ext_id).
        session :
            Database session used for queries and for adding new records.
        """
        db = job['database']
        expt_id = job['job_id']

        expt = db.experiment_from_ext_id(expt_id, session=session)

        for pair in expt.pair_list:

            # look up synapse type from notes db
            notes_rec = notes_db.get_pair_notes_record(pair.experiment.ext_id,
                                                       pair.pre_cell.ext_id,
                                                       pair.post_cell.ext_id)
            if notes_rec is None:
                continue

            # update upstream pair record
            pair.has_synapse = notes_rec.notes['synapse_type'] in ('ex', 'in')
            pair.has_electrical = notes_rec.notes['gap_junction']

            # only proceed if we have a synapse here
            if not pair.has_synapse:
                continue

            # fit PSP shape against averaged PSPs/PCSs at -70 and -55 mV
            fits = get_pair_avg_fits(pair, session)

            # collect values with which to decide on the "correct" kinetic values to report
            latency_vals = []
            rise_vals = {'ic': [], 'vc': []}
            decay_vals = {'ic': [], 'vc': []}

            for (mode, holding), fit in fits.items():
                # categories with no qc-passed responses are None; skip them
                if fit is None:
                    continue

                if fit['fit_qc_pass']:
                    # user says this is a good fit; write down the kinetic parameters and number of responses that went into the average
                    latency_vals.append(
                        (fit['fit_result'].best_values['xoffset'],
                         len(fit['responses']['qc_pass'])))
                    rise_vals[mode].append(
                        (fit['fit_result'].best_values['rise_time'],
                         len(fit['responses']['qc_pass'])))
                    decay_vals[mode].append(
                        (fit['fit_result'].best_values['decay_tau'],
                         len(fit['responses']['qc_pass'])))

                # record this fit in the avg_response_fit table
                rec = db.AvgResponseFit(
                    pair_id=pair.id,
                    clamp_mode=mode,
                    holding=holding,
                    nrmse=fit['fit_result'].nrmse(),
                    initial_xoffset=fit['initial_latency'],
                    manual_qc_pass=fit['fit_qc_pass'],
                    avg_data=fit['average'].data,
                    avg_data_start_time=fit['average'].t0,
                    n_averaged_responses=len(fit['responses']),
                    avg_baseline_noise=fit['avg_baseline_noise'],
                )
                # store qc-fail reasons, if any, in the record metadata
                reasons = fit['fit_qc_pass_reasons']
                if len(reasons) > 0:
                    rec.meta = {'fit_qc_pass_reasons': reasons}

                # copy each fitted parameter onto its fit_* column
                for k in [
                        'xoffset', 'yoffset', 'amp', 'rise_time', 'decay_tau',
                        'exp_amp', 'exp_tau'
                ]:
                    setattr(rec, 'fit_' + k, fit['fit_result'].best_values[k])

                session.add(rec)

            # create a DB record for this synapse
            syn = db.Synapse(
                pair_id=pair.id,
                synapse_type=notes_rec.notes['synapse_type'],
            )
            print("add synapse:", pair, pair.id)

            # compute weighted average of latency values
            lvals = np.array([lv[0] for lv in latency_vals])
            nvals = np.array([lv[1] for lv in latency_vals])
            if nvals.sum() != 0:
                latency = (lvals * nvals).sum() / nvals.sum()
                dist = np.abs(lvals - latency)
                # only set latency if the averaged values agree (within 150 us)
                if np.all(dist < 150e-6):
                    syn.latency = latency

            # compute weighted averages of kinetic parameters
            for mode, pfx in [('ic', 'psp_'), ('vc', 'psc_')]:
                for param, fit_vals in [('rise_time', rise_vals[mode]),
                                        ('decay_tau', decay_vals[mode])]:
                    vals = np.array([v[0] for v in fit_vals])
                    nvals = np.array([v[1] for v in fit_vals])
                    if nvals.sum() == 0:
                        avg = None
                    else:
                        avg = (vals * nvals).sum() / nvals.sum()
                    setattr(syn, pfx + param, avg)

            session.add(syn)
Exemplo n.º 7
0
    def create_db_entries(cls, job, session):
        """Create synapse records for all pairs in one experiment.

        Handles both monosynaptic and polysynaptic annotations from the
        pair-notes DB, delegating record creation to
        generate_synapse_record(). Also updates each presynaptic cell's class
        based on the signs of its synaptic projections.

        Parameters
        ----------
        job : dict
            Must contain 'database' (DB accessor) and 'job_id' (experiment
            ext_id).
        session :
            Database session; committed at the end of this method.

        Returns
        -------
        all_errors : list of str
            Accumulated error descriptions from generate_synapse_record().
        """
        all_errors = []
        db = job['database']
        expt_id = job['job_id']

        expt = db.experiment_from_ext_id(expt_id, session=session)

        # keep track of whether cells look like they should be inhibitory or excitatory based on synaptic projections
        synaptic_cell_class = {}

        for pair in expt.pair_list:
            try:
                # look up synapse type from notes db
                notes_rec = notes_db.get_pair_notes_record(
                    pair.experiment.ext_id, pair.pre_cell.ext_id,
                    pair.post_cell.ext_id)
                if notes_rec is None:
                    continue

                # update upstream pair record
                mono_synapse = notes_rec.notes['synapse_type'] in ('ex', 'in')
                pair.has_synapse = mono_synapse
                poly_synapse = notes_rec.notes.get('polysynaptic_type') in (
                    'ex', 'in', 'mix')
                pair.has_polysynapse = poly_synapse
                pair.has_electrical = notes_rec.notes['gap_junction']

                if pair.has_synapse:
                    errors = generate_synapse_record(pair,
                                                     db,
                                                     session,
                                                     notes_rec,
                                                     syn='mono',
                                                     max_ind_freq=50)

                    all_errors.extend(errors)

                    pre_cell_class = notes_rec.notes['synapse_type']
                    if pre_cell_class is not None:
                        synaptic_cell_class.setdefault(
                            pair.pre_cell, []).append(pre_cell_class)

                    # update cell_class if this is a monosynaptic response:
                    # for cell, cell_classes in synaptic_cell_class.items():
                    cell = pair.pre_cell
                    cell_classes = synaptic_cell_class[cell]
                    if len(set(cell_classes)) == 1:
                        # all synaptic projections agree on sign
                        syn_class = cell_classes[0]
                    else:
                        # mismatched synaptic sign
                        syn_class = 'mixed'

                    # previously generated nonsynaptic cell class -- based only on transgenic markers and morphology
                    # cell_class_ns = cell.cell_class_nonsynaptic

                    # if cell_class_ns is None or syn_class == cell_class_ns:
                    #     # if cell class was not called previously, or if the synaptic class
                    #     # matches the previous nonsynaptic class
                    #     cell.cell_class = syn_class
                    # elif syn_class is None:
                    #     cell.cell_class = cell_class_ns
                    # store the synaptic call in cell metadata, then let the
                    # cell re-derive its class from the updated meta
                    cell_meta = cell.meta.copy()
                    cell_meta['synaptic_cell_class'] = syn_class
                    cell.meta = cell_meta
                    cell.cell_class, _ = cell._infer_cell_classes()
                    # if pair.id==85255 and cell.id==15320:
                    #     sadf
                if pair.has_polysynapse:
                    # a pair can have both mono- and polysynaptic records
                    errors = generate_synapse_record(pair,
                                                     db,
                                                     session,
                                                     notes_rec,
                                                     syn='poly',
                                                     max_ind_freq=50)
                    all_errors.extend(errors)
            except Exception:
                # identify the offending pair before propagating the error
                print(f"Error processing pair: {pair}")
                raise

        session.commit()
        return all_errors