def build_detection_limits():
    """Compute and store a detection limit for each qualifying pair record.

    Queries all pair records from the DB, filters them down to usable
    recordings (finite deconvolved amplitude, no gain error, low crosstalk,
    enough samples, matching pre/post cre type from a fixed set of driver
    lines), then runs ``measure_limit`` on each pair that does not already
    have a detection limit stored.  Processes at most 100 pairs per run and
    then exits, to keep memory usage bounded.
    """
    # silence warnings about fp issues
    np.seterr(all='ignore')

    # read all pair records from DB
    classifier = strength_analysis.get_pair_classifier(seed=0, use_vc_features=False)
    conns = strength_analysis.query_all_pairs(classifier)

    # drop records with non-finite deconvolved amplitude
    mask = np.isfinite(conns['ic_deconv_amp_mean'])
    filtered = conns[mask]

    # remove recordings with gain errors
    mask = filtered['ic_deconv_amp_mean'] < 0.02
    # remove recordings with high crosstalk
    mask &= abs(filtered['ic_crosstalk_mean']) < 60e-6
    # remove recordings with low sample count
    mask &= filtered['ic_n_samples'] > 50

    # keep only same-cre-type pairs from the selected driver lines
    typs = filtered['pre_cre_type']
    mask &= typs == filtered['post_cre_type']
    typ_mask = ((typs == 'sim1') | (typs == 'tlx3') | (typs == 'unknown') |
                (typs == 'rorb') | (typs == 'ntsr1'))
    mask &= typ_mask
    filtered = filtered[mask]

    # NOTE(review): the original also computed c_mask/u_mask ('synapse'
    # flags) and signal/background columns here but never used them; that
    # dead code has been removed.

    session = db.session()

    # do selected connections first
    count = 0
    for i, rec in enumerate(filtered):
        print("================== %d/%d ===================== " % (i, len(filtered)))
        # exactly one Pair row is expected per primary-key id; .one() makes
        # that expectation explicit (instead of the original .all()[0])
        pair = session.query(db.Pair).filter(db.Pair.id == rec['pair_id']).one()
        if pair.detection_limit is not None:
            print(" skip!")
            continue
        try:
            measure_limit(pair, session, classifier)
        except Exception:
            # report the failure but keep processing the remaining pairs
            sys.excepthook(*sys.exc_info())

        count += 1
        if count > 100:
            print("Bailing out before memory fills up.")
            sys.exit(0)
def build_detection_limits():
    """Compute and store a detection limit for each qualifying pair record.

    NOTE(review): this is a second definition of ``build_detection_limits``
    in the same file; at import time it shadows the earlier one.  The only
    substantive difference is ``db.Session()`` here vs. ``db.session()`` in
    the other copy — confirm which attribute the ``db`` module actually
    exposes and delete the duplicate.
    """
    # silence warnings about fp issues
    np.seterr(all='ignore')

    # read all pair records from DB
    classifier = strength_analysis.get_pair_classifier(seed=0, use_vc_features=False)
    conns = strength_analysis.query_all_pairs(classifier)

    # filter: keep only records with a finite deconvolved amplitude
    mask = np.isfinite(conns['ic_deconv_amp_mean'])
    filtered = conns[mask]

    # remove recordings with gain errors
    mask = filtered['ic_deconv_amp_mean'] < 0.02
    # remove recordings with high crosstalk
    mask &= abs(filtered['ic_crosstalk_mean']) < 60e-6
    # remove recordings with low sample count
    mask &= filtered['ic_n_samples'] > 50

    # keep only same-cre-type pairs from the selected driver lines
    typs = filtered['pre_cre_type']
    mask &= typs == filtered['post_cre_type']
    typ_mask = ((typs == 'sim1') | (typs == 'tlx3') | (typs == 'unknown') |
                (typs == 'rorb') | (typs == 'ntsr1'))
    mask &= typ_mask
    filtered = filtered[mask]

    # NOTE(review): c_mask/u_mask and signal/background are computed but
    # never used below — candidates for removal.
    c_mask = filtered['synapse'] == True
    u_mask = ~c_mask
    signal = filtered['confidence']
    background = filtered['ic_base_deconv_amp_mean']

    session = db.Session()

    # do selected connections first
    count = 0
    for i, rec in enumerate(filtered):
        print("================== %d/%d ===================== " % (i, len(filtered)))
        # look up the Pair row by primary key; .all()[0] raises IndexError
        # if the id is missing
        pair = session.query(db.Pair).filter(db.Pair.id == rec['pair_id']).all()[0]
        # skip pairs whose limit was already measured in a previous run
        if pair.detection_limit is not None:
            print(" skip!")
            continue
        try:
            measure_limit(pair, session, classifier)
        except Exception:
            # report the failure but keep processing the remaining pairs
            sys.excepthook(*sys.exc_info())

        count += 1
        if count > 100:
            print("Bailing out before memory fills up.")
            sys.exit(0)
# Script section: Qt/pyqtgraph setup plus an initial query-and-filter pass.
# NOTE(review): `show_conns` is referenced but not defined in this chunk —
# presumably a module-level list of connections to display; confirm against
# the rest of the file.
pg.mkQApp()
pg.dbg()
pg.setConfigOption('background', 'w')
pg.setConfigOption('foreground', 'k')

win = pg.GraphicsLayoutWidget()
win.show()
win.resize(1600, 600)

# one scatter plot spanning a row per displayed connection
scatter_plot = win.addPlot(0, 0, rowspan=len(show_conns))
scatter_plot.setLogMode(True, True)
scatter_plot.setAspectLocked()
scatter_plot.setFixedWidth(500)

# read all pair records from DB
conns = strength_analysis.query_all_pairs()

# filter: keep only records with a finite deconvolved base amplitude
mask = np.isfinite(conns['abs_deconv_base_amp_med'])
filtered = conns[mask]

# remove recordings with gain errors
mask = filtered['abs_deconv_base_amp_med'] < 0.02

# remove recordings likely to have high crosstalk
# (earlier crosstalk filters kept below for reference, disabled)
# cutoff = strength_analysis.datetime_to_timestamp(datetime(2017,4,1))
# mask &= (filtered['electrode_distance'] > 1) | (filtered['acq_timestamp'] > cutoff)
# mask &= filtered['electrode_distance'] > 1

# remove recordings with low sample count
mask &= filtered['n_samples'] > 50
# Script section: window/scatter-plot setup plus a query-and-filter pass
# using the ic_* column names.  NOTE(review): `show_conns` is referenced but
# not defined in this chunk, and the chunk ends mid-filter (`typs` assigned
# but unused here) — the continuation is outside this view.
win = pg.GraphicsLayoutWidget()
win.show()
win.resize(900, 900)

# set up scatter plot
scatter_plot = win.addPlot(0, 0, rowspan=len(show_conns))
scatter_plot.setLogMode(True, True)
scatter_plot.setAspectLocked()
scatter_plot.setFixedWidth(350)
scatter_plot.showGrid(True, True, alpha=0.5)

# read all pair records from DB
classifier = strength_analysis.get_pair_classifier(seed=0, use_vc_features=False)
conns = strength_analysis.query_all_pairs(classifier)

# filter: keep only records with a finite deconvolved amplitude
mask = np.isfinite(conns['ic_deconv_amp_mean'])
filtered = conns[mask]

# remove recordings with gain errors
mask = filtered['ic_deconv_amp_mean'] < 0.02
# remove recordings with high crosstalk
mask &= abs(filtered['ic_crosstalk_mean']) < 60e-6
# remove recordings with low sample count
mask &= filtered['ic_n_samples'] > 50

typs = filtered['pre_cre_type']
# Script section: near-duplicate of the previous setup block, differing only
# in the leading setConfigOption call.  NOTE(review): `show_conns` is
# referenced but not defined in this chunk, and the chunk ends mid-filter
# (`typs` assigned but unused here) — continuation is outside this view.
pg.setConfigOption('foreground', 'k')

win = pg.GraphicsLayoutWidget()
win.show()
win.resize(900, 900)

# set up scatter plot
scatter_plot = win.addPlot(0, 0, rowspan=len(show_conns))
scatter_plot.setLogMode(True, True)
scatter_plot.setAspectLocked()
scatter_plot.setFixedWidth(350)
scatter_plot.showGrid(True, True, alpha=0.5)

# read all pair records from DB
classifier = strength_analysis.get_pair_classifier(seed=0, use_vc_features=False)
conns = strength_analysis.query_all_pairs(classifier)

# filter: keep only records with a finite deconvolved amplitude
mask = np.isfinite(conns['ic_deconv_amp_mean'])
filtered = conns[mask]

# remove recordings with gain errors
mask = filtered['ic_deconv_amp_mean'] < 0.02
# remove recordings with high crosstalk
mask &= abs(filtered['ic_crosstalk_mean']) < 60e-6
# remove recordings with low sample count
mask &= filtered['ic_n_samples'] > 50

typs = filtered['pre_cre_type']