def undo_bitvolt_scaling(insertion_keys=None):
    """Undo the bit-volt scaling applied to QC'ed units' `unit_amp`.

    This is a one-time operation only - Oct 2020

    :param insertion_keys: restriction (dict or list of dicts) limiting which
        probe insertions to fix; defaults to no restriction.
    """
    if insertion_keys is None:  # avoid mutable default argument
        insertion_keys = {}

    # only on QC results
    units2fix = (ephys.Unit * ephys.ClusteringLabel
                 & insertion_keys & 'quality_control = 1')
    # exclude those that were already fixed
    units2fix = units2fix - (UndoBitVoltScalingAmpUnit & 'fixed=1')

    if not units2fix:
        return

    # safety check, no jrclust results
    assert len(units2fix & 'clustering_method LIKE "jrclust%"') == 0

    fix_hist_key = {'fix_name': pathlib.Path(__file__).name,
                    'fix_timestamp': datetime.now()}
    FixHistory.insert1(fix_hist_key)

    for unit in tqdm(units2fix.proj('unit_amp').fetch(as_dict=True)):
        probe_type = (ephys.ProbeInsertion & unit).fetch1('probe_type')
        # raw string avoids the invalid "\d" escape warning; .group() is the
        # full match (e.g. "neuropixels 1.0"), used as the npx_bit_volts key
        bit_volts = npx_bit_volts[re.match(r'neuropixels (\d.0)',
                                           probe_type).group()]
        amp = unit.pop('unit_amp')
        # update and bookkeeping happen atomically per unit
        with dj.conn().transaction:
            (ephys.Unit & unit)._update('unit_amp', amp * 1/bit_volts)
            UndoBitVoltScalingAmpUnit.insert1({**fix_hist_key, **unit,
                                               'fixed': True,
                                               'scale': 1/bit_volts})

    # delete cluster_quality figures and remake figures with updated unit_amp
    with dj.config(safemode=False):
        (report.ProbeLevelReport & units2fix).delete()
def update_waveform(session_keys=None):
    """Update unit waveforms to be taken from the peak channel.

    A unit's waveform is updated to be from the peak-channel
    and not from the 1-st channel as before.
    Applicable to only kilosort2 clustering results and not jrclust.

    :param session_keys: restriction limiting which sessions to update;
        defaults to no restriction.
    """
    if session_keys is None:  # avoid mutable default argument
        session_keys = {}

    sessions_2_update = experiment.Session & (
        ephys.ProbeInsertion.proj() * ephys.Unit
        & 'clustering_method = "kilosort2"') & session_keys
    # skip sessions whose units were already fixed
    sessions_2_update = sessions_2_update - FixedWaveformUnit

    if not sessions_2_update:
        return

    fix_hist_key = {'fix_name': pathlib.Path(__file__).name,
                    'fix_timestamp': datetime.now()}

    for key in sessions_2_update.fetch('KEY'):
        success = _update_one_session(key)
        if success:
            # one FixHistory row for this run; per-unit bookkeeping below
            FixHistory.insert1(fix_hist_key, skip_duplicates=True)
            FixedWaveformUnit.insert([{**fix_hist_key, **ukey, 'fixed': 1}
                                      for ukey in (ephys.Unit
                                                   & key).fetch('KEY')])
def update_waveforms(session_keys={}):
    """Re-filter unit waveforms for quality-controlled kilosort2 results.

    For results with quality control, updating unit-waveform where
    a unit's waveform read from waveform.npy needs to be filtered down by
    i) units in metrics.csv
    ii) channels in "channel_map"
    Applicable to only kilosort2 clustering results with quality control
    """
    qc_ks2_units = (ephys.Unit * ephys.ClusteringLabel
                    & 'quality_control = 1'
                    & 'clustering_method = "kilosort2"')
    # sessions with such units, minus those already processed
    pending = (experiment.Session & qc_ks2_units & session_keys
               ) - FixQCsessionWaveform

    if not pending:
        return

    fix_hist_key = {'fix_name': pathlib.Path(__file__).name,
                    'fix_timestamp': datetime.now()}

    for session_key in pending.fetch('KEY'):
        if not _update_one_session(session_key):
            continue
        FixHistory.insert1(fix_hist_key, skip_duplicates=True)
        unit_keys = (ephys.Unit & session_key).fetch('KEY')
        FixQCsessionWaveform.insert(
            [{**fix_hist_key, **unit_key, 'fixed': 1}
             for unit_key in unit_keys])
def fix_photostim_trial(session_keys=None):
    """Fix photostim trials for sessions ingested via BehaviorIngest.

    This fix applies to sessions ingested with the BehaviorIngest's make()
    only, as opposed to BehaviorBpodIngest (for Foraging Task).

    :param session_keys: restriction limiting which sessions to fix;
        defaults to no restriction.
    """
    if session_keys is None:  # avoid mutable default argument
        session_keys = {}

    sessions_2_update = (experiment.Session & behavior_ingest.BehaviorIngest
                         & experiment.PhotostimTrial & session_keys)
    # skip sessions already fixed
    sessions_2_update = sessions_2_update - FixPhotostimTrial

    if not sessions_2_update:
        return

    fix_hist_key = {'fix_name': pathlib.Path(__file__).name,
                    'fix_timestamp': datetime.now()}

    log.info('Fixing {} session(s)'.format(len(sessions_2_update)))
    for key in sessions_2_update.fetch('KEY'):
        success, invalid_photostim_trials = _fix_one_session(key)

        if success:
            removed_photostim_trials = [t['trial']
                                        for t in invalid_photostim_trials]
            FixHistory.insert1(fix_hist_key, skip_duplicates=True)
            # record every session trial, flagging the removed ones
            FixPhotostimTrial.insert([
                {**fix_hist_key, **tkey,
                 'is_removed': 1 if tkey['trial'] in removed_photostim_trials
                 else 0}
                for tkey in (experiment.SessionTrial & key).fetch('KEY')])
def fix_autowater_trial(session_keys={}):
    """Fix auto-water flags for sessions ingested via BehaviorIngest.

    This fix applies to sessions ingested with the BehaviorIngest's make()
    only, as opposed to BehaviorBpodIngest (for Foraging Task)
    """
    pending = ((experiment.Session & behavior_ingest.BehaviorIngest
                & session_keys) - FixAutoWater)

    if not pending:
        return

    fix_hist_key = {'fix_name': pathlib.Path(__file__).name,
                    'fix_timestamp': datetime.now()}

    log.info('--- Fixing {} session(s) ---'.format(len(pending)))
    for session_key in pending.fetch('KEY'):
        ok, incorrect_autowater_trials = _fix_one_session(session_key)

        if not ok:
            log.info('\t!!! Fixing session {} failed! Skipping...'.format(
                session_key))
            continue

        needed_fix_trials = [trial['trial']
                             for trial, _ in incorrect_autowater_trials]
        FixHistory.insert1(fix_hist_key, skip_duplicates=True)
        # record every behavior trial, flagging those that needed fixing
        FixAutoWater.insert([
            {**fix_hist_key, **trial_key,
             'auto_water_needed_fix':
                 1 if trial_key['trial'] in needed_fix_trials else 0}
            for trial_key in (experiment.BehaviorTrial
                              & session_key).fetch('KEY')])
        log.info('\tAuto-water fixing for session {} finished'.format(
            session_key))
def undo_amplitude_scaling():
    """Undo the 3.01 amplitude scaling previously applied to `unit_amp`.

    Divides `unit_amp` by 3.01 for every unit recorded in FixedAmpUnit
    (i.e. units that underwent the earlier scaling fix), recording each
    undone unit in UndoFixedAmpUnit so reruns skip it.
    """
    amp_scale = 1 / 3.01

    # only fix those units that underwent fix_0007
    units2fix = ephys.Unit & FixedAmpUnit
    # exclude those that were already fixed
    units2fix = units2fix - (UndoFixedAmpUnit & 'fixed=1')

    if not units2fix:
        return

    # safety check, no jrclust results and no npx 1.0
    assert len(units2fix & 'clustering_method LIKE "jrclust%"') == 0
    assert len(units2fix.proj() * ephys.ProbeInsertion
               & 'probe_type LIKE "neuropixels 1.0%"') == 0

    fix_hist_key = {'fix_name': pathlib.Path(__file__).name,
                    'fix_timestamp': datetime.now()}
    FixHistory.insert1(fix_hist_key)

    for unit in tqdm(units2fix.proj('unit_amp').fetch(as_dict=True)):
        amp = unit.pop('unit_amp')
        with dj.conn().transaction:
            (ephys.Unit & unit)._update('unit_amp', amp * amp_scale)
            # BUG FIX: record the undo in UndoFixedAmpUnit (the table used
            # above to exclude already-undone units), not FixedAmpUnit —
            # otherwise the undo is never marked done and reruns re-apply it
            UndoFixedAmpUnit.insert1({**fix_hist_key, **unit,
                                      'fixed': True, 'scale': amp_scale})

    # delete cluster_quality figures and remake figures with updated unit_amp
    with dj.config(safemode=False):
        (report.ProbeLevelReport & units2fix).delete()
def apply_amplitude_scaling(insertion_keys=None):
    """Apply the 3.01 amplitude scaling to npx 2.0 mean waveforms only.

    This fix is identical to that of fix_0007 - apply an amplitude scaling
    (3.01) to npx 2.0 probe units.
    The difference is that this fix only apply the scaling to mean waveform,
    and not unit_amp.

    :param insertion_keys: restriction limiting which probe insertions to
        fix; defaults to no restriction.
    """
    if insertion_keys is None:  # avoid mutable default argument
        insertion_keys = {}

    amp_scale = 3.01

    npx2_inserts = (ephys.ProbeInsertion & insertion_keys
                    & 'probe_type LIKE "neuropixels 2.0%"')

    units2fix = (ephys.Unit * ephys.ClusteringLabel
                 & npx2_inserts.proj() & 'quality_control = 1')
    # exclude those that were already fixed
    units2fix = units2fix - (FixedAmpWfUnit & 'fixed=1')

    if not units2fix:
        return

    # safety check, no jrclust results
    assert len(units2fix & 'clustering_method LIKE "jrclust%"') == 0

    fix_hist_key = {'fix_name': pathlib.Path(__file__).name,
                    'fix_timestamp': datetime.now()}
    FixHistory.insert1(fix_hist_key)

    for unit in tqdm(units2fix.proj('waveform').fetch(as_dict=True)):
        wf = unit.pop('waveform')
        # update and bookkeeping happen atomically per unit
        with dj.conn().transaction:
            (ephys.Unit & unit)._update('waveform', wf * amp_scale)
            FixedAmpWfUnit.insert1({**fix_hist_key, **unit,
                                    'fixed': True, 'scale': amp_scale})
def update_spike_sites_and_depths(session_keys=None):
    """Update spike sites and depths for archived-clustering units.

    Runs `_update_one_session` for each session having
    ephys.ArchivedClustering.Unit entries, then records the processed units
    in ArchivedSpikeSitesAndDepths so they are skipped on rerun.
    (The previous docstring was copy-pasted from `update_waveform` and
    described the wrong operation.)

    :param session_keys: restriction limiting which sessions to update;
        defaults to no restriction.
    """
    if session_keys is None:  # avoid mutable default argument
        session_keys = {}

    sessions_2_update = (experiment.Session
                         & ephys.ArchivedClustering.Unit & session_keys)
    # skip sessions already processed
    sessions_2_update = sessions_2_update - ArchivedSpikeSitesAndDepths

    if not sessions_2_update:
        return

    fix_hist_key = {'fix_name': pathlib.Path(__file__).name,
                    'fix_timestamp': datetime.now()}

    for key in sessions_2_update.fetch('KEY'):
        success = _update_one_session(key)
        if success:
            FixHistory.insert1(fix_hist_key, skip_duplicates=True)
            ArchivedSpikeSitesAndDepths.insert(
                [{**fix_hist_key, **ukey, 'fixed': 1}
                 for ukey in (ephys.ArchivedClustering.Unit
                              & key).fetch('KEY')])
def add_ontology_region_id_and_hexcode():
    """
    Update to the ccf.CCFAnnotation table, updating values for 2 new attributes:
    + ontology_region_id
    + color_code
    """
    # record this fix run in the shared FixHistory table
    fix_hist_key = {'fix_name': pathlib.Path(__file__).name,
                    'fix_timestamp': datetime.now()}
    FixHistory.insert1(fix_hist_key)

    stack_path = dj.config['custom']['ccf.r3_20um_path']
    stack = imread(stack_path)  # load reference stack

    log.info('.. loaded stack of shape {} from {}'
             .format(stack.shape, stack_path))

    # iterate over ccf ontology region id/name records,
    regions = ccf.CCFAnnotation().get_ccf_r3_20um_ontology_regions()
    region, nregions = 0, len(regions)  # progress counter / total

    for region_id, region_name, color_hexcode in regions:
        region += 1
        region_id = int(region_id)

        log.info('.. loading region {} ({}/{}) ({})'
                 .format(region_id, region, nregions, region_name))

        # extracting filled volumes from stack in scaled [[x,y,z]] shape,
        # np.where gives (z, y, x) index arrays; the [2, 1, 0] reorder maps
        # them to (x, y, z), and * 20 converts voxel index to 20um CCF coords
        vol = np.array(np.where(stack == region_id)).T[:, [2, 1, 0]] * 20

        if not vol.shape[0]:
            # region id absent from the stack - nothing to annotate
            log.info('.. region {} volume: shape {} - skipping'
                     .format(region_id, vol.shape))
            continue

        log.info('.. region {} volume: shape {}'.format(region_id, vol.shape))

        # all voxel updates for one region commit atomically
        with dj.conn().transaction:
            for vox in vol:
                key = {'ccf_label_id': ccf.CCFLabel.CCF_R3_20UM_ID,
                       'ccf_x': vox[0], 'ccf_y': vox[1], 'ccf_z': vox[2],
                       'annotation_type': ccf.CCFLabel.CCF_R3_20UM_TYPE}
                # sanity check: the existing annotation must match the
                # ontology region name before we attach id/color to it
                assert (ccf.CCFAnnotation
                        & key).fetch1('annotation') == region_name
                (ccf.CCFAnnotation & key)._update('ontology_region_id',
                                                  region_id)
                (ccf.CCFAnnotation & key)._update('color_code',
                                                  color_hexcode)

    log.info('.. done.')
def apply_amplitude_scaling(insertion_keys=None):
    """Apply the 3.01 scaling to npx 2.0 unit amplitudes and waveforms.

    This is a one-time operation only - April 2020
    Kilosort2 results from neuropixels probe 2.0 requires an additionally
    scaling factor of 3.01 applied to the unit amplitude and mean waveform.
    Future version of quality control pipeline will apply this scaling.

    :param insertion_keys: restriction limiting which probe insertions to
        fix; defaults to no restriction.
    """
    if insertion_keys is None:  # avoid mutable default argument
        insertion_keys = {}

    amp_scale = 3.01

    npx2_inserts = (ephys.ProbeInsertion & insertion_keys
                    & 'probe_type LIKE "neuropixels 2.0%"')

    units2fix = (ephys.Unit * ephys.ClusteringLabel
                 & npx2_inserts.proj() & 'quality_control = 1')
    # exclude those that were already fixed
    units2fix = units2fix - (FixedAmpUnit & 'fixed=1')

    if not units2fix:
        return

    # safety check, no jrclust results
    assert len(units2fix & 'clustering_method LIKE "jrclust%"') == 0

    fix_hist_key = {'fix_name': pathlib.Path(__file__).name,
                    'fix_timestamp': datetime.now()}
    FixHistory.insert1(fix_hist_key)

    for unit in tqdm(units2fix.proj('unit_amp',
                                    'waveform').fetch(as_dict=True)):
        amp = unit.pop('unit_amp')
        wf = unit.pop('waveform')
        # both attribute updates and the bookkeeping insert are atomic
        with dj.conn().transaction:
            (ephys.Unit & unit)._update('unit_amp', amp * amp_scale)
            (ephys.Unit & unit)._update('waveform', wf * amp_scale)
            FixedAmpUnit.insert1({**fix_hist_key, **unit,
                                  'fixed': True, 'scale': amp_scale})

    # delete cluster_quality figures and remake figures with updated unit_amp
    with dj.config(safemode=False):
        (report.ProbeLevelReport & npx2_inserts).delete()