def upload_data(self, xyz_channels, channels=True):
    """
    Register an aligned trajectory for this probe insertion, unless the
    alignment has already been resolved.

    :param xyz_channels: channel coordinates to register on the trajectory
    :param channels: flag forwarded to histology.register_aligned_track
    :return: True if a new trajectory was registered, False if the alignment
        was already resolved and nothing was uploaded
    """
    # A resolved alignment must never be overwritten
    if self.resolved:
        return False

    # Create a new trajectory, overwriting any previous one
    histology.register_aligned_track(self.probe_id, xyz_channels,
                                     chn_coords=self.chn_coords, one=self.one,
                                     overwrite=True, channels=channels)
    return True
def upload_channels(self, alignment_key, upload_alyx, upload_flatiron):
    """
    Upload channels to alyx and flatiron based on the alignment specified by
    the alignment key.

    :param alignment_key: key into self.alignments selecting the stored
        feature/track pair
    :param upload_alyx: if True, update the aligned trajectory stored on Alyx
    :param upload_flatiron: if True, write channel/cluster datasets locally
        and register them on FlatIron
    :return: list of dataset files written (empty when upload_flatiron is False)
    """
    feature = np.array(self.alignments[alignment_key][0])
    track = np.array(self.alignments[alignment_key][1])
    ephysalign = EphysAlignment(self.xyz_picks, self.depths, track_prev=track,
                                feature_prev=feature, brain_atlas=self.brain_atlas)

    # Channel locations (in um) and their brain region ids for this alignment
    channels_mlapdv = np.int32(ephysalign.get_channel_locations(feature, track) * 1e6)
    channels_brainID = ephysalign.get_brain_locations(channels_mlapdv / 1e6)['id']

    # Propagate channel locations to clusters via their channel indices
    brain_regions = regions_from_allen_csv()
    clusters_mlapdv = channels_mlapdv[self.cluster_chns]
    clusters_brainID = channels_brainID[self.cluster_chns]
    clusters_brainAcro = brain_regions.get(ids=clusters_brainID).acronym

    files_to_register = []
    if upload_flatiron:
        ftp_patcher = FTPPatcher(one=self.one)
        insertion = self.one.alyx.rest('insertions', 'read', id=self.eid)
        alf_path = self.one.path_from_eid(insertion['session']).joinpath(
            'alf', insertion['name'])
        alf_path.mkdir(exist_ok=True, parents=True)

        # Save each dataset locally and queue it for registration
        datasets = (('channels.mlapdv.npy', channels_mlapdv),
                    ('channels.brainLocationIds_ccf_2017.npy', channels_brainID),
                    ('clusters.mlapdv.npy', clusters_mlapdv),
                    ('clusters.brainLocationIds_ccf_2017.npy', clusters_brainID),
                    ('clusters.brainLocationAcronyms_ccf_2017.npy', clusters_brainAcro))
        for fname, arr in datasets:
            out_file = alf_path.joinpath(fname)
            np.save(out_file, arr)
            files_to_register.append(out_file)

        self.log.info("Writing datasets to FlatIron")
        ftp_patcher.create_dataset(path=files_to_register,
                                   created_by=self.one._par.ALYX_LOGIN)

    # Need to change channels stored on alyx as well, as the stored key is not
    # the same as the latest key
    if upload_alyx and alignment_key != self.align_keys_sorted[0]:
        histology.register_aligned_track(self.eid, channels_mlapdv / 1e6,
                                         chn_coords=SITES_COORDINATES, one=self.one,
                                         overwrite=True, channels=self.channels)
        ephys_traj = self.one.alyx.rest('trajectories', 'list', probe_insertion=self.eid,
                                        provenance='Ephys aligned histology track')
        self.one.alyx.rest('trajectories', 'partial_update', id=ephys_traj[0]['id'],
                           data={'json': self.alignments})
    return files_to_register
def upload_channels(self, alignment_key, upload_alyx, upload_flatiron):
    """
    Upload channels to alyx and flatiron based on the alignment specified by
    the alignment key.

    :param alignment_key: key into self.alignments selecting the stored
        feature/track pair
    :param upload_alyx: if True, update the aligned trajectory stored on Alyx
    :param upload_flatiron: if True, write electrodeSites/channels datasets
        locally and register them on FlatIron
    :return: list of dataset files written (empty when upload_flatiron is False)
    """
    feature = np.array(self.alignments[alignment_key][0])
    track = np.array(self.alignments[alignment_key][1])

    # Channel geometry from the ap.meta file; fall back to a default
    # Neuropixel 1 layout when the meta file cannot be read
    try:
        raw_collection = f'raw_ephys_data/{self.insertion["name"]}'
        meta_dset = self.one.list_datasets(self.insertion['session'], '*ap.meta',
                                           collection=raw_collection)
        meta_file = self.one.load_dataset(self.insertion['session'],
                                          meta_dset[0].split('/')[-1],
                                          collection=raw_collection,
                                          download_only=True)
        geometry = spikeglx.read_geometry(meta_file)
        chns = np.c_[geometry['x'], geometry['y']]
    except Exception as err:
        self.log.warning(f"Could not compute channel locations from meta file, "
                         f"errored with message: {err}. Will use default Neuropixel 1 channels")
        geometry = trace_header(version=1)
        chns = np.c_[geometry['x'], geometry['y']]

    ephysalign = EphysAlignment(self.xyz_picks, chns[:, 1], track_prev=track,
                                feature_prev=feature, brain_atlas=self.brain_atlas)
    channels_mlapdv = np.int32(ephysalign.get_channel_locations(feature, track) * 1e6)
    channels_atlas_id = ephysalign.get_brain_locations(channels_mlapdv / 1e6)['id']

    # Need to change channels stored on alyx as well, as the stored key is not
    # the same as the latest key
    if upload_alyx and alignment_key != self.align_keys_sorted[0]:
        histology.register_aligned_track(self.eid, channels_mlapdv / 1e6,
                                         chn_coords=chns, one=self.one,
                                         overwrite=True, channels=self.channels_flag,
                                         brain_atlas=self.brain_atlas)
        ephys_traj = self.one.alyx.get(f'/trajectories?&probe_insertion={self.eid}'
                                       '&provenance=Ephys aligned histology track',
                                       clobber=True)
        self.one.alyx.rest('trajectories', 'partial_update', id=ephys_traj[0]['id'],
                           data={'json': self.alignments})

    files_to_register = []
    if upload_flatiron:
        ftp_patcher = FTPPatcher(one=self.one)
        alf_path = self.one.eid2path(self.insertion['session']).joinpath(
            'alf', self.insertion["name"])
        alf_path.mkdir(exist_ok=True, parents=True)

        # Probe-level electrodeSites datasets
        for fname, arr in (('electrodeSites.mlapdv.npy', channels_mlapdv),
                           ('electrodeSites.brainLocationIds_ccf_2017.npy', channels_atlas_id),
                           ('electrodeSites.localCoordinates.npy', chns)):
            out_file = alf_path.joinpath(fname)
            np.save(out_file, arr)
            files_to_register.append(out_file)

        # Recompute channels datasets for every spike-sorting collection that
        # already holds channels.localCoordinates
        probe_collections = self.one.list_collections(self.insertion['session'],
                                                      filename='channels*',
                                                      collection=f'alf/{self.insertion["name"]}*')
        for collection in probe_collections:
            coll_chns = self.one.load_dataset(self.insertion['session'],
                                              'channels.localCoordinates',
                                              collection=collection)
            coll_align = EphysAlignment(self.xyz_picks, coll_chns[:, 1], track_prev=track,
                                        feature_prev=feature, brain_atlas=self.brain_atlas)
            coll_mlapdv = np.int32(coll_align.get_channel_locations(feature, track) * 1e6)
            coll_atlas_id = coll_align.get_brain_locations(coll_mlapdv / 1e6)['id']
            coll_path = self.one.eid2path(self.insertion['session']).joinpath(collection)
            coll_path.mkdir(exist_ok=True, parents=True)
            for fname, arr in (('channels.mlapdv.npy', coll_mlapdv),
                               ('channels.brainLocationIds_ccf_2017.npy', coll_atlas_id)):
                out_file = coll_path.joinpath(fname)
                np.save(out_file, arr)
                files_to_register.append(out_file)

        self.log.info("Writing datasets to FlatIron")
        ftp_patcher.create_dataset(path=files_to_register, created_by=self.one.alyx.user)
    return files_to_register