Example #1
    def compute_similarity_matrix(self):
        """
        Computes the similarity matrix between each alignment stored in the ephys aligned
        trajectory. Similarity matrix based on number of clusters that share brain region and
        parent brain region
        """

        r = regions_from_allen_csv()

        clusters = dict()
        for key in self.align_keys_sorted:
            # Location of reference lines used for alignment
            feature = np.array(self.alignments[key][0])
            track = np.array(self.alignments[key][1])

            # Instantiate EphysAlignment object
            ephysalign = EphysAlignment(self.xyz_picks,
                                        self.depths,
                                        track_prev=track,
                                        feature_prev=feature,
                                        brain_atlas=self.brain_atlas)

            # Find xyz location of all channels
            xyz_channels = ephysalign.get_channel_locations(feature, track)
            brain_regions = ephysalign.get_brain_locations(xyz_channels)

            # Find the location of clusters along the alignment
            cluster_info = dict()
            cluster_info['brain_id'] = brain_regions['id'][self.cluster_chns]
            cluster_info['parent_id'] = r.get(
                ids=cluster_info['brain_id']).parent.astype(int)
            clusters.update({key: cluster_info})

        sim_matrix = np.zeros(
            (len(self.align_keys_sorted), len(self.align_keys_sorted)))

        for ik, key in enumerate(self.align_keys_sorted):
            for ikk, key2 in enumerate(self.align_keys_sorted):
                same_id = np.where(clusters[key]['brain_id'] ==
                                   clusters[key2]['brain_id'])[0]
                not_same_id = np.where(clusters[key]['brain_id'] !=
                                       clusters[key2]['brain_id'])[0]
                same_parent = np.where(clusters[key]['parent_id'][not_same_id] ==
                                       clusters[key2]['parent_id'][not_same_id])[0]
                sim_matrix[ik, ikk] = len(same_id) + (len(same_parent) * 0.5)
        # Normalise
        sim_matrix_norm = sim_matrix / np.max(sim_matrix)

        return sim_matrix_norm
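
A minimal usage sketch of how the normalised similarity matrix might be consumed; `aqc` is a hypothetical, already-initialised instance of the class exposing compute_similarity_matrix() and align_keys_sorted above:

# Sketch only: `aqc` is an assumed instance, not part of the original example.
import numpy as np

sim = aqc.compute_similarity_matrix()
# Mean agreement of each alignment with every other alignment (diagonal excluded)
mean_agreement = (sim.sum(axis=1) - np.diag(sim)) / (sim.shape[1] - 1)
best_key = aqc.align_keys_sorted[int(np.argmax(mean_agreement))]
print(f'Alignment in closest agreement with the others: {best_key}')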
Example #2
def load_channel_locations(eid, one=None, probe=None, aligned=False):
    """
    From an eid, get brain locations from Alyx database
    analysis.
    :param eid: session eid or dictionary returned by one.alyx.rest('sessions', 'read', id=eid)
    :param dataset_types: additional spikes/clusters objects to add to the standard list
    :return: channels
    """
    if isinstance(eid, dict):
        ses = eid
        eid = ses['url'][-36:]

    one = one or ONE()

    # When a specific probe has been requested
    if isinstance(probe, str):
        insertions = one.alyx.rest('insertions',
                                   'list',
                                   session=eid,
                                   name=probe)[0]
        labels = [probe]
        if not insertions['json']:
            tracing = [False]
            resolved = [False]
            counts = [0]
        else:
            extended_qc = insertions.get('json', {}).get('extended_qc', {})
            tracing = [extended_qc.get('tracing_exists', False)]
            resolved = [extended_qc.get('alignment_resolved', False)]
            counts = [extended_qc.get('alignment_count', 0)]
        probe_id = [insertions['id']]
    # No specific probe specified, load any that is available
    # Need to catch for the case where we have two of the same probe insertions
    else:
        insertions = one.alyx.rest('insertions', 'list', session=eid)
        labels = [ins['name'] for ins in insertions]
        try:
            tracing = [ins.get('json', {}).get('extended_qc', {}).get('tracing_exists', False)
                       for ins in insertions]
            resolved = [ins.get('json', {}).get('extended_qc', {}).get('alignment_resolved', False)
                        for ins in insertions]
            counts = [ins.get('json', {}).get('extended_qc', {}).get('alignment_count', 0)
                      for ins in insertions]
        except Exception:
            tracing = [False for ins in insertions]
            resolved = [False for ins in insertions]
            counts = [0 for ins in insertions]

        probe_id = [ins['id'] for ins in insertions]

    channels = Bunch({})
    r = regions_from_allen_csv()
    for label, trace, resol, count, pid in zip(labels, tracing, resolved,
                                               counts, probe_id):
        if trace:
            if resol:
                logger.info(
                    f'Channel locations for {label} have been resolved. '
                    f'Channel and cluster locations obtained from ephys aligned histology '
                    f'track.')
                # download the data
                chans = one.load_object(eid,
                                        'channels',
                                        collection=f'alf/{label}')
                channels[label] = Bunch({
                    'atlas_id': chans['brainLocationIds_ccf_2017'],
                    'acronym': r.get(chans['brainLocationIds_ccf_2017'])['acronym'],
                    'x': chans['mlapdv'][:, 0] / 1e6,
                    'y': chans['mlapdv'][:, 1] / 1e6,
                    'z': chans['mlapdv'][:, 2] / 1e6,
                    'axial_um': chans['localCoordinates'][:, 1],
                    'lateral_um': chans['localCoordinates'][:, 0]
                })
            elif count > 0 and aligned:
                logger.info(
                    f'Channel locations for {label} have not been '
                    f'resolved. However, alignment flag set to True so channel and cluster'
                    f' locations will be obtained from latest available ephys aligned '
                    f'histology track.')
                # get the latest user aligned channels
                traj_id = one.alyx.rest(
                    'trajectories',
                    'list',
                    session=eid,
                    probe_name=label,
                    provenance='Ephys aligned histology track')[0]['id']
                chans = one.alyx.rest('channels',
                                      'list',
                                      trajectory_estimate=traj_id)

                channels[label] = Bunch({
                    'atlas_id': np.array([ch['brain_region'] for ch in chans]),
                    'x': np.array([ch['x'] for ch in chans]) / 1e6,
                    'y': np.array([ch['y'] for ch in chans]) / 1e6,
                    'z': np.array([ch['z'] for ch in chans]) / 1e6,
                    'axial_um': np.array([ch['axial'] for ch in chans]),
                    'lateral_um': np.array([ch['lateral'] for ch in chans])
                })
                channels[label]['acronym'] = r.get(
                    channels[label]['atlas_id'])['acronym']
            else:
                logger.info(
                    f'Channel locations for {label} have not been resolved. '
                    f'Channel and cluster locations obtained from histology track.'
                )
                # get the channels from histology tracing
                traj_id = one.alyx.rest('trajectories',
                                        'list',
                                        session=eid,
                                        probe_name=label,
                                        provenance='Histology track')[0]['id']
                chans = one.alyx.rest('channels',
                                      'list',
                                      trajectory_estimate=traj_id)

                channels[label] = Bunch({
                    'atlas_id': np.array([ch['brain_region'] for ch in chans]),
                    'x': np.array([ch['x'] for ch in chans]) / 1e6,
                    'y': np.array([ch['y'] for ch in chans]) / 1e6,
                    'z': np.array([ch['z'] for ch in chans]) / 1e6,
                    'axial_um': np.array([ch['axial'] for ch in chans]),
                    'lateral_um': np.array([ch['lateral'] for ch in chans])
                })
                channels[label]['acronym'] = r.get(
                    channels[label]['atlas_id'])['acronym']
        else:
            logger.warning(f'Histology tracing for {label} does not exist. '
                           f'No channels for {label}')

    return channels
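
A hedged usage sketch for the loader above; the import paths, eid and probe name are placeholders and an Alyx connection is assumed to be configured:

# Sketch only: the eid and probe name below are placeholders.
from oneibl.one import ONE
from brainbox.io.one import load_channel_locations

one = ONE()
eid = 'xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx'  # placeholder session eid
channels = load_channel_locations(eid, one=one, probe='probe00', aligned=True)
if 'probe00' in channels:
    # each entry is a Bunch with atlas_id, acronym, x/y/z and local probe coordinates
    print(channels['probe00']['acronym'][:10])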
Example #3
    def upload_channels(self, alignment_key, upload_alyx, upload_flatiron):
        """
        Upload channels to alyx and flatiron based on the alignment specified by the alignment key
        """

        feature = np.array(self.alignments[alignment_key][0])
        track = np.array(self.alignments[alignment_key][1])
        ephysalign = EphysAlignment(self.xyz_picks,
                                    self.depths,
                                    track_prev=track,
                                    feature_prev=feature,
                                    brain_atlas=self.brain_atlas)

        # Find the channels
        channels_mlapdv = np.int32(ephysalign.get_channel_locations(feature, track) * 1e6)
        channels_brainID = ephysalign.get_brain_locations(channels_mlapdv / 1e6)['id']

        # Find the clusters
        r = regions_from_allen_csv()
        clusters_mlapdv = channels_mlapdv[self.cluster_chns]
        clusters_brainID = channels_brainID[self.cluster_chns]
        clusters_brainAcro = r.get(ids=clusters_brainID).acronym

        # upload datasets to flatiron
        files_to_register = []
        if upload_flatiron:
            ftp_patcher = FTPPatcher(one=self.one)
            insertion = self.one.alyx.rest('insertions', 'read', id=self.eid)
            alf_path = self.one.path_from_eid(insertion['session']).joinpath(
                'alf', insertion['name'])
            alf_path.mkdir(exist_ok=True, parents=True)

            # Make the channels.mlapdv dataset
            f_name = alf_path.joinpath('channels.mlapdv.npy')
            np.save(f_name, channels_mlapdv)
            files_to_register.append(f_name)

            # Make the channels.brainLocationIds dataset
            f_name = alf_path.joinpath(
                'channels.brainLocationIds_ccf_2017.npy')
            np.save(f_name, channels_brainID)
            files_to_register.append(f_name)

            # Make the clusters.mlapdv dataset
            f_name = alf_path.joinpath('clusters.mlapdv.npy')
            np.save(f_name, clusters_mlapdv)
            files_to_register.append(f_name)

            # Make the clusters.brainLocationIds dataset
            f_name = alf_path.joinpath(
                'clusters.brainLocationIds_ccf_2017.npy')
            np.save(f_name, clusters_brainID)
            files_to_register.append(f_name)

            # Make the clusters.brainLocationAcronym dataset
            f_name = alf_path.joinpath(
                'clusters.brainLocationAcronyms_ccf_2017.npy')
            np.save(f_name, clusters_brainAcro)
            files_to_register.append(f_name)

            self.log.info("Writing datasets to FlatIron")
            ftp_patcher.create_dataset(path=files_to_register,
                                       created_by=self.one._par.ALYX_LOGIN)

        # The channels stored on Alyx also need to be updated whenever the chosen alignment key
        # is not the latest stored key
        if upload_alyx:
            if alignment_key != self.align_keys_sorted[0]:
                histology.register_aligned_track(self.eid,
                                                 channels_mlapdv / 1e6,
                                                 chn_coords=SITES_COORDINATES,
                                                 one=self.one,
                                                 overwrite=True,
                                                 channels=self.channels)

                ephys_traj = self.one.alyx.rest(
                    'trajectories',
                    'list',
                    probe_insertion=self.eid,
                    provenance='Ephys aligned histology track')
                patch_dict = {'json': self.alignments}
                self.one.alyx.rest('trajectories',
                                   'partial_update',
                                   id=ephys_traj[0]['id'],
                                   data=patch_dict)

        return files_to_register
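
A brief call sketch for the upload step, assuming `aqc` is a hypothetical, fully-initialised instance of the class this method belongs to, with its alignments already loaded:

# Sketch only: `aqc` is an assumed instance, not part of the original example.
latest_key = aqc.align_keys_sorted[0]
files = aqc.upload_channels(alignment_key=latest_key,
                            upload_alyx=True,
                            upload_flatiron=True)
print(f'{len(files)} dataset files staged for registration')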
Example #4
    def test_ancestors_descendants(self):
        # here we use the same brain region as in the alyx test
        brs = regions_from_allen_csv()
        self.assertTrue(brs.descendants(ids=688).id.size == 567)
        self.assertTrue(brs.ancestors(ids=688).id.size == 4)
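
The same regions object can be used to walk the hierarchy; a small exploratory sketch using region id 688 from the test above, assuming the structures returned by ancestors/descendants expose the same fields (id, acronym) as get():

brs = regions_from_allen_csv()
# Acronyms along the path from the ontology root down to region 688
print(brs.ancestors(ids=688).acronym)
# Number of regions contained under region 688
print(brs.descendants(ids=688).id.size)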
Example #5
    def test_get(self):
        brs = regions_from_allen_csv()
        ctx = brs.get(688)
        self.assertTrue(len(ctx.acronym) == 1 and ctx.acronym == 'CTX')
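
get() is also used with arrays of ids elsewhere in these examples to translate channel or cluster ids into acronyms; a minimal sketch assuming the same vectorised behaviour:

import numpy as np

brs = regions_from_allen_csv()
ids = np.array([688, 688])
print(brs.get(ids=ids).acronym)  # expected: one acronym ('CTX') per requested id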
Example #6
            'atlas_id': np.array([ch['brain_region'] for ch in channels]),
            'x': np.array([ch['x'] for ch in channels]) / 1e6,
            'y': np.array([ch['y'] for ch in channels]) / 1e6,
            'z': np.array([ch['z'] for ch in channels]) / 1e6,
            'axial_um': np.array([ch['axial'] for ch in channels]),
            'lateral_um': np.array([ch['lateral'] for ch in channels])
        }

    else:
        print(
            f'No histology or ephys aligned trajectory for session: {eid} and '
            f'probe: {probe_label}, no channels available')
        chans = None

if chans is not None:
    r = regions_from_allen_csv()
    chans['acronym'] = r.get(ids=chans['atlas_id']).acronym
    chans['rgb'] = r.get(ids=chans['atlas_id']).rgb
    cluster_brain_region = chans['acronym'][cluster_chans]
    cluster_colour = chans['rgb'][cluster_chans]
    cluster_xyz = np.c_[chans['x'], chans['y'], chans['z']][cluster_chans]
    regions, idx, n_clust = np.unique(cluster_brain_region,
                                      return_counts=True,
                                      return_index=True)

    region_cols = cluster_colour[idx, :]
    fig, ax = plt.subplots()
    ax.bar(x=np.arange(len(regions)),
           height=n_clust,
           tick_label=regions,
           color=region_cols / 255)
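
A few optional finishing touches for the bar chart, continuing the snippet above with standard matplotlib calls on the same fig/ax:

# Optional labelling for the figure created above
ax.set_xlabel('Brain region acronym')
ax.set_ylabel('Number of clusters')
ax.tick_params(axis='x', rotation=90)
fig.tight_layout()
plt.show()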