Example #1
    def _segment(self, dataset):

        channel = sima.misc.resolve_channels(self._params['channel'],
                                             dataset.channel_names)
        if dataset.savedir is not None:
            pca_path = os.path.join(dataset.savedir,
                                    'opca_' + str(channel) + '.npz')
        else:
            pca_path = None

        if dataset.savedir is not None:
            ica_path = os.path.join(dataset.savedir,
                                    'ica_' + str(channel) + '.npz')
        else:
            ica_path = None

        if self._params['verbose']:
            print('performing PCA...')
        components = self._params['components']
        if isinstance(components, int):
            components = list(range(components))
        _, space_pcs, time_pcs = oPCA.dataset_opca(
            dataset, channel, components[-1] + 1, path=pca_path)
        space_pcs = np.real(space_pcs)

        if self._params['verbose']:
            print('performing ICA...')
        st_components = _stica(
            space_pcs, time_pcs, mu=self._params['mu'], path=ica_path,
            n_components=space_pcs.shape[-1])

        return ROIList([ROI(st_components[..., i]) for i in
                        range(st_components.shape[-1])])
Example #2
File: sara.py Project: nk53/SARA
 def apply(self, rois, dataset=None):
     rois_with_ids = []
     for index, roi in enumerate(rois):
         newroi = roi.todict()
         newroi['id'] = index
         rois_with_ids.append(newroi)
     return ROIList(rois_with_ids)
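
Example #2 rebuilds each ROI as a dict, tags it with a sequential id, and wraps the result back into a ROIList. Below is a minimal standalone sketch of a custom step in the same style; it assumes sima.segment exports the PostProcessingStep base class that these apply() methods override.

from sima.ROI import ROIList
from sima.segment import PostProcessingStep

class TagROIs(PostProcessingStep):
    """Hypothetical step: attach a sequential id to every ROI."""

    def apply(self, rois, dataset=None):
        tagged = []
        for index, roi in enumerate(rois):
            d = roi.todict()  # serialize the ROI, as in Example #2
            d['id'] = index
            tagged.append(d)
        return ROIList(tagged)  # ROIList accepts the dict form, per Example #2
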
Example #3
    def _segment(self, dataset):
        def set_z(roi, z):
            old_mask = roi.mask
            roi.mask = [
                sparse.lil_matrix(old_mask[0].shape, dtype=old_mask[0].dtype)
                for _ in range(z)
            ] + [old_mask[0]]

        rois = ROIList([])
        if isinstance(self.strategy, list):
            if len(self.strategy) != dataset.frame_shape[0]:
                raise Exception('There is not exactly one strategy per plane.')
            iterator = list(
                zip(self.strategy, list(range(dataset.frame_shape[0]))))
        elif isinstance(self.strategy, SegmentationStrategy):
            iterator = list(
                zip(it.repeat(self.strategy),
                    list(range(dataset.frame_shape[0]))))

        for strategy, plane_idx in iterator:
            plane_rois = strategy.segment(dataset[:, :, plane_idx])
            for roi in plane_rois:
                set_z(roi, plane_idx)
            rois.extend(plane_rois)
        return rois
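
The set_z helper above turns a single-plane mask into a 3D mask by prepending empty sparse planes, so ROIs found plane-by-plane line up with their plane index. The same padding trick in isolation, with illustrative shapes:

from scipy import sparse

plane_mask = sparse.lil_matrix((4, 4), dtype=bool)
plane_mask[1, 2] = True

z = 2  # target plane index
mask_3d = [sparse.lil_matrix(plane_mask.shape, dtype=plane_mask.dtype)
           for _ in range(z)] + [plane_mask]

print(len(mask_3d))                # 3 planes: two empty, one occupied
print(mask_3d[z].toarray().sum())  # 1
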
Example #4
    def apply(self, rois, dataset=None):
        """ Remove overlapping ROIs

        Parameters
        ----------
        rois : list
            list of sima.ROI ROIs
        percent_overlap : float
            fraction (0-1) of the smaller ROI's total area that must be
            covered for the two ROIs to be considered overlapping

        Returns
        -------
        rois : list
            A list of sima.ROI ROI objects with the overlapping ROIs combined
        """

        # Touching the mask property forces each ROI to build its mask
        # representation (polygon-defined ROIs get converted) before the
        # pairwise comparisons below.
        for roi in rois:
            roi.mask = roi.mask

        for i in range(len(rois)):  # TODO: more efficient strategy
            for j in [j for j in range(len(rois)) if j != i]:
                if rois[i] is not None and rois[j] is not None:
                    overlap = np.logical_and(rois[i], rois[j])
                    small_area = min(np.size(rois[i]), np.size(rois[j]))

                    if len(np.where(overlap)[0]) > \
                            self.percent_overlap * small_area:
                        new_shape = np.logical_or(rois[i], rois[j])

                        rois[i] = ROI(mask=new_shape.astype('bool'))
                        rois[j] = None
        return ROIList(roi for roi in rois if roi is not None)
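
The merge test above counts the pixels shared by two masks and compares that count against a fraction of the smaller mask's area. Here is the same criterion worked through on plain boolean arrays with illustrative numbers (areas taken as pixel counts):

import numpy as np

a = np.zeros((5, 5), dtype=bool)
b = np.zeros((5, 5), dtype=bool)
a[1:4, 1:4] = True  # 9 pixels
b[2:5, 2:5] = True  # 9 pixels

percent_overlap = 0.3
overlap = np.logical_and(a, b)      # 4 shared pixels
small_area = min(a.sum(), b.sum())  # area of the smaller ROI

if overlap.sum() > percent_overlap * small_area:  # 4 > 2.7, so merge
    merged = np.logical_or(a, b)
    print(merged.sum())  # 14 pixels in the combined ROI
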
Example #5
    def apply(self, rois, dataset=None):
        smoothed, _, _ = SparseROIsFromMasks._find_and_smooth(
            rois, self.static_threshold, self.smooth_size, self.sign_split,
            self.n_processes)

        return ROIList(
            SparseROIsFromMasks._extract_st_rois(smoothed, self.min_size))
Example #6
 def ROIs(self):
     try:
         with open(join(self.savedir, 'rois.pkl'), 'rb') as f:
             return {label: ROIList(**v)
                     for label, v in pickle.load(f).items()}
     except (IOError, pickle.UnpicklingError):
         return {}
Example #7
    def apply(self, rois, dataset=None):
        SmoothFunc = _SmoothBoundariesParallel(self.tolerance, self.min_verts)
        if self.n_processes > 1:
            pool = Pool(processes=self.n_processes)
            smooth_rois = pool.map(SmoothFunc, rois)
            pool.close()
        else:
            smooth_rois = map(SmoothFunc, rois)

        return ROIList(smooth_rois)
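
Examples #7 and #8 fan the smoothing function out over a multiprocessing.Pool when n_processes > 1 and fall back to plain map otherwise; in Python 3, map returns a lazy iterator, which the ROIList constructor consumes. The same dispatch pattern with a stand-in work function and a context-managed pool:

from multiprocessing import Pool

def smooth(x):  # stand-in for _SmoothBoundariesParallel(...)
    return x * 2

if __name__ == '__main__':
    items = [1, 2, 3]
    n_processes = 2
    if n_processes > 1:
        with Pool(processes=n_processes) as pool:  # closes the pool on exit
            results = pool.map(smooth, items)
    else:
        results = list(map(smooth, items))
    print(results)  # [2, 4, 6]
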
Example #8
    def apply(self, rois, dataset=None):
        if not all(len(r.mask) == 1 for r in rois):
            raise ValueError('SmoothROIBoundaries applies only to 2D ROIs.')

        SmoothFunc = _SmoothBoundariesParallel(self.radius)
        if self.n_processes > 1:
            pool = Pool(processes=self.n_processes)
            smooth_rois = pool.map(SmoothFunc, rois)
            pool.close()
        else:
            smooth_rois = map(SmoothFunc, rois)

        return ROIList([roi[0] for roi in smooth_rois])
Example #9
def deleteRoi():
    ds_path = request.form.get('path')
    label = request.form.get('label')
    roi_id = request.form.get('roiId')

    dataset = ImagingDataset.load(ds_path)
    try:
        rois = dataset.ROIs[label]
    except KeyError:
        return jsonify(result='failed to locate ROI list')

    rois = filter(lambda r: r.id != roi_id, rois)
    dataset.add_ROIs(ROIList(rois), label=label)

    return jsonify(result='success')
Example #10
    def apply(self, rois, dataset):
        channel = sima.misc.resolve_channels(self._channel,
                                             dataset.channel_names)
        processed_im = _processed_image_ca1pc(dataset, channel,
                                              self._x_diameter,
                                              self._y_diameter)[0]
        shape = processed_im.shape[:2]
        ROIs = ROIList([])
        for roi in rois:
            roi_indices = np.nonzero(roi.mask[0])
            roi_indices = np.ravel_multi_index(roi_indices, shape)

            # pixel values in the cut
            vals = processed_im.flat[roi_indices]

            # indices of those values below the otsu threshold
            # if all values are identical, continue without adding an ROI
            try:
                roi_indices = roi_indices[vals < threshold_otsu(vals)]
            except ValueError:
                continue

            # apply binary opening and closing to the surviving pixels
            # expand the shape by 1 in all directions to correct for edge
            # effects of binary opening/closing
            twoD_indices = [np.unravel_index(x, shape) for x in roi_indices]
            mask = np.zeros([x + 2 for x in shape])
            for indices in twoD_indices:
                mask[indices[0] + 1, indices[1] + 1] = 1
            mask = ndimage.binary_closing(ndimage.binary_opening(mask))
            mask = mask[1:-1, 1:-1]
            roi_indices = np.where(mask.flat)[0]

            # label blobs in each cut
            labeled_array, num_features = measurements.label(mask)
            for feat in range(num_features):
                blob_inds = np.where(labeled_array.flat == feat + 1)[0]

                twoD_indices = [np.unravel_index(x, shape) for x in blob_inds]
                mask = np.zeros(shape)
                for x in twoD_indices:
                    mask[x] = 1

                ROIs.append(ROI(mask=mask))

        return ROIs
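
The comment in Example #10 explains the 1-pixel padding: binary opening and closing treat everything outside the array as background, so blobs touching the border would otherwise be eaten away. The pad/process/crop pattern in isolation:

import numpy as np
from scipy import ndimage

mask = np.zeros((6, 6), dtype=bool)
mask[0:3, 0:3] = True  # blob touching the border

padded = np.zeros((8, 8), dtype=bool)
padded[1:-1, 1:-1] = mask  # embed with a 1-pixel margin
padded = ndimage.binary_closing(ndimage.binary_opening(padded))
cleaned = padded[1:-1, 1:-1]  # crop back to the original shape

print(cleaned.shape)  # (6, 6)
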
Example #11
    def _rois_from_cuts(cls, cuts):
        """Return ROI structures each containing the full extent of a cut.

        Parameters
        ----------
        cuts : list of sima.normcut.CutRegion
            The segmented regions identified by normalized cuts.

        Returns
        -------
        sima.ROI.ROIList
            ROI structures corresponding to each cut.
        """
        ROIs = ROIList([])
        for cut in cuts:
            if len(cut.indices):
                mask = np.zeros(cut.shape)
                for x in cut.indices:
                    mask[np.unravel_index(x, cut.shape)] = 1
                ROIs.append(ROI(mask=mask))
        return ROIs
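
_rois_from_cuts rebuilds a 2D binary mask from each cut's flat pixel indices via np.unravel_index. A tiny self-contained version of that conversion:

import numpy as np

shape = (3, 4)
flat_indices = [0, 5, 11]  # pixels of one cut, in flattened (row-major) order

mask = np.zeros(shape)
for x in flat_indices:
    mask[np.unravel_index(x, shape)] = 1

print(mask)
# [[1. 0. 0. 0.]
#  [0. 1. 0. 0.]
#  [0. 0. 0. 1.]]
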
Example #12
    def _segment(self, dataset):

        channel = sima.misc.resolve_channels(self._params['channel'],
                                             dataset.channel_names)
        if dataset.savedir is not None:
            pca_path = os.path.join(dataset.savedir,
                                    'opca_' + str(channel) + '.npz')
        else:
            pca_path = None

        if dataset.savedir is not None:
            ica_path = os.path.join(dataset.savedir,
                                    'ica_' + str(channel) + '.npz')
        else:
            ica_path = None

        if self._params['verbose']:
            print('performing PCA...')
        components = self._params['components']
        if isinstance(components, int):
            components = list(range(components))
        _, space_pcs, time_pcs = oPCA.dataset_opca(
            dataset, channel, components[-1] + 1, path=pca_path)
        space_pcs = np.real(space_pcs)

        # Remove components greater than the number of PCs returned
        # in case more components were asked for than the number of
        # independent dimensions in the dataset.
        components = [c for c in components if c < time_pcs.shape[1]]

        if self._params['verbose']:
            print('performing ICA...')
        st_components = _stica(
            space_pcs, time_pcs, mu=self._params['mu'], path=ica_path,
            n_components=space_pcs.shape[-1])

        return ROIList([ROI(st_components[..., i]) for i in
                        range(st_components.shape[-1])])
Example #13
def _remove_overlapping(rois, percent_overlap=0.9):
    """ Remove overlapping ROIs

    Parameters
    ----------
    rois : list
        list of sima.ROI ROIs
    percent_overlap : float
        fraction (0-1) of the smaller ROI's total area that must be covered
        for the two ROIs to be considered overlapping

    Returns
    -------
    rois : list
        A list of sima.ROI ROI objects with the overlapping ROIs combined
    """

    if 0 < percent_overlap <= 1:
        # Touching the mask property forces each ROI to build its mask
        # representation before the pairwise comparisons below.
        for roi in rois:
            roi.mask = roi.mask

        for i in range(len(rois)):
            for j in [j for j in range(len(rois)) if j != i]:
                if rois[i] is not None and rois[j] is not None:
                    overlap = np.logical_and(rois[i].mask.toarray(),
                                             rois[j].mask.toarray())
                    small_area = np.min((rois[i].mask.size, rois[j].mask.size))

                    if len(np.where(overlap)[0]) > \
                            percent_overlap * small_area:
                        new_shape = np.logical_or(rois[i].mask.toarray(),
                                                  rois[j].mask.toarray())

                        rois[i] = ROI(mask=new_shape.astype('bool'),
                                      im_shape=rois[i].mask.shape)
                        rois[j] = None
    return ROIList(roi for roi in rois if roi is not None)
Example #14
def updateRoi():
    ds_path = request.form.get('path')
    label = request.form.get('label')
    points = json.loads(request.form.get('points'))
    roi_label = request.form.get('roiLabel')
    roi_id = request.form.get('roiId')

    dataset = ImagingDataset.load(ds_path)
    roi_data = []
    for i, plane in enumerate(points):
        if plane is None or not len(plane):
            continue
        array_dat = np.array(plane)
        z_dims = i * np.ones((array_dat.shape[:2] + (1, )))
        plane_data = np.concatenate((array_dat, z_dims), axis=2)
        roi_data.extend(list(plane_data))

    if len(roi_data) == 0:
        return jsonify(result="no polygons to save")

    for poly in roi_data:
        if poly.shape[0] < 3:
            raise Exception("unable to store polygon with fewer than 3 points")
    roi = ROI(polygons=roi_data, im_shape=dataset.frame_shape[:3])

    roi.label = roi_label
    roi.id = roi_id
    try:
        rois = dataset.ROIs[label]
    except KeyError:
        rois = []

    # filter() returns an iterator in Python 3, which has no append();
    # build a list instead so the new ROI can be added.
    rois = [r for r in rois if r.id != roi_id]
    rois.append(roi)
    dataset.add_ROIs(ROIList(rois), label=label)

    return jsonify(result='success')
Example #15
def setRoiLabel():
    ds_path = request.form.get('path')
    #old_label = request.form.get('oldLabel')
    old_label = ''
    new_label = request.form.get('newLabel')

    if new_label == '':
        new_label = 'rois'

    dataset = ImagingDataset.load(ds_path)
    if (old_label != ''):
        rois = dataset.ROIs[old_label]
    else:
        rois = ROIList([])
    dataset.add_ROIs(rois, label=new_label)

    labels = list(dataset.ROIs.keys())  # list() so extend() below works in Python 3

    labels.extend(
        map(os.path.basename, glob.glob(os.path.join(ds_path, 'ica*.npz'))))
    labels.extend(
        map(os.path.basename, glob.glob(os.path.join(ds_path, 'opca*.npz'))))

    return jsonify({'labels': labels})
Example #16
def process_data(haussio_data,
                 mask=None,
                 p=2,
                 nrois_init=400,
                 roi_iceberg=0.9,
                 merge_unconnected=None):
    if mask is not None:
        raise RuntimeError("mask not supported in cnmf.process_data")

    fn_cnmf = haussio_data.dirname_comp + '_cnmf.mat'
    shapefn = os.path.join(haussio_data.dirname_comp,
                           haussio.THOR_RAW_FN[:-3] + "shape.npy")
    shape = np.load(shapefn)
    if len(shape) == 5:
        d1, d2 = shape[2], shape[3]
    else:
        d1, d2 = shape[1], shape[2]
    fn_mmap = get_mmap_name(haussio_data.dirname_comp + os.path.sep + 'Yr', d1,
                            d2, shape[0])

    tiffs_to_cnmf(haussio_data)
    if os.path.exists(fn_cnmf):
        resdict = loadmat(fn_cnmf)
        if "dFoF" in resdict.keys():
            A2 = resdict["A"]
            C2 = resdict["C"]
            YrA = resdict["YrA"]
            S2 = resdict["S"]
            dFoF = resdict["dFoF"]
            bl2 = resdict["bl"]
            f = resdict["f"]
            images = haussio_data.read_raw().squeeze()
        else:
            dFoF = None
    if not os.path.exists(fn_cnmf) or dFoF is None:
        c, dview, n_processes = cm.cluster.setup_cluster(
            backend='multiprocessing', n_processes=NCPUS, single_thread=False)

        Yr, dims, T = cm.load_memmap(fn_mmap, 'r+')
        d1, d2 = dims
        images = np.reshape(Yr.T, [T] + list(dims), order='F')

        fr = 1.0 / haussio_data.dt  # imaging rate in frames per second
        decay_time = 0.4  # length of a typical transient in seconds

        # parameters for source extraction and deconvolution
        bord_px_els = 32  # maximum shift to be used for trimming against NaNs
        p = 1  # order of the autoregressive system
        gnb = 2  # number of global background components
        merge_thresh = 0.8  # merging threshold, max correlation allowed
        rf = int(
            np.round(np.sqrt(d1 * d2) / nrois_init)
        )  # half-size of the patches in pixels; e.g. if rf=25, patches are 50x50
        if rf < 16:
            rf = 16
        stride_cnmf = 6  # amount of overlap between the patches in pixels
        npatches = np.round(d1 / (rf * 2) * d2 / (rf * 2))
        K = nrois_init / npatches  # number of components per patch
        if K < 2:
            K = 2
        print(rf, npatches, K)
        gSig = [8, 8]  # expected half size of neurons
        init_method = 'greedy_roi'  # initialization method ('sparse_nmf' for dendritic data)
        is_dendrites = False  # flag for analyzing dendritic data
        alpha_snmf = None  # sparsity penalty for dendritic data (sparse NMF)

        # parameters for component evaluation
        min_SNR = 2.5  # signal to noise ratio for accepting a component
        rval_thr = 0.8  # space correlation threshold for accepting a component
        cnn_thr = 0.8  # threshold for CNN based classifier

        cnm = caiman_cnmf.CNMF(n_processes=1,
                               k=K,
                               gSig=gSig,
                               merge_thresh=merge_thresh,
                               p=0,
                               dview=dview,
                               rf=rf,
                               stride=stride_cnmf,
                               memory_fact=1,
                               method_init=init_method,
                               alpha_snmf=alpha_snmf,
                               only_init_patch=False,
                               gnb=gnb,
                               border_pix=bord_px_els)
        cnm = cnm.fit(images)

        idx_components, idx_components_bad, SNR_comp, r_values, cnn_preds = \
            estimate_components_quality_auto(images, cnm.A, cnm.C, cnm.b, cnm.f,
                                             cnm.YrA, fr, decay_time, gSig, dims,
                                             dview=dview, min_SNR=min_SNR,
                                             r_values_min=rval_thr, use_cnn=False,
                                             thresh_cnn_lowest=cnn_thr)
        A_in, C_in, b_in, f_in = cnm.A[:, idx_components], cnm.C[
            idx_components], cnm.b, cnm.f
        cnm2 = caiman_cnmf.CNMF(n_processes=1,
                                k=A_in.shape[-1],
                                gSig=gSig,
                                p=p,
                                dview=dview,
                                merge_thresh=merge_thresh,
                                Ain=A_in,
                                Cin=C_in,
                                b_in=b_in,
                                f_in=f_in,
                                rf=None,
                                stride=None,
                                gnb=gnb,
                                method_deconvolution='oasis',
                                check_nan=True)
        cnm2 = cnm2.fit(images)

        if merge_unconnected is not None:
            idx_merge = []
            for nroi, ca_roi in enumerate(cnm2.C):
                for nroi_compare_counter, ca_roi_compare in enumerate(
                        cnm2.C[nroi + 1:]):
                    nroi_compare = nroi_compare_counter + nroi + 1
                    if nroi_compare not in idx_merge:
                        correls = np.correlate(ca_roi,
                                               ca_roi_compare,
                                               mode='same')
                        correls /= np.sqrt(
                            np.dot(ca_roi, ca_roi) *
                            np.dot(ca_roi_compare, ca_roi_compare))
                        if correls.max() > merge_unconnected:
                            print("Merging ", nroi_compare)
                            idx_merge.append(nroi_compare)
            idx_no_merge = [
                idx for idx in range(cnm2.C.shape[0]) if idx not in idx_merge
            ]
        else:
            idx_no_merge = list(range(cnm2.C.shape[0]))
        A2 = cnm2.A[:, idx_no_merge].tocsc()
        C2 = cnm2.C[idx_no_merge]
        YrA = cnm2.YrA[idx_no_merge]
        S2 = cnm2.S[idx_no_merge]
        dFoF = cnm2.detrend_df_f(frames_window=300)[idx_no_merge]
        # A: spatial components (ROIs)
        # C: denoised [Ca2+]
        # YrA: residuals ("noise", i.e. traces = C+YrA)
        # S: Spikes
        # f: temporal background
        savemat(
            fn_cnmf, {
                "A": A2,
                "C": C2,
                "YrA": YrA,
                "S": S2,
                "dFoF": dFoF,
                "bl": cnm2.b,
                "f": cnm2.f
            })
        dview.terminate()
        cm.stop_server()

    proj_fn = haussio_data.dirname_comp + "_proj.npy"
    if not os.path.exists(proj_fn):
        zproj = utils.zproject(images)
        np.save(proj_fn, zproj)
    else:
        zproj = np.load(proj_fn)

    logfiles = glob.glob("*LOG*")
    for logfile in logfiles:
        try:
            os.unlink(logfile)
        except OSError:
            pass

    polygons = contour(A2, images.shape[1], images.shape[2], thr=roi_iceberg)
    rois = ROIList([sima.ROI.ROI(polygons=poly) for poly in polygons])

    return rois, C2, zproj, S2, images, YrA
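
The merge_unconnected branch above scores each pair of temporal traces with a cross-correlation normalized by the product of the traces' L2 norms, so identical traces score exactly 1.0. The normalization on its own, with toy traces:

import numpy as np

ca_roi = np.array([0.0, 1.0, 3.0, 1.0, 0.0])
ca_roi_compare = ca_roi.copy()  # identical toy trace

correls = np.correlate(ca_roi, ca_roi_compare, mode='same')
correls /= np.sqrt(np.dot(ca_roi, ca_roi) *
                   np.dot(ca_roi_compare, ca_roi_compare))

print(correls.max())  # 1.0; merge when this exceeds merge_unconnected
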
Example #17
    def _segment(self, dataset):

        channel = sima.misc.resolve_channels(self._params.channel,
                                             dataset.channel_names)
        if dataset.savedir is not None:
            pca_path = os.path.join(dataset.savedir,
                                    'opca_' + str(channel) + '.npz')
        else:
            pca_path = None

        if dataset.savedir is not None:
            ica_path = os.path.join(dataset.savedir,
                                    'ica_' + str(channel) + '.npz')
        else:
            ica_path = None

        if self._params.verbose:
            print('performing PCA...')
        if isinstance(self._params.components, int):
            self._params.components = list(range(self._params.components))
        _, space_pcs, time_pcs = _OPCA(dataset,
                                       channel,
                                       self._params.components[-1] + 1,
                                       path=pca_path)
        space_pcs = np.real(
            space_pcs.reshape(dataset.frame_shape[1:3] +
                              (space_pcs.shape[2], )))
        space_pcs = np.array(
            [space_pcs[:, :, i] for i in self._params.components]).transpose(
                (1, 2, 0))
        time_pcs = np.array([time_pcs[:, i]
                             for i in self._params.components]).transpose(
                                 (1, 0))

        if self._params.verbose:
            print('performing ICA...')
        st_components = _stica(space_pcs,
                               time_pcs,
                               mu=self._params.mu,
                               path=ica_path,
                               n_components=space_pcs.shape[2])

        if self._params.x_smoothing > 0 or self._params.static_threshold > 0:
            accepted, _, _ = _find_useful_components(
                st_components,
                self._params.static_threshold,
                x_smoothing=self._params.x_smoothing)

            if self._params.min_area > 0 or self._params.spatial_sep:
                rois = _extract_st_rois(accepted,
                                        min_area=self._params.min_area,
                                        spatial_sep=self._params.spatial_sep)

            if self._params.smooth_rois:
                if self._params.verbose:
                    print('smoothing ROIs...')
                rois = [_smooth_roi(roi)[0] for roi in rois]

            if self._params.verbose:
                print('removing overlapping ROIs...')
            rois = _remove_overlapping(
                rois, percent_overlap=self._params.overlap_per)
        else:
            rois = [
                ROI(st_components[:, :, i])
                for i in range(st_components.shape[2])
            ]

        return ROIList(rois)
Example #18
def process_data(haussio_data,
                 mask=None,
                 p=2,
                 nrois_init=400,
                 roi_iceberg=0.9):
    if mask is not None:
        raise RuntimeError("mask not supported in cnmf.process_data")

    fn_cnmf = haussio_data.dirname_comp + '_cnmf.mat'
    shapefn = os.path.join(haussio_data.dirname_comp,
                           haussio.THOR_RAW_FN[:-3] + "shape.npy")
    shape = np.load(shapefn)
    if len(shape) == 5:
        d1, d2 = shape[2], shape[3]
        fn_mmap = get_mmap_name('Yr', shape[2], shape[3], shape[0])
    else:
        d1, d2 = shape[1], shape[2]
        fn_mmap = get_mmap_name('Yr', shape[1], shape[2], shape[0])
    fn_mmap = os.path.join(haussio_data.dirname_comp, fn_mmap)
    print(fn_mmap, os.path.exists(fn_mmap), d1, d2)

    if not os.path.exists(fn_cnmf):
        # fn_raw = os.path.join(haussio_data.dirname_comp, haussio.THOR_RAW_FN)
        fn_sima = haussio_data.dirname_comp + '.sima'
        fnames = [fn_sima]
        fnames.sort()
        print(fnames)

        final_frate = 1.0 / haussio_data.dt
        downsample_factor = 1  # use .2 or .1 if file is large and you want a quick answer
        final_frate *= downsample_factor

        c, dview, n_processes = cm.cluster.setup_cluster(backend='local',
                                                         n_processes=None,
                                                         single_thread=False)

        idx_xy = None
        base_name = 'Yr'
        name_new = cm.save_memmap_each(fnames,
                                       dview=dview,
                                       base_name=base_name,
                                       resize_fact=(1, 1, downsample_factor),
                                       remove_init=0,
                                       idx_xy=idx_xy)
        name_new.sort()
        print(name_new)

        if len(name_new) > 1:
            fname_new = cm.save_memmap_join(name_new,
                                            base_name='Yr',
                                            n_chunks=12,
                                            dview=dview)
        else:
            sys.stdout.write('One file only, not saving\n')
            fname_new = name_new[0]

        print("fname_new: " + fname_new)

        Yr, dims, T = cm.load_memmap(fname_new)
        Y = np.reshape(Yr, dims + (T, ), order='F')
        Cn = cm.local_correlations(Y)

        K = nrois_init  # number of neurons expected per patch
        gSig = [15, 15]  # expected half size of neurons
        merge_thresh = 0.8  # merging threshold, max correlation allowed
        p = 2  # order of the autoregressive system
        options = caiman_cnmf.utilities.CNMFSetParms(Y,
                                                     NCPUS,
                                                     p=p,
                                                     gSig=gSig,
                                                     K=K,
                                                     ssub=2,
                                                     tsub=2)

        Yr, sn, g, psx = caiman_cnmf.pre_processing.preprocess_data(
            Yr, dview=dview, **options['preprocess_params'])
        Atmp, Ctmp, b_in, f_in, center = caiman_cnmf.initialization.initialize_components(
            Y, **options['init_params'])

        Ain, Cin = Atmp, Ctmp
        A, b, Cin, f_in = caiman_cnmf.spatial.update_spatial_components(
            Yr,
            Cin,
            f_in,
            Ain,
            sn=sn,
            dview=dview,
            **options['spatial_params'])

        options['temporal_params'][
            'p'] = 0  # set this to zero for fast updating without deconvolution
        C, A, b, f, S, bl, c1, neurons_sn, g, YrA = caiman_cnmf.temporal.update_temporal_components(
            Yr,
            A,
            b,
            Cin,
            f_in,
            bl=None,
            c1=None,
            sn=None,
            g=None,
            **options['temporal_params'])

        A_m, C_m, nr_m, merged_ROIs, S_m, bl_m, c1_m, sn_m, g_m = caiman_cnmf.merging.merge_components(
            Yr,
            A,
            b,
            C,
            f,
            S,
            sn,
            options['temporal_params'],
            options['spatial_params'],
            dview=dview,
            bl=bl,
            c1=c1,
            sn=neurons_sn,
            g=g,
            thr=merge_thresh,
            mx=50,
            fast_merge=True)

        A2, b2, C2, f = caiman_cnmf.spatial.update_spatial_components(
            Yr, C_m, f, A_m, sn=sn, dview=dview, **options['spatial_params'])
        options['temporal_params'][
            'p'] = p  # set it back to original value to perform full deconvolution
        C2, A2, b2, f2, S2, bl2, c12, neurons_sn2, g21, YrA = caiman_cnmf.temporal.update_temporal_components(
            Yr,
            A2,
            b2,
            C2,
            f,
            dview=dview,
            bl=None,
            c1=None,
            sn=None,
            g=None,
            **options['temporal_params'])

        tB = np.minimum(-2, np.floor(-5. / 30 * final_frate))
        tA = np.maximum(5, np.ceil(25. / 30 * final_frate))
        Npeaks = 10
        traces = C2 + YrA
        fitness_raw, fitness_delta, erfc_raw, erfc_delta, r_values, significant_samples = \
            evaluate_components(
                Y, traces, A2, C2, b2, f2, final_frate, remove_baseline=True, N=5,
                robust_std=False, Athresh=0.1, Npeaks=Npeaks, thresh_C=0.3)

        idx_components_r = np.where(r_values >= .6)[0]
        idx_components_raw = np.where(fitness_raw < -60)[0]
        idx_components_delta = np.where(fitness_delta < -20)[0]

        min_radius = gSig[0] - 2
        masks_ws, idx_blobs, idx_non_blobs = extract_binary_masks_blob(
            A2.tocsc(),
            min_radius,
            dims,
            num_std_threshold=1,
            minCircularity=0.6,
            minInertiaRatio=0.2,
            minConvexity=.8)

        idx_components = np.union1d(idx_components_r, idx_components_raw)
        idx_components = np.union1d(idx_components, idx_components_delta)
        idx_blobs = np.intersect1d(idx_components, idx_blobs)
        idx_components_bad = np.setdiff1d(range(len(traces)), idx_components)

        A2 = A2.tocsc()[:, idx_components]
        C2 = C2[idx_components, :]
        YrA = YrA[idx_components, :]
        S2 = S2[idx_components, :]

        # A: spatial components (ROIs)
        # C: denoised [Ca2+]
        # YrA: residuals ("noise", i.e. traces = C+YrA)
        # S: Spikes
        savemat(fn_cnmf, {"A": A2, "C": C2, "YrA": YrA, "S": S2, "bl": bl2})

    else:
        resdict = loadmat(fn_cnmf)
        A2 = resdict["A"]
        C2 = resdict["C"]
        YrA = resdict["YrA"]
        S2 = resdict["S"]
        bl2 = resdict["bl"]
        Yr, dims, T = cm.load_memmap(fn_mmap)
        dims = dims[1:]
        Y = np.reshape(Yr, dims + (T, ), order='F')

    proj_fn = haussio_data.dirname_comp + "_proj.npy"
    if not os.path.exists(proj_fn):
        zproj = utils.zproject(np.transpose(Y, (2, 0, 1)))
        np.save(proj_fn, zproj)
    else:
        zproj = np.load(proj_fn)

    # DF_F, DF = cse.extract_DF_F(Y.reshape(d1*d2, T), A2, C2)

    # t0 = time.time()
    # sys.stdout.write("Ordering components... ")
    # sys.stdout.flush()
    # A_or, C_or, srt = cse.order_components(A2, C2)
    # sys.stdout.write(' took {0:.2f} s\n'.format(time.time()-t0))

    cm.stop_server()

    polygons = contour(A2, Y.shape[0], Y.shape[1], thr=roi_iceberg)
    rois = ROIList([sima.ROI.ROI(polygons=poly) for poly in polygons])

    return rois, C2, zproj, S2, Y, YrA
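
Example #18 moves between the movie Y (y, x, t) and the pixel-by-time matrix Yr with order='F' reshapes, the layout CaImAn memmaps use. A toy round trip showing the two reshapes invert each other:

import numpy as np

dims, T = (2, 3), 4  # toy 2x3 frame, 4 frames
Y = np.arange(np.prod(dims) * T).reshape(dims + (T,))

Yr = np.reshape(Y, (np.prod(dims), T), order='F')  # pixels x time
Y_back = np.reshape(Yr, dims + (T,), order='F')

print(np.array_equal(Y, Y_back))  # True
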
Example #19
def process_data_patches(haussio_data,
                         mask=None,
                         p=2,
                         nrois_init=400,
                         roi_iceberg=0.9):
    fn_cnmf = haussio_data.dirname_comp + '_cnmf.mat'

    tiffs_to_cnmf(haussio_data, mask)
    tmpdirname_comp = os.path.join(tempfile.gettempdir(),
                                   haussio_data.dirname_comp)
    try:
        os.makedirs(os.path.dirname(tmpdirname_comp))
    except OSError:
        pass
    sys.stdout.write('Loading from {0}... '.format(tmpdirname_comp +
                                                   '_Y*.npy'))
    Y = np.load(tmpdirname_comp + '_Y.npy', mmap_mode='r')
    d1, d2, T = Y.shape

    if not os.path.exists(fn_cnmf):

        cse.utilities.stop_server()

        sys.stdout.flush()
        t0 = time.time()
        fname_new = get_mmap_name(tmpdirname_comp, d1, d2, T)
        Yr, _, _ = cse.utilities.load_memmap(fname_new)
        sys.stdout.write('took {0:.2f} s\n'.format(time.time() - t0))

        # how to subdivide the work among processes
        n_pixels_per_process = d1 * d2 // NCPUS_PATCHES

        sys.stdout.flush()
        cse.utilities.stop_server()
        cse.utilities.start_server()
        cl = Client()
        dview = cl[:NCPUS_PATCHES]

        rf = int(
            np.ceil(np.sqrt(d1 * d2 / 4 / NCPUS_PATCHES))
        )  # half-size of the patches in pixels. rf=25, patches are 50x50
        sys.stdout.write("Patch size: {0} * {0} = {1}\n".format(
            rf * 2, rf * rf * 4))
        stride = int(rf / 5)  # amount of overlap between the patches in pixels

        t0 = time.time()
        sys.stdout.write("CNMF patches... ")
        sys.stdout.flush()
        options_patch = cse.utilities.CNMFSetParms(Y,
                                                   NCPUS_PATCHES,
                                                   p=0,
                                                   gSig=[16, 16],
                                                   K=nrois_init // NCPUS_PATCHES,
                                                   ssub=1,
                                                   tsub=8,
                                                   thr=0.8)
        A_tot, C_tot, YrA_tot, b, f, sn_tot, opt_out = cse.map_reduce.run_CNMF_patches(
            fname_new, (d1, d2, T),
            options_patch,
            rf=rf,
            stride=stride,
            dview=dview,
            memory_fact=4.0)
        sys.stdout.write(' took {0:.2f} s\n'.format(time.time() - t0))

        options = cse.utilities.CNMFSetParms(Y,
                                             NCPUS_PATCHES,
                                             K=A_tot.shape[-1],
                                             p=p,
                                             gSig=[16, 16],
                                             ssub=1,
                                             tsub=1)
        pix_proc = np.minimum(
            int((d1 * d2) / NCPUS_PATCHES / (T / 2000.)),
            int((d1 * d2) /
                NCPUS_PATCHES))  # regulates the amount of memory used
        options['spatial_params']['n_pixels_per_process'] = pix_proc
        options['temporal_params']['n_pixels_per_process'] = pix_proc

        t0 = time.time()
        sys.stdout.write("Merging ROIs... ")
        sys.stdout.flush()
        A_m, C_m, nr_m, merged_ROIs, S_m, bl_m, c1_m, sn_m, g_m = \
            cse.merge_components(
                Yr, A_tot, [], np.array(C_tot), [], np.array(C_tot), [],
                options['temporal_params'],
                options['spatial_params'], dview=dview,
                thr=options['merging']['thr'], mx=np.inf)
        sys.stdout.write(' took {0:.2f} s\n'.format(time.time() - t0))

        options['temporal_params']['p'] = 0
        options['temporal_params'][
            'fudge_factor'] = 0.96  # change if denoised traces' time constant is wrong
        options['temporal_params']['backend'] = 'ipyparallel'

        t0 = time.time()
        sys.stdout.write("Updating temporal components... ")
        sys.stdout.flush()
        C_m, f_m, S_m, bl_m, c1_m, neurons_sn_m, g2_m, YrA_m = \
            cse.temporal.update_temporal_components(
                Yr, A_m, np.atleast_2d(b).T, C_m, f, dview=dview,
                bl=None, c1=None, sn=None, g=None, **options['temporal_params'])
        sys.stdout.write(' took {0:.2f} s\n'.format(time.time() - t0))
        # 483.41s
        # 2016-05-24: 74.81s

        t0 = time.time()
        sys.stdout.write("Evaluating components... ")
        sys.stdout.flush()
        traces = C_m + YrA_m

        Npeaks = 10
        final_frate = 1.0 / haussio_data.dt
        tB = np.minimum(-2, np.floor(-5. / 30 * final_frate))
        tA = np.maximum(5, np.ceil(25. / 30 * final_frate))
        fitness_raw, fitness_delta, erfc_raw, erfc_delta, r_values, sign_sam =\
            cse.utilities.evaluate_components(
                Y, traces, A_m, C_m, b, f_m,
                remove_baseline=True, N=5, robust_std=False,
                Athresh=0.1, Npeaks=Npeaks, tB=tB, tA=tA, thresh_C=0.3)

        idx_components_r = np.where(r_values >= .5)[0]
        idx_components_raw = np.where(fitness_raw < -20)[0]
        idx_components_delta = np.where(fitness_delta < -10)[0]

        idx_components = np.union1d(idx_components_r, idx_components_raw)
        idx_components = np.union1d(idx_components, idx_components_delta)

        A_m = A_m[:, idx_components]
        C_m = C_m[idx_components, :]
        sys.stdout.write(' took {0:.2f} s\n'.format(time.time() - t0))

        t0 = time.time()
        sys.stdout.write("Updating spatial components... ")
        sys.stdout.flush()
        A2, b2, C2 = cse.spatial.update_spatial_components(
            Yr,
            C_m,
            f,
            A_m,
            sn=sn_tot,
            dview=dview,
            **options['spatial_params'])
        sys.stdout.write(' took {0:.2f} s\n'.format(time.time() - t0))
        # 77.16s
        # 2016-05-24: 99.22s

        options['temporal_params']['p'] = p
        options['temporal_params'][
            'fudge_factor'] = 0.96  # change if denoised traces' time constant is wrong
        C2, f2, S2, bl2, c12, neurons_sn2, g21, YrA = \
            cse.temporal.update_temporal_components(
                Yr, A2, b2, C2, f, dview=dview, bl=None, c1=None, sn=None,
                g=None, **options['temporal_params'])

        traces = C2 + YrA
        fitness_raw, fitness_delta, erfc_raw, erfc_delta, r_values, sign_sam =\
            cse.utilities.evaluate_components(
                Y, traces, A2, C2, b2, f2, remove_baseline=True, N=5,
                robust_std=False, Athresh=0.1, Npeaks=Npeaks, tB=tB,
                tA=tA, thresh_C=0.3)
        idx_components_r = np.where(r_values >= .6)[0]
        idx_components_raw = np.where(fitness_raw < -60)[0]
        idx_components_delta = np.where(fitness_delta < -20)[0]
        idx_components = np.union1d(idx_components_r, idx_components_raw)
        idx_components = np.union1d(idx_components, idx_components_delta)

        A2 = A2.tocsc()[:, idx_components]
        C2 = C2[idx_components, :]
        YrA = YrA[idx_components, :]
        S2 = S2[idx_components, :]

        # A: spatial components (ROIs)
        # C: denoised [Ca2+]
        # YrA: residuals ("noise")
        # S: Spikes
        savemat(fn_cnmf, {"A": A2, "C": C2, "YrA": YrA, "S": S2, "bl": bl2})
    else:
        resdict = loadmat(fn_cnmf)
        A2 = resdict["A"]
        C2 = resdict["C"]
        YrA = resdict["YrA"]
        S2 = resdict["S"]
        bl2 = resdict["bl"]

    proj_fn = haussio_data.dirname_comp + "_proj.npy"
    if not os.path.exists(proj_fn):
        zproj = utils.zproject(np.transpose(Y, (2, 0, 1)))
        np.save(proj_fn, zproj)
    else:
        zproj = np.load(proj_fn)

    # DF_F, DF = cse.extract_DF_F(Y.reshape(d1*d2, T), A2, C2)

    # t0 = time.time()
    # sys.stdout.write("Ordering components... ")
    # sys.stdout.flush()
    # A_or, C_or, srt = cse.order_components(A2, C2)
    # sys.stdout.write(' took {0:.2f} s\n'.format(time.time()-t0))

    cse.utilities.stop_server()

    polygons = contour(A2, d1, d2, thr=roi_iceberg)
    rois = ROIList([sima.ROI.ROI(polygons=poly) for poly in polygons])

    return rois, C2, zproj, S2, Y, YrA
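
Component selection in Examples #18 and #19 unions the index sets that pass each quality test and keeps only those rows and columns. A compact sketch of that index arithmetic with toy scores:

import numpy as np

r_values = np.array([0.7, 0.2, 0.9, 0.4])            # toy space correlations
fitness_raw = np.array([-70.0, -10.0, -5.0, -80.0])  # toy fitness scores

idx_components_r = np.where(r_values >= .6)[0]       # [0 2]
idx_components_raw = np.where(fitness_raw < -60)[0]  # [0 3]

idx_components = np.union1d(idx_components_r, idx_components_raw)
idx_components_bad = np.setdiff1d(range(len(r_values)), idx_components)

print(idx_components)      # [0 2 3] -> keep these rows/columns
print(idx_components_bad)  # [1]
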
Example #20
 def apply(self, rois, dataset=None):
     return ROIList([r for r in rois if self._valid(r)])
Example #21
def process_data(haussio_data, mask=None, p=2, nrois_init=200):
    fn_cnmf = haussio_data.dirname_comp + '_cnmf.mat'

    tiffs_to_cnmf(haussio_data, mask)
    sys.stdout.write('Loading from {0}... '.format(
        haussio_data.dirname_comp + '_Y*.npy'))
    Y = np.load(haussio_data.dirname_comp + '_Y.npy', mmap_mode='r')
    d1, d2, T = Y.shape

    if not os.path.exists(fn_cnmf):

        cse.utilities.stop_server()

        sys.stdout.flush()
        t0 = time.time()
        Yr = np.load(haussio_data.dirname_comp + '_Yr.npy', mmap_mode='r')
        sys.stdout.write('took {0:.2f} s\n'.format(time.time()-t0))

        # how to subdivide the work among processes
        n_pixels_per_process = d1 * d2 // NCPUS

        options = cse.utilities.CNMFSetParms(Y, K=nrois_init, p=p, gSig=[9, 9])
        options['preprocess_params']['n_processes'] = NCPUS
        options['preprocess_params'][
            'n_pixels_per_process'] = n_pixels_per_process
        options['init_params']['nIter'] = 10
        options['init_params']['maxIter'] = 10
        options['init_params']['use_hals'] = False
        options['spatial_params']['n_processes'] = NCPUS
        options['spatial_params'][
            'n_pixels_per_process'] = n_pixels_per_process
        options['temporal_params']['n_processes'] = NCPUS
        options['temporal_params'][
            'n_pixels_per_process'] = n_pixels_per_process

        cse.utilities.start_server(NCPUS)

        t0 = time.time()
        sys.stdout.write("Preprocessing... ")
        sys.stdout.flush()
        Yr, sn, g = cse.preprocess_data(Yr, **options['preprocess_params'])
        sys.stdout.write(' took {0:.2f} s\n'.format(time.time()-t0))
        # 224.94s

        t0 = time.time()
        sys.stdout.write("Initializing components... ")
        sys.stdout.flush()
        Ain, Cin, b_in, f_in, center = cse.initialize_components(
            Y, **options['init_params'])
        sys.stdout.write(' took {0:.2f} s\n'.format(time.time()-t0))
        # 2281.37s

        t0 = time.time()
        sys.stdout.write("Updating spatial components... ")
        sys.stdout.flush()
        A, b, Cin = cse.update_spatial_components(
            Yr, Cin, f_in, Ain, sn=sn, **options['spatial_params'])
        sys.stdout.write(' took {0:.2f} s\n'.format(time.time()-t0))
        # 252.57s

        t0 = time.time()
        sys.stdout.write("Updating temporal components... ")
        sys.stdout.flush()
        C, f, S, bl, c1, neurons_sn, g, YrA = \
            cse.update_temporal_components(
                Yr, A, b, Cin, f_in, bl=None, c1=None, sn=None, g=None,
                **options['temporal_params'])
        sys.stdout.write(' took {0:.2f} s\n'.format(time.time()-t0))
        # 455.14s

        t0 = time.time()
        sys.stdout.write("Merging ROIs... ")
        sys.stdout.flush()
        A_m, C_m, nr_m, merged_ROIs, S_m, bl_m, c1_m, sn_m, g_m = \
            cse.merge_components(
                Yr, A, b, C, f, S, sn, options['temporal_params'],
                options['spatial_params'], bl=bl, c1=c1, sn=neurons_sn, g=g,
                thr=0.7, mx=100, fast_merge=True)
        sys.stdout.write(' took {0:.2f} s\n'.format(time.time()-t0))
        # 702.55s

        t0 = time.time()
        sys.stdout.write("Updating spatial components... ")
        sys.stdout.flush()
        A2, b2, C2 = cse.update_spatial_components(
            Yr, C_m, f, A_m, sn=sn, **options['spatial_params'])
        sys.stdout.write(' took {0:.2f} s\n'.format(time.time()-t0))
        # 77.16s

        t0 = time.time()
        sys.stdout.write("Updating temporal components... ")
        sys.stdout.flush()
        C2, f2, S2, bl2, c12, neurons_sn2, g21, YrA = \
            cse.update_temporal_components(
                Yr, A2, b2, C2, f, bl=None, c1=None, sn=None, g=None,
                **options['temporal_params'])
        sys.stdout.write(' took {0:.2f} s\n'.format(time.time()-t0))
        # 483.41s

        # A: spatial components (ROIs)
        # C: denoised [Ca2+]
        # YrA: residuals ("noise")
        # S: Spikes
        savemat(fn_cnmf, {"A": A2, "C": C2, "YrA": YrA, "S": S2, "bl": bl2})
    else:
        resdict = loadmat(fn_cnmf)
        A2 = resdict["A"]
        C2 = resdict["C"]
        YrA = resdict["YrA"]
        S2 = resdict["S"]
        bl2 = resdict["bl"]

    proj_fn = haussio_data.dirname_comp + "_proj.npy"
    if not os.path.exists(proj_fn):
        zproj = utils.zproject(np.transpose(Y, (2, 0, 1)))
        np.save(proj_fn, zproj)
    else:
        zproj = np.load(proj_fn)

    # DF_F, DF = cse.extract_DF_F(Y.reshape(d1*d2, T), A2, C2)

    t0 = time.time()
    sys.stdout.write("Ordering components... ")
    sys.stdout.flush()
    A_or, C_or, srt = cse.order_components(A2, C2)
    sys.stdout.write(' took {0:.2f} s\n'.format(time.time()-t0))

    cse.utilities.stop_server()

    polygons = contour(A2, d1, d2, thr=0.9)
    rois = ROIList([sima.ROI.ROI(polygons=poly) for poly in polygons])

    return rois, C2, haussio_data, zproj, S2, Y, YrA
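
Taken together, the examples fit one workflow: run a segmentation strategy over an ImagingDataset, optionally post-process the resulting ROIList, and keep it on the dataset under a label. A hedged end-to-end sketch; the 'example.sima' path and the components value are illustrative only:

import sima
import sima.segment

# Assumes an existing SIMA analysis directory at this (illustrative) path.
dataset = sima.ImagingDataset.load('example.sima')

# Strategy from Examples #1/#12/#17; components=5 is an arbitrary choice.
stica = sima.segment.STICA(components=5)

# dataset.segment runs the strategy's _segment (plus any appended
# post-processing steps), stores the ROIList under the label, and returns it.
rois = dataset.segment(stica, label='stica')
print(len(rois), 'ROIs')
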