Example #1
    def _load_data(self):
        self.current_sample_id = None
        self.ix = 0
        dfof_curves = None

        proj_path = self.t.get_proj_path()
        self.t.df['_DETREND_DF_O_F'] = [np.array([], dtype=np.float64)] * self.t.df.index.size
        for index, row in self.t.df.iterrows():
            self.ix += 1

            if row['SampleID'] != self.current_sample_id:
                self.idx_components = None
                self.ix = 0
                dfof_curves = None

                self.current_sample_id = row['SampleID']

                pikPath = os.path.join(proj_path, row['ImgInfoPath'])
                pik = pickle.load(open(pikPath, 'rb'))

                cnmA = pik['roi_states']['cnmf_output']['cnmA']
                cnmb = pik['roi_states']['cnmf_output']['cnmb']
                cnmC = pik['roi_states']['cnmf_output']['cnmC']
                cnm_f = pik['roi_states']['cnmf_output']['cnm_f']
                cnmYrA = pik['roi_states']['cnmf_output']['cnmYrA']

                self.idx_components = pik['roi_states']['cnmf_output']['idx_components']
                try:
                    dfof_curves = detrend_df_f(cnmA, cnmb, cnmC, cnm_f, YrA=cnmYrA,
                                               quantileMin=self.params['quantileMin'],
                                               frames_window=self.params['frames_window'])
                except Exception:
                    tb = traceback.format_exc()
                    raise Exception('The following exception was raised in the caiman detrend_df_f method for SampleID: '
                                    + self.current_sample_id + '\n\n' + tb)
                if self.idx_components[self.ix] != row['ROI_State']['cnmf_idx']:
                    raise Exception(
                        'CNMF component index Mismatch Error! Something went very wrong. Check the indices of your '
                        'CNMF components from SampleID: ' + self.current_sample_id +
                        '\nIndex in pickle is: ' + str(self.idx_components[self.ix]) +
                        '\nIndex in dataframe is: ' + str(row['ROI_State']['cnmf_idx']) + '\n at iloc: ' + str(index))

            self.t.df.at[index, '_DETREND_DF_O_F'] = dfof_curves[self.idx_components[self.ix]]
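
# A minimal, self-contained sketch of the caching pattern used above: dF/F curves are
# computed once per SampleID and then reused row by row, indexed by the per-sample
# counter. The column names and the stand-in for caiman's detrend_df_f are hypothetical.
import numpy as np
import pandas as pd

toy_df = pd.DataFrame({'SampleID': ['a', 'a', 'b', 'b'], 'cnmf_idx': [0, 1, 0, 1]})
toy_df['_DETREND_DF_O_F'] = [np.array([], dtype=np.float64)] * len(toy_df)

def toy_detrend_df_f(n_components=2, n_frames=5):
    # stand-in for caiman's detrend_df_f: one dF/F trace per component
    return np.random.default_rng(0).random((n_components, n_frames))

current_sample, ix, curves = None, 0, None
for index, row in toy_df.iterrows():
    ix += 1
    if row['SampleID'] != current_sample:
        ix = 0                                   # restart the per-sample row counter
        current_sample = row['SampleID']
        curves = toy_detrend_df_f()              # compute dF/F once per sample
    toy_df.at[index, '_DETREND_DF_O_F'] = curves[ix]  # reuse the cached curves per row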
Example #2
def main():
    pass  # For compatibility between running under Spyder and the CLI

    #%% First set up some parameters

    # dataset dependent parameters
    display_images = False  # Set this to true to show movies and plots
    fname = ['Sue_2x_3000_40_-46.tif']  # filename to be processed
    fr = 30  # imaging rate in frames per second
    decay_time = 0.4  # length of a typical transient in seconds

    # motion correction parameters
    niter_rig = 1  # number of iterations for rigid motion correction
    max_shifts = (6, 6)  # maximum allowed rigid shift
    # for parallelization split the movies in num_splits chunks across time
    splits_rig = 56
    # start a new patch for pw-rigid motion correction every x pixels
    strides = (48, 48)
    # overlap between patches (size of patch = strides + overlaps)
    overlaps = (24, 24)
    # for parallelization split the movies in num_splits chunks across time
    splits_els = 56
    upsample_factor_grid = 4  # upsample factor to avoid smearing when merging patches
    # maximum deviation allowed for patch with respect to rigid shifts
    max_deviation_rigid = 3

    # parameters for source extraction and deconvolution
    p = 1  # order of the autoregressive system
    gnb = 2  # number of global background components
    merge_thresh = 0.8  # merging threshold, max correlation allowed
    # half-size of the patches in pixels. e.g., if rf=25, patches are 50x50
    rf = 15
    stride_cnmf = 6  # amount of overlap between the patches in pixels
    K = 4  # number of components per patch
    gSig = [4, 4]  # expected half size of neurons
    # initialization method (use 'sparse_nmf' if analyzing dendritic data)
    init_method = 'greedy_roi'
    is_dendrites = False  # flag for analyzing dendritic data
    # sparsity penalty for dendritic data analysis through sparse NMF
    alpha_snmf = None

    # parameters for component evaluation
    min_SNR = 2.5  # signal to noise ratio for accepting a component
    rval_thr = 0.8  # space correlation threshold for accepting a component
    cnn_thr = 0.8  # threshold for CNN based classifier

    #%% download the dataset if it's not present in your folder
    if fname[0] in ['Sue_2x_3000_40_-46.tif', 'demoMovie.tif']:
        fname = [download_demo(fname[0])]

#%% play the movie
# playing the movie using opencv. It requires loading the movie in memory. To
# close the video press q

    m_orig = cm.load_movie_chain(fname[:1])
    downsample_ratio = 0.2
    offset_mov = -np.min(m_orig[:100])
    moviehandle = m_orig.resize(1, 1, downsample_ratio)
    if display_images:
        moviehandle.play(gain=10, offset=offset_mov, fr=30, magnification=2)

#%% start a cluster for parallel processing
    c, dview, n_processes = cm.cluster.setup_cluster(backend='local',
                                                     n_processes=None,
                                                     single_thread=False)

    #%%% MOTION CORRECTION
    # first we create a motion correction object with the parameters specified
    min_mov = cm.load(fname[0], subindices=range(200)).min()
    # this will be subtracted from the movie to make it non-negative

    mc = MotionCorrect(fname[0],
                       min_mov,
                       dview=dview,
                       max_shifts=max_shifts,
                       niter_rig=niter_rig,
                       splits_rig=splits_rig,
                       strides=strides,
                       overlaps=overlaps,
                       splits_els=splits_els,
                       upsample_factor_grid=upsample_factor_grid,
                       max_deviation_rigid=max_deviation_rigid,
                       shifts_opencv=True,
                       nonneg_movie=True)
    # note that the file is not loaded in memory

    #%% Run piecewise-rigid motion correction using NoRMCorre
    mc.motion_correct_pwrigid(save_movie=True)
    m_els = cm.load(mc.fname_tot_els)
    bord_px_els = np.ceil(
        np.maximum(np.max(np.abs(mc.x_shifts_els)),
                   np.max(np.abs(mc.y_shifts_els)))).astype(int)
    # maximum shift to be used for trimming against NaNs
    #%% compare with original movie
    moviehandle = cm.concatenate([
        m_orig.resize(1, 1, downsample_ratio) + offset_mov,
        m_els.resize(1, 1, downsample_ratio)
    ],
                                 axis=2)
    display_images = False
    if display_images:
        moviehandle.play(fr=60, q_max=99.5, magnification=2,
                         offset=0)  # press q to exit

#%% MEMORY MAPPING
# memory map the file in order 'C'
    fnames = mc.fname_tot_els  # name of the pw-rigidly corrected file.
    border_to_0 = bord_px_els  # number of pixels to exclude
    fname_new = cm.save_memmap(fnames,
                               base_name='memmap_',
                               order='C',
                               border_to_0=bord_px_els)  # exclude borders

    # now load the file
    Yr, dims, T = cm.load_memmap(fname_new)
    d1, d2 = dims
    images = np.reshape(Yr.T, [T] + list(dims), order='F')
    # load frames in python format (T x X x Y)

    #%% restart cluster to clean up memory
    cm.stop_server(dview=dview)
    c, dview, n_processes = cm.cluster.setup_cluster(backend='local',
                                                     n_processes=None,
                                                     single_thread=False)

    #%% RUN CNMF ON PATCHES

    # First extract spatial and temporal components on patches and combine them
    # for this step deconvolution is turned off (p=0)
    t1 = time.time()

    cnm = cnmf.CNMF(n_processes=1,
                    k=K,
                    gSig=gSig,
                    merge_thresh=merge_thresh,
                    p=0,
                    dview=dview,
                    rf=rf,
                    stride=stride_cnmf,
                    memory_fact=1,
                    method_init=init_method,
                    alpha_snmf=alpha_snmf,
                    only_init_patch=False,
                    gnb=gnb,
                    border_pix=bord_px_els)
    cnm = cnm.fit(images)

    #%% plot contours of found components
    Cn = cm.local_correlations(images.transpose(1, 2, 0))
    Cn[np.isnan(Cn)] = 0
    plt.figure()
    crd = plot_contours(cnm.A, Cn, thr=0.9)
    plt.title('Contour plots of found components')

    #%% COMPONENT EVALUATION
    # the components are evaluated in three ways:
    #   a) the shape of each component must be correlated with the data
    #   b) a minimum peak SNR is required over the length of a transient
    #   c) each shape passes a CNN based classifier

    idx_components, idx_components_bad, SNR_comp, r_values, cnn_preds = \
        estimate_components_quality_auto(images, cnm.A, cnm.C, cnm.b, cnm.f,
                                         cnm.YrA, fr, decay_time, gSig, dims,
                                         dview=dview, min_SNR=min_SNR,
                                         r_values_min=rval_thr, use_cnn=False,
                                         thresh_cnn_min=cnn_thr)
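
    # The split into accepted and rejected components is driven by the per-component
    # scores returned above: SNR_comp (peak SNR), r_values (space correlation) and
    # cnn_preds (CNN score, unused here since use_cnn=False). A rough sketch of the
    # thresholding idea, on new names only; not the exact caiman logic:
    approx_accepted = np.where((SNR_comp > min_SNR) & (r_values > rval_thr))[0]
    approx_rejected = np.setdiff1d(np.arange(len(SNR_comp)), approx_accepted)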

    #%% PLOT COMPONENTS

    if display_images:
        plt.figure()
        plt.subplot(121)
        crd_good = cm.utils.visualization.plot_contours(cnm.A[:,
                                                              idx_components],
                                                        Cn,
                                                        thr=.8,
                                                        vmax=0.75)
        plt.title('Contour plots of accepted components')
        plt.subplot(122)
        crd_bad = cm.utils.visualization.plot_contours(
            cnm.A[:, idx_components_bad], Cn, thr=.8, vmax=0.75)
        plt.title('Contour plots of rejected components')

#%% VIEW TRACES (accepted and rejected)

    if display_images:
        view_patches_bar(Yr,
                         cnm.A.tocsc()[:, idx_components],
                         cnm.C[idx_components],
                         cnm.b,
                         cnm.f,
                         dims[0],
                         dims[1],
                         YrA=cnm.YrA[idx_components],
                         img=Cn)

        view_patches_bar(Yr,
                         cnm.A.tocsc()[:, idx_components_bad],
                         cnm.C[idx_components_bad],
                         cnm.b,
                         cnm.f,
                         dims[0],
                         dims[1],
                         YrA=cnm.YrA[idx_components_bad],
                         img=Cn)

#%% RE-RUN seeded CNMF on accepted patches to refine and perform deconvolution
    A_in, C_in, b_in, f_in = cnm.A[:, idx_components], cnm.C[
        idx_components], cnm.b, cnm.f
    cnm2 = cnmf.CNMF(n_processes=1,
                     k=A_in.shape[-1],
                     gSig=gSig,
                     p=p,
                     dview=dview,
                     merge_thresh=merge_thresh,
                     Ain=A_in,
                     Cin=C_in,
                     b_in=b_in,
                     f_in=f_in,
                     rf=None,
                     stride=None,
                     gnb=gnb,
                     method_deconvolution='oasis',
                     check_nan=True)

    cnm2 = cnm2.fit(images)

    #%% Extract DF/F values

    F_dff = detrend_df_f(cnm2.A,
                         cnm2.b,
                         cnm2.C,
                         cnm2.f,
                         YrA=cnm2.YrA,
                         quantileMin=8,
                         frames_window=250)
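
    # F_dff is an (n_components x T) array of detrended dF/F traces, one row per
    # component of cnm2. A quick optional look at the first trace (sketch only):
    if display_images:
        plt.figure()
        plt.plot(F_dff[0])
        plt.title('Detrended DF/F of component 0')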

    #%% Show final traces
    cnm2.view_patches(Yr, dims=dims, img=Cn)

    #%% STOP CLUSTER and clean up log files
    cm.stop_server(dview=dview)
    log_files = glob.glob('*_LOG_*')
    for log_file in log_files:
        os.remove(log_file)

#%% reconstruct denoised movie
    denoised = cm.movie(cnm2.A.dot(cnm2.C) + cnm2.b.dot(cnm2.f)).reshape(
        dims + (-1, ), order='F').transpose([2, 0, 1])
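
    # The reconstruction is the low-rank neural term A.dot(C) plus the background
    # term b.dot(f), reshaped with Fortran order from (pixels x frames) back to
    # (d1, d2, T) and transposed to caiman's (T, d1, d2) movie convention.
    # Optional shape sanity check (a sketch, not part of the original demo):
    assert denoised.shape == (T, dims[0], dims[1])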

    #%% play alongside the original data
    moviehandle = cm.concatenate([
        m_els.resize(1, 1, downsample_ratio),
        denoised.resize(1, 1, downsample_ratio)
    ],
                                 axis=2)
    if display_images:
        moviehandle.play(fr=60, gain=15, magnification=2,
                         offset=0)  # press q to exit
# tail of a results-saving call whose opening was cut off in this snippet;
# np.savez with a hypothetical file name is a plausible reconstruction
np.savez('analysis_results.npz',
         idx_components=idx_components,
         idx_components_bad=idx_components_bad,
         SNR_comp=SNR_comp,
         cnn_preds=cnn_preds,
         r_values=r_values)

#%%
plt.figure()
plt.subplot(121)
crd_good = cm.utils.visualization.plot_contours(cnm2.A[:, idx_components],
                                                Cn,
                                                thr=.96,
                                                vmax=0.5)
plt.title('Contour plots of accepted components')
plt.subplot(122)
crd_bad = cm.utils.visualization.plot_contours(cnm2.A[:, idx_components_bad],
                                               Cn,
                                               thr=.96,
                                               vmax=0.5)
plt.title('Contour plots of rejected components')
#%%

#%% Extract DF/F values

F_dff = detrend_df_f(cnm2.A[:, idx_components],
                     cnm2.b,
                     cnm2.C[idx_components],
                     cnm2.f,
                     YrA=cnm2.YrA[idx_components],
                     quantileMin=8,
                     frames_window=250)
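
# Restricting detrend_df_f to the accepted components just means slicing the spatial
# matrix A by columns and C / YrA by rows with the same index set, so row i of F_dff
# corresponds to component idx_components[i].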
Example #4
                 b_in=b_in,
                 f_in=f_in,
                 rf=None,
                 stride=None,
                 gnb=gnb,
                 method_deconvolution='oasis',
                 check_nan=True)

cnm2 = cnm2.fit(images)

#%% Extract DF/F values

F_dff = detrend_df_f(cnm2.A,
                     cnm2.b,
                     cnm2.C,
                     cnm2.f,
                     YrA=cnm2.YrA,
                     quantileMin=8,
                     frames_window=250)

#%% Show final traces
cnm2.view_patches(Yr, dims=dims, img=Cn)

#%% STOP CLUSTER and clean up log files
cm.stop_server(dview=dview)
log_files = glob.glob('*_LOG_*')
for log_file in log_files:
    os.remove(log_file)

#%% reconstruct denoised movie
denoised = cm.movie(cnm2.A.dot(cnm2.C) +
                    cnm2.b.dot(cnm2.f)).reshape(dims + (-1,), order='F').transpose([2, 0, 1])
Example #5
plt.subplot(122)
crd_bad = cm.utils.visualization.plot_contours(
    cnm2.A[:, idx_components_bad], Cn, thr=.95, vmax=0.25)
plt.title('Contour plots of rejected components')

#%% VIEW TRACES (accepted and rejected)
view_patches_bar(Yr, cnm2.A.tocsc()[:, idx_components], cnm2.C[idx_components],
                 cnm2.b, cnm2.f, dims[0], dims[1], YrA=cnm2.YrA[idx_components],
                 img=Cn)

view_patches_bar(Yr, cnm2.A.tocsc()[:, idx_components_bad], cnm2.C[idx_components_bad],
                 cnm2.b, cnm2.f, dims[0], dims[1], YrA=cnm2.YrA[idx_components_bad],
                 img=Cn)
#%% Extract DF/F values

F_dff = detrend_df_f(cnm2.A, cnm2.b, cnm2.C, cnm2.f, YrA=cnm2.YrA,
                     quantileMin=8, frames_window=250)

#%% Show final traces
cnm2.view_patches(Yr, dims=dims, img=Cn)

#%% STOP CLUSTER and clean up log files
cm.stop_server(dview=dview)
log_files = glob.glob('*_LOG_*')
for log_file in log_files:
    os.remove(log_file)

#%% reconstruct denoised movie
denoised = cm.movie(cnm2.A.dot(cnm2.C) +
                    cnm2.b.dot(cnm2.f)).reshape(dims + (-1,), order='F').transpose([2, 0, 1])

#%% play alongside the original data
Example #6

big_string = ''
for substring in my_list:
    big_string = big_string + ' ' + substring

A = pcf.cnmf.estimates.A
F = pcf.cnmf.estimates.C + pcf.cnmf.estimates.YrA
b = pcf.cnmf.estimates.b
f = pcf.cnmf.estimates.f
B = A.T.dot(b).dot(f)
import scipy.ndimage as nd
Df = nd.percentile_filter(B, 10, (1000, 1))
plt.figure()
plt.plot(B[49] + pcf.cnmf.estimates.C[49] + pcf.cnmf.estimates.R[49])

#%% With flag_auto=False, how does the dF/F look?
import caiman.source_extraction.cnmf.utilities as ut
flag_dff = ut.detrend_df_f(A, b, pcf.cnmf.estimates.C, f, YrA=pcf.cnmf.estimates.YrA, quantileMin=8,
                           frames_window=500, flag_auto=False, use_fast=False, detrend_only=False)
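# note: Yr, C and bl are not defined in this excerpt; they come from the surrounding analysis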
slow_dff = ut.extract_DF_F(Yr, A, C, bl, quantileMin=8, frames_window=200, block_size=400, dview=None)

fig, ax = plt.subplots(2)
ax[0].plot(pcf.cnmf.estimates.F_dff[49])
ax[1].plot(flag_dff[49])

plt.figure()
plt.plot(flag_dff[49], label='auto=False')
plt.plot(pcf.cnmf.estimates.F_dff[49], label='auto=True')
plt.legend()
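
# Conceptually, detrend_df_f with flag_auto=False subtracts and divides by a running
# quantile baseline of each trace. A rough standalone numpy/scipy sketch of that idea
# (toy data; not the exact caiman implementation, which also handles the background):
import numpy as np
import scipy.ndimage as nd

toy_trace = np.random.default_rng(0).random(1000) + 5.0   # toy raw fluorescence trace
baseline = nd.percentile_filter(toy_trace, 8, size=500)   # running 8th-percentile baseline
toy_dff = (toy_trace - baseline) / baseline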

Example #7
def caiman_main_light_weight(fr, fnames, z=0, dend=False):
    """
    Main function to compute the caiman algorithm. For more details see github and papers
    fpath(str): Folder where to store the plots
    fr(int): framerate
    fnames(list-str): list with the names of the files to be computed together
    z(array): vector with the values of z relative to y
    dend(bool): Boleean to change parameters to look for neurons or dendrites
    display_images(bool): to display and save different plots
    returns
    F_dff(array): array with the dff of the components
    com(array): matrix with the position values of the components as given by caiman
    cnm(struct): struct with different stimates and returns from caiman"""

    # parameters
    decay_time = 0.4  # length of a typical transient in seconds

    # Look for the best parameters for this 2p system and never change them again :)
    # motion correction parameters
    niter_rig = 1  # number of iterations for rigid motion correction
    max_shifts = (3, 3)  # maximum allowed rigid shift
    splits_rig = 10  # for parallelization split the movies in num_splits chunks across time
    strides = (96, 96)  # start a new patch for pw-rigid motion correction every x pixels
    overlaps = (48, 48)  # overlap between patches (size of patch = strides + overlaps)
    splits_els = 10  # for parallelization split the movies in num_splits chunks across time
    upsample_factor_grid = 4  # upsample factor to avoid smearing when merging patches
    max_deviation_rigid = 3  # maximum deviation allowed for patch with respect to rigid shifts

    # parameters for source extraction and deconvolution
    p = 1  # order of the autoregressive system
    gnb = 2  # number of global background components
    merge_thresh = 0.8  # merging threshold, max correlation allowed
    rf = 25  # half-size of the patches in pixels. e.g., if rf=25, patches are 50x50
    stride_cnmf = 10  # amount of overlap between the patches in pixels
    K = 25  # number of components per patch

    if dend:
        gSig = [1, 1]  # expected half size of dendritic components
        init_method = 'sparse_nmf'  # initialization method for dendritic data
        alpha_snmf = 1e-6  # sparsity penalty for dendritic data analysis through sparse NMF
    else:
        gSig = [3, 3]  # expected half size of neurons
        init_method = 'greedy_roi'  # initialization method for somatic data
        alpha_snmf = None  # sparsity penalty (only used with sparse NMF)

    # parameters for component evaluation
    min_SNR = 2.5  # signal to noise ratio for accepting a component
    rval_thr = 0.8  # space correlation threshold for accepting a component
    cnn_thr = 0.8  # threshold for CNN based classifier

    dview = None  # parallel processing keeps crashing.

    print('***************Starting motion correction*************')
    print('files:')
    print(fnames)

    # %% start a cluster for parallel processing
    # c, dview, n_processes = cm.cluster.setup_cluster(backend='local', n_processes=None, single_thread=False)

    # %%% MOTION CORRECTION
    # first we create a motion correction object with the parameters specified
    min_mov = cm.load(fnames[0]).min()
    # this will be subtracted from the movie to make it non-negative

    mc = MotionCorrect(fnames,
                       min_mov,
                       dview=dview,
                       max_shifts=max_shifts,
                       niter_rig=niter_rig,
                       splits_rig=splits_rig,
                       strides=strides,
                       overlaps=overlaps,
                       splits_els=splits_els,
                       upsample_factor_grid=upsample_factor_grid,
                       max_deviation_rigid=max_deviation_rigid,
                       shifts_opencv=True,
                       nonneg_movie=True)
    # note that the file is not loaded in memory

    # %% Run piecewise-rigid motion correction using NoRMCorre
    mc.motion_correct_pwrigid(save_movie=True)
    bord_px_els = np.ceil(
        np.maximum(np.max(np.abs(mc.x_shifts_els)),
                   np.max(np.abs(mc.y_shifts_els)))).astype(int)

    totdes = [np.nansum(mc.x_shifts_els), np.nansum(mc.y_shifts_els)]
    print('***************Motion correction has ended*************')
    # maximum shift to be used for trimming against NaNs

    # %% MEMORY MAPPING
    # memory map the file in order 'C'
    fnames = mc.fname_tot_els  # name of the pw-rigidly corrected file.
    fname_new = cm.save_memmap(fnames,
                               base_name='memmap_',
                               order='C',
                               border_to_0=bord_px_els)  # exclude borders

    # now load the file
    Yr, dims, T = cm.load_memmap(fname_new)
    d1, d2 = dims
    images = np.reshape(Yr.T, [T] + list(dims), order='F')
    # load frames in python format (T x X x Y)

    # %% restart cluster to clean up memory
    # cm.stop_server(dview=dview)
    # c, dview, n_processes = cm.cluster.setup_cluster(backend='local', n_processes=None, single_thread=False)

    # %% RUN CNMF ON PATCHES
    print('***************Running CNMF...*************')

    # First extract spatial and temporal components on patches and combine them
    # for this step deconvolution is turned off (p=0)

    cnm = cnmf.CNMF(n_processes=1,
                    k=K,
                    gSig=gSig,
                    merge_thresh=merge_thresh,
                    p=0,
                    dview=dview,
                    rf=rf,
                    stride=stride_cnmf,
                    memory_fact=1,
                    method_init=init_method,
                    alpha_snmf=alpha_snmf,
                    only_init_patch=False,
                    gnb=gnb,
                    border_pix=bord_px_els)
    cnm = cnm.fit(images)

    # %% COMPONENT EVALUATION
    # the components are evaluated in three ways:
    #   a) the shape of each component must be correlated with the data
    #   b) a minimum peak SNR is required over the length of a transient
    #   c) each shape passes a CNN based classifier

    idx_components, idx_components_bad, SNR_comp, r_values, cnn_preds = \
        estimate_components_quality_auto(images, cnm.estimates.A, cnm.estimates.C, cnm.estimates.b,
                                         cnm.estimates.f,
                                         cnm.estimates.YrA, fr, decay_time, gSig, dims,
                                         dview=dview, min_SNR=min_SNR,
                                         r_values_min=rval_thr, use_cnn=False,
                                         thresh_cnn_min=cnn_thr)

    # %% RE-RUN seeded CNMF on accepted patches to refine and perform deconvolution
    A_in, C_in, b_in, f_in = cnm.estimates.A[:, idx_components], cnm.estimates.C[
        idx_components], cnm.estimates.b, cnm.estimates.f
    cnm2 = cnmf.CNMF(n_processes=1,
                     k=A_in.shape[-1],
                     gSig=gSig,
                     p=p,
                     dview=dview,
                     merge_thresh=merge_thresh,
                     Ain=A_in,
                     Cin=C_in,
                     b_in=b_in,
                     f_in=f_in,
                     rf=None,
                     stride=None,
                     gnb=gnb,
                     method_deconvolution='oasis',
                     check_nan=True)

    print('***************Fit*************')
    cnm2 = cnm2.fit(images)

    print('***************Extracting DF/F values*************')
    # %% Extract DF/F values

    # cm.stop_server(dview=dview)
    try:
        F_dff = detrend_df_f(cnm2.estimates.A,
                             cnm2.estimates.b,
                             cnm2.estimates.C,
                             cnm2.estimates.f,
                             YrA=cnm2.estimates.YrA,
                             quantileMin=8,
                             frames_window=250)
        # F_dff = detrend_df_f(cnm.A, cnm.b, cnm.C, cnm.f, YrA=cnm.YrA, quantileMin=8, frames_window=250)
    except Exception:
        F_dff = cnm2.estimates.C * np.nan
        print('WARNING: detrend_df_f failed, returning NaN traces')

    print('***************stopping cluster*************')
    # %% STOP CLUSTER and clean up log files
    # cm.stop_server(dview=dview)

    # ***************************************************************************************
    # Preparing output data
    # F_dff  -> DFF values,  is a matrix [number of neurons, length recording]

    # com  --> center of mass,  is a matrix [number of neurons, 2]
    print('***************preparing output data*************')

    if len(dims) <= 2:
        if len(z) == 1:
            com = np.concatenate(
                (cm.base.rois.com(cnm2.estimates.A, dims[0], dims[1]),
                 np.zeros((cnm2.estimates.A.shape[1], 1)) + z), 1)
        elif len(z) == dims[0]:
            auxcom = cm.base.rois.com(cnm2.estimates.A, dims[0], dims[1])
            zy = np.zeros((auxcom.shape[0], 1))
            for y in np.arange(auxcom.shape[0]):
                zy[y, 0] = z[int(auxcom[y, 0])]
            com = np.concatenate((auxcom, zy), 1)
        else:
            print('WARNING: z was not correctly defined; only X and Y values are stored, z set to zeros')
            print('length of z was: ' + str(len(z)))
            com = np.concatenate(
                (cm.base.rois.com(cnm2.estimates.A, dims[0], dims[1]),
                 np.zeros((cnm2.estimates.A.shape[1], 1))), 1)
    else:
        com = cm.base.rois.com(cnm2.estimates.A, dims[0], dims[1], dims[2])

    return F_dff, com, cnm2, totdes, SNR_comp[idx_components]
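

# A minimal usage sketch (hypothetical file name and frame rate; z is passed as an
# array-like, which is what the length checks above expect):
if __name__ == '__main__':
    F_dff, com, cnm2, totdes, snr_accepted = caiman_main_light_weight(
        fr=30, fnames=['example_session.tif'], z=np.zeros(1), dend=False)
    print(F_dff.shape, com.shape)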