import numpy as np
from past.utils import old_div
from scipy.stats import norm

# mode_robust and mode_robust_fast (robust mode estimators) are assumed to be
# defined elsewhere in this module.


def estimate_noise_mode(traces, robust_std=False, use_mode_fast=False):
    """Estimate the noise level of each trace, under the assumption that the
    signals are sparse and strictly positive. The last dimension must be time.
    """
    if use_mode_fast:
        md = mode_robust_fast(traces, axis=1)
    else:
        md = mode_robust(traces, axis=1)

    ff1 = traces - md[:, None]

    # only consider values below the mode to estimate the noise standard deviation
    ff1 = -ff1 * (ff1 < 0)

    if robust_std:
        # approximate the 25th percentile: sort the negative deviations and
        # take the value halfway into the nonzero part of each row
        ff1 = np.sort(ff1, axis=1)
        ff1[ff1 == 0] = np.nan
        Ns = np.round(np.sum(ff1 > 0, 1) * .5).astype(int)
        iqr_h = np.zeros(traces.shape[0])
        for idx, el in enumerate(ff1):
            iqr_h[idx] = el[-Ns[idx]]
        # approximate the standard deviation as iqr / 1.349
        sd_r = 2 * iqr_h / 1.349
    else:
        Ns = np.sum(ff1 > 0, 1)
        sd_r = np.sqrt(old_div(np.sum(ff1**2, 1), Ns))

    return sd_r
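# A minimal usage sketch for estimate_noise_mode, assuming mode_robust is
# available in this module: synthetic traces are built as Gaussian noise of
# known standard deviation plus sparse, strictly positive events, and the
# estimate should recover the true noise level. Names such as `true_sd` are
# illustrative only.
def _demo_estimate_noise_mode():
    rng = np.random.default_rng(0)
    n_traces, T, true_sd = 10, 5000, 0.5
    traces = rng.normal(0.0, true_sd, size=(n_traces, T))
    # sparse positive events in roughly 1% of the samples
    events = (rng.random((n_traces, T)) < 0.01) * rng.uniform(5.0, 10.0, size=(n_traces, T))
    sd_est = estimate_noise_mode(traces + events)
    print('true sd: %.2f, median estimate: %.2f' % (true_sd, np.median(sd_est)))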
def compute_event_exceptionality(traces, robust_std=False, N=5, use_mode_fast=False):
    """Define a metric and order components according to the probability of
    observing "exceptional events" (like a spike). Such probability is defined
    as the likelihood of observing the actual trace values over N consecutive
    samples given an estimated noise distribution.

    The function first estimates the noise distribution by considering the
    dispersion around the mode. This is done only using values below the mode.
    The estimation of the noise std can be made robust by using the
    approximation std = iqr / 1.349. Then, the probability of having N
    consecutive events is estimated. This probability is used to order the
    components.

    Parameters:
    -----------
    traces: ndarray
        fluorescence traces, one row per component; the last dimension is time

    robust_std: bool
        whether to estimate the noise std robustly via the interquartile range

    N: int
        number of consecutive samples over which events are evaluated

    use_mode_fast: bool
        whether to use the faster mode estimator

    Returns:
    --------
    fitness: ndarray
        value estimate of the quality of each component (the lower the better)

    erfc: ndarray
        log-probability, at each time step, of observing the N consecutive
        actual trace values given the distribution of noise

    noise_est: ndarray
        estimated noise level of each trace
    """
    T = np.shape(traces)[-1]

    if use_mode_fast:
        md = mode_robust_fast(traces, axis=1)
    else:
        md = mode_robust(traces, axis=1)

    ff1 = traces - md[:, None]

    # only consider values below the mode to estimate the noise standard deviation
    ff1 = -ff1 * (ff1 < 0)

    if robust_std:
        # approximate the 25th percentile: sort the negative deviations and
        # take the value halfway into the nonzero part of each row
        ff1 = np.sort(ff1, axis=1)
        ff1[ff1 == 0] = np.nan
        Ns = np.round(np.sum(ff1 > 0, 1) * .5).astype(int)
        iqr_h = np.zeros(traces.shape[0])
        for idx, el in enumerate(ff1):
            iqr_h[idx] = el[-Ns[idx]]
        # approximate the standard deviation as iqr / 1.349
        sd_r = 2 * iqr_h / 1.349
    else:
        Ns = np.sum(ff1 > 0, 1)
        sd_r = np.sqrt(old_div(np.sum(ff1**2, 1), Ns))

    # compute z-scores; note the noise std is inflated by a factor of 3
    z = old_div((traces - md[:, None]), (3 * sd_r[:, None]))

    # probability of observing values larger than or equal to z given a normal
    # distribution with mean md and std sd_r
    erf = 1 - norm.cdf(z)

    # use the logarithm so that multiplication becomes a sum; clip at machine
    # epsilon to avoid log(0) for extreme z values
    erf = np.log(np.maximum(erf, np.finfo(float).eps))

    filt = np.ones(N)

    # moving sum of the log-probabilities over N consecutive samples
    erfc = np.apply_along_axis(lambda m: np.convolve(m, filt, mode='full'),
                               axis=1, arr=erf)
    erfc = erfc[:, :T]

    # select, for each trace, the smallest moving log-probability, i.e. the
    # most exceptional stretch of N samples
    fitness = np.min(erfc, 1)

    return fitness, erfc, sd_r
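# A minimal usage sketch for compute_event_exceptionality, under the same
# assumptions as above: a trace containing a genuine N-sample event should
# receive a much lower (more negative) fitness than a pure-noise trace, since
# fitness is the minimum moving sum of log-probabilities.
def _demo_event_exceptionality():
    rng = np.random.default_rng(1)
    T = 2000
    noise_only = rng.normal(0.0, 1.0, size=T)
    with_event = noise_only.copy()
    with_event[1000:1005] += 8.0  # a 5-sample "spike"
    traces = np.vstack([noise_only, with_event])
    fitness, erfc, noise_est = compute_event_exceptionality(traces, N=5)
    print('fitness (lower is better):', fitness)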