def _auto_compute(func, data, t, axis = 0, out = None, n = None):
    """Run a single-level auto analysis (correlation or difference).

    Normalizes the input through :func:`_set_data`, prepares (or reuses)
    the output/count buffers via :func:`_set_out_count`, invokes *func*
    and returns the transposed result together with the count array.
    """
    print("Computing...")
    frames, ndim = _set_data(data, axis)
    out, count = _set_out_count(frames.shape, out, n)
    if t is None:
        # default time index: one tick per frame along the frame axis
        t = np.arange(frames.shape[-2], dtype = I64DTYPE)
    out = func(frames, t, out, out)
    if ndim == 1:
        # 1D input: drop the extra leading axis again (presumably added
        # by _set_data — confirm against its implementation)
        out = out[:, 0]
    _add_count_auto(t, count)
    return _transpose_data(out), count
def _cross_compute(func, f1, f2, t1, t2, axis = 0, out = None, n = None):
    """Run a single-level cross analysis (correlation or difference).

    Normalizes both inputs through :func:`_set_data`, prepares (or reuses)
    the output/count buffers via :func:`_set_out_count`, invokes *func*
    and returns the transposed result together with the count array.
    """
    print("Computing...")
    data1, ndim = _set_data(f1, axis)
    data2, ndim = _set_data(f2, axis)
    out, count = _set_out_count(data1.shape, out, n)
    if t1 is None:
        # default time index: one tick per frame of the first signal
        t1 = np.arange(data1.shape[-2], dtype = I64DTYPE)
    if t2 is None:
        # second signal shares the first signal's time axis by default
        t2 = t1
    out = func(data1, data2, t1, t2, out, out)
    if ndim == 1:
        # 1D input: drop the extra leading axis again (presumably added
        # by _set_data — confirm against its implementation)
        out = out[:, 0]
    _add_count_cross(t1, t2, count)
    return _transpose_data(out), count
def asmemmaps(basename, video, count=None):
    """Loads multi-frame video into numpy memmaps.

    Actual data is written to numpy files with the provided basename and
    subscripted by source identifier (index), e.g. "basename_0.npy" and
    "basename_1.npy" in case of dual-frame video source.

    Parameters
    ----------
    basename: str
       Base name for the filenames of the videos.
    video : iterable
       A multi-frame iterator object.
    count : int, optional
       Defines how many multi-frames are in the video. If not provided it
       is determined by len().

    Returns
    -------
    out : tuple of ndarray
       One memory-mapped array per source in the multi-frame video.
    """
    if count is None:
        # BUG FIX: was `len(count)`, i.e. len(None), which always raised
        # TypeError; the intent (mirroring asarrays) is the video's length.
        count = len(video)

    def _load(array, frame):
        # copy one frame into its preallocated slot
        array[...] = frame

    def _empty_arrays(frames):
        # one .npy memmap per source, preallocated for `count` frames
        return tuple(np.lib.format.open_memmap(
                         basename + "_{}.npy".format(i), "w+",
                         shape=(count,) + frame.shape, dtype=frame.dtype)
                     for i, frame in enumerate(frames))

    print("Writing to memmap...")
    print_progress(0, count)

    # first multi-frame defines shape/dtype of the output memmaps
    frames = next(video)
    out = _empty_arrays(frames)
    for i, frame in enumerate(frames):
        _load(out[i][0], frame)

    for j, frames in enumerate(video):
        print_progress(j + 1, count)
        for i, frame in enumerate(frames):
            _load(out[i][j + 1], frame)

    print_progress(count, count)
    return out
def _auto_compute_multi(f, t, axis = 0, period = 1, n = 2**5, binning = True, nlog = None, correlate = True):
    """Multi-level auto analysis: one linear-spaced "fast" computation plus
    `nlog` progressively binned "slow" computations.

    NOTE(review): reconstructed from whitespace-mangled source; nesting
    (in particular the placement of the debug print) should be confirmed
    against project history.
    """
    # select the compute kernel and the per-level data reduction strategy
    if correlate == True:
        func = acorr
        if binning == True:
            _bin = bin_data
        else:
            _bin = slice_data
    else:
        func = adiff
        if binning == True:
            _bin = random_select_data
        else:
            _bin = slice_data
    assert n > 4
    n_fast = period * n
    n_slow = n
    if nlog is None:
        # count how many times the data can be halved while still holding
        # at least n points; use one fewer level than that
        n_decades = 0
        while len(t)// (2**(n_decades)) >= n:
            n_decades += 1
        nlog = n_decades -1
        print(n_decades)
    nlog = int(nlog)
    assert nlog >= 0
    # the fast (linear-time) result defines the output shape
    out_fast, count_fast = func(f,t, axis = axis, n = n_fast)
    shape = out_fast.shape[0:-1]
    out_slow = np.zeros((nlog,) + shape[0:-1] + (n_slow,) + shape[-1:], FDTYPE)
    out_slow = _transpose_data(out_slow)
    count_slow = np.zeros((nlog, n_slow,),IDTYPE)
    for i in range(nlog):
        # reduce (bin/select/slice) the data once per level, so level i
        # works on data shortened by a factor 2**(i+1)
        f = _bin(f,axis)
        t_slow = np.arange(len(t)//(2**(i+1)))
        # accumulate in-place into the preallocated slow buffers
        func(f,t_slow, axis = axis, out = (out_slow[i], count_slow[i]))
    return (out_fast, count_fast), (out_slow, count_slow)
def asarrays(video, count=None):
    """Loads multi-frame video into numpy arrays.

    Parameters
    ----------
    video : iterable
       A multi-frame iterator object.
    count : int, optional
       Defines how many frames are in the video. If not provided and the
       video has an undefined length, the whole video is loaded through
       np.asarray, which means the data is copied in one pass.

    Returns
    -------
    out : tuple of ndarray
       One array per source in the multi-frame video.
    """
    def _fill(target, frame):
        # copy one frame into its preallocated slot
        target[...] = frame

    print("Writing to array...")

    if count is None:
        try:
            count = len(video)
        except TypeError:
            # length unknown: materialize everything at once and split
            # per-source along axis 1
            stacked = np.asarray(video)
            return tuple(stacked[:, k] for k in range(stacked.shape[1]))

    print_progress(0, count)

    # first multi-frame defines shape/dtype of the output arrays
    first = next(video)
    out = tuple(np.empty(shape=(count,) + frame.shape, dtype=frame.dtype)
                for frame in first)
    for k, frame in enumerate(first):
        _fill(out[k][0], frame)

    for step, frames in enumerate(video):
        print_progress(step + 1, count)
        for k, frame in enumerate(frames):
            _fill(out[k][step + 1], frame)

    print_progress(count, count)
    return out
def cross_analyze_iter(data, t1, t2, period = 1, level = 4, chunk_size = 256, binning = True, method = "corr", auto_background = False, nlog = None, return_background = False):
    """Chunked, multi-level cross analysis of a dual-frame iterator.

    Consumes `(x1, x2)` frame pairs from `data`, accumulates them into
    per-level chunk buffers (via `_add_data2`) and, whenever a half-chunk
    completes at a given level, folds that chunk into a linear "fast"
    result (level 0) or one of the log-spaced "slow" results (levels >= 1).
    Yields the running `((out_fast, count_fast), (out_slow, count_slow))`
    results — optionally together with the measured background — after
    every consumed frame.

    NOTE(review): reconstructed from whitespace-mangled source; the nesting
    of the trailing yield (per-frame, as reconstructed here, vs. once after
    the loop) should be confirmed against project history.
    """
    # choose the compute kernel
    if method == "corr":
        f = ccorr
    elif method == "diff":
        f = cdiff
        #binning = False
    else:
        raise ValueError("Unknown method '{}'".format(method))
    half_chunk_size = chunk_size // 2
    assert chunk_size % 2 == 0
    assert level > 2
    n = 2 ** level
    assert n <= half_chunk_size
    n_fast = period * n
    n_slow = n
    if nlog is None:
        # number of decades: how many times chunk_size*2**d still fits in t1
        n_decades = 0
        while len(t1)//(chunk_size * 2**(n_decades)) > 0:
            n_decades += 1
    else:
        n_decades = nlog + 1
    assert n_decades >= 1
    t_slow = np.arange(len(t1))
    print("Computing...")
    print_progress(0, len(t1))
    for i,d in enumerate(data):
        x1,x2 = d
        if i == 0:
            # first frame pair defines shapes: allocate all output and
            # per-level chunk buffers once
            shape = x1.shape
            out_fast = np.zeros(shape[0:-1] + (n_fast,) + shape[-1:], FDTYPE)
            out_fast = _transpose_data(out_fast)
            count_fast = np.zeros((n_fast,),IDTYPE)
            out_slow = np.zeros((n_decades-1,) + shape[0:-1] + (n_slow,) + shape[-1:], FDTYPE)
            out_slow = _transpose_data(out_slow)
            count_slow = np.zeros((n_decades-1, n_slow,),IDTYPE)
            fdata1 = np.empty((n_decades,) + shape[0:-1] + (chunk_size,) + shape[-1:], CDTYPE)
            fdata2 = np.empty((n_decades,) + shape[0:-1] + (chunk_size,) + shape[-1:], CDTYPE)
        _add_data2(i,x1, x2, fdata1,fdata2, binning)
        # level 0 ("fast"): process every completed half-chunk
        if i % (half_chunk_size) == half_chunk_size -1:
            ichunk = i//half_chunk_size
            # fstart/fstop: ping-pong positions inside the chunk buffer;
            # istart/istop: corresponding positions in the time arrays
            fstart1 = half_chunk_size * (ichunk%2)
            fstop1 = fstart1 + half_chunk_size
            fstart2 = half_chunk_size * ((ichunk-1)%2)
            fstop2 = fstart2 + half_chunk_size
            istart1 = ichunk * half_chunk_size
            istop1 = istart1 + half_chunk_size
            istart2 = istart1 - half_chunk_size
            istop2 = istop1 - half_chunk_size
            if auto_background == True:
                if ichunk == 0:
                    # background estimated once, from the first half-chunk
                    bg1 = np.mean(fdata1[0,...,fstart1:fstop1,:], axis = -2)
                    bg2 = np.mean(fdata2[0,...,fstart1:fstop1,:], axis = -2)
                # subtract in place from the freshly-filled half-chunk
                np.subtract(fdata1[0,...,fstart1:fstop1,:], bg1[...,None,:], fdata1[0,...,fstart1:fstop1,:])
                np.subtract(fdata2[0,...,fstart1:fstop1,:], bg2[...,None,:], fdata2[0,...,fstart1:fstop1,:])
            if return_background == True:
                out_bg1 = np.mean(fdata1[0,...,fstart1:fstop1,:], axis = -2)
                out_bg2 = np.mean(fdata2[0,...,fstart1:fstop1,:], axis = -2)
            # current half-chunk against itself...
            f(fdata1[0,...,fstart1:fstop1,:],fdata2[0,...,fstart1:fstop1,:],t1[istart1:istop1],t2[istart1:istop1],-2,out = (out_fast, count_fast))
            if istart2 >= 0 :
                # ...and against the previous half-chunk, both orderings
                f(fdata1[0][...,fstart1:fstop1,:],fdata2[0][...,fstart2:fstop2,:],t1[istart1:istop1],t2[istart2:istop2],-2,out = (out_fast, count_fast))
                f(fdata1[0][...,fstart2:fstop2,:],fdata2[0][...,fstart1:fstop1,:],t1[istart2:istop2],t2[istart1:istop1],-2,out = (out_fast, count_fast))
            # levels >= 1 ("slow"): each level fills half as often
            for j in range(1, n_decades):
                if i % (half_chunk_size * 2**j) == half_chunk_size * 2**j -1:
                    ichunk = i//(half_chunk_size * 2**j)
                    fstart1 = half_chunk_size * (ichunk%2)
                    fstop1 = fstart1 + half_chunk_size
                    fstart2 = half_chunk_size * ((ichunk-1)%2)
                    fstop2 = fstart2 + half_chunk_size
                    istart1 = ichunk * half_chunk_size
                    istop1 = istart1 + half_chunk_size
                    istart2 = istart1 - half_chunk_size
                    istop2 = istop1 - half_chunk_size
                    if auto_background == True:
                        # bg1 = np.mean(fdata1[j,...,fstart1:fstop1,:], axis = -2)
                        # bg2 = np.mean(fdata2[j,...,fstart1:fstop1,:], axis = -2)
                        # NOTE(review): reuses the level-0 background here
                        np.subtract(fdata1[j,...,fstart1:fstop1,:], bg1[...,None,:], fdata1[j,...,fstart1:fstop1,:])
                        np.subtract(fdata2[j,...,fstart1:fstop1,:], bg2[...,None,:], fdata2[j,...,fstart1:fstop1,:])
                    f(fdata1[j,...,fstart1:fstop1,:],fdata2[j,...,fstart1:fstop1,:], t_slow[istart1:istop1], t_slow[istart1:istop1],-2, out = (out_slow[j-1],count_slow[j-1]))
                    if istart2 >= 0 :
                        f(fdata1[j,...,fstart1:fstop1,:],fdata2[j,...,fstart2:fstop2,:], t_slow[istart1:istop1], t_slow[istart2:istop2], -2, out = (out_slow[j-1],count_slow[j-1]))
                        f(fdata1[j,...,fstart2:fstop2,:],fdata2[j,...,fstart1:fstop1,:], t_slow[istart2:istop2], t_slow[istart1:istop1], -2, out = (out_slow[j-1],count_slow[j-1]))
                else:
                    # higher levels cannot have completed either
                    break
        print_progress(i+1, len(t1))
        if return_background == True:
            yield ((out_fast,count_fast), (out_slow,count_slow)), (out_bg1, out_bg2)
        else:
            yield ((out_fast,count_fast),
                   (out_slow,count_slow))