Example #1
 def init(self):
     # Rolling-window configuration: window sizes, input columns, statistic
     # types, and optional per-column affine transforms (scale, translation).
     self.windows = self.param['window']
     self.cols = self.param['col']
     self.types = self.param['type']
     self.translation_cols = self.param.get('translation')
     self.scale_cols = self.param.get('scale')
     # Dispatch table: statistic name -> callable taking
     # (column, scale, translation, window).
     self.move_window_mapping = {
         "mean": lambda c, s, t, w: bn.move_mean(c, w) * s + t,
         "std": lambda c, s, t, w: bn.move_std(c, w) * s,
         "var": lambda c, s, t, w: bn.move_var(c, w) * s * s,
         "min": lambda c, s, t, w: bn.move_min(c, w) * s + t,
         "max": lambda c, s, t, w: bn.move_max(c, w) * s + t,
         "rank": lambda c, s, t, w: bn.move_rank(c, w),
         "sum": lambda c, s, t, w: bn.move_sum(c, w) * s + t * w,
         "ema": lambda c, s, t, w: F.ema(
             c, 2.0 / (w + 1), start_indices=self.base.start_indices) * s + t,
         "rsi": lambda c, s, t, w: F.rsi(
             c, w, start_indices=self.base.start_indices),
         "psy": lambda c, s, t, w: F.psy(
             c, w, start_indices=self.base.start_indices),
         "bias": lambda c, s, t, w: F.bias(
             c, w, start_indices=self.base.start_indices)
     }
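The dictionary above is a dispatch table: each entry maps a statistic name to a callable that turns a raw column into a scaled, shifted rolling value. A minimal standalone sketch of the same pattern, using only bottleneck (the F.* helpers, self.param layout, and start_indices handling belong to the original framework and are not reproduced; the names and values below are illustrative only):

import numpy as np
import bottleneck as bn

# Illustrative dispatch table: statistic name -> callable taking
# (column, scale, translation, window).
move_window_mapping = {
    "mean": lambda c, s, t, w: bn.move_mean(c, w) * s + t,
    "max": lambda c, s, t, w: bn.move_max(c, w) * s + t,
    "sum": lambda c, s, t, w: bn.move_sum(c, w) * s + t * w,
}

prices = np.array([1.0, 2.0, 4.0, 3.0, 5.0, 4.0])
rolling_max = move_window_mapping["max"](prices, 1.0, 0.0, 3)
print(rolling_max)  # [nan nan 4. 4. 5. 5.] -- first window-1 values are NaN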
Example #2
import numpy as np
import bottleneck as bn

def Ts_max(A, n):
    '''
    Rolling maximum over the past n days (including the current day).
    n >= 1
    '''
    if n < 1:
        # n must be at least 1 for an n-day maximum; return the input unchanged.
        return A
    result = bn.move_max(A, n, min_count=1, axis=0)
    # Restore NaN wherever the input itself is NaN.
    result[np.isnan(A)] = np.nan
    return result
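A quick check of Ts_max on made-up data: min_count=1 lets the rolling maximum start from the very first row, and the final masking line re-inserts NaN wherever the input itself is NaN.

A = np.array([[1.0, np.nan],
              [3.0, 2.0],
              [2.0, 5.0],
              [np.nan, 4.0]])
print(Ts_max(A, 2))
# [[ 1. nan]
#  [ 3.  2.]
#  [ 3.  5.]
#  [nan  5.]]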
Example #3
def _wr(arr_h, arr_l, arr_c, arr_o, window, start_indices):
    # Rolling extremes of the high and low series over `window` bars.
    high_n = bn.move_max(arr_h, window)
    low_n = bn.move_min(arr_l, window)
    # Williams %R style ratio scaled to [0, 100]; use 50 when the range is zero.
    res = np.where(high_n - low_n != 0,
                   100 * (high_n - arr_c) / (high_n - low_n), 50)

    # Set rows whose within-group position is less than `window` to NaN,
    # since their rolling windows are incomplete or cross a group boundary.
    # Groups are delimited by `start_indices` (first row index of each group).
    pre_cnt = 0
    N = arr_c.shape[0]
    N_GROUP = start_indices.shape[0]
    j = 0
    for i in range(N):
        if j < N_GROUP and start_indices[j] == i:
            pre_cnt = 0
            j += 1
        if pre_cnt < window:
            res[i] = np.nan
        pre_cnt += 1
    return res
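A hedged usage sketch on synthetic OHLC arrays; the start_indices convention (one entry per group's first row index) is inferred from the masking loop above, and the numbers are made up purely for illustration:

import numpy as np
import bottleneck as bn

high = np.array([10.0, 11.0, 12.0, 11.5, 13.0, 12.5])
low = np.array([9.0, 9.5, 10.0, 10.5, 11.0, 11.5])
close = np.array([9.5, 10.5, 11.0, 11.0, 12.5, 12.0])
open_ = np.array([9.2, 10.0, 10.8, 11.2, 11.5, 12.3])

# A single group covering the whole series, window of 3 bars.
start_indices = np.array([0])
wr = _wr(high, low, close, open_, 3, start_indices)
# The first 3 rows are NaN; the rest are ratios on the 0-100 scale.
print(wr)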
Example #4
    def plot(self,
             window=100,
             alpha=0.2,
             save=False,
             close_plots=False,
             pre_fix=""):
        plt.figure()
        plt.title(pre_fix + "Mean")
        p = plt.plot(bn.move_mean(self.reward_hist, window=window))[0]
        if alpha > 0:
            plt.plot(self.reward_hist, color=p.get_color(), alpha=alpha)
        plt.xlim(xmin=0)
        plt.grid(True)
        if save is not False:
            plt.savefig(os.path.join(save, "move_mean.svg"))
        if not close_plots:
            plt.show()
        else:
            plt.close()

        plt.figure()
        plt.title(pre_fix + "Min")
        plt.plot(bn.move_min(self.reward_hist, window=window))
        plt.xlim(xmin=0)
        plt.grid(True)
        if save is not False:
            plt.savefig(os.path.join(save, "move_min.svg"))
        if not close_plots:
            plt.show()
        else:
            plt.close()

        plt.figure()
        plt.title(pre_fix + "Max")
        plt.plot(bn.move_max(self.reward_hist, window=window))
        plt.xlim(xmin=0)
        plt.grid(True)
        if save is not False:
            plt.savefig(os.path.join(save, "move_max.svg"))
        if not close_plots:
            plt.show()
        else:
            plt.close()
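All three blocks above follow the same recipe: plot a rolling statistic of reward_hist and, for the mean, overlay the raw series in the same colour at low alpha. The same idea in a self-contained form, on synthetic data (the class owning reward_hist is not part of the snippet, so a random walk stands in for it):

import numpy as np
import bottleneck as bn
import matplotlib.pyplot as plt

reward_hist = np.cumsum(np.random.randn(1000))  # stand-in for self.reward_hist
window = 100

plt.figure()
plt.title("Mean")
# Rolling mean in front, raw series behind it in the same colour.
p = plt.plot(bn.move_mean(reward_hist, window=window))[0]
plt.plot(reward_hist, color=p.get_color(), alpha=0.2)
plt.xlim(xmin=0)
plt.grid(True)
plt.show()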
Example #5
 def time_move_max(self, dtype, shape, order, axis, window):
     bn.move_max(self.arr, window, axis=axis)
Example #6
 def time_move_max(self, dtype, shape, window):
     bn.move_max(self.arr, window)
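Examples #5 and #6 are asv (Airspeed Velocity) timing stubs: the array they operate on is built elsewhere, in the benchmark class's setup hook, and the extra arguments mirror the class's parameter grid. A minimal sketch of how such a class might be laid out, assuming standard asv conventions; the class name and parameter values here are hypothetical:

import numpy as np
import bottleneck as bn

class TimeMoveMax:  # hypothetical name, for illustration only
    # asv runs one benchmark per combination of these parameters.
    params = [["float64"], [(1000, 1000)], ["C"], [0, 1], [10, 100]]
    param_names = ["dtype", "shape", "order", "axis", "window"]

    def setup(self, dtype, shape, order, axis, window):
        # Build the input once per parameter combination; only the call in
        # time_move_max is timed.
        self.arr = np.asarray(np.random.rand(*shape), dtype=dtype, order=order)

    def time_move_max(self, dtype, shape, order, axis, window):
        bn.move_max(self.arr, window, axis=axis)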
Example #7
def cosine_similarity_continuous(run,
                                 group,
                                 tracetype='deconvolved',
                                 trange=(0, 1),
                                 rectify=False,
                                 exclude_outliers=False,
                                 remove_group=None,
                                 drop_glm_zeros=False,
                                 weight_by_protovector=False,
                                 smooth_method=None,
                                 smooth_window=None,
                                 glm_type='simpglm'):
    """
    Calculate the cosine similarity between a GLM protovector and the population.

    Parameters
    ----------
    run : Run
    group : str
        Group in GLM.groups.
    tracetype : str
        Type of trace to compare to the GLM protovector.
    trange : 2-element tuple of float
        Time range to look at for the protovector.
    rectify : bool
        If True, set negative values of the reconstructed vector (before
        averaging across `trange`) to 0.
    exclude_outliers : bool
        Not currently supported, eventually exclude some cells from the
        calculation.
    remove_group : str, optional
        If not None, remove this group protovector from the result.
    drop_glm_zeros : bool
        If True, remove all cells in which the GLM protovector is <= 0.
    weight_by_protovector : bool
        If True, weight each cell's contribution to the cosine similarity by
        its protovector weight.
    smooth_method : {'max'}, optional
        If not None, method used to smooth the trace before calculating similarity.
        Has no effect on the protovector itself.
    smooth_window : int
        Size of smoothing window.
    glm_type : str
        Type of GLM to use.

    Returns
    -------
    np.ndarray
        Array with one entry per time point, giving the cosine similarity
        between the specified protovector and the population response.
        1 == most similar, 0 == orthogonal, -1 == anti-correlated.

    """

    glm = run.parent.glm(glm_type=glm_type)
    unit = glm.protovector(group,
                           trange=trange,
                           rectify=rectify,
                           err=-1,
                           remove_group=remove_group)
    trace = run.trace2p().trace(tracetype)

    # Drop cells with any non-finite values
    keep = np.isfinite(unit)
    keep = keep & np.all(np.isfinite(trace), axis=1)
    # Optionally drop outliers
    if exclude_outliers:
        keep = keep & cell_activity.keep(run.parent, run_type=run.run_type)
    # Optionally drop non-positive GLM coefficients
    if drop_glm_zeros:
        keep = keep & (unit > 0)
    # Do the dropping
    unit = unit[keep]
    trace = trace[keep, :]
    if smooth_method == 'max':
        trace = move_max(trace, window=smooth_window, axis=1)

    n_processes = cpu_count() - 2
    pool = Pool(processes=n_processes)
    n_frames = trace.shape[1]
    chunksize = min(200, n_frames // n_processes)
    result = np.empty(n_frames, dtype=float)
    if not weight_by_protovector:
        for idx, res in enumerate(
                pool.imap(_unpack_cosine,
                          izip(trace.T, repeat(unit)),
                          chunksize=chunksize)):
            result[idx] = res
    else:
        weights = np.clip(unit, 0., 1.)
        for idx, res in enumerate(
                pool.imap(_unpack_cosine,
                          izip(trace.T, repeat(unit), repeat(weights)),
                          chunksize=chunksize)):
            result[idx] = res
    pool.close()

    # scipy.spatial.distance.cdist should be able to do this, but as of 190207
    # it keeps silently crashing the kernel (at least in Jupyter notebooks)
    # result = [cosine(trace_t, unit) for trace_t in trace.T]

    return 1.0 - np.array(result)
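The helper _unpack_cosine is not shown in the snippet; judging from the calls, it computes a (possibly weighted) cosine distance between one time point's population vector and the protovector, and the final line converts distance to similarity. A serial, numpy-only sketch of that core computation, under that assumption and ignoring the weighting branch:

import numpy as np

def cosine_similarity_over_time(trace, unit):
    """Cosine similarity between each column of `trace` (n_cells x n_frames)
    and a fixed protovector `unit` (length n_cells)."""
    unit_norm = np.linalg.norm(unit)
    result = np.empty(trace.shape[1], dtype=float)
    for t in range(trace.shape[1]):
        frame = trace[:, t]
        denom = np.linalg.norm(frame) * unit_norm
        result[t] = np.dot(frame, unit) / denom if denom > 0 else 0.0
    return result

# Example: 3 cells, 4 time points; columns parallel to `unit` score 1.0.
unit = np.array([1.0, 0.5, 0.0])
trace = np.array([[1.0, 0.0, -1.0, 2.0],
                  [0.5, 1.0, -0.5, 1.0],
                  [0.0, 1.0, 0.0, 0.0]])
print(cosine_similarity_over_time(trace, unit))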