def make_simple_trajectories(tasks, npt=201, speed_mid=0.5, speed_width=0.2):
    '''
    Make linear trajectories from start to finish with a gaussian speed
    profile.

    Parameters
    ----------
    tasks : ndarray, shape (n, 2 * ndim)
        n tasks with start position in tasks[:, :ndim] and target position
        in tasks[:, ndim:]
    npt : int, optional
        number of time points sampled along each trajectory
    speed_mid : float, optional
        centre of the gaussian speed profile, in normalized time [0, 1]
    speed_width : float, optional
        width parameter of the gaussian speed profile; passed straight to
        curves.gauss1d (exact interpretation, e.g. fwhm vs sigma, is
        defined by that routine)

    Returns
    -------
    time : ndarray, shape (npt,)
        normalized time points in [0, 1]
    pos : ndarray, shape (n, npt, ndim)
        position along each trajectory at each time point
    spd : ndarray, shape (npt,)
        speed profile, normalized to sum to 1
    '''
    # bug fix: floor division so ndim is an int usable as a slice bound --
    # `tasks.shape[1] / 2` is a float under Python 3 and breaks the slicing
    ndim = tasks.shape[1] // 2
    time = np.linspace(0, 1, npt)

    # gaussian speed profile, normalized to unit total
    spd = curves.gauss1d(time, 1., speed_width, speed_mid)
    spd /= np.sum(spd)

    start, stop = tasks[:, :ndim], tasks[:, ndim:]
    drn = stop - start                          # per-task direction vectors
    vel = spd[None, :, None] * drn[:, None, :]  # shape (task, time, space)
    # NOTE(review): displacement accumulated as cumsum(v/2 * t); looks like
    # an approximation to the kinematic equation -- confirm intended form
    disp = np.cumsum(vel / 2. * time[None, :, None], axis=1)
    pos = disp + start[:, None]
    return time, pos, spd
def test_gam_predict_cv():
    # Integration-style test of gam_predict_cv on synthetic firing rates
    # generated from known coefficients, so the recovered GAM coefficients
    # can be checked against ground truth. Relies on project data
    # ('frank-osmd' dataset) and project helpers (DataCollection, kin,
    # gam_predict_cv, gauss1d).
    # NOTE(review): np.random is not seeded here, so the tolerances below
    # (decimal=1 / significant=1) absorb run-to-run noise -- confirm this
    # is deliberate.
    dsname = 'frank-osmd'
    align = 'hold'
    lag = 0.1       # s
    b0 = 20         # Hz -- baseline firing rate
    noise_sd = 1.   # Hz -- sd of additive gaussian noise on the rates
    b_scale = 10.   # scale applied to the direction coefficients
    nbin = 10
    # build a binned data collection from the first 5 files of the dataset
    ds = datasets[dsname]
    dc = DataCollection(ds.get_files()[:5])
    unit = ds.get_units()[0]
    dc.add_unit(unit, lag)
    bnd = dc.make_binned(nbin=nbin, align=align, do_count=True)
    ntask, nrep, nedge, ndim = bnd.pos.shape
    # collapse task and repeat axes; drn holds movement directions per bin
    pos = bnd.pos.reshape(ntask * nrep, nedge, -1)
    drn = kin.get_dir(pos)
    # simplest model
    # y = b0 + Bd.D
    B = np.array([.2, .6, .4]) * b_scale
    y = b0 + np.dot(drn, B)
    noise = np.random.normal(0, noise_sd, size=y.shape)
    y += noise
    # inject the synthetic counts and fit the constant-direction model 'kd'
    bnd.set_count_from_flat(y[:,None])
    out = gam_predict_cv(bnd, ['kd'], [dsname, unit, lag, align], \
        family='gaussian')
    # fitted coefficients (averaged over CV folds) should recover
    # [b0, Bx, By, Bz] to ~1 decimal place
    have = np.mean(out.coef[0], axis=0)[:4]
    want = np.array([b0, B[0], B[1], B[2]])
    np.testing.assert_array_almost_equal(have, want, decimal=1)
    # changing Bdt model, gaussian noise model
    # y = b0 + Bdt.D
    # time-varying direction coefficients: each component of Bt is a sum of
    # two gaussians parameterized by the (A, fwhm, c) records in gcoeff
    gc_type = [('A', float), ('fwhm', float), ('c', float)]
    gcoeff = np.array([[(1., 1.5, 3.), (.75, 2.5, 6.)],
                       [(.4, 4., 2.), (.1, 2., 5.)],
                       [(.8, 3., 5.), (.6, 4.5, 6.5)]], dtype=gc_type)
    gcoeff = gcoeff.view(np.recarray)
    x = np.linspace(0, 10, nbin)
    Bt = np.zeros((3, nbin))
    for Bkt, gc in zip(Bt, gcoeff):
        g0 = gauss1d(x, gc.A[0], gc.fwhm[0], gc.c[0])
        g1 = gauss1d(x, gc.A[1], gc.fwhm[1], gc.c[1])
        Bkt[:] = g0 + g1
    Bt *= b_scale
    # synthetic rates from the time-varying model, with gaussian noise
    y = b0 + np.sum(Bt.T[None] * drn, axis=-1)
    y = np.random.normal(y, noise_sd)
    bnd.set_count_from_flat(y[:,None])
    # fit the time-varying direction model 'kdX'
    out = gam_predict_cv(bnd, ['kdX'], [dsname, unit, lag, align], \
        family='gaussian')
    # check that actual and predicted are correlated
    # NOTE(review): pred[8] picks one specific prediction -- presumably one
    # CV fold or model index; confirm against gam_predict_cv's output layout
    x = out.actual.flatten()
    y = out.pred[8].flatten()
    mc, rp = pearsonr(x,y)
    np.testing.assert_approx_equal(mc, 1, significant=1)