def __call__(self, shape, name=None):
    # orthogonal initialization: SVD of a Gaussian matrix yields an orthonormal factor
    flat_shape = (shape[0], np.prod(shape[1:]))
    a = np_rng.normal(0.0, 1.0, flat_shape)
    u, _, v = np.linalg.svd(a, full_matrices=False)
    q = u if u.shape == flat_shape else v  # pick the factor with the correct shape
    q = q.reshape(shape)
    return sharedX(self.scale * q[:shape[0], :shape[1]], name=name)

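A minimal, self-contained sketch (NumPy only, outside the sharedX/Theano wrapper above) of the same SVD-based construction, checking that the resulting 2-D weight matrix has orthonormal rows; the shape, scale, and seed below are illustrative, not taken from the repository.

import numpy as np

rng = np.random.RandomState(0)
shape, scale = (64, 128), 1.0
flat_shape = (shape[0], int(np.prod(shape[1:])))
a = rng.normal(0.0, 1.0, flat_shape)
u, _, v = np.linalg.svd(a, full_matrices=False)
q = u if u.shape == flat_shape else v          # factor matching flat_shape
W = scale * q.reshape(shape)[:shape[0], :shape[1]]
# rows of q are orthonormal, so W @ W.T should equal (scale**2) * I
assert np.allclose(W @ W.T, scale**2 * np.eye(shape[0]), atol=1e-6)
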
def __call__(self, shape):
    # He-style initialization: std = sqrt(2 / fan_in)
    if len(shape) == 2:
        scale = np.sqrt(2.0 / shape[0])
    elif len(shape) == 4:
        scale = np.sqrt(2.0 / np.prod(shape[1:]))
    else:
        raise NotImplementedError
    return sharedX(np_rng.normal(size=shape, scale=scale))

def __call__(self, shape, name=None):
    # same He-style fan-in scaling, with an optional variable name
    if len(shape) == 2:
        scale = np.sqrt(2. / shape[0])
    elif len(shape) == 4:
        scale = np.sqrt(2. / np.prod(shape[1:]))
    else:
        raise NotImplementedError
    return sharedX(np_rng.normal(size=shape, scale=scale), name=name)

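A hedged NumPy-only sketch of the fan-in scaling used above: for a 4-D convolutional weight of shape (out_channels, in_channels, kh, kw) the fan-in is prod(shape[1:]), and the empirical standard deviation of the draw should land near sqrt(2 / fan_in). The shape and seed are illustrative.

import numpy as np

rng = np.random.RandomState(0)
shape = (256, 64, 3, 3)                      # (out_channels, in_channels, kh, kw)
fan_in = int(np.prod(shape[1:]))             # 64 * 3 * 3 = 576
scale = np.sqrt(2.0 / fan_in)
w = rng.normal(size=shape, scale=scale)
# empirical std should be close to the target sqrt(2 / fan_in)
assert abs(w.std() - scale) / scale < 0.05
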
def init_model():
    # Gaussian MRF on a 2D grid: A is the precision matrix, b the linear term
    G = nx.grid_2d_graph(args.grid, args.grid)
    A = np.asarray(nx.adjacency_matrix(G).todense())
    #A = A * np_rng.uniform(-0.1, 0.1, size=A.shape)
    noise = np.tril(np_rng.uniform(-0.1, 0.1, size=A.shape))
    noise = noise + noise.T          # symmetric random edge weights
    A = A * noise
    # diagonal dominance makes A (and hence its inverse) positive definite
    np.fill_diagonal(A, np.sum(np.abs(A), axis=1) + .1)
    if not np.all(np.linalg.eigvals(A) > 0) or not np.all(
            np.linalg.eigvals(np.linalg.inv(A)) > 0):
        raise NotImplementedError
    b = np_rng.normal(0, 1, size=(args.grid**2, 1))
    mu0 = sharedX(np.dot(np.linalg.inv(A), b).flatten())
    A0 = sharedX(A)
    model_params = {'mu': mu0, 'A': A0}

    ## score function
    score_q = score_gaussian

    ## ground truth samples
    gt = np.random.multivariate_normal(mean=mu0.get_value().flatten(),
                                       cov=np.linalg.inv(A),
                                       size=(10000,),
                                       check_valid='raise')

    ## adjacency matrix
    W = np.zeros(A.shape).astype(int)
    W[A != 0] = 1
    assert np.all(np.sum(W, axis=1) > 0), 'illegal inputs'
    assert np.sum((W - W.T)**2) < 1e-8, 'illegal inputs'

    return model_params, score_q, gt, W

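A compact sketch of the same precision-matrix construction on a small grid, assuming networkx is available; it checks that filling the diagonal with the absolute row sums plus 0.1 makes A strictly diagonally dominant, and therefore positive definite, before sampling from the corresponding Gaussian. The grid size, noise range, and sample count are illustrative.

import numpy as np
import networkx as nx

rng = np.random.RandomState(0)
grid = 4
G = nx.grid_2d_graph(grid, grid)
A = np.asarray(nx.adjacency_matrix(G).todense()).astype(float)
noise = np.tril(rng.uniform(-0.1, 0.1, size=A.shape))
A = A * (noise + noise.T)                     # symmetric random edge weights
np.fill_diagonal(A, np.sum(np.abs(A), axis=1) + 0.1)
assert np.all(np.linalg.eigvalsh(A) > 0)      # diagonal dominance => positive definite
mu = np.linalg.solve(A, rng.normal(size=(grid**2, 1))).ravel()
samples = rng.multivariate_normal(mean=mu, cov=np.linalg.inv(A), size=1000)
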
def random_features_kernel(x, n_features, score_q, fixed_weights=True,
                           cos_feat_dim_scale=True, **model_params):
    # random Fourier feature approximation of an RBF kernel and its gradient
    dim = x.get_value().shape[1]
    H = sqr_dist(x, x)
    h = median_distance(H)          # bandwidth from the median heuristic
    #h = select_h_by_ksdv(x, score_q, **model_params)
    gamma = 1. / h
    if fixed_weights:
        random_offset = sharedX(np_rng.uniform(0, 2*pi, (n_features,)))
        weights = sharedX(np_rng.normal(0, 1, (dim, n_features)))
        random_weights = T.sqrt(2*gamma) * weights
    else:
        #random_weights = T.sqrt(2 * gamma) * t_rng.normal(
        #    (x.shape[1], n_features))
        #random_offset = t_rng.uniform((n_features,), 0, 2 * pi)
        raise NotImplementedError

    if cos_feat_dim_scale:
        alpha = T.sqrt(2.) / T.sqrt(n_features).astype(theano.config.floatX)
    else:
        alpha = T.sqrt(2.)

    coff = T.dot(x, random_weights) + random_offset
    projection = alpha * T.cos(coff)
    kxy = T.dot(projection, projection.T)

    # dxkxy[i] = sum_j d/dx_j k(x_i, x_j), computed in feature space
    sinf = -alpha * T.sin(coff)
    wd = random_weights.T
    inner = T.sum(sinf, axis=0).dimshuffle(0, 'x') * wd
    dxkxy = T.dot(projection, inner)
    return kxy, dxkxy

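A NumPy-only sketch of the random-feature approximation used above (no Theano): with weights drawn from N(0, 2*gamma*I) and sqrt(2/D)*cos(.) features, the inner product of the feature vectors approximates the RBF kernel exp(-gamma * ||x - y||^2). The sizes, gamma, and seed are illustrative; the error shrinks as n_features grows.

import numpy as np

rng = np.random.RandomState(0)
n, dim, n_features, gamma = 50, 3, 20000, 0.5
x = rng.normal(size=(n, dim))

w = np.sqrt(2 * gamma) * rng.normal(size=(dim, n_features))
b = rng.uniform(0, 2 * np.pi, size=n_features)
phi = np.sqrt(2.0 / n_features) * np.cos(x @ w + b)
kxy_rff = phi @ phi.T

# exact RBF kernel exp(-gamma * ||x - y||^2) for comparison
sq = np.sum(x**2, 1)[:, None] + np.sum(x**2, 1)[None, :] - 2 * x @ x.T
kxy_exact = np.exp(-gamma * sq)
print('max abs error:', np.max(np.abs(kxy_rff - kxy_exact)))   # shrinks as n_features grows
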
def __call__(self, shape, name=None):
    r = np_rng.normal(loc=0, scale=0.01, size=shape)
    r = r / np.sqrt(np.sum(r ** 2)) * np.sqrt(shape[1])
    return sharedX(r, name=name)

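A brief NumPy check of the rescaling step above: after dividing by the Frobenius norm and multiplying by sqrt(shape[1]), the matrix has Frobenius norm exactly sqrt(shape[1]). The shape and seed are illustrative.

import numpy as np

rng = np.random.RandomState(0)
shape = (100, 50)
r = rng.normal(loc=0, scale=0.01, size=shape)
r = r / np.sqrt(np.sum(r**2)) * np.sqrt(shape[1])
assert np.isclose(np.linalg.norm(r), np.sqrt(shape[1]))
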
def __call__(self, shape, name=None):
    return sharedX(np_rng.normal(loc=self.loc, scale=self.scale, size=shape), name=name)
