Example #1
import torch


def gaussian_sampling_valid_demo(u=None,
                                 cov=None,
                                 num_sample=1000,
                                 num_class=2,
                                 batch=2):
    # Per-class means of shape (batch, num_class) and covariance matrices of
    # shape (batch, num_class, num_class); float dtype is required by cholesky.
    if u is None:
        u = torch.tensor([[0., 0.], [4., 4.]])
    if cov is None:
        cov = torch.tensor([[[1., 0.], [0., 1.]], [[1., 0.5], [0.5, 1.]]])

    # Standard-normal base samples, shared across the batch dimension.
    sample = torch.randn(num_sample, num_class)
    sample = sample.unsqueeze(0).expand(batch, -1, -1)
    # Transform to N(u, cov) via the Cholesky factor: x = L @ z + u.
    l = torch.linalg.cholesky(cov).unsqueeze(1).expand(-1, num_sample, -1, -1)
    sample = torch.matmul(l, sample.unsqueeze(-1)).squeeze(-1) + u.unsqueeze(1)
    sample = sample.reshape(-1, num_class)
    x = sample[:num_sample, :]
    y = sample[num_sample:, :]
    # The sample means and covariances should match u and cov per class;
    # torch.cov expects variables as rows, hence the transpose.
    print(torch.mean(x, dim=0), torch.cov(x.T))
    print(torch.mean(y, dim=0), torch.cov(y.T))
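A minimal usage sketch (the call below is an assumption, not part of the original example): with the defaults above, the printed per-class sample means should be close to [0, 0] and [4, 4], and the sample covariances close to the identity and the 0.5-correlated matrix, up to Monte Carlo error.

# Hypothetical call; a larger num_sample tightens the estimates.
gaussian_sampling_valid_demo(num_sample=10000)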
Example #2
import torch


def compute_covariance_matrix(walks):
    # Collect the per-step increments of every walk, then estimate the
    # covariance of the pooled increments.
    diffs = []
    for walk in walks:
        x_pre = walk[:-1, :]
        x_post = walk[1:, :]
        diff = x_post - x_pre
        diffs.append(diff)
    diffs_cat = torch.cat(diffs, dim=0)
    cov = torch.cov(diffs_cat.T)  # torch.cov expects variables as rows
    return cov
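A minimal usage sketch, assuming each walk is a (steps, dims) tensor of positions; for unit-variance Gaussian steps the returned matrix should be close to the identity.

import torch

# Hypothetical input: three 2-D random walks of different lengths, built as
# cumulative sums of unit-variance Gaussian steps.
walks = [torch.cumsum(torch.randn(n, 2), dim=0) for n in (100, 200, 300)]
print(compute_covariance_matrix(walks))  # roughly the 2x2 identity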
Example #3
import torch


def cov(*args, **kwargs):
    # Thin pass-through wrapper around torch.cov.
    return torch.cov(*args, **kwargs)
Example #4
# Author: Jintao Huang
# Email: [email protected]
# Date:

import torch
from dev.torch import cov

# In[0]:
x = torch.randn(10, 20)
print(torch.allclose(torch.cov(x), cov(x)))
print(torch.allclose(torch.cov(x, correction=0), cov(x, correction=0)))
print()
"""Out[0]
True
True
"""
Example #5
import torch

def calculate_ditribution(input):
    # torch.cov treats rows as variables and columns as observations;
    # torch.mean here is the mean over all elements of the input.
    mu = torch.mean(input)
    sigma = torch.cov(input)
    return mu, sigma
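Since torch.cov expects a 2-D tensor with variables as rows and observations as columns (a 1-D input yields the scalar variance), a minimal usage sketch with a hypothetical input might be:

import torch

obs = torch.randn(3, 100)               # 3 variables, 100 observations each
mu, sigma = calculate_ditribution(obs)
print(mu.shape, sigma.shape)             # scalar mean over all elements; 3x3 covariance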