Beispiel #1
0
    def __init__(self,
                 subject,
                 train_trials,
                 test_trials,
                 dim_z,
                 batch_size,
                 base_results_dir=None,
                 prefix='mocap_jmvae_'):
        """Set up the JMVAE+ mocap experiment: data, networks, optimizer.

        NOTE(review): relies on module-level names (`groups`, `JMVAEPlus`,
        `UniformMixtureWeightNet`, `Normal`, `Variable`) defined elsewhere
        in the file.
        """
        self.subject = subject
        self.train_trials = train_trials
        self.test_trials = test_trials
        self.dim_z = dim_z
        self.batch_size = batch_size
        self.base_results_dir = base_results_dir
        self.prefix = prefix

        # Training-progress bookkeeping.
        self.epoch_counter = itertools.count()
        self.epoch = None
        self.elbo_per_iter = []
        self.test_loglik_per_iter = []

        self.load_data()

        # Total observed dimensionality of each joint group.
        group_dims = [
            sum(self.joint_dims[joint] for joint in group) for group in groups
        ]
        inference_nets = [self.make_inference_net(d) for d in group_dims]
        generative_nets = [self.make_generative_net(d) for d in group_dims]
        self.vae = JMVAEPlus(
            inference_nets=inference_nets,
            generative_nets=generative_nets,
            mixture_weight_net=UniformMixtureWeightNet(),
            prior_z=Normal(Variable(torch.zeros(1, dim_z)),
                           Variable(torch.ones(1, dim_z))))

        self.optimizer = torch.optim.Adam(set(self.vae.parameters()), lr=1e-3)

        if self.base_results_dir is not None:
            # Random 16-char suffix so repeated runs don't collide.
            suffix = ''.join(
                random.choice(string.ascii_uppercase + string.digits)
                for _ in range(16))
            self.results_folder_name = self.prefix + suffix
            self.results_dir = self.base_results_dir / self.results_folder_name
            self._init_results_dir()
Beispiel #2
0
def make_generative_net():
    """Build the decoder: z -> per-pixel Bernoulli rates via a 512-unit MLP."""
    layers = [
        torch.nn.Linear(dim_z, 512),
        torch.nn.ReLU(),
        torch.nn.Linear(512, dim_x),
        Lambda(torch.sigmoid),
    ]
    return BernoulliNet(rate_net=torch.nn.Sequential(*layers))


def make_inference_net():
    """Build the encoder: x -> Normal(mu, sigma) over z.

    The mu and sigma heads share one hidden layer; the sigma head outputs a
    log-variance which is mapped to a stddev via exp(0.5 * x).
    """
    shared = torch.nn.Sequential(torch.nn.Linear(dim_x, 512), torch.nn.ReLU())
    mu_net = torch.nn.Sequential(shared, torch.nn.Linear(512, dim_z))
    sigma_net = torch.nn.Sequential(
        shared,
        torch.nn.Linear(512, dim_z),
        Lambda(lambda log_var: torch.exp(0.5 * log_var)),
    )
    return NormalNet(mu_net=mu_net, sigma_net=sigma_net)


# Standard normal prior over the dim_z-dimensional latent z.
prior_z = Normal(Variable(torch.zeros(1, dim_z)),
                 Variable(torch.ones(1, dim_z)))

inference_net = make_inference_net()
generative_net = make_generative_net()

# One Adam parameter group per network, both at lr 1e-3.
# NOTE(review): wrapping parameters() in set() makes parameter ordering
# nondeterministic; a list is the conventional choice -- confirm intent.
optimizer = torch.optim.Adam([{
    'params': set(inference_net.parameters()),
    'lr': 1e-3
}, {
    'params': set(generative_net.parameters()),
    'lr': 1e-3
}])

vae = VAE(inference_model=inference_net,
          generative_model=generative_net,
          prior_z=prior_z,
    def __init__(self,
                 img_size,
                 num_samples,
                 batch_size,
                 dim_z,
                 lam,
                 sparsity_matrix_lr,
                 initial_baseline_precision,
                 inference_net_output_dim,
                 generative_net_input_dim,
                 noise_stddev,
                 group_available_prob,
                 initial_sigma_adjustment,
                 prior_theta_sigma,
                 base_results_dir=None,
                 prefix='quad_bars_'):
        """Set up the synthetic "quad bars" Faceback VAE experiment.

        Samples train/test data, builds one inference/generative net per
        image quadrant, assembles the FacebackVAE, and wires a MetaOptimizer
        (Adam for dense parameters, SGD for the sparsity matrices).

        NOTE(review): depends on module-level names (FacebackInferenceNet,
        FacebackGenerativeNet, FacebackVAE, NormalPriorTheta, MetaOptimizer,
        Normal, Variable) defined elsewhere in the file.
        """
        # Stash every hyperparameter on the instance.
        self.img_size = img_size
        self.num_samples = num_samples
        self.batch_size = batch_size
        self.dim_z = dim_z
        self.lam = lam
        self.sparsity_matrix_lr = sparsity_matrix_lr
        self.initial_baseline_precision = initial_baseline_precision
        self.inference_net_output_dim = inference_net_output_dim
        self.generative_net_input_dim = generative_net_input_dim
        self.noise_stddev = noise_stddev
        self.group_available_prob = group_available_prob
        self.initial_sigma_adjustment = initial_sigma_adjustment
        self.prior_theta_sigma = prior_theta_sigma
        self.base_results_dir = base_results_dir
        self.prefix = prefix

        # Sample the training data and set up a DataLoader
        self.train_data = self.sample_data(self.num_samples)
        self.test_data = self.sample_data(1000)
        self.train_loader = torch.utils.data.DataLoader(
            torch.utils.data.TensorDataset(self.train_data,
                                           torch.zeros(self.num_samples)),
            batch_size=batch_size,
            shuffle=True)

        # Learnable global adjustment to the generative noise scale.
        self.generative_sigma_adjustment = Variable(
            self.initial_sigma_adjustment * torch.ones(1), requires_grad=True)

        # Training-progress bookkeeping.
        self.epoch_counter = itertools.count()
        self.epoch = None
        self.elbo_per_iter = []
        self.test_loglik_per_iter = []

        # Standard normal prior over the latent z.
        self.prior_z = Normal(Variable(torch.zeros(1, dim_z)),
                              Variable(torch.ones(1, dim_z)))

        # One net per image quadrant, each observing half_size**2 pixels.
        half_size = self.img_size // 2
        dim_xs = [half_size * half_size] * 4
        self.inference_net = FacebackInferenceNet(
            almost_inference_nets=[
                self.make_almost_inference_net(dim_x) for dim_x in dim_xs
            ],
            net_output_dim=self.inference_net_output_dim,
            prior_z=self.prior_z,
            initial_baseline_precision=self.initial_baseline_precision)
        self.generative_net = FacebackGenerativeNet(
            almost_generative_nets=[
                self.make_almost_generative_net(dim_x) for dim_x in dim_xs
            ],
            net_input_dim=self.generative_net_input_dim,
            dim_z=self.dim_z)
        self.vae = FacebackVAE(
            # self.vae = FacebackDecoderSparseOnly(
            inference_net=self.inference_net,
            generative_net=self.generative_net,
            prior_z=self.prior_z,
            prior_theta=NormalPriorTheta(sigma=self.prior_theta_sigma),
            lam=self.lam)

        # Adam for dense parameters; plain SGD for the sparsity matrices so
        # their learning rate can be tuned independently.
        self.optimizer = MetaOptimizer([
            # Inference parameters
            torch.optim.Adam(set(
                p for net in self.inference_net.almost_inference_nets
                for p in net.parameters()),
                             lr=1e-3),
            torch.optim.Adam([self.inference_net.mu_layers], lr=1e-3),
            torch.optim.SGD([self.inference_net.precision_layers],
                            lr=self.sparsity_matrix_lr),
            torch.optim.Adam([self.inference_net.baseline_precision], lr=1e-3),

            # Generative parameters
            torch.optim.Adam(set(
                p for net in self.generative_net.almost_generative_nets
                for p in net.parameters()),
                             lr=1e-3),
            torch.optim.SGD([self.generative_net.connectivity_matrices],
                            lr=self.sparsity_matrix_lr),
            torch.optim.Adam([self.generative_sigma_adjustment], lr=1e-3)
        ])

        if self.base_results_dir is not None:
            # Random 16-char suffix so repeated runs don't collide.
            self.results_folder_name = self.prefix + ''.join(
                random.choice(string.ascii_uppercase + string.digits)
                for _ in range(16))
            self.results_dir = self.base_results_dir / self.results_folder_name
            self._init_results_dir()
Beispiel #4
0
  def __init__(
      self,
      subject,
      train_trials,
      test_trials,
      dim_z,
      batch_size,
      lam,
      sparsity_matrix_lr,
      inference_net_output_dim,
      generative_net_input_dim,
      initial_baseline_precision,
      prior_theta_sigma,
      group_available_prob,
      base_results_dir=None,
      prefix='mocap_subject55_'
  ):
    """Set up the Faceback VAE experiment on mocap data for one subject.

    Loads the trial data, builds one inference/generative net per joint,
    assembles the FacebackVAE, and wires a MetaOptimizer (Adam for dense
    parameters, SGD for the sparsity matrices).

    Args:
      subject: identifier of the mocap subject to model.
      train_trials: trials used for training (consumed by load_data()).
      test_trials: trials held out for evaluation.
      dim_z: latent dimensionality.
      batch_size: minibatch size used by the training code.
      lam: sparsity penalty weight passed to FacebackVAE.
      sparsity_matrix_lr: SGD learning rate for the sparsity matrices.
      inference_net_output_dim: width of the shared inference interface layer.
      generative_net_input_dim: width of the shared generative interface layer.
      initial_baseline_precision: initial baseline precision of the
        inference net.
      prior_theta_sigma: stddev of the Normal prior over theta.
      group_available_prob: probability that a joint group is observed.
      base_results_dir: if not None, a random results directory is created
        under it.
      prefix: results folder-name prefix. Defaults to 'mocap_subject55_',
        which was previously hard-coded inline, so existing callers see
        identical behavior; siblings of this class already expose the same
        keyword.

    NOTE(review): depends on module-level names (joint_order,
    FacebackInferenceNet, FacebackGenerativeNet, FacebackVAE,
    NormalPriorTheta, MetaOptimizer, Normal, Variable) defined elsewhere.
    """
    self.subject = subject
    self.train_trials = train_trials
    self.test_trials = test_trials
    self.dim_z = dim_z
    self.batch_size = batch_size
    self.lam = lam
    self.sparsity_matrix_lr = sparsity_matrix_lr
    self.inference_net_output_dim = inference_net_output_dim
    self.generative_net_input_dim = generative_net_input_dim
    self.initial_baseline_precision = initial_baseline_precision
    self.prior_theta_sigma = prior_theta_sigma
    self.group_available_prob = group_available_prob
    self.base_results_dir = base_results_dir
    self.prefix = prefix

    # Training-progress bookkeeping.
    self.epoch_counter = itertools.count()
    self.epoch = None
    self.elbo_per_iter = []
    self.test_loglik_per_iter = []

    self.load_data()

    # Standard normal prior over the latent z.
    self.prior_z = Normal(
      Variable(torch.zeros(1, dim_z)),
      Variable(torch.ones(1, dim_z))
    )

    # One almost-net per joint, sized by that joint's dimensionality.
    self.inference_net = FacebackInferenceNet(
      almost_inference_nets=[self.make_almost_inference_net(self.joint_dims[j]) for j in joint_order],
      net_output_dim=self.inference_net_output_dim,
      prior_z=self.prior_z,
      initial_baseline_precision=self.initial_baseline_precision
    )
    self.generative_net = FacebackGenerativeNet(
      almost_generative_nets=[self.make_almost_generative_net(self.joint_dims[j]) for j in joint_order],
      net_input_dim=self.generative_net_input_dim,
      dim_z=self.dim_z
    )
    self.vae = FacebackVAE(
      inference_net=self.inference_net,
      generative_net=self.generative_net,
      prior_z=self.prior_z,
      prior_theta=NormalPriorTheta(sigma=self.prior_theta_sigma),
      lam=self.lam
    )

    # Adam for dense parameters; plain SGD for the sparsity matrices so their
    # learning rate can be tuned independently.
    self.optimizer = MetaOptimizer([
      # Inference parameters
      torch.optim.Adam(
        set(p for net in self.inference_net.almost_inference_nets for p in net.parameters()),
        lr=1e-3
      ),
      torch.optim.Adam([self.inference_net.mu_layers], lr=1e-3),
      torch.optim.SGD([self.inference_net.precision_layers], lr=self.sparsity_matrix_lr),
      torch.optim.Adam([self.inference_net.baseline_precision], lr=1e-3),

      # Generative parameters
      torch.optim.Adam(
        set(p for net in self.generative_net.almost_generative_nets for p in net.parameters()),
        lr=1e-3
      ),
      torch.optim.SGD([self.generative_net.connectivity_matrices], lr=self.sparsity_matrix_lr)
    ])

    if self.base_results_dir is not None:
      # Random 16-char suffix so repeated runs don't collide.
      self.results_folder_name = self.prefix + ''.join(
        random.choice(string.ascii_uppercase + string.digits) for _ in range(16))
      self.results_dir = self.base_results_dir / self.results_folder_name
      self._init_results_dir()
Beispiel #5
0
  def __init__(
      self,
      # img_size,
      dim_z,
      batch_size,
      lam,
      sparsity_matrix_lr,
      inference_net_output_dim,
      generative_net_input_dim,
      initial_baseline_precision,
      prior_theta_sigma,
      group_available_prob,
      inference_net_num_filters,
      generative_net_num_filters,
      use_gpu,
      base_results_dir=None,
      prefix='faces_'
  ):
    """Set up the faces Faceback VAE experiment (optionally on GPU).

    Loads the data, builds one 64x64 inference/generative net per group,
    assembles the FacebackVAE, and wires a MetaOptimizer (Adam for dense
    parameters, SGD for the sparsity matrices).

    NOTE(review): depends on module-level names (num_groups,
    FacebackInferenceNet, FacebackGenerativeNet, FacebackVAE,
    NormalPriorTheta, MetaOptimizer, Normal, Variable) defined elsewhere.
    """
    # Stash every hyperparameter on the instance.
    # self.img_size = img_size
    self.dim_z = dim_z
    self.batch_size = batch_size
    self.lam = lam
    self.sparsity_matrix_lr = sparsity_matrix_lr
    self.inference_net_output_dim = inference_net_output_dim
    self.generative_net_input_dim = generative_net_input_dim
    self.initial_baseline_precision = initial_baseline_precision
    self.prior_theta_sigma = prior_theta_sigma
    self.group_available_prob = group_available_prob
    self.inference_net_num_filters = inference_net_num_filters
    self.generative_net_num_filters = generative_net_num_filters
    self.use_gpu = use_gpu
    self.base_results_dir = base_results_dir
    self.prefix = prefix

    # Training-progress bookkeeping (no test log-lik tracked here).
    self.epoch_counter = itertools.count()
    self.epoch = None
    self.elbo_per_iter = []

    self.load_data()

    # Standard normal prior over the latent z, placed on GPU when requested.
    if self.use_gpu:
      self.prior_z = Normal(
        Variable(torch.zeros(1, dim_z).cuda()),
        Variable(torch.ones(1, dim_z).cuda())
      )
    else:
      self.prior_z = Normal(
        Variable(torch.zeros(1, dim_z)),
        Variable(torch.ones(1, dim_z))
      )

    # One net per group, each over a 64x64 image.
    self.inference_net = FacebackInferenceNet(
      almost_inference_nets=[
        self.make_almost_inference_net(64 * 64)
        for _ in range(num_groups)
      ],
      net_output_dim=self.inference_net_output_dim,
      prior_z=self.prior_z,
      initial_baseline_precision=self.initial_baseline_precision,
      use_gpu=self.use_gpu
    )
    self.generative_net = FacebackGenerativeNet(
      almost_generative_nets=[
        self.make_almost_generative_net(64 * 64)
        for _ in range(num_groups)
      ],
      net_input_dim=self.generative_net_input_dim,
      dim_z=self.dim_z,
      use_gpu=self.use_gpu
    )
    self.vae = FacebackVAE(
      inference_net=self.inference_net,
      generative_net=self.generative_net,
      prior_z=self.prior_z,
      prior_theta=NormalPriorTheta(sigma=self.prior_theta_sigma),
      lam=self.lam
    )

    # Adam for dense parameters; plain SGD for the sparsity matrices so their
    # learning rate can be tuned independently.
    self.optimizer = MetaOptimizer([
      # Inference parameters
      torch.optim.Adam(
        set(p for net in self.inference_net.almost_inference_nets for p in net.parameters()),
        lr=1e-3
      ),
      torch.optim.Adam([self.inference_net.mu_layers], lr=1e-3),
      torch.optim.SGD([self.inference_net.precision_layers], lr=self.sparsity_matrix_lr),
      torch.optim.Adam([self.inference_net.baseline_precision], lr=1e-3),

      # Generative parameters
      torch.optim.Adam(
        set(p for net in self.generative_net.almost_generative_nets for p in net.parameters()),
        lr=1e-3
      ),
      # Per-net sigma parameters live in sigma_net.extra_args[0]
      # (presumably a learnable scale -- confirm against the net definition).
      torch.optim.Adam(
        [net.sigma_net.extra_args[0] for net in self.generative_net.almost_generative_nets],
        lr=1e-3
      ),
      torch.optim.SGD([self.generative_net.connectivity_matrices], lr=self.sparsity_matrix_lr)
    ])

    if self.base_results_dir is not None:
      # Random 16-char suffix so repeated runs don't collide.
      self.results_folder_name = self.prefix + ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(16))
      self.results_dir = self.base_results_dir / self.results_folder_name
      self._init_results_dir()