Exemplo n.º 1
0
    elif opt == '-c':
        epoch_resume = int(arg)
        if epoch_resume < 0:
            error_and_exit()
    elif opt == '-b':
        batch_size = int(arg)
    else:
        error_and_exit()

# Compulsory params: both must have been supplied via the CLI options
# parsed above; bail out with usage info otherwise.
if loss is None or noise is None:
    error_and_exit()

print(f"Params: loss={loss}, noise={noise}")

# Binary classifier: 2 output classes.
model = densenet.get_densenet(2)

train_loader, val_loader = utils.get_covid_loaders(float(noise),
                                                   batch_size=batch_size)

model_folder = './models/forwardt/'
model_path = fpath('./models', loss, noise)
# makedirs(exist_ok=True) avoids the exists()/mkdir() TOCTOU race and also
# creates the intermediate './models' directory if it does not exist yet
# (plain os.mkdir would raise FileNotFoundError in that case).
os.makedirs(model_folder, exist_ok=True)

# NOTE(review): hard-coded off; the original "Check this." marker suggests
# this flag was meant to be revisited -- confirm intended value.
filter_outlier = False

kerasModel = ChexpertModel(model,
                           train_loader,
                           val_loader,
Exemplo n.º 2
0
 def __init__(self,
              encoder_name,
              num_input_channels=1,
              drop_rate=0,
              img_size=(160, 160),
              data_parallel=True,
              continuous_latent_dim=100,
              disc_latent_dim=10,
              sample_temperature=0.67,
              small_input=False):
     """Build a VAE with a CNN encoder and joint continuous/discrete latents.

     Args:
         encoder_name: backbone identifier; must contain "densenet",
             "wideresnet" or "preactresnet" as a substring, otherwise
             NotImplementedError is raised.
         num_input_channels: number of channels of the input images.
         drop_rate: dropout rate forwarded to the backbone factory.
         img_size: (H, W) of the input images; used to size the decoder
             kernel (each side is divided by 32, so this assumes the
             encoder downsamples by a factor of 32 -- TODO confirm).
         data_parallel: if True, wrap each submodule in nn.DataParallel.
         continuous_latent_dim: dimensionality of the Gaussian latent.
         disc_latent_dim: dimensionality of the categorical latent
             (np.sum is applied below, so a sequence of sizes appears to
             be accepted as well -- verify against Sample/Decoder).
         sample_temperature: temperature passed to the Sample module
             (presumably Gumbel-softmax style sampling -- confirm).
         small_input: forwarded to the backbone factory.
     """
     super(VariationalAutoEncoder, self).__init__()
     # Pick the encoder backbone by substring match on its name.
     if "densenet" in encoder_name:
         self.feature_extractor = get_densenet(
             encoder_name,
             drop_rate,
             input_channels=num_input_channels,
             small_input=small_input,
             data_parallel=data_parallel)
     elif "wideresnet" in encoder_name:
         self.feature_extractor = get_wide_resnet(
             encoder_name,
             drop_rate,
             input_channels=num_input_channels,
             small_input=small_input,
             data_parallel=data_parallel)
     elif "preactresnet" in encoder_name:
         self.feature_extractor = get_preact_resnet(
             encoder_name,
             drop_rate,
             input_channels=num_input_channels,
             small_input=small_input,
             data_parallel=data_parallel)
     else:
         raise NotImplementedError(
             "{} not implemented".format(encoder_name))
     # Collapse the encoder's spatial feature map to a 1x1 vector.
     global_avg = nn.AdaptiveAvgPool2d(output_size=(1, 1))
     if data_parallel:
         global_avg = nn.DataParallel(global_avg)
     self.global_avg = global_avg
     self.continuous_inference = nn.Sequential()
     self.disc_latent_inference = nn.Sequential()
     # Two inference heads for the Gaussian latent: one predicts the mean,
     # the other the log standard deviation.
     conti_mean_inf_module = _Inference(
         num_input_channels=self.feature_extractor.num_feature_channel,
         latent_dim=continuous_latent_dim,
         disc_variable=False)
     conti_logsigma_inf_module = _Inference(
         num_input_channels=self.feature_extractor.num_feature_channel,
         latent_dim=continuous_latent_dim,
         disc_variable=False)
     if data_parallel:
         conti_mean_inf_module = nn.DataParallel(conti_mean_inf_module)
         conti_logsigma_inf_module = nn.DataParallel(
             conti_logsigma_inf_module)
     self.continuous_inference.add_module("mean", conti_mean_inf_module)
     self.continuous_inference.add_module("log_sigma",
                                          conti_logsigma_inf_module)
     self._disc_latent_dim = disc_latent_dim
     # Inference head for the categorical (discrete) latent.
     dic_inf = _Inference(
         num_input_channels=self.feature_extractor.num_feature_channel,
         latent_dim=disc_latent_dim,
         disc_variable=True)
     if data_parallel:
         dic_inf = nn.DataParallel(dic_inf)
     self.disc_latent_inference = dic_inf
     # Sampling module that draws latents from the inferred distributions.
     sample = Sample(temperature=sample_temperature)
     if data_parallel:
         sample = nn.DataParallel(sample)
     self.sample = sample
     # Decoder kernel spans the encoder's final feature map; the /32
     # assumes a fixed total downsampling factor of 32 -- TODO confirm.
     decoder_kernel_size = tuple([int(s / 32) for s in img_size])
     # Decoder consumes the concatenated continuous + discrete latents
     # (np.sum handles disc_latent_dim given as a scalar or a sequence).
     self.feature_reconstructor = Decoder(
         num_channel=num_input_channels,
         latent_dim=int(continuous_latent_dim + np.sum(disc_latent_dim)),
         data_parallel=data_parallel,
         kernel_size=decoder_kernel_size)