Example #1
0
    def worker(mix_k):
        """Train one mixture model with ``mix_k`` components.

        Builds a per-worker copy of the hyper-parameters, routes the run to
        the next available device, saves a preview grid of one training batch,
        then builds the model and trains it.  Reads ``hparams_dir``,
        ``device_iter`` and ``dataset_ins`` from the enclosing scope.
        """
        local_hparams = JsonConfig(hparams_dir)
        local_hparams.Mixture.num_component = mix_k
        # Log-dir name encodes the model family (naive vs. latent mixture)
        # plus the component count.
        model = "GenMM-K{}" if local_hparams.Mixture.naive else "LatMM-K{}"
        local_hparams.Dir.log_root = os.path.join(local_hparams.Dir.log_root,
                                                  model.format(mix_k))
        # Round-robin device assignment shared across workers via device_iter.
        this_device = next(device_iter)
        local_hparams.Device.glow[0] = this_device
        local_hparams.Device.data = this_device
        print("Dir: {} and device: {}".format(local_hparams.Dir.log_root,
                                              this_device))

        # Peek one batch of the training set and save it as an image grid so
        # the data fed to this run can be inspected.  (The original guarded
        # this with a local `peeked` flag that was always False — dead logic,
        # removed here; behavior is unchanged.)
        tmp_dataloader = torch.utils.data.DataLoader(dataset_ins,
                                                     batch_size=64,
                                                     shuffle=True,
                                                     num_workers=2)
        img = next(iter(tmp_dataloader))[0]

        if not os.path.exists(local_hparams.Dir.log_root):
            os.makedirs(local_hparams.Dir.log_root)
        # assumes pixel values are centered in [-0.5, 0.5] — the +0.5 shift
        # maps them to [0, 1] for saving; TODO confirm against the dataset.
        vutils.save_image(
            img.add(0.5),
            os.path.join(local_hparams.Dir.log_root,
                         "img_under_evaluation.png"))

        built = build(local_hparams, True)
        trainer = Trainer(**built, dataset=dataset_ins, hparams=local_hparams)
        trainer.train()
Example #2
0
    def worker(label):
        """Train a per-class model on the data subset for ``label``.

        Loads the class subset, saves a preview grid of one batch, dumps a
        json config used later for performance evaluation, then builds and
        trains the model.  Reads ``hparams_dir`` and ``dataset_root`` from
        the enclosing scope.
        """
        local_hparams = JsonConfig(hparams_dir)
        # Each classifier gets its own log subdirectory.
        # (The "classfier" spelling is preserved: it is a runtime path.)
        local_hparams.Dir.log_root = os.path.join(local_hparams.Dir.log_root,
                                                  "classfier{}".format(label))
        dataset = load_obj(
            os.path.join(dataset_root,
                         "classSets/" + "subset{}".format(label)))

        # Peek one batch of the subset and save it as an image grid so the
        # data fed to this run can be inspected.  (The original wrapped this
        # in a vacuous `if True:` block, removed here; behavior is unchanged.)
        tmp_dataloader = torch.utils.data.DataLoader(dataset,
                                                     batch_size=64,
                                                     shuffle=True,
                                                     num_workers=2)
        img = next(iter(tmp_dataloader))

        if not os.path.exists(local_hparams.Dir.log_root):
            os.makedirs(local_hparams.Dir.log_root)

        # assumes the batch is tensor-like in [-0.5, 0.5] — TODO confirm.
        vutils.save_image(
            img.data.add(0.5),
            os.path.join(local_hparams.Dir.log_root,
                         "img_under_evaluation.png"))

        # Dump the json file for performance evaluation, once per log dir.
        # NOTE(review): the existence check looks in local_hparams.Dir.log_root
        # but the dump targets get_hparams.Dir.log_root (the un-suffixed root)
        # — confirm this asymmetry is intentional before changing it.
        if not os.path.exists(
                os.path.join(local_hparams.Dir.log_root,
                             local_hparams.Data.dataset + ".json")):
            get_hparams = JsonConfig(hparams_dir)
            data_dir = get_hparams.Data.dataset_root
            # Evaluation runs against the full ("all") dataset, not the split.
            get_hparams.Data.dataset_root = data_dir.replace("separate", "all")
            get_hparams.dump(dir_path=get_hparams.Dir.log_root,
                             json_name=get_hparams.Data.dataset + ".json")

        ### build model and train
        built = build(local_hparams, True)

        # Bug fix: print this worker's log root — the original printed the
        # outer-scope `hparams.Dir.log_root`, not the configuration actually
        # used by this worker.
        print(local_hparams.Dir.log_root)
        trainer = Trainer(**built, dataset=dataset, hparams=local_hparams)
        trainer.train()
    # Entry-point tail: prepare the log directory, build the graph, then
    # either train or synthesize depending on whether a pre-trained model
    # is configured.  `log_dir`, `hparams`, `dataset`, `build`, `Trainer`
    # and `Generator` come from outside this fragment.
    if not os.path.exists(log_dir):
        os.makedirs(log_dir)

    print("log_dir:" + str(log_dir))

    # An empty `Infer.pre_trained` path means there is no checkpoint to
    # load, so we are in training mode.
    is_training = hparams.Infer.pre_trained == ""

    data = dataset(hparams, is_training)
    x_channels, cond_channels = data.n_channels()

    # build graph
    built = build(x_channels, cond_channels, hparams, is_training)

    if is_training:
        # build trainer
        trainer = Trainer(**built, data=data, log_dir=log_dir, hparams=hparams)

        # train model
        trainer.train()
    else:
        # Synthesize a lot of data.
        generator = Generator(data, built['data_device'], log_dir, hparams)
        # Sampling temperature (passed as eps_std); defaults to 1 when the
        # config does not specify one.
        if "temperature" in hparams.Infer:
            temp = hparams.Infer.temperature
        else:
            temp = 1

        # We generate x times to get some different variations for each input
        for i in range(5):
            generator.generate_sample(built['graph'], eps_std=temp, counter=i)