Code Example #1
    def __init__(self,
                 gpu=-1,
                 data_root='../Data/FMNIST',
                 save_dir='./classifiers'):
        # gpu=-1 requests the CPU. Note that CUDA_VISIBLE_DEVICES only takes
        # effect if it is set before CUDA is first initialized in this process.
        if gpu >= 0 and torch.cuda.is_available():
            os.environ['CUDA_VISIBLE_DEVICES'] = str(gpu)
            self.device = torch.device('cuda')
        else:
            self.device = torch.device('cpu')

        print('Device detected: {}'.format(self.device))
        self.model = Classifier(num_of_class=10).to(self.device)
        self.data_root = data_root

        self.transform = transforms.Compose([transforms.ToTensor()])

        self.save_dir = lib.create_dir(save_dir)
        self.ckpt_file = os.path.join(self.save_dir, 'fmnist_clf.pth')
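A note on the device logic above: since CUDA_VISIBLE_DEVICES is ignored once CUDA has been initialized, a common alternative is to address the GPU directly with a 'cuda:N' device string. A minimal sketch (select_device is a hypothetical helper, not part of this project):

import torch

def select_device(gpu=-1):
    # Pick a specific GPU by index, or fall back to CPU.
    if gpu >= 0 and torch.cuda.is_available():
        return torch.device('cuda:{}'.format(gpu))
    return torch.device('cpu')

device = select_device(0)  # first GPU if present; otherwise CPU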
Code Example #2
File: bigan_model.py Project: sailfish009/imp_bigan
    def plot_generation(self, base_dir, scaler, n_col=16, n_row=8, n_bs=3):
        save_dir = create_dir(os.path.join(base_dir, 'gener'))

        x_gener_norm, z = self.sample(512)
        x_gener = data_fn.denorm(x_gener_norm, scaler)

        z_recons, x_recons_norm = self.recons(x_gener_norm)
        x_recons = data_fn.denorm(x_recons_norm, scaler)

        h, w = x_gener.shape[1:3]
        n_imgs = n_col * n_row
        # Save n_bs grids of n_row x n_col generated images.
        for i in range(n_bs):
            x = x_gener[i * n_imgs:(i + 1) * n_imgs]
            f, _ = pvi.plot_image(pvi.merge_images(x, n_row, n_col, direction=0, dtype=int), title='')

            pva.save_fig(f, os.path.join(save_dir, 'x_gener_{}'.format(i)))

        # Statistics of the re-encoded latents are loop-invariant, so compute them once.
        ze_mean = np.mean(z_recons)
        ze_var = np.var(z_recons)
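pvi.merge_images and pvi.plot_image are project utilities. A minimal sketch of what a grid merge like this typically does, assuming NHWC image batches and row-major tiling (merge_images_sketch is illustrative, not the project's actual function):

import numpy as np

def merge_images_sketch(images, n_row, n_col):
    # Tile a batch of (N, H, W, C) images into one (n_row*H, n_col*W, C) grid.
    n, h, w, c = images.shape
    grid = np.zeros((n_row * h, n_col * w, c), dtype=images.dtype)
    for idx in range(min(n, n_row * n_col)):
        r, col = divmod(idx, n_col)
        grid[r * h:(r + 1) * h, col * w:(col + 1) * w] = images[idx]
    return grid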
Code Example #3
File: bigan_model.py Project: sailfish009/imp_bigan
    def plot_loglikelihod(self, base_dir, scaler, x_norm, T=40):
        """

        :param base_dir: i.e. experiments/fm/6/trbiganp_256z_bces_fm_60r_0n_08p16d/test/
        :param scaler:
        :param x_norm:
        :return:
        """
        save_dir = create_dir(os.path.join(base_dir, 'loglikelihood', str(T), 'images'))
        n_imgs, h, w, c = x_norm.shape
        try:
            event_acc = utb.load_event_accumulator(os.path.join(base_dir, 'loglikelihood', str(T)))
            ll_list, sigma_list = utb.load_loglikelihood(event_acc)
        except Exception:
            # Event files are missing or unreadable; nothing to plot.
            print('No loglikelihood data at: {}'.format(os.path.join(base_dir, 'loglikelihood', str(T))))
            return

        # Keep only the images that have a recorded log-likelihood.
        x_norm = x_norm[:len(ll_list)]
        z_list, x_recons_norm = self.recons(x_norm)
        x_recons = data_fn.denorm(x_recons_norm, scaler)
        x = data_fn.denorm(x_norm, scaler)

        psnr = loss.PSNR(x, x_recons, axis=(1, 2, 3))

        pvg.hist_plot(save_dir, ll_list, density=False, name='ll', alpha=0.8, xlabel=r'$\log_{10} p(G(E(x)))$',
                      fontsize=32, ylabel=r'$\#$ of images', close='all')

        pvg.scater_plot_with_images(save_dir, x_data=psnr, y_data=ll_list, images=x, name='psnr_ll', alpha=0.8,
                                    xlabel=r'PSNR', ylabel=r'$\log_{10} p(G(E(x)))$')

        model = LinearRegression()

        psnr = np.array(psnr).reshape((-1, 1))
        ll_list = np.array(ll_list)

        model.fit(psnr, ll_list)

        r_sq = model.score(psnr, ll_list)
        print('coefficient of determination: {:.2f}%'.format(r_sq * 100))
        print('intercept:', model.intercept_)
        print('slope:', model.coef_)
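loss.PSNR is project code; the standard peak signal-to-noise ratio it presumably computes per image is 10 * log10(MAX^2 / MSE). A minimal sketch assuming 8-bit images (psnr_sketch is illustrative, not the project's exact function):

import numpy as np

def psnr_sketch(x, x_recons, max_val=255.0, axis=(1, 2, 3)):
    # Per-image PSNR in dB: 10 * log10(max_val^2 / MSE).
    diff = np.asarray(x, dtype=np.float64) - np.asarray(x_recons, dtype=np.float64)
    mse = np.mean(diff ** 2, axis=axis)
    return 10.0 * np.log10(max_val ** 2 / mse)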
Code Example #4
File: bigan_model.py Project: sailfish009/imp_bigan
    def plot_reconstruction_fig(self, base_dir, scaler, x_norm, im_idx=[1, 2, 3, 4, 5]):
        # x_norm is channel-last (NHWC), as in the other plotting methods.
        n_imgs, h, w, c = x_norm.shape
        z_list, x_recons_norm = self.recons(x_norm)
        x_recons = data_fn.denorm(x_recons_norm, scaler)
        x = data_fn.denorm(x_norm, scaler)

        z_norm = np.linalg.norm(z_list, axis=-1)
        z_norm_prior = np.linalg.norm(np.random.normal(0, 1, [n_imgs, self.z_dim]), axis=1)

        psnr = loss.PSNR(x, x_recons, axis=(1, 2, 3))
        idx_sorted = np.argsort(psnr)  # ascending: worst reconstructions first

        x_sorted = x[idx_sorted]

        # Save images
        save_dir = create_dir(os.path.join(base_dir, 'recons', 'images'))

        pvg.mean_var_plot(save_dir, z_list, name='z', xlabel=r'Dimension', ylabel='')
        pvg.var_plot(save_dir, z_list, name='z', xlabel=r'Dimension')

        binwidth = (np.max(z_norm_prior) - np.min(z_norm_prior)) / 100
        x_min = min(np.min(z_norm_prior), np.min(z_norm)) - 1

        x_max = max(np.percentile(z_norm, q=90), np.max(z_norm_prior)) + 1
        pvg.multi_hist_plot(save_dir, [z_norm_prior, z_norm], [r'$||z||$', r'$||z_e||$'], name='ze', xlabel=r'Norm',
                            density=False, alpha=0.7, fontsize=38, binwidth=binwidth, x_lim=[x_min, x_max])
        mean = np.mean(z_list, axis=0)
        var = np.var(z_list, axis=0)

        pvg.multi_hist_plot(save_dir, [mean, var], [r'$\mu$', r'$\sigma^2$'], name='z_mean_var', xlabel=r'value',
                            ylabel=r'$\#$ of dimensions',
                            density=False, alpha=0.7, fontsize=32, color_list=['black', 'green'])

        save_dir = create_dir(os.path.join(base_dir, 'recons', 'images', 'batch'))

        # Plot the first five batches of 64 images as 8x8 grids.
        n_imgs = 64
        for i in range(5):
            x_i = x_recons[i * n_imgs:(i + 1) * n_imgs]
            f, _ = pvi.plot_image(pvi.merge_images(x_i, 8, 8, direction=0, dtype=int), title='')

            pva.save_fig(f, os.path.join(save_dir, 'x_{}_recons'.format(i)))

            x_i = x[i * n_imgs:(i + 1) * n_imgs]
            f, _ = pvi.plot_image(pvi.merge_images(x_i, 8, 8, direction=0, dtype=int), title='')

            pva.save_fig(f, os.path.join(save_dir, 'x_{}_real'.format(i)))

        # Selected originals followed by their reconstructions, merged into a two-row figure.
        x_i = np.zeros((2 * len(im_idx), h, w, c))
        half = len(im_idx)
        x_i[:half] = x[im_idx]
        x_i[half:] = x_recons[im_idx]

        f, _ = pvi.plot_image(pvi.merge_images(x_i, 2, half, direction=1, dtype=int), title='')

        pva.save_fig(f, os.path.join(save_dir, 'x_rec_real'))
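The z_norm versus z_norm_prior comparison above relies on a standard fact: for z ~ N(0, I_d), the norm ||z|| follows a chi distribution concentrated near sqrt(d). A quick self-contained check (d=256 is an assumption based on the '256z' experiment name):

import numpy as np

d = 256  # assumed latent dimensionality
z_prior = np.random.normal(0, 1, size=(10000, d))
norms = np.linalg.norm(z_prior, axis=1)
print(norms.mean(), np.sqrt(d))  # both close to 16 when d = 256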
Code Example #5
File: bigan_model.py Project: sailfish009/imp_bigan
    def plot_reconstruction(self, base_dir, scaler, x_norm):
        # Wipe any previous TensorBoard event files; SummaryWriter recreates the directory.
        save_dir = create_dir(os.path.join(base_dir, 'recons', 'tb'))
        shutil.rmtree(save_dir)
        writer = SummaryWriter(log_dir=save_dir)

        n_imgs, h, w, c = x_norm.shape
        z_list, x_recons_norm = self.recons(x_norm)
        x_recons = data_fn.denorm(x_recons_norm, scaler)
        x = data_fn.denorm(x_norm, scaler)

        writer.add_embedding(mat=z_list, label_img=torch.from_numpy(x.transpose((0, 3, 1, 2)) / 255.),
                             tag='latent_space')
        ze_mean = np.mean(z_list, axis=0)
        ze_var = np.var(z_list, axis=0)

        z_norm = np.linalg.norm(z_list, axis=-1)
        z_norm_prior = np.linalg.norm(np.random.normal(0, 1, [n_imgs, self.z_dim]), axis=1)

        writer.add_histogram(tag='latent_space/norm', values=z_norm, global_step=1)
        writer.add_histogram(tag='latent_space/norm', values=z_norm_prior, global_step=2)
        # Log per-dimension latent mean/variance as curves over the dimension index.
        for i in range(self.z_dim):
            writer.add_scalar(tag='latent_space/mean', scalar_value=ze_mean[i], global_step=i)
            writer.add_scalar(tag='latent_space/var', scalar_value=ze_var[i], global_step=i)

        # Write reconstructions and originals as 8x8 image grids, one batch per step.
        n_bs = n_imgs // 64
        for i in range(n_bs):
            x_i = tbu.images_to_tensor_grid(x_recons[i * 64: (i + 1) * 64])

            writer.add_image('x/reconstruction', x_i, i, dataformats='CHW')

            x_i = tbu.images_to_tensor_grid(x[i * 64: (i + 1) * 64])

            writer.add_image('x/original', x_i, i, dataformats='CHW')

        psnr = loss.PSNR(x, x_recons, axis=(1, 2, 3))
        writer.add_histogram(tag='psnr/histogram', values=psnr)
        idx_sorted = np.argsort(psnr)  # ascending: worst reconstructions first

        for i in range(len(psnr)):
            writer.add_scalar(tag='psnr/original', scalar_value=psnr[i], global_step=i)
            writer.add_scalar(tag='psnr/idx', scalar_value=idx_sorted[i], global_step=i)
            writer.add_scalar(tag='psnr/sorted', scalar_value=psnr[idx_sorted[i]], global_step=i)

        x_sorted = x[idx_sorted]

        for i in range(n_bs):
            x_i = tbu.images_to_tensor_grid(x_sorted[i * 64: (i + 1) * 64])

            writer.add_image('x/psnr_sorted', x_i, i, dataformats='CHW')

        # Flush and close the TensorBoard writer before producing static figures.
        writer.close()

        # Save images
        save_dir = create_dir(os.path.join(base_dir, 'recons', 'images'))

        pvg.mean_var_plot(save_dir, z_list, name='z', xlabel=r'Dimension', ylabel='')
        pvg.multi_hist_plot(save_dir, [z_norm_prior, z_norm], [r'$||z||$', r'$||z_e||$'], name='ze', xlabel=r'Norm',
                            density=True, alpha=0.7, fontsize=32)
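For context, the writer calls above follow the standard torch.utils.tensorboard API. A minimal self-contained example (the './runs/demo' log directory is hypothetical):

import numpy as np
from torch.utils.tensorboard import SummaryWriter

writer = SummaryWriter(log_dir='./runs/demo')
for step in range(10):
    writer.add_scalar('demo/value', float(step) ** 0.5, global_step=step)
writer.add_histogram('demo/gaussian', np.random.normal(0, 1, 1000), global_step=0)
# Images default to CHW; pass dataformats='HWC' for channel-last arrays.
writer.add_image('demo/noise', np.random.rand(3, 32, 32), 0, dataformats='CHW')
writer.close()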
Code Example #6
File: trainer.py Project: sailfish009/imp_bigan
    def init_folders(self):
        create_dir(self.save_dir)
        create_dir(self.samples_dir)
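create_dir is a project helper used throughout these examples. Since the callers rely on its return value (e.g. save_dir = create_dir(...)), a plausible implementation is the following sketch (an assumption, not the project's exact code):

import os

def create_dir(path):
    # Create the directory (and any parents) if missing, then return the path.
    os.makedirs(path, exist_ok=True)
    return path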