import lpips # import models # from PerceptualSimilarity folder # ImDist = models.PerceptualLoss(model='net-lin', net='squeeze', use_gpu=1, gpu_ids=[0]) ImDist = lpips.LPIPS(net='squeeze', ) # model_vgg = models.PerceptualLoss(model='net-lin', net='vgg', use_gpu=1, gpu_ids=[0]) ImDist.cuda() for param in ImDist.parameters(): param.requires_grad_(False) #%% # BGAN = BigGAN.from_pretrained("biggan-deep-256") # for param in BGAN.parameters(): # param.requires_grad_(False) # embed_mat = BGAN.embeddings.parameters().__next__().data # BGAN.cuda() from GAN_utils import BigGAN_wrapper, loadBigGAN BGAN = loadBigGAN() G = BigGAN_wrapper(BGAN) #%% def LExpMap(refvect, tangvect, ticks=11, lims=(-1, 1)): refvect, tangvect = refvect.reshape(1, -1), tangvect.reshape(1, -1) steps = np.linspace(lims[0], lims[1], ticks)[:, np.newaxis] interp_vects = steps @ tangvect + refvect return interp_vects def SExpMap(refvect, tangvect, ticks=11, lims=(-1, 1)): refvect, tangvect = refvect.reshape(1, -1), tangvect.reshape(1, -1) steps = np.linspace(lims[0], lims[1], ticks)[:, np.newaxis] * np.pi / 2 interp_vects = steps @ tangvect + refvect
#%% ImDist = LPIPS(net="squeeze") datadir = r"E:\OneDrive - Washington University in St. Louis\HessNetArchit\BigGAN" def Hess_hook(module, fea_in, fea_out): print("hooker on %s" % module.__class__) ref_feat = fea_out.detach().clone() ref_feat.requires_grad_(False) L2dist = torch.pow(fea_out - ref_feat, 2).sum() L2dist_col.append(L2dist) return None #%% BGAN = loadBigGAN() SD = BGAN.state_dict() #%% shuffled_SD = {} for name, Weight in SD.items(): idx = torch.randperm(Weight.numel()) W_shuf = Weight.view(-1)[idx].view(Weight.shape) shuffled_SD[name] = W_shuf #%% torch.save(shuffled_SD, join(datadir, "BigGAN_shuffle.pt")) # print(name, Weight.shape, Weight.mean().item(), Weight.std().item()) #%% BGAN_sf = loadBigGAN() BGAN_sf.load_state_dict(torch.load(join(datadir, "BigGAN_shuffle.pt"))) G_sf = BigGAN_wrapper(BGAN_sf) #%%
# NOTE(review): this chunk starts mid-statement — the kwargs below belong to a
# `vis_eigen_action(...)` call whose opening is outside this view.
rown=5, transpose=False)
#%%
# Visualize the action of the 5th-from-last average eigenvector.
vis_eigen_action(eigvect_avg[:, -5], None, figdir=figdir, page_B=50, maxdist=20,
                 rown=5, transpose=False)
#%% BigGAN on ImageNet Class Specific
from GAN_utils import BigGAN_wrapper, loadBigGAN
from pytorch_pretrained_biggan import BigGAN
from torchvision.transforms import ToPILImage
BGAN = loadBigGAN("biggan-deep-256").cuda()
BG = BigGAN_wrapper(BGAN)
# Class-embedding matrix; columns are indexed by ImageNet class id below
# (0..999), i.e. presumably shape (embed_dim, 1000) — confirm against
# BG.BigGAN.embeddings.
EmbedMat = BG.BigGAN.embeddings.weight.cpu().numpy()
#%%
figdir = r"E:\OneDrive - Washington University in St. Louis\Hessian_summary\BigGAN"
Hessdir = r"E:\OneDrive - Washington University in St. Louis\Hessian_summary\BigGAN"
# Precomputed Hessian spectra averaged over 1000 ImageNet classes:
# eigenvalues plus eigenvectors for the full space and for the
# noise / class subspaces separately.
data = np.load(join(Hessdir, "H_avg_1000cls.npz"))
eva_BG = data['eigvals_avg']
evc_BG = data['eigvects_avg']
evc_nois = data['eigvects_nois_avg']
evc_clas = data['eigvects_clas_avg']
#%%
# Smoke test: render one image from a small random 256-d latent.
imgs = BG.render(np.random.randn(1, 256) * 0.06)
#%%
eigi = 5
# Stack the embeddings of 10 random classes as reference vectors.
refvecs = np.vstack((EmbedMat[:, np.random.randint(0, 1000, 10)],
# NOTE(review): chunk ends mid-statement — the remainder of this np.vstack
# is outside this view.