def from_pretrained(cls, name="vgg_lpips"):
    """Build a model instance and load pretrained LPIPS weights.

    Args:
        name: Checkpoint identifier; only "vgg_lpips" is supported.

    Returns:
        The constructed model with the checkpoint loaded (non-strict).

    Raises:
        NotImplementedError: If ``name`` is not "vgg_lpips".
    """
    # Bug fix: original used `is not` to compare strings, which tests
    # object identity (and only works by accident of interning) — use `!=`.
    if name != "vgg_lpips":
        raise NotImplementedError(name)
    model = cls()
    ckpt = get_ckpt_path(name)
    # strict=False: the checkpoint intentionally omits some module keys.
    model.load_state_dict(
        torch.load(ckpt, map_location=torch.device("cpu")), strict=False
    )
    return model
def load_variable_latsize_generator(size, z_dim, n_class=1000, pretrained=True,
                                    use_actnorm=False):
    """Build a BigGAN generator with a variable-size latent dimension.

    Args:
        size: Output image resolution; only 128 is currently supported.
        z_dim: Desired latent dimensionality after the linear-layer swap.
        n_class: Number of classes; must be 1000 when ``pretrained`` is True.
        pretrained: Load pretrained BigGAN weights before swapping the layer.
        use_actnorm: Forwarded to the generator constructor.

    Returns:
        The generator with a fresh trainable input layer sized for ``z_dim``.

    Raises:
        NotImplementedError: If ``size`` has no registered generator.
        ValueError: If ``pretrained`` is True but ``n_class`` != 1000.
    """
    generators = {128: VariableDimGenerator128}
    # Explicit check instead of a bare KeyError on an unsupported size.
    if size not in generators:
        raise NotImplementedError(
            "no variable-latent generator registered for size {}".format(size))
    G = generators[size](z_dim, use_actnorm=use_actnorm, n_class=n_class)
    if pretrained:
        # Was `assert n_class == 1000` — asserts are stripped under -O,
        # so validate with an explicit exception instead.
        if n_class != 1000:
            raise ValueError(
                "pretrained BigGAN weights require n_class == 1000, "
                "got {}".format(n_class))
        ckpt = get_ckpt_path("biggan_{}".format(size))
        sd = torch.load(ckpt)
        G.load_state_dict(sd)
    # Portion of z consumed by the hierarchical splits at each resolution.
    split_sizes = {128: 5 * 20, 256: 6 * 20}
    # Add a new trainable input layer to adapt to the variable z_dim size.
    G = update_G_linear(G, z_dim - split_sizes[size])
    return G
def from_pretrained(cls, name):
    """Build the dequantization VAE and load its pretrained weights.

    Args:
        name: Checkpoint identifier; only "dequant_biggan" is supported.

    Returns:
        The model in eval mode with the checkpoint loaded.

    Raises:
        NotImplementedError: If ``name`` is not "dequant_biggan".
    """
    # Bug fix: original used `is not` to compare strings, which tests
    # object identity rather than equality — use `!=`.
    if name != "dequant_biggan":
        raise NotImplementedError(name)
    config_dir = {
        "dequant_biggan": {
            "Model": {
                "in_channels": 128,
                "n_down": 2,
                "mid_channels": 4096,
                "z_dim": 128,
            }
        }
    }
    ckpt_dict = {"dequant_biggan": "dequant_vae"}
    model = cls(config_dir[name])
    ckpt = get_ckpt_path(ckpt_dict[name])
    model.load_state_dict(torch.load(ckpt, map_location=torch.device("cpu")))
    model.eval()
    return model
def from_pretrained(cls, name):
    """Build a BigAE model from a named pretrained configuration.

    Args:
        name: One of "animals" or "animalfaces".

    Returns:
        The model in eval mode with the matching checkpoint loaded.

    Raises:
        NotImplementedError: If ``name`` has no registered configuration.
    """
    config_dict = {
        "animals": {
            "Model": {
                "deterministic": False,
                "in_size": 128,
                "norm": "an",
                "pretrained": False,
                "type": "resnet101",
                "use_actnorm_in_dec": True,
                "z_dim": 128,
            }
        },
        "animalfaces": {
            "Model": {
                "deterministic": False,
                "in_size": 128,
                "norm": "bn",
                "pretrained": False,
                "type": "resnet101",
                "use_actnorm_in_dec": False,
                "z_dim": 128,
            }
        },
    }
    ckpt_dict = {
        "animals": "bigae_animals",
        "animalfaces": "bigae_animalfaces",
    }
    # Idiom fix: `name not in` instead of `not name in`.
    if name not in config_dict:
        raise NotImplementedError(name)
    model = cls(config_dict[name])
    ckpt = get_ckpt_path(ckpt_dict[name])
    model.load_state_dict(
        torch.load(ckpt, map_location=torch.device("cpu")))
    model.eval()
    return model
def from_pretrained(cls):
    """Return a generator initialized from the pretrained BigGAN-256 checkpoint."""
    generator = cls()
    checkpoint_path = get_ckpt_path("biggan_256")
    state = torch.load(checkpoint_path)
    generator.load_state_dict(state)
    generator.eval()
    return generator
def load_from_pretrained(self, name="vgg_lpips"):
    """Load pretrained LPIPS weights into this instance (non-strict)."""
    checkpoint = get_ckpt_path(name)
    state = torch.load(checkpoint, map_location=torch.device("cpu"))
    self.load_state_dict(state, strict=False)
    print("loaded pretrained LPIPS loss from {}".format(checkpoint))