import os
import pickle

from chainer import cuda, optimizers, serializers

import net  # repository-local model definitions


def main():
    args = parse_args()
    gen = net.Generator1()
    dis = net.Discriminator1()

    # Optional crop rectangle given as "x,y,width,height";
    # converted here to (left, top, right, bottom).
    clip_rect = None
    if args.clip_rect:
        x, y, w, h = map(int, args.clip_rect.split(','))
        clip_rect = (x, y, x + w, y + h)

    if args.gpu >= 0:
        device_id = args.gpu
        cuda.get_device(device_id).use()
        gen.to_gpu(device_id)
        dis.to_gpu(device_id)

    optimizer_gen = optimizers.Adam(alpha=0.001)
    optimizer_gen.setup(gen)
    optimizer_dis = optimizers.Adam(alpha=0.001)
    optimizer_dis.setup(dis)

    # Resume from previously saved model and optimizer states if requested.
    if args.input is not None:
        serializers.load_npz(args.input + '.gen.model', gen)
        serializers.load_npz(args.input + '.gen.state', optimizer_gen)
        serializers.load_npz(args.input + '.dis.model', dis)
        serializers.load_npz(args.input + '.dis.state', optimizer_dis)

    if args.out_image_dir is not None:
        if not os.path.exists(args.out_image_dir):
            try:
                os.mkdir(args.out_image_dir)
            except OSError:
                print('cannot make directory {}'.format(args.out_image_dir))
                exit()
        elif not os.path.isdir(args.out_image_dir):
            print('file path {} exists but is not a directory'.format(args.out_image_dir))
            exit()

    with open(args.dataset, 'rb') as f:
        images = pickle.load(f)

    train(gen, dis, optimizer_gen, optimizer_dis, images, args.epoch,
          batch_size=args.batch_size, margin=args.margin,
          save_epoch=args.save_epoch, lr_decay=args.lr_decay,
          output_path=args.output, out_image_dir=args.out_image_dir,
          clip_rect=clip_rect)
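# parse_args() and train() are defined elsewhere in the original script.
# The parser below is only a minimal sketch inferred from the attributes
# accessed in main(); the flag types and defaults are placeholders, not
# the repository's actual values.
import argparse


def parse_args():
    parser = argparse.ArgumentParser(description='Train the stage-1 GAN')
    parser.add_argument('--gpu', type=int, default=-1)
    parser.add_argument('--input', type=str, default=None)
    parser.add_argument('--output', type=str, default='model/stage1')
    parser.add_argument('--dataset', type=str, required=True)
    parser.add_argument('--clip_rect', type=str, default=None)
    parser.add_argument('--epoch', type=int, default=100)
    parser.add_argument('--batch_size', type=int, default=100)
    parser.add_argument('--margin', type=float, default=0.25)
    parser.add_argument('--lr_decay', type=int, default=100)
    parser.add_argument('--save_epoch', type=int, default=10)
    parser.add_argument('--out_image_dir', type=str, default=None)
    return parser.parse_args()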
import numpy as np
from PIL import Image

import chainer
from chainer import cuda

import net  # repository-local model definitions


def main():
    args = parse_args()
    gen1 = net.Generator1()
    gen2 = net.Generator2()
    chainer.serializers.load_npz(args.model_path1, gen1)
    chainer.serializers.load_npz(args.model_path2, gen2)

    device_id = None
    if args.gpu >= 0:
        device_id = args.gpu
        cuda.get_device(device_id).use()
        gen1.to_gpu(device_id)
        gen2.to_gpu(device_id)

    out_vector_path = None
    if (args.vector_file1 and args.vector_index1 >= 0
            and args.vector_file2 and args.vector_index2 >= 0):
        # Interpolate between two saved latent vectors in 10 steps.
        with open(args.vector_file1, 'rb') as f:
            z = np.load(f)
        z1 = z[args.vector_index1]
        with open(args.vector_file2, 'rb') as f:
            z = np.load(f)
        z2 = z[args.vector_index2]
        w = np.arange(10).astype(np.float32).reshape((-1, 1)) / 9
        z = (1 - w) * z1 + w * z2
        z = z / (np.linalg.norm(z, axis=1, keepdims=True) + 1e-12)
    else:
        # Otherwise sample random latent vectors and save them alongside the
        # images.  latent_size is defined elsewhere in the original script.
        z = np.random.normal(0, 1, (100, latent_size)).astype(np.float32)
        z = z / (np.linalg.norm(z, axis=1, keepdims=True) + 1e-12)
        out_vector_path = '{}.npy'.format(args.output)

    # Run both generator stages without building a computational graph.
    with chainer.no_backprop_mode():
        if device_id is None:
            x1 = gen1(z, train=False)
        else:
            x1 = gen1(cuda.to_gpu(z, device_id), train=False)
        x2 = gen2(x1, train=False)
    x1 = cuda.to_cpu(x1.data)
    x2 = cuda.to_cpu(x2.data)

    # Tile every 10 consecutive images into one row, rescale from [-1, 1]
    # to [0, 255], and save the stage-1 and stage-2 outputs as JPEGs.
    batch, ch, h, w = x1.shape
    x1 = x1.reshape((-1, 10, ch, h, w)).transpose((0, 3, 1, 4, 2)).reshape((-1, 10 * w, ch))
    x1 = ((x1 + 1) * 127.5).clip(0, 255).astype(np.uint8)
    Image.fromarray(x1).save('{}_stack1.jpg'.format(args.output))

    batch, ch, h, w = x2.shape
    x2 = x2.reshape((-1, 10, ch, h, w)).transpose((0, 3, 1, 4, 2)).reshape((-1, 10 * w, ch))
    x2 = ((x2 + 1) * 127.5).clip(0, 255).astype(np.uint8)
    Image.fromarray(x2).save('{}_stack2.jpg'.format(args.output))

    if out_vector_path:
        with open(out_vector_path, 'wb') as f:
            np.save(f, z)
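# A quick shape check of the tiling performed above, on a dummy batch of
# 100 images of size 3x64x64 (the sizes here are illustrative only, not
# necessarily the generators' actual output resolution):
import numpy as np

x = np.zeros((100, 3, 64, 64), dtype=np.float32)
batch, ch, h, w = x.shape
tiled = x.reshape((-1, 10, ch, h, w)).transpose((0, 3, 1, 4, 2)).reshape((-1, 10 * w, ch))
# Every 10 consecutive images form one row of width 10 * w; the rows are
# stacked vertically, so the result has shape (batch // 10 * h, 10 * w, ch).
assert tiled.shape == (100 // 10 * 64, 10 * 64, 3)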
import logging

import numpy as np
import torch
import torch.nn as nn
import torch.optim as optim
import torchvision.utils as tov
from torch import Tensor as tensor
from torch.autograd import Variable as Vb

import net              # repository-local model definitions
import load_data as ld  # NOTE: module name is a guess; `ld` is the repository's data-loading helper

# pdb.set_trace()

# Hyperparameters (this script targets an older PyTorch API:
# Variable and L1Loss(size_average=...)).
gpuid = 0
lr_rate = 0.0002
alpha = 0.0005
beta = 0.01 * alpha
num_iter = 500000
optim_betas = (0.9, 0.999)
bs = 64

# Class labels: 2 = fake, 1 = positive (with the attribute), 0 = negative (without).
labelfake = Vb(torch.from_numpy(np.full((bs), 2, dtype=int))).cuda(gpuid)
labelpo = Vb(torch.from_numpy(np.full((bs), 1, dtype=int))).cuda(gpuid)
labelne = Vb(torch.from_numpy(np.full((bs), 0, dtype=int))).cuda(gpuid)

logging.basicConfig(filename='log/residualgan_v10.log', level=logging.INFO)

G1 = net.Generator1().cuda(gpuid)
G2 = net.Generator2().cuda(gpuid)
D = net.Discriminator().cuda(gpuid)

d_optimizer = optim.Adam(D.parameters(), lr=lr_rate, betas=optim_betas)
g1_optimizer = optim.Adam(G1.parameters(), lr=lr_rate, betas=optim_betas)
g2_optimizer = optim.Adam(G2.parameters(), lr=lr_rate, betas=optim_betas)
l1_crit = nn.L1Loss(size_average=False)

datalistpo = ld.getlist('../Eyeglasses_Positive.txt')
datalistne = ld.getlist('../Eyeglasses_Negative.txt')
iternow1 = 0
iternow2 = 0

for iter1 in range(num_iter):
    D.zero_grad()
    datapo, iternow1 = ld.load_data('../img_align_celeba_crop/',
                                    '../Eyeglasses_Positive.txt', datalistpo,