Example #1
                ans.append(path)
    # Write the collected sample paths out as the temporary split file.
    f_w = open(target_path, 'w')
    f_w.writelines(ans)
    f_w.close()


opt = TrainOptions().parse()
opt.phase = 'val'            # select the validation split as the source
write_temp(opt, "temp")      # dump the chosen sample paths into a "temp" split
opt.phase = "temp"           # then load data from that temporary split
opt.serial_batches = True    # keep samples in a fixed, non-shuffled order

data_loader = CreatePoseConDataLoader(opt)
dataset = data_loader.load_data()
dataset_size = len(data_loader)   # number of samples in the dataset

visualizer = Visualizer(opt)

total_steps = 0  # (start_epoch-1) * dataset_size + epoch_iter

display_delta = total_steps % opt.display_freq
print_delta = total_steps % opt.print_freq
save_delta = total_steps % opt.save_latest_freq

for i, data in enumerate(dataset):
    if i % 100 == 0:  # progress report every 100 samples
        print((i, dataset_size))
    visuals = OrderedDict([('input_label',
                            util.tensor2label(data['A'][0][3:6, :, :], 0)),
                           ('real_image', util.tensor2im(data['A2'][0]))])
    visualizer.display_current_results2(visuals, 0, i)
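
The visuals dict above is built with util.tensor2im / util.tensor2label, which turn network tensors into displayable images before display_current_results2 is called. Below is a minimal sketch of what such a conversion helper typically does, assuming a 3xHxW tensor scaled to [-1, 1]; the name tensor2im_sketch and that input range are assumptions, not the project's actual implementation.

import numpy as np
import torch

def tensor2im_sketch(image_tensor, imtype=np.uint8):
    # Drop gradients, move to CPU, and map a CHW float tensor in [-1, 1]
    # to an HWC uint8 array suitable for display or image writers.
    image_numpy = image_tensor.detach().cpu().float().numpy()
    image_numpy = (np.transpose(image_numpy, (1, 2, 0)) + 1) / 2.0 * 255.0
    return np.clip(image_numpy, 0, 255).astype(imtype)

# Usage sketch: a random stand-in for data['A2'][0].
fake = torch.rand(3, 64, 64) * 2 - 1
print(tensor2im_sketch(fake).shape)  # (64, 64, 3)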
Example #2
            # Disabled validation-image logging:
            # train_writer.add_image('val/real_image', util.tensor2im2(data_test['A2'][0]), total_steps)
            # train_writer.add_image('val/synthesized_image', util.tensor2im2(generated_test.data[0]), total_steps)
            # train_writer.add_image('val/B', util.tensor2im2(data_test['B'][0], normalize=False), total_steps)
            # train_writer.add_image('val/B2', util.tensor2im2(data_test['B2'][0]), total_steps)

        ### display output images
        if save_fake:
            visuals = OrderedDict([
                ('input_label', util.tensor2im(data['A'][0])),
                ('real_image', util.tensor2im(data['A2'][0])),
                ('synthesized_image', util.tensor2im(generated.data[0])),
                ('B', util.tensor2im(data['B'][0])),
                ('B2', util.tensor2im(data['B2'][0]))
            ])
            visualizer.display_current_results2(visuals, epoch, total_steps)

            train_writer.add_image('train/input_label',
                                   util.tensor2im2(data['A'][0], normalize=False),
                                   total_steps)
            train_writer.add_image('train/real_image',
                                   util.tensor2im2(data['A2'][0]), total_steps)
            train_writer.add_image('train/synthesized_image',
                                   util.tensor2im2(generated.data[0]), total_steps)
            train_writer.add_image('train/B',
                                   util.tensor2im2(data['B'][0], normalize=False),
                                   total_steps)
            train_writer.add_image('train/B2',
                                   util.tensor2im2(data['B2'][0]), total_steps)
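
This example pushes the same images both to the custom visualizer and to a TensorBoard SummaryWriter (train_writer), which the excerpt creates somewhere above the quoted lines. A minimal sketch of that logging path, assuming torch.utils.tensorboard and an HWC uint8 image of the kind a tensor2im-style helper returns; the log directory and the placeholder image are made up.

import numpy as np
from torch.utils.tensorboard import SummaryWriter

writer = SummaryWriter(log_dir='runs/pose_example')   # hypothetical log directory
step = 0
image_hwc = np.zeros((128, 128, 3), dtype=np.uint8)   # placeholder HWC uint8 image

# add_image expects CHW tensors by default; HWC arrays must say so explicitly.
writer.add_image('train/real_image', image_hwc, step, dataformats='HWC')
writer.close()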
Example #3
                t_data = iter_start_time - iter_data_time
            visualizer.reset()
            total_iters += opt.batch_size
            epoch_iter += opt.batch_size
            #model.initialize(opt)         # initialize model
            model.set_input(data)        # unpack data from dataset and apply preprocessing
            model.optimize_parameters()  # calculate loss functions, get gradients, update network weights

            if total_iters % opt.display_freq == 0:  # display images on visdom and save images to a HTML file
                save_result = total_iters % opt.update_html_freq == 0
                model.compute_visuals()
                visualizer.display_current_results(model.get_current_visuals(),
                                                   epoch, save_result)
                visualizer.display_current_results2(model.get_current_visuals2(),
                                                    epoch, save_result)

            if total_iters % opt.print_freq == 0:  # print training losses and save logging information to the disk
                losses = model.get_current_losses()
                t_comp = (time.time() - iter_start_time) / opt.batch_size
                visualizer.print_current_losses(epoch, epoch_iter, losses,
                                                t_comp, t_data)
                if opt.display_id > 0:
                    visualizer.plot_current_losses(epoch,
                                                   float(epoch_iter) / dataset_size,
                                                   losses)

            if total_iters % opt.save_latest_freq == 0:  # cache our latest model every <save_latest_freq> iterations
                print('saving the latest model (epoch %d, total_iters %d)' %
                      (epoch, total_iters))
                save_suffix = 'iter_%d' % total_iters if opt.save_by_iter else 'latest'
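
The excerpt stops just before the checkpoint itself is written. As a standalone sketch of the frequency-gated bookkeeping pattern this loop uses, with all names and frequencies below being placeholders rather than the project's actual options:

# Minimal sketch: gate display/print/save work on a running iteration counter.
batch_size, display_freq, print_freq, save_latest_freq = 4, 400, 100, 1000
total_iters = 0

for step in range(2500):                  # stand-in for the epoch/data loop
    total_iters += batch_size
    if total_iters % display_freq == 0:
        pass                              # refresh visuals here
    if total_iters % print_freq == 0:
        pass                              # print/plot current losses here
    if total_iters % save_latest_freq == 0:
        suffix = 'iter_%d' % total_iters  # or 'latest', mirroring a save_by_iter-style switch
        print('would save checkpoint with suffix:', suffix)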