def run(config):
  """Compute Inception statistics for the training data of config['dataset'].

  Accumulates pool features and softmax logits over the whole loader, reports
  the Inception Score, then saves the FID moments (mu, sigma) to
  '<dataset>_inception_moments.npz' in the current directory.

  Args:
    config: dict with at least 'dataset' and 'parallel' keys; forwarded to
      utils.get_data_loaders, so it must also satisfy that function's contract.
  """
  # Keep the partial final batch so every sample contributes to the moments.
  config['drop_last'] = False
  loaders = utils.get_data_loaders(**config)

  # Load inception net (optionally wrapped in DataParallel).
  net = inception_utils.load_inception_net(parallel=config['parallel'])
  pool, logits, labels = [], [], []
  device = 'cuda'
  for i, (x, y) in enumerate(tqdm(loaders[0])):
    x = x.to(device)
    with torch.no_grad():
      pool_val, logits_val = net(x)
      pool += [np.asarray(pool_val.cpu())]
      logits += [np.asarray(F.softmax(logits_val, 1).cpu())]
      labels += [np.asarray(y.cpu())]

  pool, logits, labels = [np.concatenate(item, 0) for item in [pool, logits, labels]]
  # Calculate inception metrics and report them.
  print('Calculating inception metrics...')
  IS_mean, IS_std = inception_utils.calculate_inception_score(logits)
  print('Training data from dataset %s has IS of %5.5f +/- %5.5f' % (config['dataset'], IS_mean, IS_std))
  print('Calculating means and covariances...')
  mu, sigma = np.mean(pool, axis=0), np.cov(pool, rowvar=False)
  print('Saving calculated means and covariances to disk...')
  # BUG FIX: str.strip('_hdf5') removes any of the characters {_, h, d, f, 5}
  # from BOTH ends of the name (e.g. 'hdf5'.strip('_hdf5') == ''), not the
  # literal '_hdf5' suffix. Remove the suffix explicitly instead.
  dataset_name = config['dataset']
  if dataset_name.endswith('_hdf5'):
    dataset_name = dataset_name[:-len('_hdf5')]
  np.savez(dataset_name + '_inception_moments.npz', **{'mu': mu, 'sigma': sigma})
def run(config):
    """Compute Inception moments (mu, sigma) for config['dataset'].

    Accumulates pool features and softmax logits over the loader, logs the
    Inception Score via the 'tl' logger, and saves the moments to the path
    given by global_cfg.saved_inception_moments (formatted with the dataset
    name), creating the parent directory if needed.

    Args:
        config: dict with at least 'dataset' and 'parallel' keys; forwarded
            to utils.get_data_loaders.
    """
    logger = logging.getLogger('tl')

    # Output path template is filled with the dataset name.
    saved_inception_moments = global_cfg.saved_inception_moments.format(
        config['dataset'])

    # Keep the partial final batch so every sample contributes.
    config['drop_last'] = False
    loaders = utils.get_data_loaders(use_data_root=True, **config)

    # Load inception net; eval() freezes batch-norm / dropout behaviour.
    net = inception_utils.load_inception_net(parallel=config['parallel'])
    net.eval()
    pool, logits, labels = [], [], []
    device = 'cuda'
    pbar = tqdm(loaders[0], desc='accumulate pool and logits')
    for i, (x, y) in enumerate(pbar):
        x = x.to(device)
        with torch.no_grad():
            pool_val, logits_val = net(x)
            pool += [np.asarray(pool_val.cpu())]
            logits += [np.asarray(F.softmax(logits_val, 1).cpu())]
            labels += [np.asarray(y.cpu())]

    pool, logits, labels = [
        np.concatenate(item, 0) for item in [pool, logits, labels]
    ]
    # Calculate inception metrics and report them.
    logger.info('Calculating inception metrics...')
    IS_mean, IS_std = inception_utils.calculate_inception_score(logits)
    # Lazy %-style args: formatting is deferred to the logging framework.
    logger.info('Training data from dataset %s has IS of %5.5f +/- %5.5f',
                config['dataset'], IS_mean, IS_std)
    logger.info('Calculating means and covariances...')
    mu, sigma = np.mean(pool, axis=0), np.cov(pool, rowvar=False)
    logger.info('Saving calculated means and covariances to disk...')

    logger.info('Save to %s', saved_inception_moments)
    os.makedirs(os.path.dirname(saved_inception_moments), exist_ok=True)
    np.savez(saved_inception_moments, **{'mu': mu, 'sigma': sigma})
# --- Beispiel #3 (Example #3) ---
def run(config):
    """Compute Inception moments for FFHQ, coco-animals, or a generic dataset.

    Builds a dataset-specific loader, accumulates up to ~50k samples of pool
    features and softmax logits (two passes at most, since the FFHQ/coco
    loaders drop the last batch), reports the Inception Score, and saves the
    FID moments to '<dataset>_inception_moments.npz'.

    Args:
        config: dict with at least 'dataset', 'batch_size' and 'parallel'
            keys; forwarded to utils.get_data_loaders for generic datasets.
    """
    # Get loader
    config['drop_last'] = False

    if config["dataset"] == "FFHQ":
        imsize = 256

        root = os.path.join(os.environ["SSD"], "images256x256")
        root_perm = os.path.join(os.environ["SSD"], "images256x256")

        transform = transforms.Compose([
            # BUG FIX: transforms.Scale was deprecated and later removed from
            # torchvision; Resize is the drop-in replacement (the coco branch
            # below already uses Resize).
            transforms.Resize(imsize),
            transforms.CenterCrop(imsize),
            transforms.RandomHorizontalFlip(),
            transforms.ToTensor(),
            transforms.Normalize([0.5, 0.5, 0.5], [0.5, 0.5, 0.5]),
        ])

        batch_size = 100  # intentionally fixed, not config['batch_size']
        dataset = FFHQ(root=root, transform=transform, test_mode=False)
        data_loader = DataLoader(dataset,
                                 batch_size,
                                 shuffle=True,
                                 drop_last=True)
        loaders = [data_loader]

    elif config["dataset"] == "coco":

        imsize = 128
        batch_size = config['batch_size']

        transform = transforms.Compose([
            transforms.Resize(imsize),
            transforms.CenterCrop(imsize),
            # Augmentation (flip / jitter) deliberately disabled for moments.
            transforms.ToTensor(),
            transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5)),
        ])

        classes = [
            'bird', 'cat', 'dog', 'horse', 'sheep', 'cow', 'elephant',
            'monkey', 'zebra', 'giraffe'
        ]

        root = None
        root_perm = None
        dataset = CocoAnimals(root=root,
                              batch_size=batch_size,
                              classes=classes,
                              transform=transform,
                              masks=False,
                              return_all=True,
                              test_mode=False,
                              imsize=imsize)
        data_loader = DataLoader(dataset,
                                 batch_size,
                                 drop_last=True,
                                 num_workers=1,
                                 shuffle=True)
        loaders = [data_loader]

    else:
        loaders = utils.get_data_loaders(**config)

    # Load inception net
    net = inception_utils.load_inception_net(parallel=config['parallel'])
    pool, logits, labels = [], [], []
    device = 'cuda'
    used_samples = 0
    reached_cap = False
    for e in range(2):
        if reached_cap:
            # BUG FIX: the original `break` only exited the inner loop, so
            # after reaching the cap the second epoch still processed one
            # extra batch. Stop the outer loop too.
            break
        for i, batch_data in enumerate(tqdm(loaders[0])):

            x = batch_data[0]
            y = batch_data[1]
            x = x.to(device)
            with torch.no_grad():
                pool_val, logits_val = net(x)
                pool += [np.asarray(pool_val.cpu())]
                logits += [np.asarray(F.softmax(logits_val, 1).cpu())]
                labels += [np.asarray(y.cpu())]

            used_samples += x.size(0)
            if used_samples >= 50000:
                reached_cap = True
                break

    pool, logits, labels = [
        np.concatenate(item, 0) for item in [pool, logits, labels]
    ]
    # Calculate inception metrics and report them.
    print('Calculating inception metrics...')
    IS_mean, IS_std = inception_utils.calculate_inception_score(logits)
    print('Training data from dataset %s has IS of %5.5f +/- %5.5f' %
          (config['dataset'], IS_mean, IS_std))
    print('Calculating means and covariances...')
    print(pool.shape)
    mu, sigma = np.mean(pool, axis=0), np.cov(pool, rowvar=False)
    print('Saving calculated means and covariances to disk...')
    # BUG FIX: str.strip('_hdf5') strips the character set {_, h, d, f, 5}
    # from both ends, not the '_hdf5' suffix. Remove the suffix explicitly.
    dataset_name = config['dataset']
    if dataset_name.endswith('_hdf5'):
        dataset_name = dataset_name[:-len('_hdf5')]
    np.savez(dataset_name + '_inception_moments.npz', **{
        'mu': mu,
        'sigma': sigma
    })
# --- Beispiel #4 (Example #4) ---
def run(config):
    """Compute per-class Inception moments over a class-sorted loader.

    Walks the loader once, and each time the next class label (init + 1)
    first appears in a batch, finalizes and saves the moments accumulated for
    the previous class to '../inception_moment/<k>_<dataset>_inception_moments.npz',
    then starts accumulating the new class from the same batch.

    NOTE(review): this assumes the loader yields samples sorted by class
    label and that at most one class transition occurs per batch — confirm
    against the loader configuration. The statistics printed per class are
    an Inception Score over that single class's logits.

    Args:
        config: dict with at least 'dataset' and 'parallel' keys; forwarded
            to utils.get_data_loaders.
    """
    # Get loader; keep the partial final batch.
    config['drop_last'] = False
    loaders = utils.get_data_loaders(**config)

    # Load inception net
    net = inception_utils.load_inception_net(parallel=config['parallel'])
    pool, logits, labels = [], [], []
    device = 'cuda'
    # BUG FIX (used in both savez calls below): str.strip('_hdf5') strips the
    # character set {_, h, d, f, 5} from both ends, not the '_hdf5' suffix.
    dataset_name = config['dataset']
    if dataset_name.endswith('_hdf5'):
        dataset_name = dataset_name[:-len('_hdf5')]
    # 'init' tracks the class currently being accumulated.
    init = 0
    for i, (x, y) in enumerate(tqdm(loaders[0])):
        x = x.to(device)
        with torch.no_grad():
            if torch.sum(y == (init + 1)) > 0:
                # Class boundary found inside this batch: finish class
                # (init - 1 after the increment) and start the next one.
                init += 1
                print(y[0], init)
                pool_val, logits_val = net(x)
                # Keep only the samples still belonging to the old class.
                pool += [np.asarray(pool_val.cpu()[y == (init - 1)])]
                logits += [
                    np.asarray(
                        F.softmax(logits_val[y == (init - 1)], 1).cpu())
                ]
                labels += [np.asarray(y.cpu()[y == (init - 1)])]

                pool, logits, labels = [
                    np.concatenate(item, 0) for item in [pool, logits, labels]
                ]
                print('Calculating inception metrics...')
                IS_mean, IS_std = inception_utils.calculate_inception_score(
                    logits)
                print(
                    'Training data from dataset %s has IS of %5.5f +/- %5.5f' %
                    (config['dataset'], IS_mean, IS_std))

                print('Calculating means and covariances...')
                mu, sigma = np.mean(pool, axis=0), np.cov(pool, rowvar=False)
                print('Saving calculated means and covariances to disk...')
                np.savez(
                    '../inception_moment/' + str(init - 1) + '_' +
                    dataset_name +
                    '_inception_moments.npz', **{
                        'mu': mu,
                        'sigma': sigma
                    })

                # Reset accumulators and seed them with the new class's
                # samples from this same batch.
                pool, logits, labels = [], [], []
                # NOTE(review): this second forward pass recomputes the same
                # activations as above; kept as-is to preserve behaviour.
                pool_val, logits_val = net(x)
                pool += [np.asarray(pool_val.cpu()[y == (init)])]
                logits += [
                    np.asarray(F.softmax(logits_val[y == (init)], 1).cpu())
                ]
                labels += [np.asarray(y.cpu()[y == (init)])]
            else:
                # Whole batch belongs to the current class: accumulate all.
                pool_val, logits_val = net(x)
                pool += [np.asarray(pool_val.cpu())]
                logits += [np.asarray(F.softmax(logits_val, 1).cpu())]
                labels += [np.asarray(y.cpu())]

    # Finalize the last class, which never triggers the boundary branch.
    pool, logits, labels = [
        np.concatenate(item, 0) for item in [pool, logits, labels]
    ]
    print('Calculating inception metrics...')
    IS_mean, IS_std = inception_utils.calculate_inception_score(logits)
    print('Training data from dataset %s has IS of %5.5f +/- %5.5f' %
          (config['dataset'], IS_mean, IS_std))

    print('Calculating means and covariances...')
    mu, sigma = np.mean(pool, axis=0), np.cov(pool, rowvar=False)
    print('Saving calculated means and covariances to disk...')
    np.savez(
        '../inception_moment/' + str(init - 1) + '_' +
        dataset_name + '_inception_moments.npz', **{
            'mu': mu,
            'sigma': sigma
        })
# --- Beispiel #5 (Example #5) ---
def run(config):
    """Compute Inception moments for a VIDEO dataset using an R(2+1)D-18 net.

    Streams video clips through the net, periodically logging denormalized
    clips to TensorBoard and printing the running classification accuracy,
    then reports the Inception Score and saves the FID moments to
    '<dataset>_inception_moments.npz'.

    Args:
        config: dict with at least 'dataset', 'logs_root' and 'parallel'
            keys; forwarded to utils.get_video_data_loaders.
    """
    unique_id = datetime.datetime.now().strftime('%Y%m-%d%H-%M%S-')
    tensorboard_path = os.path.join(config['logs_root'], 'tensorboard_logs',
                                    unique_id)
    # exist_ok avoids a crash if the (timestamped) directory already exists.
    os.makedirs(tensorboard_path, exist_ok=True)
    writer = SummaryWriter(log_dir=tensorboard_path)

    # Keep the partial final batch so every sample contributes.
    config['drop_last'] = False
    frame_size = utils.imsize_dict[config['dataset']]
    print('Dataset:', config['dataset'], 'Frame size:', frame_size)
    loaders = utils.get_video_data_loaders(num_epochs=1,
                                           frame_size=frame_size,
                                           **config)

    # Load the video inception net (R(2+1)D-18).
    net = inception_utils.load_r2plus1d_18_net(parallel=config['parallel'])
    pool, logits, labels = [], [], []
    device = 'cuda'
    accu_correct, accu_total = 0, 0
    # Data is normalized to [-1, 1] with mean/std 0.5; these tensors undo
    # that for TensorBoard visualization only.
    norm_mean = torch.tensor([0.5, 0.5, 0.5]).to(device)
    norm_std = torch.tensor([0.5, 0.5, 0.5]).to(device)
    transform_tensorboard = dset.VideoNormalize(norm_mean, norm_std)

    for i, (x, y) in enumerate(tqdm(loaders[0])):
        x = x.to(device)  # [B, T, C, H, W]
        if i % 100 == 0:
            # Denormalize (broadcast over B, T, H, W) before logging.
            t_x = x * norm_std[None, None, :, None,
                               None] + norm_mean[None, None, :, None, None]
            print('Range:', t_x.min(), t_x.max())
            writer.add_video('Loaded Data', t_x, i)
        with torch.no_grad():
            pool_val, logits_val = net(x)
            pool += [np.asarray(pool_val.cpu())]
            logits += [np.asarray(F.softmax(logits_val, 1).cpu())]
            labels += [np.asarray(y.cpu())]
            # Running top-1 accuracy against the ground-truth labels.
            accu_correct += int(
                sum(
                    F.softmax(logits_val, 1).cpu().argmax(1).squeeze() ==
                    y.cpu().squeeze()))
            accu_total += len(np.asarray(y.cpu()).squeeze())
            if i % 10 == 0:
                print(F.softmax(logits_val, 1).cpu()[0])
                print('Accumulated correct predictions:', accu_correct)
                print('Accumulated total number of samples:', accu_total)
                print('Accumulated prediction accuracy is:',
                      accu_correct / accu_total)

    pool, logits, labels = [
        np.concatenate(item, 0) for item in [pool, logits, labels]
    ]
    # Calculate inception metrics and report them.
    print('Calculating inception metrics...')
    IS_mean, IS_std = inception_utils.calculate_inception_score(logits)
    print('Training data from dataset %s has IS of %5.5f +/- %5.5f' %
          (config['dataset'], IS_mean, IS_std))
    print('Calculating means and covariances...')
    mu, sigma = np.mean(pool, axis=0), np.cov(pool, rowvar=False)
    print('Saving calculated means and covariances to disk...')
    # BUG FIX: str.strip('_hdf5') strips the character set {_, h, d, f, 5}
    # from both ends, not the '_hdf5' suffix. Remove the suffix explicitly.
    dataset_name = config['dataset']
    if dataset_name.endswith('_hdf5'):
        dataset_name = dataset_name[:-len('_hdf5')]
    np.savez(dataset_name + '_inception_moments.npz', **{
        'mu': mu,
        'sigma': sigma
    })