Example no. 1
def main():
    parser = argparse.ArgumentParser(
        description='''StyleGAN2 generator.

Run 'python %(prog)s <subcommand> --help' for subcommand help.''',
        epilog=_examples,
        formatter_class=argparse.RawDescriptionHelpFormatter
    )

    subparsers = parser.add_subparsers(help='Sub-commands', dest='command')

    parser_generate_images = subparsers.add_parser(
        'generate-images', help='Generate images')
    parser_generate_images.add_argument(
        '--network', help='Network pickle filename', dest='network_pkl', required=True)
    parser_generate_images.add_argument(
        '--label', type=_parse_label, help='Label vector', default=None, required=False)
    parser_generate_images.add_argument(
        '--seeds', type=_parse_num_range, help='List of random seeds', required=True)
    parser_generate_images.add_argument(
        '--truncation-psi', type=float, help='Truncation psi (default: %(default)s)', default=0.5)
    parser_generate_images.add_argument(
        '--result-dir', help='Root directory for run results (default: %(default)s)', default='results', metavar='DIR')

    parser_style_mixing_example = subparsers.add_parser(
        'style-mixing-example', help='Generate style mixing video')
    parser_style_mixing_example.add_argument(
        '--network', help='Network pickle filename', dest='network_pkl', required=True)
    parser_style_mixing_example.add_argument(
        '--row-seeds', type=_parse_num_range, help='Random seeds to use for image rows', required=True)
    parser_style_mixing_example.add_argument(
        '--col-seeds', type=_parse_num_range, help='Random seeds to use for image columns', required=True)
    parser_style_mixing_example.add_argument(
        '--col-styles', type=_parse_num_range, help='Style layer range (default: %(default)s)', default='0-6')
    parser_style_mixing_example.add_argument(
        '--truncation-psi', type=float, help='Truncation psi (default: %(default)s)', default=0.5)
    parser_style_mixing_example.add_argument(
        '--result-dir', help='Root directory for run results (default: %(default)s)', default='results', metavar='DIR')

    args = parser.parse_args()
    kwargs = vars(args)
    subcmd = kwargs.pop('command')

    if subcmd is None:
        print('Error: missing subcommand.  Re-run with --help for usage.')
        sys.exit(1)

    sc = dnnlib.SubmitConfig()
    sc.num_gpus = 1
    sc.submit_target = dnnlib.SubmitTarget.LOCAL
    sc.local.do_not_copy_source_files = True
    sc.run_dir_root = kwargs.pop('result_dir')
    sc.run_desc = subcmd

    func_name_map = {
        'generate-images': 'run_generator.generate_images',
        'style-mixing-example': 'run_generator.style_mixing_example'
    }
    dnnlib.submit_run(sc, func_name_map[subcmd], **kwargs)
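Note: the argparse setup above relies on helpers such as _parse_num_range and _parse_label that are defined elsewhere in these scripts. A minimal sketch of the seed-range parser assumed here (accepts 'a,b,c' lists or 'a-c' ranges):

import re

def _parse_num_range(s):
    # Accept either a comma-separated list of numbers ('a,b,c') or a range ('a-c') and return a list of ints.
    range_re = re.compile(r'^(\d+)-(\d+)$')
    m = range_re.match(s)
    if m:
        return list(range(int(m.group(1)), int(m.group(2)) + 1))
    return [int(x) for x in s.split(',')]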
Example no. 2
def run(data_dir='./../datasets',
        result_dir='./../results',
        num_gpus=1,
        total_kimg=1000,
        mirror_augment=True):
    train = EasyDict(
        run_func_name='classifier_vgg.training_loop-vgg.training_loop')
    classifier = EasyDict(
        func_name='classifier_vgg.network_classifier-vgg.classifier_vgg')
    classifier_opt = EasyDict(beta1=0.0, beta2=0.99, epsilon=1e-8)
    classifier_loss = EasyDict(func_name='classifier_vgg.loss.cross_entropy')
    sched = EasyDict()
    sc = dnnlib.SubmitConfig()
    tf_config = {'rnd.np_random_seed': 1000}

    # train.resume_pkl = './results/00254-classifier-single_class_model/network-snapshot-001000.pkl'
    # train.resume_kimg = 1000.0

    sched.minibatch_size_base = 32
    sched.minibatch_gpu_base = 8

    current_label = 'background'

    dataset = 'classifier_oversample_' + current_label
    print(dataset)
    train.data_dir = data_dir
    if not os.path.exists(train.data_dir):
        print('Error: dataset root directory does not exist.')
        sys.exit(1)
    train.total_kimg = total_kimg
    train.mirror_augment = mirror_augment
    metrics = [
        EasyDict(name='acc_' + current_label,
                 func_name='metrics.accuracy_single_class.ACC',
                 test_dataset=dataset + '_test',
                 num_images=5000,
                 minibatch_per_gpu=8,
                 test_data_dir=data_dir)
    ]
    desc = 'classifier-single_class_' + current_label
    dataset_args = EasyDict(tfrecord_dir=dataset)

    assert num_gpus in [1, 2, 4, 8]
    sc.num_gpus = num_gpus

    sc.submit_target = dnnlib.SubmitTarget.LOCAL
    sc.local.do_not_copy_source_files = True
    kwargs = EasyDict(train)
    kwargs.update(classifier_args=classifier,
                  classifier_opt_args=classifier_opt,
                  classifier_loss_args=classifier_loss)
    kwargs.update(dataset_args=dataset_args,
                  sched_args=sched,
                  metric_arg_list=metrics,
                  tf_config=tf_config)
    kwargs.submit_config = copy.deepcopy(sc)
    kwargs.submit_config.run_dir_root = result_dir
    kwargs.submit_config.run_desc = desc
    dnnlib.submit_run(**kwargs)
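Note: EasyDict in these examples is dnnlib's dict subclass that exposes keys as attributes; a minimal equivalent for reference:

class EasyDict(dict):
    # Dictionary with attribute-style access, as used by dnnlib configuration objects.
    def __getattr__(self, name):
        try:
            return self[name]
        except KeyError:
            raise AttributeError(name)

    def __setattr__(self, name, value):
        self[name] = value

    def __delattr__(self, name):
        del self[name]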
Example no. 3
def run(dataset, data_dir, result_dir, config_id, num_gpus, total_kimg, gamma,
        mirror_augment, metrics):
    train = EasyDict(
        run_func_name=
        'training.training_loop.rotation.v7_baseline.training_loop')
    G = EasyDict(func_name='training.networks.rotation.v7_baseline.G_main')
    D = EasyDict(
        func_name='training.networks.rotation.v7_baseline.D_stylegan2')
    G_opt = EasyDict(beta1=0.0, beta2=0.99, epsilon=1e-8)
    D_opt = EasyDict(beta1=0.0, beta2=0.99, epsilon=1e-8)
    G_loss = EasyDict(
        func_name='training.loss.rotation.v7_baseline.G_logistic_ns_pathreg')
    D_loss = EasyDict(
        func_name='training.loss.rotation.v7_baseline.D_logistic_r1')
    sched = EasyDict()
    grid = EasyDict(size='1080p', layout='random')
    sc = dnnlib.SubmitConfig()
    tf_config = {'rnd.np_random_seed': 1000}

    train.data_dir = data_dir
    train.total_kimg = total_kimg
    train.mirror_augment = mirror_augment
    train.image_snapshot_ticks = train.network_snapshot_ticks = 10
    sched.G_lrate_base = sched.D_lrate_base = 0.002
    sched.minibatch_size_base = 32
    sched.minibatch_gpu_base = 4
    D_loss.gamma = 10
    metrics = [metric_defaults[x] for x in metrics]
    desc = 'rotation-v7-baseline_256'

    dataset_args = EasyDict(tfrecord_dir=dataset)

    assert num_gpus in [1, 2, 4, 8]
    sc.num_gpus = num_gpus

    assert config_id in _valid_configs

    if gamma is not None:
        D_loss.gamma = gamma

    sc.submit_target = dnnlib.SubmitTarget.LOCAL
    sc.local.do_not_copy_source_files = True
    kwargs = EasyDict(train)
    kwargs.update(G_args=G,
                  D_args=D,
                  G_opt_args=G_opt,
                  D_opt_args=D_opt,
                  G_loss_args=G_loss,
                  D_loss_args=D_loss)
    kwargs.update(dataset_args=dataset_args,
                  sched_args=sched,
                  grid_args=grid,
                  metric_arg_list=metrics,
                  tf_config=tf_config)
    kwargs.submit_config = copy.deepcopy(sc)
    kwargs.submit_config.run_dir_root = result_dir
    kwargs.submit_config.run_desc = desc
    dnnlib.submit_run(**kwargs)
Example no. 4
def main():
    parser = argparse.ArgumentParser(
        description='''VC2 projector.

Run 'python %(prog)s <subcommand> --help' for subcommand help.''',
        epilog=_examples,
        formatter_class=argparse.RawDescriptionHelpFormatter
    )

    subparsers = parser.add_subparsers(help='Sub-commands', dest='command')

    project_generated_images_parser = subparsers.add_parser('project-generated-images', help='Project generated images')
    project_generated_images_parser.add_argument('--network', help='Network pickle filename', dest='network_pkl', required=True)
    project_generated_images_parser.add_argument('--seeds', type=_parse_num_range, help='List of random seeds', default=range(3))
    project_generated_images_parser.add_argument('--num-snapshots', type=int, help='Number of snapshots (default: %(default)s)', default=5)
    project_generated_images_parser.add_argument('--result-dir', help='Root directory for run results (default: %(default)s)', default='results', metavar='DIR')
    project_generated_images_parser.add_argument('--create_new_G', help='If create a new G for projection.', default=False, type=_str_to_bool)
    project_generated_images_parser.add_argument('--new_func_name', help='new G func name if create new G', default='training.vc_networks2.G_main_vc2')

    project_real_dataset_images_parser = subparsers.add_parser('project-real-dataset-images', help='Project real images')
    project_real_dataset_images_parser.add_argument('--network', help='Network pickle filename', dest='network_pkl', required=True)
    project_real_dataset_images_parser.add_argument('--data-dir', help='Dataset root directory', required=True)
    project_real_dataset_images_parser.add_argument('--dataset', help='Training dataset', dest='dataset_name', required=True)
    project_real_dataset_images_parser.add_argument('--num-snapshots', type=int, help='Number of snapshots (default: %(default)s)', default=5)
    project_real_dataset_images_parser.add_argument('--num-images', type=int, help='Number of images to project (default: %(default)s)', default=3)
    project_real_dataset_images_parser.add_argument('--result-dir', help='Root directory for run results (default: %(default)s)', default='results', metavar='DIR')
    project_real_dataset_images_parser.add_argument('--create_new_G', help='If create a new G for projection.', default=False, type=_str_to_bool)
    project_real_dataset_images_parser.add_argument('--new_func_name', help='new G func name if create new G', default='training.vc_networks2.G_main_vc2')

    project_real_other_images_parser = subparsers.add_parser('project-real-other-images', help='Project real images')
    project_real_other_images_parser.add_argument('--network', help='Network pickle filename', dest='network_pkl', required=True)
    project_real_other_images_parser.add_argument('--data-dir', help='Dir of images to project', required=True)
    project_real_other_images_parser.add_argument('--num-snapshots', type=int, help='Number of snapshots (default: %(default)s)', default=5)
    project_real_other_images_parser.add_argument('--result-dir', help='Root directory for run results (default: %(default)s)', default='results', metavar='DIR')
    project_real_other_images_parser.add_argument('--create_new_G', help='If create a new G for projection.', default=False, type=_str_to_bool)
    project_real_other_images_parser.add_argument('--new_func_name', help='new G func name if create new G', default='training.vc_networks2.G_main_vc2')

    args = parser.parse_args()
    subcmd = args.command
    if subcmd is None:
        print('Error: missing subcommand.  Re-run with --help for usage.')
        sys.exit(1)

    kwargs = vars(args)
    sc = dnnlib.SubmitConfig()
    sc.num_gpus = 1
    sc.submit_target = dnnlib.SubmitTarget.LOCAL
    sc.local.do_not_copy_source_files = True
    sc.run_dir_root = kwargs.pop('result_dir')
    sc.run_desc = kwargs.pop('command')

    func_name_map = {
        'project-generated-images': 'run_projector_vc2.project_generated_images',
        'project-real-dataset-images': 'run_projector_vc2.project_real_dataset_images',
        'project-real-other-images': 'run_projector_vc2.project_real_other_images'
    }
    dnnlib.submit_run(sc, func_name_map[subcmd], **kwargs)
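Note: several of these scripts pass type=_str_to_bool for boolean flags. A typical implementation of that helper, assuming argparse is imported as in the scripts above:

def _str_to_bool(v):
    # Parse command-line booleans such as 'true'/'false', 'yes'/'no', or '1'/'0'.
    if isinstance(v, bool):
        return v
    if v.lower() in ('yes', 'true', 't', 'y', '1'):
        return True
    if v.lower() in ('no', 'false', 'f', 'n', '0'):
        return False
    raise argparse.ArgumentTypeError('Boolean value expected.')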
Example no. 5
def main():
    parser = argparse.ArgumentParser(
        description='Run StyleGAN2 metrics.',
        epilog=_examples,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument(
        '--result-dir',
        help='Root directory for run results (default: %(default)s)',
        default='results',
        metavar='DIR')
    parser.add_argument('--network',
                        help='Network pickle filename',
                        dest='network_pkl',
                        required=True)
    parser.add_argument('--metrics',
                        help='Metrics to compute (default: %(default)s)',
                        default='fid50k',
                        type=lambda x: x.split(','))
    parser.add_argument('--dataset', help='Training dataset')
    parser.add_argument('--data-dir', help='Dataset root directory')
    parser.add_argument('--mirror-augment',
                        help='Mirror augment (default: %(default)s)',
                        default=False,
                        type=_str_to_bool,
                        metavar='BOOL')
    parser.add_argument('--include_I',
                        help='If include I for eval',
                        default=False,
                        type=_str_to_bool,
                        metavar='INCLUDE_I')
    parser.add_argument('--num-gpus',
                        help='Number of GPUs to use',
                        type=int,
                        default=1,
                        metavar='N')
    parser.add_argument(
        '--mapping_nodup',
        help='If the mapping layer in G has no duplication operation',
        default=False,
        type=_str_to_bool,
        metavar='MAPPING_NODUP')
    parser.add_argument('--avg_mv_for_I',
                        help='Using moving average for I?',
                        default=False,
                        type=_str_to_bool,
                        metavar='AVG_MV_FOR_I')

    args = parser.parse_args()

    kwargs = vars(args)
    sc = dnnlib.SubmitConfig()
    sc.num_gpus = kwargs.pop('num_gpus')
    sc.submit_target = dnnlib.SubmitTarget.LOCAL
    sc.local.do_not_copy_source_files = True
    sc.run_dir_root = kwargs.pop('result_dir')
    sc.run_desc = 'run-metrics'
    dnnlib.submit_run(sc, 'run_metrics.run', **kwargs)
Example no. 6
def create_training_config(
    tfrecord_dir,
    checkpoint_path,
    run_dir,
    G_beta1=0.0,
    G_beta2=0.99,
    D_beta1=0.0,
    D_beta2=0.99,
    mirror_augment=False,
    style_mixing_probability=0.9,
    generator_learning_rate=0.002,
    discriminator_learning_rate=0.004,
    weight_averaging_half_life=5,
    **kwargs
):


    train     = EasyDict() # Options for training loop.
    G         = EasyDict(func_name='training.networks_stylegan2.G_main')       # Options for generator network.
    D         = EasyDict(func_name='training.networks_stylegan2.D_stylegan2')  # Options for discriminator network.
    G_opt     = EasyDict(beta1=G_beta1, beta2=G_beta2, epsilon=1e-8)           # Options for generator optimizer.
    D_opt     = EasyDict(beta1=D_beta1, beta2=D_beta2, epsilon=1e-8)           # Options for discriminator optimizer.
    G_loss    = EasyDict(func_name='training.loss.G_logistic_ns_pathreg')      # Options for generator loss.
    D_loss    = EasyDict(func_name='training.loss.D_logistic_r1')              # Options for discriminator loss.
    sched     = EasyDict()                                                     # Options for TrainingSchedule.
    grid      = EasyDict(size='8k', layout='random')                           # Options for setup_snapshot_image_grid().
    submit_config        = dnnlib.SubmitConfig()                                          # Options for dnnlib.submit_run().
    tf_config = {'rnd.np_random_seed': 1000}                                   # Options for tflib.init_tf().

    train.mirror_augment = mirror_augment

    sched.minibatch_gpu_base = 4
    # minibatch_size_base above minibatch_gpu_base * num_gpus enables gradient accumulation.
    sched.minibatch_size_base = 12

    train.G_smoothing_kimg = weight_averaging_half_life

    D_loss.gamma = 10
    metrics = [metric_defaults['fid5k']]
    desc = 'stylegan2'

    dataset_args = EasyDict(tfrecord_dir=tfrecord_dir)

    desc += '-1gpu'
    desc += '-config-f'

    sched.G_lrate_dict = {128: generator_learning_rate, 256: generator_learning_rate, 512: generator_learning_rate, 1024: generator_learning_rate}
    sched.D_lrate_dict = {128: discriminator_learning_rate, 256: discriminator_learning_rate, 512: discriminator_learning_rate, 1024: discriminator_learning_rate}

    kwargs = EasyDict(train)

    kwargs.update(G_args=G, D_args=D, G_opt_args=G_opt, D_opt_args=D_opt, G_loss_args=G_loss, D_loss_args=D_loss)
    kwargs.update(dataset_args=dataset_args, sched_args=sched, grid_args=grid, metric_arg_list=metrics, tf_config=tf_config, resume_pkl=checkpoint_path)
    kwargs.submit_config = deepcopy(submit_config)
    kwargs.submit_config.run_dir = run_dir
    kwargs.submit_config.run_desc = desc
    return kwargs
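Note: create_training_config only assembles and returns the option dict, and the returned dict carries no run_func_name, so a caller would presumably add the training entry point and forward everything to dnnlib.submit_run. A hypothetical usage sketch (the paths are placeholders, not taken from the source):

config = create_training_config(
    tfrecord_dir='datasets/example_dataset',                 # placeholder
    checkpoint_path='results/network-snapshot-000000.pkl',   # placeholder
    run_dir='results/finetune-run')                          # placeholder
config.run_func_name = 'training.training_loop.training_loop'  # StyleGAN2 training entry point
dnnlib.submit_run(**config)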
Example no. 7
def main():
    parser = argparse.ArgumentParser(
        description="Run StyleGAN2 metrics.",
        epilog=_examples,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument(
        "--result-dir",
        help="Root directory for run results (default: %(default)s)",
        default="results",
        metavar="DIR")
    parser.add_argument("--network",
                        help="Network pickle filename",
                        default=None,
                        dest="network_pkl")
    parser.add_argument("--metrics",
                        help="Metrics to compute (default: %(default)s)",
                        default="fid50k",
                        type=lambda x: x.split(","))
    parser.add_argument("--dataset", help="Training dataset", required=True)
    parser.add_argument("--data-dir",
                        help="Dataset root directory",
                        required=True)
    parser.add_argument("--mirror-augment",
                        help="Mirror augment (default: %(default)s)",
                        default=False,
                        type=_str_to_bool,
                        metavar="BOOL")
    parser.add_argument("--gpus",
                        help="Number of GPUs to use",
                        type=str,
                        default=None)
    parser.add_argument("--paths",
                        help="Image files to run evaluation on",
                        default=None,
                        type=str)

    args = parser.parse_args()

    if not os.path.exists(args.data_dir):
        print("Error: dataset root directory does not exist.")
        sys.exit(1)

    kwargs = vars(args)
    sc = dnnlib.SubmitConfig()

    # Set GPUs: --gpus is a comma-separated list of device ids, e.g. "0,1".
    gpus = kwargs.pop("gpus")
    if gpus is None:
        print("Error: --gpus is required (e.g. --gpus=0,1).")
        sys.exit(1)
    sc.num_gpus = len(gpus.split(","))
    os.environ["CUDA_VISIBLE_DEVICES"] = gpus

    sc.submit_target = dnnlib.SubmitTarget.LOCAL
    sc.local.do_not_copy_source_files = True
    sc.run_dir_root = kwargs.pop("result_dir")
    sc.run_desc = "run-metrics"
    dnnlib.submit_run(sc, "run_metrics.run", **kwargs)
Example no. 8
def run(opt):
    submit_config = dnnlib.SubmitConfig()

    # Which metrics to evaluate?
    metrics = []
    if 'FID' in opt.metrics:
        metrics.append(metric_base.fid50k)
    if 'PPL' in opt.metrics:
        metrics.append(metric_base.ppl_zend_v2)

    model_pth = find_model(opt.model)
    if not model_pth:
        # Find the snapshot:
        if opt.model.endswith('.pkl'):
            model_pth = opt.model
        else:
            model_pth = misc.find_pkl(
                os.path.join(os.getcwd(), config.result_dir), int(opt.model),
                opt.snapshot_kimg)

    # Define dataset:
    dataset = find_dataset(opt.dataset)
    if not dataset:
        dataset = os.path.join(os.getcwd(), config.data_dir, opt.dataset)

    tasks = []
    tasks += [
        EasyDict(run_func_name='evaluate.run_pickle',
                 network_pkl=model_pth,
                 use_RA=True,
                 dataset_args=EasyDict(tfrecord_dir=dataset,
                                       shuffle_mb=0,
                                       resolution=opt.resolution),
                 mirror_augment=False)
    ]

    submit_config.num_gpus = opt.num_gpus

    # Execute.
    submit_config.run_dir_root = dnnlib.submission.submit.get_template_from_path(
        config.result_dir)
    submit_config.run_dir_ignore += config.run_dir_ignore
    for task in tasks:
        for metric in metrics:
            submit_config.run_desc = '%s-%s' % (task.run_func_name,
                                                metric.name)
            if task.run_func_name.endswith('run_snapshot'):
                submit_config.run_desc += '-%s-%s' % (task.run_id,
                                                      task.snapshot)
            if task.run_func_name.endswith('run_all_snapshots'):
                submit_config.run_desc += '-%s' % task.run_id
            submit_config.run_desc += '-%dgpu' % submit_config.num_gpus
            dnnlib.submit_run(submit_config, metric_args=metric, **task)
    print('Done with %s.' % model_pth)
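Note: run(opt) reads opt.model, opt.metrics, opt.dataset, opt.resolution, opt.snapshot_kimg and opt.num_gpus, so the caller presumably builds opt with argparse along these lines (a sketch; the flag names are assumptions, not from the source):

import argparse

def parse_args():
    parser = argparse.ArgumentParser(description='Evaluate metrics for a trained pickle.')
    parser.add_argument('--model', required=True, help='Run id or path to a network .pkl')
    parser.add_argument('--metrics', nargs='+', default=['FID'], help='Any of: FID, PPL')
    parser.add_argument('--dataset', required=True, help='Dataset name or tfrecord directory')
    parser.add_argument('--resolution', type=int, default=256)
    parser.add_argument('--snapshot-kimg', dest='snapshot_kimg', type=int, default=None)
    parser.add_argument('--num-gpus', dest='num_gpus', type=int, default=1)
    return parser.parse_args()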
Example no. 9
def main():
    parser = argparse.ArgumentParser(
        description='''StyleGAN2 projector.

Run 'python %(prog)s <subcommand> --help' for subcommand help.''',
        epilog=_examples,
        formatter_class=argparse.RawDescriptionHelpFormatter
    )

    subparsers = parser.add_subparsers(help='Sub-commands', dest='command')

    project_generated_images_parser = subparsers.add_parser('project-generated-images', help='Project generated images')
    project_generated_images_parser.add_argument('--network', help='Network pickle filename', dest='network_pkl', required=True)
    project_generated_images_parser.add_argument('--seeds', type=_parse_num_range, help='List of random seeds', default=range(3))
    project_generated_images_parser.add_argument('--num-snapshots', type=int, help='Number of snapshots (default: %(default)s)', default=5)
    project_generated_images_parser.add_argument('--num-steps', type=int, help='Number of steps for running projection (default: %(default)s)', default=100)
    project_generated_images_parser.add_argument('--truncation-psi', type=float, help='Truncation psi (default: %(default)s)', default=1.0)
    project_generated_images_parser.add_argument('--result-dir', help='Root directory for run results (default: %(default)s)', default='results', metavar='DIR')
    project_generated_images_parser.add_argument('--save-snapshots', action='store_true', help='Save projection results')
    project_generated_images_parser.add_argument('--save-latents', action='store_true', help='If True, save latent vectors')
    project_generated_images_parser.add_argument('--save-umap', action='store_true', help='If True, project latents using UMAP embeddings and save.')

    project_real_images_parser = subparsers.add_parser('project-real-images', help='Project real images')
    project_real_images_parser.add_argument('--network', help='Network pickle filename', dest='network_pkl', required=True)
    project_real_images_parser.add_argument('--data-dir', help='Dataset root directory', required=True)
    project_real_images_parser.add_argument('--dataset', help='Training dataset', dest='dataset_name', required=True)
    project_real_images_parser.add_argument('--num-snapshots', type=int, help='Number of snapshots (default: %(default)s)', default=5)
    project_real_images_parser.add_argument('--num-steps', type=int, help='Number of steps for running projection (default: %(default)s)', default=100)
    project_real_images_parser.add_argument('--num-images', type=int, help='Number of images to project (default: %(default)s)', default=3)
    project_real_images_parser.add_argument('--result-dir', help='Root directory for run results (default: %(default)s)', default='results', metavar='DIR')
    project_real_images_parser.add_argument('--save-snapshots', action='store_true', help='If True, save projection results')
    project_real_images_parser.add_argument('--save-latents', action='store_true', help='If True, save latent vectors')
    project_real_images_parser.add_argument('--save-umap', action='store_true', help='If True, project latents using UMAP embeddings and save.')
    project_real_images_parser.add_argument('--save-tiles', action='store_true', help='If True, stores images as 2048-by-2048 texture map.')

    args = parser.parse_args()
    subcmd = args.command
    if subcmd is None:
        print('Error: missing subcommand.  Re-run with --help for usage.')
        sys.exit(1)

    kwargs = vars(args)
    sc = dnnlib.SubmitConfig()
    sc.num_gpus = 1
    sc.submit_target = dnnlib.SubmitTarget.LOCAL
    sc.local.do_not_copy_source_files = True
    sc.run_dir_root = kwargs.pop('result_dir')
    sc.run_desc = kwargs.pop('command')

    func_name_map = {
        'project-generated-images': 'run_projector.project_generated_images',
        'project-real-images': 'run_projector.project_real_images'
    }
    dnnlib.submit_run(sc, func_name_map[subcmd], **kwargs)
Example no. 10
def main():

    print()
    print()
    print()
    print('GENERATOR STARTED')

    parser = argparse.ArgumentParser(
        description='''StyleGAN2 generator.

Run 'python %(prog)s <subcommand> --help' for subcommand help.''',
        epilog=_examples,
        formatter_class=argparse.RawDescriptionHelpFormatter)

    subparsers = parser.add_subparsers(help='Sub-commands', dest='command')

    parser_generate_images = subparsers.add_parser('generate-images',
                                                   help='Generate images')
    parser_generate_images.add_argument('--network',
                                        help='Network pickle filename',
                                        dest='network_pkl',
                                        default='results/002332.pkl')
    parser_generate_images.add_argument(
        '--truncation-psi',
        type=float,
        help='Truncation psi (default: %(default)s)',
        default=0.5)
    parser_generate_images.add_argument(
        '--result-dir',
        help='Root directory for run results (default: %(default)s)',
        default='results',
        metavar='DIR')

    args = parser.parse_args()
    kwargs = vars(args)
    subcmd = kwargs.pop('command')

    if subcmd is None:
        print('Error: missing subcommand.  Re-run with --help for usage.')
        sys.exit(1)

    sc = dnnlib.SubmitConfig()
    sc.num_gpus = 1
    sc.submit_target = dnnlib.SubmitTarget.LOCAL
    sc.local.do_not_copy_source_files = True
    sc.run_dir_root = kwargs.pop('result_dir')
    sc.run_desc = subcmd

    func_name_map = {
        'generate-images': 'TD_listen.generate_images',
        'style-mixing-example': 'TD_listen.style_mixing_example'
    }
    dnnlib.submit_run(sc, func_name_map[subcmd], **kwargs)
Example no. 11
def main():
    submit_config = dnnlib.SubmitConfig()

    # Which metrics to evaluate?
    metrics = []
    metrics += [metric_base.fid50k]
    #metrics += [metric_base.ppl_zfull]
    #metrics += [metric_base.ppl_wfull]
    #metrics += [metric_base.ppl_zend]
    #metrics += [metric_base.ppl_wend]
    #metrics += [metric_base.ls]
    metrics += [metric_base.lm_hd]
    metrics += [metric_base.csim]
    #metrics += [metric_base.dummy]

    # Which networks to evaluate them on?
    tasks = []
    tasks += [
        EasyDict(
            run_func_name='run_metrics.run_pickle',
            network_pkl=
            '/content/gdrive/My Drive/Public/tensorboards_shared/run/t/58_rignet_fixed_pose_only_larger_adv_scaling/snapshots/network-snapshot-01560192.pkl',
            dataset_args=EasyDict(
                tfrecord_dir=
                '/content/gdrive/My Drive/Public/tensorboards_shared/split_dataset/test',
                shuffle_mb=0),
            mirror_augment=True)
    ]
    #tasks += [EasyDict(run_func_name='run_metrics.run_snapshot', run_id=100, snapshot=25000)]
    #tasks += [EasyDict(run_func_name='run_metrics.run_all_snapshots', run_id=100)]

    # How many GPUs to use?
    submit_config.num_gpus = 1
    #submit_config.num_gpus = 2
    #submit_config.num_gpus = 4
    #submit_config.num_gpus = 8

    # Execute.
    submit_config.run_dir_root = dnnlib.submission.submit.get_template_from_path(
        config.result_dir)
    submit_config.run_dir_ignore += config.run_dir_ignore
    for task in tasks:
        for metric in metrics:
            submit_config.run_desc = '%s-%s' % (task.run_func_name,
                                                metric.name)
            if task.run_func_name.endswith('run_snapshot'):
                submit_config.run_desc += '-%s-%s' % (task.run_id,
                                                      task.snapshot)
            if task.run_func_name.endswith('run_all_snapshots'):
                submit_config.run_desc += '-%s' % task.run_id
            submit_config.run_desc += '-%dgpu' % submit_config.num_gpus
            dnnlib.submit_run(submit_config, metric_args=metric, **task)
Example no. 12
def run(data_dir='./../datasets',
        result_dir='./../results',
        num_gpus=2,
        total_kimg=1000):
    train = EasyDict(
        run_func_name='classifier.training_loop-single_class.training_loop')
    classifier = EasyDict(
        func_name='classifier.network_classifier-new_label.classifier')
    classifier_opt = EasyDict(beta1=0.0, beta2=0.99, epsilon=1e-8)
    classifier_loss = EasyDict(func_name='classifier.loss.euclidean')
    sched = EasyDict()
    sc = dnnlib.SubmitConfig()
    tf_config = {'rnd.np_random_seed': 1000}
    dataset = 'classifier_oversample_rotation_v7'

    sched.minibatch_size_base = 8
    sched.minibatch_gpu_base = 4

    train.data_dir = data_dir
    train.total_kimg = total_kimg

    desc = 'classifier_rotation_v7'

    metrics = [
        EasyDict(
            name='ACCv7',
            func_name='metrics.accuracy_v7.ACCv7',
            num_images=5000,
            minibatch_per_gpu=8,
            test_dataset='./../datasets/classifier_oversample_rotation_v7_test'
        )
    ]

    dataset_args = EasyDict(tfrecord_dir=dataset)

    assert num_gpus in [1, 2, 4, 8]
    sc.num_gpus = num_gpus

    sc.submit_target = dnnlib.SubmitTarget.LOCAL
    sc.local.do_not_copy_source_files = True
    kwargs = EasyDict(train)
    kwargs.update(classifier_args=classifier,
                  classifier_opt_args=classifier_opt,
                  classifier_loss_args=classifier_loss)
    kwargs.update(dataset_args=dataset_args,
                  sched_args=sched,
                  metric_arg_list=metrics,
                  tf_config=tf_config)
    kwargs.submit_config = copy.deepcopy(sc)
    kwargs.submit_config.run_dir_root = result_dir
    kwargs.submit_config.run_desc = desc
    dnnlib.submit_run(**kwargs)
Example no. 13
def run_generate_pic(seeds, nums, psi=0.5, result_dir='results'):
    kwargs = {}
    kwargs['network_pkl'] = 'gdrive:networks/stylegan2-ffhq-config-f.pkl'
    kwargs['seeds'] = range(seeds, seeds + nums)
    kwargs['truncation_psi'] = psi
    # kwargs['run_dir'] = result_dir
    sc = dnnlib.SubmitConfig()
    sc.num_gpus = 1
    sc.submit_target = dnnlib.SubmitTarget.LOCAL
    sc.local.do_not_copy_source_files = True
    sc.run_dir_root = result_dir
    sc.run_desc = 'generate-images'
    dnnlib.submit_run(sc, 'run_generator.generate_images', **kwargs)
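A call such as the following (values are illustrative) renders five images for consecutive seeds starting at 1000:

run_generate_pic(seeds=1000, nums=5, psi=0.7, result_dir='results')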
Example no. 14
def main():
    parser = argparse.ArgumentParser(
        description='Run CoModGAN metrics.',
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument(
        '--result-dir',
        help='Root directory for run results (default: %(default)s)',
        default='results',
        metavar='DIR')
    parser.add_argument('--network',
                        help='Network pickle filename',
                        dest='network_pkls',
                        required=True)
    parser.add_argument('--metrics',
                        help='Metrics to compute (default: %(default)s)',
                        default='ids10k',
                        type=lambda x: x.split(','))
    parser.add_argument('--dataset', help='Training dataset', required=True)
    parser.add_argument('--data-dir',
                        help='Dataset root directory',
                        required=True)
    parser.add_argument('--mirror-augment',
                        help='Mirror augment (default: %(default)s)',
                        default=False,
                        type=_str_to_bool,
                        metavar='BOOL')
    parser.add_argument('--num-gpus',
                        help='Number of GPUs to use',
                        type=int,
                        default=1,
                        metavar='N')
    parser.add_argument('--num-repeats', type=int, default=1)
    parser.add_argument('--truncation', type=str, default=None)
    parser.add_argument('--resume-with-new-nets',
                        default=False,
                        action='store_true')

    args = parser.parse_args()

    if not os.path.exists(args.data_dir):
        print('Error: dataset root directory does not exist.')
        sys.exit(1)

    kwargs = vars(args)
    sc = dnnlib.SubmitConfig()
    sc.num_gpus = kwargs.pop('num_gpus')
    sc.submit_target = dnnlib.SubmitTarget.LOCAL
    sc.local.do_not_copy_source_files = True
    sc.run_dir_root = kwargs.pop('result_dir')
    sc.run_desc = 'run-metrics'
    dnnlib.submit_run(sc, 'run_metrics.run', **kwargs)
Example no. 15
def main():
    parser = argparse.ArgumentParser(
        description='VC-GAN and INFO-GAN image-pair generator.',
        epilog=_examples,
        formatter_class=argparse.RawDescriptionHelpFormatter)

    parser.add_argument('--network_pkl',
                        help='Network pickle filename',
                        required=True)
    parser.add_argument('--n_imgs',
                        type=int,
                        help='Number of image pairs to generate',
                        required=True)
    parser.add_argument('--n_discrete',
                        type=int,
                        help='Number of discrete latents',
                        default=0)
    parser.add_argument('--n_continuous',
                        type=int,
                        help='Number of continuous latents',
                        default=14)
    parser.add_argument('--batch_size',
                        type=int,
                        help='Batch size for generation',
                        default=10)
    parser.add_argument('--latent_type',
                        type=str,
                        help='What type of latent difference to use',
                        default='onedim',
                        choices=['onedim', 'fulldim'])
    parser.add_argument('--model_type',
                        type=str,
                        help='Which model is this pkl',
                        default='vc_gan_with_vc_head',
                        choices=['info_gan', 'vc_gan', 'vc_gan_with_vc_head'])
    parser.add_argument('--result-dir',
                        help='Root directory to store this dataset',
                        required=True,
                        metavar='DIR')

    args = parser.parse_args()
    kwargs = vars(args)

    sc = dnnlib.SubmitConfig()
    sc.num_gpus = 1
    sc.submit_target = dnnlib.SubmitTarget.LOCAL
    sc.local.do_not_copy_source_files = True
    sc.run_dir_root = kwargs['result_dir']

    dnnlib.submit_run(sc, 'run_pair_generator_vc.generate_image_pairs',
                      **kwargs)
Example no. 16
def run_auto(dataset, data_dir, result_dir, config_id, num_gpus, resolution,
             total_kimg, gamma, mirror_augment, metrics, train_auto):
    train = EasyDict(run_func_name='training.training_loop.training_auto_loop'
                     )  # Options for training loop.
    Enc = EasyDict(func_name='training.networks_stylegan2.Encoder'
                   )  # Options for encoder network.
    Dec = EasyDict(func_name='training.networks_stylegan2.Decoder'
                   )  # Options for decoder network.
    opt = EasyDict(beta1=0.0, beta2=0.99,
                   epsilon=1e-8)  # Options for autoencoder optimizer.
    loss = EasyDict(
        func_name='training.loss.auto_l1')  # Options for autoencoder loss.
    sched = EasyDict()  # Options for TrainingSchedule.
    grid = EasyDict(
        size='1080p',
        layout='random')  # Options for setup_snapshot_image_grid().
    sc = dnnlib.SubmitConfig()  # Options for dnnlib.submit_run().
    tf_config = {'rnd.np_random_seed': 1000}  # Options for tflib.init_tf().

    train.data_dir = data_dir
    train.total_kimg = total_kimg
    train.image_snapshot_ticks = 10
    train.network_snapshot_ticks = 125
    sched.lrate = 0.003
    sched.minibatch_size = 64
    sched.minibatch_gpu = 64
    desc = 'stylegan2-hrae'

    desc += '-' + dataset
    dataset_args = EasyDict(tfrecord_dir=dataset)
    dataset_args.resolution = resolution
    dataset_args.num_threads = 4

    assert num_gpus in [1, 2, 4, 8]
    sc.num_gpus = num_gpus
    desc += '-%dgpu' % num_gpus
    desc += '-auto'

    sc.submit_target = dnnlib.SubmitTarget.LOCAL
    sc.local.do_not_copy_source_files = True
    kwargs = EasyDict(train)
    kwargs.update(Enc_args=Enc, Dec_args=Dec, opt_args=opt, loss_args=loss)
    kwargs.update(dataset_args=dataset_args,
                  sched_args=sched,
                  grid_args=grid,
                  tf_config=tf_config)
    kwargs.submit_config = copy.deepcopy(sc)
    kwargs.submit_config.run_dir_root = result_dir
    kwargs.submit_config.run_desc = desc
    dnnlib.submit_run(**kwargs)
Example no. 17
def generate_images(model_path, dest_path, seed_range):
    sc = dnnlib.SubmitConfig()
    sc.num_gpus = 1
    sc.submit_target = dnnlib.SubmitTarget.LOCAL
    sc.local.do_not_copy_source_files = True
    sc.run_dir_root = './stylegan2/results'
    sc.run_desc = 'generate_images'

    dnnlib.submit_run(sc,
                      'run_generator.generate_images',
                      network_pkl=model_path,
                      seeds=range(seed_range),
                      truncation_psi=1.0,
                      dest=dest_path)
Example no. 18
def main():
    sc = dnnlib.SubmitConfig()
    sc.num_gpus = 1
    sc.submit_target = dnnlib.SubmitTarget.LOCAL
    sc.local.do_not_copy_source_files = True
    sc.run_dir_root = "D:\PythonProjectsDDrive\stylegan2-master"
    sc.run_desc = 'generate-images'
    network_pkl = 'D:\PythonProjectsDDrive\stylegan2-master\TrainedGANs\\network-snapshot-018708.pkl'

    print('Loading networks from "%s"...' % network_pkl)
    _G, _D, Gs = pretrained_networks.load_networks(network_pkl)
    vector_size = Gs.input_shape[1:][0]

    # Load saved latent vectors; raw strings keep the Windows paths literal.
    vec1 = np.load(
        r"D:\PythonProjectsDDrive\MasterThesisClothesFittingwGANs\LatentSpaceImages\ToEditImages\image39_19039.npy",
        allow_pickle=True)
    vec2 = np.load(
        r"D:\PythonProjectsDDrive\MasterThesisClothesFittingwGANs\LatentSpaceImages\ToEditImages\image40_19040.npy",
        allow_pickle=True)
    vec3 = np.load(
        r"D:\PythonProjectsDDrive\MasterThesisClothesFittingwGANs\LatentSpaceImages\ToEditImages\image41_19041.npy",
        allow_pickle=True)
    vec4 = np.load(
        r"D:\PythonProjectsDDrive\MasterThesisClothesFittingwGANs\LatentSpaceImages\ToEditImages\image42_19042.npy",
        allow_pickle=True)
    displayList = [vec1, vec2, vec3, vec4]
    #LoadImageFromVector(displayList, Gs)

    avrgVecBack = LoadVectorAverage(
        r"D:\PythonProjectsDDrive\MasterThesisClothesFittingwGANs\LatentSpaceImages\BackImages")
    avrgVecFront = LoadVectorAverage(
        r"D:\PythonProjectsDDrive\MasterThesisClothesFittingwGANs\LatentSpaceImages\FrontImages")
    PerformVectorOperationAddSub(
        r"D:\PythonProjectsDDrive\MasterThesisClothesFittingwGANs\LatentSpaceImages\ToEditImages",
        avrgVecFront, avrgVecBack, Gs)
Example no. 19
def main():
    print("run_metrics.py main()")
    submit_config = dnnlib.SubmitConfig()

    # Which metrics to evaluate?
    metrics = []
    metrics += [metric_base.fid50k]
    #metrics += [metric_base.ppl_zfull]
    #metrics += [metric_base.ppl_wfull]
    #metrics += [metric_base.ppl_zend]
    #metrics += [metric_base.ppl_wend]
    #metrics += [metric_base.ls]
    #metrics += [metric_base.dummy]

    # Which networks to evaluate them on?
    tasks = []
    tasks += [
        EasyDict(
            run_func_name='run_metrics.run_pickle',
            network_pkl=
            'https://drive.google.com/uc?id=1MEGjdvVpUsu1jB4zrXZN7Y4kBBOzizDQ',
            dataset_args=EasyDict(tfrecord_dir='ffhq', shuffle_mb=0),
            mirror_augment=True)
    ]  # karras2019stylegan-ffhq-1024x1024.pkl
    #tasks += [EasyDict(run_func_name='run_metrics.run_snapshot', run_id=100, snapshot=25000)]
    #tasks += [EasyDict(run_func_name='run_metrics.run_all_snapshots', run_id=100)]

    # How many GPUs to use?
    submit_config.num_gpus = 1
    #submit_config.num_gpus = 2
    #submit_config.num_gpus = 4
    #submit_config.num_gpus = 8

    # Execute.
    submit_config.run_dir_root = dnnlib.submission.submit.get_template_from_path(
        config.result_dir)
    submit_config.run_dir_ignore += config.run_dir_ignore
    for task in tasks:
        for metric in metrics:
            submit_config.run_desc = '%s-%s' % (task.run_func_name,
                                                metric.name)
            if task.run_func_name.endswith('run_snapshot'):
                submit_config.run_desc += '-%s-%s' % (task.run_id,
                                                      task.snapshot)
            if task.run_func_name.endswith('run_all_snapshots'):
                submit_config.run_desc += '-%s' % task.run_id
            submit_config.run_desc += '-%dgpu' % submit_config.num_gpus
            dnnlib.submit_run(submit_config, metric_args=metric, **task)
Example no. 20
def main():
    parser = argparse.ArgumentParser(
        description='''VC-GAN and INFO-GAN generator.

Run 'python %(prog)s --help' for subcommand help.''',
        epilog=_examples,
        formatter_class=argparse.RawDescriptionHelpFormatter)

    parser.add_argument('--network_pkl',
                        help='Network pickle filename',
                        required=True)
    parser.add_argument('--n_imgs',
                        type=int,
                        help='Number of images to generate',
                        required=True)
    parser.add_argument('--n_discrete',
                        type=int,
                        help='Number of discrete latents',
                        default=0)
    parser.add_argument('--n_continuous',
                        type=int,
                        help='Number of continuous latents',
                        default=14)
    parser.add_argument('--n_samples_per',
                        type=int,
                        help='Number of samples per row',
                        default=10)
    parser.add_argument('--model_type',
                        type=str,
                        help='Which model is this pkl',
                        default='vc_gan_with_vc_head',
                        choices=['info_gan', 'vc_gan', 'vc_gan_with_vc_head'])
    parser.add_argument(
        '--result-dir',
        help='Root directory for run results (default: %(default)s)',
        default='results',
        metavar='DIR')

    args = parser.parse_args()
    kwargs = vars(args)

    sc = dnnlib.SubmitConfig()
    sc.num_gpus = 1
    sc.submit_target = dnnlib.SubmitTarget.LOCAL
    sc.local.do_not_copy_source_files = True
    sc.run_dir_root = kwargs.pop('result_dir')

    dnnlib.submit_run(sc, 'run_generator_vc.generate_images', **kwargs)
Example no. 21
def main():
    submit_config = dnnlib.SubmitConfig()

    # Which metrics to evaluate?
    metrics = []
    metric_base.fid50k.update({
        "inception_net_path":
        os.path.join(config.result_dir, "inception_network",
                     "inception_v3_features.pkl")
    })
    metrics += [metric_base.fid50k]

    # Which networks to evaluate them on?
    tasks = []
    tasks += [
        EasyDict(
            run_func_name="run_metrics.run_pickle",
            network_pkl=
            "/home/karnewar/msg-stylegan/00002-msg-stylegan-indian_celebs-4gpu/network-snapshot.pkl",
            dataset_args=EasyDict(tfrecord_dir="indian_celebs/tfrecords",
                                  shuffle_mb=0),
            mirror_augment=True,
        )
    ]
    # tasks += [EasyDict(run_func_name='run_metrics.run_snapshot', run_id=100, snapshot=25000)]
    # tasks += [EasyDict(run_func_name='run_metrics.run_all_snapshots', run_id=100)]

    # How many GPUs to use?
    submit_config.num_gpus = 1
    # submit_config.num_gpus = 2
    # submit_config.num_gpus = 4
    # submit_config.num_gpus = 8

    # Execute.
    submit_config.run_dir_root = dnnlib.submission.submit.get_template_from_path(
        config.result_dir)
    submit_config.run_dir_ignore += config.run_dir_ignore
    for task in tasks:
        for metric in metrics:
            submit_config.run_desc = "%s-%s" % (task.run_func_name,
                                                metric.name)
            if task.run_func_name.endswith("run_snapshot"):
                submit_config.run_desc += "-%s-%s" % (task.run_id,
                                                      task.snapshot)
            if task.run_func_name.endswith("run_all_snapshots"):
                submit_config.run_desc += "-%s" % task.run_id
            submit_config.run_desc += "-%dgpu" % submit_config.num_gpus
            dnnlib.submit_run(submit_config, metric_args=metric, **task)
Example no. 22
def main():
    sc = dnnlib.SubmitConfig()
    sc.num_gpus = 1
    sc.submit_target = dnnlib.SubmitTarget.LOCAL
    sc.local.do_not_copy_source_files = True
    sc.run_dir_root = "D:\PythonProjectsDDrive\stylegan2-master"
    sc.run_desc = 'generate-images'
    network_pkl = 'D:\PythonProjectsDDrive\stylegan2-master\TrainedGANs\\network-snapshot-018708.pkl'

    print('Loading networks from "%s"...' % network_pkl)
    _G, _D, Gs = pretrained_networks.load_networks(network_pkl)
    vector_size = Gs.input_shape[1:][0]
    #
    seedData = range(18000, 19000)
    seeds = expand_seed(seedData, vector_size)
    generate_images(Gs, seeds, seedData, "ArithmaticBaseImages", truncation_psi=0.5)
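Note: expand_seed is not shown in this excerpt; the usual pattern in these scripts is to map each integer seed to a latent z vector, roughly as follows (an assumption, not the author's verified helper):

import numpy as np

def expand_seed(seeds, vector_size):
    # One (1, vector_size) latent vector per integer seed, reproducible via the seed itself.
    return [np.random.RandomState(seed).randn(1, vector_size) for seed in seeds]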
Example no. 23
def loadingNetworks():
    # Load the stored paths for the 1024-resolution setup.
    (latentCloatingPicePath, latentGenratedImagePath, clothingPicesPath,
     startingClothinPicePath, personsFolderPath, startingPersonPath,
     networkPath) = loadPath("1024")

    sc = dnnlib.SubmitConfig()
    sc.num_gpus = 1
    sc.submit_target = dnnlib.SubmitTarget.LOCAL
    sc.local.do_not_copy_source_files = True
    sc.run_dir_root = "/content/drive/My Drive/projects/stylegan2"
    sc.run_desc = 'generate-images'
    network_pkl = networkPath
    print('Loading networks from "%s"...' % network_pkl)
    _G, _D, Gs = pretrained_networks.load_networks(network_pkl)
    
    return Gs,_D
Example no. 24
def run(dataset, data_dir, result_dir, config_id, num_gpus, total_kimg, gamma, mirror_augment, metrics):
    train     = EasyDict(run_func_name='training.training_loop.conditional.v5_baseline.training_loop') # Options for training loop.
    G         = EasyDict(func_name='training.networks.conditional.baseline.G_main')       # Options for generator network.
    D         = EasyDict(func_name='training.networks.conditional.baseline.D_stylegan2')  # Options for discriminator network.
    G_opt     = EasyDict(beta1=0.0, beta2=0.99, epsilon=1e-8)                  # Options for generator optimizer.
    D_opt     = EasyDict(beta1=0.0, beta2=0.99, epsilon=1e-8)                  # Options for discriminator optimizer.
    G_loss    = EasyDict(func_name='training.loss.conditional.label_dropout.G_logistic_ns_pathreg')      # Options for generator loss.
    D_loss    = EasyDict(func_name='training.loss.conditional.label_dropout.D_logistic_r1')              # Options for discriminator loss.
    sched     = EasyDict()                                                     # Options for TrainingSchedule.
    grid      = EasyDict(size='1080p', layout='random')                           # Options for setup_snapshot_image_grid().
    sc        = dnnlib.SubmitConfig()                                          # Options for dnnlib.submit_run().
    tf_config = {'rnd.np_random_seed': 1000}                                   # Options for tflib.init_tf().

    # train.resume_pkl = './../results/00326-conditional_label_dropout_25/network-snapshot-000887.pkl'
    # train.resume_kimg = 887.0

    train.data_dir = data_dir
    train.total_kimg = total_kimg
    train.mirror_augment = mirror_augment
    train.image_snapshot_ticks = train.network_snapshot_ticks = 10
    sched.G_lrate_base = sched.D_lrate_base = 0.002
    sched.minibatch_size_base = 32
    sched.minibatch_gpu_base = 4
    D_loss.gamma = 10
    metrics = [metric_defaults[x] for x in metrics]
    desc = 'conditional_label_dropout_25'

    G_loss.label_dropout_prob = 0.5
    D_loss.label_dropout_prob = 0.5

    dataset_args = EasyDict(tfrecord_dir=dataset)

    assert num_gpus in [1, 2, 4, 8]
    sc.num_gpus = num_gpus

    if gamma is not None:
        D_loss.gamma = gamma

    sc.submit_target = dnnlib.SubmitTarget.LOCAL
    sc.local.do_not_copy_source_files = True
    kwargs = EasyDict(train)
    kwargs.update(G_args=G, D_args=D, G_opt_args=G_opt, D_opt_args=D_opt, G_loss_args=G_loss, D_loss_args=D_loss)
    kwargs.update(dataset_args=dataset_args, sched_args=sched, grid_args=grid, metric_arg_list=metrics, tf_config=tf_config)
    kwargs.submit_config = copy.deepcopy(sc)
    kwargs.submit_config.run_dir_root = result_dir
    kwargs.submit_config.run_desc = desc
    dnnlib.submit_run(**kwargs)
Example no. 25
def main():
    submit_config = dnnlib.SubmitConfig()

    # Which metrics to evaluate?
    metrics = []
    metrics += [metric_base.fid50k]
    #metrics += [metric_base.ppl_zfull]
    #metrics += [metric_base.ppl_wfull]
    #metrics += [metric_base.ppl_zend]
    #metrics += [metric_base.ppl_wend]
    #metrics += [metric_base.ls]
    #metrics += [metric_base.dummy]

    # Which networks to evaluate them on?
    tasks = []
    tasks += [
        EasyDict(
            run_func_name='run_metrics.run_pickle',
            network_pkl=
            './results/00010-sgan-logos-1gpu-cond/network-snapshot-020400.pkl',
            dataset_args=EasyDict(tfrecord_dir='resnet_conditions',
                                  shuffle_mb=0),
            mirror_augment=True)
    ]

    # How many GPUs to use?
    submit_config.num_gpus = 1
    #submit_config.num_gpus = 2
    #submit_config.num_gpus = 4
    #submit_config.num_gpus = 8

    # Execute.
    submit_config.run_dir_root = dnnlib.submission.submit.get_template_from_path(
        config.result_dir)
    submit_config.run_dir_ignore += config.run_dir_ignore
    for task in tasks:
        for metric in metrics:
            submit_config.run_desc = '%s-%s' % (task.run_func_name,
                                                metric.name)
            if task.run_func_name.endswith('run_snapshot'):
                submit_config.run_desc += '-%s-%s' % (task.run_id,
                                                      task.snapshot)
            if task.run_func_name.endswith('run_all_snapshots'):
                submit_config.run_desc += '-%s' % task.run_id
            submit_config.run_desc += '-%dgpu' % submit_config.num_gpus
            dnnlib.submit_run(submit_config, metric_args=metric, **task)
Example no. 26
def main():
    submit_config = dnnlib.SubmitConfig()

    # Which metrics to evaluate?
    metrics = []
    metrics += [metric_base.fid50k]
    #metrics += [metric_base.ppl_zfull]
    #metrics += [metric_base.ppl_wfull]
    #metrics += [metric_base.ppl_zend]
    #metrics += [metric_base.ppl_wend]
    #metrics += [metric_base.ls]
    #metrics += [metric_base.dummy]

    # Which networks to evaluate them on?
    tasks = []
    
    # tasks += [EasyDict(run_func_name='run_metrics.run_pickle', network_pkl='../model_results/stylegan/network-snapshot-008908.pkl', dataset_args=EasyDict(tfrecord_dir='ffhq_128x128_tf_stylegan', shuffle_mb=0), mirror_augment=True)] # karras2019stylegan-ffhq-1024x1024.pkl
    tasks += [EasyDict(run_func_name='run_metrics.run_pickle', network_pkl='/home/hxz/GraduationProject/newBeholder-GAN/results/beholder-6001/network-final.pkl', dataset_args=EasyDict(tfrecord_dir='tf_files', shuffle_mb=0), mirror_augment=True)] # karras2019stylegan-ffhq-1024x1024.pkl
    
    # tasks += [EasyDict(run_func_name='run_metrics.run_pickle', network_pkl='../model_results/stylegan/karras2019stylegan-ffhq-1024x1024.pkl', dataset_args=EasyDict(tfrecord_dir='ffhq', shuffle_mb=0), mirror_augment=True)] # karras2019stylegan-ffhq-1024x1024.pkl
    #tasks += [EasyDict(run_func_name='run_metrics.run_pickle', network_pkl='../model_results/beholdergan-original/network-final.pkl', dataset_args=EasyDict(tfrecord_dir='ffhq_128x128_tf', shuffle_mb=0), mirror_augment=True)] # beholdergan
    # tasks += [EasyDict(run_func_name='run_metrics.run_pickle', network_pkl='../model_results/beholdergan-id/first/network-final.pkl', dataset_args=EasyDict(tfrecord_dir='ffhq_128x128_id_tf', shuffle_mb=0), mirror_augment=True)] # beholdergan-id
    # tasks += [EasyDict(run_func_name='run_metrics.run_pickle', network_pkl='../model_results/beholdergan-id/second/network-final.pkl', dataset_args=EasyDict(tfrecord_dir='ffhq_128x128_id_tf', shuffle_mb=0), mirror_augment=True)] # beholdergan-id
    
    #tasks += [EasyDict(run_func_name='run_metrics.run_snapshot', run_id=100, snapshot=25000)]
    #tasks += [EasyDict(run_func_name='run_metrics.run_all_snapshots', run_id=100)]

    # How many GPUs to use?
    submit_config.num_gpus = 1
    #submit_config.num_gpus = 2
    #submit_config.num_gpus = 4
    #submit_config.num_gpus = 8

    # Execute.
    submit_config.run_dir_root = dnnlib.submission.submit.get_template_from_path(config.result_dir)
    submit_config.run_dir_ignore += config.run_dir_ignore
    for task in tasks:
        for metric in metrics:
            submit_config.run_desc = '%s-%s' % (task.run_func_name, metric.name)
            if task.run_func_name.endswith('run_snapshot'):
                submit_config.run_desc += '-%s-%s' % (task.run_id, task.snapshot)
            if task.run_func_name.endswith('run_all_snapshots'):
                submit_config.run_desc += '-%s' % task.run_id
            submit_config.run_desc += '-%dgpu' % submit_config.num_gpus
            dnnlib.submit_run(submit_config, metric_args=metric, **task)
Example no. 27
    def __init__(self, num_gpus, results_dir_root, network_pkl):
        self.results_dir_root = results_dir_root
        sc = dnnlib.SubmitConfig()
        sc.num_gpus = num_gpus
        sc.submit_target = dnnlib.SubmitTarget.LOCAL
        sc.local.do_not_copy_source_files = True
        sc.run_dir_root = results_dir_root
        sc.run_desc = 'generate-images'
        self.sc = sc
        self.network_pkl = network_pkl

        print('Loading networks from "%s"...' % self.network_pkl)
        # _G = Instantaneous snapshot of the generator. Mainly useful for resuming a previous training run.
        # _D = Instantaneous snapshot of the discriminator. Mainly useful for resuming a previous training run.
        # Gs = Long-term average of the generator. Yields higher-quality results than the instantaneous snapshot.
        dnnlib.tflib.init_tf()
        self._G, self._D, self.Gs = pretrained_networks.load_networks(
            self.network_pkl)
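Note: with Gs loaded, such a wrapper typically maps a latent vector to an image via Gs.run. A hedged sketch of that step (the function name and defaults are assumptions; numpy and dnnlib imports as in the surrounding code):

import numpy as np

def generate_one(Gs, seed, truncation_psi=0.5):
    # Draw a reproducible latent vector and run the long-term-average generator.
    z = np.random.RandomState(seed).randn(1, *Gs.input_shape[1:])
    fmt = dict(func=dnnlib.tflib.convert_images_to_uint8, nchw_to_nhwc=True)
    images = Gs.run(z, None, truncation_psi=truncation_psi,
                    randomize_noise=False, output_transform=fmt)
    return images[0]  # HWC uint8 image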
Example no. 28
def morph():
    image_1 = request.json['image1']
    image_2 = request.json['image2']
    fc = request.json['frame_count']
    no_cache = 'no_cache' in request.json and request.json['no_cache'] is True
    seeded = 'seeded' in request.json and request.json['seeded'] is True
    if fc is None:
        fc = 9
    # floor the resolution
    frames = int(sqrt(fc)) ** 2

    # ensure that our morphs are always alphabetically sorted
    image_1, image_2 = sorted([image_1, image_2])
    output_path = "spritesheets/{}_{}_{}.jpg".format(image_1, image_2, frames)
    # check if sheet exists to return early
    bucket_blob = bucket.blob(output_path)
    if not no_cache and bucket_blob.exists():
        return bucket_blob.public_url

    if seeded:
        kwargs = kwards_from_seeds(image_1, image_2, frames)
    else:
        try:
            kwargs = kwargs_from_projections(image_1, image_2, frames)
        except IndexError:
            return "projection for one or both images not found", 400

    sc = dnnlib.SubmitConfig()
    sc.num_gpus = 1
    sc.submit_target = dnnlib.SubmitTarget.LOCAL
    sc.local.do_not_copy_source_files = True
    sc.run_dir_root = kwargs.pop('result_dir')
    sc.run_desc = "serve-latent-walk"
    result_path, _ = dnnlib.submit_run(
        sc, 'run_generator.generate_latent_walk', **kwargs)
    morph_pattern = "{}/*.png".format(result_path)
    sheet = make_spritesheet(morph_pattern, output_path)
    # delete result_path since we have our sheet
    rmtree(result_path)
    # upload to bucket instead of storing locally
    bucket_blob.upload_from_filename(sheet)
    remove(sheet)
    # return the file
    return bucket_blob.public_url
Example no. 29
def main():
    args = parse_args()
    kwargs = vars(args)
    subcmd = kwargs.pop('command')

    if subcmd is None:
        print('Error: missing subcommand.  Re-run with --help for usage.')
        sys.exit(1)

    widget = get_widget(args.psi_steps)
    canvas_elem = widget.FindElement('-CANVAS-')
    canvas = canvas_elem.TKCanvas

    fig = plt.figure()
    ax = fig.add_axes([0, 0, 1, 1])
    ax.axis("off")

    fig.tight_layout()
    fig_agg = draw_figure(canvas, fig)

    sc = dnnlib.SubmitConfig()
    sc.num_gpus = 1
    sc.submit_target = dnnlib.SubmitTarget.LOCAL
    sc.local.do_not_copy_source_files = True
    sc.run_desc = subcmd

    func_name_map = {
        'latent-walk': 'latent_explorer.generate_images',
    }

    result = dnnlib.submit_run(sc, func_name_map[subcmd], **kwargs)
    images = result.return_value

    while True:
        event, values = widget.Read(timeout=10)
        if event in ('Exit', None):
            exit(0)

        ax.cla()
        psi_idx = int(values['-PSI-'])
        ax.imshow(images[psi_idx], aspect='auto')
        fig_agg.draw()
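Note: get_widget and draw_figure belong to the surrounding PySimpleGUI code and are not shown. Embedding a Matplotlib figure in a PySimpleGUI canvas is conventionally done like this (a sketch, assuming the Tk backend):

from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg

def draw_figure(canvas, figure):
    # Attach the Matplotlib figure to the Tk canvas and return the Agg canvas for later redraws.
    figure_canvas_agg = FigureCanvasTkAgg(figure, canvas)
    figure_canvas_agg.draw()
    figure_canvas_agg.get_tk_widget().pack(side='top', fill='both', expand=1)
    return figure_canvas_agg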
Example no. 30
def main():
    sc = dnnlib.SubmitConfig()
    sc.num_gpus = 1
    sc.submit_target = dnnlib.SubmitTarget.LOCAL
    sc.local.do_not_copy_source_files = True
    sc.run_dir_root = "D:\PythonProjectsDDrive\stylegan2-master"
    sc.run_desc = 'generate-images'
    #network_pkl = 'D:\PythonProjectsDDrive\stylegan2-master\TrainedGANs\\network-snapshot-018708.pkl'
    network_pkl = 'D:\PythonProjectsDDrive\stylegan2-master\TrainedGANs\\256-model-network-snapshot-018708.pkl'
    example_img_pkl = "D:\PythonProjectsDDrive\MasterThesisClothesFittingwGANs\LatentSpaceImages\ToEditImagesDL\seed8007.npy"

    print('Loading networks from "%s"...' % network_pkl)
    _G, _D, Gs = pretrained_networks.load_networks(network_pkl)
    vector_size = Gs.input_shape[1:][0]
    #
    #seedData = range(18000,19000)
    #startSeed = 190000
    #seeds = expand_seed(seedData, vector_size)
    #generate_images(Gs, seeds, seedData, "ArithmaticBaseImages", truncation_psi=0.5)
    generateDisplayVectors(example_img_pkl, 10, 1, Gs)