Example #1
def main(_):
    if FLAGS.exp_name is None:
        FLAGS.exp_name = "reconstrued_results_unitLength" + str(
            int(FLAGS.latent_dim / FLAGS.latent_num))
    image_shape = [
        int(i) for i in FLAGS.image_shape.strip('()[]{}').split(',')
    ]
    dirs = init_directories(FLAGS.exp_name, FLAGS.output_dir)
    dirs['data'] = '../npz_datas' if FLAGS.data_dir is None else FLAGS.data_dir
    dirs['codes'] = os.path.join(dirs['data'], 'codes/')
    create_directories(dirs, FLAGS.train, FLAGS.save_codes)

    output_dim = reduce(mul, image_shape, 1)

    run_config = tf.ConfigProto(allow_soft_placement=True)
    run_config.gpu_options.allow_growth = True
    run_config.gpu_options.per_process_gpu_memory_fraction = 0.9
    sess = tf.Session(config=run_config)

    ae = AE(
        session=sess,
        arch=FLAGS.arch,
        lr=FLAGS.lr,
        alpha=FLAGS.alpha,
        beta=FLAGS.beta,
        latent_dim=FLAGS.latent_dim,
        latent_num=FLAGS.latent_num,
        class_net_unit_num=FLAGS.class_net_unit_num,
        output_dim=output_dim,
        batch_size=FLAGS.batch_size,
        image_shape=image_shape,
        exp_name=FLAGS.exp_name,
        dirs=dirs,
        vis_reconst=FLAGS.visualize_reconstruct,
    )

    if FLAGS.train:

        data1Name = 'SVHN10WithBg_img1_oneguided_N20000x32x32x3_train'

        data_manager = ShapesDataManager(
            dirs['data'],
            data1Name,
            FLAGS.batch_size,
            image_shape,
            shuffle=False,
            file_ext=FLAGS.file_ext,
            train_fract=0.8,
            inf=True,
            supervised=False)  # set supervised=True to also return labels
        ae.train_iter1, ae.dev_iter1, ae.test_iter1 = data_manager.get_iterators()

        n_iters_per_epoch = data_manager.n_train // data_manager.batch_size
        FLAGS.stats_interval = int(FLAGS.stats_interval * n_iters_per_epoch)
        FLAGS.ckpt_interval = int(FLAGS.ckpt_interval * n_iters_per_epoch)
        n_iters = int(FLAGS.epochs * n_iters_per_epoch)

        ae.train(n_iters, n_iters_per_epoch, FLAGS.stats_interval,
                 FLAGS.ckpt_interval)
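
None of the examples here defines the FLAGS object they read from. The sketch below shows how these flags might be declared with TensorFlow 1.x's tf.app.flags and wired to main via tf.app.run(); the flag names are taken from their usage in the examples, while every default value and help string is an assumption rather than the original authors' configuration.

# Hypothetical flag declarations inferred from the FLAGS usage in these examples;
# defaults and help strings are placeholders, not the original authors' values.
import tensorflow as tf

flags = tf.app.flags
flags.DEFINE_string('exp_name', None, 'Experiment name; derived automatically when omitted')
flags.DEFINE_string('arch', 'FC', 'Encoder/decoder architecture identifier (assumed)')
flags.DEFINE_float('lr', 1e-4, 'Learning rate (assumed default)')
flags.DEFINE_float('alpha', 1.0, 'Loss weight alpha (assumed default)')
flags.DEFINE_float('beta', 1.0, 'Loss weight beta (assumed default)')
flags.DEFINE_integer('latent_dim', 10, 'Total latent dimensionality (assumed default)')
flags.DEFINE_integer('latent_num', 10, 'Number of latent groups (assumed default)')
flags.DEFINE_integer('class_net_unit_num', 10, 'Units in the classifier head (assumed default)')
flags.DEFINE_integer('batch_size', 64, 'Mini-batch size (assumed default)')
flags.DEFINE_integer('image_wh', 32, 'Image width/height (assumed default)')
flags.DEFINE_string('image_shape', '(32,32,1)', 'Input image shape as a string')
flags.DEFINE_string('output_dir', 'output', 'Root directory for outputs (assumed default)')
flags.DEFINE_string('data_dir', None, 'Directory with the .npz datasets')
flags.DEFINE_string('file_ext', '.npz', 'Dataset file extension (assumed default)')
flags.DEFINE_boolean('train', True, 'Run training')
flags.DEFINE_boolean('save_codes', False, 'Export latent codes')
flags.DEFINE_boolean('visualize_reconstruct', True, 'Visualise reconstructions')
flags.DEFINE_float('stats_interval', 1.0, 'Logging interval in epochs (assumed default)')
flags.DEFINE_float('ckpt_interval', 1.0, 'Checkpoint interval in epochs (assumed default)')
flags.DEFINE_integer('epochs', 100, 'Number of training epochs (assumed default)')
FLAGS = flags.FLAGS

if __name__ == '__main__':
    tf.app.run()  # parses the flags and calls main(_)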
Example #2
def main(_):
    if FLAGS.exp_name is None:
        FLAGS.exp_name = "UDOR_{}_{}_{}_lr_{}".format(FLAGS.arch,
                                                      FLAGS.latent_dim,
                                                      FLAGS.image_wh, FLAGS.lr)
    image_shape = [
        int(i) for i in FLAGS.image_shape.strip('()[]{}').split(',')
    ]
    dirs = init_directories(FLAGS.exp_name, FLAGS.output_dir)
    dirs['data'] = '../../../npz_datas' if FLAGS.data_dir is None else FLAGS.data_dir
    dirs['codes'] = os.path.join(dirs['data'], 'codes/')
    create_directories(dirs, FLAGS.train, FLAGS.save_codes)

    output_dim = reduce(mul, image_shape, 1)

    run_config = tf.ConfigProto(allow_soft_placement=True)
    run_config.gpu_options.allow_growth = True
    run_config.gpu_options.per_process_gpu_memory_fraction = 0.9
    sess = tf.Session(config=run_config)

    ae = AE2input(
        session=sess,
        arch=FLAGS.arch,
        lr=FLAGS.lr,
        alpha=FLAGS.alpha,
        beta=FLAGS.beta,
        latent_dim=FLAGS.latent_dim,
        latent_num=FLAGS.latent_num,
        class_net_unit_num=FLAGS.class_net_unit_num,
        output_dim=output_dim,
        batch_size=FLAGS.batch_size,
        image_shape=image_shape,
        exp_name=FLAGS.exp_name,
        dirs=dirs,
        vis_reconst=FLAGS.visualize_reconstruct,
    )

    # run the trained model to export latent-code representations
    data1Name = "mnistImgs_GTimages_mask_GTlabel_(47x64)x32x32x1_unitLength1_CodeImageDataset_forUDOR"

    data_manager = ShapesDataManager(dirs['data'],
                                     data1Name,
                                     FLAGS.batch_size,
                                     image_shape,
                                     shuffle=False,
                                     file_ext=FLAGS.file_ext,
                                     train_fract=1.0,
                                     inf=True)
    ae.train_iter1, ae.dev_iter1, ae.test_iter1 = data_manager.get_iterators()

    ae.session.run(tf.global_variables_initializer())
    saved_step = ae.load_fixedNum(1125)
    assert saved_step > 1, "A trained model is needed to encode the data!"

    pathForSave = 'RepreCodesImgs'
    try:
        os.makedirs(pathForSave)
    except OSError as exc:  # Python >2.5
        if exc.errno == errno.EEXIST and os.path.isdir(pathForSave):
            pass
        else:
            raise

    codes = []
    images = []
    sampleNum = 3008  # 47*64
    for batch_num in range(int(sampleNum / FLAGS.batch_size)):
        img_batch, _mask1, _ = next(ae.train_iter1)
        # code = ae.encode(img_batch) #[batch_size, reg_latent_dim]
        code, image = ae.getCodesAndImgs(pathForSave, img_batch, _mask1,
                                         batch_num)
        codes.append(code)
        images.append(image)
        if batch_num < 5 or batch_num % 10 == 0:
            print(("Batch number {0}".format(batch_num)))

    codes = np.vstack(codes)
    images = np.vstack(images)
    filename = os.path.join(dirs['codes'], "codes_" + FLAGS.exp_name)
    # np.save(filename, codes)
    np.savez(filename + '.npz', imagesNorm0_1=images, codes=codes)

    print(("Images and Codes saved to: {0}".format(filename)))
Example #3
def main(_):
    if FLAGS.exp_name is None:
        FLAGS.exp_name = "UDOR_{}_{}_{}_lr_{}".format(FLAGS.arch,
                                                      FLAGS.latent_dim,
                                                      FLAGS.image_wh, FLAGS.lr)
    image_shape = [
        int(i) for i in FLAGS.image_shape.strip('()[]{}').split(',')
    ]
    dirs = init_directories(FLAGS.exp_name, FLAGS.output_dir)
    dirs['data'] = '../../../npz_datas' if FLAGS.data_dir is None else FLAGS.data_dir
    dirs['codes'] = os.path.join(dirs['data'], 'codes/')
    create_directories(dirs, FLAGS.train, FLAGS.save_codes)

    output_dim = reduce(mul, image_shape, 1)

    run_config = tf.ConfigProto(allow_soft_placement=True)
    run_config.gpu_options.allow_growth = True
    run_config.gpu_options.per_process_gpu_memory_fraction = 0.9
    sess = tf.Session(config=run_config)

    ae = AE2input(
        session=sess,
        arch=FLAGS.arch,
        lr=FLAGS.lr,
        alpha=FLAGS.alpha,
        beta=FLAGS.beta,
        latent_dim=FLAGS.latent_dim,
        latent_num=FLAGS.latent_num,
        class_net_unit_num=FLAGS.class_net_unit_num,
        output_dim=output_dim,
        batch_size=FLAGS.batch_size,
        image_shape=image_shape,
        exp_name=FLAGS.exp_name,
        dirs=dirs,
        vis_reconst=FLAGS.visualize_reconstruct,
    )

    if FLAGS.train:

        data1Name = 'unitLength1_mnistMultiAndMask_10000x32x32x1_train'

        data_manager = ShapesDataManager(dirs['data'],
                                         data1Name,
                                         FLAGS.batch_size,
                                         image_shape,
                                         shuffle=False,
                                         file_ext=FLAGS.file_ext,
                                         train_fract=0.8,
                                         inf=True)
        ae.train_iter1, ae.dev_iter1, ae.test_iter1 = data_manager.get_iterators()

        n_iters_per_epoch = data_manager.n_train // data_manager.batch_size
        FLAGS.stats_interval = int(FLAGS.stats_interval * n_iters_per_epoch)
        FLAGS.ckpt_interval = int(FLAGS.ckpt_interval * n_iters_per_epoch)
        n_iters = int(FLAGS.epochs * n_iters_per_epoch)

        ae.train(n_iters, n_iters_per_epoch, FLAGS.stats_interval,
                 FLAGS.ckpt_interval)

    # test
    data1Name = "mnist_(20x64)x32x32x1_unitLength1_test_visualdata1"

    data_manager = ShapesDataManager(dirs['data'],
                                     data1Name,
                                     FLAGS.batch_size,
                                     image_shape,
                                     shuffle=False,
                                     file_ext=FLAGS.file_ext,
                                     train_fract=1.0,
                                     inf=True)
    ae.train_iter1, ae.dev_iter1, ae.test_iter1 = data_manager.get_iterators()

    ae.session.run(tf.global_variables_initializer())
    saved_step = ae.load()
    assert saved_step > 1, "A trained model is needed to encode the data!"

    pathForSave = 'RecostructedImg64Test'
    try:
        os.makedirs(pathForSave)
    except OSError as exc:  # Python >2.5
        if exc.errno == errno.EEXIST and os.path.isdir(pathForSave):
            pass
        else:
            raise

    for k in range(1):  # a single batch of 64 images is enough here
        fixed_x1, fixed_mk1, _ = next(ae.train_iter1)
        #print(fixed_x1.shape)
        #print(fixed_mk1 )
        ae.encodeImg(pathForSave, fixed_x1, fixed_mk1, k)
    #print(k)
    print('finished encoding!')
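
The try/except around os.makedirs in these examples is the pre-Python-3.2 idiom for "create the directory unless it already exists". On Python 3.2 and later the same effect is a one-liner, sketched here as an equivalent rather than a change to the original scripts:

import os

# Python 3.2+ equivalent of the try/except errno.EEXIST idiom used above.
os.makedirs('RecostructedImg64Test', exist_ok=True)  # same directory name as in the example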
Example #4
def main(_):
    if FLAGS.exp_name is None:
        FLAGS.exp_name = "dual_diaeMnist_unitLength_{0}".format(
            np.int32(FLAGS.latent_dim / 3))
    image_shape = [
        int(i) for i in FLAGS.image_shape.strip('()[]{}').split(',')
    ]
    dirs = init_directories(FLAGS.exp_name, FLAGS.output_dir)
    dirs['data'] = '../../npz_datas' if FLAGS.data_dir is None else FLAGS.data_dir
    dirs['codes'] = os.path.join(dirs['data'], 'codes/')
    create_directories(dirs, FLAGS.train, FLAGS.save_codes)

    output_dim = reduce(mul, image_shape, 1)

    run_config = tf.ConfigProto(allow_soft_placement=True)
    run_config.gpu_options.allow_growth = True
    run_config.gpu_options.per_process_gpu_memory_fraction = 0.9
    sess = tf.Session(config=run_config)

    ae = DIAE(
        session=sess,
        arch=FLAGS.arch,
        lr=FLAGS.lr,
        alpha=FLAGS.alpha,
        beta=FLAGS.beta,
        latent_dim=FLAGS.latent_dim,
        output_dim=output_dim,
        batch_size=FLAGS.batch_size,
        image_shape=image_shape,
        exp_name=FLAGS.exp_name,
        dirs=dirs,
        vis_reconst=FLAGS.visualize_reconstruct,
    )

    if FLAGS.save_codes:
        sampleNum = 1000
        dataVisualName = 'SVHN10_img_N1000x32x32x3_testForModularity'
        data_manager = ShapesDataManager(dirs['data'],
                                         dataVisualName,
                                         FLAGS.batch_size,
                                         image_shape,
                                         shuffle=False,
                                         file_ext=FLAGS.file_ext,
                                         train_fract=1.0,
                                         inf=True)

        #data_manager.set_divisor_batch_size()

        ae.train_iter, ae.dev_iter, ae.test_iter = data_manager.get_iterators()

        ae.session.run(tf.global_variables_initializer())
        #saved_step = ae.load()
        saved_step = ae.load_fixedNum(4250)
        assert saved_step > 1, "A trained model is needed to encode the data!"

        pathForSave = 'ValidateEncodedImgs'
        if not os.path.exists(pathForSave):
            os.mkdir(pathForSave)

        codes = []
        images = []
        for batch_num in range(int(sampleNum / FLAGS.batch_size)):
            img_batch, _mask1, _ = next(ae.train_iter)
            #code = ae.encode(img_batch) #[batch_size, reg_latent_dim]
            code, image = ae.getCodesAndImgs(pathForSave, img_batch, batch_num)
            codes.append(code)
            images.append(image)
            if batch_num < 5 or batch_num % 10 == 0:
                print(("Batch number {0}".format(batch_num)))

        codes = np.vstack(codes)
        images = np.vstack(images)
        codes_name = 'CIFAR3_codesModularityMetricsCal'
        filename = os.path.join(pathForSave, "codes_" + codes_name)
        #np.save(filename, codes)
        np.savez(filename + '.npz', imagesNorm0_1=images, codes=codes)

        print(("Images and Codes saved to: {0}".format(filename)))
Example #5
def main(_):
    if FLAGS.exp_name is None:
        FLAGS.exp_name = "reconstrued_results_unitLength" + str(
            int(FLAGS.latent_dim / FLAGS.latent_num))
    image_shape = [
        int(i) for i in FLAGS.image_shape.strip('()[]{}').split(',')
    ]
    dirs = init_directories(FLAGS.exp_name, FLAGS.output_dir)
    dirs['data'] = '../../npz_datas' if FLAGS.data_dir is None else FLAGS.data_dir
    dirs['codes'] = os.path.join(dirs['data'], 'codes/')
    create_directories(dirs, FLAGS.train, FLAGS.save_codes)

    output_dim = reduce(mul, image_shape, 1)

    run_config = tf.ConfigProto(allow_soft_placement=True)
    run_config.gpu_options.allow_growth = True
    run_config.gpu_options.per_process_gpu_memory_fraction = 0.9
    sess = tf.Session(config=run_config)

    diae = DIAE(
        session=sess,
        arch=FLAGS.arch,
        lr=FLAGS.lr,
        alpha=FLAGS.alpha,
        beta=FLAGS.beta,
        latent_dim=FLAGS.latent_dim,
        latent_num=FLAGS.latent_num,
        class_net_unit_num=FLAGS.class_net_unit_num,
        output_dim=output_dim,
        batch_size=FLAGS.batch_size,
        image_shape=image_shape,
        exp_name=FLAGS.exp_name,
        dirs=dirs,
        vis_reconst=FLAGS.visualize_reconstruct,
    )

    # if FLAGS.train:

    #     data1Name='SVHNWithBg__img1_oneguided_N10000x32x32x3_train'
    #     data2Name='SVHNWithBg__mask1_oneguided_N10000x32x32x3_train'
    #     data3Name='SVHNWithBg__aux1_GT1_oneguided_N10000x32x32x3_train'
    #     data4Name='SVHNWithBg_aux2_GT2_oneguided_N10000x32x32x3_train'

    #     data_manager = TeapotsDataManager(dirs['data'],
    #                     data1Name,data2Name,data3Name,data4Name, FLAGS.batch_size,
    #                     image_shape, shuffle=False,file_ext=FLAGS.file_ext, train_fract=0.8,inf=True)
    #     diae.train_iter1, diae.dev_iter1, diae.test_iter1,diae.train_iter2, diae.dev_iter2, diae.test_iter2,diae.train_iter3, diae.dev_iter3, diae.test_iter3,diae.train_iter4, diae.dev_iter4, diae.test_iter4= data_manager.get_iterators()

    #     n_iters_per_epoch = data_manager.n_train // data_manager.batch_size
    #     FLAGS.stats_interval = int(FLAGS.stats_interval * n_iters_per_epoch)
    #     FLAGS.ckpt_interval = int(FLAGS.ckpt_interval * n_iters_per_epoch)
    #     n_iters = int(FLAGS.epochs * n_iters_per_epoch)

    #     diae.train(n_iters, n_iters_per_epoch, FLAGS.stats_interval, FLAGS.ckpt_interval)
    if FLAGS.save_codes:
        sampleNum = 3200  # 50 x 64; only forward passes, so a large sample is fine
        dataVisualName = 'SVHN10_img_N3200x32x32x3_testWithLabel_forMetrics'
        data_manager = ShapesDataManager(dirs['data'],
                                         dataVisualName,
                                         FLAGS.batch_size,
                                         image_shape,
                                         shuffle=False,
                                         file_ext=FLAGS.file_ext,
                                         train_fract=1.0,
                                         inf=True)

        #data_manager.set_divisor_batch_size()

        diae.train_iter, diae.dev_iter, diae.test_iter = data_manager.get_iterators()

        diae.session.run(tf.global_variables_initializer())
        #saved_step = ae.load()
        saved_step = diae.load_fixedNum(3250)
        assert saved_step > 1, "A trained model is needed to encode the data!"

        pathForSave = 'ValidateEncodedImgs'
        if not os.path.exists(pathForSave):
            os.mkdir(pathForSave)

        codes = []
        images = []
        for batch_num in range(int(sampleNum / FLAGS.batch_size)):
            img_batch, _mask1, _ = next(diae.train_iter)
            #code = ae.encode(img_batch) #[batch_size, reg_latent_dim]
            code, image = diae.getCodesAndImgs(pathForSave, img_batch,
                                               batch_num)
            codes.append(code)
            images.append(image)
            if batch_num < 5 or batch_num % 10 == 0:
                print(("Batch number {0}".format(batch_num)))

        codes = np.vstack(codes)
        images = np.vstack(images)
        codes_name = 'CIFAR3_codesAndImgForMetricsCal'
        filename = os.path.join(pathForSave, "codes_" + codes_name)
        #np.save(filename, codes)
        np.savez(filename + '.npz', imagesNorm0_1=images, codes=codes)

        print(("Images and Codes saved to: {0}".format(filename)))
Example #6
def main(_):
    if FLAGS.exp_name is None:
        FLAGS.exp_name = "reconstrued_results"
    image_shape = [
        int(i) for i in FLAGS.image_shape.strip('()[]{}').split(',')
    ]
    dirs = init_directories(FLAGS.exp_name, FLAGS.output_dir)
    dirs['data'] = '../../../npz_datas_single_test' if FLAGS.data_dir is None else FLAGS.data_dir
    dirs['codes'] = os.path.join(dirs['data'], 'codes/')
    create_directories(dirs, FLAGS.train, FLAGS.save_codes)

    output_dim = reduce(mul, image_shape, 1)

    run_config = tf.ConfigProto(allow_soft_placement=True)
    run_config.gpu_options.allow_growth = True
    run_config.gpu_options.per_process_gpu_memory_fraction = 0.9
    sess = tf.Session(config=run_config)

    ae = AE(
        session=sess,
        arch=FLAGS.arch,
        lr=FLAGS.lr,
        alpha=FLAGS.alpha,
        beta=FLAGS.beta,
        latent_dim=FLAGS.latent_dim,
        latent_num=FLAGS.latent_num,
        class_net_unit_num=FLAGS.class_net_unit_num,
        output_dim=output_dim,
        batch_size=FLAGS.batch_size,
        image_shape=image_shape,
        exp_name=FLAGS.exp_name,
        dirs=dirs,
        vis_reconst=FLAGS.visualize_reconstruct,
    )

    # test: generate changed (reconstructed) images for visualisation
    data1Name = "mnistOffset1_Imgs_GTimages_mask_GTlabel_(32x64)x32x32x1_unitLength1_CodeImageDataset"

    data_manager = ShapesDataManager(dirs['data'],
                                     data1Name,
                                     FLAGS.batch_size,
                                     image_shape,
                                     shuffle=False,
                                     file_ext=FLAGS.file_ext,
                                     train_fract=1.0,
                                     inf=True)
    ae.train_iter1, ae.dev_iter1, ae.test_iter1 = data_manager.get_iterators()

    ae.session.run(tf.global_variables_initializer())
    saved_step = ae.load_fixedNum(1875)
    assert saved_step > 1, "A trained model is needed to encode the data!"

    pathForSave = 'VisualImgsResults'
    try:
        os.makedirs(pathForSave)
    except OSError as exc:  # Python >2.5
        if exc.errno == errno.EEXIST and os.path.isdir(pathForSave):
            pass
        else:
            raise

    sampleNum = 2048
    for k in range(int(sampleNum / FLAGS.batch_size)):
        fixed_x1, fixed_mk1, _ = next(ae.train_iter1)
        #print(fixed_x1.shape)
        #print(fixed_mk1 )
        ae.visualise_reconstruction_path(pathForSave, fixed_x1, fixed_mk1, k)
    #print(k)
    print('finished encoding!')
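
All of these scripts rely on the TensorFlow 1.x API (tf.ConfigProto, tf.Session, tf.global_variables_initializer) and will not run unmodified on TensorFlow 2. A common stopgap, sketched below, is to import the v1 compatibility module and disable v2 behaviour before building the graph:

# Sketch: running the TF1-style examples above under TensorFlow 2.
import tensorflow.compat.v1 as tf
tf.disable_v2_behavior()   # restores graph mode, Session and ConfigProto

run_config = tf.ConfigProto(allow_soft_placement=True)
run_config.gpu_options.allow_growth = True
sess = tf.Session(config=run_config)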