Example #1
a_img_paths = glob('./Datasets/' + dataset + '/bounding_box_train-Duke/*.jpg')
b_img_paths = glob('./Datasets/' + dataset + '/bounding_box_train-Market/*.jpg')
a_data_pool = data.ImageData(sess,
                             a_img_paths,
                             batch_size,
                             load_size=load_size,
                             crop_size=crop_size)
b_data_pool = data.ImageData(sess,
                             b_img_paths,
                             batch_size,
                             load_size=load_size,
                             crop_size=crop_size)

a_test_img_paths = glob('./Datasets/' + dataset + '/bounding_box_train-Duke/*.jpg')
b_test_img_paths = glob('./Datasets/' + dataset + '/bounding_box_train-Market/*.jpg')
a_test_pool = data.ImageData(sess,
                             a_test_img_paths,
                             batch_size,
                             load_size=load_size,
                             crop_size=crop_size)
b_test_pool = data.ImageData(sess,
                             b_test_img_paths,
                             batch_size,
                             load_size=load_size,
                             crop_size=crop_size)

a2b_pool = utils.ItemPool()
b2a_pool = utils.ItemPool()
'''summary'''
summary_writer = tf.summary.FileWriter('./summaries/' + dataset + '_spgan',
                                       sess.graph)
'''saver'''
ckpt_dir = './checkpoints/' + dataset + '_spgan'
utils.mkdir(ckpt_dir + '/')

saver = tf.train.Saver(max_to_keep=30)
ckpt_path = utils.load_checkpoint(ckpt_dir, sess, saver)
if ckpt_path is None:
    sess.run(tf.global_variables_initializer())
else:
    print('Copy variables from %s' % ckpt_path)
'''train'''
try:
    coord = tf.train.Coordinator()
    threads = tf.train.start_queue_runners(sess=sess, coord=coord)

    batch_epoch = min(len(a_data_pool), len(b_data_pool)) // batch_size
    max_it = epoch * batch_epoch
    now = time.strftime("%c")
Example #2
    parser.add_argument(
        "--subclass",
        help="predict subclasses",
        action="store_true",
    )
    parser.add_argument(
        "--pretrained",
        help="Use pre-trained models from the modelzoo",
        action="store_true",
    )
    parser.add_argument(
        "--distributed",
        help="Use distributed gpu to train models",
        action="store_true",
    )

    # distributed training parameters
    parser.add_argument('--world-size',
                        default=1,
                        type=int,
                        help='number of distributed processes')
    parser.add_argument('--dist-url',
                        default='env://',
                        help='url used to set up distributed training')

    args = parser.parse_args()

    if args.output_dir:
        utils.mkdir(args.output_dir)

    main(args)
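
The --world-size and --dist-url flags above are the usual inputs for torch.distributed. Below is a hedged sketch of how such arguments are commonly consumed; the helper name, the NCCL backend choice, and the RANK/WORLD_SIZE environment variables are assumptions and are not shown in the example.

# Hypothetical init helper; assumes a launcher such as torchrun exports
# RANK and WORLD_SIZE when dist_url is 'env://'.
import os
import torch.distributed as dist

def init_distributed_mode(args):
    if 'RANK' not in os.environ:
        print('Not using distributed mode')
        return False
    args.rank = int(os.environ['RANK'])
    args.world_size = int(os.environ.get('WORLD_SIZE', args.world_size))
    dist.init_process_group(backend='nccl',  # 'gloo' for CPU-only runs
                            init_method=args.dist_url,
                            world_size=args.world_size,
                            rank=args.rank)
    dist.barrier()
    return True
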
Example #3
    if False:  # pretraining disabled; set to True to pretrain each view first
        pretrainer = PreTrainer(pretrain_config)
        for i in range(len(graph.ViewData)):
            pretrainer.pretrain(graph.ViewData[i], 'V' + str(i + 1))

    model_config = {
        'weight_decay': 1.0,
        'View_num': View_num,
        'View': layers,
        'is_init': True,
        'pretrain_params_path': './Log/' + dataset_name + '/pretrain_params.pkl'
    }

    pathResult = './result/' + dataset_name + "/"
    mkdir(pathResult)
    result_file = pathResult + dataset_name + '_' + str(learning_rate) + '_' + str(beta_W) + '.txt'
    with open(result_file, 'w') as f:
        # for beta_i in np.transpose([1,10,50,100,200]):
        #     for alpha_i in [0.001,0.01,0.1,1,10,100]:
        #         for gama_i in [0.001,0.01,0.1,1,10,100]:
        for ccsistent_loss in [0.001, 0.01, 0.1, 0, 1, 10, 50, 100, 200]:
            tf.reset_default_graph()
            trainer_config = {
                'ccsistent_loss': ccsistent_loss,
                'beta_W': beta_W,
                'View_num': View_num,
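
Example #3 rebuilds its model for every swept value and calls tf.reset_default_graph() first. Here is a minimal self-contained sketch of that reset-per-configuration pattern; the toy graph and the output file name are placeholders, not the example's Trainer.

# Placeholder sweep: a fresh graph and session per configuration so that
# variables from earlier runs do not leak into later ones.
import tensorflow as tf

with open('./sweep_results.txt', 'w') as f:  # hypothetical output file
    for weight in [0.001, 0.01, 0.1, 1.0]:
        tf.reset_default_graph()
        x = tf.Variable(1.0)
        loss = weight * tf.square(x)
        with tf.Session() as sess:
            sess.run(tf.global_variables_initializer())
            f.write('weight=%s loss=%s\n' % (weight, sess.run(loss)))
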
Example #4
    saver = tf.train.Saver()
    ckpt_path = utils.load_checkpoint('./checkpoints/' + dataset + '_spgan',
                                      sess, saver)
    if ckpt_path is None:
        raise Exception('No checkpoint!')
    else:
        saver.restore(sess, ckpt_path)
        print('Copy variables from %s' % ckpt_path)

    #--test--#
    b_list = glob('./Datasets/' + dataset + '/bounding_box_train-Market/*.jpg')
    a_list = glob('./Datasets/' + dataset + '/bounding_box_train-Duke/*.jpg')

    b_save_dir = './test_predictions/' + dataset + '_spgan' + '/bounding_box_train_market2duke/'
    a_save_dir = './test_predictions/' + dataset + '_spgan' + '/bounding_box_train_duke2market/'
    utils.mkdir([a_save_dir, b_save_dir])

    for i in range(len(a_list)):
        a_real_ipt = im.imresize(im.imread(a_list[i]), [crop_size, crop_size])
        a_real_ipt.shape = 1, crop_size, crop_size, 3
        a2b_opt = sess.run(a2b, feed_dict={a_real: a_real_ipt})
        a_img_opt = a2b_opt

        img_name = os.path.basename(a_list[i])
        img_name = 'market_' + img_name  # market_style
        im.imwrite(im.immerge(a_img_opt, 1, 1), a_save_dir + img_name)
        print('Save %s' % (a_save_dir + img_name))

    for i in range(len(b_list)):
        b_real_ipt = im.imresize(im.imread(b_list[i]), [crop_size, crop_size])
        b_real_ipt.shape = 1, crop_size, crop_size, 3
        # mirrors the Duke-to-Market loop above, in the other direction
        b2a_opt = sess.run(b2a, feed_dict={b_real: b_real_ipt})
        b_img_opt = b2a_opt

        img_name = os.path.basename(b_list[i])
        img_name = 'duke_' + img_name  # duke_style
        im.imwrite(im.immerge(b_img_opt, 1, 1), b_save_dir + img_name)
        print('Save %s' % (b_save_dir + img_name))
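
Both SPGAN snippets call utils.load_checkpoint, which belongs to that repository and is not shown on this page. The stand-in below is hypothetical: it assumes the helper simply wraps tf.train.latest_checkpoint and restores the newest checkpoint into the running session.

import os
import tensorflow as tf

def load_checkpoint(ckpt_dir_or_file, session, saver):
    # Hypothetical helper: restore the newest checkpoint if one exists and
    # return its path, or None when nothing has been saved yet.
    ckpt_path = ckpt_dir_or_file
    if os.path.isdir(ckpt_dir_or_file):
        ckpt_path = tf.train.latest_checkpoint(ckpt_dir_or_file)
    if ckpt_path is not None:
        saver.restore(session, ckpt_path)
    return ckpt_path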