def test():
    """Build same-person re-id pairs from the TRAIN split, generate images
    for them, and write a train-file listing for the re-id experiment.

    Side effects: writes 'data/market-re-id-pairs.csv', writes generated
    images under args.generated_images_dir, and creates 'train.txt' there.
    """
    args = cmdd.args()
    # Point the test-phase dataset at the training images/annotations:
    # we generate augmented data FROM the train split.
    args.images_dir_test = args.images_dir_train
    # FIX: bind the pairs file path once and reuse it below, instead of
    # repeating the literal in to_csv() — keeps the dataset's read path
    # and the write path from silently diverging.
    pairs_file = 'data/market-re-id-pairs.csv'
    args.pairs_file_test = pairs_file
    pairs_for_each = 2
    train_file_name = 'train.txt'
    store_train_images = True
    generated_as_separate = False

    df_keypoints = pd.read_csv(args.annotations_file_train, sep=':')
    df = filter_not_valid(df_keypoints)

    print('Compute pair for train re-id...')
    pairs_df_train = make_pairs(df, pairs_for_each)
    print('Number of pairs: %s' % len(pairs_df_train))
    # Written to the same path the dataset reads (args.pairs_file_test).
    pairs_df_train.to_csv(pairs_file, index=False)

    dataset = PoseHMDataset(test_phase=True, **vars(args))
    generator = make_generator(args.image_size, args.use_input_pose,
                               args.warp_skip, args.disc_type, args.warp_agg,
                               args.use_bg, args.pose_rep_type)
    # A trained generator checkpoint is mandatory for generation.
    assert (args.generator_checkpoint is not None)
    generator.load_weights(args.generator_checkpoint)

    print("Generate images...")
    generate_images(dataset, generator, args.use_input_pose,
                    args.generated_images_dir,
                    store_train_images=store_train_images)
    print("Creating train file...")
    create_train_file(args.generated_images_dir, train_file_name,
                      generated_as_separate)
def test():
    """Obtain (input, target, generated) image triples and save them.

    Either reloads images produced by a previous run
    (args.load_generated_images) or restores the generator from its
    checkpoint and runs it over the test dataset, then writes everything
    to args.generated_images_dir.
    """
    args = cmdd.args()
    if args.load_generated_images:
        # Reuse a previous run's output; no model is built at all.
        print ("Loading images...")
        loaded = load_generated_images(args.generated_images_dir)
        input_images, target_images, generated_images, names = loaded
    else:
        print ("Generate images...")
        from keras import backend as K
        if args.use_dropout_test:
            # Keep dropout layers active at inference time — presumably for
            # stochastic output sampling; confirm against the paper/config.
            K.set_learning_phase(1)
        dataset = PoseHMDataset(test_phase=True, **vars(args))
        generator = make_generator(args.image_size, args.use_input_pose,
                                   args.warp_skip, args.disc_type,
                                   args.warp_agg, args.use_bg,
                                   args.pose_rep_type)
        # Generation without a trained checkpoint makes no sense.
        assert (args.generator_checkpoint is not None)
        generator.load_weights(args.generator_checkpoint)
        input_images, target_images, generated_images, names = generate_images(
            dataset, generator, args.use_input_pose)
    print ("Save images to %s..." % (args.generated_images_dir, ))
    save_images(input_images, target_images, generated_images, names,
                args.generated_images_dir)
def main():
    """Assemble generator, discriminator and dataset from CLI args,
    wire them into a CGAN and run the training loop."""
    args = cmdd.args()
    # Generator and discriminator share the same architecture arguments.
    net_args = (args.image_size, args.use_input_pose, args.warp_skip,
                args.disc_type, args.warp_agg, args.use_bg,
                args.pose_rep_type)

    generator = make_generator(*net_args)
    if args.generator_checkpoint is not None:
        generator.load_weights(args.generator_checkpoint)

    discriminator = make_discriminator(*net_args)
    if args.discriminator_checkpoint is not None:
        discriminator.load_weights(args.discriminator_checkpoint)

    dataset = PoseHMDataset(test_phase=False, **vars(args))
    gan = CGAN(generator, discriminator, **vars(args))
    Trainer(dataset, gan, **vars(args)).train()
import pandas as pd
from cmdd import args
import pose_transform
import pose_utils

from itertools import permutations

# Parsed once at import time; module-level on purpose (other code in this
# file relies on it), even though it shadows the imported factory.
args = args()


def filter_not_valid(df_keypoints):
    """Return a copy of df_keypoints keeping only rows whose pose passes
    pose_transform.check_valid and whose image is not a Market-1501
    distractor/junk entry (names starting with '-1' or '0000')."""
    def check_valid(x):
        kp_array = pose_utils.load_pose_cords_from_strings(
            x['keypoints_y'], x['keypoints_x'])
        distractor = (x['name'].startswith('-1') or
                      x['name'].startswith('0000'))
        return pose_transform.check_valid(kp_array) and not distractor
    return df_keypoints[df_keypoints.apply(check_valid, axis=1)].copy()


def make_pairs(df, pairs_for_each=None):
    """Build all ordered same-person (from, to) image pairs.

    The person id is the first '_'-separated token of the image name
    (Market-1501 naming). Also adds a 'person' column to df in place.

    Args:
        df: DataFrame with a 'name' column of image file names.
        pairs_for_each: optional int — keep at most this many pairs per
            source image. NOTE(review): assumed semantics, matching the
            make_pairs(df, pairs_for_each) call site elsewhere; confirm.

    Returns:
        DataFrame with columns 'from' and 'to'.
    """
    persons = df.apply(lambda x: '_'.join(x['name'].split('_')[0:1]), axis=1)
    df['person'] = persons
    fr, to = [], []
    for person in pd.unique(persons):
        # FIX: zip() is a lazy iterator in Python 3 — the original code
        # called len(pairs) and indexed pairs[0]/pairs[1], which raises
        # TypeError. Materialize it with list().
        names = df[df['person'] == person]['name']
        pairs = list(zip(*list(permutations(names, 2))))
        if len(pairs) != 0:
            fr += list(pairs[0])
            to += list(pairs[1])
    pair_df = pd.DataFrame(index=range(len(fr)))
    pair_df['from'] = fr
    pair_df['to'] = to
    if pairs_for_each is not None:
        # Cap pairs per source image (see docstring note on semantics).
        pair_df = (pair_df.groupby('from', sort=False)
                   .head(pairs_for_each).reset_index(drop=True))
    # FIX: the original built pair_df but never returned it, so callers
    # received None.
    return pair_df
from create_pairs_dataset import filter_not_valid import cmdd import os from shutil import copy, rmtree import pandas as pd from pose_dataset import PoseHMDataset from conditional_gan import make_generator from tqdm import tqdm from test import generate_images, save_images from keras.models import load_model from skimage.io import imread, imsave from skimage.transform import resize import numpy as np if __name__ == "__main__": args = cmdd.args() dataset = 'tmp' target_images_folder = 'data/target-images' source_image = 'data/source-image.jpg' bg_image = 'data/bg-image.jpg' #For target images use kp from previous frame if not detected interpolate = True args.images_dir_test = 'data/' + dataset + '-dataset/test' args.annotations_file_test = 'data/' + dataset + '-annotation-test.csv' args.pairs_file_test = 'data/' + dataset + '-pairs-test.csv' args.bg_images_dir_test = 'data/' + dataset + '-dataset/test-bg'