Example #1
    def compute_test_result(self):
        from option_parser import try_mkdir

        # Denormalize the ground-truth motions and write them out as
        # per-character reference BVH files.
        gt_poses = []
        gt_denorm = []
        for src in range(self.n_topology):
            gt = self.motion_backup[src]
            idx = list(range(gt.shape[0]))
            gt = self.dataset.denorm(src, idx, gt)
            gt_denorm.append(gt)
            # Forward kinematics: recover joint positions from the raw
            # rotation representation.
            gt_pose = self.models[src].fk.forward_from_raw(
                gt, self.dataset.offsets[src][idx])
            gt_poses.append(gt_pose)
            for i in idx:
                new_path = os.path.join(self.bvh_path,
                                        self.character_names[src][i])
                try_mkdir(new_path)
                self.writer[src][i].write_raw(
                    gt[i, ...], 'quaternion',
                    os.path.join(new_path, '{}_gt.bvh'.format(self.id_test)))

        # p indexes the (src, dst) pairs in the order of the nested loops
        # below; fake_res_denorm[p] holds motion retargeted from topology
        # src onto each character of topology dst.
        p = 0
        for src in range(self.n_topology):
            for dst in range(self.n_topology):
                for i in range(len(self.character_names[dst])):
                    dst_path = os.path.join(self.bvh_path,
                                            self.character_names[dst][i])
                    self.writer[dst][i].write_raw(
                        self.fake_res_denorm[p][i, ...], 'quaternion',
                        os.path.join(dst_path,
                                     '{}_{}.bvh'.format(self.id_test, src)))
                p += 1

        self.id_test += 1
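
Every example in this collection relies on try_mkdir from option_parser, but the helper itself is never shown in these excerpts. A minimal implementation consistent with how it is used (an assumption; the real helper may differ) is:

import os

def try_mkdir(path):
    # Assumed behavior, inferred from usage: create the directory
    # without failing when it already exists.
    if not os.path.exists(path):
        os.makedirs(path)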
Example #2
def batch_copy(source_path, suffix, dest_path, dest_suffix=None):
    """Copy every '*_{suffix}.bvh' file in source_path into dest_path,
    replacing the suffix with dest_suffix (or stripping it if None)."""
    try_mkdir(dest_path)
    files = [f for f in os.listdir(source_path)
             if f.endswith('_{}.bvh'.format(suffix))]

    length = len('_{}.bvh'.format(suffix))  # length of the tail to strip
    for f in files:
        if dest_suffix is not None:
            new_name = f[:-length] + '_{}.bvh'.format(dest_suffix)
        else:
            new_name = f[:-length] + '.bvh'
        cmd = 'cp "{}" "{}"'.format(os.path.join(source_path, f),
                                    os.path.join(dest_path, new_name))
        os.system(cmd)
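
Shelling out to cp ties batch_copy to Unix and breaks on file names containing quote characters. A portable variant using the standard library (a sketch, not taken from the original code) would be:

import os
import shutil

def batch_copy_portable(source_path, suffix, dest_path, dest_suffix=None):
    # Same renaming logic as batch_copy above, but copies via shutil so
    # it works on any OS and handles arbitrary file names safely.
    os.makedirs(dest_path, exist_ok=True)
    old_tail = '_{}.bvh'.format(suffix)
    new_tail = '_{}.bvh'.format(dest_suffix) if dest_suffix is not None else '.bvh'
    for f in os.listdir(source_path):
        if f.endswith(old_tail):
            shutil.copyfile(os.path.join(source_path, f),
                            os.path.join(dest_path, f[:-len(old_tail)] + new_tail))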
Example #3
    def save(self):
        # Save each topology's networks, then every optimizer's state,
        # grouped under the current epoch count.
        for i, model in enumerate(self.models):
            model.save(
                os.path.join(self.model_save_dir, 'topology{}'.format(i)),
                self.epoch_cnt)

        for i, optimizer in enumerate(self.optimizers):
            file_name = os.path.join(
                self.model_save_dir,
                'optimizers/{}/{}.pt'.format(self.epoch_cnt, i))
            try_mkdir(os.path.split(file_name)[0])
            torch.save(optimizer.state_dict(), file_name)
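
The excerpts never show how these optimizer checkpoints are read back (Example #7 only calls model.load). A hypothetical counterpart that reverses the layout written above could look like this; the repository's actual loader may differ:

    def load_optimizers(self, epoch):
        # Hypothetical inverse of save(): restore each optimizer from
        # the 'optimizers/{epoch}/{i}.pt' file it was written to.
        for i, optimizer in enumerate(self.optimizers):
            file_name = os.path.join(
                self.model_save_dir,
                'optimizers/{}/{}.pt'.format(epoch, i))
            optimizer.load_state_dict(torch.load(file_name))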
Example #4
    def __init__(self, args, character_names, dataset):
        super(GAN_model, self).__init__(args)
        self.character_names = character_names
        self.dataset = dataset
        self.n_topology = len(character_names)
        self.models = []
        self.D_para = []
        self.G_para = []
        self.args = args

        # One IntegratedModel per skeleton topology; discriminator and
        # generator parameters are pooled for the shared optimizers below.
        for i in range(self.n_topology):
            model = IntegratedModel(args, dataset.joint_topologies[i], None,
                                    self.device, character_names[i])
            self.models.append(model)
            self.D_para += model.D_parameters()
            self.G_para += model.G_parameters()

        if self.is_train:
            # Training mode: a fake-sample pool per topology and separate
            # Adam optimizers over all discriminators and all generators.
            self.fake_pools = []
            self.optimizerD = optim.Adam(self.D_para,
                                         args.learning_rate,
                                         betas=(0.9, 0.999))
            self.optimizerG = optim.Adam(self.G_para,
                                         args.learning_rate,
                                         betas=(0.9, 0.999))
            self.optimizers = [self.optimizerD, self.optimizerG]
            self.criterion_rec = torch.nn.MSELoss()
            self.criterion_gan = GAN_loss(args.gan_mode).to(self.device)
            self.criterion_cycle = torch.nn.L1Loss()
            self.criterion_ee = Criterion_EE(args, torch.nn.MSELoss())
            for i in range(self.n_topology):
                self.fake_pools.append(ImagePool(args.pool_size))
        else:
            import option_parser
            from datasets.bvh_writer import BVH_writer
            from datasets.bvh_parser import BVH_file

            # Evaluation mode: one error criterion per topology and one
            # BVH writer per character for dumping test results.
            self.err_crit = []
            for i in range(self.n_topology):
                self.err_crit.append(
                    Eval_Criterion(dataset.joint_topologies[i]))
            self.id_test = 0
            self.bvh_path = os.path.join(args.save_dir, 'results/bvh')
            option_parser.try_mkdir(self.bvh_path)

            self.writer = []
            for i in range(self.n_topology):
                writer_group = []
                for char in character_names[i]:
                    file = BVH_file(option_parser.get_std_bvh(dataset=char))
                    writer_group.append(BVH_writer(file.edges, file.names))
                self.writer.append(writer_group)
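
Tying this constructor back to Example #7: assuming create_model there ultimately builds a GAN_model (an assumption based on the matching argument lists), the setup reduces to:

args = option_parser.get_args()
characters = get_character_names(args)
dataset = create_dataset(args, characters)
model = GAN_model(args, characters, dataset)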
Example #5
    def save(self, path, epoch):
        from option_parser import try_mkdir

        # Each epoch gets its own subdirectory holding the character
        # height tensor and the state of all three networks.
        path = os.path.join(path, str(epoch))
        try_mkdir(path)

        torch.save(self.height, os.path.join(path, 'height.pt'))
        torch.save(self.auto_encoder.state_dict(),
                   os.path.join(path, 'auto_encoder.pt'))
        torch.save(self.discriminator.state_dict(),
                   os.path.join(path, 'discriminator.pt'))
        torch.save(self.static_encoder.state_dict(),
                   os.path.join(path, 'static_encoder.pt'))

        print('Save at {} succeeded!'.format(path))
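
The matching restore step for this per-module checkpoint is not shown either; a sketch that simply mirrors the file names written above (the real loader may differ) is:

    def load(self, path, epoch):
        # Hypothetical inverse of save(): read the tensors and state
        # dicts back from the per-epoch directory.
        path = os.path.join(path, str(epoch))
        self.height = torch.load(os.path.join(path, 'height.pt'))
        self.auto_encoder.load_state_dict(
            torch.load(os.path.join(path, 'auto_encoder.pt')))
        self.discriminator.load_state_dict(
            torch.load(os.path.join(path, 'discriminator.pt')))
        self.static_encoder.load_state_dict(
            torch.load(os.path.join(path, 'static_encoder.pt')))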
Example #6
def batch_split(source, dest):
    files = [f for f in os.listdir(source) if f.endswith('.bvh')]
    # Skip this directory if its first BVH cannot be parsed or its
    # skeleton is not the type this splitting step targets.
    try:
        bvh_file = BVH_file(os.path.join(source, files[0]))
        if bvh_file.skeleton_type != 1:
            return
    except Exception:
        return

    print("Working on {}".format(os.path.split(source)[-1]))
    try_mkdir(dest)
    for file in tqdm(files):
        in_file = os.path.join(source, file)
        out_file = os.path.join(dest, file)
        split_joint(in_file, out_file)
Example #7
def main():
    args = option_parser.get_args()
    characters = get_character_names(args)

    log_path = os.path.join(args.save_dir, 'logs/')
    try_mkdir(args.save_dir)
    try_mkdir(log_path)

    with open(os.path.join(args.save_dir, 'para.txt'), 'w') as para_file:
        para_file.write(' '.join(sys.argv))

    dataset = create_dataset(args, characters)
    data_loader = DataLoader(dataset, batch_size=args.batch_size, shuffle=True, num_workers=2)

    model = create_model(args, characters, dataset)

    # Resume from a saved checkpoint when a starting epoch is given.
    if args.epoch_begin:
        model.load(epoch=args.epoch_begin, download=False)

    model.setup()

    start_time = time.time()

    for epoch in range(args.epoch_begin, args.epoch_num):
        for step, motions in enumerate(data_loader):
            model.set_input(motions)
            model.optimize_parameters()

            if args.verbose:
                res = model.verbose()
                print('[{}/{}]\t[{}/{}]\t'.format(epoch, args.epoch_num, step, len(data_loader)), res)

        # Checkpoint every 200 epochs and at the final epoch.
        if epoch % 200 == 0 or epoch == args.epoch_num - 1:
            model.save()

        model.epoch()

    end_time = time.time()
    print('training time', end_time - start_time)
Example #8

def copy_std_bvh(data_path, character, files):
    """
    Copy an arbitrary BVH file to serve as a static reference for the
    skeleton's offsets.
    """
    cmd = 'cp "{}" ./datasets/Mixamo/std_bvhs/{}.bvh'.format(
        os.path.join(data_path, character, files[0]), character)
    os.system(cmd)


if __name__ == '__main__':
    prefix = './datasets/Mixamo/'
    characters = [
        f for f in os.listdir(prefix) if os.path.isdir(os.path.join(prefix, f))
    ]
    if 'std_bvhs' in characters:
        characters.remove('std_bvhs')
    if 'mean_var' in characters:
        characters.remove('mean_var')

    try_mkdir(os.path.join(prefix, 'std_bvhs'))
    try_mkdir(os.path.join(prefix, 'mean_var'))

    for character in characters:
        data_path = os.path.join(prefix, character)
        files = sorted(
            [f for f in os.listdir(data_path) if f.endswith(".bvh")])

        collect_bvh(prefix, character, files)
        copy_std_bvh(prefix, character, files)
        write_statistics(character, './datasets/Mixamo/mean_var/')
Example #9
def split_joint(file_name, save_file=None):
    """Subdivide the spine and shoulder joints of a BVH file by inserting
    a '_split' joint halfway along the bone ending at each target joint."""
    if save_file is None:
        save_file = file_name
    target_joints = ['Spine1', 'LeftShoulder', 'RightShoulder']
    target_idx = [-1] * len(target_joints)
    anim, names, ftime = BVH.load(file_name)

    n_joint = len(anim.parents)

    # Strip any 'namespace:' prefix from joint names, then record the
    # indices of the target joints.
    for i, name in enumerate(names):
        if ':' in name:
            name = name[name.find(':') + 1:]
            names[i] = name

        for j, joint in enumerate(target_joints):
            if joint == names[i]:
                target_idx[j] = i

    new_anim = anim.copy()
    new_anim.offsets = []
    new_anim.parents = []
    new_anim.rotations = []
    new_names = []

    target_idx.sort()

    # new_id maps each original joint index to its index after the
    # '_split' joints are inserted: everything past the k-th target
    # shifts down by k.
    bias = 0
    new_id = {-1: -1}
    target_idx.append(-1)  # sentinel so bias never runs off the list
    for i in range(n_joint):
        new_id[i] = i + bias
        if i == target_idx[bias]:
            bias += 1

    # A single-joint track of zero (no-op) rotations, one per frame,
    # reused for every inserted joint.
    identity = np.zeros_like(anim.rotations)
    identity = identity[:, :1, :]

    bias = 0
    for i in range(n_joint):
        new_anim.parents.append(new_id[anim.parents[i]])
        new_names.append(names[i])
        new_anim.rotations.append(anim.rotations[:, [i], :])

        if i == target_idx[bias]:
            # Halve the original offset and append the '_split' child,
            # which carries the other half and a zero rotation.
            new_anim.offsets.append(anim.offsets[i] / 2)

            new_anim.parents.append(i + bias)
            new_names.append(names[i] + '_split')
            new_anim.offsets.append(anim.offsets[i] / 2)

            new_anim.rotations.append(identity)

            # Children of this joint should attach to the split joint.
            new_id[i] += 1
            bias += 1
        else:
            new_anim.offsets.append(anim.offsets[i])

    new_anim.offsets = np.array(new_anim.offsets)

    # Redistribute the combined length of the original spine bone and its
    # successor evenly over the three segments that now span them.
    offset_spine = anim.offsets[target_idx[0]] + anim.offsets[target_idx[0] + 1]
    new_anim.offsets[target_idx[0]:target_idx[0] + 3, :] = offset_spine / 3

    new_anim.rotations = np.concatenate(new_anim.rotations, axis=1)
    try_mkdir(os.path.split(save_file)[0])
    BVH.save(save_file,
             new_anim,
             names=new_names,
             frametime=ftime,
             order='xyz')
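
A single-file invocation (with hypothetical paths, for illustration) mirrors how batch_split in Example #6 drives this function:

split_joint('./datasets/Mixamo/Aj/Walking.bvh',
            save_file='./datasets/Mixamo_split/Aj/Walking.bvh')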