Example #1
0
    def __init__(self, service_config_file, json_path_expressions):
        """Set up the REST application from the given configuration files.

        :param service_config_file: path to the service configuration JSON file
        :param json_path_expressions: JSONPath overrides applied to the config
        """
        # Load the service configuration and apply any JSONPath overrides.
        service_config = load_json_file(service_config_file)
        update_data_using_jsonpath(service_config, json_path_expressions)

        # Derive the algorithm config path from the configured settings name
        # and fall back to the built-in defaults if the file does not exist.
        algorithm_config_file = ("config" + os.sep +
                                 service_config["algorithm_settings"] +
                                 "_algorithm.config")
        if not os.path.isfile(algorithm_config_file):
            write_message_to_log(
                "Did not find algorithm configuration file " +
                algorithm_config_file, LOG_MODE_INFO)
            algorithm_config = DEFAULT_ALGORITHM_CONFIG
        else:
            write_message_to_log(
                "Load algorithm configuration from " + algorithm_config_file,
                LOG_MODE_INFO)
            algorithm_config = load_json_file(algorithm_config_file)

        # Map the REST endpoints onto their request handlers.
        handlers = [
            (r"/run_morphablegraphs", GenerateMotionHandler),  # legacy
            (r"/config_morphablegraphs", SetConfigurationHandler),
            (r"/generate_motion", GenerateMotionHandler),
            (r"/get_skeleton", GetSkeletonHandler),
        ]
        self.application = MGRestApplication(service_config, algorithm_config,
                                             handlers)
        self.port = service_config["port"]
def main():
    """Parse command-line arguments, load the service configuration and run
    the MorphableGraphs REST service until interrupted.

    Sets the module-level ``context`` and ``pool`` globals used by the
    request handlers.
    """
    global context, pool
    port = 8888  # fallback used when no service config file is found
    skeleton_model = "iclone"
    # NOTE(review): a default of MODEL_DATA_DIR/iclone_female4.bvh was assigned
    # here and immediately overwritten; by default no target skeleton is used.
    target_skeleton_file = None
    parser = argparse.ArgumentParser(
        description="Start the MorphableGraphs REST-interface")
    parser.add_argument(
        "-set",
        nargs='+',
        default=[],
        help="JSONPath expression, e.g. -set $.model_data=path/to/data")
    parser.add_argument("-config_file",
                        nargs='?',
                        default=SERVICE_CONFIG_FILE,
                        help="Path to default config file")
    parser.add_argument("-target_skeleton",
                        nargs='?',
                        default=target_skeleton_file,
                        help="Path to target skeleton file")
    parser.add_argument("-skeleton_scale",
                        nargs='?',
                        default=1.0,
                        help="Scale applied to the target skeleton offsets")
    args = parser.parse_args()
    if os.path.isfile(args.config_file):
        service_config = load_json_file(args.config_file)
        algorithm_config_file = "config" + os.sep + service_config[
            "algorithm_settings"] + "_algorithm.config"
        algorithm_config = load_json_file(algorithm_config_file)
        port = service_config["port"]
        if args.target_skeleton is not None:
            # TODO use custom json file instead
            bvh_reader = BVHReader(args.target_skeleton)
            animated_joints = list(bvh_reader.get_animated_joints())
            target_skeleton = SkeletonBuilder().load_from_bvh(
                bvh_reader, animated_joints=animated_joints)
            # NOTE(review): args.skeleton_scale is parsed but never applied
            # to the skeleton offsets here — confirm whether that is intended.
            target_skeleton.skeleton_model = SKELETON_MODELS[skeleton_model]
        else:
            target_skeleton = None

        context = Context(service_config, algorithm_config, target_skeleton)
    else:
        # Previously this fell through silently, leaving the context unset.
        print("Warning: could not find service config file", args.config_file)

    count = cpu_count()
    print("run {} processes on port {}".format(count, port))
    pool = ProcessPoolExecutor(max_workers=count)

    # Configure tornado to work with the asyncio loop.
    tornado.platform.asyncio.AsyncIOMainLoop().install()
    app.listen(port)
    asyncio.get_event_loop().run_forever()
    pool.shutdown()
def define_sections_from_annotations(motion_folder, motions):
    """Derive per-motion start/end sections from "<key>_sections.json" files.

    Only motions with an annotation file are kept; if no annotation file is
    found at all, the original motions are returned together with None.
    """
    filtered_motions = collections.OrderedDict()
    sections = collections.OrderedDict()
    for key in motions.keys():
        annotations_file = motion_folder + os.sep + key + "_sections.json"
        if not os.path.isfile(annotations_file):
            continue
        annotations = load_json_file(annotations_file)["semantic_annotation"]
        motion_sections = dict()
        for label, frame_indices in annotations.items():
            # Sort the frame indices so first/last give the section bounds.
            frame_indices.sort()
            entry = {"start_idx": frame_indices[0],
                     "end_idx": frame_indices[-1]}
            motion_sections[entry["start_idx"]] = entry
        ordered_sections = collections.OrderedDict(
            sorted(motion_sections.items()))
        sections[key] = ordered_sections.values()
        filtered_motions[key] = motions[key]

    if sections:
        return filtered_motions, sections
    return motions, None
    def load_data(self, n_basis=None):
        '''
        Load the motion data for the specified setting.

        The data is loaded from the data analysis folder if it was precomputed
        and stored there, otherwise it is created from the alignment folder
        and cached for later runs.

        :param n_basis: number of basis functions for the functional data
        :return: the discrete or functional motion data
        :raises KeyError: if the parameterization or data type is unsupported
        :raises NotImplementedError: for the 'Euler' parameterization
        '''
        data_analysis_folder = get_data_analysis_folder(
            self.elementary_action, self.motion_primitive, self.data_repo_path)
        if self.data_type == 'discrete':
            if self.parameterization == 'quaternion':
                prestored_filename = os.path.join(data_analysis_folder,
                                                  'smoothed_quat_frames.json')
                if not os.path.isfile(prestored_filename):
                    discrete_motion_data = get_smoothed_quat_frames(
                        self.elementary_action, self.motion_primitive,
                        self.data_repo_path)
                    # Cache the result so the next run can skip recomputation.
                    write_to_json_file(prestored_filename,
                                       discrete_motion_data)
                else:
                    discrete_motion_data = load_json_file(prestored_filename)
                return discrete_motion_data
            elif self.parameterization == 'Euler':
                raise NotImplementedError
            else:
                raise KeyError('Motion Parameterization type is not supported')
        elif self.data_type == 'functional':
            if self.parameterization == 'quaternion':
                prestored_filename = os.path.join(data_analysis_folder,
                                                  'functional_quat_data.json')
                if not os.path.isfile(prestored_filename):
                    functional_quat_data = create_quat_functional_data(
                        self.elementary_action, self.motion_primitive,
                        self.data_repo_path, n_basis)
                    write_to_json_file(prestored_filename,
                                       functional_quat_data)
                else:
                    functional_quat_data = load_json_file(prestored_filename)
                return functional_quat_data
            elif self.parameterization == 'Euler':
                raise NotImplementedError
            else:
                raise KeyError('Motion Parameterization type is not supported')
        else:
            # Previously an unknown data type silently returned None.
            raise KeyError('Motion data type is not supported')
Example #5
0
def main():
    """Run the algorithm on the latest file added to the input directory
    configured in service_config.json.
    """
    # Bail out early if the service configuration cannot be found.
    if not os.path.isfile(SERVICE_CONFIG_FILE):
        print("Error: Could not read service config file", SERVICE_CONFIG_FILE)
        return
    service_config = load_json_file(SERVICE_CONFIG_FILE)
    config_pipeline(service_config)
Example #6
0
def config_pipeline(service_config):
    """Sends the current config to the morphablegraphs server.

    :param service_config: loaded service configuration (currently unused
        here; the algorithm configuration is read from ALGORITHM_CONFIG_FILE)
    """
    mg_input = load_json_file(ALGORITHM_CONFIG_FILE)
    # urllib.request requires the POST body as bytes in Python 3; passing the
    # json.dumps() str raised a TypeError when the request was opened.
    data = json.dumps(mg_input).encode("utf-8")
    mg_server_url = 'http://localhost:8888/config_morphablegraphs'
    request = urllib.request.Request(
        mg_server_url, data, headers={"Content-Type": "application/json"})

    print("send config and wait for motion generator...")
    handler = urllib.request.urlopen(request)
    result = handler.read()
    print(result)
 def get_full_euler_frames(self, load_data=True, export_data=False):
     """Return the full euler frames per motion file.

     Reads the cached JSON file in the data analysis folder when it exists
     and load_data is True; otherwise copies the in-memory spatial data.
     Optionally writes the result back to the cache file.
     """
     cache_path = os.path.join(self.data_analysis_folder,
                               'full_euler_frames.json')
     if load_data and os.path.exists(cache_path):
         full_euler_frames_dic = load_json_file(cache_path)['data']
     else:
         # No cache available: copy the frames straight from spatial_data.
         full_euler_frames_dic = {
             filename: frames
             for filename, frames in self.spatial_data.items()
         }
     if export_data:
         write_to_json_file(cache_path, {'data': full_euler_frames_dic})
     return full_euler_frames_dic
 def get_reduced_euler_frames(self, load_data=True, export_data=False):
     """Return reduced euler frames per motion file.

     Loads them from the JSON cache in the data analysis folder when
     available; otherwise converts the in-memory spatial data using the
     skeleton BVH. Optionally exports the result back to the cache file.
     """
     cache_path = os.path.join(self.data_analysis_folder,
                               'reduced_euler_frames.json')
     if load_data and os.path.exists(cache_path):
         reduced_euler_frames_dic = load_json_file(cache_path)['data']
     else:
         bvhreader = BVHReader(self.skeleton_bvh)
         reduced_euler_frames_dic = {}
         for filename, frames in self.spatial_data.items():
             reduced = convert_euler_frames_to_reduced_euler_frames(
                 bvhreader, frames)
             reduced_euler_frames_dic[filename] = reduced
     if export_data:
         write_to_json_file(cache_path, {'data': reduced_euler_frames_dic})
     return reduced_euler_frames_dic
 def load_functional_data(self, n_basis):
     """Load the functional motion data, computing and caching it on demand.

     :param n_basis: number of basis functions for the functional data
     :raises NotImplementedError: for the 'Euler' parameterization
     :raises KeyError: for any other unsupported parameterization
     """
     data_analysis_folder = get_data_analysis_folder(
         self.elementary_action, self.motion_primitive, self.data_repo_path)
     # Guard clauses: only the quaternion parameterization is implemented.
     if self.parameterization == 'Euler':
         raise NotImplementedError
     if self.parameterization != 'quaternion':
         raise KeyError('Motion Parameterization type is not supported')
     prestored_filename = os.path.join(data_analysis_folder,
                                       'functional_quat_data.json')
     if os.path.isfile(prestored_filename):
         functional_quat_data = load_json_file(prestored_filename)
     else:
         functional_quat_data = create_quat_functional_data(
             self.elementary_action, self.motion_primitive,
             self.data_repo_path, n_basis)
         # Cache the result so the next run can skip recomputation.
         write_to_json_file(prestored_filename, functional_quat_data)
     return functional_quat_data
    def load_discrete_data(self):
        """Load the smoothed discrete quaternion frames, caching them on disk.

        :raises NotImplementedError: for the 'Euler' parameterization
        :raises KeyError: for any other unsupported parameterization
        """
        data_analysis_folder = get_data_analysis_folder(
            self.elementary_action, self.motion_primitive, self.data_repo_path)
        if self.parameterization == 'quaternion':
            prestored_filename = os.path.join(data_analysis_folder,
                                              'smoothed_quat_frames.json')
            if os.path.isfile(prestored_filename):
                discrete_motion_data = load_json_file(prestored_filename)
            else:
                discrete_motion_data = get_smoothed_quat_frames(
                    self.elementary_action, self.motion_primitive,
                    self.data_repo_path)
                # Cache the result so the next run can skip recomputation.
                write_to_json_file(prestored_filename, discrete_motion_data)
        elif self.parameterization == 'Euler':
            raise NotImplementedError
        else:
            raise KeyError('Motion Parameterization type is not supported')
        return discrete_motion_data
def main():
    """Build a cluster tree from the configured motion data and report the
    construction time.
    """
    # NOTE(review): a dead default of "skeleton.bvh" was removed; the raw
    # skeleton file was the value actually used.
    skeleton_path = "raw_skeleton.bvh"
    settings = dict()
    settings["tree_type"] = TREE_TYPE_FEATURE_CLUSTER_TREE
    settings["feature_type"] = FEATURE_TYPE_S_VECTOR
    settings["output_mode"] = "json"
    cluster_tree_builder = ClusterTreeBuilder(settings)
    config = load_json_file(CONIFG_FILE_PATH)
    cluster_tree_builder.set_config(config)
    cluster_tree_builder.load_skeleton(skeleton_path)
    # time.clock() was removed in Python 3.8; perf_counter() is the
    # recommended monotonic replacement for measuring elapsed time.
    start = time.perf_counter()
    success = cluster_tree_builder.build()

    time_in_seconds = time.perf_counter() - start
    if success:
        print("Finished construction in", int(time_in_seconds / 60),
              "minutes and", time_in_seconds % 60, "seconds")
    else:
        print("Failed to read data from directory")
 def scale_data(self, motion_data_matrix):
     '''
     Scale motion data and reshape 3d data to 2d for dimension reduction.

     :param motion_data_matrix (numpy.narray<3d>): n_samples * n_frames(n_knots) * n_dims
     :return: 2d data of shape n_samples * (n_frames * n_dims) together with
              the scaling parameters, or the unmodified matrix and None when
              no scaling is configured
     '''
     assert len(motion_data_matrix.shape) == 3
     if self.scaling == 'normalization':
         # Standardize every column to zero mean and unit variance.
         motion_data_2d = reshape_data_for_PCA(motion_data_matrix)
         scaler = StandardScaler().fit(motion_data_2d)
         normalized_motion_data_2d = scaler.transform(motion_data_2d)
         # scikit-learn exposes the per-feature deviation as "scale_"; the
         # old "std_" attribute used here was deprecated and later removed,
         # so accessing it raised an AttributeError.
         scale_params = {
             'mean': scaler.mean_.tolist(),
             'std': scaler.scale_.tolist()
         }
         return normalized_motion_data_2d, scale_params
     elif self.scaling == 'scaled_root':
         # Only the root translation channels are rescaled.
         scaled_root_motion_data, scale_root_vector = scale_root_channels(
             motion_data_matrix)
         scale_params = {'translation_maxima': scale_root_vector}
         scaled_root_motion_data_2d = reshape_data_for_PCA(
             scaled_root_motion_data)
         return scaled_root_motion_data_2d, scale_params
     elif self.scaling == 'scaled':
         data_analysis_folder = get_data_analysis_folder(
             self.elementary_action, self.motion_primitive,
             self.data_repo_path)
         optimized_weights_filename = os.path.join(
             data_analysis_folder, '_'.join([
                 self.elementary_action, self.motion_primitive,
                 'optimization', 'result.json'
             ]))
         if not os.path.isfile(optimized_weights_filename):
             raise IOError('Cannot find weight file for scaling features')
         else:
             optimal_weights_dic = load_json_file(
                 optimized_weights_filename)
         # NOTE(review): the loaded weights are never applied and this branch
         # implicitly returns None — looks unfinished; confirm the intended
         # behavior before relying on the 'scaled' mode.
     else:
         return motion_data_matrix, None
Example #13
0
 def _load_from_file(self, mgrd_skeleton, file_name, animated_joints=None, use_mgrd_mixture_model=False, scale=None):
     """Load the model definition from a JSON file and initialize from it.

     Does nothing beyond storing the mixture-model flag when the file yields
     no data.
     """
     self.use_mgrd_mixture_model = use_mgrd_mixture_model
     data = load_json_file(file_name)
     if data is None:
         return
     self._initialize_from_json(mgrd_skeleton, data, animated_joints,
                                use_mgrd_mixture_model, scale)
Example #14
0
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
from motion_database import MotionDatabase, TABLES2
from anim_utils.utilities.io_helper_functions import load_json_file
import argparse


def create_user(path, name, pw, role):
    """Create a database user with an empty email and no group memberships.

    :param path: path to the motion database
    :param name: user name
    :param pw: password
    :param role: role assigned to the new user
    """
    db = MotionDatabase()
    db.connect(path)
    db.create_user(name, pw, "", role, "[]")


CONFIG_FILE = "db_server_config.json"

if __name__ == "__main__":
    config = load_json_file(CONFIG_FILE)
    parser = argparse.ArgumentParser(description='Create db user.')
    for field, description in (('name', 'user name'), ('pw', 'password'),
                               ('role', 'role')):
        parser.add_argument(field, nargs='?', help=description)
    args = parser.parse_args()
    # Only proceed when all credentials and the database path are available.
    have_credentials = None not in (args.name, args.pw, args.role)
    if have_credentials and "db_path" in config:
        create_user(config["db_path"], args.name, args.pw, args.role)
Example #15
0
 def load_cluster_tree_from_json_file(self, filepath):
     """Read a cluster tree description from a JSON file and load it."""
     self.load_cluster_tree_from_json(load_json_file(filepath))
Example #16
0
def train_model(name,
                motion_folder,
                output_folder,
                skeleton,
                max_training_samples=100,
                animated_joints=None,
                save_skeleton=False,
                use_multi_processing=True,
                temp_data_dir=None,
                pre_aligned=False):
    """Train a motion primitive model from the BVH files in motion_folder.

    Loads and smooths the motions, derives dtw sections from a keyframes
    file or per-motion annotation files, and writes the resulting model
    (or a MODELING_FAILED marker) into output_folder.

    :param name: model name used for the output file
    :param motion_folder: directory containing the training motion data
    :param output_folder: directory the model file is written to
    :param skeleton: skeleton used for smoothing and model construction
    :param max_training_samples: maximum number of motions to load
    :param animated_joints: optional subset of joints to load
    :param save_skeleton: store the skeleton inside the model data
    :param use_multi_processing: enable multi-processing during construction
    :param pre_aligned: reuse temporal_data.npy instead of aligning frames
    """
    print("train model", name, motion_folder, use_multi_processing)
    motions = load_motion_data(motion_folder,
                               max_count=max_training_samples,
                               animated_joints=animated_joints)
    # Smooth every motion against the first frame of the first motion.
    ref_frame = None
    for key, m in motions.items():
        if ref_frame is None:
            ref_frame = m[0]
        motions[key] = smooth_quaternion_frames(skeleton, m, ref_frame)

    # Prefer an explicit keyframes file; fall back to annotation files.
    keyframes_filename = motion_folder + os.sep + "keyframes.json"
    if os.path.isfile(keyframes_filename):
        keyframes = load_json_file(keyframes_filename)
        sections = define_sections_from_keyframes(motions.keys(), keyframes)
        # Drop motions without keyframe annotations.
        filtered_motions = collections.OrderedDict()
        for key in motions.keys():
            if key in keyframes:
                filtered_motions[key] = motions[key]
        motions = filtered_motions
    else:
        motions, sections = define_sections_from_annotations(
            motion_folder, motions)

    out_filename = output_folder + os.sep + name + MM_FILE_ENDING
    if len(motions) > 1:
        config = get_standard_config()
        config["use_multi_processing"] = use_multi_processing
        config["temp_data_dir"] = temp_data_dir
        constructor = MotionModelConstructor(skeleton, config)
        align_frames = True
        if not pre_aligned or not os.path.isfile(motion_folder + os.sep +
                                                 "temporal_data.npy"):
            constructor.set_motions(motions)
            constructor.set_dtw_sections(sections)
        else:
            # Reuse the stored time warping instead of aligning again.
            constructor.set_aligned_frames(motions)
            temporal_data = np.load(motion_folder + os.sep +
                                    "temporal_data.npy",
                                    allow_pickle=True)
            constructor.set_timewarping(temporal_data)
            align_frames = False
        model_data = constructor.construct_model(name,
                                                 version=3,
                                                 save_skeleton=save_skeleton,
                                                 align_frames=align_frames)

        with open(out_filename, 'w') as outfile:
            json.dump(model_data, outfile)

    elif len(motions) == 1:
        # A single motion cannot be modeled statistically; store it as a
        # static motion primitive instead.
        keys = list(motions.keys())
        model_data = convert_motion_to_static_motion_primitive(
            name, motions[keys[0]], skeleton)
        with open(out_filename, 'w') as outfile:
            json.dump(model_data, outfile)
    else:
        print("Error: Did not find any BVH files in the directory",
              motion_folder)
        model_data = dict()
        model_data["n_motions"] = len(motions)
        # Count the entries inside the motion folder; without the separator
        # the old pattern matched siblings of the folder instead of its
        # contents.
        model_data["n_files"] = len(glob.glob(motion_folder + os.sep + "*"))
        out_filename = output_folder + os.sep + "MODELING_FAILED"
        with open(out_filename, 'w') as outfile:
            json.dump(model_data, outfile)