Code example #1
0
def init_model(transform):
    """Build the inference engine and post-processors for the given task.

    :param transform:
        Task selector: ``'gesture'`` or ``'fitness'``. Any other value leaves
        both the engine and the post-processors as ``None``.
    :return:
        ``((inference_engine, postprocessors), None)``; both elements of the
        inner tuple are ``None`` when *transform* is not recognized.
    """
    use_gpu = True
    inference_engine = None
    neural_network = None
    postprocessors = None

    if transform == 'gesture':
        # Load the feature-extractor backbone
        feature_extractor = feature_extractors.StridedInflatedEfficientNet()
        feature_extractor.load_weights_from_resources(
            'backbone/strided_inflated_efficientnet.ckpt')
        feature_extractor.eval()

        # Logistic-regression gesture classifier on top of the backbone
        gesture_classifier = LogisticRegression(
            num_in=feature_extractor.feature_dim, num_out=30)
        checkpoint = load_weights_from_resources(
            'gesture_detection/efficientnet_logistic_regression.ckpt')
        gesture_classifier.load_state_dict(checkpoint)
        gesture_classifier.eval()

        # Concatenate feature extractor and classifier head
        neural_network = Pipe(feature_extractor, gesture_classifier)
        postprocessors = [
            PostprocessClassificationOutput(INT2LAB, smoothing=4)
        ]

    elif transform == 'fitness':
        # Hard-coded user profile used for calorie estimation.
        # TODO(review): consider promoting these to parameters of init_model.
        weight = 60.0
        height = 170.0
        age = 20.0
        gender = 'female'

        # Load the feature-extractor backbone
        feature_extractor = feature_extractors.StridedInflatedMobileNetV2()
        feature_extractor.load_weights_from_resources(
            'backbone/strided_inflated_mobilenet.ckpt')
        feature_extractor.eval()

        # Fitness activity classifier head
        gesture_classifier = LogisticRegression(
            num_in=feature_extractor.feature_dim, num_out=81)
        checkpoint = load_weights_from_resources(
            'fitness_activity_recognition/mobilenet_logistic_regression.ckpt')
        gesture_classifier.load_state_dict(checkpoint)
        gesture_classifier.eval()

        # MET value converter head (drives calorie estimation)
        met_value_converter = calorie_estimation.METValueMLPConverter()
        checkpoint = load_weights_from_resources(
            'calorie_estimation/mobilenet_features_met_converter.ckpt')
        met_value_converter.load_state_dict(checkpoint)
        met_value_converter.eval()

        # Backbone feeds both downstream heads. Head order fixes the output
        # indices: 0 = classifier, 1 = MET converter — matched by the
        # `indices` arguments of the post-processors below.
        neural_network = Pipe(
            feature_extractor,
            feature_converter=[gesture_classifier, met_value_converter])

        postprocessors = [
            PostprocessClassificationOutput(INT2LAB, smoothing=8, indices=[0]),
            calorie_estimation.CalorieAccumulator(weight=weight,
                                                  height=height,
                                                  age=age,
                                                  gender=gender,
                                                  smoothing=12,
                                                  indices=[1])
        ]

    # Only spin up an engine when a known task was requested
    if neural_network is not None:
        inference_engine = InferenceEngine(neural_network, use_gpu=use_gpu)
        start_inference(inference_engine)

    return (inference_engine, postprocessors), None
Code example #2
0
def run_calorie_estimation(model_name: str,
                           model_version: str,
                           weight: Optional[float] = 70.0,
                           height: Optional[float] = 170.0,
                           age: float = 30.0,
                           gender: Optional[str] = None,
                           title: Optional[str] = None,
                           display_fn: Optional[Callable] = None,
                           **kwargs):
    """
    Run live calorie estimation: backbone features -> MET values -> calories.

    :param model_name:
        Model from backbone (StridedInflatedEfficientNet or StridedInflatedMobileNetV2).
    :param model_version:
        Model version (pro or lite)
    :param weight:
        Weight (in kilograms). Will be used to convert MET values to calories. Default to 70.
    :param height:
        Height (in centimeters). Will be used to convert MET values to calories. Default to 170.
    :param age:
        Age (in years). Will be used to convert MET values to calories. Default to 30.
    :param gender:
        Gender ("male" or "female" or "other"). Will be used to convert MET values to calories
    :param title:
        Title of the image frame on display.
    :param display_fn:
        Optional function to further process displayed image
    """
    # Resolve the configuration and checkpoints for the requested model
    selected_config, weights = get_relevant_weights(
        SUPPORTED_MODEL_CONFIGURATIONS, model_name, model_version)

    # Backbone feature extractor
    backbone_network = build_backbone_network(selected_config,
                                              weights['backbone'])

    # MLP head that maps backbone features to MET values
    met_value_converter = calorie_estimation.METValueMLPConverter()
    met_value_converter.load_state_dict(weights['met_converter'])
    met_value_converter.eval()

    # Full network: backbone followed by the MET converter
    net = Pipe(backbone_network, met_value_converter)

    # Turn smoothed MET values plus the user profile into calorie counts
    post_processors = [
        calorie_estimation.CalorieAccumulator(weight=weight,
                                              height=height,
                                              age=age,
                                              gender=gender,
                                              smoothing=12)
    ]

    # On-screen overlays: FPS counters plus detailed MET/calorie readout
    display_results = sense.display.DisplayResults(
        title=title,
        display_ops=[
            sense.display.DisplayFPS(
                expected_camera_fps=net.fps,
                expected_inference_fps=net.fps / net.step_size),
            sense.display.DisplayDetailedMETandCalories(),
        ],
        display_fn=display_fn)

    # Run live inference
    Controller(neural_network=net,
               post_processors=post_processors,
               results_display=display_results,
               callbacks=[],
               **kwargs).run_inference()
Code example #3
0
        SUPPORTED_MODEL_CONFIGURATIONS,
        model_name,
        model_version
    )

    # Load backbone network
    backbone_network = build_backbone_network(selected_config, weights['backbone'])

    # Create fitness activity classifier
    gesture_classifier = LogisticRegression(num_in=backbone_network.feature_dim,
                                            num_out=81)
    gesture_classifier.load_state_dict(weights['fitness_activity_recognition'])
    gesture_classifier.eval()

    # Create MET value converter
    met_value_converter = calorie_estimation.METValueMLPConverter()
    met_value_converter.load_state_dict(weights['met_converter'])
    met_value_converter.eval()

    # Concatenate backbone network with downstream nets
    net = Pipe(backbone_network, feature_converter=[gesture_classifier,
                                                    met_value_converter])

    post_processors = [
        PostprocessClassificationOutput(INT2LAB, smoothing=8,
                                        indices=[0]),
        calorie_estimation.CalorieAccumulator(weight=weight,
                                              height=height,
                                              age=age,
                                              gender=gender,
                                              smoothing=12,
Code example #4
0
File: run_fitness_tracker.py  Project: TwentyBN/sense
def run_fitness_tracker(model_name: str,
                        model_version: str,
                        weight: Optional[float] = 70.0,
                        height: Optional[float] = 170.0,
                        age: float = 30.0,
                        gender: Optional[str] = None,
                        title: Optional[str] = None,
                        display_fn: Optional[Callable] = None,
                        **kwargs):
    """
    Run the live fitness tracker: activity classification plus calorie tracking.

    :param model_name:
        Model from backbone (StridedInflatedEfficientNet or StridedInflatedMobileNetV2).
    :param model_version:
        Model version (pro or lite)
    :param weight:
        Weight (in kilograms). Will be used to convert MET values to calories. Default to 70.
    :param height:
        Height (in centimeters). Will be used to convert MET values to calories. Default to 170.
    :param age:
        Age (in years). Will be used to convert MET values to calories. Default to 30.
    :param gender:
        Gender ("male" or "female" or "other"). Will be used to convert MET values to calories
    :param title:
        Title of the image frame on display.
    :param display_fn:
        Optional function to further process displayed image
    """
    # Resolve configuration and checkpoints for the requested model
    selected_config, weights = get_relevant_weights(
        SUPPORTED_MODEL_CONFIGURATIONS, model_name, model_version)

    # Backbone feature extractor
    backbone_network = build_backbone_network(selected_config,
                                              weights['backbone'])

    # Fitness-activity classifier head on top of the backbone features
    gesture_classifier = LogisticRegression(
        num_in=backbone_network.feature_dim, num_out=81)
    gesture_classifier.load_state_dict(weights['fitness_activity_recognition'])
    gesture_classifier.eval()

    # MET-value converter head (drives calorie estimation)
    met_value_converter = calorie_estimation.METValueMLPConverter()
    met_value_converter.load_state_dict(weights['met_converter'])
    met_value_converter.eval()

    # Backbone feeds both heads; output 0 = classifier, 1 = MET converter
    net = Pipe(backbone_network,
               feature_converter=[gesture_classifier, met_value_converter])

    # Post-processing: smoothed class labels + accumulated calories
    post_processors = [
        PostprocessClassificationOutput(INT2LAB, smoothing=8, indices=[0]),
        calorie_estimation.CalorieAccumulator(weight=weight,
                                              height=height,
                                              age=age,
                                              gender=gender,
                                              smoothing=12,
                                              indices=[1])
    ]

    # On-screen overlays: FPS, top-1 activity, MET and calorie readout
    display_results = sense.display.DisplayResults(
        title=title,
        display_ops=[
            sense.display.DisplayFPS(
                expected_camera_fps=net.fps,
                expected_inference_fps=net.fps / net.step_size),
            sense.display.DisplayTopKClassificationOutputs(top_k=1,
                                                           threshold=0.5),
            sense.display.DisplayMETandCalories(y_offset=40),
        ],
        border_size_top=50,
        display_fn=display_fn)

    # Run live inference
    Controller(neural_network=net,
               post_processors=post_processors,
               results_display=display_results,
               callbacks=[],
               **kwargs).run_inference()