Example 1
def run(model, config_file):
    global nn, pre_process, post_process
    filename, file_extension = os.path.splitext(model)
    supported_files = ['.so', '.pb']

    if file_extension not in supported_files:
        raise Exception("""
            Unknown file type. Got %s%s.
            Please check the model file (-m).
            Only .pb (protocol buffer) or .so (shared object) files are supported.
            """ % (filename, file_extension))

    config = load_yaml(config_file)
    pre_process = build_pre_process(config.PRE_PROCESSOR)
    post_process = build_post_process(config.POST_PROCESSOR)

    if file_extension == '.so':  # Shared library
        nn = NNLib()
        nn.load(model)

    elif file_extension == '.pb':  # Protocol Buffer file
        # only load tensorflow if user wants to use GPU
        from lmnet.tensorflow_graph_runner import TensorflowGraphRunner
        nn = TensorflowGraphRunner(model)

    run_impl(config)
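A minimal sketch of how this `run` function might be invoked from a command-line entry point. The flag names (`-m`, `-c`) follow the `-m` hint in the error message above but are otherwise assumptions for illustration, not the project's actual CLI.

# Hypothetical CLI wrapper for the run() above; flag names are assumed.
import argparse

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Run inference with a .so or .pb model")
    parser.add_argument("-m", "--model", required=True, help="path to a .so or .pb model file")
    parser.add_argument("-c", "--config_file", required=True, help="path to the YAML config file")
    args = parser.parse_args()
    run(args.model, args.config_file)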
Example 2
def run(model, config_file, port=80):
    global nn, pre_process, post_process, config, stream, pool

    filename, file_extension = os.path.splitext(model)
    supported_files = [".so", ".pb"]

    if file_extension not in supported_files:
        raise Exception("""
            Unknown file type. Got %s%s.
            Please check the model file (-m).
            Only .pb (protocol buffer) or .so (shared object) files are supported.
            """ % (filename, file_extension))

    if file_extension == ".so":  # Shared library
        nn = NNLib()
        nn.load(model)

    elif file_extension == ".pb":  # Protocol Buffer file
        # only load tensorflow if user wants to use GPU
        from lmnet.tensorflow_graph_runner import TensorflowGraphRunner

        nn = TensorflowGraphRunner(model)

    stream = VideoStream(CAMERA_SOURCE, CAMERA_WIDTH, CAMERA_HEIGHT,
                         CAMERA_FPS)

    config = load_yaml(config_file)

    pre_process = build_pre_process(config.PRE_PROCESSOR)
    post_process = build_post_process(config.POST_PROCESSOR)

    pool = Pool(processes=1, initializer=_init_worker)

    try:
        server = ThreadedHTTPServer(("", port), MotionJpegHandler)
        print("server starting")
        server.serve_forever()
    except KeyboardInterrupt:
        print("KeyboardInterrpt in server - ending server")
        stream.release()
        pool.terminate()
        pool.join()
        server.socket.close()
        server.shutdown()

    return
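The example assumes a `ThreadedHTTPServer` class and an `_init_worker` initializer defined elsewhere in the module. A common way to build such a server is to mix `ThreadingMixIn` into `HTTPServer`; the definition below is an assumption about the original code, shown only to make the snippet easier to follow.

# Assumed definition; the actual class in the source module may differ.
from http.server import HTTPServer
from socketserver import ThreadingMixIn

class ThreadedHTTPServer(ThreadingMixIn, HTTPServer):
    """Serve each MJPEG client connection in its own thread."""
    daemon_threads = True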
Example 3
def _pre_process(raw_image, pre_processor, data_format):
    pre_process = build_pre_process(pre_processor)
    image = pre_process(image=raw_image)['image']
    if data_format == 'NCHW':
        image = np.transpose(image, [2, 0, 1])
    return image
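For illustration, a call to `_pre_process` on a dummy frame; the HWC uint8 input shape and the use of `config.PRE_PROCESSOR` mirror the other examples and are assumptions for this sketch.

# Illustrative usage of _pre_process; shape and config attribute are assumed.
import numpy as np

raw = np.zeros((480, 640, 3), dtype=np.uint8)   # dummy HWC camera frame
chw = _pre_process(raw, config.PRE_PROCESSOR, "NCHW")
print(chw.shape)                                 # channels-first (C, H, W) tensor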
Example 4
def run_prediction(input_image, model, config_file, trial=1):
    if not input_image or not model or not config_file:
        logger.error('Please check usage with --help option')
        exit(1)

    config = load_yaml(config_file)

    # load the image
    image_data = load_image(input_image)
    raw_image = image_data

    # initialize Network
    nn = _init(model, config)

    pre_process = build_pre_process(config.PRE_PROCESSOR)
    post_process = build_post_process(config.POST_PROCESSOR)

    # call the functions once to exclude the first result, which includes some initialization
    init_output = _pre_process(image_data, pre_process, config.DATA_FORMAT)
    init_output = _run(nn, init_output)
    init_output = _post_process(init_output, post_process)

    results_total = []
    results_pre = []
    results_run = []
    results_post = []

    for _ in range(trial):
        # pre process for image
        output, bench_pre = _timerfunc(
            _pre_process, (image_data, pre_process, config.DATA_FORMAT))

        # run the model to inference
        output, bench_run = _timerfunc(_run, (nn, output))

        # post process for output
        output, bench_post = _timerfunc(_post_process, (output, post_process))

        results_total.append(bench_pre + bench_run + bench_post)
        results_pre.append(bench_pre)
        results_run.append(bench_run)
        results_post.append(bench_post)

    time_stat = {
        "total": {
            "mean": np.mean(results_total),
            "std": np.std(results_total)
        },
        "pre": {
            "mean": np.mean(results_pre),
            "std": np.std(results_pre)
        },
        "post": {
            "mean": np.mean(results_post),
            "std": np.std(results_post)
        },
        "run": {
            "mean": np.mean(results_run),
            "std": np.std(results_run)
        },
    }

    # json output
    json_output = JsonOutput(
        task=Tasks(config.TASK),
        classes=config.CLASSES,
        image_size=config.IMAGE_SIZE,
        data_format=config.DATA_FORMAT,
        bench=time_stat,
    )

    image_from_json = ImageFromJson(
        task=Tasks(config.TASK),
        classes=config.CLASSES,
        image_size=config.IMAGE_SIZE,
    )

    output_dir = "output"
    outputs = output
    raw_images = [raw_image]
    image_files = [input_image]
    json_obj = json_output(outputs, raw_images, image_files)
    _save_json(output_dir, json_obj)
    filename_images = image_from_json(json_obj, raw_images, image_files)
    _save_images(output_dir, filename_images)
    logger.info("Benchmark avg result(sec) for {} trials".format(trial))
    logger.info(time_stat)
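The benchmarking loop relies on a `_timerfunc` helper that is not shown here. From the call sites, it appears to take a callable and an argument tuple and return the callable's result together with the elapsed time in seconds; the sketch below is an assumption based on that usage, not the project's actual helper.

# Assumed shape of the _timerfunc helper used above.
import time

def _timerfunc(func, extra_args):
    start = time.perf_counter()
    result = func(*extra_args)
    elapsed = time.perf_counter() - start   # elapsed wall-clock seconds
    return result, elapsed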