Beispiel #1
0
def run(model, config_file):
    """Load a model file, build the processing pipeline, and run inference.

    Args:
        model: Path to the model; must end in ``.so`` (shared object loaded
            through NNLib) or ``.pb`` (TensorFlow protocol buffer loaded
            through TensorflowGraphRunner).
        config_file: Path to the YAML configuration providing
            ``PRE_PROCESSOR`` and ``POST_PROCESSOR`` definitions.

    Raises:
        ValueError: If the model file extension is neither .so nor .pb.
            (ValueError subclasses Exception, so existing callers that
            catch Exception keep working.)
    """
    global nn, pre_process, post_process
    filename, file_extension = os.path.splitext(model)
    supported_files = ['.so', '.pb']

    if file_extension not in supported_files:
        raise ValueError("""
            Unknown file type. Got %s%s.
            Please check the model file (-m).
            Only .pb (protocol buffer) or .so (shared object) file is supported.
            """ % (filename, file_extension))

    config = load_yaml(config_file)
    pre_process = build_pre_process(config.PRE_PROCESSOR)
    post_process = build_post_process(config.POST_PROCESSOR)

    if file_extension == '.so':  # Shared library
        nn = NNLib()
        nn.load(model)

    elif file_extension == '.pb':  # Protocol Buffer file
        # Imported lazily so TensorFlow is only loaded when a .pb model
        # is actually used.
        from lmnet.tensorflow_graph_runner import TensorflowGraphRunner
        nn = TensorflowGraphRunner(model)

    run_impl(config)
Beispiel #2
0
def run(model, config_file, port=80):
    """Serve motion-JPEG inference results over HTTP until interrupted.

    Loads the model (.so via NNLib, .pb via TensorflowGraphRunner), opens
    the camera stream, builds the config and worker pool, then blocks in
    ``serve_forever`` until KeyboardInterrupt, at which point all
    resources are released.

    Args:
        model: Path to a ``.so`` or ``.pb`` model file.
        config_file: Path to the YAML configuration file.
        port: TCP port for the HTTP server (default 80).

    Raises:
        ValueError: If the model file extension is neither .so nor .pb.
            (ValueError subclasses Exception, so callers catching
            Exception are unaffected.)
    """
    global nn, pre_process, post_process, config, stream, pool

    filename, file_extension = os.path.splitext(model)
    supported_files = [".so", ".pb"]

    if file_extension not in supported_files:
        raise ValueError("""
            Unknown file type. Got %s%s.
            Please check the model file (-m).
            Only .pb (protocol buffer) or .so (shared object) file is supported.
            """ % (filename, file_extension))

    if file_extension == ".so":  # Shared library
        nn = NNLib()
        nn.load(model)

    elif file_extension == ".pb":  # Protocol Buffer file
        # Imported lazily so TensorFlow is only loaded when a .pb model
        # is actually used.
        from lmnet.tensorflow_graph_runner import TensorflowGraphRunner

        nn = TensorflowGraphRunner(model)

    # BUG FIX: the original unconditionally re-assigned
    # `nn = NNLib(); nn.load(model)` here, clobbering the
    # TensorflowGraphRunner selected for .pb models (and loading .so
    # models twice). The dispatch above already set `nn` correctly.

    stream = VideoStream(CAMERA_SOURCE, CAMERA_WIDTH, CAMERA_HEIGHT,
                         CAMERA_FPS)

    config = load_yaml(config_file)

    pre_process = build_pre_process(config.PRE_PROCESSOR)
    post_process = build_post_process(config.POST_PROCESSOR)

    # Single worker process; _init_worker runs once in the child.
    pool = Pool(processes=1, initializer=_init_worker)

    try:
        server = ThreadedHTTPServer(("", port), MotionJpegHandler)
        print("server starting")
        server.serve_forever()
    except KeyboardInterrupt:
        # Typo fixed: original message read "KeyboardInterrpt".
        print("KeyboardInterrupt in server - ending server")
        stream.release()
        pool.terminate()
        pool.join()
        server.socket.close()
        server.shutdown()

    return
Beispiel #3
0
def run_prediction(input_image, model, config_file, trial=1):
    """Predict on a single image and report per-stage benchmark times.

    Loads the config, image, and network, times the pre-process /
    inference / post-process stages (averaged over ``trial`` runs by
    ``_timerfunc``), then writes the JSON report and rendered images
    into the "output" directory.
    """
    # All three path arguments are mandatory; bail out early otherwise.
    if not (input_image and model and config_file):
        logger.error('Please check usage with --help option')
        exit(1)

    config = load_yaml(config_file)

    # Decode the image once; keep the pristine copy for rendering later.
    img = load_image(input_image)
    original = img

    # Bring the network up from the model file.
    network = _init(model, config)

    # --- timed stages ---------------------------------------------------
    img, t_pre = _timerfunc(
        _pre_process, (img, config.PRE_PROCESSOR, config.DATA_FORMAT),
        trial)

    # The network expects a leading batch axis.
    batched = np.expand_dims(img, axis=0)

    result, t_infer = _timerfunc(_run, (network, batched), trial)

    logger.info('Output: (before post process)\n{}'.format(result))

    result, t_post = _timerfunc(_post_process,
                                (result, config.POST_PROCESSOR), trial)

    logger.info('Output: (after post process)\n{}'.format(result))

    # --- report ---------------------------------------------------------
    json_writer = JsonOutput(
        task=Tasks(config.TASK),
        classes=config.CLASSES,
        image_size=config.IMAGE_SIZE,
        data_format=config.DATA_FORMAT,
        bench={
            "total": t_pre + t_post + t_infer,
            "pre": t_pre,
            "post": t_post,
            "inference": t_infer,
        },
    )

    image_writer = ImageFromJson(
        task=Tasks(config.TASK),
        classes=config.CLASSES,
        image_size=config.IMAGE_SIZE,
    )

    destination = "output"
    json_obj = json_writer(result, [original], [input_image])
    _save_json(destination, json_obj)
    rendered = image_writer(json_obj, [original], [input_image])
    _save_images(destination, rendered)
    logger.info(
        "Benchmark avg result(sec) for {} trials: pre_process: {}  inference: {} post_process: {}  Total: {}"
        .format(
            trial,
            t_pre,
            t_infer,
            t_post,
            t_pre + t_post + t_infer,
        ))
Beispiel #4
0
	sqlstring = sqlstring + ");\n"
	return sqlstring


	
##======================================================================
## Let's GO!
##======================================================================

## Import config and connect to the database.
import config
config.init_db(opts.connect_yaml)

## Load the YAML table definition (first positional argument).
table_def_yaml = args[0]
tob = config.load_yaml(table_def_yaml, as_object=True)

## Create the table (dropping any existing one first).
sql = get_create_table_sql(tob, drop=True)
config.DB.execute(sql)
config.CONN.commit()

# All done.
config.CONN.close()

# Parenthesized print works under both Python 2 and Python 3 for these
# single-argument calls (the originals were Python 2 print statements).
print("--- CREATED TABLE: %s ---" % tob.table)
print(sql)


Beispiel #5
0
def load_cron_config():
    """Return the cron deployment settings parsed from the YAML config file."""
    return config.load_yaml('app/config/deployment/cron.yml')
Beispiel #6
0
def run_prediction(input_image, model, config_file, trial=1):
    """Benchmark a single-image prediction over several trials.

    Performs one warm-up pass (to exclude one-time initialization cost),
    then runs pre-process / inference / post-process ``trial`` times,
    collects mean and standard deviation per stage, and writes the JSON
    report and rendered images into the "output" directory.
    """
    # Every path argument is required.
    if not (input_image and model and config_file):
        logger.error('Please check usage with --help option')
        exit(1)

    config = load_yaml(config_file)

    # Decode the image once; keep the pristine copy for rendering later.
    img = load_image(input_image)
    original = img

    # Bring the network up from the model file.
    network = _init(model, config)

    preproc = build_pre_process(config.PRE_PROCESSOR)
    postproc = build_post_process(config.POST_PROCESSOR)

    # Warm-up pass: the first call includes one-time initialization,
    # so it is excluded from the timing statistics below.
    warm = _pre_process(img, preproc, config.DATA_FORMAT)
    warm = _run(network, warm)
    warm = _post_process(warm, postproc)

    # Key order matters: time_stat is logged, so keep the original
    # (total, pre, post, run) ordering.
    timings = {"total": [], "pre": [], "post": [], "run": []}

    for _ in range(trial):
        # Timed pre-process, inference, and post-process.
        output, t_pre = _timerfunc(
            _pre_process, (img, preproc, config.DATA_FORMAT))
        output, t_run = _timerfunc(_run, (network, output))
        output, t_post = _timerfunc(_post_process, (output, postproc))

        timings["total"].append(t_pre + t_run + t_post)
        timings["pre"].append(t_pre)
        timings["post"].append(t_post)
        timings["run"].append(t_run)

    time_stat = {
        stage: {"mean": np.mean(samples), "std": np.std(samples)}
        for stage, samples in timings.items()
    }

    # --- report ---------------------------------------------------------
    json_writer = JsonOutput(
        task=Tasks(config.TASK),
        classes=config.CLASSES,
        image_size=config.IMAGE_SIZE,
        data_format=config.DATA_FORMAT,
        bench=time_stat,
    )

    image_writer = ImageFromJson(
        task=Tasks(config.TASK),
        classes=config.CLASSES,
        image_size=config.IMAGE_SIZE,
    )

    destination = "output"
    json_obj = json_writer(output, [original], [input_image])
    _save_json(destination, json_obj)
    rendered = image_writer(json_obj, [original], [input_image])
    _save_images(destination, rendered)
    logger.info("Benchmark avg result(sec) for {} trials".format(trial))
    logger.info(time_stat)
Beispiel #7
0
def load_cron_config():
    """Load and return the cron deployment settings from the YAML file."""
    cron_settings = config.load_yaml('app/config/deployment/cron.yml')

    return cron_settings