Example #1
File: mvNCProfile.py  Project: DT42/ncsdk
def profile_net(network,
                inputnode=None,
                outputnode=None,
                nshaves=1,
                inputsize=None,
                weights=None,
                device_no=None):
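    # Parse the network, compile it into a Myriad graph blob, execute it on the
    # device, then report per-layer timings and a Graphviz visualisation.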
    file_init()
    args = Arguments(network, inputnode, outputnode, inputsize, nshaves,
                     weights, device_no)
    myriad_config = MyriadParam(0, nshaves - 1)
    filetype = network.split(".")[-1]
    if args.conf_file is not None:
        get_myriad_info(args, myriad_config)
    if filetype in ["prototxt"]:
        from Controllers.CaffeParser import parse_caffe
        net = parse_caffe(args, myriad_config, file_gen=True)
    elif filetype in ["pb", "protobuf", "meta"]:
        from Controllers.TensorFlowParser import parse_tensor
        net = parse_tensor(args, myriad_config, file_gen=True)
    else:
        throw_error(ErrorTable.ParserNotSupported)
    net.finalize()
    net.optimize()
    graph_file = Blob(major_version, net.name, '', myriad_config, net, "graph")
    graph_file.generate()
    timings, myriad_output = run_myriad(graph_file, args, file_gen=False)
    net.gather_metrics(timings)
    print_summary_of_network(graph_file)
    generate_graphviz(net, graph_file, filename=args.outputs_name)
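A minimal sketch of how profile_net above might be invoked; the file names are placeholders and the surrounding NCSDK modules are assumed to be importable:

profile_net("deploy.prototxt",             # placeholder Caffe model description
            weights="weights.caffemodel",  # placeholder trained weights
            nshaves=12)                    # use 12 SHAVE cores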
Example #2
def create_graph(network,
                 inputnode=None,
                 outputnode=None,
                 outfile='graph',
                 nshaves=1,
                 inputsize=None,
                 weights=None,
                 explicit_concat=None,
                 ma2480=None,
                 scheduler=True,
                 model_dir='',
                 extargs=None):
    file_init()
    args = Arguments(network, inputnode, outputnode, outfile, inputsize,
                     nshaves, weights, explicit_concat, model_dir, extargs)
    args.ma2480 = ma2480
    args.scheduler = scheduler
    GLOBALS.USING_MA2480 = args.ma2480
    GLOBALS.OPT_SCHEDULER = args.scheduler is None
    ppInit(args.scheduler)

    myriad_config = load_myriad_config(args.number_of_shaves)

    if args.conf_file is not None:
        get_myriad_info(args, myriad_config)
    filetype = network.split(".")[-1]
    parser = None
    if filetype in ["prototxt"]:
        parser = Parser.Caffe
    elif filetype in ["pb", "protobuf", "meta"]:
        parser = Parser.TensorFlow
    else:
        throw_error(ErrorTable.ParserNotSupported)

    file_gen = True
    load_ret = load_network(args, parser, myriad_config, debug=True)
    net = load_ret['network']
    if args.new_parser:
        graph_file = load_ret['graph']
    else:
        graph_file = Blob([
            GLOBALS.BLOB_MAJOR_VERSION, GLOBALS.BLOB_MINOR_VERSION,
            GLOBALS.BLOB_PATCH_VERSION
        ], net.name, '', myriad_config, net, outfile)
        graph_file.generate_v2(args)
    expected = load_ret['expected']
    if file_gen:
        np.save(args.outputs_name + "_expected.npy", expected.astype(dtype=np.float16))
Example #3
def create_graph(network,
                 inputnode=None,
                 outputnode=None,
                 outfile='graph',
                 nshaves=1,
                 inputsize=None,
                 weights=None):
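    # Parse and optimise the network, then write the compiled Myriad graph blob
    # to 'outfile' without executing it on the device.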
    file_init()
    args = Arguments(network, inputnode, outputnode, outfile, inputsize,
                     nshaves, weights)
    myriad_config = MyriadParam(0, nshaves - 1)
    if args.conf_file is not None:
        get_myriad_info(args, myriad_config)
    filetype = network.split(".")[-1]
    if filetype in ["prototxt"]:
        from Controllers.CaffeParser import parse_caffe
        net = parse_caffe(args, myriad_config)
    elif filetype in ["pb", "protobuf", "meta"]:
        from Controllers.TensorFlowParser import parse_tensor
        net = parse_tensor(args, myriad_config)
    else:
        throw_error(ErrorTable.ParserNotSupported)
    net.finalize()
    net.optimize()
    graph_file = Blob(major_version, net.name, '', myriad_config, net, outfile)
    graph_file.generate()
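A hypothetical call to this create_graph variant; the TensorFlow model file and node names below are placeholders:

create_graph("frozen_model.pb",
             inputnode="input",
             outputnode="output",
             outfile="graph",
             nshaves=12)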
Example #4
def profile_net(network,
                inputnode=None,
                outputnode=None,
                nshaves=1,
                inputsize=None,
                weights=None,
                device_no=None,
                explicit_concat=None,
                ma2480=None,
                scheduler=True,
                model_dir='',
                extargs=None):
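    # Variant of profile_net that targets the v2 blob format, with optional
    # ma2480 support and the optimising scheduler.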
    file_init()
    args = Arguments(network, inputnode, outputnode, inputsize, nshaves,
                     weights, device_no, explicit_concat, ma2480, scheduler,
                     model_dir, extargs)
    GLOBALS.USING_MA2480 = args.ma2480
    GLOBALS.OPT_SCHEDULER = args.scheduler is None
    ppInit(args.scheduler)
    myriad_config = load_myriad_config(args.number_of_shaves)

    if args.conf_file is not None:
        get_myriad_info(args, myriad_config)
    filetype = network.split(".")[-1]
    parser = None
    if filetype in ["prototxt"]:
        parser = Parser.Caffe
    elif filetype in ["pb", "protobuf", "meta"]:
        parser = Parser.TensorFlow
    else:
        throw_error(ErrorTable.ParserNotSupported)

    load_ret = load_network(args, parser, myriad_config, debug=True)
    net = load_ret['network']
    if args.new_parser:
        graph_file = load_ret['graph']
    else:
        graph_file = Blob([
            GLOBALS.BLOB_MAJOR_VERSION, GLOBALS.BLOB_MINOR_VERSION,
            GLOBALS.BLOB_PATCH_VERSION
        ], net.name, '', myriad_config, net, model_dir + "graph")
        graph_file.generate_v2(args)
    timings, myriad_output = run_myriad(graph_file, args)
    net.gather_metrics(timings)
    print_summary_of_network(graph_file)
    if not args.new_parser:
        generate_graphviz(net, graph_file, filename=args.outputs_name)
Example #5
def check_net(network,
              image,
              inputnode=None,
              outputnode=None,
              nshaves=1,
              inputsize=None,
              weights=None,
              extargs=None):
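    # Execute the network on the device and validate the result against the
    # expected reference output (<outputs_name>_expected.npy).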
    file_init()
    args = Arguments(network, image, inputnode, outputnode, inputsize, nshaves,
                     weights, extargs)
    myriad_config = MyriadParam(0, nshaves - 1)
    if args.conf_file is not None:
        get_myriad_info(args, myriad_config)
    filetype = network.split(".")[-1]
    if filetype in ["prototxt"]:
        from Controllers.CaffeParser import parse_caffe
        net = parse_caffe(args, myriad_config, file_gen=True)
    elif filetype in ["pb", "protobuf", "meta"]:
        from Controllers.TensorFlowParser import parse_tensor
        net = parse_tensor(args, myriad_config, file_gen=True)
    else:
        throw_error(ErrorTable.ParserNotSupported)
    net.finalize()
    net.optimize()
    graph_file = Blob(major_version, net.name, '', myriad_config, net, "graph")
    graph_file.generate()
    timings, myriad_output = run_myriad(graph_file, args, file_gen=True)
    expected = np.load(args.outputs_name + "_expected.npy")
    result = np.load(args.outputs_name + "_result.npy")
    filename = str(args.outputs_name) + "_val.csv"

    quit_code = validation(myriad_output, expected, args.exp_id,
                           ValidationStatistic[extargs.metric], filename, args)

    return quit_code
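A sketch of how check_net might be called; the file names are placeholders, and extargs is assumed to be an object exposing at least a 'metric' attribute naming a ValidationStatistic entry:

check_net("deploy.prototxt",
          "test_image.jpg",
          weights="weights.caffemodel",
          nshaves=12,
          extargs=cli_args)  # hypothetical parsed command-line arguments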
Example #6
def check_net(network,
              image,
              inputnode=None,
              outputnode=None,
              nshaves=1,
              inputsize=None,
              weights=None,
              extargs=None,
              debug=False):
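    # Variant of check_net for the v2 blob format: runs the network, converts the
    # device output to the expected layout where needed, saves both arrays as
    # .npy files and validates them.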
    file_init()
    args = Arguments(network, image, inputnode, outputnode, inputsize, nshaves,
                     weights, extargs)
    GLOBALS.USING_MA2480 = args.ma2480
    GLOBALS.OPT_SCHEDULER = args.scheduler is None
    ppInit(args.scheduler)

    myriad_config = load_myriad_config(args.number_of_shaves)

    if args.conf_file is not None:
        get_myriad_info(args, myriad_config)
    filetype = network.split(".")[-1]
    parser = None
    if filetype in ["prototxt"]:
        parser = Parser.Caffe
    elif filetype in ["pb", "protobuf", "meta"]:
        parser = Parser.TensorFlow
    else:
        throw_error(ErrorTable.ParserNotSupported)

    file_gen = True
    load_ret = load_network(args, parser, myriad_config, debug=True)
    net = load_ret['network']
    if args.new_parser:
        graph_file = load_ret['graph']
    else:
        graph_file = Blob([
            GLOBALS.BLOB_MAJOR_VERSION, GLOBALS.BLOB_MINOR_VERSION,
            GLOBALS.BLOB_PATCH_VERSION
        ], net.name, '', myriad_config, net, "graph")
        graph_file.generate_v2(args)
    timings, myriad_output = run_myriad(graph_file, args)
    report_filename = str(args.outputs_name) + "_val.csv"

    expected = load_ret['expected']
    expected_layout = load_ret['expected_layout']

    # Convert the Myriad output to the layout of the expected output so the two
    # can be compared.
    if not args.new_parser:
        myriad_output = storage_order_convert(myriad_output,
                                              net.outputTensorLayout,
                                              expected_layout)

    if file_gen:
        np.save(args.outputs_name + "_result.npy", myriad_output)
        np.save(args.outputs_name + "_expected.npy",
                expected.astype(dtype=np.float16))

    quit_code = validation(myriad_output, expected, args.exp_id,
                           ValidationStatistic[extargs.metric],
                           report_filename, args)

    return quit_code