Code example #1
import os


def load_model(dataset_val, range_from_batch, args):
    import tools  # project-local helper module
    # Reuse the generic .pb loader package that sits next to this loader.
    pb_loader = tools.import_from_path(os.path.dirname(__file__) + '/../pb')
    from . import h5_converter
    if not args.h5_path.endswith('.h5'):
        raise ValueError('{} should end with *.h5'.format(args.h5_path))

    # Optionally load Keras custom objects (custom layers, losses, ...) from a
    # user-supplied Python file exposing get_custom_objects().
    custom_objects = None
    if args.h5_custom_objects is not None:
        custom_objects_file = args.h5_custom_objects
        custom_objects_module = tools.import_from_path(custom_objects_file)
        custom_objects = custom_objects_module.get_custom_objects()

    # Convert the Keras .h5 model to a frozen .pb graph, then delegate loading.
    args.pb_path = h5_converter.convert(args.h5_path, custom_objects)

    return pb_loader.load_model(dataset_val, range_from_batch, args)
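
Code example #1 imports the file passed via --h5_custom_objects and calls its get_custom_objects() function. A minimal sketch of such a file follows; the file name and the 'swish' activation are purely illustrative and not part of the project.

# my_custom_objects.py (hypothetical file passed via --h5_custom_objects)
import keras

def swish(x):
    # illustrative custom activation that a saved .h5 model might reference
    return x * keras.backend.sigmoid(x)

def get_custom_objects():
    # keys must match the custom names recorded inside the .h5 file
    return {'swish': swish}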
Code example #2
import os


def load_model(dataset_val, range_from_batch, args):
    import tools  # project-local helper module
    pb_loader = tools.import_from_path(os.path.dirname(__file__) + '/../pb')
    from . import h5_converter
    if not args.h5_path.endswith('.h5'):
        raise ValueError('{} should end with *.h5'.format(args.h5_path))

    # Convert the Keras .h5 model to a frozen .pb graph (no custom objects),
    # then delegate to the generic .pb loader.
    args.pb_path = h5_converter.convert(args.h5_path)

    return pb_loader.load_model(dataset_val, range_from_batch, args)
Code example #3
import os

import tensorflow as tf  # TF 1.x style API (tf.placeholder, tf.Session, ...)


def darknet2pb(input_dir, output_pb_path='tf_model', input_node_name='input'):
    import tools  # project-local helper module
    # network.py is produced into input_dir by decode_darknet (see code example #4)
    # and defines the network graph.
    data_loader = tools.import_from_path(os.path.join(input_dir, 'network.py'))
    data_loader.load_data()

    # Parse basic network info: '#' lines are comments, others are "key: value".
    info_dict = dict()
    with open(os.path.join(input_dir, 'info.txt'), 'r') as F_info:
        for line in F_info.readlines():
            if len(line) > 1 and line[0] != '#':
                k, v = line.strip().split(':')
                info_dict[k.strip()] = v.strip()
    input_w = int(info_dict['width'])
    input_h = int(info_dict['height'])
    channel = int(info_dict['channel'])
    dtype = info_dict['data type']

    # set input placeholder
    inp = tf.placeholder(shape=[None, input_h, input_w, channel],
                         dtype=dtype,
                         name=input_node_name)

    # Build the forward pass; yv2 is the network's output tensor.
    yv2 = data_loader.network_forward(inp)

    # Record the resolved I/O node names by appending them to info.txt.
    with open(os.path.join(input_dir, 'info.txt'), 'a') as F_info:
        F_info.write('\n#I/O info\n')
        F_info.write('input node: {}\n'.format(input_node_name.split(':')[0]))
        F_info.write('output node: {}\n'.format(yv2.name.split(':')[0]))

    # TensorFlow session: initialize variables so they can be frozen below.
    sess1 = tf.Session()
    sess1.run(tf.global_variables_initializer())

    from tensorflow.python.framework.graph_util import convert_variables_to_constants
    # Freeze the graph: bake the current variable values in as constants.
    graph = convert_variables_to_constants(sess1, sess1.graph_def,
                                           [yv2.name.split(':')[0]])
    tf.train.write_graph(graph,
                         input_dir,
                         '{}.pb'.format(output_pb_path),
                         as_text=False)
    sess1.close()
    tf.reset_default_graph()

    return inp.name, yv2.op.name, input_w, input_h
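
darknet2pb() reads the keys width, height, channel and 'data type' from info.txt, skipping '#' comment lines and splitting the rest on ':'. The following sketch shows a file that would satisfy that parser; the values are placeholders, and in the real flow info.txt is presumably written by decode_darknet (code example #4) rather than by hand.

# write_info_example.py (illustrative only)
example_info = """\
# network input description
width: 320
height: 240
channel: 3
data type: float32
"""
with open('info.txt', 'w') as f:
    f.write(example_info)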
Code example #4
import os
import tempfile


def load_model(dataset_val, range_from_batch, args):
    import tools  # project-local helper module
    pb_loader = tools.import_from_path(os.path.dirname(__file__) + '/../pb')

    # Decode the Darknet .cfg/.weights into a temporary build directory, then
    # freeze the generated network into a .pb graph (see code example #3).
    cfg_path = args.cfg_path
    weights_path = args.weights_path
    build_dir = tempfile.mkdtemp()
    decode_darknet(cfg_path, weights_path, build_dir)
    pb_name = 'output'
    dataset_input_name, tensor_output_name, input_w, input_h = \
        darknet2pb(build_dir, pb_name, 'input')
    assert args.image_w == input_w, 'image_w must match the network input width'
    assert args.image_h == input_h, 'image_h must match the network input height'

    pb_path = os.path.join(build_dir, pb_name + '.pb')

    # Hand the frozen graph and its node names over to the generic .pb loader.
    args.pb_path = pb_path
    args.tensor_output_name = tensor_output_name
    args.dataset_input_name = dataset_input_name

    return pb_loader.load_model(dataset_val, range_from_batch, args)
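
The darknet loader above only reads a few attributes from args before delegating to the generic .pb loader. A hedged sketch of the minimal namespace it touches directly; the file paths are placeholders, and the downstream .pb loader will need further fields from the full parser in code example #5.

import argparse

args = argparse.Namespace(
    cfg_path='yolo.cfg',          # placeholder Darknet network definition
    weights_path='yolo.weights',  # placeholder Darknet weights file
    image_w=320,                  # must match the width recorded in info.txt
    image_h=240,                  # must match the height recorded in info.txt
)
# k210_layers = load_model(dataset_val, range_from_batch.RangeFromBatchMinMax(), args)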
Code example #5
import argparse
import os

# Project-local modules (import layout assumed from the usage below).
import tools
import range_from_batch
import k210_layer_to_c_code
import k210_layer_to_bin


def main():
    def str2bool(v):
        # argparse helper: accept yes/no style strings for boolean flags.
        if v.lower() in ('yes', 'true', 't', 'y', '1'):
            return True
        elif v.lower() in ('no', 'false', 'f', 'n', '0'):
            return False
        else:
            raise argparse.ArgumentTypeError('Boolean value expected.')

    parser = argparse.ArgumentParser()
    parser.add_argument('--dataset_input_name', default='input:0')
    parser.add_argument('--eight_bit_mode',
                        type=str2bool,
                        nargs='?',
                        const=True,
                        default=False)
    parser.add_argument('--output_path', default='build/gencode_output')
    parser.add_argument('--output_bin_name', default='build/model.bin')
    parser.add_argument('--prefix', default='')
    parser.add_argument('--layer_start_idx', type=int, default=0)

    parser.add_argument('--model_loader', default='model_loader/pb')
    parser.add_argument('--tensorboard_mode',
                        type=str2bool,
                        nargs='?',
                        const=True,
                        default=False)
    parser.add_argument('--pb_path', default=None)
    parser.add_argument('--h5_path', default=None)
    parser.add_argument('--h5_custom_objects', default=None)
    parser.add_argument('--cfg_path', default=None)
    parser.add_argument('--weights_path', default=None)
    parser.add_argument('--tensor_input_name', default=None)
    parser.add_argument('--tensor_output_name', default=None)
    parser.add_argument('--tensor_input_min', type=float, default=0)
    parser.add_argument('--tensor_input_max', type=float, default=1)
    parser.add_argument('--tensor_input_minmax_auto',
                        type=str2bool,
                        nargs='?',
                        const=True,
                        default=True)

    parser.add_argument('--dataset_loader',
                        default='dataset_loader/img_0_1.py')
    parser.add_argument('--dataset_pic_path', default='dataset/yolo')
    parser.add_argument('--dataset_path', default='dataset/yolo')
    parser.add_argument('--image_w', type=int, default=320)
    parser.add_argument('--image_h', type=int, default=240)

    args = parser.parse_args()

    eight_bit_mode = args.eight_bit_mode
    output_path = args.output_path
    output_bin_name = args.output_bin_name
    args.prefix = args.prefix if len(args.prefix) > 0 \
        else os.path.basename(args.output_path).replace('.', '_').replace('-', '_')

    layer_start_idx = args.layer_start_idx

    model_loader = args.model_loader
    tensorboard_mode = args.tensorboard_mode  # used in model loader
    pb_path = args.pb_path  # used in model loader
    tensor_input_name = args.tensor_input_name  # used in model loader
    tensor_output_name = args.tensor_output_name  # used in model loader
    input_min = args.tensor_input_min  # used in model loader
    input_max = args.tensor_input_max  # used in model loader
    input_minmax_auto = args.tensor_input_minmax_auto  # used in model loader

    dataset_loader = args.dataset_loader
    dataset_input_name = args.dataset_input_name
    dataset_pic_path = args.dataset_pic_path  # used in dataset loader
    image_w = args.image_w  # used in dataset loader
    image_h = args.image_h  # used in dataset loader

    if ':' not in dataset_input_name:
        args.dataset_input_name = dataset_input_name + ':0'

    if output_path.endswith('.c'):
        output_path = output_path[:-2]

    # Load the calibration dataset via the selected dataset loader.
    dataset_loader_module = tools.import_from_path(dataset_loader)
    dataset_val = dataset_loader_module.load_dataset(args)

    # Load the model via the selected model loader; quantization ranges are
    # estimated from the dataset batch (min/max).
    model_loader_module = tools.import_from_path(model_loader)
    rfb = range_from_batch.RangeFromBatchMinMax()
    k210_layers = model_loader_module.load_model(dataset_val, rfb, args)

    # Generate the C source and header for the K210 layer list.
    c_file, h_file = k210_layer_to_c_code.gen_layer_list_code(
        k210_layers, args.eight_bit_mode, args.prefix, args.layer_start_idx)

    os.makedirs(os.path.dirname(output_path), exist_ok=True)

    with open(output_path + '.c', 'w') as of:
        of.write(c_file)
    print('generated {}'.format(output_path + '.c'))

    with open(output_path + '.h', 'w') as of:
        of.write(h_file)
    print('generated {}'.format(output_path + '.h'))

    # The binary output is best-effort: a failure is reported but does not
    # abort the C code generation above.
    try:
        output_bin = k210_layer_to_bin.gen_layer_bin(k210_layers,
                                                     args.eight_bit_mode)
        os.makedirs(os.path.dirname(output_bin_name), exist_ok=True)
        with open(output_bin_name, 'wb') as of:
            of.write(output_bin)
        print('generated {}'.format(output_bin_name))
    except Exception as e:
        print(e)
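
A hedged usage sketch for main(): the flag names come from the parser above, but the program name and file paths are placeholders, and in practice the script would be run from a shell rather than by patching sys.argv.

import sys

sys.argv = [
    'gencode',                          # placeholder program name
    '--model_loader', 'model_loader/pb',
    '--pb_path', 'build/model.pb',      # placeholder frozen graph
    '--tensor_output_name', 'output',
    '--dataset_pic_path', 'dataset/yolo',
    '--image_w', '320', '--image_h', '240',
    '--eight_bit_mode',                 # bare flag -> const=True
]
# main()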
Code example #6
import os


def load_dataset(args):
    import tools  # project-local helper module
    # Reuse the [0, 1] image loader that sits next to this file...
    img_0_1 = tools.import_from_path(os.path.dirname(__file__) + '/img_0_1.py')
    dataset = img_0_1.load_dataset(args)
    # ...and rescale its values from [0, 1] to [-1, 1].
    return dataset * 2 - 1
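
A quick check of the rescaling above, assuming the project-local img_0_1 loader returns a NumPy array with values in [0, 1] (that loader is not shown here).

import numpy as np

batch = np.array([0.0, 0.25, 0.5, 1.0])
print(batch * 2 - 1)  # prints [-1.  -0.5  0.   1. ], i.e. [0, 1] mapped to [-1, 1]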