Example #1
import argparse
import os
import sys

import chainer
import chainer.computational_graph
import numpy as np

from webdnn.backend import generate_descriptor
from webdnn.frontend.chainer import ChainerConverter
from webdnn.graph.graph import Graph  # noqa: F401 (for the "# type: Graph" comment)
from webdnn.util import console


def main():
    sys.setrecursionlimit(10000)  # workaround for deep-copying the large graph

    parser = argparse.ArgumentParser()
    parser.add_argument("--model",
                        default="resnet50",
                        choices=["vgg16", "resnet50"])
    parser.add_argument("--backend", default="webgpu,webassembly,fallback")
    parser.add_argument("--encoding")
    parser.add_argument('--out',
                        '-o',
                        default='output_chainer',
                        help='Directory to output the graph descriptor')

    args = parser.parse_args()

    os.makedirs(args.out, exist_ok=True)

    # Dummy input; a real image could be loaded with PIL.Image.open() instead.
    sample_image = np.zeros((224, 224, 3), dtype=np.uint8)
    if args.model == "vgg16":
        link = chainer.links.model.vision.vgg.VGG16Layers()
        prepared_image = chainer.links.model.vision.vgg.prepare(
            sample_image)  # BGR, CHW
        out_layer_name = "fc8"

    elif args.model == "resnet50":
        link = chainer.links.model.vision.resnet.ResNet50Layers()
        prepared_image = chainer.links.model.vision.resnet.prepare(
            sample_image)
        out_layer_name = "fc6"

    nn_input = chainer.Variable(np.array([prepared_image], dtype=np.float32))
    nn_output = link(nn_input, layers=[
        out_layer_name
    ])[out_layer_name]  # 'prob' is also possible (uses softmax)
    chainer_cg = chainer.computational_graph.build_computational_graph(
        [nn_output])
    converter = ChainerConverter()
    graph = converter.convert(chainer_cg, [nn_input],
                              [nn_output])  # type: Graph

    any_backend_failed = False
    last_backend_exception = None
    for backend in args.backend.split(","):
        try:
            graph_exec_data = generate_descriptor(
                backend, graph, constant_encoder_name=args.encoding)
            graph_exec_data.save(args.out)
        except Exception as ex:
            any_backend_failed = True
            last_backend_exception = ex
            console.error(
                f"Failed generating descriptor for backend {backend}: {str(ex)}\n"
            )

    if any_backend_failed:
        raise last_backend_exception
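
A standard entry-point guard makes the script directly invocable from the command line; the script filename in the sample command below is an assumption, not taken from the source:

if __name__ == "__main__":
    # Sample invocation (script name is hypothetical):
    #   python convert_chainer_model.py --model resnet50 --backend webgpu -o output_chainer
    main()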
Example #2
import argparse
import os
import sys
from os import path

import chainer
import chainer.computational_graph
import chainer.links.caffe
import numpy as np

from webdnn.backend import generate_descriptor
from webdnn.frontend.chainer import ChainerConverter
from webdnn.graph.graph import Graph  # noqa: F401 (for the "# type: Graph" comment)
from webdnn.util import console


def main():
    sys.setrecursionlimit(10000)  # workaround for deep-copying the large graph
    parser = argparse.ArgumentParser()
    # Positional argument: path to the trained model, e.g. CaffeNet from the Caffe examples.
    parser.add_argument("caffemodel")
    parser.add_argument("--backend",
                        default="webgpu,webassembly,fallback",
                        help="comma-separated list of backends")
    parser.add_argument("--input_name", help="blob name for input (mandatory)")
    parser.add_argument(
        "--input_shape",
        help="shape of blobs for inputs (example: '(1,3,224,224)')")
    parser.add_argument("--input_npy",
                        help="npy file containing sample inputs")
    parser.add_argument(
        "--output_names",
        required=True,
        help="comma-separated blob name for output (mandatory)")
    parser.add_argument(
        "--out",
        help="output directory (default: <model>/webdnn_graph_descriptor)")
    parser.add_argument("--encoding", help="name of weight encoder")
    args = parser.parse_args()

    # Multiple input blobs could be supported, but the command-line interface would become complicated.
    input_blob, input_filled = parse_input_blob(args)
    output_names = args.output_names.split(",")

    console.stderr(
        "[convert_caffe] Loading caffe model... (usually takes several minutes)"
    )
    link = chainer.links.caffe.CaffeFunction(args.caffemodel)

    console.stderr("[convert_caffe] Generating feedforward graph")
    if chainer.__version__ >= "2.":
        # Chainer 2+ controls train/test mode via the config mechanism;
        # using_config() only takes effect inside a `with` block.
        with chainer.using_config("train", False):
            output_blobs = list(
                link(inputs={args.input_name: input_blob},
                     outputs=output_names))  # list of Variable
    else:
        output_blobs = list(
            link(inputs={args.input_name: input_blob},
                 outputs=output_names,
                 train=False))  # list of Variable
    chainer_cg = chainer.computational_graph.build_computational_graph(
        output_blobs)
    converter = ChainerConverter()
    graph = converter.convert(chainer_cg, [input_blob],
                              output_blobs)  # type: Graph

    if args.out:
        output_dir = args.out
    else:
        output_dir = path.join(path.dirname(args.caffemodel),
                               "webdnn_graph_descriptor")
    os.makedirs(output_dir, exist_ok=True)

    if input_filled:
        # save output of Caffe Network (not required for inference)
        output_arrays = {
            output_name: output_blob.data
            for output_name, output_blob in zip(output_names, output_blobs)
        }
        np.savez(path.join(output_dir, "example_output.npz"), **output_arrays)

    console.stderr("[convert_caffe] Generating descriptors")
    any_backend_failed = False
    for backend in args.backend.split(","):
        try:
            graph_exec_data = generate_descriptor(
                backend, graph, constant_encoder_name=args.encoding)
            graph_exec_data.save(output_dir)
        except Exception as ex:
            any_backend_failed = True
            console.error(
                f"[convert_caffe] Failed generating descriptor for backend {backend}: {str(ex)}"
            )

    if any_backend_failed:
        sys.exit(1)
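
The helper parse_input_blob() is called near the top of main() but is not part of this listing. Below is a hypothetical reconstruction, assuming it builds the input Variable from --input_npy (real sample data) or --input_shape (zeros) and reports which case applied; the original helper may differ in its details.

import ast


def parse_input_blob(args):
    """Hypothetical reconstruction of the missing helper.

    Returns (input_blob, input_filled); input_filled is True only when the
    blob was filled with real sample data loaded from --input_npy.
    """
    if args.input_npy:
        data = np.load(args.input_npy).astype(np.float32)
        input_filled = True
    elif args.input_shape:
        # --input_shape is a Python literal such as '(1,3,224,224)'
        data = np.zeros(ast.literal_eval(args.input_shape), dtype=np.float32)
        input_filled = False
    else:
        raise ValueError("specify either --input_npy or --input_shape")
    return chainer.Variable(data), input_filled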
def generate_graph_model1(caption_net):
    resnet_in, resnet_out = get_resnet_io_variable()
    image_vec_out = caption_net.image_vec(resnet_out)
    converter = ChainerConverter()
    # Unlike the scripts above, this uses the newer two-argument form of
    # ChainerConverter.convert(), which traces the graph directly from the
    # input/output Variables.
    graph = converter.convert([resnet_in], [image_vec_out])  # type: Graph
    return graph
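
get_resnet_io_variable(), used by generate_graph_model1(), is likewise not defined in this listing. A plausible sketch follows, assuming it traces Chainer's ResNet50Layers on a dummy image up to the pooled feature vector; the "pool5" layer name and the preprocessing are assumptions:

def get_resnet_io_variable():
    # Hypothetical reconstruction: run a dummy image through ResNet50 up to
    # the pooled feature layer so the caption model can consume image features.
    link = chainer.links.model.vision.resnet.ResNet50Layers()
    dummy_image = np.zeros((224, 224, 3), dtype=np.uint8)
    prepared = chainer.links.model.vision.resnet.prepare(dummy_image)  # BGR, CHW
    resnet_in = chainer.Variable(np.array([prepared], dtype=np.float32))
    resnet_out = link(resnet_in, layers=["pool5"])["pool5"]
    return resnet_in, resnet_out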