Example #1
0
def _convert_model(converter: KerasConverter, k_op: keras.models.Model):
    """Inline a nested Keras model's sub-graph into the enclosing graph.

    A nested model is converted into its own "local" graph, which must be
    stitched into the surrounding "global" graph at both boundaries.

    Initial state:

       Global Model : [layer] -> tensor(A) -> [...........Model..........] -> tensor(C) -> [layer] ->
                    :
        Local Model :            tensor(B) -> [layer] -> tensor -> [layer] -> tensor(D)
    """
    graph = converter.convert(k_op)

    # Step 1: splice the global input variables (converted from tensor(A))
    # in place of the local graph's input variables (converted from tensor(B)).
    #
    #    Global Model : [layer] -> tensor(A) -> [...........Model..........] -> tensor(C) -> [layer] ->
    #                 :             |
    #     Local Model :             +---------> [layer] -> tensor -> [layer] -> tensor(D)
    #
    outer_inputs = [converter.get_variable(t) for t in converter.get_input_tensor(k_op)]
    for outer_var, inner_var in zip(outer_inputs, graph.inputs):
        inner_var.replace(outer_var)

    # Step 2: register each local output variable (converted from tensor(D))
    # as the variable corresponding to the global output tensor (tensor(C)).
    #
    #    Global Model : [layer] -> tensor(A)                                     +---------> [layer] ->
    #                 :             |                                            |
    #     Local Model :             +---------> [layer] -> tensor -> [layer] -> tensor(D)
    #
    for outer_tensor, inner_var in zip(converter.get_output_tensor(k_op), graph.outputs):
        converter.set_variable(outer_tensor, inner_var)
Example #2
0
def _convert_model(converter: KerasConverter, k_op: "keras.models.Model"):
    """Inline a nested Keras model's sub-graph into the enclosing graph.

    Initial state:

       Global Model : [layer] -> tensor(A) -> [...........Model..........]
                    :
        Local Model :            tensor(B) -> [layer] -> tensor -> [layer] -> tensor(C)
    """
    graph = converter.convert(k_op)

    # The sub-graph's boundary variables stop being graph inputs/outputs once
    # inlined, so drop their Input/Output attribute markers.
    # (get_attribute presumably returns a fresh list, making removal during
    # iteration safe — TODO confirm against the Variable implementation.)
    for var in graph.inputs:
        for attr in var.get_attribute(Input):
            var.attributes.remove(attr)

    for var in graph.outputs:
        for attr in var.get_attribute(Output):
            var.attributes.remove(attr)

    # Step 1: splice the global input variables (converted from tensor(A))
    # in place of the local graph's input variables (converted from tensor(B)),
    # unifying their orders first.
    #
    #    Global Model : [layer] -> tensor(A) -> [...........Model..........]
    #                 :             |
    #     Local Model :             +---------> [layer] -> tensor -> [layer] -> tensor(C)
    #
    outer_inputs = [converter.get_variable(t) for t in converter.get_input_tensor(k_op)]
    for outer_var, inner_var in zip(outer_inputs, graph.inputs):
        inner_var.order.unify(outer_var.order)
        inner_var.replace(outer_var)

    # Step 2: register each local output variable (converted from tensor(C))
    # as the variable for the corresponding global output tensor.
    #
    #    Global Model : [layer] -> tensor(A)                                     +--------->
    #                 :             |                                            |
    #     Local Model :             +---------> [layer] -> tensor -> [layer] -> tensor(C)
    #
    for outer_tensor, inner_var in zip(converter.get_output_tensor(k_op), graph.outputs):
        converter.set_variable(outer_tensor, inner_var)
Example #3
0
def main():
    """CLI entry point: convert a Keras model file into WebDNN graph descriptors.

    Parses command-line arguments, loads the Keras model (with any
    plugin-supplied custom layers), resolves placeholder input shapes,
    optionally dumps the IR graph, and generates a descriptor per backend.
    Exits with status 1 if any backend fails.
    """
    sys.setrecursionlimit(10000)  # workaround for deep copying large graph
    parser = argparse.ArgumentParser()
    parser.add_argument("kerasmodel")
    parser.add_argument("--backend", default="webgpu,webassembly,fallback",
                        help="comma-separated list of backends")
    parser.add_argument("--input_shape", required=True,
                        help="shape of blobs for inputs (example: '(1,3,224,224)')")
    # parser.add_argument("--input_data_format", choices=["channels_first", "channels_last"])
    parser.add_argument("--out",
                        help="output directory (default: <model>/webdnn_graph_descriptor)")
    parser.add_argument("--encoding", help="name of weight encoder")
    parser.add_argument("--visualize_ir", action="store_true")
    parser.add_argument("--plugin", action="append", help="plugin python files which are imported before transpiling")
    args = parser.parse_args()

    console.stderr(f"[{path.basename(__file__)}] Generating feedforward graph")

    # custom_objects maps names to user-defined custom layer classes so that
    # load_model can deserialize them.
    # FIX: always initialize the dict — previously it was only bound when a
    # plugin supplied classes, so running without --plugin raised a NameError
    # at the load_model call below.
    custom_objects = {}
    if args.plugin:
        for plugin_path in args.plugin:
            # _load_plugin returns (name, class) pairs
            custom_objects.update(_load_plugin(plugin_path))

    input_shape, _ = Shape.parse(args.input_shape)
    input_shapes = [input_shape]

    model = keras.models.load_model(args.kerasmodel, custom_objects=custom_objects)
    model.build()
    converter = KerasConverter()
    graph = converter.convert(model)

    # Resolve each unresolved placeholder in the graph inputs from the
    # user-supplied shape; verify consistency where both sides are resolved.
    for graph_input, input_shape in zip(graph.inputs, input_shapes):
        for p1, p2 in zip(graph_input.shape, input_shape):
            if not Placeholder.check_resolved(p1) and Placeholder.check_resolved(p2):
                p1.value = Placeholder.force_int(p2)

            elif Placeholder.check_resolved(p1) and not Placeholder.check_resolved(p2):
                raise ValueError(f'Shape mismatch: {p1} != {p2}')

            elif Placeholder.check_resolved(p1) and Placeholder.check_resolved(p2):
                assert p1 == p2, f'Shape mismatch: {p1} != {p2}'

    if args.out:
        output_dir = args.out
    else:
        output_dir = path.join(path.dirname(args.kerasmodel), "webdnn_graph_descriptor")
    os.makedirs(output_dir, exist_ok=True)

    if args.visualize_ir:
        ir_dot_path = path.join(output_dir, "ir.dot")
        with open(ir_dot_path, "w") as f:
            f.write(dump_dot(graph))
        console.stderr(f"IR graph can be visualized with graphviz command: 'dot {ir_dot_path} -T png -o output.png'")

    console.stderr(f"[{path.basename(__file__)}] Generating graph descriptor")

    any_backend_failed = False
    backends = args.backend.split(",")
    for i, backend in enumerate(backends):
        console.stderr(f"[{path.basename(__file__)}] Backend: {console.colorize(backend, console.Color.Cyan)}")
        try:
            graph_exec_data = generate_descriptor(backend, graph, constant_encoder_name=args.encoding)
            graph_exec_data.save(output_dir)
        except Exception as ex:
            # In debug mode fail fast; otherwise record the failure and try
            # the remaining backends.
            if flags.DEBUG:
                raise ex

            any_backend_failed = True
            console.error(f"[{path.basename(__file__)}] Failed generating descriptor for {backend} backend")
            console.stderr(traceback.format_exc())

    if any_backend_failed:
        sys.exit(1)