Example No. 1
    def call_impl(self, env, x, n_batch_axes):
        x = x.to_tensor(env)
        res = new_tensor([self.n_out])

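        # Fast path: emit a single ChainerLinear custom op, which handles
        # n_batch_axes itself; the code below instead decomposes Linear into
        # plain ONNX ops (Shape/Reshape + Gemm/MatMul).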
        use_chainer_linear = True
        if use_chainer_linear:
            inputs = [x.name, self.W.name]
            if not self.nobias:
                inputs.append(self.b.name)
            return env.calc(
                "ChainerLinear",
                inputs=inputs,
                n_batch_axes=n_batch_axes.to_int()
            )

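        # Fallback: flatten the input into a (batch_size, -1) matrix before
        # the matrix multiplication.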
        x_shape = env.calc("Shape", inputs=[x.name])
        batch_size = env.calc("Gather",
                              inputs=[x_shape.name,
                                      Value(0).to_tensor(env).name])
        batch_size = env.calc("Unsqueeze",
                              inputs=[batch_size.name],
                              axes=[0])
        mat_shape = env.calc("Concat",
                             inputs=[batch_size.name,
                                     Value([Value(-1)]).to_tensor(env).name],
                             axis=0)
        x = env.calc("Reshape",
                     inputs=[x.name, mat_shape.name])

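        # W is stored as (n_out, n_in), so it is transposed explicitly for
        # MatMul and implicitly via transB=1 for Gemm.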
        if self.nobias:
            t = env.calc(
                "Transpose",
                inputs=[self.W.name],
                perm=[1, 0]
            )
            res = env.calc(
                "MatMul",
                inputs=[x.name, t.name],
            )
        else:
            res = env.calc(
                "Gemm",
                inputs=[x.name, self.W.name, self.b.name],
                transA=0, transB=1
            )
        return res
Example No. 2
    def call_impl(self, env, x, ksize, stride, pad, outsize, cover_all):
        assert stride.is_none()  # TODO(hamaji): Not supported yet.
        assert pad.value == 0  # TODO(hamaji): Not supported yet.
        assert outsize.is_none()  # TODO(hamaji): Not supported yet.
        assert cover_all.value is False  # TODO(hamaji): Not supported yet.

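        # Upsample by the kernel size along the spatial axes; the batch and
        # channel axes keep a scale of 1.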
        scales = np.array([1, 1] + _pair(ksize), dtype=np.float32)
        return env.calc(
            'Upsample',
            inputs=[x.to_tensor(env).name,
                    Value(scales).to_tensor(env).name])
Example No. 3
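    # Helper: evaluate the AST node in the enclosing env when present,
    # otherwise lift the Python default into a constant tensor.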
    def eval_with_default(nast, default_value):
        if nast is None:
            return Value(np.array(default_value)).to_tensor(env)
        return eval_ast(nast, env).to_tensor(env)
Example No. 4
def _value(v):
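    # User-defined callables pass through unchanged; everything else is
    # wrapped in a Value.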
    if (isinstance(v, User_Defined_Function)
            or isinstance(v, User_Defined_Func_In_Link)):
        return v
    return Value(v)
Example No. 5
def eval_for(nast, env):
    assert nast.orelse == []
    ite = eval_ast(nast.iter, env)

    # A hack for ResNet50.
    # TODO(hamaji): Come up with a sophisticated way.
    # TODO(hamaji): This code doesn't handle scope properly, I think.
    if (isinstance(ite.value, types.GeneratorType)
            and 'ChainList.children' in str(ite.value)):
        # For now, just run the for loop for real in Python.
        tg = nast.target.id
        env.set_var(tg, Value(None))
        for v in ite.value:
            env.set_var(tg, _value(v))
            eval_ast(nast.body, env)
            # print('looping',env.vars.keys())

        env.pop_var(tg)
        return None

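    # A plain Python iterable is lifted element by element into a Value
    # sequence.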
    if ite.is_py:
        ite = Value([Value(v) for v in ite.value])

    assert isinstance(nast.target, gast.Name)
    x = nast.target.id

    # Create a new env; the child graph built while evaluating the body
    # becomes the loop body graph.
    localenv = env.new_block()

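    # cnt is the iteration counter and gtx the iterated sequence; the loop
    # variable is bound to element cnt of gtx inside the body graph.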
    cnt = new_tensor()
    gtx = new_sequence()
    localenv.set_var(
        x,
        _value(
            localenv.calc(
                "ChainerSequenceLookup",
                inputs=[gtx.name, cnt.name],
            )))
    ty = eval_ast(nast.body, localenv)
    assert ty.is_none()

    in_out = _find_in_out(localenv, env)

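    # Variables modified inside the body become loop-carried state
    # (input/output pairs of the Loop node).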
    input_values = []
    output_values = []
    final_outputs = []
    final_setattrs = []
    for key, (iv, ov, setattr_info) in in_out.items():
        if ov is None:
            continue
        if iv is None:
            iv = Value(False)
        out = ov.copy(env, name=key)
        final_outputs.append((key, out.value))
        if setattr_info is not None:
            final_setattrs.append(tuple(list(setattr_info) + [out]))
        input_values.append(iv.to_value_info(env))
        output_values.append(ov.to_value_info(env))

    cond = new_tensor(name='loop_cond')
    localgraph = make_graph(localenv.nodes, "Loop_subgraph",
                            [cnt, cond, gtx] + input_values,
                            [cond, gtx] + output_values)

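    # The trip count is the length of the iterated sequence; the Loop's
    # condition input is left empty.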
    mtc = env.calc(
        "ChainerGenericLen",
        inputs=[ite.to_sequence(env).name],
    )

    env.addnode('Loop',
                inputs=([mtc.name, "", ite.to_sequence(env).name] +
                        [i.name for i in input_values]),
                outputs=([new_tensor('out_generator').name] +
                         [o.name for _, o in final_outputs]),
                body=localgraph)

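    # Reflect the loop outputs back into the enclosing scope, skipping
    # attribute entries ('.') and link-parameter state ('/').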
    for k, o in final_outputs:
        if '.' not in k and '/' not in k:
            env.set_var(k, _value(o))

    for var, key, value in final_setattrs:
        setattr(var.value, key, value)

    return None
Example No. 6
def eval_if(nast, env):
    cond = eval_ast(nast.test, env)
    if cond.is_py and cond.value is True:
        return eval_ast(nast.body, env)
    elif cond.is_py and cond.value is False:
        return eval_ast(nast.orelse, env)

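    # The condition is not a Python constant: evaluate both branches in
    # child envs and emit an ONNX If node.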
    then_env = env.new_block()
    ty = eval_ast(nast.body, then_env)
    assert ty.is_none()

    else_env = env.new_block()
    ty = eval_ast(nast.orelse, else_env)
    assert ty.is_none()

    then_in_out = _find_in_out(then_env, env)
    else_in_out = _find_in_out(else_env, env)
    keys = set(list(then_in_out.keys()) + list(else_in_out.keys()))

    input_values = []
    then_outputs = []
    else_outputs = []
    final_outputs = []
    final_setattrs = []

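    # For every variable touched in either branch, pair up the then/else
    # outputs; a branch that leaves the variable untouched passes the input
    # value through.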
    for key in keys:
        then_iv, then_ov, then_setattr_info = then_in_out.get(
            key, (None, None, None))
        else_iv, else_ov, else_setattr_info = else_in_out.get(
            key, (None, None, None))

        if then_setattr_info is None:
            setattr_info = else_setattr_info
        else:
            if else_setattr_info is not None:
                assert then_setattr_info == else_setattr_info
            setattr_info = then_setattr_info

        def set_final_output(key, out):
            out = out.copy(env, name=key)
            final_outputs.append((key, out.value))
            if setattr_info is not None:
                final_setattrs.append(tuple(list(setattr_info) + [out]))

        iv = else_iv if then_iv is None else then_iv
        if iv is None:
            iv = Value(False)
        input_values.append(iv.to_value_info(env))

        if then_ov is None and else_ov is None:
            continue
        if then_ov is None:
            then_outputs.append(iv.to_value_info(env))
            else_outputs.append(else_ov.to_value_info(else_env))
            set_final_output(key, else_ov)
        elif else_ov is None:
            then_outputs.append(then_ov.to_value_info(then_env))
            else_outputs.append(iv.to_value_info(env))
            set_final_output(key, then_ov)
        else:
            then_outputs.append(then_ov.to_value_info(then_env))
            else_outputs.append(else_ov.to_value_info(else_env))
            set_final_output(key, then_ov)

    then_graph = make_graph(
        then_env.nodes,
        "If_then",
        input_values,
        then_outputs,
    )

    else_graph = make_graph(
        else_env.nodes,
        "If_else",
        input_values,
        else_outputs,
    )

    env.addnode(
        'If',
        inputs=[cond.to_value_info(env).name] + [i.name for i in input_values],
        outputs=[o.name for _, o in final_outputs],
        then_branch=then_graph,
        else_branch=else_graph,
    )

    for k, o in final_outputs:
        env.set_var(k, _value(o))

    for var, key, value in final_setattrs:
        setattr(var.value, key, value)

    return None
Example No. 7
def _find_in_out(localenv, env):
    used_onnx_names = set()
    for node in localenv.nodes:
        used_onnx_names |= set(node.input)

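    # Compare the variable bindings of the inner block with those of the
    # outer env to find values flowing in and out.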
    outer_vars = env.get_var_dict()
    inner_vars = localenv.get_var_dict()

    # A tuple of (in-value, out-value, extra info for later setattr)
    # keyed by a variable name.
    in_out = {}
    for key, iv in inner_vars.items():
        ov = outer_vars.get(key, None)
        if isinstance(ov, Value):
            # Changing link or something to Value is not supported.
            assert isinstance(iv, Value), '%s => %s' % (ov, iv)
        elif ov is None or iv is None:
            pass
        else:
            # Changing Value to link or something is not supported.
            assert not isinstance(iv, Value), '%s => %s' % (ov, iv)
            continue

        if ov is None or iv is None or ov.value != iv.value:
            in_out[key] = (ov, iv, None)
            continue

        if ov.to_value_info(env).name in used_onnx_names:
            in_out[key] = (ov, None, None)

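    # Attribute reads and writes are tracked separately, keyed by the
    # identity of the object they belong to.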
    var_ids = {}

    def attr_id(var, key):
        vid = id(var.value)
        if vid not in var_ids:
            var_ids[vid] = 'v%d' % (len(var_ids) + 1)
        return var_ids[vid] + '.' + key

    in_attrs = {}
    for var, key, value in localenv.read_attrs:
        k = attr_id(var, key)
        if k not in in_attrs:
            in_attrs[k] = value

    out_attrs = {}
    for var, key, value in localenv.wrote_attrs:
        k = attr_id(var, key)
        out_attrs[k] = (value, (var, key))

    for k in set(list(in_attrs.keys()) + list(out_attrs.keys())):
        iv = in_attrs.get(k, None)
        ov, setattr_info = out_attrs.get(k, (None, None))
        in_out[k] = (iv, ov, setattr_info)

    # Link parameters used inside the loop must:
    # 1. be copied into the outer env, and
    env.init_tensors.update(localenv.init_tensors)
    # 2. be carried into the loop as state.
    for init in localenv.init_tensors.values():
        key = '/' + init.name
        in_out[key] = (Value(init), None, None)

    return in_out
Example No. 8
def compile_model(model, inputs):
    # return helper.make_graph([],'dummy',[],[])

    init_id2name(model)
    # code.InteractiveConsole({'mo': model}).interact()
    env = Env(sys.modules[model.__module__])
    molk = User_Defined_Link(model, env)

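    # Build placeholder input tensors whose shapes and dtypes mirror the
    # sample inputs.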
    input_tensors = []
    for i in inputs:
        # TODO(hamaji): Set valid type info.
        if isinstance(i, (list, tuple)):
            x = new_sequence()
        elif i is None:
            x = new_tensor()
        else:
            if isinstance(i, int):
                i = np.array(i)
            else:
                # TODO(durswd): This code requires chainer6.x
                i = chainer.cuda.to_cpu(i)

            x = new_tensor(dims=i.shape, dtype=i.dtype)
        input_tensors.append(x)

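    # Evaluate the model's forward pass symbolically; the ONNX nodes it
    # produces accumulate in env.nodes.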
    input_values = [Value(i) for i in input_tensors]
    v = molk.call(input_values, [], env)

    dprint('output_tensors', v)
    if isinstance(v.value, tuple):
        output_tensors = list(v.value)  # unpack the tuple
    else:
        output_tensors = [v]  # treat it as a single tensor for now

    # print('env.init_tensors ',env.init_tensors)
    input_tensors += list(env.init_tensors.values())

    for f in env.restore_funcs:
        f()

    # for no in env.nodes:
    #   print(no.op_type)
    # print(env.nodes)
    # print(input_tensors)
    # print(output_tensors)
    # for ch in model.namedparams():
    #    print(ch)

    outputs_vi = [o.to_value_info(env) for o in output_tensors]
    graph = make_graph(env.nodes, 'name_is_unknown_now', input_tensors,
                       outputs_vi)

    # Attach initializers to the inputs that are weights.
    # Leave variable dimensions such as batch_size and input_size as they are
    # where possible.

    # Using Chainer compiler's custom nodes makes the ONNX checker fail...
    # checker.check_graph(graph)
    mo = helper.make_model(graph)

    # print(mo)
    return mo
Example No. 9
    def call_impl(self, env, hx, cx, xs):
        assert hx.value is None  # TODO(hamaji): Not implemented yet.
        assert cx.value is None  # TODO(hamaji): Not implemented yet.
        xs = xs.to_sequence(env)

        # For now, decompose the n-step LSTM one step at a time.
        ilens = env.calc(
            "ChainerSequenceLengths",
            inputs=[xs.name],
        )

        tilens = env.calc("ConcatFromSequence",
                          inputs=[ilens.name],
                          axis=0,
                          new_axis=True)

        v = xs

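        # Concatenate the per-gate weight matrices in the order the ONNX LSTM
        # op expects and prepend a num_directions axis with Unsqueeze.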
        def lstm_param(ps):
            p = env.calc("Concat", inputs=[v.name for v in ps], axis=0)
            return env.calc("Unsqueeze", inputs=[p.name], axes=[0])

        wst = []
        rst = []
        bst = []
        for w in self.ws:
            wst.append(lstm_param([w[0], w[3], w[1], w[2]]))
            rst.append(lstm_param([w[4], w[7], w[5], w[6]]))
        for b in self.bs:
            bst.append(
                lstm_param([b[0], b[3], b[1], b[2], b[4], b[7], b[5], b[6]]))

        ws = []
        rs = []
        bs = []
        for i in range(self.n_layers):
            for s, t in [(ws, wst), (rs, rst), (bs, bst)]:
                s.append(
                    env.calc("Concat",
                             inputs=[t[i * 2].name, t[i * 2 + 1].name],
                             axis=0))

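        # Pad the variable-length sequence into a dense tensor and move it to
        # the (seq_length, batch_size, input_size) layout ONNX LSTM expects.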
        hs = []
        cs = []
        v = Value(v).to_sequence(env)
        v = env.calc(
            "ChainerSequencePad",
            inputs=[v.name],
        )
        v = env.calc("Transpose", perm=(1, 0, 2), inputs=[v.name])

        sequence_length = env.calc("ChainerGenericLen", inputs=[v.name])
        out_shape = Value(
            [Value(sequence_length),
             Value(-1),
             Value(self.out_size * 2)]).to_tensor(env)

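        # One bidirectional LSTM node per layer; its output of shape
        # (seq_length, 2, batch_size, hidden_size) is reshaped back to
        # (seq_length, batch_size, 2 * hidden_size) to feed the next layer.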
        for i in range(self.n_layers):
            h = new_tensor()
            c = new_tensor()
            ys = new_tensor()

            env.addnode(
                "LSTM",
                inputs=[
                    v.name, ws[i].name, rs[i].name, bs[i].name, tilens.name
                ],
                outputs=[ys.name, h.name, c.name],
                direction='bidirectional',
                hidden_size=self.out_size,
            )

            hs.append(h.name)
            cs.append(c.name)

            # ys :: [seqlen x 2 x batchsize x hiddensize]
            v = env.calc("Transpose", perm=(0, 2, 1, 3), inputs=[ys.name])
            v = env.calc("Reshape", inputs=[v.name, out_shape.name])

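        # Back to (batch_size, seq_length, features) order, then strip the
        # padding to recover a per-example sequence.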
        v = env.calc("Transpose", perm=(1, 0, 2), inputs=[v.name])
        v = env.calc_seq("ChainerSequenceUnpad", inputs=[v.name, ilens.name])

        ths = env.calc(
            "Concat",
            inputs=hs,
            axis=0,
        )
        tcs = env.calc(
            "Concat",
            inputs=cs,
            axis=0,
        )

        tys = v
        return ths, tcs, tys
Example No. 10
    def call(self, args, kwargs, env):
        bound = self.sig.bind(*args, **kwargs)
        bound.apply_defaults()
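        # Wrap every bound argument in Value so call_impl sees a uniform
        # interface regardless of how the caller passed its arguments.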
        args = [Value(a) for a in bound.args]
        kwargs = {k: Value(a) for k, a in bound.kwargs.items()}
        return self.call_impl(env, *args, **kwargs)