# Example 1
    def call_impl(self, env, a, axis, dtype, out):
        """Emit ONNX nodes computing the cumulative sum of ``a``.

        Builds an ONNX ``Loop`` whose body adds each element of ``a`` to a
        running total; the per-iteration total is also emitted as a scan
        output, which becomes the result.  Only the plain form is supported:
        ``axis``, ``dtype`` and ``out`` must all be "none" values.

        Args:
            env: code-emission environment that nodes are appended to.
            a: input value; converted to a tensor below.
            axis, dtype, out: keyword arguments; must be none (unsupported).

        Returns:
            The tensor collecting the cumulative sums (the Loop scan output).
        """
        assert axis.is_none()  # TODO(hamaji): Not supported yet.
        assert dtype.is_none()  # TODO(hamaji): Not supported yet.
        assert out.is_none()  # TODO(hamaji): Not supported yet.
        # Furthermore, we assume the input is a one-dimensional tensor.
        # The return value appears to be a tensor regardless of the input.
        # TODO(satos) These assumptions are rather strong.
        v = a.to_tensor(env)

        # If the return value were a Sequence rather than a tensor, this
        # could be written elegantly, like SplitAxis.
        """
        a = new_tensor()
        env.addnode(
            'Flatten',
            inputs=[v.name],outputs[a.name],
            axis=0
        )
        v = a
        a = new_tensor()
        env.addnode(
            'Squeeze',
            inputs=[v.name],outputs[a.name],
            axes=[0]
        )
        """
        # Trip count for the Loop: the length of the input.
        ls = env.calc(
            'ChainerGenericLen',
            inputs=[v.name],
        )

        def dummy():
            # Fresh throwaway name for Loop outputs we do not consume.
            return "dummy_" + new_tensor().name

        # Sub-environment accumulating the nodes of the Loop body.
        localenv = Env(env.module)
        cnt = new_tensor()   # iteration counter supplied by Loop
        cond = new_tensor()  # loop condition, passed through unchanged
        s = new_tensor()     # running sum carried between iterations
        gtx = new_tensor()   # the input tensor, threaded through the loop
        # tx = gtx[cnt]: the element processed in this iteration.
        tx = localenv.calc(
            "ChainerGenericGetItem",
            inputs=[gtx.name, cnt.name],
        )
        # ts = tx + s: the new running total.
        ts = localenv.calc(
            "Add",
            inputs=[tx.name, s.name],
        )
        # ts2 mirrors ts; listed last in the body outputs, so per ONNX Loop
        # semantics it is the scan output collected into `res`.
        ts2 = localenv.calc("Identity", inputs=[ts.name])

        zero = totensor(0, env)  # initial value of the running sum

        res = new_tensor()
        # Body signature: (cnt, cond, gtx, s) -> (cond, gtx, ts, ts2).
        # The two dummy() outputs discard the final carried gtx and ts.
        env.addnode('Loop',
                    inputs=[ls.name, "", v.name, zero.name],
                    outputs=[dummy(), dummy(), res.name],
                    body=utils.make_graph(localenv.nodes, "Cumsum_subgraph",
                                          [cnt, cond, gtx, s],
                                          [cond, gtx, ts, ts2]))

        return res
# Example 2
def eval_binary_op(nast, env):
    """Evaluate a binary arithmetic AST node, emitting the matching ONNX op.

    Supports Add, Sub, Mult, Div and FloorDiv.  Sequence + sequence is
    treated as concatenation and lowered to an ONNX Loop; tensor operands
    are lowered to the corresponding elementwise op.  Floor division is
    emitted as Div followed by Floor.

    Args:
        nast: the gast.BinOp-like node (uses .left, .op, .right).
        env: code-emission environment that nodes are appended to.

    Returns:
        The value holding the result of the operation.

    Raises:
        Exception: if the operator is not one of the supported five.
    """
    lv = eval_ast(nast.left, env)
    rv = eval_ast(nast.right, env)

    # Table of (AST operator class, ONNX op type, needs-floor flag).
    # FloorDiv is listed before Div so floor division matches first.
    for op_cls, op_name, op_is_floor in (
            (gast.Add, 'Add', False),
            (gast.Sub, 'Sub', False),
            (gast.Mult, 'Mul', False),
            (gast.FloorDiv, 'Div', True),
            (gast.Div, 'Div', False)):
        if isinstance(nast.op, op_cls):
            optype = op_name
            isfloor = op_is_floor
            break
    else:
        raise Exception('unknown operator', nast.op)

    # TODO(hamaji): Reconsider if constant folding is necessary in CH2O.
    # (Dead code that folded two non-tensor constants with per-operator
    # Python lambdas used to live here; restore the lambdas if re-enabling.)

    lv.to_value_info(env)
    rv.to_value_info(env)
    if lv.is_sequence() and rv.is_sequence():
        # Sequence + sequence is concatenation: loop over rv, appending
        # each element to a state sequence initialized with lv.
        assert optype == 'Add'
        lv = lv.to_sequence(env)
        rv = rv.to_sequence(env)

        state = new_sequence(name='seq_plus_state')
        cond = new_tensor(name='seq_plus_cond')
        index = new_tensor(name='seq_plus_index')
        elem = new_tensor(name='seq_plus_elem')
        out_state = new_tensor(name='seq_plus_out_state')
        nodes = []
        nodes.append(
            helper.make_node('ChainerSequenceLookup',
                             inputs=[rv.name, index.name],
                             outputs=[elem.name]))
        nodes.append(
            helper.make_node('ChainerSequenceAppend',
                             inputs=[state.name, elem.name],
                             outputs=[out_state.name]))
        loop = make_graph(
            nodes,
            "SeqPlus",
            [index, cond, state],
            [cond, out_state],
        )

        length = env.calc('ChainerGenericLen', inputs=[rv.name])
        res = new_sequence(name='seq_plus')
        env.addnode('Loop',
                    inputs=[length.name, "", lv.name],
                    outputs=[res.name],
                    body=loop)
    else:
        if optype == 'Div' and not isfloor:
            # True division is computed in float regardless of input dtype.
            lv = castto(lv.to_tensor(env), TensorProto.FLOAT, env)
            rv = castto(rv.to_tensor(env), TensorProto.FLOAT, env)
        else:
            lv = lv.to_tensor(env)
            rv = rv.to_tensor(env)
        res = env.calc(
            optype,
            inputs=[lv.name, rv.name],
        )

    if isfloor:
        # Floor division: Div above, then Floor here.
        res = env.calc(
            "Floor",
            inputs=[res.name],
        )

    return res
# Example 3
def eval_for(nast, env):
    """Evaluate a ``for`` statement by emitting an ONNX Loop over the iterable.

    The body is evaluated into a sub-environment; the variables it reads and
    writes (discovered via ``_find_in_out``) become the Loop's carried
    inputs/outputs.  As a special case, a generator over
    ``ChainList.children`` is unrolled at compile time.  Returns None
    (statements have no value).
    """
    assert nast.orelse == []  # `for ... else` is not supported.
    ite = eval_ast(nast.iter, env)

    # A hack for ResNet50.
    # TODO(hamaji): Come up with a sophisticated way.
    # TODO(hamaji): This code doesn't handle scope properly, I think.
    if (isinstance(ite.value, types.GeneratorType)
            and 'ChainList.children' in str(ite.value)):
        # For now, just run the Python for loop here (compile-time unroll).
        tg = nast.target.id
        env.set_var(tg, Value(None))
        for v in ite.value:
            env.set_var(tg, _value(v))
            eval_ast(nast.body, env)
            # print('looping',env.vars.keys())

        env.pop_var(tg)
        return None

    if ite.is_py:
        # Lift a plain Python iterable into a sequence of Values.
        ite = Value([Value(v) for v in ite.value])

    assert isinstance(nast.target, gast.Name)  # only `for x in ...` targets
    x = nast.target.id

    # Make a new env; the child graph is built from the nodes emitted into
    # it while evaluating the loop body.
    localenv = env.new_block()

    cnt = new_tensor()    # iteration counter supplied by Loop
    gtx = new_sequence()  # the iterated sequence, threaded through the body
    # Bind the loop variable to gtx[cnt] inside the body.
    localenv.set_var(
        x,
        _value(
            localenv.calc(
                "ChainerSequenceLookup",
                inputs=[gtx.name, cnt.name],
            )))
    ty = eval_ast(nast.body, localenv)
    assert ty.is_none()  # the body is a statement list, not an expression

    # Variables flowing into/out of the loop body: key -> (in, out, setattr).
    in_out = _find_in_out(localenv, env)

    input_values = []
    output_values = []
    final_outputs = []
    final_setattrs = []
    for key, (iv, ov, setattr_info) in in_out.items():
        if ov is None:
            continue
        if iv is None:
            # No incoming value; use a dummy initial value.
            iv = Value(False)
        out = ov.copy(env, name=key)
        final_outputs.append((key, out.value))
        if setattr_info is not None:
            final_setattrs.append(tuple(list(setattr_info) + [out]))
        input_values.append(iv.to_value_info(env))
        output_values.append(ov.to_value_info(env))

    cond = new_tensor(name='loop_cond')
    # Body signature: (cnt, cond, gtx, *inputs) -> (cond, gtx, *outputs).
    localgraph = make_graph(localenv.nodes, "Loop_subgraph",
                            [cnt, cond, gtx] + input_values,
                            [cond, gtx] + output_values)

    # Trip count: the length of the iterated sequence.
    mtc = env.calc(
        "ChainerGenericLen",
        inputs=[ite.to_sequence(env).name],
    )

    env.addnode('Loop',
                inputs=([mtc.name, "", ite.to_sequence(env).name] +
                        [i.name for i in input_values]),
                outputs=([new_tensor('out_generator').name] +
                         [o.name for _, o in final_outputs]),
                body=localgraph)

    # Publish plain variables back into the enclosing scope; keys containing
    # '.' or '/' are attribute/internal names handled via setattr below.
    for k, o in final_outputs:
        if '.' not in k and '/' not in k:
            env.set_var(k, _value(o))

    for var, key, value in final_setattrs:
        setattr(var.value, key, value)

    return None
# Example 4
def eval_if(nast, env):
    """Evaluate an ``if`` statement, emitting an ONNX If node when needed.

    When the condition is a Python constant, the taken branch is evaluated
    directly.  Otherwise both branches are evaluated into sub-environments
    and merged into an ONNX ``If`` whose inputs/outputs are the variables
    the branches read and write (via ``_find_in_out``).  Returns None
    (statements have no value).
    """
    cond = eval_ast(nast.test, env)
    if cond.is_py and cond.value is True:
        return eval_ast(nast.body, env)
    elif cond.is_py and cond.value is False:
        return eval_ast(nast.orelse, env)

    then_env = env.new_block()
    ty = eval_ast(nast.body, then_env)
    assert ty.is_none()

    else_env = env.new_block()
    ty = eval_ast(nast.orelse, else_env)
    assert ty.is_none()

    then_in_out = _find_in_out(then_env, env)
    else_in_out = _find_in_out(else_env, env)
    # Sort for determinism: iterating a raw set here made the If node's
    # input/output ordering depend on string-hash randomization, producing
    # a different graph on every run.
    keys = sorted(set(list(then_in_out.keys()) + list(else_in_out.keys())))

    input_values = []
    then_outputs = []
    else_outputs = []
    final_outputs = []
    final_setattrs = []

    for key in keys:
        then_iv, then_ov, then_setattr_info = then_in_out.get(
            key, (None, None, None))
        else_iv, else_ov, else_setattr_info = else_in_out.get(
            key, (None, None, None))

        # Reconcile setattr info; when both branches set it they must agree.
        if then_setattr_info is None:
            setattr_info = else_setattr_info
        else:
            if else_setattr_info is not None:
                assert then_setattr_info == else_setattr_info
            setattr_info = then_setattr_info

        def set_final_output(key, out):
            # NOTE: closes over `setattr_info`, rebound each iteration; safe
            # because it is only called within the iteration defining it.
            out = out.copy(env, name=key)
            final_outputs.append((key, out.value))
            if setattr_info is not None:
                final_setattrs.append(tuple(list(setattr_info) + [out]))

        iv = else_iv if then_iv is None else then_iv
        if iv is None:
            iv = Value(False)  # dummy input when neither branch reads it
        input_values.append(iv.to_value_info(env))

        if then_ov is None and else_ov is None:
            continue
        # A branch that does not write the variable passes its input through.
        if then_ov is None:
            then_outputs.append(iv.to_value_info(env))
            else_outputs.append(else_ov.to_value_info(else_env))
            set_final_output(key, else_ov)
        elif else_ov is None:
            then_outputs.append(then_ov.to_value_info(then_env))
            else_outputs.append(iv.to_value_info(env))
            set_final_output(key, then_ov)
        else:
            then_outputs.append(then_ov.to_value_info(then_env))
            else_outputs.append(else_ov.to_value_info(else_env))
            set_final_output(key, then_ov)

    then_graph = make_graph(
        then_env.nodes,
        "If_then",
        input_values,
        then_outputs,
    )

    else_graph = make_graph(
        else_env.nodes,
        "If_else",
        input_values,
        else_outputs,
    )

    env.addnode(
        'If',
        inputs=[cond.to_value_info(env).name] + [i.name for i in input_values],
        outputs=[o.name for _, o in final_outputs],
        then_branch=then_graph,
        else_branch=else_graph,
    )

    # Publish the merged outputs back into the enclosing scope.
    for k, o in final_outputs:
        env.set_var(k, _value(o))

    for var, key, value in final_setattrs:
        setattr(var.value, key, value)

    return None
# Example 5
def compile_model(model, inputs):
    """Compile a Chainer model into an ONNX ModelProto.

    Symbolically traces ``model``'s call over placeholder inputs and
    assembles the emitted nodes into an ONNX graph/model.

    Args:
        model: the Chainer link to compile; ``sys.modules[model.__module__]``
            seeds the evaluation environment.
        inputs: example inputs — a list/tuple becomes a sequence placeholder,
            None an untyped tensor, and ints/arrays typed tensors.

    Returns:
        The ONNX ModelProto built from the traced graph.
    """
    # return helper.make_graph([],'dummy',[],[])

    init_id2name(model)
    # code.InteractiveConsole({'mo': model}).interact()
    env = Env(sys.modules[model.__module__])
    molk = User_Defined_Link(model, env)

    input_tensors = []
    for i in inputs:
        # TODO(hamaji): Set valid type info.
        if isinstance(i, (list, tuple)):
            x = new_sequence()
        elif i is None:
            x = new_tensor()
        else:
            if isinstance(i, int):
                i = np.array(i)
            else:
                # TODO(durswd): This code requires chainer6.x
                i = chainer.cuda.to_cpu(i)

            x = new_tensor(dims=i.shape, dtype=i.dtype)
        input_tensors.append(x)

    input_values = [Value(i) for i in input_tensors]
    v = molk.call(input_values, [], env)

    dprint('output_tensors', v)
    if isinstance(v.value, tuple):
        output_tensors = list(v.value)  # unpack the tuple of outputs
    else:
        output_tensors = [v]  # assume a single tensor for now

    # print('env.init_tensors ',env.init_tensors)
    # Weights initialized during tracing become graph inputs too.
    input_tensors += list(env.init_tensors.values())

    # Undo any temporary monkey-patching done during tracing.
    for f in env.restore_funcs:
        f()

    # for no in env.nodes:
    #   print(no.op_type)
    # print(env.nodes)
    # print(input_tensors)
    # print(output_tensors)
    # for ch in model.namedparams():
    #    print(ch)

    outputs_vi = [o.to_value_info(env) for o in output_tensors]
    graph = make_graph(env.nodes, 'name_is_unknown_now', input_tensors,
                       outputs_vi)

    # Attach initializers to the inputs that are weights; keep variable
    # things such as batch_size and input_size symbolic where possible.

    # Graph checking fails once chainer-compiler's custom ONNX nodes are
    # used...
    # checker.check_graph(graph)
    mo = helper.make_model(graph)

    # print(mo)
    return mo