Example #1
class GeneralOptimizeRule(OptimizeRule):  # enclosing class restored for context; base class assumed
    def __init__(self):
        super(GeneralOptimizeRule, self).__init__()

        # Register sub-rules; they are applied in this order when the
        # composite rule optimizes a graph.
        self.register(RemoveLastSoftmax())
        self.register(ConcatAffine())
        self.register(ConcatScalarOperation())
        self.register(RemoveUnnecessaryOperator())
        self.register(ConcatZeroPadding())
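Here RemoveLastSoftmax is installed as one sub-rule of a composite GeneralOptimizeRule, alongside other graph-simplification passes; running the composite rule applies each registered sub-rule to the graph.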
Example #2
class GeneralOptimizeRule(OptimizeRule):  # enclosing class restored for context; base class assumed
    def __init__(self):
        super(GeneralOptimizeRule, self).__init__()

        # Another variant of the rule set, again including RemoveLastSoftmax.
        self.register(RemoveRedundantOperator())
        self.register(RemoveLastSoftmax())
        self.register(RemoveNoEffectOperator())
        self.register(ReplaceScalarAffine())
        self.register(SimplifyElementwise())
        self.register(ConcatZeroPadding())
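Both snippets rely on the same register/optimize pattern. Below is a minimal sketch of how such a composite rule can be structured; the class name, the fixed-point loop, and the (graph, changed) return convention are assumptions for illustration, not necessarily the library's actual API.

class CompositeOptimizeRule:
    def __init__(self):
        self.sub_rules = []

    def register(self, rule):
        # Sub-rules run in registration order.
        self.sub_rules.append(rule)

    def optimize(self, graph):
        changed = False
        # Re-apply all sub-rules until no rule changes the graph
        # (a fixed point), since one rule can expose work for another.
        while True:
            changed_in_pass = False
            for rule in self.sub_rules:
                graph, flag = rule.optimize(graph)
                changed_in_pass = changed_in_pass or flag
            if not changed_in_pass:
                break
            changed = True
        return graph, changed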
Example #3
def test_single_softmax():
    linear = Linear('linear')
    softmax = Softmax('softmax', axis=Axis.C)

    # Build a Linear -> Softmax graph whose Softmax is the final operator.
    x = Variable([4, 5], OrderNC)
    w = Variable([4, 5], OrderNC)
    h, = linear(x, w)
    y, = softmax(h)

    graph = Graph([x], [y])

    graph, _ = RemoveLastSoftmax().optimize(graph)

    # The trailing Softmax must be removed: only the Linear survives,
    # and its output becomes the graph output.
    ops = listup_operators(graph)
    assert len(ops) == 1 and isinstance(ops[0], Linear)
    assert len(graph.outputs) == 1 and ops[0].outputs["y"] == graph.outputs[0]
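Removing a Softmax that sits at a graph output is safe for classification because softmax is strictly monotonic along its axis: the largest logit stays the largest score, so the predicted class is unchanged. The test checks exactly that the pass strips the final Softmax and rewires the Linear output as the graph output.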
Example #4
def test_internal_softmax():
    linear1 = Linear('linear1')
    softmax1 = Softmax('softmax1', axis=Axis.C)
    linear2 = Linear('linear2')
    softmax2 = Softmax('softmax2', axis=Axis.C)

    # Build Linear -> Softmax -> Linear -> Softmax; only the final
    # Softmax sits at a graph output.
    x = Variable([4, 5], OrderNC)
    w1 = Variable([4, 5], OrderNC)
    w2 = Variable([3, 4], OrderNC)
    h, = linear1(x, w1)
    h, = softmax1(h)
    h, = linear2(h, w2)
    y, = softmax2(h)

    graph = Graph([x], [y])

    graph, _ = RemoveLastSoftmax().optimize(graph)

    # Only the last Softmax is removed; the internal Softmax feeds
    # linear2 and must remain in the graph.
    ops = listup_operators(graph)
    assert len(ops) == 3
    assert isinstance(ops[0], Linear) and isinstance(ops[1], Softmax) and isinstance(ops[2], Linear)
    assert len(graph.outputs) == 1 and ops[2].outputs["y"] == graph.outputs[0]
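The behavior the two tests pin down can be captured in a few lines. Below is a minimal, self-contained sketch of a remove-last-softmax pass over a toy graph representation; Node, Graph, and remove_last_softmax here are illustrative stand-ins, not the library's actual classes.

class Node:
    def __init__(self, kind, inputs):
        self.kind = kind          # e.g. "Linear" or "Softmax"
        self.inputs = inputs      # producer nodes

class Graph:
    def __init__(self, outputs):
        self.outputs = outputs    # nodes whose values the graph returns

def remove_last_softmax(graph):
    """Replace every graph output that is a Softmax with its input.

    Softmax is strictly monotonic along the reduced axis, so dropping a
    final Softmax preserves the argmax of each output. Internal Softmax
    nodes (consumed by other operators) are left untouched.
    """
    changed = False
    new_outputs = []
    for out in graph.outputs:
        if out.kind == "Softmax":
            # Splice the Softmax out by exposing its input directly.
            new_outputs.append(out.inputs[0])
            changed = True
        else:
            new_outputs.append(out)
    return Graph(new_outputs), changed

# Mirrors test_single_softmax: the Softmax at the output is spliced out.
lin = Node("Linear", [])
sm = Node("Softmax", [lin])
g, changed = remove_last_softmax(Graph([sm]))
assert changed and g.outputs[0] is lin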