Code Example #1
File: test_dropout.py Project: ugiwgh/ngraph
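Note: the snippets on this page are shown without their imports. Reconstructed from the ngraph test suite, they likely share a preamble along these lines (module paths may differ across ngraph versions):

import numpy as np
import ngraph as ng
import ngraph.transformers as ngt
from ngraph.frontends.neon import Dropout, Layer
from ngraph.testing import ExecutorFactory, executor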
def test_dropout_train(nin, batch_size, keep, transformer_factory):

    # set inputs
    N = ng.make_axis(batch_size, name='N')
    F = ng.make_axis(nin, name='F')

    inp = ng.placeholder([F, N])
    layer = Dropout(keep=keep)
    fprop = layer(inp)

    # create data
    x = np.random.uniform(size=(nin, batch_size))

    # evaluate
    with ExecutorFactory() as ex:
        comp = ex.executor([fprop, layer.mask], inp)
        out, mask = comp(x)
        numpy_out = x * mask[:, None]
        # atol/rtol are module-level constants in the original test file;
        # 0.0 and 1e-6 match the tolerances used in the sibling tests here.
        atol, rtol = 0.0, 1e-6
        ng.testing.assert_allclose(out, numpy_out, atol=atol, rtol=rtol)

        if keep < 1.0:
            out1, mask1 = out.copy(), mask.copy()
            out2, mask2 = comp(x)
            assert (out1 != out2).any()
            assert (mask1 != mask2).any()
Code Example #2
File: ops_compound.py Project: jlwhite709/ngraph
    def _dropout_op(self, cntk_op, inputs):
        """
        Stochastically drops activations to prevent overfitting.

        Arguments:
            cntk_op: CNTK operation to be imported.
            inputs: List of inputs to this node.

        Returns:
            An ngraph Op.
        """
        return Dropout(cntk_op.attributes['dropoutRate'])(inputs[0])
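One subtlety worth flagging: CNTK's dropoutRate attribute is the probability of dropping a unit, whereas the test snippets on this page construct the layer as Dropout(keep=...), the probability of keeping one. If this frontend's Dropout takes keep as its first positional argument, an importer would need a conversion along these lines (a hypothetical sketch, not the importer's confirmed behavior):

def cntk_rate_to_keep(dropout_rate):
    # CNTK stores the drop probability; Dropout(keep=...) expects the
    # keep probability. Hypothetical helper, named for illustration only.
    return 1.0 - dropout_rate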
Code Example #3
def test_dropout_inference(transformer_factory):
    nin, batch_size = 8, 2

    # set inputs
    N = ng.make_axis(batch_size, name='N')
    F = ng.make_axis(nin).named('F')

    inp = ng.placeholder([F, N])
    layer = Dropout(keep=0.5)
    fprop = layer.inference_outputs(inp)

    # create data
    x = np.random.uniform(size=(nin, batch_size))

    # evaluate
    with executor(fprop, inp) as comp:
        out = comp(x)
        numpy_out = x * 0.5
        np.testing.assert_allclose(out, numpy_out, rtol=1e-6)
        out1 = out.copy()
        out2 = comp(x)
        np.testing.assert_allclose(out1, out2, rtol=1e-6)
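Examples #1 and #3 together pin down the layer's contract: during training the output equals the input times a freshly drawn binary mask (a new mask per execution), and during inference the input is simply scaled by the keep probability. A minimal NumPy sketch of that contract, purely for illustration (this is not the ngraph implementation):

import numpy as np

def dropout_reference(x, keep, training, rng=np.random):
    # Non-inverted dropout, matching the tests above: no 1/keep scaling
    # at train time; inference scales by keep instead of masking.
    if training:
        # One Bernoulli(keep) draw per feature row; broadcasting over the
        # batch dimension mirrors x * mask[:, None] in the tests.
        mask = (rng.uniform(size=x.shape[0]) < keep).astype(x.dtype)
        return x * mask[:, None]
    return x * keep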
Code Example #4
    def Dropout(self, cntk_op, inputs):
        """
        Stochastically drops activations to prevent overfitting.

        Arguments:
            cntk_op: CNTK operation to be imported.
            inputs: List of inputs to this node.

        Returns:
            An ngraph Op.
        """
        node = cntk_op.block_root.root_function
        layer = Dropout(node.attributes['dropoutRate'])
        return layer(inputs[0]).named(cntk_op.uid)
Code Example #5
def test_dropout_bprop_single_comp(transformer_factory):
    nin, batch_size = 32, 2

    # set inputs
    N = ng.make_axis(batch_size, name='N')
    F = ng.make_axis(nin).named('F')

    mul_factor = ng.placeholder(())
    inp = ng.placeholder([F, N])
    layer = Dropout(keep=0.5)
    fprop = layer.train_outputs(inp * mul_factor)
    out_graph = ng.sum(fprop, out_axes=())

    # create data
    x = np.random.uniform(size=(nin, batch_size))
    bprop = ng.deriv(out_graph, mul_factor)

    # evaluate
    trans = ngt.make_transformer()
    comp = trans.computation([fprop, bprop, layer.mask], inp, mul_factor)
    fout, bout, mask = comp(x, 2)
    # Calculate derivative by hand and compare
    np.testing.assert_allclose(bout, (x * mask[:, None]).sum(), rtol=1e-6)
    trans.close()
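The "derivative by hand" in the assertion follows from the chain rule: with the mask fixed, out_graph = sum(mask * mul_factor * x) is linear in mul_factor, so its derivative with respect to mul_factor is sum(mask * x). A quick finite-difference check of that identity in plain NumPy (illustrative only):

import numpy as np

x = np.random.uniform(size=(32, 2))
mask = (np.random.uniform(size=32) < 0.5).astype(x.dtype)
m = 2.0

def f(m):
    # Same computation as out_graph above, with the mask held fixed.
    return (mask[:, None] * (m * x)).sum()

analytic = (x * mask[:, None]).sum()        # the hand-derived gradient
numeric = (f(m + 1e-6) - f(m)) / 1e-6       # finite-difference estimate
assert np.allclose(analytic, numeric, rtol=1e-4)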
Code Example #6
def test_dropout_train(transformer_factory):
    nin, batch_size = 32, 2

    # set inputs
    N = ng.make_axis(batch_size, name='N')
    F = ng.make_axis(nin).named('F')

    inp = ng.placeholder([F, N])
    layer = Dropout(keep=0.5)
    fprop = layer.train_outputs(inp)

    # create data
    x = np.random.uniform(size=(nin, batch_size))

    # evaluate
    with executor([fprop, layer.mask], inp) as comp:
        out, mask = comp(x)
        numpy_out = x * mask[:, None]
        np.testing.assert_allclose(out, numpy_out, rtol=1e-6)

        out1, mask1 = out.copy(), mask.copy()
        out2, mask2 = comp(x)
        assert (out1 != out2).any()
        assert (mask1 != mask2).any()
Code Example #7
File: test_dropout.py Project: ugiwgh/ngraph
def test_dropout_bprop_single_comp(nin, batch_size, keep, transformer_factory):
    # set inputs
    N = ng.make_axis(batch_size, name='N')
    F = ng.make_axis(nin, name='F')

    mul_factor = ng.placeholder(())
    inp = ng.placeholder([F, N])
    layer = Dropout(keep=keep)
    fprop = layer(inp * mul_factor)
    out_graph = ng.sum(fprop, out_axes=())
    bprop = ng.deriv(out_graph, mul_factor)

    # create data
    x = np.random.uniform(size=(nin, batch_size))

    # evaluate
    with ExecutorFactory() as ex:
        comp = ex.executor([fprop, bprop, layer.mask], inp, mul_factor)
        fout, bout, mask = comp(x, 2)
        # Calculate derivative by hand and compare
        ng.testing.assert_allclose(bout, (x * mask[:, None]).sum(), rtol=1e-6)
Code Example #8
File: test_dropout.py Project: ugiwgh/ngraph
def test_dropout_inference(nin, batch_size, transformer_factory):
    # set inputs
    N = ng.make_axis(batch_size, name='N')
    F = ng.make_axis(nin, name='F')

    inp = ng.placeholder([F, N])
    layer = Dropout(keep=0.5)
    with Layer.inference_mode_on():
        fprop = layer(inp)

    # create data
    x = np.random.uniform(size=(nin, batch_size))

    # evaluate
    with ExecutorFactory() as ex:
        comp = ex.executor(fprop, inp)
        out = comp(x)
        numpy_out = x * 0.5
        # As in Example #1, atol/rtol come from module-level constants in the
        # original test file; pinned here so the snippet runs standalone.
        atol, rtol = 0.0, 1e-6
        ng.testing.assert_allclose(out, numpy_out, atol=atol, rtol=rtol)
        out1 = out.copy()
        out2 = comp(x)
        ng.testing.assert_allclose(out1, out2, atol=atol, rtol=rtol)