Exemplo n.º 1
0
def test_lambda():
    """Check that Lambda/HybridLambda blocks match the built-in activations."""
    # Reference network built from the stock activation blocks.
    ref_net = mx.gluon.nn.HybridSequential()
    ref_net.add(nn.Activation('tanh'), nn.LeakyReLU(0.1))

    # Hybrid variant: a name-based op plus an F-agnostic (symbol/ndarray) lambda.
    hybrid_leaky = lambda F, x, *args: F.LeakyReLU(x, *args, slope=0.1)
    hybrid_net = mx.gluon.nn.HybridSequential()
    hybrid_net.add(nn.HybridLambda('tanh'), nn.HybridLambda(hybrid_leaky))

    # Imperative variant: a plain NDArray lambda.
    imperative_leaky = lambda x: mx.nd.LeakyReLU(x, slope=0.1)
    imperative_net = mx.gluon.nn.Sequential()
    imperative_net.add(nn.Lambda('tanh'), nn.Lambda(imperative_leaky))

    x = mx.nd.random.uniform(shape=(2, 3, 5, 7))
    ref_out = ref_net(x)
    for candidate in (hybrid_net, imperative_net):
        assert_almost_equal(ref_out.asnumpy(), candidate(x).asnumpy(),
                            rtol=1e-3, atol=1e-3)
Exemplo n.º 2
0
    def __init__(self, ctx=mx.cpu(), warmup=10, runs=50, inputs=None):
        """Set up the benchmark: build the input tensor and the block under test.

        Parameters
        ----------
        ctx : mx.Context, default mx.cpu()
            Context on which the data and the block are placed.
        warmup : int, default 10
            Number of warmup runs (forwarded to the base class).
        runs : int, default 50
            Number of timed runs (forwarded to the base class).
        inputs : dict or None
            Optional overrides for ``default_parameters``; forwarded to the
            base class as ``custom_parameters``.
        """
        # Set the default inputs.
        # Default data shape is (128, 512, 512).  NOTE(review): an earlier
        # comment claimed (3, 512, 512) / a 512*512 RGB image — the code below
        # actually uses 128 as the leading dimension.
        default_parameters = {
            "data": (128, 512, 512),
            "data_initializer": nd.normal,
            "run_backward": True,
            "dtype": "float32"
        }

        super().__init__(ctx=ctx,
                         warmup=warmup,
                         runs=runs,
                         default_parameters=default_parameters,
                         custom_parameters=inputs)

        # Materialize the input tensor; grad is attached only when the
        # benchmark also runs the backward pass.
        self.data = get_mx_ndarray(ctx=self.ctx,
                                   in_tensor=self.inputs["data"],
                                   dtype=self.inputs["dtype"],
                                   initializer=self.inputs["data_initializer"],
                                   attach_grad=self.inputs["run_backward"])

        # Batchify the input data, e.g. (128, 512, 512) => (1, 128, 512, 512):
        # the Lambda block under benchmark prepends a batch axis.
        self.block = nn.Lambda(lambda x: nd.expand_dims(data=x, axis=0))

        self.block.initialize(ctx=self.ctx)
Exemplo n.º 3
0
    def __init__(self, **kwargs):
        """Build the generator sub-networks.

        Three branches are created: ``fcz`` and ``fcy`` (presumably for a
        noise input and a label/condition input — confirm against the forward
        pass, which is not visible here) and ``rest``, which maps their
        combined features to a 784-dim tanh output (28*28 if this is MNIST —
        TODO confirm).

        NOTE(review): ``nn.LeakyReLU(0.0)`` has slope 0 on the negative side,
        which makes it numerically identical to a plain ReLU; confirm whether
        a nonzero slope was intended.
        """
        super(netG, self).__init__(**kwargs)
        with self.name_scope():
            self.fcz = nn.HybridSequential()
            with self.fcz.name_scope():
                self.fcz.add(nn.Dense(256), nn.BatchNorm(), nn.LeakyReLU(0.0))

            self.fcy = nn.HybridSequential()
            with self.fcy.name_scope():
                self.fcy.add(nn.Dense(256), nn.BatchNorm(), nn.LeakyReLU(0.0))
            # Non-hybrid tail; nn.Lambda('tanh') applies the named nd op.
            self.rest = nn.Sequential()
            with self.rest.name_scope():
                self.rest.add(nn.Dense(512), nn.BatchNorm(), nn.LeakyReLU(0.0),
                              nn.Dense(1024), nn.BatchNorm(),
                              nn.LeakyReLU(0.0), nn.Dense(784),
                              nn.Lambda('tanh'))
Exemplo n.º 4
0
def SequentialTextCNN(config):
    """Build a sequential TextCNN classifier (2 output classes).

    ``config`` must supply: ``vocab_size``, ``embedding_dim``,
    ``feature_map``, ``kernel_size`` (a sequence — only the first entry is
    used), and ``dropout_rate``.
    """
    net = nn.Sequential()
    with net.name_scope():
        net.add(
            nn.Embedding(input_dim=config['vocab_size'],
                         output_dim=config['embedding_dim']),
            # Swap the last two axes so the embedding dimension lands on
            # axis 1 — the channel axis Conv1D/BatchNorm(axis=1) operate on.
            nn.Lambda(lambda tokens: tokens.transpose((0, 2, 1))),
            nn.Conv1D(channels=config['feature_map'],
                      kernel_size=config['kernel_size'][0],
                      strides=1),
            nn.BatchNorm(axis=1),
            nn.Activation('relu'),
            nn.GlobalMaxPool1D(),
            nn.Dropout(rate=config['dropout_rate']),
            nn.Dense(units=2),
        )
    return net
Exemplo n.º 5
0
def get_bare_nn(num_out_channels):
    """Recurrent backbone plus a Dense head on the final time step.

    The Lambda slice keeps only index -1 along axis 0 — presumably the last
    time step of a time-major RNN output (TODO confirm layout).
    """
    take_last_step = nn.Lambda(lambda seq: seq[-1, :, :])
    model = nn.Sequential()
    model.add(
        RNN_TYPE(HIDDEN_SIZE, NUM_RECURRENT_LAYERS, dropout=RNN_DROPOUT),
        take_last_step,
        nn.Dense(num_out_channels),
    )
    return model