Example no. 1
def test_broadcast_to():
    """Broadcast a length-2 ones array up to tensors with an INT_OVERFLOW-sized
    axis and verify output shape, a sample value, and the gradient shape."""
    A = np.ones((2))
    A.attach_grad()
    # Two cases: huge leading axis on A directly, then huge trailing axis
    # on A viewed as a (2, 1) column.
    for target_shape, as_column in (((INT_OVERFLOW, 2), False),
                                    ((2, INT_OVERFLOW), True)):
        with mx.autograd.record():
            source = A.reshape(2, 1) if as_column else A
            expanded = np.broadcast_to(source, target_shape)
        assert expanded.shape == target_shape
        assert expanded[0][0] == 1
        expanded.backward()
        # Gradient collapses back to the original parameter shape.
        assert A.grad.shape == (2, )
 def forward(self, X, state):
     """Run one decoder pass: embed inputs, append the encoder context to
     every time step, run the RNN, and project to vocabulary logits.

     NOTE(review): shape comments below assume `X` is (`batch_size`,
     `num_steps`) token indices and `state[0]` is (`num_layers`,
     `batch_size`, `num_hiddens`) — confirm against the caller.
     """
     # Embed and move time to the front: (`num_steps`, `batch_size`, `embed_size`).
     embedded = self.embedding(X).swapaxes(0, 1)
     # Last layer's hidden state acts as a fixed context vector:
     # (`batch_size`, `num_hiddens`).
     ctx = state[0][-1]
     # Repeat the context once per time step so it aligns with `embedded`.
     tiled_ctx = mxnp.broadcast_to(
         ctx, (embedded.shape[0], ctx.shape[0], ctx.shape[1]))
     # Concatenate along the feature axis:
     # (`num_steps`, `batch_size`, embed_size + num_hiddens).
     rnn_input = mxnp.concatenate((embedded, tiled_ctx), 2)
     output, state = self.rnn(rnn_input, state)
     # Project to vocab and restore (`batch_size`, `num_steps`, `vocab_size`).
     output = self.dense(output).swapaxes(0, 1)
     return output, state
Example no. 3
def expand_hidden(feature, num_hiddens):
    """Tile a 1-D feature vector across hidden units.

    Returns a read-only view of shape (len(feature), num_hiddens) in which
    row i repeats feature[i] `num_hiddens` times.
    """
    # pylint: disable=redefined-outer-name
    import numpy as np
    # View the features as a column vector, then broadcast it across columns.
    column = np.array(feature).reshape(-1, 1)
    return np.broadcast_to(column, (len(feature), num_hiddens))