def _run_test(self, requires_grad):
    """Check that ELMo LSTM gradients flow iff ``requires_grad`` is set.

    Builds an ``ElmoTokenEmbedder`` from the fixture option/weight files,
    runs one forward/backward pass on random character ids, and asserts
    that every ``_elmo_lstm`` parameter received a gradient when
    ``requires_grad=True`` and none did when ``requires_grad=False``.

    Parameters
    ----------
    requires_grad : bool
        Whether the embedder's ELMo weights should be fine-tunable.
    """
    options_file = os.path.join(FIXTURES, 'options.json')
    weight_file = os.path.join(FIXTURES, 'lm_weights.hdf5')
    embedder = ElmoTokenEmbedder(options_file, weight_file, requires_grad=requires_grad)
    batch_size = 3
    seq_len = 4
    # Character ids are in [0, 262) with 50 characters per token.
    # NOTE: the deprecated ``torch.autograd.Variable`` wrapper was removed —
    # tensors are Variables since PyTorch 0.4, and the sibling tests pass
    # the raw tensor directly.
    char_ids = torch.from_numpy(
            numpy.random.randint(0, 262, (batch_size, seq_len, 50)))
    embeddings = embedder(char_ids)
    loss = embeddings.sum()
    loss.backward()
    elmo_grads = [param.grad
                  for name, param in embedder.named_parameters()
                  if '_elmo_lstm' in name]
    if requires_grad:
        # None of the elmo grads should be None.
        assert all(grad is not None for grad in elmo_grads)
    else:
        # All of the elmo grads should be None.
        assert all(grad is None for grad in elmo_grads)
def _run_test_with_vocab_to_cache(self, requires_grad):
    """Gradient-flow check for an embedder built with a cached vocabulary.

    Asserts that ``_elmo_lstm`` parameters (excluding the cached token
    embedder) receive gradients exactly when ``requires_grad`` is True,
    and that the cached ``_token_embedder`` parameters never do.
    """
    cached_vocab = ['<pad>', 'hello', 'world']
    embedder = ElmoTokenEmbedder(self.options_file,
                                 self.weight_file,
                                 requires_grad=requires_grad,
                                 vocab_to_cache=cached_vocab)
    word_inputs = torch.LongTensor([[[1, 2]]])
    # Two forward passes exercise any state carried between calls.
    for _ in range(2):
        output = embedder(word_inputs, word_inputs)
    output.sum().backward()
    lstm_grads = []
    for name, param in embedder.named_parameters():
        if '_elmo_lstm' in name and '_token_embedder' not in name:
            lstm_grads.append(param.grad)
    if requires_grad:
        # None of the elmo grads should be None.
        assert all([grad is not None for grad in lstm_grads])
    else:
        # All of the elmo grads should be None.
        assert all([grad is None for grad in lstm_grads])
    # The cached token embedder stays detached from the graph either way.
    assert all([param.grad is None
                for name, param in embedder.named_parameters()
                if '_token_embedder' in name])
def _run_test(self, requires_grad):
    """Verify ELMo LSTM gradients exist exactly when ``requires_grad`` is set.

    Runs a single forward/backward pass on random character ids and checks
    the gradients of every ``_elmo_lstm`` parameter.
    """
    embedder = ElmoTokenEmbedder(self.options_file,
                                 self.weight_file,
                                 requires_grad=requires_grad)
    num_sentences, num_tokens = 3, 4
    # 50 character ids per token, each drawn from [0, 262).
    character_ids = torch.from_numpy(
            numpy.random.randint(0, 262, (num_sentences, num_tokens, 50)))
    output = embedder(character_ids)
    output.sum().backward()
    grads = [param.grad
             for name, param in embedder.named_parameters()
             if '_elmo_lstm' in name]
    if requires_grad:
        # None of the elmo grads should be None.
        assert all([grad is not None for grad in grads])
    else:
        # All of the elmo grads should be None.
        assert all([grad is None for grad in grads])
def _run_test(self, requires_grad):
    """Gradient-flow check across repeated forward passes.

    Runs the embedder twice on the same random character ids before the
    backward pass, then asserts ``_elmo_lstm`` parameter gradients are
    present exactly when ``requires_grad`` is True.
    """
    embedder = ElmoTokenEmbedder(self.options_file,
                                 self.weight_file,
                                 requires_grad=requires_grad)
    num_sentences, num_tokens = 3, 4
    # 50 character ids per token, each drawn from [0, 262).
    character_ids = torch.from_numpy(
            numpy.random.randint(0, 262, (num_sentences, num_tokens, 50)))
    # Two passes exercise any state carried between calls.
    for _ in range(2):
        output = embedder(character_ids)
    output.sum().backward()
    grads = [param.grad
             for name, param in embedder.named_parameters()
             if '_elmo_lstm' in name]
    if requires_grad:
        # None of the elmo grads should be None.
        assert all([grad is not None for grad in grads])
    else:
        # All of the elmo grads should be None.
        assert all([grad is None for grad in grads])