Example 1
def test_check_output(self):
    # Part of a PaddlePaddle OpTest case; `np` is numpy and `cpt` is paddle.compat.
    ids = self.inputs['Ids']
    flatten_idx = ids.flatten()
    # Pick one of the ids actually present in the input to act as the padding index.
    padding_idx = np.random.choice(flatten_idx, 1)[0]
    # Rows looked up with padding_idx are expected to be all zeros
    # (31 is the embedding width used by this test).
    self.outputs['Out'][np.squeeze(ids == padding_idx)] = np.zeros(31)
    self.attrs = {'padding_idx': cpt.long_type(padding_idx)}
    self.check_output()
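For reference, the expected output in this kind of test can be built directly in NumPy: gather rows from the table, then zero every row whose id equals padding_idx. The snippet below is a self-contained sketch of that reference computation; the sizes and variable names are illustrative, not taken from the test above.

    import numpy as np

    vocab_size, embedding_dim = 17, 31
    table = np.random.random((vocab_size, embedding_dim)).astype("float32")
    ids = np.random.randint(0, vocab_size, size=(4, 1)).astype("int64")
    padding_idx = int(np.random.choice(ids.flatten(), 1)[0])

    # Gather rows, then zero the rows selected by padding_idx.
    expected = table[ids.flatten()]
    expected[ids.flatten() == padding_idx] = np.zeros(embedding_dim)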
Example 2
    # (tail of an embedding(...) call; its opening line is missing from this excerpt)
                        embedding_dim,
                        padding_idx=int(padding_idx),
                        vocab_embeddings=table,
                        compare=False)
    # print('outputs {}'.format(outputs))

    # lookup with 3-D tensor ids
    ids = np.random.randint(low=0, high=vocab_size,
                            size=(2, 4, 5)).astype("int32")
    embedding("embedding_tensorIds",
              ids,
              vocab_size,
              embedding_dim,
              vocab_embeddings=table,
              compare=False)

    # 3-D tensor ids with a padding index
    ids = np.random.randint(low=0, high=vocab_size,
                            size=(2, 4, 5)).astype("int32")
    flatten_idx = ids.flatten()
    padding_idx = np.random.choice(flatten_idx, 1)[0]
    # print('padding_idx {}'.format(padding_idx))
    outputs = embedding("embedding_tensorIds_paddings",
                        ids,
                        vocab_size,
                        embedding_dim,
                        padding_idx=cpt.long_type(padding_idx),
                        vocab_embeddings=table,
                        compare=False)
    # print('outputs {}'.format(outputs))
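A quick sanity check for this padding case, assuming the helper returns an array of shape ids.shape + (embedding_dim,) and zeroes the padded rows (the usual embedding semantics):

    assert np.allclose(outputs[ids == padding_idx], 0.0)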
Example 3
    embedding("embedding_none_weight", ids, vocab_size, embedding_dim, compare=False)

    # 1-D ids with a padding index
    ids = np.random.randint(0, vocab_size, 4).astype("int32")
    ids = np.squeeze(ids)
    padding_idx = np.random.choice(ids, 1)[0]
    # print('padding_idx {}, ids {}'.format(padding_idx, ids))
    outputs = embedding("embedding_paddings", ids, vocab_size, embedding_dim, padding_idx=int(padding_idx), vocab_embeddings=table, compare=False)
    # print('outputs {}'.format(outputs))

    # corner case: padding_idx = -1, i.e. a negative index for the last row (vocab_size - 1)
    ids = np.random.randint(0, vocab_size, 4).astype("int32")
    pick = np.random.choice(4, 1)[0]  # randomly pick a position to set to vocab_size - 1
    ids[pick] = vocab_size - 1
    padding_idx = -1
    # print('padding_idx {}, ids {}'.format(padding_idx, ids))
    outputs = embedding("embedding_paddings_neg1", ids, vocab_size, embedding_dim, padding_idx=int(padding_idx), vocab_embeddings=table, compare=False)
    # print('outputs {}'.format(outputs))    

    # lookup with 3-D tensor ids
    ids = np.random.randint(low=0, high=vocab_size, size=(2, 4, 5)).astype("int32")
    embedding("embedding_tensorIds", ids, vocab_size, embedding_dim, vocab_embeddings=table, compare=False)
    
    # 3-D tensor ids with a padding index
    ids = np.random.randint(low=0, high=vocab_size, size=(2, 4, 5)).astype("int32")
    flatten_idx = ids.flatten()
    padding_idx = np.random.choice(flatten_idx, 1)[0]
    # print('padding_idx {}'.format(padding_idx))
    outputs = embedding("embedding_tensorIds_paddings", ids, vocab_size, embedding_dim, padding_idx=cpt.long_type(padding_idx), vocab_embeddings=table, compare=False)
    # print('outputs {}'.format(outputs))
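The `embedding` helper called throughout Examples 2 and 3 is local to the test file and its body is not shown here. A plausible NumPy-only stand-in with the same call signature, inferred purely from the calls above (so treat every detail as an assumption), might look like:

    import numpy as np

    def embedding(name, ids, vocab_size, embedding_dim,
                  padding_idx=None, vocab_embeddings=None, compare=False):
        # Hypothetical reference: gather rows from the table, zero padded rows.
        table = vocab_embeddings
        if table is None:
            table = np.random.random((vocab_size, embedding_dim)).astype("float32")
        if padding_idx is not None and padding_idx < 0:
            padding_idx += vocab_size  # e.g. -1 refers to the last row
        out = table[ids.flatten()].reshape(ids.shape + (embedding_dim,))
        if padding_idx is not None:
            out[ids == padding_idx] = 0.0
        # `name` and `compare` only mirror the real helper's signature here.
        return out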