def testSequenceClassifierWithSelfAttentionEncoder(self):
    """Check the classifier tolerates encoders that return no state.

    SelfAttentionEncoder does not return a state, so this test simply
    ensures a forward training pass does not crash in that case.
    """
    model = models.SequenceClassifier(
        inputters.WordEmbedder(10),
        encoders.SelfAttentionEncoder(
            num_layers=2, num_units=16, num_heads=4, ffn_inner_dim=32))
    features_file, labels_file, data_config = self._makeToyClassifierData()
    model.initialize(data_config)
    dataset = model.examples_inputter.make_training_dataset(
        features_file, labels_file, 16)
    # Bug fix: iterator objects have no .next() method in Python 3 (that
    # was the Python 2 protocol); use the next() builtin instead.
    features, labels = next(iter(dataset))
    model(features, labels, training=True)
def testSequenceClassifier(self, mode):
    """Run the generic model checks on a mean-encoder sequence classifier."""
    embedder = inputters.WordEmbedder(10)
    encoder = encoders.MeanEncoder()
    model = models.SequenceClassifier(embedder, encoder)
    features_file, labels_file, data_config = self._makeToyClassifierData()
    self._testGenericModel(
        model,
        mode,
        features_file,
        labels_file,
        data_config,
        prediction_heads=["classes"],
        metrics=["accuracy"],
        params={"optimizer": "SGD", "learning_rate": 0.1})
def testSequenceClassifier(self, mode):
    """Run the generic model checks on the legacy-API sequence classifier."""
    # NOTE(review): this method shares its name with another
    # testSequenceClassifier in this file; Python keeps only the last
    # definition in a class body — confirm which one is meant to run.
    embedder = inputters.WordEmbedder("source_vocabulary", 10)
    model = models.SequenceClassifier(
        embedder, encoders.MeanEncoder(), "target_vocabulary")
    features_file, labels_file, metadata = self._makeToyClassifierData()
    training_params = {
        "optimizer": "GradientDescentOptimizer",
        "learning_rate": 0.1,
    }
    self._testGenericModel(
        model,
        mode,
        features_file,
        labels_file,
        metadata,
        prediction_heads=["classes"],
        metrics=["accuracy"],
        params=training_params)