Example #1
def __init__(
    self,
    backbone: str = "Helsinki-NLP/opus-mt-en-ro",
    loss_fn: Optional[Union[Callable, Mapping, Sequence]] = None,
    optimizer: Type[torch.optim.Optimizer] = torch.optim.Adam,
    metrics: Union[pl.metrics.Metric, Mapping, Sequence, None] = None,
    learning_rate: float = 3e-4,
    val_target_max_length: Optional[int] = 128,
    num_beams: Optional[int] = 4,
    n_gram: int = 4,
    smooth: bool = False,
):
    self.save_hyperparameters()
    super().__init__(
        backbone=backbone,
        loss_fn=loss_fn,
        optimizer=optimizer,
        metrics=metrics,
        learning_rate=learning_rate,
        val_target_max_length=val_target_max_length,
        num_beams=num_beams,
    )
    # BLEU is kept as a separate attribute instead of being passed through
    # `metrics`; `n_gram` and `smooth` only configure this metric.
    self.bleu = BLEUScore(
        n_gram=n_gram,
        smooth=smooth,
    )
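The two trailing arguments only configure the BLEU metric stored on the task. Below is a minimal standalone sketch of what `n_gram` and `smooth` control; the import path and the sample corpus are assumptions, not part of the excerpt, and it assumes the tokenized-corpus interface used in Example #2 below (newer torchmetrics releases expect untokenized strings instead).

from torchmetrics import BLEUScore  # assumed import; the excerpt above omits its imports

preds = ["the cat is on the mat".split()]
targets = [["there is a cat on the mat".split(), "a cat is on the mat".split()]]

# `n_gram` sets the highest n-gram order entering the geometric mean of
# precisions; `smooth` toggles additive smoothing of those precisions, which
# mainly matters when higher-order n-gram matches are sparse.
for n_gram in (2, 4):
    for smooth in (False, True):
        bleu = BLEUScore(n_gram=n_gram, smooth=smooth)
        print(n_gram, smooth, bleu(preds, targets))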
Example #2
def test_bleu_score(smooth, expected):
    # one tokenized candidate sentence with two tokenized reference translations
    translate_corpus = ['the cat is on the mat'.split()]
    reference_corpus = [[
        'there is a cat on the mat'.split(), 'a cat is on the mat'.split()
    ]]
    metric = BLEUScore(smooth=smooth)
    assert torch.allclose(metric(translate_corpus, reference_corpus),
                          torch.tensor(expected), rtol=1e-4)
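The `(smooth, expected)` pair is supplied by a `pytest.mark.parametrize` decorator that the excerpt omits. For the unsmoothed case the expected value can be worked out by hand; a minimal standalone sketch follows, again assuming a torchmetrics import and the tokenized-corpus interface used above.

import torch
from torchmetrics import BLEUScore  # assumed import; the excerpt omits its imports

translate_corpus = ["the cat is on the mat".split()]
reference_corpus = [[
    "there is a cat on the mat".split(), "a cat is on the mat".split()
]]

# Without smoothing, the brevity penalty is 1 (candidate and closest reference
# both have 6 tokens) and the clipped n-gram precisions are 5/6, 4/5, 3/4 and
# 2/3, so BLEU = (5/6 * 4/5 * 3/4 * 2/3) ** 0.25 = (1/3) ** 0.25 ≈ 0.7598.
metric = BLEUScore(n_gram=4, smooth=False)
score = metric(translate_corpus, reference_corpus)
assert torch.allclose(score, torch.tensor((1 / 3) ** 0.25), rtol=1e-4)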