def _select_tokens_to_mask(self, tokens: torch.Tensor, mask_prob: float) -> torch.Tensor:
    # Dispatch to the configured masking strategy and return a 0/1 mask over tokens.
    if self.masking_strategy == MaskingStrategy.RANDOM:
        # Mask each token independently with probability mask_prob.
        return random_masking(tokens, mask_prob)
    elif self.masking_strategy == MaskingStrategy.FREQUENCY:
        # Bias masking toward tokens according to precomputed sampling weights.
        return frequency_based_masking(tokens, self.token_sampling_weights, mask_prob)
    else:
        raise NotImplementedError(
            "Specified masking strategy isn't currently implemented."
        )
def _select_tokens_to_mask(self, tokens: torch.Tensor, mask_prob: float) -> torch.Tensor:
    # Same dispatch as above, but with optional protection of the BOS token
    # under random masking.
    if self.masking_strategy == MaskingStrategy.RANDOM:
        mask = random_masking(tokens, mask_prob)
        if not self.mask_bos:
            # Never mask the beginning-of-sentence token: zero out its mask entries.
            bos_idx = self.vocab.idx[self.token_tensorizer.bos_token]
            mask *= (tokens != bos_idx).long()
        return mask
    elif self.masking_strategy == MaskingStrategy.FREQUENCY:
        return frequency_based_masking(tokens, self.token_sampling_weights, mask_prob)
    else:
        raise NotImplementedError(
            "Specified masking strategy isn't currently implemented."
        )
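# A minimal, self-contained sketch of the two masking helpers referenced above.
# The real random_masking / frequency_based_masking implementations and their
# exact signatures are assumptions here, shown only to illustrate how each
# strategy could produce a 0/1 mask tensor.
import torch


def random_masking(tokens: torch.Tensor, mask_prob: float) -> torch.Tensor:
    # Independent Bernoulli(mask_prob) decision per token position.
    probs = torch.full_like(tokens, mask_prob, dtype=torch.float)
    return torch.bernoulli(probs).long()


def frequency_based_masking(
    tokens: torch.Tensor, token_sampling_weights: torch.Tensor, mask_prob: float
) -> torch.Tensor:
    # Scale each position's masking probability by a precomputed per-token
    # weight (e.g. derived from corpus frequencies), rescaled so the expected
    # fraction of masked positions stays roughly at mask_prob.
    weights = token_sampling_weights[tokens].float()
    probs = weights * (mask_prob / weights.mean().clamp(min=1e-12))
    return torch.bernoulli(probs.clamp(max=1.0)).long()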