Example #1
import transformers


# The enclosing class is not shown in the source; the name below is a placeholder.
class DialoguePreprocessor:
    def __init__(self,
                 tokenizer: transformers.GPT2Tokenizer,
                 max_context_length: int = 500):
        # IDs of the separator tokens used in the serialized dialogue:
        # begin of belief ('=>'), end of belief ('<|eob|>'),
        # and end of knowledge-base results ('<|eokb|>').
        self.bob, self.eob, self.eokb = tokenizer.convert_tokens_to_ids(
            ['=>', '<|eob|>', '<|eokb|>'])
        self.eos = tokenizer.eos_token_id  # end-of-sequence token ID
        self.tokenizer = tokenizer
        self.max_context_length = max_context_length  # truncation limit in tokens
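
For context, here is a minimal usage sketch. It assumes the `<|eob|>` and `<|eokb|>` markers are not part of the base GPT-2 vocabulary and must be registered with the tokenizer first (otherwise `convert_tokens_to_ids` falls back to the unknown-token ID); the placeholder class name from above is reused.

from transformers import GPT2Tokenizer

# Hypothetical setup: register the custom markers before constructing
# the preprocessor, since they are not in the base GPT-2 vocabulary.
tokenizer = GPT2Tokenizer.from_pretrained('gpt2')
tokenizer.add_special_tokens(
    {'additional_special_tokens': ['<|eob|>', '<|eokb|>']})

preprocessor = DialoguePreprocessor(tokenizer, max_context_length=500)
print(preprocessor.bob, preprocessor.eob, preprocessor.eokb)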