Example #1
    def set_text(self, text):
        self.text = text
        # derive a filesystem-friendly name from the prompt
        textpath = self.text.replace(' ', '_')

        self.textpath = textpath
        self.filename = Path(f'./{textpath}.png')
        # tokenize the prompt for CLIP and move it to the GPU
        self.encoded_text = tokenize(text).cuda()
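The method above derives the output path by replacing spaces in the prompt with underscores. A minimal standalone sketch of the same idea (the helper name prompt_to_path is hypothetical, not part of big-sleep):

from pathlib import Path

def prompt_to_path(text):
    # replace spaces so the prompt can double as a filename
    return Path(f"./{text.replace(' ', '_')}.png")

print(prompt_to_path('an apple on a table'))  # an_apple_on_a_table.png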
Example #2
File: big_sleep.py Project: alx/big-sleep
    def set_text(self, text):
        self.text = text
        # cap the slug at 255 characters to stay within filename length limits
        textpath = self.text.replace(' ', '_')[:255]
        if self.save_date_time:
            # prefix a timestamp so repeated runs do not overwrite each other
            textpath = datetime.now().strftime("%y%m%d-%H%M%S-") + textpath

        self.textpath = textpath
        self.filename = Path(f'./{textpath}.png')
        self.encoded_text = tokenize(text).cuda()
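When save_date_time is set, the slug gets a timestamp prefix so repeated runs of the same prompt write to distinct files. A minimal sketch of just that step, outside the class (the example values are illustrative):

from datetime import datetime

textpath = 'an_apple_on_a_table'[:255]  # slug already built, capped at 255 characters
textpath = datetime.now().strftime("%y%m%d-%H%M%S-") + textpath
print(textpath)  # e.g. 240131-094500-an_apple_on_a_table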
Example #3
    def set_text(self, text):
        self.text = text
        # replace spaces and dots, and cap the slug at 30 characters
        textpath = self.text.replace(' ', '_').replace('.', '_')[:30]
        #textpath = datetime.now().strftime("%Y%m%d-%H%M%S-") + textpath
        if exists(self.seed):
            # prefix the seed so different seeds write to different files
            textpath = str(self.seed) + '-' + textpath

        self.textpath = textpath
        self.filename = Path(f'./{textpath}.png')
        self.encoded_text = tokenize(text).cuda()
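This variant also replaces dots, caps the slug at 30 characters, and prefixes the seed when one is set. The same logic as a standalone function (make_textpath is a hypothetical name used only for illustration):

def make_textpath(text, seed=None):
    # slugify the prompt: spaces and dots become underscores, capped at 30 characters
    textpath = text.replace(' ', '_').replace('.', '_')[:30]
    if seed is not None:
        # prefix the seed so runs with different seeds get distinct files
        textpath = f'{seed}-{textpath}'
    return textpath

print(make_textpath('a castle made of glass', seed=42))  # 42-a_castle_made_of_glass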
Example #4
    def __init__(
        self,
        text,
        *,
        lr = .07,
        image_size = 512,
        gradient_accumulate_every = 1,
        save_every = 50,
        epochs = 20,
        iterations = 1050,
        save_progress = False,
        bilinear = False,
        open_folder = True,
        seed = None
    ):
        super().__init__()

        if exists(seed):
            # fix the random seed so the latent initialization is reproducible
            torch.manual_seed(seed)

        self.epochs = epochs
        self.iterations = iterations

        # build the image model on the GPU (a CUDA device is required)
        model = BigSleep(
            image_size = image_size,
            bilinear = bilinear
        ).cuda()

        self.model = model

        # only the latent parameters are optimized; the generator and CLIP weights stay frozen
        self.optimizer = Adam(model.model.latents.parameters(), lr)
        self.gradient_accumulate_every = gradient_accumulate_every
        self.save_every = save_every

        self.text = text
        textpath = self.text.replace(' ','_')

        self.textpath = textpath
        self.filename = Path(f'./{textpath}.png')
        self.save_progress = save_progress

        self.encoded_text = tokenize(text).cuda()
        self.open_folder = open_folder
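In the upstream big-sleep package this constructor belongs to the Imagine trainer class. Assuming that class and a CUDA-capable GPU (the model is moved with .cuda()), a typical call looks roughly like this, with illustrative hyperparameters:

from big_sleep import Imagine

dream = Imagine(
    text = 'a pyramid made of ice',
    lr = 0.07,
    image_size = 512,
    save_every = 50,
    save_progress = True
)
dream()  # runs the epochs/iterations loop and periodically writes a_pyramid_made_of_ice.png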
Example #5
    def create_text_encoding(self, text):
        # tokenize the prompt and encode it with CLIP; no gradients are needed
        # because the text encoding stays fixed during optimization
        tokenized_text = tokenize(text).cuda()
        with torch.no_grad():
            text_encoding = self.model.perceptor.encode_text(tokenized_text).detach()
        return text_encoding
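The encoding is computed under torch.no_grad() and detached because the CLIP text branch is not trained; only the image latents are optimized against it. A minimal sketch of how such a text encoding is typically compared to an image encoding (the random tensors stand in for real CLIP outputs):

import torch
import torch.nn.functional as F

# stand-ins for CLIP embeddings of one image and one prompt (dimension 512)
image_encoding = torch.randn(1, 512)
text_encoding = torch.randn(1, 512)

# cosine similarity is the usual CLIP-style objective; training maximizes it
# (or minimizes its negative) with respect to the image latents
similarity = F.cosine_similarity(image_encoding, text_encoding, dim=-1)
print(similarity.shape)  # torch.Size([1])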
Example #6
    def encode_one_phrase(self, phrase):
        # encode a single phrase with CLIP and detach it from the graph
        return perceptor.encode_text(
            tokenize(f'''{phrase}''').cuda()).detach().clone()
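The name suggests the surrounding code splits a prompt into several phrases and encodes each one separately. A hedged sketch of that pattern, using the standalone OpenAI clip package as a stand-in for the module-level perceptor and tokenize (the '|' separator is an assumption, not taken from the snippet):

import torch
import clip

# load a CLIP model; CPU is enough for a quick test
perceptor, _ = clip.load('ViT-B/32', device='cpu')

def encode_one_phrase(phrase):
    # same pattern as the snippet above, wrapped in no_grad and without .cuda()
    # so it also runs on CPU
    with torch.no_grad():
        return perceptor.encode_text(clip.tokenize(phrase)).detach().clone()

phrases = [p.strip() for p in 'a red fox | in the snow'.split('|')]
encodings = [encode_one_phrase(p) for p in phrases]
print(len(encodings), encodings[0].shape)  # 2 torch.Size([1, 512])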