import os
import shutil


def debug(self, epoch):
    # On the very first call, start from a clean debug directory.
    if epoch == 0:
        if os.path.exists(self.debugFile):
            shutil.rmtree(self.debugFile)
    epoch += 1
    if not os.path.exists(self.debugFile):
        os.mkdir(self.debugFile)
    if not os.path.exists(self.debugFile + 'epoch_%d' % epoch):
        os.mkdir(self.debugFile + 'epoch_%d' % epoch)
    saveModel(self.model, self.debugFile + 'epoch_%d/' % epoch, 'model.pth')
    if self.validDebug:
        validOut = generator(self.model, self.validData, Parameter.validSrcFilePath,
                             self.index2word, Parameter.validDictPath,
                             Parameter.search_method, Parameter.beam_size)
        saveOutput(validOut, self.debugFile + 'epoch_%d/' % epoch, 'valid')
        bleu_valid, sari_valid, fkgl_valid = eval_score(
            Parameter.validSrcOri, Parameter.validTgtOri,
            self.debugFile + 'epoch_%d/valid' % epoch)
    if self.testDebug:
        testOut = generator(self.model, self.testData, Parameter.testSrcFilePath,
                            self.index2word, Parameter.testDictPath,
                            Parameter.search_method, Parameter.beam_size)
        saveOutput(testOut, self.debugFile + 'epoch_%d/' % epoch, 'test')
        bleu_test, sari_test, fkgl_test = eval_score(
            Parameter.testSrcOri, Parameter.testTgtOri,
            self.debugFile + 'epoch_%d/test' % epoch)
    # Append this epoch's scores to a running log.
    with open(self.debugFile + 'score.txt', 'a') as f:
        f.write('EPOCH: %d\n' % epoch)
        if self.validDebug:
            f.write('valid:\tBLEU: %.2f\tSARI: %.2f\tFKGL: %.2f\n'
                    % (bleu_valid, sari_valid, fkgl_valid))
        if self.testDebug:
            f.write('test:\tBLEU: %.2f\tSARI: %.2f\tFKGL: %.2f\n'
                    % (bleu_test, sari_test, fkgl_test))
        f.write('++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++\n')
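A minimal usage sketch for the hook above, not taken from the source: the driver loop and the names `trainer` and `train_one_epoch` are assumptions; only the wipe-on-epoch-0 and 1-indexed `epoch_%d/` behaviour comes from the code itself.

# Hypothetical driver: call debug() once per 0-indexed epoch; the output
# directories come out 1-indexed because of the `epoch += 1` above.
# `trainer` and `train_one_epoch` are placeholder names.
for epoch in range(Parameter.EPOCH):
    train_one_epoch(trainer.model, trainDataSet)  # hypothetical training step
    trainer.debug(epoch)  # saves model.pth, valid/test outputs and score.txt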
import os
from contextlib import redirect_stdout
from io import StringIO
from typing import List, Union


def do_test(self, args: Union[str, List[str]], expected_fname: str) -> None:
    args = args.split() if isinstance(args, str) else args
    # Fill in default template/output/cache directories unless the caller
    # already supplied them.
    if '-td' not in args:
        args += ['-td', template_dir]
    if '-o' not in args:
        args += ['-o', model_dir]
    if '-cd' not in args:
        args += ['-cd', cache_dir]
    actual = StringIO()
    expected_file = os.path.join(test_output_log_dir, expected_fname)
    with redirect_stdout(actual):
        try:
            generator(args)
        except HelpPrinted:
            pass
    if os.path.exists(expected_file):
        # Golden file exists: compare captured stdout against it.
        with open(expected_file) as f:
            expected = f.read()
        self.assertEqual(expected.strip(), actual.getvalue().strip())
    else:
        # First run: write the golden file and force a rerun.
        with open(expected_file, 'w') as f:
            f.write(actual.getvalue())
        self.fail(f"{expected_file} created - rerun test")
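A hedged sketch of how the golden-file helper above might be used from a unittest.TestCase; the test class, test methods, CLI flags, and expected-file names are all hypothetical, and only `do_test`'s signature is taken from the snippet.

import unittest

class GeneratorCliTest(unittest.TestCase):  # assumes do_test above is a method of this class
    def test_help_output(self):
        # Arguments may be a string or a list; on the first run the expected
        # file is created and the test asks to be rerun.
        self.do_test('--help', 'help_output.txt')

    def test_default_dirs(self):
        self.do_test(['--verbose'], 'default_dirs_output.txt')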
def makesong():
    # Assemble a song from the per-cluster pitch/beat models and drum samples.
    # self, kick_sample, bdpitch, snare_sample, snpitch, bp2tp and bt2tt are
    # expected to be available from the enclosing scope.
    generator(self.bass_cluster.fomm_pitch, self.bass_cluster.fomm_beats,
              self.treb_cluster.fomm_pitch, self.treb_cluster.fomm_beats,
              self.bass_cluster.sample.sample, self.treb_cluster.sample.sample,
              kick_sample, bdpitch, self.kick_cluster.fomm_beats,
              snare_sample, snpitch, self.snare_cluster.fomm_beats,
              bp2tp, bt2tt)
import generate


def run():
    g = generate.generator()
    try:
        # Other export modes, kept for reference:
        # g.export_batch()
        # g.export_batch_task()
        # g.export_task()
        g.export_data()
    finally:
        g.close()
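A common entry-point guard for the helper above; the guard itself is an assumption and does not appear in the snippet.

if __name__ == '__main__':
    run()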
import torch
import torch.nn as nn
import torch.optim as optim
from torch.autograd import Variable  # old-style (pre-0.4) PyTorch API, kept as in the original

import utils


def train(model, source, target, lr, conf, idx2char):
    model.train()
    opt = optim.Adam(model.parameters(), lr=lr)
    loss_fn = nn.NLLLoss()
    total_loss = 0
    data_size = len(source)
    for batch, (x, y, mask) in enumerate(
            utils.batchify(source, target, conf.stride, conf.batch_size, False)):
        batch_size, max_len = x.shape
        x = Variable(torch.Tensor(x.tolist()), volatile=False)
        y = Variable(torch.LongTensor(y.tolist()))
        enc_h = model.encoder.init_gru(batch_size)
        model.zero_grad()
        batch_loss = 0
        if conf.cuda:
            x = x.cuda()
            y = y.cuda()
            enc_h = enc_h.cuda()
        context, dec_h = model.encoder(x, enc_h)
        translation = []
        gen = generator(model, idx2char)  # built here but not used inside the loop
        # Teacher forcing: feed the gold character at step i-1 and predict step i.
        for i in range(1, y.size(1)):
            next_char, dec_h, attn = model(y[:, i - 1], context, dec_h)
            batch_loss += loss_in_batch(next_char, y[:, i], mask[:, i], loss_fn)
            char_idx = next_char.data.topk(1)[1][0][0]
            translation.append(char_idx)
        batch_loss /= batch_size
        print(translation)
        batch_loss.backward()
        opt.step()
        total_loss += batch_loss.data[0] * batch_size
        if (batch + 1) % conf.log_interval == 0:
            size = conf.batch_size * batch + batch_size
            print("[{:5d}/{:5d}] batches\tLoss: {:5.6f}".format(
                size, data_size, total_loss / size))
    return total_loss / data_size
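A hedged sketch of an outer loop driving `train(...)` above; `conf.epochs`, `conf.lr`, and the pre-built `model`, `source`, `target`, `idx2char` objects are assumptions about the surrounding setup code.

# Hypothetical epoch loop around train(); every name used here except train()
# itself is assumed to be prepared by setup code elsewhere.
for epoch in range(conf.epochs):  # conf.epochs is an assumption
    avg_loss = train(model, source, target, conf.lr, conf, idx2char)
    print('epoch {:3d}  avg loss per sample: {:.4f}'.format(epoch + 1, avg_loss))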
import csv

# (Tail of a girl-happiness helper; its def line is truncated in the original.)
if girl.types == 'Normal':
    girl.happinessNormal(gift)
if girl.types == 'Desperate':
    girl.happinessDesperate(gift)


def calBoyHappiness(girl, boy, gift):
    """Calculate boy's happiness."""
    if boy.types == 'Miser':
        setattr(boy, boy.happiness, boy.budget - sum([i.price for i in gift]))
    if boy.types == 'Generous':
        setattr(boy, boy.happiness, girl.happiness)
    if boy.types == 'Geeks':
        setattr(boy, boy.happiness, girl.intel_level)


generator()
boycsv = open('boy.csv')
girlcsv = open('girl.csv')
giftcsv = open('gift.csv')
readBoy = csv.reader(boycsv, delimiter=',')
readGirl = csv.reader(girlcsv, delimiter=',')
readGift = csv.reader(giftcsv, delimiter=',')
B = [Boy(row[0], int(row[1]), int(row[2]), int(row[3]), int(row[4]), row[5])
     for row in readBoy]
G = [Girl(row[0], int(row[1]), int(row[2]), int(row[3]), row[4])
     for row in readGirl]
gift = [Gift(row[0], int(row[1]), int(row[2]), row[3]) for row in readGift]
C = []
count = 0
for g in G:  # (loop body truncated in the original)
from generate import generator
from model import function_model, training_model

fm = function_model(2, 1)
tm = training_model(2, fm)
g = generator(200)
tm.fit_generator(g, steps_per_epoch=100, epochs=100, verbose=1)
# (Tail of the training branch; the model/optimizer construction that these
#  keyword arguments belong to is truncated in the original.)
        betas=(Parameter.beta_1, Parameter.beta_2),
        eps=Parameter.eps,
        weight_decay=Parameter.weight_decay),
        d_model=Parameter.embedding_dim,
        warmup_steps=Parameter.warmup_steps,
        factor=Parameter.factor)
    fit = fit(model=model, criterion=criterion, optimizer=optimizer,
              checkpoint=checkpoint, epoch=Parameter.EPOCH,
              numBatchPrint=Parameter.numBatchPrint,
              validDebug=Parameter.validDebug, testDebug=Parameter.testDebug,
              debugFile=Parameter.debugFile, maxlen=Parameter.maxLen,
              gradient_clipper=Parameter.gradient_clipper)
    fit(trainDataSet, validDataSet, testDataSet, index2word)
else:
    # Inference only: load a saved model, generate on the test set, and score it.
    model.load_state_dict(
        loadModel(Parameter.modelPath, Parameter.modelFile))
    model.cuda()
    candidate = generator(model, testDataSet, Parameter.testSrcFilePath,
                          index2word, Parameter.testDictPath,
                          Parameter.search_method, Parameter.beam_size)
    saveOutput(candidate, Parameter.outputPath, Parameter.outputFile)
    eval_score(Parameter.testSrcOri, Parameter.testTgtOri,
               Parameter.outputPath + Parameter.outputFile,
               Parameter.outputPath)