    # embedding list
    idx_to_emb = [w2v_model.wv[word] for word in w2v_model.wv.index2word]
    # get index of period
    pd_idx = w2v_model.wv.vocab['.'].index

    # get indices to remove
    words_to_remove = ['<unk>']
    remove_idxs = [
        w2v_model.wv.vocab[word].index for word in words_to_remove
        if word in w2v_model.wv.vocab
    ]

    # initialize Logger
    log_dir = config['generate']['log_dir']
    fn_suffix = config['generate']['log_fn_suffix']
    logger = ut.Logger(log_dir, fn_suffix)

    # instantiate LSTM model
    lstm_rnn = rnn_keras.LyricsLSTM(config, logger, generate_mode=True)

    # perform prediction
    if not input_words:

        for i in range(num_stanzas):
            # cur_word_idx = pd_idx  # alternative seed: start each stanza from a period
            cur_word_idx = np.random.randint(low=0,
                                             high=len(w2v_model.wv.index2word))
            lyrics_str = w2v_model.wv.index2word[cur_word_idx]
            lstm_rnn.model.reset_states()
            # generate words until a period is produced, keeping the
            # stanza length within [min_words, max_words]
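            # (sketch) a minimal version of the generation loop described
            # above, assuming lstm_rnn.model.predict() returns a probability
            # distribution over the vocabulary and that min_words/max_words
            # are defined elsewhere in the script:
            num_words = 1
            while num_words < max_words:
                probs = lstm_rnn.model.predict(np.array([[cur_word_idx]]))[0, -1]
                probs[remove_idxs] = 0.0      # never emit '<unk>'
                probs /= probs.sum()          # renormalize after masking
                cur_word_idx = np.random.choice(len(probs), p=probs)
                lyrics_str += ' ' + w2v_model.wv.index2word[cur_word_idx]
                num_words += 1
                if cur_word_idx == pd_idx and num_words >= min_words:
                    break  # stanza ends on a period once it is long enough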
Example #2
    if args.optimizer == "sgd":
        optimizer = optim.SGD(model.parameters(),
                              lr=args.learning_rate,
                              weight_decay=args.weight_decay)
        scheduler = ReduceLROnPlateau(optimizer,
                                      factor=args.gamma,
                                      patience=args.patience,
                                      min_lr=args.min_lr)
        # scheduler = StepLR(optimizer, 100, gamma=0.1)
    elif args.optimizer == "adam":
        optimizer = optim.Adam(model.parameters(),
                               lr=args.learning_rate,
                               weight_decay=args.weight_decay)

    # create the log directory if it does not exist yet
    os.makedirs("logs", exist_ok=True)
    logger = utils.Logger("logs/{}.log".format(args.name))

    with open("logs/{}.log".format(args.name), "a") as f:
        f.write(str(args))
        f.write("\nParameters : " + str(n_parameters))
        if hasattr(model, "n_filters"):
            f.write("\nFilters : " + str(model.n_filters))
        else:
            f.write("\nFilters : _ ")
        f.write("\n*******\n")

    print("-" * 80 + "\n")
    test_loss = 0
    test_acc = torch.zeros(len(topk))
    lr = optimizer.state_dict()["param_groups"][0]["lr"]
    for epoch in range(1, args.epochs + 1):
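        # (sketch) a typical loop body consistent with the setup above,
        # assuming hypothetical train_epoch/evaluate helpers defined elsewhere:
        train_loss = train_epoch(model, train_loader, optimizer)
        test_loss, test_acc = evaluate(model, test_loader, topk)
        if args.optimizer == "sgd":
            scheduler.step(test_loss)  # ReduceLROnPlateau steps on a metric
        lr = optimizer.state_dict()["param_groups"][0]["lr"]
        print("epoch {:3d} | lr {:.2e} | test loss {:.4f}".format(
            epoch, lr, test_loss))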
Example #3
    def init_log(self, folder=''):
        # note: writes into self.folder; the 'folder' argument is unused here
        self.logfile = utils.ensure_path(os.path.join(self.folder, 'log.txt'))
        sys.stdout = utils.Logger(self.logfile)
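For sys.stdout to be replaced this way, utils.Logger has to implement the
file-object protocol. A minimal tee-style sketch of such a class (an
illustration of the pattern, not the actual utils implementation):

import sys

class Logger:
    """Duplicate every write to the real stdout and to a log file."""
    def __init__(self, logfile):
        self.terminal = sys.stdout
        self.log = open(logfile, 'a')

    def write(self, message):
        self.terminal.write(message)
        self.log.write(message)

    def flush(self):
        # keep sys.stdout.flush() callers working after the redirect
        self.terminal.flush()
        self.log.flush()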