def trainprocess():
    """Run training for the dataset selected in the UI.

    Reads the dataset symbol from the `txt` entry widget.  If a symbol is
    present, runs TR.process(...) and reports success via a message box;
    otherwise shows an input-error dialog asking the user to pick a dataset.
    """
    dataset_symbol = txt.get()
    # Guard clause: nothing selected -> prompt and bail out early.
    if not dataset_symbol:
        tm.showinfo("Input error", "Select Dataset")
        return
    TR.process(dataset_symbol, 4)
    tm.showinfo("Input", "Training Successfully Finished")
Example #2
0
def get_sql():
    """Load the full stock_table from the database and hand it to process().

    Returns whatever process() produces for the loaded DataFrame.
    """
    stock_df = pd.read_sql('SELECT * FROM stock_table', con=db_connection)
    # The persisted table carries its old pandas index as a literal
    # 'index' column; discard it before downstream processing.
    stock_df = stock_df.drop(columns=['index'])

    print(stock_df.columns)
    return process(stock_df, db_connection)
Example #3
0
    # Build the 'cosine_multilabel_tri' experiment configuration by mutating
    # the shared nparams template in place, then snapshotting it.
    # NOTE(review): nparams/params are initialized earlier in this function
    # (not visible in this chunk) — confirm their defaults before relying on them.
    nparams["training"]["loss_func"] = 'cosine'
    nparams["training"]["optimizer"] = 'adam'
    nparams["training"]["normalize_y"] = True      # presumably scales targets — confirm in trainer
    nparams["cnn"]["architecture"] = '33'
    nparams["cnn"]["n_dense"] = 0                  # no extra dense layers
    nparams["cnn"]["dropout_factor"] = 0.7
    nparams["cnn"]["final_activation"] = 'linear'
    nparams["dataset"]["nsamples"] = 'all'
    nparams["dataset"]["dataset"] = 'MuMu-albums'
    # Three meta-suffixes select which precomputed input matrices feed this
    # (tri-input) experiment variant.
    nparams["dataset"]["meta-suffix"] = meta_suffix
    nparams["dataset"]["meta-suffix2"] = meta_suffix2
    nparams["dataset"]["meta-suffix3"] = meta_suffix3
    add_extra_params(nparams, extra_params)
    # deepcopy so later mutations of the nparams template cannot alias
    # this experiment's stored configuration.
    params['cosine_multilabel_tri'] = copy.deepcopy(nparams)

    # Only the configuration matching the requested suffix is returned.
    return params[suffix]

if __name__ == '__main__':
    # CLI entry point: parse the experiment suffixes, look up the matching
    # configuration, and run the experiment.
    parser = argparse.ArgumentParser(
        description='Run experiment',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('suffix', default="class_bow", help='Suffix of experiment params')
    parser.add_argument('meta_suffix', nargs='?', default="", help='Suffix of input matrix for experiment')
    parser.add_argument('meta_suffix2', nargs='?', default="", help='Suffix of input matrix for experiment')
    parser.add_argument('meta_suffix3', nargs='?', default="", help='Suffix of input matrix for experiment')
    parser.add_argument('extra_params', nargs='?', default="", help='Specific extra parameters')
    args = parser.parse_args()
    # Fixed: the original used the Python 2 print *statement*, which is a
    # SyntaxError under Python 3 (and this file relies on Py3-only f-strings).
    print(args.extra_params)
    params = get_configuration(args.suffix, args.meta_suffix, args.meta_suffix2,
                               args.meta_suffix3, args.extra_params)
    process(params)
Example #4
0
        # Rebuild the network architecture from its JSON spec (read from the
        # file handle `f` opened earlier in this partially-visible function).
        model = models.model_from_json(f.read())
    # NOTE(review): `wfile` is defined above this chunk — presumably the
    # path to the pretrained weights file; confirm against the full function.
    model.load_weights(wfile)
    return model


if __name__ == '__main__':
    # Interactive nearest-neighbor lookup: read words from stdin, embed each
    # with the model, and print the 5 closest / 5 farthest vocabulary words.
    model = load_model()
    words, embeddings = load_data()
    word_len = model.input_shape[1]  # fixed input length the model accepts

    # Fixed: `raw_input` is Python-2-only; Python 3 uses input().
    word = input('Enter a word [enter nothing to exit]: ')

    while word:
        try:
            vector = model.predict(
                process(word, word_len).reshape(1, word_len))[0]
            nn_words, nn_dists = rank_nearest_neighbors(
                vector, words, embeddings)
            print('Closest words to "%s":' % word)
            # 1-based rank of the first of the bottom-5 entries.
            tail_start = len(words) - 4
            # Loop variables renamed (`neighbor`) so they no longer shadow the
            # outer `word` being queried.
            for rank, (neighbor, dist) in enumerate(zip(nn_words[:5], nn_dists[:5]),
                                                    1):
                print('  %d. "%s"  [%.3f]' % (rank, neighbor, dist))
            print('    ...')
            for rank, (neighbor, dist) in enumerate(zip(nn_words[-5:], nn_dists[-5:]),
                                                    tail_start):
                print('  %d. "%s"  [%.3f]' % (rank, neighbor, dist))
        # Fixed: `except ValueError, e` is Python-2-only syntax and a
        # SyntaxError under Python 3.
        except ValueError as e:
            print(e)
        word = input('Enter another word [enter nothing to exit]: ')
    # optimizer and criterion
    optimizer = optimizer_dict[optimizer_name](model.parameters(), **optimizer_options)
    criterion = nn.CrossEntropyLoss()
    criterion = criterion.to(device)

    # initialize data
    ds_trn, ds_val = imagenet_1k()
    dl_trn = DataLoader(ds_trn, batch_size, shuffle=True, num_workers=4, pin_memory=True)
    dl_val = DataLoader(ds_val, batch_size, num_workers=4, pin_memory=True)

    # release unused objects (the DataLoaders hold their own dataset refs)
    del ds_trn, ds_val
    del parser, args, optimizer_name, optimizer_options, optimizer_dict

    # obtain best accuracy from checkpoint
    _, best_acc1, _ = process(dl_val, model, criterion, None, mode='eval', device=device, progress=True)
    print(f" *** Starting Acc@1 {best_acc1:.4f}")

    set_all_rng_seed(2019)

    for epoch in range(epochs):
        l, t1, t5 = process(dl_trn, model, criterion, optimizer, mode='train')
        print(f" * Train Loss {l:.4f} Acc@1 {t1:.4f} Acc@5 {t5:.4f}")
        # BUG FIX: evaluate on the *validation* loader (dl_val), not dl_trn —
        # the original re-ran 'eval' on the training set, so the printed
        # "Test" metrics and the best-checkpoint selection were computed on
        # training data.
        l, t1, t5 = process(dl_val, model, criterion, None, mode='eval')
        print(f" ** Test Loss {l:.4f} Acc@1 {t1:.4f} Acc@5 {t5:.4f}")
        is_best = t1 > best_acc1
        best_acc1 = max(t1, best_acc1)
        checkpoint_filename = f'checkpoint-epoch-{epoch}.pt'
        torch.save(model.state_dict(), checkpoint_filename)
        if is_best:
            # keep a copy of the best-so-far weights
            shutil.copyfile(checkpoint_filename, 'model_best.pt')