Example #1
def main(disable=0, device="cpu", cycles=100, D=32, N=128, name="evo"):

    # arguments may arrive as strings (e.g. from a CLI front end), hence the casts
    disable = int(disable)
    cycles = int(cycles)

    print("Using device: %s" % device)

    N = int(N)
    D = int(D)

    data = dataset.create(N, D)
    test = dataset.test(N, D)

    if name == "evo":
        net = model.EvolutionaryModel(D, disable=disable).to(device)

        try:
            for i in range(cycles):
                net.do_cycle(*data, *test)
        except KeyboardInterrupt:
            pass

        best = net.select_best()
        print(best.net[0].weight.data)

        train.visualize(best, outf="results.png", D=D)

    else:
        net = model.Model(D)
        train.train(*data, *test, net)
        print(net.net[0].weight)
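The snippet assumes project-local dataset, model, and train modules. A minimal sketch of how this main could be wired to the command line (the key=value parsing is an assumption, not part of the project):

# Hypothetical entry point: forwards key=value CLI arguments to main(),
# e.g. `python run.py name=evo device=cuda cycles=50`.
if __name__ == "__main__":
    import sys
    kwargs = dict(arg.split("=", 1) for arg in sys.argv[1:])
    main(**kwargs)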
Example #2
    def eval_euc_svm(self, data, visualize=True):
        """Evaluate euclidean SVM on given data"""
        X_tr, X_te, Y_tr, Y_te = data
        # map the {0, 1} labels to the {-1, +1} convention used by SVMs
        Y_tr[Y_tr == 0] = -1.0
        Y_te[Y_te == 0] = -1.0

        res = {'algo': 'euc_svm'}

        logger.info('(euc svm) grid search hyperparameter tuning')
        param_grid = config.EUC_SVM_PARAM_GRID
        clf = GridSearchCV(estimator=LinearSVC(),
                           param_grid=param_grid,
                           scoring='roc_auc',
                           n_jobs=-1)
        clf.fit(X_tr, Y_tr)
        logger.info('(train) best roc auc: {:.3f}, best params: {}'.format(
            clf.best_score_, clf.best_params_))
        res['train_roc_auc'] = clf.best_score_
        res['train_params'] = clf.best_params_

        roc_auc_te = clf.score(X_te, Y_te)
        roc_auc_te2 = roc_auc_score(Y_te, clf.decision_function(X_te))
        logger.info('(test) roc auc: {:.3f} ({:.3f})'.format(
            roc_auc_te, roc_auc_te2))
        res['test_roc_auc'] = roc_auc_te

        if visualize:
            train.visualize(X_te, Y_te, clf.best_estimator_.coef_.ravel())

        return res
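A minimal usage sketch with synthetic data (the evaluator instance and its class are assumptions standing in for the surrounding project; make_classification, train_test_split, and the metrics come from scikit-learn):

import numpy as np
from sklearn.datasets import make_classification
from sklearn.model_selection import train_test_split

# build a small binary classification problem and split it
X, y = make_classification(n_samples=200, n_features=10, random_state=0)
X_tr, X_te, Y_tr, Y_te = train_test_split(X, y.astype(float),
                                          test_size=0.3, random_state=0)

res = evaluator.eval_euc_svm((X_tr, X_te, Y_tr, Y_te), visualize=False)
print(res['test_roc_auc'])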
Example #3
    gru.train(X_train, F_train, y_train, iters_retrain=20, num_iters=1500,
              batch_size=10, lr=1e-3, param_scale=0.1, log_every=1)

    tree = gru.tree

    if not os.path.isdir('./trained_models'):
        os.mkdir('./trained_models')

    indicator = str(args.strength)

    with open('./trained_models/trained_weights_'+indicator+'_mixed_fs2.pkl', 'wb') as fp:
        pickle.dump({'gru': gru.gru.weights, 'mlp': gru.mlp.weights}, fp)
        print('saved trained model to ./trained_models')

    visualize(gru.tree, './trained_models/tree_' + indicator + '_mixed_fs2.pdf', True)
    print('saved final decision tree to ./trained_models')
    print('\n')

    print('name of the file: ./trained_models/trained_weights_'+indicator+'_mixed_fs2.pkl')

    X_test = obs_test
    F_test = fcpt_test
    y_test = out_test

    y_hat = gru.pred_fun(gru.weights, X_test, F_test)
    y_hat_int = np.rint(y_hat).astype(int)  # round probabilities to hard 0/1 predictions
    auc_test = roc_auc_score(y_test.T, y_hat.T)
    print('Test AUC: {:.2f}'.format(auc_test))
    avg_precision = average_precision_score(y_test.T, y_hat.T)
    print('Test Average Precision: {:.2f}'.format(avg_precision))
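To reuse the pickled weights later, they can be loaded back and assigned to a freshly constructed model (a sketch; the GRUTree constructor arguments must match those used at training time):

import pickle

with open('./trained_models/trained_weights_' + indicator + '_mixed_fs2.pkl',
          'rb') as fp:
    weights = pickle.load(fp)
gru.gru.weights = weights['gru']
gru.mlp.weights = weights['mlp']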
Example #4
def main(args):
    config = cfg.get_default()
    cfg.set_params(config, args.config_path, args.set)
    cfg.freeze(config, True)
    print('- Configuration:')
    print(config)

    if config.dataset == 'FluidIceShake':
        n_groups = 2
        n_particles = 348
    elif config.dataset == 'RigidFall':
        n_groups = 3
        n_particles = 192
    elif config.dataset == 'MassRope':
        n_groups = 2
        n_particles = 95
    else:
        raise ValueError('Unsupported environment')

    # generate outputs for both train and valid data
    train_loader = get_dataloader(config, 'train', shuffle=False)
    valid_loader = get_dataloader(config, 'valid', shuffle=False)

    # build model
    model = PointSetNet(
        config.n_frames,
        config.pred_hidden,
        n_particles,
        n_groups,
        config.batchnorm,
        single_out=False,
        recur_pred=config.recur_pred).to(_DEVICE)

    # a model checkpoint must be loaded
    if config.load_path != '':
        print('- Loading model from {}'.format(config.load_path))

        # load model on GPU/CPU
        if torch.cuda.is_available():
            model.load_state_dict(torch.load(config.load_path))
        else:
            model.load_state_dict(
                torch.load(config.load_path, map_location='cpu'))

    else:
        raise ValueError('- Please provide a valid checkpoint')

    if config.log_eval:
        # [[train_data_loss], [valid_data_loss]]
        losses = []

    for loader, name in [(train_loader, 'train'), (valid_loader, 'valid')]:
        # load data with progress bar
        pbar = tqdm(loader)
        n_traj = 0

        # create directory to save output data
        save_dir = os.path.join(config.run_dir, 'eval', name)
        if not os.path.isdir(save_dir):
            os.makedirs(save_dir)

        if config.vis_eval:
            vis_save_dir = os.path.join(save_dir, 'out_vis')
            if not os.path.isdir(vis_save_dir):
                os.makedirs(vis_save_dir)

        if config.log_eval:
            # [(loss, pos_loss, grp_loss) for all data in current loader]
            loader_loss = []

        for images, positions, groups in pbar:
            if config.log_eval:
                model, _, loss, pos_loss, grp_loss = step(
                    config, model, None, images, positions, groups, False)
                loader_loss.append((loss, pos_loss, grp_loss))
                pbar.set_description('Loss {:f}'.format(loss))

            pbar.set_description('Generating video outputs')
            n_traj = generate_outputs(config, model, n_traj, images, save_dir)

            if config.vis:
                visualize(config, model, n_traj // config.batch_size,
                          n_particles, images, positions, groups, False)

        if config.log_eval:
            losses.append(loader_loss)

    if config.log_eval:
        # save all losses into JSON file
        stats = {}
        train_losses, valid_losses = losses
        (stats['train_losses'],
         stats['train_pos_losses'],
         stats['train_grp_losses']) = list(zip(*train_losses))
        (stats['valid_losses'],
         stats['valid_pos_losses'],
         stats['valid_grp_losses']) = list(zip(*valid_losses))

        with open(os.path.join(config.run_dir,
                               'eval_stats.json'), 'w') as fout:
            json.dump(stats, fout)
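The dumped statistics can be read back for plotting or reporting (a sketch; the key names follow the stats dict built above):

import json
import os

import numpy as np

with open(os.path.join(config.run_dir, 'eval_stats.json')) as fin:
    stats = json.load(fin)
print('mean valid loss: {:.4f}'.format(np.mean(stats['valid_losses'])))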
Example #5
# Handle multi-gpu if desired
if (device.type == 'cuda') and (ngpu > 1):
    netD = nn.DataParallel(netD, list(range(ngpu)))

# Apply the weights_init function to randomly initialize all weights
#  to mean=0, stdev=0.02.
netD.apply(weights_init)

# Initialize BCELoss function
criterion = nn.BCELoss()

# Create batch of latent vectors that we will use to visualize
#  the progression of the generator
fixed_noise = torch.randn(64, nz, 1, 1, device=device)

# Establish convention for real and fake labels during training
real_label = 1
fake_label = 0

# Setup Adam optimizers for both G and D
optimizerD = optim.Adam(netD.parameters(), lr=lr, betas=(beta1, 0.999))
optimizerG = optim.Adam(netG.parameters(), lr=lr, betas=(beta1, 0.999))

dataloader = get_loader(dataroot, image_size, batch_size, workers)
G_losses, D_losses, img_list = train(num_epochs, dataloader, netG, netD,
                                     real_label, fake_label, optimizerG,
                                     optimizerD, criterion, device,
                                     fixed_noise, nz)
visualize(G_losses, D_losses, img_list, dataloader, device)
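The weights_init function applied above is not shown in this snippet; the standard DCGAN initializer consistent with the comment (normal(0, 0.02) for convolution layers, normal(1, 0.02) for batch-norm scales) looks like this:

import torch.nn as nn

def weights_init(m):
    # initialize convolution and batch-norm layers per the DCGAN paper
    classname = m.__class__.__name__
    if classname.find('Conv') != -1:
        nn.init.normal_(m.weight.data, 0.0, 0.02)
    elif classname.find('BatchNorm') != -1:
        nn.init.normal_(m.weight.data, 1.0, 0.02)
        nn.init.constant_(m.bias.data, 0)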
Example #6
    gru = GRUTree(num_features, 1, [25], 1, strength=1000)
    gru.train(X_train,
              F_train,
              y_train,
              iters_retrain=25,
              num_iters=300,
              batch_size=10,
              lr=1e-2,
              param_scale=0.1,
              log_every=10)

    if not os.path.isdir('./trained_models_adult'):
        os.mkdir('./trained_models_adult')

    with open('./trained_models_adult/trained_weights.pkl', 'wb') as fp:
        cPickle.dump({'gru': gru.gru.weights, 'mlp': gru.mlp.weights}, fp)
        print('saved trained model to ./trained_models_adult')

    visualize(gru.tree, './trained_models_adult/tree.pdf')
    print('saved final decision tree to ./trained_models_adult')

    y_hat = gru.pred_fun(gru.weights, X_test, F_test)
    auc_test = roc_auc_score(y_test.T, y_hat.T)
    print('Test AUC: {:.2f}'.format(auc_test))

    mse_test = mean_squared_error(y_test.T, y_hat.T)
    print('Test MSE: {:.2f}'.format(mse_test))

    acc_test = accuracy_score(y_test.T, np.round(y_hat.T))
    print('Test ACC: {:.2f}'.format(acc_test))

    gru = GRUTree(features, 1, [25], 1, strength=1000)
    gru.train(X_train,
              F_train,
              y_train,
              iters_retrain=25,
              num_iters=300,
              batch_size=10,
              lr=1e-2,
              param_scale=0.1,
              log_every=10)

    if not os.path.isdir('./trained_models_titanic'):
        os.mkdir('./trained_models_titanic')

    with open('./trained_models_titanic/trained_weights.pkl', 'wb') as fp:
        cPickle.dump({'gru': gru.gru.weights, 'mlp': gru.mlp.weights}, fp)
        print('saved trained model to ./trained_models_titanic')

    visualize(gru.tree, './trained_models_titanic/tree.pdf')
    print('saved final decision tree to ./trained_models_titanic')

    y_hat = gru.pred_fun(gru.weights, X_test, F_test)
    auc_test = roc_auc_score(y_test.T, y_hat.T)
    print('Test AUC: {:.2f}'.format(auc_test))

    mse_test = mean_squared_error(y_test.T, y_hat.T)
    print('Test MSE: {:.2f}'.format(mse_test))

    acc_test = accuracy_score(y_test.T, np.round(y_hat.T))
    print('Test ACC: {:.2f}'.format(acc_test))
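The same three metrics are printed for both datasets; a small helper keeps that logic in one place (a sketch; it assumes y_hat holds predicted probabilities in [0, 1]):

import numpy as np
from sklearn.metrics import roc_auc_score, mean_squared_error, accuracy_score

def report_metrics(y_true, y_hat):
    """Print AUC, MSE, and accuracy for predicted probabilities y_hat."""
    print('Test AUC: {:.2f}'.format(roc_auc_score(y_true, y_hat)))
    print('Test MSE: {:.2f}'.format(mean_squared_error(y_true, y_hat)))
    print('Test ACC: {:.2f}'.format(accuracy_score(y_true, np.round(y_hat))))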
Example #8
    gru.train(X_train,
              F_train,
              y_train,
              iters_retrain=50,
              num_iters=300,
              batch_size=10,
              lr=1e-2,
              param_scale=0.1,
              log_every=10)

    if not os.path.isdir('./trained_models'):
        os.mkdir('./trained_models')

    indicator = args.strength

    with open('./trained_models/trained_weights_' + str(indicator) + '.pkl',
              'wb') as fp:
        pickle.dump({'gru': gru.gru.weights, 'mlp': gru.mlp.weights}, fp)
        print('saved trained model to ./trained_models')

    visualize(gru.tree, './trained_models/tree_' + str(indicator) + '.pdf')
    print('saved final decision tree to ./trained_models')
    print('\n')

    X_test = obs_test
    F_test = fcpt_test
    y_test = out_test

    y_hat = gru.pred_fun(gru.weights, X_test, F_test)
    auc_test = roc_auc_score(y_test.T, y_hat.T)
    print('Test AUC: {:.2f}'.format(auc_test))
    avg_precision = average_precision_score(y_test.T, y_hat.T)
    print('Test Average Precision: {:.2f}'.format(avg_precision))
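The args.strength used in the file names above would typically come from an argument parser along these lines (hypothetical flag and default, shown only for context):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--strength', type=float, default=1000.0,
                    help='tree-regularization strength')
args = parser.parse_args()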