return 2**-8
        elif 8000 <= x:
            return 2**-11

    update_step_size_method = 'schedule'
else:
    raise NotImplementedError

# Set up the data, parameters, model, optimizer, and results objects
# NOTE(review): this chunk is the tail of a larger script —
# train_loader/valid_loader/test_loader, num_classes, args, save_file, model,
# update_step_size_method and step_size_schedule are all defined earlier
# (outside this excerpt); opt_structures / train_network are project modules.
data = opt_structures.Data(train_loader, valid_loader, test_loader)
params = opt_structures.Params(num_classes=num_classes,
                               ckn=True,  # presumably flags a CKN model — confirm against opt_structures.Params
                               train_w_layers=None,  # None appears to mean "train all layers" — TODO confirm
                               lambda_filters=0,  # no filter regularization
                               normalize=True,
                               w_last_init=None,
                               update_step_size_method=update_step_size_method,
                               step_size_init=args.step_size,
                               step_size_schedule=step_size_schedule,
                               tau=args.hessian_reg,  # Hessian regularization strength
                               num_iters=args.num_iters,
                               save_path=save_file + '_params.pickle',
                               eval_test_every=args.eval_test_every,
                               save_every=500)  # checkpoint cadence hard-coded here (other chunks use args.save_every)
model = opt_structures.Model(model, save_path=save_file + '_model.pickle')
results = opt_structures.Results(save_path=save_file + '_results.pickle')
optimizer = train_network.TrainSupervised(data, model, params, results)

# Train the model
optimizer.train()
Ejemplo n.º 2
0
    # NOTE(review): the lines below carry stray leading indentation (likely a
    # copy/scrape artifact) — as written this chunk is not valid at module
    # level; the enclosing function/def is not visible in this excerpt.
    # Data here takes a different positional layout than the supervised chunks:
    # presumably (train_labeled, train_unlabeled, ?, valid, test) — TODO confirm
    # against opt_structures.Data's signature.
    data = opt_structures.Data(None,
                               train_unlabeled_loader,
                               None,
                               None,
                               test_loader,
                               deepcluster_loader=train_loader)
params = opt_structures.Params(
    nclasses=nclasses,
    min_frac_points_class=min_frac_points_class,  # class-balance bounds — defined earlier in the script
    max_frac_points_class=max_frac_points_class,
    ckn=ckn,
    project=True,
    lambda_pix=args.lambda_pix,
    lam=args.lam,
    normalize=True,
    balanced=balanced_version,
    labeling_method=args.labeling_method,
    deepcluster_k=args.num_clusters,  # number of clusters for the deepcluster-style labeling step
    deepcluster_update_clusters_every=args.update_clusters_every,
    labeling_burnin=args.labeling_burnin,  # iterations before pseudo-labeling starts — TODO confirm
    step_size_init_sup=args.lr_sup_init,
    step_size_init_semisup=args.lr_semisup,
    maxiter=args.num_iters,  # NOTE(review): supervised chunks call this num_iters — inconsistent kwarg naming across Params usages
    eval_test_every=args.eval_test_every,
    save_every=args.save_every,
    save_path=save_file + '_params.pickle')

model = opt_structures.Model(model, save_path=save_file + '_model.pickle')
results = opt_structures.Results(save_path=save_file + '_results.pickle')
optimizer = train_xsdc.TrainSupervised(data, model, params, results)

# Train the model with XSDC
# Build the MNIST dataloaders; get_dataloaders returns five loaders
# (full train, labeled train, unlabeled train, validation, test).
train_loader, train_labeled_loader, train_unlabeled_loader, valid_loader, test_loader = \
    mnist.get_dataloaders(data_path=args.data_path, num_labeled=args.num_labeled, num_train=args.num_train,
                          num_workers=0, seed=args.seed)
print('Size of training set:', len(train_loader.dataset))

# Load and initialize the model
model_params = parse_config.load_config('../../cfg/lenet-5_ckn.cfg')
if args.num_filters > 0:
    # Override the config: same filter count at every layer.
    # NOTE(review): `bw` is not defined anywhere in this excerpt — presumably a
    # bandwidth/sigma CLI argument set earlier; verify before running.
    nlayers = len(model_params['num_filters'])
    model_params['num_filters'] = [args.num_filters] * nlayers
    model_params['patch_sigma'] = [bw] * nlayers

layers = parse_config.create_layers(model_params)
model = net.CKN(layers).to(defaults.device)
# Data-dependent initialization of the CKN filters from the training set.
model.init(train_loader)
print('Done with initialization')

# Set up the data, parameters, model, results, and optimizer objects
data = opt_structures.Data(train_labeled_loader, train_unlabeled_loader, valid_loader, test_loader)
params = opt_structures.Params(batch_size=args.batch_size, ckn=True, epsilon=args.epsilon,
                               eval_test_every=args.eval_test_every, lambda_cov=args.lambda_cov, lambda_params=0,
                               lr=args.lr, lr_schedule=None, min_dist=args.min_dist, num_classes=10,
                               num_iters=args.num_iters, project=True, save_every=args.save_every,
                               save_path=save_file + '_params.pickle', train_w_layers=[0, 2, 4, 5])  # only these layer indices are trained — TODO confirm semantics
model = opt_structures.Model(model, save_path=save_file + '_model.pickle')
results = opt_structures.Results(save_path=save_file + '_results.pickle')
optimizer = train_network.TrainSupervised(data, model, params, results)

# Train the model
optimizer.train()
Ejemplo n.º 4
0
# Load and initialize the model
model = sum_sigmoids.SumSigmoids(args.num_filters)
model.apply(sum_sigmoids.init_normal)  # apply normal init to every submodule
model.to(defaults.device)
# NOTE(review): state_dict()['thetas'] is already a tensor; list(...) then
# torch.stack(...) rebuilds it row-by-row — likely redundant, but left as-is
# since it only affects this diagnostic print.
print('Initial thetas:', torch.stack(list(model.state_dict()['thetas'])))

# Set up the data, parameters, model, results, and optimizer objects
# NOTE(review): the four loaders and args/save_file come from earlier in the
# script (outside this excerpt).
data = opt_structures.Data(train_labeled_loader, train_unlabeled_loader,
                           valid_loader, test_loader)
params = opt_structures.Params(ckn=False,  # plain torch model, not a CKN
                               epsilon=args.epsilon,
                               eval_test_every=args.eval_test_every,
                               lambda_cov=args.lambda_cov,
                               lambda_params=args.lambda_params,
                               lr=args.lr,
                               lr_schedule=None,  # constant learning rate
                               min_dist=args.min_dist,
                               num_classes=2,  # binary toy problem
                               num_iters=args.num_iters,
                               project=False,
                               save_every=args.save_every,
                               save_path=save_file + '_params.pickle',
                               train_w_layers=None)
model = opt_structures.Model(model, save_path=save_file + '_model.pickle')
results = opt_structures.Results(save_path=save_file + '_results.pickle')
optimizer = train_network.TrainSupervised(data, model, params, results)

# Train the model
optimizer.train()
Ejemplo n.º 5
0
                               test_loader,
                               deepcluster_loader=train_loader)
# NOTE(review): the opt_structures.Data(...) call that should precede this
# Params construction is truncated in this excerpt (only its last two
# arguments are visible above) — this chunk is incomplete as shown.
params = opt_structures.Params(
    nclasses=nclasses,
    min_frac_points_class=min_frac_points_class,  # class-balance bounds — defined earlier in the script
    max_frac_points_class=max_frac_points_class,
    ckn=ckn,
    project=True,
    train_w_layers=[0, 2, 4, 5],  # only these layer indices are trained — TODO confirm semantics
    lam=args.lam,
    normalize=True,
    augment=args.augment,
    # Optional extra labeling constraints; all four come from earlier in the
    # script (outside this excerpt).
    add_constraints=add_constraints,
    add_constraints_method=add_constraints_method,
    add_constraints_frac=add_constraints_frac,
    add_constraints_classes=add_constraints_classes,
    balanced=balanced_version,
    labeling_method=args.labeling_method,
    rounding=args.rounding,
    deepcluster_k=args.num_clusters,  # number of clusters for the deepcluster-style labeling step
    deepcluster_update_clusters_every=args.update_clusters_every,
    labeling_burnin=args.labeling_burnin,
    step_size_init_sup=args.lr_sup_init,
    step_size_init_semisup=args.lr_semisup,
    update_lambda=args.update_lambda,
    maxiter=args.num_iters,  # NOTE(review): other chunks call this num_iters — inconsistent kwarg naming across Params usages
    eval_test_every=args.eval_test_every,
    save_every=args.save_every,
    save_path=save_file + '_params.pickle',
)

model = opt_structures.Model(model, save_path=save_file + '_model.pickle')