Example #1

from datascience.ml.neural.checkpoints import create_model
from datascience.ml.neural.models import InceptionQRouting
from datascience.ml.neural.reinforcement import fit
from datascience.ml.neural.reinforcement.game import OffshoreRegatta

model = create_model(model_class=InceptionQRouting)

fit(model,
    OffshoreRegatta,
    game_params={
        'source': 'grib_gfs_2018',
        'islands_sup': 1000
    },
    training_params=None,
    predict_params=None,
    validation_params=None,
    export_params=None,
    optim_params=None)
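
All of the optional parameter groups are passed as None above. A minimal sketch of the same call with explicit values; every value below is an illustrative assumption borrowed from the supervised examples further down, not a library default:

# hypothetical values; keys follow the supervised examples below
fit(model,
    OffshoreRegatta,
    game_params={'source': 'grib_gfs_2018', 'islands_sup': 1000},
    training_params={'iterations': [100], 'batch_size': 256},
    optim_params={'lr': 0.001})
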
Example #2

import torch

from datascience.ml.neural.checkpoints import create_model

# the source shows only a fragment; generator, initialize_model, model_params,
# export_result, print_h1, ValidationAccuracy and fit come from the project's
# modules earlier in the original script
painter_list = generator.unique_painters()

with open(export_result, 'w') as f:
    f.write('painter_val;painter_test;prediction;true_label\n')

for i in range(len(painter_list)):
    painter_val = painter_list[i]
    painter_test = painter_list[(i + 1) % len(painter_list)]
    print_h1('||| PAINTER VAL: ' + painter_val + ', PAINTER TEST: ' +
             painter_test + ' |||')

    train, val, test, _ = generator.country_dataset_one_fold(
        painter_val=painter_val, painter_test=painter_test)

    model = create_model(model_class=initialize_model,
                         model_params=model_params)
    # unwrap DataParallel if necessary, then disable the Inception auxiliary head
    mmodel = model.module if isinstance(model, torch.nn.DataParallel) else model
    mmodel.aux_logits = False

    training_params = {
        'iterations': [100, 130, 150, 160],  # iterations with learning rate decay
        'batch_size': 256,
    }

    optim_params = {'lr': 0.001}

    validation_params = {'metrics': (ValidationAccuracy(1), )}

    model_selection_params = {'cross_validation': True, 'min_epochs': 50}

    # the source is cut off mid-call; the arguments below follow the
    # parameter groups defined above
    stats = fit(model,
                train=train,
                val=val,
                test=test,
                training_params=training_params,
                validation_params=validation_params,
                optim_params=optim_params,
                model_selection_params=model_selection_params)
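
The indexing in the loop above implements a leave-one-painter-out rotation: each painter serves once as the validation artist while its successor in the list is held out for test. A standalone sketch of the pairing (painter names are made up):

painters = ['monet', 'degas', 'renoir', 'sisley']  # hypothetical list
for i, painter_val in enumerate(painters):
    painter_test = painters[(i + 1) % len(painters)]  # wraps around at the end
    print(painter_val, painter_test)
# monet degas / degas renoir / renoir sisley / sisley monet
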
Example #3

from datascience.ml.neural.checkpoints import create_model

from datascience.visu.deep_test_plots import plot_db_partitions_gradients, plot_separator, plot_db_partitions
from datascience.visu.util.util import save_fig, remove_axis

# the source shows only a fragment; create_dataset, FullyConnectedDeepAnalysis,
# HebbLoss and ValidationAccuracy come from the project's modules earlier in
# the original script

# constructing the dataset
train, test = create_dataset(param_train=(250, 250), poly=True)

# creating/loading a model
model_params = {
    'architecture': (10, 10, 10),  # play with config (GD vs. SGD) and architecture for the first figures
    'dropout': 0.0,
}
model = create_model(model_class=FullyConnectedDeepAnalysis,
                     model_params=model_params)

# optimization
training_params = {
    'iterations': [120],
    'log_modulo': -1,
    'val_modulo': 1,
    'loss': HebbLoss()
}

optim_params = {
    'momentum': 0.0,
    'lr': 0.1,
}

validation_params = {'metrics': (ValidationAccuracy(1), )}
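
For intuition, the 'architecture' tuple presumably lists the hidden-layer widths of the fully connected model. A rough plain-PyTorch sketch of such a builder, not the project's FullyConnectedDeepAnalysis class:

import torch

def make_mlp(in_dim, architecture, out_dim, dropout=0.0):
    # stack one Linear + ReLU (+ Dropout) block per entry of the tuple
    layers, prev = [], in_dim
    for width in architecture:
        layers += [torch.nn.Linear(prev, width),
                   torch.nn.ReLU(),
                   torch.nn.Dropout(dropout)]
        prev = width
    layers.append(torch.nn.Linear(prev, out_dim))
    return torch.nn.Sequential(*layers)

net = make_mlp(2, (10, 10, 10), 2)  # mirrors 'architecture': (10, 10, 10)
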
Example #4

from engine.parameters import special_parameters
from datascience.ml.neural.checkpoints import create_model

# the source shows only a fragment; create_dataset, FullyConnected, HebbLoss
# and CircleCallback come from the project's modules earlier in the original
# script

train, test = create_dataset(param_train=(250, 250, True, {'scale': 0.42}),
                             poly=False)
# creating/loading a model
model_params = {
    'architecture': (30, ),
    'dropout': 0.0,
    'batchnorm': True,
    'bias': False,
    'relu': True
}

model = create_model(model_class=FullyConnected, model_params=model_params)

# inspect parameter names and values (weight matrices plus batchnorm
# parameters only, since bias=False)
for k, v in model.named_parameters():
    print(k, v)
# exit()
training_params = {
    'iterations': [70, 90, 100],
    'log_modulo': -1,
    'val_modulo': 1,
    'loss': HebbLoss()
}
validation_params = {
    'vcallback': (CircleCallback(bias=False, wk=True), ),
    # 'metrics': (ValidationAccuracy(1),)
}
optim_params = {'lr': 0.1, 'momentum': 0.0}
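
For reference, a plain-PyTorch module matching the config above (one hidden layer of 30 units, no bias, batchnorm, ReLU) shows what the named_parameters loop above would print. This is a sketch, not the project's FullyConnected class:

import torch

net = torch.nn.Sequential(
    torch.nn.Linear(2, 30, bias=False),  # 'architecture': (30,), 'bias': False
    torch.nn.BatchNorm1d(30),            # 'batchnorm': True
    torch.nn.ReLU(),                     # 'relu': True
    torch.nn.Linear(30, 2, bias=False),
)
for name, param in net.named_parameters():
    print(name, tuple(param.shape))  # e.g. 0.weight (30, 2), 1.weight (30,), 1.bias (30,)
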
Example #5

from datascience.ml.neural.models import InceptionEnv
from datascience.data.loader import occurrence_loader
from datascience.data.datasets import EnvironmentalIGNDataset
from datascience.ml.neural.supervised import fit
from datascience.ml.neural.checkpoints import create_model
from projects.max_env.configs.inception import training_params, validation_params, model_params, optim_params

# creating environmental inception (more channels than classical inception)
model = create_model(model_class=InceptionEnv, model_params=model_params)

# loading dataset
train, val, test = occurrence_loader(EnvironmentalIGNDataset,
                                     source='full_ign_5m')

# cap the test set: full_ign_5m is too large to evaluate in full (memory issue)
test.limit = 30000

# training model
fit(model,
    train=train,
    val=val,
    test=test,
    training_params=training_params,
    validation_params=validation_params,
    optim_params=optim_params)
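
How test.limit caps the dataset is internal to the project; the effect can be approximated with a generic torch wrapper (an illustrative sketch, not the library's mechanism):

import torch

class LimitedDataset(torch.utils.data.Dataset):
    # expose at most `limit` items of an underlying dataset
    def __init__(self, base, limit):
        self.base, self.limit = base, limit

    def __len__(self):
        return min(len(self.base), self.limit)

    def __getitem__(self, idx):
        return self.base[idx]
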
Example #6

import torch

# the source shows only a fragment; mnist, cifar10, get_parameters,
# create_model and CustomizableCNN come from the project's modules earlier
# in the original script

# load MNIST or CIFAR10
train, test = mnist() if get_parameters('mnist', False) else cifar10()
# classes = ('plane', 'car', 'bird', 'cat', 'deer', 'dog', 'frog', 'horse', 'ship', 'truck')

model_params = {
    'im_shape': train[0][0].shape,
    'conv_layers': (64,),  # (150, 150),
    'linear_layers': tuple(),  # (128, 128),
    'pooling': torch.nn.AvgPool2d,
    'conv_size': 3
}

model = create_model(model_class=CustomizableCNN, model_params=model_params)

training_params = {
    'iterations': [120],  # iterations with learning rate decay
    'log_modulo': -1,  # print loss once per epoch
    'val_modulo': 1,  # run a validation on the validation set every epoch
    'batch_size': 512
}

optim_params = {
    'lr': 0.01,
    'momentum': 0.0
}

validation_params = {
    # the source is cut off here; the entry below mirrors the earlier examples
    # and assumes ValidationAccuracy is imported as in the original script
    'metrics': (ValidationAccuracy(1), )
}
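
The snippet ends abruptly in the source; following the pattern of Example #5, it would presumably close with a call to the supervised fit. A reconstruction under that assumption, not code from the source:

# assumes: from datascience.ml.neural.supervised import fit
fit(model,
    train=train,
    test=test,
    training_params=training_params,
    validation_params=validation_params,
    optim_params=optim_params)
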