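#note: these snippets assume that libs, libs_dataset and the model modules
#(Modelconv_4d05, Modellstm_64_ws64, Modelresnet_1_d05, ...) are already imported,
#and that training_files, training_labels, testing_files, testing_labels have been
#prepared beforehand (sample files and their class IDs, see libs_dataset/cells_dataset.py)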
dataset = libs_dataset.CellsDataset(training_files,
                                    training_labels,
                                    testing_files,
                                    testing_labels,
                                    window_size=128,
                                    classes_count=2,
                                    augmentations_count=100)

#train 100 epochs
epoch_count = 100

#cyclic learning rate scheduler
learning_rates = [0.001, 0.001, 0.0001, 0.0001, 0.0001, 0.00001, 0.00001]
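
#the learning rate list is presumably stepped cyclically over the epochs inside
#libs.Train; a minimal illustrative sketch of such a list based schedule:
def cyclic_learning_rate(epoch, learning_rates):
    return learning_rates[epoch % len(learning_rates)]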

train = libs.Train(dataset,
                   Modelconv_4d05,
                   batch_size=256,
                   learning_rates=learning_rates)
train.step_epochs(epoch_count, log_path="../models/model_conv_4d05")
'''
training result saved into ../models/model_conv_4d05/result

training progress is saved into file training.log, columns:
epoch               [int]
training_accuracy   [%]
testing_accuracy    [%]
training_loss_mean  [float]
testing_loss_mean   [float]
training_loss_std   [float]
testing_loss_std    [float]

best model is saved into ../models/model_conv_4d05/trained
'''
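
#training.log is a plain text table with the columns listed above; a minimal
#sketch of inspecting it (the exact file location and the whitespace separated
#format are assumptions based on log_path above):
import numpy

log = numpy.loadtxt("../models/model_conv_4d05/training.log")

epoch            = log[:, 0]
testing_accuracy = log[:, 2]

best_idx = numpy.argmax(testing_accuracy)
print("best testing accuracy =", testing_accuracy[best_idx], "% at epoch", int(epoch[best_idx]))
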
dataset = libs_dataset.CellsDataset(training_files,
                                    training_labels,
                                    testing_files,
                                    testing_labels,
                                    window_size=64,
                                    classes_count=2,
                                    augmentations_count=20)

#train 100 epochs
epoch_count = 100

#cyclic learning rate scheduler
learning_rates = [0.001, 0.001, 0.0001, 0.0001, 0.0001, 0.00001, 0.00001]

train = libs.Train(dataset,
                   Modellstm_64_ws64,
                   batch_size=256,
                   learning_rates=learning_rates)
train.step_epochs(epoch_count, log_path="../models/model_lstm_64_ws64")
'''
training result saved into ../models/model_lstm_64_ws64/result

training progress is saved into file training.log, columns:
epoch               [int]
training_accuracy   [%]
testing_accuracy    [%]
training_loss_mean  [float]
testing_loss_mean   [float]
training_loss_std   [float]
testing_loss_std    [float]

best model is saved into ../models/model_lstm_64_ws64/trained
'''
'''
create dataset with training / testing pairs
labels correspond to class IDs
for details see libs_dataset/cells_dataset.py
'''
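#the file and label arguments are assumed to be plain sequences prepared beforehand;
#a purely hypothetical illustration of their shape (names below are placeholders):
#
#training_files  = ["cells/train/sample_0000", "cells/train/sample_0001", ...]
#training_labels = [0, 1, ...]
#testing_files   = ["cells/test/sample_0000", ...]
#testing_labels  = [1, ...]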
dataset = libs_dataset.CellsDataset(training_files,
                                    training_labels,
                                    testing_files,
                                    testing_labels,
                                    window_size=128,
                                    classes_count=2,
                                    augmentations_count=100)


#train 100 epochs
epoch_count = 100

#cyclic learning rate scheduler
learning_rates = [0.001, 0.001, 0.0001, 0.0001, 0.0001, 0.00001, 0.00001]

train = libs.Train(dataset,
                   Modelresnet_1_d05,
                   batch_size=256,
                   learning_rates=learning_rates)
train.step_epochs(epoch_count, log_path="../models/model_resnet_1_d05")

'''
training result saved into ../models/model_resnet_1_d05/result

training progress is saved into file training.log, columns:
epoch               [int]
training_accuracy   [%]
testing_accuracy    [%]
training_loss_mean  [float]
testing_loss_mean   [float]
training_loss_std   [float]
testing_loss_std    [float]

best model is saved into ../models/model_resnet_1_d05/trained
'''
dataset = libs_dataset.CellsDataset(training_files,
                                    training_labels,
                                    testing_files,
                                    testing_labels,
                                    window_size=128,
                                    classes_count=2,
                                    augmentations_count=100)

#train 100 epochs
epoch_count = 100

#cyclic learning rate scheduler
learning_rates = [0.001, 0.001, 0.0001, 0.0001, 0.0001, 0.00001, 0.00001]

train = libs.Train(dataset,
                   Modelresnet_1_4d001_beforeResBlock,
                   batch_size=256,
                   learning_rates=learning_rates)
train.step_epochs(epoch_count,
                  log_path="../models/model_resnet_1_4d001_beforeResBlock")
'''
training result saved into ../models/model_resnet_1_4d001_beforeResBlock/result

training progress is saved into file training.log, columns:
epoch               [int]
training_accuracy   [%]
testing_accuracy    [%]
training_loss_mean  [float]
testing_loss_mean   [float]
training_loss_std   [float]
testing_loss_std    [float]

best model is saved into ../models/model_resnet_1_4d001_beforeResBlock/trained
'''
dataset = libs_dataset.CellsDataset(training_files,
                                    training_labels,
                                    testing_files,
                                    testing_labels,
                                    window_size=128,
                                    classes_count=2,
                                    augmentations_count=20)

#train 100 epochs
epoch_count = 100

#cyclic learning rate scheduler
learning_rates = [0.001, 0.001, 0.0001, 0.0001, 0.0001, 0.00001, 0.00001]

train = libs.Train(dataset,
                   Modelgru,
                   batch_size=256,
                   learning_rates=learning_rates)
train.step_epochs(epoch_count, log_path="../models/model_gru")
'''
training result saved into ../models/model_gru/result

training progress is saved into file training.log, columns:
epoch               [int]
training_accuracy   [%]
testing_accuracy    [%]
training_loss_mean  [float]
testing_loss_mean   [float]
training_loss_std   [float]
testing_loss_std    [float]

best model is saved into ../models/model_gru/trained
'''
import sys
sys.path.insert(0, '../..')

import libs
import libs_dataset
import models.net_0.model as Model0
import models.net_1.model as Model1

epoch_count = 20
learning_rates = [0.001, 0.001, 0.0001]

#dataset = libs_dataset.DatasetLineFollower(width = 8, height = 8, classes_count = 5, training_count = 50000, testing_count = 5000)
#train = libs.Train(dataset, Model0, libs.MetricsClassification, batch_size = 64, learning_rates = learning_rates)
#train.step_epochs(epoch_count, log_path = "./models/net_0")

dataset = libs_dataset.DatasetLineFollowerStream()
train = libs.Train(dataset,
                   Model1,
                   libs.MetricsRegression,
                   batch_size=64,
                   learning_rates=learning_rates)
train.step_epochs(epoch_count, log_path="./models/net_1")