def __init__(self, train_valid_test_ratio=[8, 1, 1], preprocessor=None,
             noise=None, batch_size=100, num_batches=None,
             iter_class='SequentialSubsetIterator', rng=None, log=None):
    '''
    DESCRIPTION:
        Abstract dataset class.
    PARAMS:
        train_valid_test_ratio (list of 3 ints): ratio used to split the
            dataset into train, valid and test sets
        preprocessor: preprocessor applied to the dataset
        noise: noise applied to the dataset
        batch_size (int): size of each minibatch
        num_batches (int): number of batches per epoch
        iter_class (str): iterator class used to split the dataset, e.g.
            'SequentialSubsetIterator' for a sequential split
        rng: random number generator
        log: Log object; defaults to a Log wrapping the internal logger
    '''
    assert len(train_valid_test_ratio) == 3, \
        'the size of train_valid_test_ratio list is not 3'
    self.ratio = train_valid_test_ratio
    self.preprocessor = preprocessor
    self.noise = noise
    self.iter_class = iter_class
    self.batch_size = batch_size
    self.num_batches = num_batches
    self.rng = rng
    self.log = log
    if self.log is None:
        # use default Log setting
        self.log = Log(logger=internal_logger)
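# Usage sketch: splitting a dataset with the constructor above. This assumes
# the Mnist subclass used in the tutorial below inherits this constructor
# unchanged; the import path is hypothetical.
from datasets.mnist import Mnist  # hypothetical import path

data = Mnist(train_valid_test_ratio=[8, 1, 1],  # 80% train, 10% valid, 10% test
             batch_size=100,
             iter_class='SequentialSubsetIterator')  # split sequentially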
def build_log(self, save_to_database=None, id=None):
    # append the id to the experiment name only when an id is supplied
    if id is not None:
        experiment_name = '%s_%s' % (self.state.log.experiment_name, id)
    else:
        experiment_name = self.state.log.experiment_name
    log = Log(experiment_name=experiment_name,
              description=self.state.log.description,
              save_outputs=self.state.log.save_outputs,
              save_learning_rule=self.state.log.save_learning_rule,
              save_model=self.state.log.save_model,
              save_epoch_error=self.state.log.save_epoch_error,
              save_to_database=save_to_database)
    return log
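# For illustration, a standalone sketch of the name-suffixing rule in
# build_log above. suffix_name is a hypothetical helper, not library API.
def suffix_name(experiment_name, id=None):
    # mirrors build_log: append '_<id>' only when an id is supplied
    if id is not None:
        return '%s_%s' % (experiment_name, id)
    return experiment_name

assert suffix_name('mnist') == 'mnist'
assert suffix_name('mnist', 3) == 'mnist_3'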
def __init__(self, model, dataset, learning_rule, learning_method, log=None):
    self.model = model
    self.dataset = dataset
    self.learning_rule = learning_rule
    self.learning_method = learning_method
    self.log = log
    if self.log is None:
        # use default Log setting
        self.log = Log(logger=internal_logger)
    elif self.log.save_to_database:
        # show the database records before training starts
        self.log.print_records()
        print('\n')
    self.log.info('..begin setting up train object')
    self.setup()
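# Minimal sketch of the default-log path: with log omitted, the constructor
# above falls back to Log(logger=internal_logger). Assumes mlp, data,
# learning_rule and learning_method are built as in the tutorial below.
train_object = TrainObject(model=mlp,
                           dataset=data,
                           learning_rule=learning_rule,
                           learning_method=learning_method)  # log defaults to None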
def mlp():
    # build dataset
    data = Mnist(preprocessor=None, train_valid_test_ratio=[5, 1, 1])

    # build mlp
    mlp = MLP(input_dim=data.feature_size())
    W1 = GaussianWeight(prev_dim=mlp.input_dim, this_dim=1000)
    hidden1 = PRELU(dim=1000, name='h1_layer', W=W1(mean=0, std=0.1),
                    b=None, dropout_below=None)
    mlp.add_layer(hidden1)
    W2 = XavierWeight(prev_dim=hidden1.dim, this_dim=data.target_size())
    output = Softmax(dim=data.target_size(), name='output_layer', W=W2(),
                     b=None, dropout_below=None)
    mlp.add_layer(output)

    # build learning method
    learning_method = AdaGrad(learning_rate=0.1, momentum=0.9)

    # set the learning rules
    learning_rule = LearningRule(max_col_norm=10,
                                 L1_lambda=None,
                                 L2_lambda=None,
                                 training_cost=Cost(type='mse'),
                                 learning_rate_decay_factor=None,
                                 stopping_criteria={'max_epoch': 300,
                                                    'epoch_look_back': 10,
                                                    'cost': Cost(type='error'),
                                                    'percent_decrease': 0.01})

    # (optional) build the logging object
    log = Log(experiment_name='mnist',
              description='This is a tutorial example',
              save_outputs=True,
              save_learning_rule=True,
              save_model=True,
              save_epoch_error=True,
              save_to_database={'name': 'Example.db',
                                'records': {'Dataset': data.__class__.__name__,
                                            'max_col_norm': learning_rule.max_col_norm,
                                            'Weight_Init_Seed': mlp.rand_seed,
                                            'Dropout_Below': str([layer.dropout_below for layer in mlp.layers]),
                                            'Batch_Size': data.batch_size,
                                            'Layer_Dim': str([layer.dim for layer in mlp.layers]),
                                            'Layer_Types': str([layer.__class__.__name__ for layer in mlp.layers]),
                                            'Preprocessor': data.preprocessor.__class__.__name__,
                                            'Learning_Rate': learning_method.learning_rate,
                                            'Momentum': learning_method.momentum,
                                            'Training_Cost': learning_rule.cost.type,
                                            'Stopping_Cost': learning_rule.stopping_criteria['cost'].type}})

    # put everything into the train object
    train_object = TrainObject(model=mlp,
                               dataset=data,
                               learning_rule=learning_rule,
                               learning_method=learning_method,
                               log=log)
    # finally run the code
    train_object.run()
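# To try the tutorial, call mlp() from a script entry point:
if __name__ == '__main__':
    mlp()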