# Build the computation graph: two placeholders feeding a multi-layer network.
# Hyperparameters (input_size, hidden_layer*_size, output_size,
# model_params_dir) are assumed to be defined earlier in the file -- TODO confirm.
x = tfg.Placeholder(name="x")
target = tfg.Placeholder(name="target")
n = tfn.Multi_Layer_Network(
    input_size=input_size,
    hidden_size_list=[hidden_layer1_size, hidden_layer2_size],
    output_size=output_size,
    input_node=x,
    target_node=target,
    init_mean=0.0,
    init_sd=0.01,                          # small-SD Gaussian weight init
    activator=tfe.Activator.ReLU.value,
    optimizer=tfe.Optimizer.Adam.value,
    learning_rate=0.01,
    model_params_dir=model_params_dir
)
#n.draw_and_show()

data = mnist.MNIST_Data()

# Baseline: accuracy of the untrained network on the test split.
forward_final_output = n.feed_forward(input_data=data.test_input, is_numba=False)
print(forward_final_output.shape)
print(tff.accuracy(forward_final_output, data.test_target))

# Train with mini-batches, printing progress every epoch.
batch_size = 1000
n.learning(max_epoch=100, data=data, batch_size=batch_size, print_period=1, is_numba=False, verbose=False)

# Accuracy after training, for comparison with the baseline above.
forward_final_output = n.feed_forward(input_data=data.test_input, is_numba=False)
print(tff.accuracy(forward_final_output, data.test_target))
def set_learning_process_specification(self, data, batch_size, epoch, print_period, is_numba, fold_idx, max_epoch, verbose):
    """Record one epoch's learning statistics during (cross-validated) training.

    Samples a random training mini-batch, measures train/validation error,
    tracks the epoch with the minimal validation error (saving parameters
    there), measures test accuracy, appends per-layer distribution statistics
    for parameters and activations, and prints a progress line.

    Args:
        data: dataset object exposing train/validation/test splits and
            ``num_train_data``.
        batch_size: number of training samples drawn for the error estimate.
        epoch: current epoch index within the current fold.
        print_period: print a progress line every ``print_period`` epochs.
        is_numba: forwarded to session.run / feed_forward.
        fold_idx: current cross-validation fold index.
        max_epoch: epochs per fold; used to compute a global epoch index.
        verbose: when True, also print per-layer parameter statistics.
    """
    # Estimate training error on a random mini-batch (not the full set).
    batch_mask = np.random.choice(data.num_train_data, batch_size)
    i_batch = data.train_input[batch_mask]
    t_batch = data.train_target[batch_mask]

    train_error = self.session.run(self.error, {
        self.input_node: i_batch,
        self.target_node: t_batch
    }, is_numba=is_numba, verbose=False)
    self.train_error_list.append(train_error)

    validation_error = self.session.run(self.error, {
        self.input_node: data.validation_input,
        self.target_node: data.validation_target
    }, is_numba=is_numba, verbose=False)
    self.validation_error_list.append(validation_error)

    # Track the globally best (minimal) validation error across folds and
    # persist the parameters at that point (early-stopping style checkpoint).
    min_flag = False
    if validation_error < self.min_validation_error:
        self.min_validation_error = validation_error
        self.min_validation_error_epoch = epoch + fold_idx * max_epoch
        self.min_fold_idx = fold_idx
        self.save_params()
        min_flag = True

    forward_final_output = self.feed_forward(input_data=data.test_input, is_numba=is_numba)
    test_accuracy = tff.accuracy(forward_final_output, data.test_target)
    self.test_accuracy_list.append(test_accuracy)

    # Per-layer distribution statistics.  The last layer has no activation
    # node (output layer), hence the guard on the "activation" statistics.
    for idx in range(self.hidden_layer_num + 1):
        self._append_param_stats(idx, "W")
        self._append_param_stats(idx, "b")
        self._append_output_stats(idx, "affine")
        if idx != self.hidden_layer_num:
            self._append_output_stats(idx, "activation")

    if epoch % print_period == 0:
        print(
            "Epoch {:3d} Completed - Train Error:{:6.5f} - Validation Error:{:6.5f} - Test Accuracy:{:6.5f}"
            .format(epoch, float(train_error), float(validation_error), float(test_accuracy)), end="")
        if min_flag:
            print(" <== Minimal Val. Error")
        else:
            print()

    if verbose:
        self.draw_params_histogram()
        # All W lines first, then all b lines (original print order).
        for idx in range(self.hidden_layer_num + 1):
            self._print_param_summary(idx, "W")
        for idx in range(self.hidden_layer_num + 1):
            self._print_param_summary(idx, "b")
        print()

def _append_param_stats(self, idx, kind):
    """Append mean/variance/skewness/kurtosis of parameter `kind` ('W' or 'b') at layer `idx`."""
    d = self.get_param_describe(layer_num=idx, kind=kind)
    self.param_mean_list[kind][idx].append(d.mean)
    self.param_variance_list[kind][idx].append(d.variance)
    self.param_skewness_list[kind][idx].append(d.skewness)
    self.param_kurtosis_list[kind][idx].append(d.kurtosis)

def _append_output_stats(self, idx, kind):
    """Append mean/variance/skewness/kurtosis of layer `idx` output ('affine' or 'activation')."""
    d = self.get_activation_describe(layer_num=idx, kind=kind)
    self.output_mean_list[kind][idx].append(d.mean)
    self.output_variance_list[kind][idx].append(d.variance)
    self.output_skewness_list[kind][idx].append(d.skewness)
    self.output_kurtosis_list[kind][idx].append(d.kurtosis)

def _print_param_summary(self, idx, kind):
    """Print one line of descriptive statistics for parameter `kind` of layer `idx`.

    The describe object looks like scipy.stats.describe output
    (nobs/minmax/mean/variance/skewness/kurtosis) -- TODO confirm.
    """
    desc_obj = self.get_param_describe(layer_num=idx, kind=kind)
    num = "{:10d}".format(desc_obj.nobs)
    # Renamed from `min`/`max`: the original shadowed the builtins.
    min_str = "{:5.4f}".format(desc_obj.minmax[0])
    max_str = "{:5.4f}".format(desc_obj.minmax[1])
    mean = "{:5.4f}".format(desc_obj.mean)
    variance = "{:5.4f}".format(desc_obj.variance)
    skewness = "{:5.4f}".format(desc_obj.skewness)
    kurtosis = "{:5.4f}".format(desc_obj.kurtosis)
    print(
        kind + str(idx) + '-',
        "num:{:10s}, min:{:5s}, max:{:5s}, mean:{:5s}, variance:{:5s}, skewness:{:5s}, kurtosis:{:5s}"
        .format(num, min_str, max_str, mean, variance, skewness, kurtosis))
def set_learning_process_parameters(self, data, batch_size, epoch, print_period, verbose):
    """Record one epoch's learning statistics (single-run variant, no folds).

    Samples a random training mini-batch, measures train/validation error and
    test accuracy, appends per-parameter distribution statistics (keyed
    "W0", "b0", ... unlike the nested-list layout used by
    set_learning_process_specification), and prints a progress line.

    Args:
        data: dataset object exposing train/validation/test splits and
            ``num_train_data``.
        batch_size: number of training samples drawn for the error estimate.
        epoch: current epoch index.
        print_period: print a progress line every ``print_period`` epochs.
        verbose: when True, also print per-layer parameter statistics.
    """
    # Estimate training error on a random mini-batch (not the full set).
    batch_mask = np.random.choice(data.num_train_data, batch_size)
    i_batch = data.train_input[batch_mask]
    t_batch = data.train_target[batch_mask]

    train_error = self.session.run(self.error, {
        self.input_node: i_batch,
        self.target_node: t_batch
    }, False)
    self.train_error_list.append(train_error)

    validation_error = self.session.run(self.error, {
        self.input_node: data.validation_input,
        self.target_node: data.validation_target
    }, False)
    self.validation_error_list.append(validation_error)

    forward_final_output = self.feed_forward(input_data=data.test_input)
    test_accuracy = tff.accuracy(forward_final_output, data.test_target)
    self.test_accuracy_list.append(test_accuracy)

    # Record distribution statistics for each layer's W then b, keyed by
    # concatenated name ("W0", "b0", "W1", ...) -- same order as the original
    # unrolled code.
    for idx in range(self.hidden_layer_num + 1):
        for kind in ("W", "b"):
            d = self.get_param_describe(layer_num=idx, kind=kind)
            key = kind + str(idx)
            self.param_mean_list[key].append(d.mean)
            self.param_variance_list[key].append(d.variance)
            self.param_skewness_list[key].append(d.skewness)
            self.param_kurtosis_list[key].append(d.kurtosis)

    if epoch % print_period == 0:
        print(
            "Epoch {:3d} Completed - Train Error: {:7.6f} - Validation Error: {:7.6f} - Test Accuracy: {:7.6f}".format(
                epoch, float(train_error), float(validation_error), float(test_accuracy)
            ))

    if verbose:
        self.draw_params_histogram()
        # All W lines first, then all b lines (original print order).
        for kind in ("W", "b"):
            for idx in range(self.hidden_layer_num + 1):
                self._print_param_stats_line(idx, kind)
        print()

def _print_param_stats_line(self, idx, kind):
    """Print one line of descriptive statistics for parameter `kind` of layer `idx`.

    The describe object looks like scipy.stats.describe output
    (nobs/minmax/mean/variance/skewness/kurtosis) -- TODO confirm.
    """
    desc_obj = self.get_param_describe(layer_num=idx, kind=kind)
    num = "{:10d}".format(desc_obj.nobs)
    # Renamed from `min`/`max`: the original shadowed the builtins.
    min_str = "{:5.4f}".format(desc_obj.minmax[0])
    max_str = "{:5.4f}".format(desc_obj.minmax[1])
    mean = "{:5.4f}".format(desc_obj.mean)
    variance = "{:5.4f}".format(desc_obj.variance)
    skewness = "{:5.4f}".format(desc_obj.skewness)
    kurtosis = "{:5.4f}".format(desc_obj.kurtosis)
    print(kind + str(idx) + '-',
          "num:{:10s}, min:{:5s}, max:{:5s}, mean:{:5s}, variance:{:5s}, skewness:{:5s}, kurtosis:{:5s}".format(
              num, min_str, max_str, mean, variance, skewness, kurtosis
          )
          )