# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

# Test-suite preamble: hebel must be imported and initialized (device 0)
# before any of its submodules are pulled in, so the init call deliberately
# precedes the remaining imports.
import hebel
hebel.init(0)

import unittest
import random

import numpy as np
from pycuda import gpuarray
from pycuda.curandom import rand as curand

from hebel import sampler
from hebel.models import NeuralNet, NeuralNetRegression
from hebel.optimizers import SGD
from hebel.parameter_updaters import SimpleSGDUpdate, \
    MomentumUpdate, NesterovMomentumUpdate
from hebel.data_providers import MNISTDataProvider, BatchDataProvider
from hebel.monitors import SimpleProgressMonitor
from hebel.schedulers import exponential_scheduler, linear_scheduler_up, \
    constant_scheduler
#!/usr/bin/env python
# Example: train a dropout MLP on MNIST with Hebel.
import hebel
from hebel.models import NeuralNet
from hebel.optimizers import SGD
from hebel.parameter_updaters import MomentumUpdate
from hebel.data_providers import MNISTDataProvider
from hebel.monitors import ProgressMonitor
from hebel.schedulers import exponential_scheduler, linear_scheduler_up

# Seed the GPU RNG so runs are reproducible.
hebel.init(random_seed=0)

# Initialize data providers
train_data = MNISTDataProvider('train', batch_size=100)
validation_data = MNISTDataProvider('val')
test_data = MNISTDataProvider('test')

D = train_data.D   # Dimensionality of inputs
K = 10             # Number of classes

# Create model object: 4 hidden ReLU layers with dropout on both the
# hidden units and (at a lower rate) the inputs.
model = NeuralNet(n_in=train_data.D, n_out=K,
                  layers=[2000, 2000, 2000, 500],
                  activation_function='relu',
                  dropout=True, input_dropout=0.2)

# Create optimizer object
# NOTE(review): this chunk is truncated mid-statement; the remainder of the
# ProgressMonitor(...) argument list lies outside the visible source and is
# preserved here verbatim rather than guessed:
# progress_monitor = ProgressMonitor(experiment_name='mnist', save_model_path='examples/mnist',
#!/usr/bin/env python
# Example: train a dropout MLP on MNIST with Hebel (black-formatted variant).
import hebel
from hebel.models import NeuralNet
from hebel.optimizers import SGD
from hebel.parameter_updaters import MomentumUpdate
from hebel.data_providers import MNISTDataProvider
from hebel.monitors import ProgressMonitor
from hebel.schedulers import exponential_scheduler, linear_scheduler_up

# Seed the GPU RNG so runs are reproducible.
hebel.init(random_seed=0)

# Initialize data providers
train_data = MNISTDataProvider("train", batch_size=100)
validation_data = MNISTDataProvider("val")
test_data = MNISTDataProvider("test")

D = train_data.D  # Dimensionality of inputs
K = 10  # Number of classes

# Create model object: 4 hidden ReLU layers with dropout on both the
# hidden units and (at a lower rate) the inputs.
model = NeuralNet(
    n_in=train_data.D,
    n_out=K,
    layers=[2000, 2000, 2000, 500],
    activation_function="relu",
    dropout=True,
    input_dropout=0.2,
)

# Create optimizer object