def test_minibatch_default():
    """Minibatches without sequence cutting splits only along the batch axis.

    5 samples with batch_size=3 yield a full batch of 3 followed by a
    remainder batch of 2; the time axis (4) is left untouched. A further
    ``next`` raises StopIteration.
    """
    features = np.zeros((4, 5, 3))
    labels = np.ones((4, 5, 1))
    batches = Minibatches(
        batch_size=3, my_data=features, my_targets=labels,
        shuffle=False)(default_handler)

    expected = [
        ((4, 3, 3), (4, 3, 1)),   # first batch: 3 of the 5 samples
        ((4, 2, 3), (4, 2, 1)),   # second batch: the remaining 2 samples
    ]
    for data_shape, target_shape in expected:
        batch = next(batches)
        assert set(batch.keys()) == {'my_data', 'my_targets'}
        assert batch['my_data'].shape == data_shape
        assert batch['my_targets'].shape == target_shape

    # iterator must be exhausted after two batches
    with pytest.raises(StopIteration):
        next(batches)
def test_minibatch_with_length():
    """Minibatches with ``cut_according_to`` trims each batch's time axis.

    Sequence lengths [2, 3, 4, 1, 2] cover the 5 samples: the first batch
    (lengths 2, 3, 4) keeps all 4 time steps, the second (lengths 1, 2) is
    cut down to 2 time steps.
    """
    features = np.zeros((4, 5, 3))
    labels = np.ones((4, 5, 1))
    seq_lens = [2, 3, 4, 1, 2]
    batches = Minibatches(
        batch_size=3, cut_according_to=seq_lens,
        my_data=features, my_targets=labels,
        shuffle=False)(default_handler)

    expected = [
        ((4, 3, 3), (4, 3, 1)),   # max(2, 3, 4) == 4 -> full time axis
        ((2, 2, 3), (2, 2, 1)),   # max(1, 2) == 2 -> time axis cut to 2
    ]
    for data_shape, target_shape in expected:
        batch = next(batches)
        assert set(batch.keys()) == {'my_data', 'my_targets'}
        assert batch['my_data'].shape == data_shape
        assert batch['my_targets'].shape == target_shape

    # iterator must be exhausted after two batches
    with pytest.raises(StopIteration):
        next(batches)
# Run the generic layer test-suite against the custom Square layer.
# NOTE(review): `cfg`, `SquareLayerImpl`, `BufferStructure` and
# `run_layer_tests` are defined elsewhere in this file — not visible here.
layer = SquareLayerImpl('Square',
                        {'default': BufferStructure('T', 'B', 3)},
                        set(), set())
run_layer_tests(layer, cfg)

# ------------------------------ Demo Example ------------------------------- #

# ---------------------------- Set up Iterators ----------------------------- #
# Data directory is overridable via the environment; defaults to ../data.
data_dir = os.environ.get('BRAINSTORM_DATA_DIR', '../data')
data_file = os.path.join(data_dir, 'MNIST.hdf5')
ds = h5py.File(data_file, 'r')['normalized_split']
# Load full training/validation splits into memory ([:] materializes them).
x_tr, y_tr = ds['training']['default'][:], ds['training']['targets'][:]
x_va, y_va = ds['validation']['default'][:], ds['validation']['targets'][:]
# Mini-batch iterators with batch size 100.
getter_tr = Minibatches(100, default=x_tr, targets=y_tr)
getter_va = Minibatches(100, default=x_va, targets=y_va)

# ----------------------------- Set up Network ------------------------------ #
# Classification network on 28x28x1 MNIST images with 10 output classes;
# a linear hidden layer followed by the custom Square layer.
inp, out = bs.tools.get_in_out_layers('classification', (28, 28, 1), 10)
network = bs.Network.from_layer(
    inp >>
    FullyConnected(500, name='Hid1', activation='linear') >>
    Square(name='MySquareLayer') >>
    out)
network.initialize(bs.initializers.Gaussian(0.01))

# ----------------------------- Set up Trainer ------------------------------ #
# SGD with momentum; `trainer` is presumably configured/used further below
# (outside this excerpt).
trainer = bs.Trainer(
    bs.training.MomentumStepper(learning_rate=0.01, momentum=0.9))
import brainstorm as bs
from brainstorm.data_iterators import OneHot, Minibatches
from brainstorm.handlers import PyCudaHandler

# Fix the global RNG so runs are reproducible.
bs.global_rnd.set_seed(42)

# ---------------------------- Set up Iterators ----------------------------- #
# Data directory is overridable via the environment; defaults to ../data.
data_dir = os.environ.get('BRAINSTORM_DATA_DIR', '../data')
data_file = os.path.join(data_dir, 'HutterPrize.hdf5')
ds = h5py.File(data_file, 'r')['split']
# Load full training/validation splits into memory ([:] materializes them).
x_tr, y_tr = ds['training']['default'][:], ds['training']['targets'][:]
x_va, y_va = ds['validation']['default'][:], ds['validation']['targets'][:]
# Wrap unshuffled mini-batches in a OneHot encoder; 205 appears to be the
# character-vocabulary size for the Hutter Prize text data — TODO confirm.
getter_tr = OneHot(Minibatches(100, default=x_tr, targets=y_tr,
                               shuffle=False),
                   {'default': 205})
getter_va = OneHot(Minibatches(100, default=x_va, targets=y_va,
                               shuffle=False),
                   {'default': 205})

# ----------------------------- Set up Network ------------------------------ #
# Spec string 'L1000' presumably requests a 1000-unit LSTM layer — see
# create_net_from_spec's documentation.
network = bs.tools.create_net_from_spec('classification', 205, 205, 'L1000')
# Uncomment next line to use the GPU
# network.set_handler(PyCudaHandler())
network.initialize(bs.initializers.Gaussian(0.01))

# ----------------------------- Set up Trainer ------------------------------ #
# NOTE(review): this statement is cut off here and continues past this
# excerpt.
trainer = bs.Trainer(
import brainstorm as bs
from brainstorm.data_iterators import Minibatches
from brainstorm.handlers import PyCudaHandler
from brainstorm.initializers import Gaussian

# Fix the global RNG so runs are reproducible.
bs.global_rnd.set_seed(42)

# ----------------------------- Set up Iterators ---------------------------- #
# Data directory is overridable via the environment; defaults to ../data.
data_dir = os.environ.get('BRAINSTORM_DATA_DIR', '../data')
data_file = os.path.join(data_dir, 'CIFAR-10.hdf5')
ds = h5py.File(data_file, 'r')['normalized_split']
# Mini-batch iterators of size 100 over fully-materialized splits ([:]).
getter_tr = Minibatches(100, default=ds['training']['default'][:],
                        targets=ds['training']['targets'][:])
getter_va = Minibatches(100, default=ds['validation']['default'][:],
                        targets=ds['validation']['targets'][:])

# ------------------------------ Set up Network ----------------------------- #
# Classification on 32x32x3 CIFAR-10 images with 10 classes.
inp, fc = bs.tools.get_in_out_layers('classification', (32, 32, 3), 10)
# Conv/pool stack; NOTE(review): the expression is cut off after the last
# '>>' and continues past this excerpt.
(inp >>
 bs.layers.Convolution2D(32, kernel_size=(5, 5), padding=2, name='Conv1') >>
 bs.layers.Pooling2D(type="max", kernel_size=(3, 3), stride=(2, 2)) >>
 bs.layers.Convolution2D(32, kernel_size=(5, 5), padding=2, name='Conv2') >>
 bs.layers.Pooling2D(type="max", kernel_size=(3, 3), stride=(2, 2)) >>
 bs.layers.Convolution2D(64, kernel_size=(5, 5), padding=2, name='Conv3') >>
from brainstorm.handlers import PyCudaHandler
from time import time

# Fix the global RNG so runs are reproducible.
bs.global_rnd.set_seed(42)

# ---------------------------- Set up Iterators ----------------------------- #
# Data directory is overridable via the environment; defaults to ./data here
# (other demos in this project use '../data').
data_dir = os.environ.get('BRAINSTORM_DATA_DIR', 'data')
data_file = os.path.join(data_dir, 'MNIST.hdf5')
ds = h5py.File(data_file, 'r')['normalized_split']
# Load full training/validation splits into memory ([:] materializes them).
x_tr, y_tr = ds['training']['default'][:], ds['training']['targets'][:]
x_va, y_va = ds['validation']['default'][:], ds['validation']['targets'][:]
batch_size = 100
getter_tr = Minibatches(batch_size, default=x_tr, targets=y_tr)
getter_va = Minibatches(batch_size, default=x_va, targets=y_va)

# ----------------------------- Set up Network ------------------------------ #
# Classification on 28x28x1 MNIST images, 10 classes; classic dropout MLP:
# 20% dropout on the input, then two 1200-unit ReLU ('rel') hidden layers
# each followed by 50% dropout.
inp, fc = bs.tools.get_in_out_layers('classification', (28, 28, 1), 10,
                                     projection_name='FC')
network = bs.Network.from_layer(
    inp >>
    bs.layers.Dropout(drop_prob=0.2) >>
    bs.layers.FullyConnected(1200, name='Hid1', activation='rel') >>
    bs.layers.Dropout(drop_prob=0.5) >>
    bs.layers.FullyConnected(1200, name='Hid2', activation='rel') >>
    bs.layers.Dropout(drop_prob=0.5) >>
    fc
)