Example #1
def create_network():
    init = Kaiming()
    padding = dict(pad_d=1, pad_h=1, pad_w=1)
    strides = dict(str_d=2, str_h=2, str_w=2)
    dilation = dict(dil_d=2, dil_h=2, dil_w=2)
    common = dict(init=init, batch_norm=True, activation=Rectlin())
    layers = [
        Conv((9, 9, 9, 16),
             padding=padding,
             strides=strides,
             init=init,
             activation=Rectlin()),
        Conv((5, 5, 5, 32), dilation=dilation, **common),
        Conv((3, 3, 3, 64), dilation=dilation, **common),
        Pooling((2, 2, 2), padding=padding, strides=strides),
        Conv((2, 2, 2, 128), **common),
        Conv((2, 2, 2, 128), **common),
        Conv((2, 2, 2, 128), **common),
        Conv((2, 2, 2, 256), **common),
        Conv((2, 2, 2, 1024), **common),
        Conv((2, 2, 2, 4096), **common),
        Conv((2, 2, 2, 2048), **common),
        Conv((2, 2, 2, 1024), **common),
        Dropout(),
        Affine(2,
               init=Kaiming(local=False),
               batch_norm=True,
               activation=Softmax())
    ]
    return Model(layers=layers)
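create_network() above returns only the Model; a minimal usage sketch (the cost, optimizer, and train_set here are assumptions, following the pattern of the later examples):

# hypothetical usage -- cost/optimizer choices and train_set are assumptions
model = create_network()
cost = GeneralizedCost(costfunc=CrossEntropyMulti())
opt = GradientDescentMomentum(0.1, 0.9, wdecay=0.0001)
model.fit(train_set, optimizer=opt, num_epochs=10, cost=cost,
          callbacks=Callbacks(model))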
Example #2
 def constructCNN(self):
     layers = []
     if self.network_type == "idsia":
         layers.append(
             Conv((3, 3, 100),
                  strides=1,
                  init=Kaiming(),
                  bias=Constant(0.0),
                  activation=Rectlin(),
                  name="Conv1"))
         layers.append(Pooling(2, op="max", strides=2, name="neon_pool1"))
         layers.append(
             Conv((4, 4, 150),
                  strides=1,
                  init=Kaiming(),
                  bias=Constant(0.0),
                  activation=Rectlin(),
                  name="Conv2"))
         layers.append(Pooling(2, op="max", strides=2, name="neon_pool2"))
         layers.append(
             Conv((3, 3, 250),
                  strides=1,
                  init=Kaiming(),
                  bias=Constant(0.0),
                  activation=Rectlin(),
                  name="Conv3"))
         layers.append(Pooling(2, op="max", strides=2, name="neon_pool3"))
         layers.append(
             Affine(nout=200,
                    init=Kaiming(local=False),
                    bias=Constant(0.0),
                    activation=Rectlin(),
                    name="neon_fc1"))
         layers.append(
             Affine(nout=self.class_num,
                    init=Kaiming(local=False),
                    bias=Constant(0.0),
                    activation=Softmax(),
                    name="neon_fc2"))
     elif self.network_type == "resnet-56":
         layers = resnet(9, self.class_num,
                         int(self.resize_size[0] / 4))  # 6*9 + 2 = 56
     elif self.network_type == "resnet-32":
         layers = resnet(5, self.class_num,
                         int(self.resize_size[0] / 4))  # 6*5 + 2 = 32
     elif self.network_type == "resnet-20":
         layers = resnet(3, self.class_num,
                         int(self.resize_size[0] / 4))  # 6*3 + 2 = 20
     return layers
Example #3
def conv_params(fsize, nfm, strides=1, relu=True, batch_norm=True):
    return dict(fshape=(fsize, fsize, nfm),
                strides=strides,
                activation=(Rectlin() if relu else None),
                padding=(fsize // 2),
                batch_norm=batch_norm,
                init=Kaiming(local=True))
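For reference, a call such as conv_params(3, 16) (used in Example #4 below) expands to:

# conv_params(3, 16) evaluates to:
# dict(fshape=(3, 3, 16), strides=1, activation=Rectlin(),
#      padding=1, batch_norm=True, init=Kaiming(local=True))
layer = Conv(**conv_params(3, 16))  # 3x3 conv, 16 feature maps, padding 1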
Example #4
def create_network(stage_depth):
    # Structure of the deep residual part of the network:
    # stage_depth modules of 2 convolutional layers each at feature map depths of 16, 32, 64
    nfms = [2**(stage + 4) for stage in sorted(list(range(3)) * stage_depth)]
    strides = [
        1 if cur == prev else 2 for cur, prev in zip(nfms[1:], nfms[:-1])
    ]

    # Now construct the network
    layers = [Conv(**conv_params(3, 16))]
    layers.append(module_s1(nfms[0], True))

    for nfm, stride in zip(nfms[1:], strides):
        res_module = module_s1(nfm) if stride == 1 else module_s2(nfm)
        layers.append(res_module)
    layers.append(BatchNorm())
    layers.append(Activation(Rectlin()))
    layers.append(Pooling('all', op='avg'))
    layers.append(
        Affine(10,
               init=Kaiming(local=False),
               batch_norm=True,
               activation=Softmax()))

    return Model(layers=layers), GeneralizedCost(costfunc=CrossEntropyMulti())
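The nfms/strides comprehensions are easier to follow once expanded; a worked example for stage_depth=2:

nfms = [2**(stage + 4) for stage in sorted(list(range(3)) * 2)]
# -> [16, 16, 32, 32, 64, 64]
strides = [1 if cur == prev else 2 for cur, prev in zip(nfms[1:], nfms[:-1])]
# -> [1, 2, 1, 2, 1]  (stride 2 exactly where the feature-map depth doubles)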
Example #5
    def __init__(self, depth=9):
        self.depth = depth

        train = (3, 32, 32)

        nfms = [2**(stage + 4) for stage in sorted(list(range(3)) * depth)]
        strides = [
            1 if cur == prev else 2 for cur, prev in zip(nfms[1:], nfms[:-1])
        ]

        # Now construct the network
        layers = [Conv(**self.conv_params(3, 16))]
        layers.append(self.module_s1(nfms[0], True))

        for nfm, stride in zip(nfms[1:], strides):
            res_module = (self.module_s1(nfm)
                          if stride == 1 else self.module_s2(nfm))
            layers.append(res_module)
        layers.append(BatchNorm())
        layers.append(Activation(Rectlin()))
        layers.append(Pooling('all', op='avg'))
        layers.append(
            Affine(10,
                   init=Kaiming(local=False),
                   batch_norm=True,
                   activation=Softmax()))
        self.layers = layers
        model = Model(layers=layers)
        cost = GeneralizedCost(costfunc=CrossEntropyMulti())
        model.initialize(train, cost=cost)
        self.model = model
Example #6
def create_network(stage_depth):
    if stage_depth == 18:
        stages = (2, 2, 2, 2)
    elif stage_depth in (34, 50):
        stages = (3, 4, 6, 3)
    elif stage_depth in (68, 101):
        stages = (3, 4, 23, 3)
    elif stage_depth in (102, 152):
        stages = (3, 8, 36, 3)
    else:
        raise ValueError('Invalid stage_depth value {}'.format(stage_depth))

    bottleneck = False
    if stage_depth in (50, 101, 152):
        bottleneck = True

    layers = [Conv(**conv_params(7, 64, strides=2)), Pooling(3, strides=2)]

    # Structure of the deep residual part of the network:
    # stage_depth modules of 2 convolutional layers each at feature map depths
    # of 64, 128, 256, 512
    nfms = list(
        itt.chain.from_iterable(
            [itt.repeat(2**(x + 6), r) for x, r in enumerate(stages)]))
    # NOTE: -1 is a sentinel stride for the first module; module_factory
    # (not shown here) handles it specially
    strides = [-1] + [
        1 if cur == prev else 2 for cur, prev in zip(nfms[1:], nfms[:-1])
    ]

    for nfm, stride in zip(nfms, strides):
        layers.append(module_factory(nfm, bottleneck, stride))

    layers.append(Pooling('all', op='avg'))
    layers.append(
        Affine(nout=1000, init=Kaiming(local=False), activation=Softmax()))
    return Model(layers=layers), GeneralizedCost(costfunc=CrossEntropyMulti())
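The itertools expression builds one feature-map depth per residual module; expanded for stages = (2, 2, 2, 2), i.e. stage_depth=18:

import itertools as itt
nfms = list(itt.chain.from_iterable(
    [itt.repeat(2**(x + 6), r) for x, r in enumerate((2, 2, 2, 2))]))
# nfms    -> [64, 64, 128, 128, 256, 256, 512, 512]
# strides -> [-1, 1, 2, 1, 2, 1, 2, 1]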
Example #7
def gen_model(num_channels, height, width):
    assert NervanaObject.be is not None, 'need to generate a backend before using this function'

    init_uni = Kaiming()

    # one caveat: the reference model has bias layers, but neon does not allow both batch norm and bias
    conv_common = dict(padding=1, init=init_uni, activation=Rectlin(), batch_norm=True)

    # set up the layers
    layers = []

    # need to store a ref to the pooling layers to pass
    # to the upsampling layers to get the argmax indices
    # for upsampling; this stack holds the pooling layer refs
    pool_layers = []

    # first loop generates the encoder layers
    nchan = [64, 128, 256, 512, 512]
    for ind in range(len(nchan)):
        nchanu = nchan[ind]
        lrng = 2 if ind <= 1 else 3
        for lind in range(lrng):
            nm = 'conv%d_%d' % (ind+1, lind+1)
            layers.append(Conv((3, 3, nchanu), strides=1, name=nm, **conv_common))

        layers.append(Pooling(2, strides=2, name='conv%d_pool' % ind))
        pool_layers.append(layers[-1])
        if ind >= 2:
            layers.append(Dropout(keep=0.5, name='drop%d' % (ind+1)))

    # this loop generates the decoder layers
    for ind in range(len(nchan)-1,-1,-1):
        nchanu = nchan[ind]
        lrng = 2 if ind <= 1 else 3
        # upsampling layers need a ref to the corresponding pooling layer
        # to access the argmax indices for upsampling
        layers.append(Upsampling(2, pool_layers.pop(), strides=2, padding=0,
                      name='conv%d_unpool' % ind))
        for lind in range(lrng):
            nm = 'deconv%d_%d' % (ind+1, lind+1)
            if ind < 4 and lind == lrng-1:
                nchanu = nchan[ind] // 2  # integer division keeps the channel count an int
            layers.append(Conv((3, 3, nchanu), strides=1, name=nm, **conv_common))
            if ind == 0:
                break
        if ind >= 2:
            layers.append(Dropout(keep=0.5, name='drop%d' % (ind+1)))

    # last conv layer outputs 12 channels, 1 for each output class
    # with a pixelwise softmax over the channels
    act_last = PixelwiseSoftmax(num_channels, height, width, name="PixelwiseSoftmax")
    conv_last = dict(padding=1, init=init_uni, activation=act_last, batch_norm=False)
    layers.append(Conv((3, 3, num_channels), strides=1, name='deconv_out', **conv_last))
    return layers
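gen_model returns a plain layer list; a hypothetical sketch of wrapping it into a trainable model (the height/width values and the cost choice are assumptions; the 12 classes come from the comment above):

# hypothetical usage -- input sizes and cost are assumptions
layers = gen_model(num_channels=12, height=256, width=512)
model = Model(layers=layers)
cost = GeneralizedCost(costfunc=CrossEntropyMulti())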
Example #8
 def layers(self):
     bn = True
     return [
         (Conv((7, 7, 96), init=Kaiming(), activation=Explin(), batch_norm=bn, padding=3, strides=1)
          if self.bn_first_layer else
          Conv((7, 7, 96), init=Kaiming(), bias=Constant(0), activation=Explin(), padding=3, strides=1)),
         Pooling(3, strides=2, padding=1),
         Conv((7, 7, 128), init=Kaiming(), activation=Explin(), batch_norm=bn, padding=3, strides=1),
         Pooling(3, strides=2, padding=1),
         Conv((5, 5, 256), init=Kaiming(), activation=Explin(), batch_norm=bn, padding=2, strides=1),
         Pooling(3, strides=2, padding=1),
         Conv((3, 3, 384), init=Kaiming(), activation=Explin(), batch_norm=bn, padding=1, strides=1),
         Conv((3, 3, 384), init=Kaiming(), activation=Explin(), batch_norm=bn, padding=1, strides=1),
         Conv((3, 3, 384), init=Kaiming(), activation=Explin(), batch_norm=bn, padding=1, strides=1),
         Pooling(3, strides=2, padding=1, op='avg'),
         Affine(nout=self.noutputs, init=Kaiming(), activation=Explin(), batch_norm=bn),
         Affine(nout=self.noutputs, init=Kaiming(), activation=Explin(), batch_norm=bn),
         Affine(nout=self.noutputs, init=Kaiming(), bias=Constant(0),
                activation=Softmax() if self.use_softmax else Logistic(shortcut=True))
     ]
Example #9
    def input_layers(self, analytics_input, init, activation, gate):
        """
        return the input layers. we currently support convolutional and LSTM
        :return:
        """
        if self.recurrent:
            if analytics_input:
                # support analytics + content
                input_layers = MergeMultistream([[
                    LSTM(300,
                         init,
                         init_inner=Kaiming(),
                         activation=activation,
                         gate_activation=gate,
                         reset_cells=True),
                    RecurrentSum()
                ], [Affine(30, init, activation=activation)]], 'stack')
            else:
                # content only
                input_layers = [
                    LSTM(300,
                         init,
                         init_inner=Kaiming(),
                         activation=activation,
                         gate_activation=gate,
                         reset_cells=True),
                    RecurrentSum()
                ]
        else:
            if analytics_input:
                # support analytics + content
                input_layers = MergeMultistream([
                    self.conv_net(activation),
                    [Affine(30, init, activation=Logistic())]
                ], 'stack')
            else:
                # content only
                input_layers = self.conv_net(activation)

        return input_layers
Example #10
    def conv_net(self, activation, init=Kaiming(), version=-1):
        width = max([self.width, self.lookup_dim])
        if version == -1:
            if self.lookup_size:
                pre_layers = [
                    LookupTable(vocab_size=self.lookup_size,
                                embedding_dim=width,
                                init=GlorotUniform()),
                    Reshape((1, self.num_words, width)),
                ]
                first_width = width
            else:
                pre_layers = [
                    Conv((1, width, width),
                         padding=0,
                         init=init,
                         activation=activation)
                ]
                first_width = 1

            return pre_layers + \
                   [
                       MergeBroadcast(
                           [
                               [
                                   Conv((3, first_width, 15), padding={'pad_h': 1, 'pad_w': 0}, init=init,
                                        activation=activation)
                               ],
                               [
                                   Conv((5, first_width, 15), padding={'pad_h': 2, 'pad_w': 0}, init=init,
                                        activation=activation)
                               ],
                               [
                                   Conv((7, first_width, 15), padding={'pad_h': 3, 'pad_w': 0}, init=init,
                                        activation=activation)
                               ],
                           ],
                           merge='depth'
                       ),
                       NoisyDropout(keep=0.5, noise_pct=1.0, noise_std=0.001),
                       Conv((5, 1, 15), strides={'str_h': 2 if self.num_words > 59 else 1,
                                                 'str_w': 1}, padding=0, init=init,
                            activation=activation),
                       NoisyDropout(keep=0.9, noise_pct=1.0, noise_std=0.00001),
                       Conv((3, 1, 9), strides={'str_h': 2, 'str_w': 1}, padding=0, init=init,
                            activation=activation),
                       NoisyDropout(keep=0.9, noise_pct=1.0, noise_std=0.00001),
                       Conv((9, 1, 9), strides={'str_h': 2, 'str_w': 1}, padding=0, init=init,
                            activation=activation)
                   ]
Example #11
 def conv_params(fsize,
                 nfm,
                 padding='SAME',
                 strides=1,
                 activation=Rectlin(),
                 batch_norm=True):
     fsize = fsize if isinstance(fsize, tuple) else (fsize, fsize)
     fshape = fsize + (nfm, )
     padding = {
         'pad_h': (fsize[0] // 2 if padding == 'SAME' else 0),
         'pad_w': (fsize[1] // 2 if padding == 'SAME' else 0),
         'pad_d': 0
     }
     strides = {'str_h': strides, 'str_w': strides, 'str_d': 1}
     return dict(fshape=fshape,
                 strides=strides,
                 activation=activation,
                 padding=padding,
                 batch_norm=batch_norm,
                 init=Kaiming(local=True))
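A quick check of the 'SAME' arithmetic: for fsize=3 the padding works out to 1 on each side, which preserves the spatial size at stride 1:

params = conv_params(3, 64)
# params['padding'] -> {'pad_h': 1, 'pad_w': 1, 'pad_d': 0}
# params['strides'] -> {'str_h': 1, 'str_w': 1, 'str_d': 1}
# output height: (H + 2*1 - 3)//1 + 1 == H, i.e. 'SAME'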
Example #12
def resnet(depth, num_classes, s):
    # Structure of the deep residual part of the network:
    # args.depth modules of 2 convolutional layers each at feature map depths of 16, 32, 64
    nfms = [2**(stage + 4) for stage in sorted(list(range(3)) * depth)]
    strides = [1] + [
        1 if cur == prev else 2 for cur, prev in zip(nfms[1:], nfms[:-1])
    ]

    # Now construct the network
    layers = [Conv(**conv_params(3, 16))]
    for nfm, stride in zip(nfms, strides):
        layers.append(module_factory(nfm, stride))
    layers.append(Pooling(s, op='avg'))
    layers.append(
        Affine(nout=num_classes,
               init=Kaiming(local=False),
               batch_norm=True,
               activation=Softmax()))

    return layers
Example #13
nfms = [2**(stage + 4) for stage in sorted(list(range(3)) * args.depth)]
strides = [1 if cur == prev else 2 for cur, prev in zip(nfms[1:], nfms[:-1])]

# Now construct the network
layers = [Conv(**conv_params(3, 16))]
layers.append(module_s1(nfms[0], True))

for nfm, stride in zip(nfms[1:], strides):
    res_module = module_s1(nfm) if stride == 1 else module_s2(nfm)
    layers.append(res_module)
layers.append(BatchNorm())
layers.append(Activation(Rectlin()))
layers.append(Pooling('all', op='avg'))
layers.append(
    Affine(10,
           init=Kaiming(local=False),
           batch_norm=True,
           activation=Softmax()))

model = Model(layers=layers)
opt = GradientDescentMomentum(0.1,
                              0.9,
                              wdecay=0.0001,
                              schedule=Schedule([82, 124], 0.1))

# configure callbacks
valmetric = Misclassification()
callbacks = Callbacks(model,
                      eval_set=test,
                      metric=valmetric,
                      **args.callback_args)
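The snippet stops before training; the matching fit call, as written out in Example #17 below, would be:

cost = GeneralizedCost(costfunc=CrossEntropyMulti())
model.fit(train, optimizer=opt, num_epochs=args.epochs, cost=cost,
          callbacks=callbacks)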
Example #14
def create_network():
    layers = [
        DataTransform(transform=Normalizer(divisor=128.)),
        Conv((11, 11, 96),
             init=Kaiming(),
             activation=Rectlin(),
             strides=4,
             padding=1),
        Conv((1, 1, 96), init=Kaiming(), activation=Rectlin(), strides=1),
        Conv((3, 3, 96),
             init=Kaiming(),
             activation=Rectlin(),
             strides=2,
             padding=1),  # 54->27
        Conv((5, 5, 256), init=Kaiming(), activation=Rectlin(),
             strides=1),  # 27->23
        Conv((1, 1, 256), init=Kaiming(), activation=Rectlin(), strides=1),
        Conv((3, 3, 256),
             init=Kaiming(),
             activation=Rectlin(),
             strides=2,
             padding=1),  # 23->12
        Conv((3, 3, 384),
             init=Kaiming(),
             activation=Rectlin(),
             strides=1,
             padding=1),
        Conv((1, 1, 384), init=Kaiming(), activation=Rectlin(), strides=1),
        Conv((3, 3, 384),
             init=Kaiming(),
             activation=Rectlin(),
             strides=2,
             padding=1),  # 12->6
        Dropout(keep=0.5),
        Conv((3, 3, 1024),
             init=Kaiming(),
             activation=Rectlin(),
             strides=1,
             padding=1),
        Conv((1, 1, 1024), init=Kaiming(), activation=Rectlin(), strides=1),
        Conv((1, 1, 1000), init=Kaiming(), activation=Rectlin(), strides=1),
        Pooling(6, op='avg'),
        Activation(Softmax())
    ]

    return Model(layers=layers), GeneralizedCost(costfunc=CrossEntropyMulti())
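The inline size comments follow the standard convolution arithmetic, out = (in + 2*pad - kernel)//stride + 1; a sketch verifying the first two annotations (assuming 224x224 input):

def out_size(in_size, kernel, stride, pad):
    # standard convolution output-size formula
    return (in_size + 2 * pad - kernel) // stride + 1

out_size(224, 11, 4, 1)  # -> 54  (first 11x11, stride-4 conv)
out_size(54, 3, 2, 1)    # -> 27  (first strided 3x3 conv)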
Example #15
 def layers(self):
     bn = True
     return [
         # input 128
         Conv((7, 7, 96),
              init=Kaiming(),
              bias=Constant(0),
              activation=Explin(),
              padding=3,
              strides=1),
         Pooling(3, strides=2, padding=1),
         # 64
         Conv((7, 7, 128),
              init=Kaiming(),
              activation=Explin(),
              batch_norm=bn,
              padding=3,
              strides=1),
         Pooling(3, strides=2, padding=1),
         # 32
         Conv((5, 5, 256),
              init=Kaiming(),
              activation=Explin(),
              batch_norm=bn,
              padding=2,
              strides=1),
         Pooling(3, strides=2, padding=1),
         # 16
         Conv((3, 3, 384),
              init=Kaiming(),
              activation=Explin(),
              batch_norm=bn,
              padding=1,
              strides=1),
         Conv((3, 3, 384),
              init=Kaiming(),
              activation=Explin(),
              batch_norm=bn,
              padding=1,
              strides=1),
         Conv((3, 3, 384),
              init=Kaiming(),
              activation=Explin(),
              batch_norm=bn,
              padding=1,
              strides=1),
         Pooling(3, strides=2, padding=1),
         # 8
         Conv((3, 3, 8192),
              init=Kaiming(),
              activation=Explin(),
              batch_norm=bn,
              padding=1,
              strides=1),
         Pooling('all', op='avg'),
         Affine(nout=self.noutputs,
                init=Kaiming(),
                bias=Constant(0),
                activation=Softmax() if self.use_softmax else Logistic(
                    shortcut=True))
     ]
Example #16
 def layers(self):
     bn = True
     return [
         # input 128
         Conv((7, 7, 64),
              init=Kaiming(),
              bias=Constant(0),
              activation=Explin(),
              padding=3,
              strides=1),
         Pooling(3, strides=2, padding=1),
         # 64
         Conv((3, 3, 96),
              init=Kaiming(),
              activation=Explin(),
              batch_norm=bn,
              padding=1,
              strides=1),
         Conv((3, 3, 96),
              init=Kaiming(),
              activation=Explin(),
              batch_norm=bn,
              padding=1,
              strides=1),
         Pooling(3, strides=2, padding=1),
         # 32
         Conv((3, 3, 192),
              init=Kaiming(),
              activation=Explin(),
              batch_norm=bn,
              padding=1,
              strides=1),
         Conv((3, 3, 192),
              init=Kaiming(),
              activation=Explin(),
              batch_norm=bn,
              padding=1,
              strides=1),
         Pooling(3, strides=2, padding=1),
         # 16
         Conv((3, 3, 384),
              init=Kaiming(),
              activation=Explin(),
              batch_norm=bn,
              padding=1,
              strides=1),
         Conv((3, 3, 384),
              init=Kaiming(),
              activation=Explin(),
              batch_norm=bn,
              padding=1,
              strides=1),
         Conv((3, 3, 384),
              init=Kaiming(),
              activation=Explin(),
              batch_norm=bn,
              padding=1,
              strides=1),
          # this 4th deep layer may have been used for the vgg3pool64all run; it cannot fit for the 6-fold run, so it is commented out
         #Conv((3, 3, 384), init=Kaiming(), activation=Explin(), batch_norm=bn, padding=1, strides=1),
         Pooling(3, strides=2, padding=1),
         # 8
         Conv((3, 3, 6144),
              init=Kaiming(),
              activation=Explin(),
              batch_norm=bn,
              padding=1,
              strides=1),
         Pooling('all', op='avg'),
         Affine(nout=self.noutputs,
                init=Kaiming(),
                bias=Constant(0),
                activation=Softmax() if self.use_softmax else Logistic(
                    shortcut=True))
     ]
Example #17
def module_factory(nfm, stride=1):
    mainpath = [Conv(**conv_params(3, nfm, stride=stride)),
                Conv(**conv_params(3, nfm, relu=False))]
    sidepath = [SkipNode() if stride == 1 else Conv(**id_params(nfm))]
    module = [MergeSum([mainpath, sidepath]),
              Activation(Rectlin())]
    return module

# Structure of the deep residual part of the network:
# args.depth modules of 2 convolutional layers each at feature map depths of 16, 32, 64
nfms = [2**(stage + 4) for stage in sorted(list(range(3)) * args.depth)]
strides = [1] + [1 if cur == prev else 2 for cur, prev in zip(nfms[1:], nfms[:-1])]

# Now construct the network
layers = [Conv(**conv_params(3, 16))]
for nfm, stride in zip(nfms, strides):
    layers.append(module_factory(nfm, stride))
layers.append(Pooling(8, op='avg'))
layers.append(Affine(nout=10, init=Kaiming(local=False), batch_norm=True, activation=Softmax()))

model = Model(layers=layers)
opt = GradientDescentMomentum(0.1, 0.9, wdecay=0.0001,
                              schedule=Schedule([90, 123], 0.1))

# configure callbacks
callbacks = Callbacks(model, eval_set=test, metric=Misclassification(), **args.callback_args)
cost = GeneralizedCost(costfunc=CrossEntropyMulti())

model.fit(train, optimizer=opt, num_epochs=args.epochs, cost=cost, callbacks=callbacks)
Example #18
# setup backend
be = gen_backend(**extract_valid_args(args, gen_backend))
#be.enable_winograd = 4  # default to winograd 4 for fast autotune

SUBSET = args.subset
train_set = HDF5IteratorOneHot('/mnt/data/medical/luna16/luna16_roi_except_subset{}_augmented.h5'.format(SUBSET), \
                                 flip_enable=True, rot90_enable=True, crop_enable=False, border_size=5)


valid_set = HDF5IteratorOneHot('/mnt/data/medical/luna16/luna16_roi_subset{}_augmented.h5'.format(SUBSET), \
                                flip_enable=False, rot90_enable=False, crop_enable=False, border_size=5)

print('Using subset{}'.format(SUBSET))

init_uni = Kaiming()

relu = Rectlin()
bn = True
convp1 = dict(init=init_uni, batch_norm=bn, activation=relu, padding=1)

layers = [
    Conv((5, 5, 24), **convp1),
    Pooling(2, op='max'),
    Conv((3, 3, 32), **convp1),
    Pooling(2, op='max'),
    Conv((3, 3, 48), **convp1),
    Pooling('all', op='avg'),
    Affine(512, init=init_uni, batch_norm=True, activation=relu),
    Affine(2, init=init_uni, activation=Softmax())
]
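The example stops at the layer list; a hypothetical continuation in the style of the other examples (the optimizer settings are assumptions):

# hypothetical continuation -- optimizer settings are assumptions
model = Model(layers=layers)
cost = GeneralizedCost(costfunc=CrossEntropyMulti())
opt = GradientDescentMomentum(0.1, 0.9, wdecay=0.0001)
callbacks = Callbacks(model, eval_set=valid_set, metric=Misclassification())
model.fit(train_set, optimizer=opt, num_epochs=args.epochs, cost=cost,
          callbacks=callbacks)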
Example #19
# layers.append(Affine(nout=100, init=Kaiming(local=False), batch_norm=True, activation=Softmax()))

scales = [112, 128, 160, 240]

for scale in scales:
    print(scale)

    layers = []
    layers += [Conv(**conv_params(7, 32, 2))]
    for nfm, stride in zip(nfms, strides):
        layers.append(module_factory(nfm, stride))
    layers.append(Pooling(7, op='avg'))

    layers.append(
        Conv(fshape=(1, 1, 100), init=Kaiming(local=True), batch_norm=True))
    layers.append(Pooling(fshape='all', op='avg'))
    layers.append(Activation(Softmax()))

    model = Model(layers=layers)
    test = ImageLoader(set_name='validation',
                       shuffle=False,
                       do_transforms=False,
                       inner_size=scale,
                       scale_range=scale,
                       repo_dir=args.data_dir)

    model.load_params("/home/users/hunter/bigfeat_dropout.pkl")

    softmaxes = model.get_outputs(test)
    from neon.util.persist import save_obj