Example #1
from keras.layers import Input, Dense

import buildmodel, layers, training

trn, tst = buildmodel.get_mnist()
BETA_VAL = 1e-1  # information-bottleneck trade-off parameter; value taken from Example #3
input_layer = Input((trn.X.shape[1], ))

# hidden_layers_to_add should be a list of all layers to be added before the nonlinearIB layers
hidden_layers_to_add = [
    Dense(800, activation='relu'),
    Dense(800, activation='relu'),
    Dense(2, activation='linear'),
]

# *** The following creates the layers and callbacks necessary to run nonlinearIB ***
micalculator = layers.MICalculator(BETA_VAL,
                                   model_layers=hidden_layers_to_add,
                                   data=trn.X,
                                   miN=1000)
noiselayer = layers.NoiseLayer(logvar_trainable=True, test_phase_noise=False)
micalculator.set_noiselayer(noiselayer)

# Start hooking up the layers together
cur_hidden_layer = input_layer
for l in hidden_layers_to_add:
    cur_hidden_layer = l(cur_hidden_layer)

noise_input_layer = layers.IdentityMap(
    activity_regularizer=micalculator)(cur_hidden_layer)
nonlinearIB_output_layer = noiselayer(noise_input_layer)

nonlinearIB_callback = training.KDETrain(mi_calculator=micalculator)
# *** Done setting up nonlinearIB stuff ***

decoder = Dense(800, activation='relu')(nonlinearIB_output_layer)
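
Example #1 stops at the first decoder layer. Below is a minimal sketch of how the graph might be closed off, assuming the Keras 1-style Model constructor used in Example #2; the softmax head mirrors Example #3, and the training loop itself is sketched after Example #3.

from keras.models import Model

# Hypothetical completion of Example #1: attach a softmax head and build the model.
outputs = Dense(trn.nb_classes, activation='softmax')(decoder)
model = Model(input=input_layer, output=outputs)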
Example #2
from keras.layers import Input, Dense, Dropout
from keras.models import Model

import layers, training, vib


def buildmodel(opts, trn):
    noiselayer = None
    micalculator = None

    # Build model
    inputs = Input(shape=(opts['INPUT_DIM'], ))
    model_layers = []
    cbs = []

    HIDDEN_DIMS = list(map(int, opts['encoder'].split('-')))  # list() so the dims can be indexed below (HIDDEN_DIMS[-1])
    HIDDEN_ACTS = opts['encoder_acts'].split('-')
    if opts['decoder']:
        DECODER_DIMS = list(map(int, opts['decoder'].split('-')))
    else:
        DECODER_DIMS = []
    for hndx, hdim in enumerate(HIDDEN_DIMS):
        if opts['mode'] == 'dropout':
            model_layers.append(Dropout(.2 if hndx == 0 else .5))

        layer_args = {}
        layer_args['activation'] = HIDDEN_ACTS[hndx]
        if layer_args['activation'] == 'relu':
            layer_args['init'] = 'he_uniform'
        else:
            layer_args['init'] = 'glorot_uniform'
        #if args.maxnorm is not None:
        #    import keras.constraints
        #    layer_args['W_constraint'] = keras.constraints.maxnorm(args.maxnorm)

        model_layers.append(Dense(hdim, **layer_args))

    if opts['mode'] in ['nlIB', 'nlIBnokde', 'vIB']:
        test_phase_noise = not opts['no_test_phase_noise']
        if opts['mode'] == 'nlIB' or opts['mode'] == 'nlIBnokde':
            micalculator = layers.MICalculator(
                opts['beta'],
                model_layers,
                data=trn.X,
                miN=opts['miN'],
                init_kde_logvar=opts['init_kde_logvar'])
            if opts['mode'] != 'nlIBnokde':
                cbs.append(training.KDETrain(mi_calculator=micalculator))
            noiselayer = layers.NoiseLayer(
                init_logvar=opts['init_noise_logvar'],
                logvar_trainable=opts['noise_logvar_grad_trainable'],
                test_phase_noise=test_phase_noise)
        else:
            micalculator = vib.MICalculatorVIB(opts['beta'])
            noiselayer = vib.NoiseLayerVIB(mean_dims=HIDDEN_DIMS[-1] // 2,  # half the dims are means, half variances
                                           test_phase_noise=test_phase_noise)

        micalculator.set_noiselayer(noiselayer)

        cur_layer = inputs
        for l in model_layers:
            cur_layer = l(cur_layer)
        noise_input_layer = layers.IdentityMap(
            activity_regularizer=micalculator)(cur_layer)
        del cur_layer

        cur_layer = noiselayer(noise_input_layer)

        #if not opts['noise_logvar_grad_trainable']:
        #    cbs.append(training.NoiseTrain(traindata=trn, noiselayer=noiselayer))
    else:
        cur_layer = inputs
        for l in model_layers:
            cur_layer = l(cur_layer)

    for hndx, hdim in enumerate(DECODER_DIMS):
        layer_args = {}
        layer_args['activation'] = 'relu'  # opts['DECODING_ACTS'][hndx]
        if layer_args['activation'] == 'relu':
            layer_args['init'] = 'he_uniform'
        else:
            layer_args['init'] = 'glorot_uniform'
        #if args.maxnorm is not None:
        #    import keras.constraints
        #    layer_args['W_constraint'] = keras.constraints.maxnorm(args.maxnorm)

        cur_layer = Dense(hdim, **layer_args)(cur_layer)

    predictions = Dense(trn.nb_classes,
                        init='glorot_uniform',
                        activation='softmax')(cur_layer)
    model = Model(input=inputs, output=predictions)

    return model, cbs, noiselayer, micalculator
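
The buildmodel function above is driven entirely by the opts dictionary. A hypothetical call is sketched below; only the key names are taken from the function body, while every value (layer widths, logvar initializers, optimizer, epoch count) is an illustrative assumption, trn is assumed to come from buildmodel.get_mnist() as in the other examples, and trn.Y / tst.Y are assumed to hold one-hot labels.

# Hypothetical configuration -- the keys match what buildmodel() reads, the values are illustrative only.
opts = {
    'INPUT_DIM': 784,                     # flattened MNIST input
    'encoder': '800-800-2',               # encoder widths, parsed with split('-')
    'encoder_acts': 'relu-relu-linear',   # one activation per encoder layer
    'decoder': '800',                     # decoder widths; an empty string disables the decoder loop
    'mode': 'nlIB',                       # 'nlIB', 'nlIBnokde', 'vIB', 'dropout', or anything else for a plain net
    'beta': 1e-1,
    'miN': 1000,
    'init_kde_logvar': -5.,
    'init_noise_logvar': -10.,
    'noise_logvar_grad_trainable': True,
    'no_test_phase_noise': True,
}

model, cbs, noiselayer, micalculator = buildmodel(opts, trn)
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
model.fit(trn.X, trn.Y, validation_data=(tst.X, tst.Y),
          nb_epoch=30, callbacks=cbs)    # cbs already holds the KDETrain callback when mode == 'nlIB'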
Example #3
from keras.layers import Input, Dense

import buildmodel, layers, training, reporting

trn, tst = buildmodel.get_mnist()
BETA_VAL = 1e-1

input_layer      = Input((trn.X.shape[1],))

# *** The following creates the layers and callbacks necessary to run nonlinearIB ***

# hidden_layers_to_add should be a list of all layers to be added before the nonlinearIB layers
hidden_layers_to_add = [ Dense(20, activation='relu'), ] # Hidden layer with 20 hidden units

micalculator = layers.MICalculator(BETA_VAL, hidden_layers_to_add, data=trn.X)
noiselayer = layers.NoiseLayer()
micalculator.set_noiselayer(noiselayer)

# Start hooking up the layers together
cur_hidden_layer = input_layer
for l in hidden_layers_to_add:
    cur_hidden_layer = l(cur_hidden_layer)

noise_input_layer = layers.IdentityMap(activity_regularizer=micalculator)(cur_hidden_layer)
nonlinearIB_output_layer = noiselayer(noise_input_layer)

nonlinearIB_callback = training.KDETrain(mi_calculator=micalculator)
# *** Done setting up nonlinearIB stuff ***

outputs = Dense(trn.nb_classes, activation='softmax')(nonlinearIB_output_layer)
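
Example #3 stops at the output layer. Below is a minimal sketch of compiling and training the model, assuming a Keras 1-style Model constructor and fit signature and assuming trn.Y / tst.Y hold one-hot labels; the important detail is that the KDETrain callback created above must be passed to fit() so the kernel-density estimate used by MICalculator is updated as training proceeds.

from keras.models import Model

model = Model(input=input_layer, output=outputs)
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])

model.fit(trn.X, trn.Y,
          validation_data=(tst.X, tst.Y),
          nb_epoch=30,                         # epoch count is arbitrary here
          callbacks=[nonlinearIB_callback])    # KDETrain must be included for nonlinearIB to work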