Code example #1
# Standard imports assumed for these snippets (standalone Keras);
# define_VAE, H, mcmc, U, load_drop_model and define_cdropout_3s_7s
# are helpers from the project's own modules.
import os
import pickle
import numpy as np
import keras
import keras.backend as K
from keras.models import Sequential
from keras.layers import Conv2D, MaxPooling2D, Flatten, Dense


def get_HMC_models():
    _, encoder, decoder = define_VAE()
    encoder.load_weights('save/enc_weights.h5')
    decoder.load_weights('save/dec_weights.h5')

    input_shape = (28, 28, 1)
    num_classes = 10

    def modeldef():
        # CNN architecture shared by every ensemble member
        model = Sequential()
        model.add(Conv2D(32, kernel_size=(3, 3),
                        activation='relu',
                        input_shape=input_shape))
        model.add(Conv2D(64, (3, 3), activation='relu'))
        model.add(MaxPooling2D(pool_size=(2, 2)))
        model.add(Flatten())
        model.add(Dense(128, activation='relu'))
        model.add(Dense(num_classes, activation='softmax'))

        model.compile(loss=keras.losses.categorical_crossentropy,
                    optimizer=keras.optimizers.SGD(),
                    metrics=['accuracy'])
        return model

    class HMC_model():
        def __init__(self, ensemble_weights):
            # template model used by mcmc.HMC_ensemble_predict, plus one
            # model instance per sampled weight vector for symbolic use
            self.m = modeldef()
            self.ws = ensemble_weights
            self.ms = [modeldef() for _ in self.ws]
            for model, wlist in zip(self.ms, self.ws):
                for tensor, weight in zip(model.weights, wlist):
                    K.set_value(tensor, weight)
                    
        def get_results(self, X):
            mc_preds = mcmc.HMC_ensemble_predict(self.m, self.ws, X)
            preds = np.mean(mc_preds, axis=0)
            # decompose uncertainty: predictive entropy minus the expected
            # entropy of the individual samples gives the mutual information
            predictive_entropy = H(preds)
            expected_entropy = np.mean(H(mc_preds), axis=0)
            minfo = predictive_entropy - expected_entropy
            return preds, predictive_entropy, minfo
        
        def predict(self, X):
            mc_preds = mcmc.HMC_ensemble_predict(self.m, self.ws, X)
            preds = np.mean(mc_preds, axis=0)
            return preds

        def __call__(self, X):
            """get predictions on a symbolic tensor; this is a little inefficient in terms of space"""
            return K.mean(K.stack([model(X) for model in self.ms]), axis=0)
                
    # load the HMC ensemble weight samples from the two saved runs
    with open('save/mnist_hmc_ensemble_run_1.pkl', 'rb') as pkl:
        weights = pickle.load(pkl)
    with open('save/mnist_hmc_ensemble_run.pkl', 'rb') as pkl:
        weights += pickle.load(pkl)

    hmc_model = HMC_model(weights)
    return hmc_model, encoder, decoder
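
The entropy helper H used by get_results above is not shown in these snippets. A minimal sketch consistent with the explicit entropy computation in code example #5 below (the real helper's name and location in the project are assumptions):

import numpy as np

def H(probs):
    # Shannon entropy of categorical distributions along the last axis,
    # with a small constant for numerical stability (as in code example #5)
    return -np.sum(probs * np.log(probs + 1e-10), axis=-1)
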
Code example #2
def get_models():
    _, encoder, decoder = define_VAE()
    encoder.load_weights('save/enc_weights.h5')
    decoder.load_weights('save/dec_weights.h5')

    model = keras.models.load_model('save/mnist_cnn_run_3.h5')
    mc_model = U.MCModel(model, model.input, n_mc=50)
    # we use more MC samples elsewhere, but 50 keeps this run quick
    return mc_model, encoder, decoder
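
For reference, a hypothetical usage sketch on the MNIST test set. It assumes U.MCModel exposes the same get_results(X) -> (preds, predictive_entropy, mutual_info) interface as the HMC and ML wrappers in these snippets, and that inputs are 28x28x1 images scaled to [0, 1]:

from keras.datasets import mnist
import numpy as np

mc_model, encoder, decoder = get_models()

(_, _), (x_test, y_test) = mnist.load_data()
x_test = x_test.reshape(-1, 28, 28, 1).astype('float32') / 255.

preds, pred_entropy, mutual_info = mc_model.get_results(x_test[:100])
print('accuracy:', np.mean(np.argmax(preds, axis=-1) == y_test[:100]))
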
Code example #3
def get_models_3s_7s():
    _, encoder, decoder = define_VAE()
    encoder.load_weights('save/enc_weights_3s_7s.h5')
    decoder.load_weights('save/dec_weights_3s_7s.h5')
    model = define_cdropout_3s_7s()
    model.load_weights('save/mnist_cdrop_3s_7s.h5')
    mc_model = U.MCModel(model, model.input, n_mc=50)
    # we use more MC samples elsewhere, but 50 keeps this run quick
    return mc_model, encoder, decoder
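
Code example #3 targets a model trained only on threes and sevens. A sketch of how the matching two-class test subset might be built; the exact preprocessing and label encoding expected by define_cdropout_3s_7s are not shown here, so the 3 -> 0, 7 -> 1 mapping is an assumption:

from keras.datasets import mnist
import numpy as np

(_, _), (x_test, y_test) = mnist.load_data()
mask = np.isin(y_test, [3, 7])                        # keep only 3s and 7s
x_37 = x_test[mask].reshape(-1, 28, 28, 1).astype('float32') / 255.
y_37 = (y_test[mask] == 7).astype('int64')            # assumed encoding: 3 -> 0, 7 -> 1
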
Code example #4
def get_model_ensemble(n_mc=10):
    _, encoder, decoder = define_VAE()
    encoder.load_weights('save/enc_weights.h5')
    decoder.load_weights('save/dec_weights.h5')

    models = []
    for name in filter(lambda x: 'mnist_cnn' in x, os.listdir('save')):
        print('loading model {}'.format(name))
        model = load_drop_model('save/' + name)
        models.append(model)
    mc_model = U.MCEnsembleWrapper(models, n_mc=n_mc)  # use the requested number of MC samples
    return mc_model, encoder, decoder
Code example #5
def get_ML_models():
    _, encoder, decoder = define_VAE()
    encoder.load_weights('save/enc_weights.h5')
    decoder.load_weights('save/dec_weights.h5')

    model = keras.models.load_model('save/mnist_cnn.h5')

    def get_results(X):
        preds = model.predict(X)
        ent = - np.sum(preds * np.log(preds + 1e-10), axis=-1)
        # a single deterministic model has no model uncertainty, so the
        # mutual-information term is identically zero
        return preds, ent, np.zeros(ent.shape)

    model.get_results = get_results
    return model, encoder, decoder
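
Attaching get_results to the plain maximum-likelihood model gives it the same interface as the Bayesian wrappers, so downstream evaluation code can treat all of these loaders interchangeably. A hypothetical sketch of that shared code path, reusing the preprocessed x_test from the sketch after code example #2 and assuming every wrapper provides get_results:

for loader in (get_ML_models, get_models, get_HMC_models):
    model, _, _ = loader()
    preds, entropy, mutual_info = model.get_results(x_test[:100])
    print(loader.__name__, entropy.mean(), mutual_info.mean())
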
Code example #6
def get_ML_ensemble():
    _, encoder, decoder = define_VAE()
    encoder.load_weights('save/enc_weights.h5')
    decoder.load_weights('save/dec_weights.h5')
    # run dropout layers in test mode so each ensemble member is
    # deterministic; a single pass (n_mc=1) then suffices below
    K.set_learning_phase(False)
    ms = []
    for name in filter(lambda x: 'mnist_cnn' in x, os.listdir('save')):
        print('loading model {}'.format(name))
        model = load_model('save/' + name)
        ms.append(model)

    model = U.MCEnsembleWrapper(ms, n_mc=1)
    return model, encoder, decoder