# Example #1 (0 votes)
def run(model, train_set=None, test_sets=None):
    """Train *model* on *train_set*, then validate it on each set in *test_sets*.

    Relies on module-level names not visible in this chunk:
    ``DataController``, ``batch_size``, ``validation_mode``, ``logging``.

    Args:
        model: object exposing ``train``, ``load``, ``validate`` and
            ``calc_mean_and_dist`` (presumably a project model wrapper —
            confirm against caller).
        train_set: data list passed to ``DataController`` for training.
        test_sets: iterable of data lists, one validation pass each.
    """
    # Fix: the original used mutable default arguments ([]), which are
    # shared across calls; use a None sentinel instead.
    train_set = [] if train_set is None else train_set
    test_sets = [] if test_sets is None else test_sets

    dataController = DataController(batch_size=batch_size, data_list=train_set)
    # Lazy %-args: formatting is skipped entirely when INFO is disabled.
    logging.info("Training on: %s", train_set)
    model.train(dataController)

    for test_set in test_sets:
        dataController = DataController(batch_size=batch_size, data_list=test_set)
        # Reload the trained weights before each validation pass.
        model.load()
        if validation_mode == 'OpenMax':
            model.calc_mean_and_dist(dataController, 4)
        logging.info("Validate on: %s", test_set)
        model.validate(dataController, 4, mode=validation_mode)
# Example #2 (0 votes)
def prepare_data(labels):
    """Encode data with a pre-trained autoencoder's bottleneck layer.

    Loads the saved autoencoder from the ``'model'`` directory, builds a
    sub-model that stops at the ``'enc'`` layer, runs every generated batch
    through it, and pickles the resulting ``{label: np.ndarray}`` mapping
    to ``'encoded_datas.pkl'``.

    Relies on module-level names not visible in this chunk:
    ``keras``, ``Model``, ``DataController``, ``batch_size``,
    ``normal_labels``, ``np``, ``pickle``.

    Args:
        labels: iterable of label keys for the result dict.

    Returns:
        dict mapping each label to a ``np.ndarray`` of encoder outputs.
    """
    datas = {}
    autoencoder = keras.models.load_model('model')

    # Sub-model exposing the bottleneck ('enc') activations as its output.
    layer_name = 'enc'
    encoded_layer = Model(inputs=autoencoder.input,
                          outputs=autoencoder.get_layer(layer_name).output)

    for label in labels:
        print("Getting encoded data for", label)
        datas[label] = []
        # NOTE(review): each iteration loads `normal_labels`, not `label`,
        # so every entry of `datas` ends up encoding the same data.
        # This looks like a bug — confirm whether `data_list=label`
        # (or `[label]`) was intended before changing it.
        dataController = DataController(batch_size=batch_size,
                                        data_list=normal_labels)
        while True:  # generate('full') returns False when exhausted
            data = dataController.generate('full')
            if data is False:
                break
            enc_out = encoded_layer.predict(data["x"])
            datas[label].extend(enc_out)
        datas[label] = np.array(datas[label])

    # Persist the encoded features for later reuse.
    with open('encoded_datas.pkl', 'wb') as f:
        pickle.dump(datas, f, pickle.HIGHEST_PROTOCOL)

    return datas