Example #1
def evaluate_importance(model_data: ModelData, importance_type: ImportanceType, importance_calculation: ImportanceCalculation):
    # Reload the stored model and configure the importance evaluator with the
    # requested importance type and calculation.
    model_data.reload_model()
    importance_handler: ImportanceEvaluator = ImportanceEvaluator(model_data)
    importance_handler.setup(importance_type, importance_calculation)

    # Feed the prepared MNIST split for the model's class selection into the evaluator
    # and generate the evaluation data.
    (x_train, y_train), (x_test, y_test), input_shape, num_classes = get_prepared_data(model_data.get_class_selection())
    importance_handler.set_train_and_test_data(x_train, y_train, x_test, y_test)
    importance_handler.create_evaluation_data(10)
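For reference, a minimal driver for this helper might look like the sketch below. Only ModelData, its constructor-by-name, and the function signature above are taken from these examples; the import location of ImportanceType and ImportanceCalculation is an assumption, as is the detail that both are standard Enum classes that can be iterated.

from data.model_data import ModelData
# Assumed import location for the two enums; adjust to the project's actual module.
from evaluation.evaluator import ImportanceType, ImportanceCalculation

model_data: ModelData = ModelData("default_all")

# Run the evaluation for every combination of importance type and calculation,
# assuming both are standard Python Enum classes.
for importance_type in ImportanceType:
    for importance_calculation in ImportanceCalculation:
        evaluate_importance(model_data, importance_type, importance_calculation)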
Example #2
def create(name: str, batch_size: int, epochs: int, layer_data: List[int], learning_rate: float = 0.001,
           regularized: bool = False, train_type: ModelTrainType = ModelTrainType.BALANCED, main_class: int = None,
           other_class_percentage: float = None, class_selection: List[int] = None) -> ModelData:
    logging.info("Create MNIST neural network model with training type \"%s\"." % train_type.name)

    # Load either the balanced or the artificially unbalanced MNIST split.
    if train_type is not ModelTrainType.UNBALANCED:
        (x_train, y_train), (x_test, y_test), input_shape, num_classes = get_prepared_data(class_selection)
    else:
        (x_train, y_train), (x_test, y_test), input_shape, num_classes = get_unbalance_data(main_class,
                                                                                            other_class_percentage,
                                                                                            class_selection)
    logging.info("Train examples: %i" % x_train.shape[0])
    logging.info("Test examples: %i" % x_test.shape[0])

    # A class selection restricts the output layer to the selected classes.
    if class_selection is not None:
        num_classes = len(class_selection)

    # Build the model and train it, unless an untrained model was requested.
    model: Model = build_mnist_model(layer_data, num_classes, input_shape, learning_rate, regularized)
    if train_type is not ModelTrainType.UNTRAINED:
        model.fit(x_train, y_train, batch_size=batch_size, epochs=epochs, verbose=1, validation_data=(x_test, y_test))

    # Record the layer sizes (input, hidden layers, output) and the training parameters.
    model_description: str = generate_model_description(batch_size, epochs, model.layers, learning_rate)
    model_layer_nodes: List[int] = [input_shape[0]]
    model_layer_nodes.extend(layer_data)
    model_layer_nodes.append(num_classes)
    model_data: ModelData = ModelData(name, model_description, model)
    model_data.set_parameter(batch_size, epochs, model_layer_nodes, learning_rate, x_train.shape[0], x_test.shape[0])

    # Evaluate the model, then persist both the model and its metadata.
    model_data = evaluate_model(model_data, x_train, y_train, x_test, y_test)
    model_data.set_class_selection(class_selection)
    model_data.save_model()
    model_data.store_model_data()

    return model_data
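A quick usage sketch for create(). The architecture and training parameters below are illustrative values chosen for the example, not defaults documented by the project; only the signature and ModelTrainType.BALANCED come from the code above.

# Illustrative: a small MNIST classifier with two hidden layers, trained on the balanced split.
model_data: ModelData = create(name="mnist_example", batch_size=128, epochs=10, layer_data=[128, 64],
                               learning_rate=0.001, train_type=ModelTrainType.BALANCED)

# Illustrative: the same helper restricted to a subset of classes (digits 0 to 4 only).
subset_model_data: ModelData = create(name="mnist_subset", batch_size=128, epochs=10, layer_data=[64],
                                      class_selection=[0, 1, 2, 3, 4])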
Example #3
def calculate_performance_of_model(model_data: ModelData):
    # Load the full prepared MNIST split.
    (x_train, y_train), (x_test, y_test), input_shape, num_classes = get_prepared_data()

    logging.info("Train examples: %i" % x_train.shape[0])
    logging.info("Test examples: %i" % x_test.shape[0])

    # Reload the stored model and recompile it before re-evaluating its performance.
    model_data.reload_model()
    model_data.model.compile(loss=keras.losses.categorical_crossentropy, optimizer=keras.optimizers.Adam(0.001),
                             metrics=["accuracy"])

    model_data = evaluate_model(model_data, x_train, y_train, x_test, y_test)
    return model_data
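A short sketch of how this re-evaluation might be triggered for an already stored model. The name "default_all" is taken from the sample script below; persisting the refreshed metrics with store_model_data() afterwards is an assumption about the intended workflow.

from data.model_data import ModelData

# Reload a previously stored model by name and recompute its performance metrics.
model_data: ModelData = ModelData("default_all")
model_data = calculate_performance_of_model(model_data)
# Assumed follow-up: persist the refreshed evaluation results.
model_data.store_model_data()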
Example #4
from data.mnist_data_handler import get_prepared_data
from data.model_data import ModelData
from evaluation.evaluator import ImportanceEvaluator
from utility.log_handling import setup_logger

setup_logger("sample_evaluation")

# Reload the stored "default_all" model and set up the importance evaluator with its defaults.
name: str = "default_all"
model_data: ModelData = ModelData(name)
model_data.reload_model()
importance_handler: ImportanceEvaluator = ImportanceEvaluator(model_data)
importance_handler.setup()

# Feed the prepared MNIST split into the evaluator and generate the evaluation data.
(x_train, y_train), (x_test, y_test), input_shape, num_classes = get_prepared_data(model_data.get_class_selection())
importance_handler.set_train_and_test_data(x_train, y_train, x_test, y_test)
importance_handler.create_evaluation_data(10)