def neural_network_test_table_gen():
    """Generate a CSV table of test-set loss and accuracy for each
    blob-counting method, using one fixed stratified train/test split.

    Writes ``exports/neural_network_test.csv`` (';'-separated) with one
    data row per counting method.
    """
    X, y = default_img_set()
    X = [[full_prepare(img) for img in same_sample] for same_sample in X]
    # One feature matrix per counting method, in the same order as row_names.
    Xs = count_blobs_with_all_methods(X)
    Xs = [np.array(X_count) for X_count in Xs]
    y = np.array(y)

    row_names = ('Wszystkie detale', 'Śledzone detale',
                 'Stosunek śledzonych detali')

    # newline='' is required by the csv module to avoid blank rows on Windows.
    with open('exports/neural_network_test.csv', 'w', newline='') as csvfile:
        filewriter = csv.writer(csvfile, delimiter=';')
        # Header (a duplicated placeholder header row was removed here).
        filewriter.writerow(('Metoda zliczania detali', 'Błąd', 'Dokładność'))

        for X, name in zip(Xs, row_names):
            X_train, X_test, y_train, y_test = train_test_split(
                X, y, stratify=y, test_size=0.33, random_state=1)

            model = default_grain_classifier_model()
            model.compile(
                optimizer='adam',
                loss='sparse_categorical_crossentropy',
                metrics=['accuracy'])
            model.fit(X_train, y_train, epochs=300, verbose=0)

            # evaluate() returns (loss, accuracy) for the compiled metrics.
            score = model.evaluate(X_test, y_test, verbose=0)
            filewriter.writerow((name, *score))
def neural_network_validation_table_gen():
    """Generate a CSV table of 3-fold cross-validation scores for each
    blob-counting method.

    Writes ``exports/neural_network_validation.csv`` (';'-separated) with
    one data row per counting method, holding the mean (loss, accuracy)
    rounded to 2 decimals.
    """
    X, y = default_img_set()
    X = [[full_prepare(img) for img in same_sample] for same_sample in X]
    # One feature matrix per counting method, in the same order as row_names.
    Xs = count_blobs_with_all_methods(X)
    Xs = [np.array(X_count) for X_count in Xs]
    y = np.array(y)

    row_names = ('Wszystkie detale', 'Śledzone detale',
                 'Stosunek śledzonych detali')

    # newline='' is required by the csv module to avoid blank rows on Windows.
    with open('exports/neural_network_validation.csv', 'w', newline='') as csvfile:
        filewriter = csv.writer(csvfile, delimiter=';')
        # Header (a duplicated placeholder header row was removed here).
        filewriter.writerow(('Metoda zliczania detali', 'Błąd', 'Dokładność'))

        for X, name in zip(Xs, row_names):
            model = default_grain_classifier_model()
            model.compile(
                optimizer='adam',
                loss='sparse_categorical_crossentropy',
                metrics=['accuracy'])

            # Mean (loss, accuracy) over the 3 folds, rounded for the table.
            scores = np.array(network_cross_validation(model, X, y, 3))
            score = np.round(scores.mean(axis=0), 2)
            filewriter.writerow((name, *score))
def neural_network_trainig_plots_gen():
    """Plot training history (accuracy and loss per epoch) for a model
    trained on each blob-counting method; save one TikZ figure per method.

    NOTE(review): 'trainig' is a typo kept for backward compatibility,
    both in this function's name and in the exported file names.
    """
    X, y = default_img_set()
    X = [[full_prepare(img) for img in same_sample] for same_sample in X]
    Xs = count_blobs_with_all_methods(X)
    # Loop-invariant: convert labels once instead of on every iteration.
    y = np.array(y)

    files_suffixes = ('all', 'remaining', 'ratio')

    for X, suffix in zip(Xs, files_suffixes):
        X = np.array(X)

        # Same split parameters as the test-table script; only the training
        # portion is needed for the history plot.
        X_train, _, y_train, _ = train_test_split(
            X, y, stratify=y, test_size=0.33, random_state=1)

        model = default_grain_classifier_model()
        model.compile(
            optimizer='adam',
            loss='sparse_categorical_crossentropy',
            metrics=['accuracy'])
        history = model.fit(X_train, y_train, epochs=300, verbose=0)

        plt.figure()
        plt.title('Historia treningu modelu')
        plt.xlabel('Epoka')

        # Accuracy in blue, loss in red; legend order matches plot order.
        plt.plot(history.history['accuracy'], c='b')
        plt.plot(history.history['loss'], c='r')

        plt.legend(('Dokładność', 'Błąd'))

        tikzplotlib.save('exports/neural_network_trainig_' + suffix)
def confusion_matrix_table_gen():
    """Export the mean confusion matrix for the ratio-based blob counts
    to ``exports/mean_confusion_matrix_ratio.csv``."""
    samples, labels = default_img_set()
    prepared = [[full_prepare(img) for img in series] for series in samples]
    # Index 2 selects the third counting method (ratio of remaining blobs).
    features = np.array(count_blobs_with_all_methods(prepared)[2])
    targets = np.array(labels)

    classifier = default_grain_classifier_model()
    classifier.compile(
        optimizer='adam',
        loss='sparse_categorical_crossentropy',
        metrics=['accuracy'])

    # Average confusion matrix over 3 cross-validation folds.
    matrix = mean_confusion_matrix(classifier, features, targets, 3)
    np.savetxt(
        "exports/mean_confusion_matrix_ratio.csv",
        matrix,
        fmt='%.2f',
        delimiter=";")
def network_comparison_table_gen():
    """Compare network hyper-parameters (activation function, hidden-layer
    count, hidden-layer width, optimizer) via 3-fold cross-validation on
    the ratio-of-remaining-blobs features.

    Writes ``exports/neural_network_comparison.csv`` (';'-separated), one
    row per tested configuration with mean (loss, accuracy) rounded to 2
    decimals.
    """
    X, y = default_img_set()
    X = [[full_prepare(img) for img in same_sample] for same_sample in X]
    X = [
        ratio_of_remaining_blobs_in_stages(find_blob_series(img_series))
        for img_series in X
    ]
    X = np.array(X)
    y = np.array(y)

    def _cv_score(model, optimizer='adam'):
        # Compile and 3-fold cross-validate the model; return the mean
        # (loss, accuracy) rounded to 2 decimals.
        model.compile(
            optimizer=optimizer,
            loss='sparse_categorical_crossentropy',
            metrics=['accuracy'])
        scores = np.array(network_cross_validation(model, X, y, 3))
        return np.round(scores.mean(axis=0), 2)

    # newline='' is required by the csv module to avoid blank rows on Windows.
    with open('exports/neural_network_comparison.csv', 'w', newline='') as csvfile:
        filewriter = csv.writer(csvfile, delimiter=';')
        # Header
        filewriter.writerow(('Parametr', 'Wartość', 'Błąd', 'Dokładność'))

        # Activation functions
        for func in ('sigmoid', 'relu', 'elu', 'tanh'):
            model = keras.Sequential([
                keras.layers.Dense(5, activation=func),
                keras.layers.Dense(256, activation=func),
                keras.layers.Dense(128, activation=func),
                keras.layers.Dense(4, activation='softmax')
            ])
            filewriter.writerow(('Funkcja aktywacji', func, *_cv_score(model)))

        # Number of hidden layers (1 vs 2)
        models = [
            keras.Sequential([
                keras.layers.Dense(5, activation='tanh'),
                keras.layers.Dense(512, activation='tanh'),
                keras.layers.Dense(4, activation='softmax')
            ]),
            keras.Sequential([
                keras.layers.Dense(5, activation='tanh'),
                keras.layers.Dense(256, activation='tanh'),
                keras.layers.Dense(128, activation='tanh'),
                keras.layers.Dense(4, activation='softmax')
            ]),
        ]
        for i, model in enumerate(models, start=1):
            filewriter.writerow(
                ('Liczba warstw ukrytych', i, *_cv_score(model)))

        # Number of neurons in hidden layers.
        # NOTE(review): (512, 126) looks like a typo for (512, 128) —
        # kept as-is to preserve the published results; confirm intent.
        neurons_num = ((128, 64), (256, 128), (512, 126))

        for num in neurons_num:
            model = keras.Sequential([
                keras.layers.Dense(5, activation='tanh'),
                keras.layers.Dense(num[0], activation='tanh'),
                keras.layers.Dense(num[1], activation='tanh'),
                keras.layers.Dense(4, activation='softmax')
            ])
            filewriter.writerow(('Liczba neuronów w warstwach ukrytych',
                                 '{} i {}'.format(num[0], num[1]),
                                 *_cv_score(model)))

        # Optimizer. BUG FIX: the original created one model before the
        # loop and reused it for both optimizers, so the second run did
        # not start from a fresh model and the comparison was biased.
        # A new model is now built per optimizer.
        for opt in ('sgd', 'adam'):
            model = default_grain_classifier_model()
            filewriter.writerow(
                ('Algorytm uczenia', opt, *_cv_score(model, opt)))