    model.add(Dense(256, activation='relu'))
    model.add(Dropout(dropout))

    # Output layer: a single sigmoid unit for binary classification
    model.add(Dense(1, activation='sigmoid'))

    # Compile model
    optimizer = optimizers.Adam(lr=learning_rate, decay=0.0)
    model.compile(loss='binary_crossentropy', optimizer=optimizer, metrics=['accuracy'])
    model.summary()

    return model


if __name__ == '__main__':
    X, y, _ = get_input_data(train_file_path='train.json')

    # Incorporate rotated images into the training data
    # (note that this significantly increases training time)
    X_rotated, y_rotated = get_rotated_images(X, y)
    X = np.concatenate([X, X_rotated])
    y = np.concatenate([y, y_rotated])

    X_train = X
    y_train = y

    X_test, ids = get_input_data('data/test.json', train=False)

    model = get_model(learning_rate=0.001, dropout=0.2)

    # Train and test model
    train_history = model.fit(X_train, y_train,
import numpy as np
from numpy.random import seed
seed(7)
from tensorflow import set_random_seed
set_random_seed(420)

import csv
import keras
from data_preprocessing import get_input_data
from sklearn.model_selection import train_test_split
import os
from sklearn.metrics import accuracy_score, precision_score, recall_score


if __name__ == '__main__':
    X, y = get_input_data(train_file_path='train.json')
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=42)

    saved_models = [f for f in os.listdir('.') if f.startswith('hyperparams_')]

    with open('hyperparamater_search_results.csv', 'w') as csv_file:
        writer = csv.writer(csv_file)
        header = [
            'learning_rate', 'epochs', 'batch_size', 'dropout',
            'accuracy', 'log_loss', 'precision', 'recall'
        ]
        writer.writerow(header)
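        # Minimal sketch of an evaluation loop that would fill one CSV row per saved
        # model. It assumes each model was written with model.save() and that its
        # hyperparameters are encoded in the filename as
        # 'hyperparams_<lr>_<epochs>_<batch_size>_<dropout>.h5'; the actual naming
        # scheme and loop body may differ.
        from keras.models import load_model  # would normally sit with the imports above
        from sklearn.metrics import log_loss

        for model_file in saved_models:
            model = load_model(model_file)

            # Predicted probabilities and hard 0/1 labels on the held-out split
            y_prob = model.predict(X_test).ravel()
            y_pred = (y_prob > 0.5).astype(int)

            # Hypothetical filename parsing -- adjust to the actual naming convention
            lr, epochs, batch_size, dropout = os.path.splitext(model_file)[0].split('_')[1:5]

            writer.writerow([
                lr, epochs, batch_size, dropout,
                accuracy_score(y_test, y_pred),
                log_loss(y_test, y_prob),
                precision_score(y_test, y_pred),
                recall_score(y_test, y_pred),
            ])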