"""Score the held-out test set with a trained 2-channel model and write
a Kaggle-style submission CSV (id, is_iceberg probability)."""
import pandas as pd
import numpy as np
from sklearn.model_selection import train_test_split
from keras.models import Sequential
from keras.layers import Dense, Conv2D, MaxPooling2D, Flatten, Dropout
from keras.optimizers import Adam
from keras.callbacks import ModelCheckpoint
from utilities import get_image_tensor
from define_models import create_model

score_data_location = 'data/test.hdf'

# Rebuild the 2-channel architecture (no batch-norm, no angle input — the
# positional args mirror create_model(nchannels, normalize_batches, angle)
# as used elsewhere in this project) and restore the trained weights.
model = create_model(2, False, False)
model.load_weights('data/train_weightsnone_.hdf5')
# Compile is required before predict in this Keras version; the loss/metric
# choice is irrelevant for inference but matches training.
model.compile(optimizer=Adam(), loss='binary_crossentropy', metrics=['accuracy'])

submission_data = pd.read_hdf(score_data_location)
# (test.hdf was originally produced via
#  submission_data.to_hdf('data/test.hdf', 'data', mode='w'))

# Capture the ids before the DataFrame is replaced by the image tensor.
submission = pd.DataFrame()
submission['id'] = submission_data['id']

submission_data = get_image_tensor(submission_data, extra_channel='none')
results = model.predict(submission_data)
# predict returns shape (n, 1); flatten to a 1-D column for the CSV.
results = results.reshape(results.shape[0])
submission['is_iceberg'] = results
submission.to_csv('data/submission.csv', index=False)
# NOTE(review): this line is a whitespace-mangled scoring script (newlines lost)
# and it is TRUNCATED — it ends mid-call inside get_image_tensor(..., extra_channel='none',
# with the remaining arguments and the predict/CSV-writing tail missing.
# Recover the original file before editing; kept byte-identical below.
import pandas as pd import numpy as np from sklearn.model_selection import train_test_split from keras.models import Sequential from keras.layers import Dense, Conv2D, MaxPooling2D, Flatten, Dropout from keras.optimizers import Adam from keras.callbacks import ModelCheckpoint from utilities import get_image_tensor from define_models import create_model score_data_location = '../architecture_experiments/data/test.hdf' model = create_model(nchannels=2, base_size=42, drop=0.06767, activation='relu', normalize_batches=False, angle=False) model.load_weights('data/train_weights_9.hdf5') model.compile(optimizer=Adam(), loss='binary_crossentropy', metrics=['accuracy']) submission_data = pd.read_hdf(score_data_location) #submission_data.to_hdf('data/test.hdf', 'data', mode = 'w') submission = pd.DataFrame() submission['id'] = submission_data['id'] submission_data = get_image_tensor(submission_data, extra_channel='none',
# Run randomized hyperparameter-search experiments.
# Relies on names defined earlier in this script (not visible here):
# hyperparameter_bounds, get_hyperparameters, get_callbacks, train_data, test_data.
#
# The image tensors are built with fixed arguments (extra_channel='none',
# normalize=False), so they are loop-invariant — hoisted out of the loop
# instead of being recomputed every experiment.
train_images = get_image_tensor(train_data, extra_channel='none', normalize=False)
test_images = get_image_tensor(test_data, extra_channel='none', normalize=False)

for i in range(hyperparameter_bounds['n_experiments']):
    # Draw one random hyperparameter sample and log it.
    hyp = get_hyperparameters(hyperparameter_bounds)
    print(hyp)
    model = create_model(nchannels=2,
                         base_size=hyp['base_size'],
                         drop=hyp['dropout'],
                         activation=hyp['activation'],
                         normalize_batches=hyp['bn'],
                         angle=False)
    model.compile(optimizer=Adam(lr=hyp['lr']),
                  loss='binary_crossentropy',
                  metrics=['accuracy'])
    # get_callbacks(path, 6): checkpoints best weights per experiment;
    # 6 presumably an early-stopping patience — TODO confirm in utilities.
    hist = model.fit(train_images, train_data.is_iceberg,
                     epochs=80,
                     batch_size=hyp['batch_size'],
                     callbacks=get_callbacks(
                         'data/train_weights_' + str(i) + '.hdf5', 6),
                     validation_data=(test_images, test_data.is_iceberg))
# NOTE(review): this line is a whitespace-mangled experiment-grid script
# (newlines lost) and it is TRUNCATED — it ends mid-call inside model.fit(...,
# epochs=50, with the remaining fit arguments and the rest of the loop body
# missing. Recover the original file before editing; kept byte-identical below.
iceberg = pd.read_json('data/train.json') iceberg['inc_angle'] = iceberg['inc_angle'].replace('na', -1) train_data, test_data = train_test_split(iceberg, train_size=.75, random_state=123) pars = [(chan, norm, angle) for chan in ('none', 'diff', 'avg') for angle in (True, False) for norm in (True, False)] # Run Experiments on for p in pars: train_images = get_image_tensor(train_data, extra_channel=p[0]) test_images = get_image_tensor(test_data, extra_channel=p[0]) if p[0] == 'none': model = create_model(2, p[1], p[2]) else: model = create_model(3, p[1], p[2]) optimizer = mypotim = RMSprop(lr=0.001) model.compile(optimizer=optimizer, loss='binary_crossentropy', metrics=['accuracy']) pars_suffix = p[0] + '_' + 'norm_' * p[1] + 'angle_' * p[2] print(pars_suffix) if p[2]: hist = model.fit([train_images, train_data.inc_angle], train_data.is_iceberg, epochs=50,
# Grid over learning rate x input normalization for the 3-channel model.
# Relies on names defined earlier in this script (not visible here):
# get_image_tensor, create_model, get_callbacks, train_data, test_data.
pars = [(learn, normalize)
        for learn in (0.0005, 0.0002, 0.0001)
        for normalize in (True, False)]

# Run Experiments on.
# Weight files continue the numbering of earlier experiment batches, so the
# first run here is train_weights_21 (enumerate replaces the original manual
# "j = 20; j += 1" counter — same numbering, less state).
for j, (lr, normalize) in enumerate(pars, start=21):
    train_images = get_image_tensor(train_data, extra_channel='avg', normalize=normalize)
    test_images = get_image_tensor(test_data, extra_channel='avg', normalize=normalize)
    # 3-channel model, no batch-norm, no angle input (positional args mirror
    # create_model(nchannels, normalize_batches, angle) as used elsewhere).
    model = create_model(3, False, False)
    optimizer = Adam(lr=lr)
    model.compile(optimizer=optimizer,
                  loss='binary_crossentropy',
                  metrics=['accuracy'])
    hist = model.fit(train_images, train_data.is_iceberg,
                     epochs=1,
                     batch_size=36,
                     callbacks=get_callbacks(
                         'data/train_weights_' + str(j) + '.hdf5', 6),
                     validation_data=(test_images, test_data.is_iceberg))