Example #1
import os
from datetime import datetime

import tensorflow as tf

from get_data import get_videos_from_folder, get_target_from_csv
from utils import save_solution

# cnn_model_fn, input_fn_from_dataset, save_tf_record and
# prob_positive_class_from_prediction are project helpers assumed to be
# defined elsewhere; they are not part of this snippet.


def main(unused_argv):
    batchsize_video = 1

    dir_path = os.path.dirname(os.path.realpath(__file__))
    train_folder = os.path.join(dir_path, "train/")
    test_folder = os.path.join(dir_path, "test/")

    train_target = os.path.join(dir_path, 'train_target.csv')
    my_solution_file = os.path.join(dir_path, 'solution.csv')

    tf_record_dir = os.path.join(dir_path, 'tf_records')
    os.makedirs(tf_record_dir, exist_ok=True)

    tf_record_train = os.path.join(tf_record_dir, 'train.tfrecords')
    tf_record_test = os.path.join(tf_record_dir, 'test.tfrecords')

    if not os.path.exists(tf_record_train):
        x_train = get_videos_from_folder(train_folder)
        y_train = get_target_from_csv(train_target)
        save_tf_record(x_train, tf_record_train, y=y_train)

    if not os.path.exists(tf_record_test):
        x_test = get_videos_from_folder(test_folder)
        save_tf_record(x_test, tf_record_test)

    # Create the Estimator
    classifier = tf.estimator.Estimator(model_fn=cnn_model_fn,
                                        model_dir="/tmp/model")

    # Set up logging for predictions
    # Log the values in the "Softmax" tensor with label "probabilities"
    tensors_to_log = {"probabilities": "softmax_tensor"}
    logging_hook = tf.train.LoggingTensorHook(tensors=tensors_to_log,
                                              every_n_iter=50)

    print('{}: Train'.format(datetime.now().strftime("%H:%M:%S")))
    # Train the model
    classifier.train(input_fn=lambda: input_fn_from_dataset(
        tf_record_train, batch_size=batchsize_video),
                     max_steps=1,
                     hooks=[logging_hook])

    print('{}: Evaluate'.format(datetime.now().strftime("%H:%M:%S")))
    # Evaluate the model and print results
    #eval_input_fn = tf.estimator.inputs.numpy_input_fn(x={"x": eval_data}, y=eval_labels, num_epochs=1, shuffle=False)
    eval_results = classifier.evaluate(input_fn=lambda: input_fn_from_dataset(
        tf_record_test, batch_size=batchsize_video))
    print(eval_results)

    print('{}: Predict'.format(datetime.now().strftime("%H:%M:%S")))
    pred = classifier.predict(
        input_fn=lambda: input_fn_from_dataset(tf_record_test,
                                               batch_size=batchsize_video,
                                               num_epochs=1,
                                               shuffle=False))

    print('{}: Save solution to {}'.format(datetime.now().strftime("%H:%M:%S"),
                                           my_solution_file))
    solution = prob_positive_class_from_prediction(pred)
    save_solution(my_solution_file, solution)
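
The Estimator pipeline above depends on two project helpers that are not shown here: cnn_model_fn (the model function) and input_fn_from_dataset (the TFRecord input pipeline). For orientation only, here is a minimal sketch of a TFRecord-backed input function with the same signature; the feature names ('video', 'label'), dtypes, and parsing details are assumptions, not the project's actual implementation.

import tensorflow as tf


def input_fn_from_dataset(tf_record_file, batch_size=1, num_epochs=None, shuffle=True):
    """Hypothetical TFRecord input pipeline; feature spec and shapes are assumptions."""
    def _parse(serialized_example):
        features = tf.parse_single_example(
            serialized_example,
            features={
                'video': tf.FixedLenFeature([], tf.string),  # assumed: raw video bytes
                'label': tf.FixedLenFeature([], tf.int64),   # assumed: integer class label
            })
        video = tf.decode_raw(features['video'], tf.uint8)
        label = tf.cast(features['label'], tf.int32)
        return {'x': video}, label

    dataset = tf.data.TFRecordDataset(tf_record_file).map(_parse)
    if shuffle:
        dataset = dataset.shuffle(buffer_size=100)
    dataset = dataset.repeat(num_epochs).batch(batch_size)
    return dataset

An input_fn of this shape can be passed to Estimator.train, evaluate and predict exactly as in the example, with num_epochs=None letting max_steps bound the training loop.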
Example #2
from get_data import get_videos_from_folder, get_target_from_csv
import os
import numpy as np
from utils import save_solution

dir_path = os.path.dirname(os.path.realpath(__file__))
train_folder = os.path.join(dir_path, "train/")
test_folder = os.path.join(dir_path, "test/")

train_target = os.path.join(dir_path, 'train_target.csv')
my_solution_file = os.path.join(dir_path, 'solution.csv')

x_train = get_videos_from_folder(train_folder)
y_train = get_target_from_csv(train_target)
x_test = get_videos_from_folder(test_folder)

dummy_solution = 0.1 * np.ones(len(x_test))
save_solution(my_solution_file, dummy_solution)
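
save_solution comes from the project's utils module and is not shown. Judging from the later pd.read_csv('solution.csv')['id'] call, the output appears to be a CSV with an id column plus one predicted value per test sample; a minimal sketch of such a writer follows (the 'y' column name and the sequential integer ids are assumptions):

import csv


def save_solution(csv_file, predictions):
    """Hypothetical writer: one row per test sample with an id and a predicted value."""
    with open(csv_file, 'w', newline='') as f:
        writer = csv.writer(f)
        writer.writerow(['id', 'y'])  # header; the 'y' column name is an assumption
        for i, pred in enumerate(predictions):
            writer.writerow([i, pred])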
import pandas as pd

# Load datasets
print("# Loading data")
dir_path = "D:/Amr/OneDrive/_MyActivities/180922 ETH DAS Data Science/Advanced Machine Learning/Projects/Task4/"
train_folder = os.path.join(dir_path, "train/")
test_folder = os.path.join(dir_path, "test/")

# =============================================================================
# train_target = os.path.join(dir_path,'train_target.csv')
# my_solution_file = os.path.join(dir_path,'solution.csv')
# train_folder = "D:/Amr/OneDrive/_MyActivities/180922 ETH DAS Data Science/Advanced Machine Learning/Projects/Task4/train/"
# test_folder = "D:/Amr/OneDrive/_MyActivities/180922 ETH DAS Data Science/Advanced Machine Learning/Projects/Task4/test/"
# train_target = "D:/Amr/OneDrive/_MyActivities/180922 ETH DAS Data Science/Advanced Machine Learning/Projects/Task4/train_target.csv"
# #my_solution_file = os.path.join(dir_path,'solution.csv')
# =============================================================================

X_train_raw = get_videos_from_folder(train_folder)
X_test_raw = get_videos_from_folder(test_folder)
Id = pd.read_csv('solution.csv')['id']
y_train_raw = pd.read_csv('train_target.csv').drop(columns=['id'])

print("# Done")
print()

# Extract features from ECG waveforms
print("# Extracting features")
X_train_ft = ExtractECOFeatures(X_train_raw)
X_test_ft = ExtractECOFeatures(X_test_raw)
print("# Done")
print()
#%%
# Impute median values in empty cells
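
The snippet ends before the imputation step itself is shown. A median imputation along these lines would fit here; this is a minimal sketch using scikit-learn's SimpleImputer, fit on the training features only, and is an assumption rather than the original code:

from sklearn.impute import SimpleImputer

imputer = SimpleImputer(strategy='median')    # replace NaNs with the per-feature median
X_train = imputer.fit_transform(X_train_ft)   # learn medians from the training features only
X_test = imputer.transform(X_test_ft)         # apply the same medians to the test features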