Example #1
from inti_param import InitParam
from optimization import Optimization
import numpy as np
from image_prediction import image_predict
import matplotlib.pyplot as plt

learning_rate = 0.005

from cat_noncat import CatNonCat

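# Load the cat/non-cat data, initialise the logistic-regression parameters,
# run gradient descent, predict on a user-supplied image, then plot the
# training cost curve.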
X, Y, classes, num_px = CatNonCat.load_data()
w, b = InitParam.initialize_params(X.shape[0])
params, grads, cost = Optimization.optimize(w, b, X, Y, 2000, learning_rate,
                                            False)
image_predict(params, classes, num_px)
costs = np.squeeze(cost)
plt.plot(costs)
plt.ylabel('cost')
plt.xlabel('iterations (per hundreds)')
plt.title("Learning rate =" + str(learning_rate))

# y_predict = predict(params['w'], params['b'], X)
# print("train accuracy: {} %".format(100 - np.mean(np.abs(y_predict - Y)) * 100))

Example #2

# #Common Model Algorithms
# from sklearn import svm, tree, linear_model, neighbors, naive_bayes, ensemble, discriminant_analysis, gaussian_process
# #Common Model Helpers
# from sklearn.preprocessing import OneHotEncoder, LabelEncoder
# from sklearn import feature_selection
# from sklearn import model_selection
# from sklearn import metrics
import pandas as pd
# import matplotlib.pyplot as plt
from prediction import predict
# load_cat_vs_dog_data / load_cat_vs_dog_test_data used below are assumed to
# come from a project helper module that is not imported in this snippet,
# e.g. something like:
# from cat_vs_dog_data import load_cat_vs_dog_data, load_cat_vs_dog_test_data
learning_rate = 0.005

TRAIN_DATA = '\\input\\train\\'
TEST_DATA = '\\input\\test\\'
FILE_COUNT = 10000

# Train on dataset
print('Training..........')
X_train, Y_train = load_cat_vs_dog_data(TRAIN_DATA, FILE_COUNT, shuffle=True)
# X_train = X_train.T
# Y_train = Y_train.T
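# The loaders are assumed to return data in the same layout as the
# cat/non-cat example above: X_train of shape (num_features, m_train) with one
# flattened image per column, and Y_train of shape (1, m_train); the commented
# transposes above suggest the raw loader output may need reshaping first.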
w, b = InitParam.initialize_params(X_train.shape[0])
params, grads, cost = Optimization.optimize(w, b, X_train, Y_train, 500,
                                            learning_rate, False)

FILE_COUNT = 12500
print('Predicting..........')
X_test, id_list = load_cat_vs_dog_test_data(TEST_DATA, FILE_COUNT)
Y_Predict = predict(params['w'], params['b'], X_test)

# One prediction per test image, indexed by the image ids (written under "Id" by to_csv)
my_solution = pd.DataFrame(Y_Predict.T, index=id_list, columns=["Label"])
my_solution.to_csv("my_solution_one.csv", index_label=["Id"])
# print(my_solution)
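The predict function imported from prediction is not shown in this example. For this kind of model it typically thresholds the sigmoid output at 0.5, roughly as in the sketch below; predict_sketch is an illustrative name and the real implementation may differ.

import numpy as np

def predict_sketch(w, b, X):
    # Probability that each column of X belongs to class 1, shape (1, m)
    A = 1.0 / (1.0 + np.exp(-(np.dot(w.T, X) + b)))
    # Threshold at 0.5 to get hard 0/1 labels
    return (A > 0.5).astype(int)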
