def main(argv): try: opts, args = getopt.getopt(argv, "ho:", ["output="]) except getopt.GetoptError: print 'random_forest.py [-o [2008] [2012] [graphs]]' sys.exit(2) for opt, arg in opts: if opt == '-h': print 'random_forest.py [-o [2008] [2012] [graphs]]' sys.exit() elif opt in ("-o", "--output"): if (arg == "2008"): # DEBUG print("rand_forest_model.predict(X_test_2008).shape" + str(rand_forest_model.predict(X_test_2008).shape)) make_submission_2008( "submissions/random_forest_2008.csv", rand_forest_modified_predict(rand_forest_model, X_test_2008)) elif (arg == "2012"): make_submission_2012( "submissions/random_forest_2012.csv", rand_forest_modified_predict(rand_forest_model, X_test_2012)) elif (arg == "graphs"): tune_and_graph()
def main(argv): try: opts, args = getopt.getopt(argv, "ho:", ["output="]) except getopt.GetoptError: print 'xgb.py [-o [2008] [2012]]' sys.exit(2) for opt, arg in opts: if opt == '-h': print 'xgb.py [-o [2008] [2012]]' sys.exit() elif opt in ("-o", "--output"): if (arg == "2008"): preds = round_predictions(xgb_model.predict(X_test_2008)) # DEBUG print("preds.shape" + str(preds.shape)) make_submission_2008("submissions/xgb_2008.csv", preds) elif (arg == "2012"): preds = round_predictions(xgb_model.predict(X_test_2012)) make_submission_2012("submissions/xgb_2012.csv", preds)
def main(argv): try: opts, args = getopt.getopt(argv,"ho:",["output="]) except getopt.GetoptError: print 'mlpclassifier.py [-o [2008] [2012] [tune]]' sys.exit(2) for opt, arg in opts: if opt == '-h': print 'mlpclassifer.py [-o [2008] [2012] [tune]]' sys.exit() elif opt in ("-o", "--output"): if (arg == "2008"): # DEBUG print("mlp.predict(X_test_2008).shape" + str(mlp.predict(X_test_2008).shape)) make_submission_2008("submissions/mlp_2008.csv", mlp_modified_predict(mlp, X_test_2008)) elif (arg == "2012"): make_submission_2012("submissions/adaboost_2012.csv", mlp_modified_predict(mlp, X_test_2012)) elif (arg == "tune"): optimize_parameters()
def main(argv): try: opts, args = getopt.getopt(argv, "ho:", ["output="]) except getopt.GetoptError: print 'linear_regression.py [-o [2008] [2012]]' sys.exit(2) for opt, arg in opts: if opt == '-h': print 'linear_regression.py [-o [2008] [2012]]' sys.exit() elif opt in ("-o", "--output"): if (arg == "2008"): # DEBUG print("lin_reg_model.predict(X_test_2008).shape" + str(lin_reg_model.predict(X_test_2008).shape)) make_submission_2008( "submissions/linear_regression_2008.csv", lin_reg_modified_predict(lin_reg_model, X_test_2008)) elif (arg == "2012"): make_submission_2012( "submissions/linear_regression_2012.csv", lin_reg_modified_predict(lin_reg_model, X_test_2012))
    # (tail of pred_2012) Weight the remaining model predictions.
    mlp_2012 = mlp_weight * mlp_unrounded
    rand_forest_2012 = rand_forest_weight * rand_forest_unrounded

    # Drop the unweighted prediction arrays to free memory.
    lasso_unrounded = 0
    ridge_unrounded = 0
    mlp_unrounded = 0
    xgb_unrounded = 0
    rand_forest_unrounded = 0
    adaboost_ran_forest_unrounded = 0
    adaboost_unrounded = 0

    # Sum the weighted predictions of all seven models.
    print("Starting Adding")
    temp1 = np.add(np.add(lasso_2012, ridge_2012), xgb_2012_weighted)
    print("Halfway Through Adding")
    temp2 = np.add(
        np.add(np.add(adaboost_ran_forest_2012, adaboost_2012), mlp_2012),
        rand_forest_2012)
    ensemble = np.add(temp1, temp2)
    print("Done Adding!")
    temp1 = 0
    temp2 = 0

    print(ensemble)
    ensemble = round_predictions(ensemble)
    print("Min of ensemble:", np.min(ensemble), "Max:", np.max(ensemble))
    return ensemble


make_submission_2008("submissions/ensemble_2008.csv", pred_2008())
make_submission_2012("submissions/ensemble_2012.csv", pred_2012())
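make_submission_2008 and make_submission_2012 are shared writers that are not shown here; a minimal sketch assuming a two-column id,target CSV layout (the header names and the 0-based id column are assumptions about the expected format):

import csv

def make_submission_2008(filename, predictions):
    # Write one row per test example; column names and id scheme are assumed.
    with open(filename, "w", newline="") as f:
        writer = csv.writer(f)
        writer.writerow(["id", "target"])
        for i, pred in enumerate(predictions):
            writer.writerow([i, int(pred)])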