def main(met_train, met_test, aqi_train, aqi_test, test):
    """Interactive loop: let the user pick a classifier, train/evaluate it,
    and print the predictions for *test* plus the model's accuracy.

    Parameters
    ----------
    met_train, met_test : training/testing feature splits, forwarded to Classifiers.*
    aqi_train, aqi_test : training/testing label splits, forwarded to Classifiers.*
    test : samples to predict with the chosen trained model

    Loops until the user enters 5 (exit).
    """
    # Dispatch table replaces five near-identical if/elif branches; every
    # entry takes the same four splits and returns (model, accuracy).
    trainers = {
        1: Classifiers.Random_Forest_Classifier,
        2: Classifiers.KNN,
        3: Classifiers.SVM,
        4: Classifiers.Decision_tree,
    }
    while True:
        try:
            ch = int(input(
                "\n\nchoose among the following classifiers\n"
                "1.Random Forest\n"
                "2.K-NN\n"
                "3.SVM\n"
                "4.Decision Tree\n"
                "5.exit\n"))
        except ValueError:
            # Non-numeric input used to raise and kill the program; re-prompt instead.
            print("Please enter a number between 1 and 5.")
            continue
        if ch == 5:
            break
        trainer = trainers.get(ch)
        if trainer is None:
            # Out-of-range numbers were silently ignored before; tell the user.
            print("Invalid choice; pick 1-5.")
            continue
        model, accuracy = trainer(met_train, met_test, aqi_train, aqi_test)
        print(model.predict(test))
        print(accuracy)
# Load the covtype dataset. The original opened and fully read the file
# TWICE (once for X, once for _Y) and split each line twice; read and split
# once, with a context manager so the handle is closed deterministically.
with open('covtype.data') as f:
    rows = [line.split(',') for line in f.read().splitlines()[:SIZE_DATA]]

# All columns but the last are integer features; the last is the class label.
X = [list(map(int, row[:-1])) for row in rows]
_Y = [row[-1] for row in rows]

larg = largestClass(_Y)
# Binarize: treat the largest class as positive (+1), the rest as negative (-1).
Y = [1 if label == larg else -1 for label in _Y]

# Fix the training set at 5000 samples by expressing it as a fraction.
xTrain, xTest, yTrain, yTest = cv.train_test_split(X, Y,
                                                   train_size=5000 / len(X))

# In[2]:

import Classifiers as clfs

clfs.KNN(xTrain, xTest, yTrain, yTest)
clfs.RandomForest(xTrain, xTest, yTrain, yTest)
clfs.BoostedDecisionTree(xTrain, xTest, yTrain, yTest)
clfs.NeuralNets(xTrain, xTest, yTrain, yTest)
#clfs.SVM(xTrain, xTest, yTrain, yTest)
clfs.linearSVC(xTrain, xTest, yTrain, yTest)

# NOTE: a duplicate "import Classifiers as clfs" was removed here; the
# module is already bound above.
clfs.XGBoost(xTrain, xTest, yTrain, yTest)

# In[ ]: