def get_confusion_matrix():
    """Generate predictions with the Naive Bayes model and build a confusion matrix.

    Returns:
        Confusion matrix (sklearn-style) comparing actual labels against
        the model's predictions.
    """
    nb = Naive_Bayes()
    predicted, actual = nb.generate_predictions()
    # sklearn's confusion_matrix expects (y_true, y_pred) in that order.
    return confusion_matrix(actual, predicted)
def post():
    """Handle a JSON POST request and return a Naive Bayes probability.

    Expects a JSON body with exactly three keys: 'age', 'conditions', 'state'.
    Returns:
        - {'probability': ...} on success;
        - {'request': <raw body>} when the request is not JSON;
        - {} when the payload has the wrong number of keys, is missing a key,
          or contains values that cannot be converted.
    """
    if not request.is_json:
        # Echo the raw body back so the caller can see what was rejected.
        return jsonify({'request': request.get_data()})
    data = request.get_json()
    # Explicit validation instead of `assert`: asserts are stripped under
    # `python -O` and would otherwise surface as an unhandled 500.
    if len(data) != 3:
        return jsonify(dict())
    try:
        age = int(data['age'])
        condition_list = list(data['conditions'])
        state = str(data['state'])
    except (KeyError, TypeError, ValueError):
        # Missing keys or unconvertible values -> empty JSON object,
        # matching the original KeyError path.
        return jsonify(dict())
    nb = Naive_Bayes(age, state, condition_list)
    return jsonify({'probability': nb.get_probability()})
hmm.fit(data_train) print(f"Duration of training: {time.time() - start_time}") # evaluation hmm # ------------------------------------------------------------------------- # plot confusion matrix, calculate precision, recall, f1-score hmm.evaluate(data_test) # show misclassifications features_test, labels_test = separate_labels_from_features(data_test) predictions = hmm.predict(features_test) show_misclassifications(data_test, predictions) elif model_type == "NB": # fit naive bayes model # ------------------------------------------------------------------------- nb = Naive_Bayes() data_train_featurized = feature_maker.get_pos_features_nltk( data_train ) if not load_entities else feature_maker.get_ner_features_nltk(data_train) data_train_featurized = flatten(data_train_featurized) start_time = time.time() nb.fit_nltk(data_train_featurized) print(f"Duration of training: {time.time() - start_time}") # evaluation naive bayes # ------------------------------------------------------------------------- data_test_featurized = feature_maker.get_pos_features_nltk( data_test ) if not load_entities else feature_maker.get_ner_features_nltk(data_test)
def train(instances, algorithm, high_idx, learn_rate, iterate, peg_lambda, k_val, T, clus_lambda, K, clus_iter):
    """Construct the classifier selected by `algorithm` and train it.

    Every supported model follows the same protocol: build the classifier,
    then call `classifier.train(instances)` for `iterate` passes.

    Args:
        instances: training instances passed to every constructor and train call.
        algorithm: name of the model to build (see the dispatch table below).
        high_idx, learn_rate, iterate, peg_lambda, k_val, T, clus_lambda, K,
        clus_iter: per-model hyperparameters; each constructor uses only the
        subset it needs.

    Returns:
        The trained classifier, or None if `algorithm` is not recognized.
    """
    # Name -> zero-arg factory; replaces ten near-identical if/elif branches.
    factories = {
        "perceptron": lambda: Perceptron(instances, high_idx, learn_rate),
        "averaged_perceptron": lambda: AveragePerceptron(instances, high_idx, learn_rate),
        "pegasos": lambda: Pegasos(instances, high_idx, peg_lambda),
        # PerceptronMargin also receives the iteration count at construction.
        "margin_perceptron": lambda: PerceptronMargin(instances, high_idx, learn_rate, iterate),
        "knn": lambda: KNN(instances, k_val, high_idx),
        "distance_knn": lambda: Distance_KNN(instances, k_val, high_idx),
        "adaboost": lambda: Adaboost(instances, T, high_idx),
        "lambda_means": lambda: Lambda_Means2(instances, high_idx, clus_lambda, clus_iter),
        "nb_clustering": lambda: Naive_Bayes(instances, high_idx, K),
        "mc_perceptron": lambda: MC_Perceptron(instances, high_idx),
    }
    factory = factories.get(algorithm)
    if factory is None:
        # Unknown algorithm name: preserve the original fall-through behavior.
        return None
    classifier = factory()
    for _ in range(iterate):
        classifier.train(instances)
    return classifier