def run_tc3_0():
    """Test case tc#3.0: BOW features + Gradient Boosting on arXiv metadata."""
    global dataset, feature_extraction_method, classifiers, experiment_controller
    # Single-classifier run; the three config assignments are independent.
    classifiers = [ClassificationMethod.Gradient_Boosting_Machines]
    feature_extraction_method = FeatureExtractionMethod.BOW
    dataset = Dataset.arxiv_metadata
    experiment_controller = ExperimentController('tc#3.0', '3')
    experiment_controller.set_variables(
        dataset,
        feature_extraction_method,
        classifiers,
        should_load_embedding_model=False)
    experiment_controller.run_experiment()
def run_tc0_0():
    """Test case tc#0.0: BOW features on 20 Newsgroups with three classifiers."""
    global dataset, feature_extraction_method, classifiers, experiment_controller
    dataset = Dataset.ds20newsgroups
    feature_extraction_method = FeatureExtractionMethod.BOW
    # NOTE: Logistic_Regression is deliberately excluded from this test case
    # (it was commented out in the original selection).
    classifiers = [
        ClassificationMethod.Naive_Bayes_Classifier,
        ClassificationMethod.Support_Vector_Machines,
        ClassificationMethod.SVM_with_SGD,
    ]
    experiment_controller = ExperimentController('tc#0.0', '1')
    experiment_controller.set_variables(
        dataset,
        feature_extraction_method,
        classifiers)
    experiment_controller.run_experiment()
def run_tc3_4():
    """Test case tc#3.4: FastText features on arXiv metadata, linear classifiers."""
    global dataset, feature_extraction_method, classifiers, experiment_controller
    # Config assignments are independent of one another.
    classifiers = [
        ClassificationMethod.Logistic_Regression,
        ClassificationMethod.Support_Vector_Machines,
        ClassificationMethod.SVM_with_SGD,
    ]
    feature_extraction_method = FeatureExtractionMethod.FASTTEXT
    dataset = Dataset.arxiv_metadata
    experiment_controller = ExperimentController('tc#3.4', '2')
    experiment_controller.set_variables(
        dataset,
        feature_extraction_method,
        classifiers,
        should_load_embedding_model=False)
    experiment_controller.run_experiment()
def run_tc0_2():
    """Test case tc#0.2: Word2Vec features on 20 Newsgroups, four classifiers."""
    global dataset, feature_extraction_method, classifiers, experiment_controller
    dataset = Dataset.ds20newsgroups
    feature_extraction_method = FeatureExtractionMethod.WORD2VEC
    classifiers = [
        ClassificationMethod.Naive_Bayes_Classifier,
        ClassificationMethod.Logistic_Regression,
        ClassificationMethod.Support_Vector_Machines,
        ClassificationMethod.SVM_with_SGD,
    ]
    experiment_controller = ExperimentController('tc#0.2', '1')
    # Word2Vec requires the pretrained embedding model to be loaded.
    experiment_controller.set_variables(
        dataset,
        feature_extraction_method,
        classifiers,
        should_load_embedding_model=True)
    experiment_controller.run_experiment()
def run_tc3_1():
    """Test case tc#3.1: TF-IDF features on arXiv metadata, all five classifiers."""
    global dataset, feature_extraction_method, classifiers, experiment_controller
    # Config assignments are independent of one another.
    classifiers = [
        ClassificationMethod.Naive_Bayes_Classifier,
        ClassificationMethod.Logistic_Regression,
        ClassificationMethod.Support_Vector_Machines,
        ClassificationMethod.SVM_with_SGD,
        ClassificationMethod.Gradient_Boosting_Machines,
    ]
    feature_extraction_method = FeatureExtractionMethod.TF_IDF
    dataset = Dataset.arxiv_metadata
    experiment_controller = ExperimentController('tc#3.1', '2')
    experiment_controller.set_variables(
        dataset,
        feature_extraction_method,
        classifiers,
        should_load_embedding_model=False)
    experiment_controller.run_experiment()
import warnings

# %matplotlib inline
from topic_classification.ExperimentController import ExperimentController
from topic_classification.constants import *

warnings.filterwarnings('ignore')

# Script for different kinds of experiments.
# Test case tc#2.4: FastText features on the BBC News Summary dataset.
dataset = Dataset.bbc_news_summary
feature_extraction_method = FeatureExtractionMethod.FASTTEXT
classifiers = [
    ClassificationMethod.Logistic_Regression,
    ClassificationMethod.Support_Vector_Machines,
    ClassificationMethod.SVM_with_SGD,
]

experiment_controller = ExperimentController('tc#2.4', '1')
# FIX: every run_tc* helper in this file configures the controller via
# set_variables(...) and then calls run_experiment() with no arguments.
# The original script passed the configuration directly to run_experiment(),
# which is inconsistent with that API and would not apply the settings.
experiment_controller.set_variables(
    dataset,
    feature_extraction_method,
    classifiers,
    should_load_embedding_model=True)
experiment_controller.run_experiment()