def check_nb_svm(sk_model, monkeypatch):
    """Run a one-query oracle review with a scikit-learn model and verify its log.

    Parameters
    ----------
    sk_model : callable
        Zero-argument factory returning a scikit-learn style classifier.
    monkeypatch : pytest.MonkeyPatch
        Used to auto-answer the oracle's ``input()`` prompts with "0".
    """
    # Load the dataset and pull out the raw texts.
    review_data = asr.ASReviewData.from_file(data_fp)
    _, texts, _ = review_data.get_data()

    # Build the feature matrix; the word index is not needed here.
    feature_matrix, _ = asr.text_to_features(texts)

    classifier = sk_model()

    # Every interactive prompt is answered with "0" (label: excluded).
    monkeypatch.setattr('builtins.input', lambda _: "0")

    reviewer = asr.ReviewOracle(
        feature_matrix,
        as_data=review_data,
        model=classifier,
        n_instances=1,
        n_queries=1,
        prior_included=[1, 3],  # List of some included papers
        prior_excluded=[2, 4],  # List of some excluded papers
    )
    reviewer.review()

    # The recorded log must pass the shared sanity checks.
    check_log(reviewer._logger._log_dict)
def check_lstm(lstm_model, monkeypatch):
    """Run a one-query oracle review with a Keras LSTM model and verify its log.

    Parameters
    ----------
    lstm_model : callable
        Factory accepting ``embedding_matrix=...`` and returning a Keras
        model-building function.
    monkeypatch : pytest.MonkeyPatch
        Used to auto-answer the oracle's ``input()`` prompts with "0".
    """
    # Load the dataset and pull out the raw texts.
    review_data = asr.ASReviewData.from_file(data_fp)
    _, texts, _ = review_data.get_data()

    # Build the feature matrix plus the word index needed for embeddings.
    feature_matrix, word_index = asr.text_to_features(texts)

    # Load the pretrained embedding and restrict it to our vocabulary.
    embedding = asr.load_embedding(embedding_fp, word_index=word_index)
    embedding_matrix = asr.sample_embedding(embedding, word_index)

    classifier = KerasClassifier(
        lstm_model(embedding_matrix=embedding_matrix),
        verbose=1,
    )

    # Tiny training budget keeps the test fast; class_weight up-weights
    # the positive class.
    fit_kwargs = {"epochs": 2, "batch_size": 2, "class_weight": 20.0}

    # Every interactive prompt is answered with "0" (label: excluded).
    monkeypatch.setattr('builtins.input', lambda _: "0")

    reviewer = asr.ReviewOracle(
        feature_matrix,
        as_data=review_data,
        model=classifier,
        n_instances=1,
        n_queries=1,
        fit_kwargs=fit_kwargs,
        prior_included=[1, 3],  # List of some included papers
        prior_excluded=[2, 4],  # List of some excluded papers
    )
    reviewer.review()

    # The recorded log must pass the shared sanity checks.
    check_log(reviewer._logger._log_dict)
def check_lstm(lstm_model):
    """Run a one-query simulated review with a Keras LSTM model and verify its log.

    Parameters
    ----------
    lstm_model : callable
        Factory accepting ``embedding_matrix=...`` and returning a Keras
        model-building function.
    """
    # Load texts and their ground-truth labels for the simulation.
    _, texts, labels = asr.read_data(data_fp)

    # Build the feature matrix plus the word index needed for embeddings.
    feature_matrix, word_index = asr.text_to_features(texts)

    # Load the pretrained embedding and restrict it to our vocabulary.
    embedding = asr.load_embedding(embedding_fp, word_index=word_index)
    embedding_matrix = asr.sample_embedding(embedding, word_index)

    classifier = KerasClassifier(
        lstm_model(embedding_matrix=embedding_matrix),
        verbose=1,
    )

    # Tiny training budget keeps the test fast; class_weight up-weights
    # the positive class.
    fit_kwargs = {"epochs": 2, "batch_size": 2, "class_weight": 20.0}

    reviewer = asr.ReviewSimulate(
        feature_matrix,
        y=labels,
        model=classifier,
        n_instances=1,
        n_queries=1,
        fit_kwargs=fit_kwargs,
        prior_included=[1, 3],  # List of some included papers
        prior_excluded=[2, 4],  # List of some excluded papers
    )
    reviewer.review()

    # The recorded log must pass the shared sanity checks.
    check_log(reviewer._logger._log_dict)
def check_nb_svm(sk_model):
    """Run a one-query simulated review with a scikit-learn model and verify its log.

    Parameters
    ----------
    sk_model : callable
        Zero-argument factory returning a scikit-learn style classifier.
    """
    # Load texts and their ground-truth labels for the simulation.
    _, texts, labels = asr.read_data(data_fp)

    # Build the feature matrix; the word index is not needed here.
    feature_matrix, _ = asr.text_to_features(texts)

    classifier = sk_model()

    reviewer = asr.ReviewSimulate(
        feature_matrix,
        y=labels,
        model=classifier,
        n_instances=1,
        n_queries=1,
        prior_included=[1, 3],  # List of some included papers
        prior_excluded=[2, 4],  # List of some excluded papers
    )
    reviewer.review()

    # The recorded log must pass the shared sanity checks.
    check_log(reviewer._logger._log_dict)
#!/usr/bin/env python
'''
Created on 23 Apr 2019

@author: qubix
'''
import sys

import asreview

# CLI: python script.py <input-data-file> <output-word-index-file>
filename = sys.argv[1]
file_out = sys.argv[2]
print(filename)

# Read the dataset and derive the vocabulary via feature extraction.
# (labels and X are produced by the API but unused by this script.)
_, text, labels = asreview.read_data(filename)
X, word_index = asreview.text_to_features(text)

# Dump every vocabulary token, one per line.
with open(file_out, "w") as f:
    f.writelines(f"{key}\n" for key in word_index)