Exemplo n.º 1
0
def record_experment_data(fs, fnus):
    """Drive the decomposition simulation over the imported climate series.

    ``fs`` selects which data set ``dataset.import_data`` loads; ``fnus`` is
    the collection of fungus simulation objects (the first 50 are fed each
    climate sample — assumes len(fnus) >= 50, TODO confirm with caller).

    Returns the recorded per-step series as a tuple:
    (extension_rate, number, fnus, decomposition, litter,
     record_x, record_y, temperature, Humidity)
    """
    m2 = 800000          # running litter mass, updated by Q1 each step
    threshold = 400000   # event threshold, also updated by Q1

    temperature, Humidity = dataset.import_data(fs)

    number, decomposition = [], []
    extension_rate, litter = [], []
    record_x, record_y = [], []

    # simulation / training loop over 5000 climate samples
    for step in range(5000):
        # push the current temperature/humidity sample into the first 50 units
        for idx in range(50):
            fnus[idx].T_real = temperature[step]
            fnus[idx].W_real = Humidity[step]

        (total_number, total_decomposition_rate, m2,
         d_number, flag, threshold) = Q1.update_real_number(fnus, m2, threshold)

        number.append(total_number)
        decomposition.append(total_decomposition_rate)
        extension_rate.append(d_number)
        litter.append(m2)
        # flag == 1 marks a threshold-crossing event worth plotting later
        if flag == 1:
            record_x.append(step)
            record_y.append(threshold * 2)

    return extension_rate, number, fnus, decomposition, litter, record_x, record_y, temperature, Humidity
Exemplo n.º 2
0
def train_rf_squad():
    """Train the random-forest model for the 'squad-fpp' match type on the
    default training data and print the fitted model."""
    raw = import_data(None, True)
    features, target, _feature_names = feature_engineering(raw, 'squad-fpp', True)
    trained = rf_model_squad(features, target)
    print("Model training completed successfully")
    print(trained)
Exemplo n.º 3
0
def ramdon_forest_duo(file):
    """Run the duo-fpp random-forest prediction on the data in *file*.

    NOTE: the 'ramdon' spelling is kept for caller compatibility.
    """
    raw = import_data(file, False)
    frame, _target, _names = feature_engineering(raw, 'duo-fpp', False)
    return rf_duo(frame)
Exemplo n.º 4
0
def model_training():
    """Train the solo random-forest model on the default data set and print
    the resulting model object."""
    raw = import_data(None, True)
    features, target = feature_engineering(raw, True)
    trained = rf_model_solo(features, target)
    print(trained)
Exemplo n.º 5
0
def ramdon_forest_solo(file):
    """Run the solo random-forest prediction on the data in *file*.

    NOTE: the 'ramdon' spelling is kept for caller compatibility.
    """
    raw = import_data(file, False)
    frame, _ = feature_engineering(raw, False)
    return random_forest(frame)
Exemplo n.º 6
0
import tornado.ioloop
import tornado.web
from transformers import BertForTokenClassification

from dataset import import_data, split_data, convert_dataframe_to_data, tags_and_tag_to_idx, tokenizer
from model import one_sentence_prediction_bert

# Filesystem locations of the NER training CSV and the fine-tuned BERT
# checkpoint directory (hard-coded to a local machine; adjust before deploy).
path_to_dataset = '/home/andrei/Documents/ML/ner.csv'
path_to_model = '/home/andrei/Documents/ML/bert_uncased/'

# Import-time setup: load the data set once and derive the tag vocabulary
# that prediction decoding will use.
data = import_data(path_to_dataset)
training, testing = split_data(data)
train_data = convert_dataframe_to_data(training)
test_data = convert_dataframe_to_data(testing)
_, tag_to_idx = tags_and_tag_to_idx(train_data, test_data)
tag_values = list(tag_to_idx.keys())  # ordered tag names taken from the mapping

# Load the token-classification model from the local checkpoint directory.
bert_model_loaded = BertForTokenClassification.from_pretrained(path_to_model)


class Ner(tornado.web.RequestHandler):
    def get(self):
        form = """<form method="post">
        <input type="text" name="sentence"/>
        <input type="submit"/>
        </form>"""
        self.write(form)

    def post(self):
        sentence = self.get_argument('sentence')
        prediction = one_sentence_prediction_bert(sentence, bert_model_loaded,