Exemplo n.º 1
0
def knn(p_id):
    """Run the KNN clustering model for patient *p_id* or return a cached result.

    If no session data exists yet for the patient, loads the serialized
    model, predicts on the session's test data, takes a majority vote over
    the predicted cluster labels, persists a ``models.Result`` row and
    returns it. Otherwise returns the patient's last stored result.

    Parameters
    ----------
    p_id : int or str
        Patient id; converted to ``int`` when stored in the result row.

    Returns
    -------
    The created result row, or the last stored result when session data
    already exists.
    """
    exists, test = get_session_data(p_id)
    if exists:
        # NOTE(review): `m_id` is not defined anywhere in this function —
        # presumably a module-level model id; confirm it is in scope,
        # otherwise this raises NameError.
        return crud.get_last_result(db, p_id, m_id)

    print("Running KNN Model")
    # Session table names appear to be underscore-separated with the
    # session id in the 4th field — TODO confirm against crud.
    s_id = crud.get_latest_session_table_by_id(db, p_id).split('_')[3]
    model = TimeSeriesKMeans.from_json('res/knn_model.txt')
    pred = model.predict(test)

    # Majority vote over the two cluster labels: 1 wins only when it is
    # strictly more frequent than 0.
    label = 1 if (pred == 0).sum() < (pred == 1).sum() else 0

    # The original also built an 80/20 ground-truth vector and printed a
    # confusion matrix after the returns — that code was unreachable and
    # has been removed, along with debug prints of `exists`.
    result = models.Result(
        session_id=int(s_id),
        patient_id=int(p_id),
        result=int(label),
        model_id=0,
    )
    return crud.create_patient_result(db, result)
Exemplo n.º 2
0
    def assess(self, stk, stki):
        """Rate *stk* against the pre-trained clustering models and return a
        human-readable verdict on where it sits in a pump/dump cycle.

        Parameters
        ----------
        stk : str
            Ticker symbol; used to re-fetch history when fewer than 15
            closes are available.
        stki : pandas.DataFrame
            Price history with a 'Close' column.

        Returns
        -------
        str
            ``"This stock is possibly <verdict>"`` for the model family
            with the smallest aggregate transform distance (scaled pass).
        """
        # Renamed from `type`, which shadowed the builtin.
        model_files = ['km.json', 'dba_km.json', 'sdtw_km.json']
        verdicts = ['before the pump.', 'near the dump.', 'after the dump.']
        prefixes = list('gbw')  # model-family prefixes — meaning unclear from here; TODO confirm
        rating = [0, 0, 0]
        srating = [0, 0, 0]

        def _to_model_input(series):
            # Reshape a 'Close' series into the (1, n_timesteps, 1) array
            # the time-series models expect.
            arr = series.to_numpy().reshape(-1, 1)
            return arr[np.newaxis, ...]

        data1 = _to_model_input(stki['Close'])
        # The models expect exactly 15 time steps; when short, re-fetch a
        # wider calendar window (extra days cover weekends/holidays).
        if data1.shape[1] != 15:
            need = 15 - data1.shape[1]
            stki = data.DataReader(stk,
                                   start=datetime.today() -
                                   timedelta(days=21 + need, hours=14),
                                   end=self.now,
                                   data_source='yahoo')
            data1 = _to_model_input(stki['Close'])

        # Unscaled pass. NOTE(review): `rating` is accumulated here but never
        # read afterwards — kept for parity with the original; confirm
        # whether it was meant to feed the verdict.
        for fam in range(3):
            for kind in range(3):
                model = tsm.from_json('MLModels/' + prefixes[fam] + model_files[kind])
                rating[fam] += np.sum(model.transform(data1))

        # Scaled pass drives the verdict.
        data1 = TimeSeriesScalerMeanVariance(1, .5).fit_transform(data1)
        for fam in range(3):
            for kind in range(3):
                model = tsm.from_json('MLModels/s' + prefixes[fam] + model_files[kind])
                srating[fam] += np.sum(model.transform(data1))

        # Build the message once instead of duplicating the expression for
        # the print and the return.
        message = "This stock is possibly " + verdicts[srating.index(min(srating))]
        print(message)
        return message
Exemplo n.º 3
0
pred

# --- Evaluate the fitted 2-cluster model (notebook-style script) ---
# Ground truth: the first 320 samples are class 0, the last 80 are class 1.
a = np.zeros((320,), dtype=int)
b = np.ones((80,), dtype=int)
true = np.concatenate([a, b])

confusion_matrix(true, pred)

# Flatten the two cluster centroids for plotting against the raw series.
centers = model.cluster_centers_
centers = np.array([centers[0].flatten(), centers[1].flatten()])
centers

# Overlay centroid 0 on the first series of the Z and O datasets.
plt.plot(centers[0], color='red')
for dataset in [Z, O]:
    for i in range(1):
        plt.plot(dataset.iloc[:, i][0:1000], color='c')

# Overlay centroid 1 on the first series of the N and F datasets.
plt.plot(centers[1], color='r')
for dataset in [N, F]:
    for i in range(1):
        plt.plot(dataset.iloc[:, i][0:1000], color='c')

# Persist and reload the model. to_json writes to exactly the path given,
# so reload from the same path — the original loaded 'Model.txt', a file
# to_json("Model") never creates.
model.to_json("Model")

model = TimeSeriesKMeans.from_json("Model")
model

# Re-check predictions from the reloaded model against the same split.
pred = model.predict(test)
a = np.zeros((320,), dtype=int)
b = np.ones((80,), dtype=int)
true = np.concatenate([a, b])
confusion_matrix(true, pred)