Code example #1
def fit(self, train_data):
    # Stage 1: fit the neural network on the training data and score it.
    self.nn_model.fit(train_data)
    pred = self.nn_model.predict(train_data)
    train_x, train_y, train_q = get_data_params(train_data)
    # Stage 2: copy the frame so the caller's data is not mutated, replace
    # column 0 with the NN scores, and fit the decision tree on the residuals.
    boosted_data = train_data.copy()
    boosted_data[0] = pred
    self.dt_model.fit(boosted_data, np.subtract(pred, train_y))
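Only the fit step of this NN -> DT stack appears above; the matching predict is not shown. The sketch below is one possible inference path, assuming the decision tree models the residual (NN prediction minus label) and that column 0 is replaced by the NN score exactly as in fit. It is an illustration, not the repository's actual method.

def predict(self, test_data):
    # Hypothetical inference for the NN -> DT boosted stack (assumed, not shown
    # in the source): score with the NN, rebuild the boosted frame as in fit,
    # then subtract the DT's residual estimate.
    pred_nn = self.nn_model.predict(test_data)
    boosted_test = test_data.copy()
    boosted_test[0] = pred_nn
    residual = self.dt_model.predict(boosted_test)
    return pred_nn - residual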
Code example #2
def fit(self, train_data):
    # Stage 1: fit the decision tree on the raw features and labels and score it.
    train_x, train_y, train_q = get_data_params(train_data)
    self.dt_model.fit(train_x, train_y)
    pred = self.dt_model.predict(train_x)
    # Stage 2: copy the frame, replace column 0 with the DT scores, and fit the
    # neural network on the residuals (the boosted frame, not train_x, is passed).
    boosted_data = train_data.copy()
    boosted_data[0] = pred
    self.nn_model.fit_with_labels(boosted_data, np.subtract(pred, train_y))
Code example #3
def fit_with_labels(self, train_data, labels):
    # Only the features are taken from train_data; the targets come from the
    # `labels` argument (e.g. the residuals passed in by the boosting step).
    train_x, _, train_q = get_data_params(train_data)
    train_x = normalize_data(train_x)
    train_y = encode_label(labels)
    self.history = self.model.fit(train_x,
                                  train_y,
                                  epochs=100,
                                  batch_size=64)
    return self.history
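Neither self.model nor encode_label/decode_label appears in the listings. The epochs/batch_size arguments and the History object assigned to self.history suggest a Keras network, so the sketch below shows one plausible setup, assuming the MQ2008 relevance grades (0-2) are one-hot encoded and decoded back via argmax; the helper names, layer sizes, and loss are assumptions, not the repository's code.

# Hypothetical Keras setup assumed by fit_with_labels / decode_label (not in the source).
import numpy as np
from tensorflow import keras

NUM_CLASSES = 3      # assumed: MQ2008 relevance grades 0, 1, 2
NUM_FEATURES = 46    # assumed: MQ2008 feature count

def build_nn_model():
    # A small feed-forward classifier over the normalised features.
    model = keras.Sequential([
        keras.Input(shape=(NUM_FEATURES,)),
        keras.layers.Dense(64, activation='relu'),
        keras.layers.Dense(32, activation='relu'),
        keras.layers.Dense(NUM_CLASSES, activation='softmax'),
    ])
    model.compile(optimizer='adam', loss='categorical_crossentropy')
    return model

def encode_label(labels):
    # Assumed: one-hot encode integer relevance grades.
    return keras.utils.to_categorical(np.asarray(labels).astype(int), NUM_CLASSES)

def decode_label(pred):
    # Assumed: map each softmax row back to a single grade.
    return np.argmax(pred, axis=1)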
Code example #4
from sklearn.model_selection import train_test_split
from util.Utils import read_dataset_as_df
from constant.Constant import DATASET_MQ2008_PATH
from constant.Constant import MQ2008_TSV_FILE_NAME
from util.Utils import get_data_params
from util.Utils import calculate_ndcg
from util.Utils import calculate_map
from model.NnBoostedDt import NnBoostedDt
'''
Main running logic for the deep NN and decision tree ensemble.

Technique : Boosted Stacking (NN -> DT)
Dataset : MQ2008
'''

print(":: Boosted Stacking (NN -> DT) started ::")
df = read_dataset_as_df(DATASET_MQ2008_PATH + MQ2008_TSV_FILE_NAME)
# Split the data: 30% for training, 70% held out for testing
train, test = train_test_split(df, test_size=0.7)
test_x, test_y, test_q = get_data_params(test)
model = NnBoostedDt()
model.fit(train)
pred = model.predict(test_x)
print("predicted value :", pred)
ndcg = calculate_ndcg(pred, test_y)
mAP = calculate_map(pred, test_y)

print('NDCG For Deep NN and DT Boosted Stacking (NN -> DT) :', ndcg)
print('MAP For Deep NN and DT Boosted Stacking (NN -> DT) :', mAP)
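calculate_ndcg and calculate_map come from util.Utils, which is not listed. Below is a minimal sketch of what they could compute, using sklearn metrics and treating the whole test split as a single ranked list (a stricter LTR evaluation would group by test_q and average per query); the bodies are assumptions.

# Hypothetical metric helpers (util.Utils is not shown in the source).
import numpy as np
from sklearn.metrics import average_precision_score, ndcg_score

def calculate_ndcg(pred, y_true):
    # Assumed: NDCG over the whole split as one ranking list.
    return ndcg_score([np.asarray(y_true, dtype=float)],
                      [np.asarray(pred, dtype=float)])

def calculate_map(pred, y_true):
    # Assumed: binarise the graded MQ2008 labels (0-2) at >= 1, then compute
    # average precision over the predicted scores.
    relevant = (np.asarray(y_true, dtype=float) >= 1).astype(int)
    return average_precision_score(relevant, np.asarray(pred, dtype=float))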
Code example #5
from sklearn.model_selection import train_test_split
from util.Utils import read_dataset_as_df
from constant.Constant import DATASET_MQ2008_PATH
from constant.Constant import MQ2008_TSV_FILE_NAME
from model.NnDtBagger import NnDtBagger
from util.Utils import get_data_params
from util.Utils import calculate_ndcg
from util.Utils import calculate_map
'''
Main running logic for the deep NN and decision tree ensemble.

Technique : Bagging
Dataset : MQ2008
'''

print(":: Bagging started ::")
df = read_dataset_as_df(DATASET_MQ2008_PATH + MQ2008_TSV_FILE_NAME)
# Split the data: 30% for training, 70% held out for testing
train, test = train_test_split(df, test_size=0.7)
test_x, test_y, test_q = get_data_params(test)
model = NnDtBagger(0.6)
model.fit(train)
pred = model.predict(test_x)
print("predicted value :", pred)
ndcg = calculate_ndcg(pred, test_y)
mAP = calculate_map(pred, test_y)

print('NDCG For Deep NN and DT bagging :', ndcg)
print('MAP For Deep NN and DT bagging :', mAP)
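Both scripts also rely on read_dataset_as_df, get_data_params and normalize_data from util.Utils, none of which appear above. The sketch below is one plausible implementation, assuming the TSV follows a LETOR-style layout with the relevance label in column 0 and the query id in column 1; the column positions and the scaler choice are assumptions.

# Hypothetical data helpers (util.Utils is not shown in the source).
import pandas as pd
from sklearn.preprocessing import MinMaxScaler

def read_dataset_as_df(path):
    # Assumed: tab-separated file with no header row.
    return pd.read_csv(path, sep='\t', header=None)

def get_data_params(df):
    # Assumed layout: column 0 = relevance label, column 1 = query id,
    # remaining columns = features.
    y = df[0].values
    q = df[1].values
    x = df.drop(columns=[0, 1]).values
    return x, y, q

def normalize_data(x):
    # Assumed: scale each feature into [0, 1].
    return MinMaxScaler().fit_transform(x)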
Code example #6
def predict(self, test_data):
    test_x, test_y, test_q = get_data_params(test_data)
    # Wrap the normalised features in a catboost Pool and flatten the
    # prediction to a 1-D score vector.
    return self.model.predict(Pool(data=normalize_data(test_x))).ravel()
Code example #7
def fit(self, train_data):
    train_x, train_y, train_q = get_data_params(train_data)
    # Build a catboost Pool from the normalised features and integer labels.
    self.train_pool = Pool(data=normalize_data(train_x), label=train_y.astype(int))
    self.model.fit(X=self.train_pool)
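The Pool class indicates that the decision-tree side wraps CatBoost, but its construction is not shown. A minimal sketch, assuming a default CatBoostRegressor (a CatBoostClassifier would be the alternative, given the integer labels); the hyperparameters are placeholders.

# Hypothetical construction of the CatBoost-backed dt_model (not in the source).
from catboost import CatBoostRegressor

def build_dt_model():
    # Assumed settings; the repository's actual parameters are not listed.
    return CatBoostRegressor(iterations=500, depth=6, verbose=False)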
Code example #8
def predict(self, test_data):
    test_x, test_y, test_q = get_data_params(test_data)
    pred_nn = self.nn_model.predict(test_x)
    pred_dt = self.dt_model.predict(test_x)
    # Combine the two base models by summing their scores.
    return pred_nn + pred_dt
Code example #9
def predict(self, test_data):
    test_x, test_y, test_q = get_data_params(test_data)
    test_x = normalize_data(test_x)
    # Map the network's encoded output back to the original label space.
    return decode_label(self.model.predict(test_x))