Example #1
0
def TreePartialFit(batch_size=1000):
    """Incrementally fit a tree ensemble on the module-level ``dataset``.

    Iterates over ``dataset`` in non-overlapping batches of ``batch_size``
    rows, feeding every column except the last as features and the
    'Price' column as the target, then prints the learned feature
    importances.

    NOTE(review): sklearn's ExtraTreesRegressor does not implement
    ``partial_fit`` — this call raises AttributeError at runtime.
    Incremental forest fitting needs ``warm_start=True`` with a growing
    ``n_estimators``, or an estimator that truly supports ``partial_fit``
    (e.g. SGDRegressor). Confirm the intended estimator before shipping.

    :param batch_size: number of rows per training batch (default 1000).
    """
    # clf_p = RandomForestRegressor()
    clf_p = ExtraTreesRegressor()
    # Step by batch_size and start at 0.  The original looped
    # range(1000, n) with step 1, which skipped the first 1000 rows and
    # re-fit on almost-identical overlapping windows every iteration.
    for start in range(0, dataset.shape[0], batch_size):
        print("in partial fit")
        data_batch = dataset[start:start + batch_size]
        # All columns except the last are features; 'Price' is the label.
        f_batch = data_batch[data_batch.columns.values.tolist()[:-1]]
        l_batch = data_batch['Price']
        clf_p.partial_fit(f_batch, l_batch)
    print(clf_p.feature_importances_)
Example #2
0
# Select samples from unfinished games where the acting player is the
# randomized player.
m = (~gameFinishedMask) & (actingPlayerIdx == randomizedPlayerIdx)

# Keep a copy of the pre-update model for later comparison.
regressorOld = copy.deepcopy(regressor)

features2 = scaler.transform(features)

# BUG FIX: the original computed rndIdx over the *full* arrays and did
# features2[rndIdx][m] — permuting first and masking second selects the
# rows {rndIdx[j] : m[j]}, i.e. an arbitrary random subset of the data
# rather than the masked samples the mask was built to pick (the
# feature/target pairs stayed aligned, but the wrong samples were
# trained on).  Mask first, then shuffle within the masked subset.
maskedFeatures = features2[m]
maskedTargets = targetActions[m]
# replace=False: the original passed 0, which works but is unidiomatic.
rndIdx = np.random.choice(len(maskedFeatures), size=len(maskedFeatures),
                          replace=False)

for i in range(100):
    regressor.partial_fit(maskedFeatures[rndIdx], maskedTargets[rndIdx])
    print('Iteration: ' + str(i) + '   loss: ' + str(regressor.loss_))

# %%

# Distribution of target actions over the masked samples (6 bins).
np.histogram(targetActions[m], 6)

from sklearn.metrics import confusion_matrix

# BUG FIX: the model was trained on the scaled inputs (features2 =
# scaler.transform(features)), so it must also predict on scaled inputs.
# The original predicted on the raw features, silently degrading accuracy
# through a train/serve scaling mismatch.
pred = regressor.predict(features2[m])

cm = confusion_matrix(targetActions[m], pred)
# Row-normalize so each row sums to 1 (per-true-class recall breakdown).
# NOTE(review): a class absent from targetActions[m] yields a zero row
# and a divide-by-zero NaN row here — acceptable for eyeballing, confirm
# if this feeds anything downstream.
cm22 = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]

print(np.around(cm22, 2))