# Example 1
         # NOTE(review): truncated fragment — the enclosing training-loop header is
         # outside this view. This looks like a boosting-style update that rescales
         # the current training example's weight by the latest beta — TODO confirm
         # against the full file.
         weightlist[datatotrain[tr]] *= beta[-1]
     # Renormalize the example weights so they sum to 1.
     # (NOTE(review): `sum` shadows the builtin — left unchanged here.)
     sum = 0
     for i in weightlist:
         sum += i
     for i in range(len(weightlist)):
         weightlist[i] /= sum
     print(error)
 #test
 # Evaluate the trained learners on the held-out `test` index list.
 sum = 0
 tp = 0
 fp = 0
 fn = 0
 tn = 0
 result = []
 # Collect one prediction list per learner over the test examples.
 for lea in learner:
     result.append(decision.getResult(lea, data_example, ma, 'bot', test))
 rs = []
 for i in range(len(test)):
     total = 0
     rs.append(0)
     # Weighted vote: each learner contributes log2(1/beta[j]), added when it
     # predicted class 1 and subtracted otherwise (AdaBoost.M1-style combination
     # — NOTE(review): confirm; the training side is not fully visible here).
     for j in range(len(learner)):
         total += 1
         if result[j][i] == 1:
             rs[-1] += math.log(1 / beta[j], 2)
         else:
             rs[-1] -= math.log(1 / beta[j], 2)
     # Threshold the signed vote into a 0/1 prediction; an exact tie falls
     # through to the `elif` below, whose body is cut off in this fragment.
     if rs[-1] > 0:
         rs[-1] = 1
     elif rs[-1] < 0:
         rs[-1] = 0
     elif i % 2 == 0:
# Example 2
    # NOTE(review): truncated fragment — the header of this first loop is outside
    # this view; it appears to build one feature row per training example, taking
    # the columns named in `ma` from `data_trai`.
    row = []
    for name in ma:
        row.append(data_trai[name][i])
    thedatatr.append(row)
    # The training target is the 'bot' column of this example.
    targettr.append(data_trai['bot'][i])
# Build the test feature matrix the same way (no targets collected for test).
for i in range(len(data_test)):
    row = []
    for name in ma:
        row.append(data_test[name][i])
    thedatate.append(row)

# Single-iteration loop (range(1)) running a three-model ensemble:
# naive Bayes, a decision tree, and logistic regression, combined by vote.
for k in range(1):
    model = mybayes.buildmodel(data_trai, ma, 'bot', trainindexlist)
    result1 = model.getResult(data_test, ma, 'bot', testindexlist)
    mynode = decision.makesubtree(data_trai, ma, 'bot', trainindexlist)
    result2 = decision.getResult(mynode, data_test, ma, 'bot', testindexlist)
    # Weakly regularized logistic regression (large C) fitted on the raw rows.
    logreg = LogisticRegression(C=1000).fit(thedatatr, targettr)
    thetar = logreg.predict(thedatate)
    result = []
    # Majority vote: predict 1 when at least 2 of the 3 models predict 1
    # (assumes each model emits 0/1 labels — TODO confirm getResult's output).
    for i in range(len(result1)):
        t = result1[i] + result2[i] + thetar[i]
        if t >= 2:
            result.append(1)
        else:
            result.append(0)
    # Metric accumulators; the evaluation continues past this fragment.
    accuracy = 0.0
    precision = 0.0
    recall = 0.0
    f1score = 0.0
    tp = 0
    fp = 0