import numpy as np

import util  # project-specific helper module that provides analyze()


def test(model, X_test, y_test):
    # Evaluate on the held-out set; the model is assumed to be compiled with
    # accuracy, recall, and precision metrics (see the sketch below).
    loss, accuracy, recall, precision = model.evaluate(X_test, y_test)
    print("accuracy: {}%".format(accuracy * 100))
    print("recall: {}%".format(recall * 100))
    print("precision: {}%".format(precision * 100))
    # Convert one-hot model outputs and targets back to class indices.
    outputs3 = model.predict(X_test)
    answers3 = [np.argmax(output) for output in outputs3]
    targets3 = [np.argmax(truth) for truth in y_test]
    util.analyze(answers3, targets3, [0, 1, 2, 3])
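The four-value unpacking from model.evaluate implies the model was compiled with accuracy, recall, and precision metrics. A minimal sketch of such a compile step, assuming a Keras/TensorFlow model (the architecture, input size, and optimizer below are placeholders, not taken from the original):

# Hedged sketch: compile a Keras model so that evaluate() yields
# [loss, accuracy, recall, precision] in the order unpacked above.
# The architecture below is a placeholder, not the original model.
import tensorflow as tf

model = tf.keras.Sequential([
    tf.keras.Input(shape=(10,)),                      # assumed feature size
    tf.keras.layers.Dense(64, activation="relu"),
    tf.keras.layers.Dense(4, activation="softmax"),   # 4 classes, matching [0, 1, 2, 3]
])
model.compile(
    optimizer="adam",
    loss="categorical_crossentropy",
    metrics=["accuracy", tf.keras.metrics.Recall(), tf.keras.metrics.Precision()],
)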
Example no. 2
import util  # project-specific helper module that provides analyze()


def test(model, X_test, y_test):
    # Binary classifier: recall and precision are returned by evaluate()
    # but only accuracy is reported here.
    loss, accuracy, recall, precision = model.evaluate(X_test, y_test)
    print("accuracy: {}%".format(accuracy * 100))
    # Threshold the single sigmoid output at 0.5 to get 0/1 predictions.
    outputs = model.predict(X_test)
    answers = [output[0] > 0.5 for output in outputs]
    print(answers)
    targets = [truth for truth in y_test]
    print(targets)
    util.analyze(answers, targets, [0, 1])
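util.analyze is a project-specific helper that is not shown in these examples. A hypothetical stand-in, assuming it compares predictions against targets per label (the function body and output format below are my own sketch, not the project's implementation):

# Hypothetical stand-in for util.analyze: prints a small confusion matrix
# and the overall accuracy. Works for both the binary case (labels [0, 1],
# boolean answers) and the 4-class case above.
from collections import Counter

def analyze(answers, targets, labels):
    pairs = Counter(zip(targets, answers))
    for t in labels:
        row = [pairs.get((t, p), 0) for p in labels]
        print("true {}: {}".format(t, row))
    correct = sum(1 for a, t in zip(answers, targets) if a == t)
    print("overall accuracy: {:.2f}%".format(100.0 * correct / len(targets)))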
Example no. 3
    except Exception:
        print("set_mapping except")


# Main entry point
if __name__ == '__main__':
    # args = read_args()
    # Initialize the Elasticsearch environment (index / mapping setup)
    init_es(hosts=["localhost:9200"], timeout=5000)
    # Create the Elasticsearch wrapper class
    es = es_tool(hosts=["localhost:9200"], timeout=5000)
    # Stream the input file and write it to Elasticsearch in batches
    batchsize = 50000
    i = 0
    datalist = []
    with open("/data/SDDMCS/LA_orgs/proc.txt") as f:
        while i < batchsize:
            i += 1

            line = f.readline()
            if not line:
                # End of file: flush whatever is left in the current batch.
                if len(datalist) != 0:
                    es.set_data(datalist)
                break
            datalist.append(analyze(line))
            if i == batchsize:
                # Full batch: write it out and start a new one.
                es.set_data(datalist)
                datalist = []
                i = 0
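init_es, es_tool, and analyze are defined earlier in the source file and are truncated here. For context, a hedged sketch of the kind of bulk write that set_data presumably wraps, using the official elasticsearch Python client (the index name, document shape, and the bulk_write helper are assumptions, not the original code):

# Hedged sketch of a bulk write comparable to es.set_data(datalist), using
# elasticsearch.helpers.bulk. The index name "la_orgs" and the document
# structure are assumptions, not taken from the original es_tool class.
from elasticsearch import Elasticsearch
from elasticsearch.helpers import bulk

def bulk_write(client, docs, index="la_orgs"):
    actions = ({"_index": index, "_source": doc} for doc in docs)
    success, errors = bulk(client, actions, raise_on_error=False)
    print("indexed {} docs, {} errors".format(success, len(errors)))

client = Elasticsearch(hosts=["localhost:9200"], timeout=5000)
# bulk_write(client, datalist)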
Example no. 4
          y,
          num_steps,
          data_path,
          log_path,
          gpu=gpu,
          norm=True,
          learning_rate=0.1)

# train the model
som.train()

# get the trained map
# net = som.get_weights()

### save/load weights
som.save_weights()
# som.load_weights("./data/test.npy")

# get the location / cluster of the data
data_map, clstr_map = som.map_data()

########################################################
###########
# ANALYZE #
###########

idxs = np.load("./data/idxs.npy")
code_lbl = np.load("./data/code_lbl.npy")

pred_counts, pred_acc, best_pred = analyze(clstr_map, code_lbl, 0.6, 2)
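analyze here is again project-specific; its arguments suggest a purity threshold of 0.6 and a minimum cluster size of 2. A hypothetical sketch of a cluster-purity summary along those lines (the function name, the interpretation of the arguments, and the return values are assumptions):

# Hypothetical sketch in the spirit of analyze(clstr_map, code_lbl, 0.6, 2):
# for each cluster, count the labels of the samples mapped to it and keep
# the majority label for clusters that are large and pure enough.
# Not the original implementation.
import numpy as np
from collections import Counter, defaultdict

def analyze_clusters(clstr_map, labels, purity=0.6, min_size=2):
    by_cluster = defaultdict(list)
    for cluster, label in zip(clstr_map, labels):
        by_cluster[tuple(np.atleast_1d(cluster))].append(label)
    counts, acc, best = {}, {}, {}
    for cluster, lbls in by_cluster.items():
        if len(lbls) < min_size:
            continue
        top_label, top_count = Counter(lbls).most_common(1)[0]
        counts[cluster] = len(lbls)
        acc[cluster] = top_count / len(lbls)
        if acc[cluster] >= purity:
            best[cluster] = top_label
    return counts, acc, best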