def FinalProcessData(AVY_dprove_path, pdr_IC3_path, iimc_path, data_path):
    method_list = utils.method_list

    AVY_dprove_dic = utils.ReadJson(AVY_dprove_path)

    pdr_IC3_dic = utils.ReadJson(pdr_IC3_path)
    pdr_IC3_name_list = list(pdr_IC3_dic.keys())

    iimc_dic = utils.ReadJson(iimc_path)

    data_list = []
    for name in pdr_IC3_name_list:
        temp_data = [name]
        temp_data.append(AVY_dprove_dic[name]["dprove"])
        temp_data.append(pdr_IC3_dic[name]["pdr"])
        temp_data.append(iimc_dic[name]["iimc"])
        temp_data.append(pdr_IC3_dic[name]["IC3"])

        mark = False
        for time in temp_data[1:]:
            if time not in ("timeout", "failed", "0.0"):
                mark = True
        if mark:
            data_list.append(temp_data)

    title = "filename"
    for method in method_list:
        title = title + "," + method
    with open(data_path, "w") as writer:
        writer.write(title + "\n")
        for line in data_list:
            writer.write(",".join(line) + "\n")
Example #2
def FormerProcessData(others_path, data_path, AVY_dprove_path=""):
    method_list = utils.method_list

    others_list = utils.ReadJson(others_path)
    others_dic = {}
    for aig in others_list:
        aig_name = list(aig.keys())[0]
        run_times = aig[aig_name]
        temp_dic = {}
        for method_time_pair in run_times:
            method = list(method_time_pair.keys())[0]
            time = str(method_time_pair[method])
            temp_dic[method] = time
        others_dic[aig_name] = temp_dic
    others_name_list = list(others_dic.keys())
    # print(len(others_name_list))

    AVY_dprove_name_list = []
    AVY_dprove_dic = {}
    if AVY_dprove_path != "":
        AVY_dprove_dic = utils.ReadJson(AVY_dprove_path)
        AVY_dprove_name_list = list(AVY_dprove_dic.keys())
        # print(len(AVY_dprove_name_list))

    data_list = []
    if AVY_dprove_path != "":
        others_method_list = copy.deepcopy(method_list)
        others_method_list.remove("dprove")
        for name in AVY_dprove_name_list:
            if all(keyword in others_dic[name].keys() for keyword in others_method_list):
                temp_data = [name, AVY_dprove_dic[name]["dprove"]]
                for method in others_method_list:
                    temp_data.append(others_dic[name][method])
                mark = False
                for time in temp_data[1:]:
                    if time not in ("timeout", "failed", "0.0"):
                        mark = True
                if mark:
                    data_list.append(temp_data)
    else:
        for name in others_name_list:
            if all(keyword in others_dic[name].keys() for keyword in method_list):
                temp_data = [name]
                for method in method_list:
                    temp_data.append(others_dic[name][method])
                mark = False
                for time in temp_data[1:]:
                    if time not in ("timeout", "failed", "0.0"):
                        mark = True
                if mark:
                    data_list.append(temp_data)
    title = "filename"
    for method in method_list:
        title = title + "," + method
    with open(data_path, "w") as writer:
        writer.write(title + "\n")
        for line in data_list:
            writer.write(",".join(line) + "\n")
Example #3
def ProcessNameList(train_name_list_path, test_name_list_path):
    train_name_list = utils.ReadJson(train_name_list_path)
    test_name_list = utils.ReadJson(test_name_list_path)
    total_name_list = list(set(train_name_list) | set(test_name_list))
    processed_name_list = []
    for name in total_name_list:
        processed_name_list.append(name.split(".aig")[0])
    return total_name_list, processed_name_list
def Statistic(train_label_dic_path, test_label_dic_path):
    train_label_dic = utils.ReadJson(train_label_dic_path)
    test_label_dic = utils.ReadJson(test_label_dic_path)
    statistic_dic = utils.Statistic([train_label_dic])
    print("train_data")
    print(statistic_dic)
    statistic_dic = utils.Statistic([test_label_dic])
    print("test_data")
    print(statistic_dic)
    statistic_dic = utils.Statistic([train_label_dic, test_label_dic])
    print("all_data")
    print(statistic_dic)
def ClassifyAddCol(data, name_list, classify_predict_path, choose_top_method_number):
    method_list = utils.method_list
    predict = utils.ReadJson(classify_predict_path)
    statistic_dic = utils.InitialDic()
    for index in range(len(data)):
        target_method_list = predict[name_list[index]]
        predict_time = "None"
        point = 0
        for i in range(choose_top_method_number):
            target_method = target_method_list[i]
            assert(target_method in method_list)
            temp_point = method_list.index(target_method) + 1
            temp_time = data[index][temp_point]
            if predict_time == "None":
                predict_time = temp_time
                point = temp_point
            elif temp_time != "timeout" and temp_time != "failed" and temp_time != "0.0" and temp_time != "0":
                if predict_time == "timeout" or predict_time == "failed" or predict_time == "0.0" or predict_time == "0":
                    predict_time = temp_time
                    point = temp_point
                elif float(temp_time) < float(predict_time):
                    predict_time = temp_time
                    point = temp_point                
        data[index].append(predict_time)
        statistic_dic[method_list[point - 1]] += 1
    return data
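
# Worked example (hypothetical row): ClassifyAddCol assumes data rows are
# aligned with name_list and that predict[name] ranks methods from
# utils.method_list, best first. With choose_top_method_number=2, the row
#   ["a.aig", "12.5", "timeout", "3.1", "failed"]
# and prediction ["iimc", "pdr"] get "3.1" appended: iimc (column 3) is ranked
# first and its time beats the second candidate, which timed out.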
Example #6
def ProcessiimcData(iimc_path, data_path):
    method_list = utils.method_list

    iimc_dic = utils.ReadJson(iimc_path)
    iimc_name_list = list(iimc_dic.keys())
    # print(len(iimc_name_list))

    data_list = []
    for name in iimc_name_list:
        if all(keyword in iimc_dic[name].keys() for keyword in method_list):
            temp_data = [name]
            for method in method_list:
                temp_data.append(iimc_dic[name][method])
            data_list.append(temp_data)

    title = "filename"
    for method in method_list:
        title = title + "," + method
    with open(data_path, "w") as writer:
        writer.write(title + ",DeepChecker\n")
        for line in data_list:
            writer.write(",".join(line) + "\n")
Example #7
def SplitData(data_path,
              train_data_path,
              test_data_path,
              new_format_json_path,
              remove_mark=False):
    with open(data_path, "r") as csvfile:
        data = list(csv.reader(csvfile))
        print(len(data))

    new_format = utils.ReadJson(new_format_json_path)
    new_data = []
    if remove_mark:
        for item in data[1:]:
            if item[0] not in new_format:
                new_data.append(item)
    else:
        new_data = data[1:]

    train_data, test_data = train_test_split(new_data, test_size=0.2)
    print(len(train_data))
    print(len(test_data))

    with open(train_data_path, 'w') as writer:
        for line in train_data:
            writer.write(",".join(line) + "\n")
    with open(test_data_path, 'w') as writer:
        for line in test_data:
            writer.write(",".join(line) + "\n")
def ClassifyAddPredictionWithEncoding(predict_data_path,
                                      predict_data_with_encoding_path,
                                      encoding_time_path_0,
                                      encoding_time_path_1,
                                      encoding_time_path_2):
    with open(predict_data_path, newline='') as csvfile:
        data = list(csv.reader(csvfile))
    title_list = data[0]
    data = data[1:]

    encoding_time_path_list = [
        encoding_time_path_0, encoding_time_path_1, encoding_time_path_2
    ]
    for i, encoding_time_path in enumerate(encoding_time_path_list):
        encoding_time_dic = utils.ReadJson(encoding_time_path)
        for index in range(len(data)):
            name = data[index][0].split(".aig")[0]
            encoding_time = encoding_time_dic[name]
            predict_time = data[index][i + 5]
            if predict_time != "timeout" and predict_time != "failed" and predict_time != "0.0" and predict_time != "0":
                total_time = str(encoding_time + float(predict_time))
            else:
                total_time = "timeout"
            data[index].append(total_time)

    title = ",".join(title_list)
    for encoding_layer in utils.encoding_layer_list:
        title = title + "," + "AddEncoding_" + encoding_layer
    with open(predict_data_with_encoding_path, "w") as writer:
        writer.write(title + "\n")
        for line in data:
            writer.write(",".join(line) + "\n")
def StatisticSamples(test_label_dic_path, statistic_name_dic_path):
    test_label_dic = utils.ReadJson(test_label_dic_path)
    statistic_name_dic = {}
    for method in utils.method_list:
        statistic_name_dic[method] = []
    for name in test_label_dic.keys():
        statistic_name_dic[test_label_dic[name]].append(name)
    utils.WriteJson(statistic_name_dic, statistic_name_dic_path)
def ProcessData(AVY_dprove_path, pdr_IC3_path, others_path, data_path):
    method_list = utils.method_list

    AVY_dprove_dic = utils.ReadJson(AVY_dprove_path)

    pdr_IC3_dic = utils.ReadJson(pdr_IC3_path)
    pdr_IC3_name_list = list(pdr_IC3_dic.keys())

    others_list = utils.ReadJson(others_path)
    others_dic = {}
    for aig in others_list:
        aig_name = list(aig.keys())[0]
        run_times = aig[aig_name]
        temp_dic = {}
        for method_time_pair in run_times:
            method = list(method_time_pair.keys())[0]
            time = str(method_time_pair[method])
            temp_dic[method] = time
        others_dic[aig_name] = temp_dic

    data_list = []
    for name in pdr_IC3_name_list:
        temp_data = [name]
        temp_data.append(AVY_dprove_dic[name]["dprove"])
        temp_data.append(pdr_IC3_dic[name]["pdr"])
        temp_data.append(others_dic[name]["iimc"])
        temp_data.append(pdr_IC3_dic[name]["IC3"])

        mark = False
        for time in temp_data[1:]:
            if time not in ("timeout", "failed", "0.0", "0"):
                mark = True
        if mark:
            data_list.append(temp_data)

    title = "filename"
    for method in method_list:
        title = title + "," + method
    with open(data_path, "w") as writer:
        writer.write(title + "\n")
        for line in data_list:
            writer.write(",".join(line) + "\n")
Example #11
def GeneratePrediction(name_list_path, classify_predict, layer):
    name_list = utils.ReadJson(name_list_path)
    title = "filename,predict"
    data = []
    for name in name_list:
        line = []
        line.append(name)
        line.append(classify_predict[name][0])
        data.append(line)
    with open(utils.classify_predict_path + "predict_" + str(layer) + ".csv",
              "w") as writer:
        writer.write(title + "\n")
        for line in data:
            writer.write(",".join(line) + "\n")
def Predict(name_list_path, model_path, layer, method):
    name_list = utils.ReadJson(name_list_path)
    if layer == 0:
        encoding_dic_dir = utils.encoding_dic_dir_0
    elif layer == 1:
        encoding_dic_dir = utils.encoding_dic_dir_1
    elif layer == 2:
        encoding_dic_dir = utils.encoding_dic_dir_2
    vec_list = utils.GetVecListFromDic(encoding_dic_dir, name_list)
    model = utils.Load_pkl(model_path)
    predict_time_list = model.predict(vec_list)
    time_predict = predict_time_list.tolist()
    time_predict_path = utils.time_predict_path + "predict_" + method + "_" + str(layer) + ".json"
    utils.WriteJson(time_predict, time_predict_path)
    return time_predict
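
# Usage sketch (the model filename is a hypothetical placeholder): this
# regression variant loads a pickled per-method runtime model, predicts on the
# encodings of the given depth and dumps the raw prediction list as JSON under
# utils.time_predict_path, e.g.
#
#   Predict("test_name_list.json", "time_model_pdr_2.pkl", layer=2, method="pdr")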
def Predict(name_list_path, model_path, layer):
    name_list = utils.ReadJson(name_list_path)
    if layer == 0:
        encoding_dic_dir = utils.encoding_dic_dir_0
    elif layer == 1:
        encoding_dic_dir = utils.encoding_dic_dir_1
    elif layer == 2:
        encoding_dic_dir = utils.encoding_dic_dir_2
    vec_list = utils.GetVecListFromDic(encoding_dic_dir, name_list)
    model = utils.Load_pkl(model_path)
    predictions = model.predict_proba(vec_list)
    predict_label_list = np.argsort(-predictions, axis=1)
    classify_predict_path = utils.classify_predict_path + "predict_" + str(layer) + ".json"
    classify_predict = GeneratePredictResult(name_list, predict_label_list, classify_predict_path)
    return classify_predict
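
# Illustration of the ranking step: predict_proba followed by
# np.argsort(-predictions, axis=1) orders the class indices of each row from
# most to least probable, e.g.
#
#   probs = np.array([[0.1, 0.6, 0.3]])
#   np.argsort(-probs, axis=1)   # -> array([[1, 2, 0]])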
Example #14
def GeneratePrediction(name_list_path, time_predict, layer, method):
    name_list = utils.ReadJson(name_list_path)
    title = "filename,predict"
    data = []
    for index, name in enumerate(name_list):
        line = [name, str(time_predict[index])]
        data.append(line)
    with open(
            utils.time_predict_path + "predict_" + method + "_" + str(layer) +
            ".csv", "w") as writer:
        writer.write(title + "\n")
        for line in data:
            writer.write(",".join(line) + "\n")
def ChangeData(data):
    if data != "timeout" and data != "failed" and data != "0.0" and data != "0":
        changed_data = float(data)
    else:
        changed_data = 3600.00
    return changed_data
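
# Worked example: the non-numeric markers ("timeout", "failed", "0.0", "0") are
# mapped to 3600.0, presumably the solver timeout budget, so downstream sums
# and plots stay numeric.
#
#   ChangeData("12.7")     # -> 12.7
#   ChangeData("timeout")  # -> 3600.0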


if __name__ == '__main__':
    time_basic_data_path = utils.time_basic_data_path

    for method in utils.method_list:
        save_path = utils.time_result_path + "2-depth_Encoding_Sort_" + method + ".pdf"
        plt.figure()
        predict_name_sort = utils.ReadJson(time_basic_data_path + method +
                                           "_name_sort_2.json")
        truth_name_sort = utils.ReadJson(time_basic_data_path + method +
                                         "_name_sort_truth.json")
        yaxis = list(range(len(truth_name_sort)))
        xaxis = []
        for name in truth_name_sort:
            xaxis.append(predict_name_sort.index(name))
        plt.plot([0, 824], [0, 824], color="k", linewidth=2)
        plt.scatter(xaxis, yaxis, s=7, color="k")
        # temp_ax.set_xscale('linear')
        # temp_ax.set_yscale('linear')
        plt.title(utils.NameMap(method), size=30)

        plt.xlim(0, 824)
        plt.ylim(0, 824)
        plt.xticks(range(0, len(yaxis), 200),
Example #16
import utils
import numpy as np
import matplotlib.pyplot as plt

if __name__ == '__main__':
    classify_basic_data_path = utils.classify_basic_data_path

    embedded_dir_0 = utils.embedded_dir_0
    embedded_dir_1 = utils.embedded_dir_1
    embedded_dir_2 = utils.embedded_dir_2

    statistic_name_dic_path = classify_basic_data_path + "statistic_name_dic.json"
    statistic_name_dic = utils.ReadJson(statistic_name_dic_path)

    statistic_sample_distribution_path = utils.statistic_sample_distribution_path

    for i in range(len(utils.encoding_layer_list)):
        dir = [embedded_dir_0, embedded_dir_1, embedded_dir_2][i]
        for method in utils.method_list:
            temp_path = statistic_sample_distribution_path + method + "_distribution_" + str(i) + ".pdf"
            plt.figure()
            index = utils.method_list.index(method)
            name_list = statistic_name_dic[method]
            vec_list = utils.GetVecList(dir, name_list)
            statistic_vec = np.log(np.array(vec_list).sum(axis=0) + 1.0).tolist()
            # print(statistic_vec)
            # plt.subplot(2,2,index + 1)
            if i == 0:
                width = 0.3
                x = range(0,len(vec_list[0]),1)
            elif i == 1:
Example #17

if __name__ == '__main__':
    use_all_methods = utils.use_all_methods

    embedded_dir_0 = utils.embedded_dir_0
    embedded_dir_1 = utils.embedded_dir_1
    embedded_dir_2 = utils.embedded_dir_2

    time_basic_data_path = utils.time_basic_data_path
    train_name_list_path = time_basic_data_path + "train_name_list.json"
    train_time_message_path = time_basic_data_path + "train_time_message.json"
    test_name_list_path = time_basic_data_path + "test_name_list.json"
    test_time_message_path = time_basic_data_path + "test_time_message.json"

    train_name_list = utils.ReadJson(train_name_list_path)
    train_time_message = utils.ReadJson(train_time_message_path)
    test_name_list = utils.ReadJson(test_name_list_path)
    test_time_message = utils.ReadJson(test_time_message_path)

    time_predict_path = utils.time_predict_path
    time_predict_path_0 = time_predict_path + "time_predict_0.json"
    time_predict_path_1 = time_predict_path + "time_predict_1.json"
    time_predict_path_2 = time_predict_path + "time_predict_2.json"

    layer_0 = "0"
    layer_1 = "1"
    layer_2 = "2"

    if use_all_methods:
        print("0")
def ClassifyAddPrediction(data_path, name_list_path, predict_data_path,
                          classify_predict_path_0, classify_predict_path_1,
                          classify_predict_path_2):
    with open(data_path, newline='') as csvfile:
        data = list(csv.reader(csvfile))

    method_list = utils.method_list
    name_list = utils.ReadJson(name_list_path)
    for choose_top_number in (utils.choose_top_method_number_1, utils.choose_top_method_number_2):
        for set_predict_path in [classify_predict_path_0, classify_predict_path_1, classify_predict_path_2]:
            data = ClassifyAddCol(data, name_list, set_predict_path, choose_top_number)

    for index in range(len(data)):
        predict_time = "timeout"
        for method in method_list:
            temp_point = method_list.index(method) + 1
            temp_time = data[index][temp_point]
            if temp_time != "timeout" and temp_time != "failed" and temp_time != "0.0" and temp_time != "0":
                if predict_time == "timeout":
                    predict_time = temp_time
                elif float(temp_time) < float(predict_time):
                    predict_time = temp_time
        data[index].append(predict_time)

    correct_num = 0
    for index in range(len(data)):
        random_point = random.randint(1, len(utils.method_list))
        random_predict = data[index][random_point]
        if random_predict == data[index][-1]:
            correct_num += 1
        data[index].append(random_predict)
    print("Random Acc top1")
    print(correct_num / len(data))

    correct_num = 0
    for index in range(len(data)):
        point_candidate = [1, 2, 3, 4]
        random_point_list = random.sample(point_candidate, utils.choose_top_method_number_2)
        random_predict = "timeout"
        for point in random_point_list:
            temp_time = data[index][point]
            if temp_time != "timeout" and temp_time != "failed" and temp_time != "0.0" and temp_time != "0":
                if random_predict == "timeout":
                    random_predict = temp_time
                elif float(temp_time) < float(random_predict):
                    random_predict = temp_time
        if random_predict == data[index][-2]:
            correct_num += 1
        data[index].append(random_predict)
    print("Random Acc top2")
    print(correct_num / len(data))

    title = "filename"
    for method in method_list:
        title = title + "," + method
    for encoding_layer in utils.encoding_layer_list:
        title = title + "," + "top1_" + encoding_layer
    for encoding_layer in utils.encoding_layer_list:
        title = title + "," + "top2_" + encoding_layer
    title = title + ",Ground Truth,Random Top1,Random Top2"
    with open(predict_data_path, "w") as writer:
        writer.write(title + "\n")
        for line in data:
            writer.write(",".join(line) + "\n")
import utils

if __name__ == '__main__':
    directory_path = "/mnt/hd0/DeepChecker/StatisticAvgEncodingTime/old_directory_3D.json"
    new_directory_path = "/mnt/hd0/DeepChecker/StatisticAvgEncodingTime/old_directory.json"
    latex_format_path = "/mnt/hd0/DeepChecker/StatisticAvgEncodingTime/latex_format.txt"
    directory = utils.ReadJson(directory_path)
    new_directory = []
    latex_format = ""
    for index in range(len(directory)):
        item = directory[index]
        reverse_item = item[::-1]
        new_directory.append(reverse_item)
        lettermark = False
        firstmark = True
        for i in range(len(reverse_item)):
            if reverse_item[i] != "-":
                if not lettermark:
                    lettermark = True
                    if not firstmark:
                        latex_format += "|"
                    else:
                        firstmark = False
                    latex_format += "\\mathtt{"
                latex_format += reverse_item[i]
            else:
                if lettermark:
                    lettermark = False
                    latex_format += "}\\verb|"
                latex_format += "-"
        if lettermark:
def JudgeSituation15(test_name_list_path):
    test_name_list = utils.ReadJson(test_name_list_path)
    for name in test_name_list:
        vec = utils.GetVec(utils.embedded_dir_1, name)
        if vec[14] != 0:
            print(name)
Example #21
if __name__ == '__main__':
    use_all_methods = utils.use_all_methods

    embedded_dir_0 = utils.embedded_dir_0
    embedded_dir_1 = utils.embedded_dir_1
    embedded_dir_2 = utils.embedded_dir_2
    embedded_dir_list = [embedded_dir_0, embedded_dir_1, embedded_dir_2]

    classify_basic_data_path = utils.classify_basic_data_path
    train_name_list_path = classify_basic_data_path + "train_name_list.json"
    train_label_dic_path = classify_basic_data_path + "train_label_dic.json"
    test_name_list_path = classify_basic_data_path + "test_name_list.json"
    test_label_dic_path = classify_basic_data_path + "test_label_dic.json"

    train_name_list = utils.ReadJson(train_name_list_path)
    train_label_dic = utils.ReadJson(train_label_dic_path)
    test_name_list = utils.ReadJson(test_name_list_path)
    test_label_dic = utils.ReadJson(test_label_dic_path)

    train_label_list = utils.GetLabelList(train_name_list, train_label_dic)
    test_label_list = utils.GetLabelList(test_name_list, test_label_dic)

    classify_predict_path = utils.classify_predict_path
    classify_predict_path_0 = classify_predict_path + "DNN_predict_0.json"
    classify_predict_path_1 = classify_predict_path + "DNN_predict_1.json"
    classify_predict_path_2 = classify_predict_path + "DNN_predict_2.json"

    classify_model_path = utils.classify_model_path
    classify_model_path_0 = classify_model_path + "DNN_model_0.pkl"
    classify_model_path_1 = classify_model_path + "DNN_model_1.pkl"
Example #22
if __name__ == '__main__':
    time_predict_path = utils.time_predict_path
    time_predict_path_0 = time_predict_path + "time_predict_0.json"
    time_predict_path_1 = time_predict_path + "time_predict_1.json"
    time_predict_path_2 = time_predict_path + "time_predict_2.json"

    time_basic_data_path = utils.time_basic_data_path
    test_name_list_path = time_basic_data_path + "test_name_list.json"
    test_data_path = time_basic_data_path + "test_data.csv"
    predict_data_path = time_basic_data_path + "time_predict_data.csv"

    with open(test_data_path, newline='') as csvfile:
        data = list(csv.reader(csvfile))

    test_name_list = utils.ReadJson(test_name_list_path)
    method_list = utils.method_list
    for test_set_predict_path in [
            time_predict_path_0, time_predict_path_1, time_predict_path_2
    ]:
        predict = utils.ReadJson(test_set_predict_path)
        for method in method_list:
            time_list = predict[method]
            for index in range(len(data)):
                data[index].append(str(time_list[index]))

    for index in range(len(data)):
        predict_time = "timeout"
        for i in range(len(utils.method_list)):
            temp_point = i + 1
            temp_time = data[index][temp_point]
Example #23
            sum_time_list.append(sum_time)
            solved_number_list.append(solved_number)
        else:
            sum_time += temp_time
            sum_time_list.append(sum_time)
            solved_number += 1
            solved_number_list.append(solved_number)
    return sum_time_list, solved_number_list


if __name__ == '__main__':
    time_predict_path = utils.time_predict_path
    time_predict_path_0 = time_predict_path + "time_predict_0.json"
    time_predict_path_1 = time_predict_path + "time_predict_1.json"
    time_predict_path_2 = time_predict_path + "time_predict_2.json"
    time_predict_0 = utils.ReadJson(time_predict_path_0)
    time_predict_1 = utils.ReadJson(time_predict_path_1)
    time_predict_2 = utils.ReadJson(time_predict_path_2)
    time_predict_list = [time_predict_2, time_predict_1, time_predict_0]
    time_predict_label_list = [
        "2-depth Encoding", "1-depth Encoding", "0-depth Encoding"
    ]

    time_basic_data_path = utils.time_basic_data_path
    test_name_list_path = time_basic_data_path + "test_name_list.json"
    test_time_message_path = time_basic_data_path + "test_time_message.json"
    test_timeout_message_path = time_basic_data_path + "test_timeout_message.json"
    test_name_list = utils.ReadJson(test_name_list_path)
    test_time_message = utils.ReadJson(test_time_message_path)
    test_timeout_message = utils.ReadJson(test_timeout_message_path)
def ProcessDataForBenchmark(AVY_dprove_path, pdr_IC3_path, others_path,
                            hwmcc_clean_path, train_path, test_path):
    AVY_dprove_dic = utils.ReadJson(AVY_dprove_path)

    pdr_IC3_dic = utils.ReadJson(pdr_IC3_path)
    train_name_list = list(pdr_IC3_dic.keys())

    others_list = utils.ReadJson(others_path)
    others_dic = {}
    for aig in others_list:
        aig_name = list(aig.keys())[0]
        run_times = aig[aig_name]
        temp_dic = {}
        for method_time_pair in run_times:
            method = list(method_time_pair.keys())[0]
            time = str(method_time_pair[method])
            temp_dic[method] = time
        others_dic[aig_name] = temp_dic

    hwmcc_dic = utils.ReadJson(hwmcc_clean_path)
    test_name_list = list(hwmcc_dic.keys())
    print(len(train_name_list))
    print(len(test_name_list))
    train_name_list = list(set(train_name_list) - set(test_name_list))
    print(len(train_name_list))

    data_list = []
    for name in train_name_list:
        temp_data = [name]
        temp_data.append(AVY_dprove_dic[name]["dprove"])
        temp_data.append(pdr_IC3_dic[name]["pdr"])
        temp_data.append(others_dic[name]["iimc"])
        temp_data.append(pdr_IC3_dic[name]["IC3"])

        mark = False
        for time in temp_data[1:]:
            if time not in ("timeout", "failed", "0.0", "0"):
                mark = True
        if mark:
            data_list.append(temp_data)

    with open(train_path, "w") as writer:
        for line in data_list:
            writer.write(",".join(line) + "\n")

    data_list = []
    for name in test_name_list:
        temp_data = [name]
        temp_data.append(hwmcc_dic[name]["dprove"])
        temp_data.append(hwmcc_dic[name]["pdr"])
        temp_data.append(hwmcc_dic[name]["iimc"])
        temp_data.append(hwmcc_dic[name]["IC3"])

        mark = False
        for time in temp_data[1:]:
            if time not in ("timeout", "failed", "0.0", "0"):
                mark = True
        if mark:
            data_list.append(temp_data)

    with open(test_path, "w") as writer:
        for line in data_list:
            writer.write(",".join(line) + "\n")
Example #25
import utils
import matplotlib.pyplot as plt

if __name__ == '__main__':
    importance_message_path = utils.importance_message_path
    importance_path_0 = importance_message_path + "importance_0.json"
    importance_path_1 = importance_message_path + "importance_1.json"
    importance_path_2 = importance_message_path + "importance_2.json"

    importance_0 = utils.ReadJson(importance_path_0)
    importance_1 = utils.ReadJson(importance_path_1)
    importance_2 = utils.ReadJson(importance_path_2)

    importance_fig_path = utils.importance_fig_path
    importance_0_save_path = importance_fig_path + "importance_0.pdf"
    importance_1_save_path = importance_fig_path + "importance_1.pdf"
    importance_2_save_path = importance_fig_path + "importance_2.pdf"

    plt.figure()
    #plt.title("Importance of 0-depth Encoding")
    plt.bar(range(len(importance_0)), importance_0, width=0.3, color="k")
    #plt.xlabel('Features')
    #plt.ylabel('Importance')
    x = range(0, 4, 1)
    y = [0, 0.1, 0.2, 0.3, 0.4]
    plt.xticks(x, size=25)
    plt.yticks(y, size=25)
    plt.subplots_adjust(left=0.12, right=0.99, top=0.96, bottom=0.1)
    plt.savefig(importance_0_save_path)
    plt.show()
Example #26
import utils
import numpy as np

if __name__ == '__main__':
    embedded_dir_2 = utils.embedded_dir_2

    train_name_list_path = utils.classify_basic_data_path + "train_name_list.json"
    test_name_list_path = utils.classify_basic_data_path + "test_name_list.json"
    train_name_list = utils.ReadJson(train_name_list_path)
    test_name_list = utils.ReadJson(test_name_list_path)

    train_vec_list = utils.GetVecList(embedded_dir_2, train_name_list)
    test_vec_list = utils.GetVecList(embedded_dir_2, test_name_list)

    train_statistic_vec = np.array(train_vec_list).sum(axis=0)
    test_statistic_vec = np.array(test_vec_list).sum(axis=0)
    print(train_statistic_vec)
    print(test_statistic_vec)
    for index in range(133, 161):
        if train_statistic_vec[index] != 0:
            print("train")
            print(index)
        if test_statistic_vec[index] != 0:
            print("test")
            print(index)