Example no. 1
def main(result, data, redo, name_of_folder):
    """Post-process "false furer" monitoring results stored under
    ``os.path.join(result, name_of_folder)`` and write a CSV summary row
    to ``<name_of_folder>.info``.

    Parameters:
        result         -- common result directory of the experiment.
        data           -- path to the data graph (gpickle, GML fallback).
        redo           -- when False and >= 120 monitoring files already
                          exist, only the summary row is rewritten and the
                          process exits.
        name_of_folder -- sub-folder of ``result`` holding this run's output.
    """
    common_result_path = result
    output_path = os.path.join(result, name_of_folder)
    exhaustive_approach_results_path = os.path.join(result,
                                                    "exhaustive_approach")
    # The data graph may be stored either as a gpickle or as GML; try the
    # binary format first and fall back to GML on any read failure.
    try:
        data_graph = nx.read_gpickle(data)
    except Exception:
        data_graph = nx.read_gml(data)
    pattern = nx.read_gml(os.path.join(output_path, 'input_pattern.gml'))
    # Load pickled inputs; 'with' guarantees the handles are closed
    # (the original code leaked all three file objects).
    with open(os.path.join(output_path, 'Plist_base.pickle'),
              'rb') as pkl_file:
        Plist_base = pickle.load(pkl_file)
    with open(os.path.join(output_path, 'monitoring_marks.pickle'),
              'rb') as pkl_file:
        monitoring_marks = pickle.load(pkl_file)
    with open(os.path.join(output_path, 'monitoring_reports.pickle'),
              'rb') as pkl_file:
        monitoring_reports = pickle.load(pkl_file)

    detailed_result_path = os.path.join(output_path, "monitoring")
    # Results are considered complete once >= 120 monitoring files exist;
    # in that case only (re)write the summary row and stop.
    if (not redo) and os.path.exists(detailed_result_path) and len(
            os.listdir(detailed_result_path)) >= 120:
        row = csv_report.get_row(result, output_path, "false_furer",
                                 result.replace("RESULTS", "PATTERNS"))
        with open(os.path.join(output_path, name_of_folder + ".info"),
                  'w') as f:
            f.write(str(row))
        sys.exit()
    # Pattern name = last non-empty component of the result path
    # (handles a trailing "/").
    pattern_file_name = common_result_path.split("/")[-1]
    if pattern_file_name == "":
        pattern_file_name = common_result_path.split("/")[-2]

    picklename = os.path.join(exhaustive_approach_results_path,
                              "fdict_exhaustive_%s.pickle" % pattern_file_name)
    with open(picklename, 'rb') as pickin:
        fdict_exhaustive = pickle.load(pickin)
    approaches.globals_sampling.output_path = output_path
    # Experiment name is inferred from the pattern file name prefix.
    if pattern_file_name.startswith("dblp"):
        approaches.globals_sampling.experiment_name = "dblp"
    else:
        approaches.globals_sampling.experiment_name = "yeast"

    my_version_report_online(fdict_exhaustive, data_graph, pattern,
                             monitoring_marks, output_path,
                             detailed_result_path, monitoring_reports,
                             exhaustive_approach_results_path, Plist_base, 1,
                             pattern_file_name)
    row = csv_report.get_row(result, output_path, "false_furer",
                             result.replace("RESULTS", "PATTERNS"))
    with open(os.path.join(output_path, name_of_folder + ".info"), 'w') as f:
        f.write(str(row))
Example no. 2
def main(pattern_path, output_folder, result, data, redo, write,
         monitoring_reports):
    """Post-process "furer" monitoring results under
    ``os.path.join(result, output_folder)`` and write a CSV summary row
    to ``furer_row.info``.

    Parameters:
        pattern_path       -- path to the pattern GML file.
        output_folder      -- sub-folder of ``result`` with this run's output.
        result             -- common result directory of the experiment.
        data               -- path to the data graph (gpickle, GML fallback).
        redo               -- force re-processing even when results exist.
        write              -- passed through to my_version_report.
        monitoring_reports -- fallback reports; overridden by the pickle on
                              disk when one exists.
    """
    common_result_path = result
    output_path = os.path.join(result, output_folder)
    detailed_result_path = os.path.join(output_path, "monitoring")
    # Results are considered complete once >= 100 monitoring files exist;
    # in that case only (re)write the summary row and stop.
    if (not redo) and os.path.exists(detailed_result_path) and len(
            os.listdir(detailed_result_path)) >= 100:
        row = csv_report.get_row(result, output_path, "furer",
                                 result.replace("RESULTS", "PATTERNS"))
        with open(os.path.join(output_path, "furer_row.info"), 'w') as f:
            f.write(str(row))
        sys.exit()
    exhaustive_approach_results_path = os.path.join(common_result_path,
                                                    "exhaustive_results")
    # The data graph may be a gpickle or GML file; try binary first.
    try:
        data_graph = nx.read_gpickle(data)
    except Exception:
        data_graph = nx.read_gml(data)
    pattern = nx.read_gml(pattern_path)
    # Load pickled inputs; 'with' closes the handles the original leaked.
    with open(os.path.join(output_path, 'Plist.pickle'), 'rb') as pkl_file:
        Plist = pickle.load(pkl_file)
    with open(os.path.join(output_path, 'monitoring_marks.pickle'),
              'rb') as pkl_file:
        monitoring_marks = pickle.load(pkl_file)
    # Prefer monitoring reports pickled on disk over the ones passed in.
    reports_path = os.path.join(output_path, 'monitoring_reports.pickle')
    if os.path.exists(reports_path):
        with open(reports_path, 'rb') as pkl_file:
            monitoring_reports = pickle.load(pkl_file)
    # Pattern name = last non-empty component of the result path.
    pattern_file_name = common_result_path.split("/")[-1]
    if pattern_file_name == "":
        pattern_file_name = common_result_path.split("/")[-2]
    fdict_exhaustive = None
    fudict_path = os.path.join(exhaustive_approach_results_path,
                               "fudict.pickle")
    if os.path.exists(fudict_path):
        # BUGFIX: originally opened in text mode ('r'), unlike every other
        # pickle read here; binary mode is required for correctness on
        # Windows and under Python 3.
        with open(fudict_path, 'rb') as pickin:
            fdict_exhaustive = pickle.load(pickin)
    approaches.globals_sampling.output_path = output_path

    my_version_report(fdict_exhaustive, data_graph, pattern, monitoring_marks,
                      output_path, detailed_result_path, monitoring_reports,
                      exhaustive_approach_results_path, Plist, 1,
                      pattern_file_name, write)
    # NOTE(review): this get_row call passes pattern_path as an extra leading
    # argument, unlike the other examples — presumably a different overload;
    # confirm against csv_report's signature.
    row = csv_report.get_row(pattern_path, result, output_path, "furer",
                             result.replace("RESULTS", "PATTERNS"))
    with open(os.path.join(output_path, "furer_row.info"), 'w') as f:
        f.write(str(row))
Example no. 3
def main(result, data, redo, write, monitoring_reports):
    flag_version = 'my'
    common_result_path = result
    output_path = os.path.join(result, 'results_false_furer')
    exhaustive_approach_results_path = os.path.join(common_result_path,
                                                    "exhaustive_approach")
    detailed_result_path = os.path.join(output_path, "monitoring")
    if (not redo) and os.path.exists(detailed_result_path) and len(
            os.listdir(detailed_result_path)) >= 120:
        print "Results already post-processed"
        row = csv_report.get_row(result, output_path, "false_furer",
                                 result.replace("RESULTS", "PATTERNS"))
        with open(os.path.join(output_path, "false_furer_row.info"), 'w') as f:
            f.write(str(row))
        sys.exit()
    try:
        data_graph = nx.read_gpickle(data)
    except:
        data_graph = nx.read_gml(data)
    #data_graph=nx.read_gpickle(data)
    pattern = nx.read_gml(os.path.join(common_result_path,
                                       'input_pattern.gml'))
    #load Plist
    pkl_file = open(os.path.join(output_path, 'Plist.pickle'), 'rb')
    Plist = pickle.load(pkl_file)
    #load monitoring marks
    pkl_file = open(os.path.join(output_path, 'monitoring_marks.pickle'), 'rb')
    monitoring_marks = pickle.load(pkl_file)
    #load monitoring_reports
    pattern_file_name = common_result_path.split("/")[-1]
    fdict_exhaustive = None
    if pattern_file_name == "":
        pattern_file_name = common_result_path.split("/")[-2]

    if os.path.exists(os.path.join(output_path, 'monitoring_reports.pickle')):
        pkl_file = open(os.path.join(output_path, 'monitoring_reports.pickle'),
                        'rb')
        monitoring_reports = pickle.load(pkl_file)

    if write == True:
        picklename = os.path.join(
            exhaustive_approach_results_path,
            "fdict_exhaustive_%s.pickle" % pattern_file_name)
        pickin = open(picklename, 'rb')
        fdict_exhaustive = pickle.load(pickin)

    print common_result_path.split("/")

    experiments.globals.output_path = output_path
    if pattern_file_name.startswith("dblp"):
        experiments.globals.experiment_name = "dblp"
    else:
        experiments.globals.experiment_name = "yeast"
    #smplr.complete_combinations(fdict_exhaustive, data_graph,  pattern,  Plist)      # add zeros to all not present combinations
    #smplr.smooth(fdict_exhaustive,  fdict_exhaustive)     # Laplace smoothing also for the exhaustive
    #Report the results
    print "Monitoring report..."
    if (flag_version == 'my'):
        my_version_report(fdict_exhaustive, data_graph, pattern,
                          monitoring_marks, output_path, detailed_result_path,
                          monitoring_reports, exhaustive_approach_results_path,
                          Plist, 1, pattern_file_name,
                          write)  #print monitoring_reports
    if (flag_version == 'martin'):
        martin_version_report(fdict_exhaustive, data_graph, pattern,
                              monitoring_marks, output_path,
                              detailed_result_path, monitoring_reports,
                              exhaustive_approach_results_path, Plist, 1,
                              pattern_file_name)  #print monitoring_reports

    #report.report_monitoring(monitoring_marks,output_path,detailed_result_path,monitoring_reports,exhaustive_approach_results_path,data_graph,pattern,Plist,1,pattern_file_name)    #print monitoring_reports
    row = csv_report.get_row(result, output_path, "false_furer",
                             result.replace("RESULTS", "PATTERNS"))
    with open(os.path.join(output_path, "false_furer_row.info"), 'w') as f:
        f.write(str(row))
Example no. 4
def main(result, data, redo, write, monitoring_reports):
    print "reporting furer"
    flag_version = 'my'
    common_result_path = result
    output_path = os.path.join(result, 'results_furer')
    detailed_result_path = os.path.join(output_path, "monitoring")
    if (not redo) and os.path.exists(detailed_result_path) and len(
            os.listdir(detailed_result_path)) >= 100:
        print "Results already post-processed"
        row = csv_report.get_row(result, output_path, "furer",
                                 result.replace("RESULTS", "PATTERNS"))
        with open(os.path.join(output_path, "furer_row.info"), 'w') as f:
            f.write(str(row))
        sys.exit()
    exhaustive_approach_results_path = os.path.join(common_result_path,
                                                    "exhaustive_approach")
    try:
        data_graph = nx.read_gpickle(data)
    except:
        data_graph = nx.read_gml(data)
    #data_graph=nx.read_gpickle(data)
    pattern = nx.read_gml(os.path.join(common_result_path,
                                       'input_pattern.gml'))
    #load Plist
    pkl_file = open(os.path.join(output_path, 'Plist.pickle'), 'rb')
    Plist = pickle.load(pkl_file)
    #load monitoring marks
    pkl_file = open(os.path.join(output_path, 'monitoring_marks.pickle'), 'rb')
    monitoring_marks = pickle.load(pkl_file)
    #load monitoring_reports
    if os.path.exists(os.path.join(output_path, 'monitoring_reports.pickle')):
        pkl_file = open(os.path.join(output_path, 'monitoring_reports.pickle'),
                        'rb')
        monitoring_reports = pickle.load(pkl_file)
    print common_result_path, common_result_path.split("/")
    pattern_file_name = common_result_path.split("/")[-1]
    if pattern_file_name == "":
        pattern_file_name = common_result_path.split("/")[-2]
    print "Number of reports: ", len(monitoring_reports)
    print "pattern file name: ", pattern_file_name
    print "Do we need exhaustive dict: ", write
    fdict_exhaustive = None
    if write == True:
        picklename = os.path.join(
            exhaustive_approach_results_path,
            "fdict_exhaustive_%s.pickle" % pattern_file_name)
        pickin = open(picklename, 'rb')
        fdict_exhaustive = pickle.load(pickin)
    experiments.globals.output_path = output_path
    if pattern_file_name.startswith("dblp"):
        experiments.globals.experiment_name = "dblp"
    else:
        experiments.globals.experiment_name = "yeast"

    if (flag_version == 'my'):
        my_version_report(fdict_exhaustive, data_graph, pattern,
                          monitoring_marks, output_path, detailed_result_path,
                          monitoring_reports, exhaustive_approach_results_path,
                          Plist, 1, pattern_file_name,
                          write)  #print monitoring_reports
    if (flag_version == 'martin'):
        martin_version_report(fdict_exhaustive, data_graph, pattern,
                              monitoring_marks, output_path,
                              detailed_result_path, monitoring_reports,
                              exhaustive_approach_results_path, Plist, 1,
                              pattern_file_name)  #print monitoring_reports
    row = csv_report.get_row(result, output_path, "furer",
                             result.replace("RESULTS", "PATTERNS"))
    with open(os.path.join(output_path, "furer_row.info"), 'w') as f:
        f.write(str(row))
Example no. 5
def main(result, data, redo, exp, write, monitoring_reports):
    print "************************************** Reporting random for ", result, "*************************************************"
    flag_version = exp
    print "Running version: ", flag_version
    output_path = os.path.join(result, 'random_vertex_approach')
    common_result_path = result
    detailed_result_path = os.path.join(output_path, "monitoring")
    exhaustive_approach_results_path = os.path.join(common_result_path,
                                                    "exhaustive_approach")
    if (not redo) and os.path.exists(detailed_result_path) and len(
            os.listdir(detailed_result_path)) >= 120:
        print "Results already post-processed"
        row = csv_report.get_row(result, output_path, "random",
                                 result.replace("RESULTS", "PATTERNS"))
        with open(os.path.join(output_path, "random_row.info"), 'w') as f:
            f.write(str(row))
        sys.exit()
    try:
        data_graph = nx.read_gpickle(data)
    except:
        data_graph = nx.read_gml(data)
    #data_graph=nx.read_gpickle(data)
    pattern = nx.read_gml(os.path.join(common_result_path,
                                       'input_pattern.gml'))
    with open(os.path.join(result, 'root_node.dec'), 'r') as f:
        for line in f.readlines():
            root_node = int(line.split(" ")[0])
            root_node_predicate_name = str(
                line.split(" ")[1].rstrip().lstrip())
            break
    root_nodes = [
        x for x in data_graph.nodes() if data_graph.node[x]['predicate'] ==
        pattern.node[root_node]['predicate']
    ]
    print "NR root nodeS: ", len(root_nodes)
    experiments.globals.nr_root_nodes = len(root_nodes)

    fdict_exhaustive = None
    pattern_file_name = common_result_path.split("/")[-1]
    if pattern_file_name == "":
        pattern_file_name = common_result_path.split("/")[-2]

    if os.path.exists(os.path.join(output_path, 'monitoring_reports.pickle')):
        pkl_file = open(os.path.join(output_path, 'monitoring_reports.pickle'),
                        'rb')
        monitoring_reports = pickle.load(pkl_file)

    if write == True:
        picklename = os.path.join(
            exhaustive_approach_results_path,
            "fdict_exhaustive_%s.pickle" % pattern_file_name)
        pickin = open(picklename, 'rb')
        fdict_exhaustive = pickle.load(pickin)

    print "Nr monitoring reports: ", len(monitoring_reports)
    #load Plist
    pkl_file = open(os.path.join(output_path, 'Plist.pickle'), 'rb')
    Plist = pickle.load(pkl_file)
    #load monitoring marks
    pkl_file = open(os.path.join(output_path, 'monitoring_marks.pickle'), 'rb')
    monitoring_marks = pickle.load(pkl_file)

    experiments.globals.output_path = output_path
    if pattern_file_name.startswith("dblp"):
        experiments.globals.experiment_name = "dblp"
    else:
        experiments.globals.experiment_name = "yeast"

    if (flag_version == 'my'):
        my_version_report(fdict_exhaustive, data_graph, pattern,
                          monitoring_marks, output_path, detailed_result_path,
                          monitoring_reports, exhaustive_approach_results_path,
                          Plist, 1, pattern_file_name,
                          write)  #print monitoring_reports
    if (flag_version == 'martin'):
        martin_version_report(fdict_exhaustive, data_graph, pattern,
                              monitoring_marks, output_path,
                              detailed_result_path, monitoring_reports,
                              exhaustive_approach_results_path, Plist, 1,
                              pattern_file_name)  #print monitoring_reports

    #smplr.complete_combinations(fdict_exhaustive, data_graph,  pattern,  Plist)      # add zeros to all not present combinations
    #smplr.smooth(fdict_exhaustive,  fdict_exhaustive)     # Laplace smoothing also for the exhaustive
    #Report the results
    #print "Monitoring report..."
    #report.report_monitoring(monitoring_marks,output_path,detailed_result_path,monitoring_reports,exhaustive_approach_results_path,data_graph,pattern,Plist,1,pattern_file_name)    #print monitoring_reports
    row = csv_report.get_row(result, output_path, "random",
                             result.replace("RESULTS", "PATTERNS"))
    with open(os.path.join(output_path, "random_row.info"), 'w') as f:
        f.write(str(row))