Code example #1
 #check whether the pattern is invalid; if so the calculation is pointless, but run it anyway to be safe and mark the output with invalid.info
 if os.path.exists(os.path.join(args.pattern_path,"invalid.info")):
     print "Pattern is invalid... problem with sampling approaches"
     with open(os.path.join(output_path,"invalid.info"),"w") as f:
         f.write("invalid pattern "+args.pattern_path)
 
 #writing input gml into output
 if(not os.path.exists(output_path)):
     os.makedirs(output_path)
 nx.write_gml(pattern, output_path+'/input_pattern.gml')
 
 #choose or load root node and nr observations
 if not os.path.exists(os.path.join(args.output_path,'root_node.dec')):
     hist=analyzer.get_sorted_labels_by_occurence_frequency_in_graph(args.data_graph_path)
     root_node,root_node_predicate_name=u.choose_root_node(pattern,None,hist)
     with open(os.path.join(args.output_path,'root_node.dec'),'w') as f:
         f.write(str(root_node)+" "+root_node_predicate_name.rstrip().lstrip()+"\n")
         f.write("Chosen by furer during the selection ...")
 else: #not the selection phase, so the root node was already decided earlier
     with open(os.path.join(args.output_path,'root_node.dec'),'r') as f:
         for line in f.readlines():
             root_node=int(line.split(" ")[0])
             root_node_predicate_name=str(line.split(" ")[1].rstrip().lstrip())
             break
 
 all_randnode_times = []
 rndicts = []
 exhaustive_approach_results_file=os.path.join(args.exhaustive_approach_results_path,'results_'+pattern_file_name+'.res')
 if os.path.exists(exhaustive_approach_results_file):
     all_randnode_times = [] 
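
Examples #1, #3, and #4 all write and re-read the same two-line root_node.dec file: the first line carries the root node id and its predicate name, the second a short provenance note. Below is a minimal sketch of that bookkeeping as a pair of helpers, assuming exactly that format; the helper names are hypothetical and not part of the original code.

import os

def write_root_node_decision(output_path, root_node, predicate_name, note):
    # Persist the chosen root node so later runs reuse the same decision.
    with open(os.path.join(output_path, 'root_node.dec'), 'w') as f:
        f.write(str(root_node) + " " + predicate_name.strip() + "\n")
        f.write(note)

def read_root_node_decision(output_path):
    # First line is "<root_node_id> <predicate_name>"; anything after it is a note.
    with open(os.path.join(output_path, 'root_node.dec'), 'r') as f:
        parts = f.readline().split(" ")
        return int(parts[0]), parts[1].strip()
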
Code example #2
import os
import sched
import threading
import time
import timeit

import networkx as nx

# Project-specific modules (utils, analyzer, ut, OBDsearch, exhaustive,
# experiments.globals, Wrong_root_node) are assumed to be imported elsewhere.


def get_nr_emb_within_time(data_graph_path, pattern_path, output_path,
                           time_seconds):
    print "Exhaustive checkup ...."
    nr_emb = None
    monitoring_marks = utils.generate_monitoring_marks(time_seconds,
                                                       time_seconds)

    data_graph = None
    try:
        data_graph = nx.read_gpickle(data_graph_path)
    except Exception:
        # fall back to GML if the data graph is not stored as a gpickle
        data_graph = nx.read_gml(data_graph_path)

    number_of_nodes_in_data = len(data_graph)
    pattern = nx.read_gml(pattern_path)
    #vis.visualize_graph(pattern, "sat")
    #analyzer.add_values_in_pattern_for_graph_if_missing(pattern)
    output_path = os.path.join(output_path)
    if not os.path.exists(output_path):
        os.makedirs(output_path)
    root_node_predicate_name = None  #not predefined; let the algorithm pick it and record its id in the pattern
    pattern_name = os.path.basename(pattern_path)[:-4]

    #     logging.basicConfig(
    #          level=logging.DEBUG,
    #          filename=os.path.join(output_path,'error_exhaustive.log'),
    #          filemode='w')
    #     sys.excepthook = my_excepthook

    root_node = None
    #first check if the root node is determined by some other algorithm
    if not os.path.exists(os.path.join(output_path, 'root_node.dec')):
        hist = analyzer.get_sorted_labels_by_occurence_frequency_in_graph(
            data_graph_path)
        root_node, root_node_predicate_name = ut.choose_root_node(
            pattern, root_node_predicate_name, hist)
        with open(os.path.join(output_path, 'root_node.dec'), 'w') as f:
            f.write(str(root_node) + " ")
            f.write(str(root_node_predicate_name) + "\n")
            f.write("Determined by exhaustive approach")
    else:
        #read root node from the file
        with open(os.path.join(output_path, 'root_node.dec'), 'r') as f:
            for line in f.readlines():
                root_node = int(line.split(" ")[0])
                root_node_predicate_name = str(
                    line.split(" ")[1].rstrip().lstrip())
                break

    print "root node predicate name: ", root_node_predicate_name
    #get root nodes
    root_nodes = [
        x for x in data_graph.nodes()
        if data_graph.node[x]['predicate'] == root_node_predicate_name
    ]
    print "Number of root nodes: ", len(root_nodes)

    #get OBD
    print "Root node,", pattern.node[root_node]
    OBdecomp = OBDsearch.get_heuristic4_OBD(pattern, startNode=root_node)

    if OBdecomp is None:
        print "No OBDecomp!"
        no_obd_decomp = True
        with open(os.path.join(output_path, 'no_obdecomp.info'), 'w') as f:
            f.write("No OBDecomp!")
        OBdecomp = OBDsearch.get_flatList(pattern, startNode=root_node)

    #get ordered list from OBD
    Plist = [item for sublist in OBdecomp for item in sublist]
    print "Using OBD: %s" % str(OBdecomp)
    print "and Plist: %s" % str(Plist)
    print "monitoring marks: ", monitoring_marks
    start = timeit.default_timer()
    try:
        lock = threading.Lock()
        print "starting scheduler"
        s = sched.scheduler(time.time, time.sleep)
        e1 = s.enter(
            0, 4, exhaustive.find_nr_emb,
            (data_graph, pattern, Plist, root_nodes, output_path, lock))
        t = threading.Thread(target=s.run)
        t.daemon = True
        t.start()
        time.sleep(time_seconds)
        end = timeit.default_timer()
        print "Main finished after ", end - start, "seconds"
        freq_dict = experiments.globals.fdict_exhaustive_limited
        if len(freq_dict) == 0:
            nr_emb = None
        else:
            nr_emb = sum(freq_dict.values())
    except Wrong_root_node:
        print "Wrong root node exception occurred!"
    return nr_emb
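
The time budget in get_nr_emb_within_time is enforced indirectly: the enumeration runs in a daemon thread started through sched, the main thread simply sleeps for time_seconds, and afterwards whatever counts the worker has already written into the shared frequency dict are summed. Below is a stripped-down sketch of that pattern; slow_worker and shared_counts are placeholders standing in for exhaustive.find_nr_emb and experiments.globals.fdict_exhaustive_limited, not the project's actual code.

import sched
import threading
import time

shared_counts = {}  # placeholder for experiments.globals.fdict_exhaustive_limited

def slow_worker(lock):
    # Placeholder worker: keeps recording partial results until the process ends.
    i = 0
    while True:
        with lock:
            shared_counts[i] = 1
        i += 1
        time.sleep(0.1)

def run_with_time_budget(time_seconds):
    lock = threading.Lock()
    s = sched.scheduler(time.time, time.sleep)
    s.enter(0, 4, slow_worker, (lock,))  # same (delay, priority, action, args) call as above
    t = threading.Thread(target=s.run)
    t.daemon = True                      # daemon thread dies with the main process
    t.start()
    time.sleep(time_seconds)             # the worker only gets the time budget
    with lock:
        return sum(shared_counts.values())

Calling run_with_time_budget(2) returns however many entries the worker managed to record in two seconds, mirroring how the original sums fdict_exhaustive_limited after the sleep.
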
Code example #3
    
    #check whether the pattern is invalid; if so the calculation is pointless, but run it anyway to be safe and mark the output with invalid.info
    if os.path.exists(os.path.join(args.pattern_path,"invalid.info")):
        print "Pattern is invalid... problem with sampling approaches"
        with open(os.path.join(output_path,"invalid.info"),"w") as f:
            f.write("invalid pattern "+args.pattern_path)
 
    #writing input gml into output
    nx.write_gml(pattern, args.output_path+'/input_pattern.gml')
    
    #choose or load root node and nr observations
    desired_predicate=None
    print "Specified root node? ",os.path.join(args.output_path,'root_node.dec')
    if args.sample or not(os.path.exists(os.path.join(args.output_path,'root_node.dec'))):
        hist=analyzer.get_sorted_labels_by_occurence_frequency_in_graph(args.data_graph_path)
        root_node,root_node_predicate_name=u.choose_root_node(pattern,desired_predicate,hist)
        with open(os.path.join(args.output_path,'root_node.dec'),'w') as f:
            f.write(str(root_node)+" "+root_node_predicate_name.rstrip().lstrip()+"\n")
            f.write("Chosen by furer during the selection ...")
    else: #not the selection phase, so the root node was already decided earlier
        with open(os.path.join(args.output_path,'root_node.dec'),'r') as f:
            for line in f.readlines():
                root_node=int(line.split(" ")[0])
                root_node_predicate_name=str(line.split(" ")[1].rstrip().lstrip())
                break
        
    #if this is not the selection phase, the exhaustive approach was run, so read the NLIMIT values from its results file
    
    exhaustive_approach_results_file=os.path.join(args.exhaustive_approach_results_path,'results_'+pattern_file_name+'.res')
    if os.path.exists(exhaustive_approach_results_file):
        all_randnode_times = [] 
Code example #4
 if not os.path.exists(output_path):
     os.makedirs(output_path)
 if not os.path.exists(args.output_path + '/input_pattern.gml'):
     nx.write_gml(pattern, args.output_path + '/input_pattern.gml')
 #DETERMINING ROOT NODE
 root_node = None
 print "Root node predicate name: ", root_node_predicate_name
 print "Exists? ", os.path.join(args.output_path,
                                'root_node.dec'), os.path.exists(
                                    os.path.join(args.output_path,
                                                 'root_node.dec'))
 if root_node_predicate_name is not None or not os.path.exists(
         os.path.join(args.output_path, 'root_node.dec')):
     hist = analyzer.get_sorted_labels_by_occurence_frequency_in_graph(
         args.data_graph_path)
     root_node, root_node_predicate_name = ut.choose_root_node(
         pattern, root_node_predicate_name, hist)
     with open(os.path.join(args.output_path, 'root_node.dec'), 'w') as f:
         f.write(str(root_node) + " ")
         f.write(str(root_node_predicate_name) + "\n")
         f.write("Determined by exhaustive approach")
 else:
     #read root node from the file
     with open(os.path.join(args.output_path, 'root_node.dec'), 'r') as f:
         for line in f.readlines():
             root_node = int(line.split(" ")[0])
             root_node_predicate_name = str(
                 line.split(" ")[1].rstrip().lstrip())
             break
 print "root node predicate name: ", root_node_predicate_name
 #get root nodes
 root_nodes = [