Example 1
def do_get_node_ttl_dist_map_job(node_ttl_dist_map_list, sub_pipe=None):
    """
    :param node_ttl_dist_map_list: [... node_ttl_dist_map_k ...]
    :param sub_pipe: Child end of a pipe whose other end is held by the master
    process; if sub_pipe is not None, the result messages are sent through it
    to the master.
    :return:
    """
    """
    node_ttl_dist_map_pattern(reference pattern):
                     
    [graph_name_str, tree_method_info_str, node_ttl_dist_map_str]
    tree_method_info : 'tree num'_'tree method'
    
    """
    """
    Instantiate node_ttl_dist_map_msg_pattern:
                                      
    [graph_name_str, tree_method_info_str, elapsed_time_str]
    """
    msg_list = get_instances(node_ttl_dist_map_pattern,
                             node_ttl_dist_map_msg_pattern,
                             node_ttl_dist_map_list, sep)
    msg_to_send = []
    """
    Parse parameter tree_num
    """
    tree_method_info_list = get_instances(node_ttl_dist_map_pattern,
                                          tree_method_info_str,
                                          node_ttl_dist_map_list, sep)
    tree_num_list = [parse_tree_num(item) for item in tree_method_info_list]

    related_graph_list = get_graph_instances(graph_dir, graph_name_str,
                                             node_ttl_dist_map_list,
                                             node_ttl_dist_map_pattern, sep)

    for i in range(0, len(node_ttl_dist_map_list)):
        graph_path = path.join(graph_dir, related_graph_list[i])
        adj_table = get_adjtable(graph_path)

        node_ttl_dist_map_path = path.join(node_ttl_dist_map_dir,
                                           node_ttl_dist_map_list[i])

        start_time = clock()
        node_ttl_dist_map = get_node_ttl_dist_map(adj_table, tree_num_list[i])
        end_time = clock()

        pickle_to_disk(node_ttl_dist_map_path, node_ttl_dist_map)
        """
        Instantiate elapsed_time_str in node_ttl_dist_map_msg_pattern.
        """
        msg_list[i] = msg_list[i].replace(
            elapsed_time_str, str(get_elapsed_minute(start_time, end_time)))

        msg_list[i] = format_message(msg_list[i], sep)
        print msg_list[i]
        msg_to_send.append(msg_list[i])

    if sub_pipe:
        sub_pipe.send(msg_to_send)
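
The worker above only reports results back through sub_pipe. As a minimal sketch (not part of the original examples), this is how a master process could drive it with the standard multiprocessing module; run_job_with_pipe and job_list are illustrative names:

from multiprocessing import Pipe, Process

def run_job_with_pipe(job_list):
    # The master keeps one end of the pipe, the worker gets the other.
    master_pipe, sub_pipe = Pipe()
    worker = Process(target=do_get_node_ttl_dist_map_job,
                     args=(job_list, sub_pipe))
    worker.start()
    messages = master_pipe.recv()  # blocks until the worker calls sub_pipe.send(...)
    worker.join()
    return messages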
Example 2
def do_improve_init_sols_job(monitor_list, sub_pipe):
    """
    :param monitor_list: [..., monitor_k,...]
    :param sub_pipe: Child end of a pipe whose other end is held by the master
    process; if sub_pipe is not None, the result messages are sent through it
    to the master.
    :return:
    """
    """
    monitor_pattern(reference pattern):
    [graph_name_str, monitor_info_fmt, monitor_str] 
    """
    """
    Instantiate init_sol_pattern from instances of monitor_pattern.
    
    init_sol_pattern: 
                       ********************  *********************                                     
    [graph_name_str,   tree_method_info_str, init_sol_monitor_info, init_sol_str]
    
    """

    init_sol_list = get_instances(monitor_pattern, init_sol_pattern,
                                  monitor_list, sep)
    """
    Instantiate monitor_msg_pattern from instances of monitor_pattern.
    monitor_msg_pattern: 
                                       ***********  ****************
    [graph_name_str, monitor_info_fmt, penalty_str, elapsed_time_str]
    
    """
    msg_list = get_instances(monitor_pattern, monitor_msg_pattern,
                             monitor_list, sep)
    """
    Parse parameter trunc number and trunc step
    """
    monitor_info_list = get_instances(monitor_pattern, monitor_info_str,
                                      monitor_list, sep)
    """
    [... (trunc number, trunc step) ...]
    """
    trunc_num_step_list = [
        parse_trunc_num_step(item) for item in monitor_info_list
    ]

    related_graph_id_list = get_graph_instances(graph_dir, graph_name_str,
                                                monitor_list, monitor_pattern,
                                                sep)

    for i in range(0, len(monitor_list)):
        """
        Load graph_title.
        """
        graph_path = os.path.join(graph_dir, related_graph_id_list[i])
        adj_table = get_adjtable(graph_path)
        """
        Instantiate tree_method_info_str, init_sol_monitor_info_str.
        """
        node_num = len(adj_table)
        tree_num = get_tree_num(node_num)
        init_sol_list[i] = init_sol_list[i].replace(
            tree_method_info_str, tree_num_method.format(tree_num))
        max_monitor_num = get_max_monitor_num(node_num)
        init_sol_list[i] = init_sol_list[i].replace(
            init_sol_monitor_info_str,
            max_monitor_num_method.format(max_monitor_num))
        """
        Load initial solution.
        """
        init_sol_path = os.path.join(init_sol_dir, init_sol_list[i])
        init_sol = pickle_from_disk(init_sol_path)
        """
        Improve initial solution and write to disk.
        """
        start_time = clock()
        monitors, penalty = improve_init_sols(adj_table, init_sol,
                                              *trunc_num_step_list[i])
        end_time = clock()

        monitor_path = os.path.join(monitor_dir, monitor_list[i])
        pickle_to_disk(monitor_path, monitors)
        """
        Instantiate penalty_str and elapsed_time_str in monitor_msg_pattern.
        """
        msg_list[i] = msg_list[i].replace(penalty_str, str(penalty))
        msg_list[i] = msg_list[i].replace(
            elapsed_time_str, str(get_elapsed_minute(start_time, end_time)))

        msg_list[i] = format_message(msg_list[i], sep)
        print msg_list[i]

    if sub_pipe:
        sub_pipe.send(msg_list)
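
Each monitor_info token above carries the truncation parameters that improve_init_sols consumes via *trunc_num_step_list[i]. A purely illustrative sketch of what parse_trunc_num_step might look like, assuming the token begins with '<trunc_num>-<trunc_step>' (the real encoding and helper are defined elsewhere in this project):

def parse_trunc_num_step(monitor_info):
    # Hypothetical format: the first '_'-separated token is '<trunc_num>-<trunc_step>'.
    trunc_num, trunc_step = monitor_info.split('_')[0].split('-')
    return int(trunc_num), int(trunc_step)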
Example 3
def do_get_avg_detection_time_job(avg_detec_time_list, sub_pipe):
    """
    :param avg_detec_time_list: [... avg_detec_time_id ...],
    :param sub_pipe:
    :return:
    """
    """
    avg_detection_time_pattern:
    [graph_name_str, 
     trunc_num_step_str, 
     infection_info_str, 
     snapshot_num_str,
     avg_detection_time_str]

    monitor_pattern:
                     ****************
    [graph_name_str, monitor_info_str, monitor_str]
    """
    graph_id_list = get_graph_instances(graph_dir, graph_name_str,
                                        avg_detec_time_list,
                                        avg_detection_time_pattern, sep)

    trunc_num_step_list = get_instances(avg_detection_time_pattern,
                                        trunc_num_step_str,
                                        avg_detec_time_list, sep)

    monitor_list = get_instances(avg_detection_time_pattern, monitor_pattern,
                                 avg_detec_time_list, sep)

    msg_list = [None] * len(avg_detec_time_list)

    for graph_id_idx in range(0, len(graph_id_list)):
        graph_id = graph_id_list[graph_id_idx]
        trunc_num_step = trunc_num_step_list[graph_id_idx]
        monitor_id = monitor_list[graph_id_idx]
        avg_detec_time_id = avg_detec_time_list[graph_id_idx]
        """
        Make sure the existence of graph_title file.
        """
        graph_path = path.join(graph_dir, graph_id)

        if not path.exists(graph_path):
            msg_list[graph_id_idx] = 'Missing {0:s}. Cannot finish {1:s}'. \
                format(graph_path, avg_detec_time_id)

            print msg_list[graph_id_idx]
            continue
        """
        Make sure the existence of related monitor file.
        """
        monitor_info_list = [
            monitor_info_fmt.format(trunc_num_step, method)
            for method in methods
        ]

        monitor_id_list = [
            monitor_id.replace(monitor_info_str, monitor_info)
            for monitor_info in monitor_info_list
        ]

        monitor_paths = [
            path.join(monitor_dir, monitor_id)
            for monitor_id in monitor_id_list
        ]

        chk_res = [path.exists(monitor_path) for monitor_path in monitor_paths]
        if not all(chk_res):
            msg_list[graph_id_idx] = 'Missing monitor. Cannot finish {0:s}'. \
                format(avg_detec_time_id)
            print msg_list[graph_id_idx]
            continue
        """
        Now, all related files exist.
        """
        avg_detec_time_path = path.join(avg_detection_time_dir,
                                        avg_detec_time_id)
        adj_table = get_adjtable(path.join(graph_dir, graph_id))
        """
        monitors = [
                    ...
                    ... 
                    [step * 1] ... [step * trunc_point_num] ---- method_k
                    ...
                    ... 
                   ]
        """
        monitors = []
        for monitor_path in monitor_paths:
            monitors.extend(pickle_from_disk(monitor_path))
        """
        res = [
                 ...
                 res_1, ..., res_(trunc point num) ---- method_k
                 ...
              ]
        """
        res = [
            round(float(item), 2) for item in get_avg_detection_time(
                adj_table, monitors, snapshot_num)
        ]
        """
        Construct message.
        avg_detection_time_pattern:
        [graph_name_str, trunc_num_step_str, infection_info_str, snapshot_num_str, avg_detection_time_str]
        """
        msg_parts = avg_detec_time_id.split(sep)

        msg_content = '{0:s}:{1:s}:{2:s}:{3:s}\n'.format(
            msg_parts[0], msg_parts[1], msg_parts[2], msg_parts[3])

        base_method = methods[0]
        base_line = res[trunc_point_num - 1]
        msg_content += '{0:s}:{1:.2f}\n'.format(base_method, base_line)

        for mtd_idx in range(1, len(methods)):
            tmp = (mtd_idx + 1) * trunc_point_num - 1
            msg_content += '{0:s}:{1:.2f} ({2:.2f}%)\n'.format(
                methods[mtd_idx], res[tmp],
                ((res[tmp] - base_line) * 100.0) / base_line)

        msg_list[graph_id_idx] = msg_content
        print msg_content

        if trunc_point_num > 1:
            """
            res = [
                        ...
                        res_1, ..., res_(trunc point num) ---- method_k
                        ...
                      ]
            """
            pickle_to_disk(avg_detec_time_path, res)
        elif trunc_point_num == 1:
            """
            Baseline and stretch ratio.
            """
            write_to_disk(avg_detec_time_path, msg_content)

    if sub_pipe:
        sub_pipe.send(msg_list)
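
The stretch-ratio loop above relies on res being a flat list in which each method occupies a contiguous block of trunc_point_num entries, so the full-length result of method k sits at index (k + 1) * trunc_point_num - 1. A small worked sketch with hypothetical sizes:

# Two methods and trunc_point_num = 3 give
# res = [m0_t1, m0_t2, m0_t3, m1_t1, m1_t2, m1_t3].
trunc_point_num = 3
method_count = 2
last_indices = [(k + 1) * trunc_point_num - 1 for k in range(method_count)]
print(last_indices)  # [2, 5] -> the last truncation point of each method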
Example 4
def do_get_avg_detection_time_job(avg_penalty_list, sub_pipe):
    """
    :param avg_penalty_list: [... avg_penalty_id ...],
    :param sub_pipe:
    :return:
    """
    """
    avg_penalty_pattern:
    [model_type_str, trunc_num_step_str, penalty_str]
    
    monitor_pattern:
     **************  ****************
    [graph_name_str, monitor_info_str, monitor_str]
    """

    models = get_instances(avg_penalty_pattern, model_type_str,
                           avg_penalty_list, sep)

    trunc_num_step_list = get_instances(avg_penalty_pattern,
                                        trunc_num_step_str, avg_penalty_list,
                                        sep)

    msg_list = [None] * len(avg_penalty_list)

    for mdl_idx in range(0, len(models)):
        model = models[mdl_idx]
        avg_penalty_id = avg_penalty_list[mdl_idx]
        """
        Make sure the existence of related graph_title files.
        """
        graph_names = [
            unit_sep.join([model, str(iid)]) for iid in instance_num_range
        ]
        graph_ids = get_graph_instances(graph_dir, graph_name_str, graph_names,
                                        graph_name_str, sep)

        graph_paths = [
            path.join(graph_dir, graph_id) for graph_id in graph_ids
        ]

        chk_res = [path.exists(graph_path) for graph_path in graph_paths]
        if not all(chk_res):
            msg_list[mdl_idx] = 'Missing required graph files. ' \
                                'Cannot finish {0:s}'.format(avg_penalty_id)
            print msg_list[mdl_idx]

            continue
        """
        Make sure the existence of related monitor files.
        """
        monitor_info_list = [
            monitor_info_fmt.format(trunc_num_step_list[mdl_idx], method)
            for method in methods
        ]

        monitor_ids = [
            sep.join(item)
            for item in product(graph_names, monitor_info_list, [monitor_str])
        ]

        monitor_paths = [
            path.join(monitor_dir, monitor_id) for monitor_id in monitor_ids
        ]
        chk_res = [path.exists(monitor_path) for monitor_path in monitor_paths]

        if not all(chk_res):
            msg_list[mdl_idx] = 'Missing required monitor files. ' \
                                'Cannot finish {0:s}'.format(avg_penalty_id)
            print msg_list[mdl_idx]

            continue
        """
        Now, all related files exist.
        """
        avg_penalty_path = path.join(penalty_dir, avg_penalty_id)
        avg_res = [0] * (len(methods) * trunc_point_num)
        for j in range(0, len(graph_paths)):
            adj_table = get_adjtable(graph_paths[j])
            """
            monitors = [
                        ...
                        ... 
                        [step * 1] ... [step * trunc_point_num] ---- method_k
                        ...
                        ... 
                       ]
            """
            monitors = []
            for monitor_path in monitor_paths[j * len(methods):(j + 1) *
                                              len(methods)]:
                monitors.extend(pickle_from_disk(monitor_path))
            """
            avg_res = [
                    ...
                    res_1, ..., res_(trunc point num) ---- method_k
                    ...
                   ]
            """
            avg_res = map(add, avg_res, get_penalty(adj_table, monitors))

        avg_res = [
            round(float(item) / len(graph_paths), 2) for item in avg_res
        ]
        """
        Construct message.
        avg_penalty_pattern:  [model_type_str, trunc_num_step_str, penalty_str]
        """
        msg_parts = avg_penalty_id.split(sep)

        msg_content = '{0:s}:{1:s}:{2:s}\n'.format(
            msg_parts[0],
            msg_parts[1],
            msg_parts[2],
        )

        base_method = methods[0]
        base_line = avg_res[trunc_point_num - 1]
        msg_content += '{0:s}:{1:.2f}\n'.format(base_method, base_line)

        for mtd_idx in range(1, len(methods)):
            tmp = (mtd_idx + 1) * trunc_point_num - 1
            msg_content += '{0:s}:{1:.2f} ({2:.2f}%)\n'.format(
                methods[mtd_idx], avg_res[tmp],
                ((avg_res[tmp] - base_line) * 100.0) / base_line)

        msg_list[mdl_idx] = msg_content
        print msg_content

        if trunc_point_num > 1:
            """
            avg_res = [
                        ...
                        avg_res_1, ..., avg_res_(trunc point num) ---- method_k
                        ...
                      ]
            """
            pickle_to_disk(avg_penalty_path, avg_res)
        elif trunc_point_num == 1:
            """
            Baseline and stretch ratio.
            """
            write_to_disk(avg_penalty_path, msg_content)

    if sub_pipe:
        sub_pipe.send(msg_list)
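
The accumulation step avg_res = map(add, avg_res, get_penalty(...)) sums the per-graph penalty lists elementwise (add is presumably operator.add, imported elsewhere; Python 2's map returns a list). A portable sketch of the same step with hypothetical values:

avg_res = [0.0, 0.0, 0.0]
new_vals = [1.5, 2.0, 0.5]            # hypothetical penalties for one graph
avg_res = [a + b for a, b in zip(avg_res, new_vals)]
print(avg_res)                        # [1.5, 2.0, 0.5]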
Example 5
def do_get_init_sol_job(init_sol_list, sub_pipe=None):
    """
    :param init_sol_list: [..., init_sol_k, ...]
    :param sub_pipe: Child end of a pipe whose other end is held by the master
    process; if sub_pipe is not None, the result messages are sent through it
    to the master.
    """
    """
    init_sol_pattern(reference pattern):                   
    [graph_name_str, tree_method_info_str, init_sol_monitor_info_str, init_sol_str]
    tree_method_info : tree-num_tree-method
    init_sol_monitor_info: max-monitor-num_method
    """
    """
    Instantiate node_ttl_dist_map_pattern.
    node_ttl_dist_map_pattern:                                            
    [graph_name_str, tree_method_info_str, node_ttl_dist_map_str]
                           
    """
    node_ttl_dist_map_list = get_instances(init_sol_pattern,
                                           node_ttl_dist_map_pattern,
                                           init_sol_list, sep)
    """
    Instantiate init_sol_message_pattern. 
    init_sol_message_pattern: 
    [graph_name_str, tree_method_info_str, init_sol_monitor_info_str, 
    ***********  ****************
    penalty_str, elapsed_time_str]   

    """
    msg_list = get_instances(init_sol_pattern, init_sol_msg_pattern,
                             init_sol_list, sep)

    msg_to_send = []
    """
    Parse parameter max_monitor_num.
    """
    init_sol_monitor_info_list = get_instances(init_sol_pattern,
                                               init_sol_monitor_info_str,
                                               init_sol_list, sep)

    max_monitor_num_list = [
        parse_max_monitor_num(item) for item in init_sol_monitor_info_list
    ]

    related_graph_id_list = get_graph_instances(graph_dir, graph_name_str,
                                                init_sol_list,
                                                init_sol_pattern, sep)

    for i in range(0, len(init_sol_list)):
        """
        Load adjacent table. 
        """
        graph_path = path.join(graph_dir, related_graph_id_list[i])
        adj_table = get_adjtable(graph_path)
        """
        Load node_ttl_dist_map.
        """
        node_ttl_dist_map_path = path.join(node_ttl_dist_map_dir,
                                           node_ttl_dist_map_list[i])
        node_ttl_dist_map = pickle_from_disk(node_ttl_dist_map_path)
        """
        Get initial solution and write to disk.
        """
        init_sol_path = path.join(init_sol_dir, init_sol_list[i])
        start_time = clock()
        monitors, penalty = get_init_sol(adj_table, node_ttl_dist_map,
                                         max_monitor_num_list[i])
        end_time = clock()
        pickle_to_disk(init_sol_path, monitors)
        """
        Instantiate penalty_str and elapsed_time_str in init_sol_message_pattern.
        """
        msg_list[i] = msg_list[i].replace(penalty_str, str(penalty))
        msg_list[i] = msg_list[i].replace(
            elapsed_time_str, str(get_elapsed_minute(start_time, end_time)))
        """
        Process msg.
        """
        msg_list[i] = format_message(msg_list[i], sep)
        print msg_list[i]
        msg_to_send.append(msg_list[i])

    if sub_pipe:
        sub_pipe.send(msg_to_send)
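
Every job here reports its running time through get_elapsed_minute(start_time, end_time). The real helper is defined elsewhere in the project; a hypothetical sketch, assuming clock() returns seconds:

def get_elapsed_minute(start_time, end_time):
    # Convert a clock() interval in seconds to minutes (illustrative only).
    return round((end_time - start_time) / 60.0, 2)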
Example 6
def do_maximize_degree_discount_heuristic_job(monitor_list, sub_pipe=None):
    """
    :param monitor_list: [..., monitor_k,...]
    :param sub_pipe: Child end of a pipe whose other end is held by the master
    process; if sub_pipe is not None, the result messages are sent through it
    to the master.
    :return:
    """
    related_graph_id_list = get_graph_instances(graph_dir,
                                                graph_name_str,
                                                monitor_list,
                                                monitor_pattern,
                                                sep)

    """
    monitor_pattern (reference pattern):

    [graph_name_str, monitor_info_fmt, monitor_str]

    monitor_msg_pattern:
                                       ***********  ****************
    [graph_name_str, monitor_info_fmt, penalty_str, elapsed_time_str]
    """
    message_list = get_instances(monitor_pattern,
                                 monitor_msg_pattern,
                                 monitor_list,
                                 sep)

    """
    Parse parameter trunc number and trunc step
    """
    monitor_info_list = get_instances(monitor_pattern,
                                      monitor_info_str,
                                      monitor_list,
                                      sep)

    """
    [... (trunc number, trunc step) ...]
    """
    trunc_num_step_list = [parse_trunc_num_step(item) for item in monitor_info_list]

    for i in range(0, len(monitor_list)):
        graph_path = os.path.join(graph_dir, related_graph_id_list[i])
        adj_table = get_adjtable(graph_path)
        monitor_path = os.path.join(monitor_dir, monitor_list[i])

        node_num = len(adj_table)
        start_time = clock()
        monitors, penalty = maximize_degree_discount_heuristic(adj_table,
                                                               get_max_monitor_num(node_num))
        end_time = clock()

        trunc_num, trunc_step = trunc_num_step_list[i]
        monitors = [monitors[0: trunc_idx * trunc_step] for trunc_idx in range(1, trunc_num+1, 1)]
        pickle_to_disk(monitor_path, monitors)

        message_list[i] = message_list[i].replace(penalty_str, str(penalty))
        message_list[i] = message_list[i].replace(elapsed_time_str,
                                                  str(get_elapsed_minute(start_time, end_time)))
        message_list[i] = format_message(message_list[i],
                                         sep)
        print message_list[i]

    if sub_pipe:
        sub_pipe.send(message_list)
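
The truncation step above turns one ranked monitor list into trunc_num nested prefixes, each trunc_step monitors longer than the previous one. A worked sketch with hypothetical values:

monitors = ['a', 'b', 'c', 'd', 'e', 'f']
trunc_num, trunc_step = 3, 2
prefixes = [monitors[0: i * trunc_step] for i in range(1, trunc_num + 1)]
print(prefixes)
# [['a', 'b'], ['a', 'b', 'c', 'd'], ['a', 'b', 'c', 'd', 'e', 'f']]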
Example 7
def do_get_penalty_job(penalty_list, sub_pipe=None):
    """
    :param penalty_list: [... penalty_k ...]

    :param sub_pipe: Child end of a pipe whose other end is held by the master
    process; if sub_pipe is not None, the result messages are sent through it
    to the master.

    :return: writes [..., penalty_val_(i*scale), ...] to penalty_path, where
    penalty_val_(i*scale) is the penalty value obtained with i*scale monitors.
    """
    """
    penalty pattern: 
    [graph_name_str, trunc_num_step_str, penalty_str]
    
    monitor pattern:
                     ****************
    [graph_name_str, monitor_info_str, monitor_str]
        
    """
    trunc_num_step_list = get_instances(penalty_pattern, trunc_num_step_str,
                                        penalty_list, sep)

    monitor_list = get_instances(penalty_pattern, monitor_pattern,
                                 penalty_list, sep)

    graph_id_list = get_graph_instances(graph_dir, graph_name_str,
                                        penalty_list, penalty_pattern, sep)

    msg_list = [None] * len(penalty_list)

    for graph_id_idx in range(0, len(graph_id_list)):
        graph_id = graph_id_list[graph_id_idx]
        trunc_num_step = trunc_num_step_list[graph_id_idx]
        monitor_id = monitor_list[graph_id_idx]
        penalty_id = penalty_list[graph_id_idx]
        """
        Make sure the existence of graph_title file.
        """
        graph_path = path.join(graph_dir, graph_id)

        if not path.exists(graph_path):
            msg_list[graph_id_idx] = 'Missing {0:s}. Cannot finish {1:s}'.\
                format(graph_path, penalty_id)

            print msg_list[graph_id_idx]
            continue
        """
        Make sure the existence of related monitor file.
        """
        monitor_info_list = [
            monitor_info_fmt.format(trunc_num_step, method)
            for method in methods
        ]

        monitor_id_list = [
            monitor_id.replace(monitor_info_str, monitor_info)
            for monitor_info in monitor_info_list
        ]

        monitor_paths = [
            path.join(monitor_dir, monitor_id)
            for monitor_id in monitor_id_list
        ]
        chk_res = [path.exists(monitor_path) for monitor_path in monitor_paths]
        if not all(chk_res):
            msg_list[graph_id_idx] = 'Missing monitor. Cannot finish {0:s}'. \
                format(penalty_id)
            print msg_list[graph_id_idx]
            continue
        """
        Now, all related files exist.
        """
        penalty_path = path.join(penalty_dir, penalty_id)
        adj_table = get_adjtable(graph_path)
        """
        monitors = [
                    ...
                    ... 
                    [step * 1] ... [step * trunc_point_num] ---- method_k
                    ...
                    ... 
                   ]
        """
        monitors = []
        for monitor_path in monitor_paths:
            monitors.extend(pickle_from_disk(monitor_path))
        """
        res = [
                ...
                res_1, ..., res_(trunc point num) ---- method_k
                ...
               ]
        """
        res = [
            round(float(item), 2) for item in get_penalty(adj_table, monitors)
        ]
        """
        Construct message.
        penalty_pattern:
        [graph_name_str, trunc_num_step_str, penalty_str]
        """

        msg_parts = penalty_id.split(sep)

        msg_content = '{0:s}:{1:s}:{2:s}\n'.format(
            msg_parts[0],
            msg_parts[1],
            msg_parts[2],
        )

        base_method = methods[0]
        base_line = res[trunc_point_num - 1]
        msg_content += '{0:s}:{1:.2f}\n'.format(base_method, base_line)

        for mtd_idx in range(1, len(methods)):
            tmp = (mtd_idx + 1) * trunc_point_num - 1
            msg_content += '{0:s}:{1:.2f} ({2:.2f}%)\n'.format(
                methods[mtd_idx], res[tmp],
                ((res[tmp] - base_line) * 100.0) / base_line)

        msg_list[graph_id_idx] = msg_content
        print msg_content

        if trunc_point_num > 1:
            """
            res = [
                        ...
                        res_1, ..., res_(trunc point num) ---- method_k
                        ...
                      ]
            """
            pickle_to_disk(penalty_path, res)
        elif trunc_point_num == 1:
            """
            Baseline and stretch ratio.
            """
            write_to_disk(penalty_path, msg_content)

    if sub_pipe:
        sub_pipe.send(msg_list)
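
All of these jobs persist their results through the same small helpers: pickle_to_disk / pickle_from_disk for Python objects and write_to_disk for the formatted text reports. Their real implementations live elsewhere in the project; hypothetical sketches:

import pickle

def pickle_to_disk(file_path, obj):
    with open(file_path, 'wb') as f:
        pickle.dump(obj, f)

def pickle_from_disk(file_path):
    with open(file_path, 'rb') as f:
        return pickle.load(f)

def write_to_disk(file_path, content):
    with open(file_path, 'w') as f:
        f.write(content)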