def generate_graph(filename):
    num_of_tasks, num_of_processors, comp_costs, rate, comm_cost = create_input.init(filename)
    avg_comp_costs = []
    for row in comp_costs:
        avg_comp_costs.append(sum(row)/len(row))

    print('avg_comp_costs: ')
    print(avg_comp_costs)
    g = Graph(directed=True)
    g.add_vertices(num_of_tasks)
    g.vs["weight"] = avg_comp_costs

    print('node weights: ')
    print(g.vs["weight"])

    # comm_cost is a task-by-task matrix; -1 marks "no edge".
    for j, line in enumerate(comm_cost):
        for i in range(num_of_tasks):
            if line[i] != -1:
                g.add_edge(j, i)
                g[j, i] = line[i]  # igraph stores this as the edge "weight"

    print('edge weights: ')
    print(g.es["weight"])
    '''
    # Optional plotting:
    g.vs["label"] = g.vs["weight"]
    g.es["label"] = g.es["weight"]
    g.vs["name"] = ["Node %d" % i for i in range(g.vcount())]
    layout1 = g.layout("kk")
    plot(g, layout=layout1, autocurve=True)
    layout2 = g.layout("fr")
    plot(g, layout=layout2)
    '''
    return g
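A minimal usage sketch (the TGFF path is illustrative; any file accepted by create_input.init should work):

# Build the DAG and inspect it with igraph.
g = generate_graph('../tgff/input_0.tgff')
print('%d tasks, %d edges' % (g.vcount(), g.ecount()))
for e in g.es:
    print('task %d -> task %d, comm cost %s' % (e.source, e.target, e['weight']))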
Example 2
    def __init__(self, filename):
        self.num_task, self.num_processor, comp_cost, self.rate, self.data = init(
            filename)
        self.makespan = 0
        self.tasks = [Task(n) for n in range(self.num_task)]
        self.processors = [Processor(n) for n in range(self.num_processor)]
        self.start_task_num, self.end_task_num = 0, 1

        for line in self.data:
            print(line)

        for i in range(self.num_task):
            self.tasks[i].comp_cost = comp_cost[i]

        for task in self.tasks:
            task.avg_comp = sum(task.comp_cost) / self.num_processor

        self.cal_up_rank(self.tasks[self.start_task_num])
        self.cal_down_rank(self.tasks[self.end_task_num])
        self.cal_critical_path()
        self.cal_critical_processor()
        # Python 3 has no cmp(); sorting by the key directly is equivalent.
        self.tasks.sort(key=lambda t: t.up_rank, reverse=True)
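cal_up_rank is not shown here; in HEFT, a task's upward rank is its average computation cost plus the largest (average communication cost + upward rank) over its successors. A minimal recursive sketch, assuming each Task exposes a successors list and the scheduler has an avg_comm(src, dst) helper (both hypothetical):

    def cal_up_rank(self, task):
        # rank_u(t) = avg_comp(t) + max over successors s of
        #             (avg_comm(t, s) + rank_u(s))
        # Memoization is omitted for brevity.
        longest = 0
        for succ in task.successors:
            self.cal_up_rank(succ)
            longest = max(longest, self.avg_comm(task, succ) + succ.up_rank)
        task.up_rank = task.avg_comp + longest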
Example 3
    def __init__(self, filename):
        """
        Initialize some parameters.
        """
        self.num_task, self.num_processor, comp_cost, self.rate, self.data = init(filename)

        self.tasks = [Task(n) for n in range(self.num_task)]
        self.processors = [Processor(n) for n in range(self.num_processor)]
        self.start_task_num, self.end_task_num = 0, 9
        self.dup_tasks = []
        self.critical_pre_task_num = -1

        # for line in self.data:
        #     print(line)

        for i in range(self.num_task):
            self.tasks[i].comp_cost = comp_cost[i]

        for task in self.tasks:
            task.avg_comp = sum(task.comp_cost) / self.num_processor

        self.cal_up_rank(self.tasks[self.start_task_num])
        self.cal_down_rank(self.tasks[self.end_task_num])
        self.tasks.sort(key=lambda t: t.up_rank, reverse=True)
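cal_down_rank is the mirror image: a task's downward rank is the length of the longest path from the entry task to it. A sketch under the same hypothetical helpers:

    def cal_down_rank(self, task):
        # rank_d(t) = max over predecessors p of
        #             (rank_d(p) + avg_comp(p) + avg_comm(p, t))
        longest = 0
        for pred in task.predecessors:
            self.cal_down_rank(pred)
            longest = max(longest,
                          pred.down_rank + pred.avg_comp + self.avg_comm(pred, task))
        task.down_rank = longest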
Example 4
def create_input_heft(tgff_file, num_nodes, network_info, execution_info,
                      node_list, task_list, tasks):
    """Generate the TGFF file
    
    Args:
        - tgff_file (str): file of output TGFF file
        - num_nodes (int): number of nodes 
        - network_info (list): network profling information
        - execution_info (list): execution profiling information
        - node_list (list): list of nodes
        - task_list (list): (DAG) task list in the order of execution
        - tasks (list): DAG dictionary 
    """
    target = open(tgff_file, 'w')
    target.write('@TASK_GRAPH 0 {')
    target.write("\n")
    target.write('\tAPERIODIC')
    target.write("\n\n")

    task_map = ['t0_%d' % (i) for i in range(0, len(task_list))]
    task_ID_dict = dict(zip(task_list, range(0, len(task_list))))
    task_dict = dict(zip(task_list, task_map))

    computation_matrix = []
    for i in range(0, len(task_list)):
        task_times = [0 for i in range(num_nodes)]
        computation_matrix.append(task_times)

    task_size = {}

    # Read format: Node ID, Task, Execution Time, Output size
    for row in execution_info:
        # node_ids is a module-level mapping (node name -> numeric ID)
        # defined elsewhere in the original file.
        computation_matrix[task_ID_dict[row[1]]][node_ids[row[0]] - 1] = int(
            float(row[2]) * 10)
        task_size[row[1]] = row[3]

    for i in range(0, len(task_list)):
        line = "\tTASK %s\tTYPE %d \n" % (task_list[i], i)
        target.write(line)
    target.write("\n")

    # Needs checking
    v = 0
    keys = tasks.keys()
    for key in keys:
        for j in range(0, len(tasks[key])):
            # file size in Kbit is the communication cost
            comm_cost = int(float(task_size[key]))
            line = "\tARC a0_%d \tFROM %s TO %s \tTYPE %d" % (
                v, task_dict[key], task_dict.get(tasks[key][j]), comm_cost)
            v = v + 1
            target.write(line)
            target.write("\n")
    target.write("\n")
    target.write('}')

    # OK
    target.write('\n@computation_cost 0 {\n')

    # node_info is a module-level list of node names defined elsewhere
    # in the original file.
    line = '# type version %s\n' % (' '.join(node_info[1:]))
    target.write(line)

    for i in range(0, len(task_list)):
        line = '  %s    0\t%s\n' % (task_dict.get(task_list[i]), ' '.join(
            str(x) for x in computation_matrix[i]))
        target.write(line)
    target.write('}')
    target.write('\n\n\n\n')

    target.write('\n@quadratic 0 {\n')
    target.write('# Source Destination a b c\n')

    #OK
    for row in network_info:
        line = '  %s\t%s\t%s\n' % (row[0], row[2], row[4])
        target.write(line)
    target.write('}')
    target.close()

    num_task, task_names, num_node, comp_cost, rate, data, quaratic_profile = init(
        tgff_file)
    print('Checking the written information')
    print(num_task)
    print(comp_cost)
    print(rate)
    print(data)
    print(quaratic_profile)

    return
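For orientation, the written file has roughly this shape; the task names, node names, costs, and quadratic parameters below are invented for illustration. Note that TASK lines use the original task names while ARC and computation_cost lines use the generated t0_<i> aliases:

@TASK_GRAPH 0 {
	APERIODIC

	TASK task0	TYPE 0
	TASK task1	TYPE 1

	ARC a0_0 	FROM t0_0 TO t0_1 	TYPE 64

}

@computation_cost 0 {
# type version node1 node2
  t0_0    0	35 41
  t0_1    0	12 18
}

@quadratic 0 {
# Source Destination a b c
  node1	node2	(0.01, 1.2, 3.4)
}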
Example 5
def talker():
    pub = rospy.Publisher('dag', Dag, queue_size=10)
    rospy.init_node('dag_publisher', anonymous=True)
    pub_rate = rospy.Rate(0.5)  #Hz

    dag = Dag()
    dag.num_of_tasks, dag.num_of_processors, comp_cost, rate, comm_cost = create_input.init(
        '../tgff/input_0.tgff')

    dag.comp_cost.layout.dim.append(MultiArrayDimension())
    dag.comp_cost.layout.dim.append(MultiArrayDimension())
    dag.comp_cost.layout.dim[0].size = dag.num_of_tasks
    dag.comp_cost.layout.dim[1].size = dag.num_of_processors
    dag.comp_cost.layout.dim[
        0].stride = dag.num_of_tasks * dag.num_of_processors
    dag.comp_cost.layout.dim[1].stride = dag.num_of_processors
    dag.comp_cost.layout.data_offset = 0

    stride0 = dag.num_of_tasks * dag.num_of_processors
    stride1 = dag.num_of_processors
    offset = dag.comp_cost.layout.data_offset

    print('cost in comp_cost')
    for i in range(dag.num_of_tasks):
        print('line %d in comp_cost' % i)
        cost = comp_cost[i]
        print(cost)
        for j in range(dag.num_of_processors):
            dag.comp_cost.data.insert(offset + stride1 * i + j, cost[j])
    print('dag.comp_cost.data:')
    print(dag.comp_cost.data)

    # add communication matrix
    dag.data.layout.dim.append(MultiArrayDimension())
    dag.data.layout.dim.append(MultiArrayDimension())
    dag.data.layout.dim[0].size = dag.num_of_tasks
    dag.data.layout.dim[1].size = dag.num_of_processors
    dag.data.layout.dim[0].stride = dag.num_of_tasks * dag.num_of_processors
    dag.data.layout.dim[1].stride = dag.num_of_processors
    dag.data.layout.data_offset = 0

    for i in range(dag.num_of_tasks):
        for j in range(dag.num_of_processors):
            dag.data.data.insert(dag.data.layout.dim[1].stride * i + j,
                                 comm_cost[i][j])
        print(dag.data.data)

    # add rate matrix
    dag.rate.layout.dim.append(MultiArrayDimension())
    dag.rate.layout.dim.append(MultiArrayDimension())
    dag.rate.layout.dim[0].size = dag.num_of_processors
    dag.rate.layout.dim[1].size = dag.num_of_processors
    dag.rate.layout.dim[
        0].stride = dag.num_of_processors * dag.num_of_processors
    dag.rate.layout.dim[1].stride = dag.num_of_processors

    for i in range(dag.num_of_processors):
        for j in range(dag.num_of_processors):
            dag.rate.data.insert(dag.rate.layout.dim[1].stride * i + j,
                                 rate[i][j])
        print(dag.rate.data)
    """
    for i in range(dag.num_of_tasks):
        print 'line %d in comp_cost' % i
        dag.comp_cost.append(comp_cost[i])
        print dag.comp_cost[i]
        
        dag.data.append(data[i])
        print dag.data[i]
    for i in range(dag.num_of_processors):
        dag.rate.append(rate[i])
        print dag.rate[i]
    """
    """
    test the schedulers
    """

    cpop_scheduler = cpop.CPOP('../tgff/input_0.tgff')
    cpop_scheduler.run()
    cpop_scheduler.display_result()

    while not rospy.is_shutdown():
        pub.publish(dag)
        pub_rate.sleep()
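On the subscriber side, element (i, j) of the flattened comp_cost matrix is recovered with the same stride arithmetic used above. A minimal sketch, assuming the received message is dag_msg:

def comp_cost_at(dag_msg, i, j):
    # Row-major lookup using the strides published by talker().
    offset = dag_msg.comp_cost.layout.data_offset
    stride1 = dag_msg.comp_cost.layout.dim[1].stride
    return dag_msg.comp_cost.data[offset + stride1 * i + j]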
Example 6
target.write('\n@quadratic 0 {\n')
target.write('# Source Destination a b c\n')
client_mongo = MongoClient('mongodb://localhost:27017/')
db = client_mongo.central_network_profiler
logging = db['quadratic_parameters'].find().sort([
    ("Time_Stamp[UTC]", pymongo.ASCENDING)
]).limit(num_quadratic)

for record in logging:
    info_to_csv = [
        record['Source[Tag]'], record['Destination[Tag]'],
        str(record['Parameters'])
    ]
    line = '  %s\t%s\t%s\n' % (info_to_csv[0], info_to_csv[1], info_to_csv[2])
    target.write(line)

target.write('}')
target.close()

#test
num_task, task_names, num_processor, comp_cost, rate, data, quaratic_profile = init(
    'input_0.tgff')
print(num_task)
print(num_processor)
print(comp_cost)
print(rate)
print(data)
#print(quaratic_profile)
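For reference, a quadratic_parameters document of the (assumed) shape below would be written out as one @quadratic line; the field names come from the loop above, the values are invented:

record = {'Source[Tag]': 'node1', 'Destination[Tag]': 'node2',
          'Parameters': [0.01, 1.2, 3.4]}
info_to_csv = [record['Source[Tag]'], record['Destination[Tag]'],
               str(record['Parameters'])]
print('  %s\t%s\t%s' % (info_to_csv[0], info_to_csv[1], info_to_csv[2]))
# prints:   node1	node2	[0.01, 1.2, 3.4]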
Example 7
    def __init__(self, filename, node_info):
        """
        Initialize some parameters.
        """
        self.node_info = node_info
        self.num_task, self.task_names, self.num_processor, comp_cost, self.rate, self.data, self.quaratic_profile = create_input.init(
            filename, node_info)

        self.tasks = [Task(n) for n in range(self.num_task)]
        self.processors = [Processor(n) for n in range(self.num_processor)]
        self.start_task_num, self.end_task_num = 0, self.num_task - 1
        self.dup_tasks = []
        self.task_mapper = ""
        self.critical_pre_task_num = -1

        for i in range(self.num_task):
            self.tasks[i].comp_cost = comp_cost[i]

        for task in self.tasks:
            task.avg_comp = sum(task.comp_cost) / self.num_processor

        self.cal_up_rank(self.tasks[self.start_task_num])
        self.cal_down_rank(self.tasks[self.end_task_num])
        # cmp() no longer exists in Python 3; sort by the key directly.
        self.tasks.sort(key=lambda t: t.up_rank, reverse=True)
Example 8
def create_input_heft(
    tgff_file,
    num_nodes,
    network_info,
    execution_info,
    task_names,
    dag_task_map,
    name_to_id,
    worker_node_names
):
    """Generate the TGFF file

    Args:
        - tgff_file (str): file of output TGFF file
        - num_nodes (int): number of nodes
        - network_info (list): network profling information
        - execution_info (list): execution profiler information
        - task_names (list): names of tasks ordered as per app_config.yaml
        - dag_task_map (dict): mapping of tasks to list of children without "home"
        - name_to_id (dict): mapping of Jupiter node name to enumerated ID
    """
    target = open(tgff_file, 'w')
    target.write('@TASK_GRAPH 0 {')
    target.write("\n")
    target.write('\tAPERIODIC')
    target.write("\n\n")

    task_map = ['t0_%d' % (i) for i in range(0, len(task_names))]
    task_ID_dict = dict(zip(task_names, range(0, len(task_names))))
    task_dict = dict(zip(task_names, task_map))

    computation_matrix = []
    for i in range(0, len(task_names)):
        task_times = [0 for i in range(num_nodes)]
        computation_matrix.append(task_times)

    task_size = {}

    # Read format: Node ID, Task, Execution Time, Output size
    for row in execution_info:
        computation_matrix[task_ID_dict[row[1]]][name_to_id[row[0]] - 1] = int(float(row[2]) * 10)
        # 100000
        task_size[row[1]] = row[3]

    for i in range(0, len(task_names)):
        line = "\tTASK %s\tTYPE %d \n" % (task_names[i], i)
        target.write(line)
    target.write("\n")

    # Needs checking
    v = 0
    keys = dag_task_map.keys()
    for key in keys:
        for j in range(0, len(dag_task_map[key])):
            # file size in Kbit is the communication cost
            comm_cost = int(float(task_size[key]))
            line = "\tARC a0_%d \tFROM %s TO %s \tTYPE %d" % (v, task_dict[key], task_dict.get(dag_task_map[key][j]), comm_cost)
            v = v + 1
            target.write(line)
            target.write("\n")
    target.write("\n")
    target.write('}')

    # OK
    target.write('\n@computation_cost 0 {\n')

    line = '# type version %s\n' % (' '.join(worker_node_names[:]))
    target.write(line)

    for i in range(0, len(task_names)):
        line = '  %s    0\t%s\n' % (task_dict.get(task_names[i]), ' '.join(str(x) for x in computation_matrix[i]))
        target.write(line)
    target.write('}')
    target.write('\n\n\n\n')

    target.write('\n@quadratic 0 {\n')
    target.write('# Source Destination a b c\n')

    # OK
    for row in network_info:
        line = '  %s\t%s\t%s\n'%(row[0], row[2], row[4])
        target.write(line)
    target.write('}')
    target.close()

    # Parse the file back purely as a sanity check; the outputs are ignored.
    _, _, _, _, _, _, _ = init(tgff_file, worker_node_names)
    return
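A hypothetical call showing the expected argument shapes; every value below is invented, and network_info rows only need valid entries at indices 0, 2, and 4:

create_input_heft(
    tgff_file='input_0.tgff',
    num_nodes=2,
    network_info=[['node1', '', 'node2', '', '(0.01, 1.2, 3.4)']],
    execution_info=[['node1', 'task0', '3.5', '64'],
                    ['node2', 'task0', '4.1', '64'],
                    ['node1', 'task1', '1.2', '32'],
                    ['node2', 'task1', '1.8', '32']],
    task_names=['task0', 'task1'],
    dag_task_map={'task0': ['task1'], 'task1': []},
    name_to_id={'node1': 1, 'node2': 2},
    worker_node_names=['node1', 'node2'],
)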
Example 9
    def __init__(self, filename):
        """
        Initialize some parameters.
        """
        NODE_NAMES = os.environ["NODE_NAMES"]
        self.node_info = NODE_NAMES.split(":")
        self.num_task, self.task_names, self.num_processor, comp_cost, self.rate, self.data,self.quaratic_profile = init(filename)

        self.tasks = [Task(n) for n in range(self.num_task)]
        self.processors = [Processor(n) for n in range(self.num_processor)]
        self.start_task_num, self.end_task_num = 0, self.num_task-1
        self.dup_tasks = []
        self.critical_pre_task_num = -1


        for i in range(self.num_task):
            self.tasks[i].comp_cost = comp_cost[i]

        for task in self.tasks:
            task.avg_comp = sum(task.comp_cost) / self.num_processor

        self.cal_up_rank(self.tasks[self.start_task_num])
        self.cal_down_rank(self.tasks[self.end_task_num])
        self.tasks.sort(key=lambda t: t.up_rank, reverse=True)