Example #1
    def test_large_workflow(self):
        self.workflow = Workflow(
            "/home/rwb/Dropbox/PhD/writeups/observation_graph-model/json/3000Node.json"
        )
        env = Environment(
            "/home/rwb/Dropbox/PhD/writeups/observation_graph-model/json/3000Node_sys.json"
        )
        self.workflow.add_environment(env)
        heft(self.workflow)
Example #2
    def test_create_sample_pop(self):
        logger.debug("HEFT makespan {0}".format(heft(self.wf).makespan))
        pop = generate_population(self.wf, size=25, rng=self.rng, skip_limit=5)
        for soln in pop:
            self.assertEqual(soln.execution_order[-1].task.aft, soln.makespan)
        logger.debug("GA Initial Population")
        logger.debug("########################")
        for soln in pop:
            logger.debug(("Execution order: {0}".format(soln.execution_order)))
            logger.debug("Allocations: {0}".format(
                soln.list_all_allocations()))
            logger.debug("Makespan (s): {0}".format(
                calculate_fitness(['time'], soln)))
            logger.debug("Cost ($){0}".format(calculate_fitness(['cost'],
                                                                soln)))

            soln.fitness = calculate_fitness(['time', 'cost'], soln)
        fig, ax = plt.subplots()
        ax.set_xlim([90, 200])
        ax.set_ylim([100, 250])
        x = [soln.fitness['time'] for soln in pop]
        y = [soln.fitness['cost'] for soln in pop]
        ax.scatter(x, y, c='red')
        ax.set_axisbelow(True)
        ax.legend()
        ax.grid(True)
        plt.xlabel('Solution Runtime')
        plt.ylabel('Solution execution cost')
        plt.show()
Example #3
def run_algorithm(arg, parser):
    if arg['algorithm'] == 'heft':
        wf = Workflow(arg['workflow'])
        env = Environment(arg['environment'])
        wf.add_environment(env)
        print(heft(wf))
        print(wf.machine_alloc)
Example #4
    def test_it_works(self):
        # print(heft(self.workflow))
        print(heft(self.dense))
        self.dense.pretty_print_allocation()

        # for p in self.dense.machines:
        # 	print(p)
        # #print(heft(self.gnp))

Example #5
    def _run_scheduling(self, workflow):
        """
        Produce static schedules based on the algorithm specified at
        object creation.

        Returns
        -------
        solution : shadow.model.solution.Solution
            A solution object which describes a static schedule with
            additional information.
        """

        if self.algorithm == 'heft':
            solution = heft(workflow)
        elif self.algorithm == 'pheft':
            solution = pheft(workflow)
        elif self.algorithm == 'fcfs':
            solution = fcfs(workflow)
        else:
            raise RuntimeError(
                f"{self.algorithm} is not implemented by {str(self)}")
        LOGGER.debug("Solution makespan for {0} is {1}".format(
            self.algorithm, solution.makespan))
        return solution
Example #6
# import config as cfg

from shadow.algorithms.heuristic import heft
from shadow.models.workflow import Workflow

# This workflow calculates the task time for each resource based on the demand
# and supply vectors provided in the 'flop_rep_test.json' file.
wf = Workflow('topcuoglu.graphml')
retval = wf.load_attributes('flop_rep_test.json')
print(heft(wf))
wf.pretty_print_allocation()


# Original HEFT workflow; task time on each resource is provided directly by
# the .json file.

wf = Workflow('topcuoglu.graphml')
retval = wf.load_attributes('heft.json', calc_time=False)
print(heft(wf))
wf.pretty_print_allocation()
Example #7
# Copyright (C) 17/6/20 RW Bunney

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

# This is adapted and expanded upon in the shadowgen.ipynb notebook

from shadow.models.workflow import Workflow
from shadow.models.environment import Environment
import shadow.algorithms.heuristic as heuristic

workflow = Workflow('dax_files/output/shadow_Epigenomics_24.json')
env = Environment('environments/sys.json')
workflow.add_environment(env)
heuristic.heft(workflow)
Example #8
from shadow.algorithms.heuristic import heft

from shadow.models.workflow import Workflow
from shadow.models.environment import Environment

from shadow.visualiser.plot import AllocationPlot
import matplotlib.pyplot as plt
from test import config

HEFTWorkflow = Workflow(config.test_heuristic_data['topcuoglu_graph'])
env = Environment(config.test_heuristic_data['topcuoglu_graph_system'])
HEFTWorkflow.add_environment(env)
heft(HEFTWorkflow)

sample_allocation = AllocationPlot(solution=HEFTWorkflow.solution)
fig, ax = sample_allocation.plot()
plt.show()
Example #9
import matplotlib.pyplot as plt

import shadow.models.workflow as swf
import shadow.models.environment as senv
import shadow.algorithms.heuristic as sheuristic
import shadow.visualiser.plot as splot

heft_workflow = swf.Workflow('../dax_files/output/shadow_Epigenomics_24.json')
fcfs_workflow = swf.Workflow('../dax_files/output/shadow_Epigenomics_24.json')
shared_env = senv.Environment('../environments/sys.json')

heft_workflow.add_environment(shared_env)
fcfs_workflow.add_environment(shared_env)

heft_solution = sheuristic.heft(heft_workflow)
fcfs_solution = sheuristic.fcfs(fcfs_workflow)
# max_x = max(heft_solution.makespan,fcfs_solution.makespan)

print(heft_solution.makespan)

heft_plot = splot.AllocationPlot(heft_solution)
heft_fig, heft_ax = heft_plot.plot()
# for x in heft_ax:
# 	x.set_xlim(right=max_x+10)
# heft_ax.set_xlim(right=1000)
# plt.xlim([0,max_x+10])
plt.show()
fcfs_plot = splot.AllocationPlot(fcfs_solution)
fcfs_fig, fcfs_ax = fcfs_plot.plot()
Example #10
def run_workflow(workflow, environment):
    workflow.add_environment(environment)
    return heft(workflow)
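
run_workflow simply combines the two setup steps used throughout these examples: attach an Environment to a Workflow, then schedule it with heft. A minimal call sketch follows; the heft.json / sys.json file names are reused from Example #16 purely as illustrative inputs, not as files this helper requires.

from shadow.models.workflow import Workflow
from shadow.models.environment import Environment

# Illustrative inputs only; any workflow/system specification pair accepted by
# the Workflow and Environment constructors would work here.
solution = run_workflow(Workflow('heft.json'), Environment('sys.json'))
print(solution.makespan)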
Example #11
    def test_heft_schedule(self):
        # upward_rank(self.workflow)
        solution = heft(self.workflow)
        self.assertEqual(133, solution.makespan)
Example #12
    def test_schedule(self):
        solution = heft(self.workflow)
        self.assertEqual(98, solution.makespan)
Example #13
	def test_schedule(self):
		retval = heft(self.wf)
		self.wf.pretty_print_allocation()
		self.assertTrue(retval == 98)
Example #14
	def test_heft_schedule(self):
		# upward_rank(self.wf)
		retval = heft(self.wf)
		self.assertTrue(retval == 133)
Example #15
    def test_execution_order(self):
        correct_order = [0, 3, 2, 4, 1, 5, 6, 8, 7, 9]
        retval = heft(self.workflow)
        order = self.workflow.solution.execution_order
        for i, alloc in enumerate(order):
            self.assertEqual(correct_order[i], alloc.tid)
Example #16
from queue import Queue
from shadow.algorithms.heuristic import heft

from shadow.models.workflow import Workflow, Task
from shadow.models.environment import Environment

HEFTWorkflow = Workflow('heft.json')
env = Environment('sys.json')
HEFTWorkflow.add_environment(env)

print(heft(HEFTWorkflow))

DelayWorkflow = Workflow('heft_delay.json')
DelayWorkflow.add_environment(env)
print(heft(DelayWorkflow))


def calc_task_delay(task, delay, workflow):
    t = workflow.graph.tasks[task]
    aft = t.aft
    update_list = list(workflow.graph.successors(t))
    # add 10 to the start and finish time of each of these successors, and their successors
    update_queue = Queue()
    update_queue.put(update_list)
    print(update_queue)
    return workflow


task = HEFTWorkflow.tasks[0]
calc_task_delay(task, 10, workflow=HEFTWorkflow)
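
The calc_task_delay helper above stops at printing the queue; the propagation described by its inline comment (shift the start and finish times of every downstream task) is not implemented. Below is a minimal breadth-first sketch of that idea. It assumes each task object exposes an ast (actual start time) attribute alongside the aft seen elsewhere in these examples, and that task objects are hashable; treat those details as assumptions rather than confirmed shadow API.

from queue import Queue


def propagate_delay(workflow, task, delay):
    # Sketch only: push `delay` through `task`'s successors and their successors.
    seen = set()  # assumes task objects are hashable
    update_queue = Queue()
    for succ in workflow.graph.successors(task):
        update_queue.put(succ)
    while not update_queue.empty():
        t = update_queue.get()
        if t in seen:
            continue
        seen.add(t)
        t.ast += delay  # assumed attribute: actual start time
        t.aft += delay  # actual finish time, as used in the tests above
        for succ in workflow.graph.successors(t):
            update_queue.put(succ)
    return workflow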
Example #17
    print("Scheduling {0} Channels".format(x))
    WORKFLOW = "routput/shadow_Continuum_ChannelSplit_{0}.json".format(x)
    CLUSTER = "routput/system_spec_40_200-400_1.0"

    wf_fcfs = Workflow(WORKFLOW)
    env_fcfs = Environment(CLUSTER)
    wf_fcfs.add_environment(env_fcfs)

    wf_heft = Workflow(WORKFLOW)
    env_heft = Environment(CLUSTER)
    wf_heft.add_environment(env_heft)

    soln = fcfs(wf_fcfs)
    fcfs_res = soln.makespan

    soln2 = heft(wf_heft)
    heft_res = soln2.makespan
    diff = abs(fcfs_res - heft_res)
    values = {"time": fcfs_res, "channels": x, "algorithm": "fcfs",
              "diff": diff}
    row_to_add = pd.Series(values, name=x)
    df = df.append(row_to_add)
    values = {"time": heft_res, "channels": x, "algorithm": "heft",
              "diff": diff}
    row_to_add = pd.Series(values, name=x)
    df = df.append(row_to_add)

print(df)
df.to_pickle("continuum_pickle.pkl")
sns.set_style("darkgrid")
df.time = df.time.astype(float)
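
Example #17 is a fragment taken from inside a per-channel loop; the imports, the empty DataFrame, and the loop header sit outside the snippet. A sketch of the surrounding setup it appears to assume, with placeholder channel counts rather than the original values:

import pandas as pd
import seaborn as sns

from shadow.algorithms.heuristic import heft, fcfs
from shadow.models.workflow import Workflow
from shadow.models.environment import Environment

df = pd.DataFrame(columns=["time", "channels", "algorithm", "diff"])

for x in [10, 20, 40]:  # placeholder channel counts, not from the original script
    ...  # body of Example #17 goes here

Note that DataFrame.append, as used in the fragment, was removed in pandas 2.0, so the snippet as written assumes an older pandas release.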