Example #1
def generate(dataflow_name="generated_graph", c_param=None, nx_graph=None):
    """Generate a dataflow graph according to the parameters c_param
    
    Parameters
    ----------
    dataflow_name : the name of the dataflow (default : generated_graph).
    c_param : parameters of the generation.
    nx_graph : optional NetworkX graph giving the graph architecture, used to generate a dataflow
    with random rates on a specific topology (e.g. a random graph generated by NetworkX).
    """
    if c_param is None:
        c_param = Parameters()
    logging.basicConfig(level=c_param.get_logging_level())

    start = time()
    logging.info("Generating graph")
    dataflow = generate_dataflow(dataflow_name, c_param, nx_graph)
    logging.info("Generating weight")
    generate_rates(dataflow, c_param)
    compute_initial_marking(dataflow,
                            solver_str=c_param.get_solver(),
                            solver_verbose=c_param.is_solver_verbose(),
                            lp_filename=c_param.get_lp_filenam())
    if not c_param.is_normalized():
        dataflow.un_normalized()
    logging.info("Generating done : " + str(time() - start) + "s")
    return dataflow
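A minimal usage sketch of this function, assuming the import paths used in the later examples (Turbine.generation.generate and Turbine.param.parameters); the graph name and parameter values here are illustrative only.

from Turbine.generation.generate import generate
from Turbine.param.parameters import Parameters

c_param = Parameters()            # default generation parameters
c_param.set_dataflow_type("SDF")  # generate a plain SDF graph
c_param.set_nb_task(10)           # illustrative number of tasks

dataflow = generate("demo_graph", c_param)  # "demo_graph" is an arbitrary name
print dataflow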
Example #2
def generate(dataflow_name="generated_graph", c_param=None, nx_graph=None):
    """Generate a dataflow graph according to the parameters c_param
    
    Parameters
    ----------
    dataflow_name : the name of the dataflow (default : generated_graph).
    c_param : parameters of the generation.
    nx_graph : optional NetworkX graph giving the graph architecture (see Example #1).
    """
    if c_param is None:
        c_param = Parameters()
    logging.basicConfig(level=c_param.get_logging_level())

    start = time()
    logging.info("Generating graph")
    dataflow = generate_dataflow(dataflow_name, c_param, nx_graph)
    logging.info("Generating weight")
    generate_rates(dataflow, c_param)
    compute_initial_marking(dataflow,
                            solver_str=c_param.get_solver(),
                            solver_verbose=c_param.is_solver_verbose(),
                            lp_filename=c_param.get_lp_filenam())

    logging.info("Generating done : " + str(time() - start) + "s")
    return dataflow
Example #3
"""
Example of how to write and read SDF3 files.
"""
from Turbine.file_parser.sdf3_parser import write_sdf3_file, read_sdf3_file
from Turbine.generation.generate import generate
from Turbine.param.parameters import Parameters

print "###### Setup the SDF generation #####"
c_param = Parameters()
c_param.set_dataflow_type("SDF")
c_param.set_min_task_degree(1)
c_param.set_max_task_degree(2)
c_param.set_nb_task(5)

print "###### Generate dataflow ############"
SDFG = generate("SDF_of_test", c_param)  # Generate a SDF for the example.
print SDFG

print "####### Write sdf3 file #############"
write_sdf3_file(SDFG, "SDF.sdf3")  # Write the generated SDF in a sdf3 file.

print "####### Read sdf3 file ##############"
SDF_from_file = read_sdf3_file("SDF.sdf3")  # Read the SDF back from the sdf3 file written previously.
print SDF_from_file
Example #4
"""
Example of how to generate a Cyclo-Static dataflow graph
"""
from Turbine.generation.generate import generate
from Turbine.param.parameters import Parameters

print "###### Setup the CSDF generation ####"
c_param = Parameters()

# Set the CSDF type for the generation
c_param.set_dataflow_type("CSDF")

# Min/Max phases per task
c_param.set_min_phase_count(1)
c_param.set_max_phase_count(10)

# Min/Max arc count per task
c_param.set_min_task_degree(1)
c_param.set_max_task_degree(3)

# Number of tasks in the dataflow
c_param.set_nb_task(100)

print "###### Generate CSDF dataflow #######"
CSDFG = generate("Test_of_SDFG", c_param)
print CSDFG
Example #5
from Turbine.generation.generate import generate
from Turbine.param.parameters import Parameters

print "###### Setup the SDF generation #####"
c_param = Parameters()
c_param.set_dataflow_type("SDF")
c_param.set_solver(None)  # skip the initial marking computation phase when generating the dataflow
c_param.set_min_task_degree(1)
c_param.set_max_task_degree(5)
c_param.set_nb_task(100)

print "###### Generate dataflow ############"
dataflow = generate("SDF_of_test", c_param)  # Generate a SDF dataflow graph
print dataflow  # Print information about the dataflow, as you can see it's not normalized
print "Is dead lock:", dataflow.is_dead_lock  # Verify if the dataflow is live (Use symbolic execution: can be long on

print  "###### Compute the initial marking ##"
dataflow.compute_initial_marking()  # Compute the minimal initial marking
# By default the initial marking solver is chosen automatically between SC1 and SC2.
# The first one is more efficient on SDF and the second one is more efficient on CSDF and PCG.
# You can force one of them with the argument solver_str="SC1" or solver_str="SC2".
# solver_str=None skips solving the initial marking.
# solver_str="SC1_MIP" uses the Gurobi solver and gives better results than SC1 and SC2, but it cannot handle big graphs.
print dataflow
print "Is dead lock:", dataflow.is_dead_lock  # Verify if the dataflow is live (Use symbolic execution: can be long on
# big graphs)
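A hedged sketch of forcing a specific solver, based only on the options listed in the comments above (the solver_str values "SC1", "SC2", "SC1_MIP" and None); which solver is appropriate depends on the graph.

# Force the SC1 solver explicitly instead of letting Turbine choose automatically
dataflow.compute_initial_marking(solver_str="SC1")
print dataflow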
Example #6
"""
Illustrate how to convert an sdf3 file into a tur file
"""

from Turbine.file_parser.sdf3_parser import write_sdf3_file, read_sdf3_file
from Turbine.file_parser.turbine_parser import write_tur_file
from Turbine.generation.generate import generate
from Turbine.param.parameters import Parameters

print "###### Setup the SDF generation #####"
c_param = Parameters()
c_param.set_dataflow_type("SDF")
c_param.set_min_task_degree(1)
c_param.set_max_task_degree(5)
c_param.set_nb_task(100)

print "###### Generate dataflow ############"
SDFG = generate("Test_of_SDF", c_param)  # Generate a SDF
print SDFG

print "####### Write sdf3 file #############"
write_sdf3_file(SDFG, "SDF.sdf3")  # Write the SDF as a sdf3 file (XML)

print "####### Read sdf3 file ##############"
SDF_from_file = read_sdf3_file("SDF.sdf3")  # Read the SDF back from the sdf3 file written on the previous line

print "####### Write tur file ##############"
write_tur_file(SDF_from_file, "SDF.tur")  # Write the SDF as a tur file

print "done !"
Example #7
"""
Example of how to write and read TUR files.

TUR files are not XML; they are about 10 times smaller than SDF3 files and easy to write by hand.
"""
from Turbine.generation.generate import generate
from Turbine.param.parameters import Parameters

print "###### Setup the SDF generation #####"
c_param = Parameters()
c_param.set_dataflow_type("SDF")
c_param.set_min_task_degree(1)
c_param.set_max_task_degree(5)
c_param.set_normalized(False)
c_param.set_nb_task(10)

print "###### Generate dataflow ############"
SDFG = generate("Test_of_SDF", c_param)
print SDFG
# The graph is not generated normalized, so first we normalize it
print "####### Normalized graph ############"
coef_vector = SDFG.normalized()
print SDFG
# coef_vector is the un-normalization vector to retrieve the original graph.
# It's a dictionary {arc:coef}
print "####### Un-normalized graph #########"
# The graph can be un-normalized with a random vector by calling un_normalized() with no argument.
# Here we want to retrieve the original generated graph, so we pass the coefficient vector explicitly.
SDFG.un_normalized(coef_vector)
print SDFG
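The docstring above promises writing and reading TUR files, which the snippet itself does not show; a minimal sketch, assuming Turbine.file_parser.turbine_parser exposes a read_tur_file counterpart to the write_tur_file used in Example #6.

from Turbine.file_parser.turbine_parser import write_tur_file, read_tur_file  # read_tur_file is assumed here

write_tur_file(SDFG, "SDF.tur")           # Write the un-normalized SDF as a tur file
SDF_from_file = read_tur_file("SDF.tur")  # Read it back (assumed counterpart of write_tur_file)
print SDF_from_file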
Example #8
"""
Example of how to generate a Synchronous Dataflow Graph
"""
from Turbine.generation.generate import generate
from Turbine.param.parameters import Parameters

print "###### Setup the SDF generation #####"
c_param = Parameters()

# Set the SDF type for the generation
c_param.set_dataflow_type("SDF")

# Force the generated graph to be acyclic
c_param.set_acyclic(True)

# Min/Max arc count per task
c_param.set_min_task_degree(1)
c_param.set_max_task_degree(2)

# Number of tasks in the dataflow
c_param.set_nb_task(10)

print "###### Generate SDF dataflow ########"
SDFG = generate("Test_of_SDFG", c_param)
print SDFG
print "Cyclic graph:", SDFG.is_cyclic
Example #9
from Turbine.generation.generate import generate
from Turbine.param.parameters import Parameters

print "###### Setup the SDF generation #####"
c_param = Parameters()
c_param.set_dataflow_type("SDF")
c_param.set_solver(
    None
)  # skip the initial marking computation phase when generating the dataflow
c_param.set_min_task_degree(1)
c_param.set_max_task_degree(5)
c_param.set_nb_task(100)

print "###### Generate dataflow ############"
dataflow = generate("SDF_of_test", c_param)  # Generate a SDF dataflow graph
print dataflow  # Print information about the dataflow, as you can see it's not normalized
print "Is dead lock:", dataflow.is_dead_lock  # Verify if the dataflow is live (Use symbolic execution: can be long on

print "###### Compute the initial marking ##"
dataflow.compute_initial_marking()  # Compute the minimal initial marking
# By default the initial marking solver is chosen automatically between SC1 and SC2.
# The first one is more efficient on SDF and the second one is more efficient on CSDF and PCG.
# You can force one of them with the argument solver_str="SC1" or solver_str="SC2".
# solver_str=None skips solving the initial marking.
# solver_str="SC1_MIP" uses the Gurobi solver and gives better results than SC1 and SC2, but it cannot handle big graphs.
print dataflow
print "Is dead lock:", dataflow.is_dead_lock  # Verify if the dataflow is live (Use symbolic execution: can be long on
# big graphs)
Example #10
"""
Example of how to generate a Cyclo-Static dataflow graph
"""
from Turbine.generation.generate import generate
from Turbine.param.parameters import Parameters

print "###### Setup the CSDF generation ####"
c_param = Parameters()

# Set the CSDF type for the generation
c_param.set_dataflow_type("CSDF")

# Min/Max phases per task
c_param.set_min_phase_count(1)
c_param.set_max_phase_count(10)

# Min/Max arc count per task
c_param.set_min_task_degree(1)
c_param.set_max_task_degree(3)

# Number of tasks in the dataflow
c_param.set_nb_task(100)

print "###### Generate CSDF dataflow #######"
CSDFG = generate("Test_of_SDFG", c_param)
print CSDFG
"""
Example of how to generate a Synchronous Dataflow Graph
"""
from Turbine.generation.generate import generate
from Turbine.param.parameters import Parameters
from networkx import MultiDiGraph
import networkx as nx

print "###### Setup the SDF generation #####"
c_param = Parameters()

# Set the SDF type for the generation
c_param.set_dataflow_type("SDF")

# Generate a graph architecture using networkx (here a balanced binary tree of depth 10)
nx_graph = nx.balanced_tree(2, 10, create_using=MultiDiGraph())

print "###### Generate SDF dataflow ########"
SDFG = generate("Test_of_SDFG", c_param, nx_graph=nx_graph)
print SDFG
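Any directed NetworkX multigraph can serve as the architecture passed through nx_graph; a hedged sketch with a genuinely random generator (gnp_random_graph is a standard NetworkX function, while the node count, edge probability and conversion to MultiDiGraph are illustrative choices).

# Illustrative alternative: a random directed graph converted to a MultiDiGraph
random_nx = MultiDiGraph(nx.gnp_random_graph(50, 0.05, directed=True))
SDFG_random = generate("SDF_from_random_graph", c_param, nx_graph=random_nx)
print SDFG_random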