def test_api_calls():
    from sprokit.pipeline import config
    from sprokit.pipeline import modules
    from sprokit.pipeline import process
    from sprokit.pipeline import process_factory

    modules.load_known_modules()

    proc_type = 'orphan'
    c = config.empty_config()

    process_factory.create_process(proc_type, '')
    process_factory.create_process(proc_type, '', c)
    process_factory.types()
    process_factory.description(proc_type)

    # Touch the Process class constants to confirm the bindings expose them.
    process_factory.Process.property_no_threads
    process_factory.Process.property_no_reentrancy
    process_factory.Process.property_unsync_input
    process_factory.Process.property_unsync_output
    process_factory.Process.port_heartbeat
    process_factory.Process.config_name
    process_factory.Process.config_type
    process_factory.Process.type_any
    process_factory.Process.type_none
    process_factory.Process.type_data_dependent
    process_factory.Process.type_flow_dependent
    process_factory.Process.flag_output_const
    process_factory.Process.flag_input_static
    process_factory.Process.flag_input_mutable
    process_factory.Process.flag_input_nodep
    process_factory.Process.flag_required
Example #3
def test_register():
    from sprokit.pipeline import config
    from sprokit.pipeline import modules
    from sprokit.pipeline import pipeline
    from sprokit.pipeline import scheduler_registry

    modules.load_known_modules()

    reg = scheduler_registry.SchedulerRegistry.self()

    sched_type = 'python_example'
    sched_desc = 'simple description'

    reg.register_scheduler(sched_type, sched_desc, example_scheduler(True))

    if not sched_desc == reg.description(sched_type):
        test_error("Description was not preserved when registering")

    p = pipeline.Pipeline()

    try:
        s = reg.create_scheduler(sched_type, p)
        if s is None:
            raise Exception()
    except:
        test_error("Could not create newly registered scheduler type")
Example #4
def create_process(type, name, conf):
    from sprokit.pipeline import modules
    from sprokit.pipeline import process_factory

    modules.load_known_modules()

    p = process_factory.create_process(type, name, conf)

    return p
Example #6
def create_process(type, name, conf):
    from sprokit.pipeline import modules
    from sprokit.pipeline import process_registry

    modules.load_known_modules()

    reg = process_registry.ProcessRegistry.self()

    p = reg.create_process(type, name, conf)

    return p
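Either of the create_process helpers above can be driven the same way. A minimal usage sketch, assuming the helper is in scope; the 'orphan' process type appears elsewhere in these examples, and the instance name 'my_orphan' is purely illustrative:

from sprokit.pipeline import config

# An empty configuration plus a registered process type is all the
# helper needs; it loads the known modules itself.
c = config.empty_config()
proc = create_process('orphan', 'my_orphan', c)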
Example #7
def run_pipeline(sched_type, pipe, conf):
    from sprokit.pipeline import config
    from sprokit.pipeline import modules
    from sprokit.pipeline import scheduler_factory

    modules.load_known_modules()

    s = scheduler_factory.create_scheduler(sched_type, pipe, conf)

    s.start()
    s.wait()
Example #8
def run_pipeline(sched_type, pipe, conf):
    from sprokit.pipeline import config
    from sprokit.pipeline import modules
    from sprokit.pipeline import scheduler_registry

    modules.load_known_modules()

    reg = scheduler_registry.SchedulerRegistry.self()

    s = reg.create_scheduler(sched_type, pipe, conf)

    s.start()
    s.wait()
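Neither run_pipeline variant builds the pipeline it runs. A minimal caller sketch, mirroring the numbers/print_number pipeline from the later examples; the output filename is illustrative, and it assumes the default configuration of the 'numbers' process produces a finite run:

from sprokit.pipeline import config
from sprokit.pipeline import modules
from sprokit.pipeline import pipeline
from sprokit.pipeline import process_factory

modules.load_known_modules()

c = config.empty_config()
c.set_value('output', 'print_number_output.txt')

p = pipeline.Pipeline()
p.add_process(process_factory.create_process('numbers', 'src'))
p.add_process(process_factory.create_process('print_number', 'snk', c))
p.connect('src', 'number', 'snk', 'number')
p.setup_pipeline()

run_pipeline('thread_per_process', p, config.empty_config())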
Example #9
def test_simple_pipeline(path):
    from sprokit.pipeline import config
    from sprokit.pipeline import pipeline
    from sprokit.pipeline import modules
    from sprokit.pipeline_util import bake
    from sprokit.pipeline_util import load

    blocks = load.load_pipe_file(path)

    modules.load_known_modules()

    bake.bake_pipe_file(path)
    with open(path, 'r') as fin:
        bake.bake_pipe(fin)
    bake.bake_pipe_blocks(blocks)
    bake.extract_configuration(blocks)
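test_simple_pipeline expects a path to a pipeline definition file. A sketch of a minimal input, written to a temporary file; the two-process layout mirrors the later examples, and the block grammar is an assumption based on sprokit's pipe-file format:

import tempfile

pipe_text = """
process src
  :: numbers

process snk
  :: print_number
  :output print_number_output.txt

connect from src.number
        to   snk.number
"""

with tempfile.NamedTemporaryFile('w', suffix='.pipe', delete=False) as fout:
    fout.write(pipe_text)

test_simple_pipeline(fout.name)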
Example #10
def test_wrapper_api():
    from sprokit.pipeline import config
    from sprokit.pipeline import modules
    from sprokit.pipeline import pipeline
    from sprokit.pipeline import process_registry
    from sprokit.pipeline import scheduler_registry

    sched_type = 'python_example'
    sched_desc = 'simple description'

    modules.load_known_modules()

    reg = scheduler_registry.SchedulerRegistry.self()

    reg.register_scheduler(sched_type, sched_desc, example_scheduler(False))

    p = pipeline.Pipeline()

    preg = process_registry.ProcessRegistry.self()

    proc_type = 'orphan'
    proc_name = 'orphan'

    proc = preg.create_process(proc_type, proc_name)

    p.add_process(proc)

    def check_scheduler(s):
        if s is None:
            test_error("Got a 'None' scheduler")
            return

        s.start()
        s.pause()
        s.resume()
        s.stop()
        s.start()
        s.wait()

        del s

    p.reset()
    p.setup_pipeline()

    s = reg.create_scheduler(sched_type, p)
    check_scheduler(s)
Example #11
def run_pipeline(sched_type, pipe, conf):
    from sprokit.pipeline import config
    from sprokit.pipeline import modules
    from sprokit.pipeline import scheduler_factory
    import sys

    modules.load_known_modules()

    if sched_type in cpp_scheds:
        expect_exception("trying to run a python process on a C++ scheduler", RuntimeError,
                         scheduler_factory.create_scheduler, sched_type, pipe, conf)

    else:
        s = scheduler_factory.create_scheduler(sched_type, pipe, conf)

        s.start()
        s.wait()
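cpp_scheds and expect_exception come from the surrounding test harness and are not part of this listing. A rough sketch of what they might look like; the scheduler list and the helper's behaviour are assumptions, not the harness's actual code:

# Assumed: scheduler types implemented in C++ that cannot drive a
# pure-python process.
cpp_scheds = ['sync', 'thread_per_process']

def expect_exception(action, exc_type, func, *args):
    # Run func(*args) and report an error unless it raises exc_type.
    try:
        func(*args)
    except exc_type:
        pass
    else:
        test_error("Did not get an exception when %s" % action)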
def test_api_calls():
    from sprokit.pipeline import config
    from sprokit.pipeline import modules
    from sprokit.pipeline import pipeline
    from sprokit.pipeline import scheduler_factory

    modules.load_known_modules()

    sched_type = 'thread_per_process'
    c = config.empty_config()
    p = pipeline.Pipeline()

    scheduler_factory.create_scheduler(sched_type, p)
    scheduler_factory.create_scheduler(sched_type, p, c)
    scheduler_factory.types()
    scheduler_factory.description(sched_type)
    scheduler_factory.default_type
Example #15
def test_api_calls():
    from sprokit.pipeline import config
    from sprokit.pipeline import modules
    from sprokit.pipeline import pipeline
    from sprokit.pipeline import scheduler_registry

    modules.load_known_modules()

    reg = scheduler_registry.SchedulerRegistry.self()

    sched_type = 'thread_per_process'
    c = config.empty_config()
    p = pipeline.Pipeline()

    reg.create_scheduler(sched_type, p)
    reg.create_scheduler(sched_type, p, c)
    reg.types()
    reg.description(sched_type)
    reg.default_type
Example #16
def test_api_calls():
    from sprokit.pipeline import config
    from sprokit.pipeline import datum
    from sprokit.pipeline import edge
    from sprokit.pipeline import modules
    from sprokit.pipeline import process
    from sprokit.pipeline import process_registry
    from sprokit.pipeline import stamp

    e = edge.Edge()

    e.makes_dependency()
    e.has_data()
    e.full_of_data()
    e.datum_count()

    d = datum.complete()
    s = stamp.new_stamp(1)

    ed = edge.EdgeDatum(d, s)

    e.push_datum(ed)
    e.get_datum()

    e.push_datum(ed)
    e.peek_datum()
    e.pop_datum()

    modules.load_known_modules()

    reg = process_registry.ProcessRegistry.self()

    p = reg.create_process('orphan', process.ProcessName())

    e.set_upstream_process(p)
    e.set_downstream_process(p)

    e.mark_downstream_as_complete()
    e.is_downstream_complete()

    e.config_dependency
    e.config_capacity
def test_api_calls():
    from sprokit.pipeline import config
    from sprokit.pipeline import modules
    from sprokit.pipeline import process
    from sprokit.pipeline import process_registry

    modules.load_known_modules()

    reg = process_registry.ProcessRegistry.self()

    proc_type = 'orphan'
    c = config.empty_config()

    reg.create_process(proc_type, process.ProcessName())
    reg.create_process(proc_type, process.ProcessName(), c)
    reg.types()
    reg.description(proc_type)

    # Touch the Process class constants to confirm the bindings expose them.
    process_registry.Process.property_no_threads
    process_registry.Process.property_no_reentrancy
    process_registry.Process.property_unsync_input
    process_registry.Process.property_unsync_output
    process_registry.Process.port_heartbeat
    process_registry.Process.config_name
    process_registry.Process.config_type
    process_registry.Process.type_any
    process_registry.Process.type_none
    process_registry.Process.type_data_dependent
    process_registry.Process.type_flow_dependent
    process_registry.Process.flag_output_const
    process_registry.Process.flag_input_static
    process_registry.Process.flag_input_mutable
    process_registry.Process.flag_input_nodep
    process_registry.Process.flag_required

    cluster_bases = process_registry.ProcessCluster.__bases__
    if not cluster_bases[0] == process_registry.Process:
        test_error("The cluster class does not inherit from the process class")
Example #18
def test_simple_pipeline(path):
    import os

    from sprokit.pipeline import pipeline
    from sprokit.pipeline import modules
    from sprokit.pipeline_util import bake
    from sprokit.pipeline_util import export_

    modules.load_known_modules()

    p = bake.bake_pipe_file(path)
    r, w = os.pipe()

    name = 'graph'

    export_.export_dot(w, p, name)

    p.setup_pipeline()

    export_.export_dot(w, p, name)

    os.close(r)
    os.close(w)
Example #19
def test_cluster_multiplier(path):
    from sprokit.pipeline import config
    from sprokit.pipeline import pipeline
    from sprokit.pipeline import modules
    from sprokit.pipeline_util import bake
    from sprokit.pipeline_util import load

    blocks = load.load_cluster_file(path)

    modules.load_known_modules()

    bake.bake_cluster_file(path)
    with open(path, 'r') as fin:
        bake.bake_cluster(fin)
    info = bake.bake_cluster_blocks(blocks)

    conf = config.empty_config()

    info.type()
    info.description()
    info.create()
    info.create(conf)

    bake.register_cluster(info)
Example #20
def test_load():
    from sprokit.pipeline import modules

    modules.load_known_modules()
Example #21
def test_api_calls():
    from sprokit.pipeline import config
    from sprokit.pipeline import edge
    from sprokit.pipeline import modules
    from sprokit.pipeline import pipeline
    from sprokit.pipeline import process
    from sprokit.pipeline import process_cluster
    from sprokit.pipeline import process_registry

    p = pipeline.Pipeline()

    proc_type1 = 'numbers'
    proc_type2 = 'print_number'
    proc_type3 = 'orphan_cluster'

    proc_name1 = 'src'
    proc_name2 = 'snk'
    proc_name3 = 'orp'

    port_name1 = 'number'
    port_name2 = 'number'

    modules.load_known_modules()

    reg = process_registry.ProcessRegistry.self()

    proc1 = reg.create_process(proc_type1, proc_name1)

    conf_name = 'output'

    c = config.empty_config()

    c.set_value(conf_name, 'test-python-pipeline-api_calls-print_number.txt')
    proc2 = reg.create_process(proc_type2, proc_name2, c)

    proc3 = reg.create_process(proc_type3, proc_name3)

    p.add_process(proc1)
    p.add_process(proc2)
    p.add_process(proc3)
    p.connect(proc_name1, port_name1,
              proc_name2, port_name2)
    p.process_names()
    p.process_by_name(proc_name1)
    p.cluster_names()
    p.cluster_by_name(proc_name3)
    p.connections_from_addr(proc_name1, port_name1)
    p.connection_to_addr(proc_name2, port_name2)

    p.disconnect(proc_name1, port_name1,
                 proc_name2, port_name2)
    p.remove_process(proc_name1)
    p.remove_process(proc_name3)

    # Restore the pipeline so that setup_pipeline works.
    p.add_process(proc1)
    p.connect(proc_name1, port_name1,
              proc_name2, port_name2)

    p.setup_pipeline()

    p.upstream_for_process(proc_name2)
    p.upstream_for_port(proc_name2, port_name2)
    p.downstream_for_process(proc_name1)
    p.downstream_for_port(proc_name1, port_name1)
    p.sender_for_port(proc_name2, port_name2)
    p.receivers_for_port(proc_name1, port_name1)
    p.edge_for_connection(proc_name1, port_name1,
                          proc_name2, port_name2)
    p.input_edges_for_process(proc_name2)
    p.input_edge_for_port(proc_name2, port_name2)
    p.output_edges_for_process(proc_name1)
    p.output_edges_for_port(proc_name1, port_name1)

    p.is_setup()
    p.setup_successful()

    c = config.empty_config()

    p.reconfigure(c)

    p.reset()
Example #22
def test_api_calls():
    from sprokit.pipeline import config
    from sprokit.pipeline import edge
    from sprokit.pipeline import modules
    from sprokit.pipeline import pipeline
    from sprokit.pipeline import process
    from sprokit.pipeline import process_cluster
    from sprokit.pipeline import process_factory

    p = pipeline.Pipeline()

    proc_type1 = 'numbers'
    proc_type2 = 'print_number'
    proc_type3 = 'orphan_cluster'

    proc_name1 = 'src'
    proc_name2 = 'snk'
    proc_name3 = 'orp'

    port_name1 = 'number'
    port_name2 = 'number'

    modules.load_known_modules()

    proc1 = process_factory.create_process(proc_type1, proc_name1)

    conf_name = 'output'

    c = config.empty_config()

    c.set_value(conf_name, 'test-python-pipeline-api_calls-print_number.txt')
    proc2 = process_factory.create_process(proc_type2, proc_name2, c)

    proc3 = process_factory.create_process(proc_type3, proc_name3)

    p.add_process(proc1)
    p.add_process(proc2)
    p.add_process(proc3)
    p.connect(proc_name1, port_name1,
              proc_name2, port_name2)
    p.process_names()
    p.process_by_name(proc_name1)
    p.cluster_names()
    p.cluster_by_name(proc_name3)
    p.connections_from_addr(proc_name1, port_name1)
    p.connection_to_addr(proc_name2, port_name2)

    p.disconnect(proc_name1, port_name1,
                 proc_name2, port_name2)
    p.remove_process(proc_name1)
    p.remove_process(proc_name3)

    # Restore the pipeline so that setup_pipeline works.
    p.add_process(proc1)
    p.connect(proc_name1, port_name1,
              proc_name2, port_name2)

    p.setup_pipeline()

    p.upstream_for_process(proc_name2)
    p.upstream_for_port(proc_name2, port_name2)
    p.downstream_for_process(proc_name1)
    p.downstream_for_port(proc_name1, port_name1)
    p.sender_for_port(proc_name2, port_name2)
    p.receivers_for_port(proc_name1, port_name1)
    p.edge_for_connection(proc_name1, port_name1,
                          proc_name2, port_name2)
    p.input_edges_for_process(proc_name2)
    p.input_edge_for_port(proc_name2, port_name2)
    p.output_edges_for_process(proc_name1)
    p.output_edges_for_port(proc_name1, port_name1)

    p.is_setup()
    p.setup_successful()

    c = config.empty_config()

    p.reconfigure(c)

    p.reset()