def test_create():
    """Verify an empty configuration and a key list can be constructed."""
    from sprokit.pipeline import config

    # Was a bare 'except:'; catching BaseException explicitly keeps the
    # same "any failure is a test error" intent without the bare-except idiom.
    try:
        config.empty_config()
    except BaseException:
        test_error("Failed to create an empty configuration")

    config.ConfigKeys()
def test_create():
    """Verify the basic config types exposed by the bindings can be constructed."""
    from sprokit.pipeline import config

    # Was a bare 'except:'; catching BaseException explicitly keeps the
    # same "any failure is a test error" intent without the bare-except idiom.
    try:
        config.empty_config()
    except BaseException:
        test_error("Failed to create an empty configuration")

    config.ConfigKey()
    config.ConfigKeys()
    config.ConfigDescription()
    config.ConfigValue()
def test_api_calls():
    """Smoke-test the process_factory API surface exposed by the bindings."""
    from sprokit.pipeline import config
    from vital.modules import modules
    from sprokit.pipeline import process
    from sprokit.pipeline import process_factory

    modules.load_known_modules()

    proc_type = 'orphan'
    conf = config.empty_config()

    process_factory.create_process(proc_type, '')
    process_factory.create_process(proc_type, '', conf)
    process_factory.types()
    process_factory.description(proc_type)

    # Touch every class-level constant so a missing binding raises here.
    for attr in ('property_no_threads',
                 'property_no_reentrancy',
                 'property_unsync_input',
                 'property_unsync_output',
                 'port_heartbeat',
                 'config_name',
                 'config_type',
                 'type_any',
                 'type_none',
                 'type_data_dependent',
                 'type_flow_dependent',
                 'flag_output_const',
                 'flag_input_static',
                 'flag_input_mutable',
                 'flag_input_nodep',
                 'flag_required'):
        getattr(process_factory.Process, attr)
def test_api_calls():
    """Smoke-test the process_cluster API surface and the cluster check."""
    from sprokit.pipeline import config
    from sprokit.pipeline import process
    from sprokit.pipeline import process_cluster

    # Touch every class-level constant so a missing binding raises here.
    for attr in ('property_no_threads',
                 'property_no_reentrancy',
                 'property_unsync_input',
                 'property_unsync_output',
                 'type_any',
                 'type_none',
                 'type_data_dependent',
                 'type_flow_dependent',
                 'flag_output_const',
                 'flag_output_shared',
                 'flag_input_static',
                 'flag_input_mutable',
                 'flag_input_nodep',
                 'flag_required'):
        getattr(process_cluster.PythonProcessCluster, attr)

    # A minimal non-cluster process must not be detected as a cluster.
    class BaseProcess(process.PythonProcess):
        def __init__(self, conf):
            process.PythonProcess.__init__(self, conf)

    conf = config.empty_config()
    proc = BaseProcess(conf)

    if process_cluster.cluster_from_process(proc) is not None:
        test_error("A non-cluster process was detected as a cluster process")
def test_subblock():
    """subblock() copies only the keys under the requested block."""
    from sprokit.pipeline import config

    c = config.empty_config()

    block1 = 'block1'
    block2 = 'block2'

    keya = 'keya'
    keyb = 'keyb'
    keyc = 'keyc'

    valuea = 'valuea'
    valueb = 'valueb'
    valuec = 'valuec'

    sep = config.Config.block_sep
    c.set_value(block1 + sep + keya, valuea)
    c.set_value(block1 + sep + keyb, valueb)
    c.set_value(block2 + sep + keyc, valuec)

    sub = c.subblock(block1)

    if sub.get_value(keya) != valuea:
        test_error("Subblock does not inherit expected keys")
    if sub.get_value(keyb) != valueb:
        test_error("Subblock does not inherit expected keys")
    if sub.has_value(keyc):
        test_error("Subblock inherited unrelated key")
def test_create():
    """A Pipeline can be built with and without an explicit configuration."""
    from sprokit.pipeline import config
    from sprokit.pipeline import pipeline

    conf = config.empty_config()

    pipeline.Pipeline()
    pipeline.Pipeline(conf)
def _dummy_detector_cfg():
    """Build the configuration block used by the dummy detector tests."""
    # Insertion order matches the original explicit set_value sequence.
    settings = {
        "center_x": "3",
        "center_y": "5",
        "dx": "0.3",
        "dy": "0.2",
        "height": "10",
        "width": "15",
    }

    test_cfg = config.empty_config()
    for key, value in settings.items():
        test_cfg.set_value(key, value)
    return test_cfg
def test_create():
    """Edges and edge collections can be constructed."""
    from sprokit.pipeline import config
    from sprokit.pipeline import edge

    conf = config.empty_config()

    edge.Edge()
    edge.Edge(conf)
    edge.Edges()
def test_python_to_python(sched_type):
    """Run a pure-Python source->sink pipeline and verify the emitted numbers.

    Args:
        sched_type: Name of the scheduler implementation to run with.
    """
    from sprokit.pipeline import config
    from sprokit.pipeline import pipeline
    from sprokit.pipeline import process

    name_source = 'source'
    name_sink = 'sink'

    port_output = 'number'
    port_input = 'number'

    # Renamed from min/max so the builtins are not shadowed.
    start = 0
    end = 10

    output_file = 'test-python-run-python_to_python.txt'

    c = config.empty_config()
    c.set_value(process.PythonProcess.config_name, name_source)
    c.set_value('start', str(start))
    c.set_value('end', str(end))
    s = make_source(c)

    c = config.empty_config()
    c.set_value(process.PythonProcess.config_name, name_sink)
    c.set_value('output', output_file)
    t = make_sink(c)

    p = pipeline.Pipeline(c)
    p.add_process(s)
    p.add_process(t)
    p.connect(name_source, port_output,
              name_sink, port_input)
    p.setup_pipeline()

    run_pipeline(sched_type, p, c)

    check_file(output_file, list(range(start, end)))
def test_dict():
    """Exercise the dict protocol (contains, len, get/set/del, truthiness)."""
    from sprokit.pipeline import config

    c = config.empty_config()

    key = 'key'
    value = 'oldvalue'

    if key in c:
        test_error("'%s' is in an empty config" % key)
    if c:
        test_error("An empty config is not falsy")

    c[key] = value

    if c[key] != value:
        test_error("Value was not set")
    if key not in c:
        test_error("'%s' is not in config after insertion" % key)
    if len(c) != 1:
        test_error("The len() operator is incorrect")
    if not c:
        test_error("A non-empty config is not truthy")

    value = 'newvalue'
    origvalue = 'newvalue'

    c[key] = value

    # Rebinding the local afterwards must not affect the stored copy.
    value = 'replacedvalue'

    if c[key] != origvalue:
        test_error("Value was overwritten")

    del c[key]

    expect_exception('getting an unset value', BaseException,
                     c.__getitem__, key)
    expect_exception('deleting an unset value', BaseException,
                     c.__delitem__, key)

    value = 10

    c[key] = value

    if c[key] != str(value):
        test_error("Value was not converted to a string")
def test_merge_config():
    """merge_config keeps unrelated keys, overwrites conflicts, adds new keys."""
    from sprokit.pipeline import config

    c = config.empty_config()
    d = config.empty_config()

    keya = 'keya'
    keyb = 'keyb'
    keyc = 'keyc'

    valuea = 'valuea'
    valueb = 'valueb'
    valuec = 'valuec'

    c.set_value(keya, valuea)
    c.set_value(keyb, valuea)

    d.set_value(keyb, valueb)
    d.set_value(keyc, valuec)

    c.merge_config(d)

    if c.get_value(keya) != valuea:
        test_error("Unmerged key changed")
    if c.get_value(keyb) != valueb:
        test_error("Conflicting key was not overwritten")
    if c.get_value(keyc) != valuec:
        test_error("New key did not appear")
def test_get_value():
    """A value that was set can be read back."""
    from sprokit.pipeline import config

    c = config.empty_config()

    key = 'keya'
    value = 'valuea'

    c.set_value(key, value)

    if c.get_value(key) != value:
        test_error("Did not retrieve value that was set")
def test_api_calls():
    """Smoke-test the scheduler_factory API surface."""
    from sprokit.pipeline import config
    from vital.modules import modules
    from sprokit.pipeline import pipeline
    from sprokit.pipeline import scheduler_factory

    modules.load_known_modules()

    sched_type = 'thread_per_process'
    conf = config.empty_config()
    pipe = pipeline.Pipeline()

    scheduler_factory.create_scheduler(sched_type, pipe)
    scheduler_factory.create_scheduler(sched_type, pipe, conf)
    scheduler_factory.types()
    scheduler_factory.description(sched_type)
    scheduler_factory.default_type
def test_get_value_no_exist():
    """get_value raises on a missing key and honors a supplied default."""
    from sprokit.pipeline import config

    c = config.empty_config()

    keya = 'keya'
    keyb = 'keyb'
    valueb = 'valueb'

    expect_exception('retrieving an unset value', BaseException,
                     c.get_value, keya)

    if c.get_value(keyb, valueb) != valueb:
        test_error("Did not retrieve default when requesting unset value")
def test_has_value():
    """has_value reports set keys and only set keys."""
    from sprokit.pipeline import config

    c = config.empty_config()

    set_key = 'keya'
    unset_key = 'keyb'

    c.set_value(set_key, 'valuea')

    if not c.has_value(set_key):
        test_error("Block does not have value which was set")
    if c.has_value(unset_key):
        test_error("Block has value which was not set")
def test_get_value_nested():
    """A key set under a block can be read back through a subblock."""
    from sprokit.pipeline import config

    c = config.empty_config()

    outer = 'keya'
    inner = 'keyb'
    value = 'valuea'

    c.set_value(outer + config.Config.block_sep + inner, value)

    nested = c.subblock(outer)

    if nested.get_value(inner) != value:
        test_error("Did not retrieve value that was set")
def test_nested_config(self):
    """Verify a nested detector configuration round-trips and validates."""
    modules.load_known_modules()
    detector = ImageObjectDetector.create("SimpleImageObjectDetector")
    nested_cfg = config.empty_config()
    ImageObjectDetector.get_nested_algo_configuration("detector",
                                                      nested_cfg,
                                                      detector)
    # Verify that test cfg is set to the configuration inside the detector.
    # The nested configuration uses the name of a detector as an additional
    # configuration key, so it would be checked against 7 rather than 6.
    #nose.tools.assert_equal(len(nested_cfg), 7)
    #test_cfg = _dummy_detector_cfg()
    #test_cfg.set_value("example_detector:type", "example_detector")
    #ImageObjectDetector.set_nested_algo_configuration( "example_detector",
    #                                                   test_cfg,
    #                                                   detector )
    nose.tools.assert_equal(
        ImageObjectDetector.check_nested_algo_configuration("detector",
                                                            nested_cfg),
        True)
def test_api_calls():
    """Smoke-test the scheduler_registry API surface."""
    from sprokit.pipeline import config
    from sprokit.pipeline import modules
    from sprokit.pipeline import pipeline
    from sprokit.pipeline import scheduler_registry

    modules.load_known_modules()

    reg = scheduler_registry.SchedulerRegistry.self()

    sched_type = 'thread_per_process'
    conf = config.empty_config()
    pipe = pipeline.Pipeline()

    reg.create_scheduler(sched_type, pipe)
    reg.create_scheduler(sched_type, pipe, conf)
    reg.types()
    reg.description(sched_type)
    reg.default_type
def test_nested_config(self):
    """Round-trip a nested detector configuration and check it validates."""
    modules.load_known_modules()
    detector = ImageObjectDetector.create("SimpleImageObjectDetector")
    nested_cfg = config.empty_config()
    ImageObjectDetector.get_nested_algo_configuration("detector",
                                                      nested_cfg,
                                                      detector)
    # Verify that test cfg is set to the configuration inside the detector.
    # The nested configuration uses the name of a detector as an additional
    # configuration key, so it would be checked against 7 rather than 6.
    #nose.tools.assert_equal(len(nested_cfg), 7)
    #test_cfg = _dummy_detector_cfg()
    #test_cfg.set_value("example_detector:type", "example_detector")
    #ImageObjectDetector.set_nested_algo_configuration( "example_detector",
    #                                                   test_cfg,
    #                                                   detector )
    result = ImageObjectDetector.check_nested_algo_configuration("detector",
                                                                 nested_cfg)
    nose.tools.assert_equal(result, True)
def test_read_only_unset():
    """Unsetting a read-only value raises and leaves the value intact."""
    from sprokit.pipeline import config

    c = config.empty_config()

    key = 'keya'
    value = 'valuea'

    c.set_value(key, value)
    c.mark_read_only(key)

    expect_exception('unsetting a read only value', BaseException,
                     c.unset_value, key)

    if c.get_value(key) != value:
        test_error("Read only value was unset")
def test_subblock_view():
    """A subblock view shares storage with its parent in both directions."""
    from sprokit.pipeline import config

    c = config.empty_config()

    block1 = 'block1'
    block2 = 'block2'

    keya = 'keya'
    keyb = 'keyb'

    valuea = 'valuea'
    valueb = 'valueb'

    # keyc/valuec were declared but never used; removed.
    sep = config.Config.block_sep
    c.set_value(block1 + sep + keya, valuea)
    c.set_value(block2 + sep + keyb, valueb)

    view = c.subblock_view(block1)

    if not view.has_value(keya):
        test_error("Subblock does not inherit expected keys")
    if view.has_value(keyb):
        test_error("Subblock inherited unrelated key")

    # A write through the parent must be visible through the view.
    c.set_value(block1 + sep + keya, valueb)

    if view.get_value(keya) != valueb:
        test_error("Subblock view persisted a changed value")

    # A write through the view must be visible through the view as well.
    view.set_value(keya, valuea)

    if view.get_value(keya) != valuea:
        test_error("Subblock view set value was not changed in parent")
def test_api_calls():
    """Smoke-test the process_registry API surface and cluster inheritance."""
    from sprokit.pipeline import config
    from sprokit.pipeline import modules
    from sprokit.pipeline import process
    from sprokit.pipeline import process_registry

    modules.load_known_modules()

    reg = process_registry.ProcessRegistry.self()

    proc_type = 'orphan'
    conf = config.empty_config()

    reg.create_process(proc_type, process.ProcessName())
    reg.create_process(proc_type, process.ProcessName(), conf)
    reg.types()
    reg.description(proc_type)

    # Touch every class-level constant so a missing binding raises here.
    for attr in ('property_no_threads',
                 'property_no_reentrancy',
                 'property_unsync_input',
                 'property_unsync_output',
                 'port_heartbeat',
                 'config_name',
                 'config_type',
                 'type_any',
                 'type_none',
                 'type_data_dependent',
                 'type_flow_dependent',
                 'flag_output_const',
                 'flag_input_static',
                 'flag_input_mutable',
                 'flag_input_nodep',
                 'flag_required'):
        getattr(process_registry.Process, attr)

    cluster_bases = process_registry.ProcessCluster.__bases__
    if cluster_bases[0] != process_registry.Process:
        test_error("The cluster class does not inherit from the process class")
def test_unset_value():
    """unset_value removes only the requested key."""
    from sprokit.pipeline import config

    c = config.empty_config()

    keya = 'keya'
    keyb = 'keyb'

    valuea = 'valuea'
    valueb = 'valueb'

    c.set_value(keya, valuea)
    c.set_value(keyb, valueb)

    c.unset_value(keya)

    expect_exception('retrieving an unset value', BaseException,
                     c.get_value, keya)

    if c.get_value(keyb) != valueb:
        test_error("Did not retrieve value when requesting after an unrelated unset")
def test_available_values():
    """available_values() reports every set key and yields an iterable."""
    from sprokit.pipeline import config

    c = config.empty_config()

    keya = 'keya'
    keyb = 'keyb'

    valuea = 'valuea'
    valueb = 'valueb'

    c.set_value(keya, valuea)
    c.set_value(keyb, valueb)

    avail = c.available_values()

    if not len(avail) == 2:
        test_error("Did not retrieve correct number of keys")

    # Was a bare 'except:'; catching BaseException explicitly keeps the
    # same "any failure while iterating is a test error" intent.
    try:
        for val in avail:
            pass
    except BaseException:
        test_error("Available values is not iterable")
def test_peek_at_datum_on_port():
    """Push a complete datum through an edge and peek at it on the input port."""
    from sprokit.pipeline import process
    from sprokit.pipeline import datum, DatumType
    from sprokit.pipeline import config
    from sprokit.pipeline import edge
    from sprokit.pipeline import stamp

    cfg = config.empty_config()

    # Dummy receiver process with a single input port.
    receiver = process.PythonProcess(cfg)
    flags = process.PortFlags()
    receiver.declare_input_port("test_port", "test", flags, "test_port")

    # Wire the port up to a fresh edge.
    test_edge = edge.Edge()
    receiver.connect_input_port("test_port", test_edge)

    # Push a complete datum, tagged with a new stamp, into the edge.
    new_stamp = stamp.new_stamp(1)
    test_edge.push_datum(edge.EdgeDatum(datum.complete(), new_stamp))

    peeked_type = receiver.peek_at_datum_on_port("test_port").type()
    if peeked_type != DatumType.complete:
        test_error("Datum mismatch: expected a complete datum, got {0}".format(peeked_type))
def test_cluster_multiplier(path):
    """Bake the cluster file in every supported way, then register it.

    Args:
        path: Filesystem path to the cluster definition file.
    """
    from sprokit.pipeline import config
    from sprokit.pipeline import pipeline
    from sprokit.pipeline import modules
    from sprokit.pipeline_util import bake
    from sprokit.pipeline_util import load

    blocks = load.load_cluster_file(path)

    modules.load_known_modules()

    # Bake from a path, from an open stream, and from pre-parsed blocks.
    bake.bake_cluster_file(path)
    with open(path, 'r') as fin:
        bake.bake_cluster(fin)
    info = bake.bake_cluster_blocks(blocks)

    conf = config.empty_config()

    info.type()
    info.description()
    info.create()
    info.create(conf)

    bake.register_cluster(info)
def _make_monitor_edge_config(self):
    # Initialize the monitor-edge configuration with an empty sprokit config.
    self._edge_conf = config.empty_config()
def test_python_via_cpp(sched_type):
    """Run two Python sources through a C++ multiplier into a Python sink.

    Args:
        sched_type: Name of the scheduler implementation to run with.
    """
    from sprokit.pipeline import config
    from sprokit.pipeline import pipeline
    from sprokit.pipeline import process

    name_source1 = 'source1'
    name_source2 = 'source2'
    name_mult = 'mult'
    name_sink = 'sink'

    # port_input1/2 and port_output1/2 were declared but never used; removed.
    port_output = 'number'
    port_factor1 = 'factor1'
    port_factor2 = 'factor2'
    port_product = 'product'
    port_input = 'number'

    min1 = 0
    max1 = 10
    min2 = 10
    max2 = 15

    output_file = 'test-python-run-python_via_cpp.txt'

    c = config.empty_config()
    c.set_value(process.PythonProcess.config_name, name_source1)
    c.set_value('start', str(min1))
    c.set_value('end', str(max1))
    s1 = make_source(c)

    c = config.empty_config()
    c.set_value(process.PythonProcess.config_name, name_source2)
    c.set_value('start', str(min2))
    c.set_value('end', str(max2))
    s2 = make_source(c)

    c = config.empty_config()
    m = create_process('multiplication', name_mult, c)

    c = config.empty_config()
    c.set_value(process.PythonProcess.config_name, name_sink)
    c.set_value('output', output_file)
    t = make_sink(c)

    p = pipeline.Pipeline()
    p.add_process(s1)
    p.add_process(s2)
    p.add_process(m)
    p.add_process(t)

    p.connect(name_source1, port_output, name_mult, port_factor1)
    p.connect(name_source2, port_output, name_mult, port_factor2)
    p.connect(name_mult, port_product, name_sink, port_input)

    p.setup_pipeline()

    run_pipeline(sched_type, p, c)

    # zip iterates the ranges directly; no need to materialize lists first.
    check_file(output_file,
               [a * b for a, b in zip(range(min1, max1), range(min2, max2))])
def test_python_via_cpp(sched_type):
    """Run two Python sources through a C++ multiplier into a Python sink.

    Args:
        sched_type: Name of the scheduler implementation to run with.
    """
    from sprokit.pipeline import config
    from sprokit.pipeline import pipeline
    from sprokit.pipeline import process

    name_source1 = 'source1'
    name_source2 = 'source2'
    name_mult = 'mult'
    name_sink = 'sink'

    # port_input1/2 and port_output1/2 were declared but never used; removed.
    port_output = 'number'
    port_factor1 = 'factor1'
    port_factor2 = 'factor2'
    port_product = 'product'
    port_input = 'number'

    min1 = 0
    max1 = 10
    min2 = 10
    max2 = 15

    output_file = 'test-python-run-python_via_cpp.txt'

    c = config.empty_config()
    c.set_value(process.PythonProcess.config_name, name_source1)
    c.set_value('start', str(min1))
    c.set_value('end', str(max1))
    s1 = make_source(c)

    c = config.empty_config()
    c.set_value(process.PythonProcess.config_name, name_source2)
    c.set_value('start', str(min2))
    c.set_value('end', str(max2))
    s2 = make_source(c)

    c = config.empty_config()
    m = create_process('multiplication', name_mult, c)

    c = config.empty_config()
    c.set_value(process.PythonProcess.config_name, name_sink)
    c.set_value('output', output_file)
    t = make_sink(c)

    p = pipeline.Pipeline()
    p.add_process(s1)
    p.add_process(s2)
    p.add_process(m)
    p.add_process(t)

    p.connect(name_source1, port_output, name_mult, port_factor1)
    p.connect(name_source2, port_output, name_mult, port_factor2)
    p.connect(name_mult, port_product, name_sink, port_input)

    p.setup_pipeline()

    run_pipeline(sched_type, p, c)

    # zip iterates the ranges directly; no need to materialize lists first.
    check_file(output_file,
               [a * b for a, b in zip(range(min1, max1), range(min2, max2))])
def test_api_calls():
    """Exercise the full Pipeline query/mutation API on a small pipeline."""
    from sprokit.pipeline import config
    from sprokit.pipeline import edge
    from sprokit.pipeline import modules
    from sprokit.pipeline import pipeline
    from sprokit.pipeline import process
    from sprokit.pipeline import process_cluster
    from sprokit.pipeline import process_factory

    pipe = pipeline.Pipeline()

    type_source = 'numbers'
    type_sink = 'print_number'
    type_cluster = 'orphan_cluster'

    name_source = 'src'
    name_sink = 'snk'
    name_cluster = 'orp'

    port_out = 'number'
    port_in = 'number'

    modules.load_known_modules()

    proc_source = process_factory.create_process(type_source, name_source)

    conf = config.empty_config()
    conf.set_value('output', 'test-python-pipeline-api_calls-print_number.txt')
    proc_sink = process_factory.create_process(type_sink, name_sink, conf)

    proc_cluster = process_factory.create_process(type_cluster, name_cluster)

    pipe.add_process(proc_source)
    pipe.add_process(proc_sink)
    pipe.add_process(proc_cluster)
    pipe.connect(name_source, port_out, name_sink, port_in)

    # Query the as-built (not yet set up) pipeline.
    pipe.process_names()
    pipe.process_by_name(name_source)
    pipe.cluster_names()
    pipe.cluster_by_name(name_cluster)
    pipe.connections_from_addr(name_source, port_out)
    pipe.connection_to_addr(name_sink, port_in)

    pipe.disconnect(name_source, port_out, name_sink, port_in)
    pipe.remove_process(name_source)
    pipe.remove_process(name_cluster)

    # Restore the pipeline so that setup_pipeline works.
    pipe.add_process(proc_source)
    pipe.connect(name_source, port_out, name_sink, port_in)

    pipe.setup_pipeline()

    # Query the fully set-up pipeline.
    pipe.upstream_for_process(name_sink)
    pipe.upstream_for_port(name_sink, port_in)
    pipe.downstream_for_process(name_source)
    pipe.downstream_for_port(name_source, port_out)
    pipe.sender_for_port(name_sink, port_in)
    pipe.receivers_for_port(name_source, port_out)
    pipe.edge_for_connection(name_source, port_out, name_sink, port_in)
    pipe.input_edges_for_process(name_sink)
    pipe.input_edge_for_port(name_sink, port_in)
    pipe.output_edges_for_process(name_source)
    pipe.output_edges_for_port(name_source, port_out)

    pipe.is_setup()
    pipe.setup_successful()

    conf = config.empty_config()
    pipe.reconfigure(conf)

    pipe.reset()
def test_api_calls():
    """Exercise the full Pipeline API using the process registry factory."""
    from sprokit.pipeline import config
    from sprokit.pipeline import edge
    from sprokit.pipeline import modules
    from sprokit.pipeline import pipeline
    from sprokit.pipeline import process
    from sprokit.pipeline import process_cluster
    from sprokit.pipeline import process_registry

    pipe = pipeline.Pipeline()

    type_source = 'numbers'
    type_sink = 'print_number'
    type_cluster = 'orphan_cluster'

    name_source = 'src'
    name_sink = 'snk'
    name_cluster = 'orp'

    port_out = 'number'
    port_in = 'number'

    modules.load_known_modules()

    reg = process_registry.ProcessRegistry.self()

    proc_source = reg.create_process(type_source, name_source)

    conf = config.empty_config()
    conf.set_value('output', 'test-python-pipeline-api_calls-print_number.txt')
    proc_sink = reg.create_process(type_sink, name_sink, conf)

    proc_cluster = reg.create_process(type_cluster, name_cluster)

    pipe.add_process(proc_source)
    pipe.add_process(proc_sink)
    pipe.add_process(proc_cluster)
    pipe.connect(name_source, port_out, name_sink, port_in)

    # Query the as-built (not yet set up) pipeline.
    pipe.process_names()
    pipe.process_by_name(name_source)
    pipe.cluster_names()
    pipe.cluster_by_name(name_cluster)
    pipe.connections_from_addr(name_source, port_out)
    pipe.connection_to_addr(name_sink, port_in)

    pipe.disconnect(name_source, port_out, name_sink, port_in)
    pipe.remove_process(name_source)
    pipe.remove_process(name_cluster)

    # Restore the pipeline so that setup_pipeline works.
    pipe.add_process(proc_source)
    pipe.connect(name_source, port_out, name_sink, port_in)

    pipe.setup_pipeline()

    # Query the fully set-up pipeline.
    pipe.upstream_for_process(name_sink)
    pipe.upstream_for_port(name_sink, port_in)
    pipe.downstream_for_process(name_source)
    pipe.downstream_for_port(name_source, port_out)
    pipe.sender_for_port(name_sink, port_in)
    pipe.receivers_for_port(name_source, port_out)
    pipe.edge_for_connection(name_source, port_out, name_sink, port_in)
    pipe.input_edges_for_process(name_sink)
    pipe.input_edge_for_port(name_sink, port_in)
    pipe.output_edges_for_process(name_source)
    pipe.output_edges_for_port(name_source, port_out)

    pipe.is_setup()
    pipe.setup_successful()

    conf = config.empty_config()
    pipe.reconfigure(conf)

    pipe.reset()