def test_import():
    """Verify that the process_cluster module and its dependencies import."""
    try:
        import kwiver.vital.config
        import kwiver.sprokit.pipeline.process
        import kwiver.sprokit.pipeline.process_cluster
    except ImportError:
        # Catch only import failures; a bare "except:" would also hide
        # SystemExit and KeyboardInterrupt.
        test_error("Failed to import the process_cluster module")
def test_api_calls():
    """Touch the public constants on PythonProcessCluster, then verify that a
    plain PythonProcess is not reported as a cluster."""
    from kwiver.vital.config import config
    from kwiver.sprokit.pipeline import process
    from kwiver.sprokit.pipeline import process_cluster

    cluster_cls = process_cluster.PythonProcessCluster
    constant_names = (
        'property_no_threads',
        'property_no_reentrancy',
        'property_unsync_input',
        'property_unsync_output',
        'type_any',
        'type_none',
        'type_data_dependent',
        'type_flow_dependent',
        'flag_output_const',
        'flag_output_shared',
        'flag_input_static',
        'flag_input_mutable',
        'flag_input_nodep',
        'flag_required',
    )
    # Accessing each attribute raises AttributeError if the binding is missing.
    for attr_name in constant_names:
        getattr(cluster_cls, attr_name)

    class BaseProcess(process.PythonProcess):
        def __init__(self, conf):
            process.PythonProcess.__init__(self, conf)

    c = config.empty_config()
    p = BaseProcess(c)

    if process_cluster.cluster_from_process(p) is not None:
        test_error("A non-cluster process was detected as a cluster process")
def test_register():
    """Register a Python scheduler type; it must be describable and creatable."""
    from kwiver.vital.config import config
    from kwiver.vital.modules import modules
    from kwiver.sprokit.pipeline import pipeline
    from kwiver.sprokit.pipeline import scheduler_factory

    modules.load_known_modules()

    sched_type = 'python_example'
    sched_desc = 'simple description'
    scheduler_factory.add_scheduler(sched_type, sched_desc,
                                    example_scheduler(True))

    if scheduler_factory.description(sched_type) != sched_desc:
        test_error("Description was not preserved when registering")

    p = pipeline.Pipeline()
    try:
        # Creation failing, or yielding None, both count as failure.
        if scheduler_factory.create_scheduler(sched_type, p) is None:
            raise Exception()
    except:
        test_error("Could not create newly registered scheduler type")
def test_enums():
    """Check the integer values of the DataSetType enumeration members."""
    from kwiver.sprokit.adapters import adapter_data_set

    expected_values = {'data': 1, 'end_of_input': 2}
    for member_name, expected in expected_values.items():
        member = getattr(adapter_data_set.DataSetType, member_name)
        if int(member) != expected:
            test_error("adapter_data_set enum value mismatch: {}".format(member_name))
def check_same_type(retrieved_val, val, portname):
    """Report a test error unless retrieved_val has exactly val's type.

    If val is a datum.Datum wrapper, the comparison is done against the
    wrapped payload instead.
    """
    from kwiver.sprokit.adapters import adapter_data_set

    if isinstance(val, datum.Datum):
        val = val.get_datum()

    actual_t = type(retrieved_val)
    expected_t = type(val)
    if actual_t is not expected_t:
        test_error(
            "Retrieved value of type: {} at port {}. Expected type: {}".format(
                actual_t, portname, expected_t))
def test_masking():
    """Masked-out Python processes must not appear in the factory listing."""
    from kwiver.vital.modules import modules
    from kwiver.sprokit.pipeline import process_factory

    modules.load_known_modules()
    if 'test_python_process' in process_factory.types():
        test_error("Failed to mask out Python processes")
def test_extra_modules():
    """Extra Python processes must appear in the factory listing."""
    from kwiver.vital.modules import modules
    from kwiver.sprokit.pipeline import process_factory

    modules.load_known_modules()
    if 'extra_test_python_process' not in process_factory.types():
        test_error("Failed to load extra Python processes")
def __del__(self):
    """On destruction, verify every lifecycle override was exercised.

    Reads the ran_* flags set by the corresponding overrides; any flag
    still False means that override was never called.
    """
    override_flags = (
        (self.ran_start, "start"),
        (self.ran_wait, "wait"),
        (self.ran_stop, "stop"),
        (self.ran_pause, "pause"),
        (self.ran_resume, "resume"),
    )
    for was_run, override_name in override_flags:
        if not was_run:
            test_error("{} override was not called".format(override_name))
def test_len():
    """len() of an AdapterDataSet must match the number of values it holds."""
    from kwiver.sprokit.adapters import adapter_data_set

    # A freshly created set must report zero entries.
    empty_ads = adapter_data_set.AdapterDataSet.create()
    if len(empty_ads) != 0:
        test_error("adapter_data_set with 0 values returned size {}".format(len(empty_ads)))

    # The shared fixture populates exactly three ports.
    filled_ads = _create_ads()
    if len(filled_ads) != 3:
        test_error("adapter_data_set with 3 values returned size {}".format(len(filled_ads)))
def check_scheduler(s):
    """Drive a scheduler through its control API, then release it."""
    if s is None:
        test_error("Got a 'None' scheduler")
        return

    # Same call sequence as before: pause/resume mid-run, stop, then a
    # second start followed by wait.
    for operation in ('start', 'pause', 'resume', 'stop', 'start', 'wait'):
        getattr(s, operation)()
    del s
def test_environment(): import os envvar = 'TEST_ENVVAR' if envvar not in os.environ: test_error('failed to get environment from CTest') else: expected = 'test_value' envvalue = os.environ[envvar] if envvalue != expected: test_error('did not get expected value')
def check_process(p):
    """Exercise the introspection/configuration API of a process.

    NOTE(review): relies on module-level names iport, oport, key, ptype,
    edge and expect_exception defined elsewhere in this file.
    """
    if p is None:
        test_error("Got a 'None' process")
        return

    p.properties()

    p.input_ports()
    p.output_ports()

    # Unknown port names must raise RuntimeError.
    expect_exception("asking for info on a non-existant input port", RuntimeError,
                     p.input_port_info, iport)
    expect_exception("asking for info on a non-existant output port", RuntimeError,
                     p.output_port_info, oport)

    e = edge.Edge()

    expect_exception("connecting to a non-existant input port", RuntimeError,
                     p.connect_input_port, iport, e)
    expect_exception("connecting to a non-existant output port", RuntimeError,
                     p.connect_output_port, oport, e)

    p.available_config()
    p.available_tunable_config()

    expect_exception("asking for info on a non-existant config key", RuntimeError,
                     p.config_info, key)

    expect_exception("setting a type on a non-existent input port", RuntimeError,
                     p.set_input_port_type, iport, ptype)
    expect_exception("setting a type on a non-existent output port", RuntimeError,
                     p.set_output_port_type, oport, ptype)

    p.reset()

    p.configure()
    p.init()
    # TODO: Can't check this because the core frequency of the process
    # cannot be set. Needs to be stepped within a pipeline to verify this.
    # Enable the ran_step check in p.check when this is fixed.
    #p.step()

    # TODO: Can't check this because only the process_cluster base class
    # and the pipeline may reconfigure a process. Needs to be stepped
    # within a pipeline to verify this. Enable the ran_step check in
    # p.check when this is fixed.
    #p.reconfigure(reconf)

    del p
def test_python_to_python(sched_type):
    """Run a source->sink pipeline of two Python processes and verify output.

    Args:
        sched_type: name of the scheduler type used to run the pipeline.
    """
    from kwiver.vital.config import config
    from kwiver.sprokit.pipeline import pipeline
    from kwiver.sprokit.pipeline import process

    name_source = 'source'
    name_sink = 'sink'

    port_output = 'number'
    port_input = 'number'

    # Renamed from min/max so the builtins are not shadowed.
    start = 0
    end = 10
    output_file = 'test-python-run-python_to_python.txt'

    c = config.empty_config()
    c.set_value(process.PythonProcess.config_name, name_source)
    c.set_value('start', str(start))
    c.set_value('end', str(end))
    s = make_source(c)

    c = config.empty_config()
    c.set_value(process.PythonProcess.config_name, name_sink)
    c.set_value('output', output_file)
    t = make_sink(c)

    # NOTE(review): the sink's config is passed to the pipeline here,
    # matching the original code — confirm this is intentional.
    p = pipeline.Pipeline(c)

    p.add_process(s)
    p.add_process(t)

    p.connect(name_source, port_output, name_sink, port_input)

    p.setup_pipeline()

    run_pipeline(sched_type, p, c)

    # range(start, end) matches the half-open interval the source emits.
    check_file(output_file, list(range(start, end)))

    if t.stepped and not t.finalized:
        test_error("sink process not finalized")
def overwrite_helper(instance_add_fxn, instance_get_fxn, val, new_data_type_str, portname):
    """Overwrite the value stored at portname, read it back, and verify it.

    instance_add_fxn stores val at portname; instance_get_fxn retrieves it.
    A Datum-wrapped val is compared against its wrapped payload.
    """
    from kwiver.sprokit.adapters import adapter_data_set

    instance_add_fxn(portname, val)
    try:
        readback = instance_get_fxn(portname)
    except RuntimeError:
        test_error(
            "Failed to get object of type {} after attempting overwrite".
            format(new_data_type_str))
    else:
        expected = val.get_datum() if isinstance(val, datum.Datum) else val
        if readback != expected:
            test_error(
                "Retrieved incorrect value after overwriting with {}".format(
                    new_data_type_str))
def test_register():
    """Register a Python process type; it must be describable and creatable."""
    from kwiver.vital.config import config
    from kwiver.sprokit.pipeline import process
    from kwiver.sprokit.pipeline import process_factory

    proc_type = 'python_example'
    proc_desc = 'simple description'
    process_factory.add_process(proc_type, proc_desc, example_process(True))

    if process_factory.description(proc_type) != proc_desc:
        test_error("Description was not preserved when registering")

    try:
        # Creation failing, or yielding None, both count as failure.
        if process_factory.create_process(proc_type, '') is None:
            raise Exception()
    except:
        test_error("Could not create newly registered process type")
def test_register_cluster():
    """Register a process cluster type; creation must yield a cluster process."""
    from kwiver.vital.config import config
    from kwiver.sprokit.pipeline import process
    from kwiver.sprokit.pipeline import process_cluster
    from kwiver.sprokit.pipeline import process_factory

    proc_type = 'python_example'
    proc_desc = 'simple description'
    process_factory.add_process(proc_type, proc_desc,
                                base_example_process_cluster())

    if process_factory.description(proc_type) != proc_desc:
        test_error("Description was not preserved when registering")

    p = None
    try:
        p = process_factory.create_process(proc_type, '')
        if p is None:
            raise Exception()
    except BaseException as e:
        # Include the triggering exception in the report.
        test_error(
            "Could not create newly registered process cluster type: %s"
            % str(e))

    if process_cluster.cluster_from_process(p) is None:
        test_error(
            "A cluster process from the registry was not detected as a cluster process"
        )
def check_file(fname, expect):
    """Compare the integer-per-line contents of fname against expect.

    NOTE(review): relies on module-level sched_type / cpp_scheds defined
    elsewhere in this file.
    """
    # Don't check for c++ scheds--we shouldn't have produced a file, and
    # don't want to get caught up with leftovers
    if sched_type in cpp_scheds:
        return

    with open(fname, 'r') as fin:
        # Dropped the redundant list(...) wrapper around the comprehension.
        ints = [int(l.strip()) for l in fin]

    num_ints = len(ints)
    num_expect = len(expect)

    if num_ints != num_expect:
        test_error("Got %d results when %d were expected." % (num_ints, num_expect))

    # enumerate(start=1) replaces the hand-maintained line counter.
    for line, (i, e) in enumerate(zip(ints, expect), start=1):
        if i != e:
            test_error("Result %d is %d, where %d was expected" % (line, i, e))
def test_iter():
    """Iterate an adapter data set, checking each port's datum payload."""
    from kwiver.vital import types as kvt
    from kwiver.sprokit.adapters import adapter_data_set

    # Expected payload and failure message per known port.
    checks = {
        "string_port": (
            "string_value",
            "Didn't retrieve correct string value on first iteration",
        ),
        "timestamp_port": (
            kvt.Timestamp(1000000000, 10),
            "Didn't retrieve correct timestamp value on second iteration",
        ),
        "vector_string_port": (
            datum.VectorString(["element1", "element2"]),
            "Didn't retrieve correct string vector on third iteration",
        ),
    }

    for port, dat in _create_ads():
        if port not in checks:
            test_error("unknown port: {}".format(port))
            continue
        expected_value, failure_message = checks[port]
        if dat.get_datum() != expected_value:
            test_error(failure_message)
def test_complete():
    """A complete datum has complete type, no error string, and no payload."""
    from kwiver.sprokit.pipeline import datum

    d = datum.complete()

    if d.type() != datum.DatumType.complete:
        test_error("Datum type mismatch")
    if d.get_error():
        test_error("A complete datum has an error string")
    if d.get_datum() is not None:
        test_error("A complete datum does not have None as its data")
def test_new():
    """A data datum has data type, no error string, and a non-None payload."""
    from kwiver.sprokit.pipeline import datum

    d = datum.new('test_datum')

    if d.type() != datum.DatumType.data:
        test_error("Datum type mismatch")
    if d.get_error():
        test_error("A data datum has an error string")
    if d.get_datum() is None:
        test_error("A data datum has None as its data")
def test_error_():
    """An error datum keeps its message, has error type, and a None payload."""
    from kwiver.sprokit.pipeline import datum

    err = 'An error'
    d = datum.error(err)

    if d.type() != datum.DatumType.error:
        test_error("Datum type mismatch")
    if d.get_error() != err:
        test_error("An error datum did not keep the message")
    if d.get_datum() is not None:
        test_error("An error datum does not have None as its data")
def test_api_calls():
    """Exercise stamp creation, incrementing, and comparison operators."""
    from kwiver.sprokit.pipeline import stamp

    s = stamp.new_stamp(1)
    s_inc = stamp.incremented_stamp(s)
    t = stamp.new_stamp(2)

    if s > s_inc:
        test_error("A stamp is greater than its increment")
    if s_inc < s:
        test_error("A stamp is greater than its increment")

    # Incrementing both stamps once more should land them on equal values
    # despite their different rates.
    if stamp.incremented_stamp(s_inc) != stamp.incremented_stamp(t):
        test_error("Stamps with different rates do not compare as equal")
def check_type():
    """Check AdapterDataSet.type() for each construction flavor."""
    from kwiver.sprokit.adapters import adapter_data_set

    # BUG FIX: this result was previously bound to "ads", leaving the
    # "ads_def" used below undefined (NameError at runtime).
    ads_def = (
        adapter_data_set.AdapterDataSet.create()
    )  # Check constructor with default argument
    ads_data = adapter_data_set.AdapterDataSet.create(adapter_data_set.DataSetType.data)
    ads_eoi = adapter_data_set.AdapterDataSet.create(
        adapter_data_set.DataSetType.end_of_input
    )

    if ads_def.type() != adapter_data_set.DataSetType.data:
        test_error("adapter_data_set type mismatch: constructor with default arg")
    if ads_data.type() != adapter_data_set.DataSetType.data:
        test_error("adapter_data_set type mismatch: constructor with data arg")
    if ads_eoi.type() != adapter_data_set.DataSetType.end_of_input:
        test_error("adapter_data_set type mismatch: constructor with end_of_input arg")
def test_is_end_of_data():
    """Only an end_of_input adapter data set should report end-of-data."""
    from kwiver.sprokit.adapters import adapter_data_set

    make = adapter_data_set.AdapterDataSet.create
    ads_def = make()  # test default argument
    ads_data = make(adapter_data_set.DataSetType.data)
    ads_eoi = make(adapter_data_set.DataSetType.end_of_input)

    if ads_def.is_end_of_data():
        test_error(
            'adapter data set of type "data" is empty: constructor with default arg'
        )
    if ads_data.is_end_of_data():
        test_error(
            'adapter data set of type "data" is empty: constructor with data arg'
        )
    if not ads_eoi.is_end_of_data():
        test_error('adapter_data_set of type "end_of_input" is not empty')
def test_import():
    """Verify that the datum module can be imported."""
    try:
        import kwiver.sprokit.pipeline.datum
    except ImportError:
        # Catch only import failures; a bare "except:" would also hide
        # SystemExit and KeyboardInterrupt.
        test_error("Failed to import the datum module")
def check_same_type(retrieved_val, val):
    """Report a test error unless both values have exactly the same type."""
    actual_t = type(retrieved_val)
    expected_t = type(val)
    if actual_t is not expected_t:
        test_error(
            "Retrieved value of type: {}. Expected type: {}".format(
                actual_t, expected_t))
datum.new_track_set, None, ) expect_exception( "attempting to store None as a timestamp", TypeError, datum.new_timestamp, None, ) # Should also fail for the automatic type conversion expect_exception( "attempting to store none through automatic conversion", TypeError, datum.new, None, ) if __name__ == '__main__': import sys if len(sys.argv) != 2: test_error("Expected two arguments") sys.exit(1) testname = sys.argv[1] run_test(testname, find_tests(locals()))
def test_api_calls(cpp_pipeline_dir, py_pipeline_dir):
    """End-to-end exercise of EmbeddedPipeline.

    Covers port introspection, premature wait(), send/receive round trips,
    end-of-input handling, and stop(). Uses the .pipe file found under
    cpp_pipeline_dir; py_pipeline_dir is unused here.
    """
    from kwiver.sprokit.adapters import embedded_pipeline
    from kwiver.sprokit.adapters import adapter_data_set

    pipeline_fname = "simple_embedded_pipeline.pipe"

    path_to_pipe_file = os.path.join(cpp_pipeline_dir, pipeline_fname)

    ep = embedded_pipeline.EmbeddedPipeline()
    ep.build_pipeline(path_to_pipe_file)

    # Check the input_ports
    input_list = ep.input_port_names()
    if len(input_list) != 3:
        test_error(
            "input_port_list() returned list of length {}, expected {}".format(
                len(input_list), 3))
    for port in input_list:
        print(" " + port)

    # Check the output ports
    output_list = ep.output_port_names()
    if len(output_list) != 3:
        test_error(
            "output_port_list() returned list of length {}, expected {}".
            format(len(output_list), 3))
    for port in output_list:
        print(" " + port)

    # Test that we can't call wait() yet
    try:
        ep.wait()
    except RuntimeError:
        pass
    else:
        test_error("Calling wait() before start() should throw an error")

    ep.start()

    # Now send some data
    for i in range(10):
        ds = adapter_data_set.AdapterDataSet.create()
        # NOTE(review): enumerate(..., start=i) makes "val" the running index
        # (starting at i) and "port" the port name — the value stored on each
        # port therefore depends on both the iteration and the port position.
        for val, port in enumerate(input_list, start=i):
            ds[port] = val
        print("sending set:", i)
        ep.send(ds)

    print("Sending end of input element")
    ep.send_end_of_input()

    print("pipeline is full:", ep.full())
    print("pipeline is empty:", ep.empty())

    # Drain results until the end-of-data marker arrives.
    while True:
        ods = ep.receive()

        if ods.is_end_of_data():
            if not ep.at_end():
                test_error("at_end() not set correctly")
            break

        for (port, d) in ods:
            print(" port:", port, " value:", d.get_int())

    ep.wait()

    #######
    # Still need to test stop()
    ep = embedded_pipeline.EmbeddedPipeline()
    ep.build_pipeline(path_to_pipe_file)

    ep.start()

    ds = adapter_data_set.AdapterDataSet.create()

    ep.send_end_of_input()
    ods = ep.receive()
    ep.stop()
def test_import(cpp_pipeline_dir, py_pipeline_dir):
    """Verify that the embedded pipeline module can be imported.

    The directory arguments are part of this suite's common test signature
    and are unused here.
    """
    try:
        import kwiver.sprokit.adapters.embedded_pipeline
    except ImportError:
        # Catch only import failures; a bare "except:" would also hide
        # SystemExit and KeyboardInterrupt.
        test_error("Failed to import the embedded pipeline module")
from kwiver.sprokit.pipeline import datum ads_in = adapter_data_set.AdapterDataSet.create() ads_in["detected_object_set"] = _create_detected_object_set() print("Starting roundtrip pipeline with a detected_object_set") run_roundtrip_pipeline(py_pipeline_dir, ads_in) # Create fresh objects. Make sure that this also works with adding datums directly ads_in = adapter_data_set.AdapterDataSet.create() ads_in["detected_object_set"] = datum.new(_create_detected_object_set()) print( "Starting roundtrip pipeline with a datum containing a detected_object_set" ) run_roundtrip_pipeline(py_pipeline_dir, ads_in) if __name__ == "__main__": import os import sys if len(sys.argv) != 4: test_error("Expected four arguments") sys.exit(1) testname = sys.argv[1] cpp_pipeline_dir = sys.argv[2] py_pipeline_dir = sys.argv[3] run_test(testname, find_tests(locals()), cpp_pipeline_dir, py_pipeline_dir)