def test_gpu_nodes_accepted(monkeypatch):
    """GPU nodes are accepted by the realtime execution engine.

    Covers two cases: (1) as many GPU-only processes as GPUs reported by
    the (mocked) system, and (2) GPU-preferring nodes that must not raise
    when no GPU can be assigned to them.
    """
    def gpus_mock():
        # Pretend the host exposes exactly three GPUs (ids 0, 1 and 3).
        return [0, 1, 3]
    monkeypatch.setattr(videoflow.engines.realtime, 'get_gpus_available_to_process', gpus_mock)
    #1. Test that gpu nodes are accepted by having same number of gpu
    # processes as gpus in the system
    A = IntProducer()
    B = IdentityProcessorGpuOnly()(A)
    C = IdentityProcessorGpuOnly()(B)
    D = IdentityProcessorGpuOnly()(C)
    E = JoinerProcessor()(D)
    F = JoinerProcessor()(B, C, E, D)
    tsort = topological_sort([A])
    tasks_data = _task_data_from_node_tsort(tsort)
    ee = RealtimeExecutionEngine()
    # Must not raise: 3 GPU-only nodes, 3 mocked GPUs.
    ee._al_create_processes(tasks_data)
    #2. Test that gpu nodes are accepted by having nodes not thrown an
    #error if gpu is not available
    A1 = IntProducer()
    B1 = IdentityProcessor(device_type=GPU)(A1)
    C1 = IdentityProcessor(device_type=GPU)(B1)
    D1 = IdentityProcessor(device_type=GPU)(A1)
    # NOTE(review): this rebinding shadows the IdentityProcessor node
    # assigned to D1 on the previous line (that node stays attached to
    # A1 in the graph) — confirm the shadowing is intentional.
    D1 = JoinerProcessor(device_type=GPU)(C1)
    E1 = JoinerProcessor(device_type=GPU)(B1, C1, D1)
    tsort = topological_sort([A1])
    tasks_data = _task_data_from_node_tsort(tsort)
    ee1 = RealtimeExecutionEngine()
    # Must not raise even though more GPU nodes exist than mocked GPUs.
    ee1._al_create_processes(tasks_data)
def test_no_raise_error():
    """Building a GraphEngine over a well-formed linear graph succeeds."""
    producer = IntProducer()
    first = IdentityProcessor()(producer)
    second = IdentityProcessor()(first)
    consumer = CommandlineConsumer()(second)
    GraphEngine([producer], [consumer])
def test_raise_error_1():
    """GraphEngine rejects a consumer that was never wired into the graph."""
    producer = IntProducer()
    first = IdentityProcessor()(producer)
    second = IdentityProcessor()(first)
    orphan = CommandlineConsumer()  # never attached to a parent node
    with pytest.raises(ValueError):
        GraphEngine([producer], [orphan])
def test_mp_example1():
    """A small multi-task flow runs to completion and joins cleanly."""
    source = IntProducer(0, 40, 0.1)
    stage_a = IdentityProcessor(nb_tasks=5)(source)
    stage_b = IdentityProcessor(nb_tasks=5)(stage_a)
    merged = JoinerProcessor(nb_tasks=5)(stage_a, stage_b)
    sink = CommandlineConsumer()(merged)
    flow = Flow([source], [sink])
    flow.run()
    flow.join()
def test_taskmodulenode_example1():
    """A flow containing a TaskModuleNode runs to completion."""
    source = IntProducer(0, 40, 0.05)
    stage_a = IdentityProcessor(nb_tasks=1)(source)
    stage_b = IdentityProcessor(nb_tasks=1)(stage_a)
    merged = JoinerProcessor(nb_tasks=1)(stage_a, stage_b)
    module = TaskModuleNode(stage_a, merged)
    sink = CommandlineConsumer()(module)
    flow = Flow([source], [sink])
    flow.run()
    flow.join()
def test_graph_with_no_consumer():
    """A graph without any consumer should still run as a flow."""
    source = IntProducer(0, 40, 0.05)
    stage_a = IdentityProcessor(nb_tasks=1)(source)
    stage_b = IdentityProcessor(nb_tasks=1)(stage_a)
    merged = JoinerProcessor(nb_tasks=1)(stage_a, stage_b)
    TaskModuleNode(stage_a, merged)
    flow = Flow([source], [])
    flow.run()
    flow.join()
def test_setting_parents_twice():
    """Assigning a node's parents a second time raises RuntimeError."""
    parent = IdentityProcessor()
    child = IdentityProcessor()(parent)
    with pytest.raises(RuntimeError):
        child(parent)
    # Repeat to confirm the first failure was not a fluke.
    with pytest.raises(RuntimeError):
        child(parent)
def test_topological_sort():
    """topological_sort returns the nodes of a linear chain in order."""
    a = IntProducer()
    b = IdentityProcessor()(a)
    c = IdentityProcessor()(b)
    d = IdentityProcessor()(c)
    e = IdentityProcessor()(d)
    expected = [a, b, c, d, e]
    result = topological_sort([a])
    assert len(result) == len(expected), "topological sort returned different number of nodes"
    # Identity (not equality) comparison: the very same node objects
    # must come back, in chain order.
    assert all(got is want for got, want in zip(result, expected)), "wrong topological sort"
def test_graph_with_deadend_processor():
    """A dead-end processor (one with no downstream consumer) must not
    prevent the flow from running."""
    source = IntProducer(0, 40, 0.05)
    stage_a = IdentityProcessor(nb_tasks=1)(source)
    stage_b = IdentityProcessor(nb_tasks=1)(stage_a)
    merged = JoinerProcessor(nb_tasks=1)(stage_a, stage_b)
    module = TaskModuleNode(stage_a, merged)
    IdentityProcessor()(module)  # dead end: its output goes nowhere
    sink = CommandlineConsumer()(module)
    flow = Flow([source], [sink])
    flow.run()
    flow.join()
def test_taskmodule_node_1():
    """A task module can create its own parents without having to take
    them from its entry node."""
    source = IntProducer()
    entry = IdentityProcessor()          # entry node has no parent yet
    middle = IdentityProcessor()(entry)
    tail = IdentityProcessor()(middle)
    module = TaskModuleNode(entry, tail)(source)
    sink = CommandlineConsumer()(module)
    GraphEngine([source], [sink])
def test_nb_tasks_created():
    """The engine creates exactly one process per node in the graph."""
    a = IntProducer()
    b = IdentityProcessor()(a)
    c = IdentityProcessor()(b)
    d = JoinerProcessor()(c)
    e = JoinerProcessor()(b, c, d)
    order = topological_sort([a])
    tasks_data = _task_data_from_node_tsort(order)
    engine = RealtimeExecutionEngine()
    engine._al_create_processes(tasks_data)
    assert len(order) == len(engine._procs)
def test_taskmodule_node_2():
    """A task module picks up the children of its exit node."""
    source = IntProducer()
    entry = IdentityProcessor()(source)
    middle = IdentityProcessor()(entry)
    tail = IdentityProcessor()(middle)
    sink = CommandlineConsumer()(tail)
    module = TaskModuleNode(entry, tail)
    engine = GraphEngine([source], [sink])
    order = engine.topological_sort()
    # The module swallows its interior nodes: producer, module, consumer.
    assert len(order) == 3
    assert module in order
def test_nb_tasks_created_1():
    """With a TaskModuleNode, fewer processes than nodes are created:
    the whole module runs as a single task."""
    source = IntProducer()
    a = IdentityProcessor()(source)
    b = IdentityProcessor()(a)
    c = IdentityProcessor()(b)
    d = JoinerProcessor()(b, c)
    e = IdentityProcessor()(d)
    f = JoinerProcessor()(d, e, c, b)
    module = TaskModuleNode(a, f)
    sink = CommandlineConsumer()(module)
    order = topological_sort([source])
    tasks_data = _task_data_from_node_tsort(order)
    engine = RealtimeExecutionEngine()
    engine._al_create_processes(tasks_data)
    # producer + module + consumer = 3 processes.
    assert len(engine._procs) == 3
def test_taskmodule_node():
    """Simple task-module creation; the module can be part of a flow."""
    # 1. Build a module around a small non-linear subgraph.
    source = IntProducer()
    a = IdentityProcessor()(source)
    b = IdentityProcessor()(a)
    c = IdentityProcessor()(b)
    d = JoinerProcessor()(b, c)
    e = IdentityProcessor()(d)
    f = JoinerProcessor()(d, e, c, b)
    module = TaskModuleNode(a, f)
    sink = CommandlineConsumer()(module)
    # 2. Attaching a consumer to a node already absorbed by the module
    #    must raise.
    with pytest.raises(RuntimeError):
        CommandlineConsumer()(f)
    engine = GraphEngine([source], [sink])
    order = engine.topological_sort()
    # producer + module + consumer = 3 nodes.
    assert len(order) == 3
def test_taskmodule_node_3():
    """Nesting a task module inside another task module is an error."""
    source = IntProducer()
    a = IdentityProcessor()(source)
    b = IdentityProcessor()(a)
    c = IdentityProcessor()(b)
    d = JoinerProcessor()(b, c)
    e = IdentityProcessor()(d)
    f = JoinerProcessor()(d, e, c, b)
    inner = TaskModuleNode(a, f)
    g = IdentityProcessor()(inner)
    h = IdentityProcessor()(g)
    i = IdentityProcessor()(h)
    with pytest.raises(ValueError):
        TaskModuleNode(inner, i)
from videoflow.core import Flow
from videoflow.producers import IntProducer
from videoflow.processors import IdentityProcessor, JoinerProcessor
from videoflow.consumers import CommandlineConsumer

# Example: two rate-limited identity stages feeding a joiner.
# Produces the integers 0..39, one every 0.1 seconds.
source = IntProducer(0, 40, 0.1)
stage_a = IdentityProcessor(fps=2, nb_tasks=5, name='i1')(source)
stage_b = IdentityProcessor(fps=2, nb_tasks=5, name='i2')(stage_a)
merged = JoinerProcessor(nb_tasks=5)(stage_a, stage_b)
sink = CommandlineConsumer()(merged)

flow = Flow([source], [sink])
flow.run()
flow.join()
from videoflow.core import Flow
from videoflow.producers import IntProducer
from videoflow.processors import IdentityProcessor, JoinerProcessor
from videoflow.consumers import CommandlineConsumer

# Example: multi-task pipeline — 5 tasks per processor stage.
# Produces the integers 0..39, one every 0.1 seconds.
source = IntProducer(0, 40, 0.1)
stage_a = IdentityProcessor(nb_tasks=5)(source)
stage_b = IdentityProcessor(nb_tasks=5)(stage_a)
merged = JoinerProcessor(nb_tasks=5)(stage_a, stage_b)
sink = CommandlineConsumer()(merged)

flow = Flow([source], [sink])
flow.run()
flow.join()
def test_cycle_detection():
    """has_cycle detects cycles in linear and non-linear graphs.

    Assertion messages describe the failure condition: "Cycle not
    detected" for asserts that expect a cycle, "Cycle detected" for
    asserts that expect none.
    """
    #1. simple linear graph with cycle
    b = IdentityProcessor()
    c = IdentityProcessor()(b)
    d = IdentityProcessor()(c)
    b(c)  # closes the loop b -> c -> b
    assert has_cycle([b]), '#1 Cycle not detected'

    #2. More complex non linear graph — no cycle reachable from e1 or a1
    a1 = IntProducer()
    b1 = IdentityProcessor()(a1)
    c1 = IdentityProcessor()(b1)
    d1 = IdentityProcessor()(a1)
    e1 = IdentityProcessor()
    f1 = JoinerProcessor()(e1, d1)
    g1 = JoinerProcessor()(c1, b1, d1)
    e1(g1)
    assert not has_cycle([e1]), "#2 Cycle detected"
    # Fix: message used to read "#3 Cycle not detected", but this assert
    # fails precisely when a cycle IS (wrongly) reported — align with #2.
    assert not has_cycle([a1]), "#3 Cycle detected"

    # Same topology except g2 depends on f2, which closes a real cycle
    # through e2.
    a2 = IntProducer()
    b2 = IdentityProcessor()(a2)
    c2 = IdentityProcessor()(b2)
    d2 = IdentityProcessor()(a2)
    e2 = IdentityProcessor()
    f2 = JoinerProcessor()(e2, d2)
    g2 = JoinerProcessor()(c2, b2, f2)
    e2(g2)
    assert has_cycle([e2]), '#4 Cycle not detected'
    assert has_cycle([a2]), "#5 Cycle not detected"
from videoflow.core import Flow
from videoflow.producers import IntProducer
from videoflow.processors import IdentityProcessor, JoinerProcessor
from videoflow.consumers import CommandlineConsumer

# Example: flow with state persistence enabled.
# Produces the integers 0..39, one every 0.1 seconds.
source = IntProducer(0, 40, 0.1)
stage_a = IdentityProcessor()(source)
stage_b = IdentityProcessor()(stage_a)
merged = JoinerProcessor()(stage_a, stage_b)
sink = CommandlineConsumer()(merged)

flow = Flow(
    [source],
    [sink],
    maintain_states=True,
    state_config={"flow_name": "simple_example", "save_interval": 25},
)
flow.run()
flow.join()
from videoflow.core import Flow
from videoflow.producers import IntProducer
from videoflow.processors import IdentityProcessor, JoinerProcessor
from videoflow.consumers import CommandlineConsumer
from videoflow.core.constants import BATCH

# Example: wide fan-out graph executed in BATCH mode.
# Produces the integers 0..99, one every millisecond.
reader = IntProducer(0, 100, 0.001)
game_state_processor = IdentityProcessor(fps=6, nb_tasks=1, name='i1')(reader)

# Per-feature joiner stages, all fed by the reader and the game state.
hero_processors = JoinerProcessor()(reader, game_state_processor)
ability_processor = JoinerProcessor()(reader, game_state_processor, hero_processors)
ammo_processor = JoinerProcessor()(reader, game_state_processor)
death_processor = JoinerProcessor()(reader, game_state_processor)
hp_processor = JoinerProcessor()(reader, game_state_processor)
killfeed_processor = JoinerProcessor(fps=1, nb_tasks=5)(reader, game_state_processor)
map_processor = JoinerProcessor()(reader, game_state_processor)
resurrect_processor = JoinerProcessor()(reader, game_state_processor)
sr_processor = JoinerProcessor()(reader, game_state_processor)
ultimate_processor = JoinerProcessor()(reader, game_state_processor)
player_score_processor = JoinerProcessor()(reader, game_state_processor)

# Fan everything back in before the single consumer.
consumer_before = JoinerProcessor()(
    reader,
    game_state_processor,
    hero_processors,
    death_processor,
    killfeed_processor,
    ammo_processor,
    hp_processor,
    ultimate_processor,
    ability_processor,
    player_score_processor,
    map_processor,
    sr_processor,
    resurrect_processor,
)
consumer = CommandlineConsumer()(consumer_before)

flow = Flow([reader], [consumer], flow_type=BATCH)
flow.run()
flow.join()