def test_flatten_checks_for_dups_globally(self):
    """Flattening must reject duplicate task names across nested flows."""
    flo = gf.Flow("test").add(
        gf.Flow("int1").add(t_utils.DummyTask(name="a")),
        gf.Flow("int2").add(t_utils.DummyTask(name="a")))
    # assertRaisesRegexp is a deprecated alias; use assertRaisesRegex
    # to match the spelling used elsewhere in this suite.
    self.assertRaisesRegex(exc.InvariantViolation,
                           '^Tasks with duplicate names',
                           f_utils.flatten, flo)
def test_checks_for_dups_globally(self):
    """Compilation fails when duplicate atom names exist across subflows."""
    inner_one = gf.Flow("int1").add(test_utils.DummyTask(name="a"))
    inner_two = gf.Flow("int2").add(test_utils.DummyTask(name="a"))
    flo = gf.Flow("test").add(inner_one, inner_two)
    engine = engines.load(flo)
    self.assertRaisesRegex(exc.Duplicate,
                           '^Atoms with duplicate names',
                           engine.compile)
def get_provision_flow():
    """Build the instance provisioning flow.

    The result is a linear flow of three graph flows (pre/main/post),
    each holding tasks that the engine may execute in parallel.

    Returns a tuple of (whole flow, dict) where the dict maps 'pre',
    'main' and 'post' to the corresponding graph flows so callers can
    customise them before execution.
    """
    pre_flow = gf.Flow('PreBootInstance').add(
        AddUserPublicKey('add_user_public_key'),
        GetImage('get_image', provides='image'),
        GetFlavor('get_flavor', provides='flavor'),
        CreateRootVolume('create_root_volume', provides='root_volume_id'))
    main_flow = gf.Flow('BootInstance').add(
        CreateSecurityGroup('create_security_group',
                            provides='security_group'),
        CreateDataVolume('create_data_volume', provides='data_volume_id'),
        ProvisionInstance('provision_instance', provides='server_id'))
    post_flow = gf.Flow('PostBootInstance').add(
        AllocateIPForInstance('allocate_ip_for_instance',
                              provides='address_data'),
        AttachDataVolume('attach_data_volume'),
        RemoveUserPublicKey('remove_user_public_key'))
    return (lf.Flow('ProvisionInstance').add(pre_flow, main_flow, post_flow),
            {'pre': pre_flow, 'main': main_flow, 'post': post_flow})
def test_nested(self):
    """Scopes inside a nested graph flow stay isolated from the parent."""
    root = gf.Flow("root")
    first = test_utils.TaskOneReturn("root.1")
    second = test_utils.TaskOneReturn("root.2")
    root.add(first, second)
    root.link(first, second)

    nested = gf.Flow("subroot")
    nested_first = test_utils.TaskOneReturn("subroot.1")
    nested_second = test_utils.TaskOneReturn("subroot.2")
    nested.add(nested_first, nested_second)
    nested.link(nested_first, nested_second)
    root.add(nested)

    third = test_utils.TaskOneReturn("root.3")
    root.add(third)
    root.link(second, third)

    comp = compiler.PatternCompiler(root).compile()
    self.assertEqual([], _get_scopes(comp, first))
    self.assertEqual([['root.1']], _get_scopes(comp, second))
    self.assertEqual([['root.2', 'root.1']], _get_scopes(comp, third))
    self.assertEqual([], _get_scopes(comp, nested_first))
    self.assertEqual([['subroot.1']], _get_scopes(comp, nested_second))
def test_graph_linear_scope(self):
    """Linking a linear subflow before a graph subflow widens its scopes."""
    root = gf.Flow("root")
    root_one = test_utils.TaskOneReturn("root.1")
    root_two = test_utils.TaskOneReturn("root.2")
    root.add(root_one, root_two)
    root.link(root_one, root_two)

    linear_sub = lf.Flow("subroot")
    linear_one = test_utils.TaskOneReturn("subroot.1")
    linear_two = test_utils.TaskOneReturn("subroot.2")
    linear_sub.add(linear_one, linear_two)
    root.add(linear_sub)

    graph_sub = gf.Flow("subroot2")
    graph_one = test_utils.TaskOneReturn("subroot2.1")
    graph_two = test_utils.TaskOneReturn("subroot2.2")
    graph_sub.add(graph_one, graph_two)
    graph_sub.link(graph_one, graph_two)
    root.add(graph_sub)
    root.link(linear_sub, graph_sub)

    comp = compiler.PatternCompiler(root).compile()
    self.assertEqual([], _get_scopes(comp, root_one))
    self.assertEqual([['root.1']], _get_scopes(comp, root_two))
    self.assertEqual([], _get_scopes(comp, linear_one))
    self.assertEqual([['subroot.1']], _get_scopes(comp, linear_two))
    self.assertEqual([[], ['subroot.2', 'subroot.1']],
                     _get_scopes(comp, graph_one))
    self.assertEqual([['subroot2.1'], ['subroot.2', 'subroot.1']],
                     _get_scopes(comp, graph_two))
def create_flow():
    """Build the mini-nova vm creation flow.

    The overall flow is linear; within the 'vm-maker' step the image,
    network and volume subflows are graph flows whose tasks may run in
    parallel.
    """
    # This does all the image stuff.
    image_flow = gf.Flow("img-maker").add(
        LocateImages("locate_images"),
        DownloadImages("download_images"),
    )
    # This does all the network stuff.
    network_flow = gf.Flow("net-maker").add(
        AllocateIP("get_my_ips"),
        CreateNetworkTpl("fetch_net_settings"),
        WriteNetworkSettings("write_net_settings"),
    )
    # This does all the volume stuff.
    volume_flow = gf.Flow("volume-maker").add(
        AllocateVolumes("allocate_my_volumes", provides='volumes'),
        FormatVolumes("volume_formatter"),
    )
    maker_flow = lf.Flow('vm-maker').add(
        # First create a specification for the final vm to-be.
        DefineVMSpec("define_spec"),
        image_flow,
        network_flow,
        volume_flow,
        # Finally boot it all.
        BootVM("boot-it"),
    )
    return lf.Flow("root").add(
        PrintText("Starting vm creation.", no_slow=True),
        maker_flow,
        # Ya it worked!
        PrintText("Finished vm create.", no_slow=True),
        PrintText("Instance is running!", no_slow=True))
def test_graph_nested_graph(self):
    """A graph flow nested in a graph flow compiles to begin/end nodes."""
    a, b, c, d, e, f, g = test_utils.make_many(7)
    flo = gf.Flow("test")
    flo.add(a, b, c, d)
    flo2 = gf.Flow('test2')
    flo2.add(e, f, g)
    flo.add(flo2)

    # Fixed bug-prone shadowing: the original rebound 'g' (one of the
    # tasks from make_many) to the compiled graph; use a distinct name.
    graph = _replicate_graph_with_names(
        compiler.PatternCompiler(flo).compile())
    # 7 tasks + the two flow nodes + two flow terminator ([$]) nodes.
    self.assertEqual(11, len(graph))
    self.assertItemsEqual(graph.edges(), [
        ('test', 'a'),
        ('test', 'b'),
        ('test', 'c'),
        ('test', 'd'),
        ('test', 'test2'),
        ('test2', 'e'),
        ('test2', 'f'),
        ('test2', 'g'),
        ('e', 'test2[$]'),
        ('f', 'test2[$]'),
        ('g', 'test2[$]'),
        ('test2[$]', 'test[$]'),
        ('a', 'test[$]'),
        ('b', 'test[$]'),
        ('c', 'test[$]'),
        ('d', 'test[$]'),
    ])
def test_iter_nodes(self):
    """iter_nodes yields every directly-added node with empty metadata."""
    task1 = _task('task1', provides=['a'], requires=['c'])
    task2 = _task('task2', provides=['b'], requires=['a'])
    task3 = _task('task3', provides=['c'])
    f1 = gf.Flow('nested')
    f1.add(task3)
    tasks = set([task1, task2, f1])
    f = gf.Flow('test').add(task1, task2, f1)
    for (n, data) in f.iter_nodes():
        # assertIn gives a clearer failure message than assertTrue(x in y).
        self.assertIn(n, tasks)
        self.assertDictEqual({}, data)
def test_graph_flatten_nested_graph(self):
    """Flattening a nested graph flow yields all tasks and no edges."""
    a, b, c, d, e, f, g = _make_many(7)
    flo = gf.Flow("test")
    flo.add(a, b, c, d)
    flo2 = gf.Flow('test2')
    flo2.add(e, f, g)
    flo.add(flo2)

    # Fixed bug-prone shadowing: the original rebound 'g' (one of the
    # tasks from _make_many) to the flattened graph; use a distinct name.
    graph = f_utils.flatten(flo)
    self.assertEqual(7, len(graph))
    self.assertEqual(0, graph.number_of_edges())
def test_graph_flow_stringy(self):
    """str() of a graph flow reports its name and current length."""
    empty = gf.Flow('test')
    self.assertEqual('graph_flow.Flow: test(len=0)', str(empty))

    populated = gf.Flow('test')
    populated.add(_task(name='task1'),
                  _task(name='task2'),
                  _task(name='task3'))
    self.assertEqual('graph_flow.Flow: test(len=3)', str(populated))
def test_iter_links(self):
    """iter_links yields (u, v, metadata) triples between added nodes."""
    task1 = _task('task1')
    task2 = _task('task2')
    task3 = _task('task3')
    f1 = gf.Flow('nested')
    f1.add(task3)
    tasks = set([task1, task2, f1])
    f = gf.Flow('test').add(task1, task2, f1)
    for (u, v, data) in f.iter_links():
        # assertIn gives a clearer failure message than assertTrue(x in y).
        self.assertIn(u, tasks)
        self.assertIn(v, tasks)
        self.assertDictEqual({}, data)
def test_graph_nested_graph(self):
    """Compiling a nested graph flow yields all tasks and no edges."""
    a, b, c, d, e, f, g = test_utils.make_many(7)
    flo = gf.Flow("test")
    flo.add(a, b, c, d)
    flo2 = gf.Flow('test2')
    flo2.add(e, f, g)
    flo.add(flo2)

    compilation = compiler.PatternCompiler(flo).compile()
    # Fixed bug-prone shadowing: the original rebound 'g' (one of the
    # tasks from make_many) to the execution graph; use a distinct name.
    graph = compilation.execution_graph
    self.assertEqual(7, len(graph))
    self.assertEqual(0, graph.number_of_edges())
def get_deprovision_flow():
    """Build the instance deprovisioning flow.

    Returns a tuple of (whole flow, dict) where the dict maps 'pre',
    'main' and 'post' to the three stage graph flows for pre-execution
    customisation.
    """
    stages = {
        'pre': gf.Flow('PreDestroyInstance').add(
            GetServer('get_server', provides="server")),
        'main': gf.Flow('DestroyInstance').add(
            DeprovisionInstance('deprovision_instance')),
        'post': gf.Flow('PostDestroyInstance').add(
            DeleteSecurityGroup('delete_security_group')),
    }
    whole_flow = lf.Flow('DeprovisionInstance').add(
        stages['pre'], stages['main'], stages['post'])
    return (whole_flow, stages)
def build_flow(self, flow_name, flow_type='graph'):
    """Create a new empty flow of the requested type.

    :param flow_name: name for the new flow
    :param flow_type: 'linear' or 'graph' (the default)
    :raises ValueError: for any other flow type
    """
    factories = {
        'linear': linear_flow.Flow,
        'graph': graph_flow.Flow,
    }
    try:
        factory = factories[flow_type]
    except KeyError:
        raise ValueError(_("unsupported flow type: %s") % flow_type)
    return factory(flow_name)
def test_graph_nested_requires(self):
    # 'a' provides symbol 'x' which task 'c' (inside the nested linear
    # flow) requires, so the compiler must draw a 'reasons' edge from
    # 'a' into the nested flow node.
    a = test_utils.ProvidesRequiresTask('a', provides=['x'], requires=[])
    b = test_utils.ProvidesRequiresTask('b', provides=[], requires=[])
    c = test_utils.ProvidesRequiresTask('c', provides=[], requires=['x'])
    inner_flo = lf.Flow("test2").add(b, c)
    flo = gf.Flow("test").add(a, inner_flo)

    g = _replicate_graph_with_names(
        compiler.PatternCompiler(flo).compile())
    # 7 nodes: 'test', 'test2', tasks a/b/c, plus the two flow
    # terminator ('[$]') nodes.
    self.assertEqual(7, len(g))
    self.assertItemsEqual(g.edges(data=True), [
        ('test', 'a', {'invariant': True}),
        ('test2', 'b', {'invariant': True}),
        # The data dependency introduced by 'x'.
        ('a', 'test2', {'reasons': set(['x'])}),
        ('b', 'c', {'invariant': True}),
        ('c', 'test2[$]', {'invariant': True}),
        ('test2[$]', 'test[$]', {'invariant': True}),
    ])
    self.assertItemsEqual(['test'], list(g.no_predecessors_iter()))
    self.assertItemsEqual(['test[$]'], list(g.no_successors_iter()))
def setUpClass(cls):
    """Create a workflow that flowdetails can be attached to in tests."""
    workflow_id = uuidutils.generate_uuid()
    workflow_name = 'wf-%s' % (workflow_id)
    cls.wfs.append(flow.Flow(workflow_name, None, workflow_id))
def execute(self, actions):
    """Build and run a parallel taskflow graph flow from ``actions``.

    Each action becomes a task; edges follow each action's ``parents``
    list, guarded by ``self.decider``. Returns the executed flow.
    Any failure is wrapped in WorkflowExecutionException.
    """
    try:
        # NOTE(jed) We want to have a strong separation of concern
        # between the Watcher planner and the Watcher Applier in order
        # to us the possibility to support several workflow engine.
        # We want to provide the 'taskflow' engine by
        # default although we still want to leave the possibility for
        # the users to change it.
        # The current implementation uses graph with linked actions.
        # todo(jed) add olso conf for retry and name
        flow = gf.Flow("watcher_flow")
        # First pass: create one task per action, remembered by uuid so
        # the second pass can wire parent->child links.
        actions_uuid = {}
        for a in actions:
            task = TaskFlowActionContainer(a, self)
            flow.add(task)
            actions_uuid[a.uuid] = task
        # Second pass: add a guarded edge from each parent to its child.
        for a in actions:
            for parent_id in a.parents:
                flow.link(actions_uuid[parent_id], actions_uuid[a.uuid],
                          decider=self.decider)
        e = engines.load(flow, engine='parallel',
                         max_workers=self.config.max_workers)
        e.run()
        return flow
    except Exception as e:
        raise exception.WorkflowExecutionException(error=e)
def test_shadow_graph(self):
    # Two tasks both provide 'dog'; 'customer2' is added without
    # requirement resolution and linked to the washer manually, so the
    # washer's scope must contain both providers.
    r = gf.Flow("root")
    customer = test_utils.ProvidesRequiresTask("customer",
                                               provides=['dog'],
                                               requires=[])
    customer2 = test_utils.ProvidesRequiresTask("customer2",
                                                provides=['dog'],
                                                requires=[])
    washer = test_utils.ProvidesRequiresTask("washer",
                                             requires=['dog'],
                                             provides=['wash'])
    r.add(customer, washer)
    # resolve_requires=False keeps 'customer2' from being auto-linked to
    # consumers of 'dog'; the explicit link below wires it in instead.
    r.add(customer2, resolve_requires=False)
    r.link(customer2, washer)

    c = compiler.PatternCompiler(r).compile()

    # The order currently is *not* guaranteed to be 'customer' before
    # 'customer2' or the reverse, since either can occur before the
    # washer; since *either* is a valid topological ordering of the
    # dependencies...
    #
    # This may be different after/if the following is resolved:
    #
    # https://github.com/networkx/networkx/issues/1181 (and a few others)
    self.assertEqual(set(['customer', 'customer2']),
                     set(_get_scopes(c, washer)[0]))
    self.assertEqual([], _get_scopes(c, customer2))
    self.assertEqual([], _get_scopes(c, customer))
def test_graph(self):
    """A flat graph flow of four tasks compiles to 6 nodes and 8 edges."""
    a, b, c, d = test_utils.make_many(4)
    flo = gf.Flow("test")
    flo.add(a, b, c, d)
    execution_graph = compiler.PatternCompiler(flo).compile().execution_graph
    self.assertEqual(6, len(execution_graph))
    self.assertEqual(8, execution_graph.number_of_edges())
def test_nested_provides_graph_reverts_correctly(self):
    """A failure inside a retried nested flow reverts tasks in both scopes."""
    flow = gf.Flow("test").add(
        utils.ProgressingTask('a', requires=['x']),
        lf.Flow("test2", retry=retry.Times(2)).add(
            utils.ProgressingTask('b', provides='x'),
            utils.FailingTask('c')))
    engine = self._make_engine(flow)
    engine.compile()
    engine.prepare()
    # Pre-seed storage so it looks like 'b' and 'a' already succeeded.
    engine.storage.save('test2_retry', 1)
    engine.storage.save('b', 11)
    engine.storage.save('a', 10)
    with utils.CaptureListener(engine, capture_flow=False) as capturer:
        # assertRaisesRegexp is a deprecated alias; use assertRaisesRegex.
        self.assertRaisesRegex(RuntimeError, '^Woot', engine.run)
    expected = ['c.t RUNNING',
                'c.t FAILURE(Failure: RuntimeError: Woot!)',
                'a.t REVERTING', 'c.t REVERTING', 'a.t REVERTED',
                'c.t REVERTED', 'b.t REVERTING', 'b.t REVERTED']
    self.assertItemsEqual(capturer.values[:8], expected)
    # Task 'a' was or was not executed again, both cases are ok.
    self.assertIsSuperAndSubsequence(capturer.values[8:], [
        'b.t RUNNING',
        'c.t FAILURE(Failure: RuntimeError: Woot!)',
        'b.t REVERTED',
    ])
    self.assertEqual(engine.storage.get_flow_state(), st.REVERTED)
def test_graph_links(self):
    """Manual links in a graph flow show up as 'manual' edges."""
    a, b, c, d = test_utils.make_many(4)
    flo = gf.Flow("test")
    flo.add(a, b, c, d)
    flo.link(a, b)
    flo.link(b, c)
    flo.link(c, d)

    graph = _replicate_graph_with_names(
        compiler.PatternCompiler(flo).compile())
    self.assertEqual(6, len(graph))
    self.assertItemsEqual(graph.edges(data=True), [
        ('test', 'a', {'invariant': True}),
        ('a', 'b', {'manual': True}),
        ('b', 'c', {'manual': True}),
        ('c', 'd', {'manual': True}),
        ('d', 'test[$]', {'invariant': True}),
    ])
    self.assertItemsEqual(['test'], graph.no_predecessors_iter())
    self.assertItemsEqual(['test[$]'], graph.no_successors_iter())
def test_retry_in_graph_flow_with_tasks(self):
    # A flow-level retry must become the flow's first child and gain a
    # 'retry' edge to each task that has no other predecessor.
    r = retry.AlwaysRevert("r")
    a, b, c = test_utils.make_many(3)
    flo = gf.Flow("test", r).add(a, b, c).link(b, c)

    g = _replicate_graph_with_names(
        compiler.PatternCompiler(flo).compile())
    self.assertItemsEqual(g.edges(data=True), [
        ('test', 'r', {'invariant': True}),
        ('r', 'a', {'invariant': True, 'retry': True}),
        ('r', 'b', {'invariant': True, 'retry': True}),
        # The explicit link(b, c) becomes a 'manual' edge.
        ('b', 'c', {'manual': True}),
        ('a', 'test[$]', {'invariant': True}),
        ('c', 'test[$]', {'invariant': True}),
    ])
    self.assertItemsEqual(['test'], g.no_predecessors_iter())
    self.assertItemsEqual(['test[$]'], g.no_successors_iter())

    # Every task node is annotated with its governing retry object.
    # NOTE(review): 'g.node' is the pre-2.0 networkx attribute API —
    # presumably this suite pins networkx < 2.0; confirm before porting.
    self.assertIs(r, g.node['a']['retry'])
    self.assertIs(r, g.node['b']['retry'])
    self.assertIs(r, g.node['c']['retry'])
def test_nested_provides_graph_retried_correctly(self):
    # 'a' requires 'x' which is provided by 'b' inside a nested linear
    # flow guarded by a retry; storage is pre-seeded so 'c' looks failed
    # and the run must revert and retry only the nested subflow.
    flow = gf.Flow("test").add(
        utils.ProgressingTask('a', requires=['x']),
        lf.Flow("test2", retry=retry.Times(2)).add(
            utils.ProgressingTask('b', provides='x'),
            utils.ProgressingTask('c')))
    engine = self._make_engine(flow)
    engine.compile()
    engine.prepare()
    engine.storage.save('test2_retry', 1)
    engine.storage.save('b', 11)
    # pretend that 'c' failed
    fail = failure.Failure.from_exception(RuntimeError('Woot!'))
    engine.storage.save('c', fail, st.FAILURE)

    with utils.CaptureListener(engine, capture_flow=False) as capturer:
        engine.run()
    # First the nested flow ('c' then 'b') is reverted...
    expected = ['c.t REVERTING', 'c.t REVERTED',
                'b.t REVERTING', 'b.t REVERTED']
    self.assertItemsEqual(capturer.values[:4], expected)
    # ...then the retry kicks in and everything runs to success.
    expected = ['test2_retry.r RETRYING', 'b.t PENDING', 'c.t PENDING',
                'test2_retry.r RUNNING', 'test2_retry.r SUCCESS(2)',
                'b.t RUNNING', 'b.t SUCCESS(5)', 'a.t RUNNING',
                'c.t RUNNING', 'a.t SUCCESS(5)', 'c.t SUCCESS(5)']
    self.assertItemsEqual(expected, capturer.values[4:])
    self.assertEqual(engine.storage.get_flow_state(), st.SUCCESS)
def create_graph_flow(name, objs, subflow_factory_fn, *args, **kwargs):
    """
    Walk over model instances passed in ``objs`` list and their dependencies
    and create graph flow using ``subflow_factory_fn`` function in order to
    create subflow for each object and/or dependency.

    :param name: name of resulting flow
    :param objs: iterable of objects
    :param subflow_factory_fn: function that will create subflows
    :param args: additional positional arguments that will be passed to
                 subflow factory function
    :param kwargs: additional named arguments that will be passed to subflow
                   factory function
    :return: graph flow instance
    """

    def _create_and_link_subflow(obj):
        # Recursively build the subflow for ``obj`` and all of its
        # dependencies, memoizing by primary key so each object yields
        # exactly one subflow even when reached via several paths.
        obj_id = obj.primary_key
        if obj_id in created:
            return created[obj_id]
        subflow = subflow_factory_fn(obj, *args, **kwargs)
        if subflow is None:
            # Factory declined to build a subflow for this object; any
            # dependents simply get no link to it.
            return None
        graph.add(subflow)
        # Memoize before recursing so shared dependencies are reused.
        created[obj_id] = subflow
        for dep in obj.dependencies():
            dep_subflow = _create_and_link_subflow(dep)
            if dep_subflow is not None:
                # Dependencies must run before their dependents.
                graph.link(dep_subflow, subflow)
        return subflow

    created = {}
    graph = graph_flow.Flow(name)
    for obj in objs:
        _create_and_link_subflow(obj)
    return graph
def test_graph_flow_four_tasks_revert(self):
    """All completed tasks are reverted (in reverse) when task3 fails."""
    flow = gf.Flow('g-4-failing').add(
        utils.ProgressingTask(name='task4', provides='d', requires=['c']),
        utils.ProgressingTask(name='task2', provides='b', requires=['a']),
        utils.FailingTask(name='task3', provides='c', requires=['b']),
        utils.ProgressingTask(name='task1', provides='a'))
    engine = self._make_engine(flow)
    with utils.CaptureListener(engine, capture_flow=False) as capturer:
        self.assertFailuresRegexp(RuntimeError, '^Woot', engine.run)
    # Forward progress up to (and including) the failure...
    expected = ['task1.t RUNNING', 'task1.t SUCCESS(5)',
                'task2.t RUNNING', 'task2.t SUCCESS(5)',
                'task3.t RUNNING',
                'task3.t FAILURE(Failure: RuntimeError: Woot!)']
    # ...then reverting in reverse order of completion.
    expected += ['%s.t %s' % (name, state)
                 for name in ('task3', 'task2', 'task1')
                 for state in ('REVERTING', 'REVERTED')]
    self.assertEqual(expected, capturer.values)
    self.assertEqual(engine.storage.get_flow_state(), states.REVERTED)
def test_task_graph_property(self):
    """An engine exposes its compiled execution graph as a DiGraph."""
    flow = gf.Flow('test')
    flow.add(utils.TaskNoRequiresNoReturns(name='task1'),
             utils.TaskNoRequiresNoReturns(name='task2'))
    engine = self._make_engine(flow)
    self.assertIsInstance(engine.execution_graph, networkx.DiGraph)
def test_graph_flow_resolve_existing(self):
    """resolve_existing=True satisfies requirements of already-added tasks."""
    task1 = _task(name='task1', requires=['a', 'b'])
    task2 = _task(name='task2', provides=['a', 'b'])
    f = gf.Flow('test')
    f.add(task1)
    f.add(task2, resolve_existing=True)
    # set() is the idiomatic empty set (set([]) builds a throwaway list).
    self.assertEqual(set(), f.requires)
def test_dependent(self):
    """Each task's scope contains every prior task along the data chain."""
    root = gf.Flow("root")
    make_task = test_utils.ProvidesRequiresTask
    customer = make_task("customer", provides=['dog'], requires=[])
    washer = make_task("washer", requires=['dog'], provides=['wash'])
    dryer = make_task("dryer", requires=['dog', 'wash'],
                      provides=['dry_dog'])
    shaved = make_task("shaver", requires=['dry_dog'],
                       provides=['shaved_dog'])
    happy_customer = make_task("happy_customer", requires=['shaved_dog'],
                               provides=['happiness'])
    root.add(customer, washer, dryer, shaved, happy_customer)

    comp = compiler.PatternCompiler(root).compile()
    self.assertEqual([], _get_scopes(comp, customer))
    self.assertEqual([['washer', 'customer']], _get_scopes(comp, dryer))
    self.assertEqual([['shaver', 'dryer', 'washer', 'customer']],
                     _get_scopes(comp, happy_customer))
def execute(self, actions):
    """Build and run a taskflow graph flow that chains ``actions``.

    Actions are linked in list order with ``self.decider`` guarding each
    edge; a single-action plan gets a trailing no-op task so the flow
    still has an edge. Failures are wrapped in
    WorkflowExecutionException.
    """
    try:
        # NOTE(jed) We want to have a strong separation of concern
        # between the Watcher planner and the Watcher Applier in order
        # to us the possibility to support several workflow engine.
        # We want to provide the 'taskflow' engine by
        # default although we still want to leave the possibility for
        # the users to change it.
        # todo(jed) we need to change the way the actions are stored.
        # The current implementation only use a linked list of actions.
        # todo(jed) add olso conf for retry and name
        flow = gf.Flow("watcher_flow")
        previous = None
        for a in actions:
            task = TaskFlowActionContainer(a, self)
            flow.add(task)
            if previous is None:
                # First action: nothing to link yet, just remember it.
                previous = task
                # we have only one Action in the Action Plan
                if len(actions) == 1:
                    nop = TaskFlowNop()
                    flow.add(nop)
                    flow.link(previous, nop)
            else:
                # decider == guard (UML)
                flow.link(previous, task, decider=self.decider)
                previous = task
        e = engines.load(flow)
        e.run()
    except Exception as e:
        raise exception.WorkflowExecutionException(error=e)
def test_graph_cyclic_dependency(self):
    """Adding tasks whose provides/requires form a cycle must fail."""
    flow = gf.Flow('g-3-cyclic')
    # assertRaisesRegexp is a deprecated alias; use assertRaisesRegex.
    self.assertRaisesRegex(
        exceptions.DependencyFailure, '^No path',
        flow.add,
        utils.TaskOneArgOneReturn(provides='a', requires=['b']),
        utils.TaskOneArgOneReturn(provides='b', requires=['c']),
        utils.TaskOneArgOneReturn(provides='c', requires=['a']))