def test_retries_hierarchy(self):
    """Nested flows: each atom is owned by its closest enclosing retry."""
    outer_retry = retry.AlwaysRevert("cp1")
    inner_retry = retry.AlwaysRevert("cp2")
    a, b, c, d = test_utils.make_many(4)
    inner_flo = lf.Flow("test", inner_retry).add(b, c)
    flo = lf.Flow("test", outer_retry).add(a, inner_flo, d)
    g = compiler.PatternCompiler(flo).compile().execution_graph
    self.assertEqual(6, len(g))
    expected_edges = [
        (outer_retry, a, {'retry': True}),
        (a, inner_retry, {'invariant': True}),
        (inner_retry, b, {'retry': True}),
        (b, c, {'invariant': True}),
        (c, d, {'invariant': True}),
    ]
    self.assertItemsEqual(g.edges(data=True), expected_edges)
    # Atoms directly inside the outer flow belong to the outer retry...
    self.assertIs(outer_retry, g.node[a]['retry'])
    self.assertIs(outer_retry, g.node[d]['retry'])
    # ...while atoms inside the nested flow belong to the inner retry.
    self.assertIs(inner_retry, g.node[b]['retry'])
    self.assertIs(inner_retry, g.node[c]['retry'])
    self.assertIs(outer_retry, g.node[inner_retry]['retry'])
    self.assertIs(None, g.node[outer_retry].get('retry'))
def test_retry_in_nested_flows(self):
    """A nested flow's retry is governed by the parent flow's retry."""
    outer = retry.AlwaysRevert("c1")
    inner = retry.AlwaysRevert("c2")
    nested_flo = lf.Flow("test2", inner)
    flo = lf.Flow("test", outer).add(nested_flo)
    g = _replicate_graph_with_names(
        compiler.PatternCompiler(flo).compile())
    self.assertEqual(6, len(g))
    expected_edges = [
        ('test', 'c1', {'invariant': True}),
        ('c1', 'test2', {'invariant': True, 'retry': True}),
        ('test2', 'c2', {'invariant': True}),
        ('c2', 'test2[$]', {'invariant': True}),
        ('test2[$]', 'test[$]', {'invariant': True}),
    ]
    self.assertItemsEqual(g.edges(data=True), expected_edges)
    self.assertIs(outer, g.node['c2']['retry'])
    self.assertItemsEqual(['test'], list(g.no_predecessors_iter()))
    self.assertItemsEqual(['test[$]'], list(g.no_successors_iter()))
def test_retries_hierarchy(self):
    """Name-based graph: retry ownership follows flow nesting."""
    outer_retry = retry.AlwaysRevert("c1")
    inner_retry = retry.AlwaysRevert("c2")
    a, b, c, d = test_utils.make_many(4)
    inner_flo = lf.Flow("test2", inner_retry).add(b, c)
    flo = lf.Flow("test", outer_retry).add(a, inner_flo, d)
    g = _replicate_graph_with_names(
        compiler.PatternCompiler(flo).compile())
    self.assertEqual(10, len(g))
    expected_edges = [
        ('test', 'c1', {'invariant': True}),
        ('c1', 'a', {'invariant': True, 'retry': True}),
        ('a', 'test2', {'invariant': True}),
        ('test2', 'c2', {'invariant': True}),
        ('c2', 'b', {'invariant': True, 'retry': True}),
        ('b', 'c', {'invariant': True}),
        ('c', 'test2[$]', {'invariant': True}),
        ('test2[$]', 'd', {'invariant': True}),
        ('d', 'test[$]', {'invariant': True}),
    ]
    self.assertItemsEqual(g.edges(data=True), expected_edges)
    # Outer-flow atoms use the outer retry, nested-flow atoms the inner.
    self.assertIs(outer_retry, g.node['a']['retry'])
    self.assertIs(outer_retry, g.node['d']['retry'])
    self.assertIs(inner_retry, g.node['b']['retry'])
    self.assertIs(inner_retry, g.node['c']['retry'])
    self.assertIs(outer_retry, g.node['c2']['retry'])
    self.assertIsNone(g.node['c1'].get('retry'))
def test_retry_in_nested_flows(self):
    """Object-based graph: nested retry is linked under the outer one."""
    parent_retry = retry.AlwaysRevert("c1")
    child_retry = retry.AlwaysRevert("c2")
    flo = lf.Flow("test", parent_retry).add(lf.Flow("test2", child_retry))
    g = compiler.PatternCompiler(flo).compile().execution_graph
    self.assertEqual(2, len(g))
    self.assertItemsEqual(
        g.edges(data=True),
        [(parent_retry, child_retry, {'retry': True})])
    self.assertIs(parent_retry, g.node[child_retry]['retry'])
    self.assertItemsEqual([parent_retry], g.no_predecessors_iter())
    self.assertItemsEqual([child_retry], g.no_successors_iter())
def test_retry_in_graph_flow_with_tasks(self):
    """Graph flow: retry fans out to unlinked tasks; manual links remain."""
    r = retry.AlwaysRevert("cp")
    a, b, c = test_utils.make_many(3)
    flo = gf.Flow("test", r).add(a, b, c).link(b, c)
    g = compiler.PatternCompiler(flo).compile().execution_graph
    self.assertEqual(5, len(g))
    expected_edges = [
        (flo, r, {'invariant': True}),
        (r, a, {'invariant': True, 'retry': True}),
        (r, b, {'invariant': True, 'retry': True}),
        (b, c, {'manual': True}),
    ]
    self.assertItemsEqual(g.edges(data=True), expected_edges)
    self.assertItemsEqual([flo], g.no_predecessors_iter())
    self.assertItemsEqual([a, c], g.no_successors_iter())
    # Every task in the flow is owned by the flow's retry controller.
    for node in (a, b, c):
        self.assertIs(r, g.node[node]['retry'])
def test_retry_in_unordered_flow_with_tasks(self):
    """Unordered flow: retry precedes each task; all join at test[$]."""
    retry_ctl = retry.AlwaysRevert("c")
    a, b = test_utils.make_many(2)
    flo = uf.Flow("test", retry_ctl).add(a, b)
    g = _replicate_graph_with_names(
        compiler.PatternCompiler(flo).compile())
    self.assertEqual(5, len(g))
    expected_edges = [
        ('test', 'c', {'invariant': True}),
        ('c', 'a', {'invariant': True, 'retry': True}),
        ('c', 'b', {'invariant': True, 'retry': True}),
        ('b', 'test[$]', {'invariant': True}),
        ('a', 'test[$]', {'invariant': True}),
    ]
    self.assertItemsEqual(g.edges(data=True), expected_edges)
    self.assertItemsEqual(['test'], list(g.no_predecessors_iter()))
    self.assertItemsEqual(['test[$]'], list(g.no_successors_iter()))
    self.assertIs(retry_ctl, g.node['a']['retry'])
    self.assertIs(retry_ctl, g.node['b']['retry'])
def test_retry_in_graph_flow_with_tasks(self):
    """Name-based graph flow: retry edges plus the manual b->c link."""
    r = retry.AlwaysRevert("r")
    a, b, c = test_utils.make_many(3)
    flo = gf.Flow("test", r).add(a, b, c).link(b, c)
    g = _replicate_graph_with_names(
        compiler.PatternCompiler(flo).compile())
    expected_edges = [
        ('test', 'r', {'invariant': True}),
        ('r', 'a', {'invariant': True, 'retry': True}),
        ('r', 'b', {'invariant': True, 'retry': True}),
        ('b', 'c', {'manual': True}),
        ('a', 'test[$]', {'invariant': True}),
        ('c', 'test[$]', {'invariant': True}),
    ]
    self.assertItemsEqual(g.edges(data=True), expected_edges)
    self.assertItemsEqual(['test'], g.no_predecessors_iter())
    self.assertItemsEqual(['test[$]'], g.no_successors_iter())
    # All three tasks are owned by the flow's retry controller.
    for name in ('a', 'b', 'c'):
        self.assertIs(r, g.node[name]['retry'])
def test_retry_in_unordered_flow_with_tasks(self):
    """Object-based unordered flow: retry fans out to each task."""
    retry_ctl = retry.AlwaysRevert("c")
    a, b = test_utils.make_many(2)
    flo = uf.Flow("test", retry_ctl).add(a, b)
    g = compiler.PatternCompiler(flo).compile().execution_graph
    self.assertEqual(4, len(g))
    expected_edges = [
        (flo, retry_ctl, {'invariant': True}),
        (retry_ctl, a, {'invariant': True, 'retry': True}),
        (retry_ctl, b, {'invariant': True, 'retry': True}),
    ]
    self.assertItemsEqual(g.edges(data=True), expected_edges)
    self.assertItemsEqual([flo], g.no_predecessors_iter())
    self.assertItemsEqual([a, b], g.no_successors_iter())
    self.assertIs(retry_ctl, g.node[a]['retry'])
    self.assertIs(retry_ctl, g.node[b]['retry'])
def get_flow(**kwargs):
    """Return task flow

    :param task_id: Task ID
    :param task_type: Type of the task
    :param task_repo: Task repo
    :param image_repo: Image repository used
    :param image_id: ID of the Image to be processed
    :param uri: uri for the image file
    """
    task_id = kwargs.get('task_id')
    task_type = kwargs.get('task_type')
    task_repo = kwargs.get('task_repo')
    image_repo = kwargs.get('image_repo')
    image_id = kwargs.get('image_id')
    import_method = kwargs.get('import_req')['method']['name']
    uri = kwargs.get('import_req')['method'].get('uri')

    # Compute the staging-uri separator exactly once.  Previously it was
    # only assigned inside the 'glance-direct' branch (and conditionally
    # in the 'web-download' branch), so 'web-download' could raise a
    # NameError at separator.join() when node_staging_uri ended with '/'.
    separator = ''
    if not CONF.node_staging_uri.endswith('/'):
        separator = '/'

    if not uri and import_method == 'glance-direct':
        uri = separator.join((CONF.node_staging_uri, str(image_id)))

    flow = lf.Flow(task_type, retry=retry.AlwaysRevert())

    if import_method == 'web-download':
        # Download to the staging area first, then import from there.
        downloadToStaging = internal_plugins.get_import_plugin(**kwargs)
        flow.add(downloadToStaging)
        file_uri = separator.join((CONF.node_staging_uri, str(image_id)))
    else:
        file_uri = uri

    flow.add(_VerifyStaging(task_id, task_type, task_repo, file_uri))

    for plugin in import_plugins.get_import_plugins(**kwargs):
        flow.add(plugin)

    import_to_store = _ImportToStore(task_id, task_type, image_repo,
                                     file_uri, image_id)
    flow.add(import_to_store)

    # Staged data is removed in its own sub-flow after the import.
    delete_task = lf.Flow(task_type).add(_DeleteFromFS(task_id, task_type))
    flow.add(delete_task)

    save_task = _SaveImage(task_id, task_type, image_repo, image_id)
    flow.add(save_task)

    complete_task = _CompleteTask(task_id, task_type, task_repo, image_id)
    flow.add(complete_task)

    # Mark the image as importing before handing the flow back.
    image = image_repo.get(image_id)
    from_state = image.status
    image.status = 'importing'
    image_repo.save(image, from_state=from_state)

    return flow
def test_unordered_flow_with_retry_fully_satisfies(self):
    """Retry provides everything the added task requires."""
    ret = retry.AlwaysRevert(provides=['b', 'a'])
    f = uf.Flow('test', ret)
    f.add(_task(name='task1', requires=['a']))
    self.assertIs(f.retry, ret)
    self.assertEqual('test_retry', ret.name)
    # 'a' is satisfied by the retry, so the flow requires nothing.
    self.assertEqual(set(), f.requires)
    self.assertEqual({'b', 'a'}, f.provides)
def test_unordered_flow_with_retry(self):
    """Retry requirements/provides surface on the unordered flow."""
    ret = retry.AlwaysRevert(requires=['a'], provides=['b'])
    f = uf.Flow('test', ret)
    self.assertIs(f.retry, ret)
    self.assertEqual('test_retry', ret.name)
    self.assertEqual({'a'}, f.requires)
    self.assertEqual({'b'}, f.provides)
def test_graph_flow_with_retry(self):
    """Retry requirements/provides surface on the graph flow."""
    ret = retry.AlwaysRevert(requires=['a'], provides=['b'])
    f = gf.Flow('test', ret)
    self.assertIs(f.retry, ret)
    self.assertEqual(ret.name, 'test_retry')
    self.assertEqual(f.requires, {'a'})
    self.assertEqual(f.provides, {'b'})
def test_graph_flow_retry_and_task(self):
    """Flow requires/provides merge retry symbols with task symbols."""
    flow_retry = retry.AlwaysRevert('rt', requires=['x', 'y'],
                                    provides=['a', 'b'])
    flow = gf.Flow('gf', flow_retry)
    task = utils.TaskMultiArgOneReturn(rebind=['a', 'x', 'c'],
                                       provides=['z'])
    flow.add(task)
    # 'a' comes from the retry; 'x', 'y' and 'c' must come from outside.
    self.assertEqual(flow.requires, {'x', 'y', 'c'})
    self.assertEqual(flow.provides, {'a', 'b', 'z'})
def get_flow(**kwargs):
    """Return task flow

    :param task_id: Task ID
    :param task_type: Type of the task
    :param task_repo: Task repo
    :param image_repo: Image repository used
    :param image_id: ID of the Image to be processed
    :param uri: uri for the image file
    """
    task_id = kwargs.get('task_id')
    task_type = kwargs.get('task_type')
    task_repo = kwargs.get('task_repo')
    image_repo = kwargs.get('image_repo')
    image_id = kwargs.get('image_id')
    uri = kwargs.get('uri')

    if not uri:
        # Default to the node staging area when no uri was supplied.
        separator = ''
        # Bug fix: str has no ``endsWith`` method (that previously raised
        # AttributeError); the correct spelling is ``endswith``.
        if not CONF.node_staging_uri.endswith('/'):
            separator = '/'
        uri = separator.join((CONF.node_staging_uri, str(image_id)))

    # Bug fix: the linear-flow class is ``lf.Flow`` (capitalized), as used
    # for ``delete_task`` below; ``lf.flow`` raised AttributeError.
    flow = lf.Flow(task_type, retry=retry.AlwaysRevert())

    flow.add(_VerifyStaging(task_id, task_type, uri))

    # TODO(jokke): For the pluggable tasks like image verification or
    # image conversion we need to implement the plugin logic here.

    import_to_store = _ImportToStore(task_id, task_type, image_repo, uri,
                                     rebind_args={'image_id': image_id})
    flow.add(import_to_store)

    delete_task = lf.Flow(task_type).add(_DeleteFromFS(task_id, task_type))
    flow.add(delete_task)

    save_task = _SaveImage(task_id, task_type, image_repo,
                           rebind_args={'image_id': image_id})
    flow.add(save_task)

    complete_task = _CompleteTask(task_id, task_type, task_repo,
                                  rebind_args={'image_id': image_id})
    flow.add(complete_task)

    return flow
def get_flow(**kwargs):
    """Return task flow

    :param task_id: Task ID
    :param task_type: Type of the task
    :param task_repo: Task repo
    :param image_repo: Image repository used
    :param image_id: ID of the Image to be processed
    :param uri: uri for the image file
    """
    task_id = kwargs.get('task_id')
    task_type = kwargs.get('task_type')
    task_repo = kwargs.get('task_repo')
    image_repo = kwargs.get('image_repo')
    image_id = kwargs.get('image_id')
    uri = kwargs.get('uri')

    if not uri:
        # Default to the node staging area when no uri was supplied.
        sep = '' if CONF.node_staging_uri.endswith('/') else '/'
        uri = sep.join((CONF.node_staging_uri, str(image_id)))

    flow = lf.Flow(task_type, retry=retry.AlwaysRevert())
    flow.add(_VerifyStaging(task_id, task_type, task_repo, uri))

    # Any registered import plugins run between staging verification
    # and the actual import into the store.
    for plugin in import_plugins.get_import_plugins(**kwargs):
        flow.add(plugin)

    flow.add(_ImportToStore(task_id, task_type, image_repo, uri, image_id))
    flow.add(lf.Flow(task_type).add(_DeleteFromFS(task_id, task_type)))
    flow.add(_SaveImage(task_id, task_type, image_repo, image_id))
    flow.add(_CompleteTask(task_id, task_type, task_repo, image_id))

    return flow
def start_flow(name, data):
    """Build and run a small linear flow, then print the fetched 'name'.

    :param name: name given to the flow
    :param data: initial storage contents passed to the engine
    """
    flow_api = linear_flow.Flow(name, retry=retry.AlwaysRevert())
    flow_api.add(
        Patter_data(requires=['name', 'size'],
                    provides={'name', 'size', 'new'}),
        Start(),
        Finish(),
    )
    try:
        engine = engines.load(flow_api,
                              engine_conf={'engine': 'serial'},
                              store=data)
        engine.run()
    except Exception:
        # Narrowed from a bare ``except:`` so SystemExit and
        # KeyboardInterrupt are no longer swallowed.
        print('workflow is failed')
    # NOTE(review): if engines.load() itself raised, ``engine`` is unbound
    # here and the fetch will fail — same as the original behavior.
    data_f = engine.storage.fetch('name')
    print('that is the fetch data done:')
    print(data_f)
def create_migration_flow(obj, config, migration):
    """
    Creates migration flow for object ``obj`` based on configuration
    ``config`` migration ``migration``.
    :param obj: model.Model instance
    :param config: configuration
    :param migration: migration (part of configuration)
    :return: migration flow for an object
    """
    # Nothing to do when the object is already linked to the destination.
    if obj.find_link(config.clouds[migration.destination]) is not None:
        return None
    flow_factories = migration.migration_flow_factories
    obj_class = obj.get_class()
    if obj_class not in flow_factories:
        raise RuntimeError('Failed to find migration flow factory')
    flow = linear_flow.Flow('top_level_' + taskflow_utils.object_name(obj),
                            retry=retry.AlwaysRevert())
    factory = flow_factories[obj_class]()
    flow.add(*factory.create_flow(config, migration, obj))
    return flow
def test_retry_in_graph_flow_with_requirements(self):
    """A retry's requirements become the flow's requirements."""
    rt = retry.AlwaysRevert('rt', requires=['x', 'y'])
    flow = gf.Flow('gf', rt)
    self.assertEqual(flow.requires, {'x', 'y'})
    self.assertEqual(flow.provides, set())
def test_retry_in_graph_flow_requires_and_provides(self):
    """A retry's requires and provides both surface on the flow."""
    rt = retry.AlwaysRevert('rt', requires=['x', 'y'],
                            provides=['a', 'b'])
    flow = gf.Flow('gf', rt)
    self.assertEqual(flow.requires, {'x', 'y'})
    self.assertEqual(flow.provides, {'a', 'b'})
def test_retry_in_graph_flow(self):
    """A lone retry in a graph flow compiles to 3 nodes / 2 edges."""
    flo = gf.Flow("test", retry.AlwaysRevert("c"))
    graph = compiler.PatternCompiler(flo).compile().execution_graph
    self.assertEqual(3, len(graph))
    self.assertEqual(2, graph.number_of_edges())
def test_retry_in_unordered_flow(self):
    """A lone retry in an unordered flow compiles to 3 nodes / 2 edges."""
    flo = uf.Flow("test", retry.AlwaysRevert("c"))
    graph = compiler.PatternCompiler(flo).compile().execution_graph
    self.assertEqual(3, len(graph))
    self.assertEqual(2, graph.number_of_edges())
def test_retry(self):
    """Compiling a bare retry controller must fail with TypeError."""
    lone_retry = retry.AlwaysRevert('r1')
    expected_regex = "^Retry controller .* must only be used .*"
    self.assertRaisesRegexp(TypeError, expected_regex,
                            compiler.PatternCompiler(lone_retry).compile)
def test_unordered_flow_retry_two_tasks_provide_same_value(self):
    """Duplicate task provides collapse into one flow-level symbol."""
    flow = uf.Flow('uf', retry.AlwaysRevert('rt', provides=['y']))
    t1 = utils.TaskOneReturn('t1', provides=['x'])
    t2 = utils.TaskOneReturn('t2', provides=['x'])
    flow.add(t1, t2)
    self.assertEqual({'x', 'y'}, flow.provides)
def get_flow(**kwargs):
    """Return task flow

    :param task_id: Task ID
    :param task_type: Type of the task
    :param task_repo: Task repo
    :param image_repo: Image repository used
    :param image_id: ID of the Image to be processed
    :param uri: uri for the image file
    """
    task_id = kwargs.get('task_id')
    task_type = kwargs.get('task_type')
    task_repo = kwargs.get('task_repo')
    image_repo = kwargs.get('image_repo')
    admin_repo = kwargs.get('admin_repo')
    image_id = kwargs.get('image_id')
    import_method = kwargs.get('import_req')['method']['name']
    uri = kwargs.get('import_req')['method'].get('uri')
    stores = kwargs.get('backend', [None])
    all_stores_must_succeed = kwargs.get('import_req').get(
        'all_stores_must_succeed', True)

    # Separator for joining node_staging_uri with the image id; only
    # needed when staging paths are built locally (no enabled_backends).
    separator = ''
    if not CONF.enabled_backends and not CONF.node_staging_uri.endswith('/'):
        separator = '/'

    # Instantiate an action wrapper with the admin repo if we got one,
    # otherwise with the regular repo.
    action_wrapper = ImportActionWrapper(admin_repo or image_repo, image_id,
                                         task_id)
    kwargs['action_wrapper'] = action_wrapper

    # Without an explicit uri, stage-based methods read from the staging
    # directory keyed by image id.
    if not uri and import_method in ['glance-direct', 'copy-image']:
        if CONF.enabled_backends:
            separator, staging_dir = store_utils.get_dir_separator()
            uri = separator.join((staging_dir, str(image_id)))
        else:
            uri = separator.join((CONF.node_staging_uri, str(image_id)))

    flow = lf.Flow(task_type, retry=retry.AlwaysRevert())

    # Take the image lock first so the rest of the flow owns the image.
    flow.add(_ImageLock(task_id, task_type, action_wrapper))

    if import_method in ['web-download', 'copy-image']:
        # These methods first fetch the data into staging via a plugin.
        internal_plugin = internal_plugins.get_import_plugin(**kwargs)
        flow.add(internal_plugin)
        if CONF.enabled_backends:
            separator, staging_dir = store_utils.get_dir_separator()
            file_uri = separator.join((staging_dir, str(image_id)))
        else:
            file_uri = separator.join((CONF.node_staging_uri,
                                       str(image_id)))
    else:
        file_uri = uri

    flow.add(_VerifyStaging(task_id, task_type, task_repo, file_uri))

    # Note(jokke): The plugins were designed to act on the image data or
    # metadata during the import process before the image goes active. It
    # does not make sense to try to execute them during 'copy-image'.
    if import_method != 'copy-image':
        for plugin in import_plugins.get_import_plugins(**kwargs):
            flow.add(plugin)
    else:
        LOG.debug("Skipping plugins on 'copy-image' job.")

    # One import sub-flow per target store; the image goes active on the
    # last store only (or never, for 'copy-image').
    for idx, store in enumerate(stores, 1):
        set_active = (not all_stores_must_succeed) or (idx == len(stores))
        if import_method == 'copy-image':
            set_active = False
        task_name = task_type + "-" + (store or "")
        import_task = lf.Flow(task_name)
        import_to_store = _ImportToStore(task_id,
                                         task_name,
                                         task_repo,
                                         action_wrapper,
                                         file_uri,
                                         store,
                                         all_stores_must_succeed,
                                         set_active)
        import_task.add(import_to_store)
        flow.add(import_task)

    # Clean staged data up after all imports have run.
    delete_task = lf.Flow(task_type).add(_DeleteFromFS(task_id, task_type))
    flow.add(delete_task)

    verify_task = _VerifyImageState(task_id,
                                    task_type,
                                    action_wrapper,
                                    import_method)
    flow.add(verify_task)

    complete_task = _CompleteTask(task_id,
                                  task_type,
                                  task_repo,
                                  action_wrapper)
    flow.add(complete_task)

    # Record import bookkeeping on the image before the flow runs.
    with action_wrapper as action:
        if import_method != 'copy-image':
            action.set_image_attribute(status='importing')
        action.add_importing_stores(stores)
        action.remove_failed_stores(stores)
        action.pop_extra_property('os_glance_stage_host')

    return flow
def test_graph_flow_retry_and_task_dependency_provide_require(self):
    """A symbol both required by the retry and provided by a task shows
    up in the flow's requires and provides."""
    flow = gf.Flow('gf', retry.AlwaysRevert('rt', requires=['x']))
    flow.add(utils.TaskOneReturn(provides=['x']))
    self.assertEqual({'x'}, flow.provides)
    self.assertEqual({'x'}, flow.requires)
def test_graph_flow_retry_and_task_provide_same_value(self):
    """Retry and task providing the same symbol collapse to one."""
    flow_retry = retry.AlwaysRevert('rt', provides=['x'])
    flow = gf.Flow('gf', flow_retry)
    flow.add(utils.TaskOneReturn('t1', provides=['x']))
    self.assertEqual({'x'}, flow.provides)
def get_flow(**kwargs):
    """Return task flow

    :param task_id: Task ID
    :param task_type: Type of the task
    :param task_repo: Task repo
    :param image_repo: Image repository used
    :param image_factory: Glance Image Factory
    :param uri: uri for the image file
    """
    task_id = kwargs.get('task_id')
    task_type = kwargs.get('task_type')
    task_repo = kwargs.get('task_repo')
    image_repo = kwargs.get('image_repo')
    image_factory = kwargs.get('image_factory')
    uri = kwargs.get('uri')

    flow = lf.Flow(task_type, retry=retry.AlwaysRevert()).add(
        _CreateImage(task_id, task_type, task_repo, image_repo,
                     image_factory))

    import_to_store = _ImportToStore(task_id, task_type, image_repo, uri)

    try:
        # NOTE(flaper87): ImportToLocal and DeleteFromLocal shouldn't be here.
        # Ideally, we should have the different import flows doing this for us
        # and this function should clean up duplicated tasks. For example, say
        # 2 flows need to have a local copy of the image - ImportToLocal - in
        # order to be able to complete the task - i.e Introspect-. In that
        # case, the introspect.get_flow call should add both, ImportToLocal
        # and DeleteFromLocal, to the flow and this function will reduce the
        # duplicated calls to those tasks by creating a linear flow that
        # ensures those are called before the other tasks. For now, I'm
        # keeping them here, though.
        limbo = lf.Flow(task_type).add(
            _ImportToFS(task_id, task_type, task_repo, uri))

        for subflow in _get_import_flows(**kwargs):
            limbo.add(subflow)

        # NOTE(flaper87): We have hard-coded 2 tasks,
        # if there aren't more than 2, it means that
        # no subtask has been registered.
        if len(limbo) > 1:
            flow.add(limbo)

            # NOTE(flaper87): Until this implementation gets smarter,
            # make sure ImportToStore is called *after* the imported
            # flow stages. If not, the image will be set to saving state
            # invalidating tasks like Introspection or Convert.
            flow.add(import_to_store)

            # NOTE(flaper87): Since this is an "optional" task but required
            # when `limbo` is executed, we're adding it in its own subflow
            # to isolate it from the rest of the flow.
            delete_flow = lf.Flow(task_type).add(
                _DeleteFromFS(task_id, task_type))
            flow.add(delete_flow)
        else:
            flow.add(import_to_store)
    except exception.BadTaskConfiguration as exc:
        # NOTE(flaper87): If something goes wrong with the load of
        # import tasks, make sure we go on.
        LOG.error(_LE('Bad task configuration: %s'), exc.message)
        flow.add(import_to_store)

    flow.add(_SaveImage(task_id, task_type, image_repo),
             _CompleteTask(task_id, task_type, task_repo))

    return flow
def test_unordered_flow_retry_and_task_same_requires_provides(self):
    """Symbol required by retry but provided by a task appears in both."""
    flow = uf.Flow('uf', retry.AlwaysRevert('rt', requires=['x']))
    flow.add(utils.TaskOneReturn(provides=['x']))
    self.assertEqual({'x'}, flow.requires)
    self.assertEqual({'x'}, flow.provides)
def get_flow(**kwargs):
    """Return task flow

    :param task_id: Task ID
    :param task_type: Type of the task
    :param task_repo: Task repo
    :param image_repo: Image repository used
    :param image_id: ID of the Image to be processed
    :param uri: uri for the image file
    """
    task_id = kwargs.get('task_id')
    task_type = kwargs.get('task_type')
    task_repo = kwargs.get('task_repo')
    image_repo = kwargs.get('image_repo')
    image_id = kwargs.get('image_id')
    import_method = kwargs.get('import_req')['method']['name']
    uri = kwargs.get('import_req')['method'].get('uri')
    stores = kwargs.get('backend', [None])
    allow_failure = kwargs.get('import_req').get('allow_failure', False)

    # Separator for joining node_staging_uri with the image id; only
    # relevant when staging paths are built locally (no enabled_backends).
    separator = ''
    if not CONF.enabled_backends and not CONF.node_staging_uri.endswith('/'):
        separator = '/'

    # Without an explicit uri, glance-direct reads from the staging
    # directory keyed by image id.
    if not uri and import_method == 'glance-direct':
        if CONF.enabled_backends:
            separator, staging_dir = _get_dir_separator()
            uri = separator.join((staging_dir, str(image_id)))
        else:
            uri = separator.join((CONF.node_staging_uri, str(image_id)))

    flow = lf.Flow(task_type, retry=retry.AlwaysRevert())

    if import_method == 'web-download':
        # Download into the staging area first, then import from there.
        downloadToStaging = internal_plugins.get_import_plugin(**kwargs)
        flow.add(downloadToStaging)
        if CONF.enabled_backends:
            separator, staging_dir = _get_dir_separator()
            file_uri = separator.join((staging_dir, str(image_id)))
        else:
            file_uri = separator.join((CONF.node_staging_uri,
                                       str(image_id)))
    else:
        file_uri = uri

    flow.add(_VerifyStaging(task_id, task_type, task_repo, file_uri))

    for plugin in import_plugins.get_import_plugins(**kwargs):
        flow.add(plugin)

    # One import sub-flow per target store; the image goes active on the
    # last store unless failures are allowed.
    for idx, store in enumerate(stores, 1):
        set_active = allow_failure or (idx == len(stores))
        task_name = task_type + "-" + (store or "")
        import_task = lf.Flow(task_name)
        import_to_store = _ImportToStore(task_id,
                                         task_name,
                                         image_repo,
                                         file_uri,
                                         image_id,
                                         store,
                                         allow_failure,
                                         set_active)
        import_task.add(import_to_store)
        flow.add(import_task)

    # Clean staged data up after all imports have run.
    delete_task = lf.Flow(task_type).add(_DeleteFromFS(task_id, task_type))
    flow.add(delete_task)

    save_task = _SaveImage(task_id, task_type, image_repo, image_id)
    flow.add(save_task)

    complete_task = _CompleteTask(task_id, task_type, task_repo, image_id)
    flow.add(complete_task)

    # Mark the image as importing and record the pending stores before
    # handing the flow back.
    image = image_repo.get(image_id)
    from_state = image.status
    image.status = 'importing'
    image.extra_properties['os_glance_importing_to_stores'] = ','.join(
        (store for store in stores if store is not None))
    image.extra_properties['os_glance_failed_import'] = ''
    image_repo.save(image, from_state=from_state)

    return flow
def test_retry(self):
    """Compiling a bare retry controller raises TypeError."""
    lone_retry = retry.AlwaysRevert('r1')
    self.assertRaises(TypeError,
                      compiler.PatternCompiler(lone_retry).compile)