def run_workflow(**kwargs):
    """Run the workflow given by the `wfile` entry of kwargs (resolved to
    the default workflow file when not given), honoring the CLI flags
    carried in kwargs.

    If the POPPER_PRE_WORKFLOW_PATH / POPPER_POST_WORKFLOW_PATH environment
    variables are set, the workflows they point to are executed before and
    after the main one, with the same flags. When the main run fails and
    `--on-failure` was given, the named action is executed as a fallback.

    Exits (via log.fail) on invalid flag combinations; re-raises SystemExit
    for failed runs that have no on_failure handler.
    """
    kwargs['wfile'] = pu.find_default_wfile(kwargs['wfile'])
    log.info('Found and running workflow at ' + kwargs['wfile'])

    # Initialize a Workflow. During initialization all the validation
    # takes place automatically.
    wf = Workflow(kwargs['wfile'])
    wf_runner = WorkflowRunner(wf)

    # Check for injected pre/post workflows.
    pre_wfile = os.environ.get('POPPER_PRE_WORKFLOW_PATH')
    post_wfile = os.environ.get('POPPER_POST_WORKFLOW_PATH')

    # Saving workflow instance for signal handling
    popper.cli.interrupt_params['parallel'] = kwargs['parallel']

    if kwargs['parallel']:
        if sys.version_info[0] < 3:
            log.fail('--parallel is only supported on Python3')
        log.warning("Using --parallel may result in interleaved output. "
                    "You may use --quiet flag to avoid confusion.")

    if kwargs['with_dependencies'] and (not kwargs['action']):
        log.fail('`--with-dependencies` can be used only with '
                 'action argument.')

    if kwargs['skip'] and kwargs['action']:
        log.fail('`--skip` can\'t be used when action argument '
                 'is passed.')

    # These two are not accepted by WorkflowRunner.run(); remove them
    # from kwargs before forwarding.
    on_failure = kwargs.pop('on_failure')
    wfile = kwargs.pop('wfile')

    try:
        if pre_wfile:
            pre_wf_runner = WorkflowRunner(Workflow(pre_wfile))
            pre_wf_runner.run(**kwargs)

        wf_runner.run(**kwargs)

        if post_wfile:
            # BUGFIX: this branch previously stored the post-workflow
            # runner in `pre_wf_runner` (copy-paste from the block above),
            # shadowing the pre-runner and misleading readers.
            post_wf_runner = WorkflowRunner(Workflow(post_wfile))
            post_wf_runner.run(**kwargs)
    except SystemExit as e:
        if (e.code != 0) and on_failure:
            # Run only the fallback action, with no skips.
            kwargs['skip'] = list()
            kwargs['action'] = on_failure
            wf_runner.run(**kwargs)
        else:
            raise

    if kwargs['action']:
        log.info('Action "{}" finished successfully.'.format(
            kwargs['action']))
    else:
        log.info('Workflow "{}" finished successfully.'.format(wfile))
def run_pipeline(action, wfile, skip_clone, skip_pull, skip, workspace,
                 reuse, dry_run, parallel, with_dependencies, on_failure):
    """Run the workflow in `wfile` (optionally restricted to `action`).

    On a failed run (non-zero SystemExit) the `on_failure` action, when
    given, is executed as a fallback; otherwise the SystemExit propagates.
    """
    # Initialize a Workflow. During initialization all the validation
    # takes place automatically.
    wf = Workflow(wfile)
    pipeline = WorkflowRunner(wf)

    # Saving workflow instance for signal handling
    popper.cli.interrupt_params['parallel'] = parallel

    if reuse:
        log.warn("Using --reuse ignores any changes made to an action's logic "
                 "or to an action block in the .workflow file.")

    if parallel:
        if sys.version_info[0] < 3:
            log.fail('--parallel is only supported on Python3')
        log.warn("Using --parallel may result in interleaved output. "
                 "You may use --quiet flag to avoid confusion.")

    try:
        pipeline.run(action, skip_clone, skip_pull, skip, workspace, reuse,
                     dry_run, parallel, with_dependencies)
    except SystemExit as e:
        # BUGFIX: was `e.code is not 0`, an identity comparison on an int
        # (undefined across implementations, SyntaxWarning on CPython 3.8+);
        # value inequality is what is meant here.
        if (e.code != 0) and on_failure:
            pipeline.run(on_failure, skip_clone, skip_pull, list(),
                         workspace, reuse, dry_run, parallel,
                         with_dependencies)
        else:
            raise

    if action:
        log.info('Action "{}" finished successfully.'.format(action))
    else:
        log.info('Workflow finished successfully.')
def setUp(self):
    """Clone the scaffold-workflow repo and build three independent
    runners over the same workflow file, each with its own wid."""
    os.makedirs('/tmp/test_folder/gha-demo')
    # log.setLevel('CRITICAL')
    git.Repo.clone_from(
        'https://github.com/JayjeetAtGithub/popper-scaffold-workflow',
        '/tmp/test_folder/gha-demo')
    os.chdir('/tmp/test_folder/gha-demo')

    wf_path = '/tmp/test_folder/gha-demo/main.workflow'
    self.wf_one = Workflow(wf_path)
    self.wf_two = Workflow(wf_path)
    self.wf_three = Workflow(wf_path)

    self.runner_one = WorkflowRunner(self.wf_one)
    self.runner_one.wid = '1234'

    self.runner_two = WorkflowRunner(self.wf_two)
    self.runner_two.wid = '5678'

    self.runner_three = WorkflowRunner(self.wf_three)
    self.runner_three.wid = '3456'
def test_validate_workflow_block(self):
    """validate_workflow_block() must reject files with a wrong number
    of workflow blocks or unknown workflow attributes."""
    bad_workflows = [
        # more than one workflow block
        """
        workflow "sample workflow 1" {
            resolves = ["a"]
        }
        workflow "sample workflow 2" {
            resolves = ["a"]
        }
        """,
        # no workflow block at all
        """
        action "a" {
            uses = "sh"
        }
        """,
        # `runs` is not a valid workflow attribute
        """
        workflow "sample workflow 1" {
            resolves = ["a"]
            runs = ["sh", "-c", "ls"]
        }
        action "a" {
            uses = ["sh"]
        }
        """,
        # workflow block missing `resolves` (only has `on`)
        """
        workflow "sample workflow 1" {
            on = "push"
        }
        action "a" {
            uses = ["sh"]
        }
        """,
    ]
    for content in bad_workflows:
        self.create_workflow_file(content)
        wf = Workflow('/tmp/test_folder/a.workflow')
        self.assertRaises(SystemExit, wf.validate_workflow_block)
def test_check_secrets(self):
    """check_secrets() must pass when all secrets are present, prompt
    interactively outside CI, and fail hard inside CI."""
    # Both secrets the workflow declares are available.
    os.environ['SECRET_ONE'] = '1234'
    os.environ['SECRET_TWO'] = '5678'
    pu.write_file(
        '/tmp/test_folder/a.workflow', """
        workflow "sample" {
            resolves = "a"
        }
        action "a" {
            uses = "popperized/bin/sh@master"
            args = ["ls -ltr"]
            secrets = ["SECRET_ONE", "SECRET_TWO"]
        }
        """)
    wf = Workflow('/tmp/test_folder/a.workflow')
    wf.parse()
    # With every secret set, check_secrets succeeds for all flag combos
    # (the two booleans are presumably dry_run / skip_secrets_prompt —
    # TODO confirm against WorkflowRunner.check_secrets' signature).
    WorkflowRunner.check_secrets(wf, False, False)
    WorkflowRunner.check_secrets(wf, True, False)
    WorkflowRunner.check_secrets(wf, False, True)

    # Remove one secret so the missing-secret paths can be exercised.
    os.environ.pop('SECRET_ONE')
    pu.write_file(
        '/tmp/test_folder/a.workflow', """
        workflow "sample" {
            resolves = "a"
        }
        action "a" {
            uses = "popperized/bin/sh@master"
            args = ["ls -ltr"]
            secrets = ["SECRET_ONE", "SECRET_TWO"]
        }
        """)
    wf = Workflow('/tmp/test_folder/a.workflow')
    wf.parse()
    # Outside CI a missing secret is prompted for via getpass; the mocked
    # prompt supplies '1234'. NOTE(review): the second pop of SECRET_ONE
    # below only works if check_secrets stores the prompted value back
    # into os.environ — confirm against the implementation.
    os.environ['CI'] = 'false'
    with patch('getpass.getpass', return_value='1234') as fake_input:
        WorkflowRunner.check_secrets(wf, False, False)

    # Inside CI there is no prompt: a missing secret must abort.
    os.environ['CI'] = 'true'
    os.environ.pop('SECRET_ONE')
    self.assertRaises(SystemExit, WorkflowRunner.check_secrets,
                      wf, False, False)
def test_check_duplicate_actions(self):
    """Two action blocks sharing one name must abort; unique names
    must validate cleanly."""
    # Action "a" is defined twice.
    self.create_workflow_file("""
    workflow "sample" {
        resolves = ["a", "b"]
    }
    action "a" {
        uses = "sh"
    }
    action "b" {
        uses = "sh"
    }
    action "a" {
        uses = "sh"
    }
    """)
    wf = Workflow('/tmp/test_folder/a.workflow')
    self.assertRaises(SystemExit, wf.check_duplicate_actions)

    # Same workflow without the duplicate block.
    self.create_workflow_file("""
    workflow "sample" {
        resolves = ["a", "b"]
    }
    action "a" {
        uses = "sh"
    }
    action "b" {
        uses = "sh"
    }
    """)
    wf = Workflow('/tmp/test_folder/a.workflow')
    wf.check_duplicate_actions()
def import_from_repo(path, project_root):
    """Import a .workflow file (and any local "./" actions it references)
    from a remote repository URL into `project_root`.

    `path` has the form <url>/<user>/<repo>[/folder[/wf.workflow]]; when
    only the repo is given, main.workflow and .github/main.workflow are
    searched in that order. Exits via log.fail on a malformed URL or when
    no workflow file can be located.
    """
    parts = scm.get_parts(path)
    if len(parts) < 3:
        # BUGFIX: the message previously used an in-string line
        # continuation that embedded a run of spaces in the output.
        log.fail('Required url format: '
                 '<url>/<user>/<repo>[/folder[/wf.workflow]]')

    url, service, user, repo, _, version = scm.parse(path)
    cloned_project_dir = os.path.join("/tmp", service, user, repo)
    scm.clone(url, user, repo, os.path.dirname(cloned_project_dir), version)

    if len(parts) == 3:
        # Only <url>/<user>/<repo> given: try the default locations.
        ptw_one = os.path.join(cloned_project_dir, "main.workflow")
        ptw_two = os.path.join(cloned_project_dir, ".github/main.workflow")
        if os.path.isfile(ptw_one):
            path_to_workflow = ptw_one
        elif os.path.isfile(ptw_two):
            path_to_workflow = ptw_two
        else:
            log.fail("Unable to find a .workflow file")
    elif len(parts) >= 4:
        # A sub-folder (and possibly a file) was given; drop any @version
        # suffix before resolving the path.
        path_to_workflow = os.path.join(
            cloned_project_dir, '/'.join(parts[3:])).split("@")[0]
        if not os.path.basename(path_to_workflow).endswith('.workflow'):
            path_to_workflow = os.path.join(path_to_workflow,
                                            'main.workflow')
        if not os.path.isfile(path_to_workflow):
            log.fail("Unable to find a .workflow file")

    shutil.copy(path_to_workflow, project_root)
    log.info("Successfully imported from {}".format(path_to_workflow))

    # BUGFIX: the workflow file was previously opened with open() but the
    # handle was never used — Workflow reads the path itself. The dangling
    # `with open(...)` block has been removed.
    wf = Workflow(path_to_workflow)

    # Copy every action that the workflow references with a local path.
    action_paths = [a_block['uses'] for _, a_block in wf.actions
                    if a_block['uses'].startswith("./")]
    action_paths = set([a.split("/")[1] for a in action_paths])
    for a in action_paths:
        copy_tree(os.path.join(cloned_project_dir, a),
                  os.path.join(project_root, a))
        log.info("Copied {} to {}...".format(
            os.path.join(cloned_project_dir, a), project_root))
def test_check_for_empty_workflow(self):
    """A workflow whose `resolves` entries match no existing action
    must be rejected as empty."""
    self.create_workflow_file("""
    workflow "samples" {
        resolves = ["a1", "a2"]
    }
    action "b" {
        uses = "sh"
    }
    action "c" {
        uses = "sh"
    }
    """)
    wf = Workflow('/tmp/test_folder/a.workflow')
    wf.normalize()
    # Force the resolves list to reference actions that do not exist.
    wf.resolves = ["a1", "a2"]
    self.assertRaises(SystemExit, wf.check_for_empty_workflow)
def __init__(self, wfile, workspace, dry_run, reuse, parallel,
             skip_secrets_prompt=False):
    """Store the run configuration, build (and thereby validate) the
    Workflow for `wfile`, and verify its secrets are available."""
    self.workspace = workspace
    self.dry_run = dry_run
    self.reuse = reuse
    self.parallel = parallel
    self.skip_secrets_prompt = skip_secrets_prompt
    self.actions_cache_path = os.path.join('/', 'tmp', 'actions')

    # Resolve the workflow file (falling back to the default location)
    # and initialize the Workflow; all validation happens automatically
    # during initialization.
    self.wf = Workflow(pu.find_default_wfile(wfile))
    self.check_secrets()

    log.debug('workflow:\n{}'.format(
        yaml.dump(self.wf, default_flow_style=False, default_style='')))
def setUp(self):
    """Write a minimal host-action workflow and instantiate its runners
    with the docker engine."""
    os.makedirs('/tmp/test_folder')
    os.chdir('/tmp/test_folder')
    log.setLevel('CRITICAL')
    pu.write_file('/tmp/test_folder/a.workflow', """
    workflow "sample" {
        resolves = "sample action"
    }
    action "sample action" {
        uses = "sh"
        args = ["echo", "Hello"]
    }
    """)
    self.wf = Workflow('/tmp/test_folder/a.workflow')
    self.wf.parse()
    WorkflowRunner.instantiate_runners(
        'docker', self.wf, '/tmp/test_folder', False, False, '12345')
def cli(ctx, wfile, skip, recursive, colors):
    """
    Creates a graph in the .dot format representing the workflow.
    """
    def add_to_graph(dot_str, wf, parent, children, node_attrs, stage_edges):
        """Recursively goes over the children ("next" attribute) of the
        given parent, adding an edge from parent to children
        """
        for n in children:
            edge = ' "{}" -> "{}";\n'.format(parent, n)
            if edge in stage_edges:
                # Edge already emitted — avoid duplicates and cycles.
                continue
            dot_str += edge + ' "{}" [{}];\n'.format(n, node_attrs)
            stage_edges.add(edge)
            # Descend into each downstream action, one child at a time.
            for M in wf.get_action(n).get('next', []):
                dot_str = add_to_graph(dot_str, wf, n, [M], node_attrs,
                                       stage_edges)
        return dot_str

    # With --recursive, graph every workflow found under the project;
    # otherwise only the given (or default) one.
    wfile_list = list()
    if recursive:
        wfile_list = pu.find_recursive_wfile()
    else:
        wfile_list.append(pu.find_default_wfile(wfile))

    for wfile in wfile_list:
        wf = Workflow(wfile)
        wf.parse()
        # Remove --skip'ed actions, then ensure the remainder is connected.
        wf = Workflow.skip_actions(wf, skip)
        wf.check_for_unreachable_actions()
        # Workflow node: rounded box (red with --colors);
        # action nodes: plain box (cyan with --colors).
        node_attrs = ('shape=box, style="filled{}", fillcolor=transparent{}')
        wf_attr = node_attrs.format(',rounded',
                                    ',color=red' if colors else '')
        act_attr = node_attrs.format('', ',color=cyan' if colors else '')
        # Start from the root actions and emit all edges reachable from them.
        dot_str = add_to_graph("", wf, wf.name, wf.root, act_attr, set())
        dot_str += ' "{}" [{}];\n'.format(wf.name, wf_attr)
        log.info("digraph G { graph [bgcolor=transparent];\n" + dot_str +
                 "}\n")
def test_download_actions(self):
    """download_actions() must clone referenced actions into the default
    cache, honor POPPER_CACHE_DIR, and fail when --skip-clone is given
    but the action is not cached."""
    pu.write_file(
        '/tmp/test_folder/a.workflow', """
        workflow "sample" {
            resolves = "a"
        }
        action "a" {
            uses = "popperized/bin/sh@master"
        }
        action "b" {
            uses = "popperized/ansible@master"
        }
        """)
    wf = Workflow('/tmp/test_folder/a.workflow')
    wf.parse()
    # Download actions in the default cache directory.
    WorkflowRunner.download_actions(wf, False, False, '12345')
    self.assertEqual(
        os.path.exists(os.environ['HOME'] +
                       '/.cache/.popper/actions/12345/github.com'),
        True)
    # Download actions in custom cache directory
    os.environ['POPPER_CACHE_DIR'] = '/tmp/somedir'
    WorkflowRunner.download_actions(wf, False, False, '12345')
    self.assertEqual(
        os.path.exists('/tmp/somedir/actions/12345/github.com'), True)
    os.environ.pop('POPPER_CACHE_DIR')
    # Release resources (both caches) so the next call starts cold.
    shutil.rmtree('/tmp/somedir')
    shutil.rmtree(os.environ['HOME'] +
                  '/.cache/.popper/actions/12345/github.com')
    # Test with skipclone flag when action not present in cache:
    # with nothing cached and cloning disabled, the call must abort.
    self.assertRaises(SystemExit, WorkflowRunner.download_actions,
                      wf, False, True, '12345')
def setUp(self):
    """Prepare a workflow backed by a real remote action, build its
    vagrant runner, and initialize the Vagrant cache."""
    os.makedirs('/tmp/test_folder')
    os.chdir('/tmp/test_folder')
    log.setLevel('CRITICAL')
    workflow = """
    workflow "sample" {
        resolves = "sample action"
    }
    action "sample action" {
        uses = "popperized/bin/sh@master"
        args = ["echo", "Hello"]
    }
    """
    pu.write_file('/tmp/test_folder/a.workflow', workflow)
    self.wf = Workflow('/tmp/test_folder/a.workflow')
    self.wf.parse()
    # The action must be downloaded before runners can be instantiated
    # for it (it lives in a remote repository).
    WorkflowRunner.download_actions(self.wf, False, False, '12345')
    WorkflowRunner.instantiate_runners('vagrant', self.wf,
                                       '/tmp/test_folder', False, False,
                                       '12345')
    # Keep the concrete runner handy for the tests in this class.
    self.runner = self.wf.action['sample action']['runner']
    VagrantRunner.setup_vagrant_cache('12345')
def test_get_workflow_env(self):
    """get_workflow_env() must expose the workflow metadata under both
    the GITHUB_* and POPPER_* prefixes, plus HOME."""
    pu.write_file(
        '/tmp/test_folder/a.workflow', """
        workflow "sample" {
            resolves = "a"
        }
        action "a" {
            uses = "sh"
        }
        """)
    wf = Workflow('/tmp/test_folder/a.workflow')
    wf.parse()
    env = WorkflowRunner.get_workflow_env(wf, '/tmp/test_folder')

    # Every metadata entry appears twice, once per prefix.
    meta = {
        'WORKFLOW': 'sample',
        'ACTION': '',
        'ACTOR': 'popper',
        'REPOSITORY': 'unknown',
        'EVENT_NAME': 'push',
        'EVENT_PATH': '/tmp/github_event.json',
        'WORKSPACE': '/tmp/test_folder',
        'SHA': 'unknown',
        'REF': 'unknown',
    }
    expected = {'HOME': os.environ['HOME']}
    for prefix in ('GITHUB', 'POPPER'):
        for key, value in meta.items():
            expected['{}_{}'.format(prefix, key)] = value
    self.assertDictEqual(env, expected)
def test_normalize(self):
    """normalize() coerces scalar attributes into list form and fills
    in workflow-level defaults."""
    self.create_workflow_file("""
    workflow "sample workflow" {
        resolves = "a"
    }
    action "a" {
        needs = "b"
        uses = "popperized/bin/npm@master"
        args = "npm --version"
        secrets = "SECRET_KEY"
    }
    """)
    wf = Workflow('/tmp/test_folder/a.workflow')
    wf.normalize()

    # Workflow-level attributes.
    self.assertEqual(wf.name, 'sample workflow')
    self.assertEqual(wf.on, 'push')
    self.assertEqual(wf.resolves, ['a'])
    self.assertDictEqual(wf.props, dict())

    # Action-level scalars become lists; args is word-split.
    a_block = wf.get_action('a')
    self.assertEqual(a_block['name'], 'a')
    self.assertEqual(a_block['needs'], ['b'])
    self.assertEqual(a_block['args'], ['npm', '--version'])
    self.assertEqual(a_block['secrets'], ['SECRET_KEY'])
def test_validate_action_blocks(self):
    """validate_action_blocks() must reject every malformed action
    block below (missing/ill-typed attributes, unknown keys)."""
    bad_workflows = [
        # no action block at all
        """
        workflow "sample workflow" {
            resolves = "a"
        }
        """,
        # `on` is not a valid action attribute
        """
        workflow "sample workflow" {
            resolves = "a"
        }
        action "a" {
            uses = "sh"
            on = "push"
        }
        """,
        # mandatory `uses` attribute missing
        """
        workflow "sample workflow" {
            resolves = "a"
        }
        action "a" {
            args = "ls"
        }
        """,
        # `uses` must be a string
        """
        workflow "sample workflow" {
            resolves = "a"
        }
        action "a" {
            uses = 1
        }
        """,
        # `needs` must be a string or list of strings
        """
        workflow "sample workflow" {
            resolves = "a"
        }
        action "a" {
            uses = "sh"
            needs = 1
        }
        """,
        # `args` entries must be strings
        """
        workflow "sample workflow" {
            resolves = "a"
        }
        action "a" {
            uses = "sh"
            args = [1, 2, 3, 4]
        }
        """,
        # `runs` entries must be strings
        """
        workflow "sample workflow" {
            resolves = "a"
        }
        action "a" {
            uses = "sh"
            runs = [1, 2, 3, 4]
        }
        """,
        # `secrets` must be a list of strings, not a map
        """
        workflow "sample workflow" {
            resolves = "a"
        }
        action "a" {
            uses = "sh"
            secrets = {
                SECRET_A = 1234,
                SECRET_B = 5678
            }
        }
        """,
        # `env` must be a map, not a list
        """
        workflow "sample workflow" {
            resolves = "a"
        }
        action "a" {
            uses = "sh"
            env = [
                "SECRET_A",
                "SECRET_B"
            ]
        }
        """,
    ]
    for content in bad_workflows:
        self.create_workflow_file(content)
        wf = Workflow('/tmp/test_folder/a.workflow')
        self.assertRaises(SystemExit, wf.validate_action_blocks)
def test_instantiate_runners(self):
    """instantiate_runners() must choose the runner class from the
    action's `uses` value and the requested engine: "sh" and local
    ("./") actions always run on the host; remote docker-style actions
    get the engine-specific runner.

    Cleanup: the original test assigned `env = WorkflowRunner.
    get_workflow_env(...)` three times without ever using it; the dead
    assignments were removed (get_workflow_env has its own test).
    """
    pu.write_file(
        '/tmp/test_folder/a.workflow', """
        workflow "sample" {
            resolves = "a"
        }
        action "a" {
            uses = "sh"
            args = "ls"
        }
        """)
    wf = Workflow('/tmp/test_folder/a.workflow')
    wf.parse()
    WorkflowRunner.instantiate_runners('docker', wf, '/tmp/test_folder',
                                       False, False, '12345')
    # `uses = "sh"` runs on the host regardless of engine.
    self.assertIsInstance(wf.action['a']['runner'], HostRunner)

    os.makedirs('/tmp/test_folder/actions/sample')
    pu.write_file('/tmp/test_folder/actions/sample/entrypoint.sh')
    pu.write_file('/tmp/test_folder/actions/sample/README.md')
    pu.write_file(
        '/tmp/test_folder/a.workflow', """
        workflow "sample" {
            resolves = "a"
        }
        action "a" {
            uses = "./actions/sample"
        }
        """)
    wf = Workflow('/tmp/test_folder/a.workflow')
    wf.parse()
    WorkflowRunner.instantiate_runners('singularity', wf,
                                       '/tmp/test_folder', False, False,
                                       '12345')
    # Local actions with a shell entrypoint also run on the host.
    self.assertIsInstance(wf.action['a']['runner'], HostRunner)

    pu.write_file(
        '/tmp/test_folder/a.workflow', """
        workflow "sample" {
            resolves = "a"
        }
        action "a" {
            uses = "popperized/bin/sh@master"
        }
        """)
    wf = Workflow('/tmp/test_folder/a.workflow')
    wf.parse()
    # Remote actions are dispatched to the engine-specific runner.
    WorkflowRunner.instantiate_runners('singularity', wf,
                                       '/tmp/test_folder', False, False,
                                       '12345')
    self.assertIsInstance(wf.action['a']['runner'], SingularityRunner)

    WorkflowRunner.instantiate_runners('docker', wf, '/tmp/test_folder',
                                       False, False, '12345')
    self.assertIsInstance(wf.action['a']['runner'], DockerRunner)

    WorkflowRunner.instantiate_runners('vagrant', wf, '/tmp/test_folder',
                                       False, False, '12345')
    self.assertIsInstance(wf.action['a']['runner'], VagrantRunner)
def test_complete_graph(self):
    """complete_graph() must fill in the root-action set and the
    forward `next` links derived from each action's `needs`."""
    self.create_workflow_file("""
    workflow "example" {
        resolves = "end"
    }
    action "a" {
        uses = "sh"
        args = "ls"
    }
    action "b" {
        uses = "sh"
        args = "ls"
    }
    action "c" {
        uses = "sh"
        args = "ls"
    }
    action "d" {
        needs = ["c"]
        uses = "sh"
        args = "ls"
    }
    action "e" {
        needs = ["d", "b", "a"]
        uses = "sh"
        args = "ls"
    }
    action "end" {
        needs = "e"
        uses = "sh"
        args = "ls"
    }
    """)
    wf = Workflow('/tmp/test_folder/a.workflow')
    wf.normalize()
    wf.complete_graph()

    self.assertEqual(wf.name, 'example')
    self.assertEqual(wf.resolves, ['end'])
    self.assertEqual(wf.on, 'push')
    self.assertEqual(wf.props, {})
    # Actions with no `needs` form the root of the graph.
    self.assertEqual(wf.root, {'b', 'c', 'a'})

    expected = {
        'a': {'uses': 'sh', 'args': ['ls'], 'name': 'a', 'next': {'e'}},
        'b': {'uses': 'sh', 'args': ['ls'], 'name': 'b', 'next': {'e'}},
        'c': {'uses': 'sh', 'args': ['ls'], 'name': 'c', 'next': {'d'}},
        'd': {'needs': ['c'], 'uses': 'sh', 'args': ['ls'], 'name': 'd',
              'next': {'e'}},
        'e': {'needs': ['d', 'b', 'a'], 'uses': 'sh', 'args': ['ls'],
              'name': 'e', 'next': {'end'}},
        'end': {'needs': ['e'], 'uses': 'sh', 'args': ['ls'],
                'name': 'end'},
    }
    self.assertDictEqual(wf.action, expected)
def test_get_stages(self):
    """get_stages() must yield sets of actions in topological stages,
    where each stage only depends on earlier ones.

    Idiom fix: the original built `stages` with a manual for/append
    loop over the generator; `list(...)` expresses the same thing
    directly.
    """
    # Simple diamond-ish graph with a three-way root.
    self.create_workflow_file("""
    workflow "example" {
        resolves = "end"
    }
    action "a" {
        uses = "sh"
        args = "ls"
    }
    action "b" {
        uses = "sh"
        args = "ls"
    }
    action "c" {
        uses = "sh"
        args = "ls"
    }
    action "d" {
        needs = ["c"]
        uses = "sh"
        args = "ls"
    }
    action "e" {
        needs = ["d", "b", "a"]
        uses = "sh"
        args = "ls"
    }
    action "end" {
        needs = "e"
        uses = "sh"
        args = "ls"
    }
    """)
    wf = Workflow('/tmp/test_folder/a.workflow')
    wf.parse()
    stages = list(wf.get_stages())
    self.assertListEqual(stages, [{'b', 'c', 'a'}, {'d'}, {'e'}, {'end'}])

    # Wider graph with a fan-out/fan-in around "d".
    self.create_workflow_file("""
    workflow "example" {
        resolves = ["end"]
    }
    action "a" {
        uses = "sh"
        args = "ls"
    }
    action "b" {
        needs = "a"
        uses = "sh"
        args = "ls"
    }
    action "c" {
        uses = "sh"
        args = "ls"
    }
    action "d" {
        uses = "sh"
        needs = ["b", "c"]
        args = "ls"
    }
    action "g" {
        needs = "d"
        uses = "sh"
        args = "ls"
    }
    action "f" {
        needs = "d"
        uses = "sh"
        args = "ls"
    }
    action "h" {
        needs = "g"
        uses = "sh"
        args = "ls"
    }
    action "end" {
        needs = ["h", "f"]
        uses = "sh"
        args = "ls"
    }
    """)
    wf = Workflow('/tmp/test_folder/a.workflow')
    wf.parse()
    stages = list(wf.get_stages())
    self.assertListEqual(
        stages,
        [{'a', 'c'}, {'b'}, {'d'}, {'g', 'f'}, {'h'}, {'end'}])
def test_check_for_unreachable_actions(self):
    """Skipping actions may orphan parts of the graph: with strict
    checking enabled that must abort, while an unreferenced action is
    tolerated by the default check."""
    self.create_workflow_file("""
    workflow "example" {
        resolves = "end"
    }
    action "a" {
        uses = "sh"
        args = "ls"
    }
    action "b" {
        uses = "sh"
        args = "ls"
    }
    action "c" {
        uses = "sh"
        args = "ls"
    }
    action "d" {
        needs = ["c"]
        uses = "sh"
        args = "ls"
    }
    action "e" {
        needs = ["d", "b", "a"]
        uses = "sh"
        args = "ls"
    }
    action "end" {
        needs = "e"
        uses = "sh"
        args = "ls"
    }
    """)
    wf = Workflow('/tmp/test_folder/a.workflow')
    wf.parse()

    # Skipping d, a and b leaves "e"/"end" disconnected from the roots.
    pruned = Workflow.skip_actions(wf, ['d', 'a', 'b'])
    expected = {
        'a': {'uses': 'sh', 'args': ['ls'], 'name': 'a', 'next': set()},
        'b': {'uses': 'sh', 'args': ['ls'], 'name': 'b', 'next': set()},
        'c': {'uses': 'sh', 'args': ['ls'], 'name': 'c', 'next': set()},
        'd': {'needs': [], 'uses': 'sh', 'args': ['ls'], 'name': 'd',
              'next': set()},
        'e': {'needs': [], 'uses': 'sh', 'args': ['ls'], 'name': 'e',
              'next': {'end'}},
        'end': {'needs': ['e'], 'uses': 'sh', 'args': ['ls'],
                'name': 'end'},
    }
    self.assertDictEqual(pruned.action, expected)
    self.assertRaises(SystemExit,
                      pruned.check_for_unreachable_actions, True)

    # An action nobody resolves is fine for the non-strict check.
    self.create_workflow_file("""
    workflow "sample" {
        resolves = ["reachable"]
    }
    action "reachable" {
        uses = "popperized/bin/sh@master"
        args = "ls"
    }
    action "unreachable" {
        uses = "popperized/bin/sh@master"
        args = ["ls -ltr"]
    }
    """)
    wf = Workflow('/tmp/test_folder/a.workflow')
    wf.parse()
    wf.check_for_unreachable_actions()
def test_skip_actions(self):
    """skip_actions() must drop the skipped actions from every `needs`
    and `next` relation while keeping the rest of the graph intact."""
    self.create_workflow_file("""
    workflow "example" {
        resolves = "end"
    }
    action "a" {
        uses = "sh"
        args = "ls"
    }
    action "b" {
        uses = "sh"
        args = "ls"
    }
    action "c" {
        uses = "sh"
        args = "ls"
    }
    action "d" {
        needs = ["c"]
        uses = "sh"
        args = "ls"
    }
    action "e" {
        needs = ["d", "b", "a"]
        uses = "sh"
        args = "ls"
    }
    action "end" {
        needs = "e"
        uses = "sh"
        args = "ls"
    }
    """)
    wf = Workflow('/tmp/test_folder/a.workflow')
    wf.parse()

    # Skipping "b" detaches it but leaves every other edge alone.
    pruned = Workflow.skip_actions(wf, ['b'])
    expected = {
        'a': {'uses': 'sh', 'args': ['ls'], 'name': 'a', 'next': {'e'}},
        'b': {'uses': 'sh', 'args': ['ls'], 'name': 'b', 'next': set()},
        'c': {'uses': 'sh', 'args': ['ls'], 'name': 'c', 'next': {'d'}},
        'd': {'needs': ['c'], 'uses': 'sh', 'args': ['ls'], 'name': 'd',
              'next': {'e'}},
        'e': {'needs': ['d', 'a'], 'uses': 'sh', 'args': ['ls'],
              'name': 'e', 'next': {'end'}},
        'end': {'needs': ['e'], 'uses': 'sh', 'args': ['ls'],
                'name': 'end'},
    }
    self.assertDictEqual(pruned.action, expected)

    # Skipping "d" and "a" also empties d's needs and c's next.
    pruned = Workflow.skip_actions(wf, ['d', 'a'])
    expected = {
        'a': {'uses': 'sh', 'args': ['ls'], 'name': 'a', 'next': set()},
        'b': {'uses': 'sh', 'args': ['ls'], 'name': 'b', 'next': {'e'}},
        'c': {'uses': 'sh', 'args': ['ls'], 'name': 'c', 'next': set()},
        'd': {'needs': [], 'uses': 'sh', 'args': ['ls'], 'name': 'd',
              'next': set()},
        'e': {'needs': ['b'], 'uses': 'sh', 'args': ['ls'], 'name': 'e',
              'next': {'end'}},
        'end': {'needs': ['e'], 'uses': 'sh', 'args': ['ls'],
                'name': 'end'},
    }
    self.assertDictEqual(pruned.action, expected)
def test_filter_action(self):
    """filter_action() must restrict the graph to one action — alone by
    default, or together with its transitive dependencies when
    with_dependencies=True."""
    self.create_workflow_file("""
    workflow "example" {
        resolves = "end"
    }
    action "a" {
        uses = "sh"
        args = "ls"
    }
    action "b" {
        uses = "sh"
        args = "ls"
    }
    action "c" {
        uses = "sh"
        args = "ls"
    }
    action "d" {
        needs = ["c"]
        uses = "sh"
        args = "ls"
    }
    action "e" {
        needs = ["d", "b", "a"]
        uses = "sh"
        args = "ls"
    }
    action "end" {
        needs = "e"
        uses = "sh"
        args = "ls"
    }
    """)
    wf = Workflow('/tmp/test_folder/a.workflow')
    wf.parse()

    # Without dependencies the filtered action stands alone as root.
    filtered = Workflow.filter_action(wf, 'e')
    self.assertSetEqual(filtered.root, {'e'})
    self.assertDictEqual(
        filtered.action,
        {'e': {'needs': [], 'uses': 'sh', 'args': ['ls'], 'name': 'e',
               'next': set()}})

    filtered = Workflow.filter_action(wf, 'd')
    self.assertSetEqual(filtered.root, {'d'})
    self.assertDictEqual(
        filtered.action,
        {'d': {'needs': [], 'uses': 'sh', 'args': ['ls'], 'name': 'd',
               'next': set()}})

    # With dependencies, everything upstream of the action is retained.
    filtered = Workflow.filter_action(wf, 'e', with_dependencies=True)
    self.assertSetEqual(filtered.root, {'b', 'a', 'c'})
    expected = {
        'a': {'uses': 'sh', 'args': ['ls'], 'name': 'a', 'next': {'e'}},
        'b': {'uses': 'sh', 'args': ['ls'], 'name': 'b', 'next': {'e'}},
        'c': {'uses': 'sh', 'args': ['ls'], 'name': 'c', 'next': {'d'}},
        'd': {'needs': ['c'], 'uses': 'sh', 'args': ['ls'], 'name': 'd',
              'next': {'e'}},
        'e': {'needs': ['d', 'b', 'a'], 'uses': 'sh', 'args': ['ls'],
              'name': 'e', 'next': set()},
    }
    self.assertDictEqual(filtered.action, expected)

    filtered = Workflow.filter_action(wf, 'd', with_dependencies=True)
    self.assertSetEqual(filtered.root, {'c'})
    expected = {
        'c': {'uses': 'sh', 'args': ['ls'], 'name': 'c', 'next': {'d'}},
        'd': {'needs': ['c'], 'uses': 'sh', 'args': ['ls'], 'name': 'd',
              'next': set()},
    }
    self.assertDictEqual(filtered.action, expected)