def run(self, action, skip_clone, skip_pull, skip, workspace, reuse,
        dry_run, parallel, with_dependencies, runtime,
        skip_secrets_prompt=False):
    """Run the workflow or a specific action.

    Args:
        action(str): Name of a single action to run (None runs all).
        skip_clone(bool): True if cloning actions has to be skipped.
        skip_pull(bool): True if pulling container images has to be skipped.
        skip(tuple): Actions to be skipped.
        workspace(str): Location of the workspace.
        reuse(bool): True if existing containers are to be reused.
        dry_run(bool): True if the workflow is being dry-run.
        parallel(bool): True if actions are to be executed in parallel.
        with_dependencies(bool): True to also run dependencies of `action`.
        runtime(str): Name of the container runtime being used.
        skip_secrets_prompt(bool): True to skip prompting for missing
            secrets. (Default value = False)

    Returns:
        None
    """
    new_wf = deepcopy(self.wf)

    if skip:
        new_wf = Workflow.skip_actions(self.wf, skip)

    if action:
        # FIX: filter from new_wf (previously self.wf) so that a preceding
        # skip_actions result is not silently discarded if both `skip` and
        # `action` are ever supplied together.
        new_wf = Workflow.filter_action(new_wf, action, with_dependencies)

    new_wf.check_for_unreachable_actions(skip)

    WorkflowRunner.check_secrets(new_wf, dry_run, skip_secrets_prompt)
    WorkflowRunner.download_actions(new_wf, dry_run, skip_clone, self.wid)
    WorkflowRunner.instantiate_runners(runtime, new_wf, workspace, dry_run,
                                       skip_pull, self.wid)

    for s in new_wf.get_stages():
        WorkflowRunner.run_stage(runtime, new_wf, s, reuse, parallel)
def test_format_command(self):
    """A command supplied as a string or as a list must both normalize
    to the same argv-style list."""
    expected = ["docker", "version"]
    for raw_cmd in (u"docker version", ["docker", "version"]):
        self.assertEqual(Workflow.format_command(raw_cmd), expected)
def run_workflow(**kwargs):
    """Resolve the workflow file, run optional injected pre/post workflows
    around the main one, and re-run the `on_failure` action if the main
    run exits non-zero.

    Args:
        **kwargs: CLI options (wfile, action, skip, parallel,
            with_dependencies, on_failure, ...) forwarded to
            WorkflowRunner.run().

    Returns:
        None
    """
    kwargs['wfile'] = pu.find_default_wfile(kwargs['wfile'])

    log.info('Found and running workflow at ' + kwargs['wfile'])
    # Initialize a Worklow. During initialization all the validation
    # takes place automatically.
    wf = Workflow(kwargs['wfile'])
    wf_runner = WorkflowRunner(wf)

    # Check for injected actions
    pre_wfile = os.environ.get('POPPER_PRE_WORKFLOW_PATH')
    post_wfile = os.environ.get('POPPER_POST_WORKFLOW_PATH')

    # Saving workflow instance for signal handling
    popper.cli.interrupt_params['parallel'] = kwargs['parallel']

    if kwargs['parallel']:
        if sys.version_info[0] < 3:
            log.fail('--parallel is only supported on Python3')
        log.warning("Using --parallel may result in interleaved output. "
                    "You may use --quiet flag to avoid confusion.")

    if kwargs['with_dependencies'] and (not kwargs['action']):
        log.fail('`--with-dependencies` can be used only with '
                 'action argument.')

    if kwargs['skip'] and kwargs['action']:
        log.fail('`--skip` can\'t be used when action argument '
                 'is passed.')

    on_failure = kwargs.pop('on_failure')
    wfile = kwargs.pop('wfile')

    try:
        if pre_wfile:
            pre_wf = Workflow(pre_wfile)
            pre_wf_runner = WorkflowRunner(pre_wf)
            pre_wf_runner.run(**kwargs)

        wf_runner.run(**kwargs)

        if post_wfile:
            post_wf = Workflow(post_wfile)
            # FIX: previously this rebound `pre_wf_runner`, an error-prone
            # name reuse; give the post-workflow runner its own name.
            post_wf_runner = WorkflowRunner(post_wf)
            post_wf_runner.run(**kwargs)
    except SystemExit as e:
        if (e.code != 0) and on_failure:
            kwargs['skip'] = list()
            kwargs['action'] = on_failure
            wf_runner.run(**kwargs)
        else:
            raise

    if kwargs['action']:
        log.info('Action "{}" finished successfully.'.format(
            kwargs['action']))
    else:
        log.info('Workflow "{}" finished successfully.'.format(wfile))
def cli(ctx, wfile, skip, colors): """Creates a graph in the .dot format representing the workflow. """ # Args: # ctx(Popper.cli.context): For process inter-command communication # context is used.For reference visit # https://click.palletsprojects.com/en/7.x/commands # wfile(str): Name of the file containing definition of workflow. # skip(tuple): List of steps that are to be skipped. # colors(bool): Flag for colors. # Returns: # None def add_to_graph(dot_str, wf, parent, children, node_attrs, stage_edges): """Recursively goes over the children ("next" attribute) of the given parent, adding an edge from parent to children Args: dot_str(str): The intermediate string to which further nodes are to be added. wf(popper.parser.workflow): Instance of the workflow class. parent(str): Step Identifier. children(list/set): The node that is to be attached as a children. node_attrs(str): These are the attributes of the node of the graph. stage_edges(set): Intermediate sets containing the nodes and edges. Returns: str: The string containing nodes and their description. """ for n in children: edge = f' "{parent}" -> "{n}";\n' if edge in stage_edges: continue dot_str += edge + f' "{n}" [{node_attrs}];\n' stage_edges.add(edge) for M in wf.steps[n].get('next', []): dot_str = add_to_graph(dot_str, wf, n, [M], node_attrs, stage_edges) return dot_str wf = Workflow.new(wfile) wf.parse() wf = Workflow.skip_steps(wf, skip) wf.check_for_unreachable_steps() node_attrs = ('shape=box, style="filled{}", fillcolor=transparent{}') wf_attr = node_attrs.format(',rounded', ',color=red' if colors else '') act_attr = node_attrs.format('', ',color=cyan' if colors else '') dot_str = add_to_graph("", wf, wf.name, wf.root, act_attr, set()) dot_str += f' "{wf.name}" [{wf_attr}];\n' log.info("digraph G { graph [bgcolor=transparent];\n" + dot_str + "}\n")
def run(self, action, skip_clone, skip_pull, skip, workspace, reuse,
        dry_run, parallel, with_dependencies, engine,
        skip_secrets_prompt=False):
    """Run the workflow or a specific action.

    Args:
        action(str): Name of particular action being executed from
            workflow.
        skip_clone(bool): True if cloning action has to be skipped.
        skip_pull(bool): True if pulling action has to be skipped.
        skip(tuple): Tuple containing the actions to be skipped.
        workspace(str): Location of the workspace.
        reuse(bool): True if existing containers are to be reused.
        dry_run(bool): True if workflow flag is being dry-run.
        parallel(bool): True if actions are to be executed in parallel.
        with_dependencies(bool): True if with-dependencies flag is passed
            as an argument.
        engine(str): Name of the run time being used in workflow.
        skip_secrets_prompt(bool): True if part of the workflow has to be
            skipped.(Default value = False)

    Returns:
        None
    """
    new_wf = deepcopy(self.wf)

    if skip:
        new_wf = Workflow.skip_actions(self.wf, skip)

    if action:
        # NOTE(review): this filters from self.wf, so a preceding
        # skip_actions result would be discarded; the CLI forbids
        # combining --skip with an action argument, which makes this
        # safe today — confirm before allowing both.
        new_wf = Workflow.filter_action(self.wf, action, with_dependencies)

    new_wf.check_for_unreachable_actions(skip)

    WorkflowRunner.check_secrets(new_wf, dry_run, skip_secrets_prompt)
    WorkflowRunner.download_actions(new_wf, dry_run, skip_clone, self.wid)
    WorkflowRunner.instantiate_runners(engine, new_wf, workspace, dry_run,
                                       skip_pull, self.wid)

    # Stages are sets of actions that can run concurrently.
    for s in new_wf.get_stages():
        WorkflowRunner.run_stage(engine, new_wf, s, reuse, parallel)
def run_pipeline(action, wfile, skip_clone, skip_pull, skip, workspace,
                 reuse, dry_run, parallel, with_dependencies, on_failure):
    """Instantiate and run a workflow, re-running the `on_failure` action
    when the main run exits with a non-zero status.

    Args:
        action(str): Name of a single action to run (None runs all).
        wfile(str): Path to the workflow file.
        skip_clone(bool): True to skip cloning actions.
        skip_pull(bool): True to skip pulling container images.
        skip(tuple): Actions to be skipped.
        workspace(str): Location of the workspace.
        reuse(bool): True to reuse existing containers.
        dry_run(bool): True to do a dry run.
        parallel(bool): True to execute actions in parallel.
        with_dependencies(bool): True to also run dependencies of `action`.
        on_failure(str): Action to run if the workflow fails.

    Returns:
        None
    """
    # Initialize a Worklow. During initialization all the validation
    # takes place automatically.
    wf = Workflow(wfile)
    pipeline = WorkflowRunner(wf)

    # Saving workflow instance for signal handling
    popper.cli.interrupt_params['parallel'] = parallel

    if reuse:
        log.warn("Using --reuse ignores any changes made to an action's logic "
                 "or to an action block in the .workflow file.")

    if parallel:
        if sys.version_info[0] < 3:
            log.fail('--parallel is only supported on Python3')
        log.warn("Using --parallel may result in interleaved output. "
                 "You may use --quiet flag to avoid confusion.")

    try:
        pipeline.run(action, skip_clone, skip_pull, skip, workspace, reuse,
                     dry_run, parallel, with_dependencies)
    except SystemExit as e:
        # FIX: compare with `!=` instead of `is not` — identity comparison
        # against an int relies on CPython's small-integer cache and is not
        # a correctness guarantee for arbitrary exit codes.
        if (e.code != 0) and on_failure:
            pipeline.run(on_failure, skip_clone, skip_pull, list(),
                         workspace, reuse, dry_run, parallel,
                         with_dependencies)
        else:
            raise

    if action:
        log.info('Action "{}" finished successfully.'.format(action))
    else:
        log.info('Workflow finished successfully.')
def setUp(self): os.makedirs('/tmp/test_folder/gha-demo') # log.setLevel('CRITICAL') git.Repo.clone_from( 'https://github.com/JayjeetAtGithub/popper-scaffold-workflow', '/tmp/test_folder/gha-demo') os.chdir('/tmp/test_folder/gha-demo') self.wf_one = Workflow('/tmp/test_folder/gha-demo/main.workflow') self.wf_two = Workflow('/tmp/test_folder/gha-demo/main.workflow') self.wf_three = Workflow('/tmp/test_folder/gha-demo/main.workflow') self.runner_one = WorkflowRunner(self.wf_one) self.runner_one.wid = '1234' self.runner_two = WorkflowRunner(self.wf_two) self.runner_two.wid = '5678' self.runner_three = WorkflowRunner(self.wf_three) self.runner_three.wid = '3456'
def test_validate_workflow_block(self):
    """validate_workflow_block must exit on: multiple workflow blocks, a
    missing workflow block, an unknown workflow attribute (`runs`), and a
    workflow block lacking `resolves`."""
    # Two workflow blocks -> invalid.
    self.create_workflow_file("""
    workflow "sample workflow 1" {
        resolves = ["a"]
    }

    workflow "sample workflow 2" {
        resolves = ["a"]
    }
    """)
    wf = Workflow('/tmp/test_folder/a.workflow')
    self.assertRaises(SystemExit, wf.validate_workflow_block)

    # No workflow block at all -> invalid.
    self.create_workflow_file("""
    action "a" {
        uses = "sh"
    }
    """)
    wf = Workflow('/tmp/test_folder/a.workflow')
    self.assertRaises(SystemExit, wf.validate_workflow_block)

    # `runs` is not a valid workflow-block attribute -> invalid.
    self.create_workflow_file("""
    workflow "sample workflow 1" {
        resolves = ["a"]
        runs = ["sh", "-c", "ls"]
    }

    action "a" {
        uses = ["sh"]
    }
    """)
    wf = Workflow('/tmp/test_folder/a.workflow')
    self.assertRaises(SystemExit, wf.validate_workflow_block)

    # Workflow block without `resolves` -> invalid.
    self.create_workflow_file("""
    workflow "sample workflow 1" {
        on = "push"
    }

    action "a" {
        uses = ["sh"]
    }
    """)
    wf = Workflow('/tmp/test_folder/a.workflow')
    self.assertRaises(SystemExit, wf.validate_workflow_block)
def __init__(self, wfile, workspace, dry_run, reuse, parallel,
             skip_secrets_prompt=False):
    """Create a workflow runner and validate the workflow file.

    Args:
        wfile(str): Workflow file path (a project default is looked up
            when not given).
        workspace(str): Location of the workspace.
        dry_run(bool): True if the workflow is being dry-run.
        reuse(bool): True if existing containers are to be reused.
        parallel(bool): True if actions are to be executed in parallel.
        skip_secrets_prompt(bool): True to skip prompting for missing
            secrets. (Default value = False)
    """
    wfile = pu.find_default_wfile(wfile)

    self.workspace = workspace
    self.dry_run = dry_run
    self.reuse = reuse
    self.parallel = parallel
    self.skip_secrets_prompt = skip_secrets_prompt
    # Shared location where referenced actions are cloned.
    self.actions_cache_path = os.path.join('/', 'tmp', 'actions')

    # Initialize a Worklow. During initialization all the validation
    # takes place automatically.
    self.wf = Workflow(wfile)
    self.check_secrets()
    log.debug('workflow:\n{}'.format(
        yaml.dump(self.wf, default_flow_style=False, default_style='')))
def test_check_for_empty_workflow(self):
    """`resolves` entries that match no defined action leave the workflow
    effectively empty, which must abort with SystemExit."""
    self.create_workflow_file("""
    workflow "samples" {
        resolves = ["a1", "a2"]
    }

    action "b" {
        uses = "sh"
    }

    action "c" {
        uses = "sh"
    }
    """)
    workflow = Workflow('/tmp/test_folder/a.workflow')
    workflow.normalize()
    # Force resolves to names that do not exist among the actions.
    workflow.resolves = ["a1", "a2"]
    self.assertRaises(SystemExit, workflow.check_for_empty_workflow)
def setUp(self):
    """Write a minimal host (`sh`) workflow and instantiate its runners
    with the docker engine under workflow id '12345'."""
    os.makedirs('/tmp/test_folder')
    os.chdir('/tmp/test_folder')
    log.setLevel('CRITICAL')
    workflow = """
    workflow "sample" {
        resolves = "sample action"
    }

    action "sample action" {
        uses = "sh"
        args = ["echo", "Hello"]
    }
    """
    pu.write_file('/tmp/test_folder/a.workflow', workflow)
    self.wf = Workflow('/tmp/test_folder/a.workflow')
    self.wf.parse()
    WorkflowRunner.instantiate_runners('docker', self.wf,
                                       '/tmp/test_folder', False, False,
                                       '12345')
def test_download_actions(self):
    """download_actions clones referenced actions into the default cache
    or into POPPER_CACHE_DIR, and fails with skip-clone when an action is
    not already cached."""
    pu.write_file(
        '/tmp/test_folder/a.workflow', """
    workflow "sample" {
        resolves = "a"
    }

    action "a" {
        uses = "popperized/bin/sh@master"
    }

    action "b" {
        uses = "popperized/ansible@master"
    }
    """)
    wf = Workflow('/tmp/test_folder/a.workflow')
    wf.parse()

    # Download actions in the default cache directory.
    WorkflowRunner.download_actions(wf, False, False, '12345')
    self.assertEqual(
        os.path.exists(os.environ['HOME'] +
                       '/.cache/.popper/actions/12345/github.com'), True)

    # Download actions in custom cache directory
    os.environ['POPPER_CACHE_DIR'] = '/tmp/somedir'
    WorkflowRunner.download_actions(wf, False, False, '12345')
    self.assertEqual(
        os.path.exists('/tmp/somedir/actions/12345/github.com'), True)
    os.environ.pop('POPPER_CACHE_DIR')

    # Release resources.
    shutil.rmtree('/tmp/somedir')
    shutil.rmtree(os.environ['HOME'] +
                  '/.cache/.popper/actions/12345/github.com')

    # Test with skipclone flag when action not present in cache.
    self.assertRaises(SystemExit, WorkflowRunner.download_actions, wf,
                      False, True, '12345')
def setUp(self):
    """Parse a sample remote-action workflow, download the action, build
    vagrant runners and prime the vagrant cache for id '12345'."""
    os.makedirs('/tmp/test_folder')
    os.chdir('/tmp/test_folder')
    log.setLevel('CRITICAL')
    workflow = """
    workflow "sample" {
        resolves = "sample action"
    }

    action "sample action" {
        uses = "popperized/bin/sh@master"
        args = ["echo", "Hello"]
    }
    """
    pu.write_file('/tmp/test_folder/a.workflow', workflow)
    self.wf = Workflow('/tmp/test_folder/a.workflow')
    self.wf.parse()
    WorkflowRunner.download_actions(self.wf, False, False, '12345')
    WorkflowRunner.instantiate_runners('vagrant', self.wf,
                                       '/tmp/test_folder', False, False,
                                       '12345')
    self.runner = self.wf.action['sample action']['runner']
    VagrantRunner.setup_vagrant_cache('12345')
def import_from_repo(path, project_root):
    """Import a workflow file (and any local `./` actions it references)
    from a remote repository into the given project root.

    Args:
        path(str): URL of the form <url>/<user>/<repo>[/folder[/wf.workflow]].
        project_root(str): Directory into which the workflow and its local
            actions are copied.

    Returns:
        None
    """
    parts = scm.get_parts(path)
    if len(parts) < 3:
        # FIX: the message previously used a backslash line continuation
        # inside the string literal, embedding a run of indentation spaces
        # in the user-facing text; collapsed to a single clean line.
        log.fail(
            'Required url format: <url>/<user>/<repo>[/folder[/wf.workflow]]')

    url, service, user, repo, _, version = scm.parse(path)

    cloned_project_dir = os.path.join("/tmp", service, user, repo)
    scm.clone(url, user, repo, os.path.dirname(cloned_project_dir), version)

    if len(parts) == 3:
        # Only <url>/<user>/<repo> was given: look in the two conventional
        # locations for the workflow file.
        ptw_one = os.path.join(cloned_project_dir, "main.workflow")
        ptw_two = os.path.join(cloned_project_dir, ".github/main.workflow")
        if os.path.isfile(ptw_one):
            path_to_workflow = ptw_one
        elif os.path.isfile(ptw_two):
            path_to_workflow = ptw_two
        else:
            log.fail("Unable to find a .workflow file")
    elif len(parts) >= 4:
        # A sub-folder (and possibly a file) was given; strip any @version
        # suffix and default to main.workflow inside folders.
        path_to_workflow = os.path.join(
            cloned_project_dir, '/'.join(parts[3:])).split("@")[0]
        if not os.path.basename(path_to_workflow).endswith('.workflow'):
            path_to_workflow = os.path.join(path_to_workflow,
                                            'main.workflow')
        if not os.path.isfile(path_to_workflow):
            log.fail("Unable to find a .workflow file")

    shutil.copy(path_to_workflow, project_root)
    log.info("Successfully imported from {}".format(path_to_workflow))

    # FIX: removed a `with open(path_to_workflow)` wrapper whose file
    # handle was never read — Workflow opens and parses the file itself.
    wf = Workflow(path_to_workflow)

    # Copy each locally-referenced ("./<dir>/...") action directory into
    # the project root so the imported workflow stays self-contained.
    action_paths = list()
    for _, a_block in wf.actions:
        if a_block['uses'].startswith("./"):
            action_paths.append(a_block['uses'])

    action_paths = set([a.split("/")[1] for a in action_paths])
    for a in action_paths:
        copy_tree(os.path.join(cloned_project_dir, a),
                  os.path.join(project_root, a))
        log.info("Copied {} to {}...".format(
            os.path.join(cloned_project_dir, a), project_root))
def test_get_workflow_env(self):
    """get_workflow_env must expose both the GITHUB_* and the mirrored
    POPPER_* variable families for the given workflow and workspace."""
    pu.write_file(
        '/tmp/test_folder/a.workflow', """
    workflow "sample" {
        resolves = "a"
    }

    action "a" {
        uses = "sh"
    }
    """)
    wf = Workflow('/tmp/test_folder/a.workflow')
    wf.parse()
    env = WorkflowRunner.get_workflow_env(wf, '/tmp/test_folder')
    self.assertDictEqual(
        env, {
            'HOME': os.environ['HOME'],
            'GITHUB_WORKFLOW': 'sample',
            'GITHUB_ACTION': '',
            'GITHUB_ACTOR': 'popper',
            'GITHUB_REPOSITORY': 'unknown',
            'GITHUB_EVENT_NAME': 'push',
            'GITHUB_EVENT_PATH': '/tmp/github_event.json',
            'GITHUB_WORKSPACE': '/tmp/test_folder',
            'GITHUB_SHA': 'unknown',
            'GITHUB_REF': 'unknown',
            'POPPER_WORKFLOW': 'sample',
            'POPPER_ACTION': '',
            'POPPER_ACTOR': 'popper',
            'POPPER_REPOSITORY': 'unknown',
            'POPPER_EVENT_NAME': 'push',
            'POPPER_EVENT_PATH': '/tmp/github_event.json',
            'POPPER_WORKSPACE': '/tmp/test_folder',
            'POPPER_SHA': 'unknown',
            'POPPER_REF': 'unknown'
        })
def test_normalize(self):
    """normalize coerces scalar attributes (resolves, needs, args,
    secrets) into lists and fills in defaults such as `on = push`."""
    self.create_workflow_file("""
    workflow "sample workflow" {
        resolves = "a"
    }

    action "a" {
        needs = "b"
        uses = "popperized/bin/npm@master"
        args = "npm --version"
        secrets = "SECRET_KEY"
    }
    """)
    wf = Workflow('/tmp/test_folder/a.workflow')
    wf.normalize()

    self.assertEqual(wf.resolves, ['a'])
    self.assertEqual(wf.name, 'sample workflow')
    self.assertEqual(wf.on, 'push')
    self.assertDictEqual(wf.props, dict())

    action_a = wf.get_action('a')
    self.assertEqual(action_a['name'], 'a')
    self.assertEqual(action_a['needs'], ['b'])
    # A string `args` value is split into an argv list.
    self.assertEqual(action_a['args'], ['npm', '--version'])
    self.assertEqual(action_a['secrets'], ['SECRET_KEY'])
def test_check_secrets(self):
    """check_secrets passes when all secrets are set, prompts via getpass
    when one is missing outside CI, and aborts when missing inside CI."""
    os.environ['SECRET_ONE'] = '1234'
    os.environ['SECRET_TWO'] = '5678'
    pu.write_file(
        '/tmp/test_folder/a.workflow', """
    workflow "sample" {
        resolves = "a"
    }

    action "a" {
        uses = "popperized/bin/sh@master"
        args = ["ls -ltr"]
        secrets = ["SECRET_ONE", "SECRET_TWO"]
    }
    """)
    wf = Workflow('/tmp/test_folder/a.workflow')
    wf.parse()

    # All secrets present: no combination of the dry-run / skip-prompt
    # flags should fail.
    WorkflowRunner.check_secrets(wf, False, False)
    WorkflowRunner.check_secrets(wf, True, False)
    WorkflowRunner.check_secrets(wf, False, True)

    os.environ.pop('SECRET_ONE')
    pu.write_file(
        '/tmp/test_folder/a.workflow', """
    workflow "sample" {
        resolves = "a"
    }

    action "a" {
        uses = "popperized/bin/sh@master"
        args = ["ls -ltr"]
        secrets = ["SECRET_ONE", "SECRET_TWO"]
    }
    """)
    wf = Workflow('/tmp/test_folder/a.workflow')
    wf.parse()

    # Outside CI a missing secret is read interactively (getpass is
    # mocked here); presumably this re-populates SECRET_ONE in the
    # environment, which the second pop() below relies on.
    os.environ['CI'] = 'false'
    with patch('getpass.getpass', return_value='1234') as fake_input:
        WorkflowRunner.check_secrets(wf, False, False)

    # Inside CI a missing secret must abort the run.
    os.environ['CI'] = 'true'
    os.environ.pop('SECRET_ONE')
    self.assertRaises(SystemExit, WorkflowRunner.check_secrets, wf, False,
                      False)
def cli(ctx, wfile, skip, recursive, colors):
    """ Creates a graph in the .dot format representing the workflow. """
    def add_to_graph(dot_str, wf, parent, children, node_attrs, stage_edges):
        """Recursively goes over the children ("next" attribute) of the
        given parent, adding an edge from parent to children
        """
        for n in children:
            edge = ' "{}" -> "{}";\n'.format(parent, n)
            # Skip edges already emitted so shared children are not
            # duplicated in the dot output.
            if edge in stage_edges:
                continue
            dot_str += edge + ' "{}" [{}];\n'.format(n, node_attrs)
            stage_edges.add(edge)
            for M in wf.get_action(n).get('next', []):
                dot_str = add_to_graph(dot_str, wf, n, [M], node_attrs,
                                       stage_edges)
        return dot_str

    wfile_list = list()

    # With --recursive, graph every workflow file found under the current
    # tree; otherwise just the given (or default) one.
    if recursive:
        wfile_list = pu.find_recursive_wfile()
    else:
        wfile_list.append(pu.find_default_wfile(wfile))

    for wfile in wfile_list:
        wf = Workflow(wfile)
        wf.parse()
        wf = Workflow.skip_actions(wf, skip)
        wf.check_for_unreachable_actions()
        # Node styling: workflow node rounded (red with --colors), action
        # nodes plain boxes (cyan with --colors).
        node_attrs = ('shape=box, style="filled{}", fillcolor=transparent{}')
        wf_attr = node_attrs.format(',rounded',
                                    ',color=red' if colors else '')
        act_attr = node_attrs.format('', ',color=cyan' if colors else '')
        dot_str = add_to_graph("", wf, wf.name, wf.root, act_attr, set())
        dot_str += ' "{}" [{}];\n'.format(wf.name, wf_attr)
        log.info("digraph G { graph [bgcolor=transparent];\n" + dot_str +
                 "}\n")
def test_scaffold(self):
    """`popper scaffold` writes a two-step example workflow that parses
    into the expected step graph and then runs to completion."""
    wf_dir = tempfile.mkdtemp()
    runner = CliRunner()
    file_loc = f'{wf_dir}/wf.yml'

    result = runner.invoke(scaffold.cli, ['-f', file_loc])

    self.assertEqual(result.exit_code, 0)
    self.assertTrue(os.path.isfile(file_loc))

    wf = Workflow.new(file_loc)
    self.assertDictEqual(
        wf.steps, {
            '1': {
                'uses': 'popperized/bin/sh@master',
                'args': ['ls'],
                'name': '1',
                'next': {'2'}
            },
            '2': {
                'uses': 'docker://alpine:3.11',
                'args': ['ls'],
                'name': '2',
                'needs': ['1']
            }
        })

    # Running the scaffolded workflow must succeed and log both steps.
    with self.assertLogs('popper') as test_logger:
        result = runner.invoke(run.cli, ['-f', file_loc])
        self.assertEqual(result.exit_code, 0)
        self.assertTrue(len(test_logger.output))
        self.assertTrue(
            "INFO:popper:Step '1' ran successfully !" in test_logger.output)
        self.assertTrue(
            "INFO:popper:Step '2' ran successfully !" in test_logger.output)
def test_check_duplicate_actions(self):
    """check_duplicate_actions aborts when an action name is defined
    twice and passes when all names are unique."""
    self.create_workflow_file("""
    workflow "sample" {
        resolves = ["a", "b"]
    }

    action "a" {
        uses = "sh"
    }

    action "b" {
        uses = "sh"
    }

    action "a" {
        uses = "sh"
    }
    """)
    workflow = Workflow('/tmp/test_folder/a.workflow')
    self.assertRaises(SystemExit, workflow.check_duplicate_actions)

    self.create_workflow_file("""
    workflow "sample" {
        resolves = ["a", "b"]
    }

    action "a" {
        uses = "sh"
    }

    action "b" {
        uses = "sh"
    }
    """)
    workflow = Workflow('/tmp/test_folder/a.workflow')
    # Unique names: must not raise.
    workflow.check_duplicate_actions()
def test_instantiate_runners(self):
    """instantiate_runners assigns HostRunner for `sh` and local `./`
    actions regardless of engine, and the engine-specific runner
    (singularity/docker/vagrant) for remote actions.

    FIX: removed three `env = WorkflowRunner.get_workflow_env(...)`
    assignments whose value was never read.
    """
    pu.write_file(
        '/tmp/test_folder/a.workflow', """
    workflow "sample" {
        resolves = "a"
    }

    action "a" {
        uses = "sh"
        args = "ls"
    }
    """)
    wf = Workflow('/tmp/test_folder/a.workflow')
    wf.parse()
    WorkflowRunner.instantiate_runners('docker', wf, '/tmp/test_folder',
                                       False, False, '12345')
    self.assertIsInstance(wf.action['a']['runner'], HostRunner)

    os.makedirs('/tmp/test_folder/actions/sample')
    pu.write_file('/tmp/test_folder/actions/sample/entrypoint.sh')
    pu.write_file('/tmp/test_folder/actions/sample/README.md')
    pu.write_file(
        '/tmp/test_folder/a.workflow', """
    workflow "sample" {
        resolves = "a"
    }

    action "a" {
        uses = "./actions/sample"
    }
    """)
    wf = Workflow('/tmp/test_folder/a.workflow')
    wf.parse()
    # A local action runs on the host even when an engine is selected.
    WorkflowRunner.instantiate_runners('singularity', wf,
                                       '/tmp/test_folder', False, False,
                                       '12345')
    self.assertIsInstance(wf.action['a']['runner'], HostRunner)

    pu.write_file(
        '/tmp/test_folder/a.workflow', """
    workflow "sample" {
        resolves = "a"
    }

    action "a" {
        uses = "popperized/bin/sh@master"
    }
    """)
    wf = Workflow('/tmp/test_folder/a.workflow')
    wf.parse()
    WorkflowRunner.instantiate_runners('singularity', wf,
                                       '/tmp/test_folder', False, False,
                                       '12345')
    self.assertIsInstance(wf.action['a']['runner'], SingularityRunner)

    WorkflowRunner.instantiate_runners('docker', wf, '/tmp/test_folder',
                                       False, False, '12345')
    self.assertIsInstance(wf.action['a']['runner'], DockerRunner)

    WorkflowRunner.instantiate_runners('vagrant', wf, '/tmp/test_folder',
                                       False, False, '12345')
    self.assertIsInstance(wf.action['a']['runner'], VagrantRunner)
def test_filter_action(self):
    """filter_action keeps only the requested action (needs/next cleared)
    or, with with_dependencies=True, the action plus its transitive
    upstream dependency subgraph."""
    self.create_workflow_file("""
    workflow "example" {
        resolves = "end"
    }

    action "a" {
        uses = "sh"
        args = "ls"
    }

    action "b" {
        uses = "sh"
        args = "ls"
    }

    action "c" {
        uses = "sh"
        args = "ls"
    }

    action "d" {
        needs = ["c"]
        uses = "sh"
        args = "ls"
    }

    action "e" {
        needs = ["d", "b", "a"]
        uses = "sh"
        args = "ls"
    }

    action "end" {
        needs = "e"
        uses = "sh"
        args = "ls"
    }
    """)
    wf = Workflow('/tmp/test_folder/a.workflow')
    wf.parse()

    # Single action, no dependencies: needs/next emptied, node is root.
    changed_wf = Workflow.filter_action(wf, 'e')
    self.assertSetEqual(changed_wf.root, {'e'})
    self.assertDictEqual(
        changed_wf.action, {
            'e': {
                'needs': [],
                'uses': 'sh',
                'args': ['ls'],
                'name': 'e',
                'next': set()
            }
        })

    changed_wf = Workflow.filter_action(wf, 'd')
    self.assertSetEqual(changed_wf.root, {'d'})
    self.assertDictEqual(
        changed_wf.action, {
            'd': {
                'needs': [],
                'uses': 'sh',
                'args': ['ls'],
                'name': 'd',
                'next': set()
            }
        })

    # With dependencies: the entire upstream graph of 'e' is retained and
    # its sources become the roots.
    changed_wf = Workflow.filter_action(wf, 'e', with_dependencies=True)
    self.assertSetEqual(changed_wf.root, {'b', 'a', 'c'})
    self.assertDictEqual(
        changed_wf.action, {
            'a': {
                'uses': 'sh',
                'args': ['ls'],
                'name': 'a',
                'next': {'e'}
            },
            'b': {
                'uses': 'sh',
                'args': ['ls'],
                'name': 'b',
                'next': {'e'}
            },
            'c': {
                'uses': 'sh',
                'args': ['ls'],
                'name': 'c',
                'next': {'d'}
            },
            'd': {
                'needs': ['c'],
                'uses': 'sh',
                'args': ['ls'],
                'name': 'd',
                'next': {'e'}
            },
            'e': {
                'needs': ['d', 'b', 'a'],
                'uses': 'sh',
                'args': ['ls'],
                'name': 'e',
                'next': set()
            }
        })

    changed_wf = Workflow.filter_action(wf, 'd', with_dependencies=True)
    self.assertSetEqual(changed_wf.root, {'c'})
    self.assertDictEqual(
        changed_wf.action, {
            'c': {
                'uses': 'sh',
                'args': ['ls'],
                'name': 'c',
                'next': {'d'}
            },
            'd': {
                'needs': ['c'],
                'uses': 'sh',
                'args': ['ls'],
                'name': 'd',
                'next': set()
            }
        })
class TestHostRunner(unittest.TestCase):
    """Tests for the runner that executes `sh` actions directly on the
    host."""

    def setUp(self):
        os.makedirs('/tmp/test_folder')
        os.chdir('/tmp/test_folder')
        log.setLevel('CRITICAL')
        workflow = """
        workflow "sample" {
            resolves = "sample action"
        }

        action "sample action" {
            uses = "sh"
            args = ["echo", "Hello"]
        }
        """
        pu.write_file('/tmp/test_folder/a.workflow', workflow)
        self.wf = Workflow('/tmp/test_folder/a.workflow')
        self.wf.parse()
        WorkflowRunner.instantiate_runners('docker', self.wf,
                                           '/tmp/test_folder', False,
                                           False, '12345')

    def tearDown(self):
        os.chdir('/tmp')
        shutil.rmtree('/tmp/test_folder')
        log.setLevel('NOTSET')

    def test_run(self):
        """reuse=True is rejected (there is no container to reuse on the
        host); a plain run succeeds."""
        runner = self.wf.action['sample action']['runner']
        self.assertRaises(SystemExit, runner.run, reuse=True)
        runner.run()

    def test_host_prepare(self):
        """host_prepare builds the command line from `runs`/`args`,
        resolving paths relative to the workspace and to local `./`
        actions, defaulting to entrypoint.sh when `runs` is absent."""
        runner = self.wf.action['sample action']['runner']
        runner.action['runs'] = ['script1']
        runner.action['args'] = ['github.com']
        cmd = runner.host_prepare()
        self.assertEqual(cmd, ['/tmp/test_folder/script1', 'github.com'])

        os.makedirs('/tmp/test_folder/action/myaction')
        runner.action['uses'] = './action/myaction'
        runner.action['runs'] = ['script']
        runner.action['args'] = ['arg1', 'arg2']
        cmd = runner.host_prepare()
        self.assertEqual(
            cmd,
            ['/tmp/test_folder/./action/myaction/./script', 'arg1', 'arg2'])

        os.chdir('/tmp/test_folder')
        runner.action.pop('runs')
        cmd = runner.host_prepare()
        self.assertEqual(cmd, [
            '/tmp/test_folder/./action/myaction/./entrypoint.sh', 'arg1',
            'arg2'
        ])

    def test_host_start(self):
        """host_start runs the given command, returning its exit code and
        producing the expected side effect in the workspace."""
        runner = self.wf.action['sample action']['runner']
        runner.prepare_environment(set_env=True)
        e = runner.host_start(
            ["sh", "-c", "echo 'Hello from Popper 2.x !' > popper.file"])
        self.assertEqual(e, 0)
        self.assertEqual(os.path.exists('popper.file'), True)
        runner.remove_environment()
class TestVagrantRunner(unittest.TestCase):
    """Tests for the vagrant-based runner; the vagrant-specific tests are
    skipped unless RUNTIME == 'vagrant'."""

    def setUp(self):
        os.makedirs('/tmp/test_folder')
        os.chdir('/tmp/test_folder')
        log.setLevel('CRITICAL')
        workflow = """
        workflow "sample" {
            resolves = "sample action"
        }

        action "sample action" {
            uses = "popperized/bin/sh@master"
            args = ["echo", "Hello"]
        }
        """
        pu.write_file('/tmp/test_folder/a.workflow', workflow)
        self.wf = Workflow('/tmp/test_folder/a.workflow')
        self.wf.parse()
        WorkflowRunner.download_actions(self.wf, False, False, '12345')
        WorkflowRunner.instantiate_runners('vagrant', self.wf,
                                           '/tmp/test_folder', False,
                                           False, '12345')
        self.runner = self.wf.action['sample action']['runner']
        VagrantRunner.setup_vagrant_cache('12345')

    def tearDown(self):
        os.chdir('/tmp')
        shutil.rmtree('/tmp/test_folder')
        log.setLevel('NOTSET')

    @unittest.skipIf(os.environ['RUNTIME'] != 'vagrant',
                     'Skipping vagrant tests...')
    def test_setup_vagrant_cache(self):
        """setup_vagrant_cache recreates the per-workflow cache dir."""
        cache_path = os.path.join(os.environ['HOME'],
                                  '.cache/.popper/vagrant/12345')
        shutil.rmtree(cache_path)
        self.assertEqual(os.path.exists(cache_path), False)
        VagrantRunner.setup_vagrant_cache('12345')
        self.assertEqual(os.path.exists(cache_path), True)

    @unittest.skipIf(os.environ['RUNTIME'] != 'vagrant',
                     'Skipping vagrant tests...')
    def test_vagrant_start(self):
        """vagrant_start brings up the VM defined in the given dir."""
        os.makedirs('/tmp/test_folder/test_vm')
        vagrantfile_content = """
        Vagrant.configure("2") do |config|
            config.vm.box = "ailispaw/barge"
        end
        """
        pu.write_file('/tmp/test_folder/test_vm/Vagrantfile',
                      vagrantfile_content)
        self.runner.vagrant_start('/tmp/test_folder/test_vm')
        self.assertEqual(
            self.runner.vagrant_exists('/tmp/test_folder/test_vm'), True)
        vagrant.Vagrant(root='/tmp/test_folder/test_vm').halt()
        vagrant.Vagrant(root='/tmp/test_folder/test_vm').destroy()

    @unittest.skipIf(os.environ['RUNTIME'] != 'vagrant',
                     'Skipping vagrant tests...')
    def test_vagrant_stop(self):
        """vagrant_stop halts a running VM so it no longer exists as a
        running instance."""
        os.makedirs('/tmp/test_folder/test_vm')
        vagrantfile_content = """
        Vagrant.configure("2") do |config|
            config.vm.box = "ailispaw/barge"
        end
        """
        pu.write_file('/tmp/test_folder/test_vm/Vagrantfile',
                      vagrantfile_content)
        v = vagrant.Vagrant(root='/tmp/test_folder/test_vm')
        v.up()
        self.assertEqual(
            self.runner.vagrant_exists('/tmp/test_folder/test_vm'), True)
        self.runner.vagrant_stop('/tmp/test_folder/test_vm')
        self.assertEqual(
            self.runner.vagrant_exists('/tmp/test_folder/test_vm'), False)
        vagrant.Vagrant(root='/tmp/test_folder/test_vm').destroy()

    @unittest.skipIf(os.environ['RUNTIME'] != 'vagrant',
                     'Skipping vagrant tests...')
    def test_vagrant_exists(self):
        """vagrant_exists reflects VM state across up, halt and removal."""
        os.makedirs('/tmp/test_folder/test_vm')
        vagrantfile_content = """
        Vagrant.configure("2") do |config|
            config.vm.box = "ailispaw/barge"
        end
        """
        pu.write_file('/tmp/test_folder/test_vm/Vagrantfile',
                      vagrantfile_content)
        v = vagrant.Vagrant(root='/tmp/test_folder/test_vm')
        v.up()
        self.assertEqual(
            self.runner.vagrant_exists('/tmp/test_folder/test_vm'), True)
        v.halt()
        self.assertEqual(
            self.runner.vagrant_exists('/tmp/test_folder/test_vm'), False)
        vagrant.Vagrant(root='/tmp/test_folder/test_vm').destroy()
        shutil.rmtree('/tmp/test_folder/test_vm')
        self.assertEqual(
            self.runner.vagrant_exists('/tmp/test_folder/test_vm'), False)

    @unittest.skipIf(os.environ['RUNTIME'] != 'vagrant',
                     'Skipping vagrant tests...')
    def test_vagrant_write_vagrantfile(self):
        """Every line of the written Vagrantfile must come from the
        expected template (box plus the two synced folders)."""
        self.runner.vagrant_write_vagrantfile('/tmp/test_folder/test_vm')
        required_content = """
        Vagrant.configure("2") do |config|
            config.vm.box = "ailispaw/barge"
            config.vm.synced_folder "{}", "{}"
            config.vm.synced_folder "/tmp/test_folder", "/tmp/test_folder"
        end
        """.format(os.environ['HOME'], os.environ['HOME'])
        f = open('/tmp/test_folder/test_vm/Vagrantfile')
        content = f.readlines()
        f.close()
        for line in content:
            self.assertEqual(line in required_content, True)
class TestSingularityRunner(unittest.TestCase):
    """Tests for the singularity-based runner; runtime-specific tests are
    skipped unless RUNTIME == 'singularity'.

    NOTE(review): several image references and trailing statements in this
    class were redacted at extraction time ('*****'); the affected spans
    are marked below and must be restored from version control.
    """

    def setUp(self):
        os.makedirs('/tmp/test_folder')
        os.chdir('/tmp/test_folder')
        log.setLevel('CRITICAL')
        workflow = """
        workflow "sample" {
            resolves = "sample action"
        }

        action "sample action" {
            uses = "popperized/bin/sh@master"
            args = ["echo", "Hello"]
        }
        """
        pu.write_file('/tmp/test_folder/a.workflow', workflow)
        self.wf = Workflow('/tmp/test_folder/a.workflow')
        self.wf.parse()
        WorkflowRunner.download_actions(self.wf, False, False, '12345')
        WorkflowRunner.instantiate_runners('singularity', self.wf,
                                           '/tmp/test_folder', False,
                                           False, '12345')
        self.runner = self.wf.action['sample action']['runner']
        SingularityRunner.setup_singularity_cache('12345')

    def tearDown(self):
        os.chdir('/tmp')
        shutil.rmtree('/tmp/test_folder')
        shutil.rmtree(os.path.join(os.environ['HOME'], '.cache/.popper'))
        log.setLevel('NOTSET')

    @unittest.skipIf(os.environ['RUNTIME'] != 'singularity',
                     'Skipping singularity tests...')
    def test_singularity_exists(self):
        """singularity_exists is True for an existing .sif file."""
        pu.write_file('/tmp/test_folder/testimg.sif', 'fake image file')
        self.assertEqual(
            self.runner.singularity_exists('/tmp/test_folder/testimg.sif'),
            True)
        os.remove('/tmp/test_folder/testimg.sif')

    @unittest.skipIf(os.environ['RUNTIME'] != 'singularity',
                     'Skipping singularity tests...')
    def test_singularity_rm(self):
        """singularity_rm removes the image file."""
        pu.write_file('/tmp/test_folder/testimg.sif', 'fake image file')
        self.runner.singularity_rm('/tmp/test_folder/testimg.sif')
        self.assertEqual(
            self.runner.singularity_exists('/tmp/test_folder/testimg.sif'),
            False)

    @unittest.skipIf(os.environ['RUNTIME'] != 'singularity',
                     'Skipping singularity tests...')
    def test_singularity_build_from_image(self):
        # NOTE(review): the image reference and the remainder of this test
        # were redacted ('*****'); restore from VCS before relying on it.
        self.runner.singularity_build_from_image(
            'docker://*****:*****')

    @unittest.skipIf(os.environ['RUNTIME'] != 'singularity',
                     'Skipping singularity tests...')
    def test_singularity_build_from_recipe(self):
        """Building from the cached action's recipe produces the .sif in
        the singularity cache."""
        os.chdir(
            os.path.join(
                os.environ['HOME'],
                '.cache/.popper/actions/12345/github.com/popperized/bin/sh'))
        self.runner.singularity_build_from_recipe(
            os.path.join(
                os.environ['HOME'],
                '.cache/.popper/actions/12345/github.com/popperized/bin/sh'),
            os.path.join(os.environ['HOME'],
                         '.cache/.popper/singularity/12345/testimg.sif'))
        self.assertEqual(
            os.path.exists(
                os.path.join(
                    os.environ['HOME'],
                    '.cache/.popper/singularity/12345/testimg.sif')), True)

    @unittest.skipIf(os.environ['RUNTIME'] != 'singularity',
                     'Skipping singularity tests...')
    def test_get_recipe_file(self):
        """get_recipe_file converts the action's Dockerfile into a
        Singularity recipe and fails when no Dockerfile exists."""
        os.chdir(os.environ['HOME'] +
                 '/.cache/.popper/actions/12345/github.com/popperized/bin/sh')
        file = SingularityRunner.get_recipe_file(os.getcwd(), '12345')
        self.assertEqual(
            file, os.environ['HOME'] +
            '/.cache/.popper/actions/12345/github.com/popperized/bin/sh/' +
            'Singularity.12345')
        os.remove(
            os.environ['HOME'] +
            '/.cache/.popper/actions/12345/github.com/popperized/bin/sh/' +
            'Dockerfile')
        self.assertRaises(SystemExit, SingularityRunner.get_recipe_file,
                          os.getcwd(), '12345')

    @unittest.skipIf(os.environ['RUNTIME'] != 'singularity',
                     'Skipping singularity tests...')
    def test_singularity_start(self):
        self.runner.action['runs'] = [
            "sh", "-c", "echo 'Hello from Popper 2.x !' > popper.file"
        ]
        # NOTE(review): the image reference and the assertions that
        # followed were redacted ('*****'); restore from VCS.
        self.runner.singularity_build_from_image(
            'docker://*****:*****')

    @unittest.skipIf(os.environ['RUNTIME'] != 'singularity',
                     'Skipping singularity tests...')
    def test_get_build_resources(self):
        """get_build_resources points at the cached action directory for
        repository-hosted actions."""
        res = self.runner.get_build_resources()
        self.assertTupleEqual(
            res,
            (True, 'popperized/bin/sh@master',
             os.path.join(
                 os.environ['HOME'],
                 '.cache/.popper/actions/12345/github.com/popperized/bin/sh'
             )))
        # NOTE(review): the docker:// variant of this assertion was
        # redacted ('*****'); restore from VCS.
        self.runner.action['uses'] = 'docker://*****:*****'

    @unittest.skipIf(os.environ['RUNTIME'] != 'singularity',
                     'Skipping singularity tests...')
    def test_setup_singularity_cache(self):
        """setup_singularity_cache recreates the per-workflow cache dir."""
        cache_path = os.path.join(os.environ['HOME'],
                                  '.cache/.popper/singularity/12345')
        shutil.rmtree(cache_path)
        self.assertEqual(os.path.exists(cache_path), False)
        SingularityRunner.setup_singularity_cache('12345')
        self.assertEqual(os.path.exists(cache_path), True)
class TestDockerRunner(unittest.TestCase):
    """Tests for the Docker container runtime runner.

    NOTE(review): recovered from a whitespace-mangled, partially redacted
    source; the ``*****`` span below lost its original content (an image
    reference plus follow-up assertions) -- restore from version control.
    These tests talk to a live Docker daemon and are skipped unless the
    RUNTIME environment variable is 'docker'.
    """

    def setUp(self):
        # Fresh workspace plus a minimal one-action workflow.
        os.makedirs('/tmp/test_folder')
        os.chdir('/tmp/test_folder')
        log.setLevel('CRITICAL')
        workflow = """
        workflow "sample" {
            resolves = "sample action"
        }

        action "sample action" {
            uses = "popperized/bin/sh@master"
            args = ["echo", "Hello"]
        }
        """
        pu.write_file('/tmp/test_folder/a.workflow', workflow)
        self.wf = Workflow('/tmp/test_folder/a.workflow')
        self.wf.parse()
        # '12345' namespaces the action cache for this workflow run.
        WorkflowRunner.download_actions(self.wf, False, False, '12345')
        WorkflowRunner.instantiate_runners('docker', self.wf,
                                           '/tmp/test_folder', False, False,
                                           '12345')
        self.docker_client = docker.from_env()
        self.runner = self.wf.action['sample action']['runner']

    def tearDown(self):
        os.chdir('/tmp')
        shutil.rmtree('/tmp/test_folder')
        shutil.rmtree(os.path.join(os.environ['HOME'], '.cache/.popper'))
        log.setLevel('NOTSET')
        # Release the docker client's connection pool.
        self.docker_client.close()

    @unittest.skipIf(os.environ['RUNTIME'] != 'docker',
                     'Skipping docker tests...')
    def test_get_build_resources(self):
        # A GitHub-hosted action yields (build=True, tag, build-context).
        res = self.runner.get_build_resources()
        self.assertTupleEqual(
            res,
            (True, 'popperized/bin:master',
             os.environ['HOME'] +
             '/.cache/.popper/actions/12345/github.com/popperized/bin/sh'))
        # NOTE(review): the docker:// value assigned here and the
        # assertions that followed were redacted in the recovered source.
        self.runner.action['uses'] = 'docker://*****:*****'

    @unittest.skipIf(os.environ['RUNTIME'] != 'docker',
                     'Skipping docker tests...')
    def test_docker_exists(self):
        # docker_exists() finds the container named after the action+wid.
        image = self.docker_client.images.pull('debian:buster-slim')
        container = self.docker_client.containers.create(
            image='debian:buster-slim',
            name='popper_sample_action_12345')
        self.assertEqual(self.runner.docker_exists(), True)
        container.remove()
        self.docker_client.images.remove('debian:buster-slim')

    @unittest.skipIf(os.environ['RUNTIME'] != 'docker',
                     'Skipping docker tests...')
    def test_docker_image_exists(self):
        image = self.docker_client.images.pull('debian:buster-slim')
        self.assertEqual(self.runner.docker_image_exists('debian:buster-slim'),
                         True)
        self.docker_client.images.remove('debian:buster-slim', force=True)

    @unittest.skipIf(os.environ['RUNTIME'] != 'docker',
                     'Skipping docker tests...')
    def test_docker_rm(self):
        self.docker_client.images.pull('debian:buster-slim')
        self.runner.docker_create('debian:buster-slim')
        self.runner.docker_rm()
        # Removing an already-removed container surfaces NotFound.
        self.assertRaises(docker.errors.NotFound, self.runner.docker_rm)

    @unittest.skipIf(os.environ['RUNTIME'] != 'docker',
                     'Skipping docker tests...')
    def test_docker_pull(self):
        self.assertEqual(self.runner.docker_image_exists('debian:buster-slim'),
                         False)
        # With skip_pull set and the image absent, the pull must abort.
        self.runner.skip_pull = True
        self.assertRaises(SystemExit, self.runner.docker_pull,
                          'debian:buster-slim')
        self.runner.skip_pull = False
        self.runner.docker_pull('debian:buster-slim')
        self.assertEqual(self.runner.docker_image_exists('debian:buster-slim'),
                         True)

    @unittest.skipIf(os.environ['RUNTIME'] != 'docker',
                     'Skipping docker tests...')
    def test_docker_start(self):
        self.runner.action['runs'] = [
            "sh", "-c", "echo 'Hello from Popper 2.x !' > popper.file"
        ]
        self.runner.docker_pull('debian:buster-slim')
        self.runner.docker_create('debian:buster-slim')
        e = self.runner.docker_start()
        # Exit status 0 and the command's side effect must be observable
        # in the mounted workspace.
        self.assertEqual(e, 0)
        self.assertEqual(os.path.exists('popper.file'), True)
        # No container should be left running after docker_start returns.
        res = self.docker_client.containers.list(filters={'status': 'running'})
        self.assertListEqual(res, [])
        self.runner.docker_rm()

    @unittest.skipIf(os.environ['RUNTIME'] != 'docker',
                     'Skipping docker tests...')
    def test_docker_build(self):
        pu.write_file(
            '/tmp/test_folder/Dockerfile', """
        FROM debian:stable-slim
        RUN apt-get update && \
        apt-get install curl -y && \
        apt-get clean -y
        """)
        self.runner.docker_build('abcd:latest', '/tmp/test_folder')
        # images.get raises if the tag was not created by the build.
        res = self.docker_client.images.get('abcd:latest')

    @unittest.skipIf(os.environ['RUNTIME'] != 'docker',
                     'Skipping docker tests...')
    def test_docker_create(self):
        self.runner.action['args'] = ['env']
        self.runner.docker_pull('debian:buster-slim')
        self.runner.docker_create('debian:buster-slim')
        self.assertEqual(self.runner.docker_exists(), True)
        self.runner.docker_rm()
class TestActionRunner(unittest.TestCase):
    """Tests for runtime-agnostic behavior shared by all action runners:
    executable checks, exit-code handling, environment and volume
    preparation, and workspace/event-file setup.

    NOTE(review): recovered from a whitespace-mangled source; formatting
    was reconstructed, code tokens left as found.
    """

    def setUp(self):
        # Fresh workspace plus a minimal one-action workflow; no action
        # download is needed since only runner plumbing is exercised.
        os.makedirs('/tmp/test_folder')
        os.chdir('/tmp/test_folder')
        log.setLevel('CRITICAL')
        workflow = """
        workflow "sample" {
            resolves = "sample action"
        }

        action "sample action" {
            uses = "popperized/bin/sh@master"
            args = ["echo", "Hello"]
        }
        """
        pu.write_file('/tmp/test_folder/a.workflow', workflow)
        self.wf = Workflow('/tmp/test_folder/a.workflow')
        self.wf.parse()
        WorkflowRunner.instantiate_runners('docker', self.wf,
                                           '/tmp/test_folder', False, False,
                                           '12345')
        self.runner = self.wf.action['sample action']['runner']

    def tearDown(self):
        os.chdir('/tmp')
        shutil.rmtree('/tmp/test_folder')
        log.setLevel('NOTSET')

    def test_check_executable(self):
        # A binary that is not on PATH must abort with SystemExit.
        self.assertRaises(SystemExit, self.runner.check_executable, 'abcd')

    def test_handle_exit(self):
        self.flag = 0

        def signal_handler(sig, frame):
            self.flag = 1

        # Per the assertions below: exit code 1 aborts, 0 is a no-op,
        # and 78 is expected to deliver SIGUSR1 to this process (the
        # handler flips the flag).
        signal.signal(signal.SIGUSR1, signal_handler)
        self.assertRaises(SystemExit, self.runner.handle_exit, 1)
        self.runner.handle_exit(0)
        self.assertEqual(self.flag, 0)
        self.runner.handle_exit(78)
        self.assertEqual(self.flag, 1)

    def test_prepare_environment(self):
        # Both GITHUB_* and POPPER_* variants are produced with the
        # same values.
        env = self.runner.prepare_environment()
        self.assertDictEqual(
            env, {
                'HOME': os.environ['HOME'],
                'GITHUB_WORKFLOW': 'sample',
                'GITHUB_ACTION': 'sample action',
                'GITHUB_ACTOR': 'popper',
                'GITHUB_REPOSITORY': 'unknown',
                'GITHUB_EVENT_NAME': 'push',
                'GITHUB_EVENT_PATH': '/tmp/github_event.json',
                'GITHUB_WORKSPACE': '/tmp/test_folder',
                'GITHUB_SHA': 'unknown',
                'GITHUB_REF': 'unknown',
                'POPPER_WORKFLOW': 'sample',
                'POPPER_ACTION': 'sample action',
                'POPPER_ACTOR': 'popper',
                'POPPER_REPOSITORY': 'unknown',
                'POPPER_EVENT_NAME': 'push',
                'POPPER_EVENT_PATH': '/tmp/github_event.json',
                'POPPER_WORKSPACE': '/tmp/test_folder',
                'POPPER_SHA': 'unknown',
                'POPPER_REF': 'unknown'
            })
        # Without set_env the variables are only returned, not exported;
        # with set_env=True they are written into os.environ.
        self.assertEqual(set(env.keys()).issubset(set(os.environ)), False)
        env = self.runner.prepare_environment(set_env=True)
        self.assertEqual(set(env.keys()).issubset(set(os.environ)), True)
        self.runner.remove_environment()

    def test_prepare_volumes(self):
        env = self.runner.prepare_environment()
        # HOME, the workspace and the event file are always bind-mounted.
        volumes = self.runner.prepare_volumes(env)
        self.assertEqual(volumes, [
            '{}:{}'.format(os.environ['HOME'], os.environ['HOME']),
            '{}:/github/home'.format(os.environ['HOME']),
            '/tmp/test_folder:/tmp/test_folder',
            '/tmp/test_folder:/github/workspace',
            '/tmp/github_event.json:/github/workflow/event.json'
        ])
        # The docker socket is prepended only when requested.
        volumes = self.runner.prepare_volumes(env, include_docker_socket=True)
        self.assertEqual(volumes, [
            '/var/run/docker.sock:/var/run/docker.sock',
            '{}:{}'.format(os.environ['HOME'], os.environ['HOME']),
            '{}:/github/home'.format(os.environ['HOME']),
            '/tmp/test_folder:/tmp/test_folder',
            '/tmp/test_folder:/github/workspace',
            '/tmp/github_event.json:/github/workflow/event.json'
        ])

    def test_remove_environment(self):
        # remove_environment() undoes what set_env=True exported.
        env = self.runner.prepare_environment(set_env=True)
        self.assertEqual(set(env.keys()).issubset(set(os.environ)), True)
        self.runner.remove_environment()
        self.assertEqual(set(env.keys()).issubset(set(os.environ)), False)

    def test_setup_necessary_files(self):
        # Recreates the event-payload file and the workspace directory
        # (including missing parents) when absent.
        os.remove('/tmp/github_event.json')
        self.assertEqual(os.path.exists('/tmp/github_event.json'), False)
        self.runner.setup_necessary_files()
        self.assertEqual(os.path.exists('/tmp/github_event.json'), True)
        self.runner.workspace = '/tmp/a/b/c'
        self.runner.setup_necessary_files()
        self.assertEqual(os.path.exists('/tmp/a/b/c'), True)
def test_check_for_unreachable_actions(self):
    """Skipping actions must leave dependents detached and detectable.

    Builds a six-action graph, skips 'a', 'b' and 'd', and checks that
    (1) the pruned graph matches the expected adjacency and (2) the
    unreachable-action check aborts. A second, intentionally loose
    workflow verifies the non-fatal path.
    """
    self.create_workflow_file("""
    workflow "example" {
        resolves = "end"
    }

    action "a" {
        uses = "sh"
        args = "ls"
    }

    action "b" {
        uses = "sh"
        args = "ls"
    }

    action "c" {
        uses = "sh"
        args = "ls"
    }

    action "d" {
        needs = ["c"]
        uses = "sh"
        args = "ls"
    }

    action "e" {
        needs = ["d", "b", "a"]
        uses = "sh"
        args = "ls"
    }

    action "end" {
        needs = "e"
        uses = "sh"
        args = "ls"
    }
    """)
    parsed_wf = Workflow('/tmp/test_folder/a.workflow')
    parsed_wf.parse()
    pruned_wf = Workflow.skip_actions(parsed_wf, ['d', 'a', 'b'])

    # Skipped actions lose their outgoing edges; their dependents end
    # up with empty 'needs' lists.
    expected_graph = {
        'a': {'uses': 'sh', 'args': ['ls'], 'name': 'a', 'next': set()},
        'b': {'uses': 'sh', 'args': ['ls'], 'name': 'b', 'next': set()},
        'c': {'uses': 'sh', 'args': ['ls'], 'name': 'c', 'next': set()},
        'd': {
            'needs': [],
            'uses': 'sh',
            'args': ['ls'],
            'name': 'd',
            'next': set()
        },
        'e': {
            'needs': [],
            'uses': 'sh',
            'args': ['ls'],
            'name': 'e',
            'next': {'end'}
        },
        'end': {
            'needs': ['e'],
            'uses': 'sh',
            'args': ['ls'],
            'name': 'end'
        }
    }
    self.assertDictEqual(pruned_wf.action, expected_graph)
    self.assertRaises(SystemExit,
                      pruned_wf.check_for_unreachable_actions, True)

    self.create_workflow_file("""
    workflow "sample" {
        resolves = ["reachable"]
    }

    action "reachable" {
        uses = "popperized/bin/sh@master"
        args = "ls"
    }

    action "unreachable" {
        uses = "popperized/bin/sh@master"
        args = ["ls -ltr"]
    }
    """)
    parsed_wf = Workflow('/tmp/test_folder/a.workflow')
    parsed_wf.parse()
    # With no skip list, the check must not raise for this workflow.
    parsed_wf.check_for_unreachable_actions()
def test_get_stages(self):
    """get_stages() must yield sets of actions in dependency order.

    Exercises two DAGs: a fan-in graph and a diamond with two parallel
    tails, checking the full stage sequence for each.
    """
    self.create_workflow_file("""
    workflow "example" {
        resolves = "end"
    }

    action "a" {
        uses = "sh"
        args = "ls"
    }

    action "b" {
        uses = "sh"
        args = "ls"
    }

    action "c" {
        uses = "sh"
        args = "ls"
    }

    action "d" {
        needs = ["c"]
        uses = "sh"
        args = "ls"
    }

    action "e" {
        needs = ["d", "b", "a"]
        uses = "sh"
        args = "ls"
    }

    action "end" {
        needs = "e"
        uses = "sh"
        args = "ls"
    }
    """)
    workflow = Workflow('/tmp/test_folder/a.workflow')
    workflow.parse()
    # Materialize the generator: roots first, then each dependent level.
    stage_sequence = list(workflow.get_stages())
    self.assertListEqual(stage_sequence,
                         [{'b', 'c', 'a'}, {'d'}, {'e'}, {'end'}])

    self.create_workflow_file("""
    workflow "example" {
        resolves = ["end"]
    }

    action "a" {
        uses = "sh"
        args = "ls"
    }

    action "b" {
        needs = "a"
        uses = "sh"
        args = "ls"
    }

    action "c" {
        uses = "sh"
        args = "ls"
    }

    action "d" {
        uses = "sh"
        needs = ["b", "c"]
        args = "ls"
    }

    action "g" {
        needs = "d"
        uses = "sh"
        args = "ls"
    }

    action "f" {
        needs = "d"
        uses = "sh"
        args = "ls"
    }

    action "h" {
        needs = "g"
        uses = "sh"
        args = "ls"
    }

    action "end" {
        needs = ["h", "f"]
        uses = "sh"
        args = "ls"
    }
    """)
    workflow = Workflow('/tmp/test_folder/a.workflow')
    workflow.parse()
    stage_sequence = list(workflow.get_stages())
    self.assertListEqual(
        stage_sequence,
        [{'a', 'c'}, {'b'}, {'d'}, {'g', 'f'}, {'h'}, {'end'}])
def cli(ctx, step, wfile, debug, dry_run, log_file, quiet, reuse, engine,
        resource_manager, skip, skip_pull, skip_clone, substitution,
        allow_loose, with_dependencies, workspace, conf):
    """Runs a Popper workflow. Only executes STEP if given.

    To specify a container engine to use other than docker, use the
    --engine/-e flag. For executing on a resource manager such as SLURM or
    Kubernetes, use the --resource-manager/-r flag. Alternatively, a
    configuration file can be given (--conf flag) that can specify container
    options, resource manager options, or both (see "Workflow Syntax and
    Execution Runtime" section of the Popper documentation for more).

    If the container engine (-e) or resource manager (-r) are specified with
    a flag and a configuration file is given as well, the values passed via
    the flags are given preference over those contained in the configuration
    file.
    """
    # Select log verbosity; --debug takes precedence over --quiet.
    if debug:
        level = 'DEBUG'
    elif quiet:
        level = 'INFO'
    else:
        level = 'STEP_INFO'
    log.setLevel(level)

    if dry_run:
        logging.msg_prefix = "DRYRUN: "

    if log_file:
        # Mirror all log output into the given file as well.
        logging.add_log(log, log_file)

    # Reject flag combinations that only make sense with (or without)
    # the STEP argument.
    if with_dependencies and not step:
        log.fail('`--with-dependencies` can only be used when '
                 'STEP argument is given.')
    if skip and step:
        log.fail('`--skip` can not be used when STEP argument is passed.')

    # The workflow factory handles file formats, validation, and any
    # step filtering/substitution requested on the command line.
    wf = Workflow.new(wfile,
                      step=step,
                      skipped_steps=skip,
                      substitutions=substitution,
                      allow_loose=allow_loose,
                      include_step_dependencies=with_dependencies)

    # CLI flags take precedence over values read from the config file.
    config = PopperConfig(engine_name=engine,
                          resman_name=resource_manager,
                          config_file=conf,
                          reuse=reuse,
                          dry_run=dry_run,
                          skip_pull=skip_pull,
                          skip_clone=skip_clone,
                          workspace_dir=workspace)

    runner = WorkflowRunner(config)
    try:
        runner.run(wf)
    except Exception as e:
        # Keep the traceback at debug level; surface a clean failure.
        log.debug(traceback.format_exc())
        log.fail(e)