def test_get_stages(self):
    # First workflow: 'a', 'b' and 'c' have no dependencies, 'd' needs
    # 'c', 'e' needs 'd', 'b' and 'a', and 'end' needs 'e'.
    self.create_workflow_file("""
    workflow "example" {
        resolves = "end"
    }

    action "a" {
        uses = "sh"
        args = "ls"
    }

    action "b" {
        uses = "sh"
        args = "ls"
    }

    action "c" {
        uses = "sh"
        args = "ls"
    }

    action "d" {
        needs = ["c"]
        uses = "sh"
        args = "ls"
    }

    action "e" {
        needs = ["d", "b", "a"]
        uses = "sh"
        args = "ls"
    }

    action "end" {
        needs = "e"
        uses = "sh"
        args = "ls"
    }
    """)
    wf = Workflow('/tmp/test_folder/a.workflow')
    wf.parse()
    stages = list(wf.get_stages())
    self.assertListEqual(stages, [{'b', 'c', 'a'}, {'d'}, {'e'}, {'end'}])

    # Second workflow: a diamond-shaped DAG where 'd' fans out to 'g'
    # and 'f', which rejoin at 'end' (via 'h' and directly).
    self.create_workflow_file("""
    workflow "example" {
        resolves = ["end"]
    }

    action "a" {
        uses = "sh"
        args = "ls"
    }

    action "b" {
        needs = "a"
        uses = "sh"
        args = "ls"
    }

    action "c" {
        uses = "sh"
        args = "ls"
    }

    action "d" {
        uses = "sh"
        needs = ["b", "c"]
        args = "ls"
    }

    action "g" {
        needs = "d"
        uses = "sh"
        args = "ls"
    }

    action "f" {
        needs = "d"
        uses = "sh"
        args = "ls"
    }

    action "h" {
        needs = "g"
        uses = "sh"
        args = "ls"
    }

    action "end" {
        needs = ["h", "f"]
        uses = "sh"
        args = "ls"
    }
    """)
    wf = Workflow('/tmp/test_folder/a.workflow')
    wf.parse()
    stages = list(wf.get_stages())
    self.assertListEqual(
        stages,
        [{'a', 'c'}, {'b'}, {'d'}, {'g', 'f'}, {'h'}, {'end'}])
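# Workflow.get_stages() yields sets of actions whose dependencies have all
# been satisfied by earlier stages, i.e. a level-by-level topological sort
# of the workflow DAG. Below is a minimal standalone sketch of that staging
# logic, assuming a plain {action: [needed_actions]} mapping instead of the
# real Workflow class (get_stages_sketch is a hypothetical helper, not part
# of popper itself):
def get_stages_sketch(needs):
    done = set()
    remaining = set(needs)
    while remaining:
        # an action is ready once everything it needs has already run
        stage = {a for a in remaining if set(needs[a]) <= done}
        if not stage:
            raise ValueError('cycle detected in workflow')
        yield stage
        done |= stage
        remaining -= stage

# Mirrors the first workflow in the test above:
#   list(get_stages_sketch({'a': [], 'b': [], 'c': [], 'd': ['c'],
#                           'e': ['d', 'b', 'a'], 'end': ['e']}))
#   == [{'a', 'b', 'c'}, {'d'}, {'e'}, {'end'}]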
import multiprocessing as mp
import os
import shutil
from concurrent.futures import ThreadPoolExecutor, as_completed
from distutils.dir_util import copy_tree

import yaml

import popper.cli
from popper import scm, utils as pu
from popper.cli import log
from popper.parser import Workflow

# Note: DockerRunner, SingularityRunner and HostRunner are assumed to be
# defined elsewhere in this module.


class WorkflowRunner(object):
    """A GHA workflow runner."""

    def __init__(self, wfile, workspace, dry_run, reuse, parallel,
                 skip_secrets_prompt=False):
        wfile = pu.find_default_wfile(wfile)
        self.workspace = workspace
        self.dry_run = dry_run
        self.reuse = reuse
        self.parallel = parallel
        self.skip_secrets_prompt = skip_secrets_prompt
        self.actions_cache_path = os.path.join('/', 'tmp', 'actions')

        # Initialize a Workflow. During initialization, all the validation
        # takes place automatically.
        self.wf = Workflow(wfile)
        self.check_secrets()
        log.debug('workflow:\n{}'.format(
            yaml.dump(self.wf, default_flow_style=False, default_style='')))

    def check_secrets(self):
        """Checks whether the secrets defined in the action blocks are set
        in the execution environment.

        Note:
            When the environment variable `CI` is set to `true`, execution
            fails if a secret is not defined. Otherwise, the user is
            prompted for the value at runtime.
        """
        if self.dry_run or self.skip_secrets_prompt:
            return
        for _, a in self.wf.actions:
            for s in a.get('secrets', []):
                if s not in os.environ:
                    if os.environ.get('CI') == 'true':
                        log.fail('Secret {} not defined'.format(s))
                    else:
                        val = input('Enter the value for {0}:\n'.format(s))
                        os.environ[s] = val

    def download_actions(self):
        """Clone actions that reference a repository."""
        cloned = set()
        infoed = False

        for _, a in self.wf.actions:
            if ('docker://' in a['uses'] or 'shub://' in a['uses'] or
                    './' in a['uses']):
                continue

            url, service, usr, repo, action_dir, version = scm.parse(
                a['uses'])

            repo_parent_dir = os.path.join(
                self.actions_cache_path, service, usr)

            a['repo_dir'] = os.path.join(repo_parent_dir, repo)
            a['action_dir'] = action_dir

            if self.dry_run:
                continue

            if not infoed:
                log.info('[popper] cloning action repositories')
                infoed = True

            # clone each repository only once, even when several actions
            # reference it
            if '{}/{}'.format(usr, repo) in cloned:
                continue

            if not os.path.exists(repo_parent_dir):
                os.makedirs(repo_parent_dir)

            log.info('[popper] - {}/{}/{}@{}'.format(url, usr, repo, version))
            scm.clone(url, usr, repo, repo_parent_dir, version)
            cloned.add('{}/{}'.format(usr, repo))

    def instantiate_runners(self):
        """Factory of ActionRunner instances, one for each action."""
        for _, a in self.wf.actions:
            if 'docker://' in a['uses']:
                a['runner'] = DockerRunner(
                    a, self.workspace, self.env, self.dry_run)
                continue

            if 'shub://' in a['uses']:
                a['runner'] = SingularityRunner(
                    a, self.workspace, self.env, self.dry_run)
                continue

            # local actions: pick a runner based on the files present in
            # the action's folder
            if './' in a['uses']:
                if os.path.exists(os.path.join(a['uses'], 'Dockerfile')):
                    a['runner'] = DockerRunner(
                        a, self.workspace, self.env, self.dry_run)
                elif os.path.exists(
                        os.path.join(a['uses'], 'singularity.def')):
                    a['runner'] = SingularityRunner(
                        a, self.workspace, self.env, self.dry_run)
                else:
                    a['runner'] = HostRunner(
                        a, self.workspace, self.env, self.dry_run)
                continue

            # remote actions: look inside the cloned repository
            dockerfile_path = os.path.join(
                a['repo_dir'], a['action_dir'], 'Dockerfile')
            singularityfile_path = os.path.join(
                a['repo_dir'], a['action_dir'], 'singularity.def')

            if os.path.exists(dockerfile_path):
                a['runner'] = DockerRunner(
                    a, self.workspace, self.env, self.dry_run)
            elif os.path.exists(singularityfile_path):
                a['runner'] = SingularityRunner(
                    a, self.workspace, self.env, self.dry_run)
            else:
                a['runner'] = HostRunner(
                    a, self.workspace, self.env, self.dry_run)

    def run(self, action_name=None, reuse=False, parallel=False):
        """Run the pipeline or a specific action."""
        os.environ['WORKSPACE'] = self.workspace

        if scm.get_user():
            repo_id = '{}/{}'.format(scm.get_user(), scm.get_name())
        else:
            repo_id = 'unknown'

        self.env = {
            'GITHUB_WORKSPACE': self.workspace,
            'GITHUB_WORKFLOW': self.wf.name,
            'GITHUB_ACTOR': 'popper',
            'GITHUB_REPOSITORY': repo_id,
            'GITHUB_EVENT_NAME': self.wf.on,
            'GITHUB_EVENT_PATH': '/{}/{}'.format(
                self.workspace, 'workflow/event.json'),
            'GITHUB_SHA': scm.get_sha(),
            'GITHUB_REF': scm.get_ref()
        }

        # expose every GITHUB_* variable under a POPPER_* alias as well
        for e in dict(self.env):
            self.env.update({e.replace('GITHUB_', 'POPPER_'): self.env[e]})

        self.download_actions()
        self.instantiate_runners()

        if action_name:
            self.wf.get_runner(action_name).run(reuse)
        else:
            for s in self.wf.get_stages():
                self.run_stage(s, reuse, parallel)

    def run_stage(self, stage, reuse=False, parallel=False):
        if parallel:
            with ThreadPoolExecutor(max_workers=mp.cpu_count()) as ex:
                flist = {
                    ex.submit(self.wf.get_runner(a).run, reuse): a
                    for a in stage
                }
                popper.cli.flist = flist
                for future in as_completed(flist):
                    future.result()
                    log.info('Action ran successfully!')
        else:
            for action in stage:
                self.wf.get_runner(action).run(reuse)

    @staticmethod
    def import_from_repo(path, project_root):
        parts = scm.get_parts(path)
        if len(parts) < 3:
            log.fail(
                'Required url format: '
                '<url>/<user>/<repo>[/folder[/wf.workflow]]')

        url, service, user, repo, _, version = scm.parse(path)
        cloned_project_dir = os.path.join('/tmp', service, user, repo)
        scm.clone(url, user, repo, os.path.dirname(cloned_project_dir),
                  version)

        if len(parts) == 3:
            # only <url>/<user>/<repo> given; look in the default locations
            ptw_one = os.path.join(cloned_project_dir, 'main.workflow')
            ptw_two = os.path.join(cloned_project_dir,
                                   '.github/main.workflow')
            if os.path.isfile(ptw_one):
                path_to_workflow = ptw_one
            elif os.path.isfile(ptw_two):
                path_to_workflow = ptw_two
            else:
                log.fail('Unable to find a .workflow file')
        elif len(parts) >= 4:
            path_to_workflow = os.path.join(
                cloned_project_dir, '/'.join(parts[3:])).split('@')[0]
            if not os.path.basename(path_to_workflow).endswith('.workflow'):
                path_to_workflow = os.path.join(
                    path_to_workflow, 'main.workflow')
            if not os.path.isfile(path_to_workflow):
                log.fail('Unable to find a .workflow file')

        shutil.copy(path_to_workflow, project_root)
        log.info('Successfully imported from {}'.format(path_to_workflow))

        # also copy any actions that the workflow references locally
        # (i.e. those whose `uses` starts with './')
        wf = Workflow(path_to_workflow)

        action_paths = list()
        for _, a_block in wf.actions:
            if a_block['uses'].startswith('./'):
                action_paths.append(a_block['uses'])

        action_paths = set([a.split('/')[1] for a in action_paths])
        for a in action_paths:
            copy_tree(os.path.join(cloned_project_dir, a),
                      os.path.join(project_root, a))
            log.info('Copied {} to {}...'.format(
                os.path.join(cloned_project_dir, a), project_root))
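# Minimal usage sketch for the class above. Assumptions (not from the
# original source): a `main.workflow` file exists in the current working
# directory and this module's runner classes are available; `dry_run=True`
# keeps the example from cloning or executing anything.
if __name__ == '__main__':
    runner = WorkflowRunner(wfile='main.workflow', workspace=os.getcwd(),
                            dry_run=True, reuse=False, parallel=False)
    runner.run()  # resolves stages and walks them in dependency order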