def test_parse(self):
    """Exercise scm.parse() against malformed and well-formed URLs."""
    # URLs that scm.parse() must reject by exiting.
    for bad_url in [
        "ssh://[email protected]:cplee/github-actions-demo.git",
        "https://github.com/cplee@master",
        "github.com",
        "@master",
    ]:
        self.assertRaises(SystemExit, scm.parse, bad_url)

    # URLs that must parse into (url, service, user, repo, subdir, version).
    cases = [
        ("http://gitlab.com/cplee/github-actions-demo.git",
         ('http://gitlab.com', 'gitlab.com', 'cplee',
          'github-actions-demo', '', None)),
        ("https://github.com/cplee/github-actions-demo@master",
         ('https://github.com', 'github.com', 'cplee',
          'github-actions-demo', '', 'master')),
        ("github.com/cplee/github-actions-demo",
         ('https://github.com', 'github.com', 'cplee',
          'github-actions-demo', '', None)),
        ("cplee/github-actions-demo/path/to/action@develop",
         ('https://github.com', 'github.com', 'cplee',
          'github-actions-demo', 'path/to/action', 'develop')),
    ]
    for good_url, expected in cases:
        self.assertTupleEqual(scm.parse(good_url), expected)
def _get_build_info(self, step):
    """Parses the `uses` attribute and returns build information needed.

    Args:
        step(dict): dict with step data

    Returns:
        (bool, str, str, str): build flag, image, tag, and build context
        path (the last is None when the image is pulled instead of built).
    """
    build = True
    img = None
    build_ctx_path = None

    if "docker://" in step.uses:
        # Image gets pulled from a registry; nothing to build.
        img = step.uses.replace("docker://", "")
        if ":" in img:
            # Split on the LAST colon so registry hosts that carry a
            # port (e.g. localhost:5000/img:tag) don't raise ValueError.
            (img, tag) = img.rsplit(":", 1)
        else:
            tag = "latest"
        build = False
    elif "./" in step.uses:
        # Local step: build from a folder inside the workspace,
        # tagged with the current commit's short SHA.
        img = f'{pu.sanitized_name(step.id, "step")}'
        tag = f"{self._config.git_sha_short}"
        build_ctx_path = os.path.join(self._config.workspace_dir, step.uses)
    else:
        # Remote step: build from the previously cloned repository.
        _, service, user, repo, step_dir, version = scm.parse(step.uses)
        wf_cache_dir = os.path.join(self._config.cache_dir, self._config.wid)
        repo_dir = os.path.join(wf_cache_dir, service, user, repo)
        img = f"{user}/{repo}".lower()
        tag = version
        build_ctx_path = os.path.join(repo_dir, step_dir)

    return (build, img, tag, build_ctx_path)
def _get_build_info(self, step):
    """Parses the `uses` attribute and returns build information needed.

    Args:
        step(dict): dict with step data

    Returns:
        (bool, str, str, str): build flag, image, tag, and build context
        path (the last is None when the image is pulled instead of built).
    """
    build = True
    img = None
    build_ctx_path = None

    if 'docker://' in step['uses']:
        # Image gets pulled from a registry; nothing to build.
        img = step['uses'].replace('docker://', '')
        if ':' in img:
            # Split on the LAST colon so registry hosts that carry a
            # port (e.g. localhost:5000/img:tag) don't raise ValueError.
            (img, tag) = img.rsplit(':', 1)
        else:
            tag = 'latest'
        build = False
    elif './' in step['uses']:
        # Local step: build from a folder inside the workspace,
        # tagged with the workspace SHA.
        img = f'{pu.sanitized_name(step["name"], "step")}'
        tag = f'{self._config.workspace_sha}'
        build_ctx_path = os.path.join(self._config.workspace_dir,
                                      step['uses'])
    else:
        # Remote step: build from the previously cloned repository.
        _, _, user, repo, _, version = scm.parse(step['uses'])
        img = f'{user}/{repo}'.lower()
        tag = version
        build_ctx_path = os.path.join(step['repo_dir'], step['step_dir'])

    return (build, img, tag, build_ctx_path)
def download_actions(self):
    """Clone actions that reference a repository."""
    fetched = set()
    printed_header = False

    for _, action in self.wf.actions:
        uses = action['uses']
        # references to images or local folders need no cloning
        if 'docker://' in uses or 'shub://' in uses or './' in uses:
            continue

        url, service, usr, repo, action_dir, version = scm.parse(uses)
        repo_parent_dir = os.path.join(self.actions_cache_path, service, usr)
        action['repo_dir'] = os.path.join(repo_parent_dir, repo)
        action['action_dir'] = action_dir

        if self.dry_run:
            continue

        if not printed_header:
            log.info('[popper] cloning action repositories')
            printed_header = True

        key = '{}/{}'.format(usr, repo)
        if key in fetched:
            continue

        if not os.path.exists(repo_parent_dir):
            os.makedirs(repo_parent_dir)

        log.info('[popper] - {}/{}/{}@{}'.format(url, usr, repo, version))
        scm.clone(url, usr, repo, repo_parent_dir, version)
        fetched.add(key)
def import_from_repo(action_ref, project_root):
    """Import a workflow from a remote repository into the given project.

    Args:
        action_ref(str): reference to the workflow to import; parsed by
            scm.parse() into url/service/user/repo/folder/version.
        project_root(str): local project folder files are copied into.

    Returns:
        None. Calls log.fail() when no workflow file can be found.
    """
    url, service, user, repo, action_dir, version = scm.parse(action_ref)
    # clone the referenced repository under /tmp/<service>/<user>/<repo>
    cloned_project_dir = os.path.join("/tmp", service, user, repo)
    scm.clone(url, user, repo, cloned_project_dir, version)
    if not action_dir:
        # no folder given: look for main.workflow at the repo root or
        # inside the .github folder
        ptw_one = os.path.join(cloned_project_dir, "main.workflow")
        ptw_two = os.path.join(cloned_project_dir, ".github/main.workflow")
        if os.path.isfile(ptw_one):
            path_to_workflow = ptw_one
        elif os.path.isfile(ptw_two):
            path_to_workflow = ptw_two
        else:
            log.fail("Unable to find main.workflow file")
    else:
        # explicit folder (or file) given; when it is not a .workflow
        # file, default to main.workflow inside that folder
        path_to_workflow = os.path.join(cloned_project_dir, action_dir)
        if not os.path.basename(path_to_workflow).endswith('.workflow'):
            path_to_workflow = os.path.join(path_to_workflow,
                                            'main.workflow')
        if not os.path.isfile(path_to_workflow):
            log.fail("Unable to find a main.workflow file")
    # copy the folder containing the workflow; when it lives under
    # .github, copy the repository folder that contains .github instead
    if '.github/' in path_to_workflow:
        path_to_copy = os.path.dirname(os.path.dirname(path_to_workflow))
    else:
        path_to_copy = os.path.dirname(path_to_workflow)
    copy_tree(path_to_copy, project_root)
    log.info("Successfully imported from {}".format(action_ref))
def test_parse(self):
    """Check scm.parse() against malformed and well-formed URLs."""
    # these references are invalid and must make scm.parse() exit
    failing = [
        "ssh://[email protected]:popperized/github-actions-demo.git",
        "https://github.com/popperized@master",
        "github.com",
        "@master",
    ]
    for bad in failing:
        self.assertRaises(SystemExit, scm.parse, bad)

    # valid references and their expected parsed tuples
    cases = [
        ("http://gitlab.com/popperized/github-actions-demo.git",
         ('http://gitlab.com', 'gitlab.com', 'popperized',
          'github-actions-demo', '', None)),
        ("https://github.com/popperized/github-actions-demo@master",
         ('https://github.com', 'github.com', 'popperized',
          'github-actions-demo', '', 'master')),
        ("github.com/popperized/github-actions-demo",
         ('https://github.com', 'github.com', 'popperized',
          'github-actions-demo', '', None)),
        ("popperized/github-actions-demo/path/to/action@develop",
         ('https://github.com', 'github.com', 'popperized',
          'github-actions-demo', 'path/to/action', 'develop')),
    ]
    for good, expected in cases:
        self.assertTupleEqual(scm.parse(good), expected)
def run(self, reuse=False):
    """Resolve the docker image for the action and run it.

    Args:
        reuse(bool): when True, reuse an existing container instead of
            removing and re-creating it.

    Returns:
        None. Calls log.fail() when the container finishes with a
        non-zero exit code.
    """
    build = True
    # determine image tag and (when building) the Dockerfile location
    # from the three supported forms of the `uses` attribute
    if 'docker://' in self.action['uses']:
        # image pulled from a registry; default to the `latest` tag
        tag = self.action['uses'].replace('docker://', '')
        if ':' not in tag:
            tag += ":latest"
        build = False
        dockerfile_path = 'n/a'
    elif './' in self.action['uses']:
        # local action: build from a folder in the current project
        action_dir = os.path.basename(self.action['uses'].replace(
            './', ''))
        if self.env['GITHUB_REPOSITORY'] == 'unknown':
            repo_id = ''
        else:
            repo_id = self.env['GITHUB_REPOSITORY']
            if action_dir:
                repo_id += '/'
        # local builds are tagged with the current commit SHA
        tag = repo_id + action_dir + ':' + self.env['GITHUB_SHA']
        dockerfile_path = os.path.join(os.getcwd(), self.action['uses'])
    else:
        # remote action: build from the previously cloned repository
        _, _, user, repo, _, version = scm.parse(self.action['uses'])
        tag = '{}/{}:{}'.format(user, repo, version)
        dockerfile_path = os.path.join(self.action['repo_dir'],
                                       self.action['action_dir'])
    log.debug('docker tag: {}'.format(tag))
    log.debug('dockerfile path: {}'.format(dockerfile_path))

    if not reuse:
        # clean slate: drop any existing container, then build or pull
        if self.docker_exists():
            self.docker_rm()
        if build:
            self.docker_build(tag, dockerfile_path)
        else:
            self.docker_pull(tag)
        self.docker_create(tag)
    else:
        # reuse the container when present; create it only if missing
        if not self.docker_exists():
            if build:
                self.docker_build(tag, dockerfile_path)
            else:
                self.docker_pull(tag)
            self.docker_create(tag)

    if self.container is not None:
        # register for global cleanup on interruption
        popper.cli.docker_list.append(self.container)

    e = self.docker_start()
    if e != 0:
        log.fail("Action '{}' failed!".format(self.action['name']))
def _clone_repos(self, wf):
    """Clone steps that reference a repository.

    Args:
        wf(popper.parser.workflow): Instance of the Workflow class.
        config.dry_run(bool): True if workflow flag is being dry-run.
        config.skip_clone(bool): True if clonning step has to be skipped.
        config.wid(str): id of the workspace

    Returns:
        None
    """
    # workspace-scoped cache folder where repositories get cloned
    wf_cache_dir = os.path.join(self._config.cache_dir, self._config.wid)
    os.makedirs(wf_cache_dir, exist_ok=True)

    downloaded = set()
    logged_header = False

    for step in wf.steps:
        uses = step.uses
        # container images, local folders and inline shell need no clone
        if uses == "sh" or any(
                p in uses
                for p in ("docker://", "shub://", "library://", "./")):
            continue

        url, service, user, repo, _, version = scm.parse(uses)
        repo_dir = os.path.join(wf_cache_dir, service, user, repo)

        if self._config.dry_run:
            continue

        if self._config.skip_clone:
            # only verify that a previous run left the clone in place
            if not os.path.exists(repo_dir):
                log.fail(f"Expecting folder '{repo_dir}' not found.")
            continue

        if not logged_header:
            log.info("[popper] Cloning step repositories")
            logged_header = True

        if f"{user}/{repo}" in downloaded:
            continue

        log.info(f"[popper] - {url}/{user}/{repo}@{version}")
        scm.clone(url, user, repo, repo_dir, version)
        downloaded.add(f"{user}/{repo}")
def fetch_metadata(update_cache):
    """Fetch metadata of the repositories from the search_sources
    on which to run the search.

    Args:
        update_cache (bool) : Flag variable to decide whether to update
                              the cache or not.

    Returns:
        dict : All metadata related to the actions.
    """
    cache_file = setup_cache()
    update = False
    # refresh when there is no cache file yet or the caller asked for it
    if (not os.path.isfile(cache_file)) or update_cache:
        update = True

    if not update:
        # Use metadata from cache and skip its update.
        with open(cache_file, 'r') as cf:
            metadata = yaml.load(cf, Loader=yaml.FullLoader)
    else:
        # Update the cache file.
        log.info('Updating action metadata cache...\n')
        search_sources = read_search_sources()
        source_list = list()
        # resolve each source url into (user, repo, path, version)
        for url in search_sources:
            _, _, user, repo, path_to_action, version = scm.parse(url)
            source_list.append((user, repo, path_to_action, version))
        metadata = defaultdict(dict)
        # show a progress bar while fetching metadata per repository
        with click.progressbar(
                source_list,
                show_eta=False,
                bar_template='[%(bar)s] %(info)s | %(label)s',
                show_percent=True,
                width=30) as bar:
            for r in bar:
                user, repo, path_to_action, version = r[0], r[1], r[2], r[3]
                action = os.path.normpath(
                    os.path.join(user, repo, path_to_action))
                bar.label = "{}".format(action)
                metadata[action] = fetch_repo_metadata(user, repo,
                                                       path_to_action,
                                                       version)
        # persist the freshly fetched metadata for later runs
        with open(cache_file, 'w') as cf:
            yaml.dump(dict(metadata), cf)
    return metadata
def _clone_repos(self, wf):
    """Clone steps that reference a repository.

    Args:
        wf(popper.parser.workflow): Instance of the Workflow class.
        config.dry_run(bool): True if workflow flag is being dry-run.
        config.skip_clone(bool): True if clonning step has to be skipped.
        config.wid(str): id of the workspace

    Returns:
        None
    """
    # workspace-scoped folder where repositories are cloned
    repo_cache = os.path.join(WorkflowRunner._setup_base_cache(),
                              self._config.wid)

    fetched = set()
    printed_header = False

    for _, step in wf.steps.items():
        uses = step['uses']
        # images, local folders and inline shell don't need a clone
        if uses == 'sh' or any(
                p in uses
                for p in ('docker://', 'shub://', 'library://', './')):
            continue

        url, service, user, repo, step_dir, version = scm.parse(uses)
        repo_dir = os.path.join(repo_cache, service, user, repo)
        step['repo_dir'] = repo_dir
        step['step_dir'] = step_dir

        if self._config.dry_run:
            continue

        if self._config.skip_clone:
            # only verify that a previous run left the clone in place
            if not os.path.exists(repo_dir):
                log.fail(f"Expecting folder '{repo_dir}' not found.")
            continue

        if not printed_header:
            log.info('[popper] Cloning step repositories')
            printed_header = True

        key = f'{user}/{repo}'
        if key in fetched:
            continue

        log.info(f'[popper] - {url}/{user}/{repo}@{version}')
        scm.clone(url, user, repo, repo_dir, version)
        fetched.add(key)
def import_from_repo(path, project_root):
    """Import a workflow (and the local actions it uses) from a repo.

    Args:
        path(str): url-like reference of the form
            <url>/<user>/<repo>[/folder[/wf.workflow]].
        project_root(str): local project the workflow is copied into.

    Returns:
        None. Calls log.fail() on malformed references or when no
        .workflow file can be found.
    """
    parts = scm.get_parts(path)
    if len(parts) < 3:
        log.fail(
            'Required url format: \
<url>/<user>/<repo>[/folder[/wf.workflow]]'
        )
    url, service, user, repo, _, version = scm.parse(path)
    # clone under /tmp/<service>/<user>/<repo>
    cloned_project_dir = os.path.join("/tmp", service, user, repo)
    scm.clone(url, user, repo, os.path.dirname(
        cloned_project_dir), version
    )
    if len(parts) == 3:
        # only <url>/<user>/<repo> given: look for main.workflow at the
        # repo root or inside .github
        ptw_one = os.path.join(cloned_project_dir, "main.workflow")
        ptw_two = os.path.join(cloned_project_dir, ".github/main.workflow")
        if os.path.isfile(ptw_one):
            path_to_workflow = ptw_one
        elif os.path.isfile(ptw_two):
            path_to_workflow = ptw_two
        else:
            log.fail("Unable to find a .workflow file")
    elif len(parts) >= 4:
        # a folder (and possibly a .workflow file) was given; strip a
        # trailing @version and default to main.workflow when needed
        path_to_workflow = os.path.join(
            cloned_project_dir, '/'.join(parts[3:])).split("@")[0]
        if not os.path.basename(path_to_workflow).endswith('.workflow'):
            path_to_workflow = os.path.join(
                path_to_workflow, 'main.workflow')
        if not os.path.isfile(path_to_workflow):
            log.fail("Unable to find a .workflow file")
    shutil.copy(path_to_workflow, project_root)
    log.info("Successfully imported from {}".format(path_to_workflow))

    # also copy any local actions ("./<folder>/...") referenced by the
    # imported workflow
    # NOTE(review): fp is opened but never used — Workflow() reads the
    # path itself; confirm whether the open() is intentional
    with open(path_to_workflow, 'r') as fp:
        wf = Workflow(path_to_workflow)
    action_paths = list()
    for _, a_block in wf.actions:
        if a_block['uses'].startswith("./"):
            action_paths.append(a_block['uses'])
    # keep only the top-level folder of each local action reference
    action_paths = set([a.split("/")[1] for a in action_paths])
    for a in action_paths:
        copy_tree(os.path.join(cloned_project_dir, a),
                  os.path.join(project_root, a))
        log.info("Copied {} to {}...".format(os.path.join(
            cloned_project_dir, a), project_root))
def download_actions(wf, dry_run, skip_clone, wid):
    """Clone actions that reference a repository.

    Args:
        wf(popper.parser.workflow): Instance of the Workflow class.
        dry_run(bool): True if workflow flag is being dry-run.
        skip_clone(bool): True if clonning action has to be skipped.
        wid(str): id of the workspace.

    Returns:
        None
    """
    actions_cache = os.path.join(pu.setup_base_cache(), 'actions', wid)

    fetched = set()
    printed_header = False

    for _, action in wf.action.items():
        uses = action['uses']
        # docker images, local folders and inline shell are not cloned
        if 'docker://' in uses or './' in uses or uses == 'sh':
            continue

        url, service, user, repo, action_dir, version = scm.parse(uses)
        repo_dir = os.path.join(actions_cache, service, user, repo)
        action['repo_dir'] = repo_dir
        action['action_dir'] = action_dir

        if dry_run:
            continue

        if skip_clone:
            # verify the clone already exists instead of cloning
            if not os.path.exists(repo_dir):
                log.fail('The required action folder \'{}\' was not '
                         'found locally.'.format(repo_dir))
            continue

        if not printed_header:
            log.info('[popper] Cloning action repositories')
            printed_header = True

        key = '{}/{}'.format(user, repo)
        if key in fetched:
            continue

        log.info('[popper] - {}/{}/{}@{}'.format(url, user, repo, version))
        scm.clone(url, user, repo, repo_dir, version)
        fetched.add(key)
def get_build_resources(self):
    """Parse the `uses` attribute and get the build resources from them.

    Args:
        None

    Returns:
        (bool, str, str): pull/build flag, image ref, the build source
        (None when the image is pulled from a registry).
    """
    uses = self.action['uses']
    build = True
    build_source = None

    if 'docker://' in uses:
        # pulled from a registry; default to the `latest` tag
        image = uses.replace('docker://', '')
        if ':' not in image:
            image += ":latest"
        build = False
    elif './' in uses:
        # local action: built out of a folder in the current repo
        action_dir = os.path.basename(uses.replace('./', ''))
        if self.env['GITHUB_REPOSITORY'] == 'unknown':
            repo_id = ''
        else:
            repo_id = self.env['GITHUB_REPOSITORY']
            if action_dir:
                repo_id += '/'
        image = repo_id + action_dir + ':' + self.env['GITHUB_SHA']
        build_source = os.path.join(scm.get_git_root_folder(), uses)
    else:
        # remote action: built from the previously cloned repository
        _, _, user, repo, _, version = scm.parse(uses)
        image = '{}/{}:{}'.format(user, repo, version)
        build_source = os.path.join(self.action['repo_dir'],
                                    self.action['action_dir'])

    return (build, image.lower(), build_source)
def _get_build_info(self, step):
    """Inspect the `uses` attribute of a step and derive what is needed
    to obtain its container image.

    Args:
        step(dict): dict with step data

    Returns:
        (bool, str, str): whether to build, image name, build context
        path (None when the image is pulled from a registry).
    """
    uses = step['uses']

    # registry references are pulled as-is; nothing gets built
    if any(s in uses for s in ('docker://', 'shub://', 'library://')):
        return (False, uses, None)

    # local steps build from a folder inside the workspace
    if './' in uses:
        img = f'{pu.sanitized_name(step["name"], "step")}'
        ctx = os.path.join(self._config.workspace_dir, uses)
        return (True, img, ctx)

    # remote steps build from the previously cloned repository
    _, _, user, repo, _, version = scm.parse(uses)
    img = f'{user}/{repo}'.lower()
    ctx = os.path.join(step['repo_dir'], step['step_dir'])
    return (True, img, ctx)
def _get_build_info(self, step):
    """Inspect the `uses` attribute of a step and derive what is needed
    to obtain its container image.

    Args:
        step(object): step object with `uses` (and `id` for local steps).

    Returns:
        (bool, str, str): whether to build, image name, build context
        path (None when the image is pulled from a registry).
    """
    uses = step.uses

    # registry references are pulled as-is; nothing gets built
    if any(s in uses for s in ("docker://", "shub://", "library://")):
        return (False, uses, None)

    # local steps build from a folder inside the workspace
    if "./" in uses:
        img = f'{pu.sanitized_name(step.id, "step")}'
        ctx = os.path.join(self._config.workspace_dir, uses)
        return (True, img, ctx)

    # remote steps build from the previously cloned repository
    _, service, user, repo, step_dir, version = scm.parse(uses)
    wf_cache_dir = os.path.join(self._config.cache_dir, self._config.wid)
    repo_dir = os.path.join(wf_cache_dir, service, user, repo)
    img = f"{user}/{repo}".lower()
    return (True, img, os.path.join(repo_dir, step_dir))
def download_actions(self, wf, dry_run, skip_clone):
    """Clone actions that reference a repository."""
    fetched = set()
    printed_header = False

    for _, action in wf.actions.items():
        uses = action['uses']
        # images, local folders and inline shell need no cloning
        if ('docker://' in uses or 'shub://' in uses or './' in uses
                or uses == 'sh'):
            continue

        url, service, usr, repo, action_dir, version = scm.parse(uses)
        repo_dir = os.path.join(self.actions_cache_path, service, usr,
                                repo)
        action['repo_dir'] = repo_dir
        action['action_dir'] = action_dir

        if dry_run:
            continue

        if skip_clone:
            # verify the clone already exists instead of cloning
            if not os.path.exists(repo_dir):
                log.fail('Cannot find action folder locally.')
            continue

        if not printed_header:
            log.info('[popper] cloning action repositories')
            printed_header = True

        key = '{}/{}'.format(usr, repo)
        if key in fetched:
            continue

        log.info('[popper] - {}/{}/{}@{}'.format(url, usr, repo, version))
        scm.clone(url, usr, repo, repo_dir, version)
        fetched.add(key)
def test_parse(self):
    """Verify scm.parse() on invalid and valid action references."""
    # these references are invalid and must make scm.parse() exit
    invalid = [
        "ssh://[email protected]:popperized/github-actions-demo.git",
        "https://github.com/popperized@master",
        "github.com",
        "@master",
    ]
    for url in invalid:
        self.assertRaises(SystemExit, scm.parse, url)

    # valid references and their expected parsed tuples
    valid = [
        ("http://gitlab.com/popperized/github-actions-demo.git",
         ("http://gitlab.com", "gitlab.com", "popperized",
          "github-actions-demo", "", None)),
        ("https://github.com/popperized/github-actions-demo@master",
         ("https://github.com", "github.com", "popperized",
          "github-actions-demo", "", "master")),
        ("github.com/popperized/github-actions-demo",
         ("https://github.com", "github.com", "popperized",
          "github-actions-demo", "", None)),
        ("popperized/github-actions-demo/path/to/action@develop",
         ("https://github.com", "github.com", "popperized",
          "github-actions-demo", "path/to/action", "develop")),
    ]
    for url, expected in valid:
        self.assertTupleEqual(scm.parse(url), expected)