def initialize_repo(project_root):
    """Turn the given directory into a popper repository.

    Writes the default .popper.yml configuration, appends popper's
    bookkeeping entries to .gitignore and reports success.
    """
    if pu.is_popperized():
        pu.fail('Repository has already been popperized')
        return

    # Default configuration for a freshly popperized repository.
    config = {
        'metadata': {
            'access_right': "open",
            'license': "CC-BY-4.0",
            'upload_type': "publication",
            'publication_type': "article"
        },
        'pipelines': {},
        'popperized': ["github/popperized"]
    }
    pu.write_config(config)

    # Keep popper's transient files out of version control.
    ignored_entries = ['.cache\n', 'popper_logs\n', 'popper_status\n']
    with open(os.path.join(project_root, '.gitignore'), 'a') as gitignore:
        gitignore.writelines(ignored_entries)

    pu.info('Popperized repository ' + project_root, fg='blue', bold=True)
def cli(ctx, pipeline, add, rm):
    """Manipulates the environments that are associated to a pipeline. An
    environment is a docker image where a pipeline runs when 'popper run' is
    executed. The 'host' environment is a special case that corresponds to
    the running directly on the environment where the 'popper' command runs,
    i.e. running directly on the host without docker. When a new pipeline is
    created using, the default environment is 'host' (see 'popper init
    --help' for more).

    Examples:

      popper env mypipeline # show environments for pipeline

      popper env mypipeline --add ubuntu-xenial,centos-7.2

      popper env mypipeline --rm host
    """
    config = pu.read_config()

    # Fail with a clear message instead of a raw KeyError when the pipeline
    # is unknown (consistent with the other 'env' command variants).
    if pipeline not in config['pipelines']:
        pu.fail("Pipeline '{}' not found in .popper.yml".format(pipeline))

    if not add and not rm:
        pu.print_yaml(config['pipelines'][pipeline]['envs'], fg='yellow')

    if add:
        config['pipelines'][pipeline]['envs'] += add.split(',')

    if rm:
        for e in rm.split(','):
            config['pipelines'][pipeline]['envs'].remove(e)

    pu.write_config(config)
def cli(ctx, pipeline):
    """Remove a popper pipeline from the user's repository effectively to
    keep the pipelines folder and the .popper.yml files in sync.
    """
    project_root = pu.get_project_root()

    # Read the configuration once and reuse it for both the lookup and the
    # update; the original redundantly re-read it before deleting the entry.
    popper_config = pu.read_config()
    pipelines = popper_config['pipelines']

    if pipeline not in pipelines:
        pu.fail("Pipeline '{}' not in this project".format(pipeline))

    pipeline_dir = os.path.join(project_root, pipelines[pipeline]['path'])

    if os.path.isdir(pipeline_dir):
        shutil.rmtree(pipeline_dir)
        del popper_config['pipelines'][pipeline]
        pu.info("Pipeline '{}' removed successfully".format(pipeline),
                fg="blue")
        pu.write_config(popper_config)
    else:
        pu.fail("Path '{}' is not a folder".format(pipeline))
def cli(ctx):
    """Resets a popper repository completely, removing all existing
    pipelines and folders, leaving behind a newly created .popper.yml file.

    Note: It only removes those files inside a pipeline folder that are also
    tracked by git. Untracked files will not be deleted.
    """
    # Fixed the doubled space ("this  project") in the confirmation prompt.
    msg = (
        "This will remove all the pipeline files in this "
        "project, do you want to continue?"
    )
    if not click.confirm(msg, abort=False):
        sys.exit(0)

    project_root = pu.get_project_root()
    if project_root != os.getcwd():
        msg = 'This command can only be executed from the project root folder'
        pu.fail(msg)

    config = pu.read_config()

    # Only git-tracked pipeline files are removed, as documented above.
    for _, p in config['pipelines'].items():
        pu.exec_cmd('git rm -r {}'.format(p['path']))

    pu.write_config(pu.init_config)

    # Regenerate the default project README.
    content = pt.ReadMe()
    content.init_project()
    pu.info("Reset complete", fg="cyan")
def publish_snapshot(self):
    """Publish the current deposition as a snapshot and record its DOI.

    Creates a brand new deposition when none exists; otherwise starts a
    new version (when the last one was already published) and replaces
    the uploaded file. On success the figshare DOI and DOI URL are
    written into the repository metadata.
    """
    if self.deposition is None:
        self.create_new_deposition()
    else:
        if self.is_last_deposition_published():
            self.create_new_version()
        self.delete_previous_file()
        self.upload_new_file()

    self.update_metadata()

    publish_url = '{}/{}/publish'.format(
        self.baseurl, self.deposition['id']
    )
    response = requests.post(publish_url, params=self.params)

    if response.status_code == 201:
        # Follow the returned 'location' to fetch the published record's DOI.
        record = requests.get(response.json()['location'], params=self.params)
        doi = record.json()['doi']
        doi_url = 'https://doi.org/{}'.format(doi)
        pu.info(
            "Snapshot has been successfully published with DOI "
            "{} and the DOI URL {}".format(doi, doi_url)
        )
        config = pu.read_config()
        config['metadata']['figshare_doi'] = doi
        config['metadata']['figshare_doi_url'] = doi_url
        pu.write_config(config)
    else:
        pu.fail(
            "Status {}: Failed to publish the record."
            .format(response.status_code)
        )
def publish_snapshot(self):
    """Publish the latest unpublished Zenodo deposition and record its DOI.

    A new deposition (or a new version of a published one) is prepared
    first; the publish action is then issued against the most recent
    unpublished record, and on success the Zenodo DOI and DOI URL are
    written into the repository metadata.
    """
    if self.deposition is None:
        self.create_new_deposition()
        self.update_metadata_from_yaml()
    else:
        if self.is_last_deposition_published():
            self.create_new_version()
        self.delete_previous_file()
        self.update_metadata()
        self.upload_new_file()

    listing = requests.get(self.baseurl, params=self.params)
    config = pu.read_config()

    # The first entry of the listing is expected to be the draft we just
    # prepared; anything else means there is nothing to publish.
    try:
        deposition_id = listing.json()[0]['id']
    except (KeyError, IndexError):
        pu.fail("No previously unpublished records exist.")

    publish_url = '{}/{}/actions/publish'.format(self.baseurl, deposition_id)
    response = requests.post(publish_url, params=self.params)

    if response.status_code == 202:
        body = response.json()
        doi = body['doi']
        doi_url = body['doi_url']
        pu.info("Snapshot has been successfully published with DOI "
                "{} and the DOI URL {}".format(doi, doi_url))
        config['metadata']['zenodo_doi'] = doi
        config['metadata']['zenodo_doi_url'] = doi_url
        pu.write_config(config)
    else:
        pu.fail("Status {}: Failed to publish the record.".format(
            response.status_code))
def cli(ctx, add, rm):
    """Manipulates the metadata associated to a Popper repository. A metadata
    entry is an arbitrary key-value pair. Without any options, it displays all
    the existing metadata entries.

    Examples:

      popper metadata # show all entries

      popper metadata --add author='Jane Doe' --add year=2018

      popper metadata --rm author
    """
    config = pu.read_config()

    if not add and not rm:
        pu.print_yaml(config['metadata'], fg='yellow')

    if add:
        for kv_str in add:
            # Split on the first '=' only, so values may themselves contain
            # '=' (e.g. URLs with query strings).
            kv_list = kv_str.split('=', 1)
            config['metadata'][kv_list[0]] = kv_list[1]

    if rm:
        for k in rm:
            config['metadata'].pop(k)

    pu.write_config(config)
def cli(ctx, pipeline):
    """Remove a popper pipeline from the user's repository effectively to
    keep the pipelines folder and the .popper.yml files in sync.
    """
    project_root = pu.get_project_root()

    # The special 'paper' pipeline lives at the project root; all other
    # pipelines live under the pipelines/ folder.
    if pipeline == 'paper':
        base_dir = project_root
    else:
        base_dir = os.path.join(project_root, 'pipelines')

    target_path = os.path.join(base_dir, pipeline)

    if not os.path.isdir(target_path):
        pu.fail("Pipeline {} doesn't exists".format(pipeline))
    else:
        shutil.rmtree(target_path)
        popper_config = pu.read_config()
        del popper_config['pipelines'][pipeline]
        pu.info("Pipeline {} removed successfully".format(pipeline),
                fg="green")
        pu.write_config(popper_config)
def cli(ctx):
    """Synchronize your pipelines and popper.yml file if any pipeline or
    stage has been deleted.
    """
    pipeline_dir = os.path.join(pu.get_project_root(), 'pipelines')
    popper_config = pu.read_config()
    pipelines = {}

    for pipeline in os.listdir(pipeline_dir):
        envs = popper_config['pipelines'][pipeline]['envs']
        relative_path = popper_config['pipelines'][pipeline]['path']
        defined_stages = popper_config['pipelines'][pipeline]['stages']

        # Check stage scripts via joined paths instead of os.chdir(), which
        # permanently changed the working directory of the whole process.
        pipe_path = os.path.join(pipeline_dir, pipeline)
        existing_stages = [
            stage for stage in defined_stages
            if os.path.exists(os.path.join(pipe_path, stage + '.sh'))
            or os.path.exists(os.path.join(pipe_path, stage))
        ]

        pipelines[pipeline] = {
            'envs': envs,
            'path': relative_path,
            'stages': existing_stages
        }

    popper_config['pipelines'] = pipelines
    pu.write_config(popper_config)
    pu.info("\nYour popper.yml file has been updated! Run git diff to see "
            "the differences.", fg="white")
def cli(ctx):
    """Synchronize your pipelines and popper.yml file if any pipeline or
    stage has been deleted.
    """
    popper_config = pu.read_config()
    project_root = pu.get_project_root()
    pipelines = popper_config['pipelines']

    # Drop entries whose folder no longer exists; refresh the stage list
    # of every pipeline that is still present.
    for name in list(pipelines):
        entry = pipelines[name]
        pipe_path = os.path.join(project_root, entry['path'])

        if not os.path.exists(pipe_path):
            del pipelines[name]
            continue

        # Stages are the .sh scripts in the pipeline folder, sans suffix.
        entry['stages'] = [
            fname[:-3] for fname in os.listdir(pipe_path)
            if fname.endswith(".sh")
        ]

    popper_config['pipelines'] = pipelines
    pu.write_config(popper_config)
    pu.info(
        "\nYour popper.yml file has been updated! Run git diff to see "
        "the differences.", fg="white")
def cli(ctx, pipeline, folder, branch):
    """Add a pipeline to your repository from the existing popperized
    repositories on github. The pipeline argument is provided as owner/repo/
    pipeline. For example, 'popper add popperized/quiho-popper/single-node'
    adds the 'single-node' pipeline from the 'quiho-popper' repository from
    the 'popperized' organization.
    """
    parts = pipeline.split('/')
    if len(parts) != 3:
        raise BadArgumentUsage(
            "Bad pipeline name. See 'popper add --help' for more info.")
    owner, repo, pipe_name = parts

    config = pu.read_config()
    if pipe_name in config['pipelines']:
        pu.fail("Pipeline {} already in repo.".format(pipe_name))

    project_root = pu.get_project_root()
    pipelines_dir = os.path.join(project_root, folder)
    if not os.path.exists(pipelines_dir):
        os.mkdir(pipelines_dir)

    gh_url = 'https://github.com/{}/{}/'.format(owner, repo)
    gh_url += 'archive/{}.tar.gz'.format(branch)

    pu.info("Downloading pipeline {}... ".format(pipe_name))
    r = pu.make_gh_request(
        gh_url,
        msg="Unable to fetch the pipeline. Please check if the name"
        " of the pipeline is correct and the internet is connected"
    )

    # Download and extract the tarball in-memory.
    # NOTE(review): extractall() on a downloaded archive is vulnerable to
    # path traversal if the tarball is untrusted — confirm source is trusted.
    with tarfile.open(mode='r:gz', fileobj=BytesIO(r.content)) as archive:
        archive.extractall()

    extracted_root = '{}-{}'.format(repo, branch)
    os.rename('{}-{}/pipelines/{}'.format(repo, branch, pipe_name),
              os.path.join(folder, pipe_name))
    shutil.rmtree(extracted_root)

    pu.info("Updating popper configuration... ")
    repo_config = get_config(owner, repo)

    config['pipelines'][pipe_name] = repo_config['pipelines'][pipe_name]
    config['pipelines'][pipe_name]['path'] = os.path.join(folder, pipe_name)

    pu.write_config(config)
    pu.info("Pipeline {} has been added successfully.".format(pipe_name),
            fg="green")
def cli(ctx, pipeline, set):
    """View or change the stages of a pipeline.
    """
    # NOTE: the parameter is named 'set' (shadowing the builtin) because it
    # mirrors the click option name; renaming would change the interface.
    config = pu.read_config()

    if pipeline not in config['pipelines']:
        pu.fail("The pipeline {} is not defined. \nSee popper.yml file to see "
                "which pipelines are defined.".format(pipeline))
    else:
        if set:
            config['pipelines'][pipeline]['stages'] = set.split(',')
            pu.write_config(config)
        pu.info("\nStages:", fg="yellow")
        pu.print_yaml(config['pipelines'][pipeline]['stages'], fg="white")
def cli(ctx, pipeline, add, rm, ls):
    """Manipulates the environments that are associated to a pipeline. An
    environment is a docker image where a pipeline runs when 'popper run' is
    executed. The 'host' environment is a special case that corresponds to
    the running directly on the environment where the 'popper' command runs,
    i.e. running directly on the host without docker. When a new pipeline is
    created using, the default environment is 'host' (see 'popper init
    --help' for more).

    Examples:

      popper env mypipeline # show environments for pipeline

      popper env mypipeline --add ubuntu-xenial,centos-7.2

      popper env mypipeline --rm host
    """
    config = pu.read_config()

    # With no action flags, just display the envs of the given pipeline.
    if not (add or rm or ls):
        if not pipeline:
            raise BadArgumentUsage('Expecting name of a pipeline')
        if pipeline not in config['pipelines']:
            pu.fail("Pipeline '{}' not found in .popper.yml".format(pipeline))
        pu.print_yaml(config['pipelines'][pipeline]['envs'], fg='yellow')
        sys.exit(0)

    if add:
        config['pipelines'][pipeline]['envs'] += add.split(',')

    if rm:
        for e in rm.split(','):
            config['pipelines'][pipeline]['envs'].remove(e)

    if ls:
        # Query Docker Hub for the list of available environment images.
        try:
            response = requests.get("https://hub.docker.com/v2/repositories/"
                                    "falsifiable/poppercheck/tags")
            environments = [entry['name']
                            for entry in response.json()['results']]
            pu.info('environments:')
            pu.print_yaml(environments)
        except requests.exceptions.RequestException as e:
            click.echo(click.style("Error: " + str(e), fg='red'), err=True)

    pu.write_config(config)
def cli(ctx, pipeline, folder):
    """Add a pipeline to your repository from the existing popperized
    repositories on github. The pipeline argument is provided as owner/repo/
    pipeline. For example, 'popper add popperized/quiho-popper/single-node'
    adds the 'single-node' pipeline from the 'quiho-popper' repository from
    the 'popperized' organization.
    """
    parts = pipeline.split('/')
    if len(parts) != 3:
        pu.fail("Bad pipeline name. See 'popper add --help' for more info.")
    owner, repo, pipe_name = parts

    config = pu.read_config()
    if pipe_name in config['pipelines']:
        pu.fail("Pipeline {} already in repo.".format(pipe_name))

    project_root = pu.get_project_root()
    pipelines_dir = os.path.join(project_root, folder)
    if not os.path.exists(pipelines_dir):
        os.mkdir(pipelines_dir)

    # Fetch the repository's master branch as a zip archive.
    gh_url = 'https://github.com/{}/{}/archive/master.zip'.format(owner, repo)
    pu.info("Downloading pipeline {}... ".format(pipe_name))
    r = requests.get(gh_url)
    if r.status_code != 200:
        pu.fail("Unable to fetch the pipeline. Please check if the name" +
                " of the pipeline is correct and the internet is connected")

    # NOTE(review): extractall() on a downloaded archive is vulnerable to
    # path traversal if the zip is untrusted — confirm source is trusted.
    with zipfile.ZipFile(BytesIO(r.content)) as archive:
        archive.extractall()

    extracted_root = '{}-master'.format(repo)
    os.rename('{}-master/pipelines/{}'.format(repo, pipe_name),
              os.path.join(folder, pipe_name))
    shutil.rmtree(extracted_root)

    pu.info("Updating popper configuration... ")
    repo_config = get_config(owner, repo)

    config['pipelines'][pipe_name] = repo_config['pipelines'][pipe_name]
    config['pipelines'][pipe_name]['path'] = os.path.join(folder, pipe_name)

    pu.write_config(config)
    pu.info("Pipeline {} has been added successfully.".format(pipe_name),
            fg="green")
def initialize_repo(project_root):
    """Initialize a popper repository: write the default configuration,
    extend .gitignore and generate the README."""
    readme = pt.ReadMe()

    if pu.is_popperized():
        pu.fail('Repository has already been popperized')
        return

    pu.write_config(pu.init_config)

    with open(os.path.join(project_root, '.gitignore'), 'a') as gitignore:
        gitignore.write(pu.gitignore_content)

    # write README
    readme.init_project()
    pu.info('Popperized repository ' + project_root, fg='blue', bold=True)
def cli(ctx):
    """Initializes a repository by creating the .popper.yml file.
    """
    project_root = scm.get_root_folder()

    # Refuse to initialize twice.
    if pu.is_popperized(project_root):
        pu.fail('Repository has already been popperized')
        return

    pu.write_config(project_root, pu.init_config)

    # Keep popper's generated files out of version control.
    with open(os.path.join(project_root, '.gitignore'), 'a') as gitignore:
        gitignore.write(pu.gitignore_content)

    # write README
    pu.info('Popperized repository {}\n'.format(project_root))
def update_config(owner, repo, pipeline_name, path, repo_config):
    """Adds the information about the added pipeline in the popperized entry
    of the .popper.yml file.
    """
    pipeline_path = 'pipelines/{}'.format(pipeline_name)

    # Fall back to the default stage list when the remote repository's
    # configuration does not declare stages for this pipeline.
    if 'stages' in repo_config:
        pipeline_stages = repo_config['stages'][pipeline_name]
    else:
        pipeline_stages = [
            'setup.sh', 'run.sh', 'post-run.sh', 'validate.sh',
            'teardown.sh']

    pipeline_envs = repo_config['envs'][pipeline_name]
    source_url = 'github.com/{}/{}'.format(owner, repo)

    config = pu.read_config()
    config['pipelines'][pipeline_name] = {
        'envs': pipeline_envs,
        'path': pipeline_path,
        'stages': pipeline_stages,
        'source': source_url
    }

    # Mirror stages/envs in the top-level sections, creating them if absent.
    config.setdefault('stages', {})[pipeline_name] = pipeline_stages
    config.setdefault('envs', {})[pipeline_name] = pipeline_envs
    config.setdefault('popperized', []).append(
        'github/{}/{}'.format(owner, repo))

    pu.write_config(config)
def cli(ctx, pipeline):
    """This command is used to remove a popper pipeline from the user's
    repository effectively to keep the pipelines folder and the .popper.yml
    files in sync.

    Examples:

      popper rm single-node
    """
    pipeline_dir = os.path.join(pu.get_project_root(), 'pipelines')

    # Read the configuration once; the original redundantly re-read it
    # after removing the pipeline folder.
    popper_config = pu.read_config()

    pipeline_path = os.path.join(pipeline_dir, pipeline)

    if os.path.isdir(pipeline_path):
        shutil.rmtree(pipeline_path)
        del popper_config['pipelines'][pipeline]

        # Drop the pipeline from the top-level sections as well, if present.
        if 'stages' in popper_config:
            if pipeline in popper_config['stages']:
                del popper_config['stages'][pipeline]
        if 'envs' in popper_config:
            if pipeline in popper_config['envs']:
                del popper_config['envs'][pipeline]

        pu.info("Pipeline {} removed successfully".format(pipeline),
                fg="green")
        pu.write_config(popper_config)
    else:
        pu.fail("Pipeline {} doesn't exists".format(pipeline))
def cli(ctx):
    """Resets a popper repository completely, removing all existing
    pipelines and folders, leaving behind a newly created .popper.yml file.
    """
    project_root = pu.get_project_root()

    # Wipe everything except git's metadata and popper's cache.
    for entry in os.listdir(project_root):
        if entry in [".git", ".cache"]:
            continue
        entry_path = os.path.join(project_root, entry)
        try:
            shutil.rmtree(entry_path)
        except OSError:
            # Not a directory -- remove it as a plain file.
            os.remove(entry_path)

    # Recreate the default configuration from scratch.
    default_config = {
        'metadata': {
            'access_right': "open",
            'license': "CC-BY-4.0",
            'upload_type': "publication",
            'publication_type': "article"
        },
        'pipelines': {},
        'popperized': ["github/popperized"]
    }
    pu.write_config(default_config)

    # Keep popper's transient files out of version control.
    with open(os.path.join(project_root, '.gitignore'), 'a') as gitignore:
        gitignore.writelines(['.cache\n', 'popper_logs\n', 'popper_status\n'])

    pu.info("Reset complete", fg="cyan")
def cli(ctx, keywords, skip_update, add, rm, ls, include_readme):
    """Searches for pipelines on GitHub matching the given keyword(s).

    The list of repositories or organizations scraped for Popper pipelines is
    specified in the 'popperized' list in the .popper.yml file. By default,
    https://github.com/popperized is added to the configuration.

    If no keywords are specified, a list of all the pipelines from all
    organizations (in the .popper.yml file) and repositories will be returned.

    Example:

        popper search quiho

    would result in:

        popperized/quiho-popper

    To add or remove orgs/repos to/from the 'popperized' , use the --add and
    --rm flags while searching.

        popper search --add org/repo

    To remove an organization/person do:

        popper search --rm org/repo

    To view the list repositories that are available to the search command:

        popper search --ls
    """
    if (rm or add or ls) and (keywords):
        raise BadArgumentUsage(
            "'add', 'rm' and 'ls' flags cannot be combined with others.")

    project_root = pu.get_project_root()

    config = pu.read_config()
    popperized_list = config['popperized']

    if add:
        add = 'github/' + add
        if add not in popperized_list:
            popperized_list.append(add)
        config['popperized'] = popperized_list
        pu.write_config(config)
        sys.exit(0)

    if rm:
        rm = 'github/' + rm
        if rm in popperized_list:
            popperized_list.remove(rm)
        config['popperized'] = popperized_list
        pu.write_config(config)
        sys.exit(0)

    result = []  # to store the result of the search query as a list

    if ls:
        for p in popperized_list:
            if p.count('/') == 1:
                # an organization: list all of its repositories
                org_name = p.split('/')[1]
                org_url = ('https://api.github.com/users/{}/repos')
                org_url = org_url.format(org_name)
                response = pu.make_gh_request(org_url)
                repos = response.json()
                temp = [r["full_name"] for r in repos]
                result.extend(temp)
            else:
                # BUGFIX: extend() added the string one character at a time;
                # append the 'github/'-stripped entry as a single item.
                result.append(p[7:])

        if len(result) > 0:
            # BUGFIX: fixed 'poppperized' typo in the message.
            pu.info("The list of available popperized repositories are:\n")
            pu.print_yaml(result)
            sys.exit(0)
        else:
            # BUGFIX: fixed missing space and 'repositores' typo that the
            # implicit string concatenation produced.
            fail_msg = ("There are no popperized repositories available "
                        "for search. Use the --add flag to add an org/repo.")
            pu.fail(fail_msg)
            sys.exit(0)

    search_params = {}
    if not keywords:  # checks if the query is empty or not
        search_params['empty_query'] = True
    else:
        search_params['empty_query'] = False

    cache_dir = os.path.join(project_root, '.cache')
    search_params["keywords"] = keywords
    search_params["cache_dir"] = cache_dir
    search_params["skip_update"] = bool(skip_update)
    search_params["in_readme"] = bool(include_readme)

    if not os.path.exists(cache_dir):
        os.makedirs(cache_dir)

    for popperized in popperized_list:
        if popperized.count('/') == 1:
            # it is an organization
            org_name = popperized.split('/')[1]
            repos = ""
            if not skip_update:
                org_url = (
                    'https://api.github.com/users/{}/repos'.format(org_name))
                response = pu.make_gh_request(org_url)
                with open(os.path.join(cache_dir,
                                       org_name + '_repos.json'), 'w') as f:
                    json.dump(response.json(), f)
            try:
                with open(os.path.join(cache_dir,
                                       org_name + '_repos.json'), 'r') as f:
                    repos = json.load(f)
            except FileNotFoundError:
                pu.fail('No cached metadata has been downloaded')

            with click.progressbar(
                    repos,
                    show_eta=False,
                    label='Searching in ' + org_name,
                    bar_template='[%(bar)s] %(label)s | %(info)s',
                    show_percent=True) as bar:
                for r in bar:
                    if search_params["empty_query"]:
                        temp = ' {}/{}'.format(org_name, r['name'])
                        result.append(temp)
                    elif l_distance(r["name"].lower(), keywords.lower()) < 1:
                        temp = ' {}/{}'.format(org_name, r['name'])
                        result.append(temp)
                    else:
                        search_params["repo_url"] = r["url"]
                        search_params["uname"] = org_name
                        result.extend(search_pipeline(search_params))
        else:
            # it is a repository
            user, repo = popperized.split('/')[1:]
            repo_url = ('https://api.github.com/repos/{}/{}'.format(
                user, repo))
            search_params["repo_url"] = repo_url
            search_params["uname"] = user
            pu.info("Searching in repository : {}".format(repo))
            result.extend(search_pipeline(search_params))

    if len(result) != 0:
        pu.info("\nSearch results:\n", fg="green")
        for res in result:
            pu.info("> " + res + "\n")
        if search_params["in_readme"]:
            pu.info("Use popper info command to view the"
                    " details of a pipeline. See popper info --"
                    "help")
    else:
        pu.fail("Unable to find any matching pipelines")
def cli(ctx, pipeline, add, rm, ls, argument):
    """Manipulates the environments that are associated to a pipeline. An
    environment is a docker image where a pipeline runs when 'popper run' is
    executed. The 'host' environment is a special case that corresponds to
    the running directly on the environment where the 'popper' command runs,
    i.e. running directly on the host without docker. When a new pipeline is
    created using, the default environment is 'host' (see 'popper init
    --help' for more).

    Examples:

      popper env mypipeline # show environments for pipeline

      popper env mypipeline --add ubuntu-xenial,centos-7.2

      popper env mypipeline --rm host

    :argument Used to pass an argument to Docker through popper.
    Can be given multiple times (Ignored for 'host').
    An example of usage is as follows:

    popper env mypipeline --add debian-9 -arg --runtime=runc -arg --ipc=host

    This will add to the environment 'debian-9' the set of arguments
    runtime=runc and ipc=host.
    """
    config = pu.read_config()

    # --ls only lists the images available on Docker Hub and exits.
    if ls:
        try:
            response = requests.get("https://hub.docker.com/v2/repositories/"
                                    "falsifiable/popper/tags")
            environments = [entry['name']
                            for entry in response.json()['results']]
            pu.info('environments:')
            pu.print_yaml(environments)
        except requests.exceptions.RequestException as e:
            click.echo(click.style("Error: " + str(e), fg='red'), err=True)
        sys.exit(0)

    # When no pipeline is given, try to infer it from the current directory.
    if not pipeline:
        get_pipe = pu.in_pipeline(name=True)
        if get_pipe is not None:
            pipeline = get_pipe
        else:
            pu.fail("This is not a pipeline")

    # With no action flags, just display the pipeline's environments.
    if not add and not rm:
        if pipeline not in config['pipelines']:
            pu.fail("Pipeline '{}' not found in .popper.yml".format(pipeline))
        pu.print_yaml(config['pipelines'][pipeline]['envs'], fg='yellow')
        sys.exit(0)

    envs = config['pipelines'][pipeline]['envs']
    args = set(argument)

    if add:
        requested = add.split(',')
        # Create entries for envs not yet configured, then attach the
        # docker arguments to every env listed on the command line.
        new_envs = set(requested) - set(envs)
        envs.update({name: {'args': []} for name in new_envs})
        for name in requested:
            envs[name]['args'] = args

    if rm:
        for name in rm.split(','):
            if name in envs:
                envs.pop(name)
            else:
                pu.warn('Environment {} not found in {}'.format(
                    name, pipeline))

    config['pipelines'][pipeline]['envs'] = envs
    pu.write_config(config)
def cli(ctx, pipeline, folder, branch):
    """Add a pipeline to your repository from the existing popperized
    repositories on github. The pipeline argument is provided as
    <org>/<repo>/<pipeline>. For example:

      popper add popperized/quiho-popper/single-node

    The above adds the 'single-node' pipeline from the 'quiho-popper'
    repository from the 'popperized' organization.

    This commands makes use of Github's API, which has a limit on the number
    of requests per hour that an unauthenticated user can make. If you reach
    this limit, you can provide a Github API token via a POPPER_GITHUB_API_TOKEN
    environment variable. If defined, this variable is used to obtain the
    token when executing HTTP requests.
    """
    if len(pipeline.split('/')) != 3:
        raise BadArgumentUsage(
            "Bad pipeline name. See 'popper add --help' for more info.")

    owner, repo, pipe_name = pipeline.split('/')

    new_pipe_name, folder = pu.get_name_and_path_for_new_pipeline(
        folder, pipe_name)

    config = pu.read_config()

    if new_pipe_name in config['pipelines']:
        pu.fail("Pipeline {} already in repo.".format(new_pipe_name))

    project_root = pu.get_project_root()
    pipelines_dir = os.path.join(project_root, folder)
    if not os.path.exists(pipelines_dir):
        try:
            os.makedirs(pipelines_dir)
        except (OSError, IOError):
            # Removed the unused 'as e' binding.
            pu.fail("Could not create the necessary path.\n")
    elif len(os.listdir(pipelines_dir)) != 0:
        pu.fail("The path already exists and is not empty.")

    gh_url = 'https://github.com/{}/{}/'.format(owner, repo)
    gh_url += 'archive/{}.tar.gz'.format(branch)

    pu.info("Downloading pipeline {} as {}...".format(pipe_name,
                                                      new_pipe_name))
    # Reconstructed the error message that was garbled in the source.
    r = pu.make_gh_request(
        gh_url,
        msg="Unable to fetch the pipeline. Please check if the name"
            " of the pipeline is correct and the internet is connected")

    # Downloading and extracting the tarfile
    with tarfile.open(mode='r:gz', fileobj=BytesIO(r.content)) as t:
        t.extractall()

    try:
        os.rename('{}-{}/pipelines/{}'.format(repo, branch, pipe_name),
                  pipelines_dir)
    except OSError:
        pu.fail("Could not rename {} to {}.".format(
            '{}-{}/pipelines/{}'.format(repo, branch, pipe_name),
            pipelines_dir))
    finally:
        # Always clean up the extracted archive root.
        shutil.rmtree('{}-{}'.format(repo, branch))

    pu.info("Updating popper configuration... ")
    repo_config = get_config(owner, repo)

    config['pipelines'][new_pipe_name] = repo_config['pipelines'][pipe_name]
    config['pipelines'][new_pipe_name]['path'] = folder

    pu.write_config(config)
    pu.info("Pipeline {} has been added successfully.".format(new_pipe_name),
            fg="green")
def rename(data):
    """Method to rename or move a pipeline from one directory to another

    Args:
        data (dict): Contains the input details of the arguments given
            by the user. It has the following keys :
            cur_name (str): the current name of the pipeline
            new_name (str): the new name for the pipeline
            cur_path (str): the current relative path of the pipeline
            new_path (str): the new relative path for the pipeline
    """
    cur_name = data['cur_name']
    project_root = pu.get_project_root()
    popper_config = pu.read_config()
    pipelines = popper_config['pipelines']
    new_name = data['new_name']
    if cur_name in pipelines:
        # If the caller supplied a current path, it must match the path
        # recorded in .popper.yml; otherwise fall back to the recorded path.
        if 'cur_path' in data:
            if not pipelines[cur_name]['path'] == data['cur_path']:
                pu.fail(
                    "No pipeline {} exists at {}.".format(
                        cur_name, data['cur_path'])
                )
        else:
            data['cur_path'] = pipelines[cur_name]['path']
        # Re-key the config entry under the new name.
        pipelines[new_name] = pipelines.pop(cur_name)
        abs_path_old = os.path.join(project_root, data['cur_path'])
        if 'new_path' in data:
            abs_path_new = os.path.join(project_root, data['new_path'])
        else:
            # No destination given: keep the pipeline in its parent folder.
            # NOTE(review): '/' splitting assumes POSIX-style paths in the
            # config — confirm this holds on Windows.
            data['new_path'] = "/".join(data['cur_path'].split('/')[:-1])
            abs_path_new = os.path.join(project_root, data['new_path'])
        data['new_path'] = os.path.join(data['new_path'], data['new_name'])
        if os.path.exists(abs_path_new):
            # Destination folder exists: rename directly into it.
            abs_path_new = os.path.join(abs_path_new, data['new_name'])
            os.rename(abs_path_old, abs_path_new)
        else:
            # Destination folder missing: create it, move the pipeline in,
            # then rename the moved folder if the pipeline name changed.
            os.makedirs(abs_path_new)
            shutil.move(abs_path_old, abs_path_new)
            if data['cur_name'] != data['new_name']:
                temp_path = os.path.join(abs_path_new, data['cur_name'])
                abs_path_new = os.path.join(abs_path_new, data['new_name'])
                os.rename(temp_path, abs_path_new)
            # Remove the old parent folder if the move left it empty.
            abs_path_old = "/".join(abs_path_old.split("/")[:-1])
            if os.listdir(abs_path_old) == []:
                shutil.rmtree(abs_path_old)
        pipelines[new_name]['path'] = data['new_path']
        popper_config['pipelines'] = pipelines
        pu.write_config(popper_config)
    else:
        pu.fail("Pipeline {} doesn't exists".format(cur_name))
def cli(ctx, keywords, skip_update, add, rm, ls, include_readme):
    """Searches for pipelines on Github matching the given keyword(s).

    The list of repositories or organizations scraped for pipelines is
    specified in the 'popperized' list in the .popper.yml file. By default,
    https://github.com/popperized is added to the list.

    If no keywords are specified, a list of all the pipelines from all
    organizations (in the .popper.yml file) and repositories will be shown.

    This commands makes use of Github's API, which has a limit on the number
    of requests per hour that an unauthenticated user can make. If you reach
    this limit, you can provide a Github API token via a
    POPPER_GITHUB_API_TOKEN environment variable. If defined, this variable
    is used to obtain the token when executing HTTP requests.

    Example:

        popper search quiho

    Would result in:

        popperized/quiho-popper/single-node

    The format of search output is <org>/<repo>/<pipeline-name>. To add
    organizations or repositories to the list of pipeline sources:

        popper search --add org/repo
        popper search --add entireorg

    To remove one:

        popper search --rm org/repo
        popper search --rm entireorg

    To view the list repositories that are available to the search command:

        popper search --ls
    """
    if ((rm and add) or (rm and ls) or (add and ls)):
        raise BadArgumentUsage("Only one of 'add', 'rm' and 'ls' accepted.")

    if (rm or add or ls) and keywords:
        raise BadArgumentUsage("Search cannot be combined with other flags.")

    config = pu.read_config()
    sources = pu.get_search_sources(config)

    if add:
        # A source is either 'org' or 'org/repo'.
        if len(add.split('/')) > 2:
            pu.fail("Bad source naming format. See 'popper search --help'.")
        if add in sources:
            pu.info('{} is already a search source.'.format(add))
            # BUGFIX: was sys.exist(0), which raised AttributeError.
            sys.exit(0)
        sources.append(add)
        config['search_sources'] = sources
        pu.write_config(config)
        sys.exit(0)

    if rm:
        if rm not in sources:
            pu.info("'{}' is not a search source.".format(rm))
            sys.exit(0)
        sources.remove(rm)
        config['search_sources'] = sources
        pu.write_config(config)
        sys.exit(0)

    if len(sources) == 0:
        pu.fail('No source for popper pipelines defined! Add one first.')

    pipeline_meta = pu.fetch_pipeline_metadata(skip_update)
    result = search_pipelines(pipeline_meta, keywords, include_readme)

    pu.info('Matching pipelines:')
    pu.print_yaml(result)