def save_directory(path, dirname, url, topmost=1):
    """Recursively download a pipeline directory via the GitHub contents API.

    Creates `dirname` under `path`, then hands each entry returned by the
    API to save_directory_util (which saves files and recurses into
    subdirectories).

    Args:
        path (str): parent directory on the local filesystem.
        dirname (str): name of the directory to create under `path`.
        url (str): GitHub API contents URL for the remote directory.
        topmost (int): 1 for the root call (a progress bar is shown);
            any other value for recursive calls (no nested bars).
    """
    r = requests.get(url)
    if r.status_code != 200:
        pu.fail("Could not download the directory {}. Make sure the directory "
                "exists in the pipeline and try again.".format(dirname))
    # NOTE(review): chdir is a process-wide side effect; callers appear to
    # rely on these download helpers managing the cwd themselves.
    os.chdir(path)
    try:
        os.mkdir(dirname)
    except FileExistsError:
        # Directory already present (e.g. partial previous download); reuse it.
        pass
    path = os.path.join(path, dirname)
    response = r.json()
    if topmost == 1:
        # Progressbar to show the number of files installed; only shown for
        # the top-level call so recursion does not nest progress bars.
        with click.progressbar(response, show_eta=False,
                               label='Downloading pipeline files:',
                               item_show_func=lambda i: '' +
                               '| Current File/Directory > ' +
                               i['name'] if i else None,
                               bar_template='[%(bar)s] %(label)s | %(info)s',
                               show_percent=True) as bar:
            for item in bar:
                save_directory_util(path, dirname, url, item)
    else:
        for item in response:
            save_directory_util(path, dirname, url, item)
def upload_snapshot(service_url, params, filename):
    """Create a new (unpublished) deposit and upload `filename` to it.

    Args:
        service_url (str): base depositions endpoint of the service.
        params (dict): request parameters (e.g. the access token).
        filename (str): path of the snapshot file to upload.

    Returns:
        The deposition id assigned by the service.
    """
    # Create the deposit
    pu.info("Uploading the snapshot...")
    headers = {'Content-Type': "application/json"}
    r = requests.post(service_url, params=params, json={}, headers=headers)
    if r.status_code == 401:
        pu.fail("Your access token is invalid. "
                "Please enter a valid access token.")
    deposition_id = r.json()['id']
    upload_url = service_url + '/{}/files'.format(deposition_id)
    data = {'filename': filename}
    # Upload the file. A context manager closes the handle even if the
    # request raises; the previous version leaked the open file object.
    with open(filename, 'rb') as snapshot:
        r = requests.post(upload_url, data=data,
                          files={'file': snapshot}, params=params)
    if r.status_code == 201:
        file_id = r.json()['id']
        pu.info(
            "Snapshot has been successfully uploaded. Your deposition id is "
            "{} and the file id is {}.".format(deposition_id, file_id))
    else:
        pu.fail("Status {}: Failed to upload your snapshot. Please "
                "try again.".format(r.status_code))
    return deposition_id
def update_metadata(self):
    """Refresh the remote record's metadata from .popper.yml.

    Reads the metadata of the previous record and overwrites its fields
    with values from the local config, then PUTs the result back. Only
    called when a previous deposition was found.
    """
    data = self.deposition['metadata']
    config = pu.read_config()['metadata']
    try:
        data['description'] = '<p>{}</p>'.format(config['abstract'])
        data['title'] = config['title']
        data['publication_date'] = str(date.today())
        # Keywords are stored as a single comma-separated string locally.
        data['keywords'] = list(
            map(lambda x: x.strip(), config['keywords'].split(','))
        )
        # Each 'author*' config key holds "name, email, affiliation".
        creators = []
        for key in sorted(list(config.keys())):
            if 'author' in key:
                name, email, affiliation = map(
                    lambda x: x.strip(), config[key].split(',')
                )
                # Two-part names are flipped to "Last, First" form.
                if len(name.split()) == 2:
                    name = ', '.join(name.split()[::-1])
                creators.append({'name': name, 'affiliation': affiliation})
        data['creators'] = creators
        # make sure the URL of the repo is in the metadata
        if 'related_identifiers' not in data:
            data['related_identifiers'] = []
        found = False
        for identifier in data['related_identifiers']:
            if identifier['identifier'] == self.remote_url:
                found = True
                break
        if not found:
            data['related_identifiers'].append({
                "identifier": self.remote_url,
                "relation": "isSupplementTo",
                "scheme": "url"
            })
        data['upload_type'] = config['upload_type']
        if config['upload_type'] == 'publication':
            data['publication_type'] = config['publication_type']
    except KeyError:
        # Any missing required key is reported as malformed metadata.
        pu.fail(
            "Metadata is not defined properly in .popper.yml. "
            "See the documentation for proper metadata format."
        )
    data = {'metadata': data}
    url = '{}/{}'.format(self.baseurl, self.deposition['id'])
    r = requests.put(
        url, data=json.dumps(data), params=self.params,
        headers={'Content-Type': "application/json"}
    )
    if r.status_code != 200:
        pu.fail("{} - Failed to update metadata: {}".format(r.status_code,
                                                            r.json()))
def cli(ctx, pipeline, env, binary, clear):
    """ Adds pipeline requirements to .popper.yml """
    # Fall back to the pipeline of the current working directory.
    if not pipeline:
        detected = pu.in_pipeline(name=True)
        if detected is None:
            pu.fail("No pipeline detected")
        pipeline = detected
    config = pu.read_config()
    if pipeline not in config['pipelines']:
        pu.fail(
            'Pipeline {} does not exist. Check your .popper.yml file.'.format(
                pipeline))
    # Merge with (or, when --clear is given, replace) recorded requirements.
    requirements = config['pipelines'][pipeline].get('requirements', {})
    existing_vars = set() if clear else set(requirements.get('vars', []))
    requirements['vars'] = list(existing_vars | set(env))
    existing_bins = set() if clear else set(requirements.get('bin', []))
    requirements['bin'] = list(existing_bins | set(binary))
    pu.update_config(pipeline, reqs=requirements)
def __init__(self, access_token):
    """Look up a Zenodo deposition already linked to this repository.

    Fetches all depositions for the account and keeps (in
    self.deposition) any whose first related identifier matches the
    repo's remote URL.

    Args:
        access_token (str): Zenodo API access token.
    """
    super(Zenodo, self).__init__()
    self.baseurl = 'https://zenodo.org/api/deposit/depositions'
    self.params = {'access_token': access_token}
    r = requests.get(self.baseurl, params=self.params)
    # Check the HTTP status explicitly instead of relying on the
    # TypeError raised while iterating an error payload, as the previous
    # control flow did.
    if r.status_code == 401:
        pu.fail(
            "The access token provided was invalid. "
            "Please provide a valid access_token."
        )
    elif r.status_code != 200:
        pu.fail(
            "Status {}: Could not fetch the depositions."
            "Try again later.".format(r.status_code)
        )
    depositions = r.json()
    remote_url = pu.get_remote_url()
    for deposition in depositions:
        metadata = deposition['metadata']
        try:
            # Only the first related identifier is compared; depositions
            # without identifiers are skipped. No break: the last match
            # wins, preserving the original behavior.
            identifiers = metadata['related_identifiers']
            if identifiers[0]['identifier'] == remote_url:
                self.deposition = deposition
        except KeyError:
            pass
def __init__(self, access_token):
    """Look up a Figshare article already linked to this repository.

    Lists the account's articles and scans each full article record's
    `references` for the repo's remote URL; the first match is kept in
    self.deposition.

    Args:
        access_token (str): Figshare personal access token.
    """
    super(Figshare, self).__init__()
    self.baseurl = 'https://api.figshare.com/v2/account/articles'
    self.params = {'access_token': access_token}
    r = requests.get(self.baseurl, params=self.params)
    if r.status_code == 200:
        depositions = r.json()
        remote_url = pu.get_remote_url()
        for deposition in depositions:
            deposition_id = deposition['id']
            url = '{}/{}'.format(self.baseurl, deposition_id)
            # The list endpoint returns summaries; fetch the full
            # article to obtain its `references` field.
            r = requests.get(url, params=self.params)
            deposition = r.json()
            # NOTE(review): assumes every article record carries a
            # 'references' key; a KeyError would propagate otherwise —
            # confirm against the Figshare API.
            if remote_url in deposition['references']:
                self.deposition = deposition
                break
    elif r.status_code == 403:
        pu.fail(
            "The access token provided was invalid. "
            "Please provide a valid access_token."
        )
    else:
        pu.fail(
            "Status {}: Could not fetch the depositions."
            "Try again later.".format(r.status_code)
        )
def run(self, reuse=False):
    """Runs the singularity action.

    Resolves the image reference from the action's `uses` field,
    (re)builds or pulls it depending on `reuse`, then starts the
    container and fails the action on a non-zero exit code.
    """
    uses = self.action['uses']
    needs_build = True
    if 'shub://' in uses:
        # Singularity Hub reference: pulled, never built locally.
        image = uses
        needs_build = False
    elif './' in uses:
        # Local action: build from a path relative to the cwd.
        image = 'action/' + os.path.basename(uses)
        singularityfile_path = os.path.join(os.getcwd(), uses)
    else:
        # Remote repo action: build from the cloned action directory.
        image = '/'.join(uses.split('/')[:2])
        singularityfile_path = os.path.join(self.action['repo_dir'],
                                            self.action['action_dir'])
    if reuse:
        # Re-use an existing container; set up only when none exists.
        prepare = not self.singularity_exists()
    else:
        # Start from a clean slate every time.
        if self.singularity_exists():
            self.singularity_rm()
        prepare = True
    if prepare:
        if needs_build:
            self.singularity_build(singularityfile_path, image)
        else:
            self.singularity_pull(image)
    e = self.singularity_start(image)
    if e != 0:
        pu.fail('Action {} failed!\n'.format(self.action['name']))
def initialize_existing_pipeline(pipeline_path, stages, envs):
    """Verify every stage in `stages` has a script inside `pipeline_path`.

    A stage script may be named exactly like the stage or carry a '.sh'
    suffix; if neither exists, the command aborts with a hint about the
    --stages flag.
    """
    for stage in stages.split(','):
        script = os.path.join(pipeline_path, stage)
        if not (isfile(script) or isfile(script + '.sh')):
            pu.fail((
                "Unable to find script for stage '" + stage + "'. You might need "
                "to provide values for the --stages flag. See 'init --help'."))
def cli(ctx):
    """Resets a popper repository completely, removing all existing
    pipelines and folders, leaving behind a newly created .popper.yml file.

    Note: It only removes those files inside a pipeline folder that are
    also tracked by git. Untracked files will not be deleted.
    """
    msg = (
        "This will remove all the pipeline files in this "
        " project, do you want to continue?"
    )
    if(not click.confirm(msg, abort=False)):
        sys.exit(0)
    # Refuse to run anywhere but the project root so the relative
    # 'git rm' paths below resolve correctly.
    project_root = pu.get_project_root()
    if project_root != os.getcwd():
        msg = 'This command can only be executed from the project root folder'
        pu.fail(msg)
    config = pu.read_config()
    # Remove every pipeline folder from both the git index and the
    # working tree (tracked files only).
    for _, p in config['pipelines'].items():
        pu.exec_cmd('git rm -r {}'.format(p['path']))
    # Start over: default configuration plus a freshly generated README.
    pu.write_config(pu.init_config)
    content = pt.ReadMe()
    content.init_project()
    pu.info("Reset complete", fg="cyan")
def publish_snapshot(self):
    """Publish the repository snapshot to Figshare.

    Creates a brand-new deposition when none is linked to this repo;
    otherwise reuses the existing one (adding a new version if the last
    one is already published) and replaces its file. Metadata is
    refreshed either way. On success the DOI and DOI URL are written
    back into .popper.yml.
    """
    if self.deposition is None:
        self.create_new_deposition()
    else:
        if self.is_last_deposition_published():
            self.create_new_version()
        self.delete_previous_file()
        self.upload_new_file()
    self.update_metadata()
    url = '{}/{}/publish'.format(
        self.baseurl, self.deposition['id']
    )
    r = requests.post(url, params=self.params)
    if r.status_code == 201:
        # The response body points at the published article; fetch it to
        # learn the DOI that was assigned.
        url = r.json()['location']
        r = requests.get(url, params=self.params)
        doi = r.json()['doi']
        doi_url = 'https://doi.org/{}'.format(doi)
        pu.info(
            "Snapshot has been successfully published with DOI "
            "{} and the DOI URL {}".format(doi, doi_url)
        )
        # Persist the DOI so later commands (e.g. badges) can use it.
        config = pu.read_config()
        config['metadata']['figshare_doi'] = doi
        config['metadata']['figshare_doi_url'] = doi_url
        pu.write_config(config)
    else:
        pu.fail(
            "Status {}: Failed to publish the record."
            .format(r.status_code)
        )
def cli(ctx, pipeline):
    """Add a pipeline to your repository from the existing popperized
    repositories on github. The pipeline argument is provided as owner/repo/
    pipeline. For example, popper add popperized/quiho-popper/single-node
    adds the single-node pipeline from the quiho-popper repository.
    """
    try:
        owner, repo, pipeline_name = pipeline.split('/')
    except ValueError:
        pu.fail("See popper add --help for more info.")
    project_root = pu.get_project_root()
    pipelines_path = os.path.join(project_root, 'pipelines')
    # Make sure the 'pipelines' folder exists before downloading into it.
    if not os.path.exists(pipelines_path):
        os.chdir(project_root)
        os.mkdir('pipelines')
    api_url = ('https://api.github.com/repos/{}/{}/contents/pipelines/{}'
               .format(owner, repo, pipeline_name))
    repo_config = get_config(owner, repo)
    save_directory(pipelines_path, pipeline_name, api_url)
    pipeline_path = os.path.join(pipelines_path, pipeline_name)
    update_config(owner, repo, pipeline_name, pipeline_path, repo_config)
    pu.info("Pipeline {} successfully added.".format(pipeline_name) +
            " It can be viewed in the pipelines directory.", fg="green")
def cli(ctx, pipeline):
    """Remove a popper pipeline from the user's repository effectively to
    keep the pipelines folder and the .popper.yml files in sync.
    """
    project_root = pu.get_project_root()
    pipelines = pu.read_config()['pipelines']
    # Resolve the pipeline's folder from the recorded path.
    if pipeline in pipelines:
        pipeline_dir = os.path.join(project_root,
                                    pipelines[pipeline]['path'])
    else:
        pu.fail("Pipeline '{}' not in this project".format(pipeline))
    if os.path.isdir(pipeline_dir):
        # Delete the folder, then drop the entry from the configuration.
        shutil.rmtree(pipeline_dir)
        popper_config = pu.read_config()
        del popper_config['pipelines'][pipeline]
        pu.info("Pipeline '{}' removed successfully".format(pipeline),
                fg="blue")
        pu.write_config(popper_config)
    else:
        pu.fail("Path '{}' is not a folder".format(pipeline))
def cli(ctx, service, key):
    """Creates a archive of the repository on the provided service using an
    access token. Reports an error if archive creation is not successful.
    Currently supported services are Zenodo and Figshare.
    """
    supported = {'zenodo': Zenodo, 'figshare': Figshare}
    token_env_vars = {
        'zenodo': 'POPPER_ZENODO_API_TOKEN',
        'figshare': 'POPPER_FIGSHARE_API_TOKEN'
    }
    if service not in supported:
        pu.fail("The service {} is not supported. See popper archive "
                "--help for more info.".format(service))
    # Token resolution order: CLI flag, environment variable, then the
    # interactive prompt / key file handled by get_access_token.
    if not key:
        key = os.environ.get(token_env_vars[service])
        if key is None:
            key = get_access_token(service)
    archive = supported[service](key)
    archive.publish_snapshot()
    pu.info("Done..!")
def get_access_token(service):
    """Tries to read the access token from a key file. If not present,
    prompts the user for a key and also stores the key in a key file if
    the user wishes.

    Args:
        service (str): service name; the key file is '.<service>.key'
            in the project root.

    Returns:
        str: the decrypted (or freshly entered) access token.
    """
    project_root = pu.get_project_root()
    os.chdir(project_root)
    try:
        with open('.{}.key'.format(service), 'r') as keyfile:
            encrypted_access_token = keyfile.read().strip()
            passphrase = click.prompt(
                'Please enter your passphrase for {}'.format(service),
                hide_input=True).encode()
            aes = pyaes.AESModeOfOperationCTR(generate_key(passphrase))
            try:
                # NOTE(review): the token was stored in text mode below but
                # is decrypted here as a str — confirm pyaes accepts str
                # input; the round-trip looks fragile.
                access_token = aes.decrypt(encrypted_access_token).decode()
            except UnicodeDecodeError:
                # A wrong passphrase yields undecodable garbage bytes.
                pu.fail("Invalid passphrase. Please use the same passphrase "
                        "used at the time of encrypting the access_token.")
    except FileNotFoundError:
        # No key file yet: prompt for the token and optionally store it
        # encrypted under a user-chosen passphrase.
        pu.info('No access token found for {}'.format(service))
        access_token = click.prompt(
            'Please enter your access token for {}'.format(service))
        if click.confirm('Would you like to store this key?'):
            passphrase = click.prompt('Enter a strong passphrase',
                                      hide_input=True).encode()
            aes = pyaes.AESModeOfOperationCTR(generate_key(passphrase))
            encrypted_access_token = aes.encrypt(access_token)
            with open('.{}.key'.format(service), 'w') as keyfile:
                keyfile.writelines('{}'.format(''.join(
                    chr(b) for b in encrypted_access_token)))
            pu.info('Your key is stored in .{}.key'.format(service))
    return access_token
def cli(ctx, pipeline, timeout, skip):
    """Executes a pipeline and reports its status. When PIPELINE is given,
    it executes only the pipeline with such a name. If the argument is
    omitted, all pipelines are executed in lexicographical order.
    """
    cwd = os.getcwd()
    pipes = pu.read_config()['pipelines']
    project_root = pu.get_project_root()
    # Initialize so the final check cannot hit an unbound name when the
    # repository defines no pipelines at all.
    status = None
    if pipeline:
        if pipeline not in pipes:
            pu.fail("Cannot find pipeline {} in .popper.yml".format(pipeline))
        status = run_pipeline(project_root, pipes[pipeline], timeout, skip)
    else:
        if os.path.basename(cwd) in pipes:
            # run just the one for CWD
            status = run_pipeline(project_root, pipes[os.path.basename(cwd)],
                                  timeout, skip)
        else:
            # run all, in sorted order as the docstring promises (plain
            # dict iteration gave insertion order)
            for pipe in sorted(pipes):
                status = run_pipeline(project_root, pipes[pipe],
                                      timeout, skip)
                if status == 'FAIL':
                    break
    os.chdir(cwd)
    if status == 'FAIL':
        pu.fail("Failed to execute pipeline")
def run(self, reuse):
    """Run the docker action.

    Resolves the image tag from the action's `uses` field, builds or
    pulls it and (re)creates the container depending on `reuse`, then
    starts the container, failing the action on a non-zero exit code.
    """
    uses = self.action['uses']
    needs_build = True
    if 'docker://' in uses:
        # Explicit registry image: pulled, never built.
        tag = uses.replace('docker://', '')
        needs_build = False
    elif './' in uses:
        # Local action: build from a path relative to the cwd.
        tag = 'action/' + os.path.basename(uses)
        dockerfile_path = os.path.join(os.getcwd(), uses)
    else:
        # Remote repo action: build from the cloned action directory.
        tag = '/'.join(uses.split('/')[:2])
        dockerfile_path = os.path.join(self.action['repo_dir'],
                                       self.action['action_dir'])
    if reuse:
        # Re-use an existing container; set up only when none exists.
        prepare = not self.docker_exists()
    else:
        # Always start from a clean slate.
        if self.docker_exists():
            self.docker_rm()
        prepare = True
    if prepare:
        if needs_build:
            self.docker_build(tag, dockerfile_path)
        else:
            self.docker_pull(tag)
        self.docker_create(tag)
    e = self.docker_start()
    if e != 0:
        pu.fail('Action {} failed!\n'.format(self.action['name']))
def run(self, reuse=False):
    """Run a host (shell) action.

    Builds the command from the action's `runs`/`args`, executes it in
    the action's directory with the action's env vars exported, then
    restores the cwd and environment. Fails the action on a non-zero
    exit code.
    """
    # Copy the list so we never mutate the shared action definition in
    # place (the original prepended './' to self.action['runs'][0] on
    # every invocation).
    cmd = list(self.action.get('runs', ['entrypoint.sh']))
    cmd[0] = os.path.join('./', cmd[0])
    # Default to an empty list; extending with the old '' default was a
    # no-op but extending with any non-empty string iterates characters.
    cmd.extend(self.action.get('args', []))
    cwd = os.getcwd()
    if not self.dry_run:
        if 'repo_dir' in self.action:
            os.chdir(self.action['repo_dir'])
        else:
            os.chdir(os.path.join(cwd, self.action['uses']))
    os.environ.update(self.action.get('env', {}))
    pu.info('{}[{}] {}\n'.format(self.msg_prefix, self.action['name'],
                                 ' '.join(cmd)))
    _, ecode = pu.exec_cmd(' '.join(cmd), verbose=(not self.quiet),
                           debug=self.debug, ignore_error=True,
                           log_file=self.log_filename,
                           dry_run=self.dry_run)
    # NOTE(review): this drops any variable the action declared even if
    # it existed in the environment beforehand — confirm intended.
    for i in self.action.get('env', {}):
        os.environ.pop(i)
    os.chdir(cwd)
    if ecode != 0:
        pu.fail("\n\nAction '{}' failed.\n.".format(self.action['name']))
def initialize_repo(project_root):
    """This function is used for initializing a popper repository.

    Writes a default .popper.yml and appends popper's work folders to
    .gitignore; refuses to run on an already-popperized repository.
    """
    if pu.is_popperized():
        pu.fail('Repository has already been popperized')
        return
    # Default configuration skeleton for a fresh repository.
    default_config = {
        'metadata': {
            'access_right': "open",
            'license': "CC-BY-4.0",
            'upload_type': "publication",
            'publication_type': "article"
        },
        'pipelines': {},
        'popperized': ["github/popperized"]
    }
    pu.write_config(default_config)
    ignore_entries = ['.cache\n', 'popper_logs\n', 'popper_status\n']
    with open(os.path.join(project_root, '.gitignore'), 'a') as f:
        f.writelines(ignore_entries)
    pu.info('Popperized repository ' + project_root, fg='blue', bold=True)
def cli(ctx, pipeline):
    """Remove a popper pipeline from the user's repository effectively to
    keep the pipelines folder and the .popper.yml files in sync.
    """
    project_root = pu.get_project_root()
    # 'paper' lives at the project root; everything else under pipelines/.
    if pipeline == 'paper':
        pipeline_dir = project_root
    else:
        pipeline_dir = os.path.join(project_root, 'pipelines')
    pipeline_path = os.path.join(pipeline_dir, pipeline)
    if os.path.isdir(pipeline_path):
        shutil.rmtree(pipeline_path)
        popper_config = pu.read_config()
        # pop() instead of del: the folder can exist even when the entry
        # was never recorded in .popper.yml, which used to raise KeyError.
        popper_config['pipelines'].pop(pipeline, None)
        pu.info("Pipeline {} removed successfully".format(pipeline),
                fg="green")
        pu.write_config(popper_config)
    else:
        # fixed grammar of the user-facing message ("doesn't exists")
        pu.fail("Pipeline {} doesn't exist".format(pipeline))
def publish_snapshot(self):
    """Publish the repository snapshot to Zenodo.

    Creates a new deposition (or a new version of the published one),
    uploads the snapshot file, refreshes metadata, then publishes the
    most recent unpublished record. On success the DOI and DOI URL are
    stored in .popper.yml.
    """
    if self.deposition is None:
        self.create_new_deposition()
        self.update_metadata_from_yaml()
    else:
        if self.is_last_deposition_published():
            self.create_new_version()
        self.delete_previous_file()
        self.update_metadata()
    self.upload_new_file()
    # Re-fetch the deposition list; index 0 is presumably the record
    # just created/updated above (newest first) — confirm ordering.
    r = requests.get(self.baseurl, params=self.params)
    config = pu.read_config()
    try:
        deposition_id = r.json()[0]['id']
    except (KeyError, IndexError):
        pu.fail("No previously unpublished records exist.")
    url = '{}/{}/actions/publish'.format(self.baseurl, deposition_id)
    r = requests.post(url, params=self.params)
    if r.status_code == 202:
        doi = r.json()['doi']
        doi_url = r.json()['doi_url']
        pu.info("Snapshot has been successfully published with DOI "
                "{} and the DOI URL {}".format(doi, doi_url))
        # Persist the DOI so later commands (e.g. badges) can use it.
        config['metadata']['zenodo_doi'] = doi
        config['metadata']['zenodo_doi_url'] = doi_url
        pu.write_config(config)
    else:
        pu.fail("Status {}: Failed to publish the record.".format(
            r.status_code))
def get_access_token(service, cwd):
    """Return the access token for `service`.

    Reads and decrypts '.<service>.key' inside `cwd`; when the file is
    missing, prompts for the token and optionally stores it encrypted
    with a passphrase-derived Fernet key.
    """
    os.chdir(cwd)
    key_path = '.{}.key'.format(service)
    try:
        with open(key_path, 'r') as keyfile:
            token_ciphertext = keyfile.read().strip().encode()
            passphrase = click.prompt(
                'Please enter your passphrase for {}'.format(service),
                hide_input=True).encode()
            cipher = Fernet(generate_key(passphrase))
            try:
                access_token = cipher.decrypt(
                    token_ciphertext).decode("utf8")
            except InvalidToken:
                # Fernet authenticates; a wrong passphrase cannot decrypt.
                pu.fail("Invalid passphrase. Please use the same passphrase "
                        "used at the time of encrypting the access_token.")
    except FileNotFoundError:
        pu.info('No access token found for {}'.format(service))
        access_token = click.prompt(
            'Please enter your access token for {}'.format(service))
        if click.confirm('Would you like to store this key?'):
            passphrase = click.prompt('Enter a strong passphrase',
                                      hide_input=True).encode()
            cipher = Fernet(generate_key(passphrase))
            token_ciphertext = cipher.encrypt(access_token.encode())
            with open(key_path, 'w') as keyfile:
                keyfile.writelines(token_ciphertext.decode("utf8"))
            pu.info('Your key is stored in .{}.key'.format(service))
    return access_token
def initialize_repo(project_root):
    """Create a minimal .popper.yml in `project_root`.

    Refuses to run when the repository is already popperized.
    """
    if pu.is_popperized():
        pu.fail('Repository has already been popperized')
    config_file = os.path.join(project_root, '.popper.yml')
    with open(config_file, 'w') as f:
        f.write('{ metadata: { }, pipelines: { } }\n')
    pu.info('Popperized repository ' + project_root, fg='blue', bold=True)
def publish_snapshot(self):
    """Publish the current deposition, aborting on any non-201 response."""
    publish_url = '{}/{}/publish'.format(self.baseurl,
                                         self.deposition['id'])
    r = requests.post(publish_url, params=self.params)
    if r.status_code != 201:
        pu.fail("Status {}: {}".format(r.status_code, r.json()))
    # Remember that the latest deposition is now in the published state.
    self.is_last_published = True
def create_new_deposition(self):
    """POST an empty deposition and remember it along with its record id."""
    headers = {"Content-Type": "application/json"}
    r = requests.post(self.baseurl, params=self.params, json={},
                      headers=headers)
    if r.status_code != 201:
        pu.fail("Status {}: {}".format(r.status_code, r.json()))
    self.deposition = r.json()
    self.record_id = self.deposition['id']
def create_new_deposition(self):
    """Create an empty deposition and store the server's response."""
    headers = {"Content-Type": "application/json"}
    r = requests.post(self.baseurl, params=self.params, json={},
                      headers=headers)
    if r.status_code != 201:
        pu.fail("Status {}: Could not create new deposition.".format(
            r.status_code))
    else:
        self.deposition = r.json()
def cli(ctx, service, history, inplace):
    """Generates markdown for the badge of a service. Currently available
    services are: CloudLab, Chameleon, Google Cloud Engine and Popper.
    """
    if history and service:
        raise BadArgumentUsage("--history can't be combined with other flags.")
    remote_url = pu.get_remote_url()
    if not remote_url:
        pu.fail("Failed to infer remote URL for git repository.")
    org, repo = remote_url.split('/')[-2:]
    if history:
        # --history: print the badge server's records for this repo and
        # exit without generating any markup.
        baseurl = pu.read_config().get('badge-server-url',
                                       'http://badges.falsifiable.us')
        try:
            r = requests.get('{}/{}/{}/list'.format(baseurl, org, repo))
            if r.json():
                pu.print_yaml(r.json())
            else:
                pu.info("No records to show")
        except requests.exceptions.RequestException:
            pu.fail("Could not communicate with the badge server")
        sys.exit(0)
    if not service and inplace:
        raise BadArgumentUsage("--inplace must be given with --service")
    if service is None:
        pu.fail('Please specify a service name.')
    if service not in services:
        pu.fail('Unknown service {}.'.format(service))
    # Build the markdown snippet: [![alt](image)](link). The popper
    # service's image URL is parameterized by org/repo.
    if service == 'popper':
        org, repo = remote_url.split('/')[-2:]
        markup = '[![{}]({})]({})'.format(
            services[service][0],
            services[service][1].format(org, repo),
            services[service][2])
    else:
        markup = '[![{}]({})]({})'.format(*services[service])
    if not inplace:
        pu.info(markup)
        sys.exit(0)
    # --inplace: prepend the badge markup to the project's README.md.
    try:
        os.chdir(pu.get_project_root())
        with open('README.md', 'r+') as f:
            content = f.read()
            f.seek(0, 0)
            f.write(markup + '\n\n' + content)
    except IOError as e:
        if e.errno == ENOENT:
            pu.fail("README.md does not exist at the root of the project")
def cli(ctx, name, stages, envs, existing, infer_stages):
    """Initializes a repository or a pipeline. Without an argument, this
    command initializes a popper repository. If an argument is given, a
    pipeline or paper folder is initialized. If the given name is 'paper',
    then a 'paper' folder is created. Otherwise, a pipeline named NAME is
    created and initialized inside the 'pipelines' folder.

    By default, the stages of a pipeline are: setup, run, post-run, validate
    and teardown. To override these, the `--stages` flag can be provided,
    which expects a comma-separated list of stage names. The teardown stage
    is to be provided at the end if the --stages flag is being used.

    If the --existing flag is given, the NAME argument is treated as a path
    to a folder, which is assumed to contain bash scripts. --stages must be
    given.
    """
    # check that teardown, when present, is the last stage. Compare against
    # the parsed list so a stage merely containing the substring 'teardown'
    # (e.g. 'teardown-db') is not mistaken for it.
    if stages and 'teardown' in stages.split(',') and \
            stages.split(',')[-1] != 'teardown':
        raise BadArgumentUsage(
            '--stages = Teardown should be the last stage.' +
            ' Consider renaming it or putting it at the end.')
    project_root = pu.get_project_root()
    # init repo
    if name is None:
        initialize_repo(project_root)
        return
    if not pu.is_popperized():
        # repaired: this message was previously a string literal broken
        # across a line break
        pu.fail("Repository has not been popperized yet. See 'init --help'")
    if isdir(os.path.join(project_root, name)) and existing:
        # existing pipeline
        abs_path = os.path.join(project_root, name)
        relative_path = name
        if infer_stages:
            stages = ",".join(
                map(lambda x: x[:-3], sorted(glob.glob1(abs_path, '*.sh'))))
        else:
            initialize_existing_pipeline(abs_path, stages, envs)
    elif name == 'paper':
        # create a paper pipeline
        abs_path = os.path.join(project_root, 'paper')
        relative_path = os.path.join('paper')
        initialize_paper(abs_path, envs)
    else:
        # new pipeline
        abs_path = os.path.join(project_root, 'pipelines', name)
        relative_path = os.path.join('pipelines', name)
        initialize_new_pipeline(abs_path, stages, envs)
    pu.update_config(name, stages, envs, relative_path)
    pu.info('Initialized pipeline ' + name, fg='blue', bold=True)
def cli(ctx, pipeline, add, rm):
    """Define or remove executions of a pipeline."""
    config, pipeline_config = pu.read_config(pipeline)
    if add and rm:
        raise UsageError("Both add and rm cannot be given at the same time. "
                         "See popper env-vars --help for more information.")
    if add:
        # All --add KEY=VAL pairs become ONE dict appended to 'vars';
        # each dict describes a single execution of the pipeline.
        env_vars = pipeline_config.get('vars', [])
        vars_add = {}
        for var in add:
            key, val = var.split('=')
            vars_add[key] = val
        env_vars.append(vars_add)
        pu.update_config(pipeline, vars=env_vars)
    elif rm:
        env_vars = pipeline_config.get('vars', None)
        if not env_vars:
            pu.fail("No environment variables defined for this pipeline.")
        vars_del = {}
        for var in rm:
            key, val = var.split('=')
            vars_del[key] = val
        # Find the entry whose key/value pairs exactly match vars_del.
        # NOTE(review): entries of equal length but different keys would
        # raise KeyError at vars[key] — presumably entries share a
        # schema; confirm.
        index = -1
        for vars in env_vars:
            if len(vars.keys()) != len(vars_del.keys()):
                continue
            else:
                successful = True
                for key in vars_del:
                    if vars[key] != vars_del[key]:
                        successful = False
                if successful:
                    index = env_vars.index(vars)
                    break
        if index != -1:
            env_vars.pop(index)
            pu.update_config(pipeline, vars=env_vars)
        else:
            pu.fail("The environment variable list does "
                    "not exist for this pipeline.")
    else:
        # Neither --add nor --rm: list the currently defined variables.
        try:
            env_vars = pipeline_config['vars']
            if len(env_vars) == 0:
                raise KeyError
            pu.print_yaml(env_vars)
        except KeyError:
            pu.info("No environment variables defined for this pipeline.")
def save_file(path, filename, download_url):
    """Helper method to save a file.

    Downloads `download_url` and writes the payload to
    `path`/`filename`, failing the command on any non-200 response.

    Args:
        path (str): directory to write into (becomes the cwd).
        filename (str): name of the file to create.
        download_url (str): URL of the raw file contents.
    """
    os.chdir(path)
    r = requests.get(download_url)
    if r.status_code != 200:
        pu.fail("Could not download the file {}. Make sure the file "
                "exists in the pipeline and try again.".format(filename))
    with open(filename, 'wb') as f:
        # Write the payload directly; wrapping it in BytesIO and calling
        # writelines() (as before) produced the same bytes with needless
        # line iteration.
        f.write(r.content)
def cli(ctx, pipeline, folder, branch):
    """Add a pipeline to your repository from the existing popperized
    repositories on github. The pipeline argument is provided as owner/repo/
    pipeline. For example, 'popper add popperized/quiho-popper/single-node'
    adds the 'single-node' pipeline from the 'quiho-popper' repository from
    the 'popperized' organization.
    """
    if len(pipeline.split('/')) != 3:
        raise BadArgumentUsage(
            "Bad pipeline name. See 'popper add --help' for more info.")
    owner, repo, pipe_name = pipeline.split('/')
    config = pu.read_config()
    if pipe_name in config['pipelines']:
        pu.fail("Pipeline {} already in repo.".format(pipe_name))
    project_root = pu.get_project_root()
    pipelines_dir = os.path.join(project_root, folder)
    if not os.path.exists(pipelines_dir):
        os.mkdir(pipelines_dir)
    # Download the whole branch tarball and pull out just the pipeline.
    gh_url = 'https://github.com/{}/{}/'.format(owner, repo)
    gh_url += 'archive/{}.tar.gz'.format(branch)
    pu.info("Downloading pipeline {}... ".format(pipe_name))
    r = pu.make_gh_request(
        gh_url,
        msg="Unable to fetch the pipeline. Please check if the name"
            " of the pipeline is correct and the internet is connected"
    )
    # Downloading and extracting the tarfile.
    # NOTE(review): extractall() trusts member paths from a downloaded
    # archive; a malicious tarball could write outside the cwd — consider
    # validating member names before extraction.
    with tarfile.open(
            mode='r:gz', fileobj=BytesIO(r.content)) as t:
        t.extractall()
    # Move the pipeline folder into place, then drop the rest of the tree.
    os.rename('{}-{}/pipelines/{}'.format(
        repo, branch, pipe_name), os.path.join(folder, pipe_name))
    shutil.rmtree('{}-{}'.format(repo, branch))
    pu.info("Updating popper configuration... ")
    repo_config = get_config(owner, repo)
    config['pipelines'][pipe_name] = repo_config['pipelines'][pipe_name]
    config['pipelines'][pipe_name]['path'] = os.path.join(folder, pipe_name)
    pu.write_config(config)
    pu.info("Pipeline {} has been added successfully.".format(pipe_name),
            fg="green")