def cli(ctx, pipeline):
    """Remove a popper pipeline from the user's repository effectively to
    keep the pipelines folder and the .popper.yml files in sync.
    """
    project_root = pu.get_project_root()

    # Read the configuration once; it is reused for the removal below
    # (the original read the file a second time before deleting the entry).
    popper_config = pu.read_config()
    pipelines = popper_config['pipelines']

    if pipeline in pipelines:
        path = pipelines[pipeline]['path']
        pipeline_dir = os.path.join(project_root, path)
    else:
        pu.fail("Pipeline '{}' not in this project".format(pipeline))

    if os.path.isdir(pipeline_dir):
        shutil.rmtree(pipeline_dir)
        del popper_config['pipelines'][pipeline]
        pu.info("Pipeline '{}' removed successfully".format(pipeline),
                fg="blue")
        pu.write_config(popper_config)
    else:
        # Report the offending filesystem path; the original printed the
        # pipeline name while claiming it was a path.
        pu.fail("Path '{}' is not a folder".format(pipeline_dir))
def cli(ctx, pipeline):
    """Remove a popper pipeline from the user's repository effectively to
    keep the pipelines folder and the .popper.yml files in sync.
    """
    project_root = pu.get_project_root()

    # The 'paper' pipeline lives at the project root; all others live
    # under the 'pipelines' folder.
    if pipeline == 'paper':
        pipeline_dir = project_root
    else:
        pipeline_dir = os.path.join(project_root, 'pipelines')

    pipeline_path = os.path.join(pipeline_dir, pipeline)

    if os.path.isdir(pipeline_path):
        shutil.rmtree(pipeline_path)
        popper_config = pu.read_config()
        del popper_config['pipelines'][pipeline]
        pu.info("Pipeline {} removed successfully".format(pipeline),
                fg="green")
        pu.write_config(popper_config)
    else:
        # grammar fix: "doesn't exists" -> "doesn't exist"
        pu.fail("Pipeline {} doesn't exist".format(pipeline))
def cli(ctx, pipeline, add, rm):
    """Manipulates the environments that are associated to a pipeline. An
    environment is a docker image where a pipeline runs when 'popper run' is
    executed. The 'host' environment is a special case that corresponds to
    the running directly on the environment where the 'popper' command runs,
    i.e. running directly on the host without docker. When a new pipeline is
    created using, the default environment is 'host' (see 'popper init
    --help' for more).

    Examples:

      popper env mypipeline # show environments for pipeline

      popper env mypipeline --add ubuntu-xenial,centos-7.2

      popper env mypipeline --rm host
    """
    config = pu.read_config()
    envs = config['pipelines'][pipeline]['envs']

    # With no flags given, just display the current environment list.
    if not (add or rm):
        pu.print_yaml(envs, fg='yellow')

    if add:
        envs.extend(add.split(','))

    if rm:
        for env_name in rm.split(','):
            envs.remove(env_name)

    pu.write_config(config)
def cli(ctx, pipeline, timeout, skip):
    """Executes a pipeline and reports its status. When PIPELINE is given, it
    executes only the pipeline with such a name. If the argument is omitted,
    all pipelines are executed in lexicographical order.
    """
    cwd = os.getcwd()
    pipes = pu.read_config()['pipelines']
    project_root = pu.get_project_root()

    # Initialize so the final check below cannot raise UnboundLocalError
    # when no pipelines are defined in .popper.yml (empty 'run all' loop).
    status = None

    if pipeline:
        if pipeline not in pipes:
            pu.fail("Cannot find pipeline {} in .popper.yml".format(pipeline))
        status = run_pipeline(project_root, pipes[pipeline], timeout, skip)
    else:
        if os.path.basename(cwd) in pipes:
            # run just the one for CWD
            status = run_pipeline(project_root, pipes[os.path.basename(cwd)],
                                  timeout, skip)
        else:
            # run all
            for pipe in pipes:
                status = run_pipeline(project_root, pipes[pipe], timeout,
                                      skip)
                if status == 'FAIL':
                    break

    os.chdir(cwd)

    if status == 'FAIL':
        pu.fail("Failed to execute pipeline")
def update_metadata(self):
    """Reads required metatdata from the previous record and updates it
    from .popper.yml. The record is updated with the new data. This
    will only be called when some previous deposition is found.
    """
    data = self.deposition['metadata']
    config = pu.read_config()['metadata']

    try:
        data['description'] = '<p>{}</p>'.format(config['abstract'])
        data['title'] = config['title']
        data['publication_date'] = str(date.today())
        data['keywords'] = [k.strip() for k in config['keywords'].split(',')]

        # Build the creators list from all 'authorN' entries, sorted by key.
        creators = []
        for key in sorted(config):
            if 'author' not in key:
                continue
            name, _, affiliation = [
                field.strip() for field in config[key].split(',')
            ]
            # Store two-word names as "Last, First".
            if len(name.split()) == 2:
                name = ', '.join(name.split()[::-1])
            creators.append({'name': name, 'affiliation': affiliation})
        data['creators'] = creators

        # make sure the URL of the repo is in the metadata
        related = data.setdefault('related_identifiers', [])
        already_present = any(
            entry['identifier'] == self.remote_url for entry in related
        )
        if not already_present:
            related.append({
                "identifier": self.remote_url,
                "relation": "isSupplementTo",
                "scheme": "url"
            })

        data['upload_type'] = config['upload_type']
        if config['upload_type'] == 'publication':
            data['publication_type'] = config['publication_type']
    except KeyError:
        pu.fail(
            "Metadata is not defined properly in .popper.yml. "
            "See the documentation for proper metadata format."
        )

    payload = {'metadata': data}
    url = '{}/{}'.format(self.baseurl, self.deposition['id'])
    r = requests.put(
        url, data=json.dumps(payload), params=self.params,
        headers={'Content-Type': "application/json"}
    )
    if r.status_code != 200:
        pu.fail("{} - Failed to update metadata: {}".format(r.status_code,
                                                            r.json()))
def publish_snapshot(self):
    """Publish a snapshot of the repository, registering a DOI and storing
    it in .popper.yml.
    """
    if self.deposition is None:
        self.create_new_deposition()
    else:
        # A published record cannot be modified; start a new version first.
        if self.is_last_deposition_published():
            self.create_new_version()
        self.delete_previous_file()
        self.upload_new_file()

    self.update_metadata()

    url = '{}/{}/publish'.format(self.baseurl, self.deposition['id'])
    r = requests.post(url, params=self.params)

    if r.status_code == 201:
        # Fetch the published record to obtain its DOI.
        record = requests.get(r.json()['location'], params=self.params)
        doi = record.json()['doi']
        doi_url = 'https://doi.org/{}'.format(doi)
        pu.info(
            "Snapshot has been successfully published with DOI "
            "{} and the DOI URL {}".format(doi, doi_url)
        )
        config = pu.read_config()
        config['metadata']['figshare_doi'] = doi
        config['metadata']['figshare_doi_url'] = doi_url
        pu.write_config(config)
    else:
        pu.fail(
            "Status {}: Failed to publish the record."
            .format(r.status_code)
        )
def update_badge(status):
    """Post the given execution status to the badge server for this repo."""
    if pu.is_repo_empty():
        pu.warn('No commit log found. Skipping badge server update.')
        return

    remote_url = pu.get_remote_url()
    if not remote_url:
        pu.warn('No remote url found. Skipping badge server update.')
        return

    baseurl = pu.read_config().get('badge-server-url',
                                   'http://badges.falsifiable.us')
    org, repo = remote_url.split('/')[-2:]
    badge_server_url = '{}/{}/{}'.format(baseurl, org, repo)

    # Current branch; [:-1] strips the trailing newline from git's output.
    # NOTE(review): check_output returns bytes on Python 3 -- confirm the
    # badge server accepts a bytes value for 'branch'.
    branch_name = check_output(
        ['git', 'rev-parse', '--abbrev-ref', 'HEAD'])[:-1]

    payload = {
        'timestamp': int(time.time()),
        'commit_id': pu.get_head_commit(),
        'status': status,
        'branch': branch_name,
    }

    try:
        r = requests.post(badge_server_url, data=payload)
        if r.status_code not in (200, 201):
            pu.warn("Could not create a record on the badge server.")
        else:
            pu.info(r.json()['message'], fg="green")
    except requests.exceptions.RequestException:
        pu.warn("Could not communicate with the badge server.")
def cli(ctx, pipeline, env, binary, clear):
    """ Adds pipeline requirements to .popper.yml """
    # infer the pipeline from the current directory when not given
    if not pipeline:
        detected = pu.in_pipeline(name=True)
        if detected is not None:
            pipeline = detected
        else:
            pu.fail("No pipeline detected")

    config = pu.read_config()
    if pipeline not in config['pipelines']:
        pu.fail(
            'Pipeline {} does not exist. Check your .popper.yml file.'.format(
                pipeline))

    # merge the new requirements with any existing ones (unless --clear)
    reqs = config['pipelines'][pipeline].get('requirements', {})

    existing_vars = set() if clear else set(reqs.get('vars', []))
    existing_vars |= set(env)
    reqs['vars'] = list(existing_vars)

    existing_bins = set() if clear else set(reqs.get('bin', []))
    existing_bins |= set(binary)
    reqs['bin'] = list(existing_bins)

    pu.update_config(pipeline, reqs=reqs)
def cli(ctx):
    """Resets a popper repository completely, removing all existing pipelines
    and folders, leaving behind a newly created .popper.yml file.

    Note: It only removes those files inside a pipeline folder that are also
    tracked by git. Untracked files will not be deleted.
    """
    # message fix: the literals previously concatenated into a double
    # space ("this  project")
    msg = (
        "This will remove all the pipeline files in this "
        "project, do you want to continue?"
    )
    if not click.confirm(msg, abort=False):
        sys.exit(0)

    project_root = pu.get_project_root()
    if project_root != os.getcwd():
        msg = 'This command can only be executed from the project root folder'
        pu.fail(msg)

    config = pu.read_config()
    # Remove each pipeline folder through git so only tracked files go away.
    for _, p in config['pipelines'].items():
        pu.exec_cmd('git rm -r {}'.format(p['path']))

    pu.write_config(pu.init_config)

    content = pt.ReadMe()
    content.init_project()
    pu.info("Reset complete", fg="cyan")
def publish_snapshot(self):
    """Publish the pending Zenodo deposition and record the resulting DOI
    in .popper.yml.
    """
    if self.deposition is None:
        self.create_new_deposition()
        self.update_metadata_from_yaml()
    else:
        if self.is_last_deposition_published():
            self.create_new_version()
        self.delete_previous_file()
        self.update_metadata()
        self.upload_new_file()

    r = requests.get(self.baseurl, params=self.params)
    config = pu.read_config()

    # the most recent unpublished deposition is the one to publish
    try:
        deposition_id = r.json()[0]['id']
    except (KeyError, IndexError):
        pu.fail("No previously unpublished records exist.")

    url = '{}/{}/actions/publish'.format(self.baseurl, deposition_id)
    r = requests.post(url, params=self.params)

    if r.status_code == 202:
        body = r.json()
        doi, doi_url = body['doi'], body['doi_url']
        pu.info("Snapshot has been successfully published with DOI "
                "{} and the DOI URL {}".format(doi, doi_url))
        config['metadata']['zenodo_doi'] = doi
        config['metadata']['zenodo_doi_url'] = doi_url
        pu.write_config(config)
    else:
        pu.fail("Status {}: Failed to publish the record.".format(
            r.status_code))
def cli(ctx, add, rm):
    """Manipulates the metadata associated to a Popper repository. A metadata
    entry is an arbitrary key-value pair. Without any options, it displays all
    the existing metadata entries.

    Examples:

      popper metadata # show all entries

      popper metadata --add author='Jane Doe' --add year=2018

      popper metadata --rm author
    """
    config = pu.read_config()

    if not add and not rm:
        pu.print_yaml(config['metadata'], fg='yellow')

    if add:
        for kv_str in add:
            # split on the first '=' only, so values may themselves contain
            # '=' (e.g. --add url=http://host?a=b); the original split on
            # every '=' and silently truncated such values
            key, value = kv_str.split('=', 1)
            config['metadata'][key] = value

    if rm:
        for k in rm:
            config['metadata'].pop(k)

    pu.write_config(config)
def cli(ctx, cur, new):
    """Used to rename a popper pipeline

    Examples:

      popper mv exp1 experiment1

      popper mv pipelines1/exp1 pipelines2/exp1

    This will rename the pipeline exp1 to experiment1
    """
    # NOTE(review): the result is unused, but read_config() presumably
    # also validates that we are inside a popper repository -- confirm
    # before removing the call.
    popper_config = pu.read_config()

    # removed the dead locals `cur_path, new_path = False, False`,
    # which were never read
    data = {}

    if '/' in cur:
        data['cur_name'] = cur.split('/')[-1]
        data['cur_path'] = cur
    else:
        data['cur_name'] = cur

    if '/' in new:
        data['new_name'] = new.split('/')[-1]
        data['new_path'] = "/".join(new.split('/')[:-1])
    else:
        data['new_name'] = new

    rename(data)
def cli(ctx):
    """Synchronize your pipelines and popper.yml file if any pipeline or stage
    has been deleted.
    """
    pipeline_dir = os.path.join(pu.get_project_root(), 'pipelines')
    popper_config = pu.read_config()
    pipelines = {}

    for pipeline in os.listdir(pipeline_dir):
        envs = popper_config['pipelines'][pipeline]['envs']
        relative_path = popper_config['pipelines'][pipeline]['path']
        defined_stages = popper_config['pipelines'][pipeline]['stages']

        # chdir once per pipeline; the original re-entered the same
        # directory on every iteration of the stage loop
        os.chdir(os.path.join(pipeline_dir, pipeline))

        # keep only stages whose script still exists on disk
        existing_stages = [
            stage for stage in defined_stages
            if os.path.exists(stage + '.sh') or os.path.exists(stage)
        ]

        pipelines[pipeline] = {
            'envs': envs,
            'path': relative_path,
            'stages': existing_stages
        }

    popper_config['pipelines'] = pipelines
    pu.write_config(popper_config)
    pu.info("\nYour popper.yml file has been updated! Run git diff to see "
            "the differences.", fg="white")
def cli(ctx):
    """Synchronize your pipelines and popper.yml file if any pipeline or stage
    has been deleted.
    """
    popper_config = pu.read_config()
    project_root = pu.get_project_root()
    pipelines = popper_config['pipelines']

    # Drop entries whose folder no longer exists; refresh stages otherwise.
    for name in list(pipelines):
        entry = pipelines[name]
        folder = os.path.join(project_root, entry['path'])

        if not os.path.exists(folder):
            del pipelines[name]
            continue

        # stages are the .sh scripts found in the pipeline folder
        entry['stages'] = [
            f[:-3] for f in os.listdir(folder) if f.endswith(".sh")
        ]

    popper_config['pipelines'] = pipelines
    pu.write_config(popper_config)
    pu.info(
        "\nYour popper.yml file has been updated! Run git diff to see "
        "the differences.", fg="white")
def cli(ctx, service, history, inplace):
    """Generates markdown for the badge of a service. Currently available
    services are: CloudLab, Chameleon, Google Cloud Engine and Popper.
    """
    if history and service:
        raise BadArgumentUsage("--history can't be combined with other flags.")

    remote_url = pu.get_remote_url()
    if not remote_url:
        pu.fail("Failed to infer remote URL for git repository.")

    org, repo = remote_url.split('/')[-2:]

    if history:
        # fetch and display the stored badge history for this repo
        baseurl = pu.read_config().get('badge-server-url',
                                       'http://badges.falsifiable.us')
        try:
            response = requests.get(
                '{}/{}/{}/list'.format(baseurl, org, repo))
            if response.json():
                pu.print_yaml(response.json())
            else:
                pu.info("No records to show")
        except requests.exceptions.RequestException:
            pu.fail("Could not communicate with the badge server")
        sys.exit(0)

    if not service and inplace:
        raise BadArgumentUsage("--inplace must be given with --service")

    if service is None:
        pu.fail('Please specify a service name.')
    if service not in services:
        pu.fail('Unknown service {}.'.format(service))

    if service == 'popper':
        # the popper badge image URL is parameterized by org/repo
        label, image_tpl, link = services[service]
        markup = '[![{}]({})]({})'.format(
            label, image_tpl.format(org, repo), link)
    else:
        markup = '[![{}]({})]({})'.format(*services[service])

    if not inplace:
        pu.info(markup)
        sys.exit(0)

    # prepend the badge markup to the project README
    try:
        os.chdir(pu.get_project_root())
        with open('README.md', 'r+') as f:
            content = f.read()
            f.seek(0, 0)
            f.write(markup + '\n\n' + content)
    except IOError as e:
        if e.errno == ENOENT:
            pu.fail("README.md does not exist at the root of the project")
def cli(ctx, pipeline, add, rm):
    """Define or remove executions of a pipeline."""
    config, pipeline_config = pu.read_config(pipeline)

    if add and rm:
        raise UsageError("Both add and rm cannot be given at the same time. "
                         "See popper env-vars --help for more information.")

    if add:
        # each --add item is KEY=VAL; one dict per invocation is appended
        env_vars = pipeline_config.get('vars', [])
        vars_add = {}
        for var in add:
            key, val = var.split('=')
            vars_add[key] = val
        env_vars.append(vars_add)
        pu.update_config(pipeline, vars=env_vars)
    elif rm:
        env_vars = pipeline_config.get('vars', None)
        if not env_vars:
            pu.fail("No environment variables defined for this pipeline.")

        vars_del = {}
        for var in rm:
            key, val = var.split('=')
            vars_del[key] = val

        # Find the entry equal to the set being removed. Plain dict
        # equality replaces the original manual comparison, which raised
        # KeyError for two dicts of equal length but different keys.
        index = -1
        for i, entry in enumerate(env_vars):
            if entry == vars_del:
                index = i
                break

        if index != -1:
            env_vars.pop(index)
            pu.update_config(pipeline, vars=env_vars)
        else:
            pu.fail("The environment variable list does "
                    "not exist for this pipeline.")
    else:
        # no flags: display the current variable sets, if any
        try:
            env_vars = pipeline_config['vars']
            if len(env_vars) == 0:
                raise KeyError
            pu.print_yaml(env_vars)
        except KeyError:
            pu.info("No environment variables defined for this pipeline.")
def cli(ctx, pipeline, folder, branch):
    """Add a pipeline to your repository from the existing popperized
    repositories on github. The pipeline argument is provided as owner/repo/
    pipeline. For example, 'popper add popperized/quiho-popper/single-node'
    adds the 'single-node' pipeline from the 'quiho-popper' repository from
    the 'popperized' organization.
    """
    parts = pipeline.split('/')
    if len(parts) != 3:
        raise BadArgumentUsage(
            "Bad pipeline name. See 'popper add --help' for more info.")

    owner, repo, pipe_name = parts

    config = pu.read_config()
    if pipe_name in config['pipelines']:
        pu.fail("Pipeline {} already in repo.".format(pipe_name))

    project_root = pu.get_project_root()
    pipelines_dir = os.path.join(project_root, folder)
    if not os.path.exists(pipelines_dir):
        os.mkdir(pipelines_dir)

    gh_url = 'https://github.com/{}/{}/'.format(owner, repo)
    gh_url += 'archive/{}.tar.gz'.format(branch)

    pu.info("Downloading pipeline {}... ".format(pipe_name))
    r = pu.make_gh_request(
        gh_url,
        msg="Unable to fetch the pipeline. Please check if the name"
            " of the pipeline is correct and the internet is connected"
    )

    # Downloading and extracting the tarfile
    # NOTE(review): extractall trusts member paths from a remote archive;
    # consider validating them to avoid path traversal.
    with tarfile.open(mode='r:gz', fileobj=BytesIO(r.content)) as archive:
        archive.extractall()

    extracted_root = '{}-{}'.format(repo, branch)
    os.rename(os.path.join(extracted_root, 'pipelines', pipe_name),
              os.path.join(folder, pipe_name))
    shutil.rmtree(extracted_root)

    pu.info("Updating popper configuration... ")
    repo_config = get_config(owner, repo)

    config['pipelines'][pipe_name] = repo_config['pipelines'][pipe_name]
    config['pipelines'][pipe_name]['path'] = os.path.join(folder, pipe_name)

    pu.write_config(config)
    pu.info("Pipeline {} has been added successfully.".format(pipe_name),
            fg="green")
def update_metadata_from_yaml(self):
    """Reads required metatdata from .popper.yml and updates the metadata
    for the record. This will only be called when no previous deposition
    is found.

    Args:
        deposition_id (str): The deposition id whose metadata will
            be updated
    """
    deposition_id = self.deposition['id']
    data = pu.read_config()['metadata']

    # the config must provide at least these fields
    required = ('title', 'upload_type', 'abstract', 'author1')
    if any(field not in data for field in required):
        pu.fail("Metadata is not defined properly in .popper.yml. "
                "See the documentation for proper metadata format.")

    # Change abstract to description, if present
    data['description'] = '<p>' + data['abstract'] + '</p>'
    del data['abstract']

    # Collect the authors in a sorted manner
    creators = []
    for key in sorted(list(data.keys())):
        if 'author' not in key:
            continue
        name, _, affiliation = [p.strip() for p in data[key].split(',')]
        # two-word names are stored as "Last, First"
        if len(name.split()) == 2:
            name = ', '.join(name.split()[::-1])
        creators.append({'name': name, 'affiliation': affiliation})
        del data[key]
    data['creators'] = creators

    # Change the keywords to a list from string of comma separated values
    if 'keywords' in data:
        data['keywords'] = [k.strip() for k in data['keywords'].split(',')]

    payload = {'metadata': data}
    url = '{}/{}'.format(self.baseurl, deposition_id)
    r = requests.put(url, data=json.dumps(payload), params=self.params,
                     headers={'Content-Type': "application/json"})

    if r.status_code != 200:
        pu.fail("Status {}: Failed to update metadata.".format(
            r.status_code))
def cli(ctx, pipeline, set):
    """View or change the stages of a pipeline.
    """
    config = pu.read_config()

    if pipeline in config['pipelines']:
        # persist the new stage list only when --set was given
        new_stages = set.split(',') if set else None
        if new_stages is not None:
            config['pipelines'][pipeline]['stages'] = new_stages
            pu.write_config(config)
        pu.info("\nStages:", fg="yellow")
        pu.print_yaml(config['pipelines'][pipeline]['stages'], fg="white")
    else:
        pu.fail("The pipeline {} is not defined. \nSee popper.yml file to see "
                "which pipelines are defined.".format(pipeline))
def create_new_deposition(self):
    """Create an empty deposition and cache its id and full record."""
    payload = json.dumps({'title': pu.read_config()['metadata']['title']})

    r = requests.post(self.baseurl, params=self.params, data=payload)
    if r.status_code != 201:
        pu.fail("Status {}: {}".format(r.status_code, r.json()))

    # fetch the full record from the location the server returned
    r = requests.get(r.json()['location'], params=self.params)
    if r.status_code != 200:
        pu.fail("Status {}: {}".format(r.status_code, r.json()))

    self.deposition = r.json()
    self.record_id = self.deposition['id']
def cli(ctx, pipeline, add, rm, ls):
    """Manipulates the environments that are associated to a pipeline. An
    environment is a docker image where a pipeline runs when 'popper run' is
    executed. The 'host' environment is a special case that corresponds to
    the running directly on the environment where the 'popper' command runs,
    i.e. running directly on the host without docker. When a new pipeline is
    created using, the default environment is 'host' (see 'popper init
    --help' for more).

    Examples:

      popper env mypipeline # show environments for pipeline

      popper env mypipeline --add ubuntu-xenial,centos-7.2

      popper env mypipeline --rm host
    """
    config = pu.read_config()

    # no flags: just display the environments of the named pipeline
    if not (add or rm or ls):
        if not pipeline:
            raise BadArgumentUsage('Expecting name of a pipeline')
        if pipeline not in config['pipelines']:
            pu.fail("Pipeline '{}' not found in .popper.yml".format(pipeline))
        pu.print_yaml(config['pipelines'][pipeline]['envs'], fg='yellow')
        sys.exit(0)

    if add:
        config['pipelines'][pipeline]['envs'] += add.split(',')

    if rm:
        for env_name in rm.split(','):
            config['pipelines'][pipeline]['envs'].remove(env_name)

    if ls:
        # query Docker Hub for the available poppercheck image tags
        try:
            response = requests.get("https://hub.docker.com/v2/repositories/"
                                    "falsifiable/poppercheck/tags")
            environments = [tag['name']
                            for tag in response.json()['results']]
            pu.info('environments:')
            pu.print_yaml(environments)
        except requests.exceptions.RequestException as e:
            click.echo(click.style("Error: " + str(e), fg='red'), err=True)

    pu.write_config(config)
def cli(ctx, pipeline):
    """This command is used to remove a popper pipeline from the user's
    repository effectively to keep the pipelines folder and the .popper.yml
    files in sync.

    Examples:

      popper rm single-node
    """
    pipeline_dir = os.path.join(pu.get_project_root(), 'pipelines')

    # read the configuration once; the original redundantly re-read it
    # right after checking that the folder exists
    popper_config = pu.read_config()
    pipeline_path = os.path.join(pipeline_dir, pipeline)

    if os.path.isdir(pipeline_path):
        shutil.rmtree(pipeline_path)

        del popper_config['pipelines'][pipeline]

        # drop any per-pipeline entries kept in the top-level sections
        if pipeline in popper_config.get('stages', {}):
            del popper_config['stages'][pipeline]
        if pipeline in popper_config.get('envs', {}):
            del popper_config['envs'][pipeline]

        pu.info("Pipeline {} removed successfully".format(pipeline),
                fg="green")
        pu.write_config(popper_config)
    else:
        # grammar fix: "doesn't exists" -> "doesn't exist"
        pu.fail("Pipeline {} doesn't exist".format(pipeline))
def cli(ctx, pipeline, folder):
    """Add a pipeline to your repository from the existing popperized
    repositories on github. The pipeline argument is provided as owner/repo/
    pipeline. For example, 'popper add popperized/quiho-popper/single-node'
    adds the 'single-node' pipeline from the 'quiho-popper' repository from
    the 'popperized' organization.
    """
    parts = pipeline.split('/')
    if len(parts) != 3:
        pu.fail("Bad pipeline name. See 'popper add --help' for more info.")

    owner, repo, pipe_name = parts

    config = pu.read_config()
    if pipe_name in config['pipelines']:
        pu.fail("Pipeline {} already in repo.".format(pipe_name))

    project_root = pu.get_project_root()
    pipelines_dir = os.path.join(project_root, folder)
    if not os.path.exists(pipelines_dir):
        os.mkdir(pipelines_dir)

    gh_url = 'https://github.com/{}/{}/archive/master.zip'.format(owner, repo)

    pu.info("Downloading pipeline {}... ".format(pipe_name))
    r = requests.get(gh_url)
    if r.status_code != 200:
        pu.fail("Unable to fetch the pipeline. Please check if the name" +
                " of the pipeline is correct and the internet is connected")

    # NOTE(review): extractall trusts member paths from a remote archive;
    # consider validating them to avoid path traversal.
    with zipfile.ZipFile(BytesIO(r.content)) as archive:
        archive.extractall()

    extracted_root = '{}-master'.format(repo)
    os.rename('{}/pipelines/{}'.format(extracted_root, pipe_name),
              os.path.join(folder, pipe_name))
    shutil.rmtree(extracted_root)

    pu.info("Updating popper configuration... ")
    repo_config = get_config(owner, repo)

    config['pipelines'][pipe_name] = repo_config['pipelines'][pipe_name]
    config['pipelines'][pipe_name]['path'] = os.path.join(folder, pipe_name)

    pu.write_config(config)
    pu.info("Pipeline {} has been added successfully.".format(pipe_name),
            fg="green")
def cli(ctx, pipeline, timeout, skip, ignore_errors, output,
        no_badge_update, requirement_level):
    """Executes one or more pipelines and reports on their status. When
    PIPELINE is given, it executes only the pipeline with that name. If the
    argument is omitted, all pipelines are executed in lexicographical order.
    """
    project_pipelines = pu.read_config()['pipelines']

    if not project_pipelines:
        pu.info(
            "No pipelines defined in .popper.yml. "
            "Run popper init --help for more info.", fg='yellow')
        sys.exit(0)

    project_root = pu.get_project_root()
    cwd = os.getcwd()

    pipelines = get_pipelines_to_execute(cwd, pipeline, project_pipelines)

    # on CI, pipelines named in the commit message take precedence
    if os.environ.get('CI', False):
        pipes_from_log = pipelines_from_commit_message(project_pipelines)
        if len(pipes_from_log) != 0:
            pu.info("Found 'CI', ignoring PIPELINE argument.")
            pipelines = pipes_from_log

    # filter by requirements and by the skip list
    pipelines = {
        name: conf for name, conf in pipelines.items()
        if check_requirements(name, conf, requirement_level)
    }
    pipelines = check_skiplist(pipelines, skip)

    if not pipelines:
        pu.info("No pipelines to execute")
        sys.exit(0)

    status = run_pipelines(pipelines, project_root, timeout, skip,
                           ignore_errors, output)

    os.chdir(cwd)

    if os.environ.get('CI', False) and not no_badge_update:
        update_badge(status)

    if status == 'FAIL':
        pu.fail("Failed to execute pipeline")
def cli(ctx, pipeline, timeout, skip, ignore_errors):
    """Executes a pipeline and reports its status. When PIPELINE is given, it
    executes only the pipeline with such a name. If the argument is omitted,
    all pipelines are executed in lexicographical order. Reports an error if
    no pipelines have been configured.
    """
    cwd = os.getcwd()
    pipes = pu.read_config()['pipelines']
    project_root = pu.get_project_root()
    time_out = pu.parse_timeout(timeout)

    if len(pipes) == 0:
        pu.info("No pipelines defined in .popper.yml. "
                "Run popper init --help for more info.", fg='yellow')
        sys.exit(0)

    # Initialize so the final check cannot raise UnboundLocalError when
    # every pipeline ends up on the skip list and the loop below never
    # assigns a status.
    status = None

    if pipeline:
        if ignore_errors:
            pu.warn("--ignore-errors flag is ignored when pipeline "
                    "argument is provided")
        if pipeline not in pipes:
            pu.fail("Cannot find pipeline {} in .popper.yml".format(pipeline))
        status = run_pipeline(project_root, pipes[pipeline], time_out, skip)
    else:
        if os.path.basename(cwd) in pipes:
            # run just the one for CWD
            status = run_pipeline(project_root, pipes[os.path.basename(cwd)],
                                  time_out, skip)
        else:
            # run all
            skip_list = skip.split(',') if skip else []
            for pipe in pipes:
                if pipe not in skip_list:
                    status = run_pipeline(
                        project_root, pipes[pipe], time_out, []
                    )
                    if status == 'FAIL' and not ignore_errors:
                        break

    os.chdir(cwd)

    if status == 'FAIL':
        pu.fail("Failed to execute pipeline")
def update_config(owner, repo, pipeline_name, path, repo_config):
    """Adds the information about the added pipeline in the popperized entry
    of the .popper.yml file.
    """
    pipeline_path = 'pipelines/{}'.format(pipeline_name)

    # fall back to the default stage list when the source repo defines none
    if 'stages' in repo_config:
        pipeline_stages = repo_config['stages'][pipeline_name]
    else:
        pipeline_stages = ['setup.sh', 'run.sh', 'post-run.sh',
                           'validate.sh', 'teardown.sh']

    pipeline_envs = repo_config['envs'][pipeline_name]
    source_url = 'github.com/{}/{}'.format(owner, repo)

    config = pu.read_config()
    config['pipelines'][pipeline_name] = {
        'envs': pipeline_envs,
        'path': pipeline_path,
        'stages': pipeline_stages,
        'source': source_url
    }

    # mirror the pipeline's stages/envs in the top-level sections,
    # creating each section when absent
    config.setdefault('stages', {})[pipeline_name] = pipeline_stages
    config.setdefault('envs', {})[pipeline_name] = pipeline_envs
    config.setdefault('popperized', []).append(
        'github/{}/{}'.format(owner, repo))

    pu.write_config(config)
def update_metadata(self):
    """Reads required metatdata from the previous record and updates it
    from .popper.yml. The record is updated with the new data. This
    will only be called when some previous deposition is found.
    """
    data = self.deposition['metadata']
    config = pu.read_config()['metadata']

    try:
        data['description'] = '<p>{}</p>'.format(config['abstract'])
        data['title'] = config['title']
        data['publication_date'] = str(date.today())
        data['keywords'] = [k.strip() for k in config['keywords'].split(',')]

        # gather the 'authorN' entries, sorted by key, into creators
        creators = []
        for key in sorted(config):
            if 'author' not in key:
                continue
            name, _, affiliation = [
                part.strip() for part in config[key].split(',')
            ]
            # two-word names are stored as "Last, First"
            if len(name.split()) == 2:
                name = ', '.join(name.split()[::-1])
            creators.append({'name': name, 'affiliation': affiliation})
        data['creators'] = creators
    except KeyError:
        pu.fail(
            "Metadata is not defined properly in .popper.yml. "
            "See the documentation for proper metadata format."
        )

    payload = {'metadata': data}
    url = '{}/{}'.format(self.baseurl, self.deposition['id'])
    r = requests.put(
        url, data=json.dumps(payload), params=self.params,
        headers={'Content-Type': "application/json"}
    )

    if r.status_code != 200:
        pu.fail(
            "Status {}: Failed to update metadata.".format(r.status_code)
        )
def create_new_deposition(self):
    """Create an empty deposition and store the fetched record in
    self.deposition.
    """
    url = self.baseurl
    data = {
        'title': pu.read_config()['metadata']['title']
    }
    r = requests.post(url, params=self.params, data=json.dumps(data))

    if r.status_code == 201:
        location = r.json()['location']
        r = requests.get(location, params=self.params)
        if r.status_code == 200:
            self.deposition = r.json()
        else:
            # message fix: the two literals previously concatenated
            # without a separating space ("depositions.Try again later.")
            pu.fail(
                "Status {}: Could not fetch the depositions. "
                "Try again later.".format(r.status_code)
            )
    else:
        pu.fail(
            "Status {}: Could not create new deposition."
            .format(r.status_code)
        )
def cli(ctx):
    """Used to list down the avaliable pipelines in a popper repository

    Example:

      > popper ls

      - experiment-1

      - experiment-2
    """
    # 'paper' is a special pipeline and is excluded from the listing
    pipelines = [
        name for name in pu.read_config()['pipelines'] if name != 'paper'
    ]

    if not pipelines:
        pu.info("There are no pipelines in this repository", fg="red")
    else:
        pu.info("The available pipelines are :\n", fg="cyan")
        pu.print_yaml(pipelines, fg="cyan")
def cli(ctx, keywords, skip_update, add, rm, ls, include_readme):
    """Searches for pipelines on GitHub matching the given keyword(s).

    The list of repositories or organizations scraped for Popper pipelines is
    specified in the 'popperized' list in the .popper.yml file. By default,
    https://github.com/popperized is added to the configuration.

    If no keywords are specified, a list of all the pipelines from all
    organizations (in the .popper.yml file) and repositories will be returned.

    Example:

        popper search quiho

    would result in:

        popperized/quiho-popper

    To add or remove orgs/repos to/from the 'popperized' , use the --add and
    --rm flags while searching.

        popper search --add org/repo

    To remove an organization/person do:

        popper search --rm org/repo

    To view the list repositories that are available to the search command:

        popper search --ls
    """
    if (rm or add or ls) and (keywords):
        raise BadArgumentUsage(
            "'add', 'rm' and 'ls' flags cannot be combined with others.")

    project_root = pu.get_project_root()

    config = pu.read_config()
    popperized_list = config['popperized']

    if add:
        add = 'github/' + add
        if add not in popperized_list:
            popperized_list.append(add)
        config['popperized'] = popperized_list
        pu.write_config(config)
        sys.exit(0)

    if rm:
        rm = 'github/' + rm
        if rm in popperized_list:
            popperized_list.remove(rm)
        config['popperized'] = popperized_list
        pu.write_config(config)
        sys.exit(0)

    result = []  # to store the result of the search query as a list

    if ls:
        for p in popperized_list:
            if p.count('/') == 1:
                # an organization: list all of its repositories
                org_name = p.split('/')[1]
                org_url = ('https://api.github.com/users/{}/repos')
                org_url = org_url.format(org_name)
                response = pu.make_gh_request(org_url)
                repos = response.json()
                temp = [r["full_name"] for r in repos]
                result.extend(temp)
            else:
                # a single repository: append the 'owner/repo' string as one
                # item (the original extend()ed the string, adding each of
                # its characters as a separate entry)
                result.append(p[7:])

        if len(result) > 0:
            pu.info("The list of available popperized repositories are:\n")
            pu.print_yaml(result)
            sys.exit()
        else:
            # message fix: the two literals previously concatenated without
            # a separating space ("availablefor search")
            pu.fail("There are no popperized repositories available "
                    "for search. Use the --add flag to add an org/repo.")
            sys.exit(0)

    search_params = {}

    if not keywords:  # checks if the query is empty or not
        search_params['empty_query'] = True
    else:
        search_params['empty_query'] = False

    cache_dir = os.path.join(project_root, '.cache')

    search_params["keywords"] = keywords
    search_params["cache_dir"] = cache_dir
    search_params["skip_update"] = True if skip_update else False
    search_params["in_readme"] = True if include_readme else False

    if not os.path.exists(cache_dir):
        os.makedirs(cache_dir)

    for popperized in popperized_list:
        if popperized.count('/') == 1:
            # it is an organization
            org_name = popperized.split('/')[1]

            repos = ""
            if not skip_update:
                # refresh the cached repository list for this organization
                org_url = (
                    'https://api.github.com/users/{}/repos'.format(org_name))
                response = pu.make_gh_request(org_url)

                with open(os.path.join(cache_dir, org_name + '_repos.json'),
                          'w') as f:
                    json.dump(response.json(), f)

            try:
                with open(os.path.join(cache_dir, org_name + '_repos.json'),
                          'r') as f:
                    repos = json.load(f)
            except FileNotFoundError:
                pu.fail('No cached metadata has been downloaded')

            with click.progressbar(
                    repos,
                    show_eta=False,
                    label='Searching in ' + org_name,
                    bar_template='[%(bar)s] %(label)s | %(info)s',
                    show_percent=True) as bar:
                for r in bar:
                    if search_params["empty_query"]:
                        temp = ' {}/{}'\
                            .format(org_name, r['name'])
                        result.append(temp)
                    elif l_distance(r["name"].lower(),
                                    keywords.lower()) < 1:
                        temp = ' {}/{}' \
                            .format(org_name, r['name'])
                        result.append(temp)
                    else:
                        search_params["repo_url"] = r["url"]
                        search_params["uname"] = org_name
                        result.extend(search_pipeline(search_params))
        else:
            # it is a repository
            user, repo = popperized.split('/')[1:]
            repo_url = ('https://api.github.com/repos/{}/{}'.format(
                user, repo))
            search_params["repo_url"] = repo_url
            search_params["uname"] = user
            pu.info("Searching in repository : {}".format(repo))
            result.extend(search_pipeline(search_params))

    if len(result) != 0:
        pu.info("\nSearch results:\n", fg="green")
        for res in result:
            pu.info("> " + res + "\n")

        if search_params["in_readme"]:
            pu.info("Use popper info command to view the"
                    " details of a pipeline. See popper info --"
                    "help")
    else:
        pu.fail("Unable to find any matching pipelines")