def update_badge(status):
    """Post the status of the most recent pipeline run to the badge server.

    Best-effort: any condition that prevents the update (empty repo, no
    remote, unreachable server) produces a warning and returns, never an
    exception to the caller.

    Args:
        status: pipeline status string to record (e.g. 'SUCCESS'/'FAIL').
    """
    if pu.is_repo_empty():
        pu.warn('No commit log found. Skipping badge server update.')
        return

    remote_url = pu.get_remote_url()
    if not remote_url:
        pu.warn('No remote url found. Skipping badge server update.')
        return

    baseurl = pu.read_config().get('badge-server-url',
                                   'http://badges.falsifiable.us')

    # Remote URLs end in <org>/<repo>; the badge record is keyed on both.
    org, repo = remote_url.split('/')[-2:]
    badge_server_url = '{}/{}/{}'.format(baseurl, org, repo)

    # check_output returns bytes on Python 3; decode and strip the
    # trailing newline instead of the brittle [:-1] slice.
    branch_name = check_output(
        ['git', 'rev-parse', '--abbrev-ref', 'HEAD']
    ).decode('utf-8').strip()

    data = {
        'timestamp': int(time.time()),
        'commit_id': pu.get_head_commit(),
        'status': status,
        'branch': branch_name,
    }

    try:
        r = requests.post(badge_server_url, data=data)
        if r.status_code not in (200, 201):
            pu.warn("Could not create a record on the badge server.")
        else:
            pu.info(r.json()['message'], fg="green")
    except requests.exceptions.RequestException:
        pu.warn("Could not communicate with the badge server.")
def cli(ctx, pipeline, timeout, skip, ignore_errors):
    """Executes a pipeline and reports its status. When PIPELINE is given, it
    executes only the pipeline with such a name. If the argument is omitted,
    all pipelines are executed in lexicographical order. Reports an error if
    no pipelines have been configured.
    """
    cwd = os.getcwd()
    pipes = pu.read_config()['pipelines']
    project_root = pu.get_project_root()
    time_out = pu.parse_timeout(timeout)

    if not pipes:
        pu.info("No pipelines defined in .popper.yml. "
                "Run popper init --help for more info.", fg='yellow')
        sys.exit(0)

    # BUGFIX: 'status' was previously unbound when every pipeline ended up
    # in the skip list, raising UnboundLocalError at the final check below.
    status = None

    if pipeline:
        if ignore_errors:
            pu.warn("--ignore-errors flag is ignored when pipeline "
                    "argument is provided")
        if pipeline not in pipes:
            pu.fail("Cannot find pipeline {} in .popper.yml".format(pipeline))
        status = run_pipeline(project_root, pipes[pipeline], time_out, skip)
    else:
        if os.path.basename(cwd) in pipes:
            # run just the one for CWD
            status = run_pipeline(project_root, pipes[os.path.basename(cwd)],
                                  time_out, skip)
        else:
            # run all pipelines not explicitly skipped
            skip_list = skip.split(',') if skip else []
            for pipe in pipes:
                if pipe not in skip_list:
                    status = run_pipeline(
                        project_root, pipes[pipe], time_out, []
                    )
                    # stop at first failure unless failures are ignored
                    if status == 'FAIL' and not ignore_errors:
                        break

    # run_pipeline may chdir into pipeline folders; restore the original cwd
    os.chdir(cwd)

    if status == 'FAIL':
        pu.fail("Failed to execute pipeline")
def run_pipeline(action, wfile, workspace, reuse,
                 quiet, debug, dry_run, parallel):
    """Instantiate a Workflow from `wfile` and run it.

    Args:
        action: name of a single action to run; falsy runs the whole
            workflow.
        wfile: path to the workflow file.
        workspace: workspace directory handed to the Workflow.
        reuse: reuse existing action containers/images instead of rebuilding.
        quiet: suppress per-action output.
        debug: enable debug output.
        dry_run: parse/plan without executing actions.
        parallel: run independent actions concurrently.
    """
    pipeline = Workflow(wfile, workspace, quiet, debug, dry_run)

    if reuse:
        pu.warn(
            "\n  " +
            " using --reuse ignores any changes made to an action" +
            "\n  " +
            " or to an action block in the workflow.\n\n"
        )

    if parallel:
        # BUGFIX: corrected typo "ouput" -> "output" in the warning text.
        pu.warn(
            "\n  " +
            " using --parallel may result in interleaved output." +
            "\n  " +
            " You may use --quiet flag to avoid confusion.\n\n"
        )

    pipeline.run(action, reuse, parallel)

    if action:
        pu.info('', action, '\nAction finished successfully.\n\n')
    else:
        pu.info('', '', '\nWorkflow finished successfully.\n\n')
def check_requirements(pipe_n, pipeline, requirement_level):
    """Verify a pipeline's declared environment-variable and binary requirements.

    Args:
        pipe_n: pipeline name (used only in messages).
        pipeline: pipeline definition dict; may contain a 'requirements'
            mapping with optional 'vars' and 'bin' lists.
        requirement_level: one of 'fail' (abort on unmet requirements),
            'warn' (warn and skip the pipeline), or 'ignore' (warn but
            run anyway).

    Returns:
        True when the pipeline should run, False when it should be skipped.
        Does not return when requirement_level is 'fail' and something is
        missing (pu.fail aborts).
    """
    if 'requirements' not in pipeline:
        return True

    var_reqs = pipeline['requirements'].get('vars', [])
    bin_reqs = pipeline['requirements'].get('bin', [])

    missing_vars = [envvar for envvar in var_reqs if envvar not in os.environ]
    # NOTE: renamed loop variables to avoid shadowing the builtin `bin`.
    missing_binaries = [binary for binary in bin_reqs
                        if not bin_exists(binary)]
    # Only check version requirements for binaries that are present.
    missing_versions = [
        bin_requirements(binary) for binary in bin_reqs
        if binary not in missing_binaries
    ]
    missing_versions = [msg for msg in missing_versions if msg is not None]

    msg = ""
    if missing_vars:
        msg += ('Required environment variables for pipeline {} unset: {}\n'.
                format(pipe_n, ','.join(missing_vars)))
    if missing_binaries:
        msg += ('Required binaries for pipeline {} not available: {}\n'.format(
            pipe_n, ','.join(missing_binaries)))
    if missing_versions:
        msg += ('Requirements for pipeline {} not fulfilled:\n{}\n'.format(
            pipe_n, '\n'.join(missing_versions)))

    if msg:
        if requirement_level == 'fail':
            pu.fail(msg)  # aborts
        pu.warn(msg)
        if requirement_level == 'warn':
            pu.info('Skipping pipeline {}'.format(pipe_n))
        # 'ignore' runs the pipeline despite unmet requirements
        return requirement_level == 'ignore'

    return True
def cli(ctx, pipeline, add, rm, ls, argument):
    """Manipulates the environments that are associated to a pipeline. An
    environment is a docker image where a pipeline runs when 'popper run' is
    executed. The 'host' environment is a special case that corresponds to
    the running directly on the environment where the 'popper' command runs,
    i.e. running directly on the host without docker. When a new pipeline is
    created using, the default environment is 'host' (see 'popper init
    --help' for more).

    Examples:

      popper env mypipeline # show environments for pipeline

      popper env mypipeline --add ubuntu-xenial,centos-7.2

      popper env mypipeline --rm host

    :argument Used to pass an argument to Docker through popper. Can be
    given multiple times (Ignored for 'host'). An example of usage is as
    follows:

      popper env mypipeline --add debian-9 -arg --runtime=runc -arg
      --ipc=host

    This will add to the environment 'debian-9' the set of arguments
    runtime=runc and ipc=host.
    """
    config = pu.read_config()

    # --ls: list environments published on Docker Hub and exit; this is
    # independent of any particular pipeline.
    if ls:
        try:
            response = requests.get("https://hub.docker.com/v2/repositories/"
                                    "falsifiable/popper/tags")
            environments = []
            for result in response.json()['results']:
                environments.append(result['name'])
            pu.info('environments:')
            pu.print_yaml(environments)
        except requests.exceptions.RequestException as e:
            click.echo(click.style("Error: " + str(e), fg='red'), err=True)
        # exit after listing, whether or not the request succeeded
        sys.exit(0)

    # No pipeline argument: infer it from the current working directory.
    if not pipeline:
        get_pipe = pu.in_pipeline(name=True)
        if get_pipe is not None:
            pipeline = get_pipe
        else:
            pu.fail("This is not a pipeline")  # aborts

    # Neither --add nor --rm: just print the pipeline's environments.
    if not add and not rm:
        if pipeline not in config['pipelines']:
            pu.fail("Pipeline '{}' not found in .popper.yml".format(pipeline))
        pu.print_yaml(config['pipelines'][pipeline]['envs'], fg='yellow')
        sys.exit(0)

    envs = config['pipelines'][pipeline]['envs']

    # NOTE(review): 'argument' (repeatable -arg flag) is collected into a
    # set, which drops duplicates/order and is what gets stored in the
    # config below — confirm the config writer serializes sets as intended.
    args = set(argument)

    if add:
        elems = add.split(',')
        # environments not yet present get a fresh entry with empty args
        environments = set(elems) - set(envs)
        envs.update({env: {'args': []} for env in environments})
        # every named environment (new or existing) gets the given args
        for env in elems:
            envs[env]['args'] = args

    if rm:
        for env in rm.split(','):
            if env in envs:
                envs.pop(env)
            else:
                pu.warn('Environment {} not found in {}'.format(
                    env, pipeline))

    # persist the modified environment mapping back to .popper.yml
    config['pipelines'][pipeline]['envs'] = envs
    pu.write_config(config)