def publish_snapshot(self):
    if self.deposition is None:
        self.create_new_deposition()
        self.update_metadata_from_yaml()
    else:
        if self.is_last_deposition_published():
            self.create_new_version()
        self.delete_previous_file()
        self.update_metadata()

    self.upload_new_file()

    r = requests.get(self.baseurl, params=self.params)
    config = pu.read_config()

    try:
        deposition_id = r.json()[0]['id']
    except (KeyError, IndexError):
        pu.fail("No previously unpublished records exist.")

    url = '{}/{}/actions/publish'.format(self.baseurl, deposition_id)
    r = requests.post(url, params=self.params)

    if r.status_code == 202:
        doi = r.json()['doi']
        doi_url = r.json()['doi_url']
        pu.info("Snapshot has been successfully published with DOI "
                "{} and the DOI URL {}".format(doi, doi_url))
        config['metadata']['zenodo_doi'] = doi
        config['metadata']['zenodo_doi_url'] = doi_url
        pu.write_config(config)
    else:
        pu.fail("Status {}: Failed to publish the record.".format(
            r.status_code))

def cli(ctx, pipeline):
    """Add a pipeline to your repository from the existing popperized
    repositories on GitHub. The pipeline argument is provided as
    owner/repo/pipeline. For example, 'popper add
    popperized/quiho-popper/single-node' adds the single-node pipeline from
    the quiho-popper repository.
    """
    try:
        owner, repo, pipeline_name = pipeline.split('/')
    except ValueError:
        pu.fail("See popper add --help for more info.")

    project_root = pu.get_project_root()

    path = os.path.join(project_root, 'pipelines')
    if not os.path.exists(path):
        os.chdir(project_root)
        os.mkdir('pipelines')

    dirname = pipeline_name
    url = ('https://api.github.com/repos/{}/{}/contents/pipelines/{}'
           .format(owner, repo, pipeline_name))

    repo_config = get_config(owner, repo)

    save_directory(path, dirname, url)
    path = os.path.join(path, pipeline_name)
    update_config(owner, repo, pipeline_name, path, repo_config)

    pu.info("Pipeline {} successfully added.".format(pipeline_name) +
            " It can be viewed in the pipelines directory.",
            fg="green")

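# Hypothetical usage sketch for the command above (repository names taken
# from the docstring example):
#
#   $ popper add popperized/quiho-popper/single-node
#
# which places the downloaded pipeline under:
#
#   <project-root>/pipelines/single-node/
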
def cli(ctx):
    """Resets a popper repository completely, removing all existing
    pipelines and folders, leaving behind a newly created .popper.yml file.

    Note: it only removes files inside a pipeline folder that are also
    tracked by git. Untracked files will not be deleted.
    """
    msg = ("This will remove all the pipeline files in this "
           "project, do you want to continue?")
    if not click.confirm(msg, abort=False):
        sys.exit(0)

    project_root = pu.get_project_root()

    if project_root != os.getcwd():
        msg = 'This command can only be executed from the project root folder'
        pu.fail(msg)

    config = pu.read_config()

    for _, p in config['pipelines'].items():
        pu.exec_cmd('git rm -r {}'.format(p['path']))

    pu.write_config(pu.init_config)

    content = pt.ReadMe()
    content.init_project()
    pu.info("Reset complete", fg="cyan")

def cli(ctx, pipeline, add, rm):
    """Manipulates the environments that are associated to a pipeline. An
    environment is a docker image where a pipeline runs when 'popper run' is
    executed. The 'host' environment is a special case that corresponds to
    running directly on the host where the 'popper' command is invoked, i.e.
    without docker. When a new pipeline is created, the default environment
    is 'host' (see 'popper init --help' for more).

    Examples:

      popper env mypipeline # show environments for pipeline

      popper env mypipeline --add ubuntu-xenial,centos-7.2

      popper env mypipeline --rm host
    """
    config = pu.read_config()

    if not add and not rm:
        pu.info(str(config['pipelines'][pipeline]['envs']))

    if add:
        config['pipelines'][pipeline]['envs'] += add.split(',')

    if rm:
        for e in rm.split(','):
            config['pipelines'][pipeline]['envs'].remove(e)

    pu.write_config(config)

def docker_create(self, img):
    env_vars = self.action.get('env', {})

    for s in self.action.get('secrets', []):
        env_vars.update({s: os.environ[s]})

    for e, v in self.env.items():
        env_vars.update({e: v})

    env_vars.update({'HOME': os.environ['HOME']})
    env_flags = [" -e {}='{}'".format(k, v) for k, v in env_vars.items()]

    docker_cmd = 'docker create '
    docker_cmd += ' --name={}'.format(self.cid)
    docker_cmd += ' --volume {0}:{0}'.format(self.workspace)
    docker_cmd += ' --volume {0}:{0}'.format(os.environ['HOME'])
    docker_cmd += ' --volume {0}:{0}'.format('/var/run/docker.sock')
    docker_cmd += ' --workdir={} '.format(self.workspace)
    docker_cmd += ''.join(env_flags)
    if self.action.get('runs', None):
        docker_cmd += ' --entrypoint={} '.format(self.action['runs'])
    docker_cmd += ' {}'.format(img)
    docker_cmd += ' {}'.format(' '.join(self.action.get('args', '')))

    pu.info('{}[{}] docker create {} {}\n'.format(
        self.msg_prefix, self.action['name'],
        img, ' '.join(self.action.get('args', ''))))

    pu.exec_cmd(docker_cmd, debug=self.debug, dry_run=self.dry_run)

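# Example of the command string assembled above (all values are illustrative
# assumptions, not output from a real run):
#
#   docker create --name=popper_1_abc --volume /tmp/ws:/tmp/ws \
#     --volume /home/user:/home/user \
#     --volume /var/run/docker.sock:/var/run/docker.sock \
#     --workdir=/tmp/ws -e HOME='/home/user' -e FOO='bar' \
#     --entrypoint=main.sh alpine:3.9 arg1 arg2
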
def cli(ctx, add, rm):
    """Manipulates the metadata associated to a Popper repository. A metadata
    entry is an arbitrary key-value pair. Without any options, it displays all
    the existing metadata entries.

    Examples:

      popper metadata # show all entries

      popper metadata --add author='Jane Doe' --add year=2018

      popper metadata --rm author
    """
    config = pu.read_config()

    if not add and not rm:
        pu.info(str(config['metadata']))

    if add:
        for kv_str in add:
            # split on the first '=' only, so values may contain '='
            key, val = kv_str.split('=', 1)
            config['metadata'][key] = val

    if rm:
        for k in rm:
            config['metadata'].pop(k)

    pu.write_config(config)

def run(self, reuse=False):
    cmd = self.action.get('runs', ['entrypoint.sh'])
    cmd[0] = os.path.join('./', cmd[0])
    cmd.extend(self.action.get('args', ''))

    cwd = os.getcwd()
    if not self.dry_run:
        if 'repo_dir' in self.action:
            os.chdir(self.action['repo_dir'])
        else:
            os.chdir(os.path.join(cwd, self.action['uses']))

    os.environ.update(self.action.get('env', {}))

    pu.info('{}[{}] {}\n'.format(self.msg_prefix, self.action['name'],
                                 ' '.join(cmd)))

    _, ecode = pu.exec_cmd(
        ' '.join(cmd), verbose=(not self.quiet), debug=self.debug,
        ignore_error=True, log_file=self.log_filename,
        dry_run=self.dry_run)

    for i in self.action.get('env', {}):
        os.environ.pop(i)

    os.chdir(cwd)

    if ecode != 0:
        pu.fail("\n\nAction '{}' failed.\n".format(self.action['name']))

def cli(ctx):
    """Synchronize your pipelines and popper.yml file if any pipeline or
    stage has been deleted.
    """
    pipeline_dir = os.path.join(pu.get_project_root(), 'pipelines')
    popper_config = pu.read_config()
    pipelines = {}

    for pipeline in os.listdir(pipeline_dir):
        envs = popper_config['pipelines'][pipeline]['envs']
        relative_path = popper_config['pipelines'][pipeline]['path']
        defined_stages = popper_config['pipelines'][pipeline]['stages']
        existing_stages = []

        for stage in defined_stages:
            os.chdir(os.path.join(pipeline_dir, pipeline))
            if os.path.exists(stage + '.sh') or os.path.exists(stage):
                existing_stages.append(stage)

        pipelines[pipeline] = {
            'envs': envs,
            'path': relative_path,
            'stages': existing_stages
        }

    popper_config['pipelines'] = pipelines
    pu.write_config(popper_config)
    pu.info("\nYour popper.yml file has been updated! Run git diff to see "
            "the differences.", fg="white")

def cli(ctx, service, key):
    """Creates an archive of the repository on the provided service using an
    access token. Reports an error if archive creation is not successful.
    Currently supported services are Zenodo and Figshare.
    """
    services = {'zenodo': Zenodo, 'figshare': Figshare}
    environment_variables = {
        'zenodo': 'POPPER_ZENODO_API_TOKEN',
        'figshare': 'POPPER_FIGSHARE_API_TOKEN'
    }

    if service not in services:
        pu.fail("The service {} is not supported. See popper archive "
                "--help for more info.".format(service))

    if not key:
        try:
            key = os.environ[environment_variables[service]]
        except KeyError:
            key = get_access_token(service)

    archive = services[service](key)
    archive.publish_snapshot()

    pu.info("Done..!")

def execute(self, cmd, log_tag):
    time_limit = time.time() + self.timeout
    sleep_time = 0.25

    log_tag = log_tag.replace(' ', '_')
    out_fname = os.path.join(os.environ['WORKSPACE'], log_tag + '.out')
    err_fname = os.path.join(os.environ['WORKSPACE'], log_tag + '.err')

    with open(out_fname, "w") as outf, open(err_fname, "w") as errf:
        p = subprocess.Popen(cmd, stdout=outf, stderr=errf, shell=True,
                             preexec_fn=os.setsid)

        while p.poll() is None:
            if self.timeout != 0.0 and time.time() > time_limit:
                os.killpg(os.getpgid(p.pid), signal.SIGTERM)
                sys.stdout.write(' time out!')
                break

            # exponential backoff between polls, capped at 300 seconds
            if sleep_time < 300:
                sleep_time *= 2

            # print a progress dot for every ~10 seconds of upcoming sleep
            for i in range(int(sleep_time)):
                if i % 10 == 0:
                    pu.info('.')

            time.sleep(sleep_time)

    pu.info('\n')
    return p.poll()

def download_actions(self):
    """Clone actions that reference a repository."""
    infoed = False

    for _, a in self.wf['action'].items():
        if 'docker://' in a['uses'] or './' in a['uses']:
            continue

        user = a['uses'].split('/')[0]
        repo = a['uses'].split('/')[1]

        if '@' in a['uses']:
            action_dir = '/'.join(a['uses'].split('@')[0].split('/')[2:])
            version = a['uses'].split('@')[1]
        else:
            action_dir = '/'.join(a['uses'].split('/')[2:])
            version = None

        action_dir = os.path.join('./', action_dir)

        repo_parent_dir = os.path.join(self.actions_cache_path, user)

        if not os.path.exists(repo_parent_dir):
            os.makedirs(repo_parent_dir)

        if not infoed:
            pu.info('[popper] cloning actions from repositories\n')
            infoed = True

        scm.clone(user, repo, repo_parent_dir, version)

        a['repo_dir'] = os.path.join(repo_parent_dir, repo)
        a['action_dir'] = action_dir

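# Worked example of how a 'uses' reference is parsed by the loop above
# (reference value assumed for illustration):
#
#   uses: 'actions/bin/sh@master'
#     user       -> 'actions'
#     repo       -> 'bin'
#     action_dir -> './sh'
#     version    -> 'master'
#
# and the repository is cloned under <actions_cache_path>/actions/bin.
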
def update_badge(status):
    if pu.is_repo_empty():
        pu.warn('No commit log found. Skipping badge server update.')
        return

    remote_url = pu.get_remote_url()
    if not remote_url:
        pu.warn('No remote url found. Skipping badge server update.')
        return

    baseurl = pu.read_config().get('badge-server-url',
                                   'http://badges.falsifiable.us')
    org, repo = remote_url.split('/')[-2:]
    badge_server_url = '{}/{}/{}'.format(baseurl, org, repo)

    # check_output returns bytes; decode and drop the trailing newline
    branch_name = check_output(
        ['git', 'rev-parse', '--abbrev-ref', 'HEAD']).decode('utf-8').strip()

    data = {
        'timestamp': int(time.time()),
        'commit_id': pu.get_head_commit(),
        'status': status,
        'branch': branch_name,
    }

    try:
        r = requests.post(badge_server_url, data=data)
        if r.status_code != 201 and r.status_code != 200:
            pu.warn("Could not create a record on the badge server.")
        else:
            pu.info(r.json()['message'], fg="green")
    except requests.exceptions.RequestException:
        pu.warn("Could not communicate with the badge server.")

def publish_snapshot(self):
    if self.deposition is None:
        self.create_new_deposition()
    else:
        if self.is_last_deposition_published():
            self.create_new_version()
        self.delete_previous_file()

    self.upload_new_file()
    self.update_metadata()

    url = '{}/{}/publish'.format(self.baseurl, self.deposition['id'])
    r = requests.post(url, params=self.params)

    if r.status_code == 201:
        url = r.json()['location']
        r = requests.get(url, params=self.params)
        doi = r.json()['doi']
        doi_url = 'https://doi.org/{}'.format(doi)
        pu.info("Snapshot has been successfully published with DOI "
                "{} and the DOI URL {}".format(doi, doi_url))
        config = pu.read_config()
        config['metadata']['figshare_doi'] = doi
        config['metadata']['figshare_doi_url'] = doi_url
        pu.write_config(config)
    else:
        pu.fail("Status {}: Failed to publish the record."
                .format(r.status_code))

def cli(ctx, pipeline):
    """Removes a popper pipeline from the user's repository, keeping the
    pipelines folder and the .popper.yml file in sync.
    """
    project_root = pu.get_project_root()
    pipelines = pu.read_config()['pipelines']

    if pipeline in pipelines:
        path = pipelines[pipeline]['path']
        pipeline_dir = os.path.join(project_root, path)
    else:
        pu.fail("Pipeline '{}' not in this project".format(pipeline))

    if os.path.isdir(pipeline_dir):
        shutil.rmtree(pipeline_dir)
        popper_config = pu.read_config()
        del popper_config['pipelines'][pipeline]
        pu.info("Pipeline '{}' removed successfully".format(pipeline),
                fg="blue")
        pu.write_config(popper_config)
    else:
        pu.fail("Path '{}' is not a folder".format(pipeline))

def get_access_token(service, cwd):
    """Tries to read the access token from a key file. If not present,
    prompts the user for a key and also stores the key in a key file if the
    user wishes."""
    os.chdir(cwd)
    try:
        with open('.{}.key'.format(service), 'r') as keyfile:
            encrypted_access_token = keyfile.read().strip().encode()
            passphrase = click.prompt(
                'Please enter your passphrase for {}'.format(service),
                hide_input=True).encode()
            f = Fernet(generate_key(passphrase))
            try:
                access_token = f.decrypt(encrypted_access_token).decode("utf8")
            except InvalidToken:
                pu.fail("Invalid passphrase. Please use the same passphrase "
                        "used at the time of encrypting the access_token.")
    except FileNotFoundError:
        pu.info('No access token found for {}'.format(service))
        access_token = click.prompt(
            'Please enter your access token for {}'.format(service))
        if click.confirm('Would you like to store this key?'):
            passphrase = click.prompt('Enter a strong passphrase',
                                      hide_input=True).encode()
            f = Fernet(generate_key(passphrase))
            encrypted_access_token = f.encrypt(access_token.encode())
            with open('.{}.key'.format(service), 'w') as keyfile:
                keyfile.writelines(encrypted_access_token.decode("utf8"))
            pu.info('Your key is stored in .{}.key'.format(service))

    return access_token

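# The generate_key helper used above is not shown in this section. A minimal
# sketch of what it could look like follows; the KDF, salt, and iteration
# count are assumptions, not the project's actual values. Fernet requires a
# 32-byte url-safe base64-encoded key.
import base64
import hashlib

def generate_key(passphrase):
    # Derive a Fernet-compatible key from the passphrase bytes via
    # PBKDF2-HMAC-SHA256 (salt and iterations are illustrative).
    digest = hashlib.pbkdf2_hmac('sha256', passphrase, b'popper-salt', 100000)
    return base64.urlsafe_b64encode(digest)
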
def run_in_docker(project_root, pipe_n, pipe_d, env, timeout, skip,
                  ignore_errors, output_dir, env_vars, args):
    abs_path = '{}/{}'.format(project_root, pipe_d['path'])

    docker_cmd = 'docker run --rm -v {0}:{0}'.format(project_root)
    docker_cmd += ' --workdir={} '.format(abs_path)
    docker_cmd += ' '.join(args)
    docker_cmd += ''.join(
        [' -e {0}="{1}"'.format(k, env_vars[k])
         for k in env_vars]) if env_vars else ''

    if '/' in env:
        img = env
    else:
        img = 'falsifiable/popper:{}'.format(env)

    docker_cmd += ' {} run '.format(img)

    popper_flags = ' --timeout={}'.format(timeout)
    popper_flags += ' --skip {}'.format(','.join(skip)) if skip else ''
    popper_flags += ' --output {}'.format(output_dir)
    popper_flags += ' --ignore-errors' if ignore_errors else ''

    cmd = '{} {}'.format(docker_cmd, popper_flags)
    pu.info('Running in Docker with: {}'.format(cmd))
    check_output(cmd, shell=True)

    with open(os.path.join(abs_path, output_dir, 'popper_status'), 'r') as f:
        status = f.read()

    return status

def cli(ctx, pipeline):
    """Removes a popper pipeline from the user's repository, keeping the
    pipelines folder and the .popper.yml file in sync.
    """
    project_root = pu.get_project_root()

    if pipeline == 'paper':
        pipeline_dir = project_root
    else:
        pipeline_dir = os.path.join(project_root, 'pipelines')

    pipeline_path = os.path.join(pipeline_dir, pipeline)

    if os.path.isdir(pipeline_path):
        shutil.rmtree(pipeline_path)
        popper_config = pu.read_config()
        del popper_config['pipelines'][pipeline]
        pu.info("Pipeline {} removed successfully".format(pipeline),
                fg="green")
        pu.write_config(popper_config)
    else:
        pu.fail("Pipeline {} doesn't exist".format(pipeline))

def cli(ctx, wfile, recursive):
    """Creates a graph in the .dot format representing the workflow.
    """
    wfile_list = list()

    if recursive:
        wfile_list = pu.find_recursive_wfile()
    else:
        wfile_list.append(pu.find_default_wfile(wfile))

    for wfile in wfile_list:
        pipeline = Workflow(wfile, False, False, False, False)
        graph = list()
        wf = pipeline.wf
        workflow_name = list(wf['workflow'].keys())[0]
        action = wf['resolves'][0]
        last_action = get_first_action(wf)

        for act in last_action:
            graph.append("\t{} -> {};\n".format(
                workflow_name.replace(' ', '_').replace('-', '_'),
                act.replace(' ', '_').replace('-', '_')))

        parent_action = cur_action = action

        graph = add(parent_action, cur_action, wf['action'], graph)
        graph = ''.join(list(set(graph)))
        graph = "digraph G {\n" + graph + "}\n"
        pu.info(graph)

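# Example of the output this command prints (workflow and action names
# assumed for illustration):
#
#   digraph G {
#       my_workflow -> first_action;
#       first_action -> second_action;
#   }
#
# Piping it through `dot -Tpng` renders the graph as an image.
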
def cli(ctx):
    """Synchronize your pipelines and popper.yml file if any pipeline or
    stage has been deleted.
    """
    popper_config = pu.read_config()
    project_root = pu.get_project_root()
    pipelines = popper_config['pipelines']

    # remove nonexistent pipelines from .popper.yml
    for p in list(pipelines):
        pipeline = pipelines[p]
        pipe_path = os.path.join(project_root, pipeline['path'])

        # check if the pipeline exists
        if os.path.exists(pipe_path):
            # synchronize stages
            stages = [
                x[:-3] for x in os.listdir(pipe_path) if x.endswith(".sh")
            ]
            pipelines[p]['stages'] = stages
        else:
            del pipelines[p]

    popper_config['pipelines'] = pipelines
    pu.write_config(popper_config)
    pu.info("\nYour popper.yml file has been updated! Run git diff to see "
            "the differences.", fg="white")

def initialize_repo(project_root):
    """This function is used for initializing a popper repository."""
    if pu.is_popperized():
        pu.fail('Repository has already been popperized')
        return

    config = {
        'metadata': {
            'access_right': "open",
            'license': "CC-BY-4.0",
            'upload_type': "publication",
            'publication_type': "article"
        },
        'pipelines': {},
        'popperized': ["github/popperized"]
    }

    pu.write_config(config)

    with open(os.path.join(project_root, '.gitignore'), 'a') as f:
        f.write('.cache\n')
        f.write('popper_logs\n')
        f.write('popper_status\n')

    pu.info('Popperized repository ' + project_root, fg='blue', bold=True)

def get_access_token(service):
    """Tries to read the access token from a key file. If not present,
    prompts the user for a key and also stores the key in a key file if the
    user wishes."""
    project_root = pu.get_project_root()
    os.chdir(project_root)

    try:
        with open('.{}.key'.format(service), 'r') as keyfile:
            encrypted_access_token = keyfile.read().strip()
            passphrase = click.prompt(
                'Please enter your passphrase for {}'.format(service),
                hide_input=True).encode()
            aes = pyaes.AESModeOfOperationCTR(generate_key(passphrase))
            try:
                access_token = aes.decrypt(encrypted_access_token).decode()
            except UnicodeDecodeError:
                pu.fail("Invalid passphrase. Please use the same passphrase "
                        "used at the time of encrypting the access_token.")
    except FileNotFoundError:
        pu.info('No access token found for {}'.format(service))
        access_token = click.prompt(
            'Please enter your access token for {}'.format(service))
        if click.confirm('Would you like to store this key?'):
            passphrase = click.prompt('Enter a strong passphrase',
                                      hide_input=True).encode()
            aes = pyaes.AESModeOfOperationCTR(generate_key(passphrase))
            encrypted_access_token = aes.encrypt(access_token)
            with open('.{}.key'.format(service), 'w') as keyfile:
                keyfile.writelines('{}'.format(''.join(
                    chr(b) for b in encrypted_access_token)))
            pu.info('Your key is stored in .{}.key'.format(service))

    return access_token

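# As with the Fernet variant earlier, generate_key is assumed here and not
# shown in this section. For pyaes the key must be raw bytes of length 16,
# 24, or 32; a minimal sketch under that assumption:
import hashlib

def generate_key(passphrase):
    # Hash the passphrase bytes down to a 32-byte AES-256 key (illustrative
    # only; a salted KDF such as PBKDF2 would be preferable in practice).
    return hashlib.sha256(passphrase).digest()
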
def upload_snapshot(service_url, params, filename):
    """Receives the service_url, the required parameters, and the name of
    the file to be uploaded, and uploads the deposit. The deposit is not
    published at this step. Returns the deposition id."""
    # create the deposit
    pu.info("Uploading the snapshot...")

    headers = {'Content-Type': "application/json"}
    r = requests.post(service_url, params=params, json={}, headers=headers)

    if r.status_code == 401:
        pu.fail("Your access token is invalid. "
                "Please enter a valid access token.")

    deposition_id = r.json()['id']
    upload_url = service_url + '/{}/files'.format(deposition_id)
    files = {'file': open(filename, 'rb')}
    data = {'filename': filename}

    # upload the file
    r = requests.post(upload_url, data=data, files=files, params=params)
    response = {'status_code': r.status_code}

    if r.status_code == 201:
        file_id = r.json()['id']
        response['deposition_id'] = deposition_id
        pu.info(
            "Snapshot has been successfully uploaded. Your deposition id is "
            "{} and the file id is {}.".format(deposition_id, file_id))
    else:
        pu.fail("Status {}: Failed to upload your snapshot. Please "
                "try again.".format(r.status_code))

    return deposition_id

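# Hypothetical invocation against Zenodo's deposition API (token and
# filename are placeholders):
#
#   deposition_id = upload_snapshot(
#       'https://zenodo.org/api/deposit/depositions',
#       {'access_token': '<token>'},
#       'myproject.tar.gz')
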
def run_pipeline(project_root, pipeline, timeout, skip):
    abs_path = os.path.join(project_root, pipeline['path'])

    pu.info("Executing " + os.path.basename(abs_path), fg='blue',
            bold=True, blink=True)

    os.chdir(abs_path)

    check_output('rm -rf popper_logs/ popper_status', shell=True)
    check_output('mkdir -p popper_logs/', shell=True)

    STATUS = "SUCCESS"

    with click.progressbar(pipeline['stages'], show_eta=False,
                           label="Current stage: ", item_show_func=str,
                           bar_template='[%(bar)s] %(label)s %(info)s',
                           show_percent=False) as stages:
        for stage in stages:
            if os.path.isfile(stage):
                stage_file = stage
            elif os.path.isfile(stage + '.sh'):
                stage_file = stage + '.sh'
            else:
                continue

            if skip and stage in skip.split(','):
                continue

            ecode = execute(stage_file, timeout, stages)

            if ecode != 0:
                pu.info("Stage {} failed.".format(stage), fg='red',
                        bold=True, blink=True)
                STATUS = "FAIL"
                pu.info(".err and .out output for {}:".format(stage),
                        fg='red')

                for t in ['.err', '.out']:
                    logfile = 'popper_logs/{}{}'.format(stage_file, t)
                    with open(logfile, 'r') as f:
                        pu.info(f.read())
                break

            if 'valid' in stage:
                STATUS = "GOLD"
                with open('popper_logs/validate.sh.out', 'r') as f:
                    validate_output = f.readlines()
                    if len(validate_output) == 0:
                        STATUS = "SUCCESS"
                    for line in validate_output:
                        if '[true]' not in line:
                            STATUS = "SUCCESS"

    with open('popper_status', 'w') as f:
        f.write(STATUS + '\n')

    pu.info('status : ' + STATUS, fg='green', bold=True)
    sys.stdout.write('\n')

    return STATUS

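# Example of validate-stage output and how it maps to a status (file
# contents are illustrative): every line must contain '[true]' for the run
# to be reported as GOLD; otherwise it is downgraded to SUCCESS.
#
#   [true] runtime is within expected bounds
#   [true] output matches reference results       -> GOLD
#
#   [true] runtime is within expected bounds
#   [false] output matches reference results      -> SUCCESS
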
def initialize_repo(project_root):
    if pu.is_popperized():
        pu.fail('Repository has already been popperized')

    with open(os.path.join(project_root, '.popper.yml'), 'w') as f:
        f.write('{ metadata: { }, pipelines: { } }\n')

    pu.info('Popperized repository ' + project_root, fg='blue', bold=True)

def cli(ctx, service, history, inplace):
    """Generates markdown for the badge of a service. Currently available
    services are: CloudLab, Chameleon, Google Cloud Engine and Popper.
    """
    if history and service:
        raise BadArgumentUsage("--history can't be combined with other flags.")

    remote_url = pu.get_remote_url()
    if not remote_url:
        pu.fail("Failed to infer remote URL for git repository.")

    org, repo = remote_url.split('/')[-2:]

    if history:
        baseurl = pu.read_config().get('badge-server-url',
                                       'http://badges.falsifiable.us')
        try:
            r = requests.get('{}/{}/{}/list'.format(baseurl, org, repo))
            if r.json():
                pu.print_yaml(r.json())
            else:
                pu.info("No records to show")
        except requests.exceptions.RequestException:
            pu.fail("Could not communicate with the badge server")
        sys.exit(0)

    if not service and inplace:
        raise BadArgumentUsage("--inplace must be given with --service")

    if service is None:
        pu.fail('Please specify a service name.')

    if service not in services:
        pu.fail('Unknown service {}.'.format(service))

    if service == 'popper':
        org, repo = remote_url.split('/')[-2:]
        markup = '[![{}]({})]({})'.format(
            services[service][0],
            services[service][1].format(org, repo),
            services[service][2])
    else:
        markup = '[![{}]({})]({})'.format(*services[service])

    if not inplace:
        pu.info(markup)
        sys.exit(0)

    try:
        os.chdir(pu.get_project_root())
        with open('README.md', 'r+') as f:
            content = f.read()
            f.seek(0, 0)
            f.write(markup + '\n\n' + content)
    except IOError as e:
        if e.errno == ENOENT:
            pu.fail("README.md does not exist at the root of the project")

def cli(ctx, name, stages, envs, existing, infer_stages):
    """Initializes a repository or a pipeline. Without an argument, this
    command initializes a popper repository. If an argument is given, a
    pipeline or paper folder is initialized. If the given name is 'paper',
    then a 'paper' folder is created. Otherwise, a pipeline named NAME is
    created and initialized inside the 'pipelines' folder.

    By default, the stages of a pipeline are: setup, run, post-run, validate
    and teardown. To override these, the `--stages` flag can be provided,
    which expects a comma-separated list of stage names. If the --stages
    flag is used, the teardown stage must be given last.

    If the --existing flag is given, the NAME argument is treated as a path
    to a folder, which is assumed to contain bash scripts. --stages must be
    given.
    """
    # check that the teardown stage, if present, is the last stage
    if stages and 'teardown' in stages and stages.split(',')[-1] != 'teardown':
        raise BadArgumentUsage(
            '--stages: teardown should be the last stage.'
            ' Consider renaming it or putting it at the end.')

    project_root = pu.get_project_root()

    # init repo
    if name is None:
        initialize_repo(project_root)
        return

    if not pu.is_popperized():
        pu.fail("Repository has not been popperized yet. See 'init --help'")

    if isdir(os.path.join(project_root, name)) and existing:
        # existing pipeline
        abs_path = os.path.join(project_root, name)
        relative_path = name
        if infer_stages:
            stages = ",".join(
                map(lambda x: x[:-3], sorted(glob.glob1(abs_path, '*.sh'))))
        else:
            initialize_existing_pipeline(abs_path, stages, envs)
    elif name == 'paper':
        # create a paper pipeline
        abs_path = os.path.join(project_root, 'paper')
        relative_path = os.path.join('paper')
        initialize_paper(abs_path, envs)
    else:
        # new pipeline
        abs_path = os.path.join(project_root, 'pipelines', name)
        relative_path = os.path.join('pipelines', name)
        initialize_new_pipeline(abs_path, stages, envs)

    pu.update_config(name, stages, envs, relative_path)

    pu.info('Initialized pipeline ' + name, fg='blue', bold=True)

def create_archive(project_root, project_name):
    """Creates a git archive of the popperized repository and returns the
    filename."""
    pu.info("Creating the archive...")
    os.chdir(project_root)

    archive_file = project_name + '.tar.gz'
    command = 'git archive master | gzip > ' + archive_file
    subprocess.call(command, shell=True)

    return archive_file

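# Usage sketch (path and name assumed): produces 'myproject.tar.gz' at the
# repository root from the contents of the master branch.
#
#   archive_file = create_archive('/path/to/repo', 'myproject')
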
def docker_start(self):
    pu.info('{}[{}] docker start\n'.format(self.msg_prefix,
                                           self.action['name']))
    cmd = 'docker start --attach {}'.format(self.cid)
    _, ecode = pu.exec_cmd(
        cmd, verbose=(not self.quiet), debug=self.debug,
        log_file=self.log_filename, dry_run=self.dry_run)
    return ecode

def cli(ctx, pipeline, add, rm):
    """Define or remove executions of a pipeline."""
    config, pipeline_config = pu.read_config(pipeline)

    if add and rm:
        raise UsageError("Both add and rm cannot be given at the same time. "
                         "See popper env-vars --help for more information.")

    if add:
        env_vars = pipeline_config.get('vars', [])
        vars_add = {}
        for var in add:
            key, val = var.split('=')
            vars_add[key] = val
        env_vars.append(vars_add)
        pu.update_config(pipeline, vars=env_vars)
    elif rm:
        env_vars = pipeline_config.get('vars', None)
        if not env_vars:
            pu.fail("No environment variables defined for this pipeline.")

        vars_del = {}
        for var in rm:
            key, val = var.split('=')
            vars_del[key] = val

        index = -1
        for var_set in env_vars:
            if len(var_set.keys()) != len(vars_del.keys()):
                continue
            # a set matches only if every key-value pair is identical;
            # .get avoids a KeyError when the sets have different keys
            matches = True
            for key in vars_del:
                if var_set.get(key) != vars_del[key]:
                    matches = False
            if matches:
                index = env_vars.index(var_set)
                break

        if index != -1:
            env_vars.pop(index)
            pu.update_config(pipeline, vars=env_vars)
        else:
            pu.fail("The environment variable list does "
                    "not exist for this pipeline.")
    else:
        try:
            env_vars = pipeline_config['vars']
            if len(env_vars) == 0:
                raise KeyError
            pu.print_yaml(env_vars)
        except KeyError:
            pu.info("No environment variables defined for this pipeline.")

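# Hypothetical command-line usage (pipeline name assumed); note that all
# --add pairs given together define a single execution (one variable set):
#
#   popper env-vars mypipeline                      # list variable sets
#   popper env-vars mypipeline --add N=1 --add M=2  # define an execution
#   popper env-vars mypipeline --rm N=1 --rm M=2    # remove it again
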
def cli(ctx, action, wfile, timeout, workspace):
    """Executes one or more pipelines and reports on their status.
    """
    pipeline = Workflow(wfile)
    pipeline.workspace = workspace
    pipeline.timeout = timeout
    pipeline.run(action)

    if action:
        pu.info('\nAction "{}" finished successfully.\n\n'.format(action))
    else:
        pu.info('\nWorkflow finished successfully.\n\n')