def main():
    # get input arguments for download link
    args = parse_input_args()
    download_url = args.download_url
    output_filename = download_url.split('/')[-1]

    # check if COSMIC login credentials ENV variable exists
    cosmic_db_creds = environ.get('COSMIC_DB_CREDS')
    if cosmic_db_creds is None:
        print(
            'COSMIC database login credentials not found on this system. '
            'Please ensure that the COSMIC_DB_CREDS environment variable exists.'
        )
        print('Exiting program')
        sys.exit(1)

    # use the COSMIC credentials to get the download link
    print('Fetching authorized download link...')
    auth_download_link = get_cosmic_download_link(download_url, cosmic_db_creds)
    print('Got authorized download link.')

    # download the actual database file
    print('Downloading file from COSMIC...')
    sh.bash("-c", f"curl '{auth_download_link}' -o {output_filename}", _fg=True)

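# A minimal sketch of the "bash -c" pattern used above, with shlex.quote()
# applied to the interpolated values so a URL or filename containing spaces or
# shell metacharacters cannot break (or inject into) the command line. The
# url/output_filename arguments here are placeholders, not values from the
# snippet above; _fg=True attaches the child to the terminal so curl's
# progress bar renders.
import shlex
import sh

def download(url, output_filename):
    cmd = "curl {} -o {}".format(shlex.quote(url), shlex.quote(output_filename))
    sh.bash("-c", cmd, _fg=True)
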
def process_mut_export(filename, sites):
    # run a shell command to create an intermediate file
    sites_file = 'sites.tmp'
    print('Preprocessing cosmic mutant export file...')
    # check operating system
    gz_decomp = get_gzip_app()
    sh.bash("-c", f"{gz_decomp} {filename} | cut -f 7,8,17 >{sites_file}")
    print('Calculating file size...')
    row_count = int(sh.bash("-c", f"cat {sites_file} | wc -l"))

    # parse the intermediate file
    print('Parsing intermediate file...')
    counter = 0
    with progressbar.ProgressBar(max_value=row_count) as pbar:
        for sites_item in read_sites_file(sites_file):
            temp_arr = sites_item.strip('\n').split('\t')
            cosmic_id = temp_arr[2]
            site = temp_arr[1]
            tumor_id = temp_arr[0]
            if cosmic_id in sites:
                if site in sites[cosmic_id]:
                    sites[cosmic_id][site].add(tumor_id)
                else:
                    sites[cosmic_id][site] = {tumor_id}
            else:
                sites[cosmic_id] = {site: {tumor_id}}
            counter += 1
            pbar.update(counter)
    print('Parsing complete.')
    print('Removing temporary file...')

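# The snippet above feeds the output of `wc -l` straight to int(): sh returns
# a RunningCommand object whose text form is the captured stdout, so the
# coercion parses the count. A minimal sketch of that behavior, assuming a
# Unix-like system where /etc/hosts exists:
import sh

row_count = int(sh.bash("-c", "wc -l < /etc/hosts"))
print(row_count)
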
def execBashFile(self, path):
    try:
        sh.bash(path)
        return True
    except Exception as e:
        logException(e, self, location="execBashFile")
        return False

def test_flakehell_passes(cookies: Cookies, context: Dict[str, str],
                          context_override: Dict[str, str]) -> None:
    """Generated project should pass flakehell."""
    result = cookies.bake(extra_context={**context, **context_override})

    try:
        # The black step is executed by the post hooks.
        # We need to run everything in the same step so that flakehell uses
        # the virtualenv.
        sh.bash(
            "-c",
            "virtualenv -p `which python3.7` env; "
            "source env/bin/activate; "
            "pip install pip-tools; "
            "pip-compile -U --allow-unsafe setup.py; "
            "pip-compile -U --allow-unsafe requirements-dev.in "
            " --output-file requirements-dev.txt; "
            "pip install -r requirements-dev.txt; "
            "pip install -e .; "
            "black --exclude env .; "
            "flakehell lint src tests",
            _cwd=str(result.project),
        )
    except sh.ErrorReturnCode as error:
        pytest.fail(error.stdout.decode())

def calibrate_coarse(self, monte_carlo_iterations=50):
    """Run SLEUTH coarse calibration.

    Parameters:
    - `monte_carlo_iterations`: iterations for the coarse calibration step.
    """
    coarse_dir = join(self.output_path, 'coarse')
    create_dir(coarse_dir)

    coarse_params = {
        'diff': 50, 'diff_start': 0, 'diff_step': 25, 'diff_end': 100,
        'brd': 50, 'brd_start': 0, 'brd_step': 25, 'brd_end': 100,
        'sprd': 50, 'sprd_start': 0, 'sprd_step': 25, 'sprd_end': 100,
        'slp': 50, 'slp_start': 0, 'slp_step': 25, 'slp_end': 100,
        'rg': 50, 'rg_start': 0, 'rg_step': 25, 'rg_end': 100,
        'output_dir': coarse_dir + '/'
    }

    with open(join(self.output_path, 'scenario.%s.coarse' % self.location), 'w') as f:
        scenario_file_path = f.name
        f.write(self.create_scenario_file(coarse_params, monte_carlo_iterations))

    self.status['calibration']['coarse']['start'] = datetime.datetime.now()
    self.status['calibration']['coarse']['params'] = coarse_params
    self.save_status()

    if config['use_mpi']:
        mpirun('-np', config['mpi_cores'], config['grow_binary'],
               'calibrate', scenario_file_path,
               _out=join(coarse_dir, 'mpi_out.log'),
               _err=join(coarse_dir, 'mpi_err.log'))
    else:
        bash('-c',
             "%s calibrate %s" % (config['grow_binary'], scenario_file_path))

    self.status['calibration']['coarse']['end'] = datetime.datetime.now()
    self.save_status()

def run_python_tests(self: object) -> None:
    bash_command_string = ". {0}; ./nosetests/runpytest.sh".format(
        self.rip_environment_path)
    sh.bash("-c",
            bash_command_string,
            _cwd=self.normalized_path,
            _out=sys.stdout.buffer,
            _err=sys.stderr.buffer)

def get_mace(configs, abis, output_dir, build_mace):
    if build_mace:
        sh.bash("tools/build_mace.sh", abis, os.path.abspath(output_dir),
                _fg=True)
    else:
        file_path = download_file(configs, "libmace.zip", output_dir)
        sh.unzip("-o", file_path, "-d", "third_party/mace")

def stop_tor():
    log.debug('stop tor')
    cancel_timer()
    try:
        sh.bash('-c', 'service tor stop')
    except Exception as exc:
        print(exc)
        # it doesn't matter if it's already stopped
        pass

def start_tor():
    log.debug('start tor')
    try:
        sh.bash('-c', 'service tor start')
    except Exception as exc:
        print(exc)
        raise
    else:
        reset_timer()

def chargingbackend(ctx):
    print("Installing charging backend")
    name = charg.get("url").split("/")[-1][:-4]
    cd(name)
    virtualenv("virtenv")
    bash("python-dep-install.sh")
    cd("..")

def run_runtests(self: object, test_path) -> None:
    if test_path is None:
        test_path = "{0}/tests".format(self.normalized_path)
    bash_command_string = ". {0}; runtests {1}".format(
        self.rip_environment_path, test_path)
    sh.bash("-c",
            bash_command_string,
            _cwd=self.normalized_path,
            _out=sys.stdout.buffer,
            _err=sys.stderr.buffer)

def rebootstrap_site(name, folder, python, mr_developer=False):
    """(Re)run bootstrap.py & buildout.

    This will make the Plone buildout.cfg setup pull all Python code needed
    to run the site from pypi.python.org and various other sources. This
    includes site addon code.

    Sudo first before doing this.

    :param mr_developer: Do Mr. Developer activation of src/ packages
    """
    from sh import bash

    def build_it_out(_ok_code=[0]):
        """ Little helper """
        # We really want to capture all output here since buildout is a bitch
        exit_code = bash("-c", "cd %s && bin/buildout" % folder,
                         _out_bufsize=0,
                         _out=_unbuffered_stdout,
                         _err=_unbuffered_stdout,
                         _ok_code=_ok_code
                         ).wait().exit_code
        return exit_code

    # We cannot pass the current working folder (cd) through sudo,
    # so we do a trick here by running the command through bash
    bash("-c", "cd %s && %s bootstrap.py" % (folder, python))

    print "Running buildout on %s, Mr. Developer support is %s" % (
        folder, "activated" if mr_developer else "deactivated")

    _ok_code = [0]
    # Mr. Developer based buildouts will bonk with exit code 1 on first run
    if mr_developer:
        _ok_code.append(1)

    exit_code = build_it_out(_ok_code)

    # This is generated by the first buildout run,
    # before bo fails on an unknown pkg
    develop = os.path.join(folder, "bin", "develop")

    if mr_developer and (exit_code == 1) and os.path.exists(develop):
        # Buildout returns 1 when it encounters a non-activated pkg
        # which is in src/
        bash("-c", "cd %s && bin/develop activate ''" % folder,
             _out_bufsize=0,
             _out=_unbuffered_stdout,
             _err=_unbuffered_stdout,
             ).wait()

        # Here we go again
        build_it_out()

def _execute(self, command_line):
    LOG.debug(
        'Begin executing command: %s',
        ' '.join("'" + arg + "'" for arg in command_line))
    self.status = "EXECUTING"
    self.exit_code = None
    try:
        sh.bash('-x', *command_line,
                _out=self.write_stdout, _err=self.write_stderr)
    except sh.ErrorReturnCode as error:
        exit_code = error.exit_code
        status = "FAILED"
        severity = logging.ERROR
    except Exception:
        exit_code = 1
        status = "ERROR"
        severity = logging.ERROR
        LOG.exception('Internal error.')
    except BaseException:
        exit_code = 1
        severity = logging.WARNING
        status = "INTERRUPTED"
    else:
        exit_code = 0
        status = 'SUCCESS'
        severity = logging.DEBUG

    self.exit_code = exit_code
    self.status = status

    if exit_code != 0 and self.log_level < logging.ERROR:
        stream = sys.stderr
        stream.write('=' * 79 + '\n')
        sh.tail('-n', '100', self.log_path + '.ansi', _out=stream)
        stream.write('=' * 79 + '\n')

    LOG.log(
        severity,
        'Finished executing command:\n'
        '  Command line: %s\n'
        '  Status: %s\n'
        '  Exit code: %s\n'
        '  Log file: %s\n',
        command_line, status, exit_code, self.log_path)
    return exit_code

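# A compact sketch of the exception handling pattern above: sh raises
# sh.ErrorReturnCode (in practice a dynamic subclass such as
# sh.ErrorReturnCode_3) for any nonzero exit not whitelisted via _ok_code,
# and the exception object exposes .exit_code, .stdout, and .stderr.
import sh

try:
    sh.bash("-c", "echo oops >&2; exit 3")
except sh.ErrorReturnCode as error:
    print("failed with exit code", error.exit_code)   # 3
    print("stderr was:", error.stderr.decode())       # "oops"
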
def build_wasm(args):
    crate_path = os.path.normpath(args.crate_path)
    sh.bash(os.path.join(crate_path, "build.sh"), "--with-npm-install",
            **SH_KWARGS)

    output_dir_path = os.path.join(args.upload_path, APP_NAME)
    os.makedirs(output_dir_path)
    shutil.copytree(os.path.join(args.crate_path, "dist"),
                    os.path.join(output_dir_path, "v%s" % args.version))
    shutil.copytree(os.path.join(args.crate_path, "dist"),
                    os.path.join(output_dir_path, "%s" % args.branch))

def index(request):
    if request.method == 'POST':
        error = ''
        try:
            bash(conf.TUTORIALS_UPDATE_SCRIPT.get())
        except Exception, ex:
            error = unicode(ex)

        result = {
            'tutorials': _get_tutorials_version(),
            'error': error
        }
        return HttpResponse(json.dumps(result))

def test_modes():
    err_count = 0
    for i in modes:
        bash(c='make clean', _timeout=2)
        app_cmd = i[0] + ' ' + i[1]
        if app_cmd.isspace():
            print('DEFAULT', end=": ", flush=True)
        else:
            print(app_cmd, end=": ", flush=True)
        # renamed from `str` to avoid shadowing the builtin
        output = bash(c='./build.sh ' + app_cmd, _timeout=10)
        err_count += test_library(output)
    return err_count

def run(self, commands, quiet=False):
    """Run a command or a script of commands on the local machine.

    Obviously this is silly; the point is to have the same API for all four
    environments: local, vz, ssh, and ssh to vz.
    """
    if quiet:
        return sh.bash(_in=commands)
    else:
        return sh.bash(_in=commands, _out=pse, _err=pse, _tee=True)

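# A minimal sketch of the _in pattern used above: passing a string as _in
# feeds it to bash over stdin, so a multi-line script can be run without
# writing a temporary file, and the captured output comes back on the result.
import sh

result = sh.bash(_in="echo one\necho two\n")
print(str(result))   # "one\ntwo\n"
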
def postprocessing(cn, jobid, execute='slurm', packyear=True, rmyear=True,
                   endmon=12):
    '''tfile - template file'''
    cn['year'] = str(cn['date_present'].year)
    cn['mon'] = str(cn['date_present'].month).zfill(2)
    cn['day'] = str(calendar.monthrange(cn['date_present'].year,
                                        cn['date_present'].month)[1]).zfill(2)
    cn['nyear'] = str(cn['date_next'].year)
    cn['nmon'] = str(cn['date_next'].month).zfill(2)
    cn['jobid'] = str(jobid)
    cn['packyear'] = packyear  # if we pack years after endmon (usually 12)
    cn['rmyear'] = rmyear  # if we have to clean up after endmon (usually 12)
    cn['endmon'] = str(endmon).zfill(2)  # month of the year after which we pack everything for a year

    with open('postprocessing.sh', 'w') as ofile:
        ofile.write(TEMP_ENV.get_template(cn['postprocessing_template']).render(cn))

    if execute == 'shell':
        os.system('chmod +x ./postprocessing.sh')
        logging.info('postprocessing start')
        process = Popen('./postprocessing.sh', shell=True,
                        stdout=PIPE, stderr=PIPE)
        (out, err) = process.communicate()
        logging.debug(out)
        logging.debug(err)
        logging.info('postprocessing is over')
    elif execute == 'slurm':
        submit_job("./postprocessing.sh")
    elif execute == 'back':
        os.system('chmod +x ./postprocessing.sh')
        logging.info('postprocessing start in background mode, good luck...')
        sh.bash('./postprocessing.sh', _bg=True, _timeout=1200,
                _out='o.txt', _err='e.txt')
    else:
        pass

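# A minimal sketch of the 'back'(ground) branch above: _bg=True makes the
# call return immediately while the process keeps running, and .wait() later
# blocks until it exits; stdout is still captured on the returned object
# unless redirected with _out as the snippet does.
import sh

proc = sh.bash("-c", "sleep 1; echo done", _bg=True)
# ... do other work here while the script runs ...
proc.wait()
print(proc.stdout.decode().strip())   # "done"
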
def run(self, threshold=3.0, cleanup=True):
    # Clean up #
    if cleanup:
        shutil.rmtree(self.p.working_dir)
        for f in self.files_to_keep:
            if os.path.exists(self.base_dir + f):
                os.remove(self.base_dir + f)
    # Move to the working dir #
    self.saved_cwd = os.getcwd()
    os.chdir(self.p.working_dir)
    # Make the abundances file #
    self.taxonomy.otu_csv_norm.transpose(self.abundances, d=',')
    # Make the most abundant OTU file (launching one perl command per OTU sequence takes forever) #
    path = "centers_N%i.fa" % self.N
    if len(self.taxonomy.centers) <= self.N:
        self.taxonomy.centers.copy(path)
    else:
        otus = self.taxonomy.otu_table_norm.sum()
        otus.sort()
        highest_otus = otus[-self.N:]
        sequences = (seq for seq in SeqIO.parse(self.taxonomy.centers, 'fasta')
                     if seq.id in highest_otus)
        with open(path, 'w') as handle:
            SeqIO.write(sequences, handle, 'fasta')
    # Run the Quince pipeline with a special version of R #
    header = 'module load R/3.0.1' + '\n'
    header += 'export R_LIBS="$HOME/R/x86_64-unknown-linux-gnu-library/3.0/"' + '\n'
    header += 'unset R_HOME' + '\n'
    # Log standard out and standard error as well as both together #
    tee = "((%s | tee stdout.log) 3>&1 1>&2 2>&3 | tee stderr.log) &> stdboth.log"
    # See seqenv documentation for parameters #
    identity = 100 - threshold
    params = ['-f', self.taxonomy.centers, '-s', self.abundances,
              '-n', self.N, '-m', identity, '-p', '-c', nr_threads]
    # Activate bash debug mode #
    command = "bash -x " + seqenv_script + ' ' + ' '.join(map(str, params))
    # Launch the whole thing with sh #
    self.script = header + tee % command
    sh.bash(TmpFile.from_string(self.script), _out=self.p.out, _err=self.p.err)
    # Move things into place #
    if cleanup:
        for f in self.files_to_keep:
            shutil.move(f, "../")
    # Go back #
    os.chdir(self.saved_cwd)

def build_wasm(args):
    crate_paths = [os.path.normpath(crate_path) for crate_path in args.crate_path]
    for crate_path in crate_paths:
        sh.npm("install", "--prefix", crate_path, **SH_KWARGS)
        sh.bash(os.path.join(crate_path, "build_dist.sh"), **SH_KWARGS)

        output_dir_path = os.path.join(args.upload_path, APP_NAME)
        os.makedirs(output_dir_path)
        shutil.copytree(
            os.path.join(crate_path, "dist"),
            os.path.join(output_dir_path, "v%s" % args.version))
        shutil.copytree(
            os.path.join(crate_path, "dist"),
            os.path.join(output_dir_path, "%s" % args.branch))

def xxd_diff(old, new):
    '''just for fun'''
    # xxd -p for a pure hexdump
    # -p must be passed in before the file name
    shell = check_shell()
    # supported: bash, zsh
    # not supported: dash (no process substitution)
    cmd = 'diff <(xxd -p {}) <(xxd -p {})'.format(old, new)
    if shell['bash']:
        return sh.bash('-c', cmd, _ok_code=[0, 1])
    elif shell['zsh']:
        return sh.zsh('-c', cmd, _ok_code=[0, 1])
    else:
        tmp_old = '/var/tmp/old_hex'
        tmp_new = '/var/tmp/new_hex'
        sh.xxd('-p', old, _out=tmp_old)
        sh.xxd('-p', new, _out=tmp_new)
        # diff the hexdumps, not the original files
        patch = sh.diff(tmp_old, tmp_new, _iter=True, _ok_code=[0, 1])
        sh.rm('-f', tmp_old)
        sh.rm('-f', tmp_new)
        return patch

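# diff exits 0 when files match and 1 when they differ; sh treats any nonzero
# exit as an error unless _ok_code whitelists it, which is why the snippet
# above passes _ok_code=[0, 1]. A minimal sketch using two throwaway files in
# /tmp (paths chosen here for illustration only):
import sh

sh.bash("-c", "printf a > /tmp/_a; printf b > /tmp/_b")
result = sh.diff("/tmp/_a", "/tmp/_b", _ok_code=[0, 1])
print(result.exit_code)   # 1: the files differ, but no exception was raised
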
def run_pysource_script(self, commands=(), bg=False, env=None, strip=True,
                        _in=None, _out=None):
    commands = list(commands)
    script_path = self._create_script(commands)
    if env is None:
        env = os.environ.copy()
    env[PYSOURCE_HOME_ENV] = self.workdir
    conf = {'_env': env}
    if _in is not None:
        conf['_in'] = _in
        conf['_in_bufsize'] = 0
    if _out is not None:
        conf['_out'] = _out
        conf['_out_bufsize'] = 0
    if bg:
        conf['_bg'] = True
        sh.bash(script_path, **conf)
    else:
        output = sh.bash(script_path, **conf).wait()
        if strip:
            output = output.strip()
        return output

def execute_bash(code, stdin=None):
    logger.info("Executing bash script")
    logger.debug(code)

    if stdin is not None:
        if not stdin.strip():
            logger.info("Skipping execution with empty content")
            return True, stdin
        stdin = stdin.encode("utf-8")

    with tempfile.NamedTemporaryFile() as fp:
        logger.debug("Saving code to %r", fp.name)
        fp.write(code.encode('utf-8'))
        fp.flush()

        logger.debug("Launching script %r", fp.name)
        try:
            result = sh.bash(fp.name, _in=stdin)
            ok = True
        except sh.ErrorReturnCode as exc:
            result = exc
            ok = False

    stdout = result.stdout.decode('utf-8')
    stderr = result.stderr.decode('utf-8')

    if ok:
        log = logger.debug
        report = stdout
    else:
        log = logger.error
        report = stderr

    log("Bash exit_code: %r", result.exit_code)
    log("Bash stdout: %s", stdout)
    log("Bash stderr: %s", stderr)

    return ok, report

def assert_completion(self, expected, args=None, filter_non_options=False):
    args = args or []
    args += ["''"]
    cmd = ['clue'] + list(args)
    partial_word = cmd[-1]
    cmdline = ' '.join(cmd)
    lines = [
        'set -e',
        'eval "$(register-python-argcomplete clue)"',
        'export COMP_LINE="{}"'.format(cmdline),
        'export COMP_WORDS=({})'.format(cmdline),
        'export COMP_CWORD={}'.format(cmd.index(partial_word)),
        'export COMP_POINT={}'.format(len(cmdline)),
        '_python_argcomplete {}'.format(cmd[0]),
        'echo ${COMPREPLY[*]}'
    ]
    script_path = self.workdir / 'completions.sh'
    script_path.write_text('\n'.join(lines))
    p = sh.bash(script_path)
    completions = p.stdout.strip().split(' ')
    if filter_non_options:
        completions = [c for c in completions if c.startswith('-')]
    self.assertEqual(
        len(expected), len(completions),
        'expected: {}, actual: {}'.format(expected, completions))
    for expected_completion in expected:
        self.assertIn(expected_completion, completions)

def release():
    """Release/publish the code."""
    # Rebase and push the master with tags to origin.
    print("Here are the remaining TODO items:")
    print(bash('TODO.sh'))
    print()
    if not util.yes("Do you still want to rebase and push the master with tags "
                    "to origin (y/n)?"):
        util.delayed_exit()
    git.rebase('-i', 'origin/master')
    git.push('--tags', 'origin', 'master')

    # Upload to PyPI.
    if not util.yes("Do you want to upload to PyPI (this is permanent!) "
                    "(y/n)?"):
        util.delayed_exit()
    setup.sdist.upload()

    # Reset the version number.
    # In natu/__init__.py:
    set_version('None')
    # In CHANGES.txt:
    newheading = ('TBD (in `GitHub <https://github.com/kdavies4/natu>`_ '
                  'only) -- Updates:')
    newlink = ('.. _vx.x.x: '
               'https://github.com/kdavies4/natu/archive/vx.x.x.zip')
    rpls = [(r'(<http://semver.org>`_\.)', r'\1\n\n' + newheading),
            (r'(Initial release\n\n\n)', r'\1%s\n' % newlink)]
    util.replace('CHANGES.txt', rpls)

def assert_completion(self, expected, args=None, filter_non_options=False):
    args = args or []
    args += ["''"]
    cmd = ['claw'] + list(args)
    partial_word = cmd[-1]
    cmdline = ' '.join(cmd)
    lines = [
        'set -e',
        'eval "$(register-python-argcomplete claw)"',
        'export COMP_LINE="{}"'.format(cmdline),
        'export COMP_WORDS=({})'.format(cmdline),
        'export COMP_CWORD={}'.format(cmd.index(partial_word)),
        'export COMP_POINT={}'.format(len(cmdline)),
        '_python_argcomplete {}'.format(cmd[0]),
        'echo ${COMPREPLY[*]}'
    ]
    script_path = self.workdir / 'completions.sh'
    script_path.write_text('\n'.join(lines))
    p = sh.bash(script_path)
    completions = p.stdout.strip().split(' ')
    if filter_non_options:
        completions = [c for c in completions if c.startswith('-')]
    self.assertEqual(len(expected), len(completions))
    for expected_completion in expected:
        self.assertIn(expected_completion, completions)

def resource(charm_entity, channel, builder, out_path, resource_spec):
    out_path = Path(out_path)
    resource_spec = yaml.load(Path(resource_spec).read_text())
    resource_spec_fragment = resource_spec.get(charm_entity, None)
    click.echo(resource_spec_fragment)
    if not resource_spec_fragment:
        raise SystemExit('Unable to determine resource spec for entity')

    os.makedirs(str(out_path), exist_ok=True)
    charm_id = sh.charm.show(charm_entity, '--channel', channel, 'id')
    charm_id = yaml.load(charm_id.stdout.decode())
    try:
        resources = sh.charm('list-resources', charm_id['id']['Id'],
                             channel=channel, format='yaml')
    except sh.ErrorReturnCode_1:
        click.echo('No resources found for {}'.format(charm_id))
        return
    resources = yaml.load(resources.stdout.decode())

    builder_sh = Path(builder).absolute()
    click.echo(builder_sh)
    for line in sh.bash(str(builder_sh), _cwd=out_path, _iter=True):
        click.echo(line.strip())

    for line in glob('{}/*'.format(out_path)):
        resource_path = Path(line)
        resource_fn = resource_path.parts[-1]
        resource_key = resource_spec_fragment.get(resource_fn, None)
        if resource_key:
            out = sh.charm.attach(charm_entity, '--channel', channel,
                                  '{}={}'.format(resource_key, resource_path))
            click.echo(out)

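# A minimal sketch of the _iter=True pattern above: instead of blocking until
# the process exits, the call returns a lazy iterator that yields stdout line
# by line while the script is still running, which is what lets the loop echo
# build output incrementally.
import sh

for line in sh.bash("-c", "for i in 1 2 3; do echo line $i; done", _iter=True):
    print(line.strip())
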
def _run_bash(self, bash_input):
    log.debug('Running bash command: "{}"'.format(bash_input))
    f = None
    if self.log_file:
        f = open(self.log_file, 'a+')
    output = {'stdout': ''}
    try:
        def process_line(line):
            log.debug('Got line: %s' % line)
            output['stdout'] += line
            log.debug('BashExec stdout: {}'.format(line))
            if f:
                f.write(line)
            if self.event_mgr:
                if len(line) > 0 and line[-1] == '\n':
                    line = line[:-1]
                self.event_mgr.trigger('on_task_event', {'output': line})

        proc = bash(_in=bash_input, _out=process_line, _err=process_line)
        proc.wait()
        log.debug('Finished: %s, %s, %s' % (proc.exit_code, proc.stdout, proc.stderr))
    except ErrorReturnCode as e:
        log.debug('BashExec failed')
        raise BashExecuteError(e.stderr, e.exit_code)
    return output['stdout']

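# A stripped-down sketch of the callback pattern above: when _out (or _err)
# is a callable, sh invokes it for each line of output as the process runs,
# so the caller can log, persist, and forward lines from a single place.
import sh

collected = []
proc = sh.bash("-c", "echo first; echo second", _out=collected.append)
proc.wait()
print(collected)   # ['first\n', 'second\n']
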
def proxyCommand(host, port, chargingport, glassfishport):
    print("Installing logic proxy")
    name = proxy.get("url").split("/")[-1][:-4]
    cd(name)
    bash('install.sh')
    if not os.path.isfile("config.js"):
        shutil.copy2("config.js.template", "config.js")

    with open("config.js") as f:
        text = f.read()

    text = text.replace("config.port = 80", "config.port = {}".format(port))\
               .replace("'/proxy'", "''")
    texts = text.split("\n")
    texts = texts[:59] + generate_endpoints(glassfishport, chargingport) + texts[121:]
    text = "\n".join(texts)

    with open("config.js", "w") as f:
        f.write(text)

    if os.path.isdir('indexes'):
        rm('-rf', 'indexes')
    mkdir('indexes')
    node('fill_indexes.js')

    print("""
Finished!
Now, go to your IdM instance (e.g. https://account.lab.fiware.org) and create an application with these settings:

- URL: http://{host}:{port}
- Callback URL: http://{host}:{port}/auth/fiware/callback

Create a role called "seller" and attach the role to the users you prefer.

Modify the config.js file with:
- config.oauth2.clientID: the client ID that you got when you created the application
- config.oauth2.clientSecret: the client Secret that you got when you created the application
- config.oauth2.callbackURL = http://{host}:{port}/auth/fiware/callback

Please refer to http://business-api-ecosystem.readthedocs.io/en/latest/installation-administration-guide.html#configuration
for details on configuration settings
""".format(host=host, port=port))

def run_sync(self, cmd: str, fail_on_error: bool = False, single_result=True,
             _out: str = None, _err: str = None, _tee: bool = False):
    def _prepare_kwargs(_err, _out, _tee):
        kwargs = {}
        if _out:
            kwargs['_out'] = _out
        if _err:
            kwargs['_err'] = _err
        if _tee:
            kwargs['_tee'] = True
        return kwargs

    self.LOG.info("Running command: {}".format(cmd))
    output = []
    exit_code = None
    try:
        kwargs = _prepare_kwargs(_err, _out, _tee)
        if "_out" not in kwargs:
            # TODO There were problems passing this lambda from _prepare_kwargs
            process = sh.bash("-c", cmd, **kwargs,
                              _out=lambda line: output.append(line))
        else:
            process = sh.bash("-c", cmd, **kwargs)
        process.wait()
        exit_code = process.exit_code
    except sh.ErrorReturnCode as e:
        if fail_on_error:
            raise e
        return e.stdout, e.exit_code
    except Exception as e:
        self.LOG.error("Error while executing command {}:\n {}".format(
            cmd, str(e)))
        self.LOG.info(" ".join(output))

    # Remove trailing newlines from each line
    output = [line.rstrip() for line in output]
    if len(output) == 1 and single_result:
        output = output[0]
    return output, exit_code

def clean_all():
    bash(c="./fortran_static_library/clean_all.sh", _timeout=2)
    bash(c="./fortran_dynamic_library/clean_all.sh", _timeout=2)
    bash(c="./cpp_static_library/clean_all.sh", _timeout=2)
    bash(c="./cpp_dynamic_library/clean_all.sh", _timeout=2)
    #------------------------------------------------------
    # add script here
    #------------------------------------------------------
    print('', flush=True)

def run_interactive_shell(message: str = None):
    logger = logging.getLogger(__name__)
    message = message or 'launching a subshell. when done, exit the shell'
    logger.info(message)
    return sh.bash('-i', _fg=True)

def run_runtests(self: object, test_path) -> None:
    rip_ini_file = "{0}/etc/machinekit/machinekit.ini".format(
        self.normalized_path)
    new_ini_items = "ANNOUNCE_IPV4=0\nANNOUNCE_IPV6=0\n"
    with open(rip_ini_file, "a") as writer:
        writer.write(new_ini_items)
    if test_path is None:
        test_path = "{0}/tests".format(self.normalized_path)
    rip_environment_path = "{0}/scripts/rip-environment".format(
        self.normalized_path)
    bash_command_string = ". {0}; runtests {1}".format(
        rip_environment_path, test_path)
    sh.bash("-c",
            bash_command_string,
            _cwd=self.normalized_path,
            _out=sys.stdout.buffer,
            _err=sys.stderr.buffer)

def get_aliases():
    aliases = {}
    for alias in bash("-i", "-c", "alias", _iter=True):
        res = re.match(r"alias (.+)='(.+)'", alias)
        if res:
            aliases[res.group(1)] = res.group(2)
    return aliases

def setup_package():
    """
    Sets up docker images and host containers for running the STs.
    """
    # Pull and save each image, so we can use them inside the host containers.
    if not do_build():
        print "Using pre-built calicoctl and calico-node images"
        return
    print sh.bash("./build_node.sh").stdout
    docker.save("--output", "calico_containers/calico-node.tar", "calico/node")

    if not os.path.isfile("calico_containers/busybox.tar"):
        docker.pull("busybox:latest")
        docker.save("--output", "calico_containers/busybox.tar",
                    "busybox:latest")

    # Create the calicoctl binary here so it will be in the volume mounted on the hosts.
    print sh.bash("./create_binary.sh")

def run(self):
    """ctool commands are run directly on the stress node"""
    cmd = self.cmd(include_envs=False)
    output = sh.bash('-c', "{}".format(cmd), _cwd=self.directory,
                     _env=self.envs)
    logger.info(output)

def configure_source(self: object):
    if self.configure_src_cmd is None:
        sys.stderr.write("No configureSourceCmd specified; doing nothing\n")
        return
    self.assert_parent_dir_writable()  # May write orig.tar.gz file
    sys.stderr.write("Running configureSourceCmd '{}':\n".format(
        self.configure_src_cmd))
    try:
        sh.bash('-c',
                self.configure_src_cmd,
                _out=sys.stdout.buffer,
                _err=sys.stderr.buffer,
                _cwd=self.normalized_path)
    except sh.ErrorReturnCode as e:
        message = "Configure source command '{}' failed:\n{}".format(
            self.configure_src_cmd, e)
        raise ValueError(message)

def chargingbackend(ctx):
    print("Installing charging backend")
    name = charg.get("url").split("/")[-1][:-4]
    cd(name)
    virtualenv("virtenv")
    bash("python-dep-install.sh")
    cd('src')
    mkdir('media')
    cd('media')
    mkdir('assets')
    mkdir('bills')
    cd("..")
    cd("..")
    cd("..")

def setup_package():
    """
    Sets up docker images and host containers for running the STs.
    """
    # Pull and save each image, so we can use them inside the host containers.
    print sh.bash("./build_node.sh").stdout
    docker.save("--output", "calico-node.tar", "calico/node")

    if not os.path.isfile("busybox.tar"):
        docker.pull("busybox:latest")
        docker.save("--output", "busybox.tar", "busybox:latest")

    if not os.path.isfile("nsenter.tar"):
        docker.pull("jpetazzo/nsenter:latest")
        docker.save("--output", "nsenter.tar", "jpetazzo/nsenter:latest")

    if not os.path.isfile("etcd.tar"):
        docker.pull("quay.io/coreos/etcd:v2.0.10")
        docker.save("--output", "etcd.tar", "quay.io/coreos/etcd:v2.0.10")

    # Create the calicoctl binary here so it will be in the volume mounted on the hosts.
    print sh.bash("./create_binary.sh")
    print "Calicoctl binary created."

def cli(self, call_args, _iter=False):
    if self.docker_exe is None:
        raise click.ClickException("Cannot find docker executable in your"
                                   " PATH")
    args = call_args[:]
    args.insert(0, self.docker_exe)
    shell_args = ' '.join(args)
    self.ctx.vlog("Running : %s" % shell_args)
    if _iter:
        return bash("-c", shell_args, _iter=True)
    else:
        return call(args)

def tor_running():
    running = False
    try:
        result = sh.bash('-c', 'service tor status')
    except Exception as exc:
        log.debug(exc)
    else:
        if NOT_RUNNING in str(result):
            log.debug('service tor status: {}'.format(str(result)))
        else:
            running = True
    return running

def test_cdconfiguration(self):
    self.init()
    self.claw.generate(tests.STUB_CONFIGURATION)
    script = [
        'eval "$(claw cdconfiguration)"',
        'cdconfiguration {0}'.format(tests.STUB_CONFIGURATION),
        'echo $PWD'
    ]
    script_path = self.workdir / 'script.sh'
    script_path.write_text('\n'.join(script))
    self.assertEqual(
        self.settings.configurations / tests.STUB_CONFIGURATION,
        sh.bash(script_path).stdout.strip())

def generate_partial(work_env, from_dir, to_dir, dest_mar, channel_ids,
                     version):
    log.debug("Generating partial %s", dest_mar)
    env = work_env.env
    env["MOZ_PRODUCT_VERSION"] = version
    env["MOZ_CHANNEL_ID"] = channel_ids
    make_incremental_update = os.path.join(work_env.workdir,
                                           "make_incremental_update.sh")
    out = sh.bash(make_incremental_update, dest_mar, from_dir, to_dir,
                  _cwd=work_env.workdir, _env=env, _timeout=900,
                  _err_to_out=True)
    if out:
        log.debug(out)

def processLog(self):
    logger.info("starting to process sqldump file")
    # Bash scripts are in the 'data' folder
    cd('data')

    # get the pointer
    pointer = self.findPointer()
    logger.info("> found pointer in file")

    # Try to process this file
    output = bash('clean_sql.sh', self.infile, self.outfile, pointer)
    if int(output.strip()) == 0:
        raise Exception
    logger.info("> created out file")
    logger.info("finished processing sqldump file")

def main():
    parser = argparse.ArgumentParser(description='cross compile build for golang')
    parser.add_argument('--repo', dest='repo', required=True)
    parser.add_argument('--tag', dest='tag', default='branch:master')
    args = parser.parse_args()

    global reponame
    reponame = args.repo
    tag = args.tag

    outjson['repo'] = reponame
    outjson['tag'] = tag
    outjson['created'] = int(time.time())
    outjson['version'] = '%s%s' % (sh.go.version(), sh.gopm('-v'))
    outjson['files'] = {}

    print 'Fetching', reponame
    fetch(reponame, tag)
    rdir = pathjoin(os.getenv('GOPATH'), 'src', args.repo)
    os.chdir(rdir)  # change directory
    outjson['gobuildrc'] = open('.gobuild.yml').read() if os.path.exists('.gobuild.yml') else '# nothing'

    os_archs = [('linux', 'amd64'), ('linux', '386'), ('linux', 'arm'),
                ('windows', 'amd64'), ('windows', '386'),
                ('darwin', 'amd64'), ('darwin', '386')]
    for goos, arch in os_archs:
        print '\033[92mBuilding for %s,%s\033[0m' % (goos, arch)  # green color
        env = {}
        if goos == 'darwin':
            exportenv = str(sh.bash('-c', osxenv))
            for (key, value) in re.findall(r'export\s+([\w_]+)=([^\s]+)', exportenv):
                env[key] = value
        build(goos, arch, env=env)

    outjson['time_used'] = int(time.time()) - outjson['created']
    print 'Saving state to out.json'
    print '------------ out.json -------------'
    with open(pathjoin(OUTDIR, 'out.json'), 'w') as f:
        json.dump(outjson, f)
    print json.dumps(outjson, indent=4)

def generate_partial_mar(to_mar, from_mar, channel_id, product_version,
                         working_dir):
    """
    to_mar is the path of the newer complete .mar file
    from_mar is the path of the older complete .mar file
    """
    my_env = os.environ.copy()
    my_env['MAR'] = MAR
    my_env['MBSDIFF'] = MBSDIFF
    my_env['MOZ_CHANNEL_ID'] = channel_id
    my_env['MOZ_PRODUCT_VERSION'] = product_version
    my_env['LC_ALL'] = 'C'

    to_mar_name = os.path.basename(to_mar)
    to_mar_wd = os.path.join(working_dir, to_mar_name)
    os.mkdir(to_mar_wd)
    log.info('Unwrapping "to" MAR')
    unwrap_cmd = sh.Command(UNWRAP)
    out = unwrap_cmd(to_mar, _cwd=to_mar_wd, _env=my_env, _timeout=120,
                     _err_to_out=True)
    log.debug("Command returned:\n%s", out)

    from_mar_name = os.path.basename(from_mar)
    from_mar_wd = os.path.join(working_dir, from_mar_name)
    os.mkdir(from_mar_wd)
    log.info('Unwrapping "from" MAR')
    out = unwrap_cmd(from_mar, _cwd=from_mar_wd, _env=my_env, _timeout=120,
                     _err_to_out=True)
    log.debug("Command returned:\n%s", out)

    partial_name = '-'.join([from_mar_name, to_mar_name])
    partial_mar = os.path.join(working_dir, partial_name)
    log.info('Generating partial mar @ %s', partial_mar)
    out = sh.bash(MAKE_INCREMENTAL, partial_mar, from_mar_wd, to_mar_wd,
                  _cwd=working_dir, _env=my_env, _timeout=300,
                  _err_to_out=True)
    log.debug("Command returned: %s", out)
    log.info('Partial now available at path: %s', partial_mar)
    return partial_mar

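# A minimal sketch of the _env pattern above: sh replaces the child's entire
# environment with the given mapping rather than merging it, which is why
# snippets like this copy os.environ first to keep PATH and friends intact.
# MY_FLAG is a hypothetical variable used only for illustration.
import os
import sh

env = os.environ.copy()
env["MY_FLAG"] = "on"
print(sh.bash("-c", "echo $MY_FLAG", _env=env).strip())   # "on"
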
def _run_bash(self, bash_input):
    log.debug('Running bash command: "{}"'.format(bash_input))
    from sh import bash
    stdout = ''
    f = None
    if self.log_file:
        f = open(self.log_file, 'a+')
    try:
        for line in bash(_in=bash_input, _iter=True):
            log.debug('BashExec stdout: {}'.format(line))
            stdout += line
            if f:
                f.write(line)
    except ErrorReturnCode as e:
        log.debug('BashExec failed')
        raise BashExecuteError(e.stderr, e.exit_code)
    return stdout

def test_cdconfiguration_bash_completion(self):
    self.init()
    self.claw.generate(tests.STUB_CONFIGURATION)
    cmd = ['cdconfiguration', "''"]
    partial_word = cmd[-1]
    cmdline = ' '.join(cmd)
    lines = [
        'eval "$(claw cdconfiguration)"',
        'export COMP_LINE="{}"'.format(cmdline),
        'export COMP_WORDS=({})'.format(cmdline),
        'export COMP_CWORD={}'.format(cmd.index(partial_word)),
        'export COMP_POINT={}'.format(len(cmdline)),
        '__claw_cdconfiguration_completion',
        'echo ${COMPREPLY[*]}'
    ]
    script_path = self.workdir / 'completions.sh'
    script_path.write_text('\n'.join(lines))
    self.assertEqual(
        set([tests.STUB_CONFIGURATION, '_']),
        set(sh.bash(script_path).stdout.strip().split(' ')))

def assert_completion(self, expected, args=None, filter_non_options=False):
    project_dir = os.path.dirname(os.path.dirname(clash.__file__))
    this_file = os.path.basename(__file__)
    if this_file.endswith('.pyc'):
        this_file = this_file[:-1]
    this_dir = os.path.dirname(__file__)
    args = args or []
    args += ["''"]
    cmd = [this_file] + list(args)
    partial_word = cmd[-1]
    cmdline = ' '.join(cmd)
    lines = [
        'set -e',
        'export PATH={}:$PATH'.format(this_dir),
        'export PYTHONPATH={}:$PYTHONPATH'.format(project_dir),
        'eval "$(register-python-argcomplete {})"'.format(this_file),
        'export COMP_LINE="{}"'.format(cmdline),
        'export COMP_WORDS=({})'.format(cmdline),
        'export COMP_CWORD={}'.format(cmd.index(partial_word)),
        'export COMP_POINT={}'.format(len(cmdline)),
        '_python_argcomplete {}'.format(this_file),
        'echo ${COMPREPLY[*]}'
    ]
    script_path = self.workdir / 'completions.sh'
    script_path.write_text('\n'.join(lines))
    try:
        p = sh.bash(script_path)
    except sh.ErrorReturnCode as e:
        self.fail('out: {}, err: {}'.format(e.stdout, e.stderr))
    completions = p.stdout.strip().split(' ')
    if filter_non_options:
        completions = [c for c in completions if c.startswith('-')]
    self.assertEqual(
        len(expected), len(completions),
        'expected: {}, actual: {}'.format(expected, completions))
    for expected_completion in expected:
        self.assertIn(expected_completion, completions)

def assert_completion(self, expected, args=None, filter_non_options=False):
    args = args or []
    args += ["''"]
    cmd = ["claw"] + list(args)
    partial_word = cmd[-1]
    cmdline = " ".join(cmd)
    lines = [
        "set -e",
        'eval "$(register-python-argcomplete claw)"',
        'export COMP_LINE="{}"'.format(cmdline),
        "export COMP_WORDS=({})".format(cmdline),
        "export COMP_CWORD={}".format(cmd.index(partial_word)),
        "export COMP_POINT={}".format(len(cmdline)),
        "_python_argcomplete {}".format(cmd[0]),
        "echo ${COMPREPLY[*]}",
    ]
    script_path = self.workdir / "completions.sh"
    script_path.write_text("\n".join(lines))
    p = sh.bash(script_path)
    completions = p.stdout.strip().split(" ")
    if filter_non_options:
        completions = [c for c in completions if c.startswith("-")]
    self.assertEqual(set(expected), set(completions))

def parse_pkgbuild(pkgbuild):
    json_str = bash(SourceRepo._parse_script, pkgbuild).stdout.decode('utf-8')
    return json.loads(json_str)

        return ping.do_one('pellet.cave.kevinross.name') > 0
    except:
        with open('/tmp/pings', 'a') as o:
            traceback.print_exc(file=o)
            o.write('\n')
        return False

# try for 3 seconds
i = 0
while not pinger() and i < 6:
    time.sleep(0.5)
    i += 1

if vmdevel:
    tr('ads join - skipping (devel mode)')
else:
    sh.bash(vms('join_domain.sh'))
    tr('ads join - done')

###########################################################################
################                                      #####################
################                 PAM                  #####################
################                                      #####################
###########################################################################
if not vmreload:
    tr('pam')
    for i in glob.glob(vms('pam.d/*')):
        shutil.copy(i, '/etc/pam.d/')

def gen_mace_version(codegen_path="mace/codegen"):
    sh.mkdir("-p", "%s/version" % codegen_path)
    sh.bash("mace/tools/git/gen_version_source.sh",
            "%s/version/version.cc" % codegen_path)

def deploy(job_name, server_list, app_path, source_address, project_id, auth_info):
    cmd = ""
    p1 = Delivery.objects.get(job_name_id=project_id)
    job_workspace = "/var/opt/adminset/workspace/{0}/".format(job_name)
    log_path = job_workspace + 'logs/'
    log_name = 'deploy-' + str(p1.deploy_num) + ".log"
    with open(log_path + log_name, 'wb+') as f:
        f.writelines("<h4>Deploying project {0} for the {1}th time</h4>".format(
            job_name, p1.deploy_num))
    if not app_path.endswith("/"):
        app_path += "/"

    # clean build code
    p1.bar_data = 20
    p1.save()
    sleep(1)
    if p1.build_clean or p1.version:
        try:
            shutil.rmtree("{0}code/".format(job_workspace))
        except Exception as msg:
            print("code dir does not exist, skipping build clean")

    if p1.job_name.source_type == "git":
        cmd = git_clone(job_workspace, auth_info, source_address, p1)
    if p1.job_name.source_type == "svn":
        cmd = svn_clone(job_workspace, auth_info, source_address, p1)
    data = cmd_exec(cmd)
    p1.bar_data = 30
    p1.save()
    with open(log_path + log_name, 'ab+') as f:
        f.writelines(cmd)
        f.writelines(data)

    if p1.shell:
        deploy_shell = job_workspace + 'scripts/deploy-' + str(p1.deploy_num) + ".sh"
        deploy_shell_name = 'deploy-' + str(p1.deploy_num) + ".sh"
        with open(deploy_shell, 'wb+') as f:
            f.writelines(p1.shell)
        cmd = "/usr/bin/dos2unix {}".format(deploy_shell)
        data = cmd_exec(cmd)

    for server in server_list:
        cmd = "rsync --progress -raz --delete --exclude '.git' --exclude '.svn' {0}/code/ {1}:{2}".format(
            job_workspace, server, app_path)
        data = cmd_exec(cmd)
        with open(log_path + log_name, 'ab+') as f:
            f.writelines(cmd)
            f.writelines(data)
        if p1.shell and not p1.shell_position:
            cmd = "scp {0} {1}:/tmp".format(deploy_shell, server)
            data = cmd_exec(cmd)
            with open(log_path + log_name, 'ab+') as f:
                f.writelines(data)
            cmd = "ssh {1} '/usr/bin/bash /tmp/{0}'".format(deploy_shell_name, server)
            data = cmd_exec(cmd)
            with open(log_path + log_name, 'ab+') as f:
                f.writelines(data)
        if p1.bar_data <= 125:
            p1.bar_data += 5
            p1.save()

    if p1.shell and p1.shell_position:
        # cmd = "/usr/bin/bash {0}'".format(deploy_shell)
        data = sh.bash(deploy_shell)
        with open(log_path + log_name, 'ab+') as f:
            f.writelines(data)

    p1.bar_data = 130
    p1.status = False
    p1.save()
    with open(log_path + log_name, 'ab+') as f:
        f.writelines("<h4>Project {0} has been deployed for the {1}th time</h4>".format(
            p1.job_name, p1.deploy_num))
    return data
