def iterate_variables(plot, vars):
    """
    Iterate over the values of the specified variables (vars dict) and
    generate data for each step.

    plot: Name/Number of the plot this data is associated with.
    """
    # All value lists are assumed to share one length; read it off the first.
    n_steps = len(next(iter(vars.values())))
    for step in range(n_steps):
        # Patch each variable's current value into the constants file.
        for name in vars:
            current = vars[name][step]
            print("Calculating for {var} = {value} ...".format(
                var=name, value=current))
            constants('s/{var}.*$/{var} {value}/'.format(var=name, value=current))
        print()
        # Tag the output with the plot name and a per-step suffix.
        constants('s/PLOT.*$/PLOT "{plot}-{suffix}"/'.format(
            plot=plot, suffix=SUFFICES[step]))
        sh.make('data', _out=sys.stdout, _err=sys.stderr)
        ## Carry out some cleaning on the created data
        #sh.sed('-i', 'data/value-{plot}-{suffix}.json'.format(plot=PLOT, suffix=SUFFICES[i]), '-e', 's/inf/"inf"/') # Strings need to be quoted in json
    print("Done")
def main():
    """Compile the frontend, run its unit tests, and exit 0 on success / 1 on failure."""
    sh.mkdir("-p", "log")
    # Kill anything listening on 4040; exit code 1 (nothing found) is fine too.
    sh.fuser("-k", "-n", "tcp", "4040", _ok_code=[0, 1])
    printc("COMPILE FRONTEND", color="blue")
    with chdir("frontend"):
        sh.make("compile", _out="../log/compile.txt")
    printc("UNIT TESTS", color="blue")
    fail = False
    try:
        with chdir("frontend"):
            sh.npm.test(_out="../log/unit_tests.txt")
        printc("PASS UNIT TESTS", color="green")
    except Exception:
        # Was a bare `except:`; narrowed so Ctrl-C / SystemExit still propagate.
        with open("log/unit_tests.txt", "r") as fd:
            print(fd.read())
        printc("FAIL UNIT TESTS", color="red")
        fail = True
    if fail:
        printc("FAIL", color="red")
        sys.exit(1)
    else:
        printc("SUCCESS", color="green")
        sys.exit(0)
def test_pii_annotations(cookies, extra_context):
    """
    Test that the pii_check make target works correctly.

    Bakes the cookiecutter, runs `make pii_check`, and — when the check
    fails — verifies the reported number of uncovered models matches the
    number of models the cookiecutter was asked to generate.
    """
    with bake_in_temp_dir(cookies, extra_context=extra_context) as result:
        with inside_dir(six.text_type(result.project)):
            try:
                sh.make(
                    'upgrade'
                )  # first run make upgrade to populate requirements/test.txt
                sh.make('pii_check')
            except sh.ErrorReturnCode as exc:
                # uncovered models are expected IFF we generated any models via the cookiecutter.
                expected_uncovered_models = 0
                if 'models' in extra_context:
                    # count the number of (unannotated) models the cookiecutter should generate.
                    expected_uncovered_models = len(
                        extra_context['models'].split(','))
                expected_message = 'Coverage found {} uncovered models:'.format(
                    expected_uncovered_models)
                if expected_message not in six.text_type(exc.stdout):
                    # First, print the stdout/stderr attrs, otherwise sh will truncate the output
                    # guaranteeing that all we see is useless tox setup.
                    print(exc.stdout)
                    print(exc.stderr)
                    pytest.fail(six.text_type(exc))
def runSims(sims, make=True, res_file_name="results"): res_file_name = "{}_{}.txt".format(res_file_name, platform.node()) # quit is results already exist, don't want to clobber existing results if os.path.isfile(res_file_name): raise Exception( "Results file {} already exists, please delete and rerun".format( res_file_name)) # make all non-Ode exes if make: logging.debug("Running make -B all") sh.make('-B', 'all') global res_file res_file = open(res_file_name, 'w') print("Running set of simulations at {}".format( strftime("%H:%M:%S on %d/%m/%Y", gmtime())), file=res_file) print("Benchmarks run on {} under OS {}".format(platform.node(), platform.release()), file=res_file) print("", file=res_file) for sim in sims: res_file.flush() sim.execute() # run a sim (<filename.exe>) # sh.Command('HodHux_C.exe') # analyse results (assume output is <filename>.bin) # run perf benchmark multiple times res_file.close() logging.debug("Done")
def options_upgraded(options_baked):  # pylint: disable=redefined-outer-name
    """
    Fixture: bake the cookiecutter, then bring its requirements up to date.
    """
    # Populate requirements/test.txt before handing control to the test.
    sh.make('upgrade')
    yield options_baked
def compile( self, source_dir, build_dir, install_dir ):
    """Build and install this package (Ogitor) with CMake.

    Downloads the media/projects archives into RunPath on first run, then
    configures out-of-source and builds: Xcode on macOS, make elsewhere.
    NOTE(review): relies on process-wide sh.cd(), so it is not safe to
    interleave with other directory-changing code — confirm callers are serial.
    """
    package_source_dir = os.path.join( source_dir, self.dirname )
    assert( os.path.exists( package_source_dir ) )
    package_build_dir = os.path.join( build_dir, self.dirname )
    runpath_dir = os.path.join( package_source_dir, 'RunPath' )
    # Fetch runtime assets only once; wget/unzip are skipped if the zips exist.
    if ( not os.path.exists( os.path.join( runpath_dir, 'media.zip' ) ) ):
        sh.cd( runpath_dir )
        sh.wget( '--no-check-certificate', 'https://bitbucket.org/jacmoe/ogitor/downloads/media.zip' )
        sh.unzip( 'media.zip' )
    if ( not os.path.exists( os.path.join( runpath_dir, 'projects.zip' ) ) ):
        sh.cd( runpath_dir )
        sh.wget( '--no-check-certificate', 'https://bitbucket.org/jacmoe/ogitor/downloads/projects.zip' )
        sh.unzip( 'projects.zip' )
    sh.mkdir( '-p', package_build_dir )
    sh.cd( package_build_dir )
    if ( platform.system() == 'Darwin' ):
        sh.cmake( '-G', 'Xcode',
                  '-D', 'CMAKE_INSTALL_PREFIX=%s' % install_dir,
                  '-D', 'CMAKE_MODULE_PATH=%s' % os.path.join( install_dir, 'CMake' ),
                  package_source_dir, _out = sys.stdout )
        sh.xcodebuild( '-configuration', 'Release', _out = sys.stdout )
    else:
        sh.cmake( '-D', 'CMAKE_INSTALL_PREFIX=%s' % install_dir,
                  '-D', 'CMAKE_MODULE_PATH=%s' % os.path.join( install_dir, 'lib/OGRE/cmake' ),
                  package_source_dir, _out = sys.stdout )
        sh.make( '-j4', 'VERBOSE=1', _out = sys.stdout )
        sh.make.install( _out = sys.stdout )
def build_opencv():
    """Build and install OpenCV (with contrib modules) for the active pyenv Python."""
    sh.pip.install("numpy")
    # BUG FIX: the branch argument was missing its closing quote, which made
    # this function a SyntaxError.
    clone_if_not_exists("opencv", "https://github.com/PolarNick239/opencv.git", branch="stable_3.0.0")
    clone_if_not_exists("opencv_contrib", "https://github.com/PolarNick239/opencv_contrib.git", branch="stable_3.0.0")
    sh.rm("-rf", "build")
    sh.mkdir("build")
    sh.cd("build")
    # pyenv's python lives at <prefix>/bin/python; derive the prefix from it.
    python_path = pathlib.Path(sh.pyenv.which("python").stdout.decode()).parent.parent
    version = "{}.{}".format(sys.version_info.major, sys.version_info.minor)
    sh.cmake(
        "..",
        "-DCMAKE_BUILD_TYPE=RELEASE",
        "-DCMAKE_INSTALL_PREFIX={}/usr/local".format(python_path),
        "-DWITH_CUDA=OFF",
        "-DWITH_FFMPEG=OFF",
        "-DINSTALL_C_EXAMPLES=OFF",
        "-DBUILD_opencv_legacy=OFF",
        "-DBUILD_NEW_PYTHON_SUPPORT=ON",
        "-DBUILD_opencv_python3=ON",
        # NOTE(review): this path names 3.4.1 while the clone above checks out
        # stable_3.0.0 into ./opencv_contrib — confirm which modules dir is meant.
        "-DOPENCV_EXTRA_MODULES_PATH=~/opencv_contrib-3.4.1/modules",
        "-DBUILD_EXAMPLES=ON",
        "-DPYTHON_EXECUTABLE={}/bin/python".format(python_path),
        "-DPYTHON3_LIBRARY={}/lib/libpython{}m.so".format(python_path, version),
        "-DPYTHON3_PACKAGES_PATH={}/lib/python{}/site-packages/".format(python_path, version),
        "-DPYTHON3_NUMPY_INCLUDE_DIRS={}/lib/python{}/site-packages/numpy/core/include".format(python_path, version),
        "-DPYTHON_INCLUDE_DIR={}/include/python{}m".format(python_path, version),
        _out=sys.stdout,
    )
    sh.make("-j4", _out=sys.stdout)
    sh.make.install(_out=sys.stdout)
def main():
    """Run configen over every JSON schema in the test data dir and smoke-test the output.

    For each schema: generate C++ header/source, pick a test-specific main.cc
    (or a default one), build with make, and run the resulting binary.
    """
    filename = 'my_config'
    include_path = 'inc'
    test_path = '/home/leha/personal/configen/configen/test/data'
    test_files = glob(os.path.join(test_path, '*.json'))
    #test_files = [os.path.join(test_path, 'test_schema.json')]
    # copy header with helper test functions
    sh.cp(os.path.join(test_path, 'serialization_tests.h'), '.')
    # iterate over all files in test directory
    for test_filename in test_files:
        test_name = os.path.basename(test_filename).split('.')[0]
        print('Test file: ' + test_name)
        string_of_json = open(test_filename, 'r').read()
        code = cg.convert_json(string_of_json, language='c++',
                               namespace=['config'], filename=filename,
                               include_path=include_path);
        # write header, source and main
        with open(os.path.join(include_path, filename + '.h'), 'w') as header:
            header.write(code['header'])
        with open(os.path.join(filename + '.cc'), 'w') as src:
            src.write(code['source'])
        # Prefer a per-test main.cc when the test data provides one.
        main_filename = os.path.join(test_path, test_name + '.cc')
        if os.path.exists(main_filename):
            sh.cp(main_filename, 'main.cc')
        else:
            print('Default main')
            with open('main.cc', 'w') as main_:
                main_.write('\n'.join(DEFAULT_MAIN))
        sh.make()
        # check c code
        run_main = sh.Command('./configen_test')
        check_output(run_main())
def build_latex_pdf(output_filename="cscutcher_cv_latex.pdf"):
    """Build latex style pdf.

    Renders the mako LaTeX template against the resume data in a temporary
    directory, runs make there, and copies the resulting cv.pdf to
    output_filename.

    FIX: the original opened several files via Path.open() without ever
    closing them; all file access now uses closed-on-return helpers.
    """
    resume = nx6_cv.resume.get_resume()
    with tempfile.TemporaryDirectory(prefix="nx6_cv_latex_") as tmpdir:
        tmpdir = Path(tmpdir)
        # Copy latex sources
        for path in pkg_resources.resource_listdir(__name__, "latex"):
            (tmpdir / path).write_bytes(
                pkg_resources.resource_string(
                    __name__, os.path.join("latex", path)))
        template_raw = (tmpdir / 'cv.tex.mako').read_text()
        # Stop mako nuking '//' in tex
        template_raw = re.sub(
            r"\\$", r"\\\\" + "\n", template_raw, flags=re.MULTILINE)
        # Render and write template
        template = mako.template.Template(template_raw)
        tex = template.render_unicode(cv=resume)
        (tmpdir / 'cv.tex').write_text(tex)
        # Add gitrevision tex
        (tmpdir / 'gitrevision.tex').write_text("NOTSET")
        sh.make("-C", str(tmpdir.resolve()))
        pdf_bytes = (tmpdir / 'cv.pdf').read_bytes()
    with Path(output_filename).open('wb') as output_stream:
        output_stream.write(pdf_bytes)
def test_upgrade(options_baked):
    """Make sure the upgrade target works"""
    try:
        # Sanity-check the Makefile the cookiecutter generated.
        sh.make('upgrade')
    except sh.ErrorReturnCode as exc:
        pytest.fail(str(exc))
def compile( self, source_dir, build_dir, install_dir ):
    """Build and install this package (meshy) with CMake.

    Runs the project's copyresources.sh first and hand-copies the files the
    install target misses, then configures out-of-source and builds:
    Xcode on macOS, make elsewhere.
    """
    package_source_dir = os.path.join( source_dir, self.dirname )
    assert( os.path.exists( package_source_dir ) )
    package_build_dir = os.path.join( build_dir, self.dirname )
    sh.cd( os.path.join( package_source_dir, 'scripts/Resources' ) )
    sh.sh( './copyresources.sh' )
    # the install target doesn't copy the stuff that copyresources.sh puts in place
    sh.cp( '-v', os.path.join( package_source_dir, 'bin/Release/Readme.txt' ),
           os.path.join( install_dir, 'Readme.meshy.txt' ) )
    sh.cp( '-v', '-r', os.path.join( package_source_dir, 'bin/Release_Linux/Resources/' ),
           install_dir )
    sh.mkdir( '-p', package_build_dir )
    sh.cd( package_build_dir )
    if ( platform.system() == 'Darwin' ):
        sh.cmake( '-G', 'Xcode',
                  '-D', 'CMAKE_INSTALL_PREFIX=%s' % install_dir,
                  '-D', 'CMAKE_MODULE_PATH=%s' % os.path.join( install_dir, 'CMake' ),
                  package_source_dir, _out = sys.stdout )
        sh.xcodebuild( '-configuration', 'Release', _out = sys.stdout )
    else:
        sh.cmake( '-D', 'CMAKE_INSTALL_PREFIX=%s' % install_dir,
                  '-D', 'CMAKE_MODULE_PATH=%s' % os.path.join( install_dir, 'lib/OGRE/cmake' ),
                  package_source_dir, _out = sys.stdout )
        sh.make( '-j4', 'VERBOSE=1', _out = sys.stdout )
        sh.make.install( _out = sys.stdout )
def _extract(self, file: str, target_dir: str) -> None:
    """Extract a yaffs2 image `file` into `target_dir` using unyaffs2.

    Lazily builds the yaffs2utils tree on first use and caches the
    resulting command on self.unyaffs2.
    """
    if not self.unyaffs2:
        # Build the unyaffs2 binary in its source tree, then resolve it
        # from that directory (second arg to sh.Command is a search path).
        sh.make(_cwd=YAFFS2_SRC_DIR)
        self.unyaffs2 = sh.Command('unyaffs2', [YAFFS2_SRC_DIR])
    # --yaffs-ecclayout: https://github.com/djeclipser/yaffs2utils/issues/35#issuecomment-153830017
    self.unyaffs2(file, target_dir, yaffs_ecclayout=True)
def generate_template():
    """Regenerate the gettext template via cmake/make and move it into the po dir.

    Builds in `build_dir`, then moves the produced .pot file into `po_dir`.
    Calls cleanup() on every exit path. Relies on module-level build_dir,
    po_dir, and the sh-imported cd/cmake/make/mv helpers.
    """
    template_file = ""
    if not isdir(build_dir):
        mkdir(build_dir)
    if isdir(build_dir):
        template_file = build_dir + "/dekko.dekkoproject.pot"
        print("TemplateFile: " + template_file)
        cd(build_dir)
        print("Running cmake to generate updated template")
        cmake('..')
        print("Running make")
        make("-j2")
    if isfile(template_file):
        if isdir(po_dir):
            print("Moving template to po dir: " + po_dir)
            mv(template_file, po_dir)
        else:
            print("Couldn't find po dir: " + po_dir)
            cleanup()
            return
    else:
        cleanup()
        print("No template found for: " + template_file)
        return
    print("Cleaning up")
    cleanup()
    print("YeeHaa!")
    print("All done, you need to commit & push this to bitbucket now :-)")
    print("NOTE: this would also be a good time to sync with launchpad, run")
    print("  $ python3 launchpad_sync.py")
def build_opencv():
    """Build and install OpenCV (with contrib modules) for the active pyenv Python."""
    sh.pip.install("numpy")
    # BUG FIX: the branch argument was missing its closing quote, which made
    # this function a SyntaxError.
    clone_if_not_exists("opencv", "https://github.com/PolarNick239/opencv.git", branch="stable_3.0.0")
    clone_if_not_exists("opencv_contrib", "https://github.com/PolarNick239/opencv_contrib.git", branch="stable_3.0.0")
    sh.rm("-rf", "build")
    sh.mkdir("build")
    sh.cd("build")
    # pyenv's python lives at <prefix>/bin/python; derive the prefix from it.
    python_path = pathlib.Path(sh.pyenv.which("python").stdout.decode()).parent.parent
    version = "{}.{}".format(sys.version_info.major, sys.version_info.minor)
    sh.cmake(
        "..",
        "-DCMAKE_BUILD_TYPE=RELEASE",
        "-DCMAKE_INSTALL_PREFIX={}/usr/local".format(python_path),
        "-DWITH_CUDA=OFF",
        "-DWITH_FFMPEG=OFF",
        "-DINSTALL_C_EXAMPLES=OFF",
        "-DBUILD_opencv_legacy=OFF",
        "-DBUILD_NEW_PYTHON_SUPPORT=ON",
        "-DBUILD_opencv_python3=ON",
        # NOTE(review): this path names 4.3.2 while the clone above checks out
        # stable_3.0.0 into ./opencv_contrib — confirm which modules dir is meant.
        "-DOPENCV_EXTRA_MODULES_PATH=~/opencv_contrib-4.3.2/modules",
        "-DBUILD_EXAMPLES=ON",
        "-DPYTHON_EXECUTABLE={}/bin/python".format(python_path),
        "-DPYTHON3_LIBRARY={}/lib/libpython{}m.so".format(python_path, version),
        "-DPYTHON3_PACKAGES_PATH={}/lib/python{}/site-packages/".format(python_path, version),
        "-DPYTHON3_NUMPY_INCLUDE_DIRS={}/lib/python{}/site-packages/numpy/core/include".format(python_path, version),
        "-DPYTHON_INCLUDE_DIR={}/include/python{}m".format(python_path, version),
        _out=sys.stdout,
    )
    sh.make("-j4", _out=sys.stdout)
    sh.make.install(_out=sys.stdout)
def main():
    """Compile the frontend, run its unit tests, and exit 0 on success / 1 on failure."""
    sh.mkdir("-p", "log")
    # Kill anything listening on 4040; exit code 1 (nothing found) is fine too.
    sh.fuser("-k", "-n", "tcp", "4040", _ok_code=[0, 1])
    printc("COMPILE FRONTEND", color="blue")
    with chdir("frontend"):
        sh.make("compile", _out="../log/compile.txt")
    printc("UNIT TESTS", color="blue")
    fail = False
    try:
        with chdir("frontend"):
            sh.npm.test(_out="../log/unit_tests.txt")
        printc("PASS UNIT TESTS", color="green")
    except Exception:
        # Was a bare `except:`; narrowed so Ctrl-C / SystemExit still propagate.
        with open("log/unit_tests.txt", "r") as fd:
            print(fd.read())
        printc("FAIL UNIT TESTS", color="red")
        fail = True
    if fail:
        printc("FAIL", color="red")
        sys.exit(1)
    else:
        printc("SUCCESS", color="green")
        sys.exit(0)
def BuildYcmdLibs(cmake_args):
    """Configure and build the ycmd support libraries in a throwaway temp dir.

    Builds 'ycm_support_libs' normally, or 'ycm_core_tests' (and runs them)
    when YCM_TESTRUN is set. The temp build dir is always removed.
    NOTE(review): uses `unicode`, so this block is Python 2 only — confirm
    before porting the file to Python 3.
    """
    build_dir = unicode(sh.mktemp('-d', '-t', 'ycm_build.XXXXXX')).strip()
    try:
        full_cmake_args = ['-G', 'Unix Makefiles']
        if OnMac():
            full_cmake_args.extend(CustomPythonCmakeArgs())
        full_cmake_args.extend(cmake_args)
        full_cmake_args.append(p.join(DIR_OF_THIS_SCRIPT, 'cpp'))
        sh.cd(build_dir)
        sh.cmake(*full_cmake_args, _out=sys.stdout)
        # Build the test target instead when running under a test environment.
        build_target = ('ycm_support_libs'
                        if 'YCM_TESTRUN' not in os.environ else 'ycm_core_tests')
        sh.make('-j', NumCores(), build_target, _out=sys.stdout, _err=sys.stderr)
        if 'YCM_TESTRUN' in os.environ:
            RunYcmdTests(build_dir)
    finally:
        # Always restore the working dir and remove the temp build tree.
        sh.cd(DIR_OF_THIS_SCRIPT)
        sh.rm('-rf', build_dir)
def check_quality(result):
    """Run quality tests on the given generated output.

    Lints every generated .py file (pylint, pycodestyle, pydocstyle, isort)
    and then checks the Makefile and docs (make help, doc8). Any linter
    failure fails the pytest run.
    """
    # pylintrc lives at the project root; it was being recomputed on every
    # os.walk iteration even though it never changes — compute it once.
    pylintrc = str(result.project.join('pylintrc'))
    for dirpath, _dirnames, filenames in os.walk(str(result.project)):
        for filename in filenames:
            name = os.path.join(dirpath, filename)
            if not name.endswith('.py'):
                continue
            try:
                sh.pylint(name, rcfile=pylintrc)
                sh.pylint(name, py3k=True)
                sh.pycodestyle(name)
                # setup.py conventionally has no docstrings worth enforcing.
                if filename != 'setup.py':
                    sh.pydocstyle(name)
                sh.isort(name, check_only=True)
            except sh.ErrorReturnCode as exc:
                pytest.fail(str(exc))
    tox_ini = result.project.join('tox.ini')
    docs_build_dir = result.project.join('docs/_build')
    try:
        # Sanity check the generated Makefile
        sh.make('help')
        # quality check docs
        sh.doc8(result.project.join("README.rst"),
                ignore_path=docs_build_dir, config=tox_ini)
        sh.doc8(result.project.join("docs"),
                ignore_path=docs_build_dir, config=tox_ini)
    except sh.ErrorReturnCode as exc:
        pytest.fail(str(exc))
def compile(self, source_dir, build_dir, install_dir):
    """Configure this package with CMake out-of-source, then build and install it."""
    src = os.path.join(source_dir, self.dirname)
    assert (os.path.exists(src))
    out = os.path.join(build_dir, self.dirname)
    sh.mkdir('-p', out)
    sh.cd(out)
    if platform.system() == 'Darwin':
        # Xcode generator: the 'install' scheme builds and installs in one go.
        sh.cmake('-G', 'Xcode',
                 '-D', 'CMAKE_INSTALL_PREFIX=%s' % install_dir,
                 src, _out=sys.stdout)
        sh.xcodebuild('-scheme', 'install', '-configuration', 'Release',
                      _out=sys.stdout)
    else:
        # Makefile generator: configure, then verbose parallel build + install.
        sh.cmake('-D', 'CMAKE_INSTALL_PREFIX=%s' % install_dir,
                 src, _out=sys.stdout)
        sh.make('-j4', 'VERBOSE=1', _out=sys.stdout)
        sh.make.install(_out=sys.stdout)
def recompile(pset):
    """Overwrite params.h and recompiles the speedbenchmark code"""
    # Serialize the parameter set into the shared header first.
    with open("../src/params.h", "w") as header:
        header.write(str(pset))
    # Clean rebuild so the new parameters definitely take effect.
    sh.cd("speedbenchmark")
    sh.make("clean")
    sh.make()
    sh.cd("../")
def xcompile_openssl(build_context, arch, output, path):
    """Cross-compile OpenSSL for an Android arch and install it into `output`."""
    # XXX: only android
    assert isinstance(build_context, AndroidBuildContext)
    build_env = build_context.get_build_env(arch, output)
    with sh.pushd(path):
        configure = sh.Command('./Configure')
        configure('android-%s' % arch, '--prefix=%s' % output,
                  _fg=True, _env=build_env)
        sh.make(_fg=True, _env=build_env)
        # install_sw skips the docs, installing only libs/headers/binaries.
        sh.make('install_sw', 'VERBOSE=1', _fg=True, _env=build_env)
def install_cmake( build_dir, prefix ):
    """Download, build and install CMake 2.8.11.2.

    NOTE(review): the `prefix` parameter is ignored in favour of the global
    PREFIX — confirm whether that is intentional.
    """
    cmake_archive='cmake-2.8.11.2'
    sh.cd( build_dir )
    sh.wget( '-nc', 'http://www.cmake.org/files/v2.8/%s.tar.gz' % cmake_archive )
    sh.tar( 'xvzf', '%s.tar.gz' % cmake_archive )
    sh.cd( cmake_archive )
    # BUG FIX: with shell=True and a *list*, POSIX passes every element after
    # the first to the shell itself, so '--prefix PREFIX' was silently
    # dropped. Run the argument list directly (shell=False).
    subprocess.check_call( [ './configure', '--prefix', PREFIX ] )
    sh.make( '-j4' )
    sh.make.install()
def install_cmake(build_dir, prefix):
    """Download, build and install CMake 2.8.11.2.

    NOTE(review): the `prefix` parameter is ignored in favour of the global
    PREFIX — confirm whether that is intentional.
    """
    cmake_archive = 'cmake-2.8.11.2'
    sh.cd(build_dir)
    sh.wget('-nc', 'http://www.cmake.org/files/v2.8/%s.tar.gz' % cmake_archive)
    sh.tar('xvzf', '%s.tar.gz' % cmake_archive)
    sh.cd(cmake_archive)
    # BUG FIX: with shell=True and a *list*, POSIX passes every element after
    # the first to the shell itself, so '--prefix PREFIX' was silently
    # dropped. Run the argument list directly (shell=False).
    subprocess.check_call(['./configure', '--prefix', PREFIX])
    sh.make('-j4')
    sh.make.install()
def upgrade_git( build_dir, prefix ):
    """Clone (or update) the git sources in build_dir, then build and install under prefix."""
    sh.cd( build_dir )
    if not os.path.exists( 'git' ):
        # First run: fetch a fresh clone.
        sh.git.clone( 'https://github.com/git/git' )
        sh.cd( 'git' )
    else:
        # Subsequent runs: just fast-forward the existing checkout.
        sh.cd( 'git' )
        sh.git.pull()
    prefix_arg = 'prefix=%s' % prefix
    sh.make( prefix_arg, '-j4' )
    sh.make.install( prefix_arg )
def upgrade_git(build_dir, prefix):
    """Clone (or update) the git sources in build_dir, then build and install under prefix."""
    sh.cd(build_dir)
    # Reuse an existing checkout if present, otherwise clone fresh.
    if (os.path.exists('git')):
        sh.cd('git')
        sh.git.pull()
    else:
        sh.git.clone('https://github.com/git/git')
        sh.cd('git')
    # git's build wants prefix passed to both make and make install.
    sh.make('prefix=%s' % prefix, '-j4')
    sh.make.install('prefix=%s' % prefix)
def build(cwd, site_dir): cfg = config.load_config() # sanity check - the version dirs exist as named for version in cfg['extra']['versions']: if not 'separate' in version or not version['separate']: d = os.path.join('versions', version['dir']) print('Verifying dir %s' % (d)) if not os.path.isdir(d): print("The directory %s does not exist" % (d)) return # sanity check - dependent_repos exist in '..' for repo in dependent_repos: d = os.path.join(cwd, '..', repo) print('Verifying repo dependency in %s' % (d)) if not os.path.isdir(d): print("The directory %s does not exist" % (d)) return # sanity check - only one latest latest = False for version in cfg['extra']['versions']: if not latest and 'latest' in version and version['latest']: print('Latest is %s' % (version['dir'])) latest = True elif latest and 'latest' in version and version['latest']: print('ERROR: More than one version is latest.') print('Only one version can be latest: True.') print('Check mkdocs.yml.') return print("Building site pages") sh.rm('-rf', site_dir) sh.mkdocs('build', '--clean', '--site-dir', site_dir) for version in cfg['extra']['versions']: print("Building doc pages for: %s" % (version['dir'])) if not 'separate' in version or not version['separate']: sh.mkdocs('build', '--site-dir', os.path.join(site_dir, version['dir']), _cwd=os.path.join("versions", version['dir'])) else: repo_dir = os.path.join(cwd, '..', 'mynewt-documentation') if version['dir'] != 'master': repo_dir = os.path.join(repo_dir, 'versions', version['dir'], 'mynewt-documentation') sh.make('clean', _cwd=repo_dir) sh.make('docs', _cwd=repo_dir) sh.mv(os.path.join(repo_dir, '_build', 'html'), os.path.join(site_dir, version['dir'])) if 'latest' in version and version['latest']: sh.ln('-s', version['dir'], 'latest', _cwd=site_dir)
def bump_ocp_releases(username, password, dry_run, bypass_iso_download):
    """Sync assisted-service's release/OS image JSONs with upstream and open a PR.

    Clones assisted-service into a temp dir, updates the default release/os
    images JSONs, regenerates config/bundle, and either logs the diff
    (dry_run) or opens a GitHub PR — unless one with the same title exists.
    """
    if dry_run:
        logger.info("On dry-run mode")
    with tempfile.TemporaryDirectory() as tmp_dir:
        tmp_dir = pathlib.Path(tmp_dir)
        clone_dir = clone_assisted_service(username, password, tmp_dir)
        default_release_images_json = request_json_file(
            ASSISTED_SERVICE_MASTER_DEFAULT_RELEASE_IMAGES_JSON_URL)
        default_os_images_json = request_json_file(
            ASSISTED_SERVICE_MASTER_DEFAULT_OS_IMAGES_JSON_URL)
        updates_made = set()
        updates_made_str = set()
        update_release_images_json(default_release_images_json, updates_made,
                                   updates_made_str, clone_dir)
        update_os_images_json(default_os_images_json, updates_made,
                              updates_made_str, clone_dir, bypass_iso_download)
        if not updates_made:
            logger.info("No updates are needed, all is up to date!")
            return
        try:
            sh.make("generate-configuration", _cwd=clone_dir)
            # Ensure GOPATH is using current user HOME folder
            os.environ['GOPATH'] = os.environ['HOME'] + "/go"
            sh.make("generate-bundle", _cwd=clone_dir)
        except sh.ErrorReturnCode as e:
            raise RuntimeError(
                f"Failed {e.full_cmd} with stderr: {e.stderr}") from e
        if dry_run:
            logger.info(f"Bump OCP versions: {updates_made_str}")
            logger.info(f"GitHub PR description:\n{get_pr_body(updates_made)}")
            git_diff = sh.git.diff(_env={"GIT_PAGER": "cat"}, _cwd=clone_dir)
            logger.info(f"Git diff:\n{git_diff}")
            return
        github_client = github.Github(username, password)
        title = f'Bump OCP versions {", ".join(sorted(updates_made_str))}'
        repo = github_client.get_repo(ASSISTED_SERVICE_GITHUB_REPO)
        # Skip if an identically-titled PR is already open against master.
        for pull_request in repo.get_pulls(state="open", base="master"):
            if pull_request.title == title:
                logger.info("Already created PR %s for changes: %s",
                            pull_request.html_url, updates_made_str)
                return
        create_github_pr(github_client, clone_dir, updates_made, title, username)
def launch(self, worker):
    """Launch the VirtualBox worker VM (if not already running) and record its address."""
    # `vagrant up` is slow even when VM is already running so bypass that with a
    # grep of the process name
    if not subprocess.call(
        "ps aux | grep '[V]BoxHeadless --comment stencila-worker'",
        shell=True,
        stdout=open(os.devnull, 'w')
    ) == 0:
        # grep found nothing -> VM not running; bring it up via make.
        sh.make('worker-launch-vagrant')
    # Static address configured for the vagrant box.
    worker.ip = "10.0.1.100"
    worker.platform_id = "vbox-on-localhost"
def xcompile_autotools(build_context, arch, output, path):
    """Cross-compile an autotools package for `arch` and install it into `output`."""
    triple = build_context.get_host(arch)
    build_env = build_context.get_build_env(arch, output)
    with sh.pushd(path):
        configure = sh.Command('./configure')
        # Standard autotools cross-compile: point --host at the target triple.
        configure('--host=%s' % triple, '--prefix', output,
                  _fg=True, _env=build_env)
        sh.make(_fg=True, _env=build_env)
        sh.make('install', _fg=True, _env=build_env)
def recompile_pycloak(m=None, pycloak_path='~/work/pycloak'):
    """Recompile and import module. Useful for quick command line testing without re-starting repl."""
    import sh
    previous_dir = cwd()
    # Build and install from the pycloak checkout, then restore the cwd.
    cd(expanduser(pycloak_path))
    sh.make()
    sh.make('install')
    cd(previous_dir)
    if m is not None:
        reload_module(m)
def _build_client(self, repo, binaries_path):
    """Build the client: make, setup.py build, then refresh binary hashes.

    Exports OPENVPN_BIN / BITMASK_ROOT so the hash_binaries step can find
    the bundled binaries.
    """
    self.log("running make on the client...")
    make()
    self.log("running build to get correct version...")
    python("setup.py", "build")
    self.log("updating hashes")
    os.environ["OPENVPN_BIN"] = os.path.join(
        binaries_path, "openvpn.files", "leap-openvpn")
    os.environ["BITMASK_ROOT"] = os.path.join(
        self._basedir, repo, "pkg", "linux", "bitmask-root")
    python("setup.py", "hash_binaries")
def compile(logfile=sys.stdout, MIX_ENV="test"):
    """Compile the Elixir backend and the python plugin bindings.

    logfile: stream or filename for build output.
    MIX_ENV: mix environment to compile under (e.g. "test", "prod").
    """
    # Bootstrap hex/rebar, fetch deps, then apply the Makefile.hacks fixups.
    with chdir("backend/"), envset(MIX_ENV=MIX_ENV):
        sh.mix("local.hex", "--force")
        sh.mix("local.rebar", "--force")
        sh.mix("deps.get", _out=logfile, _err=logfile)
        sh.make("-f", "Makefile.hacks", "compile", _out=logfile, _err=logfile)
    with chdir("backend/apps/serverboards/"), envset(MIX_ENV=MIX_ENV):
        sh.mix("compile", _out=logfile, _err=logfile)
    with chdir("plugins/bindings/python/"):
        sh.make("setup", _out=logfile, _err=logfile)
def install(cls, prefix="~/programs/fastQValidator/"): """ To automatically download and install the fastQValidator software on this computer and for the current user, type these commands in python: >>> from fasta.validator import Validator >>> Validator.install() """ # Check we are on an OS with aptitude # check_apt_exists() # Start with the required apt packages # get_apt_packages(cls.apt_packages, verbose=True) # Download tarball 1 # tmp_dir_1 = new_temp_dir() tgz_url_1 = 'https://github.com/statgen/libStatGen/archive/master.tar.gz' tgz_loc_1 = download_from_url(tgz_url_1, tmp_dir_1, stream=True, progress=True) src_dir_1 = tgz_loc_1.untargz_to() # Download tarball 2 # tmp_dir_2 = new_temp_dir() tgz_url_2 = 'https://github.com/statgen/fastQValidator/archive/master.tar.gz' tgz_loc_2 = download_from_url(tgz_url_2, tmp_dir_2, stream=True, progress=True) src_dir_2 = tgz_loc_2.untargz_to() # Uncompressed 1 # src_dir_1 = src_dir_1.sub_directory # Uncompressed 2 # src_dir_2 = src_dir_2.sub_directory # Make 1 # sh.make('-C', src_dir_1, _out=sys.stdout, _err=sys.stderr) # Make 2 # sh.make( '-C', src_dir_2, 'LIB_PATH_FASTQ_VALIDATOR=%s' % src_dir_1, _out=sys.stdout, _err=sys.stderr, ) # Move the executable # binary = src_dir_2 + 'bin/fastQValidator' path = binary.move_to(prefix, overwrite=True) # The directory that contains the executable # bin_dir = path.directory.with_tilda[:-1].replace('~', '$HOME') # Suggest adding to the $PATH # print("\n fastQValidator was installed successfully. You should now " "add this line to your .bash_profile: \n\n " "export PATH=%s:$PATH\n" % bin_dir)
def compile( self, source_dir, build_dir, install_dir ):
    """Configure this package with CMake out-of-source, then build and install it.

    On macOS the Xcode 'install' scheme builds and installs in one step;
    elsewhere a verbose parallel make plus make install is used.
    """
    package_source_dir = os.path.join( source_dir, self.dirname )
    assert( os.path.exists( package_source_dir ) )
    package_build_dir = os.path.join( build_dir, self.dirname )
    sh.mkdir( '-p', package_build_dir )
    sh.cd( package_build_dir )
    if ( platform.system() == 'Darwin' ):
        sh.cmake( '-G', 'Xcode',
                  '-D', 'CMAKE_INSTALL_PREFIX=%s' % install_dir,
                  package_source_dir, _out = sys.stdout )
        sh.xcodebuild( '-scheme', 'install', '-configuration', 'Release',
                       _out = sys.stdout )
    else:
        sh.cmake( '-D', 'CMAKE_INSTALL_PREFIX=%s' % install_dir,
                  package_source_dir, _out = sys.stdout )
        sh.make( '-j4', 'VERBOSE=1', _out = sys.stdout )
        sh.make.install( _out = sys.stdout )
def compile( self, source_dir, build_dir, install_dir ):
    """Autotools build of this package: autogen, configure, make, make install.

    Configured out-of-source in package_build_dir with --with-libzmq
    pointing at install_dir (assumes libzmq was installed there first —
    TODO confirm ordering with callers).
    """
    package_source_dir = os.path.join( source_dir, self.dirname )
    assert( os.path.exists( package_source_dir ) )
    package_build_dir = os.path.join( build_dir, self.dirname )
    sh.mkdir( '-p', package_build_dir )
    # autogen.sh must run from inside the source tree.
    sh.cd( package_source_dir )
    subprocess.check_call( [ os.path.join( package_source_dir, 'autogen.sh') ] )
    sh.cd( package_build_dir )
    subprocess.check_call( [ os.path.join( package_source_dir, 'configure' ),
                             '--prefix=%s' % install_dir,
                             '--with-libzmq=%s' % install_dir ] )
    sh.make( '-j4', _out = sys.stdout )
    sh.make.install( _out = sys.stdout )
def build(self, arch, cc, config, cpus=8):
    """Run the tree's `config` target, then a parallel build, for the given arch/toolchain."""
    tree = self._dir
    base_args = ['-C', tree,
                 'ARCH={}'.format(arch),
                 'CROSS_COMPILE={}'.format(cc)]
    # First pass: apply the requested configuration target.
    make(*base_args, config, _out=sys.stdout)
    # Second pass: parallel compile.
    make(*base_args, '-j{}'.format(cpus), _out=sys.stdout)
def compile( self, source_dir, build_dir, install_dir ):
    """Autotools build of this package: autogen (if needed), configure, make, make install.

    Configured out-of-source with --with-libzmq pointing at install_dir.
    """
    package_source_dir = os.path.join( source_dir, self.dirname )
    assert( os.path.exists( package_source_dir ) )
    package_build_dir = os.path.join( build_dir, self.dirname )
    sh.mkdir( '-p', package_build_dir )
    # Invoke autogen.sh if configure script doesn't exist
    if not os.path.exists( os.path.join( package_source_dir, 'configure' ) ):
        sh.cd( package_source_dir )
        # BUG FIX: shell=True with an argument *list* hands every element
        # after the first to the shell itself, so configure's --prefix and
        # --with-libzmq were silently dropped. Run the lists directly.
        subprocess.check_call( [ os.path.join( package_source_dir, 'autogen.sh') ] )
    sh.cd( package_build_dir )
    subprocess.check_call( [ os.path.join( package_source_dir, 'configure' ),
                             '--prefix=%s' % install_dir,
                             '--with-libzmq=%s' % install_dir ] )
    sh.make( '-j4', _out = sys.stdout )
    sh.make.install( _out = sys.stdout )
def loop(self):
    """Poll the git remote forever; run `make test` whenever a new commit lands.

    NOTE(review): uses Python 2 print statements — this block only runs
    under Python 2.
    """
    while True:
        # time.sleep(20)
        print "Pulling..."
        git.pull()
        latest_commit = self.__latest_commit()
        # Nothing new since last iteration: go around again.
        if self.__current_commit == latest_commit:
            # Sleep sometime, or next loop
            continue
        self.__current_commit = latest_commit
        try:
            make('test')
        except sh.ErrorReturnCode:
            # Test failure shouldn't kill the watcher; keep polling.
            print "Error"
            continue
def fli(env, dev, ipbuspkg): """ Build the Modelsim-ipbus foreign language interface """ # ------------------------------------------------------------------------- # Must be in a build area if env.project is None: raise click.ClickException( 'Project area not defined. Move into a project area and try again') if env.projectConfig['toolset'] != 'sim': raise click.ClickException( "Work area toolset mismatch. Expected 'sim', found '%s'" % env.projectConfig['toolset']) # ------------------------------------------------------------------------- # ------------------------------------------------------------------------- if not which('vsim'): raise click.ClickException( "ModelSim is not available. Have you sourced the environment script?" ) # ------------------------------------------------------------------------- # ------------------------------------------------------------------------- if ipbuspkg not in env.getSources(): raise click.ClickException( "Package %s not found in source/. The FLI cannot be built." % ipbuspkg) # ------------------------------------------------------------------------- # Set ModelSim root based on vsim's path os.environ['MODELSIM_ROOT'] = (dirname(dirname(which('vsim')))) # Apply set # os.environ['MTI_VCO_MODE']='64' lFliSrc = join(env.src, ipbuspkg, 'components', 'ipbus_eth', 'firmware', 'sim', 'modelsim_fli') import sh # Clean-up sh.rm('-rf', 'modelsim_fli', 'mac_fli.so', _out=sys.stdout) # Copy sh.cp('-a', lFliSrc, './', _out=sys.stdout) # Make sh.make('-C', 'modelsim_fli', 'TAP_DEV={0}'.format(dev), _out=sys.stdout) # Link sh.ln('-s', 'modelsim_fli/mac_fli.so', '.', _out=sys.stdout)
def build_client():
    """Clean, qmake, and build the xivo Qt client, echoing compile errors.

    NOTE(review): `print line` is a Python 2 print statement — this block
    only runs under Python 2.
    """
    # distclean may legitimately fail if there is nothing to clean.
    sh.make('distclean', _ok_code=(0, 1, 2))
    repo_dir = _repo_path('xivo-client-qt')
    print('running qmake...')
    # sh.Command('qmake')('QMAKE_CXX=colorgcc', _cwd=repo_dir)
    sh.Command('qmake')(_cwd=repo_dir)
    print('running make...')
    # Stream make output line by line so errors surface as they happen.
    for line in sh.make('-s', '-j4', 'FUNCTESTS=yes', 'DEBUG=yes',
                        _cwd=repo_dir, _iter=True):
        if "ERROR" in line:
            logger.error("Compile error : {0}".format(line))
            print line
def test_search(self):
    """Make sure aspiration search is the same as ordinary search

    Uses random fens as values, so not guaranteed to produce the same
    output when run multiple times.
    """
    # Sample 10 random FEN positions as the shared input for both builds.
    lines = str(sh.rl("test/data/fenio.fens", "--count=10")).rstrip("\n")
    sh.make("aspire_search")
    run = sh.Command("./aspire_search")
    aspire_output = str(run(sh.echo(lines)))
    sh.make("no_aspire_search")
    run = sh.Command("./no_aspire_search")
    no_aspire_output = str(run(sh.echo(lines)))
    for fen_orig, fen1, fen2 in zip(lines.split("\n"),
                                    aspire_output.split("\n"),
                                    no_aspire_output.split("\n")):
        # FIX: assertEquals is a long-deprecated alias; use assertEqual.
        self.assertEqual(fen1, fen2, "Original fen: '%s'" % fen_orig)
def compile( self, source_dir, build_dir, install_dir ):
    """Autotools build of this package: autogen, configure, make, make install."""
    package_source_dir = os.path.join( source_dir, self.dirname )
    assert( os.path.exists( package_source_dir ) )
    package_build_dir = os.path.join( build_dir, self.dirname )
    sh.mkdir( '-p', package_build_dir )
    # Always invoke autogen - there is one checked in but it doesn't work on my local setup for instance
    sh.cd( package_source_dir )
    # FIX: removed a stray trailing comma that turned this statement into a
    # pointless 1-tuple expression.
    subprocess.check_call( [ os.path.join( package_source_dir, 'autogen.sh' ) ] )
    # NOTE: there are problems with cmake support on this still, so using the autotools stuff
    sh.cd( package_build_dir )
    # http://bugs.python.org/issue6689
    subprocess.check_call( [ os.path.join( package_source_dir, 'configure' ),
                             '--prefix=%s' % install_dir ] )
    sh.make( '-j4', _out = sys.stdout )
    sh.make.install( _out = sys.stdout )
def grade(uniqname, link):
    """Download and grade one student's submission.

    Fetches the student's list.c, builds and runs it, then compares output:
    matches golden.out -> perfect grade; matches naive.out -> no change;
    otherwise hand-grade. Relies on sh-imported pushd/wget/rm/ln/make/diff.
    """
    print("Grading {}".format(uniqname))
    with pushd("373-f15-linked-list"):
        wget(link, "-O", "{}.c".format(uniqname))
        # Swap the student's file in as list.c via symlink.
        rm("-f", "list.c", "list.o", "list")
        ln("-s", "{}.c".format(uniqname), "list.c")
        make("run")
        try:
            # diff exits 1 on difference, raising ErrorReturnCode_1.
            diff("list.out", "golden.out")
            perfect_grade(uniqname)
        except sh.ErrorReturnCode_1:
            try:
                diff("list.out", "naive.out")
                no_change(uniqname)
            except sh.ErrorReturnCode_1:
                handgrade(uniqname)
def run_benchmark(app: str) -> None:
    """Build (if needed) and run one AM benchmark app on the emulator.

    Builds the app's RISC-V binary when missing, prepares the emulator run
    directory, and dispatches the run via c.avoid_repeating so finished
    runs are skipped unless force_run is set.
    """
    print(app)
    run_dir = pjoin(base_run_dir, emu_base, app)
    app_dir = c.get_am_app_dir(app)
    bin_file = app_dir + f'/build/{app}{am_arch_suffix}'
    # Build only when the binary is absent; make must run from the app dir.
    if not os.path.isfile(bin_file):
        os.chdir(app_dir)
        sh.make("ARCH=riscv64-emu")
    create_emu_env(run_dir, bin_file)
    run, ok = example_task(run_dir)
    c.avoid_repeating(emulator, bin_file, run_dir, force_run, ok, run)
def main():
    """End-to-end plugin test: compile for prod, boot the server on a temp DB, run the CLI tests.

    Exits 0 on success, 1 on compile or test failure.
    """
    sh.mkdir("-p", "log")
    # Free port 4040 (exit code 1 = nothing was listening).
    sh.fuser("-k", "-n", "tcp", "4040", _ok_code=[0, 1])
    printc("COMPILING", color="blue")
    start = time.time()
    try:
        compile(logfile=open("log/compile.txt", "wb"), MIX_ENV="prod")
    except:
        # NOTE(review): bare except also swallows KeyboardInterrupt.
        printc("ERROR COMPILING", color="red")
        with open("log/compile.txt") as fd:
            print(fd.read())
        sys.exit(1)
    end = time.time()
    printc("Compiled in %.2f seconds" % (end - start))
    token = uuid.uuid4()
    fail = False
    dbname = random_dbname()
    dburl = "postgresql://*****:*****@localhost/%s" % dbname
    # Temp database + prod env + a background `mix run` server for the tests.
    with tmpdb(dbname), \
            envset(MIX_ENV="prod", SERVERBOARDS_DATABASE_URL=dburl,
                   SERVERBOARDS_TCP_PORT="4040",
                   SERVERBOARDS_INI="test/plugins.ini"), \
            running("mix", "run", "--no-halt", _out="log/serverboards.txt",
                    _err_to_out=True, _cwd="backend"):
        printc("WAIT FOR PORT", color="blue")
        wait_for_port(4040, timeout=20)
        printc("CREATE USER", color="blue")
        create_user(dburl, token)
        printc("TESTS", color="blue")
        with chdir("cli"):
            try:
                sh.make()
                sh.Command("./s10s-plugin-test.py")(
                    "--auth-token", token,
                    _out=sys.stdout.buffer, _err_to_out=True)
            except sh.ErrorReturnCode_1:
                # Test runner signalled failures via exit code 1.
                fail = True
            except:
                # NOTE(review): bare except — any other error also counts as failure.
                import traceback
                traceback.print_exc()
                fail = True
    if fail:
        print()
        printc("FAILED", color="red")
        sys.exit(1)
    sys.exit(0)
def setup(flamegraph_directory, flamegraph_path, perf_map_agent_path, java_home):
    """Setup deps for flamegraph"""
    # Ensure the output directory exists and holds no stale files.
    if not os.path.exists(flamegraph_directory):
        os.mkdir(flamegraph_directory)
    for entry in os.listdir(flamegraph_directory):
        sh.sudo.rm(os.path.join(flamegraph_directory, entry))

    # perf-map-agent must be cloned and compiled against the target JVM.
    if not os.path.exists(perf_map_agent_path):
        sh.git('clone', 'https://github.com/jrudolph/perf-map-agent',
               perf_map_agent_path)
        sh.cmake('.', _cwd=perf_map_agent_path, _env={'JAVA_HOME': java_home})
        sh.make(_cwd=perf_map_agent_path)

    # FlameGraph is a set of scripts; cloning it is sufficient.
    if not os.path.exists(flamegraph_path):
        sh.git('clone', 'https://github.com/brendangregg/FlameGraph',
               flamegraph_path)
def build_rip(self: object) -> None:
    """Bootstrap (autogen + configure), build, and `make setuid` in the src tree."""
    src_directory = "{0}/src".format(self.normalized_path)
    jobs = sh.nproc(_tty_out=False).strip()

    # Run ./autogen.sh and then ./configure, both from the source directory.
    for script in ("autogen.sh", "configure"):
        sh.Command("{0}/{1}".format(src_directory, script))(
            _cwd=src_directory, _out=sys.stdout.buffer)

    sh.make("-j", jobs, _cwd=src_directory, _out=sys.stdout.buffer)
    # `make setuid` requires root; feed the password to sudo on stdin (-S).
    sh.sudo("-S", "make", "setuid", _in=self.sudo_password,
            _cwd=src_directory, _out=sys.stdout.buffer)
def cmd_build():
    """
    Build docs
    """
    # change to ``docs/`` and ALWAYS change back: previously a failing
    # ``make html`` skipped the ``sh.cd('..')`` and left the process
    # stranded inside docs/.
    sh.cd('docs')
    try:
        # run ``make html`` (output was captured into an unused variable before)
        sh.make('html')
    finally:
        sh.cd('..')
def build(version, to_build):
    """Configure (first run only) and build targets for each requested compiler.

    version: version subdirectory under config.WORKING_DIRECTORY.
    to_build: mapping of compiler name -> iterable of make targets.
    """
    saved_cwd = os.getcwd()
    root_directory = os.path.join(config.WORKING_DIRECTORY, version)
    source_directory = os.path.join(root_directory, config.SOURCE_PREFIX)
    try:
        for (compiler_name, targets) in to_build.items():
            directory = os.path.join(root_directory, compiler_name)
            build_directory = os.path.join(directory, config.BUILD_PREFIX)
            log_directory = os.path.join(directory, "log")
            compiler = config.COMPILERS[compiler_name]
            try:
                os.makedirs(build_directory)
                os.mkdir(log_directory)
            except FileExistsError:
                # ignore -- we have already built before, directories exist
                pass
            os.chdir(build_directory)
            if not os.path.exists(os.path.join(build_directory, "CMakeCache.txt")):
                # N.B. Must NOT run CMake twice. This can break things!
                # In particular, CMake will think that the compilers are being changed
                # (even though they're remaining the same); it will issue a warning,
                # and hose the CMakeCache completely (e.g. compiler flags get reset)
                c_flags = compiler.get("flags", "") + " " + compiler.get("cflags", "")
                cxx_flags = compiler.get("flags", "") + " " + compiler.get("cxxflags", "")
                args = [source_directory,
                        "-DCMAKE_BUILD_TYPE=Custom",
                        "-DCMAKE_C_COMPILER=" + compiler["cc"],
                        "-DCMAKE_CXX_COMPILER=" + compiler["cxx"],
                        "-DCMAKE_C_FLAGS_CUSTOM=" + c_flags,
                        "-DCMAKE_CXX_FLAGS_CUSTOM=" + cxx_flags]
                sh.cmake(*args)
            # run make
            sh.make(config.MAKE_FLAGS + list(targets),
                    _out=os.path.join(log_directory, "makefile.out"),
                    _err=os.path.join(log_directory, "makefile.err"))
    finally:
        # Restore the caller's working directory even when cmake/make raise;
        # previously a failure left the process in the build directory.
        os.chdir(saved_cwd)
def BuildYcmdLibs( cmake_args ):
  """Configure and build the ycmd support libs (or tests) in a throwaway dir."""
  # NOTE: Python 2 code (`unicode`); kept as-is.
  build_dir = unicode( sh.mktemp( '-d', '-t', 'ycm_build.XXXXXX' ) ).strip()
  try:
    cmake_invocation = [ '-G', 'Unix Makefiles' ]
    if OnMac():
      cmake_invocation.extend( CustomPythonCmakeArgs() )
    cmake_invocation.extend( cmake_args )
    cmake_invocation.append( p.join( DIR_OF_THIS_SCRIPT, 'cpp' ) )
    sh.cd( build_dir )
    sh.cmake( *cmake_invocation, _out = sys.stdout )

    # Test runs build (and then run) the test target instead of the libs.
    if 'YCM_TESTRUN' in os.environ:
      build_target = 'ycm_core_tests'
    else:
      build_target = 'ycm_support_libs'

    sh.make( '-j', NumCores(), build_target,
             _out = sys.stdout, _err = sys.stderr )

    if 'YCM_TESTRUN' in os.environ:
      RunYcmdTests( build_dir )
  finally:
    # Always leave the temp dir and delete it, even on build failure.
    sh.cd( DIR_OF_THIS_SCRIPT )
    sh.rm( '-rf', build_dir )
def buildGen():
    # Install build prerequisites (gcc; a private Maven if none is on PATH)
    # and build the TPC-DS data generator in ./tpcds-gen.
    # NOTE: Python 2 code (print statements).
    currentDir = os.getcwd()
    # gcc is needed to compile the native generator.
    if not PackageManager.installed(["gcc"]):
        PackageManager.install("gcc")
    # No mvn on PATH and no previously downloaded copy: fetch Maven 3.3.1.
    if not (spawn.find_executable("mvn")) and not (os.path.isdir("./apache-maven-3.3.1")):
        print "Maven Not Found....Installing"
        maven = wget.download(
            "http://mirror.sdunix.com/apache/maven/maven-3/3.3.1/binaries/apache-maven-3.3.1-bin.tar.gz",
            "apache-maven-3.3.1-bin.tar.gz")
        mavenTar = tarfile.open("./apache-maven-3.3.1-bin.tar.gz", "r:gz")
        mavenTar.extractall(currentDir)
        # Expose the freshly extracted Maven to the build below.
        path = os.getenv("PATH")
        mavenHome = currentDir + "/apache-maven-3.3.1"
        os.environ["MAVEN_HOME"] = mavenHome
        os.environ["PATH"] = path + ":" + mavenHome + "/bin"
    os.chdir(os.getcwd() + "/tpcds-gen")
    print "Building Generator...."
    # Clean build of the generator via its Makefile.
    sh.make("clean")
    sh.make()
    print "Build Complete"
def compile_tikz_timing_diagram(tikz_timings, work_dir=tempfile.mkdtemp(), debug=False): from sh import make output = "".join(named_tikz_timings_diagram(name, timing) for name, timing in tikz_timings.iteritems()) # tikz_timings_diagram(timing_pairs[0][1]) table_file_name = os.path.join(work_dir, "tikz_timing_table.tex") write_tikz_timing_table_file(output, table_file_name) if debug: print "Wrote: " + table_file_name tikz_tex = write_tikz_toplevel_file(table_file_name) if debug: print "Wrote: " + tikz_tex make_file = write_latex_makefile(tikz_tex) if debug: print "Wrote: " + make_file make_output = make('-C', work_dir) if debug: print make_output return os.path.splitext(tikz_tex)[0] + ".pdf"
def update_target():
    """ Run the "update-charm" make target within the project """
    log('Hook function: update_target')
    config_data = ansible_config()
    required_configs = [
        'build_label', 'archive_filename', 'current_code_dir',
        'update_make_target'
    ]
    # Guard clauses: bail out unless every required config is set and the
    # checked-out code directory actually exists.
    if not items_are_not_empty(config_data, required_configs):
        return
    if not path.isdir(config_data['current_code_dir']):
        return

    # Ensure make is installed
    apt_output = sh.apt_get.install('make')
    log('Installed make:')
    log(str(apt_output))

    env_vars = parse_json_file(env_file_path)
    # Execute make target with all environment variables
    make_output = sh.make(
        config_data['update_make_target'],
        directory=path.join(config_data['current_code_dir']),
        _env=env_vars
    )
    log('Make output:')
    log(str(make_output))
#!/usr/bin/env python
from contextlib import contextmanager
import os
from sh import git, cmake, make, mv
import sys


@contextmanager
def cd(newdir):
    """Temporarily switch the working directory, restoring it on exit."""
    saved = os.getcwd()
    os.chdir(os.path.expanduser(newdir))
    try:
        yield
    finally:
        os.chdir(saved)


if __name__ == "__main__":
    # git clone
    with cd("tool"):
        print(cmake(".", _out=sys.stdout))
        print(make(_out=sys.stdout))
        mv("src/sigrefmc", "..")
        mv("src/sigrefmc_ht", "..")
def test_units(self):
    """Run the C unittests"""
    # Build the test binary, then execute it; sh raises on nonzero exit code.
    sh.make("unittest")
    sh.Command("./unittest")()
# Download, patch, and build json-c 0.12 from source next to this script.
directory = os.path.dirname(os.path.realpath(__file__))
json_c_dir = os.path.join(directory, "json-c-json-c-0.12-20140410")
# Start from a clean tree in case a previous build left artifacts behind.
rm("-r", "-f", json_c_dir)
cd(directory)
# Fetch the tarball and unpack it in one sh pipeline (curl ... | tar -xz).
tar(curl(
    "-L",
    "https://github.com/json-c/json-c/archive/json-c-0.12-20140410.tar.gz",
    _piped=True
), "-xz")
# Replace the Makefile.am.inc with one without -Werror
replacement_amfile = os.path.join(directory, "json_c_new_Makefile.am.inc")
original_amfile = os.path.join(json_c_dir, "Makefile.am.inc")
cp(replacement_amfile, original_amfile)
# Build it
cd(json_c_dir)
autogen_location = os.path.join(json_c_dir, "autogen.sh")
autogen = sh.Command(autogen_location)
autogen(prefix="/usr", _out=write_output, _env=install_env)
make(_out=write_output, _env=install_env)
# On the Travis CI build, also install system-wide (requires root).
if os.environ.get("ZMAP_TRAVIS_BUILD", None):
    print("Installing...")
    with sudo:
        make.install(_out=write_output, _env=install_env)
print("Done.")
def tearDown(self):
    """Remove build artifacts after each test via `make clean`."""
    sh.make("clean")