def grab(path, filename=None, version=None, protocol=None, pip='pip'):
    """Fetch *path* into *filename* using the given (or guessed) protocol.

    Supported protocols: http/https/ftp (wget), git (clone at *version*),
    nfs/fs-ln (symlink copy), fs-cp (plain copy), rsync.

    Raises NotImplementedError for any other protocol.
    """
    # guess protocol if it's obvious from the url (usually is)
    if protocol is None:
        protocol = path.split(':')[0]

    if protocol in ('http', 'https', 'ftp'):
        shell('wget --quiet {} -O {}'.format(path, filename))
    elif protocol in ('git',):
        # bugfix: ('git') without the trailing comma is just the string
        # 'git', so `protocol in ('git')` was a substring test that matched
        # e.g. 'g' or '' as well; same for the branches below.
        # bugfix: the clone destination {filename} was missing from the
        # command even though filename= was passed to .format().
        shell("git clone "
              "-b {version} "
              "-q --recursive "
              "-- {path} {filename}"
              "".format(
                  version=strip_version(version),
                  path=path,
                  filename=filename,
              ))
    elif protocol in ('nfs', 'fs-ln'):
        shell('cp --recursive --symbolic-link {} {}'.format(path, filename))
    elif protocol in ('fs-cp',):
        shell('cp --recursive {} {}'.format(path, filename))
    elif protocol in ('rsync',):
        shell('rsync -a {}/ {}'.format(path, filename))
    else:
        raise NotImplementedError('Unknown protocol {}'.format(protocol))
def pip_install(pkg, ver, pkgpath, data, prefix, dlprefix, pip="pip", *args,
                **kwargs):
    """Install *pkg*==*ver* with pip from the local cache, fully offline.

    The package is rooted under kwargs['fakeroot'] with the given prefix;
    --no-index/--find-links restrict pip to wheels/sdists in *dlprefix*.
    """
    ver = strip_version(ver)
    if ver == LATEST_PACKAGE_ALIAS:
        ver = latest_pypi_version(pkg)

    cmd = [pip]
    cmd.append("install {}=={}".format(pkg, strip_version(ver)))
    cmd.append("--root {}".format(kwargs["fakeroot"]))
    cmd.append("--prefix {}".format(prefix))
    cmd.extend(["--no-index", "--no-deps", "--ignore-installed"])
    cmd.append("--cache-dir {}".format(dlprefix))
    cmd.append("--find-links {}".format(dlprefix))
    cmd.append(kwargs.get("makeopts", ""))

    print('Installing {} ({}) from pip'.format(pkg, ver))
    shell(cmd)
def rsync(pkg, ver, pkgpath, data, prefix, *args, **kwargs):
    """Install a pre-built package by copying its tree into the fakeroot."""
    print('Installing {} ({}) with rsync'.format(pkg, ver))

    # assume a root-like layout in the pkgpath dir, and just copy it
    destination = kwargs["fakeroot"] + prefix
    command = [
        "rsync -am",
        kwargs.get("makeopts"),
        "{}/".format(pkgpath),
        destination,
    ]
    shell(command)
def test_enable_bash_with_presets(tmpdir):
    """Sourcing the bash enable script prepends komodo paths to preset envs
    and restores them on disable."""
    with tmpdir.as_cwd():
        kmd_release = "bleeding"
        kmd_pyver = "3.6"
        kmd_prefix = "prefix"

        _create_script(kmd_prefix, kmd_pyver, kmd_release, "enable",
                       "enable.in")

        script_body = TEST_SCRIPT_SIMPLE.format(set_envs=BASH_ENVS,
                                                enable_path="bleeding/enable")
        with open("test_enable.sh", "w") as test_file:
            test_file.write(script_body)

        shell(["bash test_enable.sh"])
        pre_env, sourced_env, post_env = _load_envs()

        # komodo lib dirs are prepended to the preset LD_LIBRARY_PATH
        assert pre_env["LD_LIBRARY_PATH"] == "/some/path"
        assert sourced_env["LD_LIBRARY_PATH"] == "prefix/lib:prefix/lib64:/some/path"
        # man dir is prepended to the preset MANPATH
        assert pre_env["MANPATH"] == "/some/man/path"
        assert sourced_env["MANPATH"] == "prefix/share/man:/some/man/path"
        # environment is fully restored after disable
        assert pre_env == post_env
def cmake(pkg, ver, path, data, prefix, builddir, makeopts, jobs,
          cmake='cmake', *args, **kwargs):
    """Configure, build and install *pkg* from source with cmake + make.

    Builds in an out-of-source directory '<pkg>-<ver>-build' (under
    *builddir* when given), pointing cmake at the fakeroot for dependency
    lookup while installing with DESTDIR into the fakeroot.
    """
    bdir = '{}-{}-build'.format(pkg, ver)
    if builddir is not None:
        bdir = os.path.join(builddir, bdir)

    fakeroot = kwargs['fakeroot']
    fakeprefix = fakeroot + prefix

    flags = [
        '-DCMAKE_BUILD_TYPE=Release',
        '-DBOOST_ROOT={}'.format(fakeprefix),
        '-DBUILD_SHARED_LIBS=ON',
        '-DCMAKE_PREFIX_PATH={}'.format(fakeprefix),
        '-DCMAKE_MODULE_PATH={}/share/cmake/Modules'.format(fakeprefix),
        '-DCMAKE_INSTALL_PREFIX={}'.format(prefix),
        '-DDEST_PREFIX={}'.format(fakeroot),
    ]

    mkpath(bdir)
    with pushd(bdir):
        # point the build at the fakeroot's libraries and binaries
        os.environ["LD_LIBRARY_PATH"] = kwargs.get("ld_lib_path")
        _pre_PATH = os.environ["PATH"]
        os.environ["PATH"] = kwargs.get("binpath")

        try:
            print('Installing {} ({}) from source with cmake'.format(pkg, ver))
            shell([cmake, path] + flags + [makeopts])
            print(shell('make -j{}'.format(jobs)))
            print(shell('make DESTDIR={} install'.format(fakeroot)))
        finally:
            # bugfix: the original left LD_LIBRARY_PATH/PATH mutated when a
            # build step raised; always restore the environment.
            del os.environ["LD_LIBRARY_PATH"]
            os.environ["PATH"] = _pre_PATH
def rpm(pkg, ver, path, data, prefix, *args, **kwargs):
    """Extract an rpm into *prefix*, flattening its usr/ subtree."""
    # cpio always outputs to cwd, can't be overriden with switches
    with pushd(prefix):
        print('Installing {} ({}) from rpm'.format(pkg, ver))
        for command in ('rpm2cpio {}.rpm | cpio -imd --quiet'.format(path),
                        'rsync -a usr/* .',
                        'rm -rf usr'):
            shell(command)
def sh(pkg, ver, pkgpath, data, prefix, makefile, *args, **kwargs):
    """Install *pkg* by running its bash makefile with the komodo flags."""
    makefile = data.get(makefile)

    with pushd(pkgpath):
        cmd = ['bash {} --prefix {}'.format(makefile, prefix)]
        cmd.append('--fakeroot {}'.format(kwargs['fakeroot']))
        cmd.append('--python {}/bin/python'.format(prefix))
        # optional flags, forwarded only when the caller supplied them
        for optional in ('jobs', 'cmake'):
            if optional in kwargs:
                cmd.append('--{} {}'.format(optional, kwargs[optional]))
        cmd.append('--pythonpath {}'.format(kwargs['pythonpath']))
        cmd.append('--path {}'.format(kwargs['binpath']))
        cmd.append('--pip {}'.format(kwargs['pip']))
        cmd.append('--virtualenv {}'.format(kwargs['virtualenv']))
        cmd.append('--ld-library-path {}'.format(kwargs['ld_lib_path']))
        cmd.append(kwargs.get('makeopts'))

        print('Installing {} ({}) from sh'.format(pkg, ver))
        shell(cmd)
def test_enable_csh_no_presets(tmpdir):
    """Sourcing the csh enable script sets komodo paths from a clean
    environment and leaves it clean after disable."""
    with tmpdir.as_cwd():
        kmd_release = "bleeding"
        kmd_pyver = "3.6"
        kmd_prefix = "prefix"

        _create_script(
            kmd_prefix, kmd_pyver, kmd_release, "enable.csh", "enable.csh.in"
        )

        script_body = TEST_SCRIPT_SIMPLE.format(
            set_envs=CLEAN_CSH_ENV, enable_path="bleeding/enable.csh"
        )
        with open("test_enable.sh", "w") as test_file:
            test_file.write(script_body)

        shell(["csh test_enable.sh"])
        pre_env, sourced_env, post_env = _load_envs()

        # variables absent before are created with only komodo paths
        assert "LD_LIBRARY_PATH" not in pre_env
        assert sourced_env["LD_LIBRARY_PATH"] == "prefix/lib:prefix/lib64"
        assert "MANPATH" not in pre_env
        assert sourced_env["MANPATH"] == "prefix/share/man:"
        # environment is fully restored after disable
        assert pre_env == post_env
def _create_script(kmd_prefix, kmd_pyver, kmd_release, target, template):
    """Render an enable script from its m4 template into the release dir."""
    data = Data(extra_data_dirs=None)
    os.mkdir(kmd_release)

    m4_cmd = [
        "m4 {}".format(data.get("enable.m4")),
        "-D komodo_prefix={}".format(kmd_prefix),
        "-D komodo_pyver={}".format(kmd_pyver),
        "-D komodo_release={}".format(kmd_release),
        data.get(template),
    ]
    rendered = shell(m4_cmd).decode("utf-8")

    with open("{}/{}".format(kmd_release, target), "w") as f:
        f.write(rendered)
def fixup_python_shebangs(prefix, release):
    """Fix shebang to $PREFIX/bin/python.

    Some packages installed with pip do not respect target executable, that
    is, they set as their shebang executable the Python executable used to
    build the komodo distribution instead of the Python executable that
    komodo deploys.  This breaks the application since the corresponding
    Python modules won't be picked up correctly.

    For now, we use sed to rewrite the first line in some executables.

    This is a hack that should be fixed at some point.
    """
    binpath = os.path.join(prefix, release, "root", "bin")
    if not os.path.isdir(binpath):
        # No bin files to fix
        return
    python_ = os.path.join(binpath, "python")

    bins_ = []  # executables with wrong shebang
    for bin_ in os.listdir(binpath):
        try:
            with open(os.path.join(binpath, bin_), "r") as f:
                shebang = f.readline().strip()
            if _is_shebang(shebang):
                bins_.append(bin_)
        except Exception as err:
            # deliberate best-effort: unreadable entries are just reported
            print("Exception in reading bin {}: {}".format(bin_, err))

    sedfxp = """sed -i 1c#!{0} {1}"""
    for bin_ in bins_:
        # consistency fix: reuse the binpath computed above instead of
        # re-joining prefix/release/"root"/"bin" a second time
        binpath_ = os.path.join(binpath, bin_)
        if os.path.exists(binpath_):
            shell(sedfxp.format(python_, binpath_))
def make(
        pkgs,
        repo,
        data,
        prefix,
        dlprefix=None,
        builddir=None,
        jobs=1,
        cmk="cmake",
        pip="pip",
        virtualenv=None,
        fakeroot=".",
):
    """Build and stage every package in *pkgs* under fakeroot + prefix.

    pkgs maps package name -> version; repo maps name -> version -> the
    package's build metadata ('make', 'makeopts', 'makefile', ...).  Each
    package is dispatched to the builder named by its 'make' field (rpm,
    cmake, sh, pip, rsync or noop).  Mutates os.environ (DESTDIR,
    BOOST_ROOT) as a side effect.
    """
    # dependency-order the packages: dfs yields dependencies, flatten
    # merges the per-package sequences, then de-duplicate keeping the
    # first occurrence of each entry
    xs = flatten(dfs(pkg, ver, pkgs, repo) for pkg, ver in pkgs.items())

    seen = set()
    pkgorder = []
    for x in xs:
        if x in seen:
            continue
        seen.add(x)
        pkgorder.append(x)

    # NOTE(review): plain string concatenation, not os.path.join — prefix
    # is expected to be absolute so fakeroot + prefix nests it under the
    # fakeroot; confirm against callers
    fakeprefix = fakeroot + prefix
    shell(['mkdir -p', fakeprefix])
    prefix = os.path.abspath(prefix)

    # assuming there always is a python *and* that python will be installed
    # before pip is required. This dependency *must* be explicit in the
    # repository
    os.environ['DESTDIR'] = fakeroot
    os.environ['BOOST_ROOT'] = fakeprefix

    # build-time lookup paths pointing into the fakeroot; filter(None, ...)
    # drops LD_LIBRARY_PATH when it is unset
    build_ld_lib_path = ':'.join(
        filter(None, [
            os.path.join(fakeprefix, 'lib'),
            os.path.join(fakeprefix, 'lib64'),
            os.environ.get('LD_LIBRARY_PATH')
        ]))
    extra_makeopts = os.environ.get('extra_makeopts')
    build_pythonpath = pypaths(fakeprefix, pkgs.get('python'))
    build_path = ':'.join(
        [os.path.join(fakeprefix, 'bin'), os.environ['PATH']])

    # per-package source dirs, optionally rooted in the download cache
    pkgpaths = ['{}-{}'.format(pkg, pkgs[pkg]) for pkg in pkgorder]
    if dlprefix:
        pkgpaths = [os.path.join(dlprefix, path) for path in pkgpaths]

    def resolve(x):
        # expand the $(prefix) placeholder used in repository makeopts
        return x.replace('$(prefix)', prefix)

    # dispatch table from the repo's 'make' field to a builder function
    build = {
        'rpm': rpm,
        'cmake': cmake,
        'sh': sh,
        'pip': pip_install,
        'rsync': rsync,
        'noop': noop
    }

    for pkg, path in zip(pkgorder, pkgpaths):
        ver = pkgs[pkg]
        current = repo[pkg][ver]
        make = current['make']  # NOTE: shadows this function's own name
        pkgpath = os.path.abspath(path)

        if "pypi_package_name" in current and make != "pip":
            raise ValueError(
                "pypi_package_name is only valid when building with pip")

        package_name = current.get("pypi_package_name", pkg)

        # fold environment-supplied makeopts into the repo entry; note this
        # mutates the shared repo dict in place
        if extra_makeopts:
            oldopts = current.get("makeopts", "")
            current["makeopts"] = " ".join((oldopts, extra_makeopts))

        current["makeopts"] = resolve(current.get("makeopts", ""))

        build[make](
            package_name,
            ver,
            pkgpath,
            data,
            prefix=prefix,
            builddir=builddir,
            makeopts=current.get("makeopts"),
            makefile=current.get("makefile"),
            dlprefix=dlprefix if dlprefix else ".",
            jobs=jobs,
            cmake=cmk,
            pip=pip,
            virtualenv=virtualenv,
            fakeroot=fakeroot,
            pythonpath=build_pythonpath,
            binpath=build_path,
            ld_lib_path=build_ld_lib_path,
        )
def fetch(pkgs, repo, outdir=".", pip="pip"):
    """Download the sources for every requested package into *outdir*.

    pkgs maps package name -> version; repo maps name -> version -> the
    package's fetch metadata ('source', 'fetch', 'pypi_package_name').
    Tarballs are extracted and symlinked to a normalised '<pkg>-<ver>'
    name; pypi packages are batched into a single `pip download` call.

    Returns a dict of package name -> git revision hash for git-fetched
    packages, or None when requested packages/versions are missing.
    """
    missingpkg = [pkg for pkg in pkgs if pkg not in repo]
    missingver = [
        pkg for pkg, ver in pkgs.items()
        if pkg in repo and ver not in repo[pkg]
    ]

    if missingpkg:
        eprint('Packages requested, but not found in the repository:')
        eprint('missingpkg: {}'.format(','.join(missingpkg)))

    for pkg in missingver:
        eprint('missingver: missing version for {}: {} requested, found: {}'.
               format(pkg, pkgs[pkg], ','.join(repo[pkg].keys())))

    if missingpkg or missingver:
        return

    if outdir and not os.path.exists(outdir):
        os.mkdir(outdir)

    pypi_packages = []
    git_hashes = {}

    with pushd(outdir):
        for pkg, ver in pkgs.items():
            current = repo[pkg][ver]
            if "pypi_package_name" in current and current["make"] != "pip":
                raise ValueError(
                    "pypi_package_name is only valid when building with pip")

            url = current.get("source")
            protocol = current.get("fetch")
            pkg_alias = current.get("pypi_package_name", pkg)

            if url == "pypi" and ver == LATEST_PACKAGE_ALIAS:
                ver = latest_pypi_version(pkg_alias)

            name = "{} ({}): {}".format(pkg_alias, ver, url)
            pkgname = "{}-{}".format(pkg_alias, ver)

            if url is None and protocol is None:
                # in-repo/no-source package: just create its build folder
                package_folder = os.path.abspath(pkgname)
                print('Nothing to fetch for {}, but created folder {}'.format(
                    pkgname, package_folder))
                os.mkdir(pkgname)
                continue

            dst = pkgname

            # derive the archive extension from the url (query string
            # stripped), preserving compound 'tar.*' extensions
            spliturl = url.split('?')[0].split('.')
            ext = spliturl[-1]

            if len(spliturl) > 1 and spliturl[-2] == 'tar':
                ext = 'tar.{}'.format(spliturl[-1])

            if ext in [
                    'rpm', 'tar', 'gz', 'tgz', 'tar.gz', 'tar.bz2', 'tar.xz'
            ]:
                dst = '{}.{}'.format(dst, ext)

            if url == "pypi":
                print("Defering download of {}".format(name))
                pypi_packages.append(f"{pkg_alias}=={ver.split('+')[0]}")
                continue

            print('Downloading {}'.format(name))
            grab(url, filename=dst, version=ver, protocol=protocol, pip=pip)

            if protocol == "git":
                git_hashes[pkg] = get_git_revision_hash(path=dst)

            if ext in ['tgz', 'tar.gz', 'tar.bz2', 'tar.xz']:
                print('Extracting {} ...'.format(dst))
                # first entry of the verbose listing is the archive's top dir
                topdir = shell(
                    'tar -xvf {}'.format(dst)).decode("utf-8").split()[0]
                normalised_dir = topdir.split('/')[0]

                if not os.path.exists(pkgname):
                    print('Creating symlink {} -> {}'.format(
                        normalised_dir, pkgname))
                    os.symlink(normalised_dir, pkgname)

        # bugfix: only call pip when something was actually deferred; the
        # original unconditionally ran `pip download` with an empty
        # requirement string when no pypi packages were requested
        if pypi_packages:
            print('Downloading {} pypi packages'.format(len(pypi_packages)))
            shell([
                pip, 'download', '--no-deps', '--dest .',
                " ".join(pypi_packages)
            ])

    return git_hashes
def _main(args):
    """Drive a full komodo run: fetch sources, build into a staging root,
    render activation scripts, write the release manifest, and rsync the
    result into the final prefix.

    Heavy side effects throughout: shell commands, file writes, sys.exit
    on download-only/build-only runs, and os.environ mutation (TMPDIR).
    """
    args.prefix = os.path.abspath(args.prefix)

    data = Data(extra_data_dirs=args.extra_data_dirs)

    if args.download or (not args.build and not args.install):
        git_hashes = fetch(args.pkgs, args.repo, outdir=args.cache,
                           pip=args.pip)

    if args.download and not args.build:
        sys.exit(0)

    # append root to the temporary build dir, as we want a named root/
    # directory as the distribution root, organised under the distribution
    # name (release)
    tmp_prefix = os.path.join(os.path.join(args.prefix), args.release, "root")
    fakeroot = os.path.abspath(args.release)
    if args.build or not args.install:
        make(args.pkgs,
             args.repo,
             data,
             prefix=tmp_prefix,
             dlprefix=args.cache,
             builddir=args.tmp,
             jobs=args.jobs,
             cmk=args.cmake,
             pip=args.pip,
             virtualenv=args.virtualenv,
             fakeroot=fakeroot)
        # move the staged root out of the fakeroot and drop the now-empty
        # intermediate directories
        shell("mv {} {}".format(args.release + tmp_prefix, args.release))
        shell("rmdir -p --ignore-fail-on-non-empty {}".format(
            os.path.dirname(args.release + tmp_prefix)))

        if args.build and not args.install:
            sys.exit(0)

    # create the enable script
    for tmpl, target in [("enable.in", "enable"),
                         ("enable.csh.in", "enable.csh")]:
        # TODO should args.release be release_path?
        with open("{}/{}".format(args.release, target), "w") as f:
            f.write(
                shell([
                    "m4 {}".format(data.get("enable.m4")),
                    "-D komodo_prefix={}".format(tmp_prefix),
                    "-D komodo_pyver={}".format(args.pyver),
                    "-D komodo_release={}".format(args.release),
                    data.get(tmpl),
                ]).decode("utf-8"))

    # render the site-local activator snippets from the locations config
    with open(args.locations_config) as defs, open(
            os.path.join(args.release, "local"), "w") as local_activator, open(
                os.path.join(args.release, "local.csh"),
                "w") as local_csh_activator:
        defs = yml.safe_load(defs)
        local.write_local_activators(data, defs, local_activator,
                                     local_csh_activator)

    # write the release manifest (pkg -> version/maintainer), resolving
    # 'latest' aliases and replacing git versions with their commit hash
    releasedoc = os.path.join(args.release, args.release)
    with open(releasedoc, "w") as y:
        release = {}
        for pkg, ver in args.pkgs.items():
            entry = args.repo[pkg][ver]
            maintainer = args.repo[pkg][ver]["maintainer"]
            if ver == LATEST_PACKAGE_ALIAS:
                ver = latest_pypi_version(entry.get("pypi_package_name", pkg))
            elif args.repo[pkg][ver].get("fetch") == "git":
                # NOTE(review): git_hashes is only bound when fetch() ran
                # above; an install-only run with git packages would raise
                # NameError here — confirm intended invocation modes
                ver = git_hashes[pkg]
            release[pkg] = {
                "version": ver,
                "maintainer": maintainer,
            }
        yml.dump(release, y, default_flow_style=False)

    if args.dry_run:
        return

    print("Installing {} to {}".format(args.release, args.prefix))
    # NOTE(review): install_root is never used below (release_root is
    # recomputed later) — candidate for removal
    install_root = os.path.join(args.prefix, args.release, "root")

    # atomically swap the release into place: rename to a hidden name,
    # rsync into the prefix, then rotate old/new via args.renamer
    shell("{1} {0} .{0} {0}".format(args.release, args.renamer))
    shell("rsync -a .{} {}".format(args.release, args.prefix), sudo=args.sudo)

    if os.path.exists("{1}/{0}".format(args.release, args.prefix)):
        shell(
            "{2} {0} {0}.delete {1}/{0}".format(args.release, args.prefix,
                                                args.renamer),
            sudo=args.sudo,
        )

    shell(
        "{2} .{0} {0} {1}/.{0}".format(args.release, args.prefix,
                                       args.renamer),
        sudo=args.sudo,
    )
    shell("rm -rf {1}/{0}.delete".format(args.release, args.prefix),
          sudo=args.sudo)

    if args.tmp:
        # Allows e.g. pip to use this folder as tmpfolder, instead of in some
        # cases falling back to /tmp, which is undesired when building on nfs.
        os.environ["TMPDIR"] = args.tmp

    print('Fixup #! in pip-provided packages if bin exist')
    release_path = os.path.join(args.prefix, args.release)
    release_root = os.path.join(release_path, "root")
    # re-install pip packages directly into the final root so their
    # entry-point scripts point at the deployed prefix
    for pkg, ver in args.pkgs.items():
        current = args.repo[pkg][ver]
        if current["make"] != "pip":
            continue

        package_name = current.get("pypi_package_name", pkg)

        if ver == LATEST_PACKAGE_ALIAS:
            ver = latest_pypi_version(package_name)

        shell_input = [
            args.pip,
            "install {}=={}".format(package_name, strip_version(ver)),
            "--prefix",
            release_root,
            "--no-index",
            "--no-deps",
            "--ignore-installed",
            "--cache-dir {}".format(args.cache),
            "--find-links {}".format(args.cache),
        ]
        shell_input.append(current.get("makeopts"))

        print(shell(shell_input, sudo=args.sudo))

    fixup_python_shebangs(args.prefix, args.release)

    switch.create_activator_switch(data, args.prefix, args.release)

    # run any post-install scripts on the release
    if args.postinst:
        shell([args.postinst, release_path])

    print("running", "find {} -name '*.pyc' -delete".format(release_root))
    shell("find {} -name '*.pyc' -delete".format(release_root))

    print("Setting permissions", [data.get("set_permissions.sh"),
                                  release_path])
    shell([data.get("set_permissions.sh"), release_path])