def create_files(dir_path, m, config):
    """
    Create the test files for pkg in the directory given.

    The resulting test files are configuration (i.e. platform,
    architecture, Python and numpy version, ...) independent.

    Return False if the package has no tests (for any configuration),
    and True if it has.
    """
    has_files = False
    for fn in m.get_value('test/files', []):
        has_files = True
        path = join(m.path, fn)
        copy_into(path, join(dir_path, fn), config.timeout)
    # need to re-download source in order to do tests
    if m.get_value('test/source_files') and not isdir(config.work_dir):
        source.provide(m.path, m.get_section('source'), config=config)
    for pattern in m.get_value('test/source_files', []):
        if on_win and '\\' in pattern:
            raise RuntimeError("test/source_files paths must use / "
                               "as the path delimiter on Windows")
        has_files = True
        src_dir = source.get_dir(config)
        files = glob.glob(join(src_dir, pattern))
        if not files:
            # Fix: the pattern was previously passed as a second positional
            # argument to RuntimeError instead of being interpolated, so the
            # "%s" in the message was never substituted.
            raise RuntimeError("Did not find any source_files for test with pattern %s"
                               % pattern)
        for f in files:
            copy_into(f, f.replace(src_dir, config.test_dir), config.timeout)
    # Drop stale bytecode so the test run imports fresh sources.
    for ext in '.pyc', '.pyo':
        for f in get_ext_files(config.test_dir, ext):
            os.remove(f)
    return has_files
def create_files(dir_path, m, config):
    """
    Create the test files for pkg in the directory given.

    The resulting test files are configuration (i.e. platform,
    architecture, Python and numpy version, ...) independent.

    Return False if the package has no tests (for any configuration),
    and True if it has.
    """
    has_files = False
    for fn in m.get_value('test/files', []):
        has_files = True
        path = join(m.path, fn)
        copy_into(path, join(dir_path, fn), config.timeout)
    # need to re-download source in order to do tests
    if m.get_value('test/source_files') and not isdir(config.work_dir):
        source.provide(m.path, m.get_section('source'), config=config)
    for pattern in m.get_value('test/source_files', []):
        if on_win and '\\' in pattern:
            raise RuntimeError("test/source_files paths must use / "
                               "as the path delimiter on Windows")
        has_files = True
        src_dir = source.get_dir(config)
        files = glob.glob(join(src_dir, pattern))
        if not files:
            # Bug fix: interpolate the pattern into the message; passing it
            # as a second RuntimeError argument left "%s" unformatted.
            raise RuntimeError(
                "Did not find any source_files for test with pattern %s" % pattern)
        for f in files:
            copy_into(f, f.replace(src_dir, config.test_dir), config.timeout)
    # Remove stale bytecode so tests import fresh sources.
    for ext in '.pyc', '.pyo':
        for f in get_ext_files(config.test_dir, ext):
            os.remove(f)
    return has_files
def build(m, get_src=True, pypi=False):
    """
    Build a conda package from recipe metadata ``m``.

    :param m: package metadata (MetaData-like object)
    :param get_src: when True, download/extract the source before building
    :param pypi: forwarded to create_env (resolve build deps via PyPI)
    """
    rm_rf(prefix)
    create_env(prefix, [ms.spec for ms in m.ms_depends('build')], pypi)
    print("BUILD START:", m.dist())
    if get_src:
        source.provide(m.path, m.get_section('source'))
    assert isdir(source.WORK_DIR)
    if os.listdir(source.get_dir()):
        print("source tree in:", source.get_dir())
    else:
        print("no source")
    rm_rf(info_dir)
    # Snapshot the prefix so files added by the build can be identified later.
    files1 = prefix_files()
    if sys.platform == 'win32':
        import conda_build.windows as windows
        windows.build(m)
    else:
        env = environ.get_dict(m)
        cmd = ['/bin/bash', '-x', '-e', join(m.path, 'build.sh')]
        _check_call(cmd, env=env, cwd=source.get_dir())
    create_post_scripts(m)
    create_entry_points(m.get_value('build/entry_points'))
    post_process(preserve_egg_dir=bool(
        m.get_value('build/preserve_egg_dir')))
    assert not exists(info_dir)
    files2 = prefix_files()
    # Only files new since files1 belong to this package.
    post_build(sorted(files2 - files1))
    create_info_files(m, sorted(files2 - files1))
    files3 = prefix_files()
    fix_permissions(files3 - files1)
    path = bldpkg_path(m)
    # Package every new file (including generated info files) into the tarball.
    t = tarfile.open(path, 'w:bz2')
    for f in sorted(files3 - files1):
        t.add(join(prefix, f), f)
    t.close()
    print("BUILD END:", m.dist())
    # we're done building, perform some checks
    tarcheck.check_all(path)
    update_index(bldpkgs_dir)
def meta_vars(meta, config):
    """
    Build the dict of recipe-level environment variables (PKG_*, RECIPE_DIR).

    Passes through variables whitelisted under build/script_env, and adds
    git or hg metadata when the source checkout is a repository.
    """
    d = {}
    for var_name in meta.get_value('build/script_env', []):
        value = os.getenv(var_name)
        if value is None:
            # Whitelisted but unset in the caller's environment: warn, not fail.
            warnings.warn(
                "The environment variable '%s' is undefined." % var_name,
                UserWarning
            )
        else:
            d[var_name] = value

    git_dir = join(source.get_dir(config), '.git')
    hg_dir = join(source.get_dir(config), '.hg')

    if not isinstance(git_dir, str):
        # On Windows, subprocess env can't handle unicode.
        git_dir = git_dir.encode(sys.getfilesystemencoding() or 'utf-8')

    if external.find_executable('git', config.build_prefix) and os.path.exists(git_dir):
        git_url = meta.get_value('source/git_url')
        if os.path.exists(git_url):
            if sys.platform == 'win32':
                git_url = utils.convert_unix_path_to_win(git_url)
            # If git_url is a relative path instead of a url, convert it to an abspath
            git_url = normpath(join(meta.path, git_url))
        _x = False
        if git_url:
            _x = verify_git_repo(git_dir, git_url, config,
                                 meta.get_value('source/git_rev', 'HEAD'))
        if _x or meta.get_value('source/path'):
            d.update(get_git_info(git_dir, config))
    elif external.find_executable('hg', config.build_prefix) and os.path.exists(hg_dir):
        d.update(get_hg_build_info(hg_dir))

    d['PKG_NAME'] = meta.name()
    d['PKG_VERSION'] = meta.version()
    d['PKG_BUILDNUM'] = str(meta.build_number())
    d['PKG_BUILD_STRING'] = str(meta.build_id())
    d['RECIPE_DIR'] = meta.path
    return d
def meta_vars(meta):
    """
    Build the dict of recipe-level environment variables (PKG_*, RECIPE_DIR).

    Passes through variables whitelisted under build/script_env, and adds
    git metadata when the source checkout is a git repository.
    """
    d = {}
    for var_name in meta.get_value("build/script_env", []):
        value = os.getenv(var_name)
        if value is None:
            # Whitelisted but unset: warn rather than fail the build.
            warnings.warn("The environment variable '%s' is undefined." % var_name, UserWarning)
        else:
            d[var_name] = value

    git_dir = join(source.get_dir(), ".git")
    if not isinstance(git_dir, str):
        # On Windows, subprocess env can't handle unicode.
        git_dir = git_dir.encode(sys.getfilesystemencoding() or "utf-8")

    if external.find_executable("git") and os.path.exists(git_dir):
        git_url = meta.get_value("source/git_url")
        if os.path.exists(git_url):
            # If git_url is a relative path instead of a url, convert it to an abspath
            git_url = normpath(join(meta.path, git_url))
        _x = False
        if git_url:
            _x = verify_git_repo(git_dir, git_url,
                                 meta.get_value("source/git_rev", "HEAD"))
        if _x or meta.get_value("source/path"):
            d.update(get_git_info(git_dir))

    d["PKG_NAME"] = meta.name()
    d["PKG_VERSION"] = meta.version()
    d["PKG_BUILDNUM"] = str(meta.build_number())
    d["PKG_BUILD_STRING"] = str(meta.build_id())
    d["RECIPE_DIR"] = meta.path
    return d
def execute(args, parser):
    """
    Entry point for the ``conda build`` CLI: build/test/inspect each recipe.

    Handles recipe tarballs (extracted to a temp dir) as well as recipe
    directories, and dispatches on the --check/--output/--test/--source flags.
    """
    import sys
    import shutil
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile, join
    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import croot
    from conda_build.metadata import MetaData

    check_external()
    # Serialize against other conda-build processes using the same croot.
    with Locked(croot):
        for arg in args.recipe:
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    # Recipe delivered as a tarball: unpack to a temp dir.
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False
            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)
            m = MetaData(recipe_dir)
            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                build.build(m)
                if not args.notest:
                    build.test(m)
                # Only a full build is eligible for upload.
                binstar_upload = True
            if need_cleanup:
                shutil.rmtree(recipe_dir)
            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)
def build(m):
    """
    Run the Windows build for recipe ``m``: generate a bld.bat header
    (MSVC env + all build env vars) in the work dir and execute it.
    """
    env = dict(os.environ)
    env.update(environ.get_dict(m))
    # Ensure the Library BIN/INC/LIB directories exist before the build uses them.
    for name in 'BIN', 'INC', 'LIB':
        path = env['LIBRARY_' + name]
        if not isdir(path):
            os.makedirs(path)
    src_dir = source.get_dir()
    bld_bat = join(m.path, 'bld.bat')
    if exists(bld_bat):
        with open(bld_bat) as fi:
            data = fi.read()
        with open(join(src_dir, 'bld.bat'), 'w') as fo:
            fo.write(msvc_env_cmd())
            for kv in iteritems(env):
                fo.write('set "%s=%s"\n' % kv)
            # more debuggable with echo on
            fo.write('@echo on\n')
            fo.write("REM ===== end generated header =====\n")
            fo.write(data)
        cmd = [os.environ['COMSPEC'], '/c', 'call', 'bld.bat']
        _check_call(cmd, cwd=src_dir)
        kill_processes()
        fix_staged_scripts()
def get_build_metadata(m):
    """
    Apply deprecated ``__conda_*__.txt`` overrides from the source tree
    onto the metadata ``m`` (version, build number, build string).
    """
    src_dir = source.get_dir()
    # Make sure the build section exists before writing into it below.
    if "build" not in m.meta:
        m.meta["build"] = {}
    if exists(join(src_dir, '__conda_version__.txt')):
        print(
            "Deprecation warning: support for __conda_version__ will be removed in Conda build 2.0."  # noqa
            "Try Jinja templates instead: "
            "http://conda.pydata.org/docs/building/environment-vars.html#git-environment-variables"
        )  # noqa
        with open(join(src_dir, '__conda_version__.txt')) as f:
            version = f.read().strip()
            print("Setting version from __conda_version__.txt: %s" % version)
            m.meta['package']['version'] = version
    if exists(join(src_dir, '__conda_buildnum__.txt')):
        print(
            "Deprecation warning: support for __conda_buildnum__ will be removed in Conda build 2.0."  # noqa
            "Try Jinja templates instead: "
            "http://conda.pydata.org/docs/building/environment-vars.html#git-environment-variables"
        )  # noqa
        with open(join(src_dir, '__conda_buildnum__.txt')) as f:
            build_number = f.read().strip()
            print("Setting build number from __conda_buildnum__.txt: %s" %
                  build_number)
            m.meta['build']['number'] = build_number
    if exists(join(src_dir, '__conda_buildstr__.txt')):
        print(
            "Deprecation warning: support for __conda_buildstr__ will be removed in Conda build 2.0."  # noqa
            "Try Jinja templates instead: "
            "http://conda.pydata.org/docs/building/environment-vars.html#git-environment-variables"
        )  # noqa
        with open(join(src_dir, '__conda_buildstr__.txt')) as f:
            buildstr = f.read().strip()
            print("Setting version from __conda_buildstr__.txt: %s" % buildstr)
            m.meta['build']['string'] = buildstr
def build(m, bld_bat, dirty=False):
    """
    Run the Windows build: write a generated header plus the recipe's
    ``bld_bat`` contents into the work dir's bld.bat and execute it.

    :param dirty: forwarded to environ.get_dict (reuse existing work dir)
    """
    env = dict(os.environ)
    env.update(environ.get_dict(m, dirty=dirty))
    env = environ.prepend_bin_path(env, config.build_prefix, True)
    # Ensure the Library BIN/INC/LIB directories exist before the build uses them.
    for name in 'BIN', 'INC', 'LIB':
        path = env['LIBRARY_' + name]
        if not isdir(path):
            os.makedirs(path)
    src_dir = source.get_dir()
    if os.path.isfile(bld_bat):
        with open(bld_bat) as fi:
            data = fi.read()
        with open(join(src_dir, 'bld.bat'), 'w') as fo:
            # more debuggable with echo on
            fo.write('@echo on\n')
            for key, value in env.items():
                fo.write('set "{key}={value}"\n'.format(key=key, value=value))
            fo.write("set INCLUDE={};%INCLUDE%\n".format(env["LIBRARY_INC"]))
            fo.write("set LIB={};%LIB%\n".format(env["LIBRARY_LIB"]))
            fo.write(msvc_env_cmd(bits=cc.bits,
                                  override=m.get_value('build/msvc_compiler', None)))
            fo.write('\n')
            fo.write("REM ===== end generated header =====\n")
            fo.write(data)
        cmd = [os.environ['COMSPEC'], '/c', 'call', 'bld.bat']
        _check_call(cmd, cwd=src_dir)
        kill_processes()
        fix_staged_scripts()
def get_build_metadata(m, config):
    """
    Apply deprecated ``__conda_*__.txt`` overrides from the source tree
    onto the metadata ``m`` (version, build number, build string).
    """
    src_dir = source.get_dir(config)
    # Make sure the build section exists before writing into it below.
    if "build" not in m.meta:
        m.meta["build"] = {}
    if exists(join(src_dir, '__conda_version__.txt')):
        print("Deprecation warning: support for __conda_version__ will be removed in Conda build 3.0."  # noqa
              "Try Jinja templates instead: "
              "http://conda.pydata.org/docs/building/meta-yaml.html#templating-with-jinja")
        with open(join(src_dir, '__conda_version__.txt')) as f:
            version = f.read().strip()
            print("Setting version from __conda_version__.txt: %s" % version)
            m.meta['package']['version'] = version
    if exists(join(src_dir, '__conda_buildnum__.txt')):
        print("Deprecation warning: support for __conda_buildnum__ will be removed in Conda build 3.0."  # noqa
              "Try Jinja templates instead: "
              "http://conda.pydata.org/docs/building/meta-yaml.html#templating-with-jinja")
        with open(join(src_dir, '__conda_buildnum__.txt')) as f:
            build_number = f.read().strip()
            print("Setting build number from __conda_buildnum__.txt: %s" %
                  build_number)
            m.meta['build']['number'] = build_number
    if exists(join(src_dir, '__conda_buildstr__.txt')):
        print("Deprecation warning: support for __conda_buildstr__ will be removed in Conda build 3.0."  # noqa
              "Try Jinja templates instead: "
              "http://conda.pydata.org/docs/building/meta-yaml.html#templating-with-jinja")
        with open(join(src_dir, '__conda_buildstr__.txt')) as f:
            buildstr = f.read().strip()
            print("Setting version from __conda_buildstr__.txt: %s" % buildstr)
            m.meta['build']['string'] = buildstr
def build(m):
    """
    Run the Windows build for recipe ``m``: generate a bld.bat header
    (MSVC env + INCLUDE/LIB paths) in the work dir and execute it.
    """
    env = dict(os.environ)
    env.update(environ.get_dict(m))
    env = environ.prepend_bin_path(env, config.build_prefix, True)
    # Ensure the Library BIN/INC/LIB directories exist before the build uses them.
    for name in 'BIN', 'INC', 'LIB':
        path = env['LIBRARY_' + name]
        if not isdir(path):
            os.makedirs(path)
    src_dir = source.get_dir()
    bld_bat = join(m.path, 'bld.bat')
    if exists(bld_bat):
        with open(bld_bat) as fi:
            data = fi.read()
        with open(join(src_dir, 'bld.bat'), 'w') as fo:
            fo.write(msvc_env_cmd(override=m.get_value('build/msvc_compiler', None)))
            fo.write('\n')
            # more debuggable with echo on
            fo.write('@echo on\n')
            fo.write("set INCLUDE={};%INCLUDE%\n".format(env["LIBRARY_INC"]))
            fo.write("set LIB={};%LIB%\n".format(env["LIBRARY_LIB"]))
            fo.write("REM ===== end generated header =====\n")
            fo.write(data)
        cmd = [os.environ['COMSPEC'], '/c', 'call', 'bld.bat']
        # Subprocess env must be plain str on both keys and values.
        _check_call(cmd, cwd=src_dir,
                    env={str(k): str(v) for k, v in env.items()})
        kill_processes()
        fix_staged_scripts()
def build(m, bld_bat, dirty=False, activate=True):
    """
    Run the Windows build: write a generated header plus the recipe's
    ``bld_bat`` contents into the work dir's bld.bat and execute it.

    :param dirty: forwarded to environ.get_dict (reuse existing work dir)
    :param activate: when True, activate the _build environment in the script
    """
    env = environ.get_dict(m, dirty=dirty)
    # Ensure the Library BIN/INC/LIB directories exist before the build uses them.
    for name in 'BIN', 'INC', 'LIB':
        path = env['LIBRARY_' + name]
        if not isdir(path):
            os.makedirs(path)
    src_dir = source.get_dir()
    if os.path.isfile(bld_bat):
        with open(bld_bat) as fi:
            data = fi.read()
        with open(join(src_dir, 'bld.bat'), 'w') as fo:
            # more debuggable with echo on
            fo.write('@echo on\n')
            for key, value in env.items():
                fo.write('set "{key}={value}"\n'.format(key=key, value=value))
            fo.write("set INCLUDE={};%INCLUDE%\n".format(env["LIBRARY_INC"]))
            fo.write("set LIB={};%LIB%\n".format(env["LIBRARY_LIB"]))
            fo.write(msvc_env_cmd(bits=cc.bits,
                                  override=m.get_value('build/msvc_compiler', None)))
            if activate:
                fo.write("call activate _build\n")
            fo.write('\n')
            fo.write("REM ===== end generated header =====\n")
            fo.write(data)
        cmd = [os.environ['COMSPEC'], '/c', 'call', 'bld.bat']
        _check_call(cmd, cwd=src_dir)
        kill_processes()
        fix_staged_scripts()
def build(m, bld_bat):
    """
    Run the Windows build: write a generated header plus the recipe's
    ``bld_bat`` contents into the work dir's bld.bat and execute it.
    """
    env = dict(os.environ)
    env.update(environ.get_dict(m))
    env = environ.prepend_bin_path(env, config.build_prefix, True)
    # Ensure the Library BIN/INC/LIB directories exist before the build uses them.
    for name in 'BIN', 'INC', 'LIB':
        path = env['LIBRARY_' + name]
        if not isdir(path):
            os.makedirs(path)
    src_dir = source.get_dir()
    if exists(bld_bat):
        with open(bld_bat) as fi:
            data = fi.read()
        with open(join(src_dir, 'bld.bat'), 'w') as fo:
            fo.write(
                msvc_env_cmd(
                    override=m.get_value('build/msvc_compiler', None)))
            fo.write('\n')
            # more debuggable with echo on
            fo.write('@echo on\n')
            fo.write("set INCLUDE={};%INCLUDE%\n".format(env["LIBRARY_INC"]))
            fo.write("set LIB={};%LIB%\n".format(env["LIBRARY_LIB"]))
            fo.write("REM ===== end generated header =====\n")
            fo.write(data)
        cmd = [os.environ['COMSPEC'], '/c', 'call', 'bld.bat']
        # Subprocess env must be plain str on both keys and values.
        _check_call(cmd, cwd=src_dir,
                    env={str(k): str(v) for k, v in env.items()})
        kill_processes()
        fix_staged_scripts()
def build(m, bld_bat):
    """
    Run the Windows build: write a generated header plus the recipe's
    ``bld_bat`` contents into the work dir's bld.bat and execute it.
    """
    env = dict(os.environ)
    env.update(environ.get_dict(m))
    env = environ.prepend_bin_path(env, config.build_prefix, True)
    # Ensure the Library BIN/INC/LIB directories exist before the build uses them.
    for name in "BIN", "INC", "LIB":
        path = env["LIBRARY_" + name]
        if not isdir(path):
            os.makedirs(path)
    src_dir = source.get_dir()
    if os.path.isfile(bld_bat):
        with open(bld_bat) as fi:
            data = fi.read()
        with open(join(src_dir, "bld.bat"), "w") as fo:
            # more debuggable with echo on
            fo.write("@echo on\n")
            for key, value in env.items():
                fo.write('set "{key}={value}"\n'.format(key=key, value=value))
            fo.write("set INCLUDE={};%INCLUDE%\n".format(env["LIBRARY_INC"]))
            fo.write("set LIB={};%LIB%\n".format(env["LIBRARY_LIB"]))
            fo.write(msvc_env_cmd(bits=cc.bits,
                                  override=m.get_value("build/msvc_compiler", None)))
            fo.write("\n")
            fo.write("REM ===== end generated header =====\n")
            fo.write(data)
        cmd = [os.environ["COMSPEC"], "/c", "call", "bld.bat"]
        _check_call(cmd, cwd=src_dir)
        kill_processes()
        fix_staged_scripts()
def build(m, bld_bat, dirty=False, activate=True):
    """
    Run the Windows build: write a generated header plus the recipe's
    ``bld_bat`` contents into the work dir's bld.bat and execute it.

    :param dirty: forwarded to environ.get_dict (reuse existing work dir)
    :param activate: when True, activate the build prefix in the script
    """
    env = environ.get_dict(m, dirty=dirty)
    # Ensure the Library BIN/INC/LIB directories exist before the build uses them.
    for name in 'BIN', 'INC', 'LIB':
        path = env['LIBRARY_' + name]
        if not isdir(path):
            os.makedirs(path)
    src_dir = source.get_dir()
    if os.path.isfile(bld_bat):
        with open(bld_bat) as fi:
            data = fi.read()
        with open(join(src_dir, 'bld.bat'), 'w') as fo:
            # more debuggable with echo on
            fo.write('@echo on\n')
            for key, value in env.items():
                fo.write('set "{key}={value}"\n'.format(key=key, value=value))
            fo.write(msvc_env_cmd(bits=cc.bits,
                                  override=m.get_value('build/msvc_compiler', None)))
            # Reset echo on, because MSVC scripts might have turned it off
            fo.write('@echo on\n')
            fo.write('set "INCLUDE={};%INCLUDE%"\n'.format(env["LIBRARY_INC"]))
            fo.write('set "LIB={};%LIB%"\n'.format(env["LIBRARY_LIB"]))
            if activate:
                fo.write("call activate.bat {0}\n".format(config.build_prefix))
            fo.write("REM ===== end generated header =====\n")
            fo.write(data)
        cmd = [os.environ['COMSPEC'], '/c', 'bld.bat']
        _check_call(cmd, cwd=src_dir)
        fix_staged_scripts()
def get_build_metadata(m):
    """
    Apply deprecated ``__conda_*__.txt`` overrides from the source tree
    onto the metadata ``m`` (version, build number, build string).
    """
    src_dir = source.get_dir()
    if exists(join(src_dir, '__conda_version__.txt')):
        print("Deprecation warning: support for __conda_version__ will be removed in Conda build 2.0."  # noqa
              "Try Jinja templates instead: "
              "http://conda.pydata.org/docs/building/environment-vars.html#git-environment-variables")  # noqa
        with open(join(src_dir, '__conda_version__.txt')) as f:
            version = f.read().strip()
            print("Setting version from __conda_version__.txt: %s" % version)
            m.meta['package']['version'] = version
    if exists(join(src_dir, '__conda_buildnum__.txt')):
        print("Deprecation warning: support for __conda_buildnum__ will be removed in Conda build 2.0."  # noqa
              "Try Jinja templates instead: "
              "http://conda.pydata.org/docs/building/environment-vars.html#git-environment-variables")  # noqa
        with open(join(src_dir, '__conda_buildnum__.txt')) as f:
            build_number = f.read().strip()
            print("Setting build number from __conda_buildnum__.txt: %s" %
                  build_number)
            m.meta['build']['number'] = build_number
    if exists(join(src_dir, '__conda_buildstr__.txt')):
        print("Deprecation warning: support for __conda_buildstr__ will be removed in Conda build 2.0."  # noqa
              "Try Jinja templates instead: "
              "http://conda.pydata.org/docs/building/environment-vars.html#git-environment-variables")  # noqa
        with open(join(src_dir, '__conda_buildstr__.txt')) as f:
            buildstr = f.read().strip()
            print("Setting version from __conda_buildstr__.txt: %s" % buildstr)
            m.meta['build']['string'] = buildstr
def build(m):
    """
    Run the Windows build for recipe ``m``: generate a bld.bat header
    (MSVC env + all build env vars) in the work dir and execute it.
    """
    env = dict(os.environ)
    env.update(environ.get_dict(m))
    # Ensure the Library BIN/INC/LIB directories exist before the build uses them.
    for name in 'BIN', 'INC', 'LIB':
        path = env['LIBRARY_' + name]
        if not isdir(path):
            os.makedirs(path)
    src_dir = source.get_dir()
    bld_bat = join(m.path, 'bld.bat')
    if exists(bld_bat):
        with open(bld_bat) as fi:
            data = fi.read()
        with open(join(src_dir, 'bld.bat'), 'w') as fo:
            fo.write(msvc_env_cmd())
            # more debuggable with echo on
            fo.write('@echo on\n')
            for kv in iteritems(env):
                fo.write('set %s=%s\n' % kv)
            fo.write("REM ===== end generated header =====\n")
            fo.write(data)
        cmd = [os.environ['COMSPEC'], '/c', 'call', 'bld.bat']
        _check_call(cmd, cwd=src_dir)
        kill_processes()
        fix_staged_scripts()
def build(m, bld_bat, config):
    """
    Run the Windows build: write a generated header plus the recipe's
    ``bld_bat`` contents into the work dir's bld.bat and execute it.
    """
    # Compute the env with the build prefix temporarily on PATH.
    with path_prepended(config.build_prefix):
        env = environ.get_dict(config=config, m=m)
    env["CONDA_BUILD_STATE"] = "BUILD"
    # Ensure the Library BIN/INC/LIB directories exist before the build uses them.
    for name in 'BIN', 'INC', 'LIB':
        path = env['LIBRARY_' + name]
        if not isdir(path):
            os.makedirs(path)
    src_dir = source.get_dir(config)
    if os.path.isfile(bld_bat):
        with open(bld_bat) as fi:
            data = fi.read()
        with open(join(src_dir, 'bld.bat'), 'w') as fo:
            # more debuggable with echo on
            fo.write('@echo on\n')
            for key, value in env.items():
                fo.write('set "{key}={value}"\n'.format(key=key, value=value))
            fo.write(msvc_env_cmd(bits=bits, config=config,
                                  override=m.get_value('build/msvc_compiler', None)))
            # Reset echo on, because MSVC scripts might have turned it off
            fo.write('@echo on\n')
            fo.write('set "INCLUDE={};%INCLUDE%"\n'.format(env["LIBRARY_INC"]))
            fo.write('set "LIB={};%LIB%"\n'.format(env["LIBRARY_LIB"]))
            if config.activate:
                fo.write("call {conda_root}\\activate.bat {prefix}\n".format(
                    conda_root=root_script_dir,
                    prefix=config.build_prefix))
            fo.write("REM ===== end generated header =====\n")
            fo.write(data)
        cmd = ['cmd.exe', '/c', 'bld.bat']
        _check_call(cmd, cwd=src_dir)
def get_dict(m=None, prefix=None):
    """
    Build the environment-variable dict used when running build scripts.

    :param m: optional metadata; when given, PKG_*/RECIPE_DIR are included
    :param prefix: build prefix; defaults to config.build_prefix
    :return: dict of environment variables
    """
    if not prefix:
        prefix = config.build_prefix
    python = config.build_python
    d = {'CONDA_BUILD': '1'}
    d['ARCH'] = str(cc.bits)
    d['PREFIX'] = prefix
    d['PYTHON'] = python
    d['PY3K'] = str(config.PY3K)
    d['STDLIB_DIR'] = get_stdlib_dir()
    d['SP_DIR'] = get_sp_dir()
    d['SYS_PREFIX'] = sys.prefix
    d['SYS_PYTHON'] = sys.executable
    d['PERL_VER'] = get_perl_ver()
    d['PY_VER'] = get_py_ver()
    d['NPY_VER'] = get_npy_ver()
    d['SRC_DIR'] = source.get_dir()
    if "LANG" in os.environ:
        d['LANG'] = os.environ['LANG']
    # Add GIT_* variables when the source checkout is a git repo.
    if os.path.isdir(os.path.join(d['SRC_DIR'], '.git')):
        d.update(**get_git_build_info(d['SRC_DIR']))

    if sys.platform == 'win32':         # -------- Windows
        d['PATH'] = (join(prefix, 'Library', 'bin') + ';' +
                     join(prefix) + ';' +
                     join(prefix, 'Scripts') + ';' + os.getenv('PATH'))
        d['SCRIPTS'] = join(prefix, 'Scripts')
        d['LIBRARY_PREFIX'] = join(prefix, 'Library')
        d['LIBRARY_BIN'] = join(d['LIBRARY_PREFIX'], 'bin')
        d['LIBRARY_INC'] = join(d['LIBRARY_PREFIX'], 'include')
        d['LIBRARY_LIB'] = join(d['LIBRARY_PREFIX'], 'lib')
    else:                               # -------- Unix
        d['PATH'] = '%s/bin:%s' % (prefix, os.getenv('PATH'))
        d['HOME'] = os.getenv('HOME', 'UNKNOWN')
        d['PKG_CONFIG_PATH'] = join(prefix, 'lib', 'pkgconfig')

    if sys.platform == 'darwin':        # -------- OSX
        d['OSX_ARCH'] = 'i386' if cc.bits == 32 else 'x86_64'
        d['CFLAGS'] = '-arch %(OSX_ARCH)s' % d
        d['CXXFLAGS'] = d['CFLAGS']
        d['LDFLAGS'] = d['CFLAGS']
        d['MACOSX_DEPLOYMENT_TARGET'] = '10.5'
    elif sys.platform.startswith('linux'):  # -------- Linux
        d['LD_RUN_PATH'] = prefix + '/lib'

    if m:
        d['PKG_NAME'] = m.name()
        d['PKG_VERSION'] = m.version()
        d['PKG_BUILDNUM'] = str(m.build_number())
        d['RECIPE_DIR'] = m.path

    return d
def get_dict(m=None, prefix=None):
    """
    Build the environment-variable dict used when running build scripts.

    :param m: optional metadata; when given, PKG_*/RECIPE_DIR are included
    :param prefix: build prefix; defaults to config.build_prefix
    :return: dict of environment variables
    """
    if not prefix:
        prefix = config.build_prefix
    python = config.build_python
    d = {'CONDA_BUILD': '1'}
    d['ARCH'] = str(cc.bits)
    d['PREFIX'] = prefix
    d['PYTHON'] = python
    d['PY3K'] = str(config.PY3K)
    d['STDLIB_DIR'] = get_stdlib_dir()
    d['SP_DIR'] = get_sp_dir()
    d['SYS_PREFIX'] = sys.prefix
    d['SYS_PYTHON'] = sys.executable
    d['PERL_VER'] = get_perl_ver()
    d['PY_VER'] = get_py_ver()
    d['SRC_DIR'] = source.get_dir()
    if "LANG" in os.environ:
        d['LANG'] = os.environ['LANG']
    # Add GIT_* variables when the source checkout is a git repo.
    if os.path.isdir(os.path.join(d['SRC_DIR'], '.git')):
        d.update(**get_git_build_info(d['SRC_DIR']))

    if sys.platform == 'win32':         # -------- Windows
        d['PATH'] = (join(prefix, 'Library', 'bin') + ';' +
                     join(prefix) + ';' +
                     join(prefix, 'Scripts') + ';' + os.getenv('PATH'))
        d['SCRIPTS'] = join(prefix, 'Scripts')
        d['LIBRARY_PREFIX'] = join(prefix, 'Library')
        d['LIBRARY_BIN'] = join(d['LIBRARY_PREFIX'], 'bin')
        d['LIBRARY_INC'] = join(d['LIBRARY_PREFIX'], 'include')
        d['LIBRARY_LIB'] = join(d['LIBRARY_PREFIX'], 'lib')
    else:                               # -------- Unix
        d['PATH'] = '%s/bin:%s' % (prefix, os.getenv('PATH'))
        d['HOME'] = os.getenv('HOME', 'UNKNOWN')
        d['PKG_CONFIG_PATH'] = join(prefix, 'lib', 'pkgconfig')

    if sys.platform == 'darwin':        # -------- OSX
        d['OSX_ARCH'] = 'i386' if cc.bits == 32 else 'x86_64'
        d['CFLAGS'] = '-arch %(OSX_ARCH)s' % d
        d['CXXFLAGS'] = d['CFLAGS']
        d['LDFLAGS'] = d['CFLAGS']
        d['MACOSX_DEPLOYMENT_TARGET'] = '10.5'
    elif sys.platform.startswith('linux'):  # -------- Linux
        d['LD_RUN_PATH'] = prefix + '/lib'

    if m:
        d['PKG_NAME'] = m.name()
        d['PKG_VERSION'] = m.version()
        d['PKG_BUILDNUM'] = str(m.build_number())
        d['RECIPE_DIR'] = m.path

    return d
def conda_build_vars(prefix):
    """Return the core CONDA_BUILD environment variables for *prefix*."""
    env_vars = {}
    env_vars["CONDA_BUILD"] = "1"
    env_vars["PYTHONNOUSERSITE"] = "1"
    env_vars["CONDA_DEFAULT_ENV"] = config.build_prefix
    env_vars["ARCH"] = str(cc.bits)
    env_vars["PREFIX"] = prefix
    env_vars["SYS_PREFIX"] = sys.prefix
    env_vars["SYS_PYTHON"] = sys.executable
    env_vars["SRC_DIR"] = source.get_dir()
    env_vars["HTTPS_PROXY"] = os.getenv("HTTPS_PROXY", "")
    env_vars["HTTP_PROXY"] = os.getenv("HTTP_PROXY", "")
    return env_vars
def conda_build_vars(prefix):
    """Return the core CONDA_BUILD environment variables for *prefix*."""
    # Constant flags first, then values derived from the running process.
    fixed = {
        'CONDA_BUILD': '1',
        'PYTHONNOUSERSITE': '1',
    }
    derived = {
        'CONDA_DEFAULT_ENV': config.build_prefix,
        'ARCH': str(cc.bits),
        'PREFIX': prefix,
        'SYS_PREFIX': sys.prefix,
        'SYS_PYTHON': sys.executable,
        'SRC_DIR': source.get_dir(),
        'HTTPS_PROXY': os.getenv('HTTPS_PROXY', ''),
        'HTTP_PROXY': os.getenv('HTTP_PROXY', ''),
    }
    merged = dict(fixed)
    merged.update(derived)
    return merged
def get_dict(m=None, prefix=build_prefix):
    """
    Build the environment-variable dict used when running build scripts.

    :param m: optional metadata; when given, PKG_*/RECIPE_DIR are included
    :param prefix: build prefix (defaults to the module-level build_prefix)
    :return: dict of environment variables
    """
    # Per-platform stdlib layout: Lib on Windows, lib/pythonX.Y elsewhere.
    stdlib_dir = join(prefix, 'Lib' if sys.platform == 'win32' else
                      'lib/python%s' % py_ver)
    sp_dir = join(stdlib_dir, 'site-packages')
    python = _get_python(prefix)
    d = {'CONDA_BUILD': '1'}
    d['ARCH'] = str(cc.bits)
    d['PREFIX'] = prefix
    d['PYTHON'] = python
    d['PY3K'] = str(PY3K)
    d['STDLIB_DIR'] = stdlib_dir
    d['SP_DIR'] = sp_dir
    d['SYS_PREFIX'] = sys.prefix
    d['SYS_PYTHON'] = sys.executable
    d['PY_VER'] = py_ver
    d['SRC_DIR'] = source.get_dir()

    if sys.platform == 'win32':         # -------- Windows
        d['PATH'] = (join(prefix, 'Library', 'bin') + ';' +
                     join(prefix) + ';' +
                     join(prefix, 'Scripts') + ';' + os.getenv('PATH'))
        d['SCRIPTS'] = join(prefix, 'Scripts')
        d['LIBRARY_PREFIX'] = join(prefix, 'Library')
        d['LIBRARY_BIN'] = join(d['LIBRARY_PREFIX'], 'bin')
        d['LIBRARY_INC'] = join(d['LIBRARY_PREFIX'], 'include')
        d['LIBRARY_LIB'] = join(d['LIBRARY_PREFIX'], 'lib')
    else:                               # -------- Unix
        d['PATH'] = '%s/bin:%s' % (prefix, os.getenv('PATH'))
        d['HOME'] = os.getenv('HOME', 'UNKNOWN')
        d['PKG_CONFIG_PATH'] = join(prefix, 'lib', 'pkgconfig')

    if sys.platform == 'darwin':        # -------- OSX
        d['OSX_ARCH'] = 'i386' if cc.bits == 32 else 'x86_64'
        d['CFLAGS'] = '-arch %(OSX_ARCH)s' % d
        d['CXXFLAGS'] = d['CFLAGS']
        d['LDFLAGS'] = d['CFLAGS']
        d['MACOSX_DEPLOYMENT_TARGET'] = '10.5'
    elif sys.platform.startswith('linux'):  # -------- Linux
        d['LD_RUN_PATH'] = prefix + '/lib'

    if m:
        d['PKG_NAME'] = m.name()
        d['PKG_VERSION'] = m.version()
        d['PKG_BUILDNUM'] = str(m.build_number())
        d['RECIPE_DIR'] = m.path

    return d
def conda_build_vars(prefix, config):
    """Return the core CONDA_BUILD environment variables for *prefix*."""
    env = {'CONDA_BUILD': '1', 'PYTHONNOUSERSITE': '1'}
    env['CONDA_DEFAULT_ENV'] = config.build_prefix
    env['ARCH'] = str(config.bits)
    env['PREFIX'] = prefix
    env['SYS_PREFIX'] = sys.prefix
    env['SYS_PYTHON'] = sys.executable
    env['SUBDIR'] = config.subdir
    env['SRC_DIR'] = source.get_dir(config)
    env['HTTPS_PROXY'] = os.getenv('HTTPS_PROXY', '')
    env['HTTP_PROXY'] = os.getenv('HTTP_PROXY', '')
    env['DIRTY'] = '1' if config.dirty else ''
    env['ROOT'] = root_dir
    return env
def get_build_metadata(m):
    """
    Apply ``__conda_*__.txt`` overrides from the source tree onto the
    metadata ``m`` (package version, build number, build string).
    """
    src_dir = source.get_dir()
    # (filename, meta section, meta key, message template)
    overrides = (
        ('__conda_version__.txt', 'package', 'version',
         "Setting version from __conda_version__.txt: %s"),
        ('__conda_buildnum__.txt', 'build', 'number',
         "Setting build number from __conda_buildnum__.txt: %s"),
        ('__conda_buildstr__.txt', 'build', 'string',
         "Setting version from __conda_buildstr__.txt: %s"),
    )
    for fname, section, key, message in overrides:
        override_path = join(src_dir, fname)
        if not exists(override_path):
            continue
        with open(override_path) as fh:
            value = fh.read().strip()
        print(message % value)
        m.meta[section][key] = value
def windows_build(m):
    """
    Build a wheel on Windows: generate a bld.bat header (MSVC env +
    INCLUDE/LIB paths) followed by the recipe's bld_wheel.bat (or a
    default ``setup.py bdist_wheel`` command) and execute it.
    """
    from conda_build.windows import msvc_env_cmd, kill_processes

    env = dict(os.environ)
    env.update(environ.get_dict(m))
    env = environ.prepend_bin_path(env, config.build_prefix, True)
    # Ensure the Library BIN/INC/LIB directories exist before the build uses them.
    for name in 'BIN', 'INC', 'LIB':
        path = env['LIBRARY_' + name]
        if not isdir(path):
            os.makedirs(path)
    src_dir = source.get_dir()
    bld_bat = join(m.path, 'bld_wheel.bat')
    if exists(bld_bat):
        with open(bld_bat) as fi:
            data = fi.read()
    else:
        print("Using plain 'python setup.py bdist_wheel' as build script")
        data = "\n:: Autogenerated build command:\npython setup.py bdist_wheel\n"

    with open(join(src_dir, 'bld.bat'), 'w') as fo:
        fo.write('@echo on\n')
        fo.write(
            msvc_env_cmd(override=m.get_value('build/msvc_compiler', None)))
        fo.write('\n')
        # more debuggable with echo on
        fo.write('set\n')
        fo.write('where python\n')
        fo.write('@echo on\n')
        fo.write("set INCLUDE={};%INCLUDE%\n".format(env["LIBRARY_INC"]))
        fo.write("set LIB={};%LIB%\n".format(env["LIBRARY_LIB"]))
        fo.write("REM ===== end generated header =====\n")
        fo.write(data)

    cmd = [os.environ['COMSPEC'], '/c', 'call', 'bld.bat']
    print("build cmd: %s" % cmd)
    # Subprocess env must be plain str on both keys and values.
    _check_call(cmd, cwd=src_dir, env={str(k): str(v) for k, v in env.items()})
    kill_processes()
def windows_build(m):
    """
    Build a wheel on Windows: generate a bld.bat header (MSVC env +
    INCLUDE/LIB paths) followed by the recipe's bld_wheel.bat (or a
    default ``setup.py bdist_wheel`` command) and execute it.
    """
    from conda_build.windows import msvc_env_cmd, kill_processes

    env = dict(os.environ)
    env.update(environ.get_dict(m))
    env = environ.prepend_bin_path(env, config.build_prefix, True)
    # Ensure the Library BIN/INC/LIB directories exist before the build uses them.
    for name in 'BIN', 'INC', 'LIB':
        path = env['LIBRARY_' + name]
        if not isdir(path):
            os.makedirs(path)
    src_dir = source.get_dir()
    bld_bat = join(m.path, 'bld_wheel.bat')
    if exists(bld_bat):
        with open(bld_bat) as fi:
            data = fi.read()
    else:
        print("Using plain 'python setup.py bdist_wheel' as build script")
        data = "\n:: Autogenerated build command:\npython setup.py bdist_wheel\n"

    with open(join(src_dir, 'bld.bat'), 'w') as fo:
        fo.write('@echo on\n')
        fo.write(msvc_env_cmd(override=m.get_value('build/msvc_compiler', None)))
        fo.write('\n')
        # more debuggable with echo on
        fo.write('set\n')
        fo.write('where python\n')
        fo.write('@echo on\n')
        fo.write("set INCLUDE={};%INCLUDE%\n".format(env["LIBRARY_INC"]))
        fo.write("set LIB={};%LIB%\n".format(env["LIBRARY_LIB"]))
        fo.write("REM ===== end generated header =====\n")
        fo.write(data)

    cmd = [os.environ['COMSPEC'], '/c', 'call', 'bld.bat']
    print("build cmd: %s" % cmd)
    # Subprocess env must be plain str on both keys and values.
    _check_call(cmd, cwd=src_dir, env={str(k): str(v) for k, v in env.items()})
    kill_processes()
def build(m, bld_bat, config):
    """
    Run the Windows build: write a generated header plus the recipe's
    ``bld_bat`` contents into the work dir's bld.bat and execute it.
    """
    # Compute the env with the build prefix temporarily on PATH.
    with path_prepended(config.build_prefix):
        env = environ.get_dict(config=config, m=m)
    env["CONDA_BUILD_STATE"] = "BUILD"
    # Ensure the Library BIN/INC/LIB directories exist before the build uses them.
    for name in 'BIN', 'INC', 'LIB':
        path = env['LIBRARY_' + name]
        if not isdir(path):
            os.makedirs(path)
    src_dir = source.get_dir(config)
    if os.path.isfile(bld_bat):
        with open(bld_bat) as fi:
            data = fi.read()
        with open(join(src_dir, 'bld.bat'), 'w') as fo:
            # more debuggable with echo on
            fo.write('@echo on\n')
            for key, value in env.items():
                fo.write('set "{key}={value}"\n'.format(key=key, value=value))
            fo.write(
                msvc_env_cmd(bits=bits, config=config,
                             override=m.get_value('build/msvc_compiler', None)))
            # Reset echo on, because MSVC scripts might have turned it off
            fo.write('@echo on\n')
            fo.write('set "INCLUDE={};%INCLUDE%"\n'.format(env["LIBRARY_INC"]))
            fo.write('set "LIB={};%LIB%"\n'.format(env["LIBRARY_LIB"]))
            if config.activate:
                fo.write("call {conda_root}\\activate.bat {prefix}\n".format(
                    conda_root=root_script_dir,
                    prefix=config.build_prefix))
            fo.write("REM ===== end generated header =====\n")
            fo.write(data)
        cmd = ['cmd.exe', '/c', 'bld.bat']
        _check_call(cmd, cwd=src_dir)
def build(m, verbose=True, channel_urls=(), override_channels=False,
          wheel_dir="./build"):
    '''
    Build the package with the specified metadata and collect the produced
    wheel(s) into *wheel_dir*.

    :param m: Package metadata
    :type m: Metadata
    :param verbose: Pass verbosity through to environment creation.
    :param channel_urls: Extra channels to search for build dependencies.
    :param override_channels: Ignore configured default channels.
    :param wheel_dir: Directory the built wheels are copied into
        (created if missing).
    '''
    if (m.get_value('build/detect_binary_files_with_prefix') or
            m.binary_has_prefix_files()):
        # We must use a long prefix here as the package will only be
        # installable into prefixes shorter than this one.
        config.use_long_build_prefix = True
    else:
        # In case there are multiple builds in the same process
        config.use_long_build_prefix = False

    if m.skip():
        print("Skipped: The %s recipe defines build/skip for this "
              "configuration." % m.dist())
        return

    print("Removing old build environment")
    if on_win:
        # rm_rf fails on Windows when files are in use; move aside instead.
        if isdir(config.short_build_prefix):
            move_to_trash(config.short_build_prefix, '')
        if isdir(config.long_build_prefix):
            move_to_trash(config.long_build_prefix, '')
    else:
        rm_rf(config.short_build_prefix)
        rm_rf(config.long_build_prefix)
    print("Removing old work directory")
    if on_win:
        if isdir(source.WORK_DIR):
            move_to_trash(source.WORK_DIR, '')
    else:
        rm_rf(source.WORK_DIR)

    # Display the name only
    # Version number could be missing due to dependency on source info.
    print("BUILD START:", m.dist())
    create_env(config.build_prefix,
               [ms.spec for ms in m.ms_depends('build')],
               verbose=verbose, channel_urls=channel_urls,
               override_channels=override_channels)

    # If the package being built is already installed as a build dependency,
    # remove it so our freshly built files are not confused with it.
    if m.name() in [i.rsplit('-', 2)[0] for i in linked(config.build_prefix)]:
        print("%s is installed as a build dependency. Removing." % m.name())
        index = get_build_index(clear_cache=False, channel_urls=channel_urls,
                                override_channels=override_channels)
        actions = plan.remove_actions(config.build_prefix, [m.name()], index=index)
        assert not plan.nothing_to_do(actions), actions
        plan.display_actions(actions, index)
        plan.execute_actions(actions, index)

    # download source code...
    source.provide(m.path, m.get_section('source'))

    # Parse our metadata again because we did not initialize the source
    # information before.
    m.parse_again()

    print("Package:", m.dist())

    assert isdir(source.WORK_DIR)
    src_dir = source.get_dir()
    contents = os.listdir(src_dir)
    if contents:
        print("source tree in:", src_dir)
    else:
        print("no source")

    rm_rf(config.info_dir)
    files1 = prefix_files()
    # always_include_files patterns are globs; each must match something.
    for pat in m.always_include_files():
        has_matches = False
        for f in set(files1):
            if fnmatch.fnmatch(f, pat):
                print("Including in package existing file", f)
                files1.discard(f)
                has_matches = True
        if not has_matches:
            sys.exit("Error: Glob %s from always_include_files does not match any files" % pat)
    # Save this for later
    with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
        f.write(u'\n'.join(sorted(list(files1))))
        f.write(u'\n')

    print("Source dir: %s" % src_dir)
    if sys.platform == 'win32':
        windows_build(m)
    else:
        env = environ.get_dict(m)
        build_file = join(m.path, 'build_wheel.sh')

        if not isfile(build_file):
            # No recipe build script: generate a plain bdist_wheel invocation.
            print("Using plain 'python setup.py bdist_wheel' as build script")
            build_file = join(src_dir, 'build_wheel.sh')
            with open(build_file, 'w') as fo:
                fo.write('\n')
                fo.write('# Autogenerated build command:\n')
                fo.write('python setup.py bdist_wheel\n')
                fo.write('\n')

        cmd = [shell_path, '-x', '-e', build_file]
        _check_call(cmd, env=env, cwd=src_dir)

    all_wheels = glob(join(src_dir, "dist", '*.whl'))
    if len(all_wheels) == 0:
        print("No wheels produced!")
    else:
        # BUG FIX: this warning previously fired when exactly ONE wheel was
        # produced (`== 1`) — the normal, successful case — and never when
        # multiple wheels were produced.  It must trigger on more than one.
        if len(all_wheels) > 1:
            print("More than one wheel produced!")
        try:
            os.makedirs(wheel_dir)
            print("Created wheel dir: %s:" % wheel_dir)
        except OSError:
            # Directory already exists; anything else is a real error.
            if not isdir(wheel_dir):
                raise
        print("Copying to %s:" % wheel_dir)
        for wheel in all_wheels:
            shutil.copy(wheel, wheel_dir)
            print(" %s" % basename(wheel))
def build(m, get_src=True, verbose=True, post=None, channel_urls=(),
          override_channels=False):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :param get_src: Should we download the source?
    :type get_src: bool
    :type post: bool or None. None means run the whole build. True means run
    post only. False means stop just before the post.
    '''
    if (m.get_value('build/detect_binary_files_with_prefix') or
            m.binary_has_prefix_files()):
        # We must use a long prefix here as the package will only be
        # installable into prefixes shorter than this one.
        config.use_long_build_prefix = True
    else:
        # In case there are multiple builds in the same process
        config.use_long_build_prefix = False

    if post in [False, None]:
        print("Removing old build directory")
        rm_rf(config.short_build_prefix)
        rm_rf(config.long_build_prefix)
        print("Removing old work directory")
        rm_rf(source.WORK_DIR)

        # Display the name only
        # Version number could be missing due to dependency on source info.
        print("BUILD START:", m.dist())
        create_env(config.build_prefix,
                   [ms.spec for ms in m.ms_depends('build')],
                   verbose=verbose, channel_urls=channel_urls,
                   override_channels=override_channels)

        if get_src:
            source.provide(m.path, m.get_section('source'))
        # Parse our metadata again because we did not initialize the source
        # information before.
        m.parse_again()

        print("Package:", m.dist())

        assert isdir(source.WORK_DIR)
        src_dir = source.get_dir()
        contents = os.listdir(src_dir)
        if contents:
            print("source tree in:", src_dir)
        else:
            print("no source")

        rm_rf(config.info_dir)
        files1 = prefix_files()
        # In this variant always_include_files entries are exact paths,
        # not globs: each must already exist in the prefix.
        for f in m.always_include_files():
            if f not in files1:
                sys.exit("Error: File %s from always_include_files not found" % f)
        # Drop them from the baseline so they are packaged even though
        # they pre-existed in the build prefix.
        files1 = files1.difference(set(m.always_include_files()))
        # Save this for later
        with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
            f.write(u'\n'.join(sorted(list(files1))))
            f.write(u'\n')

        if sys.platform == 'win32':
            import conda_build.windows as windows
            windows.build(m)
        else:
            env = environ.get_dict(m)
            build_file = join(m.path, 'build.sh')

            # An inline build/script in meta.yaml overrides build.sh.
            script = m.get_value('build/script', None)
            if script:
                if isinstance(script, list):
                    script = '\n'.join(script)
                build_file = join(source.get_dir(), 'conda_build.sh')
                with open(build_file, 'w') as bf:
                    bf.write(script)
                os.chmod(build_file, 0o766)

            if exists(build_file):
                cmd = ['/bin/bash', '-x', '-e', build_file]
                _check_call(cmd, env=env, cwd=src_dir)

    if post in [True, None]:
        # post == True means a post-only run: the pre-build file snapshot
        # must be reloaded from disk instead of the in-memory files1.
        if post == True:
            with open(join(config.croot, 'prefix_files.txt'), 'r') as f:
                files1 = set(f.read().splitlines())

        get_build_metadata(m)
        create_post_scripts(m)
        create_entry_points(m.get_value('build/entry_points'))
        assert not exists(config.info_dir)
        files2 = prefix_files()

        post_process(sorted(files2 - files1),
                     preserve_egg_dir=bool(m.get_value('build/preserve_egg_dir')))

        # The post processing may have deleted some files (like easy-install.pth)
        files2 = prefix_files()
        post_build(m, sorted(files2 - files1))
        create_info_files(m, sorted(files2 - files1),
                          include_recipe=bool(m.path))
        if m.get_value('build/noarch_python'):
            import conda_build.noarch_python as noarch_python
            noarch_python.transform(m, sorted(files2 - files1))
        # Re-scan after info files / noarch transform so they are packaged too.
        files3 = prefix_files()
        fix_permissions(files3 - files1)

        path = bldpkg_path(m)
        t = tarfile.open(path, 'w:bz2')
        for f in sorted(files3 - files1):
            t.add(join(config.build_prefix, f), f)
        t.close()

        print("BUILD END:", m.dist())

        # we're done building, perform some checks
        tarcheck.check_all(path)
        update_index(config.bldpkgs_dir)
    else:
        print("STOPPING BUILD BEFORE POST:", m.dist())
def get_dict(m=None, prefix=None):
    """Build the dict of environment variables exposed to build scripts.

    :param m: Package metadata, or None when no recipe context is available.
    :param prefix: Target prefix; defaults to ``config.build_prefix``.
    :return: dict of environment variable name -> string value, covering
        conda-build settings, proxy passthrough, CPU count, optional git
        info, and platform-specific compiler/path variables.
    """
    if not prefix:
        prefix = config.build_prefix

    python = config.build_python
    d = {'CONDA_BUILD': '1', 'PYTHONNOUSERSITE': '1'}
    d['CONDA_DEFAULT_ENV'] = config.build_prefix
    d['ARCH'] = str(cc.bits)
    d['PREFIX'] = prefix
    d['PYTHON'] = python
    d['PY3K'] = str(config.PY3K)
    d['STDLIB_DIR'] = get_stdlib_dir()
    d['SP_DIR'] = get_sp_dir()
    d['SYS_PREFIX'] = sys.prefix
    d['SYS_PYTHON'] = sys.executable
    d['PERL_VER'] = get_perl_ver()
    d['PY_VER'] = get_py_ver()
    # NPY_VER is only set when a numpy version is configured.
    if get_npy_ver():
        d['NPY_VER'] = get_npy_ver()
    d['SRC_DIR'] = source.get_dir()
    # Pass locale and proxy settings through to the build.
    if "LANG" in os.environ:
        d['LANG'] = os.environ['LANG']
    if "HTTPS_PROXY" in os.environ:
        d['HTTPS_PROXY'] = os.environ['HTTPS_PROXY']
    if "HTTP_PROXY" in os.environ:
        d['HTTP_PROXY'] = os.environ['HTTP_PROXY']

    if m:
        # Forward recipe-requested host env vars; mark missing ones clearly.
        for var_name in m.get_value('build/script_env', []):
            value = os.getenv(var_name)
            if value is None:
                value = '<UNDEFINED>'
            d[var_name] = value

    if sys.platform == "darwin":
        # multiprocessing.cpu_count() is not reliable on OSX
        # See issue #645 on github.com/conda/conda-build
        out, err = subprocess.Popen('sysctl -n hw.logicalcpu', shell=True,
                                    stdout=subprocess.PIPE).communicate()
        d['CPU_COUNT'] = out.decode('utf-8').strip()
    else:
        try:
            d['CPU_COUNT'] = str(multiprocessing.cpu_count())
        except NotImplementedError:
            d['CPU_COUNT'] = "1"

    # BUG FIX: previously this dereferenced m unconditionally
    # (`if m.get_value('source/git_url'):`) even though m defaults to None,
    # crashing with AttributeError when get_dict() is called without
    # metadata.  Guard on m, matching the script_env block above.
    if m and m.get_value('source/git_url'):
        d.update(**get_git_build_info(d['SRC_DIR']))

    d['PATH'] = dict(os.environ)['PATH']
    d = prepend_bin_path(d, prefix)

    if sys.platform == 'win32':         # -------- Windows
        d['SCRIPTS'] = join(prefix, 'Scripts')
        d['LIBRARY_PREFIX'] = join(prefix, 'Library')
        d['LIBRARY_BIN'] = join(d['LIBRARY_PREFIX'], 'bin')
        d['LIBRARY_INC'] = join(d['LIBRARY_PREFIX'], 'include')
        d['LIBRARY_LIB'] = join(d['LIBRARY_PREFIX'], 'lib')
        # This probably should be done more generally
        d['CYGWIN_PREFIX'] = prefix.replace('\\', '/').replace('C:', '/cygdrive/c')
        d['R'] = join(prefix, 'Scripts', 'R.exe')
    else:                               # -------- Unix
        d['HOME'] = os.getenv('HOME', 'UNKNOWN')
        d['PKG_CONFIG_PATH'] = join(prefix, 'lib', 'pkgconfig')
        d['R'] = join(prefix, 'bin', 'R')

    if sys.platform == 'darwin':        # -------- OSX
        d['OSX_ARCH'] = 'i386' if cc.bits == 32 else 'x86_64'
        d['CFLAGS'] = '-arch %(OSX_ARCH)s' % d
        d['CXXFLAGS'] = d['CFLAGS']
        d['LDFLAGS'] = d['CFLAGS']
        d['MACOSX_DEPLOYMENT_TARGET'] = '10.6'
    elif sys.platform.startswith('linux'):  # -------- Linux
        d['LD_RUN_PATH'] = prefix + '/lib'

    if m:
        d['PKG_NAME'] = m.name()
        d['PKG_VERSION'] = m.version()
        d['PKG_BUILDNUM'] = str(m.build_number())
        d['PKG_BUILD_STRING'] = str(m.build_id())
        d['RECIPE_DIR'] = m.path

    return d
def build(m, get_src=True, verbose=True, post=None, channel_urls=(), override_channels=False):
    """
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :param get_src: Should we download the source?
    :type get_src: bool
    :type post: bool or None. None means run the whole build. True means run
    post only. False means stop just before the post.
    """
    if m.get_value("build/detect_binary_files_with_prefix") or m.binary_has_prefix_files():
        # We must use a long prefix here as the package will only be
        # installable into prefixes shorter than this one.
        config.use_long_build_prefix = True
    else:
        # In case there are multiple builds in the same process
        config.use_long_build_prefix = False

    if post in [False, None]:
        print("Removing old build directory")
        rm_rf(config.short_build_prefix)
        rm_rf(config.long_build_prefix)
        print("Removing old work directory")
        rm_rf(source.WORK_DIR)

        # Display the name only
        # Version number could be missing due to dependency on source info.
        print("BUILD START:", m.dist())
        create_env(
            config.build_prefix,
            [ms.spec for ms in m.ms_depends("build")],
            verbose=verbose,
            channel_urls=channel_urls,
            override_channels=override_channels,
        )

        # If the package being built is already installed as a build
        # dependency, remove it so we do not package the stale copy.
        if m.name() in [i.rsplit("-", 2)[0] for i in linked(config.build_prefix)]:
            print("%s is installed as a build dependency. Removing." % m.name())
            index = get_build_index(clear_cache=False, channel_urls=channel_urls,
                                    override_channels=override_channels)
            actions = plan.remove_actions(config.build_prefix, [m.name()], index=index)
            assert not plan.nothing_to_do(actions), actions
            plan.display_actions(actions, index)
            plan.execute_actions(actions, index)

        if get_src:
            source.provide(m.path, m.get_section("source"))
        # Parse our metadata again because we did not initialize the source
        # information before.
        m.parse_again()

        print("Package:", m.dist())

        assert isdir(source.WORK_DIR)
        src_dir = source.get_dir()
        contents = os.listdir(src_dir)
        if contents:
            print("source tree in:", src_dir)
        else:
            print("no source")

        rm_rf(config.info_dir)
        files1 = prefix_files()
        # NOTE(review): in this variant always_include_files entries are
        # treated as regular expressions (re.match), not globs — other
        # variants in this file use fnmatch; confirm which is intended.
        for rx in m.always_include_files():
            pat = re.compile(rx)
            has_matches = False
            for f in set(files1):
                if pat.match(f):
                    print("Including in package existing file", f)
                    files1.discard(f)
                    has_matches = True
            if not has_matches:
                sys.exit("Error: Regex %s from always_include_files does not match any files" % rx)
        # Save this for later
        with open(join(config.croot, "prefix_files.txt"), "w") as f:
            f.write(u"\n".join(sorted(list(files1))))
            f.write(u"\n")

        if sys.platform == "win32":
            import conda_build.windows as windows
            windows.build(m)
        else:
            env = environ.get_dict(m)
            build_file = join(m.path, "build.sh")

            # An inline build/script in meta.yaml overrides build.sh.
            script = m.get_value("build/script", None)
            if script:
                if isinstance(script, list):
                    script = "\n".join(script)
                build_file = join(source.get_dir(), "conda_build.sh")
                with open(build_file, "w") as bf:
                    bf.write(script)
                os.chmod(build_file, 0o766)

            if exists(build_file):
                cmd = ["/bin/bash", "-x", "-e", build_file]
                _check_call(cmd, env=env, cwd=src_dir)

    if post in [True, None]:
        # post == True means a post-only run: reload the pre-build file
        # snapshot from disk instead of the in-memory files1.
        if post == True:
            with open(join(config.croot, "prefix_files.txt"), "r") as f:
                files1 = set(f.read().splitlines())

        get_build_metadata(m)
        create_post_scripts(m)
        create_entry_points(m.get_value("build/entry_points"))
        assert not exists(config.info_dir)
        files2 = prefix_files()

        post_process(sorted(files2 - files1),
                     preserve_egg_dir=bool(m.get_value("build/preserve_egg_dir")))

        # The post processing may have deleted some files (like easy-install.pth)
        files2 = prefix_files()
        # Refuse to package anything written into conda-meta — a sign that
        # conda itself was run inside the build script.
        assert not any(config.meta_dir in join(config.build_prefix, f)
                       for f in files2 - files1)
        post_build(m, sorted(files2 - files1))
        create_info_files(m, sorted(files2 - files1), include_recipe=bool(m.path))
        if m.get_value("build/noarch_python"):
            import conda_build.noarch_python as noarch_python
            noarch_python.transform(m, sorted(files2 - files1))
        # Re-scan after info files / noarch transform so they are packaged too.
        files3 = prefix_files()
        fix_permissions(files3 - files1)

        path = bldpkg_path(m)
        t = tarfile.open(path, "w:bz2")
        for f in sorted(files3 - files1):
            t.add(join(config.build_prefix, f), f)
        t.close()

        print("BUILD END:", m.dist())

        # we're done building, perform some checks
        tarcheck.check_all(path)
        update_index(config.bldpkgs_dir)
    else:
        print("STOPPING BUILD BEFORE POST:", m.dist())
def build(m, get_src=True, verbose=True, post=None, channel_urls=(),
          override_channels=False, include_recipe=True):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :param get_src: Should we download the source?
    :type get_src: bool
    :type post: bool or None. None means run the whole build. True means run
    post only. False means stop just before the post.
    '''
    if (m.get_value('build/detect_binary_files_with_prefix') or
            m.binary_has_prefix_files()):
        # We must use a long prefix here as the package will only be
        # installable into prefixes shorter than this one.
        config.use_long_build_prefix = True
    else:
        # In case there are multiple builds in the same process
        config.use_long_build_prefix = False

    if post in [False, None]:
        print("Removing old build environment")
        if on_win:
            # rm_rf fails on Windows when files are in use; move aside instead.
            if isdir(config.short_build_prefix):
                move_to_trash(config.short_build_prefix, '')
            if isdir(config.long_build_prefix):
                move_to_trash(config.long_build_prefix, '')
        else:
            rm_rf(config.short_build_prefix)
            rm_rf(config.long_build_prefix)
        print("Removing old work directory")
        if on_win:
            if isdir(source.WORK_DIR):
                move_to_trash(source.WORK_DIR, '')
        else:
            rm_rf(source.WORK_DIR)

        # Display the name only
        # Version number could be missing due to dependency on source info.
        print("BUILD START:", m.dist())
        create_env(config.build_prefix,
                   [ms.spec for ms in m.ms_depends('build')],
                   verbose=verbose, channel_urls=channel_urls,
                   override_channels=override_channels)

        # If the package being built is already installed as a build
        # dependency, remove it so we do not package the stale copy.
        if m.name() in [i.rsplit('-', 2)[0] for i in linked(config.build_prefix)]:
            print("%s is installed as a build dependency. Removing."
                  % m.name())
            index = get_build_index(clear_cache=False, channel_urls=channel_urls,
                                    override_channels=override_channels)
            actions = plan.remove_actions(config.build_prefix, [m.name()], index=index)
            assert not plan.nothing_to_do(actions), actions
            plan.display_actions(actions, index)
            plan.execute_actions(actions, index)

        if get_src:
            source.provide(m.path, m.get_section('source'))
        # Parse our metadata again because we did not initialize the source
        # information before.
        m.parse_again()

        print("Package:", m.dist())

        assert isdir(source.WORK_DIR)
        src_dir = source.get_dir()
        contents = os.listdir(src_dir)
        if contents:
            print("source tree in:", src_dir)
        else:
            print("no source")

        rm_rf(config.info_dir)
        files1 = prefix_files()
        # always_include_files patterns are globs; each must match something.
        for pat in m.always_include_files():
            has_matches = False
            for f in set(files1):
                if fnmatch.fnmatch(f, pat):
                    print("Including in package existing file", f)
                    files1.discard(f)
                    has_matches = True
            if not has_matches:
                sys.exit("Error: Glob %s from always_include_files does not match any files" % pat)
        # Save this for later
        with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
            f.write(u'\n'.join(sorted(list(files1))))
            f.write(u'\n')

        if sys.platform == 'win32':
            import conda_build.windows as windows
            windows.build(m)
        else:
            env = environ.get_dict(m)
            build_file = join(m.path, 'build.sh')

            # An inline build/script in meta.yaml overrides build.sh.
            script = m.get_value('build/script', None)
            if script:
                if isinstance(script, list):
                    script = '\n'.join(script)
                build_file = join(source.get_dir(), 'conda_build.sh')
                with open(build_file, 'w') as bf:
                    bf.write(script)
                os.chmod(build_file, 0o766)

            if isfile(build_file):
                cmd = ['/bin/bash', '-x', '-e', build_file]
                _check_call(cmd, env=env, cwd=src_dir)

    if post in [True, None]:
        # post == True means a post-only run: reload the pre-build file
        # snapshot from disk instead of the in-memory files1.
        if post == True:
            with open(join(config.croot, 'prefix_files.txt'), 'r') as f:
                files1 = set(f.read().splitlines())

        get_build_metadata(m)
        create_post_scripts(m)
        create_entry_points(m.get_value('build/entry_points'))
        assert not exists(config.info_dir)
        files2 = prefix_files()

        post_process(sorted(files2 - files1),
                     preserve_egg_dir=bool(
                         m.get_value('build/preserve_egg_dir')))

        # The post processing may have deleted some files (like easy-install.pth)
        files2 = prefix_files()
        # Refuse to package anything written into conda-meta — a sign that
        # conda itself was run inside the build script.
        if any(config.meta_dir in join(config.build_prefix, f) for f in
               files2 - files1):
            sys.exit(indent("""Error: Untracked file(s) %s found in conda-meta directory.  This error
usually comes from using conda in the build script.  Avoid doing this, as it
can lead to packages that include their dependencies.""" %
                            (tuple(f for f in files2 - files1 if config.meta_dir in
                                   join(config.build_prefix, f)),)))
        post_build(m, sorted(files2 - files1))
        create_info_files(m, sorted(files2 - files1),
                          include_recipe=bool(m.path) and include_recipe)
        if m.get_value('build/noarch_python'):
            import conda_build.noarch_python as noarch_python
            noarch_python.transform(m, sorted(files2 - files1))
        # Re-scan after info files / noarch transform so they are packaged too.
        files3 = prefix_files()
        fix_permissions(files3 - files1)

        path = bldpkg_path(m)
        t = tarfile.open(path, 'w:bz2')
        for f in sorted(files3 - files1):
            t.add(join(config.build_prefix, f), f)
        t.close()

        print("BUILD END:", m.dist())

        # we're done building, perform some checks
        tarcheck.check_all(path)
        update_index(config.bldpkgs_dir)
    else:
        print("STOPPING BUILD BEFORE POST:", m.dist())
def get_dict(m=None, prefix=None):
    """Build the dict of environment variables exposed to build scripts.

    :param m: Package metadata, or None when no recipe context is available.
    :param prefix: Target prefix; defaults to ``config.build_prefix``.
    :return: dict of environment variable name -> string value (conda-build
        settings, proxy passthrough, CPU count, git info, and
        platform-specific compiler/path variables).
    """
    if not prefix:
        prefix = config.build_prefix
    python = config.build_python
    d = {'CONDA_BUILD': '1', 'PYTHONNOUSERSITE': '1'}
    d['CONDA_DEFAULT_ENV'] = config.build_prefix
    d['ARCH'] = str(cc.bits)
    d['PREFIX'] = prefix
    d['PYTHON'] = python
    d['PY3K'] = str(config.PY3K)
    d['STDLIB_DIR'] = get_stdlib_dir()
    d['SP_DIR'] = get_sp_dir()
    d['SYS_PREFIX'] = sys.prefix
    d['SYS_PYTHON'] = sys.executable
    d['PERL_VER'] = get_perl_ver()
    d['PY_VER'] = get_py_ver()
    d['NPY_VER'] = get_npy_ver()
    d['SRC_DIR'] = source.get_dir()
    # Pass locale and proxy settings through to the build.
    if "LANG" in os.environ:
        d['LANG'] = os.environ['LANG']
    if "HTTPS_PROXY" in os.environ:
        d['HTTPS_PROXY'] = os.environ['HTTPS_PROXY']
    if "HTTP_PROXY" in os.environ:
        d['HTTP_PROXY'] = os.environ['HTTP_PROXY']

    if m:
        # Forward recipe-requested host env vars; mark missing ones clearly.
        for var_name in m.get_value('build/script_env', []):
            value = os.getenv(var_name)
            if value is None:
                value = '<UNDEFINED>'
            d[var_name] = value

    try:
        d['CPU_COUNT'] = str(multiprocessing.cpu_count())
    except NotImplementedError:
        d['CPU_COUNT'] = "1"

    # NOTE(review): called unconditionally here — presumably
    # get_git_build_info returns an empty mapping for non-git sources;
    # verify, since other variants guard this on source/git_url.
    d.update(**get_git_build_info(d['SRC_DIR']))

    if sys.platform == 'win32':         # -------- Windows
        d['PATH'] = (join(prefix, 'Library', 'bin') + ';' + join(prefix) +
                     ';' + join(prefix, 'Scripts') + ';%PATH%')
        d['SCRIPTS'] = join(prefix, 'Scripts')
        d['LIBRARY_PREFIX'] = join(prefix, 'Library')
        d['LIBRARY_BIN'] = join(d['LIBRARY_PREFIX'], 'bin')
        d['LIBRARY_INC'] = join(d['LIBRARY_PREFIX'], 'include')
        d['LIBRARY_LIB'] = join(d['LIBRARY_PREFIX'], 'lib')
        # This probably should be done more generally
        d['CYGWIN_PREFIX'] = prefix.replace('\\', '/').replace('C:', '/cygdrive/c')
        d['R'] = join(prefix, 'Scripts', 'R.exe')
    else:                               # -------- Unix
        d['PATH'] = '%s/bin:%s' % (prefix, os.getenv('PATH'))
        d['HOME'] = os.getenv('HOME', 'UNKNOWN')
        d['PKG_CONFIG_PATH'] = join(prefix, 'lib', 'pkgconfig')
        d['INCLUDE_PATH'] = join(prefix, 'include')
        d['LIBRARY_PATH'] = join(prefix, 'lib')
        d['R'] = join(prefix, 'bin', 'R')

    if sys.platform == 'darwin':        # -------- OSX
        d['OSX_ARCH'] = 'i386' if cc.bits == 32 else 'x86_64'
        d['CFLAGS'] = '-arch %(OSX_ARCH)s' % d
        d['CXXFLAGS'] = d['CFLAGS']
        d['LDFLAGS'] = d['CFLAGS']
        d['MACOSX_DEPLOYMENT_TARGET'] = '10.6'
    elif sys.platform.startswith('linux'):  # -------- Linux
        d['LD_RUN_PATH'] = prefix + '/lib'

    if m:
        d['PKG_NAME'] = m.name()
        d['PKG_VERSION'] = m.version()
        d['PKG_BUILDNUM'] = str(m.build_number())
        d['RECIPE_DIR'] = m.path

    return d
def build(m, config, post=None, need_source_download=True, need_reparse_in_env=False):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :type post: bool or None. None means run the whole build. True means run
    post only. False means stop just before the post.
    :type keep_old_work: bool: Keep any previous work directory.
    :type need_source_download: bool: if rendering failed to download source
    (due to missing tools), retry here after build env is populated
    '''
    if m.skip():
        print_skip_message(m)
        return False

    if config.skip_existing:
        package_exists = is_package_built(m, config)
        if package_exists:
            print(m.dist(), "is already built in {0}, skipping.".format(package_exists))
            return False

    if post in [False, None]:
        print("BUILD START:", m.dist())
        if m.uses_jinja and (need_source_download or need_reparse_in_env):
            print(" (actual version deferred until further download or env creation)")

        specs = [ms.spec for ms in m.ms_depends('build')]
        create_env(config.build_prefix, specs, config=config)
        # If the recipe templates reference a VCS (git/hg/svn) that is not a
        # listed build dependency and not on disk, add it and rebuild the env.
        vcs_source = m.uses_vcs_in_build
        if vcs_source and vcs_source not in specs:
            vcs_executable = "hg" if vcs_source == "mercurial" else vcs_source
            has_vcs_available = os.path.isfile(
                external.find_executable(vcs_executable, config.build_prefix) or "")
            if not has_vcs_available:
                if (vcs_source != "mercurial" or
                        not any(spec.startswith('python') and "3." in spec
                                for spec in specs)):
                    specs.append(vcs_source)

                    log.warn("Your recipe depends on %s at build time (for templates), "
                             "but you have not listed it as a build dependency. Doing "
                             "so for this build.", vcs_source)

                    # Display the name only
                    # Version number could be missing due to dependency on source info.
                    create_env(config.build_prefix, specs, config=config)
                else:
                    raise ValueError("Your recipe uses mercurial in build, but mercurial"
                                     " does not yet support Python 3. Please handle all of "
                                     "your mercurial actions outside of your build script.")

        if need_source_download:
            # Execute any commands fetching the source (e.g., git) in the _build environment.
            # This makes it possible to provide source fetchers (eg. git, hg, svn) as build
            # dependencies.
            with path_prepended(config.build_prefix):
                m, need_source_download, need_reparse_in_env = parse_or_try_download(
                    m, no_download_source=False, force_download=True, config=config)
            assert not need_source_download, "Source download failed. Please investigate."
            if m.uses_jinja:
                print("BUILD START (revised):", m.dist())

        if need_reparse_in_env:
            reparse(m, config=config)
            print("BUILD START (revised):", m.dist())

        # If the package being built is already installed as a build
        # dependency, remove it so we do not package the stale copy.
        if m.name() in [i.rsplit('-', 2)[0] for i in linked(config.build_prefix)]:
            print("%s is installed as a build dependency. Removing." % m.name())
            index = get_build_index(config=config, clear_cache=False)
            actions = plan.remove_actions(config.build_prefix, [m.name()], index=index)
            assert not plan.nothing_to_do(actions), actions
            plan.display_actions(actions, index)
            plan.execute_actions(actions, index)

        print("Package:", m.dist())

        # Serialize concurrent builds touching the same build folder.
        with filelock.SoftFileLock(join(config.build_folder, ".conda_lock"),
                                   timeout=config.timeout):
            # get_dir here might be just work, or it might be one level deeper,
            # dependening on the source.
            src_dir = source.get_dir(config)
            if isdir(src_dir):
                print("source tree in:", src_dir)
            else:
                print("no source - creating empty work folder")
                os.makedirs(src_dir)

            rm_rf(config.info_dir)
            files1 = prefix_files(prefix=config.build_prefix)
            # always_include_files patterns are globs; warn (not exit) when
            # a pattern matches nothing in this variant.
            for pat in m.always_include_files():
                has_matches = False
                for f in set(files1):
                    if fnmatch.fnmatch(f, pat):
                        print("Including in package existing file", f)
                        files1.discard(f)
                        has_matches = True
                if not has_matches:
                    log.warn("Glob %s from always_include_files does not match any files", pat)
            # Save this for later
            with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
                f.write(u'\n'.join(sorted(list(files1))))
                f.write(u'\n')

            # Use script from recipe?
            script = m.get_value('build/script', None)
            if script:
                if isinstance(script, list):
                    script = '\n'.join(script)

            if isdir(src_dir):
                if on_win:
                    build_file = join(m.path, 'bld.bat')
                    if script:
                        build_file = join(src_dir, 'bld.bat')
                        with open(build_file, 'w') as bf:
                            bf.write(script)
                    import conda_build.windows as windows
                    windows.build(m, build_file, config=config)
                else:
                    build_file = join(m.path, 'build.sh')

                    # There is no sense in trying to run an empty build script.
                    if isfile(build_file) or script:
                        with path_prepended(config.build_prefix):
                            env = environ.get_dict(config=config, m=m)
                        env["CONDA_BUILD_STATE"] = "BUILD"
                        work_file = join(source.get_dir(config), 'conda_build.sh')
                        if script:
                            with open(work_file, 'w') as bf:
                                bf.write(script)
                        if config.activate:
                            # Prepend an activation line so the script runs in
                            # the build prefix.
                            if isfile(build_file):
                                data = open(build_file).read()
                            else:
                                data = open(work_file).read()
                            with open(work_file, 'w') as bf:
                                bf.write("source {conda_root}activate {build_prefix} &> "
                                         "/dev/null\n".format(
                                             conda_root=root_script_dir + os.path.sep,
                                             build_prefix=config.build_prefix))
                                bf.write(data)
                        else:
                            if not isfile(work_file):
                                copy_into(build_file, work_file, config.timeout)
                        os.chmod(work_file, 0o766)

                        if isfile(work_file):
                            cmd = [shell_path, '-x', '-e', work_file]
                            # this should raise if any problems occur while building
                            _check_call(cmd, env=env, cwd=src_dir)

    if post in [True, None]:
        # Post-only run: reload the pre-build file snapshot from disk.
        if post:
            with open(join(config.croot, 'prefix_files.txt'), 'r') as f:
                files1 = set(f.read().splitlines())

        get_build_metadata(m, config=config)
        create_post_scripts(m, config=config)
        create_entry_points(m.get_value('build/entry_points'), config=config)
        files2 = prefix_files(prefix=config.build_prefix)

        post_process(sorted(files2 - files1),
                     prefix=config.build_prefix,
                     config=config,
                     preserve_egg_dir=bool(m.get_value('build/preserve_egg_dir')))

        # The post processing may have deleted some files (like easy-install.pth)
        files2 = prefix_files(prefix=config.build_prefix)
        # Refuse to package anything written into conda-meta — a sign that
        # conda itself was run inside the build script.
        if any(config.meta_dir in join(config.build_prefix, f) for f in files2 - files1):
            meta_files = (tuple(f for f in files2 - files1
                                if config.meta_dir in join(config.build_prefix, f)),)
            sys.exit(indent("""Error: Untracked file(s) %s found in conda-meta directory.
This error usually comes from using conda in the build script.  Avoid doing this, as it
can lead to packages that include their dependencies.""" % meta_files))
        post_build(m, sorted(files2 - files1),
                   prefix=config.build_prefix,
                   build_python=config.build_python,
                   croot=config.croot)
        create_info_files(m, sorted(files2 - files1), config=config,
                          prefix=config.build_prefix)
        if m.get_value('build/noarch_python'):
            import conda_build.noarch_python as noarch_python
            noarch_python.transform(m, sorted(files2 - files1), config.build_prefix)

        # Re-scan after info files / noarch transform so they are packaged too.
        files3 = prefix_files(prefix=config.build_prefix)
        fix_permissions(files3 - files1, config.build_prefix)

        path = bldpkg_path(m, config)

        # lock the output directory while we build this file
        # create the tarball in a temporary directory to minimize lock time
        with TemporaryDirectory() as tmp:
            tmp_path = os.path.join(tmp, os.path.basename(path))
            t = tarfile.open(tmp_path, 'w:bz2')

            def order(f):
                # we don't care about empty files so send them back via 100000
                fsize = os.stat(join(config.build_prefix, f)).st_size or 100000
                # info/* records will be False == 0, others will be 1.
                info_order = int(os.path.dirname(f) != 'info')
                return info_order, fsize

            # add files in order of a) in info directory, b) increasing size so
            # we can access small manifest or json files without decompressing
            # possible large binary or data files
            for f in sorted(files3 - files1, key=order):
                t.add(join(config.build_prefix, f), f)
            t.close()

            # we're done building, perform some checks
            tarcheck.check_all(tmp_path)
            copy_into(tmp_path, path, config.timeout)
        update_index(config.bldpkgs_dir, config)
    else:
        print("STOPPING BUILD BEFORE POST:", m.dist())

    # returning true here says package is OK to test
    return True
def create_info_files(m, files, include_recipe=True):
    '''
    Creates the metadata files that will be stored in the built package.

    :param m: Package metadata
    :type m: Metadata
    :param files: Paths to files to include in package
    :type files: list of str
    :param include_recipe: Whether or not to include the recipe (True by default)
    :type include_recipe: bool
    '''
    if not isdir(config.info_dir):
        os.makedirs(config.info_dir)

    if include_recipe:
        # Copy the full recipe directory (minus dotfiles) into info/recipe.
        recipe_dir = join(config.info_dir, 'recipe')
        os.makedirs(recipe_dir)

        for fn in os.listdir(m.path):
            if fn.startswith('.'):
                continue
            src_path = join(m.path, fn)
            dst_path = join(recipe_dir, fn)
            if isdir(src_path):
                shutil.copytree(src_path, dst_path)
            else:
                shutil.copy(src_path, dst_path)

        # store the rendered meta.yaml file, plus information about where it came from
        # and what version of conda-build created it
        metayaml = output_yaml(m)
        with open(join(recipe_dir, "meta.yaml.rendered"), 'w') as f:
            f.write("# This file created by conda-build {}\n".format(__version__))
            f.write("# meta.yaml template originally from:\n")
            f.write("# " + source.get_repository_info(m.path) + "\n")
            f.write("# ------------------------------------------------\n\n")
            f.write(metayaml)

    license_file = m.get_value('about/license_file')
    if license_file:
        shutil.copyfile(join(source.get_dir(), license_file),
                        join(config.info_dir, 'LICENSE.txt'))

    readme = m.get_value('about/readme')
    if readme:
        src = join(source.get_dir(), readme)
        if not isfile(src):
            sys.exit("Error: no readme file: %s" % readme)
        dst = join(config.info_dir, readme)
        shutil.copyfile(src, dst)
        if os.path.split(readme)[1] not in {"README.md", "README.rst", "README"}:
            print("WARNING: anaconda.org only recognizes about/readme as README.md and README.rst",
                  file=sys.stderr)  # noqa

    info_index = m.info_index()
    pin_depends = m.get_value('build/pin_depends')
    if pin_depends:
        # Record the exact runtime dist list so the env can be reproduced
        # with `conda create --file`.
        dists = get_run_dists(m)
        with open(join(config.info_dir, 'requires'), 'w') as fo:
            fo.write("""\
# This file as created when building:
#
#     %s.tar.bz2  (on '%s')
#
# It can be used to create the runtime environment of this package using:
# $ conda create --name <env> --file <this file>
""" % (m.dist(), cc.subdir))
            for dist in sorted(dists + [m.dist()]):
                fo.write('%s\n' % '='.join(dist.rsplit('-', 2)))
        if pin_depends == 'strict':
            # Strict pinning replaces the loose depends in index.json with
            # exact name=version=build entries.
            info_index['depends'] = [' '.join(dist.rsplit('-', 2))
                                     for dist in dists]

    # Deal with Python 2 and 3's different json module type reqs
    mode_dict = {'mode': 'w', 'encoding': 'utf-8'} if PY3 else {'mode': 'wb'}
    with open(join(config.info_dir, 'index.json'), **mode_dict) as fo:
        json.dump(info_index, fo, indent=2, sort_keys=True)

    with open(join(config.info_dir, 'about.json'), 'w') as fo:
        d = {}
        for key in ('home', 'dev_url', 'doc_url', 'license_url',
                    'license', 'summary', 'description', 'license_family'):
            value = m.get_value('about/%s' % key)
            if value:
                d[key] = value
        json.dump(d, fo, indent=2, sort_keys=True)

    if on_win:
        # make sure we use '/' path separators in metadata
        files = [_f.replace('\\', '/') for _f in files]

    with open(join(config.info_dir, 'files'), **mode_dict) as fo:
        if m.get_value('build/noarch_python'):
            fo.write('\n')
        else:
            for f in files:
                fo.write(f + '\n')

    # files_with_prefix entries are (placeholder_prefix, mode, filename) tuples.
    files_with_prefix = sorted(have_prefix_files(files))
    binary_has_prefix_files = m.binary_has_prefix_files()
    text_has_prefix_files = m.has_prefix_files()

    ignore_files = m.ignore_prefix_files()
    if ignore_files:
        # do we have a list of files, or just ignore everything?
        if hasattr(ignore_files, "__iter__"):
            files_with_prefix = [f for f in files_with_prefix
                                 if f[2] not in ignore_files]
            # NOTE(review): f[2] indexing assumes these entries are also
            # (prefix, mode, filename) tuples — verify against the Metadata
            # accessors, which may return plain filename lists.
            binary_has_prefix_files = [f for f in binary_has_prefix_files
                                       if f[2] not in ignore_files]  # noqa
            text_has_prefix_files = [f for f in text_has_prefix_files
                                     if f[2] not in ignore_files]
        else:
            files_with_prefix = []

    if files_with_prefix and not m.get_value('build/noarch_python'):
        auto_detect = m.get_value('build/detect_binary_files_with_prefix')
        if on_win:
            # Paths on Windows can contain spaces, so we need to quote the
            # paths. Fortunately they can't contain quotes, so we don't have
            # to worry about nested quotes.
            fmt_str = '"%s" %s "%s"\n'
        else:
            # Don't do it everywhere because paths on Unix can contain quotes,
            # and we don't have a good method of escaping, and because older
            # versions of conda don't support quotes in has_prefix
            fmt_str = '%s %s %s\n'
        with open(join(config.info_dir, 'has_prefix'), 'w') as fo:
            for pfix, mode, fn in files_with_prefix:
                if (fn in text_has_prefix_files):
                    # register for text replacement, regardless of mode
                    fo.write(fmt_str % (pfix, 'text', fn))
                    text_has_prefix_files.remove(fn)
                elif ((mode == 'binary') and (fn in binary_has_prefix_files)):
                    print("Detected hard-coded path in binary file %s" % fn)
                    fo.write(fmt_str % (pfix, mode, fn))
                    binary_has_prefix_files.remove(fn)
                elif (auto_detect or (mode == 'text')):
                    print("Detected hard-coded path in %s file %s" % (mode, fn))
                    fo.write(fmt_str % (pfix, mode, fn))
                else:
                    print("Ignored hard-coded path in %s" % fn)

    # make sure we found all of the files expected
    errstr = ""
    for f in text_has_prefix_files:
        errstr += "Did not detect hard-coded path in %s from has_prefix_files\n" % f
    for f in binary_has_prefix_files:
        errstr += "Did not detect hard-coded path in %s from binary_has_prefix_files\n" % f
    if errstr:
        raise RuntimeError(errstr)

    no_link = m.get_value('build/no_link')
    if no_link:
        if not isinstance(no_link, list):
            no_link = [no_link]
        with open(join(config.info_dir, 'no_link'), 'w') as fo:
            for f in files:
                if any(fnmatch.fnmatch(f, p) for p in no_link):
                    fo.write(f + '\n')

    if m.get_value('source/git_url'):
        # Record git revision info for traceability of the built package.
        with io.open(join(config.info_dir, 'git'), 'w', encoding='utf-8') as fo:
            source.git_info(fo)

    if m.get_value('app/icon'):
        shutil.copyfile(join(m.path, m.get_value('app/icon')),
                        join(config.info_dir, 'icon.png'))
def execute(args, parser):
    """Drive ``conda build`` for already-parsed command-line ``args``.

    Expands multi-valued --python/--numpy/--perl/--R options by recursing once
    per requested version, then processes each recipe argument: check, output
    path, test, fetch source, or full build + optional upload.  When a build
    fails because a dependency package is missing, a sibling recipe directory
    matching the dependency name is queued and the original recipe is retried.

    :param args: parsed argparse namespace for the build subcommand
    :param parser: argparse parser, used only to report option errors
    """
    import sys
    import shutil
    import tarfile
    import tempfile
    from os import makedirs
    from os.path import abspath, isdir, isfile
    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    check_external()

    # change globals in build module, see comment there as well
    build.channel_urls = args.channel or ()
    build.override_channels = args.override_channels
    build.verbose = not args.quiet

    if on_win:
        try:
            # needs to happen before any c extensions are imported that might be
            # hard-linked by files in the trash. one of those is markupsafe,
            # used by jinja2. see https://github.com/conda/conda-build/pull/520
            delete_trash(None)
        except:
            # when we can't delete the trash, don't crash on AssertionError,
            # instead inform the user and try again next time.
            # see https://github.com/conda/conda-build/pull/744
            warnings.warn("Cannot delete trash; some c extension has been "
                          "imported that is hard-linked by files in the trash. "
                          "Will try again on next run.")

    # Maps language option name -> config attribute / env var name.
    conda_version = {
        'python': 'CONDA_PY',
        'numpy': 'CONDA_NPY',
        'perl': 'CONDA_PERL',
        'R': 'CONDA_R',
    }

    for lang in ['python', 'numpy', 'perl', 'R']:
        versions = getattr(args, lang)
        if not versions:
            continue
        if versions == ['all']:
            if all_versions[lang]:
                versions = all_versions[lang]
            else:
                parser.error("'all' is not supported for --%s" % lang)
        if len(versions) > 1:
            # Recurse once per version, restoring the full list afterwards so
            # every combination across languages gets built.
            for ver in versions[:]:
                setattr(args, lang, [str(ver)])
                execute(args, parser)
                # This is necessary to make all combinations build.
                setattr(args, lang, versions)
            return
        else:
            version = versions[0]
            if lang in ('python', 'numpy'):
                # e.g. '3.5' -> 35; config stores these as two/three digit ints
                version = int(version.replace('.', ''))
            setattr(config, conda_version[lang], version)
            if not len(str(version)) in (2, 3) and lang in ['python', 'numpy']:
                if all_versions[lang]:
                    raise RuntimeError("%s must be major.minor, like %s, not %s" %
                                       (conda_version[lang],
                                        all_versions[lang][-1] / 10, version))
                else:
                    raise RuntimeError("%s must be major.minor, not %s" %
                                       (conda_version[lang], version))

    # Using --python, --numpy etc. is equivalent to using CONDA_PY, CONDA_NPY, etc.
    # Auto-set those env variables
    for var in conda_version.values():
        if getattr(config, var):
            # Set the env variable.
            os_environ[var] = str(getattr(config, var))

    if args.skip_existing:
        if not isdir(config.bldpkgs_dir):
            makedirs(config.bldpkgs_dir)
        update_index(config.bldpkgs_dir)
        index = build.get_build_index(clear_cache=True)

    already_built = []
    to_build_recursive = []
    with Locked(config.croot):
        # deque so failed recipes can be re-queued at the front after their
        # missing dependencies are built first.
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding() or 'utf-8')
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    # Recipe supplied as a tarball: unpack to a temp dir.
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            try:
                m = MetaData(recipe_dir)
                if m.get_value('build/noarch_python'):
                    config.noarch = True
            except exceptions.YamlParsingError as e:
                sys.stderr.write(e.error_msg())
                sys.exit(1)

            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.skip_existing:
                if m.pkg_fn() in index or m.pkg_fn() in already_built:
                    print("%s is already built, skipping." % m.dist())
                    continue
            if m.skip():
                print("Skipped: The %s recipe defines build/skip for this "
                      "configuration." % m.dist())
                continue
            if args.output:
                try:
                    m.parse_again(permit_undefined_jinja=False)
                except SystemExit:
                    # Something went wrong; possibly due to undefined GIT_ jinja variables.
                    # Maybe we need to actually download the source in order to resolve the build_id.
                    source.provide(m.path, m.get_section('source'))
                    # Parse our metadata again because we did not initialize the source
                    # information before.
                    m.parse_again(permit_undefined_jinja=False)
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m, move_broken=False)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                try:
                    build.build(m, post=post,
                                include_recipe=args.include_recipe)
                except (RuntimeError, SystemExit) as e:
                    error_str = str(e)
                    if error_str.startswith('No packages found') or error_str.startswith('Could not find some'):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(': ')[1]
                        # Handle package names that contain version deps.
                        if ' ' in dep_pkg:
                            dep_pkg = dep_pkg.split(' ')[0]
                        recipe_glob = glob(dep_pkg + '-[v0-9][0-9.]*')
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            # Re-queue the failing recipe behind its dependency.
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                if dep_pkg in to_build_recursive:
                                    # Already tried building this dependency once;
                                    # give up instead of looping forever.
                                    sys.exit(str(e))
                                print(("Missing dependency {0}, but found" +
                                       " recipe directory, so building " +
                                       "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                                to_build_recursive.append(dep_pkg)
                        else:
                            raise
                    elif error_str.strip().startswith("Hint:"):
                        lines = [line for line in error_str.splitlines() if
                                 line.strip().startswith('- ')]
                        pkgs = [line.lstrip('- ') for line in lines]
                        # Typically if a conflict is with one of these
                        # packages, the other package needs to be rebuilt
                        # (e.g., a conflict with 'python 3.5*' and 'x' means
                        # 'x' isn't build for Python 3.5 and needs to be
                        # rebuilt).
                        skip_names = ['python', 'r']
                        pkgs = [pkg for pkg in pkgs if pkg.split(' ')[0] not in skip_names]
                        for pkg in pkgs:
                            # Handle package names that contain version deps.
                            if ' ' in pkg:
                                pkg = pkg.split(' ')[0]
                            recipe_glob = glob(pkg + '-[v0-9][0-9.]*')
                            if exists(pkg):
                                recipe_glob.append(pkg)
                            if recipe_glob:
                                recipes.appendleft(arg)
                                try_again = True
                                for recipe_dir in recipe_glob:
                                    if pkg in to_build_recursive:
                                        sys.exit(str(e))
                                    print(error_str)
                                    print(("Missing dependency {0}, but found" +
                                           " recipe directory, so building " +
                                           "{0} first").format(pkg))
                                    recipes.appendleft(recipe_dir)
                                    to_build_recursive.append(pkg)
                            else:
                                raise
                    else:
                        raise

                if try_again:
                    continue

                if not args.notest:
                    build.test(m)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)

            already_built.append(m.pkg_fn())
def create_info_files(m, files, include_recipe=True):
    '''
    Creates the metadata files that will be stored in the built package.

    Writes the info/ directory contents: the recipe copy, readme, index.json,
    recipe.json, the file manifest, has_prefix records, no_link entries, git
    info, and the app icon.

    :param m: Package metadata
    :type m: Metadata
    :param files: Paths to files to include in package
    :type files: list of str
    :param include_recipe: Whether or not to include the recipe (True by default)
    :type include_recipe: bool
    '''
    recipe_dir = join(config.info_dir, 'recipe')
    os.makedirs(recipe_dir)

    if include_recipe:
        # Copy the full recipe directory (skipping dotfiles) into info/recipe.
        for fn in os.listdir(m.path):
            if fn.startswith('.'):
                continue
            src_path = join(m.path, fn)
            dst_path = join(recipe_dir, fn)
            if isdir(src_path):
                shutil.copytree(src_path, dst_path)
            else:
                shutil.copy(src_path, dst_path)

    if isfile(join(recipe_dir, 'meta.yaml')):
        # Preserve the original meta.yaml and replace it with the fully
        # rendered metadata.
        shutil.move(join(recipe_dir, 'meta.yaml'),
                    join(recipe_dir, 'meta.yaml.orig'))
        with open(join(recipe_dir, 'meta.yaml'), 'w') as fo:
            yaml.safe_dump(m.meta, fo)

    readme = m.get_value('about/readme')
    if readme:
        src = join(source.get_dir(), readme)
        if not os.path.exists(src):
            sys.exit("Error: Could not find the readme: %s" % readme)
        dst = join(config.info_dir, readme)
        shutil.copy(src, dst)
        if os.path.split(readme)[1] not in {"README.md", "README.rst", "README"}:
            print("WARNING: Binstar only recognizes about/readme as README.md and README.rst",
                  file=sys.stderr)

    # Deal with Python 2 and 3's different json module type reqs
    mode_dict = {'mode': 'w', 'encoding': 'utf-8'} if PY3 else {'mode': 'wb'}
    with open(join(config.info_dir, 'index.json'), **mode_dict) as fo:
        json.dump(m.info_index(), fo, indent=2, sort_keys=True)

    with open(join(config.info_dir, 'recipe.json'), **mode_dict) as fo:
        json.dump(m.meta, fo, indent=2, sort_keys=True)

    if sys.platform == 'win32':
        # make sure we use '/' path separators in metadata
        files = [f.replace('\\', '/') for f in files]

    with open(join(config.info_dir, 'files'), 'w') as fo:
        if m.get_value('build/noarch_python'):
            fo.write('\n')
        else:
            for f in files:
                fo.write(f + '\n')

    files_with_prefix = sorted(have_prefix_files(files))
    binary_has_prefix_files = m.binary_has_prefix_files()
    text_has_prefix_files = m.has_prefix_files()
    if files_with_prefix and not m.get_value('build/noarch_python'):
        auto_detect = m.get_value('build/detect_binary_files_with_prefix')
        if sys.platform == 'win32':
            # Paths on Windows can contain spaces, so we need to quote the
            # paths. Fortunately they can't contain quotes, so we don't have
            # to worry about nested quotes.
            fmt_str = '"%s" %s "%s"\n'
        else:
            # Don't do it everywhere because paths on Unix can contain quotes,
            # and we don't have a good method of escaping, and because older
            # versions of conda don't support quotes in has_prefix
            fmt_str = '%s %s %s\n'
        with open(join(config.info_dir, 'has_prefix'), 'w') as fo:
            for pfix, mode, fn in files_with_prefix:
                if (fn in text_has_prefix_files):
                    # register for text replacement, regardless of mode
                    fo.write(fmt_str % (pfix, 'text', fn))
                    text_has_prefix_files.remove(fn)
                elif ((mode == 'binary') and (fn in binary_has_prefix_files)):
                    print("Detected hard-coded path in binary file %s" % fn)
                    fo.write(fmt_str % (pfix, mode, fn))
                    binary_has_prefix_files.remove(fn)
                elif (auto_detect or (mode == 'text')):
                    print("Detected hard-coded path in %s file %s" % (mode, fn))
                    fo.write(fmt_str % (pfix, mode, fn))
                else:
                    print("Ignored hard-coded path in %s" % fn)

    # make sure we found all of the files expected
    errstr = ""
    for f in text_has_prefix_files:
        errstr += "Did not detect hard-coded path in %s from has_prefix_files\n" % f
    for f in binary_has_prefix_files:
        errstr += "Did not detect hard-coded path in %s from binary_has_prefix_files\n" % f
    if errstr:
        raise RuntimeError(errstr)

    no_link = m.get_value('build/no_link')
    if no_link:
        if not isinstance(no_link, list):
            no_link = [no_link]
        with open(join(config.info_dir, 'no_link'), 'w') as fo:
            for f in files:
                if any(fnmatch.fnmatch(f, p) for p in no_link):
                    fo.write(f + '\n')

    if m.get_value('source/git_url'):
        with io.open(join(config.info_dir, 'git'), 'w', encoding='utf-8') as fo:
            source.git_info(fo)

    if m.get_value('app/icon'):
        shutil.copyfile(join(m.path, m.get_value('app/icon')),
                        join(config.info_dir, 'icon.png'))
def execute(args, parser):
    """Drive ``conda build`` for already-parsed command-line ``args``.

    Expands multi-valued --python/--numpy/--perl/--R options by recursing once
    per requested version, then processes each recipe argument: check, output
    path, test, fetch source, or full build + optional upload.  When a build
    fails because a dependency package is missing, a sibling recipe directory
    matching the dependency name is queued and the original recipe is retried.

    :param args: parsed argparse namespace for the build subcommand
    :param parser: argparse parser, used only to report option errors
    """
    import sys
    import shutil
    import tarfile
    import tempfile
    from os import makedirs
    from os.path import abspath, isdir, isfile
    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    check_external()

    # change globals in build module, see comment there as well
    build.channel_urls = args.channel or ()
    build.override_channels = args.override_channels
    build.verbose = not args.quiet

    if on_win:
        try:
            # needs to happen before any c extensions are imported that might be
            # hard-linked by files in the trash. one of those is markupsafe,
            # used by jinja2. see https://github.com/conda/conda-build/pull/520
            delete_trash(None)
        except:
            # when we can't delete the trash, don't crash on AssertionError,
            # instead inform the user and try again next time.
            # see https://github.com/conda/conda-build/pull/744
            warnings.warn("Cannot delete trash; some c extension has been "
                          "imported that is hard-linked by files in the trash. "
                          "Will try again on next run.")

    # Maps language option name -> config attribute / env var name.
    conda_version = {
        'python': 'CONDA_PY',
        'numpy': 'CONDA_NPY',
        'perl': 'CONDA_PERL',
        'R': 'CONDA_R',
    }

    for lang in ['python', 'numpy', 'perl', 'R']:
        versions = getattr(args, lang)
        if not versions:
            continue
        if versions == ['all']:
            if all_versions[lang]:
                versions = all_versions[lang]
            else:
                parser.error("'all' is not supported for --%s" % lang)
        if len(versions) > 1:
            # Recurse once per version, restoring the full list afterwards so
            # every combination across languages gets built.
            for ver in versions[:]:
                setattr(args, lang, [str(ver)])
                execute(args, parser)
                # This is necessary to make all combinations build.
                setattr(args, lang, versions)
            return
        else:
            version = versions[0]
            if lang in ('python', 'numpy'):
                # e.g. '3.5' -> 35; config stores these as two/three digit ints
                version = int(version.replace('.', ''))
            setattr(config, conda_version[lang], version)
            if not len(str(version)) in (2, 3) and lang in ['python', 'numpy']:
                if all_versions[lang]:
                    raise RuntimeError("%s must be major.minor, like %s, not %s" %
                                       (conda_version[lang],
                                        all_versions[lang][-1] / 10, version))
                else:
                    raise RuntimeError("%s must be major.minor, not %s" %
                                       (conda_version[lang], version))

    # Using --python, --numpy etc. is equivalent to using CONDA_PY, CONDA_NPY, etc.
    # Auto-set those env variables
    for var in conda_version.values():
        if getattr(config, var):
            # Set the env variable.
            os_environ[var] = str(getattr(config, var))

    if args.skip_existing:
        if not isdir(config.bldpkgs_dir):
            makedirs(config.bldpkgs_dir)
        update_index(config.bldpkgs_dir)
        index = build.get_build_index(clear_cache=True)

    already_built = []
    to_build_recursive = []
    with Locked(config.croot):
        # deque so failed recipes can be re-queued at the front after their
        # missing dependencies are built first.
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding() or 'utf-8')
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    # Recipe supplied as a tarball: unpack to a temp dir.
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            try:
                m = MetaData(recipe_dir)
                if m.get_value('build/noarch_python'):
                    config.noarch = True
            except exceptions.YamlParsingError as e:
                sys.stderr.write(e.error_msg())
                sys.exit(1)

            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.skip_existing:
                if m.pkg_fn() in index or m.pkg_fn() in already_built:
                    print("%s is already built, skipping." % m.dist())
                    continue
            if m.skip():
                print("Skipped: The %s recipe defines build/skip for this "
                      "configuration." % m.dist())
                continue
            if args.output:
                try:
                    m.parse_again(permit_undefined_jinja=False)
                except SystemExit:
                    # Something went wrong; possibly due to undefined GIT_ jinja variables.
                    # Maybe we need to actually download the source in order to resolve the build_id.
                    source.provide(m.path, m.get_section('source'))
                    # Parse our metadata again because we did not initialize the source
                    # information before.
                    m.parse_again(permit_undefined_jinja=False)
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m, move_broken=False)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                try:
                    build.build(m, post=post,
                                include_recipe=args.include_recipe)
                except (RuntimeError, SystemExit) as e:
                    error_str = str(e)
                    if error_str.startswith('No packages found') or error_str.startswith('Could not find some'):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(': ')[1]
                        # Handle package names that contain version deps.
                        if ' ' in dep_pkg:
                            dep_pkg = dep_pkg.split(' ')[0]
                        recipe_glob = glob(dep_pkg + '-[v0-9][0-9.]*')
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            # Re-queue the failing recipe behind its dependency.
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                if dep_pkg in to_build_recursive:
                                    # Already tried building this dependency once;
                                    # give up instead of looping forever.
                                    sys.exit(str(e))
                                print(("Missing dependency {0}, but found" +
                                       " recipe directory, so building " +
                                       "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                                to_build_recursive.append(dep_pkg)
                        else:
                            raise
                    elif error_str.strip().startswith("Hint:"):
                        lines = [line for line in error_str.splitlines() if
                                 line.strip().startswith('- ')]
                        pkgs = [line.lstrip('- ') for line in lines]
                        # Typically if a conflict is with one of these
                        # packages, the other package needs to be rebuilt
                        # (e.g., a conflict with 'python 3.5*' and 'x' means
                        # 'x' isn't build for Python 3.5 and needs to be
                        # rebuilt).
                        skip_names = ['python', 'r']
                        pkgs = [pkg for pkg in pkgs if pkg.split(' ')[0] not in skip_names]
                        for pkg in pkgs:
                            # Handle package names that contain version deps.
                            if ' ' in pkg:
                                pkg = pkg.split(' ')[0]
                            recipe_glob = glob(pkg + '-[v0-9][0-9.]*')
                            if exists(pkg):
                                recipe_glob.append(pkg)
                            if recipe_glob:
                                recipes.appendleft(arg)
                                try_again = True
                                for recipe_dir in recipe_glob:
                                    if pkg in to_build_recursive:
                                        sys.exit(str(e))
                                    print(error_str)
                                    print(("Missing dependency {0}, but found" +
                                           " recipe directory, so building " +
                                           "{0} first").format(pkg))
                                    recipes.appendleft(recipe_dir)
                                    to_build_recursive.append(pkg)
                            else:
                                raise
                    else:
                        raise

                if try_again:
                    continue

                if not args.notest:
                    build.test(m)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)

            already_built.append(m.pkg_fn())
def copy_license(m, config):
    """Copy the recipe's ``about/license_file`` (if declared) into the
    package's info directory as ``LICENSE.txt``.

    :param m: Package metadata providing ``about/license_file``
    :param config: build configuration supplying source dir, info dir, timeout
    """
    license_file = m.get_value('about/license_file')
    if not license_file:
        # Recipe declares no license file; nothing to copy.
        return
    src = join(source.get_dir(config), license_file)
    dst = join(config.info_dir, 'LICENSE.txt')
    copy_into(src, dst, config.timeout)
def build(m, post=None, include_recipe=True, keep_old_work=False,
          need_source_download=True, verbose=True, dirty=False, activate=True):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :type post: bool or None. None means run the whole build. True means run
    post only. False means stop just before the post.
    :type keep_old_work: bool: Keep any previous work directory.
    :type need_source_download: bool: if rendering failed to download source
    (due to missing tools), retry here after build env is populated
    '''
    if (m.get_value('build/detect_binary_files_with_prefix') or
            m.binary_has_prefix_files()) and not on_win:
        # We must use a long prefix here as the package will only be
        # installable into prefixes shorter than this one.
        config.use_long_build_prefix = True
    else:
        # In case there are multiple builds in the same process
        config.use_long_build_prefix = False

    if m.skip():
        print("Skipped: The %s recipe defines build/skip for this "
              "configuration." % m.dist())
        return

    with Locked(cc.root_dir):
        # If --keep-old-work, then move the contents of source.WORK_DIR to a
        # temporary directory for the duration of the build.
        # The source unpacking procedure is too varied and complex
        # to allow this to be written cleanly (see source.get_dir() for example)
        if keep_old_work:
            old_WORK_DIR = tempfile.mkdtemp()
            old_sub_dirs = [name for name in os.listdir(source.WORK_DIR)
                            if os.path.isdir(os.path.join(source.WORK_DIR, name))]
            if len(old_sub_dirs):
                print("Keeping old work directory backup: %s => %s"
                      % (old_sub_dirs, old_WORK_DIR))
                for old_sub in old_sub_dirs:
                    shutil.move(os.path.join(source.WORK_DIR, old_sub), old_WORK_DIR)

        if post in [False, None]:
            print("Removing old build environment")
            print("BUILD START:", m.dist())
            if on_win:
                # On Windows in-use files cannot be removed; move them to the
                # trash instead.
                if isdir(config.short_build_prefix):
                    move_to_trash(config.short_build_prefix, '')
                if isdir(config.long_build_prefix):
                    move_to_trash(config.long_build_prefix, '')
            else:
                rm_rf(config.short_build_prefix)
                rm_rf(config.long_build_prefix)

            # Display the name only
            # Version number could be missing due to dependency on source info.
            create_env(config.build_prefix,
                       [ms.spec for ms in m.ms_depends('build')])

            if need_source_download:
                # Execute any commands fetching the source (e.g., git) in the _build environment.
                # This makes it possible to provide source fetchers (eg. git, hg, svn) as build
                # dependencies.
                m, need_source_download = parse_or_try_download(m,
                                                                no_download_source=False,
                                                                force_download=True,
                                                                verbose=verbose,
                                                                dirty=dirty)
                assert not need_source_download, "Source download failed. Please investigate."

            if m.name() in [i.rsplit('-', 2)[0] for i in linked(config.build_prefix)]:
                # The package being built is itself installed in the build
                # prefix; remove it so the fresh build is not shadowed.
                print("%s is installed as a build dependency. Removing." % m.name())
                index = get_build_index(clear_cache=False)
                actions = plan.remove_actions(config.build_prefix, [m.name()], index=index)
                assert not plan.nothing_to_do(actions), actions
                plan.display_actions(actions, index)
                plan.execute_actions(actions, index)

            print("Package:", m.dist())

            assert isdir(source.WORK_DIR)
            src_dir = source.get_dir()
            contents = os.listdir(src_dir)
            if contents:
                print("source tree in:", src_dir)
            else:
                print("no source")

            rm_rf(config.info_dir)
            files1 = prefix_files()
            for pat in m.always_include_files():
                # Files matching always_include_files are removed from the
                # "pre-existing" snapshot so they end up inside the package.
                has_matches = False
                for f in set(files1):
                    if fnmatch.fnmatch(f, pat):
                        print("Including in package existing file", f)
                        files1.discard(f)
                        has_matches = True
                if not has_matches:
                    sys.exit("Error: Glob %s from always_include_files does not match any files" % pat)
            # Save this for later
            with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
                f.write(u'\n'.join(sorted(list(files1))))
                f.write(u'\n')

            # Use script from recipe?
            script = m.get_value('build/script', None)
            if script:
                if isinstance(script, list):
                    script = '\n'.join(script)

            if on_win:
                build_file = join(m.path, 'bld.bat')
                if script:
                    build_file = join(source.get_dir(), 'bld.bat')
                    with open(join(source.get_dir(), 'bld.bat'), 'w') as bf:
                        bf.write(script)
                import conda_build.windows as windows
                windows.build(m, build_file, dirty=dirty, activate=activate)
            else:
                build_file = join(m.path, 'build.sh')

                # There is no sense in trying to run an empty build script.
                if isfile(build_file) or script:
                    env = environ.get_dict(m, dirty=dirty)
                    work_file = join(source.get_dir(), 'conda_build.sh')
                    if script:
                        with open(work_file, 'w') as bf:
                            bf.write(script)
                    if activate:
                        # Prepend "source activate <build_prefix>" to the script
                        # so the build runs inside the build environment.
                        if isfile(build_file):
                            data = open(build_file).read()
                        else:
                            data = open(work_file).read()
                        with open(work_file, 'w') as bf:
                            bf.write("source activate {build_prefix}\n".format(
                                build_prefix=config.build_prefix))
                            bf.write(data)
                    else:
                        if not isfile(work_file):
                            shutil.copy(build_file, work_file)
                    os.chmod(work_file, 0o766)

                    if isfile(work_file):
                        cmd = [shell_path, '-x', '-e', work_file]
                        _check_call(cmd, env=env, cwd=src_dir)

        if post in [True, None]:
            if post:
                # post-only run: restore the pre-build file snapshot saved above
                with open(join(config.croot, 'prefix_files.txt'), 'r') as f:
                    files1 = set(f.read().splitlines())

            get_build_metadata(m)
            create_post_scripts(m)
            create_entry_points(m.get_value('build/entry_points'))
            assert not exists(config.info_dir)
            files2 = prefix_files()

            post_process(sorted(files2 - files1),
                         preserve_egg_dir=bool(m.get_value('build/preserve_egg_dir')))

            # The post processing may have deleted some files (like easy-install.pth)
            files2 = prefix_files()
            if any(config.meta_dir in join(config.build_prefix, f) for f in files2 - files1):
                sys.exit(indent("""Error: Untracked file(s) %s found in conda-meta directory.
This error usually comes from using conda in the build script.
Avoid doing this, as it can lead to packages that include their dependencies.""" %
                                (tuple(f for f in files2 - files1
                                       if config.meta_dir in join(config.build_prefix, f)),)))
            post_build(m, sorted(files2 - files1))
            create_info_files(m, sorted(files2 - files1),
                              include_recipe=bool(m.path) and include_recipe)
            if m.get_value('build/noarch_python'):
                import conda_build.noarch_python as noarch_python
                noarch_python.transform(m, sorted(files2 - files1))

            files3 = prefix_files()
            fix_permissions(files3 - files1)

            path = bldpkg_path(m)
            t = tarfile.open(path, 'w:bz2')

            def order(f):
                # we don't care about empty files so send them back via 100000
                fsize = os.stat(join(config.build_prefix, f)).st_size or 100000
                # info/* records will be False == 0, others will be 1.
                info_order = int(os.path.dirname(f) != 'info')
                return info_order, fsize

            # add files in order of a) in info directory, b) increasing size so
            # we can access small manifest or json files without decompressing
            # possible large binary or data files
            for f in sorted(files3 - files1, key=order):
                t.add(join(config.build_prefix, f), f)

            t.close()

            print("BUILD END:", m.dist())

            # we're done building, perform some checks
            tarcheck.check_all(path)

            update_index(config.bldpkgs_dir)
        else:
            print("STOPPING BUILD BEFORE POST:", m.dist())

        if keep_old_work and len(old_sub_dirs):
            print("Restoring old work directory backup: %s :: %s => %s"
                  % (old_WORK_DIR, old_sub_dirs, source.WORK_DIR))
            for old_sub in old_sub_dirs:
                if os.path.exists(os.path.join(source.WORK_DIR, old_sub)):
                    print("Not restoring old source directory %s over new build's version" % (old_sub))
                else:
                    shutil.move(os.path.join(old_WORK_DIR, old_sub), source.WORK_DIR)
            shutil.rmtree(old_WORK_DIR, ignore_errors=True)
def execute(args, parser):
    """Drive ``conda build`` for already-parsed command-line ``args``
    (older variant: channel_urls are passed explicitly to build/test).

    Expands multi-valued --python/--numpy/--perl/--R options by recursing once
    per requested version, then processes each recipe argument: check, output
    path, test, fetch source, or full build + optional upload.  When a build
    fails because a dependency package is missing, a sibling recipe directory
    matching the dependency name is queued and the original recipe is retried.

    :param args: parsed argparse namespace for the build subcommand
    :param parser: argparse parser, used only to report option errors
    """
    import sys
    import shutil
    import tarfile
    import tempfile
    from os import makedirs
    from os.path import abspath, isdir, isfile
    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    check_external()
    channel_urls = args.channel or ()

    if on_win:
        # needs to happen before any c extensions are imported that might be
        # hard-linked by files in the trash. one of those is markupsafe, used
        # by jinja2. see https://github.com/conda/conda-build/pull/520
        assert "markupsafe" not in sys.modules
        delete_trash(None)

    # Maps language option name -> config attribute / env var name.
    conda_version = {"python": "CONDA_PY", "numpy": "CONDA_NPY",
                     "perl": "CONDA_PERL", "R": "CONDA_R"}

    for lang in ["python", "numpy", "perl", "R"]:
        versions = getattr(args, lang)
        if not versions:
            continue
        if versions == ["all"]:
            if all_versions[lang]:
                versions = all_versions[lang]
            else:
                parser.error("'all' is not supported for --%s" % lang)
        if len(versions) > 1:
            # Recurse once per version, restoring the full list afterwards so
            # every combination across languages gets built.
            for ver in versions[:]:
                setattr(args, lang, [str(ver)])
                execute(args, parser)
                # This is necessary to make all combinations build.
                setattr(args, lang, versions)
            return
        else:
            version = versions[0]
            if lang in ("python", "numpy"):
                # e.g. '3.5' -> 35; config stores these as two/three digit ints
                version = int(version.replace(".", ""))
            setattr(config, conda_version[lang], version)
            if not len(str(version)) in (2, 3) and lang in ["python", "numpy"]:
                if all_versions[lang]:
                    raise RuntimeError("%s must be major.minor, like %s, not %s" %
                                       (conda_version[lang],
                                        all_versions[lang][-1] / 10, version))
                else:
                    raise RuntimeError("%s must be major.minor, not %s" %
                                       (conda_version[lang], version))

    # Using --python, --numpy etc. is equivalent to using CONDA_PY, CONDA_NPY, etc.
    # Auto-set those env variables
    for var in conda_version.values():
        if getattr(config, var):
            # Set the env variable.
            os_environ[var] = str(getattr(config, var))

    if args.skip_existing:
        if not isdir(config.bldpkgs_dir):
            makedirs(config.bldpkgs_dir)
        update_index(config.bldpkgs_dir)
        index = build.get_build_index(clear_cache=True,
                                      channel_urls=channel_urls,
                                      override_channels=args.override_channels)

    already_built = []
    to_build_recursive = []
    with Locked(config.croot):
        # deque so failed recipes can be re-queued at the front after their
        # missing dependencies are built first.
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding() or "utf-8")
            if isfile(arg):
                if arg.endswith((".tar", ".tar.gz", ".tgz", ".tar.bz2")):
                    # Recipe supplied as a tarball: unpack to a temp dir.
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, "r:*")
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            try:
                m = MetaData(recipe_dir)
                if m.get_value("build/noarch_python"):
                    config.noarch = True
            except exceptions.YamlParsingError as e:
                sys.stderr.write(e.error_msg())
                sys.exit(1)

            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.skip_existing:
                if m.pkg_fn() in index or m.pkg_fn() in already_built:
                    print("%s is already built, skipping." % m.dist())
                    continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m, verbose=not args.quiet,
                           channel_urls=channel_urls,
                           override_channels=args.override_channels)
            elif args.source:
                source.provide(m.path, m.get_section("source"))
                print("Source tree in:", source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                try:
                    if m.skip():
                        print("Skipped: The %s recipe defines build/skip for this "
                              "configuration." % m.dist())
                        continue
                    build.build(m, verbose=not args.quiet, post=post,
                                channel_urls=channel_urls,
                                override_channels=args.override_channels,
                                include_recipe=args.include_recipe)
                except (RuntimeError, SystemExit) as e:
                    error_str = str(e)
                    if error_str.startswith("No packages found") or error_str.startswith("Could not find some"):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(": ")[1]
                        # Handle package names that contain version deps.
                        if " " in dep_pkg:
                            dep_pkg = dep_pkg.split(" ")[0]
                        recipe_glob = glob(dep_pkg + "-[v0-9][0-9.]*")
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            # Re-queue the failing recipe behind its dependency.
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                if dep_pkg in to_build_recursive:
                                    # Already tried building this dependency once;
                                    # give up instead of looping forever.
                                    sys.exit(str(e))
                                print(("Missing dependency {0}, but found" +
                                       " recipe directory, so building " +
                                       "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                                to_build_recursive.append(dep_pkg)
                        else:
                            raise
                    elif error_str.strip().startswith("Hint:"):
                        lines = [line for line in error_str.splitlines() if
                                 line.strip().startswith("- ")]
                        pkgs = [line.lstrip("- ") for line in lines]
                        # Typically if a conflict is with one of these
                        # packages, the other package needs to be rebuilt
                        # (e.g., a conflict with 'python 3.5*' and 'x' means
                        # 'x' isn't build for Python 3.5 and needs to be
                        # rebuilt).
                        skip_names = ["python", "r"]
                        pkgs = [pkg for pkg in pkgs if pkg.split(" ")[0] not in skip_names]
                        for pkg in pkgs:
                            # Handle package names that contain version deps.
                            if " " in pkg:
                                pkg = pkg.split(" ")[0]
                            recipe_glob = glob(pkg + "-[v0-9][0-9.]*")
                            if exists(pkg):
                                recipe_glob.append(pkg)
                            if recipe_glob:
                                recipes.appendleft(arg)
                                try_again = True
                                for recipe_dir in recipe_glob:
                                    if pkg in to_build_recursive:
                                        sys.exit(str(e))
                                    print(error_str)
                                    print(("Missing dependency {0}, but found" +
                                           " recipe directory, so building " +
                                           "{0} first").format(pkg))
                                    recipes.appendleft(recipe_dir)
                                    to_build_recursive.append(pkg)
                            else:
                                raise
                    else:
                        raise

                if try_again:
                    continue

                if not args.notest:
                    build.test(m, verbose=not args.quiet,
                               channel_urls=channel_urls,
                               override_channels=args.override_channels)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)

            already_built.append(m.pkg_fn())
def build(m, get_src=True, verbose=True, post=None):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :param get_src: Should we download the source?
    :type get_src: bool
    :param post: None means run the whole build. True means run post only.
                 False means stop just before the post.
    :type post: bool or None
    '''
    if post in [False, None]:
        rm_rf(prefix)

    print("BUILD START:", m.dist())
    # Create the build environment from the recipe's build dependencies.
    create_env(prefix, [ms.spec for ms in m.ms_depends('build')],
               verbose=verbose)

    if get_src:
        source.provide(m.path, m.get_section('source'))
    assert isdir(source.WORK_DIR)
    src_dir = source.get_dir()
    contents = os.listdir(src_dir)
    if contents:
        print("source tree in:", src_dir)
    else:
        print("no source")

    rm_rf(info_dir)
    # Snapshot the prefix before building; the post phase packages the
    # difference between this set and the post-build contents.
    files1 = prefix_files()
    if post is False:  # identity comparison for the bool/None flag (E712 fix)
        # Save this for later, so a post-only invocation can restore it.
        with open(join(source.WORK_DIR, 'prefix_files.txt'), 'w') as f:
            f.write(u'\n'.join(sorted(files1)))
            f.write(u'\n')

    if sys.platform == 'win32':
        import conda_build.windows as windows
        windows.build(m)
    else:
        env = environ.get_dict(m)
        build_file = join(m.path, 'build.sh')

        script = m.get_value('build/script', None)
        if script:
            # An inline build/script in meta.yaml overrides build.sh.
            if isinstance(script, list):
                script = '\n'.join(script)
            with open(build_file, 'w', encoding='utf-8') as bf:
                bf.write(script)
            os.chmod(build_file, 0o766)

        if exists(build_file):
            cmd = ['/bin/bash', '-x', '-e', build_file]
            _check_call(cmd, env=env, cwd=src_dir)

    if post in [True, None]:
        if post is True:  # identity comparison (E712 fix)
            # Restore the pre-build snapshot written by the post=False phase.
            with open(join(source.WORK_DIR, 'prefix_files.txt'), 'r') as f:
                files1 = set(f.read().splitlines())

        get_build_metadata(m)
        create_post_scripts(m)
        create_entry_points(m.get_value('build/entry_points'))
        post_process(preserve_egg_dir=bool(
                m.get_value('build/preserve_egg_dir')))

        assert not exists(info_dir)
        files2 = prefix_files()

        post_build(sorted(files2 - files1),
                   binary_relocation=bool(m.get_value('build/binary_relocation',
                                                      True)))
        create_info_files(m, sorted(files2 - files1),
                          include_recipe=bool(m.path))
        files3 = prefix_files()
        fix_permissions(files3 - files1)

        # Package everything that appeared in the prefix during the build.
        # Context manager ensures the archive is closed even if t.add raises.
        path = bldpkg_path(m)
        with tarfile.open(path, 'w:bz2') as t:
            for f in sorted(files3 - files1):
                t.add(join(prefix, f), f)

        print("BUILD END:", m.dist())

        # we're done building, perform some checks
        tarcheck.check_all(path)
        update_index(config.bldpkgs_dir)
    else:
        print("STOPPING BUILD BEFORE POST:", m.dist())
def create_info_files(m, files, include_recipe=True):
    '''
    Creates the metadata files that will be stored in the built package.

    :param m: Package metadata
    :type m: Metadata
    :param files: Paths to files to include in package
    :type files: list of str
    :param include_recipe: Whether or not to include the recipe (True by default)
    :type include_recipe: bool
    :raises RuntimeError: if files declared in has_prefix_files or
        binary_has_prefix_files were not actually detected in the prefix
    '''
    if not isdir(config.info_dir):
        os.makedirs(config.info_dir)

    if include_recipe:
        # Copy the full recipe directory (minus dotfiles) into info/recipe.
        recipe_dir = join(config.info_dir, 'recipe')
        os.makedirs(recipe_dir)

        for fn in os.listdir(m.path):
            if fn.startswith('.'):
                continue
            src_path = join(m.path, fn)
            dst_path = join(recipe_dir, fn)
            if isdir(src_path):
                shutil.copytree(src_path, dst_path)
            else:
                shutil.copy(src_path, dst_path)

    license_file = m.get_value('about/license_file')
    if license_file:
        filenames = 'LICENSE', 'LICENSE.txt', 'license', 'license.txt'
        if license_file is True:
            # about/license_file: true means "find one of the conventional
            # names in the source tree"; for/else exits if none is found.
            for fn in filenames:
                src = join(source.get_dir(), fn)
                if isfile(src):
                    break
            else:
                sys.exit("Error: could not locate license file (any of "
                         "%s) in: %s" % (', '.join(filenames),
                                         source.get_dir()))
        else:
            src = join(source.get_dir(), license_file)
        shutil.copy(src, join(config.info_dir, 'license.txt'))

    readme = m.get_value('about/readme')
    if readme:
        src = join(source.get_dir(), readme)
        if not isfile(src):
            sys.exit("Error: no readme file: %s" % readme)
        dst = join(config.info_dir, readme)
        shutil.copy(src, dst)
        if os.path.split(readme)[1] not in {"README.md", "README.rst", "README"}:
            print("WARNING: anaconda.org only recognizes about/readme as README.md and README.rst",
                  file=sys.stderr)

    # Deal with Python 2 and 3's different json module type reqs
    mode_dict = {'mode': 'w', 'encoding': 'utf-8'} if PY3 else {'mode': 'wb'}
    with open(join(config.info_dir, 'index.json'), **mode_dict) as fo:
        json.dump(m.info_index(), fo, indent=2, sort_keys=True)

    if include_recipe:
        with open(join(config.info_dir, 'recipe.json'), **mode_dict) as fo:
            json.dump(m.meta, fo, indent=2, sort_keys=True)

    if sys.platform == 'win32':
        # make sure we use '/' path separators in metadata
        files = [f.replace('\\', '/') for f in files]

    # info/files: the package's file manifest (empty line for noarch_python,
    # whose files are laid out at install time).
    with open(join(config.info_dir, 'files'), 'w') as fo:
        if m.get_value('build/noarch_python'):
            fo.write('\n')
        else:
            for f in files:
                fo.write(f + '\n')

    files_with_prefix = sorted(have_prefix_files(files))
    binary_has_prefix_files = m.binary_has_prefix_files()
    text_has_prefix_files = m.has_prefix_files()
    if files_with_prefix and not m.get_value('build/noarch_python'):
        auto_detect = m.get_value('build/detect_binary_files_with_prefix')
        if sys.platform == 'win32':
            # Paths on Windows can contain spaces, so we need to quote the
            # paths. Fortunately they can't contain quotes, so we don't have
            # to worry about nested quotes.
            fmt_str = '"%s" %s "%s"\n'
        else:
            # Don't do it everywhere because paths on Unix can contain quotes,
            # and we don't have a good method of escaping, and because older
            # versions of conda don't support quotes in has_prefix
            fmt_str = '%s %s %s\n'
        # Entries are removed from the two *_has_prefix_files sets as they are
        # written, so anything left over afterwards was declared but not found.
        with open(join(config.info_dir, 'has_prefix'), 'w') as fo:
            for pfix, mode, fn in files_with_prefix:
                if (fn in text_has_prefix_files):
                    # register for text replacement, regardless of mode
                    fo.write(fmt_str % (pfix, 'text', fn))
                    text_has_prefix_files.remove(fn)
                elif ((mode == 'binary') and (fn in binary_has_prefix_files)):
                    print("Detected hard-coded path in binary file %s" % fn)
                    fo.write(fmt_str % (pfix, mode, fn))
                    binary_has_prefix_files.remove(fn)
                elif (auto_detect or (mode == 'text')):
                    print("Detected hard-coded path in %s file %s" % (mode, fn))
                    fo.write(fmt_str % (pfix, mode, fn))
                else:
                    print("Ignored hard-coded path in %s" % fn)

    # make sure we found all of the files expected
    errstr = ""
    for f in text_has_prefix_files:
        errstr += "Did not detect hard-coded path in %s from has_prefix_files\n" % f
    for f in binary_has_prefix_files:
        errstr += "Did not detect hard-coded path in %s from binary_has_prefix_files\n" % f
    if errstr:
        raise RuntimeError(errstr)

    no_link = m.get_value('build/no_link')
    if no_link:
        # build/no_link may be a single glob or a list of globs.
        if not isinstance(no_link, list):
            no_link = [no_link]
        with open(join(config.info_dir, 'no_link'), 'w') as fo:
            for f in files:
                if any(fnmatch.fnmatch(f, p) for p in no_link):
                    fo.write(f + '\n')

    if m.get_value('source/git_url'):
        with io.open(join(config.info_dir, 'git'), 'w', encoding='utf-8') as fo:
            source.git_info(fo)

    if m.get_value('app/icon'):
        shutil.copyfile(join(m.path, m.get_value('app/icon')),
                        join(config.info_dir, 'icon.png'))
def execute(args, parser):
    """Drive recipe builds from the CLI args.

    Fans out over requested Python/numpy versions by recursing with a
    single-version args, then builds each recipe, recursively queueing
    dependency recipes found next to the current directory when a build
    fails with a missing-package error.
    """
    import sys
    import shutil
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    check_external()

    if args.python:
        if args.python == ['all']:
            # Recurse once per supported Python version, then stop.
            for py in [26, 27, 33, 34]:
                args.python = [str(py)]
                execute(args, parser)
            return
        if len(args.python) > 1:
            # NOTE(review): unlike the ['all'] branch there is no return here,
            # so control continues below after the per-version recursion --
            # confirm this is intended.
            for py in args.python[:]:
                args.python = [py]
                execute(args, parser)
        else:
            config.CONDA_PY = int(args.python[0].replace('.', ''))
    if args.perl:
        config.CONDA_PERL = args.perl
    if args.numpy:
        if args.numpy == ['all']:
            for npy in [16, 17, 18, 19]:
                args.numpy = [str(npy)]
                execute(args, parser)
            return
        if len(args.numpy) > 1:
            for npy in args.numpy[:]:
                args.numpy = [npy]
                execute(args, parser)
        else:
            config.CONDA_NPY = int(args.numpy[0].replace('.', ''))

    with Locked(config.croot):
        # Deque so dependency recipes can be pushed onto the front and
        # processed before retrying the recipe that needed them.
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding())
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    # A tarball recipe is unpacked into a temp dir that is
                    # removed after the build (need_cleanup).
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            m = MetaData(recipe_dir)
            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m, verbose=not args.quiet)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                try:
                    build.build(m, verbose=not args.quiet, post=post)
                except RuntimeError as e:
                    error_str = str(e)
                    if error_str.startswith('No packages found'):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(': ')[1]
                        # Handle package names that contain version deps.
                        if ' ' in dep_pkg:
                            dep_pkg = dep_pkg.split(' ')[0]
                        recipe_glob = glob(dep_pkg + '-[v0-9][0-9.]*')
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            # Requeue the current recipe, then its missing
                            # dependency in front of it.
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                print(("Missing dependency {0}, but found" +
                                       " recipe directory, so building " +
                                       "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                        else:
                            raise
                    else:
                        raise

                if try_again:
                    continue

                if not args.notest:
                    build.test(m, verbose=not args.quiet)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)
def execute(args, parser):
    """Drive recipe builds from the CLI args (older variant).

    Differs from the sibling version above only in the supported numpy
    list ([16, 17, 18]) and in matching the more specific error prefix
    'No packages found matching:'.
    """
    import sys
    import shutil
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    check_external()

    if args.python:
        if args.python == ['all']:
            # Recurse once per supported Python version, then stop.
            for py in [26, 27, 33, 34]:
                args.python = [str(py)]
                execute(args, parser)
            return
        if len(args.python) > 1:
            # NOTE(review): no return after this fan-out (unlike ['all']);
            # control continues below -- confirm intended.
            for py in args.python[:]:
                args.python = [py]
                execute(args, parser)
        else:
            config.CONDA_PY = int(args.python[0].replace('.', ''))
    if args.perl:
        config.CONDA_PERL = args.perl
    if args.numpy:
        if args.numpy == ['all']:
            for npy in [16, 17, 18]:
                args.numpy = [str(npy)]
                execute(args, parser)
            return
        if len(args.numpy) > 1:
            for npy in args.numpy[:]:
                args.numpy = [npy]
                execute(args, parser)
        else:
            config.CONDA_NPY = int(args.numpy[0].replace('.', ''))

    with Locked(config.croot):
        # Deque so dependency recipes can be pushed to the front.
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding())
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    # Tarball recipes are unpacked to a temp dir and cleaned
                    # up after the build.
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            m = MetaData(recipe_dir)
            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m, verbose=not args.quiet)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                try:
                    build.build(m, verbose=not args.quiet, post=post)
                except RuntimeError as e:
                    error_str = str(e)
                    if error_str.startswith('No packages found matching:'):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(': ')[1]
                        # Handle package names that contain version deps.
                        if ' ' in dep_pkg:
                            dep_pkg = dep_pkg.split(' ')[0]
                        recipe_glob = glob(dep_pkg + '-[v0-9][0-9.]*')
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            # Requeue this recipe behind its dependency.
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                print(("Missing dependency {0}, but found" +
                                       " recipe directory, so building " +
                                       "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                        else:
                            raise
                    else:
                        raise

                if try_again:
                    continue

                if not args.notest:
                    build.test(m, verbose=not args.quiet)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)
def execute(args, parser):
    """Drive recipe builds from the CLI args (earliest variant).

    No per-version fan-out here; builds each recipe and recursively
    queues dependency recipes when a build fails because a dependency
    package is missing.
    """
    import sys
    import shutil
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import croot
    from conda_build.metadata import MetaData

    check_external()

    with Locked(croot):
        # Deque so dependency recipes can be pushed to the front.
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding())
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    # Tarball recipes are unpacked to a temp dir and cleaned
                    # up after the build.
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            m = MetaData(recipe_dir)
            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                try:
                    build.build(m)
                except RuntimeError as e:
                    error_str = str(e)
                    if error_str.startswith('No packages found matching:'):
                        # Build dependency if recipe exists.
                        # NOTE(review): this variant replaces spaces with '-'
                        # in the whole spec rather than truncating at the
                        # first space as the later variants do -- confirm.
                        dep_pkg = error_str.split(': ')[1].replace(' ', '-')
                        recipe_glob = glob(dep_pkg + '-[v0-9][0-9.]*')
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            # Requeue this recipe behind its dependency.
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                print(("Missing dependency {0}, but found" +
                                       " recipe directory, so building " +
                                       "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                        else:
                            raise
                    else:
                        raise

                if try_again:
                    continue

                if not args.notest:
                    build.test(m)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)
def execute(args, parser):
    """Drive recipe builds from the CLI args.

    Handles per-language version fan-out via a generic table
    (python/numpy/perl/R), optional skip-existing via the build index,
    and recursive dependency building with a to_build guard against
    infinite recursion.
    """
    import sys
    import shutil
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    check_external()
    channel_urls = args.channel or ()

    if on_win:
        # needs to happen before any c extensions are imported that might be
        # hard-linked by files in the trash. one of those is markupsafe, used
        # by jinja2. see https://github.com/conda/conda-build/pull/520
        assert 'markupsafe' not in sys.modules
        delete_trash(None)

    # Map CLI language flags to the config attribute they set.
    conda_version = {
        'python': 'CONDA_PY',
        'numpy': 'CONDA_NPY',
        'perl': 'CONDA_PERL',
        'R': 'CONDA_R',
    }

    for lang in ['python', 'numpy', 'perl', 'R']:
        versions = getattr(args, lang)
        if not versions:
            continue
        if versions == ['all']:
            if all_versions[lang]:
                versions = all_versions[lang]
            else:
                parser.error("'all' is not supported for --%s" % lang)
        if len(versions) > 1:
            # Recurse once per version, restoring the full list afterwards.
            for ver in versions[:]:
                setattr(args, lang, [str(ver)])
                execute(args, parser)
                # This is necessary to make all combinations build.
                setattr(args, lang, versions)
            return
        else:
            version = int(versions[0].replace('.', ''))
            setattr(config, conda_version[lang], version)
            # python/numpy versions must encode major.minor as two digits.
            if not len(str(version)) == 2 and lang in ['python', 'numpy']:
                if all_versions[lang]:
                    raise RuntimeError("%s must be major.minor, like %s, not %s" %
                        (conda_version[lang], all_versions[lang][-1]/10, version))
                else:
                    raise RuntimeError("%s must be major.minor, not %s" %
                        (conda_version[lang], version))

    if args.skip_existing:
        update_index(config.bldpkgs_dir)
        index = build.get_build_index(clear_cache=True,
            channel_urls=channel_urls,
            override_channels=args.override_channels)

    already_built = []
    # Guard list: a dependency already queued here must not be queued again.
    to_build = args.recipe[:]
    with Locked(config.croot):
        # Deque so dependency recipes can be pushed to the front.
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding() or 'utf-8')
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    # Tarball recipes are unpacked to a temp dir and cleaned
                    # up after the build.
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            try:
                m = MetaData(recipe_dir)
                if m.get_value('build/noarch_python'):
                    config.noarch = True
            except exceptions.YamlParsingError as e:
                sys.stderr.write(e.error_msg())
                sys.exit(1)

            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.skip_existing:
                if m.pkg_fn() in index or m.pkg_fn() in already_built:
                    print("%s is already built, skipping." % m.dist())
                    continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m, verbose=not args.quiet,
                    channel_urls=channel_urls,
                    override_channels=args.override_channels)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                try:
                    build.build(m, verbose=not args.quiet, post=post,
                        channel_urls=channel_urls,
                        override_channels=args.override_channels,
                        include_recipe=args.include_recipe)
                except RuntimeError as e:
                    error_str = str(e)
                    if error_str.startswith('No packages found') or error_str.startswith('Could not find some'):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(': ')[1]
                        # Handle package names that contain version deps.
                        if ' ' in dep_pkg:
                            dep_pkg = dep_pkg.split(' ')[0]
                        recipe_glob = glob(dep_pkg + '-[v0-9][0-9.]*')
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            # Requeue this recipe behind its dependency.
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                # A dependency seen twice means recursion is
                                # not converging; bail out with the error.
                                if dep_pkg in to_build:
                                    sys.exit(str(e))
                                print(("Missing dependency {0}, but found" +
                                       " recipe directory, so building " +
                                       "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                                to_build.append(dep_pkg)
                        else:
                            raise
                    else:
                        raise

                if try_again:
                    continue

                if not args.notest:
                    build.test(m, verbose=not args.quiet,
                        channel_urls=channel_urls,
                        override_channels=args.override_channels)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)

            already_built.append(m.pkg_fn())
def execute(args, parser):
    """Drive recipe builds from the CLI args (jinja-rendering variant).

    Renders each recipe (including jinja templating) before building,
    supports skip-existing, build/skip, and recursively queues recipes
    for unsatisfiable dependencies parsed out of the solver error.
    """
    import sys
    import shutil
    import tarfile
    import tempfile
    from os import makedirs
    from os.path import abspath, isdir, isfile

    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config

    check_external()

    # change globals in build module, see comment there as well
    build.channel_urls = args.channel or ()
    build.override_channels = args.override_channels
    build.verbose = not args.quiet

    if on_win:
        try:
            # needs to happen before any c extensions are imported that might be
            # hard-linked by files in the trash. one of those is markupsafe,
            # used by jinja2. see https://github.com/conda/conda-build/pull/520
            delete_trash(None)
        # NOTE(review): bare except -- also swallows KeyboardInterrupt and
        # SystemExit; consider narrowing to Exception.
        except:
            # when we can't delete the trash, don't crash on AssertionError,
            # instead inform the user and try again next time.
            # see https://github.com/conda/conda-build/pull/744
            warnings.warn("Cannot delete trash; some c extension has been "
                          "imported that is hard-linked by files in the trash. "
                          "Will try again on next run.")

    set_language_env_vars(args, parser, execute=execute)

    if args.skip_existing:
        for d in config.bldpkgs_dirs:
            if not isdir(d):
                makedirs(d)
            update_index(d)
        index = build.get_build_index(clear_cache=True)

    already_built = set()
    # Guard list: a dependency queued twice aborts to avoid endless recursion.
    to_build_recursive = []
    recipes = deque(args.recipe)
    while recipes:
        arg = recipes.popleft()
        try_again = False
        # Don't use byte literals for paths in Python 2
        if not PY3:
            arg = arg.decode(getpreferredencoding() or 'utf-8')
        if isfile(arg):
            if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                # Tarball recipes are unpacked to a temp dir and cleaned up
                # after the build.
                recipe_dir = tempfile.mkdtemp()
                t = tarfile.open(arg, 'r:*')
                t.extractall(path=recipe_dir)
                t.close()
                need_cleanup = True
            else:
                print("Ignoring non-recipe: %s" % arg)
                continue
        else:
            recipe_dir = abspath(arg)
            need_cleanup = False
        # recurse looking for meta.yaml that is potentially not in immediate folder
        recipe_dir = find_recipe(recipe_dir)
        if not isdir(recipe_dir):
            sys.exit("Error: no such directory: %s" % recipe_dir)

        # this fully renders any jinja templating, throwing an error if any
        # data is missing
        m, need_source_download = render_recipe(recipe_dir,
                                                no_download_source=False,
                                                verbose=False,
                                                dirty=args.dirty)
        if m.get_value('build/noarch_python'):
            config.noarch = True

        if args.check and len(args.recipe) > 1:
            print(m.path)
        m.check_fields()
        if args.check:
            continue
        if m.skip():
            print("Skipped: The %s recipe defines build/skip for this "
                  "configuration." % m.dist())
            continue
        if args.skip_existing:
            # 'or m.pkg_fn() in index' is for conda <4.1 and could be removed
            # in the future.
            if ('local::' + m.pkg_fn() in index or
                    m.pkg_fn() in index or
                    m.pkg_fn() in already_built):
                print(m.dist(), "is already built, skipping.")
                continue
        if args.output:
            print(bldpkg_path(m))
            continue
        elif args.test:
            build.test(m, move_broken=False)
        elif args.source:
            source.provide(m.path, m.get_section('source'),
                           verbose=build.verbose)
            print('Source tree in:', source.get_dir())
        else:
            # This loop recursively builds dependencies if recipes exist
            if args.build_only:
                post = False
                args.notest = True
                args.binstar_upload = False
            elif args.post:
                post = True
                args.notest = True
                args.binstar_upload = False
            else:
                post = None
            try:
                build.build(m, post=post,
                            include_recipe=args.include_recipe,
                            keep_old_work=args.keep_old_work,
                            need_source_download=need_source_download,
                            dirty=args.dirty)
            except (NoPackagesFound, Unsatisfiable) as e:
                error_str = str(e)
                # Typically if a conflict is with one of these
                # packages, the other package needs to be rebuilt
                # (e.g., a conflict with 'python 3.5*' and 'x' means
                # 'x' isn't build for Python 3.5 and needs to be
                # rebuilt).
                skip_names = ['python', 'r']
                add_recipes = []
                # Parse the package names out of the solver's bullet list.
                for line in error_str.splitlines():
                    if not line.startswith(' - '):
                        continue
                    pkg = line.lstrip(' - ').split(' -> ')[-1]
                    pkg = pkg.strip().split(' ')[0]
                    if pkg in skip_names:
                        continue
                    recipe_glob = glob(pkg + '-[v0-9][0-9.]*')
                    if os.path.exists(pkg):
                        recipe_glob.append(pkg)
                    if recipe_glob:
                        try_again = True
                        for recipe_dir in recipe_glob:
                            # Seeing the same dependency twice means we are
                            # not converging; abort with the solver error.
                            if pkg in to_build_recursive:
                                sys.exit(str(e))
                            print(error_str)
                            print(("Missing dependency {0}, but found" +
                                   " recipe directory, so building " +
                                   "{0} first").format(pkg))
                            add_recipes.append(recipe_dir)
                            to_build_recursive.append(pkg)
                    else:
                        raise
                # Requeue this recipe, preceded by all its dependencies.
                recipes.appendleft(arg)
                recipes.extendleft(reversed(add_recipes))

            if try_again:
                continue

            if not args.notest:
                build.test(m)

        if need_cleanup:
            shutil.rmtree(recipe_dir)

        # outputs message, or does upload, depending on value of
        # args.binstar_upload
        handle_binstar_upload(build.bldpkg_path(m), args)

        already_built.add(m.pkg_fn())
def build(m, get_src=True, verbose=True, post=None, channel_urls=(),
          override_channels=False):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :param get_src: Should we download the source?
    :type get_src: bool
    :type post: bool or None. None means run the whole build. True means run
    post only. False means stop just before the post.
    '''
    if post in [False, None]:
        print("Removing old build directory")
        rm_rf(config.short_build_prefix)
        rm_rf(config.long_build_prefix)
        print("Removing old work directory")
        rm_rf(source.WORK_DIR)

        if (m.get_value('build/detect_binary_files_with_prefix')
                or m.binary_has_prefix_files()):
            # We must use a long prefix here as the package will only be
            # installable into prefixes shorter than this one.
            config.use_long_build_prefix = True
        else:
            # In case there are multiple builds in the same process
            config.use_long_build_prefix = False

        # Display the name only
        # Version number could be missing due to dependency on source info.
        print("BUILD START:", m.dist())
        create_env(config.build_prefix,
                   [ms.spec for ms in m.ms_depends('build')],
                   verbose=verbose, channel_urls=channel_urls,
                   override_channels=override_channels)

        if get_src:
            source.provide(m.path, m.get_section('source'))
            # Parse our metadata again because we did not initialize the source
            # information before.
            m.parse_again()

        print("Package:", m.dist())

        assert isdir(source.WORK_DIR)
        src_dir = source.get_dir()
        contents = os.listdir(src_dir)
        if contents:
            print("source tree in:", src_dir)
        else:
            print("no source")

        rm_rf(config.info_dir)
        # Pre-build snapshot of the prefix, minus files the recipe always
        # includes; the post phase packages what was added relative to this.
        files1 = prefix_files().difference(set(m.always_include_files()))

        # Save this for later
        with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
            f.write(u'\n'.join(sorted(list(files1))))
            f.write(u'\n')

        if sys.platform == 'win32':
            import conda_build.windows as windows
            windows.build(m)
        else:
            env = environ.get_dict(m)
            build_file = join(m.path, 'build.sh')

            script = m.get_value('build/script', None)
            if script:
                # An inline build/script overrides build.sh; it is written
                # into the work dir instead of the recipe dir.
                if isinstance(script, list):
                    script = '\n'.join(script)
                build_file = join(source.get_dir(), 'conda_build.sh')
                with open(build_file, 'w') as bf:
                    bf.write(script)
                os.chmod(build_file, 0o766)

            if exists(build_file):
                cmd = ['/bin/bash', '-x', '-e', build_file]
                _check_call(cmd, env=env, cwd=src_dir)

    if post in [True, None]:
        if post == True:
            # Post-only run: restore the snapshot saved by the build phase.
            with open(join(config.croot, 'prefix_files.txt'), 'r') as f:
                files1 = set(f.read().splitlines())

        get_build_metadata(m)
        create_post_scripts(m)
        create_entry_points(m.get_value('build/entry_points'))
        assert not exists(config.info_dir)
        files2 = prefix_files()

        post_process(sorted(files2 - files1),
                     preserve_egg_dir=bool(m.get_value('build/preserve_egg_dir')))

        # The post processing may have deleted some files (like easy-install.pth)
        files2 = prefix_files()
        post_build(m, sorted(files2 - files1))
        create_info_files(m, sorted(files2 - files1),
                          include_recipe=bool(m.path))
        if m.get_value('build/noarch_python'):
            import conda_build.noarch_python as noarch_python
            noarch_python.transform(m, sorted(files2 - files1))
        files3 = prefix_files()
        fix_permissions(files3 - files1)

        # Archive everything added to the prefix during the build.
        path = bldpkg_path(m)
        t = tarfile.open(path, 'w:bz2')
        for f in sorted(files3 - files1):
            t.add(join(config.build_prefix, f), f)
        t.close()

        print("BUILD END:", m.dist())

        # we're done building, perform some checks
        tarcheck.check_all(path)
        update_index(config.bldpkgs_dir)
    else:
        print("STOPPING BUILD BEFORE POST:", m.dist())
def get_dict(m=None, prefix=None):
    """Build the environment-variable dict exported to build scripts.

    :param m: Package metadata, or None for a metadata-independent dict.
    :type m: Metadata or None
    :param prefix: Build prefix exposed as $PREFIX; defaults to
                   config.build_prefix.
    :return: dict mapping environment variable names to string values
    """
    if not prefix:
        prefix = config.build_prefix

    python = config.build_python
    d = {'CONDA_BUILD': '1', 'PYTHONNOUSERSITE': '1'}
    d['CONDA_DEFAULT_ENV'] = config.build_prefix
    d['ARCH'] = str(cc.bits)
    d['PREFIX'] = prefix
    d['PYTHON'] = python
    d['PY3K'] = str(config.PY3K)
    d['STDLIB_DIR'] = get_stdlib_dir()
    d['SP_DIR'] = get_sp_dir()
    d['SYS_PREFIX'] = sys.prefix
    d['SYS_PYTHON'] = sys.executable
    d['PERL_VER'] = get_perl_ver()
    d['PY_VER'] = get_py_ver()
    if get_npy_ver():
        d['NPY_VER'] = get_npy_ver()
    d['SRC_DIR'] = source.get_dir()
    # Pass through selected locale/proxy settings from the caller's env.
    if "LANG" in os.environ:
        d['LANG'] = os.environ['LANG']
    if "HTTPS_PROXY" in os.environ:
        d['HTTPS_PROXY'] = os.environ['HTTPS_PROXY']
    if "HTTP_PROXY" in os.environ:
        d['HTTP_PROXY'] = os.environ['HTTP_PROXY']

    if m:
        # Variables whitelisted in build/script_env are forwarded verbatim.
        for var_name in m.get_value('build/script_env', []):
            value = os.getenv(var_name)
            if value is None:
                warnings.warn(
                    "The environment variable '%s' is undefined." % var_name,
                    UserWarning)
            else:
                d[var_name] = value

    if sys.platform == "darwin":
        # multiprocessing.cpu_count() is not reliable on OSX
        # See issue #645 on github.com/conda/conda-build
        out, err = subprocess.Popen('sysctl -n hw.logicalcpu', shell=True,
                                    stdout=subprocess.PIPE).communicate()
        d['CPU_COUNT'] = out.decode('utf-8').strip()
    else:
        try:
            d['CPU_COUNT'] = str(multiprocessing.cpu_count())
        except NotImplementedError:
            d['CPU_COUNT'] = "1"

    # BUG FIX: guard on m — this previously raised AttributeError when
    # get_dict() was called with m=None (every other use of m is guarded).
    if m and m.get_value('source/git_url'):
        git_url = m.get_value('source/git_url')
        if '://' not in git_url:
            # If git_url is a relative path instead of a url, convert it to an
            # abspath relative to the recipe directory.
            # BUG FIX: the original joined m.path a second time after this,
            # which duplicated the recipe-dir component when m.path was a
            # relative path; a single join + normpath is sufficient.
            if not isabs(git_url):
                git_url = join(m.path, git_url)
            git_url = normpath(git_url)
        d.update(**get_git_build_info(d['SRC_DIR'],
                                      git_url,
                                      m.get_value('source/git_rev')))

    d['PATH'] = dict(os.environ)['PATH']
    d = prepend_bin_path(d, prefix)

    if sys.platform == 'win32':
        # -------- Windows
        d['SCRIPTS'] = join(prefix, 'Scripts')
        d['LIBRARY_PREFIX'] = join(prefix, 'Library')
        d['LIBRARY_BIN'] = join(d['LIBRARY_PREFIX'], 'bin')
        d['LIBRARY_INC'] = join(d['LIBRARY_PREFIX'], 'include')
        d['LIBRARY_LIB'] = join(d['LIBRARY_PREFIX'], 'lib')
        # This probably should be done more generally
        d['CYGWIN_PREFIX'] = prefix.replace('\\', '/').replace('C:',
                                                               '/cygdrive/c')
        d['R'] = join(prefix, 'Scripts', 'R.exe')
    else:
        # -------- Unix
        d['HOME'] = os.getenv('HOME', 'UNKNOWN')
        d['PKG_CONFIG_PATH'] = join(prefix, 'lib', 'pkgconfig')
        d['R'] = join(prefix, 'bin', 'R')

    if sys.platform == 'darwin':
        # -------- OSX
        d['OSX_ARCH'] = 'i386' if cc.bits == 32 else 'x86_64'
        d['CFLAGS'] = '-arch %(OSX_ARCH)s' % d
        d['CXXFLAGS'] = d['CFLAGS']
        d['LDFLAGS'] = d['CFLAGS']
        d['MACOSX_DEPLOYMENT_TARGET'] = '10.6'
    elif sys.platform.startswith('linux'):
        # -------- Linux
        d['LD_RUN_PATH'] = prefix + '/lib'

    if m:
        d['PKG_NAME'] = m.name()
        d['PKG_VERSION'] = m.version()
        d['PKG_BUILDNUM'] = str(m.build_number())
        d['PKG_BUILD_STRING'] = str(m.build_id())
        d['RECIPE_DIR'] = m.path

    return d
def execute(args, parser):
    """Entry point for the build command: build/test/upload each recipe.

    Expands the --python/--numpy/--perl/--R version matrix by recursing once
    per requested version, then processes every recipe argument (a directory
    or a recipe tarball) according to the mode flags (--check, --output,
    --test, --source, --build-only, --post, --skip-existing).
    """
    import sys
    import shutil
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    check_external()
    channel_urls = args.channel or ()

    # Known version matrices; None means 'all' is not a valid selector
    # for that language.
    all_versions = {
        'python': [26, 27, 33, 34],
        'numpy': [16, 17, 18, 19],
        'perl': None,
        'R': None,
    }
    # Maps each language flag to the config attribute holding its version.
    conda_version = {
        'python': 'CONDA_PY',
        'numpy': 'CONDA_NPY',
        'perl': 'CONDA_PERL',
        'R': 'CONDA_R',
    }

    for lang in ['python', 'numpy', 'perl', 'R']:
        versions = getattr(args, lang)
        if not versions:
            continue
        if versions == ['all']:
            if all_versions[lang]:
                versions = all_versions[lang]
            else:
                parser.error("'all' is not supported for --%s" % lang)
        if len(versions) > 1:
            # Multiple versions requested: recurse with one version at a
            # time so every combination gets built.
            for ver in versions[:]:
                setattr(args, lang, [str(ver)])
                execute(args, parser)
                # This is necessary to make all combinations build.
                setattr(args, lang, versions)
            return
        else:
            # Normalize e.g. "3.4" -> 34 and stash it on the global config.
            version = int(versions[0].replace('.', ''))
            setattr(config, conda_version[lang], version)
            if not len(str(version)) == 2:
                if all_versions[lang]:
                    # NOTE(review): under Python 2 (this file supports PY2 via
                    # the `if not PY3` branch below), integer division makes
                    # all_versions[lang][-1]/10 render e.g. "3" instead of
                    # "3.4" in this message -- confirm whether /10.0 was meant.
                    raise RuntimeError("%s must be major.minor, like %s, not %s" %
                                       (conda_version[lang],
                                        all_versions[lang][-1]/10, version))
                else:
                    raise RuntimeError("%s must be major.minor, not %s" %
                                       (conda_version[lang], version))

    if args.skip_existing:
        # Refresh the local package index so pkg_fn() membership is current.
        update_index(config.bldpkgs_dir)
        index = build.get_build_index(clear_cache=True,
                                      channel_urls=channel_urls,
                                      override_channels=args.override_channels)

    with Locked(config.croot):
        # Recipes are processed as a queue so that recipes for missing
        # dependencies can be pushed to the front and the current recipe
        # retried afterwards.
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding() or 'utf-8')
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    # Recipe tarball: unpack into a temp dir, removed below.
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            try:
                m = MetaData(recipe_dir)
                if m.get_value('build/noarch_python'):
                    config.noarch = True
            except exceptions.YamlParsingError as e:
                sys.stderr.write(e.error_msg())
                sys.exit(1)
            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.skip_existing:
                if m.pkg_fn() in index:
                    print("%s is already built, skipping." % m.dist())
                    continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m, verbose=not args.quiet,
                           channel_urls=channel_urls,
                           override_channels=args.override_channels)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                try:
                    build.build(m, verbose=not args.quiet, post=post,
                                channel_urls=channel_urls,
                                override_channels=args.override_channels)
                except RuntimeError as e:
                    # Inspect the resolver error text to detect a missing
                    # dependency that we may have a local recipe for.
                    error_str = str(e)
                    if error_str.startswith('No packages found') or error_str.startswith('Could not find some'):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(': ')[1]
                        # Handle package names that contain version deps.
                        if ' ' in dep_pkg:
                            dep_pkg = dep_pkg.split(' ')[0]
                        recipe_glob = glob(dep_pkg + '-[v0-9][0-9.]*')
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            # Requeue the current recipe behind its deps.
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                print(("Missing dependency {0}, but found" +
                                       " recipe directory, so building " +
                                       "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                        else:
                            raise
                    else:
                        raise
                if try_again:
                    continue

                if not args.notest:
                    build.test(m, verbose=not args.quiet,
                               channel_urls=channel_urls,
                               override_channels=args.override_channels)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)
def source_action(metadata, config): source.provide(metadata.path, metadata.get_section('source'), config=config) print('Source tree in:', source.get_dir(config))
def build(m, get_src=True, verbose=True): ''' Build the package with the specified metadata. :param m: Package metadata :type m: Metadata :param get_src: Should we download the source? :type get_src: bool ''' rm_rf(prefix) print("BUILD START:", m.dist()) create_env(prefix, [ms.spec for ms in m.ms_depends('build')], verbose=verbose) if get_src: source.provide(m.path, m.get_section('source')) assert isdir(source.WORK_DIR) if os.listdir(source.get_dir()): print("source tree in:", source.get_dir()) else: print("no source") rm_rf(info_dir) files1 = prefix_files() if sys.platform == 'win32': import conda_build.windows as windows windows.build(m) else: env = environ.get_dict(m) build_file = join(m.path, 'build.sh') if exists(build_file): script = m.get_value('build/script', None) if script: if isinstance(script, list): script = '\n'.join(script) with open(build_file, 'w', encoding='utf-8') as bf: bf.write(script) os.chmod(build_file, 0o766) cmd = ['/bin/bash', '-x', '-e', build_file] _check_call(cmd, env=env, cwd=source.get_dir()) get_build_metadata(m) create_post_scripts(m) create_entry_points(m.get_value('build/entry_points')) post_process(preserve_egg_dir=bool(m.get_value('build/preserve_egg_dir'))) assert not exists(info_dir) files2 = prefix_files() post_build(sorted(files2 - files1), binary_relocation=bool( m.get_value('build/binary_relocation', True))) create_info_files(m, sorted(files2 - files1), include_recipe=bool(m.path)) files3 = prefix_files() fix_permissions(files3 - files1) path = bldpkg_path(m) t = tarfile.open(path, 'w:bz2') for f in sorted(files3 - files1): t.add(join(prefix, f), f) t.close() print("BUILD END:", m.dist()) # we're done building, perform some checks tarcheck.check_all(path) update_index(config.bldpkgs_dir)
def build(m, get_src=True, verbose=True, post=None, channel_urls=(),
          override_channels=False, include_recipe=True):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :param get_src: Should we download the source?
    :type get_src: bool
    :type post: bool or None. None means run the whole build. True means run
    post only. False means stop just before the post.
    '''
    if (m.get_value('build/detect_binary_files_with_prefix') or
            m.binary_has_prefix_files()):
        # We must use a long prefix here as the package will only be
        # installable into prefixes shorter than this one.
        config.use_long_build_prefix = True
    else:
        # In case there are multiple builds in the same process
        config.use_long_build_prefix = False

    if m.skip():
        print("Skipped: The %s recipe defines build/skip for this "
              "configuration." % m.dist())
        sys.exit(0)

    # ---- pre/build phase (skipped entirely when post=True) ----
    if post in [False, None]:
        print("Removing old build environment")
        if on_win:
            # On Windows, in-use files cannot be deleted; trash them instead.
            if isdir(config.short_build_prefix):
                move_to_trash(config.short_build_prefix, '')
            if isdir(config.long_build_prefix):
                move_to_trash(config.long_build_prefix, '')
        else:
            rm_rf(config.short_build_prefix)
            rm_rf(config.long_build_prefix)
        print("Removing old work directory")
        if on_win:
            if isdir(source.WORK_DIR):
                move_to_trash(source.WORK_DIR, '')
        else:
            rm_rf(source.WORK_DIR)

        # Display the name only
        # Version number could be missing due to dependency on source info.
        print("BUILD START:", m.dist())
        create_env(config.build_prefix,
                   [ms.spec for ms in m.ms_depends('build')],
                   verbose=verbose, channel_urls=channel_urls,
                   override_channels=override_channels)

        # A package cannot depend on itself; remove it if it got pulled in
        # as a build dependency.
        if m.name() in [i.rsplit('-', 2)[0] for i in linked(config.build_prefix)]:
            print("%s is installed as a build dependency. Removing." %
                  m.name())
            index = get_build_index(clear_cache=False, channel_urls=channel_urls,
                                    override_channels=override_channels)
            actions = plan.remove_actions(config.build_prefix, [m.name()], index=index)
            assert not plan.nothing_to_do(actions), actions
            plan.display_actions(actions, index)
            plan.execute_actions(actions, index)

        if get_src:
            source.provide(m.path, m.get_section('source'))
            # Parse our metadata again because we did not initialize the source
            # information before.
            m.parse_again()

        print("Package:", m.dist())

        assert isdir(source.WORK_DIR)
        src_dir = source.get_dir()
        contents = os.listdir(src_dir)
        if contents:
            print("source tree in:", src_dir)
        else:
            print("no source")

        rm_rf(config.info_dir)
        # Snapshot of prefix contents before the build; the payload is the
        # set difference against this snapshot.
        files1 = prefix_files()
        for pat in m.always_include_files():
            has_matches = False
            for f in set(files1):
                if fnmatch.fnmatch(f, pat):
                    print("Including in package existing file", f)
                    # Discarding from the "before" set forces the file into
                    # the packaged difference later.
                    files1.discard(f)
                    has_matches = True
            if not has_matches:
                sys.exit("Error: Glob %s from always_include_files does not match any files" % pat)
        # Save this for later
        # (a post-only invocation reloads files1 from this file).
        # NOTE(review): opened without an explicit encoding -- confirm
        # whether utf-8 should be forced as elsewhere in this file.
        with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
            f.write(u'\n'.join(sorted(list(files1))))
            f.write(u'\n')

        if sys.platform == 'win32':
            import conda_build.windows as windows
            windows.build(m)
        else:
            env = environ.get_dict(m)
            build_file = join(m.path, 'build.sh')

            script = m.get_value('build/script', None)
            if script:
                # Inline build/script takes precedence over build.sh; it is
                # written into the work dir as conda_build.sh.
                if isinstance(script, list):
                    script = '\n'.join(script)
                build_file = join(source.get_dir(), 'conda_build.sh')
                with open(build_file, 'w') as bf:
                    bf.write(script)
                os.chmod(build_file, 0o766)

            if isfile(build_file):
                # Run with tracing (-x) and fail-fast (-e).
                cmd = ['/bin/bash', '-x', '-e', build_file]
                _check_call(cmd, env=env, cwd=src_dir)

    # ---- post phase (skipped when post=False) ----
    if post in [True, None]:
        if post == True:  # NOTE(review): `post is True` would be idiomatic.
            # Post-only run: restore the pre-build file snapshot saved above.
            with open(join(config.croot, 'prefix_files.txt'), 'r') as f:
                files1 = set(f.read().splitlines())

        get_build_metadata(m)
        create_post_scripts(m)
        create_entry_points(m.get_value('build/entry_points'))
        assert not exists(config.info_dir)
        files2 = prefix_files()

        post_process(sorted(files2 - files1),
                     preserve_egg_dir=bool(m.get_value('build/preserve_egg_dir')))

        # The post processing may have deleted some files (like easy-install.pth)
        files2 = prefix_files()
        if any(config.meta_dir in join(config.build_prefix, f) for f in files2 - files1):
            sys.exit(indent("""Error: Untracked file(s) %s found in conda-meta directory. This error usually comes from using conda in the build script. Avoid doing this, as it can lead to packages that include their dependencies.""" % (tuple(f for f in files2 - files1 if config.meta_dir in join(config.build_prefix, f)),)))
        post_build(m, sorted(files2 - files1))
        create_info_files(m, sorted(files2 - files1),
                          include_recipe=bool(m.path) and include_recipe)
        if m.get_value('build/noarch_python'):
            import conda_build.noarch_python as noarch_python
            noarch_python.transform(m, sorted(files2 - files1))
        files3 = prefix_files()
        fix_permissions(files3 - files1)
        path = bldpkg_path(m)
        t = tarfile.open(path, 'w:bz2')
        for f in sorted(files3 - files1):
            t.add(join(config.build_prefix, f), f)
        t.close()

        print("BUILD END:", m.dist())

        # we're done building, perform some checks
        tarcheck.check_all(path)
        update_index(config.bldpkgs_dir)
    else:
        print("STOPPING BUILD BEFORE POST:", m.dist())