def system_vars(env_dict, prefix):
    """Gather host-system environment variables for a build.

    ``env_dict`` supplies caller-provided values (e.g. build/script_env)
    whose contents are appended to any compiler flags collected here;
    ``prefix`` is the environment whose bin (or Scripts) directory gets
    prepended to PATH.  Returns a dict of variable name -> value.
    """
    env = {}
    compilers = defaultdict(text_type)

    make_flags = os.environ.get('MAKEFLAGS')
    if make_flags is not None:
        env['MAKEFLAGS'] = make_flags
    env['CPU_COUNT'] = get_cpu_count()
    lang = os.environ.get('LANG')
    if lang is not None:
        env['LANG'] = lang
    env['PATH'] = os.environ['PATH']
    env = prepend_bin_path(env, prefix)

    if sys.platform == 'win32':
        env.update(windows_vars(prefix))
    else:
        env.update(unix_vars(prefix))

    if sys.platform == 'darwin':
        env.update(osx_vars(compilers))
    elif sys.platform.startswith('linux'):
        env.update(linux_vars(compilers, prefix))

    # Append caller-supplied values (including build/script_env) onto the
    # compiler flags gathered above instead of discarding them.
    for name in compilers:
        if name in env_dict:
            compilers[name] += env_dict[name]
    env.update(compilers)
    return env
def system_vars(env_dict, prefix):
    """Collect system-level environment variables for a build.

    This variant does not pass MAKEFLAGS through.  Compiler flags set by
    the platform helpers are extended with any matching entries from
    ``env_dict`` (e.g. build/script_env) before being merged in.
    """
    sys_env = dict()
    compiler_vars = defaultdict(text_type)

    sys_env["CPU_COUNT"] = get_cpu_count()
    if "LANG" in os.environ:
        sys_env["LANG"] = os.environ["LANG"]
    sys_env["PATH"] = os.environ["PATH"]
    sys_env = prepend_bin_path(sys_env, prefix)

    platform = sys.platform
    if platform == "win32":
        sys_env.update(windows_vars(prefix))
    else:
        sys_env.update(unix_vars(prefix))

    if platform == "darwin":
        sys_env.update(osx_vars(compiler_vars))
    elif platform.startswith("linux"):
        sys_env.update(linux_vars(compiler_vars, prefix))

    # make sure compiler_vars get appended to anything already set,
    # including build/script_env
    for key in compiler_vars:
        if key in env_dict:
            compiler_vars[key] += env_dict[key]
    sys_env.update(compiler_vars)
    return sys_env
def system_vars(env_dict, prefix):
    """Return a dict of system-derived environment variables for *prefix*.

    MAKEFLAGS and LANG are passed through from the calling environment
    when present.  Platform helpers contribute OS-specific variables, and
    compiler flags are merged last so values from ``env_dict`` (e.g.
    build/script_env) extend rather than replace them.
    """
    compiler_vars = defaultdict(text_type)
    d = dict()

    if 'MAKEFLAGS' in os.environ:
        d['MAKEFLAGS'] = os.environ['MAKEFLAGS']
    d['CPU_COUNT'] = get_cpu_count()
    if 'LANG' in os.environ:
        d['LANG'] = os.environ['LANG']
    d['PATH'] = os.environ['PATH']
    d = prepend_bin_path(d, prefix)

    d.update(windows_vars(prefix) if sys.platform == 'win32' else unix_vars(prefix))

    if sys.platform == 'darwin':
        d.update(osx_vars(compiler_vars))
    elif sys.platform.startswith('linux'):
        d.update(linux_vars(compiler_vars, prefix))

    # Merge compiler flags last; append any matching caller-supplied values
    # (including build/script_env) onto what the platform helpers set.
    for key in list(compiler_vars):
        if key in env_dict:
            compiler_vars[key] = compiler_vars[key] + env_dict[key]
    d.update(compiler_vars)
    return d
def create_env(prefix, specs, clear_cache=True, debug=False):
    '''
    Create a conda environment for the given prefix and specs.

    :param prefix: path of the environment to create/populate
    :param specs: iterable of package match specs to install
    :param clear_cache: when True, bypass the cached package index
    :param debug: when True, leave conda logging verbose and run plan
        execution with verbose=True
    '''
    if not debug:
        # This squelches a ton of conda output that is not hugely relevant
        logging.getLogger("conda.install").setLevel(logging.ERROR)
        logging.getLogger("fetch").setLevel(logging.WARN)
        logging.getLogger("print").setLevel(logging.WARN)
        logging.getLogger("progress").setLevel(logging.WARN)
        logging.getLogger("dotupdate").setLevel(logging.WARN)
        logging.getLogger("stdoutlog").setLevel(logging.WARN)
        logging.getLogger("requests.packages.urllib3.connectionpool").setLevel(
            logging.WARN)

    specs = list(specs)
    for feature, value in feature_list:
        if value:
            specs.append('%s@' % feature)

    for d in config.bldpkgs_dirs:
        if not isdir(d):
            os.makedirs(d)
        update_index(d)

    if specs:  # Don't waste time if there is nothing to do
        # FIXME: stupid hack to put test prefix on PATH so that runtime libs
        # can be found
        old_path = os.environ['PATH']
        os.environ['PATH'] = prepend_bin_path(os.environ.copy(), prefix, True)['PATH']

        # BUG FIX: clear_cache was previously hard-coded to True here,
        # silently ignoring the parameter; honor it so callers can reuse
        # the cached index.
        index = get_build_index(clear_cache=clear_cache)
        warn_on_old_conda_build(index)

        cc.pkgs_dirs = cc.pkgs_dirs[:1]
        actions = plan.install_actions(prefix, index, specs)
        plan.display_actions(actions, index)
        plan.execute_actions(actions, index, verbose=debug)

        os.environ['PATH'] = old_path

    # ensure prefix exists, even if empty, i.e. when specs are empty
    if not isdir(prefix):
        os.makedirs(prefix)
    shell = "cmd.exe" if on_win else "bash"
    symlink_conda(prefix, sys.prefix, shell)
def create_env(prefix, specs, clear_cache=True, debug=False):
    '''
    Create a conda environment for the given prefix and specs.

    :param prefix: target environment path; created even when specs is empty
    :param specs: iterable of package match specs to install
    :param clear_cache: when True, bypass the cached package index
    :param debug: when True, keep conda logging verbose and execute the
        install plan with verbose output
    '''
    if not debug:
        # This squelches a ton of conda output that is not hugely relevant
        logging.getLogger("conda.install").setLevel(logging.ERROR)
        logging.getLogger("fetch").setLevel(logging.WARN)
        logging.getLogger("print").setLevel(logging.WARN)
        logging.getLogger("progress").setLevel(logging.WARN)
        logging.getLogger("dotupdate").setLevel(logging.WARN)
        logging.getLogger("stdoutlog").setLevel(logging.WARN)
        logging.getLogger("requests.packages.urllib3.connectionpool").setLevel(logging.WARN)

    specs = list(specs)
    for feature, value in feature_list:
        if value:
            specs.append('%s@' % feature)

    for d in config.bldpkgs_dirs:
        if not isdir(d):
            os.makedirs(d)
        update_index(d)

    if specs:  # Don't waste time if there is nothing to do
        # FIXME: stupid hack to put test prefix on PATH so that runtime libs
        # can be found
        old_path = os.environ['PATH']
        os.environ['PATH'] = prepend_bin_path(os.environ.copy(), prefix, True)['PATH']

        # BUG FIX: forward the clear_cache argument instead of the previous
        # hard-coded True, which made the parameter a no-op.
        index = get_build_index(clear_cache=clear_cache)
        warn_on_old_conda_build(index)

        cc.pkgs_dirs = cc.pkgs_dirs[:1]
        actions = plan.install_actions(prefix, index, specs)
        plan.display_actions(actions, index)
        plan.execute_actions(actions, index, verbose=debug)

        os.environ['PATH'] = old_path

    # ensure prefix exists, even if empty, i.e. when specs are empty
    if not isdir(prefix):
        os.makedirs(prefix)
    shell = "cmd.exe" if on_win else "bash"
    symlink_conda(prefix, sys.prefix, shell)
def run_pkg_tests(m, env_prefix):
    """
    Run the tests defined in the recipe of a package in the given environment.

    :param m: package metadata whose recipe defines the tests
    :param env_prefix: prefix of the (already created) test environment
    """
    tmpdir = tempfile.mkdtemp()
    try:
        test_files = conda_build_test.create_test_files(m, tmpdir)
        py_files, pl_files, shell_files = test_files
        if not (py_files or pl_files or shell_files):
            return
        # BUG FIX: take a copy of the environment.  The original aliased
        # os.environ directly, so prepend_bin_path permanently mutated the
        # PATH of the running process.
        env = os.environ.copy()
        env = prepend_bin_path(env, env_prefix, prepend_prefix=True)
        conda_build_test.run_tests(m, env, tmpdir, py_files, pl_files,
                                   shell_files)
    finally:
        # Always clean up the scratch directory, even if the tests raise.
        shutil.rmtree(tmpdir)
def test(m, move_broken=True):
    '''
    Execute any test scripts for the given package.

    :param m: Package's metadata.
    :type m: Metadata
    :param move_broken: forwarded to tests_failed; when True a failing
        package is moved to the "broken" directory
    '''
    # remove from package cache
    rm_pkgs_cache(m.dist())

    # Scratch directory for the generated run_test.* files.
    tmp_dir = join(config.croot, 'test-tmp_dir')
    rm_rf(tmp_dir)
    os.makedirs(tmp_dir)
    create_files(tmp_dir, m)
    # Make Perl or Python-specific test files
    if m.name().startswith('perl-'):
        pl_files = create_pl_files(tmp_dir, m)
        py_files = False
    else:
        py_files = create_py_files(tmp_dir, m)
        pl_files = False
    shell_files = create_shell_files(tmp_dir, m)
    if not (py_files or shell_files or pl_files):
        print("Nothing to test for:", m.dist())
        return

    print("TEST START:", m.dist())
    # Start from clean build/test prefixes.  On Windows files may still be
    # locked, so they are moved to the trash instead of removed in place.
    if on_win:
        if isdir(config.build_prefix):
            move_to_trash(config.build_prefix, '')
        if isdir(config.test_prefix):
            move_to_trash(config.test_prefix, '')
    else:
        rm_rf(config.build_prefix)
        rm_rf(config.test_prefix)

    get_build_metadata(m)
    # Pin the exact package under test: "name version build_id".
    specs = ['%s %s %s' % (m.name(), m.version(), m.build_id())]

    # add packages listed in test/requires
    specs += m.get_value('test/requires', [])

    if py_files:
        # as the tests are run by python, ensure that python is installed.
        # (If they already provided python as a run or test requirement,
        # this won't hurt anything.)
        specs += ['python %s*' % environ.get_py_ver()]
    if pl_files:
        # as the tests are run by perl, we need to specify it
        specs += ['perl %s*' % environ.get_perl_ver()]

    create_env(config.test_prefix, specs)

    # Build the subprocess environment: current env + recipe-derived vars.
    env = dict(os.environ)
    env.update(environ.get_dict(m, prefix=config.test_prefix))

    # prepend bin (or Scripts) directory
    env = prepend_bin_path(env, config.test_prefix, prepend_prefix=True)

    if sys.platform == 'win32':
        env['PATH'] = config.test_prefix + os.pathsep + env['PATH']
    for varname in 'CONDA_PY', 'CONDA_NPY', 'CONDA_PERL':
        env[varname] = str(getattr(config, varname) or '')
    env['PREFIX'] = config.test_prefix

    # Python 2 Windows requires that envs variables be string, not unicode
    env = {str(key): str(value) for key, value in env.items()}

    # Run each kind of test script that was generated; any failure is
    # reported via tests_failed rather than raising.
    if py_files:
        try:
            subprocess.check_call([config.test_python, '-s',
                                   join(tmp_dir, 'run_test.py')],
                                  env=env, cwd=tmp_dir)
        except subprocess.CalledProcessError:
            tests_failed(m, move_broken=move_broken)

    if pl_files:
        try:
            subprocess.check_call([config.test_perl,
                                   join(tmp_dir, 'run_test.pl')],
                                  env=env, cwd=tmp_dir)
        except subprocess.CalledProcessError:
            tests_failed(m, move_broken=move_broken)

    if shell_files:
        if sys.platform == 'win32':
            test_file = join(tmp_dir, 'run_test.bat')
            cmd = [os.environ['COMSPEC'], '/c', 'call', test_file]
            try:
                subprocess.check_call(cmd, env=env, cwd=tmp_dir)
            except subprocess.CalledProcessError:
                tests_failed(m, move_broken=move_broken)
        else:
            test_file = join(tmp_dir, 'run_test.sh')
            # TODO: Run the test/commands here instead of in run_test.py
            cmd = [shell_path, '-x', '-e', test_file]
            try:
                subprocess.check_call(cmd, env=env, cwd=tmp_dir)
            except subprocess.CalledProcessError:
                tests_failed(m, move_broken=move_broken)

    print("TEST END:", m.dist())
def test(m, move_broken=True):
    '''
    Execute any test scripts for the given package.

    :param m: Package's metadata.
    :type m: Metadata
    :param move_broken: forwarded to tests_failed; when True a failing
        package is moved to the "broken" directory
    '''
    # remove from package cache
    rm_pkgs_cache(m.dist())

    tmp_dir = join(config.croot, 'test-tmp_dir')
    rm_rf(tmp_dir)
    if on_win:
        time.sleep(1)  # wait for rm_rf(tmp_dir) to finish before recreating tmp_dir
    os.makedirs(tmp_dir)
    create_files(tmp_dir, m)
    # Make Perl or Python-specific test files
    if m.name().startswith('perl-'):
        pl_files = create_pl_files(tmp_dir, m)
        py_files = False
        lua_files = False
    else:
        py_files = create_py_files(tmp_dir, m)
        pl_files = False
        lua_files = False
    shell_files = create_shell_files(tmp_dir, m)
    if not (py_files or shell_files or pl_files or lua_files):
        print("Nothing to test for:", m.dist())
        return

    print("TEST START:", m.dist())
    if on_win:
        if isdir(config.build_prefix):
            move_to_trash(config.build_prefix, '')
        if isdir(config.test_prefix):
            move_to_trash(config.test_prefix, '')
    else:
        rm_rf(config.build_prefix)
        rm_rf(config.test_prefix)

    get_build_metadata(m)
    specs = ['%s %s %s' % (m.name(), m.version(), m.build_id())]

    # add packages listed in test/requires
    specs += m.get_value('test/requires', [])

    if py_files:
        # as the tests are run by python, ensure that python is installed.
        # (If they already provided python as a run or test requirement,
        # this won't hurt anything.)
        specs += ['python %s*' % environ.get_py_ver()]
    if pl_files:
        # as the tests are run by perl, we need to specify it
        specs += ['perl %s*' % environ.get_perl_ver()]
    if lua_files:
        # not sure how this shakes out
        specs += ['lua %s*' % environ.get_lua_ver()]

    create_env(config.test_prefix, specs)

    env = dict(os.environ)
    env.update(environ.get_dict(m, prefix=config.test_prefix))

    # prepend bin (or Scripts) directory
    env = prepend_bin_path(env, config.test_prefix, prepend_prefix=True)

    if sys.platform == 'win32':
        env['PATH'] = config.test_prefix + os.pathsep + env['PATH']
    for varname in 'CONDA_PY', 'CONDA_NPY', 'CONDA_PERL', 'CONDA_LUA':
        env[varname] = str(getattr(config, varname) or '')
    env['PREFIX'] = config.test_prefix

    # Python 2 Windows requires that envs variables be string, not unicode
    env = {str(key): str(value) for key, value in env.items()}

    if py_files:
        try:
            subprocess.check_call(
                [config.test_python, '-s', join(tmp_dir, 'run_test.py')],
                env=env, cwd=tmp_dir)
        except subprocess.CalledProcessError:
            tests_failed(m, move_broken=move_broken)

    if pl_files:
        try:
            subprocess.check_call(
                [config.test_perl, join(tmp_dir, 'run_test.pl')],
                env=env, cwd=tmp_dir)
        except subprocess.CalledProcessError:
            tests_failed(m, move_broken=move_broken)

    if lua_files:
        try:
            subprocess.check_call(
                [config.test_lua, join(tmp_dir, 'run_test.lua')],
                env=env, cwd=tmp_dir)
        except subprocess.CalledProcessError:
            # BUG FIX: forward move_broken like every other interpreter
            # branch; it was previously dropped here, silently falling back
            # to the default regardless of the caller's choice.
            tests_failed(m, move_broken=move_broken)

    if shell_files:
        if sys.platform == 'win32':
            test_file = join(tmp_dir, 'run_test.bat')
            cmd = [os.environ['COMSPEC'], '/c', 'call', test_file]
            try:
                subprocess.check_call(cmd, env=env, cwd=tmp_dir)
            except subprocess.CalledProcessError:
                tests_failed(m, move_broken=move_broken)
        else:
            test_file = join(tmp_dir, 'run_test.sh')
            # TODO: Run the test/commands here instead of in run_test.py
            cmd = [shell_path, '-x', '-e', test_file]
            try:
                subprocess.check_call(cmd, env=env, cwd=tmp_dir)
            except subprocess.CalledProcessError:
                tests_failed(m, move_broken=move_broken)

    print("TEST END:", m.dist())
def test(m, move_broken=True, activate=True, debug=False):
    '''
    Execute any test scripts for the given package.

    :param m: Package's metadata.
    :type m: Metadata
    :param move_broken: forwarded to tests_failed; when True a failing
        package is moved to the "broken" directory
    :param activate: when True, activate the test environment inside the
        generated runner script instead of manually prepending its
        bin/Scripts directory to PATH
    :param debug: forwarded to create_env for verbose output
    '''
    with Locked(cc.root_dir):
        # remove from package cache
        rm_pkgs_cache(m.dist())

        tmp_dir = join(config.croot, 'test-tmp_dir')
        rm_rf(tmp_dir)
        if on_win:
            time.sleep(1)  # wait for rm_rf(tmp_dir) to finish before recreating tmp_dir
        os.makedirs(tmp_dir)
        create_files(tmp_dir, m)
        # Make Perl or Python-specific test files
        if m.name().startswith('perl-'):
            pl_files = create_pl_files(tmp_dir, m)
            py_files = False
            lua_files = False
        else:
            py_files = create_py_files(tmp_dir, m)
            pl_files = False
            lua_files = False
        shell_files = create_shell_files(tmp_dir, m)
        if not (py_files or shell_files or pl_files or lua_files):
            print("Nothing to test for:", m.dist())
            return

        print("TEST START:", m.dist())
        if on_win:
            if isdir(config.build_prefix):
                move_to_trash(config.build_prefix, '')
            if isdir(config.test_prefix):
                move_to_trash(config.test_prefix, '')
        else:
            rm_rf(config.build_prefix)
            rm_rf(config.test_prefix)

        get_build_metadata(m)
        specs = ['%s %s %s' % (m.name(), m.version(), m.build_id())]

        # add packages listed in the run environment and test/requires
        specs.extend(ms.spec for ms in m.ms_depends('run'))
        specs += m.get_value('test/requires', [])

        if py_files:
            # as the tests are run by python, ensure that python is installed.
            # (If they already provided python as a run or test requirement,
            # this won't hurt anything.)
            specs += ['python %s*' % environ.get_py_ver()]
        if pl_files:
            # as the tests are run by perl, we need to specify it
            specs += ['perl %s*' % environ.get_perl_ver()]
        if lua_files:
            # not sure how this shakes out
            specs += ['lua %s*' % environ.get_lua_ver()]

        create_env(config.test_prefix, specs, debug=debug)

        env = dict(os.environ.copy())
        env.update(environ.get_dict(m, prefix=config.test_prefix))

        if not activate:
            # prepend bin (or Scripts) directory
            env = prepend_bin_path(env, config.test_prefix, prepend_prefix=True)

            if on_win:
                env['PATH'] = config.test_prefix + os.pathsep + env['PATH']

        for varname in 'CONDA_PY', 'CONDA_NPY', 'CONDA_PERL', 'CONDA_LUA':
            env[varname] = str(getattr(config, varname) or '')
        env['PREFIX'] = config.test_prefix

        # Python 2 Windows requires that envs variables be string, not unicode
        env = {str(key): str(value) for key, value in env.items()}

        # Generate one runner script that activates (optionally) and then
        # invokes every per-language test file, so a single subprocess call
        # drives the whole test phase.
        suffix = "bat" if on_win else "sh"
        test_script = join(tmp_dir, "conda_test_runner.{suffix}".format(suffix=suffix))

        with open(test_script, 'w') as tf:
            if activate:
                source = "call " if on_win else "source "
                ext = ".bat" if on_win else ""
                tf.write("{source}activate{ext} {test_env}\n".format(
                    source=source, ext=ext, test_env=config.test_prefix))
                if on_win:
                    tf.write("if errorlevel 1 exit 1\n")
            if py_files:
                tf.write("{python} -s {test_file}\n".format(
                    python=config.test_python,
                    test_file=join(tmp_dir, 'run_test.py')))
                if on_win:
                    tf.write("if errorlevel 1 exit 1\n")
            if pl_files:
                # BUG FIX: the keyword must match the {perl} placeholder --
                # it was previously passed as python=..., which raises
                # KeyError at format time.
                tf.write("{perl} {test_file}\n".format(
                    perl=config.test_perl,
                    test_file=join(tmp_dir, 'run_test.pl')))
                if on_win:
                    tf.write("if errorlevel 1 exit 1\n")
            if lua_files:
                # BUG FIX: use the lua interpreter (was config.test_perl)
                # and match the {lua} placeholder (kwarg was python=...).
                tf.write("{lua} {test_file}\n".format(
                    lua=config.test_lua,
                    test_file=join(tmp_dir, 'run_test.lua')))
                if on_win:
                    tf.write("if errorlevel 1 exit 1\n")
            if shell_files:
                test_file = join(tmp_dir, 'run_test.' + suffix)
                if on_win:
                    tf.write("call {test_file}\n".format(test_file=test_file))
                    tf.write("if errorlevel 1 exit 1\n")
                else:
                    # TODO: Run the test/commands here instead of in run_test.py
                    tf.write("{shell_path} -x -e {test_file}\n".format(
                        shell_path=shell_path, test_file=test_file))

        if on_win:
            cmd = [env["COMSPEC"], "/d", "/c", test_script]
        else:
            cmd = [shell_path, '-x', '-e', test_script]
        try:
            subprocess.check_call(cmd, env=env, cwd=tmp_dir)
        except subprocess.CalledProcessError:
            tests_failed(m, move_broken=move_broken)

    print("TEST END:", m.dist())
def get_dict(m=None, prefix=None):
    """Assemble the environment-variable dict exported to build scripts.

    :param m: recipe metadata, or None; when given, recipe-specific
        variables (PKG_NAME, script_env passthrough, ...) are included
    :param prefix: target prefix; defaults to config.build_prefix
    :return: dict mapping variable names to string values
    """
    if not prefix:
        prefix = config.build_prefix

    python = config.build_python
    d = {'CONDA_BUILD': '1', 'PYTHONNOUSERSITE': '1'}
    d['CONDA_DEFAULT_ENV'] = config.build_prefix
    d['ARCH'] = str(cc.bits)
    d['PREFIX'] = prefix
    d['PYTHON'] = python
    d['PY3K'] = str(config.PY3K)
    d['STDLIB_DIR'] = get_stdlib_dir()
    d['SP_DIR'] = get_sp_dir()
    d['SYS_PREFIX'] = sys.prefix
    d['SYS_PYTHON'] = sys.executable
    d['PERL_VER'] = get_perl_ver()
    d['PY_VER'] = get_py_ver()
    if get_npy_ver():
        d['NPY_VER'] = get_npy_ver()
    d['SRC_DIR'] = source.get_dir()
    # Pass selected variables through from the calling environment.
    if "LANG" in os.environ:
        d['LANG'] = os.environ['LANG']
    if "HTTPS_PROXY" in os.environ:
        d['HTTPS_PROXY'] = os.environ['HTTPS_PROXY']
    if "HTTP_PROXY" in os.environ:
        d['HTTP_PROXY'] = os.environ['HTTP_PROXY']

    if m:
        # Recipe-requested passthrough of arbitrary env vars; missing ones
        # are exported with a sentinel value rather than omitted.
        for var_name in m.get_value('build/script_env', []):
            value = os.getenv(var_name)
            if value is None:
                value = '<UNDEFINED>'
            d[var_name] = value

    try:
        d['CPU_COUNT'] = str(multiprocessing.cpu_count())
    except NotImplementedError:
        d['CPU_COUNT'] = "1"

    d.update(**get_git_build_info(d['SRC_DIR']))
    d['PATH'] = dict(os.environ)['PATH']
    d = prepend_bin_path(d, prefix)

    if sys.platform == 'win32':         # -------- Windows
        d['SCRIPTS'] = join(prefix, 'Scripts')
        d['LIBRARY_PREFIX'] = join(prefix, 'Library')
        d['LIBRARY_BIN'] = join(d['LIBRARY_PREFIX'], 'bin')
        d['LIBRARY_INC'] = join(d['LIBRARY_PREFIX'], 'include')
        d['LIBRARY_LIB'] = join(d['LIBRARY_PREFIX'], 'lib')
        # This probably should be done more generally
        d['CYGWIN_PREFIX'] = prefix.replace('\\', '/').replace('C:', '/cygdrive/c')
        d['R'] = join(prefix, 'Scripts', 'R.exe')
    else:                               # -------- Unix
        d['HOME'] = os.getenv('HOME', 'UNKNOWN')
        d['PKG_CONFIG_PATH'] = join(prefix, 'lib', 'pkgconfig')
        d['R'] = join(prefix, 'bin', 'R')

    if sys.platform == 'darwin':        # -------- OSX
        d['OSX_ARCH'] = 'i386' if cc.bits == 32 else 'x86_64'
        d['CFLAGS'] = '-arch %(OSX_ARCH)s' % d
        d['CXXFLAGS'] = d['CFLAGS']
        d['LDFLAGS'] = d['CFLAGS']
        d['MACOSX_DEPLOYMENT_TARGET'] = '10.6'
    elif sys.platform.startswith('linux'):  # -------- Linux
        d['LD_RUN_PATH'] = prefix + '/lib'

    if m:
        d['PKG_NAME'] = m.name()
        d['PKG_VERSION'] = m.version()
        d['PKG_BUILDNUM'] = str(m.build_number())
        d['RECIPE_DIR'] = m.path

    return d
def create_env(prefix, specs, clear_cache=True, debug=False):
    '''
    Create a conda environment for the given prefix and specs.

    :param prefix: target environment path; created even when specs is empty
    :param specs: iterable of package match specs to install
    :param clear_cache: when True, bypass the cached package index
    :param debug: when True, raise conda logging to DEBUG and execute the
        install plan verbosely

    On a SystemExit caused by a too-short placeholder prefix, falls back to
    the legacy 80-character prefix length and retries once.
    '''
    if debug:
        logging.getLogger("conda").setLevel(logging.DEBUG)
        logging.getLogger("binstar").setLevel(logging.DEBUG)
        logging.getLogger("install").setLevel(logging.DEBUG)
        logging.getLogger("conda.install").setLevel(logging.DEBUG)
        logging.getLogger("fetch").setLevel(logging.DEBUG)
        logging.getLogger("print").setLevel(logging.DEBUG)
        logging.getLogger("progress").setLevel(logging.DEBUG)
        logging.getLogger("dotupdate").setLevel(logging.DEBUG)
        logging.getLogger("stdoutlog").setLevel(logging.DEBUG)
        logging.getLogger("requests").setLevel(logging.DEBUG)
    else:
        # This squelches a ton of conda output that is not hugely relevant
        logging.getLogger("conda").setLevel(logging.WARN)
        logging.getLogger("binstar").setLevel(logging.WARN)
        logging.getLogger("install").setLevel(logging.ERROR)
        logging.getLogger("conda.install").setLevel(logging.ERROR)
        logging.getLogger("fetch").setLevel(logging.WARN)
        logging.getLogger("print").setLevel(logging.WARN)
        logging.getLogger("progress").setLevel(logging.WARN)
        logging.getLogger("dotupdate").setLevel(logging.WARN)
        logging.getLogger("stdoutlog").setLevel(logging.WARN)
        logging.getLogger("requests").setLevel(logging.WARN)

    specs = list(specs)
    for feature, value in feature_list:
        if value:
            specs.append('%s@' % feature)

    for d in config.bldpkgs_dirs:
        if not isdir(d):
            os.makedirs(d)
        update_index(d)

    if specs:  # Don't waste time if there is nothing to do
        # FIXME: stupid hack to put test prefix on PATH so that runtime libs
        # can be found
        old_path = os.environ['PATH']
        os.environ['PATH'] = prepend_bin_path(os.environ.copy(), prefix, True)['PATH']
        try:
            # BUG FIX: clear_cache was previously hard-coded to True here.
            index = get_build_index(clear_cache=clear_cache)
            warn_on_old_conda_build(index)

            cc.pkgs_dirs = cc.pkgs_dirs[:1]
            actions = plan.install_actions(prefix, index, specs)
            plan.display_actions(actions, index)
            try:
                plan.execute_actions(actions, index, verbose=debug)
            except SystemExit as exc:
                # BUG FIX: use str(exc) -- BaseException.message does not
                # exist on Python 3 and raised AttributeError here.
                message = str(exc)
                if "too short in" in message and config.prefix_length > 80:
                    log.warn("Build prefix failed with prefix length {0}."
                             .format(config.prefix_length))
                    log.warn("Error was: ")
                    log.warn(message)
                    log.warn("One or more of your package dependencies needs to be "
                             "rebuilt with a longer prefix length.")
                    log.warn("Falling back to legacy prefix length of 80 characters.")
                    log.warn("Your package will not install into prefixes longer "
                             "than 80 characters.")
                    config.prefix_length = 80
                    create_env(prefix, specs, clear_cache=clear_cache, debug=debug)
                else:
                    # BUG FIX: any other SystemExit used to be silently
                    # swallowed; propagate it so real failures surface.
                    raise
        finally:
            # BUG FIX: restore PATH even when an exception propagates.
            os.environ['PATH'] = old_path

    # ensure prefix exists, even if empty, i.e. when specs are empty
    if not isdir(prefix):
        os.makedirs(prefix)
    shell = "cmd.exe" if on_win else "bash"
    symlink_conda(prefix, sys.prefix, shell)
def build(m, get_src=True, post=None, include_recipe=True):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :param get_src: Should we download the source?
    :type get_src: bool
    :type post: bool or None. None means run the whole build. True means run
    post only. False means stop just before the post.
    :param include_recipe: include the recipe in the built package when the
        metadata has a path
    '''
    if (m.get_value('build/detect_binary_files_with_prefix') or
            m.binary_has_prefix_files()):
        # We must use a long prefix here as the package will only be
        # installable into prefixes shorter than this one.
        config.use_long_build_prefix = True
    else:
        # In case there are multiple builds in the same process
        config.use_long_build_prefix = False

    if m.skip():
        print("Skipped: The %s recipe defines build/skip for this "
              "configuration." % m.dist())
        return

    if post in [False, None]:
        print("Removing old build environment")
        # On Windows files may still be locked; move them to the trash
        # rather than deleting in place.
        if on_win:
            if isdir(config.short_build_prefix):
                move_to_trash(config.short_build_prefix, '')
            if isdir(config.long_build_prefix):
                move_to_trash(config.long_build_prefix, '')
        else:
            rm_rf(config.short_build_prefix)
            rm_rf(config.long_build_prefix)
        print("Removing old work directory")
        if on_win:
            if isdir(source.WORK_DIR):
                move_to_trash(source.WORK_DIR, '')
        else:
            rm_rf(source.WORK_DIR)

        # Display the name only
        # Version number could be missing due to dependency on source info.
        print("BUILD START:", m.dist())
        create_env(config.build_prefix,
                   [ms.spec for ms in m.ms_depends('build')])

        # If the package being built is itself installed as a build
        # dependency, remove it first to avoid clobbering.
        if m.name() in [i.rsplit('-', 2)[0] for i in linked(config.build_prefix)]:
            print("%s is installed as a build dependency. Removing." % m.name())
            index = get_build_index(clear_cache=False)
            actions = plan.remove_actions(config.build_prefix, [m.name()],
                                          index=index)
            assert not plan.nothing_to_do(actions), actions
            plan.display_actions(actions, index)
            plan.execute_actions(actions, index)

        if get_src:
            # Execute any commands fetching the source (e.g., git) in the
            # _build environment. This makes it possible to provide source
            # fetchers (eg. git, hg, svn) as build dependencies.
            _old_path = os.environ['PATH']
            try:
                os.environ['PATH'] = prepend_bin_path({'PATH': _old_path},
                                                      config.build_prefix)['PATH']
                source.provide(m.path, m.get_section('source'))
            finally:
                os.environ['PATH'] = _old_path
            # Parse our metadata again because we did not initialize the
            # source information before.
            # By now, all jinja variables should be defined, so don't permit
            # undefined vars.
            m.parse_again(permit_undefined_jinja=False)

        print("Package:", m.dist())

        assert isdir(source.WORK_DIR)
        src_dir = source.get_dir()
        contents = os.listdir(src_dir)
        if contents:
            print("source tree in:", src_dir)
        else:
            print("no source")

        rm_rf(config.info_dir)
        # Snapshot the prefix before the build so new files can be diffed
        # out afterwards.
        files1 = prefix_files()
        for pat in m.always_include_files():
            has_matches = False
            for f in set(files1):
                if fnmatch.fnmatch(f, pat):
                    print("Including in package existing file", f)
                    # Discarding from the "before" set makes the file count
                    # as new, so it is packaged.
                    files1.discard(f)
                    has_matches = True
            if not has_matches:
                sys.exit("Error: Glob %s from always_include_files does not "
                         "match any files" % pat)
        # Save this for later
        with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
            f.write(u'\n'.join(sorted(list(files1))))
            f.write(u'\n')

        # Use script from recipe?
        script = m.get_value('build/script', None)
        if script:
            if isinstance(script, list):
                script = '\n'.join(script)

        if sys.platform == 'win32':
            build_file = join(m.path, 'bld.bat')
            if script:
                build_file = join(source.get_dir(), 'bld.bat')
                with open(join(source.get_dir(), 'bld.bat'), 'w') as bf:
                    bf.write(script)
            import conda_build.windows as windows
            windows.build(m, build_file)
        else:
            env = environ.get_dict(m)
            build_file = join(m.path, 'build.sh')

            if script:
                build_file = join(source.get_dir(), 'conda_build.sh')
                with open(build_file, 'w') as bf:
                    bf.write(script)
                os.chmod(build_file, 0o766)

            if isfile(build_file):
                cmd = [shell_path, '-x', '-e', build_file]
                _check_call(cmd, env=env, cwd=src_dir)

    if post in [True, None]:
        if post == True:
            # post-only run: recover the pre-build file snapshot saved above
            with open(join(config.croot, 'prefix_files.txt'), 'r') as f:
                files1 = set(f.read().splitlines())

        get_build_metadata(m)
        create_post_scripts(m)
        create_entry_points(m.get_value('build/entry_points'))
        assert not exists(config.info_dir)
        files2 = prefix_files()

        post_process(sorted(files2 - files1),
                     preserve_egg_dir=bool(m.get_value('build/preserve_egg_dir')))

        # The post processing may have deleted some files (like easy-install.pth)
        files2 = prefix_files()
        if any(config.meta_dir in join(config.build_prefix, f) for f in
                files2 - files1):
            sys.exit(indent("""Error: Untracked file(s) %s found in conda-meta directory.
This error usually comes from using conda in the build script.  Avoid doing this, as it
can lead to packages that include their dependencies.""" %
                (tuple(f for f in files2 - files1
                       if config.meta_dir in join(config.build_prefix, f)),)))
        post_build(m, sorted(files2 - files1))
        create_info_files(m, sorted(files2 - files1),
                          include_recipe=bool(m.path) and include_recipe)
        if m.get_value('build/noarch_python'):
            import conda_build.noarch_python as noarch_python
            noarch_python.transform(m, sorted(files2 - files1))

        files3 = prefix_files()
        fix_permissions(files3 - files1)

        path = bldpkg_path(m)
        t = tarfile.open(path, 'w:bz2')

        def order(f):
            # we don't care about empty files so send them back via 100000
            fsize = os.stat(join(config.build_prefix, f)).st_size or 100000
            # info/* records will be False == 0, others will be 1.
            info_order = int(os.path.dirname(f) != 'info')
            return info_order, fsize

        # add files in order of a) in info directory, b) increasing size so
        # we can access small manifest or json files without decompressing
        # possible large binary or data files
        for f in sorted(files3 - files1, key=order):
            t.add(join(config.build_prefix, f), f)
        t.close()

        print("BUILD END:", m.dist())

        # we're done building, perform some checks
        tarcheck.check_all(path)
        update_index(config.bldpkgs_dir)
    else:
        print("STOPPING BUILD BEFORE POST:", m.dist())
def get_dict(m=None, prefix=None):
    """Assemble the environment-variable dict exported to build scripts.

    :param m: recipe metadata, or None; when given, recipe-specific
        variables (PKG_NAME, script_env passthrough, git info, ...) are
        included
    :param prefix: target prefix; defaults to config.build_prefix
    :return: dict mapping variable names to string values
    """
    if not prefix:
        prefix = config.build_prefix

    python = config.build_python
    d = {'CONDA_BUILD': '1', 'PYTHONNOUSERSITE': '1'}
    d['CONDA_DEFAULT_ENV'] = config.build_prefix
    d['ARCH'] = str(cc.bits)
    d['PREFIX'] = prefix
    d['PYTHON'] = python
    d['PY3K'] = str(config.PY3K)
    d['STDLIB_DIR'] = get_stdlib_dir()
    d['SP_DIR'] = get_sp_dir()
    d['SYS_PREFIX'] = sys.prefix
    d['SYS_PYTHON'] = sys.executable
    d['PERL_VER'] = get_perl_ver()
    d['PY_VER'] = get_py_ver()
    if get_npy_ver():
        d['NPY_VER'] = get_npy_ver()
    d['SRC_DIR'] = source.get_dir()
    # Pass selected variables through from the calling environment.
    if "LANG" in os.environ:
        d['LANG'] = os.environ['LANG']
    if "HTTPS_PROXY" in os.environ:
        d['HTTPS_PROXY'] = os.environ['HTTPS_PROXY']
    if "HTTP_PROXY" in os.environ:
        d['HTTP_PROXY'] = os.environ['HTTP_PROXY']

    if m:
        # Recipe-requested passthrough of arbitrary env vars; unlike the
        # sibling variant, undefined variables are warned about and omitted
        # rather than exported with a sentinel.
        for var_name in m.get_value('build/script_env', []):
            value = os.getenv(var_name)
            if value is None:
                warnings.warn(
                    "The environment variable '%s' is undefined." % var_name,
                    UserWarning)
            else:
                d[var_name] = value

    if sys.platform == "darwin":
        # multiprocessing.cpu_count() is not reliable on OSX
        # See issue #645 on github.com/conda/conda-build
        out, err = subprocess.Popen('sysctl -n hw.logicalcpu', shell=True,
                                    stdout=subprocess.PIPE).communicate()
        d['CPU_COUNT'] = out.decode('utf-8').strip()
    else:
        try:
            d['CPU_COUNT'] = str(multiprocessing.cpu_count())
        except NotImplementedError:
            d['CPU_COUNT'] = "1"

    # NOTE(review): m may be None per the signature, in which case
    # m.get_value here would raise AttributeError -- confirm all callers
    # that reach this point pass a metadata object.
    if m.get_value('source/git_url'):
        git_url = m.get_value('source/git_url')
        if '://' not in git_url:
            # If git_url is a relative path instead of a url, convert it to
            # an abspath
            if not isabs(git_url):
                git_url = join(m.path, git_url)
            # join() with an already-absolute second argument returns it
            # unchanged, so this normpath is a no-op for absolute paths.
            git_url = normpath(join(m.path, git_url))
        d.update(**get_git_build_info(d['SRC_DIR'],
                                      git_url,
                                      m.get_value('source/git_rev')))

    d['PATH'] = dict(os.environ)['PATH']
    d = prepend_bin_path(d, prefix)

    if sys.platform == 'win32':         # -------- Windows
        d['SCRIPTS'] = join(prefix, 'Scripts')
        d['LIBRARY_PREFIX'] = join(prefix, 'Library')
        d['LIBRARY_BIN'] = join(d['LIBRARY_PREFIX'], 'bin')
        d['LIBRARY_INC'] = join(d['LIBRARY_PREFIX'], 'include')
        d['LIBRARY_LIB'] = join(d['LIBRARY_PREFIX'], 'lib')
        # This probably should be done more generally
        d['CYGWIN_PREFIX'] = prefix.replace('\\', '/').replace('C:', '/cygdrive/c')
        d['R'] = join(prefix, 'Scripts', 'R.exe')
    else:                               # -------- Unix
        d['HOME'] = os.getenv('HOME', 'UNKNOWN')
        d['PKG_CONFIG_PATH'] = join(prefix, 'lib', 'pkgconfig')
        d['R'] = join(prefix, 'bin', 'R')

    if sys.platform == 'darwin':        # -------- OSX
        d['OSX_ARCH'] = 'i386' if cc.bits == 32 else 'x86_64'
        d['CFLAGS'] = '-arch %(OSX_ARCH)s' % d
        d['CXXFLAGS'] = d['CFLAGS']
        d['LDFLAGS'] = d['CFLAGS']
        d['MACOSX_DEPLOYMENT_TARGET'] = '10.6'
    elif sys.platform.startswith('linux'):  # -------- Linux
        d['LD_RUN_PATH'] = prefix + '/lib'

    if m:
        d['PKG_NAME'] = m.name()
        d['PKG_VERSION'] = m.version()
        d['PKG_BUILDNUM'] = str(m.build_number())
        d['PKG_BUILD_STRING'] = str(m.build_id())
        d['RECIPE_DIR'] = m.path

    return d
def build(m, post=None, include_recipe=True, keep_old_work=False,
          need_source_download=True, need_reparse_in_env=False, verbose=True,
          dirty=False, activate=True, debug=False):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :type post: bool or None. None means run the whole build. True means run
    post only. False means stop just before the post.
    :type keep_old_work: bool: Keep any previous work directory.
    :type need_source_download: bool: if rendering failed to download source
    (due to missing tools), retry here after build env is populated
    '''
    if (m.get_value('build/detect_binary_files_with_prefix') or
            m.binary_has_prefix_files()) and not on_win:
        # We must use a long prefix here as the package will only be
        # installable into prefixes shorter than this one.
        config.use_long_build_prefix = True
    else:
        # In case there are multiple builds in the same process
        config.use_long_build_prefix = False

    if m.skip():
        print("Skipped: The %s recipe defines build/skip for this "
              "configuration." % m.dist())
        return

    with Locked(cc.root_dir):
        # If --keep-old-work, then move the contents of source.WORK_DIR to a
        # temporary directory for the duration of the build.
        # The source unpacking procedure is too varied and complex
        # to allow this to be written cleanly (see source.get_dir() for example)
        if keep_old_work:
            old_WORK_DIR = tempfile.mkdtemp()
            old_sub_dirs = [name for name in os.listdir(source.WORK_DIR)
                            if os.path.isdir(os.path.join(source.WORK_DIR, name))]
            if len(old_sub_dirs):
                print("Keeping old work directory backup: %s => %s"
                      % (old_sub_dirs, old_WORK_DIR))
                for old_sub in old_sub_dirs:
                    shutil.move(os.path.join(source.WORK_DIR, old_sub), old_WORK_DIR)

        if post in [False, None]:
            print("Removing old build environment")
            print("BUILD START:", m.dist())
            # BUG FIX: condition was inverted (`not ... or not ...`), which
            # printed the "deferred" notice precisely when nothing was
            # deferred.  The message applies when a download or reparse is
            # still pending.
            if need_source_download or need_reparse_in_env:
                print(" (actual version deferred until further download or env creation)")
            if on_win:
                if isdir(config.short_build_prefix):
                    move_to_trash(config.short_build_prefix, '')
                if isdir(config.long_build_prefix):
                    move_to_trash(config.long_build_prefix, '')
            else:
                rm_rf(config.short_build_prefix)
                rm_rf(config.long_build_prefix)

            specs = [ms.spec for ms in m.ms_depends('build')]
            if activate:
                # If we activate the build envrionment, we need to be sure that we
                # have the appropriate VCS available in the environment.  People
                # are not used to explicitly listing it in recipes, though.
                # We add it for them here, but warn them about it.
                vcs_source = m.uses_vcs_in_build()
                if vcs_source and vcs_source not in specs:
                    vcs_executable = "hg" if vcs_source == "mercurial" else vcs_source
                    has_vcs_available = os.path.isfile(
                        external.find_executable(vcs_executable) or "")
                    if not has_vcs_available:
                        if (vcs_source != "mercurial" or
                                not any(spec.startswith('python') and "3." in spec
                                        for spec in specs)):
                            specs.append(vcs_source)
                            # BUG FIX: the message contains a {} placeholder but
                            # .format() was never called, printing a literal {}.
                            log.warn("Your recipe depends on {} at build time (for templates), "
                                     "but you have not listed it as a build dependency. Doing "
                                     "so for this build.".format(vcs_source))
                        else:
                            raise ValueError("Your recipe uses mercurial in build, but mercurial"
                                             " does not yet support Python 3. Please handle all of "
                                             "your mercurial actions outside of your build script.")
            # Display the name only
            # Version number could be missing due to dependency on source info.
            create_env(config.build_prefix, specs, debug=debug)

            if need_source_download:
                # Execute any commands fetching the source (e.g., git) in the _build environment.
                # This makes it possible to provide source fetchers (eg. git, hg, svn) as build
                # dependencies.
                if not activate:
                    _old_path = os.environ['PATH']
                    os.environ['PATH'] = prepend_bin_path({'PATH': _old_path},
                                                          config.build_prefix)['PATH']
                try:
                    m, need_source_download, need_reparse_in_env = parse_or_try_download(
                        m, no_download_source=False, force_download=True,
                        verbose=verbose, dirty=dirty)
                    assert not need_source_download, "Source download failed. Please investigate."
                finally:
                    if not activate:
                        os.environ['PATH'] = _old_path
                print("BUILD START:", m.dist())

            if need_reparse_in_env:
                reparse(m)
                print("BUILD START:", m.dist())

            # Remove the package itself if it is installed as a build dependency,
            # otherwise the new build would collide with the installed copy.
            if m.name() in [i.rsplit('-', 2)[0] for i in linked(config.build_prefix)]:
                print("%s is installed as a build dependency. Removing." % m.name())
                index = get_build_index(clear_cache=False)
                actions = plan.remove_actions(config.build_prefix, [m.name()], index=index)
                assert not plan.nothing_to_do(actions), actions
                plan.display_actions(actions, index)
                plan.execute_actions(actions, index)

            print("Package:", m.dist())

            src_dir = source.get_dir()
            if isdir(source.WORK_DIR):
                print("source tree in:", src_dir)
            else:
                print("no source - creating empty work folder")
                os.makedirs(source.WORK_DIR)

            rm_rf(config.info_dir)
            files1 = prefix_files()
            for pat in m.always_include_files():
                has_matches = False
                for f in set(files1):
                    if fnmatch.fnmatch(f, pat):
                        print("Including in package existing file", f)
                        files1.discard(f)
                        has_matches = True
                if not has_matches:
                    log.warn("Glob %s from always_include_files does not match any files" % pat)
            # Save this for later
            with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
                f.write(u'\n'.join(sorted(list(files1))))
                f.write(u'\n')

            # Use script from recipe?
            script = m.get_value('build/script', None)
            if script:
                if isinstance(script, list):
                    script = '\n'.join(script)

            if isdir(source.WORK_DIR):
                if on_win:
                    build_file = join(m.path, 'bld.bat')
                    if script:
                        build_file = join(source.get_dir(), 'bld.bat')
                        with open(join(source.get_dir(), 'bld.bat'), 'w') as bf:
                            bf.write(script)
                    import conda_build.windows as windows
                    windows.build(m, build_file, dirty=dirty, activate=activate)
                else:
                    build_file = join(m.path, 'build.sh')

                    # There is no sense in trying to run an empty build script.
                    if isfile(build_file) or script:
                        env = environ.get_dict(m, dirty=dirty)
                        work_file = join(source.get_dir(), 'conda_build.sh')
                        if script:
                            with open(work_file, 'w') as bf:
                                bf.write(script)
                        if activate:
                            if isfile(build_file):
                                data = open(build_file).read()
                            else:
                                data = open(work_file).read()
                            # Prepend env activation so the script runs inside
                            # the build prefix.
                            with open(work_file, 'w') as bf:
                                bf.write("source activate {build_prefix}\n".format(
                                    build_prefix=config.build_prefix))
                                bf.write(data)
                        else:
                            if not isfile(work_file):
                                shutil.copy(build_file, work_file)
                        os.chmod(work_file, 0o766)

                        if isfile(work_file):
                            cmd = [shell_path, '-x', '-e', work_file]
                            _check_call(cmd, env=env, cwd=src_dir)

        if post in [True, None]:
            if post:
                # Post-only run: recover the pre-build file list saved earlier.
                with open(join(config.croot, 'prefix_files.txt'), 'r') as f:
                    files1 = set(f.read().splitlines())

            get_build_metadata(m)
            create_post_scripts(m)
            create_entry_points(m.get_value('build/entry_points'))
            assert not exists(config.info_dir)
            files2 = prefix_files()

            post_process(sorted(files2 - files1),
                         preserve_egg_dir=bool(m.get_value('build/preserve_egg_dir')))

            # The post processing may have deleted some files (like easy-install.pth)
            files2 = prefix_files()
            if any(config.meta_dir in join(config.build_prefix, f) for f in files2 - files1):
                sys.exit(indent("""Error: Untracked file(s) %s found in conda-meta directory.
This error usually comes from using conda in the build script. Avoid doing this, as it
can lead to packages that include their dependencies.""" %
                    (tuple(f for f in files2 - files1
                           if config.meta_dir in join(config.build_prefix, f)),)))
            post_build(m, sorted(files2 - files1))
            create_info_files(m, sorted(files2 - files1),
                              include_recipe=bool(m.path) and include_recipe)
            if m.get_value('build/noarch_python'):
                import conda_build.noarch_python as noarch_python
                noarch_python.transform(m, sorted(files2 - files1))

            files3 = prefix_files()
            fix_permissions(files3 - files1)

            path = bldpkg_path(m)
            t = tarfile.open(path, 'w:bz2')

            def order(f):
                # we don't care about empty files so send them back via 100000
                fsize = os.stat(join(config.build_prefix, f)).st_size or 100000
                # info/* records will be False == 0, others will be 1.
                info_order = int(os.path.dirname(f) != 'info')
                return info_order, fsize

            # add files in order of a) in info directory, b) increasing size so
            # we can access small manifest or json files without decompressing
            # possible large binary or data files
            for f in sorted(files3 - files1, key=order):
                t.add(join(config.build_prefix, f), f)
            t.close()

            print("BUILD END:", m.dist())

            # we're done building, perform some checks
            tarcheck.check_all(path)
            update_index(config.bldpkgs_dir)
        else:
            print("STOPPING BUILD BEFORE POST:", m.dist())

        if keep_old_work and len(old_sub_dirs):
            print("Restoring old work directory backup: %s :: %s => %s"
                  % (old_WORK_DIR, old_sub_dirs, source.WORK_DIR))
            for old_sub in old_sub_dirs:
                if os.path.exists(os.path.join(source.WORK_DIR, old_sub)):
                    print("Not restoring old source directory %s over new build's version" % (old_sub))
                else:
                    shutil.move(os.path.join(old_WORK_DIR, old_sub), source.WORK_DIR)
            shutil.rmtree(old_WORK_DIR, ignore_errors=True)
def create_env(prefix, specs, clear_cache=True, debug=False):
    '''
    Create a conda environment for the given prefix and specs.

    :param prefix: path of the environment to create (created empty if
        specs is empty)
    :param specs: iterable of match specs to install
    :param clear_cache: NOTE(review): currently not forwarded to
        get_build_index(), which is always called with clear_cache=True;
        only propagated on the prefix-length fallback recursion — confirm
        whether that is intended
    :param debug: enable verbose conda logging and verbose plan execution
    '''
    if debug:
        logging.getLogger("conda").setLevel(logging.DEBUG)
        logging.getLogger("binstar").setLevel(logging.DEBUG)
        logging.getLogger("install").setLevel(logging.DEBUG)
        logging.getLogger("conda.install").setLevel(logging.DEBUG)
        logging.getLogger("fetch").setLevel(logging.DEBUG)
        logging.getLogger("print").setLevel(logging.DEBUG)
        logging.getLogger("progress").setLevel(logging.DEBUG)
        logging.getLogger("dotupdate").setLevel(logging.DEBUG)
        logging.getLogger("stdoutlog").setLevel(logging.DEBUG)
        logging.getLogger("requests").setLevel(logging.DEBUG)
    else:
        # This squelches a ton of conda output that is not hugely relevant
        logging.getLogger("conda").setLevel(logging.WARN)
        logging.getLogger("binstar").setLevel(logging.WARN)
        logging.getLogger("install").setLevel(logging.ERROR)
        logging.getLogger("conda.install").setLevel(logging.ERROR)
        logging.getLogger("fetch").setLevel(logging.WARN)
        logging.getLogger("print").setLevel(logging.WARN)
        logging.getLogger("progress").setLevel(logging.WARN)
        logging.getLogger("dotupdate").setLevel(logging.WARN)
        logging.getLogger("stdoutlog").setLevel(logging.WARN)
        logging.getLogger("requests").setLevel(logging.WARN)

    specs = list(specs)
    # Track features are modeled as "<feature>@" specs.
    for feature, value in feature_list:
        if value:
            specs.append('%s@' % feature)

    for d in config.bldpkgs_dirs:
        if not isdir(d):
            os.makedirs(d)
        update_index(d)
    if specs:  # Don't waste time if there is nothing to do
        # FIXME: stupid hack to put test prefix on PATH so that runtime libs can be found
        old_path = os.environ['PATH']
        os.environ['PATH'] = prepend_bin_path(os.environ.copy(), prefix, True)['PATH']

        index = get_build_index(clear_cache=True)

        warn_on_old_conda_build(index)

        cc.pkgs_dirs = cc.pkgs_dirs[:1]
        actions = plan.install_actions(prefix, index, specs)
        plan.display_actions(actions, index)
        try:
            plan.execute_actions(actions, index, verbose=debug)
        except SystemExit as exc:
            if "too short in" in str(exc) and config.prefix_length > 80:
                log.warn("Build prefix failed with prefix length {0}."
                         .format(config.prefix_length))
                log.warn("Error was: ")
                log.warn(str(exc))
                log.warn("One or more of your package dependencies needs to be rebuilt with a "
                         "longer prefix length.")
                log.warn("Falling back to legacy prefix length of 80 characters.")
                log.warn("Your package will not install into prefixes longer than 80 characters.")
                config.prefix_length = 80
                create_env(config.build_prefix, specs, clear_cache=clear_cache, debug=debug)
            else:
                # BUG FIX: previously any other SystemExit (e.g. a genuine
                # solver failure) was silently swallowed, leaving a broken
                # environment behind.  Propagate it.
                raise
        finally:
            # BUG FIX: restore PATH even when an exception escapes; it was
            # previously only restored on the success path.
            os.environ['PATH'] = old_path

    # ensure prefix exists, even if empty, i.e. when specs are empty
    if not isdir(prefix):
        os.makedirs(prefix)

    if on_win:
        shell = "cmd.exe"
    else:
        shell = "bash"
    symlink_conda(prefix, sys.prefix, shell)
def test(m, move_broken=True, activate=True):
    '''
    Execute any test scripts for the given package.

    :param m: Package's metadata.
    :type m: Metadata
    :param move_broken: on test failure, passed through to tests_failed()
    :param activate: activate the test environment in the runner script
        instead of only prepending its bin directory to PATH
    '''
    with Locked(cc.root_dir):
        # remove from package cache
        rm_pkgs_cache(m.dist())

        tmp_dir = join(config.croot, 'test-tmp_dir')
        rm_rf(tmp_dir)
        if on_win:
            time.sleep(1)  # wait for rm_rf(tmp_dir) to finish before recreating tmp_dir
        os.makedirs(tmp_dir)
        create_files(tmp_dir, m)
        # Make Perl or Python-specific test files
        if m.name().startswith('perl-'):
            pl_files = create_pl_files(tmp_dir, m)
            py_files = False
            lua_files = False
        else:
            py_files = create_py_files(tmp_dir, m)
            pl_files = False
            lua_files = False
        shell_files = create_shell_files(tmp_dir, m)
        if not (py_files or shell_files or pl_files or lua_files):
            print("Nothing to test for:", m.dist())
            return

        print("TEST START:", m.dist())
        if on_win:
            if isdir(config.build_prefix):
                move_to_trash(config.build_prefix, '')
            if isdir(config.test_prefix):
                move_to_trash(config.test_prefix, '')
        else:
            rm_rf(config.build_prefix)
            rm_rf(config.test_prefix)

        get_build_metadata(m)
        specs = ['%s %s %s' % (m.name(), m.version(), m.build_id())]

        # add packages listed in the run environment and test/requires
        specs.extend(ms.spec for ms in m.ms_depends('run'))
        specs += m.get_value('test/requires', [])

        if py_files:
            # as the tests are run by python, ensure that python is installed.
            # (If they already provided python as a run or test requirement,
            # this won't hurt anything.)
            specs += ['python %s*' % environ.get_py_ver()]
        if pl_files:
            # as the tests are run by perl, we need to specify it
            specs += ['perl %s*' % environ.get_perl_ver()]
        if lua_files:
            # not sure how this shakes out
            specs += ['lua %s*' % environ.get_lua_ver()]

        create_env(config.test_prefix, specs)

        env = dict(os.environ)
        env.update(environ.get_dict(m, prefix=config.test_prefix))

        if not activate:
            # prepend bin (or Scripts) directory
            env = prepend_bin_path(env, config.test_prefix, prepend_prefix=True)
            if on_win:
                env['PATH'] = config.test_prefix + os.pathsep + env['PATH']

        for varname in 'CONDA_PY', 'CONDA_NPY', 'CONDA_PERL', 'CONDA_LUA':
            env[varname] = str(getattr(config, varname) or '')
        env['PREFIX'] = config.test_prefix

        # Python 2 Windows requires that envs variables be string, not unicode
        env = {str(key): str(value) for key, value in env.items()}
        suffix = "bat" if on_win else "sh"
        test_script = join(tmp_dir, "conda_test_runner.{suffix}".format(suffix=suffix))

        with open(test_script, 'w') as tf:
            if activate:
                tf.write("{source}activate _test\n".format(
                    source="" if on_win else "source "))
            if py_files:
                tf.write("{python} -s {test_file}\n".format(
                    python=config.test_python,
                    test_file=join(tmp_dir, 'run_test.py')))
            if pl_files:
                # BUG FIX: placeholder was {perl} but the keyword passed to
                # .format() was `python`, raising KeyError at runtime.
                tf.write("{perl} {test_file}\n".format(
                    perl=config.test_perl,
                    test_file=join(tmp_dir, 'run_test.pl')))
            if lua_files:
                # BUG FIX: placeholder was {lua} but the keyword was `python`
                # (KeyError) and the value was config.test_perl (wrong
                # interpreter).  NOTE(review): assumes config exposes
                # test_lua alongside test_python/test_perl — confirm.
                tf.write("{lua} {test_file}\n".format(
                    lua=config.test_lua,
                    test_file=join(tmp_dir, 'run_test.lua')))
            if shell_files:
                test_file = join(tmp_dir, 'run_test.' + suffix)
                if on_win:
                    tf.write("call {test_file}\n".format(test_file=test_file))
                else:
                    # TODO: Run the test/commands here instead of in run_test.py
                    tf.write("{shell_path} -x -e {test_file}\n".format(
                        shell_path=shell_path, test_file=test_file))

        if on_win:
            cmd = [env["COMSPEC"], "/d", "/c", test_script]
        else:
            cmd = [shell_path, '-x', '-e', test_script]
        try:
            subprocess.check_call(cmd, env=env, cwd=tmp_dir)
        except subprocess.CalledProcessError:
            tests_failed(m, move_broken=move_broken)

    print("TEST END:", m.dist())
def build(m, post=None, include_recipe=True, keep_old_work=False,
          need_source_download=True, need_reparse_in_env=False, verbose=True,
          dirty=False, activate=True, debug=False):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :type post: bool or None. None means run the whole build. True means run
    post only. False means stop just before the post.
    :type keep_old_work: bool: Keep any previous work directory.
    :type need_source_download: bool: if rendering failed to download source
    (due to missing tools), retry here after build env is populated
    '''
    if (m.get_value('build/detect_binary_files_with_prefix') or
            m.binary_has_prefix_files()) and not on_win:
        # We must use a long prefix here as the package will only be
        # installable into prefixes shorter than this one.
        config.use_long_build_prefix = True
    else:
        # In case there are multiple builds in the same process
        config.use_long_build_prefix = False

    if m.skip():
        print("Skipped: The %s recipe defines build/skip for this "
              "configuration." % m.dist())
        return

    with Locked(cc.root_dir):
        # If --keep-old-work, then move the contents of source.WORK_DIR to a
        # temporary directory for the duration of the build.
        # The source unpacking procedure is too varied and complex
        # to allow this to be written cleanly (see source.get_dir() for example)
        if keep_old_work:
            old_WORK_DIR = tempfile.mkdtemp()
            old_sub_dirs = [name for name in os.listdir(source.WORK_DIR)
                            if os.path.isdir(os.path.join(source.WORK_DIR, name))]
            if len(old_sub_dirs):
                print("Keeping old work directory backup: %s => %s"
                      % (old_sub_dirs, old_WORK_DIR))
                for old_sub in old_sub_dirs:
                    shutil.move(os.path.join(source.WORK_DIR, old_sub), old_WORK_DIR)

        if post in [False, None]:
            print("Removing old build environment")
            print("BUILD START:", m.dist())
            # A download or in-env reparse is still pending, so the version
            # shown above may not be final yet.
            if need_source_download or need_reparse_in_env:
                print(" (actual version deferred until further download or env creation)")
            if on_win:
                if isdir(config.short_build_prefix):
                    move_to_trash(config.short_build_prefix, '')
                if isdir(config.long_build_prefix):
                    move_to_trash(config.long_build_prefix, '')
            else:
                rm_rf(config.short_build_prefix)
                rm_rf(config.long_build_prefix)

            specs = [ms.spec for ms in m.ms_depends('build')]
            if activate:
                # If we activate the build envrionment, we need to be sure that we
                # have the appropriate VCS available in the environment.  People
                # are not used to explicitly listing it in recipes, though.
                # We add it for them here, but warn them about it.
                vcs_source = m.uses_vcs_in_build()
                if vcs_source and vcs_source not in specs:
                    vcs_executable = "hg" if vcs_source == "mercurial" else vcs_source
                    has_vcs_available = os.path.isfile(external.find_executable(vcs_executable) or "")
                    if not has_vcs_available:
                        if (vcs_source != "mercurial" or
                                not any(spec.startswith('python') and "3." in spec
                                        for spec in specs)):
                            specs.append(vcs_source)
                            # NOTE(review): this message contains a {} placeholder
                            # but .format(vcs_source) is never called, so a
                            # literal {} is printed — candidate bug.
                            log.warn("Your recipe depends on {} at build time (for templates), "
                                     "but you have not listed it as a build dependency. Doing "
                                     "so for this build.")
                        else:
                            raise ValueError("Your recipe uses mercurial in build, but mercurial"
                                             " does not yet support Python 3. Please handle all of "
                                             "your mercurial actions outside of your build script.")
            # Display the name only
            # Version number could be missing due to dependency on source info.
            create_env(config.build_prefix, specs, debug=debug)

            if need_source_download:
                # Execute any commands fetching the source (e.g., git) in the _build environment.
                # This makes it possible to provide source fetchers (eg. git, hg, svn) as build
                # dependencies.
                if not activate:
                    _old_path = os.environ['PATH']
                    os.environ['PATH'] = prepend_bin_path({'PATH': _old_path},
                                                          config.build_prefix)['PATH']
                try:
                    m, need_source_download, need_reparse_in_env = parse_or_try_download(m,
                        no_download_source=False, force_download=True,
                        verbose=verbose, dirty=dirty)
                    assert not need_source_download, "Source download failed. Please investigate."
                finally:
                    if not activate:
                        os.environ['PATH'] = _old_path
                print("BUILD START:", m.dist())

            if need_reparse_in_env:
                reparse(m)
                print("BUILD START:", m.dist())

            # The package being built may itself be installed as a build
            # dependency; remove it so the fresh build doesn't collide.
            if m.name() in [i.rsplit('-', 2)[0] for i in linked(config.build_prefix)]:
                print("%s is installed as a build dependency. Removing." % m.name())
                index = get_build_index(clear_cache=False)
                actions = plan.remove_actions(config.build_prefix, [m.name()], index=index)
                assert not plan.nothing_to_do(actions), actions
                plan.display_actions(actions, index)
                plan.execute_actions(actions, index)

            print("Package:", m.dist())

            src_dir = source.get_dir()
            if isdir(src_dir):
                print("source tree in:", src_dir)
            else:
                print("no source - creating empty work folder")
                os.makedirs(src_dir)

            rm_rf(config.info_dir)
            # Snapshot of prefix contents before the build script runs; the
            # package payload is everything added relative to this set.
            files1 = prefix_files()
            for pat in m.always_include_files():
                has_matches = False
                for f in set(files1):
                    if fnmatch.fnmatch(f, pat):
                        print("Including in package existing file", f)
                        files1.discard(f)
                        has_matches = True
                if not has_matches:
                    log.warn("Glob %s from always_include_files does not match any files" % pat)
            # Save this for later
            with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
                f.write(u'\n'.join(sorted(list(files1))))
                f.write(u'\n')

            # Use script from recipe?
            script = m.get_value('build/script', None)
            if script:
                if isinstance(script, list):
                    script = '\n'.join(script)

            if isdir(source.WORK_DIR):
                if on_win:
                    build_file = join(m.path, 'bld.bat')
                    if script:
                        build_file = join(source.get_dir(), 'bld.bat')
                        with open(join(source.get_dir(), 'bld.bat'), 'w') as bf:
                            bf.write(script)
                    import conda_build.windows as windows
                    windows.build(m, build_file, dirty=dirty, activate=activate)
                else:
                    build_file = join(m.path, 'build.sh')

                    # There is no sense in trying to run an empty build script.
                    if isfile(build_file) or script:
                        env = environ.get_dict(m, dirty=dirty)
                        work_file = join(source.get_dir(), 'conda_build.sh')
                        if script:
                            with open(work_file, 'w') as bf:
                                bf.write(script)
                        if activate:
                            if isfile(build_file):
                                data = open(build_file).read()
                            else:
                                data = open(work_file).read()
                            # Prepend activation of the build prefix so the
                            # script runs inside it.
                            with open(work_file, 'w') as bf:
                                bf.write("source activate {build_prefix}\n".format(
                                    build_prefix=config.build_prefix))
                                bf.write(data)
                        else:
                            if not isfile(work_file):
                                shutil.copy(build_file, work_file)
                        os.chmod(work_file, 0o766)

                        if isfile(work_file):
                            cmd = [shell_path, '-x', '-e', work_file]
                            _check_call(cmd, env=env, cwd=src_dir)

        if post in [True, None]:
            if post:
                # Post-only run: restore the pre-build file snapshot that the
                # build phase saved to prefix_files.txt.
                with open(join(config.croot, 'prefix_files.txt'), 'r') as f:
                    files1 = set(f.read().splitlines())

            get_build_metadata(m)
            create_post_scripts(m)
            create_entry_points(m.get_value('build/entry_points'))
            assert not exists(config.info_dir)
            files2 = prefix_files()

            post_process(sorted(files2 - files1),
                         preserve_egg_dir=bool(m.get_value('build/preserve_egg_dir')))

            # The post processing may have deleted some files (like easy-install.pth)
            files2 = prefix_files()
            if any(config.meta_dir in join(config.build_prefix, f) for f in files2 - files1):
                sys.exit(indent("""Error: Untracked file(s) %s found in conda-meta directory.
This error usually comes from using conda in the build script. Avoid doing this, as it
can lead to packages that include their dependencies.""" %
                    (tuple(f for f in files2 - files1
                           if config.meta_dir in join(config.build_prefix, f)),)))
            post_build(m, sorted(files2 - files1))
            create_info_files(m, sorted(files2 - files1),
                              include_recipe=bool(m.path) and include_recipe)
            if m.get_value('build/noarch_python'):
                import conda_build.noarch_python as noarch_python
                noarch_python.transform(m, sorted(files2 - files1))

            files3 = prefix_files()
            fix_permissions(files3 - files1)

            path = bldpkg_path(m)
            t = tarfile.open(path, 'w:bz2')

            def order(f):
                # we don't care about empty files so send them back via 100000
                fsize = os.stat(join(config.build_prefix, f)).st_size or 100000
                # info/* records will be False == 0, others will be 1.
                info_order = int(os.path.dirname(f) != 'info')
                return info_order, fsize

            # add files in order of a) in info directory, b) increasing size so
            # we can access small manifest or json files without decompressing
            # possible large binary or data files
            for f in sorted(files3 - files1, key=order):
                t.add(join(config.build_prefix, f), f)
            t.close()

            print("BUILD END:", m.dist())

            # we're done building, perform some checks
            tarcheck.check_all(path)
            update_index(config.bldpkgs_dir)
        else:
            print("STOPPING BUILD BEFORE POST:", m.dist())

        if keep_old_work and len(old_sub_dirs):
            print("Restoring old work directory backup: %s :: %s => %s"
                  % (old_WORK_DIR, old_sub_dirs, source.WORK_DIR))
            for old_sub in old_sub_dirs:
                if os.path.exists(os.path.join(source.WORK_DIR, old_sub)):
                    print("Not restoring old source directory %s over new build's version" % (old_sub))
                else:
                    shutil.move(os.path.join(old_WORK_DIR, old_sub), source.WORK_DIR)
            shutil.rmtree(old_WORK_DIR, ignore_errors=True)
def get_dict(m=None, prefix=None, dirty=False):
    """Return the dict of environment variables exported to build scripts.

    :param m: package Metadata (optional; when absent, metadata-derived
        variables such as PKG_NAME and git info are skipped)
    :param prefix: target prefix; defaults to config.build_prefix
    :param dirty: accepted for compatibility with callers that pass
        ``dirty=dirty`` (see build()); not used here — TODO confirm
        intended semantics
    :return: dict of variable name -> string value
    """
    if not prefix:
        prefix = config.build_prefix
    python = config.build_python
    d = {'CONDA_BUILD': '1', 'PYTHONNOUSERSITE': '1'}
    d['CONDA_DEFAULT_ENV'] = config.build_prefix
    d['ARCH'] = str(cc.bits)
    d['PREFIX'] = prefix
    d['PYTHON'] = python
    d['PY3K'] = str(config.PY3K)
    d['STDLIB_DIR'] = get_stdlib_dir()
    d['SP_DIR'] = get_sp_dir()
    d['SYS_PREFIX'] = sys.prefix
    d['SYS_PYTHON'] = sys.executable
    d['PERL_VER'] = get_perl_ver()
    d['PY_VER'] = get_py_ver()
    if get_npy_ver():
        d['NPY_VER'] = get_npy_ver()
    d['SRC_DIR'] = source.get_dir()
    # Pass selected host environment variables through to the build.
    if "LANG" in os.environ:
        d['LANG'] = os.environ['LANG']
    if "HTTPS_PROXY" in os.environ:
        d['HTTPS_PROXY'] = os.environ['HTTPS_PROXY']
    if "HTTP_PROXY" in os.environ:
        d['HTTP_PROXY'] = os.environ['HTTP_PROXY']

    if m:
        # Whitelisted host variables requested by the recipe; undefined ones
        # are exported with a sentinel value.
        for var_name in m.get_value('build/script_env', []):
            value = os.getenv(var_name)
            if value is None:
                value = '<UNDEFINED>'
            d[var_name] = value

    if sys.platform == "darwin":
        # multiprocessing.cpu_count() is not reliable on OSX
        # See issue #645 on github.com/conda/conda-build
        out, err = subprocess.Popen('sysctl -n hw.logicalcpu', shell=True,
                                    stdout=subprocess.PIPE).communicate()
        d['CPU_COUNT'] = out.decode('utf-8').strip()
    else:
        try:
            d['CPU_COUNT'] = str(multiprocessing.cpu_count())
        except NotImplementedError:
            d['CPU_COUNT'] = "1"

    # BUG FIX: guard on m — previously `m.get_value(...)` raised
    # AttributeError when called with the default m=None.
    if m and m.get_value('source/git_url'):
        d.update(**get_git_build_info(d['SRC_DIR']))

    d['PATH'] = dict(os.environ)['PATH']
    d = prepend_bin_path(d, prefix)

    if sys.platform == 'win32':
        # -------- Windows
        d['SCRIPTS'] = join(prefix, 'Scripts')
        d['LIBRARY_PREFIX'] = join(prefix, 'Library')
        d['LIBRARY_BIN'] = join(d['LIBRARY_PREFIX'], 'bin')
        d['LIBRARY_INC'] = join(d['LIBRARY_PREFIX'], 'include')
        d['LIBRARY_LIB'] = join(d['LIBRARY_PREFIX'], 'lib')
        # This probably should be done more generally
        d['CYGWIN_PREFIX'] = prefix.replace('\\', '/').replace('C:', '/cygdrive/c')
        d['R'] = join(prefix, 'Scripts', 'R.exe')
    else:
        # -------- Unix
        d['HOME'] = os.getenv('HOME', 'UNKNOWN')
        d['PKG_CONFIG_PATH'] = join(prefix, 'lib', 'pkgconfig')
        d['R'] = join(prefix, 'bin', 'R')

    if sys.platform == 'darwin':
        # -------- OSX
        d['OSX_ARCH'] = 'i386' if cc.bits == 32 else 'x86_64'
        d['CFLAGS'] = '-arch %(OSX_ARCH)s' % d
        d['CXXFLAGS'] = d['CFLAGS']
        d['LDFLAGS'] = d['CFLAGS']
        d['MACOSX_DEPLOYMENT_TARGET'] = '10.6'
    elif sys.platform.startswith('linux'):
        # -------- Linux
        d['LD_RUN_PATH'] = prefix + '/lib'

    if m:
        d['PKG_NAME'] = m.name()
        d['PKG_VERSION'] = m.version()
        d['PKG_BUILDNUM'] = str(m.build_number())
        d['PKG_BUILD_STRING'] = str(m.build_id())
        d['RECIPE_DIR'] = m.path
    return d
def get_dict(m=None, prefix=None):
    """Return the dict of environment variables exported to build scripts.

    :param m: package Metadata (optional; when absent, metadata-derived
        variables such as PKG_NAME and git info are skipped)
    :param prefix: target prefix; defaults to config.build_prefix
    :return: dict of variable name -> string value
    """
    if not prefix:
        prefix = config.build_prefix
    python = config.build_python
    d = {'CONDA_BUILD': '1', 'PYTHONNOUSERSITE': '1'}
    d['CONDA_DEFAULT_ENV'] = config.build_prefix
    d['ARCH'] = str(cc.bits)
    d['PREFIX'] = prefix
    d['PYTHON'] = python
    d['PY3K'] = str(config.PY3K)
    d['STDLIB_DIR'] = get_stdlib_dir()
    d['SP_DIR'] = get_sp_dir()
    d['SYS_PREFIX'] = sys.prefix
    d['SYS_PYTHON'] = sys.executable
    d['PERL_VER'] = get_perl_ver()
    d['PY_VER'] = get_py_ver()
    if get_npy_ver():
        d['NPY_VER'] = get_npy_ver()
    d['SRC_DIR'] = source.get_dir()
    # Pass selected host environment variables through to the build.
    if "LANG" in os.environ:
        d['LANG'] = os.environ['LANG']
    if "HTTPS_PROXY" in os.environ:
        d['HTTPS_PROXY'] = os.environ['HTTPS_PROXY']
    if "HTTP_PROXY" in os.environ:
        d['HTTP_PROXY'] = os.environ['HTTP_PROXY']

    if m:
        # Whitelisted host variables requested by the recipe; undefined ones
        # only produce a warning and are not exported.
        for var_name in m.get_value('build/script_env', []):
            value = os.getenv(var_name)
            if value is None:
                warnings.warn(
                    "The environment variable '%s' is undefined." % var_name,
                    UserWarning
                )
            else:
                d[var_name] = value

    if sys.platform == "darwin":
        # multiprocessing.cpu_count() is not reliable on OSX
        # See issue #645 on github.com/conda/conda-build
        out, err = Popen('sysctl -n hw.logicalcpu', shell=True, stdout=PIPE).communicate()
        d['CPU_COUNT'] = out.decode('utf-8').strip()
    else:
        try:
            d['CPU_COUNT'] = str(multiprocessing.cpu_count())
        except NotImplementedError:
            d['CPU_COUNT'] = "1"

    # `m and` guard: m may be None (default), in which case git info is skipped.
    if m and m.get_value('source/git_url'):
        git_url = m.get_value('source/git_url')
        if '://' not in git_url:
            # If git_url is a relative path instead of a url, convert it to an
            # abspath
            if not isabs(git_url):
                git_url = join(m.path, git_url)
            # join() discards m.path when git_url is already absolute, so this
            # second join is safe after the branch above.
            git_url = normpath(join(m.path, git_url))
        d.update(get_git_build_info(d['SRC_DIR'], git_url,
                                    m.get_value('source/git_rev')))

    d['PATH'] = dict(os.environ)['PATH']
    d = prepend_bin_path(d, prefix)

    if sys.platform == 'win32':
        # -------- Windows
        d['SCRIPTS'] = join(prefix, 'Scripts')
        d['LIBRARY_PREFIX'] = join(prefix, 'Library')
        d['LIBRARY_BIN'] = join(d['LIBRARY_PREFIX'], 'bin')
        d['LIBRARY_INC'] = join(d['LIBRARY_PREFIX'], 'include')
        d['LIBRARY_LIB'] = join(d['LIBRARY_PREFIX'], 'lib')
        # Map e.g. C:\foo to /cygdrive/c/foo for cygwin-based tools.
        drive, tail = prefix.split(':')
        d['CYGWIN_PREFIX'] = ''.join(['/cygdrive/', drive.lower(),
                                      tail.replace('\\', '/')])
        d['R'] = join(prefix, 'Scripts', 'R.exe')
    else:
        # -------- Unix
        d['HOME'] = os.getenv('HOME', 'UNKNOWN')
        d['PKG_CONFIG_PATH'] = join(prefix, 'lib', 'pkgconfig')
        d['R'] = join(prefix, 'bin', 'R')

    # in case CFLAGS was added in the `script_env` section above
    cflags = d.get('CFLAGS', '')
    cxxflags = d.get('CXXFLAGS', '')
    ldflags = d.get('LDFLAGS', '')

    if sys.platform == 'darwin':
        # -------- OSX
        # (note: `%` binds tighter than `+`, so only the literal is formatted)
        d['OSX_ARCH'] = 'i386' if cc.bits == 32 else 'x86_64'
        d['CFLAGS'] = cflags + ' -arch %(OSX_ARCH)s' % d
        d['CXXFLAGS'] = cxxflags + ' -arch %(OSX_ARCH)s' % d
        rpath = ' -Wl,-rpath,%(PREFIX)s/lib' % d  # SIP workaround, DYLD_* no longer works.
        d['LDFLAGS'] = ldflags + rpath + ' -arch %(OSX_ARCH)s' % d
        d['MACOSX_DEPLOYMENT_TARGET'] = '10.6'
    elif sys.platform.startswith('linux'):
        # -------- Linux
        d['LD_RUN_PATH'] = prefix + '/lib'
        if cc.bits == 32:
            d['CFLAGS'] = cflags + ' -m32'
            d['CXXFLAGS'] = cxxflags + ' -m32'

    if m:
        d['PKG_NAME'] = m.name()
        d['PKG_VERSION'] = m.version()
        d['PKG_BUILDNUM'] = str(m.build_number())
        d['PKG_BUILD_STRING'] = str(m.build_id())
        d['RECIPE_DIR'] = m.path
    return d