def build(m, bld_bat, stats):
    """Build a recipe on Windows by generating and running ``bld.bat``.

    Collects the build environment, writes a generated header (env vars,
    optional MSVC setup, activation) followed by the recipe's own
    ``bld.bat`` contents into the work dir, then executes it with cmd.exe.

    :param m: package metadata (MetaData) — supplies config, name, variant.
    :param bld_bat: path to the recipe's bld.bat script.
    :param stats: dict that check_call_env fills with process statistics.
    """
    # PATH gets both prefixes prepended only while the env dict is captured.
    with path_prepended(m.config.build_prefix):
        with path_prepended(m.config.host_prefix):
            env = environ.get_dict(m=m)
    env["CONDA_BUILD_STATE"] = "BUILD"

    # hard-code this because we never want pip's build isolation
    # https://github.com/conda/conda-build/pull/2972#discussion_r198290241
    #
    # Note that pip env "NO" variables are inverted logic.
    # PIP_NO_BUILD_ISOLATION=False means don't use build isolation.
    #
    env["PIP_NO_BUILD_ISOLATION"] = False
    # some other env vars to have pip ignore dependencies.
    # we supply them ourselves instead.
    # See note above about inverted logic on "NO" variables
    env["PIP_NO_DEPENDENCIES"] = False
    env["PIP_IGNORE_INSTALLED"] = True
    # disable use of pip's cache directory.
    # See note above about inverted logic on "NO" variables
    env["PIP_NO_CACHE_DIR"] = False
    # NOTE(review): the boolean ``False`` values above are dropped by the
    # ``if value:`` filter when bld.bat is written below, so these variables
    # are effectively never set in the batch file — confirm this is intended
    # (later variants of this function use string/True values instead).

    # set variables like CONDA_PY in the test environment
    env.update(set_language_env_vars(m.config.variant))

    # Ensure the Library BIN/INC/LIB folders exist before the script runs.
    for name in 'BIN', 'INC', 'LIB':
        path = env['LIBRARY_' + name]
        if not isdir(path):
            os.makedirs(path)

    src_dir = m.config.work_dir
    if os.path.isfile(bld_bat):
        with open(bld_bat) as fi:
            data = fi.read()
        with open(join(src_dir, 'bld.bat'), 'w') as fo:
            # more debuggable with echo on
            fo.write('@echo on\n')
            for key, value in env.items():
                if value:
                    fo.write('set "{key}={value}"\n'.format(key=key, value=value))
            if not m.uses_new_style_compiler_activation:
                fo.write(msvc_env_cmd(bits=m.config.host_arch,
                                      config=m.config,
                                      override=m.get_value('build/msvc_compiler', None)))
            # Reset echo on, because MSVC scripts might have turned it off
            fo.write('@echo on\n')
            fo.write('set "INCLUDE={};%INCLUDE%"\n'.format(env["LIBRARY_INC"]))
            fo.write('set "LIB={};%LIB%"\n'.format(env["LIBRARY_LIB"]))
            if m.config.activate and m.name() != 'conda':
                _write_bat_activation_text(fo, m)
            fo.write("REM ===== end generated header =====\n")
            fo.write(data)
        cmd = ['cmd.exe', '/c', 'bld.bat']
        check_call_env(cmd, cwd=src_dir, stats=stats)
        # Repair scripts staged into the host prefix's Scripts folder.
        fix_staged_scripts(join(m.config.host_prefix, 'Scripts'),
                           config=m.config)
def build(m, bld_bat, stats):
    """Build a recipe on Windows by generating and running ``bld.bat``.

    Variant that points pip at a dedicated cache dir inside the work tree
    instead of disabling the cache (see gh-3094 comment below).

    :param m: package metadata (MetaData) — supplies config, name, variant.
    :param bld_bat: path to the recipe's bld.bat script.
    :param stats: dict that check_call_env fills with process statistics.
    """
    # PATH gets both prefixes prepended only while the env dict is captured.
    with path_prepended(m.config.build_prefix):
        with path_prepended(m.config.host_prefix):
            env = environ.get_dict(m=m)
    env["CONDA_BUILD_STATE"] = "BUILD"

    # hard-code this because we never want pip's build isolation
    # https://github.com/conda/conda-build/pull/2972#discussion_r198290241
    #
    # Note that pip env "NO" variables are inverted logic.
    # PIP_NO_BUILD_ISOLATION=False means don't use build isolation.
    #
    env["PIP_NO_BUILD_ISOLATION"] = False
    # some other env vars to have pip ignore dependencies.
    # we supply them ourselves instead.
    # See note above about inverted logic on "NO" variables
    env["PIP_NO_DEPENDENCIES"] = False
    env["PIP_IGNORE_INSTALLED"] = True
    # pip's cache directory (PIP_NO_CACHE_DIR) should not be
    # disabled as this results in .egg-info rather than
    # .dist-info directories being created, see gh-3094
    # set PIP_CACHE_DIR to a path in the work dir that does not exist.
    env['PIP_CACHE_DIR'] = m.config.pip_cache_dir
    # NOTE(review): boolean ``False`` values above are dropped by the
    # ``if value:`` filter when bld.bat is written below — confirm intended.

    # set variables like CONDA_PY in the test environment
    env.update(set_language_env_vars(m.config.variant))

    # Ensure the Library BIN/INC/LIB folders exist before the script runs.
    for name in 'BIN', 'INC', 'LIB':
        path = env['LIBRARY_' + name]
        if not isdir(path):
            os.makedirs(path)

    src_dir = m.config.work_dir
    if os.path.isfile(bld_bat):
        with open(bld_bat) as fi:
            data = fi.read()
        with open(join(src_dir, 'bld.bat'), 'w') as fo:
            # more debuggable with echo on
            fo.write('@echo on\n')
            for key, value in env.items():
                if value:
                    fo.write('set "{key}={value}"\n'.format(key=key, value=value))
            if not m.uses_new_style_compiler_activation:
                fo.write(msvc_env_cmd(bits=m.config.host_arch,
                                      config=m.config,
                                      override=m.get_value('build/msvc_compiler', None)))
            # Reset echo on, because MSVC scripts might have turned it off
            fo.write('@echo on\n')
            fo.write('set "INCLUDE={};%INCLUDE%"\n'.format(env["LIBRARY_INC"]))
            fo.write('set "LIB={};%LIB%"\n'.format(env["LIBRARY_LIB"]))
            if m.config.activate and m.name() != 'conda':
                _write_bat_activation_text(fo, m)
            fo.write("REM ===== end generated header =====\n")
            fo.write(data)
        cmd = ['cmd.exe', '/c', 'bld.bat']
        check_call_env(cmd, cwd=src_dir, stats=stats)
        # Repair scripts staged into the host prefix's Scripts folder.
        fix_staged_scripts(join(m.config.host_prefix, 'Scripts'),
                           config=m.config)
def write_build_scripts(m, script, build_file):
    """Write the build driver scripts into the work directory.

    Produces two files in ``m.config.work_dir``:

    * ``build_env_setup.sh`` — exports every non-empty build env var and,
      when ``m.activate_build_script`` is set, the activation commands;
    * ``conda_build.sh`` — sources the env file (unless ``CONDA_BUILD`` is
      already set) and then appends the recipe's build script.

    :param m: package metadata (MetaData) — supplies config and flags.
    :param script: inline build script text (used when ``build_file`` is absent).
    :param build_file: path to the recipe's build.sh; takes precedence over
        ``script`` when it exists.
    :return: tuple ``(work_file, env_file)`` of the two written paths.
    """
    # PATH gets both prefixes prepended only while the env dict is captured.
    with utils.path_prepended(m.config.host_prefix):
        with utils.path_prepended(m.config.build_prefix):
            env = environ.get_dict(m=m, variant={'no': 'variant'})
    env["CONDA_BUILD_STATE"] = "BUILD"

    # hard-code this because we never want pip's build isolation
    # https://github.com/conda/conda-build/pull/2972#discussion_r198290241
    #
    # Note that pip env "NO" variables are inverted logic.
    # PIP_NO_BUILD_ISOLATION=False means don't use build isolation.
    #
    env["PIP_NO_BUILD_ISOLATION"] = 'False'
    # some other env vars to have pip ignore dependencies.
    # we supply them ourselves instead.
    env["PIP_NO_DEPENDENCIES"] = True
    env["PIP_IGNORE_INSTALLED"] = True
    # pip's cache directory (PIP_NO_CACHE_DIR) should not be
    # disabled as this results in .egg-info rather than
    # .dist-info directories being created, see gh-3094
    # set PIP_CACHE_DIR to a path in the work dir that does not exist.
    env['PIP_CACHE_DIR'] = m.config.pip_cache_dir
    # tell pip to not get anything from PyPI, please. We have everything we need
    # locally, and if we don't, it's a problem.
    env["PIP_NO_INDEX"] = True

    if m.noarch == "python":
        env["PYTHONDONTWRITEBYTECODE"] = True

    work_file = join(m.config.work_dir, 'conda_build.sh')
    env_file = join(m.config.work_dir, 'build_env_setup.sh')
    with open(env_file, 'w') as bf:
        for k, v in env.items():
            if v != '' and v is not None:
                bf.write('export {0}="{1}"\n'.format(k, v))
        if m.activate_build_script:
            _write_sh_activation_text(bf, m)
    with open(work_file, 'w') as bf:
        # bf.write('set -ex\n')
        bf.write('if [ -z ${CONDA_BUILD+x} ]; then\n')
        bf.write(" source {}\n".format(env_file))
        bf.write("fi\n")
        if isfile(build_file):
            # Bug fix: the file handle from ``open(build_file).read()`` was
            # previously leaked; close it deterministically instead.
            with open(build_file) as recipe_fh:
                bf.write(recipe_fh.read())
        elif script:
            bf.write(script)
    # NOTE(review): 0o766 makes the script group/world-writable — presumably
    # intentional for shared build dirs, but 0o755 would be safer; confirm.
    os.chmod(work_file, 0o766)
    return work_file, env_file
def build(m, bld_bat, stats, provision_only=False):
    """Build a recipe on Windows via generated wrapper scripts.

    Collects the build environment, delegates script generation to
    ``write_build_scripts``, runs the resulting batch file with cmd.exe
    (unless ``provision_only``), and repairs staged scripts afterwards.

    :param m: package metadata (MetaData).
    :param bld_bat: path to the recipe's bld.bat script.
    :param stats: dict that check_call_env fills with process statistics.
    :param provision_only: when True, write the scripts but do not run them.
    """
    # PATH gets both prefixes prepended only while the env dict is captured.
    with path_prepended(m.config.host_prefix):
        with path_prepended(m.config.build_prefix):
            env = environ.get_dict(m=m)
    env["CONDA_BUILD_STATE"] = "BUILD"

    # hard-code this because we never want pip's build isolation
    # https://github.com/conda/conda-build/pull/2972#discussion_r198290241
    #
    # Note that pip env "NO" variables are inverted logic.
    # PIP_NO_BUILD_ISOLATION=False means don't use build isolation.
    #
    env["PIP_NO_BUILD_ISOLATION"] = 'False'
    # some other env vars to have pip ignore dependencies.
    # we supply them ourselves instead.
    # See note above about inverted logic on "NO" variables
    env["PIP_NO_DEPENDENCIES"] = True
    env["PIP_IGNORE_INSTALLED"] = True
    # pip's cache directory (PIP_NO_CACHE_DIR) should not be
    # disabled as this results in .egg-info rather than
    # .dist-info directories being created, see gh-3094
    # set PIP_CACHE_DIR to a path in the work dir that does not exist.
    env['PIP_CACHE_DIR'] = m.config.pip_cache_dir
    # tell pip to not get anything from PyPI, please. We have everything we need
    # locally, and if we don't, it's a problem.
    env["PIP_NO_INDEX"] = True

    # set variables like CONDA_PY in the test environment
    env.update(set_language_env_vars(m.config.variant))

    # Ensure the Library BIN/INC/LIB folders exist before the script runs.
    for name in 'BIN', 'INC', 'LIB':
        path = env['LIBRARY_' + name]
        if not isdir(path):
            os.makedirs(path)

    work_script, env_script = write_build_scripts(m, env, bld_bat)

    if not provision_only and os.path.isfile(work_script):
        # Consistency fix: pass /d (skip registry AutoRun commands) as the
        # other build/test runners here do, so a user's cmd.exe AutoRun
        # cannot corrupt the build environment.
        cmd = ['cmd.exe', '/d', '/c', os.path.basename(work_script)]
        # rewrite long paths in stdout back to their env variables
        if m.config.debug:
            rewrite_env = None
        else:
            rewrite_env = {
                k: env[k]
                for k in ['PREFIX', 'BUILD_PREFIX', 'SRC_DIR'] if k in env
            }
            print("Rewriting env in output: %s" % pprint.pformat(rewrite_env))
        check_call_env(cmd, cwd=m.config.work_dir, stats=stats,
                       rewrite_stdout_env=rewrite_env)
        # Repair scripts staged into the host prefix's Scripts folder.
        fix_staged_scripts(join(m.config.host_prefix, 'Scripts'),
                           config=m.config)
def build(m, bld_bat, stats, provision_only=False):
    """Build a recipe on Windows via generated wrapper scripts.

    Collects the build environment, delegates script generation to
    ``write_build_scripts``, runs the resulting batch file with
    ``cmd.exe /d /c`` (``/d`` skips registry AutoRun commands), and
    repairs staged scripts afterwards.

    :param m: package metadata (MetaData).
    :param bld_bat: path to the recipe's bld.bat script.
    :param stats: dict that check_call_env fills with process statistics.
    :param provision_only: when True, write the scripts but do not run them.
    """
    # PATH gets both prefixes prepended only while the env dict is captured.
    with path_prepended(m.config.host_prefix):
        with path_prepended(m.config.build_prefix):
            env = environ.get_dict(m=m)
    env["CONDA_BUILD_STATE"] = "BUILD"

    # hard-code this because we never want pip's build isolation
    # https://github.com/conda/conda-build/pull/2972#discussion_r198290241
    #
    # Note that pip env "NO" variables are inverted logic.
    # PIP_NO_BUILD_ISOLATION=False means don't use build isolation.
    #
    env["PIP_NO_BUILD_ISOLATION"] = 'False'
    # some other env vars to have pip ignore dependencies.
    # we supply them ourselves instead.
    # See note above about inverted logic on "NO" variables
    env["PIP_NO_DEPENDENCIES"] = True
    env["PIP_IGNORE_INSTALLED"] = True
    # pip's cache directory (PIP_NO_CACHE_DIR) should not be
    # disabled as this results in .egg-info rather than
    # .dist-info directories being created, see gh-3094
    # set PIP_CACHE_DIR to a path in the work dir that does not exist.
    env['PIP_CACHE_DIR'] = m.config.pip_cache_dir
    # tell pip to not get anything from PyPI, please. We have everything we need
    # locally, and if we don't, it's a problem.
    env["PIP_NO_INDEX"] = True

    # set variables like CONDA_PY in the test environment
    env.update(set_language_env_vars(m.config.variant))

    # Ensure the Library BIN/INC/LIB folders exist before the script runs.
    for name in 'BIN', 'INC', 'LIB':
        path = env['LIBRARY_' + name]
        if not isdir(path):
            os.makedirs(path)

    work_script, env_script = write_build_scripts(m, env, bld_bat)

    if not provision_only and os.path.isfile(work_script):
        # /d disables cmd.exe AutoRun so user shell customization cannot
        # interfere with the build.
        cmd = ['cmd.exe', '/d', '/c', os.path.basename(work_script)]
        # rewrite long paths in stdout back to their env variables
        if m.config.debug:
            rewrite_env = None
        else:
            rewrite_env = {
                k: env[k]
                for k in ['PREFIX', 'BUILD_PREFIX', 'SRC_DIR'] if k in env
            }
            print("Rewriting env in output: %s" % pprint.pformat(rewrite_env))
        check_call_env(cmd, cwd=m.config.work_dir, stats=stats,
                       rewrite_stdout_env=rewrite_env)
        # Repair scripts staged into the host prefix's Scripts folder.
        fix_staged_scripts(join(m.config.host_prefix, 'Scripts'),
                           config=m.config)
def build(m, bld_bat, config):
    """Build a recipe on Windows (older, config-parameter API).

    Writes a generated header (env vars, MSVC setup, optional activation)
    followed by the recipe's ``bld.bat`` into the work dir and runs it.

    :param m: package metadata (MetaData).
    :param bld_bat: path to the recipe's bld.bat script.
    :param config: build configuration object.
    """
    with path_prepended(config.build_prefix):
        env = environ.get_dict(config=config, m=m)
    env["CONDA_BUILD_STATE"] = "BUILD"

    # Ensure the Library BIN/INC/LIB folders exist before the script runs.
    for name in 'BIN', 'INC', 'LIB':
        path = env['LIBRARY_' + name]
        if not isdir(path):
            os.makedirs(path)

    src_dir = config.work_dir
    if os.path.isfile(bld_bat):
        with open(bld_bat) as fi:
            data = fi.read()
        with open(join(src_dir, 'bld.bat'), 'w') as fo:
            # more debuggable with echo on
            fo.write('@echo on\n')
            for key, value in env.items():
                fo.write('set "{key}={value}"\n'.format(key=key, value=value))
            # NOTE(review): ``bits`` is a free name here — presumably a
            # module-level value (target architecture); confirm it is defined
            # at import time in this module.
            fo.write(msvc_env_cmd(bits=bits, config=config,
                                  override=m.get_value('build/msvc_compiler', None)))
            # Reset echo on, because MSVC scripts might have turned it off
            fo.write('@echo on\n')
            fo.write('set "INCLUDE={};%INCLUDE%"\n'.format(env["LIBRARY_INC"]))
            fo.write('set "LIB={};%LIB%"\n'.format(env["LIBRARY_LIB"]))
            if config.activate:
                fo.write("call {conda_root}\\activate.bat {prefix}\n".format(
                    conda_root=root_script_dir,
                    prefix=config.build_prefix))
            fo.write("REM ===== end generated header =====\n")
            fo.write(data)
        cmd = ['cmd.exe', '/c', 'bld.bat']
        _check_call(cmd, cwd=src_dir)
def build(m, stats=None, from_interactive=False, allow_interactive=False):
    """Build one output of a rendered recipe and bundle it.

    Runs the build script in the prepared prefixes and returns the result
    of ``bundle_conda``.  On a failed build subprocess, prints retry hints
    and — when ``allow_interactive`` and not already interactive — drops
    into the TUI.  Note the failure path implicitly returns ``None``.

    :param m: package/output metadata.
    :param stats: optional dict for process statistics (fresh dict if falsy).
    :param from_interactive: True when invoked from the interactive TUI.
    :param allow_interactive: permit entering the TUI on build failure.
    """
    try:
        if not stats:
            stats = {}

        if m.skip():
            console.print(utils.get_skip_message(m))
            return {}

        # PATH gets the build prefix prepended while capturing the env dict.
        with utils.path_prepended(m.config.build_prefix):
            env = environ.get_dict(m=m)
        env["CONDA_BUILD_STATE"] = "BUILD"
        if env_path_backup_var_exists:
            env["CONDA_PATH_BACKUP"] = os.environ["CONDA_PATH_BACKUP"]

        m.output.sections["package"]["name"] = m.output.name
        env["PKG_NAME"] = m.get_value("package/name")

        src_dir = m.config.work_dir
        if isdir(src_dir):
            if m.config.verbose:
                console.print("source tree in:", src_dir)
        else:
            if m.config.verbose:
                console.print("no source - creating empty work folder")
            os.makedirs(src_dir)

        utils.rm_rf(m.config.info_dir)
        # Snapshot the host prefix so post-build can diff what was added.
        files_before_script = utils.prefix_files(prefix=m.config.host_prefix)
        with open(join(m.config.build_folder, "prefix_files.txt"), "w") as f:
            f.write("\n".join(sorted(list(files_before_script))))
            f.write("\n")

        execute_build_script(m, src_dir, env)

        # Intermediate outputs exist only to feed later outputs; their
        # prefix contents are discarded rather than packaged.
        if m.output.sections["build"].get("intermediate"):
            utils.rm_rf(m.config.host_prefix)

        return bundle_conda(m, files_before_script, env, m.output.sections["files"])
    except subprocess.CalledProcessError:
        ext = "bat" if utils.on_win else "sh"
        work_dir = pathlib.Path(m.config.build_prefix).parent / "work"
        build_cmd = work_dir / f"conda_build.{ext}"
        console.print("\n")
        console.print(f"Work directory: {work_dir}")
        console.print(f"Try building again with {build_cmd}")
        if not from_interactive and allow_interactive:
            console.print("[red]Build went wrong, entering interactive mode![/red]")
            from boa.tui import tui
            import asyncio

            asyncio.run(tui.enter_tui(m))
def build(m, bld_bat, stats):
    """Build a recipe on Windows by generating and running ``bld.bat``.

    Collects the build environment, writes a generated header (env vars,
    optional MSVC setup, activation) followed by the recipe's own
    ``bld.bat`` contents into the work dir, then executes it with cmd.exe.

    :param m: package metadata (MetaData) — supplies config, name, variant.
    :param bld_bat: path to the recipe's bld.bat script.
    :param stats: dict that check_call_env fills with process statistics.
    """
    # PATH gets both prefixes prepended only while the env dict is captured.
    with path_prepended(m.config.build_prefix):
        with path_prepended(m.config.host_prefix):
            env = environ.get_dict(config=m.config, m=m)
    env["CONDA_BUILD_STATE"] = "BUILD"

    # set variables like CONDA_PY in the test environment
    env.update(set_language_env_vars(m.config.variant))

    # Ensure the Library BIN/INC/LIB folders exist before the script runs.
    for name in 'BIN', 'INC', 'LIB':
        path = env['LIBRARY_' + name]
        if not isdir(path):
            os.makedirs(path)

    src_dir = m.config.work_dir
    if os.path.isfile(bld_bat):
        with open(bld_bat) as fi:
            data = fi.read()
        with open(join(src_dir, 'bld.bat'), 'w') as fo:
            # more debuggable with echo on
            fo.write('@echo on\n')
            # Falsy values are deliberately skipped — nothing to ``set``.
            for key, value in env.items():
                if value:
                    fo.write('set "{key}={value}"\n'.format(key=key, value=value))
            if not m.uses_new_style_compiler_activation:
                fo.write(msvc_env_cmd(bits=m.config.host_arch,
                                      config=m.config,
                                      override=m.get_value('build/msvc_compiler', None)))
            # Reset echo on, because MSVC scripts might have turned it off
            fo.write('@echo on\n')
            fo.write('set "INCLUDE={};%INCLUDE%"\n'.format(env["LIBRARY_INC"]))
            fo.write('set "LIB={};%LIB%"\n'.format(env["LIBRARY_LIB"]))
            if m.config.activate and m.name() != 'conda':
                _write_bat_activation_text(fo, m)
            fo.write("REM ===== end generated header =====\n")
            fo.write(data)
        cmd = ['cmd.exe', '/c', 'bld.bat']
        check_call_env(cmd, cwd=src_dir, stats=stats)
        # Repair scripts staged into the host prefix's Scripts folder.
        fix_staged_scripts(join(m.config.host_prefix, 'Scripts'),
                           config=m.config)
def build(m, bld_bat, stats):
    """Run a recipe's ``bld.bat`` on Windows.

    Captures the build environment, writes a generated header (variable
    assignments, optional MSVC setup, activation commands) followed by the
    recipe's own batch script into the work directory, executes it with
    cmd.exe, and finally repairs scripts staged into the host prefix.

    :param m: package metadata (MetaData).
    :param bld_bat: path to the recipe's bld.bat script.
    :param stats: dict that check_call_env fills with process statistics.
    """
    # Both prefixes are on PATH only while the env dict is captured.
    with path_prepended(m.config.build_prefix):
        with path_prepended(m.config.host_prefix):
            env = environ.get_dict(config=m.config, m=m)
    env["CONDA_BUILD_STATE"] = "BUILD"

    # set variables like CONDA_PY in the test environment
    env.update(set_language_env_vars(m.config.variant))

    # Make sure the Library BIN/INC/LIB folders exist up front.
    for libdir in ('BIN', 'INC', 'LIB'):
        folder = env['LIBRARY_' + libdir]
        if not isdir(folder):
            os.makedirs(folder)

    work_dir = m.config.work_dir
    if not os.path.isfile(bld_bat):
        return

    with open(bld_bat) as recipe_fh:
        recipe_script = recipe_fh.read()

    with open(join(work_dir, 'bld.bat'), 'w') as bat:
        # more debuggable with echo on
        bat.write('@echo on\n')
        # Falsy values are skipped — there is nothing to ``set``.
        for var, val in env.items():
            if val:
                bat.write('set "{key}={value}"\n'.format(key=var, value=val))
        if not m.uses_new_style_compiler_activation:
            bat.write(msvc_env_cmd(bits=m.config.host_arch,
                                   config=m.config,
                                   override=m.get_value('build/msvc_compiler', None)))
        # Reset echo on, because MSVC scripts might have turned it off
        bat.write('@echo on\n')
        bat.write('set "INCLUDE={};%INCLUDE%"\n'.format(env["LIBRARY_INC"]))
        bat.write('set "LIB={};%LIB%"\n'.format(env["LIBRARY_LIB"]))
        if m.config.activate and m.name() != 'conda':
            _write_bat_activation_text(bat, m)
        bat.write("REM ===== end generated header =====\n")
        bat.write(recipe_script)

    check_call_env(['cmd.exe', '/c', 'bld.bat'], cwd=work_dir, stats=stats)
    fix_staged_scripts(join(m.config.host_prefix, 'Scripts'), config=m.config)
def build(m, stats=None):
    """Build one output of a rendered recipe and bundle it.

    Runs the build script in the prepared prefixes, then hands the
    before-script file snapshot to ``bundle_conda`` for packaging.

    :param m: package/output metadata.
    :param stats: optional dict for process statistics; a fresh dict is
        created per call when omitted.
    :return: result of ``bundle_conda``, or ``{}`` when the build is skipped.
    """
    # Bug fix: the old ``stats={}`` default was a shared mutable object
    # across calls; normalize a per-call default here instead.
    if stats is None:
        stats = {}

    if m.skip():
        print(utils.get_skip_message(m))
        return {}

    # PATH gets the build prefix prepended while capturing the env dict.
    with utils.path_prepended(m.config.build_prefix):
        env = environ.get_dict(m=m)
    env["CONDA_BUILD_STATE"] = "BUILD"
    if env_path_backup_var_exists:
        env["CONDA_PATH_BACKUP"] = os.environ["CONDA_PATH_BACKUP"]

    m.output.sections["package"]["name"] = m.output.name
    env["PKG_NAME"] = m.get_value('package/name')

    src_dir = m.config.work_dir
    if isdir(src_dir):
        if m.config.verbose:
            print("source tree in:", src_dir)
    else:
        if m.config.verbose:
            print("no source - creating empty work folder")
        os.makedirs(src_dir)

    utils.rm_rf(m.config.info_dir)
    # Snapshot the host prefix so bundling can diff what the script added.
    files_before_script = utils.prefix_files(prefix=m.config.host_prefix)
    with open(join(m.config.build_folder, "prefix_files.txt"), "w") as f:
        f.write("\n".join(sorted(list(files_before_script))))
        f.write("\n")

    execute_build_script(m, src_dir, env)

    # (Removed a dead post-script prefix_files() diff that was computed but
    # never used; also dropped the unused logger local.)

    # Intermediate outputs only feed later outputs; discard their prefix
    # instead of packaging it.  Truthiness (rather than ``== True``) matches
    # the sibling implementation of this function.
    if m.output.sections['build'].get('intermediate'):
        utils.rm_rf(m.config.host_prefix)

    return bundle_conda(m, files_before_script, env, m.output.sections['files'])
def build(m, bld_bat, config):
    """Build a recipe on Windows (older, config-parameter API).

    Writes a generated header (env vars, MSVC setup, quoted activation
    call) followed by the recipe's ``bld.bat`` into the work dir, runs it,
    then repairs scripts staged into the build prefix.

    :param m: package metadata (MetaData).
    :param bld_bat: path to the recipe's bld.bat script.
    :param config: build configuration object.
    """
    with path_prepended(config.build_prefix):
        env = environ.get_dict(config=config, m=m)
    env["CONDA_BUILD_STATE"] = "BUILD"

    # Ensure the Library BIN/INC/LIB folders exist before the script runs.
    for name in 'BIN', 'INC', 'LIB':
        path = env['LIBRARY_' + name]
        if not isdir(path):
            os.makedirs(path)

    src_dir = config.work_dir
    if os.path.isfile(bld_bat):
        with open(bld_bat) as fi:
            data = fi.read()
        with open(join(src_dir, 'bld.bat'), 'w') as fo:
            # more debuggable with echo on
            fo.write('@echo on\n')
            for key, value in env.items():
                fo.write('set "{key}={value}"\n'.format(key=key, value=value))
            # NOTE(review): ``bits`` is a free name here — presumably a
            # module-level value (target architecture); confirm it is defined
            # at import time in this module.
            fo.write(msvc_env_cmd(bits=bits, config=config,
                                  override=m.get_value('build/msvc_compiler', None)))
            # Reset echo on, because MSVC scripts might have turned it off
            fo.write('@echo on\n')
            fo.write('set "INCLUDE={};%INCLUDE%"\n'.format(env["LIBRARY_INC"]))
            fo.write('set "LIB={};%LIB%"\n'.format(env["LIBRARY_LIB"]))
            if config.activate:
                # Paths are quoted here (unlike older variants) so prefixes
                # containing spaces survive the cmd.exe call.
                fo.write('call "{conda_root}\\activate.bat" "{prefix}"\n'.format(
                    conda_root=root_script_dir,
                    prefix=config.build_prefix))
            fo.write("REM ===== end generated header =====\n")
            fo.write(data)
        cmd = ['cmd.exe', '/c', 'bld.bat']
        _check_call(cmd, cwd=src_dir)
        fix_staged_scripts(join(config.build_prefix, 'Scripts'))
def test(m, config, move_broken=True):
    '''
    Execute any test scripts for the given package.

    Creates per-language test files in the test dir, resolves and creates a
    dedicated test environment, writes a ``conda_test_runner`` script that
    chains activation plus the python/perl/lua/shell tests, and runs it.

    :param m: Package's metadata.
    :type m: Metadata
    :param config: build configuration (prefixes, timeouts, interpreters).
    :param move_broken: on failure, whether tests_failed should move the
        package into the broken dir.
    '''
    if not os.path.isdir(config.build_folder):
        os.makedirs(config.build_folder)

    clean_pkg_cache(m.dist(), config.timeout)

    # Serialize against other builds sharing this build folder.
    with filelock.SoftFileLock(join(config.build_folder, ".conda_lock"),
                               timeout=config.timeout):
        tmp_dir = config.test_dir
        if not isdir(tmp_dir):
            os.makedirs(tmp_dir)
        create_files(tmp_dir, m, config)
        # Make Perl or Python-specific test files
        if m.name().startswith('perl-'):
            pl_files = create_pl_files(tmp_dir, m)
            py_files = False
            lua_files = False
        else:
            py_files = create_py_files(tmp_dir, m)
            pl_files = False
            lua_files = False
        shell_files = create_shell_files(tmp_dir, m, config)
        if not (py_files or shell_files or pl_files or lua_files):
            print("Nothing to test for:", m.dist())
            return

        print("TEST START:", m.dist())

        get_build_metadata(m, config=config)
        specs = ['%s %s %s' % (m.name(), m.version(), m.build_id())]

        # add packages listed in the run environment and test/requires
        specs.extend(ms.spec for ms in m.ms_depends('run'))
        specs += m.get_value('test/requires', [])

        if py_files:
            # as the tests are run by python, ensure that python is installed.
            # (If they already provided python as a run or test requirement,
            #  this won't hurt anything.)
            specs += ['python %s*' % environ.get_py_ver(config)]
        if pl_files:
            # as the tests are run by perl, we need to specify it
            specs += ['perl %s*' % environ.get_perl_ver(config)]
        if lua_files:
            # not sure how this shakes out
            specs += ['lua %s*' % environ.get_lua_ver(config)]

        create_env(config.test_prefix, specs, config=config)

        with path_prepended(config.test_prefix):
            env = dict(os.environ.copy())
            env.update(environ.get_dict(config=config, m=m,
                                        prefix=config.test_prefix))
            env["CONDA_BUILD_STATE"] = "TEST"

        if not config.activate:
            # prepend bin (or Scripts) directory
            env = prepend_bin_path(env, config.test_prefix, prepend_prefix=True)
            if on_win:
                env['PATH'] = config.test_prefix + os.pathsep + env['PATH']

        for varname in 'CONDA_PY', 'CONDA_NPY', 'CONDA_PERL', 'CONDA_LUA':
            env[varname] = str(getattr(config, varname) or '')

        # Python 2 Windows requires that envs variables be string, not unicode
        env = {str(key): str(value) for key, value in env.items()}
        suffix = "bat" if on_win else "sh"
        test_script = join(tmp_dir,
                           "conda_test_runner.{suffix}".format(suffix=suffix))

        with open(test_script, 'w') as tf:
            if config.activate:
                ext = ".bat" if on_win else ""
                tf.write("{source} {conda_root}activate{ext} {test_env} {squelch}\n".format(
                    conda_root=root_script_dir + os.path.sep,
                    source="call" if on_win else "source",
                    ext=ext,
                    test_env=config.test_prefix,
                    squelch=">nul 2>&1" if on_win else "&> /dev/null"))
                if on_win:
                    tf.write("if errorlevel 1 exit 1\n")
            if py_files:
                tf.write("{python} -s {test_file}\n".format(
                    python=config.test_python,
                    test_file=join(tmp_dir, 'run_test.py')))
                if on_win:
                    tf.write("if errorlevel 1 exit 1\n")
            if pl_files:
                tf.write("{perl} {test_file}\n".format(
                    perl=config.test_perl,
                    test_file=join(tmp_dir, 'run_test.pl')))
                if on_win:
                    tf.write("if errorlevel 1 exit 1\n")
            if lua_files:
                tf.write("{lua} {test_file}\n".format(
                    lua=config.test_lua,
                    test_file=join(tmp_dir, 'run_test.lua')))
                if on_win:
                    tf.write("if errorlevel 1 exit 1\n")
            if shell_files:
                test_file = join(tmp_dir, 'run_test.' + suffix)
                if on_win:
                    tf.write("call {test_file}\n".format(test_file=test_file))
                    # NOTE(review): this inner ``if on_win`` is redundant —
                    # we are already inside an on_win branch.
                    if on_win:
                        tf.write("if errorlevel 1 exit 1\n")
                else:
                    # TODO: Run the test/commands here instead of in run_test.py
                    tf.write("{shell_path} -x -e {test_file}\n".format(
                        shell_path=shell_path, test_file=test_file))

        # /d skips registry AutoRun; -x -e traces and stops on first error.
        if on_win:
            cmd = ['cmd.exe', "/d", "/c", test_script]
        else:
            cmd = [shell_path, '-x', '-e', test_script]
        try:
            subprocess.check_call(cmd, env=env, cwd=tmp_dir)
        except subprocess.CalledProcessError:
            tests_failed(m, move_broken=move_broken,
                         broken_dir=config.broken_dir, config=config)

    print("TEST END:", m.dist())
def build(m, config, post=None, need_source_download=True, need_reparse_in_env=False):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :type post: bool or None. None means run the whole build. True means run
    post only. False means stop just before the post.
    :type keep_old_work: bool: Keep any previous work directory.
    :type need_source_download: bool: if rendering failed to download source
    (due to missing tools), retry here after build env is populated
    :return: False when the build was skipped; True when the package is OK
        to test.
    '''
    if m.skip():
        print_skip_message(m)
        return False

    if config.skip_existing:
        package_exists = is_package_built(m, config)
        if package_exists:
            print(m.dist(), "is already built in {0}, skipping.".format(package_exists))
            return False

    if post in [False, None]:
        print("BUILD START:", m.dist())
        if m.uses_jinja and (need_source_download or need_reparse_in_env):
            print("    (actual version deferred until further download or env creation)")

        specs = [ms.spec for ms in m.ms_depends('build')]
        create_env(config.build_prefix, specs, config=config)
        # If the recipe fetches its source with a VCS tool, make sure that
        # tool is present in the build env even when not listed.
        vcs_source = m.uses_vcs_in_build
        if vcs_source and vcs_source not in specs:
            vcs_executable = "hg" if vcs_source == "mercurial" else vcs_source
            has_vcs_available = os.path.isfile(external.find_executable(vcs_executable,
                                                                       config.build_prefix) or "")
            if not has_vcs_available:
                if (vcs_source != "mercurial" or
                        not any(spec.startswith('python') and "3." in spec
                                for spec in specs)):
                    specs.append(vcs_source)

                    log.warn("Your recipe depends on %s at build time (for templates), "
                             "but you have not listed it as a build dependency.  Doing "
                             "so for this build.", vcs_source)
                    # Display the name only
                    # Version number could be missing due to dependency on source info.
                    create_env(config.build_prefix, specs, config=config)
                else:
                    raise ValueError("Your recipe uses mercurial in build, but mercurial"
                                     " does not yet support Python 3.  Please handle all of "
                                     "your mercurial actions outside of your build script.")

        if need_source_download:
            # Execute any commands fetching the source (e.g., git) in the _build environment.
            # This makes it possible to provide source fetchers (eg. git, hg, svn) as build
            # dependencies.
            with path_prepended(config.build_prefix):
                m, need_source_download, need_reparse_in_env = parse_or_try_download(
                    m, no_download_source=False, force_download=True, config=config)
            assert not need_source_download, "Source download failed.  Please investigate."
            if m.uses_jinja:
                print("BUILD START (revised):", m.dist())

        if need_reparse_in_env:
            reparse(m, config=config)
            print("BUILD START (revised):", m.dist())

        # The package being built must not itself be installed in the build
        # prefix (e.g. as a build dependency) — remove it first.
        if m.name() in [i.rsplit('-', 2)[0] for i in linked(config.build_prefix)]:
            print("%s is installed as a build dependency. Removing." % m.name())
            index = get_build_index(config=config, clear_cache=False)
            actions = plan.remove_actions(config.build_prefix, [m.name()], index=index)
            assert not plan.nothing_to_do(actions), actions
            plan.display_actions(actions, index)
            plan.execute_actions(actions, index)

        print("Package:", m.dist())

        # Serialize against other builds sharing this build folder.
        with filelock.SoftFileLock(join(config.build_folder, ".conda_lock"),
                                   timeout=config.timeout):
            # get_dir here might be just work, or it might be one level deeper,
            # dependening on the source.
            src_dir = source.get_dir(config)
            if isdir(src_dir):
                print("source tree in:", src_dir)
            else:
                print("no source - creating empty work folder")
                os.makedirs(src_dir)

            rm_rf(config.info_dir)
            # Snapshot the prefix; files matching always_include_files are
            # discarded from the snapshot so they end up in the package.
            files1 = prefix_files(prefix=config.build_prefix)
            for pat in m.always_include_files():
                has_matches = False
                for f in set(files1):
                    if fnmatch.fnmatch(f, pat):
                        print("Including in package existing file", f)
                        files1.discard(f)
                        has_matches = True
                if not has_matches:
                    log.warn("Glob %s from always_include_files does not match any files",
                             pat)
            # Save this for later
            with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
                f.write(u'\n'.join(sorted(list(files1))))
                f.write(u'\n')

            # Use script from recipe?
            script = m.get_value('build/script', None)
            if script:
                if isinstance(script, list):
                    script = '\n'.join(script)

            if isdir(src_dir):
                if on_win:
                    build_file = join(m.path, 'bld.bat')
                    if script:
                        build_file = join(src_dir, 'bld.bat')
                        with open(build_file, 'w') as bf:
                            bf.write(script)
                    import conda_build.windows as windows
                    windows.build(m, build_file, config=config)
                else:
                    build_file = join(m.path, 'build.sh')

                    # There is no sense in trying to run an empty build script.
                    if isfile(build_file) or script:
                        with path_prepended(config.build_prefix):
                            env = environ.get_dict(config=config, m=m)
                        env["CONDA_BUILD_STATE"] = "BUILD"
                        work_file = join(source.get_dir(config), 'conda_build.sh')
                        if script:
                            with open(work_file, 'w') as bf:
                                bf.write(script)
                        if config.activate:
                            # NOTE(review): these bare open(...).read() calls
                            # leave the file handles to be closed by GC only.
                            if isfile(build_file):
                                data = open(build_file).read()
                            else:
                                data = open(work_file).read()
                            with open(work_file, 'w') as bf:
                                bf.write("source {conda_root}activate {build_prefix} &> "
                                         "/dev/null\n".format(
                                             conda_root=root_script_dir + os.path.sep,
                                             build_prefix=config.build_prefix))
                                bf.write(data)
                        else:
                            if not isfile(work_file):
                                copy_into(build_file, work_file, config.timeout)
                        os.chmod(work_file, 0o766)

                        if isfile(work_file):
                            cmd = [shell_path, '-x', '-e', work_file]
                            # this should raise if any problems occur while building
                            _check_call(cmd, env=env, cwd=src_dir)

    if post in [True, None]:
        if post:
            # Post-only run: restore the pre-build prefix snapshot.
            with open(join(config.croot, 'prefix_files.txt'), 'r') as f:
                files1 = set(f.read().splitlines())

        get_build_metadata(m, config=config)
        create_post_scripts(m, config=config)
        create_entry_points(m.get_value('build/entry_points'), config=config)
        files2 = prefix_files(prefix=config.build_prefix)

        post_process(sorted(files2 - files1),
                     prefix=config.build_prefix,
                     config=config,
                     preserve_egg_dir=bool(m.get_value('build/preserve_egg_dir')))

        # The post processing may have deleted some files (like easy-install.pth)
        files2 = prefix_files(prefix=config.build_prefix)
        if any(config.meta_dir in join(config.build_prefix, f) for f in files2 - files1):
            meta_files = (tuple(f for f in files2 - files1
                                if config.meta_dir in join(config.build_prefix, f)),)
            sys.exit(indent("""Error: Untracked file(s) %s found in conda-meta directory.
This error usually comes from using conda in the build script.  Avoid doing this, as it
can lead to packages that include their dependencies.""" % meta_files))
        post_build(m, sorted(files2 - files1),
                   prefix=config.build_prefix,
                   build_python=config.build_python,
                   croot=config.croot)
        create_info_files(m, sorted(files2 - files1), config=config,
                          prefix=config.build_prefix)
        if m.get_value('build/noarch_python'):
            import conda_build.noarch_python as noarch_python
            noarch_python.transform(m, sorted(files2 - files1), config.build_prefix)

        files3 = prefix_files(prefix=config.build_prefix)
        fix_permissions(files3 - files1, config.build_prefix)

        path = bldpkg_path(m, config)

        # lock the output directory while we build this file
        # create the tarball in a temporary directory to minimize lock time
        with TemporaryDirectory() as tmp:
            tmp_path = os.path.join(tmp, os.path.basename(path))
            t = tarfile.open(tmp_path, 'w:bz2')

            def order(f):
                # we don't care about empty files so send them back via 100000
                fsize = os.stat(join(config.build_prefix, f)).st_size or 100000
                # info/* records will be False == 0, others will be 1.
                info_order = int(os.path.dirname(f) != 'info')
                return info_order, fsize

            # add files in order of a) in info directory, b) increasing size so
            # we can access small manifest or json files without decompressing
            # possible large binary or data files
            for f in sorted(files3 - files1, key=order):
                t.add(join(config.build_prefix, f), f)
            t.close()

            # we're done building, perform some checks
            tarcheck.check_all(tmp_path)

            copy_into(tmp_path, path, config.timeout)
        update_index(config.bldpkgs_dir, config)
    else:
        print("STOPPING BUILD BEFORE POST:", m.dist())

    # returning true here says package is OK to test
    return True
def create_env(prefix, specs, config, clear_cache=True):
    '''
    Create a conda environment for the given prefix and specs.

    Recreates the prefix from scratch, installs the resolved specs under
    index/package-cache file locks, and retries once with a legacy 80-char
    prefix length if installation fails due to prefix-padding problems.

    :param prefix: target environment path (removed first if it exists).
    :param specs: iterable of match specs to install.
    :param config: build configuration (lock dirs, timeouts, flags).
    :param clear_cache: propagated on the prefix-length retry call.
    '''
    if config.debug:
        logging.getLogger("conda").setLevel(logging.DEBUG)
        logging.getLogger("binstar").setLevel(logging.DEBUG)
        logging.getLogger("install").setLevel(logging.DEBUG)
        logging.getLogger("conda.install").setLevel(logging.DEBUG)
        logging.getLogger("fetch").setLevel(logging.DEBUG)
        logging.getLogger("print").setLevel(logging.DEBUG)
        logging.getLogger("progress").setLevel(logging.DEBUG)
        logging.getLogger("dotupdate").setLevel(logging.DEBUG)
        logging.getLogger("stdoutlog").setLevel(logging.DEBUG)
        logging.getLogger("requests").setLevel(logging.DEBUG)
    else:
        silence_loggers(show_warnings_and_errors=True)

    if os.path.isdir(prefix):
        rm_rf(prefix)

    specs = list(specs)
    # Track features are requested via the "name@" spec syntax.
    for feature, value in feature_list:
        if value:
            specs.append('%s@' % feature)

    if specs:  # Don't waste time if there is nothing to do
        with path_prepended(prefix):
            locks = []
            try:
                # Lock every folder we may write to (pkgs cache + built pkgs).
                cc.pkgs_dirs = cc.pkgs_dirs[:1]
                locked_folders = cc.pkgs_dirs + list(config.bldpkgs_dirs)
                for folder in locked_folders:
                    if not os.path.isdir(folder):
                        os.makedirs(folder)
                    lock = filelock.SoftFileLock(join(folder, '.conda_lock'))
                    update_index(folder, config=config, lock=lock)
                    locks.append(lock)
                for lock in locks:
                    lock.acquire(timeout=config.timeout)
                index = get_build_index(config=config, clear_cache=True)

                actions = plan.install_actions(prefix, index, specs)
                if config.disable_pip:
                    actions['LINK'] = [spec for spec in actions['LINK'] if not spec.startswith('pip-')]  # noqa
                    actions['LINK'] = [spec for spec in actions['LINK'] if not spec.startswith('setuptools-')]  # noqa
                plan.display_actions(actions, index)
                if on_win:
                    # Python 2 on Windows requires str (not unicode) env values.
                    for k, v in os.environ.items():
                        os.environ[k] = str(v)
                plan.execute_actions(actions, index, verbose=config.debug)
            except (SystemExit, PaddingError, LinkError) as exc:
                # Prefix-padding failures get one retry at the legacy length.
                if (("too short in" in str(exc) or
                        'post-link failed for: openssl' in str(exc) or
                        isinstance(exc, PaddingError)) and
                        config.prefix_length > 80):
                    log.warn("Build prefix failed with prefix length %d",
                             config.prefix_length)
                    log.warn("Error was: ")
                    log.warn(str(exc))
                    log.warn("One or more of your package dependencies needs to be rebuilt "
                             "with a longer prefix length.")
                    log.warn("Falling back to legacy prefix length of 80 characters.")
                    log.warn("Your package will not install into prefixes > 80 characters.")
                    config.prefix_length = 80

                    # Set this here and use to create environ
                    #   Setting this here is important because we use it below (symlink)
                    prefix = config.build_prefix

                    # Release locks before recursing so the retry can acquire them.
                    for lock in locks:
                        lock.release()
                        if os.path.isfile(lock._lock_file):
                            os.remove(lock._lock_file)
                    create_env(prefix, specs, config=config, clear_cache=clear_cache)
                else:
                    for lock in locks:
                        lock.release()
                        if os.path.isfile(lock._lock_file):
                            os.remove(lock._lock_file)
                    raise
            finally:
                # NOTE(review): lock.release() appears to be safe to call
                # repeatedly here after the except-branch releases — confirm
                # against the filelock library's re-release semantics.
                for lock in locks:
                    lock.release()
                    if os.path.isfile(lock._lock_file):
                        os.remove(lock._lock_file)
        warn_on_old_conda_build(index=index)

    # ensure prefix exists, even if empty, i.e. when specs are empty
    if not isdir(prefix):
        os.makedirs(prefix)

    if on_win:
        shell = "cmd.exe"
    else:
        shell = "bash"
    symlink_conda(prefix, sys.prefix, shell)
def run_test(
    recipedir_or_package_or_metadata,
    config,
    stats,
    move_broken=True,
    provision_only=False,
    solver=None,
):
    """
    Execute any test scripts for the given package.

    :param recipedir_or_package_or_metadata: a recipe directory, a built
        package path, or a metadata object (anything with a ``config``
        attribute is treated as metadata).
    :param config: conda-build Config used when metadata must be constructed.
    :param stats: accepted for interface compatibility; not currently folded
        into here (see the commented-out stats_key block below).
    :param move_broken: on test failure, move the package to the broken dir.
    :param provision_only: set up the test env and scripts but do not run them.
    :param solver: optional pre-created solver to reuse across test runs.
    :return: True when tests pass or there is nothing to test.
    """
    # we want to know if we're dealing with package input. If so, we can move the input on success.
    # NOTE(review): hash_input is populated below but not used again in this
    # function body — confirm whether callers rely on it elsewhere.
    hash_input = {}

    # store this name to keep it consistent. By changing files, we change the hash later.
    # It matches the build hash now, so let's keep it around.
    test_package_name = (
        recipedir_or_package_or_metadata.dist()
        if hasattr(recipedir_or_package_or_metadata, "dist")
        else recipedir_or_package_or_metadata
    )

    if not provision_only:
        print("TEST START:", test_package_name)

    # metadata object passed in directly vs. something we must render ourselves
    if hasattr(recipedir_or_package_or_metadata, "config"):
        metadata = recipedir_or_package_or_metadata
        utils.rm_rf(metadata.config.test_dir)
    else:
        metadata, hash_input = construct_metadata_for_test(
            recipedir_or_package_or_metadata, config
        )

    # shell trace flag forwarded into the generated test scripts
    trace = "-x " if metadata.config.debug else ""

    # Must download *after* computing build id, or else computing build id will change
    # folder destination
    _extract_test_files_from_package(metadata)

    # When testing a .tar.bz2 in the pkgs dir, clean_pkg_cache() will remove it.
    # Prevent this. When https://github.com/conda/conda/issues/5708 gets fixed
    # I think we can remove this call to clean_pkg_cache().
    in_pkg_cache = (
        not hasattr(recipedir_or_package_or_metadata, "config")
        and os.path.isfile(recipedir_or_package_or_metadata)
        and recipedir_or_package_or_metadata.endswith(CONDA_PACKAGE_EXTENSIONS)
        and os.path.dirname(recipedir_or_package_or_metadata) in pkgs_dirs[0]
    )
    if not in_pkg_cache:
        environ.clean_pkg_cache(metadata.dist(), metadata.config)

    copy_test_source_files(metadata, metadata.config.test_dir)
    # this is also copying tests/source_files from work_dir to testing workdir

    _, pl_files, py_files, r_files, lua_files, shell_files = create_all_test_files(
        metadata
    )
    if (
        not any([py_files, shell_files, pl_files, lua_files, r_files])
        and not metadata.config.test_run_post
    ):
        print("Nothing to test for:", test_package_name)
        return True

    if metadata.config.remove_work_dir:
        for name, prefix in (
            ("host", metadata.config.host_prefix),
            ("build", metadata.config.build_prefix),
        ):
            if os.path.isdir(prefix):
                # move host folder to force hardcoded paths to host env to break during tests
                # (so that they can be properly addressed by recipe author)
                dest = os.path.join(
                    os.path.dirname(prefix),
                    "_".join(
                        (
                            "%s_prefix_moved" % name,
                            metadata.dist(),
                            getattr(metadata.config, "%s_subdir" % name),
                        )
                    ),
                )
                # Needs to come after create_files in case there's test/source_files
                shutil_move_more_retrying(prefix, dest, "{} prefix".format(prefix))

        # nested if so that there's no warning when we just leave the empty workdir in place
        if metadata.source_provided:
            dest = os.path.join(
                os.path.dirname(metadata.config.work_dir),
                "_".join(("work_moved", metadata.dist(), metadata.config.host_subdir)),
            )
            # Needs to come after create_files in case there's test/source_files
            shutil_move_more_retrying(config.work_dir, dest, "work")
    else:
        log.warn(
            "Not moving work directory after build. Your package may depend on files "
            "in the work directory that are not included with your package"
        )

    # looks like a dead function to me
    # get_build_metadata(metadata)

    specs = metadata.get_test_deps(py_files, pl_files, lua_files, r_files)

    # first env construction: only to collect env vars for script generation
    with utils.path_prepended(metadata.config.test_prefix):
        env = dict(os.environ.copy())
        env.update(environ.get_dict(m=metadata, prefix=config.test_prefix))
        env["CONDA_BUILD_STATE"] = "TEST"
        env["CONDA_BUILD"] = "1"
        if env_path_backup_var_exists:
            env["CONDA_PATH_BACKUP"] = os.environ["CONDA_PATH_BACKUP"]

    if not metadata.config.activate or metadata.name() == "conda":
        # prepend bin (or Scripts) directory
        env = utils.prepend_bin_path(
            env, metadata.config.test_prefix, prepend_prefix=True
        )
        if utils.on_win:
            env["PATH"] = metadata.config.test_prefix + os.pathsep + env["PATH"]

    env["PREFIX"] = metadata.config.test_prefix
    if "BUILD_PREFIX" in env:
        del env["BUILD_PREFIX"]

    # In the future, we will need to support testing cross compiled
    # packages on physical hardware. until then it is expected that
    # something like QEMU or Wine will be used on the build machine,
    # therefore, for now, we use host_subdir.

    # ensure that the test prefix isn't kept between variants
    utils.rm_rf(metadata.config.test_prefix)

    if solver is None:
        solver, pkg_cache_path = get_solver(metadata.config.host_subdir)
    else:
        pkg_cache_path = PackageCacheData.first_writable().pkgs_dir
    solver.replace_channels()
    transaction = solver.solve(specs, [pkg_cache_path])

    downloaded = transaction.fetch_extract_packages(
        pkg_cache_path,
        solver.repos + list(solver.local_repos.values()),
    )
    if not downloaded:
        raise RuntimeError("Did not succeed in downloading packages.")

    mkdir_p(metadata.config.test_prefix)
    transaction.execute(
        PrefixData(metadata.config.test_prefix),
        pkg_cache_path,
    )

    # second env construction: now that the test prefix is populated
    with utils.path_prepended(metadata.config.test_prefix):
        env = dict(os.environ.copy())
        env.update(environ.get_dict(m=metadata, prefix=metadata.config.test_prefix))
    env["CONDA_BUILD_STATE"] = "TEST"
    if env_path_backup_var_exists:
        env["CONDA_PATH_BACKUP"] = os.environ["CONDA_PATH_BACKUP"]

    if config.test_run_post:
        from conda_build.utils import get_installed_packages

        installed = get_installed_packages(metadata.config.test_prefix)
        files = installed[metadata.meta["package"]["name"]]["files"]
        replacements = get_all_replacements(metadata.config)
        try_download(metadata, False, True)
        create_info_files(metadata, replacements, files, metadata.config.test_prefix)
        post_build(metadata, files, None, metadata.config.test_prefix, True)

    # when workdir is removed, the source files are unavailable. There's the test/source_files
    # entry that lets people keep these files around. The files are copied into test_dir for
    # intuitive relative path behavior, though, not work_dir, so we need to adjust where
    # SRC_DIR points. The initial CWD during tests is test_dir.
    if metadata.config.remove_work_dir:
        env["SRC_DIR"] = metadata.config.test_dir

    test_script, _ = write_test_scripts(
        metadata, env, py_files, pl_files, lua_files, r_files, shell_files, trace
    )

    if utils.on_win:
        cmd = [os.environ.get("COMSPEC", "cmd.exe"), "/d", "/c", test_script]
    else:
        # -o errexit: abort the test script on the first failing command
        cmd = (
            [shell_path]
            + (["-x"] if metadata.config.debug else [])
            + ["-o", "errexit", test_script]
        )
    try:
        test_stats = {}
        if not provision_only:
            # rewrite long paths in stdout back to their env variables
            if metadata.config.debug or metadata.config.no_rewrite_stdout_env:
                rewrite_env = None
            else:
                rewrite_env = {k: env[k] for k in ["PREFIX", "SRC_DIR"] if k in env}
                # NOTE(review): nesting reconstructed — verbose echo assumed to
                # apply only when rewrite_env was built (else it would crash on
                # None); confirm against upstream.
                if metadata.config.verbose:
                    for k, v in rewrite_env.items():
                        print(
                            "{0} {1}={2}".format(
                                "set" if test_script.endswith(".bat") else "export",
                                k,
                                v,
                            )
                        )
            utils.check_call_env(
                cmd,
                env=env,
                cwd=metadata.config.test_dir,
                stats=test_stats,
                rewrite_stdout_env=rewrite_env,
            )
            log_stats(test_stats, "testing {}".format(metadata.name()))
            # TODO need to implement metadata.get_used_loop_vars
            # if stats is not None and metadata.config.variants:
            #     stats[
            #         stats_key(metadata, "test_{}".format(metadata.name()))
            #     ] = test_stats
            # scripts may signal failure via a sentinel file rather than exit code
            if os.path.exists(join(metadata.config.test_dir, "TEST_FAILED")):
                raise subprocess.CalledProcessError(-1, "")
            print("TEST END:", test_package_name)
    except subprocess.CalledProcessError as _:  # noqa
        tests_failed(
            metadata,
            move_broken=move_broken,
            broken_dir=metadata.config.broken_dir,
            config=metadata.config,
        )
        raise

    if config.need_cleanup and config.recipe_dir is not None and not provision_only:
        utils.rm_rf(config.recipe_dir)

    return True
def download_source(m, interactive=False):
    """Fetch the package sources with the build prefix's binaries on PATH."""
    build_prefix = m.config.build_prefix
    with utils.path_prepended(build_prefix):
        _try_download(m, interactive)
def write_build_scripts(m, script, build_file):
    """
    Write the build environment setup script and the build driver script
    into the work directory.

    :param m: package metadata (provides config, variant, features)
    :param script: inline build/script text from the recipe, if any
    :param build_file: path to the recipe's build.sh (used when present)
    :return: tuple of (work_file, env_file) paths
    """
    # collect the build environment with both host and build prefixes on PATH
    with utils.path_prepended(m.config.host_prefix):
        with utils.path_prepended(m.config.build_prefix):
            env = environ.get_dict(m=m)

    # fill in platform stub variables from the variant when missing
    sysvars = get_sys_vars_stubs(env["target_platform"])
    for s in sysvars:
        if s not in env and s in m.config.variant:
            env[s] = m.config.variant[s]
    env.update(m.build_features())
    env["CONDA_BUILD_STATE"] = "BUILD"

    # forcing shiny colors everywhere
    env["CLICOLOR_FORCE"] = 1
    env["AM_COLOR_TESTS"] = "always"
    env["MAKE_TERMOUT"] = "1"
    env["CMAKE_COLOR_MAKEFILE"] = "ON"
    env["CXXFLAGS"] = "-fdiagnostics-color=always"
    env["CFLAGS"] = "-fdiagnostics-color=always"

    # hard-code this because we never want pip's build isolation
    # https://github.com/conda/conda-build/pull/2972#discussion_r198290241
    #
    # Note that pip env "NO" variables are inverted logic.
    # PIP_NO_BUILD_ISOLATION=False means don't use build isolation.
    #
    env["PIP_NO_BUILD_ISOLATION"] = "False"
    # some other env vars to have pip ignore dependencies.
    # we supply them ourselves instead.
    env["PIP_NO_DEPENDENCIES"] = True
    env["PIP_IGNORE_INSTALLED"] = True
    # pip's cache directory (PIP_NO_CACHE_DIR) should not be
    # disabled as this results in .egg-info rather than
    # .dist-info directories being created, see gh-3094
    # set PIP_CACHE_DIR to a path in the work dir that does not exist.
    env["PIP_CACHE_DIR"] = m.config.pip_cache_dir
    # tell pip to not get anything from PyPI, please. We have everything we need
    # locally, and if we don't, it's a problem.
    env["PIP_NO_INDEX"] = True

    if m.noarch == "python":
        # noarch python packages must not ship .pyc files
        env["PYTHONDONTWRITEBYTECODE"] = True

    work_file = join(m.config.work_dir, "conda_build.sh")
    env_file = join(m.config.work_dir, "build_env_setup.sh")
    with open(env_file, "w") as bf:
        # export every non-empty env var into the setup script
        for k, v in env.items():
            if v != "" and v is not None:
                bf.write('export {0}="{1}"\n'.format(k, v))
                # console.print('export {0}="{1}"\n'.format(k, v))
        if m.activate_build_script:
            _write_sh_activation_text(bf, m)
        # add the feature function
        # NOTE(review): heredoc whitespace reconstructed; confirm exact
        # formatting against the canonical script if it matters downstream.
        bf.write("""
function feature()
{
   if [[ $1 != "0" ]]
   then
      echo $2
   else
      echo $3
   fi
}
""")
    with open(work_file, "w") as bf:
        # bf.write('set -ex\n')
        # only source the env setup when not already inside a conda-build run
        bf.write("if [ -z ${CONDA_BUILD+x} ]; then\n")
        bf.write("    source {}\n".format(env_file))
        bf.write("fi\n")
        if isfile(build_file):
            bf.write(open(build_file).read())
        elif script:
            bf.write(script)
    os.chmod(work_file, 0o766)
    return work_file, env_file
def build(m, bld_bat):
    """
    Run a Windows build: generate bld.bat with the build environment header
    prepended, execute it, then fix up staged scripts.

    :param m: package metadata
    :param bld_bat: path to the recipe's bld.bat
    """
    with path_prepended(m.config.build_prefix):
        env = environ.get_dict(config=m.config, m=m)
    env["CONDA_BUILD_STATE"] = "BUILD"

    # set variables like CONDA_PY in the test environment
    env.update(set_language_env_vars(m.config.variant))

    # make sure the LIBRARY_* folders exist before the build writes into them
    for name in 'BIN', 'INC', 'LIB':
        path = env['LIBRARY_' + name]
        if not isdir(path):
            os.makedirs(path)

    src_dir = m.config.work_dir
    if os.path.isfile(bld_bat):
        with open(bld_bat) as fi:
            data = fi.read()
        with open(join(src_dir, 'bld.bat'), 'w') as fo:
            # more debuggable with echo on
            fo.write('@echo on\n')
            for key, value in env.items():
                if value:
                    fo.write('set "{key}={value}"\n'.format(key=key, value=value))
            if not m.uses_new_style_compiler_activation:
                # NOTE(review): `bits` is not defined anywhere in this function;
                # presumably it is a module-level name outside this view — the
                # sibling build() here uses m.config.host_arch instead. Verify.
                fo.write(msvc_env_cmd(bits=bits, config=m.config,
                                      override=m.get_value('build/msvc_compiler', None)))
            # Reset echo on, because MSVC scripts might have turned it off
            fo.write('@echo on\n')
            fo.write('set "INCLUDE={};%INCLUDE%"\n'.format(env["LIBRARY_INC"]))
            fo.write('set "LIB={};%LIB%"\n'.format(env["LIBRARY_LIB"]))
            if m.config.activate:
                fo.write('call "{conda_root}\\activate.bat" "{prefix}"\n'.format(
                    conda_root=root_script_dir,
                    prefix=m.config.build_prefix))
                if m.is_cross:
                    # HACK: we need both build and host envs "active" - i.e. on PATH,
                    #     and with their activate.d scripts sourced. Conda only
                    #     lets us activate one, though. This is a
                    #     vile hack to trick conda into "stacking"
                    #     two environments.
                    #
                    # Net effect: binaries come from host first, then build
                    #
                    # Conda 4.4 may break this by reworking the activate scripts.
                    #  ^^ shouldn't be true
                    # In conda 4.4, export CONDA_MAX_SHLVL=2 to stack envs to two
                    #    levels deep.
                    # conda 4.4 does require that a conda-meta/history file
                    #    exists to identify a valid conda environment
                    history_file = join(m.config.host_prefix, 'conda-meta', 'history')
                    if not isfile(history_file):
                        if not isdir(dirname(history_file)):
                            os.makedirs(dirname(history_file))
                        open(history_file, 'a').close()
                    # removing this placeholder should make conda double-activate with conda 4.3
                    fo.write('set "PATH=%PATH:CONDA_PATH_PLACEHOLDER;=%"\n')
                    fo.write('set CONDA_MAX_SHLVL=2\n')
                    fo.write('call "{conda_root}\\activate.bat" "{prefix}"\n'.format(
                        conda_root=root_script_dir,
                        prefix=m.config.host_prefix))
            fo.write("REM ===== end generated header =====\n")
            fo.write(data)

        cmd = ['cmd.exe', '/c', 'bld.bat']
        check_call_env(cmd, cwd=src_dir)

    fix_staged_scripts(join(m.config.build_prefix, 'Scripts'))
def create_env(prefix, specs, config, clear_cache=True):
    '''
    Create a conda environment for the given prefix and specs.

    :param prefix: path of the environment to create (removed first if it exists)
    :param specs: iterable of conda package spec strings to install
    :param config: conda-build Config (lock folders, timeout, debug, disable_pip)
    :param clear_cache: whether to clear the cached build index before solving.
        Previously this parameter was accepted but ignored (the cache was
        always cleared); it is now passed through to get_build_index.
    '''
    if config.debug:
        # surface DEBUG output from every conda subsystem
        logging.getLogger("conda").setLevel(logging.DEBUG)
        logging.getLogger("binstar").setLevel(logging.DEBUG)
        logging.getLogger("install").setLevel(logging.DEBUG)
        logging.getLogger("conda.install").setLevel(logging.DEBUG)
        logging.getLogger("fetch").setLevel(logging.DEBUG)
        logging.getLogger("print").setLevel(logging.DEBUG)
        logging.getLogger("progress").setLevel(logging.DEBUG)
        logging.getLogger("dotupdate").setLevel(logging.DEBUG)
        logging.getLogger("stdoutlog").setLevel(logging.DEBUG)
        logging.getLogger("requests").setLevel(logging.DEBUG)
    else:
        silence_loggers(show_warnings_and_errors=True)

    if os.path.isdir(prefix):
        rm_rf(prefix)

    specs = list(specs)
    # track-features are requested as "<feature>@" specs
    for feature, value in feature_list:
        if value:
            specs.append('%s@' % feature)

    if specs:  # Don't waste time if there is nothing to do
        with path_prepended(prefix):
            locks = []
            try:
                # restrict to the first package cache, then lock it along with
                # the local build-package folders (re-indexing only the latter)
                cc.pkgs_dirs = cc.pkgs_dirs[:1]
                locked_folders = cc.pkgs_dirs + list(config.bldpkgs_dirs)
                for folder in locked_folders:
                    if not os.path.isdir(folder):
                        os.makedirs(folder)
                    lock = filelock.SoftFileLock(join(folder, '.conda_lock'))
                    if not folder.endswith('pkgs'):
                        update_index(folder, config=config, lock=lock, could_be_mirror=False)
                    lock.acquire(timeout=config.timeout)
                    locks.append(lock)
                # FIX: honor the clear_cache argument instead of
                # unconditionally clearing the cached index
                index = get_build_index(config=config, clear_cache=clear_cache)

                actions = plan.install_actions(prefix, index, specs)
                if config.disable_pip:
                    actions['LINK'] = [spec for spec in actions['LINK'] if not spec.startswith('pip-')]  # noqa
                    actions['LINK'] = [spec for spec in actions['LINK'] if not spec.startswith('setuptools-')]  # noqa
                plan.display_actions(actions, index)
                if on_win:
                    # Windows requires str (not unicode/other) env values
                    for k, v in os.environ.items():
                        os.environ[k] = str(v)
                plan.execute_actions(actions, index, verbose=config.debug)
            except (SystemExit, PaddingError, LinkError) as exc:
                if (("too short in" in str(exc) or
                        'post-link failed for: openssl' in str(exc) or
                        isinstance(exc, PaddingError)) and
                        config.prefix_length > 80):
                    log.warn("Build prefix failed with prefix length %d", config.prefix_length)
                    log.warn("Error was: ")
                    log.warn(str(exc))
                    log.warn("One or more of your package dependencies needs to be rebuilt "
                             "with a longer prefix length.")
                    log.warn("Falling back to legacy prefix length of 80 characters.")
                    log.warn("Your package will not install into prefixes > 80 characters.")
                    config.prefix_length = 80

                    # Set this here and use to create environ
                    # Setting this here is important because we use it below (symlink)
                    prefix = config.build_prefix
                    # release locks before the recursive retry so it can re-acquire them
                    for lock in locks:
                        lock.release()
                        if os.path.isfile(lock._lock_file):
                            os.remove(lock._lock_file)
                    create_env(prefix, specs, config=config, clear_cache=clear_cache)
                else:
                    for lock in locks:
                        lock.release()
                        if os.path.isfile(lock._lock_file):
                            os.remove(lock._lock_file)
                    raise
            finally:
                # release() is safe to call repeatedly; lock files are removed
                # so stale locks don't linger on disk
                for lock in locks:
                    lock.release()
                    if os.path.isfile(lock._lock_file):
                        os.remove(lock._lock_file)
        warn_on_old_conda_build(index=index)

    # ensure prefix exists, even if empty, i.e. when specs are empty
    if not isdir(prefix):
        os.makedirs(prefix)
    if on_win:
        shell = "cmd.exe"
    else:
        shell = "bash"
    symlink_conda(prefix, sys.prefix, shell)
def create_env(prefix, specs_or_actions, env, config, subdir, clear_cache=True,
               retry=0, locks=None, is_cross=False):
    '''
    Create a conda environment for the given prefix and either a list of
    specs (solved here) or a pre-computed actions dict (executed directly).
    Retries on known transient failures up to config.max_env_retry times,
    and falls back to an 80-character prefix on padding errors.
    '''
    if config.debug:
        external_logger_context = utils.LoggingContext(logging.DEBUG)
    else:
        external_logger_context = utils.LoggingContext(logging.ERROR)

    with external_logger_context:
        log = utils.get_logger(__name__)

        # if os.path.isdir(prefix):
        #     utils.rm_rf(prefix)

        if specs_or_actions:  # Don't waste time if there is nothing to do
            log.debug("Creating environment in %s", prefix)
            log.debug(str(specs_or_actions))

            with utils.path_prepended(prefix):
                if not locks:
                    locks = utils.get_conda_operation_locks(config)
                try:
                    with utils.try_acquire_locks(locks, timeout=config.timeout):
                        # input is a list - it's specs in MatchSpec format
                        if not hasattr(specs_or_actions, 'keys'):
                            specs = list(set(specs_or_actions))
                            actions = get_install_actions(prefix, tuple(specs), env,
                                                          subdir=subdir,
                                                          verbose=config.verbose,
                                                          debug=config.debug,
                                                          locking=config.locking,
                                                          bldpkgs_dirs=tuple(config.bldpkgs_dirs),
                                                          timeout=config.timeout,
                                                          disable_pip=config.disable_pip,
                                                          max_env_retry=config.max_env_retry,
                                                          output_folder=config.output_folder,
                                                          channel_urls=tuple(config.channel_urls))
                        else:
                            actions = specs_or_actions
                        index, index_ts = get_build_index(subdir=subdir,
                                                          bldpkgs_dir=config.bldpkgs_dir,
                                                          output_folder=config.output_folder,
                                                          channel_urls=config.channel_urls,
                                                          debug=config.debug,
                                                          verbose=config.verbose,
                                                          locking=config.locking,
                                                          timeout=config.timeout)
                        utils.trim_empty_keys(actions)
                        display_actions(actions, index)
                        if utils.on_win:
                            # Windows requires str env values
                            for k, v in os.environ.items():
                                os.environ[k] = str(v)
                        execute_actions(actions, index, verbose=config.debug)
                except (SystemExit, PaddingError, LinkError,
                        DependencyNeedsBuildingError, CondaError) as exc:
                    # NOTE(review): if the exception was raised before `actions`
                    # was assigned (e.g. inside get_install_actions), the retry
                    # calls below would hit an unbound local — confirm whether
                    # that path is reachable in practice.
                    if (("too short in" in str(exc) or
                            re.search('post-link failed for: (?:[a-zA-Z]*::)?openssl',
                                      str(exc)) or
                            isinstance(exc, PaddingError)) and
                            config.prefix_length > 80):
                        if config.prefix_length_fallback:
                            log.warn("Build prefix failed with prefix length %d",
                                     config.prefix_length)
                            log.warn("Error was: ")
                            log.warn(str(exc))
                            log.warn("One or more of your package dependencies needs to be rebuilt "
                                     "with a longer prefix length.")
                            log.warn("Falling back to legacy prefix length of 80 characters.")
                            log.warn("Your package will not install into prefixes > 80 characters.")
                            config.prefix_length = 80

                            # Set this here and use to create environ
                            # Setting this here is important because we use it below (symlink)
                            prefix = config.build_prefix
                            actions['PREFIX'] = prefix
                            create_env(prefix, actions, config=config, subdir=subdir,
                                       env=env, clear_cache=clear_cache, is_cross=is_cross)
                        else:
                            raise
                    elif 'lock' in str(exc):
                        if retry < config.max_env_retry:
                            log.warn("failed to create env, retrying. exception was: %s", str(exc))
                            create_env(prefix, actions, config=config, subdir=subdir,
                                       env=env, clear_cache=clear_cache,
                                       retry=retry + 1, is_cross=is_cross)
                    elif ('requires a minimum conda version' in str(exc) or
                          'link a source that does not' in str(exc)):
                        # partial package extraction: walk up from the path in the
                        # message until we reach a known package cache dir
                        with utils.try_acquire_locks(locks, timeout=config.timeout):
                            pkg_dir = str(exc)
                            folder = 0
                            while os.path.dirname(pkg_dir) not in pkgs_dirs and folder < 20:
                                pkg_dir = os.path.dirname(pkg_dir)
                                folder += 1
                            log.warn("I think conda ended up with a partial extraction for %s. "
                                     "Removing the folder and retrying", pkg_dir)
                            if os.path.isdir(pkg_dir):
                                utils.rm_rf(pkg_dir)
                        if retry < config.max_env_retry:
                            log.warn("failed to create env, retrying. exception was: %s", str(exc))
                            create_env(prefix, actions, config=config, subdir=subdir,
                                       env=env, clear_cache=clear_cache,
                                       retry=retry + 1, is_cross=is_cross)
                        else:
                            log.error("Failed to create env, max retries exceeded.")
                            raise
                    else:
                        raise
                # HACK: some of the time, conda screws up somehow and incomplete packages result.
                #    Just retry.
                except (AssertionError, IOError, ValueError, RuntimeError, LockError) as exc:
                    if isinstance(exc, AssertionError):
                        with utils.try_acquire_locks(locks, timeout=config.timeout):
                            pkg_dir = os.path.dirname(os.path.dirname(str(exc)))
                            log.warn("I think conda ended up with a partial extraction for %s. "
                                     "Removing the folder and retrying", pkg_dir)
                            if os.path.isdir(pkg_dir):
                                utils.rm_rf(pkg_dir)
                    if retry < config.max_env_retry:
                        log.warn("failed to create env, retrying. exception was: %s", str(exc))
                        create_env(prefix, actions, config=config, subdir=subdir,
                                   env=env, clear_cache=clear_cache,
                                   retry=retry + 1, is_cross=is_cross)
                    else:
                        log.error("Failed to create env, max retries exceeded.")
                        raise

    # We must not symlink conda across different platforms when cross-compiling.
    # On second thought, I think we must, because activating the host env does
    #    the symlink for us anyway, and when activate does it, we end up with
    #    conda symlinks in every package. =()
    # if os.path.basename(prefix) == '_build_env' or not is_cross:
    if utils.on_win:
        shell = "cmd.exe"
    else:
        shell = "bash"
    symlink_conda(prefix, sys.prefix, shell)
def build(m, config, post=None, need_source_download=True, need_reparse_in_env=False):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :type post: bool or None. None means run the whole build. True means run
    post only. False means stop just before the post.
    :type keep_old_work: bool: Keep any previous work directory.
    :type need_source_download: bool: if rendering failed to download source
    (due to missing tools), retry here after build env is populated

    :return: False when the build is skipped, True when the package is OK to test.
    '''
    if m.skip():
        print_skip_message(m)
        return False

    if config.skip_existing:
        package_exists = is_package_built(m, config)
        if package_exists:
            print(m.dist(), "is already built in {0}, skipping.".format(package_exists))
            return False

    if post in [False, None]:
        print("BUILD START:", m.dist())
        if m.uses_jinja and (need_source_download or need_reparse_in_env):
            print("    (actual version deferred until further download or env creation)")

        specs = [ms.spec for ms in m.ms_depends('build')]
        create_env(config.build_prefix, specs, config=config)
        # if the recipe fetches source via a VCS, make sure the VCS tool is
        # available in the build env even if not listed as a dependency
        vcs_source = m.uses_vcs_in_build
        if vcs_source and vcs_source not in specs:
            vcs_executable = "hg" if vcs_source == "mercurial" else vcs_source
            has_vcs_available = os.path.isfile(external.find_executable(vcs_executable,
                                                                       config.build_prefix) or "")
            if not has_vcs_available:
                if (vcs_source != "mercurial" or
                        not any(spec.startswith('python') and "3." in spec
                                for spec in specs)):
                    specs.append(vcs_source)

                    log.warn("Your recipe depends on %s at build time (for templates), "
                             "but you have not listed it as a build dependency. Doing "
                             "so for this build.", vcs_source)

                    # Display the name only
                    # Version number could be missing due to dependency on source info.
                    create_env(config.build_prefix, specs, config=config)
                else:
                    raise ValueError("Your recipe uses mercurial in build, but mercurial"
                                     " does not yet support Python 3. Please handle all of "
                                     "your mercurial actions outside of your build script.")

        if need_source_download:
            # Execute any commands fetching the source (e.g., git) in the _build environment.
            # This makes it possible to provide source fetchers (eg. git, hg, svn) as build
            # dependencies.
            with path_prepended(config.build_prefix):
                m, need_source_download, need_reparse_in_env = parse_or_try_download(
                    m,
                    no_download_source=False,
                    force_download=True,
                    config=config)
            assert not need_source_download, "Source download failed. Please investigate."
            if m.uses_jinja:
                print("BUILD START (revised):", m.dist())

        if need_reparse_in_env:
            reparse(m, config=config)
            print("BUILD START (revised):", m.dist())

        # a package must not be installed as its own build dependency
        if m.name() in [i.rsplit('-', 2)[0] for i in linked(config.build_prefix)]:
            print("%s is installed as a build dependency. Removing." % m.name())
            index = get_build_index(config=config, clear_cache=False)
            actions = plan.remove_actions(config.build_prefix, [m.name()], index=index)
            assert not plan.nothing_to_do(actions), actions
            plan.display_actions(actions, index)
            plan.execute_actions(actions, index)

        print("Package:", m.dist())

        with filelock.SoftFileLock(join(config.build_folder, ".conda_lock"),
                                   timeout=config.timeout):
            # get_dir here might be just work, or it might be one level deeper,
            #     dependening on the source.
            src_dir = config.work_dir
            if isdir(src_dir):
                print("source tree in:", src_dir)
            else:
                print("no source - creating empty work folder")
                os.makedirs(src_dir)

            rm_rf(config.info_dir)
            # snapshot of prefix contents before the build runs; the package
            # payload is everything added relative to this set
            files1 = prefix_files(prefix=config.build_prefix)
            for pat in m.always_include_files():
                has_matches = False
                for f in set(files1):
                    if fnmatch.fnmatch(f, pat):
                        print("Including in package existing file", f)
                        files1.discard(f)
                        has_matches = True
                if not has_matches:
                    log.warn("Glob %s from always_include_files does not match any files", pat)
            # Save this for later
            with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
                f.write(u'\n'.join(sorted(list(files1))))
                f.write(u'\n')

            # Use script from recipe?
            script = m.get_value('build/script', None)
            if script:
                if isinstance(script, list):
                    script = '\n'.join(script)

            if isdir(src_dir):
                if on_win:
                    build_file = join(m.path, 'bld.bat')
                    if script:
                        build_file = join(src_dir, 'bld.bat')
                        with open(build_file, 'w') as bf:
                            bf.write(script)
                    import conda_build.windows as windows
                    windows.build(m, build_file, config=config)
                else:
                    build_file = join(m.path, 'build.sh')

                    # There is no sense in trying to run an empty build script.
                    if isfile(build_file) or script:
                        with path_prepended(config.build_prefix):
                            env = environ.get_dict(config=config, m=m)
                        env["CONDA_BUILD_STATE"] = "BUILD"
                        work_file = join(config.work_dir, 'conda_build.sh')
                        if script:
                            with open(work_file, 'w') as bf:
                                bf.write(script)
                        if config.activate:
                            # prepend an activate call to whichever script we run
                            if isfile(build_file):
                                data = open(build_file).read()
                            else:
                                data = open(work_file).read()
                            with open(work_file, 'w') as bf:
                                bf.write("source {conda_root}activate {build_prefix} &> "
                                         "/dev/null\n".format(
                                             conda_root=root_script_dir + os.path.sep,
                                             build_prefix=config.build_prefix))
                                bf.write(data)
                        else:
                            if not isfile(work_file):
                                copy_into(build_file, work_file, config.timeout)
                        os.chmod(work_file, 0o766)

                        if isfile(work_file):
                            cmd = [shell_path, '-x', '-e', work_file]
                            # this should raise if any problems occur while building
                            _check_call(cmd, env=env, cwd=src_dir)

    if post in [True, None]:
        if post:
            # post-only run: restore the pre-build prefix snapshot from disk
            with open(join(config.croot, 'prefix_files.txt'), 'r') as f:
                files1 = set(f.read().splitlines())

        get_build_metadata(m, config=config)
        create_post_scripts(m, config=config)

        if not is_noarch_python(m):
            create_entry_points(m.get_value('build/entry_points'), config=config)
        files2 = prefix_files(prefix=config.build_prefix)

        post_process(sorted(files2 - files1),
                     prefix=config.build_prefix,
                     config=config,
                     preserve_egg_dir=bool(m.get_value('build/preserve_egg_dir')),
                     noarch=m.get_value('build/noarch'))

        # The post processing may have deleted some files (like easy-install.pth)
        files2 = prefix_files(prefix=config.build_prefix)
        if any(config.meta_dir in join(config.build_prefix, f) for f in files2 - files1):
            meta_files = (tuple(f for f in files2 - files1
                                if config.meta_dir in join(config.build_prefix, f)),)
            sys.exit(indent("""Error: Untracked file(s) %s found in conda-meta directory.
This error usually comes from using conda in the build script. Avoid doing this, as it
can lead to packages that include their dependencies.""" % meta_files))
        post_build(m, sorted(files2 - files1),
                   prefix=config.build_prefix,
                   build_python=config.build_python,
                   croot=config.croot)

        entry_point_script_names = get_entry_point_script_names(get_entry_points(config, m))
        if is_noarch_python(m):
            # entry point scripts are generated at install time for noarch python
            pkg_files = [f for f in sorted(files2 - files1)
                         if f not in entry_point_script_names]
        else:
            pkg_files = sorted(files2 - files1)

        create_info_files(m, pkg_files, config=config, prefix=config.build_prefix)

        if m.get_value('build/noarch_python'):
            noarch_python.transform(m, sorted(files2 - files1), config.build_prefix)
        elif is_noarch_python(m):
            noarch_python.populate_files(m, pkg_files, config.build_prefix,
                                         entry_point_script_names)

        files3 = prefix_files(prefix=config.build_prefix)
        fix_permissions(files3 - files1, config.build_prefix)

        path = bldpkg_path(m, config)

        # lock the output directory while we build this file
        # create the tarball in a temporary directory to minimize lock time
        with TemporaryDirectory() as tmp:
            tmp_path = os.path.join(tmp, os.path.basename(path))
            t = tarfile.open(tmp_path, 'w:bz2')

            def order(f):
                # we don't care about empty files so send them back via 100000
                fsize = os.stat(join(config.build_prefix, f)).st_size or 100000
                # info/* records will be False == 0, others will be 1.
                info_order = int(os.path.dirname(f) != 'info')
                return info_order, fsize

            # add files in order of a) in info directory, b) increasing size so
            # we can access small manifest or json files without decompressing
            # possible large binary or data files
            for f in sorted(files3 - files1, key=order):
                t.add(join(config.build_prefix, f), f)
            t.close()

            # we're done building, perform some checks
            tarcheck.check_all(tmp_path)

            copy_into(tmp_path, path, config.timeout)
        update_index(config.bldpkgs_dir, config, could_be_mirror=False)

    else:
        print("STOPPING BUILD BEFORE POST:", m.dist())

    # returning true here says package is OK to test
    return True
def download_source(m):
    """Fetch everything the recipe's source section requires.

    Runs with the build prefix's binaries prepended to PATH so that
    VCS tools installed there are found.
    """
    prefix = m.config.build_prefix
    with utils.path_prepended(prefix):
        try_download(m, no_download_source=False, raise_error=True)
def create_env(prefix, specs, config, subdir, clear_cache=True, retry=0,
               index=None, locks=None):
    '''
    Create a conda envrionment for the given prefix and specs.

    Retries known transient failures (locks, partial extractions) up to
    config.max_env_retry times and falls back to an 80-character prefix
    on padding errors when config.prefix_length_fallback is set.

    NOTE(review): the clear_cache parameter is only forwarded to the
    recursive retry calls, never to get_build_index — confirm whether this
    era's get_build_index accepts it before changing.
    '''
    if config.debug:
        utils.get_logger("conda_build").setLevel(logging.DEBUG)
        external_logger_context = utils.LoggingContext(logging.DEBUG)
    else:
        utils.get_logger("conda_build").setLevel(logging.INFO)
        external_logger_context = utils.LoggingContext(logging.ERROR)

    with external_logger_context:
        log = utils.get_logger(__name__)

        if os.path.isdir(prefix):
            utils.rm_rf(prefix)

        specs = list(set(specs))
        # track-features are requested as "<feature>@" specs
        for feature, value in feature_list:
            if value:
                specs.append('%s@' % feature)

        if specs:  # Don't waste time if there is nothing to do
            log.debug("Creating environment in %s", prefix)
            log.debug(str(specs))

            with utils.path_prepended(prefix):
                if not locks:
                    locks = utils.get_conda_operation_locks(config)
                try:
                    with utils.try_acquire_locks(locks, timeout=config.timeout):
                        if not index:
                            index = get_build_index(config=config, subdir=subdir)
                        actions = get_install_actions(prefix, index, specs, config)
                        plan.display_actions(actions, index)
                        if utils.on_win:
                            # Windows requires str env values
                            for k, v in os.environ.items():
                                os.environ[k] = str(v)
                        plan.execute_actions(actions, index, verbose=config.debug)
                except (SystemExit, PaddingError, LinkError,
                        DependencyNeedsBuildingError, CondaError) as exc:
                    if (("too short in" in str(exc) or
                            re.search('post-link failed for: (?:[a-zA-Z]*::)?openssl',
                                      str(exc)) or
                            isinstance(exc, PaddingError)) and
                            config.prefix_length > 80):
                        if config.prefix_length_fallback:
                            log.warn("Build prefix failed with prefix length %d",
                                     config.prefix_length)
                            log.warn("Error was: ")
                            log.warn(str(exc))
                            log.warn("One or more of your package dependencies needs to be rebuilt "
                                     "with a longer prefix length.")
                            log.warn("Falling back to legacy prefix length of 80 characters.")
                            log.warn("Your package will not install into prefixes > 80 characters.")
                            config.prefix_length = 80

                            # Set this here and use to create environ
                            #   Setting this here is important because we use it below (symlink)
                            prefix = config.build_prefix
                            create_env(prefix, specs, config=config, subdir=subdir,
                                       clear_cache=clear_cache)
                        else:
                            raise
                    elif 'lock' in str(exc):
                        if retry < config.max_env_retry:
                            log.warn("failed to create env, retrying. exception was: %s",
                                     str(exc))
                            create_env(prefix, specs, config=config, subdir=subdir,
                                       clear_cache=clear_cache, retry=retry + 1)
                    elif ('requires a minimum conda version' in str(exc) or
                          'link a source that does not' in str(exc)):
                        # partial package extraction: walk up from the path in the
                        # message until we reach a known package cache dir
                        with utils.try_acquire_locks(locks, timeout=config.timeout):
                            pkg_dir = str(exc)
                            folder = 0
                            while os.path.dirname(pkg_dir) not in pkgs_dirs and folder < 20:
                                pkg_dir = os.path.dirname(pkg_dir)
                                folder += 1
                            log.warn("I think conda ended up with a partial extraction for %s. "
                                     "Removing the folder and retrying", pkg_dir)
                            if os.path.isdir(pkg_dir):
                                utils.rm_rf(pkg_dir)
                        if retry < config.max_env_retry:
                            log.warn("failed to create env, retrying. exception was: %s",
                                     str(exc))
                            create_env(prefix, specs, config=config, subdir=subdir,
                                       clear_cache=clear_cache, retry=retry + 1)
                        else:
                            log.error("Failed to create env, max retries exceeded.")
                            raise
                    else:
                        raise
                # HACK: some of the time, conda screws up somehow and incomplete packages result.
                #    Just retry.
                except (AssertionError, IOError, ValueError, RuntimeError, LockError) as exc:
                    if isinstance(exc, AssertionError):
                        with utils.try_acquire_locks(locks, timeout=config.timeout):
                            pkg_dir = os.path.dirname(os.path.dirname(str(exc)))
                            log.warn("I think conda ended up with a partial extraction for %s. "
                                     "Removing the folder and retrying", pkg_dir)
                            if os.path.isdir(pkg_dir):
                                utils.rm_rf(pkg_dir)
                    if retry < config.max_env_retry:
                        log.warn("failed to create env, retrying. exception was: %s",
                                 str(exc))
                        create_env(prefix, specs, config=config, subdir=subdir,
                                   clear_cache=clear_cache, retry=retry + 1)
                    else:
                        log.error("Failed to create env, max retries exceeded.")
                        raise

    if utils.on_win:
        shell = "cmd.exe"
    else:
        shell = "bash"
    symlink_conda(prefix, sys.prefix, shell)
def create_env(prefix, specs_or_actions, env, config, subdir, clear_cache=True, retry=0,
               locks=None, is_cross=False, is_conda=False):
    '''
    Create a conda environment for the given prefix and specs (or precomputed actions).

    :param prefix: filesystem path of the environment to create
    :param specs_or_actions: either a list of specs in MatchSpec format, or an
        already-computed conda "actions" dict; distinguished by ``hasattr(..., 'keys')``
    :param env: environment/variant context forwarded to ``get_install_actions``
    :param config: conda-build Config object (locking, timeouts, channels, retry limits)
    :param subdir: target platform subdir (e.g. ``win-64``) for index/solve
    :param clear_cache: forwarded through recursive retries (not used directly here)
    :param retry: current retry count; recursion stops at ``config.max_env_retry``
    :param locks: optional pre-acquired conda operation locks; created here if absent
    :param is_cross: forwarded through recursive retries
    :param is_conda: when True, skip symlinking conda into the new prefix
    '''
    if config.debug:
        external_logger_context = utils.LoggingContext(logging.DEBUG)
    else:
        external_logger_context = utils.LoggingContext(logging.WARN)

    with external_logger_context:
        log = utils.get_logger(__name__)

        # if os.path.isdir(prefix):
        #     utils.rm_rf(prefix)

        if specs_or_actions:  # Don't waste time if there is nothing to do
            log.debug("Creating environment in %s", prefix)
            log.debug(str(specs_or_actions))

            with utils.path_prepended(prefix):
                if not locks:
                    locks = utils.get_conda_operation_locks(config)
                try:
                    with utils.try_acquire_locks(locks, timeout=config.timeout):
                        # input is a list - it's specs in MatchSpec format
                        if not hasattr(specs_or_actions, 'keys'):
                            specs = list(set(specs_or_actions))
                            actions = get_install_actions(
                                prefix, tuple(specs), env, subdir=subdir,
                                verbose=config.verbose, debug=config.debug,
                                locking=config.locking,
                                bldpkgs_dirs=tuple(config.bldpkgs_dirs),
                                timeout=config.timeout,
                                disable_pip=config.disable_pip,
                                max_env_retry=config.max_env_retry,
                                output_folder=config.output_folder,
                                channel_urls=tuple(config.channel_urls))
                        else:
                            # caller already solved; use the actions dict as-is
                            actions = specs_or_actions
                        # NOTE(review): index_ts is unpacked but never used below
                        index, index_ts = get_build_index(
                            subdir=subdir, bldpkgs_dir=config.bldpkgs_dir,
                            output_folder=config.output_folder,
                            channel_urls=config.channel_urls,
                            debug=config.debug, verbose=config.verbose,
                            locking=config.locking, timeout=config.timeout)
                        utils.trim_empty_keys(actions)
                        display_actions(actions, index)
                        if utils.on_win:
                            # coerce env var values to str on Windows (py2 requires
                            # native str, not unicode, in the environment)
                            for k, v in os.environ.items():
                                os.environ[k] = str(v)
                        execute_actions(actions, index, verbose=config.debug)
                except (SystemExit, PaddingError, LinkError,
                        DependencyNeedsBuildingError, CondaError) as exc:
                    # Prefix-length failures: some packages (notably openssl post-link)
                    # cannot handle long placeholder prefixes. If allowed, fall back
                    # to the legacy 80-character prefix and retry once recursively.
                    if (("too short in" in str(exc) or
                         re.search('post-link failed for: (?:[a-zA-Z]*::)?openssl',
                                   str(exc)) or
                         isinstance(exc, PaddingError)) and
                            config.prefix_length > 80):
                        if config.prefix_length_fallback:
                            log.warn("Build prefix failed with prefix length %d",
                                     config.prefix_length)
                            log.warn("Error was: ")
                            log.warn(str(exc))
                            log.warn("One or more of your package dependencies needs to be rebuilt "
                                     "with a longer prefix length.")
                            log.warn("Falling back to legacy prefix length of 80 characters.")
                            log.warn("Your package will not install into prefixes > 80 characters.")
                            config.prefix_length = 80

                            # distinguish host vs build env by the '_h_env' marker in the path
                            host = '_h_env' in prefix
                            # Set this here and use to create environ
                            #   Setting this here is important because we use it below (symlink)
                            prefix = config.host_prefix if host else config.build_prefix
                            actions['PREFIX'] = prefix

                            create_env(prefix, actions, config=config, subdir=subdir, env=env,
                                       clear_cache=clear_cache, is_cross=is_cross)
                        else:
                            raise
                    elif 'lock' in str(exc):
                        # NOTE(review): when retries are exhausted here the exception is
                        # silently swallowed (no else/raise) — confirm this is intentional
                        if retry < config.max_env_retry:
                            log.warn("failed to create env, retrying. exception was: %s",
                                     str(exc))
                            create_env(prefix, actions, config=config, subdir=subdir, env=env,
                                       clear_cache=clear_cache, retry=retry + 1,
                                       is_cross=is_cross)
                    elif ('requires a minimum conda version' in str(exc) or
                          'link a source that does not' in str(exc)):
                        # The exception message contains a path inside a partially
                        # extracted package; walk up (max 20 levels) to the folder
                        # directly under a pkgs dir and remove it before retrying.
                        with utils.try_acquire_locks(locks, timeout=config.timeout):
                            pkg_dir = str(exc)
                            folder = 0
                            while os.path.dirname(pkg_dir) not in pkgs_dirs and folder < 20:
                                pkg_dir = os.path.dirname(pkg_dir)
                                folder += 1
                            log.warn("I think conda ended up with a partial extraction for %s. "
                                     "Removing the folder and retrying", pkg_dir)
                            if os.path.isdir(pkg_dir):
                                utils.rm_rf(pkg_dir)
                        if retry < config.max_env_retry:
                            log.warn("failed to create env, retrying. exception was: %s",
                                     str(exc))
                            create_env(prefix, actions, config=config, subdir=subdir, env=env,
                                       clear_cache=clear_cache, retry=retry + 1,
                                       is_cross=is_cross)
                        else:
                            log.error("Failed to create env, max retries exceeded.")
                            raise
                    else:
                        raise
                # HACK: some of the time, conda screws up somehow and incomplete packages result.
                #    Just retry.
                except (AssertionError, IOError, ValueError, RuntimeError, LockError) as exc:
                    if isinstance(exc, AssertionError):
                        # AssertionError text is assumed to contain a path two levels
                        # below the partially-extracted package folder
                        with utils.try_acquire_locks(locks, timeout=config.timeout):
                            pkg_dir = os.path.dirname(os.path.dirname(str(exc)))
                            log.warn("I think conda ended up with a partial extraction for %s. "
                                     "Removing the folder and retrying", pkg_dir)
                            if os.path.isdir(pkg_dir):
                                utils.rm_rf(pkg_dir)
                    if retry < config.max_env_retry:
                        log.warn("failed to create env, retrying. exception was: %s",
                                 str(exc))
                        create_env(prefix, actions, config=config, subdir=subdir, env=env,
                                   clear_cache=clear_cache, retry=retry + 1,
                                   is_cross=is_cross)
                    else:
                        log.error("Failed to create env, max retries exceeded.")
                        raise
    if not is_conda:
        # Symlinking conda is critical here to make sure that activate scripts are not
        # accidentally included in packages.
        if utils.on_win:
            shell = "cmd.exe"
        else:
            shell = "bash"
        symlink_conda(prefix, sys.prefix, shell)
def test(m, config, move_broken=True):
    '''
    Execute any test scripts for the given package.

    Builds a fresh test environment, generates per-language test files
    (Python/Perl/Lua/shell), writes a single ``conda_test_runner`` script that
    chains them, and runs it. On test failure, delegates to ``tests_failed``
    (which may move the broken package, per *move_broken*).

    :param m: Package's metadata.
    :type m: Metadata
    :param config: conda-build Config (prefixes, timeouts, interpreters).
    :param move_broken: forwarded to ``tests_failed`` on failure.
    '''
    if not os.path.isdir(config.build_folder):
        os.makedirs(config.build_folder)

    clean_pkg_cache(m.dist(), config.timeout)

    # Serialize against other conda-build processes operating on this build folder.
    with filelock.SoftFileLock(join(config.build_folder, ".conda_lock"),
                               timeout=config.timeout):
        tmp_dir = config.test_dir
        if not isdir(tmp_dir):
            os.makedirs(tmp_dir)
        create_files(tmp_dir, m, config)
        # Make Perl or Python-specific test files
        if m.name().startswith('perl-'):
            pl_files = create_pl_files(tmp_dir, m)
            py_files = False
            lua_files = False
        else:
            py_files = create_py_files(tmp_dir, m)
            pl_files = False
            lua_files = False
        shell_files = create_shell_files(tmp_dir, m, config)
        if not (py_files or shell_files or pl_files or lua_files):
            print("Nothing to test for:", m.dist())
            return

        print("TEST START:", m.dist())

        get_build_metadata(m, config=config)
        # the package under test itself, pinned exactly
        specs = ['%s %s %s' % (m.name(), m.version(), m.build_id())]

        # add packages listed in the run environment and test/requires
        specs.extend(ms.spec for ms in m.ms_depends('run'))
        specs += m.get_value('test/requires', [])

        if py_files:
            # as the tests are run by python, ensure that python is installed.
            # (If they already provided python as a run or test requirement,
            #  this won't hurt anything.)
            specs += ['python %s*' % environ.get_py_ver(config)]
        if pl_files:
            # as the tests are run by perl, we need to specify it
            specs += ['perl %s*' % environ.get_perl_ver(config)]
        if lua_files:
            # not sure how this shakes out
            specs += ['lua %s*' % environ.get_lua_ver(config)]

        # NOTE(review): this call passes neither env nor subdir, unlike the
        # create_env signature defined elsewhere in this file — verify the
        # intended create_env overload/version is the one being called here.
        create_env(config.test_prefix, specs, config=config)

        with path_prepended(config.test_prefix):
            env = dict(os.environ.copy())
            env.update(environ.get_dict(config=config, m=m, prefix=config.test_prefix))
            env["CONDA_BUILD_STATE"] = "TEST"

        if not config.activate:
            # prepend bin (or Scripts) directory
            env = prepend_bin_path(env, config.test_prefix, prepend_prefix=True)

            if on_win:
                env['PATH'] = config.test_prefix + os.pathsep + env['PATH']

        for varname in 'CONDA_PY', 'CONDA_NPY', 'CONDA_PERL', 'CONDA_LUA':
            env[varname] = str(getattr(config, varname) or '')

        # Python 2 Windows requires that envs variables be string, not unicode
        env = {str(key): str(value) for key, value in env.items()}
        suffix = "bat" if on_win else "sh"
        test_script = join(tmp_dir, "conda_test_runner.{suffix}".format(suffix=suffix))

        with open(test_script, 'w') as tf:
            if config.activate:
                ext = ".bat" if on_win else ""
                # activate the test env; squelch activation chatter per-platform
                tf.write("{source} {conda_root}activate{ext} {test_env} {squelch}\n".format(
                    conda_root=root_script_dir + os.path.sep,
                    source="call" if on_win else "source",
                    ext=ext,
                    test_env=config.test_prefix,
                    squelch=">nul 2>&1" if on_win else "&> /dev/null"))
                if on_win:
                    tf.write("if errorlevel 1 exit 1\n")
            if py_files:
                tf.write("{python} -s {test_file}\n".format(
                    python=config.test_python,
                    test_file=join(tmp_dir, 'run_test.py')))
                if on_win:
                    tf.write("if errorlevel 1 exit 1\n")
            if pl_files:
                tf.write("{perl} {test_file}\n".format(
                    perl=config.test_perl,
                    test_file=join(tmp_dir, 'run_test.pl')))
                if on_win:
                    tf.write("if errorlevel 1 exit 1\n")
            if lua_files:
                tf.write("{lua} {test_file}\n".format(
                    lua=config.test_lua,
                    test_file=join(tmp_dir, 'run_test.lua')))
                if on_win:
                    tf.write("if errorlevel 1 exit 1\n")
            if shell_files:
                test_file = join(tmp_dir, 'run_test.' + suffix)
                if on_win:
                    tf.write("call {test_file}\n".format(test_file=test_file))
                    # fix: dropped a redundant nested `if on_win:` here — this
                    # branch is already guarded by on_win, so the inner check
                    # was always true
                    tf.write("if errorlevel 1 exit 1\n")
                else:
                    # TODO: Run the test/commands here instead of in run_test.py
                    tf.write("{shell_path} -x -e {test_file}\n".format(
                        shell_path=shell_path, test_file=test_file))

        if on_win:
            cmd = ['cmd.exe', "/d", "/c", test_script]
        else:
            # -x: trace commands; -e: stop on first failure
            cmd = [shell_path, '-x', '-e', test_script]
        try:
            subprocess.check_call(cmd, env=env, cwd=tmp_dir)
        except subprocess.CalledProcessError:
            tests_failed(m, move_broken=move_broken, broken_dir=config.broken_dir,
                         config=config)

    print("TEST END:", m.dist())