def provide(recipe_dir, meta, patch=True):
    """
    Given a recipe_dir:
      - download (if necessary)
      - unpack
      - apply patches (if any)

    :param recipe_dir: directory containing the recipe (patches are resolved
        relative to it)
    :param meta: the recipe's ``source`` section as a dict-like object
    :param patch: when True, apply any patches listed under ``patches``
    """
    print("Removing old work directory")
    if sys.platform == 'win32':
        # Windows may hold locks on files in the work dir; move it aside
        # instead of deleting in place.
        if isdir(WORK_DIR):
            move_to_trash(WORK_DIR, '')
    else:
        rm_rf(WORK_DIR)

    # Exactly one source type is honored, in this priority order.
    if any(k in meta for k in ('fn', 'url')):
        unpack(meta)
    elif 'git_url' in meta:
        git_source(meta, recipe_dir)
    elif 'hg_url' in meta:
        hg_source(meta)
    elif 'svn_url' in meta:
        svn_source(meta)
    elif 'path' in meta:
        print("Copying %s to %s" % (abspath(join(recipe_dir,
                                                 meta.get('path'))),
                                    WORK_DIR))
        copytree(abspath(join(recipe_dir, meta.get('path'))), WORK_DIR)
    else:  # no source
        os.makedirs(WORK_DIR)

    if patch:
        src_dir = get_dir()
        # FIX: loop variable renamed from 'patch' — the original shadowed
        # (and clobbered) the boolean 'patch' parameter.
        for patch_file in meta.get('patches', []):
            apply_patch(src_dir, join(recipe_dir, patch_file))
def render_recipe(recipe_path, no_download_source, verbose, dirty=False):
    """
    Resolve a recipe path (directory or tarball) into parsed metadata.

    Returns a tuple ``(MetaData, need_download, need_reparse_in_env)`` as
    produced by ``parse_or_try_download``, or None for non-recipe files.

    :param recipe_path: path to a recipe directory or a recipe tarball
    :param no_download_source: forwarded to ``parse_or_try_download``
    :param verbose: forwarded to ``parse_or_try_download``
    :param dirty: when True, keep any existing work directory
    """
    if not isdir(config.croot):
        os.makedirs(config.croot)
    with Locked(config.croot):
        if not dirty:
            # Clear out the old work directory before rendering.
            if sys.platform == 'win32':
                # Windows: move aside rather than delete (files may be locked).
                if isdir(source.WORK_DIR):
                    move_to_trash(source.WORK_DIR, '')
            else:
                rm_rf(source.WORK_DIR)

            assert not isdir(source.WORK_DIR), ("Failed to clean work directory.  Please close open"
                                                " programs/terminals/folders and try again.")

        arg = recipe_path
        # Don't use byte literals for paths in Python 2
        if not PY3:
            arg = arg.decode(getpreferredencoding() or 'utf-8')
        if isfile(arg):
            if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                # Recipe shipped as a tarball: extract into a temp dir and
                # remember to clean it up afterwards.
                recipe_dir = tempfile.mkdtemp()
                t = tarfile.open(arg, 'r:*')
                t.extractall(path=recipe_dir)
                t.close()
                need_cleanup = True
            else:
                print("Ignoring non-recipe: %s" % arg)
                return
        else:
            recipe_dir = abspath(arg)
            need_cleanup = False

        if not isdir(recipe_dir):
            sys.exit("Error: no such directory: %s" % recipe_dir)

        try:
            m = MetaData(recipe_dir)
        except exceptions.YamlParsingError as e:
            sys.stderr.write(e.error_msg())
            sys.exit(1)

        m, need_download, need_reparse_in_env = parse_or_try_download(m,
                                                                      no_download_source=no_download_source,
                                                                      verbose=verbose,
                                                                      dirty=dirty)

        if need_cleanup:
            # Only remove the temp dir we created for a tarball recipe.
            shutil.rmtree(recipe_dir)

        return m, need_download, need_reparse_in_env
def provide(recipe_dir, meta, verbose=False, patch=True, dirty=False):
    """
    Given a recipe_dir:
      - download (if necessary)
      - unpack
      - apply patches (if any)

    :param recipe_dir: directory containing the recipe (patches are resolved
        relative to it)
    :param meta: the recipe's ``source`` section as a dict-like object
    :param verbose: forwarded to the individual source fetchers
    :param patch: when True, apply any patches listed under ``patches``
    :param dirty: when True, keep any existing work directory as-is
    """
    if not dirty:
        if sys.platform == 'win32':
            # Windows may hold locks on files; move aside instead of deleting.
            if isdir(WORK_DIR):
                move_to_trash(WORK_DIR, '')
        else:
            rm_rf(WORK_DIR)

    # Only fetch/unpack when the work dir is absent (a dirty build keeps it).
    if not os.path.exists(WORK_DIR):
        if any(k in meta for k in ('fn', 'url')):
            unpack(meta, verbose=verbose)
        elif 'git_url' in meta:
            git_source(meta, recipe_dir, verbose=verbose)
        # build to make sure we have a work directory with source in it. We
        # want to make sure that whatever version that is does not interfere
        # with the test we run next.
        elif 'hg_url' in meta:
            hg_source(meta, verbose=verbose)
        elif 'svn_url' in meta:
            svn_source(meta, verbose=verbose)
        elif 'path' in meta:
            if verbose:
                print("Copying %s to %s" % (abspath(join(recipe_dir,
                                                         meta.get('path'))),
                                            WORK_DIR))
            copytree(abspath(join(recipe_dir, meta.get('path'))), WORK_DIR)
        else:  # no source
            os.makedirs(WORK_DIR)

        if patch:
            src_dir = get_dir()
            # FIX: loop variable renamed from 'patch' — the original shadowed
            # (and clobbered) the boolean 'patch' parameter.
            for patch_file in meta.get('patches', []):
                apply_patch(src_dir, join(recipe_dir, patch_file))
def test(m, verbose=True, channel_urls=(), override_channels=False): ''' Execute any test scripts for the given package. :param m: Package's metadata. :type m: Metadata ''' # remove from package cache rm_pkgs_cache(m.dist()) tmp_dir = join(config.croot, 'test-tmp_dir') rm_rf(tmp_dir) os.makedirs(tmp_dir) create_files(tmp_dir, m) # Make Perl or Python-specific test files if m.name().startswith('perl-'): pl_files = create_pl_files(tmp_dir, m) py_files = False else: py_files = create_py_files(tmp_dir, m) pl_files = False shell_files = create_shell_files(tmp_dir, m) if not (py_files or shell_files or pl_files): print("Nothing to test for:", m.dist()) return print("TEST START:", m.dist()) if on_win: if isdir(config.build_prefix): move_to_trash(config.build_prefix, '') if isdir(config.test_prefix): move_to_trash(config.test_prefix, '') else: rm_rf(config.build_prefix) rm_rf(config.test_prefix) specs = ['%s %s %s' % (m.name(), m.version(), m.build_id())] # add packages listed in test/requires specs_include_python = False for spec in m.get_value('test/requires', []): specs.append(spec) if spec.startswith('python ') or spec == 'python': specs_include_python = True if py_files and not specs_include_python: # as the tests are run by python, we need to specify it specs += ['python %s*' % environ.get_py_ver()] if pl_files: # as the tests are run by perl, we need to specify it specs += ['perl %s*' % environ.get_perl_ver()] create_env(config.test_prefix, specs, verbose=verbose, channel_urls=channel_urls, override_channels=override_channels) env = dict(os.environ) # TODO: Include all the same environment variables that are used in # building. 
env.update(environ.get_dict(m, prefix=config.test_prefix)) # prepend bin (or Scripts) directory env['PATH'] = (join(config.test_prefix, bin_dirname) + os.pathsep + os.getenv('PATH')) if sys.platform == 'win32': env['PATH'] = config.test_prefix + os.pathsep + env['PATH'] for varname in 'CONDA_PY', 'CONDA_NPY', 'CONDA_PERL': env[varname] = str(getattr(config, varname) or '') env['PREFIX'] = config.test_prefix # Python 2 Windows requires that envs variables be string, not unicode env = {str(key): str(value) for key, value in env.items()} if py_files: try: subprocess.check_call([config.test_python, '-s', join(tmp_dir, 'run_test.py')], env=env, cwd=tmp_dir) except subprocess.CalledProcessError: tests_failed(m) if pl_files: try: subprocess.check_call([config.test_perl, join(tmp_dir, 'run_test.pl')], env=env, cwd=tmp_dir) except subprocess.CalledProcessError: tests_failed(m) if shell_files: if sys.platform == 'win32': test_file = join(tmp_dir, 'run_test.bat') cmd = [os.environ['COMSPEC'], '/c', 'call', test_file] try: subprocess.check_call(cmd, env=env, cwd=tmp_dir) except subprocess.CalledProcessError: tests_failed(m) else: test_file = join(tmp_dir, 'run_test.sh') # TODO: Run the test/commands here instead of in run_test.py cmd = ['/bin/bash', '-x', '-e', test_file] try: subprocess.check_call(cmd, env=env, cwd=tmp_dir) except subprocess.CalledProcessError: tests_failed(m) print("TEST END:", m.dist())
def build(m, get_src=True, verbose=True, post=None, channel_urls=(),
          override_channels=False, include_recipe=True):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :param get_src: Should we download the source?
    :type get_src: bool
    :type post: bool or None. None means run the whole build. True means run
    post only. False means stop just before the post.
    '''
    if (m.get_value('build/detect_binary_files_with_prefix') or
            m.binary_has_prefix_files()):
        # We must use a long prefix here as the package will only be
        # installable into prefixes shorter than this one.
        config.use_long_build_prefix = True
    else:
        # In case there are multiple builds in the same process
        config.use_long_build_prefix = False

    if m.skip():
        print("Skipped: The %s recipe defines build/skip for this "
              "configuration." % m.dist())
        sys.exit(0)

    if post in [False, None]:
        # Pre/post split: this branch does environment setup, source
        # provisioning, and runs the build script.
        print("Removing old build environment")
        if on_win:
            # Windows: move aside instead of deleting (files may be locked).
            if isdir(config.short_build_prefix):
                move_to_trash(config.short_build_prefix, '')
            if isdir(config.long_build_prefix):
                move_to_trash(config.long_build_prefix, '')
        else:
            rm_rf(config.short_build_prefix)
            rm_rf(config.long_build_prefix)
        print("Removing old work directory")
        if on_win:
            if isdir(source.WORK_DIR):
                move_to_trash(source.WORK_DIR, '')
        else:
            rm_rf(source.WORK_DIR)

        # Display the name only
        # Version number could be missing due to dependency on source info.
        print("BUILD START:", m.dist())
        create_env(config.build_prefix,
                   [ms.spec for ms in m.ms_depends('build')],
                   verbose=verbose, channel_urls=channel_urls,
                   override_channels=override_channels)

        if m.name() in [i.rsplit('-', 2)[0] for i in linked(config.build_prefix)]:
            # The package being built is itself installed as a build
            # dependency; remove it so the build starts clean.
            print("%s is installed as a build dependency. Removing."
                  % m.name())
            index = get_build_index(clear_cache=False, channel_urls=channel_urls,
                                    override_channels=override_channels)
            actions = plan.remove_actions(config.build_prefix, [m.name()], index=index)
            assert not plan.nothing_to_do(actions), actions
            plan.display_actions(actions, index)
            plan.execute_actions(actions, index)

        if get_src:
            source.provide(m.path, m.get_section('source'))
            # Parse our metadata again because we did not initialize the source
            # information before.
            m.parse_again()

        print("Package:", m.dist())

        assert isdir(source.WORK_DIR)
        src_dir = source.get_dir()
        contents = os.listdir(src_dir)
        if contents:
            print("source tree in:", src_dir)
        else:
            print("no source")

        rm_rf(config.info_dir)
        # Snapshot the prefix so we can later diff which files the build added.
        files1 = prefix_files()
        for pat in m.always_include_files():
            has_matches = False
            for f in set(files1):
                if fnmatch.fnmatch(f, pat):
                    print("Including in package existing file", f)
                    # Discarding from the "before" snapshot forces the file
                    # into the package even though it pre-existed.
                    files1.discard(f)
                    has_matches = True
            if not has_matches:
                sys.exit("Error: Glob %s from always_include_files does not match any files" % pat)
        # Save this for later
        with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
            f.write(u'\n'.join(sorted(list(files1))))
            f.write(u'\n')

        if sys.platform == 'win32':
            import conda_build.windows as windows
            windows.build(m)
        else:
            env = environ.get_dict(m)
            build_file = join(m.path, 'build.sh')

            script = m.get_value('build/script', None)
            if script:
                # An inline build/script in meta.yaml overrides build.sh.
                if isinstance(script, list):
                    script = '\n'.join(script)
                build_file = join(source.get_dir(), 'conda_build.sh')
                with open(build_file, 'w') as bf:
                    bf.write(script)
                os.chmod(build_file, 0o766)

            if isfile(build_file):
                cmd = ['/bin/bash', '-x', '-e', build_file]
                _check_call(cmd, env=env, cwd=src_dir)

    if post in [True, None]:
        if post == True:
            # Post-only run: restore the "before" snapshot saved earlier.
            with open(join(config.croot, 'prefix_files.txt'), 'r') as f:
                files1 = set(f.read().splitlines())

        get_build_metadata(m)
        create_post_scripts(m)
        create_entry_points(m.get_value('build/entry_points'))
        assert not exists(config.info_dir)
        files2 = prefix_files()

        post_process(sorted(files2 - files1),
                     preserve_egg_dir=bool(m.get_value('build/preserve_egg_dir')))

        # The post processing may have deleted some files (like easy-install.pth)
        files2 = prefix_files()
        if any(config.meta_dir in join(config.build_prefix, f) for f in files2 - files1):
            sys.exit(indent("""Error: Untracked file(s) %s found in conda-meta directory. This error usually comes from using conda in the build script. Avoid doing this, as it can lead to packages that include their dependencies.""" %
                            (tuple(f for f in files2 - files1
                                   if config.meta_dir in join(config.build_prefix, f)),)))
        post_build(m, sorted(files2 - files1))
        create_info_files(m, sorted(files2 - files1),
                          include_recipe=bool(m.path) and include_recipe)
        if m.get_value('build/noarch_python'):
            import conda_build.noarch_python as noarch_python
            noarch_python.transform(m, sorted(files2 - files1))

        files3 = prefix_files()
        fix_permissions(files3 - files1)

        # Package only the files the build (and post steps) added.
        path = bldpkg_path(m)
        t = tarfile.open(path, 'w:bz2')
        for f in sorted(files3 - files1):
            t.add(join(config.build_prefix, f), f)
        t.close()

        print("BUILD END:", m.dist())

        # we're done building, perform some checks
        tarcheck.check_all(path)
        update_index(config.bldpkgs_dir)
    else:
        print("STOPPING BUILD BEFORE POST:", m.dist())
def build(m, post=None, include_recipe=True, keep_old_work=False,
          need_source_download=True, need_reparse_in_env=False, verbose=True,
          dirty=False, activate=True, debug=False):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :type post: bool or None. None means run the whole build. True means run
    post only. False means stop just before the post.
    :type keep_old_work: bool: Keep any previous work directory.
    :type need_source_download: bool: if rendering failed to download source
    (due to missing tools), retry here after build env is populated
    :type need_reparse_in_env: bool: reparse metadata inside the build env
    :param dirty: keep existing work/environments where possible
    :param activate: run the build with the build env activated
    :param debug: forwarded to ``create_env``
    '''
    if (m.get_value('build/detect_binary_files_with_prefix') or
            m.binary_has_prefix_files()) and not on_win:
        # We must use a long prefix here as the package will only be
        # installable into prefixes shorter than this one.
        config.use_long_build_prefix = True
    else:
        # In case there are multiple builds in the same process
        config.use_long_build_prefix = False

    if m.skip():
        print("Skipped: The %s recipe defines build/skip for this "
              "configuration." % m.dist())
        return

    with Locked(cc.root_dir):
        # If --keep-old-work, then move the contents of source.WORK_DIR to a
        # temporary directory for the duration of the build.
        # The source unpacking procedure is too varied and complex
        # to allow this to be written cleanly (see source.get_dir() for example)
        if keep_old_work:
            old_WORK_DIR = tempfile.mkdtemp()
            old_sub_dirs = [name for name in os.listdir(source.WORK_DIR)
                            if os.path.isdir(os.path.join(source.WORK_DIR, name))]
            if len(old_sub_dirs):
                print("Keeping old work directory backup: %s => %s"
                      % (old_sub_dirs, old_WORK_DIR))
                for old_sub in old_sub_dirs:
                    shutil.move(os.path.join(source.WORK_DIR, old_sub), old_WORK_DIR)

        if post in [False, None]:
            print("Removing old build environment")
            print("BUILD START:", m.dist())
            if not need_source_download or not need_reparse_in_env:
                print(" (actual version deferred until further download or env creation)")
            if on_win:
                # Windows: move aside instead of deleting (files may be locked).
                if isdir(config.short_build_prefix):
                    move_to_trash(config.short_build_prefix, '')
                if isdir(config.long_build_prefix):
                    move_to_trash(config.long_build_prefix, '')
            else:
                rm_rf(config.short_build_prefix)
                rm_rf(config.long_build_prefix)

            specs = [ms.spec for ms in m.ms_depends('build')]
            if activate:
                # If we activate the build envrionment, we need to be sure that we
                # have the appropriate VCS available in the environment. People
                # are not used to explicitly listing it in recipes, though.
                # We add it for them here, but warn them about it.
                vcs_source = m.uses_vcs_in_build()
                if vcs_source and vcs_source not in specs:
                    vcs_executable = "hg" if vcs_source == "mercurial" else vcs_source
                    has_vcs_available = os.path.isfile(external.find_executable(vcs_executable) or "")
                    if not has_vcs_available:
                        if (vcs_source != "mercurial" or
                                not any(spec.startswith('python') and "3." in spec
                                        for spec in specs)):
                            specs.append(vcs_source)
                            # FIX: the original message contained a bare '{}'
                            # placeholder with no .format() call, so the
                            # warning printed a literal '{}'.
                            log.warn("Your recipe depends on {} at build time (for templates), "
                                     "but you have not listed it as a build dependency. Doing "
                                     "so for this build.".format(vcs_source))
                        else:
                            raise ValueError("Your recipe uses mercurial in build, but mercurial"
                                             " does not yet support Python 3. Please handle all of "
                                             "your mercurial actions outside of your build script.")

            # Display the name only
            # Version number could be missing due to dependency on source info.
            create_env(config.build_prefix, specs, debug=debug)

            if need_source_download:
                # Execute any commands fetching the source (e.g., git) in the _build environment.
                # This makes it possible to provide source fetchers (eg. git, hg, svn) as build
                # dependencies.
                if not activate:
                    _old_path = os.environ['PATH']
                    os.environ['PATH'] = prepend_bin_path({'PATH': _old_path},
                                                          config.build_prefix)['PATH']
                try:
                    m, need_source_download, need_reparse_in_env = parse_or_try_download(
                        m, no_download_source=False, force_download=True,
                        verbose=verbose, dirty=dirty)
                    assert not need_source_download, "Source download failed. Please investigate."
                finally:
                    if not activate:
                        os.environ['PATH'] = _old_path
                print("BUILD START:", m.dist())

            if need_reparse_in_env:
                reparse(m)
                print("BUILD START:", m.dist())

            if m.name() in [i.rsplit('-', 2)[0] for i in linked(config.build_prefix)]:
                # The package being built is itself installed as a build
                # dependency; remove it so the build starts clean.
                print("%s is installed as a build dependency. Removing."
                      % m.name())
                index = get_build_index(clear_cache=False)
                actions = plan.remove_actions(config.build_prefix, [m.name()], index=index)
                assert not plan.nothing_to_do(actions), actions
                plan.display_actions(actions, index)
                plan.execute_actions(actions, index)

            print("Package:", m.dist())

            src_dir = source.get_dir()
            if isdir(source.WORK_DIR):
                print("source tree in:", src_dir)
            else:
                print("no source - creating empty work folder")
                os.makedirs(source.WORK_DIR)

            rm_rf(config.info_dir)
            # Snapshot the prefix so we can diff which files the build added.
            files1 = prefix_files()
            for pat in m.always_include_files():
                has_matches = False
                for f in set(files1):
                    if fnmatch.fnmatch(f, pat):
                        print("Including in package existing file", f)
                        files1.discard(f)
                        has_matches = True
                if not has_matches:
                    log.warn("Glob %s from always_include_files does not match any files" % pat)
            # Save this for later
            with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
                f.write(u'\n'.join(sorted(list(files1))))
                f.write(u'\n')

            # Use script from recipe?
            script = m.get_value('build/script', None)
            if script:
                if isinstance(script, list):
                    script = '\n'.join(script)

            if isdir(source.WORK_DIR):
                if on_win:
                    build_file = join(m.path, 'bld.bat')
                    if script:
                        build_file = join(source.get_dir(), 'bld.bat')
                        with open(join(source.get_dir(), 'bld.bat'), 'w') as bf:
                            bf.write(script)
                    import conda_build.windows as windows
                    windows.build(m, build_file, dirty=dirty, activate=activate)
                else:
                    build_file = join(m.path, 'build.sh')

                    # There is no sense in trying to run an empty build script.
                    if isfile(build_file) or script:
                        env = environ.get_dict(m, dirty=dirty)
                        work_file = join(source.get_dir(), 'conda_build.sh')
                        if script:
                            with open(work_file, 'w') as bf:
                                bf.write(script)
                        if activate:
                            # Prepend an activation line to the script copy.
                            if isfile(build_file):
                                data = open(build_file).read()
                            else:
                                data = open(work_file).read()
                            with open(work_file, 'w') as bf:
                                bf.write("source activate {build_prefix}\n".format(
                                    build_prefix=config.build_prefix))
                                bf.write(data)
                        else:
                            if not isfile(work_file):
                                shutil.copy(build_file, work_file)
                        os.chmod(work_file, 0o766)

                        if isfile(work_file):
                            cmd = [shell_path, '-x', '-e', work_file]
                            _check_call(cmd, env=env, cwd=src_dir)

        if post in [True, None]:
            if post:
                # Post-only run: restore the "before" snapshot saved earlier.
                with open(join(config.croot, 'prefix_files.txt'), 'r') as f:
                    files1 = set(f.read().splitlines())

            get_build_metadata(m)
            create_post_scripts(m)
            create_entry_points(m.get_value('build/entry_points'))
            assert not exists(config.info_dir)
            files2 = prefix_files()

            post_process(sorted(files2 - files1),
                         preserve_egg_dir=bool(m.get_value('build/preserve_egg_dir')))

            # The post processing may have deleted some files (like easy-install.pth)
            files2 = prefix_files()
            if any(config.meta_dir in join(config.build_prefix, f) for f in files2 - files1):
                sys.exit(indent("""Error: Untracked file(s) %s found in conda-meta directory. This error usually comes from using conda in the build script. Avoid doing this, as it can lead to packages that include their dependencies.""" %
                                (tuple(f for f in files2 - files1
                                       if config.meta_dir in join(config.build_prefix, f)),)))
            post_build(m, sorted(files2 - files1))
            create_info_files(m, sorted(files2 - files1),
                              include_recipe=bool(m.path) and include_recipe)
            if m.get_value('build/noarch_python'):
                import conda_build.noarch_python as noarch_python
                noarch_python.transform(m, sorted(files2 - files1))

            files3 = prefix_files()
            fix_permissions(files3 - files1)

            path = bldpkg_path(m)
            t = tarfile.open(path, 'w:bz2')

            def order(f):
                # we don't care about empty files so send them back via 100000
                fsize = os.stat(join(config.build_prefix, f)).st_size or 100000
                # info/* records will be False == 0, others will be 1.
                info_order = int(os.path.dirname(f) != 'info')
                return info_order, fsize

            # add files in order of a) in info directory, b) increasing size so
            # we can access small manifest or json files without decompressing
            # possible large binary or data files
            for f in sorted(files3 - files1, key=order):
                t.add(join(config.build_prefix, f), f)
            t.close()

            print("BUILD END:", m.dist())

            # we're done building, perform some checks
            tarcheck.check_all(path)
            update_index(config.bldpkgs_dir)
        else:
            print("STOPPING BUILD BEFORE POST:", m.dist())

        if keep_old_work and len(old_sub_dirs):
            print("Restoring old work directory backup: %s :: %s => %s"
                  % (old_WORK_DIR, old_sub_dirs, source.WORK_DIR))
            for old_sub in old_sub_dirs:
                if os.path.exists(os.path.join(source.WORK_DIR, old_sub)):
                    print("Not restoring old source directory %s over new build's version" % (old_sub))
                else:
                    shutil.move(os.path.join(old_WORK_DIR, old_sub), source.WORK_DIR)
            shutil.rmtree(old_WORK_DIR, ignore_errors=True)
def test(m, move_broken=True):
    '''
    Execute any test scripts for the given package.

    :param m: Package's metadata.
    :type m: Metadata
    :param move_broken: forwarded to ``tests_failed`` on any test failure
    '''
    # remove from package cache
    rm_pkgs_cache(m.dist())

    # Fresh scratch directory holding the generated test scripts.
    tmp_dir = join(config.croot, 'test-tmp_dir')
    rm_rf(tmp_dir)
    os.makedirs(tmp_dir)
    create_files(tmp_dir, m)
    # Make Perl or Python-specific test files
    if m.name().startswith('perl-'):
        pl_files = create_pl_files(tmp_dir, m)
        py_files = False
    else:
        py_files = create_py_files(tmp_dir, m)
        pl_files = False
    shell_files = create_shell_files(tmp_dir, m)
    if not (py_files or shell_files or pl_files):
        print("Nothing to test for:", m.dist())
        return

    print("TEST START:", m.dist())
    if on_win:
        # Windows: move prefixes aside instead of deleting (locked files).
        if isdir(config.build_prefix):
            move_to_trash(config.build_prefix, '')
        if isdir(config.test_prefix):
            move_to_trash(config.test_prefix, '')
    else:
        rm_rf(config.build_prefix)
        rm_rf(config.test_prefix)

    get_build_metadata(m)
    # The package under test is the first spec of the test environment.
    specs = ['%s %s %s' % (m.name(), m.version(), m.build_id())]

    # add packages listed in test/requires
    specs += m.get_value('test/requires', [])

    if py_files:
        # as the tests are run by python, ensure that python is installed.
        # (If they already provided python as a run or test requirement, this won't hurt anything.)
        specs += ['python %s*' % environ.get_py_ver()]
    if pl_files:
        # as the tests are run by perl, we need to specify it
        specs += ['perl %s*' % environ.get_perl_ver()]

    create_env(config.test_prefix, specs)

    env = dict(os.environ)
    env.update(environ.get_dict(m, prefix=config.test_prefix))

    # prepend bin (or Scripts) directory
    env = prepend_bin_path(env, config.test_prefix, prepend_prefix=True)

    if sys.platform == 'win32':
        # On Windows executables/DLLs also live at the prefix root.
        env['PATH'] = config.test_prefix + os.pathsep + env['PATH']
    for varname in 'CONDA_PY', 'CONDA_NPY', 'CONDA_PERL':
        env[varname] = str(getattr(config, varname) or '')
    env['PREFIX'] = config.test_prefix

    # Python 2 Windows requires that envs variables be string, not unicode
    env = {str(key): str(value) for key, value in env.items()}
    if py_files:
        try:
            subprocess.check_call([config.test_python, '-s',
                                   join(tmp_dir, 'run_test.py')],
                                  env=env, cwd=tmp_dir)
        except subprocess.CalledProcessError:
            tests_failed(m, move_broken=move_broken)

    if pl_files:
        try:
            subprocess.check_call([config.test_perl,
                                   join(tmp_dir, 'run_test.pl')],
                                  env=env, cwd=tmp_dir)
        except subprocess.CalledProcessError:
            tests_failed(m, move_broken=move_broken)

    if shell_files:
        if sys.platform == 'win32':
            test_file = join(tmp_dir, 'run_test.bat')
            cmd = [os.environ['COMSPEC'], '/c', 'call', test_file]
            try:
                subprocess.check_call(cmd, env=env, cwd=tmp_dir)
            except subprocess.CalledProcessError:
                tests_failed(m, move_broken=move_broken)
        else:
            test_file = join(tmp_dir, 'run_test.sh')
            # TODO: Run the test/commands here instead of in run_test.py
            cmd = [shell_path, '-x', '-e', test_file]
            try:
                subprocess.check_call(cmd, env=env, cwd=tmp_dir)
            except subprocess.CalledProcessError:
                tests_failed(m, move_broken=move_broken)

    print("TEST END:", m.dist())
def test(m, move_broken=True):
    '''
    Execute any test scripts for the given package.

    :param m: Package's metadata.
    :type m: Metadata
    :param move_broken: forwarded to ``tests_failed`` on any test failure
    '''
    # remove from package cache
    rm_pkgs_cache(m.dist())

    # Fresh scratch directory holding the generated test scripts.
    tmp_dir = join(config.croot, 'test-tmp_dir')
    rm_rf(tmp_dir)
    if on_win:
        time.sleep(1)  # wait for rm_rf(tmp_dir) to finish before recreating tmp_dir
    os.makedirs(tmp_dir)
    create_files(tmp_dir, m)
    # Make Perl or Python-specific test files
    if m.name().startswith('perl-'):
        pl_files = create_pl_files(tmp_dir, m)
        py_files = False
        lua_files = False
    else:
        py_files = create_py_files(tmp_dir, m)
        pl_files = False
        lua_files = False
    shell_files = create_shell_files(tmp_dir, m)
    if not (py_files or shell_files or pl_files or lua_files):
        print("Nothing to test for:", m.dist())
        return

    print("TEST START:", m.dist())
    if on_win:
        # Windows: move prefixes aside instead of deleting (locked files).
        if isdir(config.build_prefix):
            move_to_trash(config.build_prefix, '')
        if isdir(config.test_prefix):
            move_to_trash(config.test_prefix, '')
    else:
        rm_rf(config.build_prefix)
        rm_rf(config.test_prefix)

    get_build_metadata(m)
    # The package under test is the first spec of the test environment.
    specs = ['%s %s %s' % (m.name(), m.version(), m.build_id())]

    # add packages listed in test/requires
    specs += m.get_value('test/requires', [])

    if py_files:
        # as the tests are run by python, ensure that python is installed.
        # (If they already provided python as a run or test requirement, this won't hurt anything.)
        specs += ['python %s*' % environ.get_py_ver()]
    if pl_files:
        # as the tests are run by perl, we need to specify it
        specs += ['perl %s*' % environ.get_perl_ver()]
    if lua_files:
        # not sure how this shakes out
        specs += ['lua %s*' % environ.get_lua_ver()]

    create_env(config.test_prefix, specs)

    env = dict(os.environ)
    env.update(environ.get_dict(m, prefix=config.test_prefix))

    # prepend bin (or Scripts) directory
    env = prepend_bin_path(env, config.test_prefix, prepend_prefix=True)

    if sys.platform == 'win32':
        # On Windows executables/DLLs also live at the prefix root.
        env['PATH'] = config.test_prefix + os.pathsep + env['PATH']
    for varname in 'CONDA_PY', 'CONDA_NPY', 'CONDA_PERL', 'CONDA_LUA':
        env[varname] = str(getattr(config, varname) or '')
    env['PREFIX'] = config.test_prefix

    # Python 2 Windows requires that envs variables be string, not unicode
    env = {str(key): str(value) for key, value in env.items()}
    if py_files:
        try:
            subprocess.check_call([config.test_python, '-s',
                                   join(tmp_dir, 'run_test.py')],
                                  env=env, cwd=tmp_dir)
        except subprocess.CalledProcessError:
            tests_failed(m, move_broken=move_broken)

    if pl_files:
        try:
            subprocess.check_call([config.test_perl,
                                   join(tmp_dir, 'run_test.pl')],
                                  env=env, cwd=tmp_dir)
        except subprocess.CalledProcessError:
            tests_failed(m, move_broken=move_broken)

    if lua_files:
        try:
            subprocess.check_call([config.test_lua,
                                   join(tmp_dir, 'run_test.lua')],
                                  env=env, cwd=tmp_dir)
        except subprocess.CalledProcessError:
            # FIX: pass move_broken through, consistent with every other
            # failure path in this function (the original dropped it here).
            tests_failed(m, move_broken=move_broken)

    if shell_files:
        if sys.platform == 'win32':
            test_file = join(tmp_dir, 'run_test.bat')
            cmd = [os.environ['COMSPEC'], '/c', 'call', test_file]
            try:
                subprocess.check_call(cmd, env=env, cwd=tmp_dir)
            except subprocess.CalledProcessError:
                tests_failed(m, move_broken=move_broken)
        else:
            test_file = join(tmp_dir, 'run_test.sh')
            # TODO: Run the test/commands here instead of in run_test.py
            cmd = [shell_path, '-x', '-e', test_file]
            try:
                subprocess.check_call(cmd, env=env, cwd=tmp_dir)
            except subprocess.CalledProcessError:
                tests_failed(m, move_broken=move_broken)

    print("TEST END:", m.dist())
def build(m, get_src=True, post=None, include_recipe=True):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :param get_src: Should we download the source?
    :type get_src: bool
    :type post: bool or None. None means run the whole build. True means run
    post only. False means stop just before the post.
    '''
    if (m.get_value('build/detect_binary_files_with_prefix') or
            m.binary_has_prefix_files()):
        # We must use a long prefix here as the package will only be
        # installable into prefixes shorter than this one.
        config.use_long_build_prefix = True
    else:
        # In case there are multiple builds in the same process
        config.use_long_build_prefix = False

    if m.skip():
        print("Skipped: The %s recipe defines build/skip for this "
              "configuration." % m.dist())
        return

    if post in [False, None]:
        # Pre/post split: this branch does environment setup, source
        # provisioning, and runs the build script.
        print("Removing old build environment")
        if on_win:
            # Windows: move aside instead of deleting (files may be locked).
            if isdir(config.short_build_prefix):
                move_to_trash(config.short_build_prefix, '')
            if isdir(config.long_build_prefix):
                move_to_trash(config.long_build_prefix, '')
        else:
            rm_rf(config.short_build_prefix)
            rm_rf(config.long_build_prefix)
        print("Removing old work directory")
        if on_win:
            if isdir(source.WORK_DIR):
                move_to_trash(source.WORK_DIR, '')
        else:
            rm_rf(source.WORK_DIR)

        # Display the name only
        # Version number could be missing due to dependency on source info.
        print("BUILD START:", m.dist())
        create_env(config.build_prefix,
                   [ms.spec for ms in m.ms_depends('build')])

        if m.name() in [i.rsplit('-', 2)[0] for i in linked(config.build_prefix)]:
            # The package being built is itself installed as a build
            # dependency; remove it so the build starts clean.
            print("%s is installed as a build dependency. Removing." % m.name())
            index = get_build_index(clear_cache=False)
            actions = plan.remove_actions(config.build_prefix, [m.name()], index=index)
            assert not plan.nothing_to_do(actions), actions
            plan.display_actions(actions, index)
            plan.execute_actions(actions, index)

        if get_src:
            source.provide(m.path, m.get_section('source'))
            # Parse our metadata again because we did not initialize the source
            # information before.
            # By now, all jinja variables should be defined, so don't permit undefined vars.
            m.parse_again(permit_undefined_jinja=False)

        print("Package:", m.dist())

        assert isdir(source.WORK_DIR)
        src_dir = source.get_dir()
        contents = os.listdir(src_dir)
        if contents:
            print("source tree in:", src_dir)
        else:
            print("no source")

        rm_rf(config.info_dir)
        # Snapshot the prefix so we can later diff which files the build added.
        files1 = prefix_files()
        for pat in m.always_include_files():
            has_matches = False
            for f in set(files1):
                if fnmatch.fnmatch(f, pat):
                    print("Including in package existing file", f)
                    files1.discard(f)
                    has_matches = True
            if not has_matches:
                sys.exit("Error: Glob %s from always_include_files does not match any files" % pat)
        # Save this for later
        with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
            f.write(u'\n'.join(sorted(list(files1))))
            f.write(u'\n')

        # Use script from recipe?
        script = m.get_value('build/script', None)
        if script:
            # An inline build/script in meta.yaml overrides build.sh/bld.bat.
            if isinstance(script, list):
                script = '\n'.join(script)

        if sys.platform == 'win32':
            build_file = join(m.path, 'bld.bat')
            if script:
                build_file = join(source.get_dir(), 'bld.bat')
                with open(join(source.get_dir(), 'bld.bat'), 'w') as bf:
                    bf.write(script)
            import conda_build.windows as windows
            windows.build(m, build_file)
        else:
            env = environ.get_dict(m)
            build_file = join(m.path, 'build.sh')
            if script:
                build_file = join(source.get_dir(), 'conda_build.sh')
                with open(build_file, 'w') as bf:
                    bf.write(script)
                os.chmod(build_file, 0o766)

            if isfile(build_file):
                cmd = [shell_path, '-x', '-e', build_file]
                _check_call(cmd, env=env, cwd=src_dir)

    if post in [True, None]:
        if post == True:
            # Post-only run: restore the "before" snapshot saved earlier.
            with open(join(config.croot, 'prefix_files.txt'), 'r') as f:
                files1 = set(f.read().splitlines())

        get_build_metadata(m)
        create_post_scripts(m)
        create_entry_points(m.get_value('build/entry_points'))
        assert not exists(config.info_dir)
        files2 = prefix_files()

        post_process(sorted(files2 - files1),
                     preserve_egg_dir=bool(m.get_value('build/preserve_egg_dir')))

        # The post processing may have deleted some files (like easy-install.pth)
        files2 = prefix_files()
        if any(config.meta_dir in join(config.build_prefix, f) for f in files2 - files1):
            sys.exit(indent("""Error: Untracked file(s) %s found in conda-meta directory. This error usually comes from using conda in the build script. Avoid doing this, as it can lead to packages that include their dependencies.""" %
                            (tuple(f for f in files2 - files1
                                   if config.meta_dir in join(config.build_prefix, f)),)))
        post_build(m, sorted(files2 - files1))
        create_info_files(m, sorted(files2 - files1),
                          include_recipe=bool(m.path) and include_recipe)
        if m.get_value('build/noarch_python'):
            import conda_build.noarch_python as noarch_python
            noarch_python.transform(m, sorted(files2 - files1))

        files3 = prefix_files()
        fix_permissions(files3 - files1)

        # Package only the files the build (and post steps) added.
        path = bldpkg_path(m)
        t = tarfile.open(path, 'w:bz2')
        for f in sorted(files3 - files1):
            t.add(join(config.build_prefix, f), f)
        t.close()

        print("BUILD END:", m.dist())

        # we're done building, perform some checks
        tarcheck.check_all(path)
        update_index(config.bldpkgs_dir)
    else:
        print("STOPPING BUILD BEFORE POST:", m.dist())
def build(m, post=None, include_recipe=True, keep_old_work=False,
          need_source_download=True, verbose=True, dirty=False):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :type post: bool or None. None means run the whole build. True means run
        post only. False means stop just before the post.
    :type keep_old_work: bool: Keep any previous work directory.
    :type need_source_download: bool: if rendering failed to download source
        (due to missing tools), retry here after build env is populated
    '''
    if (m.get_value('build/detect_binary_files_with_prefix') or
            m.binary_has_prefix_files()) and not on_win:
        # We must use a long prefix here as the package will only be
        # installable into prefixes shorter than this one.
        config.use_long_build_prefix = True
    else:
        # In case there are multiple builds in the same process
        config.use_long_build_prefix = False

    if m.skip():
        print("Skipped: The %s recipe defines build/skip for this "
              "configuration." % m.dist())
        return

    with Locked(cc.root_dir):
        # If --keep-old-work, then move the contents of source.WORK_DIR to a
        # temporary directory for the duration of the build.
        # The source unpacking procedure is too varied and complex
        # to allow this to be written cleanly (see source.get_dir() for example)
        if keep_old_work:
            # old_WORK_DIR / old_sub_dirs are only bound when keep_old_work is
            # truthy; the restore block at the end guards on the same flag.
            old_WORK_DIR = tempfile.mkdtemp()
            old_sub_dirs = [name for name in os.listdir(source.WORK_DIR)
                            if os.path.isdir(os.path.join(source.WORK_DIR, name))]
            if len(old_sub_dirs):
                print("Keeping old work directory backup: %s => %s"
                      % (old_sub_dirs, old_WORK_DIR))
                for old_sub in old_sub_dirs:
                    shutil.move(os.path.join(source.WORK_DIR, old_sub), old_WORK_DIR)

        # post=None means "whole build"; False means "stop before post";
        # True means "post-processing only" (skips this whole branch).
        if post in [False, None]:
            print("Removing old build environment")
            print("BUILD START:", m.dist())
            if on_win:
                # On Windows the prefixes may have open handles; move_to_trash
                # sidesteps "directory in use" failures.
                if isdir(config.short_build_prefix):
                    move_to_trash(config.short_build_prefix, '')
                if isdir(config.long_build_prefix):
                    move_to_trash(config.long_build_prefix, '')
            else:
                rm_rf(config.short_build_prefix)
                rm_rf(config.long_build_prefix)

            # Display the name only
            # Version number could be missing due to dependency on source info.
            create_env(config.build_prefix,
                       [ms.spec for ms in m.ms_depends('build')])

            if need_source_download:
                # Execute any commands fetching the source (e.g., git) in the
                # _build environment. This makes it possible to provide source
                # fetchers (eg. git, hg, svn) as build dependencies.
                _old_path = os.environ['PATH']
                try:
                    os.environ['PATH'] = prepend_bin_path({'PATH': _old_path},
                                                          config.build_prefix)['PATH']
                    # NOTE(review): HEAD's render_recipe unpacks THREE values
                    # from parse_or_try_download; here only two are unpacked —
                    # confirm which arity this file's helper actually returns.
                    m, need_source_download = parse_or_try_download(m,
                                                                   no_download_source=False,
                                                                   force_download=True,
                                                                   verbose=verbose,
                                                                   dirty=dirty)
                    assert not need_source_download, "Source download failed. Please investigate."
                finally:
                    os.environ['PATH'] = _old_path

            # If the package being built is itself installed in the build env
            # (matched by name from "<name>-<version>-<build>"), remove it.
            if m.name() in [i.rsplit('-', 2)[0] for i in linked(config.build_prefix)]:
                print("%s is installed as a build dependency. Removing." % m.name())
                index = get_build_index(clear_cache=False)
                actions = plan.remove_actions(config.build_prefix, [m.name()], index=index)
                assert not plan.nothing_to_do(actions), actions
                plan.display_actions(actions, index)
                plan.execute_actions(actions, index)

            print("Package:", m.dist())

            assert isdir(source.WORK_DIR)
            src_dir = source.get_dir()
            contents = os.listdir(src_dir)
            if contents:
                print("source tree in:", src_dir)
            else:
                print("no source")

            rm_rf(config.info_dir)
            # Snapshot of files already present in the prefix before the build
            # script runs; everything NOT in this set becomes package content.
            files1 = prefix_files()
            for pat in m.always_include_files():
                # always_include_files globs are removed from the baseline so
                # pre-existing matches are packaged anyway.
                has_matches = False
                for f in set(files1):
                    if fnmatch.fnmatch(f, pat):
                        print("Including in package existing file", f)
                        files1.discard(f)
                        has_matches = True
                if not has_matches:
                    sys.exit("Error: Glob %s from always_include_files does not match any files" % pat)
            # Save this for later
            with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
                f.write(u'\n'.join(sorted(list(files1))))
                f.write(u'\n')

            # Use script from recipe?
            script = m.get_value('build/script', None)
            if script:
                if isinstance(script, list):
                    script = '\n'.join(script)

            if sys.platform == 'win32':
                build_file = join(m.path, 'bld.bat')
                if script:
                    # An inline build/script overrides the recipe's bld.bat.
                    build_file = join(source.get_dir(), 'bld.bat')
                    with open(join(source.get_dir(), 'bld.bat'), 'w') as bf:
                        bf.write(script)
                import conda_build.windows as windows
                windows.build(m, build_file, dirty=dirty)
            else:
                env = environ.get_dict(m, dirty=dirty)
                build_file = join(m.path, 'build.sh')

                if script:
                    # An inline build/script overrides the recipe's build.sh.
                    build_file = join(source.get_dir(), 'conda_build.sh')
                    with open(build_file, 'w') as bf:
                        bf.write(script)
                    os.chmod(build_file, 0o766)

                if isfile(build_file):
                    cmd = [shell_path, '-x', '-e', build_file]
                    _check_call(cmd, env=env, cwd=src_dir)

        if post in [True, None]:
            if post:
                # post-only run: restore the pre-build snapshot saved above.
                with open(join(config.croot, 'prefix_files.txt'), 'r') as f:
                    files1 = set(f.read().splitlines())

            get_build_metadata(m)
            create_post_scripts(m)
            create_entry_points(m.get_value('build/entry_points'))
            assert not exists(config.info_dir)
            files2 = prefix_files()

            post_process(sorted(files2 - files1),
                         preserve_egg_dir=bool(m.get_value('build/preserve_egg_dir')))

            # The post processing may have deleted some files (like easy-install.pth)
            files2 = prefix_files()
            if any(config.meta_dir in join(config.build_prefix, f) for f in files2 - files1):
                sys.exit(indent("""Error: Untracked file(s) %s found in conda-meta directory.
This error usually comes from using conda in the build script. Avoid
doing this, as it can lead to packages that include their dependencies.""" %
                    (tuple(f for f in files2 - files1
                           if config.meta_dir in join(config.build_prefix, f)),)))
            post_build(m, sorted(files2 - files1))
            create_info_files(m, sorted(files2 - files1),
                              include_recipe=bool(m.path) and include_recipe)
            if m.get_value('build/noarch_python'):
                import conda_build.noarch_python as noarch_python
                noarch_python.transform(m, sorted(files2 - files1))

            files3 = prefix_files()
            fix_permissions(files3 - files1)

            path = bldpkg_path(m)
            t = tarfile.open(path, 'w:bz2')

            def order(f):
                # Sort key: info/ entries first, then by ascending size.
                # we don't care about empty files so send them back via 100000
                fsize = os.stat(join(config.build_prefix, f)).st_size or 100000
                # info/* records will be False == 0, others will be 1.
                info_order = int(os.path.dirname(f) != 'info')
                return info_order, fsize

            # add files in order of a) in info directory, b) increasing size so
            # we can access small manifest or json files without decompressing
            # possible large binary or data files
            for f in sorted(files3 - files1, key=order):
                t.add(join(config.build_prefix, f), f)
            t.close()

            print("BUILD END:", m.dist())

            # we're done building, perform some checks
            tarcheck.check_all(path)
            update_index(config.bldpkgs_dir)
        else:
            print("STOPPING BUILD BEFORE POST:", m.dist())

        # Restore any work-dir contents stashed at the top, without clobbering
        # directories the new build created under the same names.
        if keep_old_work and len(old_sub_dirs):
            print("Restoring old work directory backup: %s :: %s => %s"
                  % (old_WORK_DIR, old_sub_dirs, source.WORK_DIR))
            for old_sub in old_sub_dirs:
                if os.path.exists(os.path.join(source.WORK_DIR, old_sub)):
                    print("Not restoring old source directory %s over new build's version" % (old_sub))
                else:
                    shutil.move(os.path.join(old_WORK_DIR, old_sub), source.WORK_DIR)
            shutil.rmtree(old_WORK_DIR, ignore_errors=True)
def test(m, verbose=True, channel_urls=(), override_channels=False): ''' Execute any test scripts for the given package. :param m: Package's metadata. :type m: Metadata ''' # remove from package cache rm_pkgs_cache(m.dist()) tmp_dir = join(config.croot, 'test-tmp_dir') rm_rf(tmp_dir) os.makedirs(tmp_dir) create_files(tmp_dir, m) # Make Perl or Python-specific test files if m.name().startswith('perl-'): pl_files = create_pl_files(tmp_dir, m) py_files = False else: py_files = create_py_files(tmp_dir, m) pl_files = False shell_files = create_shell_files(tmp_dir, m) if not (py_files or shell_files or pl_files): print("Nothing to test for:", m.dist()) return print("TEST START:", m.dist()) if on_win: if isdir(config.build_prefix): move_to_trash(config.build_prefix, '') if isdir(config.test_prefix): move_to_trash(config.test_prefix, '') else: rm_rf(config.build_prefix) rm_rf(config.test_prefix) specs = ['%s %s %s' % (m.name(), m.version(), m.build_id())] # add packages listed in test/requires specs_include_python = False for spec in m.get_value('test/requires', []): specs.append(spec) if spec.startswith('python ') or spec == 'python': specs_include_python = True if py_files and not specs_include_python: # as the tests are run by python, we need to specify it specs += ['python %s*' % environ.get_py_ver()] if pl_files: # as the tests are run by perl, we need to specify it specs += ['perl %s*' % environ.get_perl_ver()] create_env(config.test_prefix, specs, verbose=verbose, channel_urls=channel_urls, override_channels=override_channels) env = dict(os.environ) # TODO: Include all the same environment variables that are used in # building. 
env.update(environ.get_dict(m, prefix=config.test_prefix)) # prepend bin (or Scripts) directory env['PATH'] = (join(config.test_prefix, bin_dirname) + os.pathsep + os.getenv('PATH')) if sys.platform == 'win32': env['PATH'] = config.test_prefix + os.pathsep + env['PATH'] for varname in 'CONDA_PY', 'CONDA_NPY', 'CONDA_PERL': env[varname] = str(getattr(config, varname) or '') env['PREFIX'] = config.test_prefix # Python 2 Windows requires that envs variables be string, not unicode env = {str(key): str(value) for key, value in env.items()} if py_files: try: subprocess.check_call( [config.test_python, '-s', join(tmp_dir, 'run_test.py')], env=env, cwd=tmp_dir) except subprocess.CalledProcessError: tests_failed(m) if pl_files: try: subprocess.check_call( [config.test_perl, join(tmp_dir, 'run_test.pl')], env=env, cwd=tmp_dir) except subprocess.CalledProcessError: tests_failed(m) if shell_files: if sys.platform == 'win32': test_file = join(tmp_dir, 'run_test.bat') cmd = [os.environ['COMSPEC'], '/c', 'call', test_file] try: subprocess.check_call(cmd, env=env, cwd=tmp_dir) except subprocess.CalledProcessError: tests_failed(m) else: test_file = join(tmp_dir, 'run_test.sh') # TODO: Run the test/commands here instead of in run_test.py cmd = ['/bin/bash', '-x', '-e', test_file] try: subprocess.check_call(cmd, env=env, cwd=tmp_dir) except subprocess.CalledProcessError: tests_failed(m) print("TEST END:", m.dist())
def test(m, move_broken=True, activate=True, debug=False):
    '''
    Execute any test scripts for the given package.

    Creates a scratch test directory, writes per-language test drivers
    (python/perl/lua/shell), builds a fresh test environment, then runs a
    single generated runner script (batch on Windows, shell elsewhere).

    :param m: Package's metadata.
    :type m: Metadata
    :param move_broken: forwarded to tests_failed() when the runner fails.
    :param activate: when True the runner script activates the test prefix
        itself; when False the test prefix's bin dir is prepended to PATH.
    :param debug: forwarded to create_env().
    '''
    with Locked(cc.root_dir):
        # remove from package cache
        rm_pkgs_cache(m.dist())

        tmp_dir = join(config.croot, 'test-tmp_dir')
        rm_rf(tmp_dir)
        if on_win:
            # wait for rm_rf(tmp_dir) to finish before recreating tmp_dir
            time.sleep(1)
        os.makedirs(tmp_dir)
        create_files(tmp_dir, m)
        # Make Perl or Python-specific test files
        if m.name().startswith('perl-'):
            pl_files = create_pl_files(tmp_dir, m)
            py_files = False
            lua_files = False
        else:
            py_files = create_py_files(tmp_dir, m)
            pl_files = False
            lua_files = False
        shell_files = create_shell_files(tmp_dir, m)
        if not (py_files or shell_files or pl_files or lua_files):
            print("Nothing to test for:", m.dist())
            return

        print("TEST START:", m.dist())
        if on_win:
            # On Windows, in-use prefixes cannot be removed directly.
            if isdir(config.build_prefix):
                move_to_trash(config.build_prefix, '')
            if isdir(config.test_prefix):
                move_to_trash(config.test_prefix, '')
        else:
            rm_rf(config.build_prefix)
            rm_rf(config.test_prefix)

        get_build_metadata(m)
        # The package under test is pinned exactly: "name version build".
        specs = ['%s %s %s' % (m.name(), m.version(), m.build_id())]

        # add packages listed in the run environment and test/requires
        specs.extend(ms.spec for ms in m.ms_depends('run'))
        specs += m.get_value('test/requires', [])

        if py_files:
            # as the tests are run by python, ensure that python is installed.
            # (If they already provided python as a run or test requirement,
            # this won't hurt anything.)
            specs += ['python %s*' % environ.get_py_ver()]
        if pl_files:
            # as the tests are run by perl, we need to specify it
            specs += ['perl %s*' % environ.get_perl_ver()]
        if lua_files:
            # not sure how this shakes out
            specs += ['lua %s*' % environ.get_lua_ver()]

        create_env(config.test_prefix, specs, debug=debug)

        env = dict(os.environ.copy())
        env.update(environ.get_dict(m, prefix=config.test_prefix))

        if not activate:
            # prepend bin (or Scripts) directory
            env = prepend_bin_path(env, config.test_prefix, prepend_prefix=True)

            if on_win:
                env['PATH'] = config.test_prefix + os.pathsep + env['PATH']

        for varname in 'CONDA_PY', 'CONDA_NPY', 'CONDA_PERL', 'CONDA_LUA':
            env[varname] = str(getattr(config, varname) or '')
        env['PREFIX'] = config.test_prefix

        # Python 2 Windows requires that envs variables be string, not unicode
        env = {str(key): str(value) for key, value in env.items()}
        suffix = "bat" if on_win else "sh"
        test_script = join(tmp_dir, "conda_test_runner.{suffix}".format(suffix=suffix))

        with open(test_script, 'w') as tf:
            if activate:
                # Local renamed from `source` to avoid shadowing the
                # module-level `source` import.
                activate_verb = "call " if on_win else "source "
                ext = ".bat" if on_win else ""
                tf.write("{source}activate{ext} {test_env}\n".format(
                    source=activate_verb, ext=ext, test_env=config.test_prefix))
                if on_win:
                    tf.write("if errorlevel 1 exit 1\n")
            if py_files:
                tf.write("{python} -s {test_file}\n".format(
                    python=config.test_python,
                    test_file=join(tmp_dir, 'run_test.py')))
                if on_win:
                    tf.write("if errorlevel 1 exit 1\n")
            if pl_files:
                # FIX: format string uses {perl} but the keyword was `python=`,
                # which raised KeyError('perl') whenever a perl test ran.
                tf.write("{perl} {test_file}\n".format(
                    perl=config.test_perl,
                    test_file=join(tmp_dir, 'run_test.pl')))
                if on_win:
                    tf.write("if errorlevel 1 exit 1\n")
            if lua_files:
                # FIX: same KeyError ({lua} vs python=), and the interpreter
                # was wrongly taken from config.test_perl. Assumes config
                # defines test_lua (CONDA_LUA is read above) -- TODO confirm.
                tf.write("{lua} {test_file}\n".format(
                    lua=config.test_lua,
                    test_file=join(tmp_dir, 'run_test.lua')))
                if on_win:
                    tf.write("if errorlevel 1 exit 1\n")
            if shell_files:
                test_file = join(tmp_dir, 'run_test.' + suffix)
                if on_win:
                    tf.write("call {test_file}\n".format(test_file=test_file))
                    tf.write("if errorlevel 1 exit 1\n")
                else:
                    # TODO: Run the test/commands here instead of in run_test.py
                    tf.write("{shell_path} -x -e {test_file}\n".format(
                        shell_path=shell_path, test_file=test_file))
        if on_win:
            cmd = [env["COMSPEC"], "/d", "/c", test_script]
        else:
            cmd = [shell_path, '-x', '-e', test_script]
        try:
            subprocess.check_call(cmd, env=env, cwd=tmp_dir)
        except subprocess.CalledProcessError:
            tests_failed(m, move_broken=move_broken)

        print("TEST END:", m.dist())
def test(m, move_broken=True, activate=True, debug=False):
    '''
    Execute any test scripts for the given package.

    Creates a scratch test directory, writes per-language test drivers
    (python/perl/lua/shell), builds a fresh test environment, then runs a
    single generated runner script (batch on Windows, shell elsewhere).

    :param m: Package's metadata.
    :type m: Metadata
    :param move_broken: forwarded to tests_failed() when the runner fails.
    :param activate: when True the runner script activates the test prefix
        itself; when False the test prefix's bin dir is prepended to PATH.
    :param debug: forwarded to create_env().
    '''
    with Locked(cc.root_dir):
        # remove from package cache
        rm_pkgs_cache(m.dist())

        tmp_dir = join(config.croot, 'test-tmp_dir')
        rm_rf(tmp_dir)
        if on_win:
            time.sleep(1)  # wait for rm_rf(tmp_dir) to finish before recreating tmp_dir
        os.makedirs(tmp_dir)
        create_files(tmp_dir, m)
        # Make Perl or Python-specific test files
        if m.name().startswith('perl-'):
            pl_files = create_pl_files(tmp_dir, m)
            py_files = False
            lua_files = False
        else:
            py_files = create_py_files(tmp_dir, m)
            pl_files = False
            lua_files = False
        shell_files = create_shell_files(tmp_dir, m)
        if not (py_files or shell_files or pl_files or lua_files):
            print("Nothing to test for:", m.dist())
            return

        print("TEST START:", m.dist())
        if on_win:
            # On Windows, in-use prefixes cannot be removed directly.
            if isdir(config.build_prefix):
                move_to_trash(config.build_prefix, '')
            if isdir(config.test_prefix):
                move_to_trash(config.test_prefix, '')
        else:
            rm_rf(config.build_prefix)
            rm_rf(config.test_prefix)

        get_build_metadata(m)
        # The package under test is pinned exactly: "name version build".
        specs = ['%s %s %s' % (m.name(), m.version(), m.build_id())]

        # add packages listed in the run environment and test/requires
        specs.extend(ms.spec for ms in m.ms_depends('run'))
        specs += m.get_value('test/requires', [])

        if py_files:
            # as the tests are run by python, ensure that python is installed.
            # (If they already provided python as a run or test requirement,
            # this won't hurt anything.)
            specs += ['python %s*' % environ.get_py_ver()]
        if pl_files:
            # as the tests are run by perl, we need to specify it
            specs += ['perl %s*' % environ.get_perl_ver()]
        if lua_files:
            # not sure how this shakes out
            specs += ['lua %s*' % environ.get_lua_ver()]

        create_env(config.test_prefix, specs, debug=debug)

        env = dict(os.environ.copy())
        env.update(environ.get_dict(m, prefix=config.test_prefix))

        if not activate:
            # prepend bin (or Scripts) directory
            env = prepend_bin_path(env, config.test_prefix, prepend_prefix=True)

            if on_win:
                env['PATH'] = config.test_prefix + os.pathsep + env['PATH']

        for varname in 'CONDA_PY', 'CONDA_NPY', 'CONDA_PERL', 'CONDA_LUA':
            env[varname] = str(getattr(config, varname) or '')
        env['PREFIX'] = config.test_prefix

        # Python 2 Windows requires that envs variables be string, not unicode
        env = {str(key): str(value) for key, value in env.items()}
        suffix = "bat" if on_win else "sh"
        test_script = join(tmp_dir, "conda_test_runner.{suffix}".format(suffix=suffix))

        with open(test_script, 'w') as tf:
            if activate:
                # Local renamed from `source` to avoid shadowing the
                # module-level `source` import.
                activate_verb = "call " if on_win else "source "
                ext = ".bat" if on_win else ""
                tf.write("{source}activate{ext} {test_env}\n".format(
                    source=activate_verb, ext=ext, test_env=config.test_prefix))
                if on_win:
                    tf.write("if errorlevel 1 exit 1\n")
            if py_files:
                tf.write("{python} -s {test_file}\n".format(
                    python=config.test_python,
                    test_file=join(tmp_dir, 'run_test.py')))
                if on_win:
                    tf.write("if errorlevel 1 exit 1\n")
            if pl_files:
                # FIX: format string uses {perl} but the keyword was `python=`,
                # which raised KeyError('perl') whenever a perl test ran.
                tf.write("{perl} {test_file}\n".format(
                    perl=config.test_perl,
                    test_file=join(tmp_dir, 'run_test.pl')))
                if on_win:
                    tf.write("if errorlevel 1 exit 1\n")
            if lua_files:
                # FIX: same KeyError ({lua} vs python=), and the interpreter
                # was wrongly taken from config.test_perl. Assumes config
                # defines test_lua (CONDA_LUA is read above) -- TODO confirm.
                tf.write("{lua} {test_file}\n".format(
                    lua=config.test_lua,
                    test_file=join(tmp_dir, 'run_test.lua')))
                if on_win:
                    tf.write("if errorlevel 1 exit 1\n")
            if shell_files:
                test_file = join(tmp_dir, 'run_test.' + suffix)
                if on_win:
                    tf.write("call {test_file}\n".format(test_file=test_file))
                    tf.write("if errorlevel 1 exit 1\n")
                else:
                    # TODO: Run the test/commands here instead of in run_test.py
                    tf.write("{shell_path} -x -e {test_file}\n".format(
                        shell_path=shell_path, test_file=test_file))
        if on_win:
            cmd = [env["COMSPEC"], "/d", "/c", test_script]
        else:
            cmd = [shell_path, '-x', '-e', test_script]
        try:
            subprocess.check_call(cmd, env=env, cwd=tmp_dir)
        except subprocess.CalledProcessError:
            tests_failed(m, move_broken=move_broken)

        print("TEST END:", m.dist())
def build(m, verbose=True, channel_urls=(), override_channels=False, wheel_dir="./build"):
    '''
    Build the package with the specified metadata.

    Sets up a clean build environment, fetches the recipe's source, runs the
    build_wheel.sh script (or a generated `python setup.py bdist_wheel`), and
    copies any produced wheels from <src>/dist into *wheel_dir*.

    :param m: Package metadata
    :type m: Metadata
    :param verbose: forwarded to create_env.
    :param channel_urls: extra channels for create_env / get_build_index.
    :param override_channels: forwarded to create_env / get_build_index.
    :param wheel_dir: destination directory for the built wheel files.
    '''
    if (m.get_value('build/detect_binary_files_with_prefix') or
            m.binary_has_prefix_files()):
        # We must use a long prefix here as the package will only be
        # installable into prefixes shorter than this one.
        config.use_long_build_prefix = True
    else:
        # In case there are multiple builds in the same process
        config.use_long_build_prefix = False

    if m.skip():
        print("Skipped: The %s recipe defines build/skip for this "
              "configuration." % m.dist())
        return

    print("Removing old build environment")
    if on_win:
        # On Windows the prefixes may have open handles; move_to_trash
        # sidesteps "directory in use" failures.
        if isdir(config.short_build_prefix):
            move_to_trash(config.short_build_prefix, '')
        if isdir(config.long_build_prefix):
            move_to_trash(config.long_build_prefix, '')
    else:
        rm_rf(config.short_build_prefix)
        rm_rf(config.long_build_prefix)

    print("Removing old work directory")
    if on_win:
        if isdir(source.WORK_DIR):
            move_to_trash(source.WORK_DIR, '')
    else:
        rm_rf(source.WORK_DIR)

    # Display the name only
    # Version number could be missing due to dependency on source info.
    print("BUILD START:", m.dist())
    create_env(config.build_prefix,
               [ms.spec for ms in m.ms_depends('build')],
               verbose=verbose, channel_urls=channel_urls,
               override_channels=override_channels)

    # If the package being built is itself installed in the build env
    # (matched by name from "<name>-<version>-<build>"), remove it first.
    if m.name() in [i.rsplit('-', 2)[0] for i in linked(config.build_prefix)]:
        print("%s is installed as a build dependency. Removing." % m.name())
        index = get_build_index(clear_cache=False, channel_urls=channel_urls,
                                override_channels=override_channels)
        actions = plan.remove_actions(config.build_prefix, [m.name()], index=index)
        assert not plan.nothing_to_do(actions), actions
        plan.display_actions(actions, index)
        plan.execute_actions(actions, index)

    # download source code...
    source.provide(m.path, m.get_section('source'))

    # Parse our metadata again because we did not initialize the source
    # information before.
    m.parse_again()

    print("Package:", m.dist())

    assert isdir(source.WORK_DIR)
    src_dir = source.get_dir()
    contents = os.listdir(src_dir)
    if contents:
        print("source tree in:", src_dir)
    else:
        print("no source")

    rm_rf(config.info_dir)
    # Snapshot of files already present in the prefix before building.
    files1 = prefix_files()
    for pat in m.always_include_files():
        has_matches = False
        for f in set(files1):
            if fnmatch.fnmatch(f, pat):
                print("Including in package existing file", f)
                files1.discard(f)
                has_matches = True
        if not has_matches:
            sys.exit("Error: Glob %s from always_include_files does not match any files" % pat)
    # Save this for later
    with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
        f.write(u'\n'.join(sorted(list(files1))))
        f.write(u'\n')

    print("Source dir: %s" % src_dir)
    if sys.platform == 'win32':
        windows_build(m)
    else:
        env = environ.get_dict(m)
        build_file = join(m.path, 'build_wheel.sh')

        if not isfile(build_file):
            # No recipe-supplied script: generate a minimal one in the source
            # tree that runs the standard bdist_wheel command.
            print("Using plain 'python setup.py bdist_wheel' as build script")
            build_file = join(src_dir, 'build_wheel.sh')
            with open(build_file, 'w') as fo:
                fo.write('\n')
                fo.write('# Autogenerated build command:\n')
                fo.write('python setup.py bdist_wheel\n')
                fo.write('\n')

        cmd = [shell_path, '-x', '-e', build_file]
        _check_call(cmd, env=env, cwd=src_dir)

    all_wheels = glob(join(src_dir, "dist", '*.whl'))
    if len(all_wheels) == 0:
        print("No wheels produced!")
    else:
        # FIX: was `== 1`, which printed the multi-wheel warning exactly when
        # a single wheel was produced (and never when there were several).
        if len(all_wheels) > 1:
            print("More than one wheel produced!")
        try:
            os.makedirs(wheel_dir)
            print("Created wheel dir: %s:" % wheel_dir)
        except OSError:
            # Already existing wheel_dir is fine; re-raise anything else.
            if not isdir(wheel_dir):
                raise
        print("Copying to %s:" % wheel_dir)
        for wheel in all_wheels:
            shutil.copy(wheel, wheel_dir)
            print(" %s" % basename(wheel))
def test(m, verbose=True, channel_urls=(), override_channels=False): """ Execute any test scripts for the given package. :param m: Package's metadata. :type m: Metadata """ # remove from package cache rm_pkgs_cache(m.dist()) tmp_dir = join(config.croot, "test-tmp_dir") rm_rf(tmp_dir) os.makedirs(tmp_dir) create_files(tmp_dir, m) # Make Perl or Python-specific test files if m.name().startswith("perl-"): pl_files = create_pl_files(tmp_dir, m) py_files = False else: py_files = create_py_files(tmp_dir, m) pl_files = False shell_files = create_shell_files(tmp_dir, m) if not (py_files or shell_files or pl_files): print("Nothing to test for:", m.dist()) return print("TEST START:", m.dist()) if on_win: if isdir(config.build_prefix): move_to_trash(config.build_prefix, "") if isdir(config.test_prefix): move_to_trash(config.test_prefix, "") else: rm_rf(config.build_prefix) rm_rf(config.test_prefix) specs = ["%s %s %s" % (m.name(), m.version(), m.build_id())] # add packages listed in test/requires specs += m.get_value("test/requires", []) if py_files: # as the tests are run by python, ensure that python is installed. # (If they already provided python as a run or test requirement, this won't hurt anything.) specs += ["python"] if pl_files: # as the tests are run by perl, we need to specify it specs += ["perl %s*" % environ.get_perl_ver()] create_env( config.test_prefix, specs, verbose=verbose, channel_urls=channel_urls, override_channels=override_channels ) env = dict(os.environ) # TODO: Include all the same environment variables that are used in # building. 
env.update(environ.get_dict(m, prefix=config.test_prefix)) # prepend bin (or Scripts) directory env["PATH"] = join(config.test_prefix, bin_dirname) + os.pathsep + os.getenv("PATH") if sys.platform == "win32": env["PATH"] = config.test_prefix + os.pathsep + env["PATH"] for varname in "CONDA_PY", "CONDA_NPY", "CONDA_PERL": env[varname] = str(getattr(config, varname) or "") env["PREFIX"] = config.test_prefix # Python 2 Windows requires that envs variables be string, not unicode env = {str(key): str(value) for key, value in env.items()} if py_files: try: subprocess.check_call([config.test_python, "-s", join(tmp_dir, "run_test.py")], env=env, cwd=tmp_dir) except subprocess.CalledProcessError: tests_failed(m) if pl_files: try: subprocess.check_call([config.test_perl, join(tmp_dir, "run_test.pl")], env=env, cwd=tmp_dir) except subprocess.CalledProcessError: tests_failed(m) if shell_files: if sys.platform == "win32": test_file = join(tmp_dir, "run_test.bat") cmd = [os.environ["COMSPEC"], "/c", "call", test_file] try: subprocess.check_call(cmd, env=env, cwd=tmp_dir) except subprocess.CalledProcessError: tests_failed(m) else: test_file = join(tmp_dir, "run_test.sh") # TODO: Run the test/commands here instead of in run_test.py cmd = ["/bin/bash", "-x", "-e", test_file] try: subprocess.check_call(cmd, env=env, cwd=tmp_dir) except subprocess.CalledProcessError: tests_failed(m) print("TEST END:", m.dist())
def build(m, verbose=True, channel_urls=(), override_channels=False, wheel_dir="./build"):
    '''
    Build the package with the specified metadata.

    Sets up a clean build environment, fetches the recipe's source, runs the
    build_wheel.sh script (or a generated `python setup.py bdist_wheel`), and
    copies any produced wheels from <src>/dist into *wheel_dir*.

    :param m: Package metadata
    :type m: Metadata
    :param verbose: forwarded to create_env.
    :param channel_urls: extra channels for create_env / get_build_index.
    :param override_channels: forwarded to create_env / get_build_index.
    :param wheel_dir: destination directory for the built wheel files.
    '''
    if (m.get_value('build/detect_binary_files_with_prefix') or
            m.binary_has_prefix_files()):
        # We must use a long prefix here as the package will only be
        # installable into prefixes shorter than this one.
        config.use_long_build_prefix = True
    else:
        # In case there are multiple builds in the same process
        config.use_long_build_prefix = False

    if m.skip():
        print("Skipped: The %s recipe defines build/skip for this "
              "configuration." % m.dist())
        return

    print("Removing old build environment")
    if on_win:
        # On Windows the prefixes may have open handles; move_to_trash
        # sidesteps "directory in use" failures.
        if isdir(config.short_build_prefix):
            move_to_trash(config.short_build_prefix, '')
        if isdir(config.long_build_prefix):
            move_to_trash(config.long_build_prefix, '')
    else:
        rm_rf(config.short_build_prefix)
        rm_rf(config.long_build_prefix)

    print("Removing old work directory")
    if on_win:
        if isdir(source.WORK_DIR):
            move_to_trash(source.WORK_DIR, '')
    else:
        rm_rf(source.WORK_DIR)

    # Display the name only
    # Version number could be missing due to dependency on source info.
    print("BUILD START:", m.dist())
    create_env(config.build_prefix,
               [ms.spec for ms in m.ms_depends('build')],
               verbose=verbose, channel_urls=channel_urls,
               override_channels=override_channels)

    # If the package being built is itself installed in the build env
    # (matched by name from "<name>-<version>-<build>"), remove it first.
    if m.name() in [i.rsplit('-', 2)[0] for i in linked(config.build_prefix)]:
        print("%s is installed as a build dependency. Removing." % m.name())
        index = get_build_index(clear_cache=False, channel_urls=channel_urls,
                                override_channels=override_channels)
        actions = plan.remove_actions(config.build_prefix, [m.name()], index=index)
        assert not plan.nothing_to_do(actions), actions
        plan.display_actions(actions, index)
        plan.execute_actions(actions, index)

    # download source code...
    source.provide(m.path, m.get_section('source'))

    # Parse our metadata again because we did not initialize the source
    # information before.
    m.parse_again()

    print("Package:", m.dist())

    assert isdir(source.WORK_DIR)
    src_dir = source.get_dir()
    contents = os.listdir(src_dir)
    if contents:
        print("source tree in:", src_dir)
    else:
        print("no source")

    rm_rf(config.info_dir)
    # Snapshot of files already present in the prefix before building.
    files1 = prefix_files()
    for pat in m.always_include_files():
        has_matches = False
        for f in set(files1):
            if fnmatch.fnmatch(f, pat):
                print("Including in package existing file", f)
                files1.discard(f)
                has_matches = True
        if not has_matches:
            sys.exit("Error: Glob %s from always_include_files does not match any files" % pat)
    # Save this for later
    with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
        f.write(u'\n'.join(sorted(list(files1))))
        f.write(u'\n')

    print("Source dir: %s" % src_dir)
    if sys.platform == 'win32':
        windows_build(m)
    else:
        env = environ.get_dict(m)
        build_file = join(m.path, 'build_wheel.sh')

        if not isfile(build_file):
            # No recipe-supplied script: generate a minimal one in the source
            # tree that runs the standard bdist_wheel command.
            print("Using plain 'python setup.py bdist_wheel' as build script")
            build_file = join(src_dir, 'build_wheel.sh')
            with open(build_file, 'w') as fo:
                fo.write('\n')
                fo.write('# Autogenerated build command:\n')
                fo.write('python setup.py bdist_wheel\n')
                fo.write('\n')

        cmd = [shell_path, '-x', '-e', build_file]
        _check_call(cmd, env=env, cwd=src_dir)

    all_wheels = glob(join(src_dir, "dist", '*.whl'))
    if len(all_wheels) == 0:
        print("No wheels produced!")
    else:
        # FIX: was `== 1`, which printed the multi-wheel warning exactly when
        # a single wheel was produced (and never when there were several).
        if len(all_wheels) > 1:
            print("More than one wheel produced!")
        try:
            os.makedirs(wheel_dir)
            print("Created wheel dir: %s:" % wheel_dir)
        except OSError:
            # Already existing wheel_dir is fine; re-raise anything else.
            if not isdir(wheel_dir):
                raise
        print("Copying to %s:" % wheel_dir)
        for wheel in all_wheels:
            shutil.copy(wheel, wheel_dir)
            print(" %s" % basename(wheel))