def run_setuppy(src_dir, temp_dir, args):
    '''
    Patch distutils and then run setup.py in a subprocess.

    :param src_dir: Directory containing the source code
    :type src_dir: str
    :param temp_dir: Temporary directory for storing pkginfo.yaml
    :type temp_dir: str
    '''
    # Do everything in the build env in case the setup.py install goes
    # haywire.
    # TODO: Try with another version of Python if this one fails. Some
    # packages are Python 2 or Python 3 only.
    create_env(build_prefix, ['python %s*' % args.python_version, 'pyyaml',
                              'setuptools'], clear_cache=False)
    stdlib_dir = join(build_prefix,
                      'Lib' if sys.platform == 'win32'
                      else 'lib/python%s' % args.python_version)

    patch = join(temp_dir, 'pypi-distutils.patch')
    with open(patch, 'w') as f:
        f.write(DISTUTILS_PATCH.format(temp_dir.replace('\\', '\\\\')))

    if exists(join(stdlib_dir, 'distutils', 'core.py-copy')):
        rm_rf(join(stdlib_dir, 'distutils', 'core.py'))
        copy2(join(stdlib_dir, 'distutils', 'core.py-copy'),
              join(stdlib_dir, 'distutils', 'core.py'))
        # Avoid race conditions. Invalidate the cache.
        if PY3:
            rm_rf(join(stdlib_dir, 'distutils', '__pycache__',
                       'core.cpython-%s%s.pyc' % sys.version_info[:2]))
            rm_rf(join(stdlib_dir, 'distutils', '__pycache__',
                       'core.cpython-%s%s.pyo' % sys.version_info[:2]))
        else:
            rm_rf(join(stdlib_dir, 'distutils', 'core.pyc'))
            rm_rf(join(stdlib_dir, 'distutils', 'core.pyo'))
    else:
        copy2(join(stdlib_dir, 'distutils', 'core.py'),
              join(stdlib_dir, 'distutils', 'core.py-copy'))

    apply_patch(join(stdlib_dir, 'distutils'), patch)

    # Save PYTHONPATH for later
    env = os.environ.copy()
    if 'PYTHONPATH' in env:
        env[str('PYTHONPATH')] = str(src_dir + ':' + env['PYTHONPATH'])
    else:
        env[str('PYTHONPATH')] = str(src_dir)
    cwd = getcwd()
    chdir(src_dir)
    args = [build_python, 'setup.py', 'install']
    try:
        subprocess.check_call(args, env=env)
    except subprocess.CalledProcessError:
        print('$PYTHONPATH = %s' % env['PYTHONPATH'])
        sys.exit('Error: command failed: %s' % ' '.join(args))
    finally:
        chdir(cwd)
def run_setuppy(src_dir, temp_dir, python_version, setup_options):
    '''
    Patch distutils and then run setup.py in a subprocess.

    :param src_dir: Directory containing the source code
    :type src_dir: str
    :param temp_dir: Temporary directory for storing pkginfo.yaml
    :type temp_dir: str
    '''
    # Do everything in the build env in case the setup.py install goes
    # haywire.
    # TODO: Try with another version of Python if this one fails. Some
    # packages are Python 2 or Python 3 only.
    create_env(config.build_prefix, ['python %s*' % python_version, 'pyyaml',
                                     'yaml', 'setuptools', 'numpy'],
               clear_cache=False)
    stdlib_dir = join(config.build_prefix,
                      'Lib' if sys.platform == 'win32'
                      else 'lib/python%s' % python_version)

    patch = join(temp_dir, 'pypi-distutils.patch')
    with open(patch, 'w') as f:
        f.write(DISTUTILS_PATCH.format(temp_dir.replace('\\', '\\\\')))

    if exists(join(stdlib_dir, 'distutils', 'core.py-copy')):
        rm_rf(join(stdlib_dir, 'distutils', 'core.py'))
        copy2(join(stdlib_dir, 'distutils', 'core.py-copy'),
              join(stdlib_dir, 'distutils', 'core.py'))
        # Avoid race conditions. Invalidate the cache.
        if PY3:
            rm_rf(join(stdlib_dir, 'distutils', '__pycache__',
                       'core.cpython-%s%s.pyc' % sys.version_info[:2]))
            rm_rf(join(stdlib_dir, 'distutils', '__pycache__',
                       'core.cpython-%s%s.pyo' % sys.version_info[:2]))
        else:
            rm_rf(join(stdlib_dir, 'distutils', 'core.pyc'))
            rm_rf(join(stdlib_dir, 'distutils', 'core.pyo'))
    else:
        copy2(join(stdlib_dir, 'distutils', 'core.py'),
              join(stdlib_dir, 'distutils', 'core.py-copy'))

    apply_patch(join(stdlib_dir, 'distutils'), patch)

    # Save PYTHONPATH for later
    env = os.environ.copy()
    if 'PYTHONPATH' in env:
        env[str('PYTHONPATH')] = str(src_dir + ':' + env['PYTHONPATH'])
    else:
        env[str('PYTHONPATH')] = str(src_dir)
    cwd = getcwd()
    chdir(src_dir)
    cmdargs = [config.build_python, 'setup.py', 'install']
    cmdargs.extend(setup_options)
    try:
        subprocess.check_call(cmdargs, env=env)
    except subprocess.CalledProcessError:
        print('$PYTHONPATH = %s' % env['PYTHONPATH'])
        sys.exit('Error: command failed: %s' % ' '.join(cmdargs))
    finally:
        chdir(cwd)
def run_setuppy(src_dir, temp_dir, python_version, config, setup_options):
    """
    Patch distutils and then run setup.py in a subprocess.

    :param src_dir: Directory containing the source code
    :type src_dir: str
    :param temp_dir: Temporary directory for storing pkginfo.yaml
    :type temp_dir: str
    """
    specs = ["python %s*" % python_version, "pyyaml", "setuptools"]
    with open(os.path.join(src_dir, "setup.py")) as setup:
        text = setup.read()
        if "import numpy" in text or "from numpy" in text:
            specs.append("numpy")
    # Do everything in the build env in case the setup.py install goes
    # haywire.
    # TODO: Try with another version of Python if this one fails. Some
    # packages are Python 2 or Python 3 only.
    create_env(config.build_prefix, specs=specs, clear_cache=False,
               config=config)
    stdlib_dir = join(config.build_prefix,
                      "Lib" if sys.platform == "win32"
                      else "lib/python%s" % python_version)

    patch = join(temp_dir, "pypi-distutils.patch")
    with open(patch, "w") as f:
        f.write(DISTUTILS_PATCH.format(temp_dir.replace("\\", "\\\\")))

    if exists(join(stdlib_dir, "distutils", "core.py-copy")):
        rm_rf(join(stdlib_dir, "distutils", "core.py"))
        copy2(join(stdlib_dir, "distutils", "core.py-copy"),
              join(stdlib_dir, "distutils", "core.py"))
        # Avoid race conditions. Invalidate the cache.
        if PY3:
            rm_rf(join(stdlib_dir, "distutils", "__pycache__",
                       "core.cpython-%s%s.pyc" % sys.version_info[:2]))
            rm_rf(join(stdlib_dir, "distutils", "__pycache__",
                       "core.cpython-%s%s.pyo" % sys.version_info[:2]))
        else:
            rm_rf(join(stdlib_dir, "distutils", "core.pyc"))
            rm_rf(join(stdlib_dir, "distutils", "core.pyo"))
    else:
        copy2(join(stdlib_dir, "distutils", "core.py"),
              join(stdlib_dir, "distutils", "core.py-copy"))

    apply_patch(join(stdlib_dir, "distutils"), patch, config=config)

    # Save PYTHONPATH for later
    env = os.environ.copy()
    if "PYTHONPATH" in env:
        env[str("PYTHONPATH")] = str(src_dir + ":" + env["PYTHONPATH"])
    else:
        env[str("PYTHONPATH")] = str(src_dir)
    cwd = getcwd()
    chdir(src_dir)
    cmdargs = [config.build_python, "setup.py", "install"]
    cmdargs.extend(setup_options)
    try:
        subprocess.check_call(cmdargs, env=env)
    except subprocess.CalledProcessError:
        print("$PYTHONPATH = %s" % env["PYTHONPATH"])
        sys.exit("Error: command failed: %s" % " ".join(cmdargs))
    finally:
        chdir(cwd)
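# Hedged usage sketch (not part of the original source): shows how the
# config-aware run_setuppy variant above might be driven. The helper name
# `example_get_pkginfo` is hypothetical; the assumption that the patched
# distutils writes pkginfo.yaml into temp_dir comes from the docstring and
# may not match the real layout.
import tempfile


def example_get_pkginfo(src_dir, config, python_version="3.5"):
    # Create a scratch directory, run setup.py under the patched distutils,
    # and return where pkginfo.yaml is expected to land.
    temp_dir = tempfile.mkdtemp()
    run_setuppy(src_dir, temp_dir, python_version,
                config=config, setup_options=[])
    return os.path.join(temp_dir, "pkginfo.yaml")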
def test_env_creation_with_prefix_fallback_disabled():
    test_base = os.path.expanduser("~/cbtmp")
    config = api.Config(croot=test_base, anaconda_upload=False, verbose=True,
                        prefix_length_fallback=False)
    recipe_path = os.path.join(metadata_dir, "has_prefix_files")
    metadata, _, _ = api.render(recipe_path, config=config)
    metadata.meta['package']['name'] = 'test_env_creation_with_short_prefix'
    fn = api.get_output_file_path(metadata)
    if os.path.isfile(fn):
        os.remove(fn)
    config.prefix_length = 80

    with pytest.raises((SystemExit, PaddingError, LinkError)):
        api.build(metadata)
        pkg_name = os.path.basename(fn).replace("-1.0-0.tar.bz2", "")
        assert not api.inspect_prefix_length(fn, 255)
        config.prefix_length = 255
        build.create_env(config.build_prefix, specs=["python", pkg_name],
                         config=config)
def test_env_creation_with_short_prefix_does_not_deadlock(caplog):
    test_base = os.path.expanduser("~/cbtmp")
    config = api.Config(croot=test_base, anaconda_upload=False, verbose=True)
    recipe_path = os.path.join(metadata_dir, "has_prefix_files")
    metadata, _, _ = api.render(recipe_path, config=config)
    metadata.meta['package']['name'] = 'test_env_creation_with_short_prefix'
    fn = api.get_output_file_path(metadata)
    if os.path.isfile(fn):
        os.remove(fn)
    config.prefix_length = 80
    try:
        api.build(metadata)
        pkg_name = os.path.basename(fn).replace("-1.0-0.tar.bz2", "")
        assert not api.inspect_prefix_length(fn, 255)
        config.prefix_length = 255
        build.create_env(config.build_prefix, specs=["python", pkg_name],
                         config=config)
    except:
        raise
    finally:
        rm_rf(test_base)
    assert 'One or more of your package dependencies needs to be rebuilt' in caplog.text
def test_environment_creation_preserves_PATH(testing_workdir, test_config):
    ref_path = os.environ['PATH']
    build.create_env(testing_workdir, ['python'], test_config)
    assert os.environ['PATH'] == ref_path
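# Hedged fixture sketch (assumption, not from the original source): the test
# above relies on `testing_workdir` and `test_config` fixtures that are not
# shown here. This is a minimal guess at what they could provide, based on
# how the other tests construct api.Config; the real fixtures may differ.
import pytest


@pytest.fixture
def testing_workdir(tmpdir):
    # pytest's built-in tmpdir gives a per-test scratch directory
    return str(tmpdir.mkdir("test_workdir"))


@pytest.fixture
def test_config(testing_workdir):
    # Config rooted in the scratch directory, mirroring the tests above
    return api.Config(croot=testing_workdir, anaconda_upload=False,
                      verbose=True)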
def build(m, verbose=True, channel_urls=(), override_channels=False,
          wheel_dir="./build"):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    '''
    if (m.get_value('build/detect_binary_files_with_prefix') or
            m.binary_has_prefix_files()):
        # We must use a long prefix here as the package will only be
        # installable into prefixes shorter than this one.
        config.use_long_build_prefix = True
    else:
        # In case there are multiple builds in the same process
        config.use_long_build_prefix = False

    if m.skip():
        print("Skipped: The %s recipe defines build/skip for this "
              "configuration." % m.dist())
        return

    print("Removing old build environment")
    if on_win:
        if isdir(config.short_build_prefix):
            move_to_trash(config.short_build_prefix, '')
        if isdir(config.long_build_prefix):
            move_to_trash(config.long_build_prefix, '')
    else:
        rm_rf(config.short_build_prefix)
        rm_rf(config.long_build_prefix)

    print("Removing old work directory")
    if on_win:
        if isdir(source.WORK_DIR):
            move_to_trash(source.WORK_DIR, '')
    else:
        rm_rf(source.WORK_DIR)

    # Display the name only
    # Version number could be missing due to dependency on source info.
    print("BUILD START:", m.dist())
    create_env(config.build_prefix,
               [ms.spec for ms in m.ms_depends('build')],
               verbose=verbose, channel_urls=channel_urls,
               override_channels=override_channels)

    if m.name() in [i.rsplit('-', 2)[0] for i in linked(config.build_prefix)]:
        print("%s is installed as a build dependency. Removing." % m.name())
        index = get_build_index(clear_cache=False, channel_urls=channel_urls,
                                override_channels=override_channels)
        actions = plan.remove_actions(config.build_prefix, [m.name()],
                                      index=index)
        assert not plan.nothing_to_do(actions), actions
        plan.display_actions(actions, index)
        plan.execute_actions(actions, index)

    # download source code...
    source.provide(m.path, m.get_section('source'))

    # Parse our metadata again because we did not initialize the source
    # information before.
    m.parse_again()
    print("Package:", m.dist())

    assert isdir(source.WORK_DIR)
    src_dir = source.get_dir()
    contents = os.listdir(src_dir)
    if contents:
        print("source tree in:", src_dir)
    else:
        print("no source")

    rm_rf(config.info_dir)
    files1 = prefix_files()
    for pat in m.always_include_files():
        has_matches = False
        for f in set(files1):
            if fnmatch.fnmatch(f, pat):
                print("Including in package existing file", f)
                files1.discard(f)
                has_matches = True
        if not has_matches:
            sys.exit("Error: Glob %s from always_include_files does not "
                     "match any files" % pat)
    # Save this for later
    with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
        f.write(u'\n'.join(sorted(list(files1))))
        f.write(u'\n')

    print("Source dir: %s" % src_dir)
    if sys.platform == 'win32':
        windows_build(m)
    else:
        env = environ.get_dict(m)
        build_file = join(m.path, 'build_wheel.sh')

        if not isfile(build_file):
            print("Using plain 'python setup.py bdist_wheel' as build script")
            build_file = join(src_dir, 'build_wheel.sh')
            with open(build_file, 'w') as fo:
                fo.write('\n')
                fo.write('# Autogenerated build command:\n')
                fo.write('python setup.py bdist_wheel\n')
                fo.write('\n')

        cmd = [shell_path, '-x', '-e', build_file]
        _check_call(cmd, env=env, cwd=src_dir)

    all_wheels = glob(join(src_dir, "dist", '*.whl'))
    if len(all_wheels) == 0:
        print("No wheels produced!")
    else:
        if len(all_wheels) > 1:
            print("More than one wheel produced!")
        try:
            os.makedirs(wheel_dir)
            print("Created wheel dir: %s:" % wheel_dir)
        except OSError:
            if not isdir(wheel_dir):
                raise
        print("Copying to %s:" % wheel_dir)
        for wheel in all_wheels:
            shutil.copy(wheel, wheel_dir)
            print(" %s" % basename(wheel))
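# Hedged usage sketch (assumption, not from the original source): shows how
# the wheel-oriented build() above might be invoked for a recipe directory.
# The MetaData import path is assumed; the recipe path and wheel_dir values
# are purely illustrative.
from conda_build.metadata import MetaData  # assumed import location


def example_build_wheel(recipe_dir, wheel_dir="./wheelhouse"):
    # Render the recipe metadata and build a wheel into wheel_dir
    m = MetaData(recipe_dir)
    build(m, verbose=True, wheel_dir=wheel_dir)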