def build(m, get_src=True, pypi=False):
    rm_rf(prefix)
    create_env(prefix, [ms.spec for ms in m.ms_depends('build')], pypi)
    print("BUILD START:", m.dist())

    if get_src:
        source.provide(m.path, m.get_section('source'))
    assert isdir(source.WORK_DIR)
    if os.listdir(source.get_dir()):
        print("source tree in:", source.get_dir())
    else:
        print("no source")

    rm_rf(info_dir)
    files1 = prefix_files()

    if sys.platform == 'win32':
        import conda_build.windows as windows
        windows.build(m)
    else:
        env = environ.get_dict(m)
        cmd = ['/bin/bash', '-x', '-e', join(m.path, 'build.sh')]
        _check_call(cmd, env=env, cwd=source.get_dir())

    create_post_scripts(m)
    create_entry_points(m.get_value('build/entry_points'))
    post_process(preserve_egg_dir=bool(
        m.get_value('build/preserve_egg_dir')))

    assert not exists(info_dir)
    files2 = prefix_files()

    post_build(sorted(files2 - files1))
    create_info_files(m, sorted(files2 - files1))
    files3 = prefix_files()
    fix_permissions(files3 - files1)

    path = bldpkg_path(m)
    t = tarfile.open(path, 'w:bz2')
    for f in sorted(files3 - files1):
        t.add(join(prefix, f), f)
    t.close()

    print("BUILD END:", m.dist())

    # we're done building, perform some checks
    tarcheck.check_all(path)
    update_index(bldpkgs_dir)
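
# --- Example (not from conda-build) ---------------------------------------
# A minimal, self-contained sketch of the snapshot-diff technique used above:
# prefix_files() is taken before and after the build script runs, and the set
# difference is exactly the payload that goes into the package. The walk below
# is a stand-in for the real prefix_files() helper; names are invented.
import os
from os.path import join, relpath


def snapshot(prefix):
    """Return the set of files under `prefix`, relative to it."""
    found = set()
    for root, _, files in os.walk(prefix):
        for fn in files:
            found.add(relpath(join(root, fn), prefix))
    return found

# Usage sketch:
#   files1 = snapshot(prefix)            # before the build script runs
#   ... run the build ...
#   files2 = snapshot(prefix)            # after
#   new_files = sorted(files2 - files1)  # the package payload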
def build(m, get_src=True, verbose=True):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :param get_src: Should we download the source?
    :type get_src: bool
    '''
    rm_rf(prefix)
    print("BUILD START:", m.dist())
    create_env(prefix, [ms.spec for ms in m.ms_depends('build')],
               verbose=verbose)

    if get_src:
        source.provide(m.path, m.get_section('source'))
    assert isdir(source.WORK_DIR)
    if os.listdir(source.get_dir()):
        print("source tree in:", source.get_dir())
    else:
        print("no source")

    rm_rf(info_dir)
    files1 = prefix_files()

    if sys.platform == 'win32':
        import conda_build.windows as windows
        windows.build(m)
    else:
        env = environ.get_dict(m)
        build_file = join(m.path, 'build.sh')
        if exists(build_file):
            script = m.get_value('build/script', None)
            if script:
                if isinstance(script, list):
                    script = '\n'.join(script)
                with open(build_file, 'w', encoding='utf-8') as bf:
                    bf.write(script)
                os.chmod(build_file, 0o766)
            cmd = ['/bin/bash', '-x', '-e', build_file]
            _check_call(cmd, env=env, cwd=source.get_dir())

    get_build_metadata(m)
    create_post_scripts(m)
    create_entry_points(m.get_value('build/entry_points'))
    post_process(preserve_egg_dir=bool(m.get_value('build/preserve_egg_dir')))

    assert not exists(info_dir)
    files2 = prefix_files()

    post_build(sorted(files2 - files1),
               binary_relocation=bool(
                   m.get_value('build/binary_relocation', True)))
    create_info_files(m, sorted(files2 - files1),
                      include_recipe=bool(m.path))
    files3 = prefix_files()
    fix_permissions(files3 - files1)

    path = bldpkg_path(m)
    t = tarfile.open(path, 'w:bz2')
    for f in sorted(files3 - files1):
        t.add(join(prefix, f), f)
    t.close()

    print("BUILD END:", m.dist())

    # we're done building, perform some checks
    tarcheck.check_all(path)
    update_index(config.bldpkgs_dir)
def build(m, get_src=True, verbose=True, post=None, channel_urls=(),
          override_channels=False):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :param get_src: Should we download the source?
    :type get_src: bool
    :type post: bool or None. None means run the whole build. True means run
    post only. False means stop just before the post.
    '''
    if post in [False, None]:
        print("Removing old build directory")
        rm_rf(config.short_build_prefix)
        rm_rf(config.long_build_prefix)
        print("Removing old work directory")
        rm_rf(source.WORK_DIR)

        if (m.get_value('build/detect_binary_files_with_prefix') or
                m.binary_has_prefix_files()):
            # We must use a long prefix here as the package will only be
            # installable into prefixes shorter than this one.
            config.use_long_build_prefix = True
        else:
            # In case there are multiple builds in the same process
            config.use_long_build_prefix = False

        # Display the name only
        # Version number could be missing due to dependency on source info.
        print("BUILD START:", m.dist())
        create_env(config.build_prefix,
                   [ms.spec for ms in m.ms_depends('build')],
                   verbose=verbose, channel_urls=channel_urls,
                   override_channels=override_channels)

        if get_src:
            source.provide(m.path, m.get_section('source'))
            # Parse our metadata again because we did not initialize the
            # source information before.
            m.parse_again()

        print("Package:", m.dist())
        assert isdir(source.WORK_DIR)
        src_dir = source.get_dir()
        contents = os.listdir(src_dir)
        if contents:
            print("source tree in:", src_dir)
        else:
            print("no source")

        rm_rf(config.info_dir)
        files1 = prefix_files().difference(set(m.always_include_files()))
        # Save this for later
        with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
            f.write(u'\n'.join(sorted(list(files1))))
            f.write(u'\n')

        if sys.platform == 'win32':
            import conda_build.windows as windows
            windows.build(m)
        else:
            env = environ.get_dict(m)
            build_file = join(m.path, 'build.sh')

            script = m.get_value('build/script', None)
            if script:
                if isinstance(script, list):
                    script = '\n'.join(script)
                build_file = join(source.get_dir(), 'conda_build.sh')
                with open(build_file, 'w') as bf:
                    bf.write(script)
                os.chmod(build_file, 0o766)

            if exists(build_file):
                cmd = ['/bin/bash', '-x', '-e', build_file]
                _check_call(cmd, env=env, cwd=src_dir)

    if post in [True, None]:
        if post == True:
            with open(join(config.croot, 'prefix_files.txt'), 'r') as f:
                files1 = set(f.read().splitlines())

        get_build_metadata(m)
        create_post_scripts(m)
        create_entry_points(m.get_value('build/entry_points'))
        assert not exists(config.info_dir)
        files2 = prefix_files()

        post_process(sorted(files2 - files1),
                     preserve_egg_dir=bool(m.get_value('build/preserve_egg_dir')))

        # The post processing may have deleted some files (like easy-install.pth)
        files2 = prefix_files()
        post_build(m, sorted(files2 - files1))
        create_info_files(m, sorted(files2 - files1),
                          include_recipe=bool(m.path))
        if m.get_value('build/noarch_python'):
            import conda_build.noarch_python as noarch_python
            noarch_python.transform(m, sorted(files2 - files1))

        files3 = prefix_files()
        fix_permissions(files3 - files1)

        path = bldpkg_path(m)
        t = tarfile.open(path, 'w:bz2')
        for f in sorted(files3 - files1):
            t.add(join(config.build_prefix, f), f)
        t.close()

        print("BUILD END:", m.dist())

        # we're done building, perform some checks
        tarcheck.check_all(path)
        update_index(config.bldpkgs_dir)
    else:
        print("STOPPING BUILD BEFORE POST:", m.dist())
def build(m, get_src=True, verbose=True, post=None, channel_urls=(),
          override_channels=False):
    """
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :param get_src: Should we download the source?
    :type get_src: bool
    :type post: bool or None. None means run the whole build. True means run
    post only. False means stop just before the post.
    """
    if m.get_value("build/detect_binary_files_with_prefix") or m.binary_has_prefix_files():
        # We must use a long prefix here as the package will only be
        # installable into prefixes shorter than this one.
        config.use_long_build_prefix = True
    else:
        # In case there are multiple builds in the same process
        config.use_long_build_prefix = False

    if post in [False, None]:
        print("Removing old build directory")
        rm_rf(config.short_build_prefix)
        rm_rf(config.long_build_prefix)
        print("Removing old work directory")
        rm_rf(source.WORK_DIR)

        # Display the name only
        # Version number could be missing due to dependency on source info.
        print("BUILD START:", m.dist())
        create_env(
            config.build_prefix,
            [ms.spec for ms in m.ms_depends("build")],
            verbose=verbose,
            channel_urls=channel_urls,
            override_channels=override_channels,
        )

        if m.name() in [i.rsplit("-", 2)[0] for i in linked(config.build_prefix)]:
            print("%s is installed as a build dependency. Removing." % m.name())
            index = get_build_index(clear_cache=False, channel_urls=channel_urls,
                                    override_channels=override_channels)
            actions = plan.remove_actions(config.build_prefix, [m.name()], index=index)
            assert not plan.nothing_to_do(actions), actions
            plan.display_actions(actions, index)
            plan.execute_actions(actions, index)

        if get_src:
            source.provide(m.path, m.get_section("source"))
            # Parse our metadata again because we did not initialize the source
            # information before.
            m.parse_again()

        print("Package:", m.dist())
        assert isdir(source.WORK_DIR)
        src_dir = source.get_dir()
        contents = os.listdir(src_dir)
        if contents:
            print("source tree in:", src_dir)
        else:
            print("no source")

        rm_rf(config.info_dir)
        files1 = prefix_files()
        for rx in m.always_include_files():
            pat = re.compile(rx)
            has_matches = False
            for f in set(files1):
                if pat.match(f):
                    print("Including in package existing file", f)
                    files1.discard(f)
                    has_matches = True
            if not has_matches:
                sys.exit("Error: Regex %s from always_include_files does not "
                         "match any files" % rx)
        # Save this for later
        with open(join(config.croot, "prefix_files.txt"), "w") as f:
            f.write(u"\n".join(sorted(list(files1))))
            f.write(u"\n")

        if sys.platform == "win32":
            import conda_build.windows as windows
            windows.build(m)
        else:
            env = environ.get_dict(m)
            build_file = join(m.path, "build.sh")

            script = m.get_value("build/script", None)
            if script:
                if isinstance(script, list):
                    script = "\n".join(script)
                build_file = join(source.get_dir(), "conda_build.sh")
                with open(build_file, "w") as bf:
                    bf.write(script)
                os.chmod(build_file, 0o766)

            if exists(build_file):
                cmd = ["/bin/bash", "-x", "-e", build_file]
                _check_call(cmd, env=env, cwd=src_dir)

    if post in [True, None]:
        if post == True:
            with open(join(config.croot, "prefix_files.txt"), "r") as f:
                files1 = set(f.read().splitlines())

        get_build_metadata(m)
        create_post_scripts(m)
        create_entry_points(m.get_value("build/entry_points"))
        assert not exists(config.info_dir)
        files2 = prefix_files()

        post_process(sorted(files2 - files1),
                     preserve_egg_dir=bool(m.get_value("build/preserve_egg_dir")))

        # The post processing may have deleted some files (like easy-install.pth)
        files2 = prefix_files()
        assert not any(config.meta_dir in join(config.build_prefix, f)
                       for f in files2 - files1)
        post_build(m, sorted(files2 - files1))
        create_info_files(m, sorted(files2 - files1),
                          include_recipe=bool(m.path))
        if m.get_value("build/noarch_python"):
            import conda_build.noarch_python as noarch_python
            noarch_python.transform(m, sorted(files2 - files1))

        files3 = prefix_files()
        fix_permissions(files3 - files1)

        path = bldpkg_path(m)
        t = tarfile.open(path, "w:bz2")
        for f in sorted(files3 - files1):
            t.add(join(config.build_prefix, f), f)
        t.close()

        print("BUILD END:", m.dist())

        # we're done building, perform some checks
        tarcheck.check_all(path)
        update_index(config.bldpkgs_dir)
    else:
        print("STOPPING BUILD BEFORE POST:", m.dist())
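
# --- Example (not from conda-build) ---------------------------------------
# Hedged sketch of the regex handling above for always_include_files: any
# pre-existing prefix file matching the pattern is discarded from the
# "already present" snapshot so that it ends up inside the package. The
# filenames and pattern below are invented.
import re

files1 = {'lib/libfoo.so.1', 'share/doc/README', 'bin/tool'}
for rx in [r'lib/.*\.so\.\d+']:
    pat = re.compile(rx)
    matched = [f for f in set(files1) if pat.match(f)]
    if not matched:
        raise SystemExit("Error: Regex %s from always_include_files does not "
                         "match any files" % rx)
    for f in matched:
        files1.discard(f)    # now counted as part of the new package
print(sorted(files1))        # -> ['bin/tool', 'share/doc/README']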
def build(m, get_src=True, verbose=True, post=None, channel_urls=(),
          override_channels=False, include_recipe=True):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :param get_src: Should we download the source?
    :type get_src: bool
    :type post: bool or None. None means run the whole build. True means run
    post only. False means stop just before the post.
    '''
    if (m.get_value('build/detect_binary_files_with_prefix') or
            m.binary_has_prefix_files()):
        # We must use a long prefix here as the package will only be
        # installable into prefixes shorter than this one.
        config.use_long_build_prefix = True
    else:
        # In case there are multiple builds in the same process
        config.use_long_build_prefix = False

    if post in [False, None]:
        print("Removing old build environment")
        if on_win:
            if isdir(config.short_build_prefix):
                move_to_trash(config.short_build_prefix, '')
            if isdir(config.long_build_prefix):
                move_to_trash(config.long_build_prefix, '')
        else:
            rm_rf(config.short_build_prefix)
            rm_rf(config.long_build_prefix)
        print("Removing old work directory")
        if on_win:
            if isdir(source.WORK_DIR):
                move_to_trash(source.WORK_DIR, '')
        else:
            rm_rf(source.WORK_DIR)

        # Display the name only
        # Version number could be missing due to dependency on source info.
        print("BUILD START:", m.dist())
        create_env(config.build_prefix,
                   [ms.spec for ms in m.ms_depends('build')],
                   verbose=verbose, channel_urls=channel_urls,
                   override_channels=override_channels)

        if m.name() in [i.rsplit('-', 2)[0] for i in linked(config.build_prefix)]:
            print("%s is installed as a build dependency. Removing." % m.name())
            index = get_build_index(clear_cache=False, channel_urls=channel_urls,
                                    override_channels=override_channels)
            actions = plan.remove_actions(config.build_prefix, [m.name()],
                                          index=index)
            assert not plan.nothing_to_do(actions), actions
            plan.display_actions(actions, index)
            plan.execute_actions(actions, index)

        if get_src:
            source.provide(m.path, m.get_section('source'))
            # Parse our metadata again because we did not initialize the
            # source information before.
            m.parse_again()

        print("Package:", m.dist())
        assert isdir(source.WORK_DIR)
        src_dir = source.get_dir()
        contents = os.listdir(src_dir)
        if contents:
            print("source tree in:", src_dir)
        else:
            print("no source")

        rm_rf(config.info_dir)
        files1 = prefix_files()
        for pat in m.always_include_files():
            has_matches = False
            for f in set(files1):
                if fnmatch.fnmatch(f, pat):
                    print("Including in package existing file", f)
                    files1.discard(f)
                    has_matches = True
            if not has_matches:
                sys.exit("Error: Glob %s from always_include_files does not "
                         "match any files" % pat)
        # Save this for later
        with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
            f.write(u'\n'.join(sorted(list(files1))))
            f.write(u'\n')

        if sys.platform == 'win32':
            import conda_build.windows as windows
            windows.build(m)
        else:
            env = environ.get_dict(m)
            build_file = join(m.path, 'build.sh')

            script = m.get_value('build/script', None)
            if script:
                if isinstance(script, list):
                    script = '\n'.join(script)
                build_file = join(source.get_dir(), 'conda_build.sh')
                with open(build_file, 'w') as bf:
                    bf.write(script)
                os.chmod(build_file, 0o766)

            if isfile(build_file):
                cmd = ['/bin/bash', '-x', '-e', build_file]
                _check_call(cmd, env=env, cwd=src_dir)

    if post in [True, None]:
        if post == True:
            with open(join(config.croot, 'prefix_files.txt'), 'r') as f:
                files1 = set(f.read().splitlines())

        get_build_metadata(m)
        create_post_scripts(m)
        create_entry_points(m.get_value('build/entry_points'))
        assert not exists(config.info_dir)
        files2 = prefix_files()

        post_process(sorted(files2 - files1),
                     preserve_egg_dir=bool(
                         m.get_value('build/preserve_egg_dir')))

        # The post processing may have deleted some files (like easy-install.pth)
        files2 = prefix_files()
        if any(config.meta_dir in join(config.build_prefix, f)
               for f in files2 - files1):
            sys.exit(indent(
                """Error: Untracked file(s) %s found in conda-meta directory.
This error usually comes from using conda in the build script. Avoid doing
this, as it can lead to packages that include their dependencies.""" %
                (tuple(f for f in files2 - files1
                       if config.meta_dir in join(config.build_prefix, f)),)))
        post_build(m, sorted(files2 - files1))
        create_info_files(m, sorted(files2 - files1),
                          include_recipe=bool(m.path) and include_recipe)
        if m.get_value('build/noarch_python'):
            import conda_build.noarch_python as noarch_python
            noarch_python.transform(m, sorted(files2 - files1))

        files3 = prefix_files()
        fix_permissions(files3 - files1)

        path = bldpkg_path(m)
        t = tarfile.open(path, 'w:bz2')
        for f in sorted(files3 - files1):
            t.add(join(config.build_prefix, f), f)
        t.close()

        print("BUILD END:", m.dist())

        # we're done building, perform some checks
        tarcheck.check_all(path)
        update_index(config.bldpkgs_dir)
    else:
        print("STOPPING BUILD BEFORE POST:", m.dist())
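
# --- Example (not from conda-build) ---------------------------------------
# This revision swaps the regex match for shell-style globs via fnmatch.
# A small sketch of the matching behavior (pattern and paths are invented):
import fnmatch

files = ['lib/libfoo.so.1', 'bin/tool']
print([f for f in files if fnmatch.fnmatch(f, 'lib/*.so.*')])
# -> ['lib/libfoo.so.1']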
def build(m, config, post=None, need_source_download=True,
          need_reparse_in_env=False):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :type post: bool or None. None means run the whole build. True means run
    post only. False means stop just before the post.
    :type need_source_download: bool: if rendering failed to download source
    (due to missing tools), retry here after build env is populated
    '''
    if m.skip():
        print_skip_message(m)
        return False

    if config.skip_existing:
        package_exists = is_package_built(m, config)
        if package_exists:
            print(m.dist(),
                  "is already built in {0}, skipping.".format(package_exists))
            return False

    if post in [False, None]:
        print("BUILD START:", m.dist())
        if m.uses_jinja and (need_source_download or need_reparse_in_env):
            print("    (actual version deferred until further download or env creation)")

        specs = [ms.spec for ms in m.ms_depends('build')]
        create_env(config.build_prefix, specs, config=config)
        vcs_source = m.uses_vcs_in_build
        if vcs_source and vcs_source not in specs:
            vcs_executable = "hg" if vcs_source == "mercurial" else vcs_source
            has_vcs_available = os.path.isfile(
                external.find_executable(vcs_executable, config.build_prefix) or "")
            if not has_vcs_available:
                if (vcs_source != "mercurial" or
                        not any(spec.startswith('python') and "3." in spec
                                for spec in specs)):
                    specs.append(vcs_source)

                    log.warn("Your recipe depends on %s at build time (for templates), "
                             "but you have not listed it as a build dependency. Doing "
                             "so for this build.", vcs_source)

                    # Display the name only
                    # Version number could be missing due to dependency on source info.
                    create_env(config.build_prefix, specs, config=config)
                else:
                    raise ValueError("Your recipe uses mercurial in build, but mercurial"
                                     " does not yet support Python 3. Please handle all "
                                     "of your mercurial actions outside of your build "
                                     "script.")

        if need_source_download:
            # Execute any commands fetching the source (e.g., git) in the _build
            # environment. This makes it possible to provide source fetchers
            # (eg. git, hg, svn) as build dependencies.
            with path_prepended(config.build_prefix):
                m, need_source_download, need_reparse_in_env = parse_or_try_download(
                    m, no_download_source=False, force_download=True, config=config)
            assert not need_source_download, "Source download failed. Please investigate."
            if m.uses_jinja:
                print("BUILD START (revised):", m.dist())

        if need_reparse_in_env:
            reparse(m, config=config)
            print("BUILD START (revised):", m.dist())

        if m.name() in [i.rsplit('-', 2)[0] for i in linked(config.build_prefix)]:
            print("%s is installed as a build dependency. Removing." % m.name())
            index = get_build_index(config=config, clear_cache=False)
            actions = plan.remove_actions(config.build_prefix, [m.name()], index=index)
            assert not plan.nothing_to_do(actions), actions
            plan.display_actions(actions, index)
            plan.execute_actions(actions, index)

        print("Package:", m.dist())

        with filelock.SoftFileLock(join(config.build_folder, ".conda_lock"),
                                   timeout=config.timeout):
            # get_dir here might be just work, or it might be one level deeper,
            # depending on the source.
            src_dir = source.get_dir(config)
            if isdir(src_dir):
                print("source tree in:", src_dir)
            else:
                print("no source - creating empty work folder")
                os.makedirs(src_dir)

            rm_rf(config.info_dir)
            files1 = prefix_files(prefix=config.build_prefix)
            for pat in m.always_include_files():
                has_matches = False
                for f in set(files1):
                    if fnmatch.fnmatch(f, pat):
                        print("Including in package existing file", f)
                        files1.discard(f)
                        has_matches = True
                if not has_matches:
                    log.warn("Glob %s from always_include_files does not match any files",
                             pat)
            # Save this for later
            with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
                f.write(u'\n'.join(sorted(list(files1))))
                f.write(u'\n')

            # Use script from recipe?
            script = m.get_value('build/script', None)
            if script:
                if isinstance(script, list):
                    script = '\n'.join(script)

            if isdir(src_dir):
                if on_win:
                    build_file = join(m.path, 'bld.bat')
                    if script:
                        build_file = join(src_dir, 'bld.bat')
                        with open(build_file, 'w') as bf:
                            bf.write(script)
                    import conda_build.windows as windows
                    windows.build(m, build_file, config=config)
                else:
                    build_file = join(m.path, 'build.sh')

                    # There is no sense in trying to run an empty build script.
                    if isfile(build_file) or script:
                        with path_prepended(config.build_prefix):
                            env = environ.get_dict(config=config, m=m)
                        env["CONDA_BUILD_STATE"] = "BUILD"
                        work_file = join(source.get_dir(config), 'conda_build.sh')
                        if script:
                            with open(work_file, 'w') as bf:
                                bf.write(script)
                        if config.activate:
                            if isfile(build_file):
                                data = open(build_file).read()
                            else:
                                data = open(work_file).read()
                            with open(work_file, 'w') as bf:
                                bf.write("source {conda_root}activate {build_prefix} &> "
                                         "/dev/null\n".format(
                                             conda_root=root_script_dir + os.path.sep,
                                             build_prefix=config.build_prefix))
                                bf.write(data)
                        else:
                            if not isfile(work_file):
                                copy_into(build_file, work_file, config.timeout)
                        os.chmod(work_file, 0o766)

                        if isfile(work_file):
                            cmd = [shell_path, '-x', '-e', work_file]
                            # this should raise if any problems occur while building
                            _check_call(cmd, env=env, cwd=src_dir)

    if post in [True, None]:
        if post:
            with open(join(config.croot, 'prefix_files.txt'), 'r') as f:
                files1 = set(f.read().splitlines())

        get_build_metadata(m, config=config)
        create_post_scripts(m, config=config)
        create_entry_points(m.get_value('build/entry_points'), config=config)
        files2 = prefix_files(prefix=config.build_prefix)

        post_process(sorted(files2 - files1),
                     prefix=config.build_prefix,
                     config=config,
                     preserve_egg_dir=bool(m.get_value('build/preserve_egg_dir')))

        # The post processing may have deleted some files (like easy-install.pth)
        files2 = prefix_files(prefix=config.build_prefix)
        if any(config.meta_dir in join(config.build_prefix, f)
               for f in files2 - files1):
            meta_files = (tuple(f for f in files2 - files1
                                if config.meta_dir in join(config.build_prefix, f)),)
            sys.exit(indent(
                """Error: Untracked file(s) %s found in conda-meta directory.
This error usually comes from using conda in the build script. Avoid doing
this, as it can lead to packages that include their dependencies.""" % meta_files))
        post_build(m, sorted(files2 - files1),
                   prefix=config.build_prefix,
                   build_python=config.build_python,
                   croot=config.croot)
        create_info_files(m, sorted(files2 - files1), config=config,
                          prefix=config.build_prefix)

        if m.get_value('build/noarch_python'):
            import conda_build.noarch_python as noarch_python
            noarch_python.transform(m, sorted(files2 - files1), config.build_prefix)

        files3 = prefix_files(prefix=config.build_prefix)
        fix_permissions(files3 - files1, config.build_prefix)

        path = bldpkg_path(m, config)

        # lock the output directory while we build this file
        # create the tarball in a temporary directory to minimize lock time
        with TemporaryDirectory() as tmp:
            tmp_path = os.path.join(tmp, os.path.basename(path))
            t = tarfile.open(tmp_path, 'w:bz2')

            def order(f):
                # we don't care about empty files so send them back via 100000
                fsize = os.stat(join(config.build_prefix, f)).st_size or 100000
                # info/* records will be False == 0, others will be 1.
                info_order = int(os.path.dirname(f) != 'info')
                return info_order, fsize

            # add files in order of a) in info directory, b) increasing size so
            # we can access small manifest or json files without decompressing
            # possible large binary or data files
            for f in sorted(files3 - files1, key=order):
                t.add(join(config.build_prefix, f), f)
            t.close()

            # we're done building, perform some checks
            tarcheck.check_all(tmp_path)
            copy_into(tmp_path, path, config.timeout)
        update_index(config.bldpkgs_dir, config)
    else:
        print("STOPPING BUILD BEFORE POST:", m.dist())

    # returning true here says package is OK to test
    return True
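
# --- Example (not from conda-build) ---------------------------------------
# Self-contained sketch of the tar-ordering key used above: info/* entries
# first, then everything else by increasing size, so small metadata files sit
# at the front of the compressed stream. This mirror of the key works over
# invented (path, size) pairs instead of stat() calls, so it runs without
# touching the filesystem.
import os


def order(entry):
    f, fsize = entry
    fsize = fsize or 100000                         # empty files go last
    info_order = int(os.path.dirname(f) != 'info')  # info/* sorts first
    return info_order, fsize


entries = [('lib/big.bin', 5000), ('info/index.json', 200), ('bin/tool', 900)]
print([f for f, _ in sorted(entries, key=order)])
# -> ['info/index.json', 'bin/tool', 'lib/big.bin']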
def build(m, get_src=True, verbose=True, post=None, channel_urls=(),
          override_channels=False):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :param get_src: Should we download the source?
    :type get_src: bool
    :type post: bool or None. None means run the whole build. True means run
    post only. False means stop just before the post.
    '''
    if (m.get_value('build/detect_binary_files_with_prefix') or
            m.binary_has_prefix_files()):
        # We must use a long prefix here as the package will only be
        # installable into prefixes shorter than this one.
        config.use_long_build_prefix = True
    else:
        # In case there are multiple builds in the same process
        config.use_long_build_prefix = False

    if post in [False, None]:
        print("Removing old build directory")
        rm_rf(config.short_build_prefix)
        rm_rf(config.long_build_prefix)
        print("Removing old work directory")
        rm_rf(source.WORK_DIR)

        # Display the name only
        # Version number could be missing due to dependency on source info.
        print("BUILD START:", m.dist())
        create_env(config.build_prefix,
                   [ms.spec for ms in m.ms_depends('build')],
                   verbose=verbose, channel_urls=channel_urls,
                   override_channels=override_channels)

        if get_src:
            source.provide(m.path, m.get_section('source'))
            # Parse our metadata again because we did not initialize the
            # source information before.
            m.parse_again()

        print("Package:", m.dist())
        assert isdir(source.WORK_DIR)
        src_dir = source.get_dir()
        contents = os.listdir(src_dir)
        if contents:
            print("source tree in:", src_dir)
        else:
            print("no source")

        rm_rf(config.info_dir)
        files1 = prefix_files()
        for f in m.always_include_files():
            if f not in files1:
                sys.exit("Error: File %s from always_include_files not found" % f)
        files1 = files1.difference(set(m.always_include_files()))
        # Save this for later
        with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
            f.write(u'\n'.join(sorted(list(files1))))
            f.write(u'\n')

        if sys.platform == 'win32':
            import conda_build.windows as windows
            windows.build(m)
        else:
            env = environ.get_dict(m)
            build_file = join(m.path, 'build.sh')

            script = m.get_value('build/script', None)
            if script:
                if isinstance(script, list):
                    script = '\n'.join(script)
                build_file = join(source.get_dir(), 'conda_build.sh')
                with open(build_file, 'w') as bf:
                    bf.write(script)
                os.chmod(build_file, 0o766)

            if exists(build_file):
                cmd = ['/bin/bash', '-x', '-e', build_file]
                _check_call(cmd, env=env, cwd=src_dir)

    if post in [True, None]:
        if post == True:
            with open(join(config.croot, 'prefix_files.txt'), 'r') as f:
                files1 = set(f.read().splitlines())

        get_build_metadata(m)
        create_post_scripts(m)
        create_entry_points(m.get_value('build/entry_points'))
        assert not exists(config.info_dir)
        files2 = prefix_files()

        post_process(sorted(files2 - files1),
                     preserve_egg_dir=bool(
                         m.get_value('build/preserve_egg_dir')))

        # The post processing may have deleted some files (like easy-install.pth)
        files2 = prefix_files()
        post_build(m, sorted(files2 - files1))
        create_info_files(m, sorted(files2 - files1),
                          include_recipe=bool(m.path))
        if m.get_value('build/noarch_python'):
            import conda_build.noarch_python as noarch_python
            noarch_python.transform(m, sorted(files2 - files1))

        files3 = prefix_files()
        fix_permissions(files3 - files1)

        path = bldpkg_path(m)
        t = tarfile.open(path, 'w:bz2')
        for f in sorted(files3 - files1):
            t.add(join(config.build_prefix, f), f)
        t.close()

        print("BUILD END:", m.dist())

        # we're done building, perform some checks
        tarcheck.check_all(path)
        update_index(config.bldpkgs_dir)
    else:
        print("STOPPING BUILD BEFORE POST:", m.dist())
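
# --- Example (not from conda-build) ---------------------------------------
# Sketch of the dist-name parsing used in the build-dependency check above:
# a linked dist string is NAME-VERSION-BUILD, so rsplit('-', 2)[0] recovers
# the package name even when the name itself contains dashes. Dist strings
# below are illustrative.
for dist in ['numpy-1.9.2-py27_0', 'scikit-learn-0.16.1-np19py27_0']:
    print(dist.rsplit('-', 2)[0])
# numpy
# scikit-learn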
def build(m, post=None, include_recipe=True, keep_old_work=False,
          need_source_download=True, verbose=True, dirty=False, activate=True):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :type post: bool or None. None means run the whole build. True means run
    post only. False means stop just before the post.
    :type keep_old_work: bool: Keep any previous work directory.
    :type need_source_download: bool: if rendering failed to download source
    (due to missing tools), retry here after build env is populated
    '''
    if (m.get_value('build/detect_binary_files_with_prefix') or
            m.binary_has_prefix_files()) and not on_win:
        # We must use a long prefix here as the package will only be
        # installable into prefixes shorter than this one.
        config.use_long_build_prefix = True
    else:
        # In case there are multiple builds in the same process
        config.use_long_build_prefix = False

    if m.skip():
        print("Skipped: The %s recipe defines build/skip for this "
              "configuration." % m.dist())
        return

    with Locked(cc.root_dir):
        # If --keep-old-work, then move the contents of source.WORK_DIR to a
        # temporary directory for the duration of the build.
        # The source unpacking procedure is too varied and complex
        # to allow this to be written cleanly (see source.get_dir() for example)
        if keep_old_work:
            old_WORK_DIR = tempfile.mkdtemp()
            old_sub_dirs = [name for name in os.listdir(source.WORK_DIR)
                            if os.path.isdir(os.path.join(source.WORK_DIR, name))]
            if len(old_sub_dirs):
                print("Keeping old work directory backup: %s => %s"
                      % (old_sub_dirs, old_WORK_DIR))
                for old_sub in old_sub_dirs:
                    shutil.move(os.path.join(source.WORK_DIR, old_sub), old_WORK_DIR)

        if post in [False, None]:
            print("Removing old build environment")
            print("BUILD START:", m.dist())
            if on_win:
                if isdir(config.short_build_prefix):
                    move_to_trash(config.short_build_prefix, '')
                if isdir(config.long_build_prefix):
                    move_to_trash(config.long_build_prefix, '')
            else:
                rm_rf(config.short_build_prefix)
                rm_rf(config.long_build_prefix)

            # Display the name only
            # Version number could be missing due to dependency on source info.
            create_env(config.build_prefix,
                       [ms.spec for ms in m.ms_depends('build')])

            if need_source_download:
                # Execute any commands fetching the source (e.g., git) in the
                # _build environment. This makes it possible to provide source
                # fetchers (eg. git, hg, svn) as build dependencies.
                m, need_source_download = parse_or_try_download(
                    m, no_download_source=False, force_download=True,
                    verbose=verbose, dirty=dirty)
                assert not need_source_download, \
                    "Source download failed. Please investigate."

            if m.name() in [i.rsplit('-', 2)[0]
                            for i in linked(config.build_prefix)]:
                print("%s is installed as a build dependency. Removing."
                      % m.name())
                index = get_build_index(clear_cache=False)
                actions = plan.remove_actions(config.build_prefix, [m.name()],
                                              index=index)
                assert not plan.nothing_to_do(actions), actions
                plan.display_actions(actions, index)
                plan.execute_actions(actions, index)

            print("Package:", m.dist())

            assert isdir(source.WORK_DIR)
            src_dir = source.get_dir()
            contents = os.listdir(src_dir)
            if contents:
                print("source tree in:", src_dir)
            else:
                print("no source")

            rm_rf(config.info_dir)
            files1 = prefix_files()
            for pat in m.always_include_files():
                has_matches = False
                for f in set(files1):
                    if fnmatch.fnmatch(f, pat):
                        print("Including in package existing file", f)
                        files1.discard(f)
                        has_matches = True
                if not has_matches:
                    sys.exit("Error: Glob %s from always_include_files does "
                             "not match any files" % pat)
            # Save this for later
            with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
                f.write(u'\n'.join(sorted(list(files1))))
                f.write(u'\n')

            # Use script from recipe?
            script = m.get_value('build/script', None)
            if script:
                if isinstance(script, list):
                    script = '\n'.join(script)

            if on_win:
                build_file = join(m.path, 'bld.bat')
                if script:
                    build_file = join(source.get_dir(), 'bld.bat')
                    with open(join(source.get_dir(), 'bld.bat'), 'w') as bf:
                        bf.write(script)
                import conda_build.windows as windows
                windows.build(m, build_file, dirty=dirty, activate=activate)
            else:
                build_file = join(m.path, 'build.sh')

                # There is no sense in trying to run an empty build script.
                if isfile(build_file) or script:
                    env = environ.get_dict(m, dirty=dirty)
                    work_file = join(source.get_dir(), 'conda_build.sh')
                    if script:
                        with open(work_file, 'w') as bf:
                            bf.write(script)
                    if activate:
                        if isfile(build_file):
                            data = open(build_file).read()
                        else:
                            data = open(work_file).read()
                        with open(work_file, 'w') as bf:
                            bf.write("source activate {build_prefix}\n".format(
                                build_prefix=config.build_prefix))
                            bf.write(data)
                    else:
                        if not isfile(work_file):
                            shutil.copy(build_file, work_file)
                    os.chmod(work_file, 0o766)

                    if isfile(work_file):
                        cmd = [shell_path, '-x', '-e', work_file]
                        _check_call(cmd, env=env, cwd=src_dir)

        if post in [True, None]:
            if post:
                with open(join(config.croot, 'prefix_files.txt'), 'r') as f:
                    files1 = set(f.read().splitlines())

            get_build_metadata(m)
            create_post_scripts(m)
            create_entry_points(m.get_value('build/entry_points'))
            assert not exists(config.info_dir)
            files2 = prefix_files()

            post_process(sorted(files2 - files1),
                         preserve_egg_dir=bool(
                             m.get_value('build/preserve_egg_dir')))

            # The post processing may have deleted some files (like easy-install.pth)
            files2 = prefix_files()
            if any(config.meta_dir in join(config.build_prefix, f)
                   for f in files2 - files1):
                sys.exit(indent(
                    """Error: Untracked file(s) %s found in conda-meta directory.
This error usually comes from using conda in the build script. Avoid doing
this, as it can lead to packages that include their dependencies.""" %
                    (tuple(f for f in files2 - files1
                           if config.meta_dir in join(config.build_prefix, f)),)))
            post_build(m, sorted(files2 - files1))
            create_info_files(m, sorted(files2 - files1),
                              include_recipe=bool(m.path) and include_recipe)
            if m.get_value('build/noarch_python'):
                import conda_build.noarch_python as noarch_python
                noarch_python.transform(m, sorted(files2 - files1))

            files3 = prefix_files()
            fix_permissions(files3 - files1)

            path = bldpkg_path(m)
            t = tarfile.open(path, 'w:bz2')

            def order(f):
                # we don't care about empty files so send them back via 100000
                fsize = os.stat(join(config.build_prefix, f)).st_size or 100000
                # info/* records will be False == 0, others will be 1.
                info_order = int(os.path.dirname(f) != 'info')
                return info_order, fsize

            # add files in order of a) in info directory, b) increasing size so
            # we can access small manifest or json files without decompressing
            # possible large binary or data files
            for f in sorted(files3 - files1, key=order):
                t.add(join(config.build_prefix, f), f)
            t.close()

            print("BUILD END:", m.dist())

            # we're done building, perform some checks
            tarcheck.check_all(path)
            update_index(config.bldpkgs_dir)
        else:
            print("STOPPING BUILD BEFORE POST:", m.dist())

        if keep_old_work and len(old_sub_dirs):
            print("Restoring old work directory backup: %s :: %s => %s"
                  % (old_WORK_DIR, old_sub_dirs, source.WORK_DIR))
            for old_sub in old_sub_dirs:
                if os.path.exists(os.path.join(source.WORK_DIR, old_sub)):
                    print("Not restoring old source directory %s over new "
                          "build's version" % (old_sub))
                else:
                    shutil.move(os.path.join(old_WORK_DIR, old_sub),
                                source.WORK_DIR)
            shutil.rmtree(old_WORK_DIR, ignore_errors=True)
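
# --- Example (not from conda-build) ---------------------------------------
# Minimal sketch (invented helper names) of the keep-old-work dance above:
# move the work directory's subdirectories to a temp dir before the build,
# then move back any the new build did not recreate, and discard the rest.
import os
import shutil
import tempfile


def backup_subdirs(work_dir):
    backup = tempfile.mkdtemp()
    subs = [n for n in os.listdir(work_dir)
            if os.path.isdir(os.path.join(work_dir, n))]
    for sub in subs:
        shutil.move(os.path.join(work_dir, sub), backup)
    return backup, subs


def restore_subdirs(backup, subs, work_dir):
    for sub in subs:
        # do not clobber anything the new build produced
        if not os.path.exists(os.path.join(work_dir, sub)):
            shutil.move(os.path.join(backup, sub), work_dir)
    shutil.rmtree(backup, ignore_errors=True)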
def build(m, get_src=True, verbose=True, post=None, channel_urls=(),
          override_channels=False, include_recipe=True):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :param get_src: Should we download the source?
    :type get_src: bool
    :type post: bool or None. None means run the whole build. True means run
    post only. False means stop just before the post.
    '''
    if (m.get_value('build/detect_binary_files_with_prefix') or
            m.binary_has_prefix_files()):
        # We must use a long prefix here as the package will only be
        # installable into prefixes shorter than this one.
        config.use_long_build_prefix = True
    else:
        # In case there are multiple builds in the same process
        config.use_long_build_prefix = False

    if m.skip():
        print("Skipped: The %s recipe defines build/skip for this "
              "configuration." % m.dist())
        sys.exit(0)

    if post in [False, None]:
        print("Removing old build environment")
        if on_win:
            if isdir(config.short_build_prefix):
                move_to_trash(config.short_build_prefix, '')
            if isdir(config.long_build_prefix):
                move_to_trash(config.long_build_prefix, '')
        else:
            rm_rf(config.short_build_prefix)
            rm_rf(config.long_build_prefix)
        print("Removing old work directory")
        if on_win:
            if isdir(source.WORK_DIR):
                move_to_trash(source.WORK_DIR, '')
        else:
            rm_rf(source.WORK_DIR)

        # Display the name only
        # Version number could be missing due to dependency on source info.
        print("BUILD START:", m.dist())
        create_env(config.build_prefix,
                   [ms.spec for ms in m.ms_depends('build')],
                   verbose=verbose, channel_urls=channel_urls,
                   override_channels=override_channels)

        if m.name() in [i.rsplit('-', 2)[0] for i in linked(config.build_prefix)]:
            print("%s is installed as a build dependency. Removing." % m.name())
            index = get_build_index(clear_cache=False, channel_urls=channel_urls,
                                    override_channels=override_channels)
            actions = plan.remove_actions(config.build_prefix, [m.name()],
                                          index=index)
            assert not plan.nothing_to_do(actions), actions
            plan.display_actions(actions, index)
            plan.execute_actions(actions, index)

        if get_src:
            source.provide(m.path, m.get_section('source'))
            # Parse our metadata again because we did not initialize the
            # source information before.
            m.parse_again()

        print("Package:", m.dist())
        assert isdir(source.WORK_DIR)
        src_dir = source.get_dir()
        contents = os.listdir(src_dir)
        if contents:
            print("source tree in:", src_dir)
        else:
            print("no source")

        rm_rf(config.info_dir)
        files1 = prefix_files()
        for pat in m.always_include_files():
            has_matches = False
            for f in set(files1):
                if fnmatch.fnmatch(f, pat):
                    print("Including in package existing file", f)
                    files1.discard(f)
                    has_matches = True
            if not has_matches:
                sys.exit("Error: Glob %s from always_include_files does not "
                         "match any files" % pat)
        # Save this for later
        with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
            f.write(u'\n'.join(sorted(list(files1))))
            f.write(u'\n')

        if sys.platform == 'win32':
            import conda_build.windows as windows
            windows.build(m)
        else:
            env = environ.get_dict(m)
            build_file = join(m.path, 'build.sh')

            script = m.get_value('build/script', None)
            if script:
                if isinstance(script, list):
                    script = '\n'.join(script)
                build_file = join(source.get_dir(), 'conda_build.sh')
                with open(build_file, 'w') as bf:
                    bf.write(script)
                os.chmod(build_file, 0o766)

            if isfile(build_file):
                cmd = ['/bin/bash', '-x', '-e', build_file]
                _check_call(cmd, env=env, cwd=src_dir)

    if post in [True, None]:
        if post == True:
            with open(join(config.croot, 'prefix_files.txt'), 'r') as f:
                files1 = set(f.read().splitlines())

        get_build_metadata(m)
        create_post_scripts(m)
        create_entry_points(m.get_value('build/entry_points'))
        assert not exists(config.info_dir)
        files2 = prefix_files()

        post_process(sorted(files2 - files1),
                     preserve_egg_dir=bool(m.get_value('build/preserve_egg_dir')))

        # The post processing may have deleted some files (like easy-install.pth)
        files2 = prefix_files()
        if any(config.meta_dir in join(config.build_prefix, f)
               for f in files2 - files1):
            sys.exit(indent(
                """Error: Untracked file(s) %s found in conda-meta directory.
This error usually comes from using conda in the build script. Avoid doing
this, as it can lead to packages that include their dependencies.""" %
                (tuple(f for f in files2 - files1
                       if config.meta_dir in join(config.build_prefix, f)),)))
        post_build(m, sorted(files2 - files1))
        create_info_files(m, sorted(files2 - files1),
                          include_recipe=bool(m.path) and include_recipe)
        if m.get_value('build/noarch_python'):
            import conda_build.noarch_python as noarch_python
            noarch_python.transform(m, sorted(files2 - files1))

        files3 = prefix_files()
        fix_permissions(files3 - files1)

        path = bldpkg_path(m)
        t = tarfile.open(path, 'w:bz2')
        for f in sorted(files3 - files1):
            t.add(join(config.build_prefix, f), f)
        t.close()

        print("BUILD END:", m.dist())

        # we're done building, perform some checks
        tarcheck.check_all(path)
        update_index(config.bldpkgs_dir)
    else:
        print("STOPPING BUILD BEFORE POST:", m.dist())
def post_process_files(m, initial_prefix_files):
    get_build_metadata(m)
    create_post_scripts(m)

    # this is new-style noarch, with a value of 'python'
    if m.noarch != "python":
        utils.create_entry_points(m.get_value("build/entry_points"),
                                  config=m.config)
    current_prefix_files = utils.prefix_files(prefix=m.config.host_prefix)

    python = (m.config.build_python if os.path.isfile(m.config.build_python)
              else m.config.host_python)
    post_process(m.get_value("package/name"),
                 m.get_value("package/version"),
                 sorted(current_prefix_files - initial_prefix_files),
                 prefix=m.config.host_prefix,
                 config=m.config,
                 preserve_egg_dir=bool(m.get_value("build/preserve_egg_dir")),
                 noarch=m.get_value("build/noarch"),
                 skip_compile_pyc=m.get_value("build/skip_compile_pyc"))

    # The post processing may have deleted some files (like easy-install.pth)
    current_prefix_files = utils.prefix_files(prefix=m.config.host_prefix)
    new_files = sorted(current_prefix_files - initial_prefix_files)
    new_files = utils.filter_files(new_files, prefix=m.config.host_prefix)

    host_prefix = m.config.host_prefix
    meta_dir = m.config.meta_dir
    if any(meta_dir in join(host_prefix, f) for f in new_files):
        meta_files = (tuple(f for f in new_files
                            if m.config.meta_dir in join(m.config.host_prefix, f)),)
        sys.exit(indent(
            """Error: Untracked file(s) %s found in conda-meta directory.
This error usually comes from using conda in the build script. Avoid doing
this, as it can lead to packages that include their dependencies.""" % meta_files))
    post_build(m, new_files, build_python=python)

    entry_point_script_names = get_entry_point_script_names(
        m.get_value("build/entry_points"))
    if m.noarch == "python":
        pkg_files = [fi for fi in new_files if fi not in entry_point_script_names]
    else:
        pkg_files = new_files

    # the legacy noarch
    if m.get_value("build/noarch_python"):
        noarch_python.transform(m, new_files, m.config.host_prefix)
    # new way: build/noarch: python
    elif m.noarch == "python":
        noarch_python.populate_files(m, pkg_files, m.config.host_prefix,
                                     entry_point_script_names)

    current_prefix_files = utils.prefix_files(prefix=m.config.host_prefix)
    new_files = current_prefix_files - initial_prefix_files
    fix_permissions(new_files, m.config.host_prefix)

    return new_files
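
# --- Example (not from conda-build) ---------------------------------------
# Sketch of the noarch-python filtering above: generated entry-point scripts
# are excluded from the payload (they are recreated at install time for the
# target platform). The script names and file paths below are invented
# stand-ins for what get_entry_point_script_names would return.
entry_point_script_names = ['bin/mytool', 'Scripts/mytool-script.py']
new_files = ['lib/site-packages/mypkg/__init__.py', 'bin/mytool']
pkg_files = [f for f in new_files if f not in entry_point_script_names]
print(pkg_files)  # -> ['lib/site-packages/mypkg/__init__.py']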
def build(m, get_src=True, verbose=True, post=None):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :param get_src: Should we download the source?
    :type get_src: bool
    :type post: bool or None. None means run the whole build. True means run
    post only. False means stop just before the post.
    '''
    if post in [False, None]:
        rm_rf(prefix)
        print("BUILD START:", m.dist())
        create_env(prefix, [ms.spec for ms in m.ms_depends('build')],
                   verbose=verbose)

        if get_src:
            source.provide(m.path, m.get_section('source'))
        assert isdir(source.WORK_DIR)
        if os.listdir(source.get_dir()):
            print("source tree in:", source.get_dir())
        else:
            print("no source")

        rm_rf(info_dir)
        files1 = prefix_files()
        if post == False:
            # Save this for later
            with open(join(source.WORK_DIR, 'prefix_files'), 'w') as f:
                json.dump(list(files1), f)

        if sys.platform == 'win32':
            import conda_build.windows as windows
            windows.build(m)
        else:
            env = environ.get_dict(m)
            build_file = join(m.path, 'build.sh')
            if exists(build_file):
                script = m.get_value('build/script', None)
                if script:
                    if isinstance(script, list):
                        script = '\n'.join(script)
                    with open(build_file, 'w', encoding='utf-8') as bf:
                        bf.write(script)
                    os.chmod(build_file, 0o766)
                cmd = ['/bin/bash', '-x', '-e', build_file]
                _check_call(cmd, env=env, cwd=source.get_dir())

    if post in [True, None]:
        if post == True:
            with open(join(source.WORK_DIR, 'prefix_files')) as f:
                files1 = set(json.load(f))

        get_build_metadata(m)
        create_post_scripts(m)
        create_entry_points(m.get_value('build/entry_points'))
        post_process(preserve_egg_dir=bool(m.get_value('build/preserve_egg_dir')))

        assert not exists(info_dir)
        files2 = prefix_files()

        post_build(sorted(files2 - files1),
                   binary_relocation=bool(
                       m.get_value('build/binary_relocation', True)))
        create_info_files(m, sorted(files2 - files1),
                          include_recipe=bool(m.path))
        files3 = prefix_files()
        fix_permissions(files3 - files1)

        path = bldpkg_path(m)
        t = tarfile.open(path, 'w:bz2')
        for f in sorted(files3 - files1):
            t.add(join(prefix, f), f)
        t.close()

        print("BUILD END:", m.dist())

        # we're done building, perform some checks
        tarcheck.check_all(path)
        update_index(config.bldpkgs_dir)
    else:
        print("STOPPING BUILD BEFORE POST:", m.dist())
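
# --- Example (not from conda-build) ---------------------------------------
# This revision persists the pre-build file snapshot as JSON so a later
# post-only invocation can recover it. A round-trip sketch (sets are not
# JSON-serializable, hence the list()/set() conversions); the path is a
# throwaway temp file, not the real WORK_DIR location.
import json
import os
import tempfile

files1 = {'bin/python', 'lib/libz.so'}
path = os.path.join(tempfile.mkdtemp(), 'prefix_files')
with open(path, 'w') as f:
    json.dump(list(files1), f)
with open(path) as f:
    assert set(json.load(f)) == files1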
def build(m, post=None, include_recipe=True, keep_old_work=False,
          need_source_download=True, need_reparse_in_env=False, verbose=True,
          dirty=False, activate=True, debug=False):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :type post: bool or None. None means run the whole build. True means run
    post only. False means stop just before the post.
    :type keep_old_work: bool: Keep any previous work directory.
    :type need_source_download: bool: if rendering failed to download source
    (due to missing tools), retry here after build env is populated
    '''
    if (m.get_value('build/detect_binary_files_with_prefix') or
            m.binary_has_prefix_files()) and not on_win:
        # We must use a long prefix here as the package will only be
        # installable into prefixes shorter than this one.
        config.use_long_build_prefix = True
    else:
        # In case there are multiple builds in the same process
        config.use_long_build_prefix = False

    if m.skip():
        print("Skipped: The %s recipe defines build/skip for this "
              "configuration." % m.dist())
        return

    with Locked(cc.root_dir):
        # If --keep-old-work, then move the contents of source.WORK_DIR to a
        # temporary directory for the duration of the build.
        # The source unpacking procedure is too varied and complex
        # to allow this to be written cleanly (see source.get_dir() for example)
        if keep_old_work:
            old_WORK_DIR = tempfile.mkdtemp()
            old_sub_dirs = [name for name in os.listdir(source.WORK_DIR)
                            if os.path.isdir(os.path.join(source.WORK_DIR, name))]
            if len(old_sub_dirs):
                print("Keeping old work directory backup: %s => %s"
                      % (old_sub_dirs, old_WORK_DIR))
                for old_sub in old_sub_dirs:
                    shutil.move(os.path.join(source.WORK_DIR, old_sub), old_WORK_DIR)

        if post in [False, None]:
            print("Removing old build environment")
            print("BUILD START:", m.dist())
            if need_source_download or need_reparse_in_env:
                print("    (actual version deferred until further download or env creation)")
            if on_win:
                if isdir(config.short_build_prefix):
                    move_to_trash(config.short_build_prefix, '')
                if isdir(config.long_build_prefix):
                    move_to_trash(config.long_build_prefix, '')
            else:
                rm_rf(config.short_build_prefix)
                rm_rf(config.long_build_prefix)

            specs = [ms.spec for ms in m.ms_depends('build')]
            if activate:
                # If we activate the build environment, we need to be sure that
                # we have the appropriate VCS available in the environment.
                # People are not used to explicitly listing it in recipes,
                # though. We add it for them here, but warn them about it.
                vcs_source = m.uses_vcs_in_build()
                if vcs_source and vcs_source not in specs:
                    vcs_executable = "hg" if vcs_source == "mercurial" else vcs_source
                    has_vcs_available = os.path.isfile(
                        external.find_executable(vcs_executable) or "")
                    if not has_vcs_available:
                        if (vcs_source != "mercurial" or
                                not any(spec.startswith('python') and "3." in spec
                                        for spec in specs)):
                            specs.append(vcs_source)
                            log.warn("Your recipe depends on {} at build time (for "
                                     "templates), but you have not listed it as a "
                                     "build dependency. Doing so for this build."
                                     .format(vcs_source))
                        else:
                            raise ValueError("Your recipe uses mercurial in build, "
                                             "but mercurial does not yet support "
                                             "Python 3. Please handle all of your "
                                             "mercurial actions outside of your "
                                             "build script.")

            # Display the name only
            # Version number could be missing due to dependency on source info.
            create_env(config.build_prefix, specs, debug=debug)

            if need_source_download:
                # Execute any commands fetching the source (e.g., git) in the
                # _build environment. This makes it possible to provide source
                # fetchers (eg. git, hg, svn) as build dependencies.
                if not activate:
                    _old_path = os.environ['PATH']
                    os.environ['PATH'] = prepend_bin_path(
                        {'PATH': _old_path}, config.build_prefix)['PATH']
                try:
                    m, need_source_download, need_reparse_in_env = parse_or_try_download(
                        m, no_download_source=False, force_download=True,
                        verbose=verbose, dirty=dirty)
                    assert not need_source_download, \
                        "Source download failed. Please investigate."
                finally:
                    if not activate:
                        os.environ['PATH'] = _old_path
                print("BUILD START:", m.dist())

            if need_reparse_in_env:
                reparse(m)
                print("BUILD START:", m.dist())

            if m.name() in [i.rsplit('-', 2)[0]
                            for i in linked(config.build_prefix)]:
                print("%s is installed as a build dependency. Removing." % m.name())
                index = get_build_index(clear_cache=False)
                actions = plan.remove_actions(config.build_prefix, [m.name()],
                                              index=index)
                assert not plan.nothing_to_do(actions), actions
                plan.display_actions(actions, index)
                plan.execute_actions(actions, index)

            print("Package:", m.dist())

            src_dir = source.get_dir()
            if isdir(src_dir):
                print("source tree in:", src_dir)
            else:
                print("no source - creating empty work folder")
                os.makedirs(src_dir)

            rm_rf(config.info_dir)
            files1 = prefix_files()
            for pat in m.always_include_files():
                has_matches = False
                for f in set(files1):
                    if fnmatch.fnmatch(f, pat):
                        print("Including in package existing file", f)
                        files1.discard(f)
                        has_matches = True
                if not has_matches:
                    log.warn("Glob %s from always_include_files does not match "
                             "any files" % pat)
            # Save this for later
            with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
                f.write(u'\n'.join(sorted(list(files1))))
                f.write(u'\n')

            # Use script from recipe?
            script = m.get_value('build/script', None)
            if script:
                if isinstance(script, list):
                    script = '\n'.join(script)

            if isdir(source.WORK_DIR):
                if on_win:
                    build_file = join(m.path, 'bld.bat')
                    if script:
                        build_file = join(source.get_dir(), 'bld.bat')
                        with open(join(source.get_dir(), 'bld.bat'), 'w') as bf:
                            bf.write(script)
                    import conda_build.windows as windows
                    windows.build(m, build_file, dirty=dirty, activate=activate)
                else:
                    build_file = join(m.path, 'build.sh')

                    # There is no sense in trying to run an empty build script.
                    if isfile(build_file) or script:
                        env = environ.get_dict(m, dirty=dirty)
                        work_file = join(source.get_dir(), 'conda_build.sh')
                        if script:
                            with open(work_file, 'w') as bf:
                                bf.write(script)
                        if activate:
                            if isfile(build_file):
                                data = open(build_file).read()
                            else:
                                data = open(work_file).read()
                            with open(work_file, 'w') as bf:
                                bf.write("source activate {build_prefix}\n".format(
                                    build_prefix=config.build_prefix))
                                bf.write(data)
                        else:
                            if not isfile(work_file):
                                shutil.copy(build_file, work_file)
                        os.chmod(work_file, 0o766)

                        if isfile(work_file):
                            cmd = [shell_path, '-x', '-e', work_file]
                            _check_call(cmd, env=env, cwd=src_dir)

        if post in [True, None]:
            if post:
                with open(join(config.croot, 'prefix_files.txt'), 'r') as f:
                    files1 = set(f.read().splitlines())

            get_build_metadata(m)
            create_post_scripts(m)
            create_entry_points(m.get_value('build/entry_points'))
            assert not exists(config.info_dir)
            files2 = prefix_files()

            post_process(sorted(files2 - files1),
                         preserve_egg_dir=bool(m.get_value('build/preserve_egg_dir')))

            # The post processing may have deleted some files (like easy-install.pth)
            files2 = prefix_files()
            if any(config.meta_dir in join(config.build_prefix, f)
                   for f in files2 - files1):
                sys.exit(indent(
                    """Error: Untracked file(s) %s found in conda-meta directory.
This error usually comes from using conda in the build script. Avoid doing
this, as it can lead to packages that include their dependencies.""" %
                    (tuple(f for f in files2 - files1
                           if config.meta_dir in join(config.build_prefix, f)),)))
            post_build(m, sorted(files2 - files1))
            create_info_files(m, sorted(files2 - files1),
                              include_recipe=bool(m.path) and include_recipe)
            if m.get_value('build/noarch_python'):
                import conda_build.noarch_python as noarch_python
                noarch_python.transform(m, sorted(files2 - files1))

            files3 = prefix_files()
            fix_permissions(files3 - files1)

            path = bldpkg_path(m)
            t = tarfile.open(path, 'w:bz2')

            def order(f):
                # we don't care about empty files so send them back via 100000
                fsize = os.stat(join(config.build_prefix, f)).st_size or 100000
                # info/* records will be False == 0, others will be 1.
                info_order = int(os.path.dirname(f) != 'info')
                return info_order, fsize

            # add files in order of a) in info directory, b) increasing size so
            # we can access small manifest or json files without decompressing
            # possible large binary or data files
            for f in sorted(files3 - files1, key=order):
                t.add(join(config.build_prefix, f), f)
            t.close()

            print("BUILD END:", m.dist())

            # we're done building, perform some checks
            tarcheck.check_all(path)
            update_index(config.bldpkgs_dir)
        else:
            print("STOPPING BUILD BEFORE POST:", m.dist())

        if keep_old_work and len(old_sub_dirs):
            print("Restoring old work directory backup: %s :: %s => %s"
                  % (old_WORK_DIR, old_sub_dirs, source.WORK_DIR))
            for old_sub in old_sub_dirs:
                if os.path.exists(os.path.join(source.WORK_DIR, old_sub)):
                    print("Not restoring old source directory %s over new "
                          "build's version" % (old_sub))
                else:
                    shutil.move(os.path.join(old_WORK_DIR, old_sub),
                                source.WORK_DIR)
            shutil.rmtree(old_WORK_DIR, ignore_errors=True)
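
# --- Example (not from conda-build) ---------------------------------------
# Sketch of the activation-prepend step above: when activate is requested,
# the wrapper script gets a `source activate <prefix>` line ahead of the
# recipe's own build script. The helper name and prefix are invented.
def prepend_activation(script_text, build_prefix):
    return ("source activate {build_prefix}\n"
            .format(build_prefix=build_prefix)) + script_text


print(prepend_activation("make install\n", "/tmp/_build"))
# source activate /tmp/_build
# make install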
def build(m, config, post=None, need_source_download=True,
          need_reparse_in_env=False):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :type post: bool or None. None means run the whole build. True means run
    post only. False means stop just before the post.
    :type need_source_download: bool: if rendering failed to download source
    (due to missing tools), retry here after build env is populated
    '''
    if m.skip():
        print_skip_message(m)
        return False

    if config.skip_existing:
        package_exists = is_package_built(m, config)
        if package_exists:
            print(m.dist(),
                  "is already built in {0}, skipping.".format(package_exists))
            return False

    if post in [False, None]:
        print("BUILD START:", m.dist())
        if m.uses_jinja and (need_source_download or need_reparse_in_env):
            print("    (actual version deferred until further download or env creation)")

        specs = [ms.spec for ms in m.ms_depends('build')]
        create_env(config.build_prefix, specs, config=config)
        vcs_source = m.uses_vcs_in_build
        if vcs_source and vcs_source not in specs:
            vcs_executable = "hg" if vcs_source == "mercurial" else vcs_source
            has_vcs_available = os.path.isfile(
                external.find_executable(vcs_executable, config.build_prefix) or "")
            if not has_vcs_available:
                if (vcs_source != "mercurial" or
                        not any(spec.startswith('python') and "3." in spec
                                for spec in specs)):
                    specs.append(vcs_source)

                    log.warn("Your recipe depends on %s at build time (for templates), "
                             "but you have not listed it as a build dependency. Doing "
                             "so for this build.", vcs_source)

                    # Display the name only
                    # Version number could be missing due to dependency on source info.
                    create_env(config.build_prefix, specs, config=config)
                else:
                    raise ValueError("Your recipe uses mercurial in build, but mercurial"
                                     " does not yet support Python 3. Please handle all "
                                     "of your mercurial actions outside of your build "
                                     "script.")

        if need_source_download:
            # Execute any commands fetching the source (e.g., git) in the _build
            # environment. This makes it possible to provide source fetchers
            # (eg. git, hg, svn) as build dependencies.
            with path_prepended(config.build_prefix):
                m, need_source_download, need_reparse_in_env = parse_or_try_download(
                    m, no_download_source=False, force_download=True, config=config)
            assert not need_source_download, "Source download failed. Please investigate."
            if m.uses_jinja:
                print("BUILD START (revised):", m.dist())

        if need_reparse_in_env:
            reparse(m, config=config)
            print("BUILD START (revised):", m.dist())

        if m.name() in [i.rsplit('-', 2)[0] for i in linked(config.build_prefix)]:
            print("%s is installed as a build dependency. Removing." % m.name())
            index = get_build_index(config=config, clear_cache=False)
            actions = plan.remove_actions(config.build_prefix, [m.name()], index=index)
            assert not plan.nothing_to_do(actions), actions
            plan.display_actions(actions, index)
            plan.execute_actions(actions, index)

        print("Package:", m.dist())

        with filelock.SoftFileLock(join(config.build_folder, ".conda_lock"),
                                   timeout=config.timeout):
            # get_dir here might be just work, or it might be one level deeper,
            # depending on the source.
            src_dir = config.work_dir
            if isdir(src_dir):
                print("source tree in:", src_dir)
            else:
                print("no source - creating empty work folder")
                os.makedirs(src_dir)

            rm_rf(config.info_dir)
            files1 = prefix_files(prefix=config.build_prefix)
            for pat in m.always_include_files():
                has_matches = False
                for f in set(files1):
                    if fnmatch.fnmatch(f, pat):
                        print("Including in package existing file", f)
                        files1.discard(f)
                        has_matches = True
                if not has_matches:
                    log.warn("Glob %s from always_include_files does not match any files",
                             pat)
            # Save this for later
            with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
                f.write(u'\n'.join(sorted(list(files1))))
                f.write(u'\n')

            # Use script from recipe?
            script = m.get_value('build/script', None)
            if script:
                if isinstance(script, list):
                    script = '\n'.join(script)

            if isdir(src_dir):
                if on_win:
                    build_file = join(m.path, 'bld.bat')
                    if script:
                        build_file = join(src_dir, 'bld.bat')
                        with open(build_file, 'w') as bf:
                            bf.write(script)
                    import conda_build.windows as windows
                    windows.build(m, build_file, config=config)
                else:
                    build_file = join(m.path, 'build.sh')

                    # There is no sense in trying to run an empty build script.
                    if isfile(build_file) or script:
                        with path_prepended(config.build_prefix):
                            env = environ.get_dict(config=config, m=m)
                        env["CONDA_BUILD_STATE"] = "BUILD"
                        work_file = join(config.work_dir, 'conda_build.sh')
                        if script:
                            with open(work_file, 'w') as bf:
                                bf.write(script)
                        if config.activate:
                            if isfile(build_file):
                                data = open(build_file).read()
                            else:
                                data = open(work_file).read()
                            with open(work_file, 'w') as bf:
                                bf.write("source {conda_root}activate {build_prefix} &> "
                                         "/dev/null\n".format(
                                             conda_root=root_script_dir + os.path.sep,
                                             build_prefix=config.build_prefix))
                                bf.write(data)
                        else:
                            if not isfile(work_file):
                                copy_into(build_file, work_file, config.timeout)
                        os.chmod(work_file, 0o766)

                        if isfile(work_file):
                            cmd = [shell_path, '-x', '-e', work_file]
                            # this should raise if any problems occur while building
                            _check_call(cmd, env=env, cwd=src_dir)

    if post in [True, None]:
        if post:
            with open(join(config.croot, 'prefix_files.txt'), 'r') as f:
                files1 = set(f.read().splitlines())

        get_build_metadata(m, config=config)
        create_post_scripts(m, config=config)
        if not is_noarch_python(m):
            create_entry_points(m.get_value('build/entry_points'), config=config)
        files2 = prefix_files(prefix=config.build_prefix)

        post_process(sorted(files2 - files1),
                     prefix=config.build_prefix,
                     config=config,
                     preserve_egg_dir=bool(m.get_value('build/preserve_egg_dir')),
                     noarch=m.get_value('build/noarch'))

        # The post processing may have deleted some files (like easy-install.pth)
        files2 = prefix_files(prefix=config.build_prefix)
        if any(config.meta_dir in join(config.build_prefix, f)
               for f in files2 - files1):
            meta_files = (tuple(f for f in files2 - files1
                                if config.meta_dir in join(config.build_prefix, f)),)
            sys.exit(indent(
                """Error: Untracked file(s) %s found in conda-meta directory.
This error usually comes from using conda in the build script. Avoid doing
this, as it can lead to packages that include their dependencies.""" % meta_files))
        post_build(m, sorted(files2 - files1),
                   prefix=config.build_prefix,
                   build_python=config.build_python,
                   croot=config.croot)

        entry_point_script_names = get_entry_point_script_names(
            get_entry_points(config, m))
        if is_noarch_python(m):
            pkg_files = [f for f in sorted(files2 - files1)
                         if f not in entry_point_script_names]
        else:
            pkg_files = sorted(files2 - files1)

        create_info_files(m, pkg_files, config=config, prefix=config.build_prefix)

        if m.get_value('build/noarch_python'):
            noarch_python.transform(m, sorted(files2 - files1), config.build_prefix)
        elif is_noarch_python(m):
            noarch_python.populate_files(m, pkg_files, config.build_prefix,
                                         entry_point_script_names)

        files3 = prefix_files(prefix=config.build_prefix)
        fix_permissions(files3 - files1, config.build_prefix)

        path = bldpkg_path(m, config)

        # lock the output directory while we build this file
        # create the tarball in a temporary directory to minimize lock time
        with TemporaryDirectory() as tmp:
            tmp_path = os.path.join(tmp, os.path.basename(path))
            t = tarfile.open(tmp_path, 'w:bz2')

            def order(f):
                # we don't care about empty files so send them back via 100000
                fsize = os.stat(join(config.build_prefix, f)).st_size or 100000
                # info/* records will be False == 0, others will be 1.
                info_order = int(os.path.dirname(f) != 'info')
                return info_order, fsize

            # add files in order of a) in info directory, b) increasing size so
            # we can access small manifest or json files without decompressing
            # possible large binary or data files
            for f in sorted(files3 - files1, key=order):
                t.add(join(config.build_prefix, f), f)
            t.close()

            # we're done building, perform some checks
            tarcheck.check_all(tmp_path)
            copy_into(tmp_path, path, config.timeout)
        update_index(config.bldpkgs_dir, config, could_be_mirror=False)
    else:
        print("STOPPING BUILD BEFORE POST:", m.dist())

    # returning true here says package is OK to test
    return True
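
# --- Example (not from conda-build) ---------------------------------------
# Sketch of the lock-minimizing packaging above: write the .tar.bz2 in a
# throwaway directory, verify it there, and only then copy it into the
# (locked) output directory. shutil.copy stands in for conda-build's
# copy_into helper; the function name is invented.
import os
import shutil
import tarfile
from tempfile import TemporaryDirectory


def package(prefix, files, final_path):
    with TemporaryDirectory() as tmp:
        tmp_path = os.path.join(tmp, os.path.basename(final_path))
        with tarfile.open(tmp_path, 'w:bz2') as t:
            for f in sorted(files):
                # store each file under its prefix-relative name
                t.add(os.path.join(prefix, f), f)
        # integrity checks (e.g. tarcheck) would run on tmp_path here
        shutil.copy(tmp_path, final_path)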
def build(m, get_src=True, verbose=True, post=None):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :param get_src: Should we download the source?
    :type get_src: bool
    :type post: bool or None. None means run the whole build. True means run
    post only. False means stop just before the post.
    '''
    if post in [False, None]:
        rm_rf(prefix)
        print("BUILD START:", m.dist())
        create_env(prefix, [ms.spec for ms in m.ms_depends('build')],
                   verbose=verbose)

        if get_src:
            source.provide(m.path, m.get_section('source'))
        assert isdir(source.WORK_DIR)
        src_dir = source.get_dir()
        contents = os.listdir(src_dir)
        if contents:
            print("source tree in:", src_dir)
        else:
            print("no source")

        rm_rf(info_dir)
        files1 = prefix_files()
        if post == False:
            # Save this for later
            with open(join(source.WORK_DIR, 'prefix_files.txt'), 'w') as f:
                f.write(u'\n'.join(sorted(list(files1))))
                f.write(u'\n')

        if sys.platform == 'win32':
            import conda_build.windows as windows
            windows.build(m)
        else:
            env = environ.get_dict(m)
            build_file = join(m.path, 'build.sh')

            script = m.get_value('build/script', None)
            if script:
                if isinstance(script, list):
                    script = '\n'.join(script)
                with open(build_file, 'w', encoding='utf-8') as bf:
                    bf.write(script)
                os.chmod(build_file, 0o766)

            if exists(build_file):
                cmd = ['/bin/bash', '-x', '-e', build_file]
                _check_call(cmd, env=env, cwd=src_dir)

    if post in [True, None]:
        if post == True:
            with open(join(source.WORK_DIR, 'prefix_files.txt'), 'r') as f:
                files1 = set(f.read().splitlines())

        get_build_metadata(m)
        create_post_scripts(m)
        create_entry_points(m.get_value('build/entry_points'))
        post_process(
            preserve_egg_dir=bool(m.get_value('build/preserve_egg_dir')))

        assert not exists(info_dir)
        files2 = prefix_files()

        post_build(sorted(files2 - files1),
                   binary_relocation=bool(
                       m.get_value('build/binary_relocation', True)))
        create_info_files(m, sorted(files2 - files1),
                          include_recipe=bool(m.path))
        files3 = prefix_files()
        fix_permissions(files3 - files1)

        path = bldpkg_path(m)
        t = tarfile.open(path, 'w:bz2')
        for f in sorted(files3 - files1):
            t.add(join(prefix, f), f)
        t.close()

        print("BUILD END:", m.dist())

        # we're done building, perform some checks
        tarcheck.check_all(path)
        update_index(config.bldpkgs_dir)
    else:
        print("STOPPING BUILD BEFORE POST:", m.dist())