def get_tests(path):
    """Extract test commands from a built conda package.

    ``path`` points at a package tarball; its ``info/recipe`` directory is
    extracted into a temporary directory and parsed with ``MetaData`` to
    collect ``test/commands`` and ``test/imports``.

    Returns a single ``bash -c '...'`` command string that activates the
    container environment and runs all tests chained with ``&&``.
    """
    tmp = tempfile.mkdtemp()
    # Close the tarball deterministically instead of leaking the handle.
    with tarfile.open(path) as t:
        t.extractall(tmp)
    input_dir = os.path.join(tmp, 'info', 'recipe')

    tests = [
        '/usr/local/env-execute true',
        '. /usr/local/env-activate.sh',
    ]
    recipe_meta = MetaData(input_dir)

    tests_commands = recipe_meta.get_value('test/commands')
    tests_imports = recipe_meta.get_value('test/imports')
    # Guard against recipes without run requirements: `get_value` returns
    # None there, and `'python' in None` would raise TypeError.
    requirements = recipe_meta.get_value('requirements/run') or []

    if tests_imports or tests_commands:
        if tests_commands:
            tests.append(' && '.join(tests_commands))
        if tests_imports and 'python' in requirements:
            tests.append(' && '.join('python -c "import %s"' % imp
                                     for imp in tests_imports))
        elif tests_imports and ('perl' in requirements or
                                'perl-threaded' in requirements):
            tests.append(' && '.join('''perl -e "use %s;"''' % imp
                                     for imp in tests_imports))

    tests = ' && '.join(tests)
    tests = tests.replace('$R ', 'Rscript ')
    # this is specific to involucro, the way how we build our containers
    tests = tests.replace('$PREFIX', '/usr/local')
    tests = tests.replace('${PREFIX}', '/usr/local')
    return f"bash -c {shlex.quote(tests)}"
def get_tests(path):
    """Extract test commands from a built conda package.

    ``path`` is a package tarball; its ``info/recipe`` directory is
    extracted into a temporary directory and parsed with ``MetaData`` to
    collect ``test/commands`` and ``test/imports``.

    Returns all tests joined into one ``&&`` shell chain, with ``$R``
    rewritten to ``Rscript``.
    """
    tmp = tempfile.mkdtemp()
    # Use a context manager so the tarball file handle is not leaked.
    with tarfile.open(path) as t:
        t.extractall(tmp)
    input_dir = os.path.join(tmp, 'info', 'recipe')

    tests = []
    recipe_meta = MetaData(input_dir)
    tests_commands = recipe_meta.get_value('test/commands')
    tests_imports = recipe_meta.get_value('test/imports')
    # Guard against recipes without run requirements: `get_value` returns
    # None there, and `'python' in None` would raise TypeError.
    requirements = recipe_meta.get_value('requirements/run') or []

    if tests_imports or tests_commands:
        if tests_commands:
            tests.append(' && '.join(tests_commands))
        if tests_imports and 'python' in requirements:
            tests.append(' && '.join('python -c "import %s"' % imp
                                     for imp in tests_imports))
        elif tests_imports and ('perl' in requirements or
                                'perl-threaded' in requirements):
            tests.append(' && '.join('''perl -e "use %s;"''' % imp
                                     for imp in tests_imports))

    tests = ' && '.join(tests)
    tests = tests.replace('$R ', 'Rscript ')
    return tests
def tobuild(recipe, env):
    """Decide whether ``recipe`` should be built for the environment ``env``.

    Returns False when the rendered package already exists in one of the
    configured channels (unless ``force`` is set) or when the recipe
    defines skip for this environment; True otherwise.

    NOTE(review): relies on module-level names not visible here:
    ``built_package_path``, ``channel_packages``, ``force``, ``logger``,
    ``temp_env`` and ``temp_os``.
    """
    pkg = os.path.basename(built_package_path(recipe, env))
    # Channels that already carry this exact package filename.
    in_channels = [
        channel for channel, pkgs in channel_packages.items()
        if pkg in pkgs
    ]
    if in_channels and not force:
        logger.debug(
            'FILTER: not building %s because '
            'it is in channel(s) and it is not forced: %s', pkg, in_channels)
        return False
    # with temp_env, MetaData will see everything in env added to
    # os.environ.
    with temp_env(env):
        # with temp_os, we can fool the MetaData if needed.
        platform = os.environ.get('TRAVIS_OS_NAME', sys.platform)
        # TRAVIS_OS_NAME uses 'osx', but sys.platform uses 'darwin', and
        # that's what conda will be looking for.
        if platform == 'osx':
            platform = 'darwin'
        with temp_os(platform):
            meta = MetaData(recipe)
            if meta.skip():
                logger.debug(
                    'FILTER: not building %s because '
                    'it defines skip for this env', pkg)
                return False
            # If on travis, handle noarch.
            if os.environ.get('TRAVIS', None) == 'true':
                if meta.get_value('build/noarch'):
                    if platform != 'linux':
                        logger.debug('FILTER: only building %s on '
                                     'linux because it defines noarch.', pkg)
                        return False
    # A trailing "_" before the extension means conda rendered no build
    # number even though the recipe does not skip this environment.
    assert not pkg.endswith("_.tar.bz2"), (
        "rendered path {} does not "
        "contain a build number and recipe does not "
        "define skip for this environment. "
        "This is a conda bug.".format(pkg))
    logger.debug(
        'FILTER: building %s because it is not in channels and '
        'does not define skip', pkg)
    return True
def get_deps(recipe, build=True):
    """Yield the names of a single recipe's dependencies.

    ``recipe`` may be a path to a recipe directory or an already-parsed
    conda_build.metadata.MetaData instance.  Version constraints are
    dropped; only each dependency's package name is yielded.  With
    ``build=True`` the build requirements are inspected, otherwise the
    run requirements.
    """
    metadata = MetaData(recipe) if isinstance(recipe, str) else recipe
    section = "build" if build else "run"
    for requirement in metadata.get_value("requirements/%s" % section, []):
        # e.g. 'zlib >=1.2' -> 'zlib'
        yield requirement.split()[0]
def get_deps(recipe, build=True):
    """Generate dependency names for one recipe.

    ``recipe`` is either a path (parsed with MetaData) or an existing
    MetaData object.  Only names are yielded, never version specs.
    ``build`` selects the build section (True) or the run section (False).
    """
    if isinstance(recipe, str):
        meta = MetaData(recipe)
    else:
        meta = recipe
    key = "requirements/build" if build else "requirements/run"
    for entry in meta.get_value(key, []):
        name = entry.split()[0]  # strip any trailing version spec
        yield name
def render_recipe(recipe_path, config, no_download_source=False): arg = recipe_path # Don't use byte literals for paths in Python 2 if not PY3: arg = arg.decode(getpreferredencoding() or 'utf-8') if isfile(arg): if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')): recipe_dir = tempfile.mkdtemp() t = tarfile.open(arg, 'r:*') t.extractall(path=recipe_dir) t.close() need_cleanup = True elif arg.endswith('.yaml'): recipe_dir = os.path.dirname(arg) need_cleanup = False else: print("Ignoring non-recipe: %s" % arg) return else: recipe_dir = abspath(arg) need_cleanup = False if not isdir(recipe_dir): sys.exit("Error: no such directory: %s" % recipe_dir) if config.set_build_id: # updates a unique build id if not already computed config.compute_build_id(os.path.basename(recipe_dir)) try: m = MetaData(recipe_dir, config=config) except exceptions.YamlParsingError as e: sys.stderr.write(e.error_msg()) sys.exit(1) m, need_download, need_reparse_in_env = parse_or_try_download(m, no_download_source=no_download_source, config=config) if need_download and no_download_source: raise ValueError("no_download_source specified, but can't fully render recipe without" " downloading source. Please fix the recipe, or don't use " "no_download_source.") config.noarch = bool(m.get_value('build/noarch')) if need_cleanup: rm_rf(recipe_dir) return m, need_download, need_reparse_in_env
def render_recipe(recipe_path, config, no_download_source=False): arg = recipe_path # Don't use byte literals for paths in Python 2 if not PY3: arg = arg.decode(getpreferredencoding() or 'utf-8') if isfile(arg): if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')): recipe_dir = tempfile.mkdtemp() t = tarfile.open(arg, 'r:*') t.extractall(path=recipe_dir) t.close() need_cleanup = True elif arg.endswith('.yaml'): recipe_dir = os.path.dirname(arg) need_cleanup = False else: print("Ignoring non-recipe: %s" % arg) return else: recipe_dir = abspath(arg) need_cleanup = False if not isdir(recipe_dir): sys.exit("Error: no such directory: %s" % recipe_dir) if config.set_build_id: # updates a unique build id if not already computed config.compute_build_id(os.path.basename(recipe_dir)) try: m = MetaData(recipe_dir, config=config) except exceptions.YamlParsingError as e: sys.stderr.write(e.error_msg()) sys.exit(1) m, need_download, need_reparse_in_env = parse_or_try_download( m, no_download_source=no_download_source, config=config) if need_download and no_download_source: raise ValueError( "no_download_source specified, but can't fully render recipe without" " downloading source. Please fix the recipe, or don't use " "no_download_source.") config.noarch = bool(m.get_value('build/noarch')) if need_cleanup: rm_rf(recipe_dir) return m, need_download, need_reparse_in_env
def have_noarch_python_build_number(meta: "MetaData") -> bool:
    """Checks if we have a noarch:python build with same version+build_number

    Args:
      meta: Variant MetaData object

    Returns:
      True if noarch:python and version+build_number exists already in repodata
    """
    # Only noarch:python packages are relevant here.
    if meta.get_value('build/noarch') != 'python':
        return False

    res = RepoData().get_package_data(
        name=meta.name(),
        version=meta.version(),
        build_number=meta.build_number(),
        platform=['noarch'],
    )
    if res:
        logger.debug("Package %s=%s[build_number=%s, subdir=noarch] exists",
                     meta.name(), meta.version(), meta.build_number())
    # The signature promises a bool; don't leak the raw query result.
    return bool(res)
def get_deps(recipe, build=True):
    """
    Generator of dependency names for a single recipe.

    Parameters
    ----------
    recipe : str or MetaData
        If string, it is a path to the recipe; otherwise assume it is a
        parsed conda_build.metadata.MetaData instance.

    build : bool
        If True yield build dependencies, if False yield run dependencies.

    Yields only names, never version constraints.
    """
    metadata = recipe if not isinstance(recipe, str) else MetaData(recipe)
    requirements = metadata.get_value(
        "requirements/{}".format("build" if build else "run"), [])
    # first whitespace-separated token is the package name
    yield from (spec.split()[0] for spec in requirements)
# Fix existing Bioconductor recipes listed one-per-line in a package file.
# argv-style inputs: recipe folder, global config path, package list path.
recipe_folder = args[0]
config = args[1]
package_list = args[2]

assert os.path.exists(recipe_folder), "Recipe folder does not exist: %s" % recipe_folder
assert os.path.exists(config), "Config file does not exist: %s" % config
assert os.path.exists(package_list), "Package list file does not exist: %s" % package_list

# `with` closes the list file even if a recipe fails part-way through.
with open(package_list, "r") as handle:
    for line in handle:
        pkg = line.strip()
        metafile = os.path.join(recipe_folder, pkg, "meta.yaml")
        assert os.path.exists(metafile), "meta.yaml does not exist for %s" % (pkg)
        metadata = MetaData(metafile)

        # Get the package name (capitalized correctly), version, and Bioconductor
        # release from the meta.yaml file. The purpose of this script is to fix
        # existing recipes, not to update the package to the latest version.
        source_fn = metadata.get_value("source/fn")
        # str.rstrip(".tar.gz") strips *characters*, not the suffix, and
        # mangles names ending in '.', 't', 'a', 'r', 'g' or 'z' — strip the
        # literal suffix instead.
        if source_fn.endswith(".tar.gz"):
            source_fn = source_fn[:-len(".tar.gz")]
        name, version = source_fn.split("_")
        bioc = metadata.get_value("source/url")[0].split("/")[4]
        sys.stderr.write("Writing recipe for %s %s (%s)\n" % (name, version, bioc))
        write_recipe(package=name, recipe_dir=recipe_folder, config=config,
                     force=True, bioc_version=bioc, pkg_version=version)
def build_wheel(recipe, versions_combis={"python": None, "numpy": None},
                conda_channel_urls=(), conda_override_channels=(),
                upload=[], wheel_dir="./build"):
    """Build a wheel from a conda recipe (directory or recipe tarball).

    ``versions_combis`` maps 'python'/'numpy' to a list of requested
    versions; with more than one version per language the function recurses
    once per combination.  Skips recipes that define build/skip for the
    current configuration.

    NOTE(review): ``versions_combis`` and ``upload`` are mutable default
    arguments shared across calls — confirm this is intended before
    refactoring.  Relies on module-level ``all_versions``, ``os_environ``,
    ``PY3``, ``getpreferredencoding`` and ``exceptions``.
    """
    import sys
    import shutil
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile
    from conda.lock import Locked
    from conda_build.config import config
    from conda_build.metadata import MetaData
    import conda_build_wheel.build_wheel as build

    # Mapping of language -> config attribute / env variable name.
    conda_version = {
        'python': 'CONDA_PY',
        'numpy': 'CONDA_NPY',
    }

    for lang in ['python', 'numpy']:
        versions = versions_combis[lang]
        if not versions:
            continue
        if versions == ['all']:
            if all_versions[lang]:
                versions = all_versions[lang]
            else:
                print("'all' is not supported for --%s" % lang)
        if len(versions) > 1:
            # Fan out: recurse once per requested version of this language.
            for ver in versions[:]:
                versions_combis[lang] = [str(ver)]
                build_wheel(recipe, versions_combis,
                            conda_channel_urls=conda_channel_urls,
                            conda_override_channels=conda_override_channels,
                            upload=upload, wheel_dir=wheel_dir)
                # This is necessary to make all combinations build.
                versions_combis[lang] = versions
            return
        else:
            version = versions[0]
            if lang in ('python', 'numpy'):
                version = int(version.replace('.', ''))
            setattr(config, conda_version[lang], version)
        if not len(str(version)) in (2, 3) and lang in ['python', 'numpy']:
            if all_versions[lang]:
                raise RuntimeError("%s must be major.minor, like %s, not %s" %
                                   (conda_version[lang],
                                    all_versions[lang][-1] / 10, version))
            else:
                raise RuntimeError("%s must be major.minor, not %s" %
                                   (conda_version[lang], version))

    # Using --python, --numpy etc. is equivalent to using CONDA_PY, CONDA_NPY, etc.
    # Auto-set those env variables
    for var in conda_version.values():
        if getattr(config, var):
            # Set the env variable.
            os_environ[var] = str(getattr(config, var))

    with Locked(config.croot):
        # Don't use byte literals for paths in Python 2
        if not PY3:
            recipe = recipe.decode(getpreferredencoding() or 'utf-8')
        if isfile(recipe):
            if recipe.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                recipe_dir = tempfile.mkdtemp()
                t = tarfile.open(recipe, 'r:*')
                t.extractall(path=recipe_dir)
                t.close()
                need_cleanup = True
            else:
                print("Ignoring non-recipe: %s" % recipe)
                return
        else:
            recipe_dir = abspath(recipe)
            need_cleanup = False

        if not isdir(recipe_dir):
            sys.exit("Error: no such directory: %s" % recipe_dir)

        try:
            m = MetaData(recipe_dir)
            if m.get_value('build/noarch_python'):
                config.noarch = True
        except exceptions.YamlParsingError as e:
            sys.stderr.write(e.error_msg())
            sys.exit(1)

        m.check_fields()

        if m.skip():
            print(
                "Skipped: The %s recipe defines build/skip for this "
                "configuration." % m.dist())
            return

        build.build(m, channel_urls=conda_channel_urls,
                    override_channels=conda_override_channels,
                    wheel_dir=wheel_dir)

        if need_cleanup:
            shutil.rmtree(recipe_dir)
from conda_build.metadata import MetaData

# Collect per-architecture package metadata (name, version, checksums, source
# url) for every recipe path listed in the file given as argv[1].
res = list()
for meta_path in open(sys.argv[1]):
    input_dir = os.path.join(
        './bioconda-recipes', os.path.dirname(meta_path)
    )
    if os.path.exists(input_dir):
        # NOTE(review): 'osx-' looks like it may be missing a suffix
        # (e.g. 'osx-64') — confirm against the channel layout.
        for arch in ['osx-', 'linux-64']:
            package = dict()
            package['arch'] = arch
            # set the architechture before parsing the metadata
            cc.subdir = arch
            recipe_meta = MetaData(input_dir)
            package['name'] = recipe_meta.get_value('package/name')
            package['version'] = recipe_meta.get_value('package/version')
            url = recipe_meta.get_value('source/url')
            if url:
                package['sha256'] = recipe_meta.get_value('source/sha256')
                package['md5'] = recipe_meta.get_value('source/md5')
            else:
                # git_url and hopefully git_rev
                git_url = recipe_meta.get_value('source/git_url')
                git_rev = recipe_meta.get_value('source/git_rev')
                # rstrip('.git') strips *characters*, not the suffix, and
                # corrupts URLs ending in 'g', 'i' or 't' (e.g. 'digit.git'
                # -> 'd'); strip the literal suffix instead.
                if git_url.endswith('.git'):
                    git_url = git_url[:-len('.git')]
                url = '%s/%s.tar.gz' % (git_url, git_rev)
                if not git_rev:
                    sys.exit('git revision is missing for: %s' % input_dir)
            package['url'] = url
            res.append(package)
def execute(args, parser):
    """Entry point for the build command: build every recipe in args.recipe.

    Fans out over --python/--numpy/--perl/--R version lists by recursing
    once per version, and retries builds whose failure indicates a missing
    dependency for which a local recipe directory exists.

    NOTE(review): depends on module-level helpers not visible here
    (``check_external``, ``delete_trash``, ``update_index``,
    ``handle_binstar_upload``, ``all_versions``, ``os_environ``, ``PY3``,
    ``deque``, ``glob``, ``exists``, ``on_win``, ``warnings``,
    ``exceptions``, ``getpreferredencoding``).
    """
    import sys
    import shutil
    import tarfile
    import tempfile
    from os import makedirs
    from os.path import abspath, isdir, isfile
    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    check_external()

    # change globals in build module, see comment there as well
    build.channel_urls = args.channel or ()
    build.override_channels = args.override_channels
    build.verbose = not args.quiet

    if on_win:
        try:
            # needs to happen before any c extensions are imported that might be
            # hard-linked by files in the trash. one of those is markupsafe,
            # used by jinja2. see https://github.com/conda/conda-build/pull/520
            delete_trash(None)
        except:
            # when we can't delete the trash, don't crash on AssertionError,
            # instead inform the user and try again next time.
            # see https://github.com/conda/conda-build/pull/744
            warnings.warn("Cannot delete trash; some c extension has been "
                          "imported that is hard-linked by files in the trash. "
                          "Will try again on next run.")

    # Mapping of language -> config attribute / env variable name.
    conda_version = {
        'python': 'CONDA_PY',
        'numpy': 'CONDA_NPY',
        'perl': 'CONDA_PERL',
        'R': 'CONDA_R',
    }

    for lang in ['python', 'numpy', 'perl', 'R']:
        versions = getattr(args, lang)
        if not versions:
            continue
        if versions == ['all']:
            if all_versions[lang]:
                versions = all_versions[lang]
            else:
                parser.error("'all' is not supported for --%s" % lang)
        if len(versions) > 1:
            # Fan out: recurse once per requested version of this language.
            for ver in versions[:]:
                setattr(args, lang, [str(ver)])
                execute(args, parser)
                # This is necessary to make all combinations build.
                setattr(args, lang, versions)
            return
        else:
            version = versions[0]
            if lang in ('python', 'numpy'):
                version = int(version.replace('.', ''))
            setattr(config, conda_version[lang], version)
        if not len(str(version)) in (2, 3) and lang in ['python', 'numpy']:
            if all_versions[lang]:
                raise RuntimeError("%s must be major.minor, like %s, not %s" %
                                   (conda_version[lang],
                                    all_versions[lang][-1]/10, version))
            else:
                raise RuntimeError("%s must be major.minor, not %s" %
                                   (conda_version[lang], version))

    # Using --python, --numpy etc. is equivalent to using CONDA_PY, CONDA_NPY, etc.
    # Auto-set those env variables
    for var in conda_version.values():
        if getattr(config, var):
            # Set the env variable.
            os_environ[var] = str(getattr(config, var))

    if args.skip_existing:
        if not isdir(config.bldpkgs_dir):
            makedirs(config.bldpkgs_dir)
        update_index(config.bldpkgs_dir)
        index = build.get_build_index(clear_cache=True)

    already_built = []
    to_build_recursive = []
    with Locked(config.croot):
        # Work queue of recipe paths; failed builds may push dependency
        # recipes (and the failed recipe itself) back onto the left end.
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding() or 'utf-8')
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            try:
                m = MetaData(recipe_dir)
                if m.get_value('build/noarch_python'):
                    config.noarch = True
            except exceptions.YamlParsingError as e:
                sys.stderr.write(e.error_msg())
                sys.exit(1)

            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.skip_existing:
                if m.pkg_fn() in index or m.pkg_fn() in already_built:
                    print("%s is already built, skipping." % m.dist())
                    continue
            if m.skip():
                print("Skipped: The %s recipe defines build/skip for this "
                      "configuration." % m.dist())
                continue
            if args.output:
                try:
                    m.parse_again(permit_undefined_jinja=False)
                except SystemExit:
                    # Something went wrong; possibly due to undefined GIT_ jinja variables.
                    # Maybe we need to actually download the source in order to resolve the build_id.
                    source.provide(m.path, m.get_section('source'))

                    # Parse our metadata again because we did not initialize the source
                    # information before.
                    m.parse_again(permit_undefined_jinja=False)

                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m, move_broken=False)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                try:
                    build.build(m, post=post,
                                include_recipe=args.include_recipe)
                except (RuntimeError, SystemExit) as e:
                    error_str = str(e)
                    if error_str.startswith('No packages found') or error_str.startswith('Could not find some'):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(': ')[1]
                        # Handle package names that contain version deps.
                        if ' ' in dep_pkg:
                            dep_pkg = dep_pkg.split(' ')[0]
                        recipe_glob = glob(dep_pkg + '-[v0-9][0-9.]*')
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                if dep_pkg in to_build_recursive:
                                    sys.exit(str(e))
                                print(("Missing dependency {0}, but found" +
                                       " recipe directory, so building " +
                                       "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                                to_build_recursive.append(dep_pkg)
                        else:
                            raise
                    elif error_str.strip().startswith("Hint:"):
                        lines = [line for line in error_str.splitlines() if line.strip().startswith('- ')]
                        pkgs = [line.lstrip('- ') for line in lines]
                        # Typically if a conflict is with one of these
                        # packages, the other package needs to be rebuilt
                        # (e.g., a conflict with 'python 3.5*' and 'x' means
                        # 'x' isn't build for Python 3.5 and needs to be
                        # rebuilt).
                        skip_names = ['python', 'r']
                        pkgs = [pkg for pkg in pkgs if pkg.split(' ')[0] not in skip_names]
                        for pkg in pkgs:
                            # Handle package names that contain version deps.
                            if ' ' in pkg:
                                pkg = pkg.split(' ')[0]
                            recipe_glob = glob(pkg + '-[v0-9][0-9.]*')
                            if exists(pkg):
                                recipe_glob.append(pkg)
                            if recipe_glob:
                                recipes.appendleft(arg)
                                try_again = True
                                for recipe_dir in recipe_glob:
                                    if pkg in to_build_recursive:
                                        sys.exit(str(e))
                                    print(error_str)
                                    print(("Missing dependency {0}, but found" +
                                           " recipe directory, so building " +
                                           "{0} first").format(pkg))
                                    recipes.appendleft(recipe_dir)
                                    to_build_recursive.append(pkg)
                            else:
                                raise
                    else:
                        raise
                if try_again:
                    continue

                if not args.notest:
                    build.test(m)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)

            already_built.append(m.pkg_fn())
def debug(recipe_or_package_path_or_metadata_tuples, path=None, test=False,
          output_id=None, config=None, verbose=True, link_source_method='auto',
          **kwargs):
    """Set up either build/host or test environments, leaving you with a quick
    tool to debug your package's build or test phase.

    The argument may be a recipe path, an existing build directory (detected
    via work*/metadata_conda_debug.yaml files), a built package tarball
    (test-only) or a list of already-rendered metadata tuples.  Returns a
    shell snippet ("cd <dir> && source <activation file>") the caller can
    use to enter the prepared environment.
    """
    from fnmatch import fnmatch
    import logging
    import os
    import time
    from conda_build.conda_interface import string_types
    from conda_build.build import test as run_test, build as run_build
    from conda_build.utils import CONDA_TARBALL_EXTENSIONS, on_win, LoggingContext
    is_package = False
    default_config = get_or_merge_config(config, **kwargs)
    args = {"set_build_id": False}
    path_is_build_dir = False
    # Look for work* subdirectories containing metadata_conda_debug.yaml —
    # their presence means we were handed an existing build directory.
    workdirs = [os.path.join(recipe_or_package_path_or_metadata_tuples, d)
                for d in (os.listdir(recipe_or_package_path_or_metadata_tuples)
                          if os.path.isdir(recipe_or_package_path_or_metadata_tuples) else [])
                if (d.startswith('work') and
                    os.path.isdir(os.path.join(recipe_or_package_path_or_metadata_tuples, d)))]
    metadatas_conda_debug = [os.path.join(f, "metadata_conda_debug.yaml") for f in workdirs
                             if os.path.isfile(os.path.join(f, "metadata_conda_debug.yaml"))]
    metadatas_conda_debug = sorted(metadatas_conda_debug)
    if len(metadatas_conda_debug):
        path_is_build_dir = True
        path = recipe_or_package_path_or_metadata_tuples
    if not path:
        path = os.path.join(default_config.croot, "debug_{}".format(int(time.time() * 1000)))
    config = get_or_merge_config(config=default_config, croot=path, verbose=verbose,
                                 _prefix_length=10, **args)

    config.channel_urls = get_channel_urls(kwargs)

    metadata_tuples = []

    best_link_source_method = 'skip'
    if isinstance(recipe_or_package_path_or_metadata_tuples, string_types):
        if path_is_build_dir:
            for metadata_conda_debug in metadatas_conda_debug:
                best_link_source_method = 'symlink'
                from conda_build.metadata import MetaData
                metadata = MetaData(metadata_conda_debug, config, {})
                metadata_tuples.append((metadata, False, True))
        else:
            ext = os.path.splitext(recipe_or_package_path_or_metadata_tuples)[1]
            if not ext or not any(ext in _ for _ in CONDA_TARBALL_EXTENSIONS):
                metadata_tuples = render(recipe_or_package_path_or_metadata_tuples,
                                         config=config, **kwargs)
            else:
                # this is a package, we only support testing
                test = True
                is_package = True
    else:
        metadata_tuples = recipe_or_package_path_or_metadata_tuples

    if metadata_tuples:
        outputs = get_output_file_paths(metadata_tuples)
        matched_outputs = outputs
        if output_id:
            matched_outputs = [_ for _ in outputs if fnmatch(os.path.basename(_), output_id)]
            if len(matched_outputs) > 1:
                raise ValueError("Specified --output-id matches more than one output ({}). Please refine your output id so that only "
                                 "a single output is found.".format(matched_outputs))
            elif not matched_outputs:
                raise ValueError("Specified --output-id did not match any outputs. Available outputs are: {} Please check it and try again".format(outputs))
        if len(matched_outputs) > 1 and not path_is_build_dir:
            raise ValueError("More than one output found for this recipe ({}). Please use the --output-id argument to filter down "
                             "to a single output.".format(outputs))
        else:
            matched_outputs = outputs

        target_metadata = metadata_tuples[outputs.index(matched_outputs[0])][0]
        # make sure that none of the _placehold stuff gets added to env paths
        target_metadata.config.prefix_length = 10

    if best_link_source_method == 'symlink':
        for metadata, _, _ in metadata_tuples:
            debug_source_loc = os.path.join(os.sep + 'usr', 'local', 'src', 'conda',
                                            '{}-{}'.format(metadata.get_value('package/name'),
                                                           metadata.get_value('package/version')))
            link_target = os.path.dirname(metadata.meta_path)
            try:
                dn = os.path.dirname(debug_source_loc)
                try:
                    os.makedirs(dn)
                except FileExistsError:
                    pass
                try:
                    # best-effort removal of a stale symlink; ignore failures
                    os.unlink(debug_source_loc)
                except:
                    pass
                print("Making debug info source symlink: {} => {}".format(debug_source_loc, link_target))
                os.symlink(link_target, debug_source_loc)
            except PermissionError as e:
                raise Exception("You do not have the necessary permissions to create symlinks in {}\nerror: {}"
                                .format(dn, str(e)))
            except Exception as e:
                raise Exception("Unknown error creating symlinks in {}\nerror: {}"
                                .format(dn, str(e)))
    ext = ".bat" if on_win else ".sh"

    if verbose:
        log_context = LoggingContext()
    else:
        log_context = LoggingContext(logging.CRITICAL + 1)

    if path_is_build_dir:
        activation_file = "build_env_setup" + ext
        activation_string = "cd {work_dir} && {source} {activation_file}\n".format(
            work_dir=target_metadata.config.work_dir,
            source="call" if on_win else "source",
            activation_file=os.path.join(target_metadata.config.work_dir, activation_file))
    elif not test:
        with log_context:
            run_build(target_metadata, stats={}, provision_only=True)
        activation_file = "build_env_setup" + ext
        activation_string = "cd {work_dir} && {source} {activation_file}\n".format(
            work_dir=target_metadata.config.work_dir,
            source="call" if on_win else "source",
            activation_file=os.path.join(target_metadata.config.work_dir, activation_file))
    else:
        if not is_package:
            raise ValueError("Debugging for test mode is only supported for package files that already exist. "
                             "Please build your package first, then use it to create the debugging environment.")
        else:
            test_input = recipe_or_package_path_or_metadata_tuples
        # use the package to create an env and extract the test files. Stop short of running the tests.
        # tell people what steps to take next
        with log_context:
            run_test(test_input, config=config, stats={}, provision_only=True)
        activation_file = os.path.join(config.test_dir, "conda_test_env_vars" + ext)
        activation_string = "cd {work_dir} && {source} {activation_file}\n".format(
            work_dir=config.test_dir,
            source="call" if on_win else "source",
            activation_file=os.path.join(config.test_dir, activation_file))
    return activation_string
def debug(recipe_or_package_path_or_metadata_tuples, path=None, test=False,
          output_id=None, config=None, verbose=True, link_source_method='auto',
          **kwargs):
    """Set up either build/host or test environments, leaving you with a quick
    tool to debug your package's build or test phase.

    Accepts a recipe path, an existing build directory (recognised by
    work*/metadata_conda_debug.yaml), a built package tarball (test-only),
    or pre-rendered metadata tuples.  Returns the shell command that
    activates the prepared environment.
    """
    from fnmatch import fnmatch
    import logging
    import os
    import time
    from conda_build.conda_interface import string_types
    from conda_build.build import test as run_test, build as run_build
    from conda_build.utils import CONDA_TARBALL_EXTENSIONS, on_win, LoggingContext
    is_package = False
    default_config = get_or_merge_config(config, **kwargs)
    args = {"set_build_id": False}
    path_is_build_dir = False
    # Detect an existing build directory by its work*/metadata_conda_debug.yaml.
    workdirs = [
        os.path.join(recipe_or_package_path_or_metadata_tuples, d)
        for d in (
            os.listdir(recipe_or_package_path_or_metadata_tuples) if os.path.
            isdir(recipe_or_package_path_or_metadata_tuples) else [])
        if (d.startswith('work') and os.path.isdir(
            os.path.join(recipe_or_package_path_or_metadata_tuples, d)))
    ]
    metadatas_conda_debug = [
        os.path.join(f, "metadata_conda_debug.yaml") for f in workdirs
        if os.path.isfile(os.path.join(f, "metadata_conda_debug.yaml"))
    ]
    metadatas_conda_debug = sorted(metadatas_conda_debug)
    if len(metadatas_conda_debug):
        path_is_build_dir = True
        path = recipe_or_package_path_or_metadata_tuples
    if not path:
        path = os.path.join(default_config.croot,
                            "debug_{}".format(int(time.time() * 1000)))
    config = get_or_merge_config(config=default_config,
                                 croot=path,
                                 verbose=verbose,
                                 _prefix_length=10,
                                 **args)

    config.channel_urls = get_channel_urls(kwargs)

    metadata_tuples = []

    best_link_source_method = 'skip'
    if isinstance(recipe_or_package_path_or_metadata_tuples, string_types):
        if path_is_build_dir:
            for metadata_conda_debug in metadatas_conda_debug:
                best_link_source_method = 'symlink'
                from conda_build.metadata import MetaData
                metadata = MetaData(metadata_conda_debug, config, {})
                metadata_tuples.append((metadata, False, True))
        else:
            ext = os.path.splitext(
                recipe_or_package_path_or_metadata_tuples)[1]
            if not ext or not any(ext in _ for _ in CONDA_TARBALL_EXTENSIONS):
                metadata_tuples = render(
                    recipe_or_package_path_or_metadata_tuples,
                    config=config,
                    **kwargs)
            else:
                # this is a package, we only support testing
                test = True
                is_package = True
    else:
        metadata_tuples = recipe_or_package_path_or_metadata_tuples

    if metadata_tuples:
        outputs = get_output_file_paths(metadata_tuples)
        matched_outputs = outputs
        if output_id:
            matched_outputs = [
                _ for _ in outputs if fnmatch(os.path.basename(_), output_id)
            ]
            if len(matched_outputs) > 1:
                raise ValueError(
                    "Specified --output-id matches more than one output ({}). Please refine your output id so that only "
                    "a single output is found.".format(matched_outputs))
            elif not matched_outputs:
                raise ValueError(
                    "Specified --output-id did not match any outputs. Available outputs are: {} Please check it and try again"
                    .format(outputs))
        if len(matched_outputs) > 1 and not path_is_build_dir:
            raise ValueError(
                "More than one output found for this recipe ({}). Please use the --output-id argument to filter down "
                "to a single output.".format(outputs))
        else:
            matched_outputs = outputs

        target_metadata = metadata_tuples[outputs.index(matched_outputs[0])][0]
        # make sure that none of the _placehold stuff gets added to env paths
        target_metadata.config.prefix_length = 10

    if best_link_source_method == 'symlink':
        for metadata, _, _ in metadata_tuples:
            debug_source_loc = os.path.join(
                os.sep + 'usr', 'local', 'src', 'conda',
                '{}-{}'.format(metadata.get_value('package/name'),
                               metadata.get_value('package/version')))
            link_target = os.path.dirname(metadata.meta_path)
            try:
                dn = os.path.dirname(debug_source_loc)
                try:
                    os.makedirs(dn)
                except FileExistsError:
                    pass
                try:
                    # best-effort removal of a stale symlink; ignore failures
                    os.unlink(debug_source_loc)
                except:
                    pass
                print("Making debug info source symlink: {} => {}".format(
                    debug_source_loc, link_target))
                os.symlink(link_target, debug_source_loc)
            except PermissionError as e:
                raise Exception(
                    "You do not have the necessary permissions to create symlinks in {}\nerror: {}"
                    .format(dn, str(e)))
            except Exception as e:
                raise Exception(
                    "Unknown error creating symlinks in {}\nerror: {}".format(
                        dn, str(e)))
    ext = ".bat" if on_win else ".sh"

    if verbose:
        log_context = LoggingContext()
    else:
        log_context = LoggingContext(logging.CRITICAL + 1)

    if path_is_build_dir:
        activation_file = "build_env_setup" + ext
        activation_string = "cd {work_dir} && {source} {activation_file}\n".format(
            work_dir=target_metadata.config.work_dir,
            source="call" if on_win else "source",
            activation_file=os.path.join(target_metadata.config.work_dir,
                                         activation_file))
    elif not test:
        with log_context:
            run_build(target_metadata, stats={}, provision_only=True)
        activation_file = "build_env_setup" + ext
        activation_string = "cd {work_dir} && {source} {activation_file}\n".format(
            work_dir=target_metadata.config.work_dir,
            source="call" if on_win else "source",
            activation_file=os.path.join(target_metadata.config.work_dir,
                                         activation_file))
    else:
        if not is_package:
            raise ValueError(
                "Debugging for test mode is only supported for package files that already exist. "
                "Please build your package first, then use it to create the debugging environment."
            )
        else:
            test_input = recipe_or_package_path_or_metadata_tuples
        # use the package to create an env and extract the test files. Stop short of running the tests.
        # tell people what steps to take next
        with log_context:
            run_test(test_input, config=config, stats={}, provision_only=True)
        activation_file = os.path.join(config.test_dir,
                                       "conda_test_env_vars" + ext)
        activation_string = "cd {work_dir} && {source} {activation_file}\n".format(
            work_dir=config.test_dir,
            source="call" if on_win else "source",
            activation_file=os.path.join(config.test_dir, activation_file))
    return activation_string
def build_wheel(recipe, versions_combis=None,
                conda_channel_urls=(), conda_override_channels=(),
                upload=None, wheel_dir="./build"):
    """Build a wheel from a conda recipe (directory or recipe tarball).

    Parameters
    ----------
    recipe : str
        Path to a recipe directory or a recipe tarball
        (``.tar``, ``.tar.gz``, ``.tgz``, ``.tar.bz2``).
    versions_combis : dict or None
        Mapping of language name (``'python'``, ``'numpy'``) to a list of
        version strings, or None/empty to skip that language.  When a list
        holds several versions the function recurses once per version so
        every combination is built.  Defaults to
        ``{'python': None, 'numpy': None}``.
    conda_channel_urls, conda_override_channels
        Forwarded to the wheel builder.
    upload : list or None
        Only forwarded to recursive invocations; not otherwise consumed here.
    wheel_dir : str
        Output directory for the built wheel.

    Side effects: sets ``CONDA_PY``/``CONDA_NPY`` on the conda-build
    ``config`` object and mirrors set values into ``os_environ``; extracts
    tarball recipes into a temporary directory that is removed afterwards.

    Raises ``RuntimeError`` for versions not of ``major.minor`` form, and
    exits the process on a missing recipe directory or a metadata parse
    error.
    """
    import sys
    import shutil
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    from conda_build.config import config
    from conda_build.metadata import MetaData
    import conda_build_wheel.build_wheel as build

    # FIX: the original signature used mutable defaults ({} and []).  The
    # recursion below assigns into versions_combis, so calls relying on the
    # default dict would mutate one shared object across invocations.
    if versions_combis is None:
        versions_combis = {"python": None, "numpy": None}
    if upload is None:
        upload = []

    conda_version = {
        'python': 'CONDA_PY',
        'numpy': 'CONDA_NPY',
    }

    for lang in ['python', 'numpy']:
        versions = versions_combis[lang]
        if not versions:
            continue
        if versions == ['all']:
            # NOTE(review): `all_versions` is a module-level table defined
            # outside this chunk -- presumably {lang: [known versions]}.
            if all_versions[lang]:
                versions = all_versions[lang]
            else:
                print("'all' is not supported for --%s" % lang)
        if len(versions) > 1:
            # Fan out: recurse once per requested version so that every
            # version combination gets built.
            for ver in versions[:]:
                versions_combis[lang] = [str(ver)]
                build_wheel(recipe, versions_combis,
                            conda_channel_urls=conda_channel_urls,
                            conda_override_channels=conda_override_channels,
                            upload=upload, wheel_dir=wheel_dir)
                # This is necessary to make all combinations build.
                versions_combis[lang] = versions
            return
        else:
            version = versions[0]
            if lang in ('python', 'numpy'):
                # '3.5' -> 35, the CONDA_PY/CONDA_NPY integer convention.
                version = int(version.replace('.', ''))
            setattr(config, conda_version[lang], version)
        if not len(str(version)) in (2, 3) and lang in ['python', 'numpy']:
            if all_versions[lang]:
                raise RuntimeError("%s must be major.minor, like %s, not %s" %
                                   (conda_version[lang],
                                    all_versions[lang][-1] / 10, version))
            else:
                raise RuntimeError("%s must be major.minor, not %s" %
                                   (conda_version[lang], version))

    # Using --python, --numpy etc. is equivalent to using CONDA_PY,
    # CONDA_NPY, etc.  Auto-set those env variables.
    for var in conda_version.values():
        if getattr(config, var):
            # Set the env variable.
            os_environ[var] = str(getattr(config, var))

    with Locked(config.croot):
        # Don't use byte literals for paths in Python 2
        if not PY3:
            recipe = recipe.decode(getpreferredencoding() or 'utf-8')
        if isfile(recipe):
            if recipe.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                recipe_dir = tempfile.mkdtemp()
                # Context manager guarantees the archive handle is closed.
                with tarfile.open(recipe, 'r:*') as t:
                    t.extractall(path=recipe_dir)
                need_cleanup = True
            else:
                print("Ignoring non-recipe: %s" % recipe)
                return
        else:
            recipe_dir = abspath(recipe)
            need_cleanup = False

        # FIX: run the build under try/finally so the temporary extraction
        # directory is removed even when the build raises or the recipe is
        # skipped (the original leaked it on both of those paths).
        try:
            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            try:
                m = MetaData(recipe_dir)
                if m.get_value('build/noarch_python'):
                    config.noarch = True
            except exceptions.YamlParsingError as e:
                sys.stderr.write(e.error_msg())
                sys.exit(1)
            m.check_fields()

            if m.skip():
                print("Skipped: The %s recipe defines build/skip for this "
                      "configuration." % m.dist())
                return
            build.build(m, channel_urls=conda_channel_urls,
                        override_channels=conda_override_channels,
                        wheel_dir=wheel_dir)
        finally:
            if need_cleanup:
                shutil.rmtree(recipe_dir)
def execute(args, parser):
    """conda-build CLI entry point: build, test or inspect one or more recipes.

    ``args`` is the parsed argparse namespace (recipe paths plus flags such
    as --check/--test/--source/--output/--build-only/--post/--skip-existing
    and per-language version selectors); ``parser`` is used only for
    ``parser.error``.  When several versions are requested for a language the
    function recurses once per version so every combination is built.
    Recipes may be directories or tarballs (extracted to a temp dir that is
    removed afterwards).  A build failure whose message names a missing
    dependency re-queues the current recipe behind the dependency's recipe
    directory, if one is found next to the cwd.

    NOTE(review): relies on module-level names defined outside this chunk
    (check_external, delete_trash, update_index, handle_binstar_upload,
    all_versions, os_environ is not used here, PY3, on_win, exceptions,
    deque, glob, exists, getpreferredencoding) -- verify against the full
    module.
    """
    import sys
    import shutil
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    check_external()
    channel_urls = args.channel or ()

    if on_win:
        # needs to happen before any c extensions are imported that might be
        # hard-linked by files in the trash. one of those is markupsafe, used
        # by jinja2. see https://github.com/conda/conda-build/pull/520
        assert 'markupsafe' not in sys.modules
        delete_trash(None)

    # CLI language flag -> conda-build config attribute that carries the
    # selected version (as an int like 27, 34).
    conda_version = {
        'python': 'CONDA_PY',
        'numpy': 'CONDA_NPY',
        'perl': 'CONDA_PERL',
        'R': 'CONDA_R',
    }

    for lang in ['python', 'numpy', 'perl', 'R']:
        versions = getattr(args, lang)
        if not versions:
            continue
        if versions == ['all']:
            if all_versions[lang]:
                versions = all_versions[lang]
            else:
                parser.error("'all' is not supported for --%s" % lang)
        if len(versions) > 1:
            # Fan out: recurse once per version, restoring args afterwards.
            for ver in versions[:]:
                setattr(args, lang, [str(ver)])
                execute(args, parser)
                # This is necessary to make all combinations build.
                setattr(args, lang, versions)
            return
        else:
            # '2.7' -> 27; stored on the global conda-build config.
            version = int(versions[0].replace('.', ''))
            setattr(config, conda_version[lang], version)
        if not len(str(version)) == 2 and lang in ['python', 'numpy']:
            if all_versions[lang]:
                raise RuntimeError("%s must be major.minor, like %s, not %s" %
                                   (conda_version[lang],
                                    all_versions[lang][-1]/10, version))
            else:
                raise RuntimeError("%s must be major.minor, not %s" %
                                   (conda_version[lang], version))

    if args.skip_existing:
        # Refresh the local package index so pkg_fn() membership is accurate.
        update_index(config.bldpkgs_dir)
        index = build.get_build_index(clear_cache=True,
                                      channel_urls=channel_urls,
                                      override_channels=args.override_channels)

    # already_built: package filenames finished during this invocation.
    # to_build: guards the dependency-retry loop against building the same
    # missing dependency twice (i.e. a dependency cycle).
    already_built = []
    to_build = args.recipe[:]
    with Locked(config.croot):
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding() or 'utf-8')
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    # Tarball recipe: unpack to a temp dir, cleaned up below.
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            try:
                m = MetaData(recipe_dir)
                if m.get_value('build/noarch_python'):
                    config.noarch = True
            except exceptions.YamlParsingError as e:
                sys.stderr.write(e.error_msg())
                sys.exit(1)

            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.skip_existing:
                if m.pkg_fn() in index or m.pkg_fn() in already_built:
                    print("%s is already built, skipping." % m.dist())
                    continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m, verbose=not args.quiet,
                           channel_urls=channel_urls,
                           override_channels=args.override_channels)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                try:
                    build.build(m, verbose=not args.quiet, post=post,
                                channel_urls=channel_urls,
                                override_channels=args.override_channels,
                                include_recipe=args.include_recipe)
                except RuntimeError as e:
                    # Dependency resolution failures are detected by matching
                    # the error message text (fragile but historical).
                    error_str = str(e)
                    if error_str.startswith('No packages found') or error_str.startswith('Could not find some'):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(': ')[1]
                        # Handle package names that contain version deps.
                        if ' ' in dep_pkg:
                            dep_pkg = dep_pkg.split(' ')[0]
                        recipe_glob = glob(dep_pkg + '-[v0-9][0-9.]*')
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            # Re-queue this recipe behind its dependency.
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                if dep_pkg in to_build:
                                    # Seen before: give up to avoid a loop.
                                    sys.exit(str(e))
                                print(("Missing dependency {0}, but found" +
                                       " recipe directory, so building " +
                                       "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                                to_build.append(dep_pkg)
                        else:
                            raise
                    else:
                        raise
                if try_again:
                    continue

                if not args.notest:
                    build.test(m, verbose=not args.quiet,
                               channel_urls=channel_urls,
                               override_channels=args.override_channels)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)

            already_built.append(m.pkg_fn())
def execute(args, parser):
    """conda-build CLI entry point (later variant of the driver above).

    Differences from the earlier variant visible in this file: creates
    ``bldpkgs_dir`` before indexing, accepts 3-digit versions (e.g. 2.7.10),
    only int-converts python/numpy versions, mirrors CONDA_* values into
    ``os_environ``, honors ``build/skip``, also retries on SystemExit, and
    additionally parses "Hint:" conflict messages to rebuild conflicting
    packages.

    NOTE(review): relies on module-level names defined outside this chunk
    (check_external, delete_trash, update_index, handle_binstar_upload,
    all_versions, os_environ, PY3, on_win, exceptions, deque, glob, exists,
    getpreferredencoding) -- verify against the full module.
    """
    import sys
    import shutil
    import tarfile
    import tempfile
    from os import makedirs
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    check_external()
    channel_urls = args.channel or ()

    if on_win:
        # needs to happen before any c extensions are imported that might be
        # hard-linked by files in the trash. one of those is markupsafe, used
        # by jinja2. see https://github.com/conda/conda-build/pull/520
        assert "markupsafe" not in sys.modules
        delete_trash(None)

    # CLI language flag -> conda-build config attribute for the version.
    conda_version = {"python": "CONDA_PY", "numpy": "CONDA_NPY", "perl": "CONDA_PERL", "R": "CONDA_R"}

    for lang in ["python", "numpy", "perl", "R"]:
        versions = getattr(args, lang)
        if not versions:
            continue
        if versions == ["all"]:
            if all_versions[lang]:
                versions = all_versions[lang]
            else:
                parser.error("'all' is not supported for --%s" % lang)
        if len(versions) > 1:
            # Fan out: recurse once per version, restoring args afterwards.
            for ver in versions[:]:
                setattr(args, lang, [str(ver)])
                execute(args, parser)
                # This is necessary to make all combinations build.
                setattr(args, lang, versions)
            return
        else:
            version = versions[0]
            if lang in ("python", "numpy"):
                # '2.7' -> 27 / '2.7.10' -> 2710 is rejected below.
                version = int(version.replace(".", ""))
            setattr(config, conda_version[lang], version)
        if not len(str(version)) in (2, 3) and lang in ["python", "numpy"]:
            if all_versions[lang]:
                raise RuntimeError(
                    "%s must be major.minor, like %s, not %s"
                    % (conda_version[lang], all_versions[lang][-1] / 10, version)
                )
            else:
                raise RuntimeError("%s must be major.minor, not %s" % (conda_version[lang], version))

    # Using --python, --numpy etc. is equivalent to using CONDA_PY, CONDA_NPY, etc.
    # Auto-set those env variables
    for var in conda_version.values():
        if getattr(config, var):
            # Set the env variable.
            os_environ[var] = str(getattr(config, var))

    if args.skip_existing:
        if not isdir(config.bldpkgs_dir):
            makedirs(config.bldpkgs_dir)
        update_index(config.bldpkgs_dir)
        index = build.get_build_index(
            clear_cache=True, channel_urls=channel_urls, override_channels=args.override_channels
        )

    # already_built: package filenames finished during this invocation.
    # to_build_recursive: guards the dependency-retry loops against building
    # the same missing dependency twice (dependency cycles).
    already_built = []
    to_build_recursive = []
    with Locked(config.croot):
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding() or "utf-8")
            if isfile(arg):
                if arg.endswith((".tar", ".tar.gz", ".tgz", ".tar.bz2")):
                    # Tarball recipe: unpack to a temp dir, cleaned up below.
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, "r:*")
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            try:
                m = MetaData(recipe_dir)
                if m.get_value("build/noarch_python"):
                    config.noarch = True
            except exceptions.YamlParsingError as e:
                sys.stderr.write(e.error_msg())
                sys.exit(1)

            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.skip_existing:
                if m.pkg_fn() in index or m.pkg_fn() in already_built:
                    print("%s is already built, skipping." % m.dist())
                    continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(
                    m, verbose=not args.quiet, channel_urls=channel_urls,
                    override_channels=args.override_channels
                )
            elif args.source:
                source.provide(m.path, m.get_section("source"))
                print("Source tree in:", source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                try:
                    if m.skip():
                        print("Skipped: The %s recipe defines build/skip for this "
                              "configuration." % m.dist())
                        continue
                    build.build(
                        m,
                        verbose=not args.quiet,
                        post=post,
                        channel_urls=channel_urls,
                        override_channels=args.override_channels,
                        include_recipe=args.include_recipe,
                    )
                except (RuntimeError, SystemExit) as e:
                    # Failure modes are detected by matching the error
                    # message text (fragile but historical).
                    error_str = str(e)
                    if error_str.startswith("No packages found") or error_str.startswith("Could not find some"):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(": ")[1]
                        # Handle package names that contain version deps.
                        if " " in dep_pkg:
                            dep_pkg = dep_pkg.split(" ")[0]
                        recipe_glob = glob(dep_pkg + "-[v0-9][0-9.]*")
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            # Re-queue this recipe behind its dependency.
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                if dep_pkg in to_build_recursive:
                                    # Seen before: give up to avoid a loop.
                                    sys.exit(str(e))
                                print(
                                    (
                                        "Missing dependency {0}, but found" +
                                        " recipe directory, so building " +
                                        "{0} first"
                                    ).format(dep_pkg)
                                )
                                recipes.appendleft(recipe_dir)
                                to_build_recursive.append(dep_pkg)
                        else:
                            raise
                    elif error_str.strip().startswith("Hint:"):
                        # Conflict hint: extract the "- pkg" bullet lines and
                        # try rebuilding the conflicting packages.
                        lines = [line for line in error_str.splitlines() if line.strip().startswith("- ")]
                        pkgs = [line.lstrip("- ") for line in lines]
                        # Typically if a conflict is with one of these
                        # packages, the other package needs to be rebuilt
                        # (e.g., a conflict with 'python 3.5*' and 'x' means
                        # 'x' isn't build for Python 3.5 and needs to be
                        # rebuilt).
                        skip_names = ["python", "r"]
                        pkgs = [pkg for pkg in pkgs if pkg.split(" ")[0] not in skip_names]
                        for pkg in pkgs:
                            # Handle package names that contain version deps.
                            if " " in pkg:
                                pkg = pkg.split(" ")[0]
                            recipe_glob = glob(pkg + "-[v0-9][0-9.]*")
                            if exists(pkg):
                                recipe_glob.append(pkg)
                            if recipe_glob:
                                recipes.appendleft(arg)
                                try_again = True
                                for recipe_dir in recipe_glob:
                                    if pkg in to_build_recursive:
                                        sys.exit(str(e))
                                    print(error_str)
                                    print(
                                        (
                                            "Missing dependency {0}, but found" +
                                            " recipe directory, so building " +
                                            "{0} first"
                                        ).format(pkg)
                                    )
                                    recipes.appendleft(recipe_dir)
                                    to_build_recursive.append(pkg)
                            else:
                                raise
                    else:
                        raise
                if try_again:
                    continue

                if not args.notest:
                    build.test(
                        m, verbose=not args.quiet, channel_urls=channel_urls,
                        override_channels=args.override_channels
                    )
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)

            already_built.append(m.pkg_fn())
def execute(args, parser):
    """conda-build CLI entry point (variant that configures the build module
    via globals).

    Differences from the other variants in this file: channel/verbosity
    settings are pushed onto ``build`` module globals instead of being passed
    per call; trash deletion failures on Windows are reported via
    ``warnings.warn`` instead of crashing; ``--output`` re-parses metadata
    (downloading the source if jinja variables are undefined) before printing
    the package path; tests run with ``move_broken=False``.

    NOTE(review): relies on module-level names defined outside this chunk
    (check_external, delete_trash, warnings, update_index,
    handle_binstar_upload, all_versions, os_environ, PY3, on_win,
    exceptions, deque, glob, exists, getpreferredencoding) -- verify against
    the full module.
    """
    import sys
    import shutil
    import tarfile
    import tempfile
    from os import makedirs
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    check_external()

    # change globals in build module, see comment there as well
    build.channel_urls = args.channel or ()
    build.override_channels = args.override_channels
    build.verbose = not args.quiet

    if on_win:
        try:
            # needs to happen before any c extensions are imported that might be
            # hard-linked by files in the trash. one of those is markupsafe,
            # used by jinja2. see https://github.com/conda/conda-build/pull/520
            delete_trash(None)
        except:
            # when we can't delete the trash, don't crash on AssertionError,
            # instead inform the user and try again next time.
            # see https://github.com/conda/conda-build/pull/744
            # NOTE(review): bare `except:` also swallows KeyboardInterrupt /
            # SystemExit here -- consider `except Exception:`.
            warnings.warn(
                "Cannot delete trash; some c extension has been "
                "imported that is hard-linked by files in the trash. "
                "Will try again on next run.")

    # CLI language flag -> conda-build config attribute for the version.
    conda_version = {
        'python': 'CONDA_PY',
        'numpy': 'CONDA_NPY',
        'perl': 'CONDA_PERL',
        'R': 'CONDA_R',
    }

    for lang in ['python', 'numpy', 'perl', 'R']:
        versions = getattr(args, lang)
        if not versions:
            continue
        if versions == ['all']:
            if all_versions[lang]:
                versions = all_versions[lang]
            else:
                parser.error("'all' is not supported for --%s" % lang)
        if len(versions) > 1:
            # Fan out: recurse once per version, restoring args afterwards.
            for ver in versions[:]:
                setattr(args, lang, [str(ver)])
                execute(args, parser)
                # This is necessary to make all combinations build.
                setattr(args, lang, versions)
            return
        else:
            version = versions[0]
            if lang in ('python', 'numpy'):
                # '2.7' -> 27, the CONDA_PY/CONDA_NPY integer convention.
                version = int(version.replace('.', ''))
            setattr(config, conda_version[lang], version)
        if not len(str(version)) in (2, 3) and lang in ['python', 'numpy']:
            if all_versions[lang]:
                raise RuntimeError("%s must be major.minor, like %s, not %s" %
                                   (conda_version[lang],
                                    all_versions[lang][-1] / 10, version))
            else:
                raise RuntimeError("%s must be major.minor, not %s" %
                                   (conda_version[lang], version))

    # Using --python, --numpy etc. is equivalent to using CONDA_PY, CONDA_NPY, etc.
    # Auto-set those env variables
    for var in conda_version.values():
        if getattr(config, var):
            # Set the env variable.
            os_environ[var] = str(getattr(config, var))

    if args.skip_existing:
        if not isdir(config.bldpkgs_dir):
            makedirs(config.bldpkgs_dir)
        update_index(config.bldpkgs_dir)
        index = build.get_build_index(clear_cache=True)

    # already_built: package filenames finished during this invocation.
    # to_build_recursive: guards the dependency-retry loops against building
    # the same missing dependency twice (dependency cycles).
    already_built = []
    to_build_recursive = []
    with Locked(config.croot):
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding() or 'utf-8')
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    # Tarball recipe: unpack to a temp dir, cleaned up below.
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            try:
                m = MetaData(recipe_dir)
                if m.get_value('build/noarch_python'):
                    config.noarch = True
            except exceptions.YamlParsingError as e:
                sys.stderr.write(e.error_msg())
                sys.exit(1)

            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.skip_existing:
                if m.pkg_fn() in index or m.pkg_fn() in already_built:
                    print("%s is already built, skipping." % m.dist())
                    continue
            if m.skip():
                print("Skipped: The %s recipe defines build/skip for this "
                      "configuration." % m.dist())
                continue
            if args.output:
                try:
                    m.parse_again(permit_undefined_jinja=False)
                except SystemExit:
                    # Something went wrong; possibly due to undefined GIT_ jinja variables.
                    # Maybe we need to actually download the source in order to resolve the build_id.
                    source.provide(m.path, m.get_section('source'))
                    # Parse our metadata again because we did not initialize the source
                    # information before.
                    m.parse_again(permit_undefined_jinja=False)
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m, move_broken=False)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                try:
                    build.build(m, post=post, include_recipe=args.include_recipe)
                except (RuntimeError, SystemExit) as e:
                    # Failure modes are detected by matching the error
                    # message text (fragile but historical).
                    error_str = str(e)
                    if error_str.startswith(
                            'No packages found') or error_str.startswith(
                            'Could not find some'):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(': ')[1]
                        # Handle package names that contain version deps.
                        if ' ' in dep_pkg:
                            dep_pkg = dep_pkg.split(' ')[0]
                        recipe_glob = glob(dep_pkg + '-[v0-9][0-9.]*')
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            # Re-queue this recipe behind its dependency.
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                if dep_pkg in to_build_recursive:
                                    # Seen before: give up to avoid a loop.
                                    sys.exit(str(e))
                                print(("Missing dependency {0}, but found" +
                                       " recipe directory, so building " +
                                       "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                                to_build_recursive.append(dep_pkg)
                        else:
                            raise
                    elif error_str.strip().startswith("Hint:"):
                        # Conflict hint: extract the "- pkg" bullet lines and
                        # try rebuilding the conflicting packages.
                        lines = [
                            line for line in error_str.splitlines()
                            if line.strip().startswith('- ')
                        ]
                        pkgs = [line.lstrip('- ') for line in lines]
                        # Typically if a conflict is with one of these
                        # packages, the other package needs to be rebuilt
                        # (e.g., a conflict with 'python 3.5*' and 'x' means
                        # 'x' isn't build for Python 3.5 and needs to be
                        # rebuilt).
                        skip_names = ['python', 'r']
                        pkgs = [
                            pkg for pkg in pkgs
                            if pkg.split(' ')[0] not in skip_names
                        ]
                        for pkg in pkgs:
                            # Handle package names that contain version deps.
                            if ' ' in pkg:
                                pkg = pkg.split(' ')[0]
                            recipe_glob = glob(pkg + '-[v0-9][0-9.]*')
                            if exists(pkg):
                                recipe_glob.append(pkg)
                            if recipe_glob:
                                recipes.appendleft(arg)
                                try_again = True
                                for recipe_dir in recipe_glob:
                                    if pkg in to_build_recursive:
                                        sys.exit(str(e))
                                    print(error_str)
                                    print(
                                        ("Missing dependency {0}, but found" +
                                         " recipe directory, so building " +
                                         "{0} first").format(pkg))
                                    recipes.appendleft(recipe_dir)
                                    to_build_recursive.append(pkg)
                            else:
                                raise
                    else:
                        raise
                if try_again:
                    continue

                if not args.notest:
                    build.test(m)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)

            already_built.append(m.pkg_fn())
cc.subdir = platform from conda_build.metadata import MetaData res = list() for meta_path in url_file: input_dir = os.path.join('./bioconda-recipes', os.path.dirname(meta_path)) if os.path.exists(input_dir): try: package = dict() package['arch'] = platform # set the architechture before parsing the metadata cc.subdir = platform recipe_meta = MetaData(input_dir) package['name'] = recipe_meta.get_value('package/name') package['version'] = recipe_meta.get_value('package/version') url = recipe_meta.get_value('source/url') if url: package['sha256'] = recipe_meta.get_value('source/sha256') package['md5'] = recipe_meta.get_value('source/md5') else: # git_url and hopefully git_rev git_url = recipe_meta.get_value('source/git_url') git_rev = recipe_meta.get_value('source/git_rev') url = '%s/%s.tar.gz' % (git_url.rstrip('.git'), git_rev) if not git_rev: sys.stderr.write('git revision is missing for: %s\n' % input_dir) continue package['url'] = url
def execute(args, parser):
    """conda-build CLI entry point (earliest variant in this file).

    Differences from the later variants: the supported-version table
    ``all_versions`` is a local constant; every language version (including
    perl/R) is int-converted and must render as two digits; there is no
    build/skip handling, no already-built tracking, and the
    missing-dependency retry has no cycle guard.

    NOTE(review): relies on module-level names defined outside this chunk
    (check_external, update_index, handle_binstar_upload, os_environ is not
    used here, PY3, exceptions, deque, glob, exists, getpreferredencoding)
    -- verify against the full module.
    """
    import sys
    import shutil
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    check_external()
    channel_urls = args.channel or ()

    # Known version codes per language (None => any version accepted).
    all_versions = {
        'python': [26, 27, 33, 34],
        'numpy': [16, 17, 18, 19],
        'perl': None,
        'R': None,
    }
    # CLI language flag -> conda-build config attribute for the version.
    conda_version = {
        'python': 'CONDA_PY',
        'numpy': 'CONDA_NPY',
        'perl': 'CONDA_PERL',
        'R': 'CONDA_R',
    }

    for lang in ['python', 'numpy', 'perl', 'R']:
        versions = getattr(args, lang)
        if not versions:
            continue
        if versions == ['all']:
            if all_versions[lang]:
                versions = all_versions[lang]
            else:
                parser.error("'all' is not supported for --%s" % lang)
        if len(versions) > 1:
            # Fan out: recurse once per version, restoring args afterwards.
            for ver in versions[:]:
                setattr(args, lang, [str(ver)])
                execute(args, parser)
                # This is necessary to make all combinations build.
                setattr(args, lang, versions)
            return
        else:
            # '2.7' -> 27; stored on the global conda-build config.
            version = int(versions[0].replace('.', ''))
            setattr(config, conda_version[lang], version)
        if not len(str(version)) == 2:
            if all_versions[lang]:
                raise RuntimeError("%s must be major.minor, like %s, not %s" %
                                   (conda_version[lang],
                                    all_versions[lang][-1]/10, version))
            else:
                raise RuntimeError("%s must be major.minor, not %s" %
                                   (conda_version[lang], version))

    if args.skip_existing:
        # Refresh the local package index so pkg_fn() membership is accurate.
        update_index(config.bldpkgs_dir)
        index = build.get_build_index(clear_cache=True,
                                      channel_urls=channel_urls,
                                      override_channels=args.override_channels)

    with Locked(config.croot):
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding() or 'utf-8')
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    # Tarball recipe: unpack to a temp dir, cleaned up below.
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            try:
                m = MetaData(recipe_dir)
                if m.get_value('build/noarch_python'):
                    config.noarch = True
            except exceptions.YamlParsingError as e:
                sys.stderr.write(e.error_msg())
                sys.exit(1)

            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.skip_existing:
                if m.pkg_fn() in index:
                    print("%s is already built, skipping." % m.dist())
                    continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m, verbose=not args.quiet,
                           channel_urls=channel_urls,
                           override_channels=args.override_channels)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                try:
                    build.build(m, verbose=not args.quiet, post=post,
                                channel_urls=channel_urls,
                                override_channels=args.override_channels)
                except RuntimeError as e:
                    # Dependency resolution failures are detected by matching
                    # the error message text (fragile but historical).
                    error_str = str(e)
                    if error_str.startswith('No packages found') or error_str.startswith('Could not find some'):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(': ')[1]
                        # Handle package names that contain version deps.
                        if ' ' in dep_pkg:
                            dep_pkg = dep_pkg.split(' ')[0]
                        recipe_glob = glob(dep_pkg + '-[v0-9][0-9.]*')
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            # Re-queue this recipe behind its dependency.
                            # NOTE(review): no cycle guard here -- a missing
                            # dependency whose recipe also fails can loop.
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                print(("Missing dependency {0}, but found" +
                                       " recipe directory, so building " +
                                       "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                        else:
                            raise
                    else:
                        raise
                if try_again:
                    continue

                if not args.notest:
                    build.test(m, verbose=not args.quiet,
                               channel_urls=channel_urls,
                               override_channels=args.override_channels)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)
def execute(args, parser):
    """conda-build CLI entry point (reformatted twin of the earliest variant).

    Behaviorally matches the variant above: local ``all_versions`` table,
    every language version is int-converted and must render as two digits,
    no build/skip handling, no already-built tracking, and the
    missing-dependency retry has no cycle guard.

    NOTE(review): relies on module-level names defined outside this chunk
    (check_external, update_index, handle_binstar_upload, PY3, exceptions,
    deque, glob, exists, getpreferredencoding) -- verify against the full
    module.
    """
    import sys
    import shutil
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    check_external()
    channel_urls = args.channel or ()

    # Known version codes per language (None => any version accepted).
    all_versions = {"python": [26, 27, 33, 34], "numpy": [16, 17, 18, 19], "perl": None, "R": None}
    # CLI language flag -> conda-build config attribute for the version.
    conda_version = {"python": "CONDA_PY", "numpy": "CONDA_NPY", "perl": "CONDA_PERL", "R": "CONDA_R"}

    for lang in ["python", "numpy", "perl", "R"]:
        versions = getattr(args, lang)
        if not versions:
            continue
        if versions == ["all"]:
            if all_versions[lang]:
                versions = all_versions[lang]
            else:
                parser.error("'all' is not supported for --%s" % lang)
        if len(versions) > 1:
            # Fan out: recurse once per version, restoring args afterwards.
            for ver in versions[:]:
                setattr(args, lang, [str(ver)])
                execute(args, parser)
                # This is necessary to make all combinations build.
                setattr(args, lang, versions)
            return
        else:
            # '2.7' -> 27; stored on the global conda-build config.
            version = int(versions[0].replace(".", ""))
            setattr(config, conda_version[lang], version)
        if not len(str(version)) == 2:
            if all_versions[lang]:
                raise RuntimeError(
                    "%s must be major.minor, like %s, not %s"
                    % (conda_version[lang], all_versions[lang][-1] / 10, version)
                )
            else:
                raise RuntimeError("%s must be major.minor, not %s" % (conda_version[lang], version))

    if args.skip_existing:
        # Refresh the local package index so pkg_fn() membership is accurate.
        update_index(config.bldpkgs_dir)
        index = build.get_build_index(
            clear_cache=True, channel_urls=channel_urls, override_channels=args.override_channels
        )

    with Locked(config.croot):
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding() or "utf-8")
            if isfile(arg):
                if arg.endswith((".tar", ".tar.gz", ".tgz", ".tar.bz2")):
                    # Tarball recipe: unpack to a temp dir, cleaned up below.
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, "r:*")
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            try:
                m = MetaData(recipe_dir)
                if m.get_value("build/noarch_python"):
                    config.noarch = True
            except exceptions.YamlParsingError as e:
                sys.stderr.write(e.error_msg())
                sys.exit(1)

            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.skip_existing:
                if m.pkg_fn() in index:
                    print("%s is already built, skipping." % m.dist())
                    continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(
                    m, verbose=not args.quiet, channel_urls=channel_urls,
                    override_channels=args.override_channels
                )
            elif args.source:
                source.provide(m.path, m.get_section("source"))
                print("Source tree in:", source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                try:
                    build.build(
                        m,
                        verbose=not args.quiet,
                        post=post,
                        channel_urls=channel_urls,
                        override_channels=args.override_channels,
                    )
                except RuntimeError as e:
                    # Dependency resolution failures are detected by matching
                    # the error message text (fragile but historical).
                    error_str = str(e)
                    if error_str.startswith("No packages found") or error_str.startswith("Could not find some"):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(": ")[1]
                        # Handle package names that contain version deps.
                        if " " in dep_pkg:
                            dep_pkg = dep_pkg.split(" ")[0]
                        recipe_glob = glob(dep_pkg + "-[v0-9][0-9.]*")
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            # Re-queue this recipe behind its dependency.
                            # NOTE(review): no cycle guard here -- a missing
                            # dependency whose recipe also fails can loop.
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                print(
                                    (
                                        "Missing dependency {0}, but found"
                                        + " recipe directory, so building "
                                        + "{0} first"
                                    ).format(dep_pkg)
                                )
                                recipes.appendleft(recipe_dir)
                        else:
                            raise
                    else:
                        raise
                if try_again:
                    continue

                if not args.notest:
                    build.test(
                        m, verbose=not args.quiet, channel_urls=channel_urls,
                        override_channels=args.override_channels
                    )
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)