def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile, join

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import croot
    from conda_build.metadata import MetaData

    check_external()

    with Locked(croot):
        for arg in args.recipe:
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            m = MetaData(recipe_dir)
            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                build.build(m)
                if not args.notest:
                    build.test(m)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)
def build(meta, test=True):
    """Build (and optionally test) a recipe directory."""
    with Locked(conda_build.config.croot):
        meta.check_fields()
        if os.path.exists(conda_build.config.config.info_dir):
            shutil.rmtree(conda_build.config.config.info_dir)
        build_module.build(meta, verbose=False, post=None)
        if test:
            build_module.test(meta, verbose=False)
    return meta
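A minimal usage sketch for the wrapper above, assuming the same surrounding imports the wrapper itself relies on (Locked, build_module, conda_build.config); the recipe path and printed dist string are placeholders, not taken from the source.

# Hedged usage sketch; assumes the wrapper's module-level imports are in scope.
from conda_build.metadata import MetaData

meta = MetaData('path/to/recipe')  # directory containing meta.yaml (placeholder path)
built = build(meta, test=True)     # build the package, then run its tests
print(built.dist())                # e.g. 'mypkg-1.0-py27_0'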
def test(recipedir_or_package_or_metadata, move_broken=True, config=None, stats=None, **kwargs):
    """Run tests on either packages (.tar.bz2 or extracted) or recipe folders

    For a recipe folder, it renders the recipe enough to know what package to download,
    and obtains it from your currently configured channels."""
    from conda_build.build import test

    if hasattr(recipedir_or_package_or_metadata, 'config'):
        config = recipedir_or_package_or_metadata.config
    else:
        config = get_or_merge_config(config, **kwargs)

    # if people don't pass in an object to capture stats in, they won't get them returned.
    # We'll still track them, though.
    if not stats:
        stats = {}

    with config:
        # This will create a new local build folder if and only if config
        # doesn't already have one. What this means is that if we're
        # running a test immediately after build, we use the one that the
        # build already provided
        test_result = test(recipedir_or_package_or_metadata, config=config,
                           move_broken=move_broken, stats=stats)
    return test_result
def test(recipedir_or_package_or_metadata, move_broken=True, config=None, **kwargs):
    """Run tests on either a package or a recipe folder

    For a recipe folder, it renders the recipe enough to know what package to download,
    and obtains it from your currently configured channels."""
    from conda_build.build import test

    if hasattr(recipedir_or_package_or_metadata, 'config'):
        config = recipedir_or_package_or_metadata.config
    else:
        config = get_or_merge_config(config, **kwargs)

    with config:
        # This will create a new local build folder if and only if config doesn't already have one.
        # What this means is that if we're running a test immediately after build, we use the one
        # that the build already provided
        test_result = test(recipedir_or_package_or_metadata, config=config,
                           move_broken=move_broken)
    return test_result
def test(recipedir_or_package_or_metadata, move_broken=True, config=None, **kwargs):
    import os
    from conda_build.build import test
    from conda_build.render import render_recipe

    config = get_or_merge_config(config, **kwargs)

    if hasattr(recipedir_or_package_or_metadata, 'config'):
        metadata = recipedir_or_package_or_metadata
        recipe_config = metadata.config
    elif os.path.isdir(recipedir_or_package_or_metadata):
        # This will create a new local build folder if and only if config doesn't already have one.
        # What this means is that if we're running a test immediately after build, we use the one
        # that the build already provided
        config.compute_build_id(recipedir_or_package_or_metadata)
        metadata, _, _ = render_recipe(recipedir_or_package_or_metadata, config=config)
        recipe_config = config
    else:
        # fall back to old way (use recipe, rather than package)
        metadata, _, _ = render_recipe(recipedir_or_package_or_metadata,
                                       no_download_source=False, config=config, **kwargs)
        recipe_config = config

    with recipe_config:
        # This will create a new local build folder if and only if config doesn't already have one.
        # What this means is that if we're running a test immediately after build, we use the one
        # that the build already provided
        config.compute_build_id(metadata.name())
        test_result = test(metadata, config=recipe_config, move_broken=move_broken)
    return test_result
def test(recipedir_or_package_or_metadata, move_broken=True, config=None, **kwargs):
    import os
    from conda_build.conda_interface import url_path
    from conda_build.build import test
    from conda_build.render import render_recipe
    from conda_build.utils import get_recipe_abspath, rm_rf
    from conda_build import source

    config = get_or_merge_config(config, **kwargs)

    # we want to know if we're dealing with package input. If so, we can move the input
    # on success.
    is_package = False

    if hasattr(recipedir_or_package_or_metadata, 'config'):
        metadata = recipedir_or_package_or_metadata
        recipe_config = metadata.config
    else:
        recipe_dir, need_cleanup = get_recipe_abspath(recipedir_or_package_or_metadata)
        config.need_cleanup = need_cleanup

        # This will create a new local build folder if and only if config doesn't already have one.
        # What this means is that if we're running a test immediately after build, we use the one
        # that the build already provided
        metadata, _, _ = render_recipe(recipe_dir, config=config)
        recipe_config = config

        # this recipe came from an extracted tarball.
        if need_cleanup:
            # ensure that the local location of the package is indexed, so that conda can find the
            # local package
            local_location = os.path.dirname(recipedir_or_package_or_metadata)
            # strip off extra subdir folders
            for platform in ('win', 'linux', 'osx'):
                if os.path.basename(local_location).startswith(platform + "-"):
                    local_location = os.path.dirname(local_location)
            update_index(local_location, config=config)
            local_url = url_path(local_location)
            # channel_urls is an iterable, but we don't know if it's a tuple or list. Don't know
            # how to add elements.
            recipe_config.channel_urls = list(recipe_config.channel_urls)
            recipe_config.channel_urls.insert(0, local_url)
            is_package = True
            if metadata.meta.get('test') and metadata.meta['test'].get('source_files'):
                source.provide(metadata.path, metadata.get_section('source'), config=config)
            rm_rf(recipe_dir)

    with recipe_config:
        # This will create a new local build folder if and only if config doesn't already have one.
        # What this means is that if we're running a test immediately after build, we use the one
        # that the build already provided
        recipe_config.compute_build_id(metadata.name())
        test_result = test(metadata, config=recipe_config, move_broken=move_broken)

        if (test_result and is_package and hasattr(recipe_config, 'output_folder') and
                recipe_config.output_folder):
            os.rename(recipedir_or_package_or_metadata,
                      os.path.join(recipe_config.output_folder,
                                   os.path.basename(recipedir_or_package_or_metadata)))
    return test_result
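A small self-contained sketch of the platform-subdir stripping step above: the parent of a linux-64/osx-64/win-32 style folder becomes the local channel root that is then prepended to channel_urls. Paths here are illustrative only.

# Hedged sketch of the subdir-stripping above; paths are placeholders.
import os

local_location = '/tmp/pkgs/linux-64'
for platform in ('win', 'linux', 'osx'):
    if os.path.basename(local_location).startswith(platform + "-"):
        local_location = os.path.dirname(local_location)
print(local_location)  # '/tmp/pkgs', later exposed as a file:// channel URL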
def build(meta, test=True):
    """Build (and optionally test) a recipe directory."""
    with Locked(conda_build.config.croot):
        # Check whether verbose is a module-level variable in
        # this version of conda_build and set it properly if it is.
        if 'verbose' in dir(build_module):
            build_module.verbose = False
            kwd = {}
        else:
            kwd = {'verbose': False}
        meta.check_fields()
        if os.path.exists(conda_build.config.config.info_dir):
            shutil.rmtree(conda_build.config.config.info_dir)
        build_module.build(meta, post=None, **kwd)
        if test:
            build_module.test(meta, **kwd)
    return meta
def test(recipedir_or_package_or_metadata, move_broken=True, config=None, **kwargs):
    from conda_build.build import test

    if hasattr(recipedir_or_package_or_metadata, 'config'):
        config = recipedir_or_package_or_metadata.config
    else:
        config = get_or_merge_config(config, **kwargs)

    with config:
        # This will create a new local build folder if and only if config doesn't already have one.
        # What this means is that if we're running a test immediately after build, we use the one
        # that the build already provided
        test_result = test(recipedir_or_package_or_metadata, config=config,
                           move_broken=move_broken)
    return test_result
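A usage sketch for the test() wrappers above; the inputs are placeholders, and the call-through to conda_build.build.test is exactly as shown in the function bodies.

# Hedged usage sketch: test() accepts a recipe folder, a built package,
# or an already-rendered MetaData object. Inputs below are placeholders.
passed = test('path/to/recipe')            # recipe directory
passed = test('mypkg-1.0-py36_0.tar.bz2')  # built package tarball
if not passed:
    raise SystemExit("package tests failed")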
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    check_external()

    all_versions = {
        'python': [26, 27, 33, 34],
        'numpy': [16, 17, 18, 19],
        'perl': None,
        'R': None,
    }
    conda_version = {
        'python': 'CONDA_PY',
        'numpy': 'CONDA_NPY',
        'perl': 'CONDA_PERL',
        'R': 'CONDA_R',
    }

    for lang in ['python', 'numpy', 'perl', 'R']:
        versions = getattr(args, lang)
        if not versions:
            continue
        if versions == ['all']:
            if all_versions[lang]:
                versions = all_versions[lang]
            else:
                parser.error("'all' is not supported for --%s" % lang)
        if len(versions) > 1:
            for ver in versions[:]:
                setattr(args, lang, [str(ver)])
                execute(args, parser)
                # This is necessary to make all combinations build.
                setattr(args, lang, versions)
            return
        else:
            version = int(versions[0].replace('.', ''))
            setattr(config, conda_version[lang], version)
            if not len(str(version)) == 2:
                if all_versions[lang]:
                    raise RuntimeError("%s must be major.minor, like %s, not %s" %
                                       (conda_version[lang], all_versions[lang][-1] / 10, version))
                else:
                    raise RuntimeError("%s must be major.minor, not %s" %
                                       (conda_version[lang], version))

    with Locked(config.croot):
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding() or 'utf-8')
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            try:
                m = MetaData(recipe_dir)
            except exceptions.YamlParsingError as e:
                sys.stderr.write(e.error_msg())
                sys.exit(1)
            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m, verbose=not args.quiet)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                channel_urls = args.channel or ()
                try:
                    build.build(m, verbose=not args.quiet, post=post,
                                channel_urls=channel_urls,
                                override_channels=args.override_channels)
                except RuntimeError as e:
                    error_str = str(e)
                    if error_str.startswith('No packages found') or error_str.startswith('Could not find some'):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(': ')[1]
                        # Handle package names that contain version deps.
                        if ' ' in dep_pkg:
                            dep_pkg = dep_pkg.split(' ')[0]
                        recipe_glob = glob(dep_pkg + '-[v0-9][0-9.]*')
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                print(("Missing dependency {0}, but found" +
                                       " recipe directory, so building " +
                                       "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                        else:
                            raise
                    else:
                        raise
                if try_again:
                    continue

                if not args.notest:
                    build.test(m, verbose=not args.quiet,
                               channel_urls=channel_urls,
                               override_channels=args.override_channels)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    check_external()
    channel_urls = args.channel or ()

    all_versions = {
        'python': [26, 27, 33, 34],
        'numpy': [16, 17, 18, 19],
        'perl': None,
        'R': None,
    }
    conda_version = {
        'python': 'CONDA_PY',
        'numpy': 'CONDA_NPY',
        'perl': 'CONDA_PERL',
        'R': 'CONDA_R',
    }

    for lang in ['python', 'numpy', 'perl', 'R']:
        versions = getattr(args, lang)
        if not versions:
            continue
        if versions == ['all']:
            if all_versions[lang]:
                versions = all_versions[lang]
            else:
                parser.error("'all' is not supported for --%s" % lang)
        if len(versions) > 1:
            for ver in versions[:]:
                setattr(args, lang, [str(ver)])
                execute(args, parser)
                # This is necessary to make all combinations build.
                setattr(args, lang, versions)
            return
        else:
            version = int(versions[0].replace('.', ''))
            setattr(config, conda_version[lang], version)
            if not len(str(version)) == 2:
                if all_versions[lang]:
                    raise RuntimeError("%s must be major.minor, like %s, not %s" %
                                       (conda_version[lang], all_versions[lang][-1] / 10, version))
                else:
                    raise RuntimeError("%s must be major.minor, not %s" %
                                       (conda_version[lang], version))

    if args.skip_existing:
        update_index(config.bldpkgs_dir)
        index = build.get_build_index(clear_cache=True,
                                      channel_urls=channel_urls,
                                      override_channels=args.override_channels)

    with Locked(config.croot):
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding() or 'utf-8')
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            try:
                m = MetaData(recipe_dir)
                if m.get_value('build/noarch_python'):
                    config.noarch = True
            except exceptions.YamlParsingError as e:
                sys.stderr.write(e.error_msg())
                sys.exit(1)
            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.skip_existing:
                if m.pkg_fn() in index:
                    print("%s is already built, skipping." % m.dist())
                    continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m, verbose=not args.quiet,
                           channel_urls=channel_urls,
                           override_channels=args.override_channels)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                try:
                    build.build(m, verbose=not args.quiet, post=post,
                                channel_urls=channel_urls,
                                override_channels=args.override_channels)
                except RuntimeError as e:
                    error_str = str(e)
                    if error_str.startswith('No packages found') or error_str.startswith('Could not find some'):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(': ')[1]
                        # Handle package names that contain version deps.
                        if ' ' in dep_pkg:
                            dep_pkg = dep_pkg.split(' ')[0]
                        recipe_glob = glob(dep_pkg + '-[v0-9][0-9.]*')
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                print(("Missing dependency {0}, but found" +
                                       " recipe directory, so building " +
                                       "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                        else:
                            raise
                    else:
                        raise
                if try_again:
                    continue

                if not args.notest:
                    build.test(m, verbose=not args.quiet,
                               channel_urls=channel_urls,
                               override_channels=args.override_channels)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)
def run(self):
    # Make sure the metadata has the conda attributes, even if the
    # distclass isn't CondaDistribution. We primarily do this to simplify
    # the code below.
    metadata = self.distribution.metadata
    for attr in CondaDistribution.conda_attrs:
        if not hasattr(metadata, attr):
            setattr(metadata, attr, CondaDistribution.conda_attrs[attr])

    # The command line takes precedence
    if self.buildnum is not None:
        metadata.conda_buildnum = self.buildnum

    with Locked(config.croot):
        d = defaultdict(dict)
        # PyPI allows uppercase letters but conda does not, so we fix the
        # name here.
        d['package']['name'] = metadata.name.lower()
        d['package']['version'] = metadata.version
        d['build']['number'] = metadata.conda_buildnum

        # MetaData does the auto stuff if the build string is None
        d['build']['string'] = metadata.conda_buildstr

        d['build']['binary_relocation'] = metadata.conda_binary_relocation
        d['build']['preserve_egg_dir'] = metadata.conda_preserve_egg_dir
        d['build']['features'] = metadata.conda_features
        d['build']['track_features'] = metadata.conda_track_features

        # XXX: I'm not really sure if it is correct to combine requires
        # and install_requires
        d['requirements']['run'] = d['requirements']['build'] = \
            [spec_from_line(i) for i in
             (metadata.requires or []) +
             (getattr(self.distribution, 'install_requires', []) or [])] + \
            ['python']
        if hasattr(self.distribution, 'tests_require'):
            # A lot of packages use extras_require['test'], but
            # tests_require is the one that is officially supported by
            # setuptools.
            d['test']['requires'] = [spec_from_line(i) for i in
                                     self.distribution.tests_require or []]

        d['about']['home'] = metadata.url
        # Don't worry about classifiers. This isn't skeleton pypi. We
        # don't need to make this work with random stuff in the wild. If
        # someone writes their setup.py wrong and this doesn't work, it's
        # their fault.
        d['about']['license'] = metadata.license
        d['about']['summary'] = metadata.description

        # This is similar logic from conda skeleton pypi
        entry_points = getattr(self.distribution, 'entry_points', [])
        if entry_points:
            if isinstance(entry_points, string_types):
                # makes sure it is left-shifted
                newstr = "\n".join(x.strip() for x in entry_points.splitlines())
                c = configparser.ConfigParser()
                entry_points = {}
                try:
                    c.readfp(StringIO(newstr))
                except Exception as err:
                    # This seems to be the best error here
                    raise DistutilsGetoptError("ERROR: entry-points not understood: " +
                                               str(err) + "\nThe string was" + newstr)
                else:
                    for section in c.sections():
                        if section in ['console_scripts', 'gui_scripts']:
                            value = ['%s=%s' % (option, c.get(section, option))
                                     for option in c.options(section)]
                            entry_points[section] = value
                        else:
                            # Make sure setuptools is added as a dependency below
                            entry_points[section] = None

            if not isinstance(entry_points, dict):
                raise DistutilsGetoptError("ERROR: Could not add entry points. They were:\n" +
                                           entry_points)
            else:
                rs = entry_points.get('scripts', [])
                cs = entry_points.get('console_scripts', [])
                gs = entry_points.get('gui_scripts', [])
                # We have *other* kinds of entry-points so we need
                # setuptools at run-time
                if not rs and not cs and not gs and len(entry_points) > 1:
                    d['requirements']['run'].append('setuptools')
                    d['requirements']['build'].append('setuptools')
                entry_list = rs + cs + gs
                if gs and conda.config.platform == 'osx':
                    d['build']['osx_is_app'] = True
                if len(cs + gs) != 0:
                    d['build']['entry_points'] = entry_list
                    if metadata.conda_command_tests is True:
                        d['test']['commands'] = list(map(unicode,
                                                         pypi.make_entry_tests(entry_list)))

        if 'setuptools' in d['requirements']['run']:
            d['build']['preserve_egg_dir'] = True

        if metadata.conda_import_tests:
            if metadata.conda_import_tests is True:
                d['test']['imports'] = ((self.distribution.packages or []) +
                                        (self.distribution.py_modules or []))
            else:
                d['test']['imports'] = metadata.conda_import_tests

        if (metadata.conda_command_tests and not
                isinstance(metadata.conda_command_tests, bool)):
            d['test']['commands'] = list(map(unicode, metadata.conda_command_tests))

        d = dict(d)
        m = MetaData.fromdict(d)
        # Shouldn't fail, but do you really trust the code above?
        m.check_fields()
        build.build(m, post=False)
        # Do the install
        if not PY3:
            # Command is an old-style class in Python 2
            install.run(self)
        else:
            super().run()
        build.build(m, post=True)
        build.test(m)
        if self.binstar_upload:
            class args:
                binstar_upload = self.binstar_upload
            handle_binstar_upload(render.bldpkg_path(m), args)
        else:
            no_upload_message = """\
# If you want to upload this package to anaconda.org later, type:
#
# $ anaconda upload %s
""" % render.bldpkg_path(m)
            print(no_upload_message)
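A usage sketch for the bdist_conda command implemented above, following the distclass convention this module historically documented ("python setup.py bdist_conda"); the exact module path and keyword names are assumptions from that convention, not verified against a specific release.

# Hedged setup.py sketch; module path and conda_* keywords are assumptions.
from distutils.core import setup
import distutils.command.bdist_conda

setup(
    name='mypkg',
    version='1.0',
    distclass=distutils.command.bdist_conda.CondaDistribution,
    conda_buildnum=1,         # maps to build/number in the generated recipe
    conda_import_tests=True,  # auto-generate import tests from packages/py_modules
)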
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    check_external()
    channel_urls = args.channel or ()

    if on_win:
        # needs to happen before any c extensions are imported that might be
        # hard-linked by files in the trash. one of those is markupsafe, used
        # by jinja2. see https://github.com/conda/conda-build/pull/520
        assert 'markupsafe' not in sys.modules
        delete_trash(None)

    conda_version = {
        'python': 'CONDA_PY',
        'numpy': 'CONDA_NPY',
        'perl': 'CONDA_PERL',
        'R': 'CONDA_R',
    }

    for lang in ['python', 'numpy', 'perl', 'R']:
        versions = getattr(args, lang)
        if not versions:
            continue
        if versions == ['all']:
            if all_versions[lang]:
                versions = all_versions[lang]
            else:
                parser.error("'all' is not supported for --%s" % lang)
        if len(versions) > 1:
            for ver in versions[:]:
                setattr(args, lang, [str(ver)])
                execute(args, parser)
                # This is necessary to make all combinations build.
                setattr(args, lang, versions)
            return
        else:
            version = int(versions[0].replace('.', ''))
            setattr(config, conda_version[lang], version)
            if not len(str(version)) == 2 and lang in ['python', 'numpy']:
                if all_versions[lang]:
                    raise RuntimeError("%s must be major.minor, like %s, not %s" %
                                       (conda_version[lang], all_versions[lang][-1] / 10, version))
                else:
                    raise RuntimeError("%s must be major.minor, not %s" %
                                       (conda_version[lang], version))

    if args.skip_existing:
        update_index(config.bldpkgs_dir)
        index = build.get_build_index(clear_cache=True,
                                      channel_urls=channel_urls,
                                      override_channels=args.override_channels)

    already_built = []
    to_build = args.recipe[:]
    with Locked(config.croot):
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding() or 'utf-8')
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            try:
                m = MetaData(recipe_dir)
                if m.get_value('build/noarch_python'):
                    config.noarch = True
            except exceptions.YamlParsingError as e:
                sys.stderr.write(e.error_msg())
                sys.exit(1)
            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.skip_existing:
                if m.pkg_fn() in index or m.pkg_fn() in already_built:
                    print("%s is already built, skipping." % m.dist())
                    continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m, verbose=not args.quiet,
                           channel_urls=channel_urls,
                           override_channels=args.override_channels)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                try:
                    build.build(m, verbose=not args.quiet, post=post,
                                channel_urls=channel_urls,
                                override_channels=args.override_channels,
                                include_recipe=args.include_recipe)
                except RuntimeError as e:
                    error_str = str(e)
                    if error_str.startswith('No packages found') or error_str.startswith('Could not find some'):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(': ')[1]
                        # Handle package names that contain version deps.
                        if ' ' in dep_pkg:
                            dep_pkg = dep_pkg.split(' ')[0]
                        recipe_glob = glob(dep_pkg + '-[v0-9][0-9.]*')
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                if dep_pkg in to_build:
                                    sys.exit(str(e))
                                print(("Missing dependency {0}, but found" +
                                       " recipe directory, so building " +
                                       "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                                to_build.append(dep_pkg)
                        else:
                            raise
                    else:
                        raise
                if try_again:
                    continue

                if not args.notest:
                    build.test(m, verbose=not args.quiet,
                               channel_urls=channel_urls,
                               override_channels=args.override_channels)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)

            already_built.append(m.pkg_fn())
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os import makedirs
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    check_external()

    # change globals in build module, see comment there as well
    build.channel_urls = args.channel or ()
    build.override_channels = args.override_channels
    build.verbose = not args.quiet

    if on_win:
        try:
            # needs to happen before any c extensions are imported that might be
            # hard-linked by files in the trash. one of those is markupsafe,
            # used by jinja2. see https://github.com/conda/conda-build/pull/520
            delete_trash(None)
        except:
            # when we can't delete the trash, don't crash on AssertionError,
            # instead inform the user and try again next time.
            # see https://github.com/conda/conda-build/pull/744
            warnings.warn("Cannot delete trash; some c extension has been "
                          "imported that is hard-linked by files in the trash. "
                          "Will try again on next run.")

    conda_version = {
        'python': 'CONDA_PY',
        'numpy': 'CONDA_NPY',
        'perl': 'CONDA_PERL',
        'R': 'CONDA_R',
    }

    for lang in ['python', 'numpy', 'perl', 'R']:
        versions = getattr(args, lang)
        if not versions:
            continue
        if versions == ['all']:
            if all_versions[lang]:
                versions = all_versions[lang]
            else:
                parser.error("'all' is not supported for --%s" % lang)
        if len(versions) > 1:
            for ver in versions[:]:
                setattr(args, lang, [str(ver)])
                execute(args, parser)
                # This is necessary to make all combinations build.
                setattr(args, lang, versions)
            return
        else:
            version = versions[0]
            if lang in ('python', 'numpy'):
                version = int(version.replace('.', ''))
            setattr(config, conda_version[lang], version)
            if not len(str(version)) in (2, 3) and lang in ['python', 'numpy']:
                if all_versions[lang]:
                    raise RuntimeError("%s must be major.minor, like %s, not %s" %
                                       (conda_version[lang], all_versions[lang][-1] / 10, version))
                else:
                    raise RuntimeError("%s must be major.minor, not %s" %
                                       (conda_version[lang], version))

    # Using --python, --numpy etc. is equivalent to using CONDA_PY, CONDA_NPY, etc.
    # Auto-set those env variables
    for var in conda_version.values():
        if getattr(config, var):
            # Set the env variable.
            os_environ[var] = str(getattr(config, var))

    if args.skip_existing:
        if not isdir(config.bldpkgs_dir):
            makedirs(config.bldpkgs_dir)
        update_index(config.bldpkgs_dir)
        index = build.get_build_index(clear_cache=True)

    already_built = []
    to_build_recursive = []
    with Locked(config.croot):
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding() or 'utf-8')
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            try:
                m = MetaData(recipe_dir)
                if m.get_value('build/noarch_python'):
                    config.noarch = True
            except exceptions.YamlParsingError as e:
                sys.stderr.write(e.error_msg())
                sys.exit(1)
            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.skip_existing:
                if m.pkg_fn() in index or m.pkg_fn() in already_built:
                    print("%s is already built, skipping." % m.dist())
                    continue
            if m.skip():
                print("Skipped: The %s recipe defines build/skip for this "
                      "configuration." % m.dist())
                continue
            if args.output:
                try:
                    m.parse_again(permit_undefined_jinja=False)
                except SystemExit:
                    # Something went wrong; possibly due to undefined GIT_ jinja variables.
                    # Maybe we need to actually download the source in order to resolve
                    # the build_id.
                    source.provide(m.path, m.get_section('source'))
                    # Parse our metadata again because we did not initialize the source
                    # information before.
                    m.parse_again(permit_undefined_jinja=False)
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m, move_broken=False)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                try:
                    build.build(m, post=post, include_recipe=args.include_recipe)
                except (RuntimeError, SystemExit) as e:
                    error_str = str(e)
                    if error_str.startswith('No packages found') or error_str.startswith('Could not find some'):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(': ')[1]
                        # Handle package names that contain version deps.
                        if ' ' in dep_pkg:
                            dep_pkg = dep_pkg.split(' ')[0]
                        recipe_glob = glob(dep_pkg + '-[v0-9][0-9.]*')
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                if dep_pkg in to_build_recursive:
                                    sys.exit(str(e))
                                print(("Missing dependency {0}, but found" +
                                       " recipe directory, so building " +
                                       "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                                to_build_recursive.append(dep_pkg)
                        else:
                            raise
                    elif error_str.strip().startswith("Hint:"):
                        lines = [line for line in error_str.splitlines()
                                 if line.strip().startswith('- ')]
                        pkgs = [line.lstrip('- ') for line in lines]
                        # Typically if a conflict is with one of these
                        # packages, the other package needs to be rebuilt
                        # (e.g., a conflict with 'python 3.5*' and 'x' means
                        # 'x' isn't built for Python 3.5 and needs to be
                        # rebuilt).
                        skip_names = ['python', 'r']
                        pkgs = [pkg for pkg in pkgs
                                if pkg.split(' ')[0] not in skip_names]
                        for pkg in pkgs:
                            # Handle package names that contain version deps.
                            if ' ' in pkg:
                                pkg = pkg.split(' ')[0]
                            recipe_glob = glob(pkg + '-[v0-9][0-9.]*')
                            if exists(pkg):
                                recipe_glob.append(pkg)
                            if recipe_glob:
                                recipes.appendleft(arg)
                                try_again = True
                                for recipe_dir in recipe_glob:
                                    if pkg in to_build_recursive:
                                        sys.exit(str(e))
                                    print(error_str)
                                    print(("Missing dependency {0}, but found" +
                                           " recipe directory, so building " +
                                           "{0} first").format(pkg))
                                    recipes.appendleft(recipe_dir)
                                    to_build_recursive.append(pkg)
                            else:
                                raise
                    else:
                        raise
                if try_again:
                    continue

                if not args.notest:
                    build.test(m)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)

            already_built.append(m.pkg_fn())
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    check_external()

    if args.python:
        if args.python == ['all']:
            for py in [26, 27, 33, 34]:
                args.python = [str(py)]
                execute(args, parser)
            return
        if len(args.python) > 1:
            for py in args.python[:]:
                args.python = [py]
                execute(args, parser)
        else:
            config.CONDA_PY = int(args.python[0].replace('.', ''))
    if args.perl:
        config.CONDA_PERL = args.perl
    if args.numpy:
        if args.numpy == ['all']:
            for npy in [16, 17, 18]:
                args.numpy = [str(npy)]
                execute(args, parser)
            return
        if len(args.numpy) > 1:
            for npy in args.numpy[:]:
                args.numpy = [npy]
                execute(args, parser)
        else:
            config.CONDA_NPY = int(args.numpy[0].replace('.', ''))

    with Locked(config.croot):
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding())
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            m = MetaData(recipe_dir)
            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m, verbose=not args.quiet)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                try:
                    build.build(m, verbose=not args.quiet, post=post)
                except RuntimeError as e:
                    error_str = str(e)
                    if error_str.startswith('No packages found matching:'):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(': ')[1]
                        # Handle package names that contain version deps.
                        if ' ' in dep_pkg:
                            dep_pkg = dep_pkg.split(' ')[0]
                        recipe_glob = glob(dep_pkg + '-[v0-9][0-9.]*')
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                print(("Missing dependency {0}, but found" +
                                       " recipe directory, so building " +
                                       "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                        else:
                            raise
                    else:
                        raise
                if try_again:
                    continue

                if not args.notest:
                    build.test(m, verbose=not args.quiet)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os import makedirs
    from os.path import abspath, isdir, isfile

    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config

    check_external()

    # change globals in build module, see comment there as well
    build.channel_urls = args.channel or ()
    build.override_channels = args.override_channels
    build.verbose = not args.quiet

    if on_win:
        try:
            # needs to happen before any c extensions are imported that might be
            # hard-linked by files in the trash. one of those is markupsafe,
            # used by jinja2. see https://github.com/conda/conda-build/pull/520
            delete_trash(None)
        except:
            # when we can't delete the trash, don't crash on AssertionError,
            # instead inform the user and try again next time.
            # see https://github.com/conda/conda-build/pull/744
            warnings.warn("Cannot delete trash; some c extension has been "
                          "imported that is hard-linked by files in the trash. "
                          "Will try again on next run.")

    set_language_env_vars(args, parser, execute=execute)

    if args.skip_existing:
        for d in config.bldpkgs_dirs:
            if not isdir(d):
                makedirs(d)
            update_index(d)
        index = build.get_build_index(clear_cache=True)

    already_built = set()
    to_build_recursive = []
    recipes = deque(args.recipe)
    while recipes:
        arg = recipes.popleft()
        try_again = False
        # Don't use byte literals for paths in Python 2
        if not PY3:
            arg = arg.decode(getpreferredencoding() or 'utf-8')
        if isfile(arg):
            if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                recipe_dir = tempfile.mkdtemp()
                t = tarfile.open(arg, 'r:*')
                t.extractall(path=recipe_dir)
                t.close()
                need_cleanup = True
            else:
                print("Ignoring non-recipe: %s" % arg)
                continue
        else:
            recipe_dir = abspath(arg)
            need_cleanup = False
        # recurse looking for meta.yaml that is potentially not in immediate folder
        recipe_dir = find_recipe(recipe_dir)
        if not isdir(recipe_dir):
            sys.exit("Error: no such directory: %s" % recipe_dir)

        # this fully renders any jinja templating, throwing an error if any data is missing
        m, need_source_download = render_recipe(recipe_dir, no_download_source=False,
                                                verbose=False)
        if m.get_value('build/noarch_python'):
            config.noarch = True

        if args.check and len(args.recipe) > 1:
            print(m.path)
        m.check_fields()
        if args.check:
            continue
        if m.skip():
            print("Skipped: The %s recipe defines build/skip for this "
                  "configuration." % m.dist())
            continue
        if args.skip_existing:
            # 'or m.pkg_fn() in index' is for conda <4.1 and could be removed in the future.
            if ('local::' + m.pkg_fn() in index or
                    m.pkg_fn() in index or
                    m.pkg_fn() in already_built):
                print(m.dist(), "is already built, skipping.")
                continue
        if args.output:
            print(bldpkg_path(m))
            continue
        elif args.test:
            build.test(m, move_broken=False)
        elif args.source:
            source.provide(m.path, m.get_section('source'), verbose=build.verbose)
            print('Source tree in:', source.get_dir())
        else:
            # This loop recursively builds dependencies if recipes exist
            if args.build_only:
                post = False
                args.notest = True
                args.binstar_upload = False
            elif args.post:
                post = True
                args.notest = True
                args.binstar_upload = False
            else:
                post = None
            try:
                build.build(m, post=post,
                            include_recipe=args.include_recipe,
                            keep_old_work=args.keep_old_work,
                            need_source_download=need_source_download,
                            dirty=args.dirty)
            except (NoPackagesFound, Unsatisfiable) as e:
                error_str = str(e)
                # Typically if a conflict is with one of these
                # packages, the other package needs to be rebuilt
                # (e.g., a conflict with 'python 3.5*' and 'x' means
                # 'x' isn't built for Python 3.5 and needs to be
                # rebuilt).
                skip_names = ['python', 'r']
                add_recipes = []
                for line in error_str.splitlines():
                    if not line.startswith(' - '):
                        continue
                    pkg = line.lstrip(' - ').split(' -> ')[-1]
                    pkg = pkg.strip().split(' ')[0]
                    if pkg in skip_names:
                        continue
                    recipe_glob = glob(pkg + '-[v0-9][0-9.]*')
                    if os.path.exists(pkg):
                        recipe_glob.append(pkg)
                    if recipe_glob:
                        try_again = True
                        for recipe_dir in recipe_glob:
                            if pkg in to_build_recursive:
                                sys.exit(str(e))
                            print(error_str)
                            print(("Missing dependency {0}, but found" +
                                   " recipe directory, so building " +
                                   "{0} first").format(pkg))
                            add_recipes.append(recipe_dir)
                            to_build_recursive.append(pkg)
                    else:
                        raise
                recipes.appendleft(arg)
                recipes.extendleft(reversed(add_recipes))

            if try_again:
                continue

            if not args.notest:
                build.test(m)

        if need_cleanup:
            shutil.rmtree(recipe_dir)

        # outputs message, or does upload, depending on value of args.binstar_upload
        handle_binstar_upload(build.bldpkg_path(m), args)

        already_built.add(m.pkg_fn())
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os import makedirs
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    check_external()
    channel_urls = args.channel or ()

    if on_win:
        # needs to happen before any c extensions are imported that might be
        # hard-linked by files in the trash. one of those is markupsafe, used
        # by jinja2. see https://github.com/conda/conda-build/pull/520
        assert "markupsafe" not in sys.modules
        delete_trash(None)

    conda_version = {"python": "CONDA_PY", "numpy": "CONDA_NPY", "perl": "CONDA_PERL", "R": "CONDA_R"}

    for lang in ["python", "numpy", "perl", "R"]:
        versions = getattr(args, lang)
        if not versions:
            continue
        if versions == ["all"]:
            if all_versions[lang]:
                versions = all_versions[lang]
            else:
                parser.error("'all' is not supported for --%s" % lang)
        if len(versions) > 1:
            for ver in versions[:]:
                setattr(args, lang, [str(ver)])
                execute(args, parser)
                # This is necessary to make all combinations build.
                setattr(args, lang, versions)
            return
        else:
            version = versions[0]
            if lang in ("python", "numpy"):
                version = int(version.replace(".", ""))
            setattr(config, conda_version[lang], version)
            if not len(str(version)) in (2, 3) and lang in ["python", "numpy"]:
                if all_versions[lang]:
                    raise RuntimeError(
                        "%s must be major.minor, like %s, not %s"
                        % (conda_version[lang], all_versions[lang][-1] / 10, version)
                    )
                else:
                    raise RuntimeError("%s must be major.minor, not %s" % (conda_version[lang], version))

    # Using --python, --numpy etc. is equivalent to using CONDA_PY, CONDA_NPY, etc.
    # Auto-set those env variables
    for var in conda_version.values():
        if getattr(config, var):
            # Set the env variable.
            os_environ[var] = str(getattr(config, var))

    if args.skip_existing:
        if not isdir(config.bldpkgs_dir):
            makedirs(config.bldpkgs_dir)
        update_index(config.bldpkgs_dir)
        index = build.get_build_index(
            clear_cache=True, channel_urls=channel_urls, override_channels=args.override_channels
        )

    already_built = []
    to_build_recursive = []
    with Locked(config.croot):
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding() or "utf-8")
            if isfile(arg):
                if arg.endswith((".tar", ".tar.gz", ".tgz", ".tar.bz2")):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, "r:*")
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            try:
                m = MetaData(recipe_dir)
                if m.get_value("build/noarch_python"):
                    config.noarch = True
            except exceptions.YamlParsingError as e:
                sys.stderr.write(e.error_msg())
                sys.exit(1)
            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.skip_existing:
                if m.pkg_fn() in index or m.pkg_fn() in already_built:
                    print("%s is already built, skipping." % m.dist())
                    continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(
                    m, verbose=not args.quiet, channel_urls=channel_urls,
                    override_channels=args.override_channels
                )
            elif args.source:
                source.provide(m.path, m.get_section("source"))
                print("Source tree in:", source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                try:
                    if m.skip():
                        print("Skipped: The %s recipe defines build/skip for this "
                              "configuration." % m.dist())
                        continue
                    build.build(
                        m,
                        verbose=not args.quiet,
                        post=post,
                        channel_urls=channel_urls,
                        override_channels=args.override_channels,
                        include_recipe=args.include_recipe,
                    )
                except (RuntimeError, SystemExit) as e:
                    error_str = str(e)
                    if error_str.startswith("No packages found") or error_str.startswith("Could not find some"):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(": ")[1]
                        # Handle package names that contain version deps.
                        if " " in dep_pkg:
                            dep_pkg = dep_pkg.split(" ")[0]
                        recipe_glob = glob(dep_pkg + "-[v0-9][0-9.]*")
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                if dep_pkg in to_build_recursive:
                                    sys.exit(str(e))
                                print(
                                    ("Missing dependency {0}, but found"
                                     + " recipe directory, so building "
                                     + "{0} first").format(dep_pkg)
                                )
                                recipes.appendleft(recipe_dir)
                                to_build_recursive.append(dep_pkg)
                        else:
                            raise
                    elif error_str.strip().startswith("Hint:"):
                        lines = [line for line in error_str.splitlines() if line.strip().startswith("- ")]
                        pkgs = [line.lstrip("- ") for line in lines]
                        # Typically if a conflict is with one of these
                        # packages, the other package needs to be rebuilt
                        # (e.g., a conflict with 'python 3.5*' and 'x' means
                        # 'x' isn't built for Python 3.5 and needs to be
                        # rebuilt).
                        skip_names = ["python", "r"]
                        pkgs = [pkg for pkg in pkgs if pkg.split(" ")[0] not in skip_names]
                        for pkg in pkgs:
                            # Handle package names that contain version deps.
                            if " " in pkg:
                                pkg = pkg.split(" ")[0]
                            recipe_glob = glob(pkg + "-[v0-9][0-9.]*")
                            if exists(pkg):
                                recipe_glob.append(pkg)
                            if recipe_glob:
                                recipes.appendleft(arg)
                                try_again = True
                                for recipe_dir in recipe_glob:
                                    if pkg in to_build_recursive:
                                        sys.exit(str(e))
                                    print(error_str)
                                    print(
                                        ("Missing dependency {0}, but found"
                                         + " recipe directory, so building "
                                         + "{0} first").format(pkg)
                                    )
                                    recipes.appendleft(recipe_dir)
                                    to_build_recursive.append(pkg)
                            else:
                                raise
                    else:
                        raise
                if try_again:
                    continue

                if not args.notest:
                    build.test(
                        m, verbose=not args.quiet, channel_urls=channel_urls,
                        override_channels=args.override_channels
                    )
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)

            already_built.append(m.pkg_fn())
def run(self):
    # Make sure the metadata has the conda attributes, even if the
    # distclass isn't CondaDistribution. We primarily do this to simplify
    # the code below.
    metadata = self.distribution.metadata
    for attr in CondaDistribution.conda_attrs:
        if not hasattr(metadata, attr):
            setattr(metadata, attr, CondaDistribution.conda_attrs[attr])

    # The command line takes precedence
    if self.buildnum is not None:
        metadata.conda_buildnum = self.buildnum

    with Locked(config.croot):
        d = defaultdict(dict)
        # Needs to be lowercase
        d['package']['name'] = metadata.name.lower()
        d['package']['version'] = metadata.version
        d['build']['number'] = metadata.conda_buildnum

        # MetaData does the auto stuff if the build string is None
        d['build']['string'] = metadata.conda_buildstr

        d['build']['binary_relocation'] = metadata.conda_binary_relocation
        d['build']['preserve_egg_dir'] = metadata.conda_preserve_egg_dir
        d['build']['features'] = metadata.conda_features
        d['build']['track_features'] = metadata.conda_track_features

        # XXX: I'm not really sure if it is correct to combine requires
        # and install_requires
        d['requirements']['run'] = d['requirements']['build'] = \
            [spec_from_line(i) for i in
             (metadata.requires or []) +
             (getattr(self.distribution, 'install_requires', []) or [])] + \
            ['python']
        if hasattr(self.distribution, 'tests_require'):
            # A lot of packages use extras_require['test'], but
            # tests_require is the one that is officially supported by
            # setuptools.
            d['test']['requires'] = [spec_from_line(i) for i in
                                     self.distribution.tests_require or []]

        d['about']['home'] = metadata.url
        # Don't worry about classifiers. This isn't skeleton pypi. We
        # don't need to make this work with random stuff in the wild. If
        # someone writes their setup.py wrong and this doesn't work, it's
        # their fault.
        d['about']['license'] = metadata.license
        d['about']['summary'] = self.distribution.description

        # This is similar logic from conda skeleton pypi
        entry_points = getattr(self.distribution, 'entry_points', [])
        if entry_points:
            if isinstance(entry_points, string_types):
                # makes sure it is left-shifted
                newstr = "\n".join(x.strip() for x in entry_points.split('\n'))
                c = configparser.ConfigParser()
                entry_points = {}
                try:
                    c.readfp(StringIO(newstr))
                except Exception as err:
                    # This seems to be the best error here
                    raise DistutilsGetoptError(
                        "ERROR: entry-points not understood: " +
                        str(err) + "\nThe string was" + newstr)
                else:
                    for section in c.sections():
                        if section in ['console_scripts', 'gui_scripts']:
                            value = ['%s=%s' % (option, c.get(section, option))
                                     for option in c.options(section)]
                            entry_points[section] = value
                        else:
                            # Make sure setuptools is added as a dependency below
                            entry_points[section] = None

            if not isinstance(entry_points, dict):
                raise DistutilsGetoptError(
                    "ERROR: Could not add entry points. They were:\n" +
                    entry_points)
            else:
                cs = entry_points.get('console_scripts', [])
                gs = entry_points.get('gui_scripts', [])
                # We have *other* kinds of entry-points so we need
                # setuptools at run-time
                if not cs and not gs and len(entry_points) > 1:
                    d['requirements']['run'].append('setuptools')
                    d['requirements']['build'].append('setuptools')
                entry_list = cs + gs
                if gs and conda.config.platform == 'osx':
                    d['build']['osx_is_app'] = True
                if len(cs + gs) != 0:
                    d['build']['entry_points'] = entry_list
                    if metadata.conda_command_tests is True:
                        d['test']['commands'] = list(
                            map(unicode, pypi.make_entry_tests(entry_list)))

        if 'setuptools' in d['requirements']['run']:
            d['build']['preserve_egg_dir'] = True

        if metadata.conda_import_tests:
            if metadata.conda_import_tests is True:
                d['test']['imports'] = ((self.distribution.packages or []) +
                                        (self.distribution.py_modules or []))
            else:
                d['test']['imports'] = metadata.conda_import_tests

        if (metadata.conda_command_tests and not
                isinstance(metadata.conda_command_tests, bool)):
            d['test']['commands'] = list(
                map(unicode, metadata.conda_command_tests))

        d = dict(d)
        m = MetaData.fromdict(d)
        # Shouldn't fail, but do you really trust the code above?
        m.check_fields()
        build.build(m, post=False)
        # Do the install
        if not PY3:
            # Command is an old-style class in Python 2
            install.run(self)
        else:
            super().run()
        build.build(m, post=True)
        build.test(m)
        if self.binstar_upload:
            class args:
                binstar_upload = self.binstar_upload
            handle_binstar_upload(build.bldpkg_path(m), args)
        else:
            no_upload_message = """\
# If you want to upload this package to binstar.org later, type:
#
# $ binstar upload %s
""" % build.bldpkg_path(m)
            print(no_upload_message)
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import croot
    from conda_build.metadata import MetaData

    check_external()

    with Locked(croot):
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding())
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            m = MetaData(recipe_dir)
            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                try:
                    build.build(m)
                except RuntimeError as e:
                    error_str = str(e)
                    if error_str.startswith('No packages found matching:'):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(': ')[1].replace(' ', '-')
                        recipe_glob = glob(dep_pkg + '-[v0-9][0-9.]*')
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                print(("Missing dependency {0}, but found" +
                                       " recipe directory, so building " +
                                       "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                        else:
                            raise
                    else:
                        raise
                if try_again:
                    continue

                if not args.notest:
                    build.test(m)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)
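A tiny self-contained sketch of the retry queue used above: on a missing-dependency failure, the original recipe goes back on the front of the deque behind the dependency recipe, so the dependency builds first. Recipe names are illustrative only.

# Hedged sketch of the dependency-retry queue; names are placeholders.
from collections import deque

recipes = deque(['app'])
arg = recipes.popleft()        # 'app' fails with a missing dependency
recipes.appendleft(arg)        # requeue the original recipe...
recipes.appendleft('dep-1.0')  # ...behind the dependency recipe
print(list(recipes))           # ['dep-1.0', 'app'] -> dep builds first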
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    check_external()

    if args.python:
        if args.python == ['all']:
            for py in [26, 27, 33, 34]:
                args.python = [str(py)]
                execute(args, parser)
            return
        if len(args.python) > 1:
            for py in args.python[:]:
                args.python = [py]
                execute(args, parser)
        else:
            config.CONDA_PY = int(args.python[0].replace('.', ''))
    if args.perl:
        config.CONDA_PERL = args.perl
    if args.numpy:
        if args.numpy == ['all']:
            for npy in [16, 17, 18, 19]:
                args.numpy = [str(npy)]
                execute(args, parser)
            return
        if len(args.numpy) > 1:
            for npy in args.numpy[:]:
                args.numpy = [npy]
                execute(args, parser)
        else:
            config.CONDA_NPY = int(args.numpy[0].replace('.', ''))

    with Locked(config.croot):
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding())
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            m = MetaData(recipe_dir)
            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m, verbose=not args.quiet)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                try:
                    build.build(m, verbose=not args.quiet, post=post)
                except RuntimeError as e:
                    error_str = str(e)
                    if error_str.startswith('No packages found'):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(': ')[1]
                        # Handle package names that contain version deps.
                        if ' ' in dep_pkg:
                            dep_pkg = dep_pkg.split(' ')[0]
                        recipe_glob = glob(dep_pkg + '-[v0-9][0-9.]*')
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                print(("Missing dependency {0}, but found" +
                                       " recipe directory, so building " +
                                       "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                        else:
                            raise
                    else:
                        raise
                if try_again:
                    continue

                if not args.notest:
                    build.test(m, verbose=not args.quiet)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)
def execute(args, parser):
    import os
    import sys
    import shutil
    import tarfile
    import tempfile
    import warnings
    from collections import deque
    from glob import glob
    from locale import getpreferredencoding
    from os import makedirs
    from os.path import abspath, isdir, isfile

    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config

    # check_external, on_win, delete_trash, set_language_env_vars,
    # update_index, find_recipe, render_recipe, bldpkg_path, PY3,
    # NoPackagesFound, Unsatisfiable and handle_binstar_upload are
    # module-level names in the original source, assumed in scope here.
    check_external()

    # change globals in build module, see comment there as well
    build.channel_urls = args.channel or ()
    build.override_channels = args.override_channels
    build.verbose = not args.quiet

    if on_win:
        try:
            # needs to happen before any C extensions are imported that might be
            # hard-linked by files in the trash. one of those is markupsafe,
            # used by jinja2. see https://github.com/conda/conda-build/pull/520
            delete_trash(None)
        except:
            # when we can't delete the trash, don't crash on AssertionError,
            # instead inform the user and try again next time.
            # see https://github.com/conda/conda-build/pull/744
            warnings.warn("Cannot delete trash; some C extension has been "
                          "imported that is hard-linked by files in the trash. "
                          "Will try again on next run.")

    set_language_env_vars(args, parser, execute=execute)

    if args.skip_existing:
        for d in config.bldpkgs_dirs:
            if not isdir(d):
                makedirs(d)
            update_index(d)
        index = build.get_build_index(clear_cache=True)

    already_built = set()
    to_build_recursive = []
    recipes = deque(args.recipe)
    while recipes:
        arg = recipes.popleft()
        try_again = False
        # Don't use byte literals for paths in Python 2
        if not PY3:
            arg = arg.decode(getpreferredencoding() or 'utf-8')
        if isfile(arg):
            if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                recipe_dir = tempfile.mkdtemp()
                t = tarfile.open(arg, 'r:*')
                t.extractall(path=recipe_dir)
                t.close()
                need_cleanup = True
            else:
                print("Ignoring non-recipe: %s" % arg)
                continue
        else:
            recipe_dir = abspath(arg)
            need_cleanup = False

        # recurse looking for meta.yaml that is potentially not in immediate folder
        recipe_dir = find_recipe(recipe_dir)
        if not isdir(recipe_dir):
            sys.exit("Error: no such directory: %s" % recipe_dir)

        # this fully renders any jinja templating, throwing an error if any data
        # is missing
        m, need_source_download = render_recipe(recipe_dir, no_download_source=False,
                                                verbose=False, dirty=args.dirty)
        if m.get_value('build/noarch_python'):
            config.noarch = True

        if args.check and len(args.recipe) > 1:
            print(m.path)
        m.check_fields()

        if args.check:
            continue
        if m.skip():
            print("Skipped: The %s recipe defines build/skip for this "
                  "configuration." % m.dist())
            continue
        if args.skip_existing:
            # 'or m.pkg_fn() in index' is for conda <4.1 and could be removed
            # in the future.
            if ('local::' + m.pkg_fn() in index or
                    m.pkg_fn() in index or
                    m.pkg_fn() in already_built):
                print(m.dist(), "is already built, skipping.")
                continue
        if args.output:
            print(bldpkg_path(m))
            continue
        elif args.test:
            build.test(m, move_broken=False)
        elif args.source:
            source.provide(m.path, m.get_section('source'), verbose=build.verbose)
            print('Source tree in:', source.get_dir())
        else:
            # This loop recursively builds dependencies if recipes exist
            if args.build_only:
                post = False
                args.notest = True
                args.binstar_upload = False
            elif args.post:
                post = True
                args.notest = True
                args.binstar_upload = False
            else:
                post = None
            try:
                build.build(m, post=post,
                            include_recipe=args.include_recipe,
                            keep_old_work=args.keep_old_work,
                            need_source_download=need_source_download,
                            dirty=args.dirty)
            except (NoPackagesFound, Unsatisfiable) as e:
                error_str = str(e)
                # Typically if a conflict is with one of these
                # packages, the other package needs to be rebuilt
                # (e.g., a conflict with 'python 3.5*' and 'x' means
                # 'x' isn't built for Python 3.5 and needs to be
                # rebuilt).
                skip_names = ['python', 'r']
                add_recipes = []
                for line in error_str.splitlines():
                    if not line.startswith(' - '):
                        continue
                    pkg = line.lstrip(' - ').split(' -> ')[-1]
                    pkg = pkg.strip().split(' ')[0]
                    if pkg in skip_names:
                        continue
                    recipe_glob = glob(pkg + '-[v0-9][0-9.]*')
                    if os.path.exists(pkg):
                        recipe_glob.append(pkg)
                    if recipe_glob:
                        try_again = True
                        for recipe_dir in recipe_glob:
                            if pkg in to_build_recursive:
                                sys.exit(str(e))
                            print(error_str)
                            print(("Missing dependency {0}, but found" +
                                   " recipe directory, so building " +
                                   "{0} first").format(pkg))
                            add_recipes.append(recipe_dir)
                            to_build_recursive.append(pkg)
                    else:
                        raise
                recipes.appendleft(arg)
                recipes.extendleft(reversed(add_recipes))

            if try_again:
                continue

            if not args.notest:
                build.test(m)

        if need_cleanup:
            shutil.rmtree(recipe_dir)

        # outputs message, or does upload, depending on value of args.binstar_upload
        handle_binstar_upload(build.bldpkg_path(m), args)

        already_built.add(m.pkg_fn())
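
# The NoPackagesFound/Unsatisfiable handler above drives a retry queue: the
# failing recipe is pushed back onto the deque and its dependency recipes are
# pushed in front of it; extendleft(reversed(...)) keeps them in their
# original order. A minimal stdlib-only sketch of that queue discipline, with
# toy data standing in for recipes and solver failures (not conda-build API):
from collections import deque

deps = {'a': ['b', 'c'], 'b': [], 'c': []}
built = set()
recipes = deque(['a'])

while recipes:
    name = recipes.popleft()
    missing = [d for d in deps[name] if d not in built]
    if missing:
        recipes.appendleft(name)                # retry the failing recipe later
        recipes.extendleft(reversed(missing))   # deps go first, in order
        continue
    built.add(name)
    print('built', name)                        # prints b, c, then a
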
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    import warnings
    from collections import deque
    from glob import glob
    from locale import getpreferredencoding
    from os import environ as os_environ, makedirs
    from os.path import abspath, exists, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    # check_external, on_win, delete_trash, all_versions, update_index,
    # exceptions, PY3 and handle_binstar_upload are module-level names in the
    # original source, assumed in scope here.
    check_external()

    # change globals in build module, see comment there as well
    build.channel_urls = args.channel or ()
    build.override_channels = args.override_channels
    build.verbose = not args.quiet

    if on_win:
        try:
            # needs to happen before any C extensions are imported that might be
            # hard-linked by files in the trash. one of those is markupsafe,
            # used by jinja2. see https://github.com/conda/conda-build/pull/520
            delete_trash(None)
        except:
            # when we can't delete the trash, don't crash on AssertionError,
            # instead inform the user and try again next time.
            # see https://github.com/conda/conda-build/pull/744
            warnings.warn("Cannot delete trash; some C extension has been "
                          "imported that is hard-linked by files in the trash. "
                          "Will try again on next run.")

    conda_version = {
        'python': 'CONDA_PY',
        'numpy': 'CONDA_NPY',
        'perl': 'CONDA_PERL',
        'R': 'CONDA_R',
    }

    for lang in ['python', 'numpy', 'perl', 'R']:
        versions = getattr(args, lang)
        if not versions:
            continue
        if versions == ['all']:
            if all_versions[lang]:
                versions = all_versions[lang]
            else:
                parser.error("'all' is not supported for --%s" % lang)
        if len(versions) > 1:
            for ver in versions[:]:
                setattr(args, lang, [str(ver)])
                execute(args, parser)
                # This is necessary to make all combinations build.
                setattr(args, lang, versions)
            return
        else:
            version = versions[0]
            if lang in ('python', 'numpy'):
                version = int(version.replace('.', ''))
            setattr(config, conda_version[lang], version)
        if not len(str(version)) in (2, 3) and lang in ['python', 'numpy']:
            if all_versions[lang]:
                raise RuntimeError("%s must be major.minor, like %s, not %s" %
                                   (conda_version[lang],
                                    all_versions[lang][-1] / 10, version))
            else:
                raise RuntimeError("%s must be major.minor, not %s" %
                                   (conda_version[lang], version))

    # Using --python, --numpy etc. is equivalent to using CONDA_PY, CONDA_NPY,
    # etc. Auto-set those env variables
    for var in conda_version.values():
        if getattr(config, var):
            # Set the env variable.
            os_environ[var] = str(getattr(config, var))

    if args.skip_existing:
        if not isdir(config.bldpkgs_dir):
            makedirs(config.bldpkgs_dir)
        update_index(config.bldpkgs_dir)
        index = build.get_build_index(clear_cache=True)

    already_built = []
    to_build_recursive = []
    with Locked(config.croot):
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding() or 'utf-8')
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            try:
                m = MetaData(recipe_dir)
                if m.get_value('build/noarch_python'):
                    config.noarch = True
            except exceptions.YamlParsingError as e:
                sys.stderr.write(e.error_msg())
                sys.exit(1)

            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()

            if args.check:
                continue
            if args.skip_existing:
                if m.pkg_fn() in index or m.pkg_fn() in already_built:
                    print("%s is already built, skipping." % m.dist())
                    continue
            if m.skip():
                print("Skipped: The %s recipe defines build/skip for this "
                      "configuration." % m.dist())
                continue
            if args.output:
                try:
                    m.parse_again(permit_undefined_jinja=False)
                except SystemExit:
                    # Something went wrong; possibly due to undefined GIT_ jinja
                    # variables. Maybe we need to actually download the source
                    # in order to resolve the build_id.
                    source.provide(m.path, m.get_section('source'))
                    # Parse our metadata again because we did not initialize the
                    # source information before.
                    m.parse_again(permit_undefined_jinja=False)
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m, move_broken=False)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                try:
                    build.build(m, post=post, include_recipe=args.include_recipe)
                except (RuntimeError, SystemExit) as e:
                    error_str = str(e)
                    if (error_str.startswith('No packages found') or
                            error_str.startswith('Could not find some')):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(': ')[1]
                        # Handle package names that contain version deps.
                        if ' ' in dep_pkg:
                            dep_pkg = dep_pkg.split(' ')[0]
                        recipe_glob = glob(dep_pkg + '-[v0-9][0-9.]*')
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                if dep_pkg in to_build_recursive:
                                    sys.exit(str(e))
                                print(("Missing dependency {0}, but found" +
                                       " recipe directory, so building " +
                                       "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                                to_build_recursive.append(dep_pkg)
                        else:
                            raise
                    elif error_str.strip().startswith("Hint:"):
                        lines = [line for line in error_str.splitlines()
                                 if line.strip().startswith('- ')]
                        pkgs = [line.lstrip('- ') for line in lines]
                        # Typically if a conflict is with one of these
                        # packages, the other package needs to be rebuilt
                        # (e.g., a conflict with 'python 3.5*' and 'x' means
                        # 'x' isn't built for Python 3.5 and needs to be
                        # rebuilt).
                        skip_names = ['python', 'r']
                        pkgs = [pkg for pkg in pkgs
                                if pkg.split(' ')[0] not in skip_names]
                        for pkg in pkgs:
                            # Handle package names that contain version deps.
                            if ' ' in pkg:
                                pkg = pkg.split(' ')[0]
                            recipe_glob = glob(pkg + '-[v0-9][0-9.]*')
                            if exists(pkg):
                                recipe_glob.append(pkg)
                            if recipe_glob:
                                recipes.appendleft(arg)
                                try_again = True
                                for recipe_dir in recipe_glob:
                                    if pkg in to_build_recursive:
                                        sys.exit(str(e))
                                    print(error_str)
                                    print(("Missing dependency {0}, but found" +
                                           " recipe directory, so building " +
                                           "{0} first").format(pkg))
                                    recipes.appendleft(recipe_dir)
                                    to_build_recursive.append(pkg)
                            else:
                                raise
                    else:
                        raise
                if try_again:
                    continue

                if not args.notest:
                    build.test(m)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)

            already_built.append(m.pkg_fn())
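
# The "Hint:" branch above recovers package names from a solver error message
# and skips python/r, whose conflicts usually mean the *other* package needs
# rebuilding. A minimal stdlib-only sketch of that parsing; the message text
# below is a made-up example, not real solver output:
error_str = """Hint: the following packages conflict with each other:
- python 3.5*
- foo 1.0
- bar"""

skip_names = ['python', 'r']
lines = [line for line in error_str.splitlines() if line.strip().startswith('- ')]
pkgs = [line.lstrip('- ') for line in lines]
print([p.split(' ')[0] for p in pkgs if p.split(' ')[0] not in skip_names])
# -> ['foo', 'bar']
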