def built_distribution_already_exists(cli, meta, owner):
    """
    Checks to see whether the built recipe (aka distribution) already
    exists on the owner/user's binstar account.

    """
    plat = 'noarch' if meta.noarch else conda.config.subdir
    distro_name = '{}/{}.tar.bz2'.format(plat, meta.dist())
    try:
        config = Config()
        fname = bldpkg_path(meta, config)
    except TypeError:
        # conda-build < 2.0.0 takes a single argument (no Config object)
        fname = bldpkg_path(meta)

    try:
        dist_info = cli.distribution(owner, meta.name(), meta.version(),
                                     distro_name)
    except binstar_client.errors.NotFound:
        dist_info = {}

    exists = bool(dist_info)
    # Unfortunately, we cannot check the md5 quality of the built distribution, as
    # this will depend on fstat information such as modification date (because
    # distributions are tar files). Therefore we can only assume that the distribution
    # just built, and the one on anaconda.org are the same.
    # if exists:
    #     md5_on_binstar = dist_info.get('md5')
    #     with open(fname, 'rb') as fh:
    #         md5_of_build = hashlib.md5(fh.read()).hexdigest()
    #
    #     if md5_on_binstar != md5_of_build:
    #         raise ValueError('This build ({}), and the build already on binstar '
    #                          '({}) are different.'.format(md5_of_build, md5_on_binstar))
    return exists
def built_distribution_already_exists(cli, meta, owner):
    """
    Checks to see whether the built recipe (aka distribution) already
    exists on the owner/user's binstar account.

    """
    distro_name = '{}/{}.tar.bz2'.format(conda.config.subdir, meta.dist())
    try:
        config = Config()
        fname = bldpkg_path(meta, config)
    except TypeError:
        # conda-build < 2.0.0 takes a single argument (no Config object)
        fname = bldpkg_path(meta)

    try:
        dist_info = cli.distribution(owner, meta.name(), meta.version(),
                                     distro_name)
    except binstar_client.errors.NotFound:
        dist_info = {}

    exists = bool(dist_info)
    # Unfortunately, we cannot check the md5 quality of the built distribution, as
    # this will depend on fstat information such as modification date (because
    # distributions are tar files). Therefore we can only assume that the distribution
    # just built, and the one on anaconda.org are the same.
    # if exists:
    #     md5_on_binstar = dist_info.get('md5')
    #     with open(fname, 'rb') as fh:
    #         md5_of_build = hashlib.md5(fh.read()).hexdigest()
    #
    #     if md5_on_binstar != md5_of_build:
    #         raise ValueError('This build ({}), and the build already on binstar '
    #                          '({}) are different.'.format(md5_of_build, md5_on_binstar))
    return exists
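The two variants above differ only in how they resolve the built package's path across conda-build versions. That version shim can be pulled out on its own; the following is a minimal sketch, assuming the conda-build 2.x Config import path, and the helper name resolve_bldpkg_path is illustrative rather than taken from either snippet:

from conda_build.build import bldpkg_path

def resolve_bldpkg_path(meta):
    # Illustrative helper (not from the snippets above): conda-build >= 2.0.0
    # expects a Config object, while older releases take only the metadata.
    try:
        from conda_build.config import Config
        return bldpkg_path(meta, Config())
    except (ImportError, TypeError):
        # conda-build < 2.0.0: bldpkg_path(meta) is the only signature.
        return bldpkg_path(meta)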
def main():
    token = os.environ.get('BINSTAR_TOKEN')

    description = ('Upload or check consistency of a built version of a '
                   'conda recipe with binstar. Note: The existence of the '
                   'BINSTAR_TOKEN environment variable determines '
                   'whether the upload should actually take place.')
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument('recipe_dir', help='the conda recipe directory')
    parser.add_argument('owner', help='the binstar owner/user')
    parser.add_argument('--channel', help='the binstar channel', default='main')
    args = parser.parse_args()
    recipe_dir, owner, channel = args.recipe_dir, args.owner, args.channel

    cli = get_server_api(token=token)
    meta_main = MetaData(recipe_dir)
    for _, meta in meta_main.get_output_metadata_set(files=None):
        print("Processing {}".format(meta.name()))
        if meta.skip():
            print("No upload to take place - this configuration was skipped in build/skip.")
            continue

        exists = built_distribution_already_exists(cli, meta, owner)
        if token:
            if not exists:
                upload(cli, meta, owner, channel)
                print('Uploaded {}'.format(bldpkg_path(meta)))
            else:
                print('Distribution {} already exists for {}.'
                      ''.format(bldpkg_path(meta), owner))
        else:
            print("No BINSTAR_TOKEN present, so no upload is taking place. "
                  "The distribution just built {} already available for {}."
                  "".format('is' if exists else 'is not', owner))
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile, join

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import croot
    from conda_build.metadata import MetaData

    check_external()

    with Locked(croot):
        for arg in args.recipe:
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            m = MetaData(recipe_dir)
            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()

            if args.check:
                continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                build.build(m)
                if not args.notest:
                    build.test(m)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)
def upload(cli, meta, owner, channels=['main'], config=None):
    """Upload a distribution, given the build metadata."""
    if hasattr(conda_build, 'api'):
        fname = bldpkg_path(meta, config)
    else:
        fname = bldpkg_path(meta)
    package_type = detect_package_type(fname)
    package_attrs, release_attrs, file_attrs = get_attrs(package_type, fname)
    package_name = package_attrs['name']
    version = release_attrs['version']

    # Check the package exists, otherwise create one.
    try:
        cli.package(owner, package_name)
    except binstar_client.NotFound:
        print('Creating the {} package on {}'.format(package_name, owner))
        summary = package_attrs['summary']
        cli.add_package(owner, package_name, summary,
                        package_attrs.get('license'), public=True)

    # Check the release exists, otherwise create one.
    try:
        cli.release(owner, package_name, version)
    except binstar_client.NotFound:
        # TODO: Add readme.md support for descriptions?
        cli.add_release(owner, package_name, version, requirements=[],
                        announce=None, description='')

    try:
        cli.distribution(owner, package_name, version, file_attrs['basename'])
    except binstar_client.NotFound:
        # The file doesn't exist.
        pass
    else:
        print('Distribution %s already exists ... removing'
              % (file_attrs['basename'],))
        cli.remove_dist(owner, package_name, version, file_attrs['basename'])

    with open(fname, 'rb') as fd:
        print('\nUploading file %s/%s/%s/%s to %s...'
              % (owner, package_name, version, file_attrs['basename'], channels))
        upload_info = cli.upload(owner, package_name, version,
                                 file_attrs['basename'], fd, package_type,
                                 description='',
                                 dependencies=file_attrs.get('dependencies'),
                                 attrs=file_attrs['attrs'],
                                 channels=channels)
        return upload_info
def get_conda_build_path(recipe_dir):
    from conda_build.metadata import MetaData
    from conda_build.build import bldpkg_path

    m = MetaData(recipe_dir)
    try:
        # conda-build >= 2
        from conda_build.config import Config
        config = Config()
        fname = bldpkg_path(m, config)
    except TypeError:
        # conda-build < 2
        fname = bldpkg_path(m)
    return fname.strip()
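A quick usage sketch for the helper above; the ./recipe path is a placeholder, not part of the original snippet:

# Hypothetical layout: ./recipe/meta.yaml next to this script.
pkg_path = get_conda_build_path('recipe')
print('conda-build will write the package to:', pkg_path)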
def build_package(package, version=None, noarch_python=False):
    if ' ' in package:
        package, version = package.split(' ')
    try:
        directory = build_recipe(package, version=version)
        dependencies = convert_recipe(directory, package,
                                      noarch_python=noarch_python)
    except RuntimeError:
        directory, dependencies = make_recipe(package, version,
                                              noarch_python=noarch_python)

    return_code = 0
    try:
        print("package = %s" % package)
        print("  dependencies = %s" % dependencies)

        # Dependencies will be either package_name or
        # package_name version_number
        # Only == dependency specs get version numbers
        # All else are just handled without a version spec
        for depend in dependencies:
            build_package(depend)

        args = build_template.format(directory).split()
        print("Building conda package for {0}".format(package.lower()))
        try:
            utils.execute(args, check_exit_code=True)
        except subprocess.CalledProcessError as exc:
            # CalledProcessError exposes the exit status as `returncode`.
            return_code = exc.returncode
        else:
            m = MetaData(directory)
            handle_binstar_upload(build.bldpkg_path(m))
    finally:
        rm_rf(directory)
    return return_code
def main():
    p = get_render_parser()
    p.add_argument(
        '-f', '--file',
        action="store",
        help="write YAML to file, given as argument here. "
             "Overwrites existing files."
    )
    # we do this one separately because we only allow one entry to conda render
    p.add_argument(
        'recipe',
        action="store",
        metavar='RECIPE_PATH',
        choices=RecipeCompleter(),
        help="Path to recipe directory.",
    )
    args = p.parse_args()
    set_language_env_vars(args, p)

    metadata = render_recipe(find_recipe(args.recipe),
                             no_download_source=args.no_source)
    if args.output:
        print(bldpkg_path(metadata))
    else:
        output = yaml.dump(MetaYaml(metadata.meta), Dumper=IndentDumper,
                           default_flow_style=False, indent=4)
        if args.file:
            with open(args.file, "w") as f:
                f.write(output)
        else:
            print(output)
def package_built_name(package, root_dir):
    package_dir = os.path.join(root_dir, package)
    if hasattr(conda_build, 'api'):
        return conda_build.api.get_output_file_path(package_dir)
    else:
        meta = MetaData(package_dir)
        return bldpkg_path(meta)
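How this helper might be called; the ./recipes directory and the mypkg name are placeholders for illustration only:

# Placeholder layout: ./recipes/mypkg/meta.yaml
built = package_built_name('mypkg', './recipes')
print('Expected package artifact:', built)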
def build_package(package, version=None):
    if conda_package_exists(package):
        return 0
    if ' ' in package:
        package, version = package.split(' ')
    try:
        directory = build_recipe(package, version=version)
        dependencies = convert_recipe(directory, package)
    except RuntimeError:
        directory, dependencies = make_recipe(package, version)

    try:
        print("package = %s" % package)
        print("  dependencies = %s" % dependencies)

        # Dependencies will be either package_name or
        # package_name version_number
        # Only == dependency specs get version numbers
        # All else are just handled without a version spec
        for depend in dependencies:
            build_package(depend)

        args = build_template.format(directory).split()
        print("Building conda package for {0}".format(package.lower()))
        result = subprocess.Popen(args).wait()
        if result == 0 and binstar_upload:
            m = MetaData(directory)
            handle_binstar_upload(build.bldpkg_path(m))
    finally:
        rm_rf(directory)
    return result
def built_distribution_already_exists(cli, meta, owner):
    """
    Checks to see whether the built recipe (aka distribution) already
    exists on the owner/user's binstar account.

    """
    distro_name = '{}/{}.tar.bz2'.format(conda.config.subdir, meta.dist())
    fname = bldpkg_path(meta)
    try:
        dist_info = cli.distribution(owner, meta.name(), meta.version(),
                                     distro_name)
    except binstar_client.errors.NotFound:
        dist_info = {}

    exists = bool(dist_info)
    if exists:
        md5_on_binstar = dist_info.get('md5')
        with open(fname, 'rb') as fh:
            md5_of_build = hashlib.md5(fh.read()).hexdigest()

        if md5_on_binstar != md5_of_build:
            raise ValueError('This build ({}), and the build already on binstar '
                             '({}) are different.'.format(md5_of_build, md5_on_binstar))
    return exists
def upload(cli, meta, owner, channels):
    with get_temp_token(cli.token) as fn:
        subprocess.check_call(['anaconda', '--quiet', '-t', fn,
                               'upload', bldpkg_path(meta),
                               '--user={}'.format(owner),
                               '--channel={}'.format(channels)],
                              env=os.environ)
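get_temp_token is referenced above but not defined in this snippet. A plausible implementation is sketched below, assuming the helper merely persists the token to a temporary file and removes it afterwards; this is a guess at the author's helper, not its actual source:

import contextlib
import os
import tempfile

@contextlib.contextmanager
def get_temp_token(token):
    # Assumed behaviour: write the token to a unique temp file so the
    # anaconda CLI can read it via -t <file>, then clean the file up.
    fd, fname = tempfile.mkstemp(suffix='.token')
    try:
        with os.fdopen(fd, 'w') as fh:
            fh.write(token)
        yield fname
    finally:
        os.remove(fname)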
def build(self, meta, config):
    print('Building ', meta.dist())
    config = meta.vn_context(config=config)
    try:
        conda_build.api.build(meta.meta, config=config)
        return conda_build.api.get_output_file_path(meta.meta, config)
    except AttributeError:
        with meta.vn_context():
            return bldpkg_path(build.build(meta.meta))
def upload(cli, meta, owner, channels):
    try:
        with open('binstar.token', 'w') as fh:
            fh.write(cli.token)
        subprocess.check_call(['anaconda', '--quiet', '-t', 'binstar.token',
                               'upload', bldpkg_path(meta),
                               '--user={}'.format(owner),
                               '--channel={}'.format(channels)],
                              env=os.environ)
    finally:
        os.remove('binstar.token')
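This try/finally variant does the same job as the get_temp_token version earlier, but writes the token to a fixed binstar.token file in the working directory, so concurrent runs from the same directory would race on that file; the context-manager form with a unique temporary file avoids that hazard.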
def build(self, meta, config):
    print('Building ', meta.dist())
    config = meta.vn_context(config=config)
    try:
        output_paths = conda_build.api.build(meta.meta, config=config)
    except AttributeError:
        with meta.vn_context():
            output_paths = bldpkg_path(build.build(meta.meta))
    if isinstance(output_paths, string_types):
        output_paths = [output_paths]
    return output_paths
def main():
    token = os.environ.get('BINSTAR_TOKEN')

    description = ('Upload or check consistency of a built version of a '
                   'conda recipe with binstar. Note: The existence of the '
                   'BINSTAR_TOKEN environment variable determines '
                   'whether the upload should actually take place.')
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument('recipe_dir', help='the conda recipe directory')
    parser.add_argument('owner', help='the binstar owner/user')
    parser.add_argument('--channel', help='the binstar channel', default='main')
    args = parser.parse_args()
    recipe_dir, owner, channel = args.recipe_dir, args.owner, args.channel

    cli = get_binstar(argparse.Namespace(token=token, site=None))
    meta = MetaData(recipe_dir)
    if meta.skip():
        print("No upload to take place - this configuration was skipped in build/skip.")
        return

    exists = built_distribution_already_exists(cli, meta, owner)
    if token:
        on_channel = distribution_exists_on_channel(cli, meta, owner, channel)
        if not exists:
            upload(cli, meta, owner, channel)
            print('Uploaded {}'.format(bldpkg_path(meta)))
        elif not on_channel:
            print('Adding distribution {} to {}\'s {} channel'
                  ''.format(bldpkg_path(meta), owner, channel))
            add_distribution_to_channel(cli, meta, owner, channel)
        else:
            print('Distribution {} already exists on {}\'s {} channel.'
                  ''.format(bldpkg_path(meta), owner, channel))
    else:
        print("No BINSTAR_TOKEN present, so no upload is taking place. "
              "The distribution just built {} already available on {}'s "
              "{} channel.".format('is' if exists else 'is not', owner, channel))
def execute(args, parser):
    d = defaultdict(dict)
    d['package']['name'] = args.name
    d['package']['version'] = args.version
    d['build']['number'] = args.build_number
    d['build']['entry_points'] = args.entry_points
    # MetaData does the auto stuff if the build string is None
    d['build']['string'] = args.build_string
    d['requirements']['run'] = args.dependencies
    d['about']['home'] = args.home
    d['about']['license'] = args.license
    d['about']['summary'] = args.summary
    m = MetaData.fromdict(d)
    build(m)
    handle_binstar_upload(bldpkg_path(m), args)
def execute(args, parser):
    d = defaultdict(dict)
    d['package']['name'] = args.name
    d['package']['version'] = args.version
    d['build']['number'] = args.build_number
    d['build']['entry_points'] = args.entry_points
    # MetaData does the auto stuff if the build string is None
    d['build']['string'] = args.build_string
    d['requirements']['run'] = args.dependencies
    d['about']['home'] = args.home
    d['about']['license'] = args.license
    d['about']['summary'] = args.summary
    d = dict(d)
    m = MetaData.fromdict(d)
    build(m)
    handle_binstar_upload(bldpkg_path(m), args)
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    check_external()

    if args.python:
        if args.python == ['all']:
            for py in [26, 27, 33, 34]:
                args.python = [str(py)]
                execute(args, parser)
            return
        if len(args.python) > 1:
            for py in args.python[:]:
                args.python = [py]
                execute(args, parser)
        else:
            config.CONDA_PY = int(args.python[0].replace('.', ''))
    if args.perl:
        config.CONDA_PERL = args.perl
    if args.numpy:
        if args.numpy == ['all']:
            for npy in [16, 17, 18]:
                args.numpy = [str(npy)]
                execute(args, parser)
            return
        if len(args.numpy) > 1:
            for npy in args.numpy[:]:
                args.numpy = [npy]
                execute(args, parser)
        else:
            config.CONDA_NPY = int(args.numpy[0].replace('.', ''))

    with Locked(config.croot):
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding())
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            m = MetaData(recipe_dir)
            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()

            if args.check:
                continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m, verbose=not args.quiet)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                try:
                    build.build(m, verbose=not args.quiet, post=post)
                except RuntimeError as e:
                    error_str = str(e)
                    if error_str.startswith('No packages found matching:'):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(': ')[1]
                        # Handle package names that contain version deps.
                        if ' ' in dep_pkg:
                            dep_pkg = dep_pkg.split(' ')[0]
                        recipe_glob = glob(dep_pkg + '-[v0-9][0-9.]*')
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                print(("Missing dependency {0}, but found" +
                                       " recipe directory, so building " +
                                       "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                        else:
                            raise
                    else:
                        raise

                if try_again:
                    continue

                if not args.notest:
                    build.test(m, verbose=not args.quiet)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)
#!/usr/bin/env python
import shutil, os

from conda_build.metadata import MetaData
from conda_build.build import bldpkg_path


if __name__ == '__main__':
    artifacts_dir = 'artifacts'
    os.mkdir(artifacts_dir)
    meta = MetaData('recipe')
    shutil.copy(bldpkg_path(meta), artifacts_dir)
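A slightly more defensive variant of the same script; the only change over the original is tolerating a pre-existing artifacts directory (a sketch, not the author's code):

#!/usr/bin/env python
import os
import shutil

from conda_build.metadata import MetaData
from conda_build.build import bldpkg_path


if __name__ == '__main__':
    artifacts_dir = 'artifacts'
    # Unlike os.mkdir, this does not fail when the directory already exists.
    if not os.path.isdir(artifacts_dir):
        os.makedirs(artifacts_dir)
    meta = MetaData('recipe')
    shutil.copy(bldpkg_path(meta), artifacts_dir)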
def package_built_name(package, root_dir):
    package_dir = os.path.join(root_dir, package)
    meta = MetaData(package_dir)
    return bldpkg_path(meta)
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os import makedirs
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    check_external()

    # change globals in build module, see comment there as well
    build.channel_urls = args.channel or ()
    build.override_channels = args.override_channels
    build.verbose = not args.quiet

    if on_win:
        try:
            # needs to happen before any c extensions are imported that might be
            # hard-linked by files in the trash. one of those is markupsafe,
            # used by jinja2. see https://github.com/conda/conda-build/pull/520
            delete_trash(None)
        except:
            # when we can't delete the trash, don't crash on AssertionError,
            # instead inform the user and try again next time.
            # see https://github.com/conda/conda-build/pull/744
            warnings.warn("Cannot delete trash; some c extension has been "
                          "imported that is hard-linked by files in the trash. "
                          "Will try again on next run.")

    conda_version = {
        'python': 'CONDA_PY',
        'numpy': 'CONDA_NPY',
        'perl': 'CONDA_PERL',
        'R': 'CONDA_R',
    }

    for lang in ['python', 'numpy', 'perl', 'R']:
        versions = getattr(args, lang)
        if not versions:
            continue
        if versions == ['all']:
            if all_versions[lang]:
                versions = all_versions[lang]
            else:
                parser.error("'all' is not supported for --%s" % lang)
        if len(versions) > 1:
            for ver in versions[:]:
                setattr(args, lang, [str(ver)])
                execute(args, parser)
                # This is necessary to make all combinations build.
                setattr(args, lang, versions)
            return
        else:
            version = versions[0]
            if lang in ('python', 'numpy'):
                version = int(version.replace('.', ''))
            setattr(config, conda_version[lang], version)
        if not len(str(version)) in (2, 3) and lang in ['python', 'numpy']:
            if all_versions[lang]:
                raise RuntimeError("%s must be major.minor, like %s, not %s" %
                                   (conda_version[lang],
                                    all_versions[lang][-1] / 10, version))
            else:
                raise RuntimeError("%s must be major.minor, not %s" %
                                   (conda_version[lang], version))

    # Using --python, --numpy etc. is equivalent to using CONDA_PY, CONDA_NPY, etc.
    # Auto-set those env variables
    for var in conda_version.values():
        if getattr(config, var):
            # Set the env variable.
            os_environ[var] = str(getattr(config, var))

    if args.skip_existing:
        if not isdir(config.bldpkgs_dir):
            makedirs(config.bldpkgs_dir)
        update_index(config.bldpkgs_dir)
        index = build.get_build_index(clear_cache=True)

    already_built = []
    to_build_recursive = []
    with Locked(config.croot):
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding() or 'utf-8')
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            try:
                m = MetaData(recipe_dir)
                if m.get_value('build/noarch_python'):
                    config.noarch = True
            except exceptions.YamlParsingError as e:
                sys.stderr.write(e.error_msg())
                sys.exit(1)

            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()

            if args.check:
                continue
            if args.skip_existing:
                if m.pkg_fn() in index or m.pkg_fn() in already_built:
                    print("%s is already built, skipping." % m.dist())
                    continue
            if m.skip():
                print("Skipped: The %s recipe defines build/skip for this "
                      "configuration." % m.dist())
                continue
            if args.output:
                try:
                    m.parse_again(permit_undefined_jinja=False)
                except SystemExit:
                    # Something went wrong; possibly due to undefined GIT_ jinja variables.
                    # Maybe we need to actually download the source in order to resolve the build_id.
                    source.provide(m.path, m.get_section('source'))
                    # Parse our metadata again because we did not initialize the source
                    # information before.
                    m.parse_again(permit_undefined_jinja=False)
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m, move_broken=False)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                try:
                    build.build(m, post=post,
                                include_recipe=args.include_recipe)
                except (RuntimeError, SystemExit) as e:
                    error_str = str(e)
                    if error_str.startswith('No packages found') or \
                            error_str.startswith('Could not find some'):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(': ')[1]
                        # Handle package names that contain version deps.
                        if ' ' in dep_pkg:
                            dep_pkg = dep_pkg.split(' ')[0]
                        recipe_glob = glob(dep_pkg + '-[v0-9][0-9.]*')
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                if dep_pkg in to_build_recursive:
                                    sys.exit(str(e))
                                print(("Missing dependency {0}, but found" +
                                       " recipe directory, so building " +
                                       "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                                to_build_recursive.append(dep_pkg)
                        else:
                            raise
                    elif error_str.strip().startswith("Hint:"):
                        lines = [line for line in error_str.splitlines()
                                 if line.strip().startswith('- ')]
                        pkgs = [line.lstrip('- ') for line in lines]
                        # Typically if a conflict is with one of these
                        # packages, the other package needs to be rebuilt
                        # (e.g., a conflict with 'python 3.5*' and 'x' means
                        # 'x' isn't build for Python 3.5 and needs to be
                        # rebuilt).
                        skip_names = ['python', 'r']
                        pkgs = [pkg for pkg in pkgs
                                if pkg.split(' ')[0] not in skip_names]
                        for pkg in pkgs:
                            # Handle package names that contain version deps.
                            if ' ' in pkg:
                                pkg = pkg.split(' ')[0]
                            recipe_glob = glob(pkg + '-[v0-9][0-9.]*')
                            if exists(pkg):
                                recipe_glob.append(pkg)
                            if recipe_glob:
                                recipes.appendleft(arg)
                                try_again = True
                                for recipe_dir in recipe_glob:
                                    if pkg in to_build_recursive:
                                        sys.exit(str(e))
                                    print(error_str)
                                    print(("Missing dependency {0}, but found" +
                                           " recipe directory, so building " +
                                           "{0} first").format(pkg))
                                    recipes.appendleft(recipe_dir)
                                    to_build_recursive.append(pkg)
                            else:
                                raise
                    else:
                        raise

                if try_again:
                    continue

                if not args.notest:
                    build.test(m)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)

            already_built.append(m.pkg_fn())
def build(self, meta):
    print('Building ', meta.dist())
    with meta.vn_context():
        return bldpkg_path(build.build(meta.meta))
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    check_external()

    all_versions = {
        'python': [26, 27, 33, 34],
        'numpy': [16, 17, 18, 19],
        'perl': None,
        'R': None,
    }
    conda_version = {
        'python': 'CONDA_PY',
        'numpy': 'CONDA_NPY',
        'perl': 'CONDA_PERL',
        'R': 'CONDA_R',
    }

    for lang in ['python', 'numpy', 'perl', 'R']:
        versions = getattr(args, lang)
        if not versions:
            continue
        if versions == ['all']:
            if all_versions[lang]:
                versions = all_versions[lang]
            else:
                parser.error("'all' is not supported for --%s" % lang)
        if len(versions) > 1:
            for ver in versions[:]:
                setattr(args, lang, [str(ver)])
                execute(args, parser)
                # This is necessary to make all combinations build.
                setattr(args, lang, versions)
            return
        else:
            version = int(versions[0].replace('.', ''))
            setattr(config, conda_version[lang], version)
        if not len(str(version)) == 2:
            if all_versions[lang]:
                raise RuntimeError("%s must be major.minor, like %s, not %s" %
                                   (conda_version[lang],
                                    all_versions[lang][-1] / 10, version))
            else:
                raise RuntimeError("%s must be major.minor, not %s" %
                                   (conda_version[lang], version))

    with Locked(config.croot):
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding() or 'utf-8')
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            try:
                m = MetaData(recipe_dir)
            except exceptions.YamlParsingError as e:
                sys.stderr.write(e.error_msg())
                sys.exit(1)

            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()

            if args.check:
                continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m, verbose=not args.quiet)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                channel_urls = args.channel or ()
                try:
                    build.build(m, verbose=not args.quiet, post=post,
                                channel_urls=channel_urls,
                                override_channels=args.override_channels)
                except RuntimeError as e:
                    error_str = str(e)
                    if error_str.startswith('No packages found') or \
                            error_str.startswith('Could not find some'):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(': ')[1]
                        # Handle package names that contain version deps.
                        if ' ' in dep_pkg:
                            dep_pkg = dep_pkg.split(' ')[0]
                        recipe_glob = glob(dep_pkg + '-[v0-9][0-9.]*')
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                print(("Missing dependency {0}, but found" +
                                       " recipe directory, so building " +
                                       "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                        else:
                            raise
                    else:
                        raise

                if try_again:
                    continue

                if not args.notest:
                    build.test(m, verbose=not args.quiet,
                               channel_urls=channel_urls,
                               override_channels=args.override_channels)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)
description = ('Upload or check consistency of a built version of a '
               'conda recipe with binstar. Note: The existence of the '
               'BINSTAR_TOKEN environment variable determines '
               'whether the upload should actually take place.')
parser = argparse.ArgumentParser(description=description)
parser.add_argument('recipe_dir', help='the conda recipe directory')
parser.add_argument('owner', help='the binstar owner/user')
parser.add_argument('--channel', help='the binstar channel', default='main')
args = parser.parse_args()
recipe_dir, owner, channel = args.recipe_dir, args.owner, args.channel

cli = get_binstar(argparse.Namespace(token=token, site=None))
meta = MetaData(recipe_dir)
exists = built_distribution_already_exists(cli, meta, owner)
if token:
    on_channel = distribution_exists_on_channel(cli, meta, owner, channel)
    if not exists:
        upload(cli, meta, owner, channel)
    elif not on_channel:
        print('Adding distribution {} to {}\'s {} channel'
              ''.format(bldpkg_path(meta), owner, channel))
        add_distribution_to_channel(cli, meta, owner, channel)
    else:
        print('Distribution {} already exists on {}\'s {} channel (md5 has been '
              'checked for consistency)'.format(bldpkg_path(meta), owner, channel))
else:
    print("No BINSTAR_TOKEN present, so no upload is taking place. "
          "The distribution just built {} already available on {}'s "
          "{} channel.".format('is' if exists else 'is not', owner, channel))
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import croot
    from conda_build.metadata import MetaData

    check_external()

    with Locked(croot):
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding())
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            m = MetaData(recipe_dir)
            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()

            if args.check:
                continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                try:
                    build.build(m)
                except RuntimeError as e:
                    error_str = str(e)
                    if error_str.startswith('No packages found matching:'):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(': ')[1].replace(' ', '-')
                        recipe_glob = glob(dep_pkg + '-[v0-9][0-9.]*')
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                print(("Missing dependency {0}, but found" +
                                       " recipe directory, so building " +
                                       "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                        else:
                            raise
                    else:
                        raise

                if try_again:
                    continue

                if not args.notest:
                    build.test(m)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)
def run(self):
    # Make sure the metadata has the conda attributes, even if the
    # distclass isn't CondaDistribution. We primarily do this to simplify
    # the code below.
    metadata = self.distribution.metadata
    for attr in CondaDistribution.conda_attrs:
        if not hasattr(metadata, attr):
            setattr(metadata, attr, CondaDistribution.conda_attrs[attr])

    # The command line takes precedence
    if self.buildnum is not None:
        metadata.conda_buildnum = self.buildnum

    with Locked(config.croot):
        d = defaultdict(dict)
        # Needs to be lowercase
        d['package']['name'] = metadata.name
        d['package']['version'] = metadata.version
        d['build']['number'] = metadata.conda_buildnum

        # MetaData does the auto stuff if the build string is None
        d['build']['string'] = metadata.conda_buildstr

        d['build']['binary_relocation'] = metadata.conda_binary_relocation
        d['build']['preserve_egg_dir'] = metadata.conda_preserve_egg_dir
        d['build']['features'] = metadata.conda_features
        d['build']['track_features'] = metadata.conda_track_features

        # XXX: I'm not really sure if it is correct to combine requires
        # and install_requires
        d['requirements']['run'] = d['requirements']['build'] = \
            [spec_from_line(i) for i in
             (metadata.requires or []) +
             (getattr(self.distribution, 'install_requires', []) or [])] + \
            ['python']
        if hasattr(self.distribution, 'tests_require'):
            # A lot of packages use extras_require['test'], but
            # tests_require is the one that is officially supported by
            # setuptools.
            d['test']['requires'] = [spec_from_line(i) for i in
                                     self.distribution.tests_require or []]

        d['about']['home'] = metadata.url
        # Don't worry about classifiers. This isn't skeleton pypi. We
        # don't need to make this work with random stuff in the wild. If
        # someone writes their setup.py wrong and this doesn't work, it's
        # their fault.
        d['about']['license'] = metadata.license
        d['about']['summary'] = self.distribution.description

        # This is similar logic from conda skeleton pypi
        entry_points = getattr(self.distribution, 'entry_points', [])
        if entry_points:
            if isinstance(entry_points, string_types):
                # makes sure it is left-shifted
                newstr = "\n".join(x.strip() for x in entry_points.split('\n'))
                c = configparser.ConfigParser()
                entry_points = {}
                try:
                    c.readfp(StringIO(newstr))
                except Exception as err:
                    # This seems to be the best error here
                    raise DistutilsGetoptError("ERROR: entry-points not understood: " +
                                               str(err) + "\nThe string was" + newstr)
                else:
                    # Read the sections back off the parser we just filled
                    # (`c`, not the conda_build `config` module).
                    for section in c.sections():
                        if section in ['console_scripts', 'gui_scripts']:
                            value = ['%s=%s' % (option, c.get(section, option))
                                     for option in c.options(section)]
                            entry_points[section] = value
                        else:
                            # Make sure setuptools is added as a dependency below
                            entry_points[section] = None

            if not isinstance(entry_points, dict):
                raise DistutilsGetoptError("ERROR: Could not add entry points. They were:\n" +
                                           entry_points)
            else:
                cs = entry_points.get('console_scripts', [])
                gs = entry_points.get('gui_scripts', [])
                # We have *other* kinds of entry-points so we need
                # setuptools at run-time
                if not cs and not gs and len(entry_points) > 1:
                    d['requirements']['run'].append('setuptools')
                    d['requirements']['build'].append('setuptools')
                entry_list = cs + gs
                if gs and conda.config.platform == 'osx':
                    d['build']['osx_is_app'] = True
                if len(cs + gs) != 0:
                    d['build']['entry_points'] = entry_list
                    if metadata.conda_command_tests is True:
                        d['test']['commands'] = list(map(unicode,
                                                         pypi.make_entry_tests(entry_list)))

        if 'setuptools' in d['requirements']['run']:
            d['build']['preserve_egg_dir'] = True

        if metadata.conda_import_tests:
            if metadata.conda_import_tests is True:
                d['test']['imports'] = ((self.distribution.packages or []) +
                                        (self.distribution.py_modules or []))
            else:
                d['test']['imports'] = metadata.conda_import_tests

        if (metadata.conda_command_tests and not
                isinstance(metadata.conda_command_tests, bool)):
            d['test']['commands'] = list(map(unicode, metadata.conda_command_tests))

        d = dict(d)
        m = MetaData.fromdict(d)
        # Shouldn't fail, but do you really trust the code above?
        m.check_fields()

        build.build(m, post=False)
        # Do the install
        if not PY3:
            # Command is an old-style class in Python 2
            install.run(self)
        else:
            super().run()
        build.build(m, post=True)
        build.test(m)
        if self.binstar_upload:
            class args:
                binstar_upload = self.binstar_upload
            handle_binstar_upload(build.bldpkg_path(m), args)
        else:
            no_upload_message = """\
# If you want to upload this package to binstar.org later, type:
#
# $ binstar upload %s
""" % build.bldpkg_path(m)
            print(no_upload_message)
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os import makedirs
    from os.path import abspath, isdir, isfile

    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config

    check_external()

    # change globals in build module, see comment there as well
    build.channel_urls = args.channel or ()
    build.override_channels = args.override_channels
    build.verbose = not args.quiet

    if on_win:
        try:
            # needs to happen before any c extensions are imported that might be
            # hard-linked by files in the trash. one of those is markupsafe,
            # used by jinja2. see https://github.com/conda/conda-build/pull/520
            delete_trash(None)
        except:
            # when we can't delete the trash, don't crash on AssertionError,
            # instead inform the user and try again next time.
            # see https://github.com/conda/conda-build/pull/744
            warnings.warn("Cannot delete trash; some c extension has been "
                          "imported that is hard-linked by files in the trash. "
                          "Will try again on next run.")

    set_language_env_vars(args, parser, execute=execute)

    if args.skip_existing:
        for d in config.bldpkgs_dirs:
            if not isdir(d):
                makedirs(d)
            update_index(d)
        index = build.get_build_index(clear_cache=True)

    already_built = set()
    to_build_recursive = []
    recipes = deque(args.recipe)
    while recipes:
        arg = recipes.popleft()
        try_again = False
        # Don't use byte literals for paths in Python 2
        if not PY3:
            arg = arg.decode(getpreferredencoding() or 'utf-8')
        if isfile(arg):
            if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                recipe_dir = tempfile.mkdtemp()
                t = tarfile.open(arg, 'r:*')
                t.extractall(path=recipe_dir)
                t.close()
                need_cleanup = True
            else:
                print("Ignoring non-recipe: %s" % arg)
                continue
        else:
            recipe_dir = abspath(arg)
            need_cleanup = False

        # recurse looking for meta.yaml that is potentially not in immediate folder
        recipe_dir = find_recipe(recipe_dir)
        if not isdir(recipe_dir):
            sys.exit("Error: no such directory: %s" % recipe_dir)

        # this fully renders any jinja templating, throwing an error if any data is missing
        m, need_source_download = render_recipe(recipe_dir,
                                                no_download_source=False,
                                                verbose=False,
                                                dirty=args.dirty)
        if m.get_value('build/noarch_python'):
            config.noarch = True

        if args.check and len(args.recipe) > 1:
            print(m.path)
        m.check_fields()

        if args.check:
            continue
        if m.skip():
            print("Skipped: The %s recipe defines build/skip for this "
                  "configuration." % m.dist())
            continue
        if args.skip_existing:
            # 'or m.pkg_fn() in index' is for conda <4.1 and could be removed in the future.
            if ('local::' + m.pkg_fn() in index or
                    m.pkg_fn() in index or
                    m.pkg_fn() in already_built):
                print(m.dist(), "is already built, skipping.")
                continue
        if args.output:
            print(bldpkg_path(m))
            continue
        elif args.test:
            build.test(m, move_broken=False)
        elif args.source:
            source.provide(m.path, m.get_section('source'), verbose=build.verbose)
            print('Source tree in:', source.get_dir())
        else:
            # This loop recursively builds dependencies if recipes exist
            if args.build_only:
                post = False
                args.notest = True
                args.binstar_upload = False
            elif args.post:
                post = True
                args.notest = True
                args.binstar_upload = False
            else:
                post = None
            try:
                build.build(m, post=post,
                            include_recipe=args.include_recipe,
                            keep_old_work=args.keep_old_work,
                            need_source_download=need_source_download,
                            dirty=args.dirty)
            except (NoPackagesFound, Unsatisfiable) as e:
                error_str = str(e)
                # Typically if a conflict is with one of these
                # packages, the other package needs to be rebuilt
                # (e.g., a conflict with 'python 3.5*' and 'x' means
                # 'x' isn't build for Python 3.5 and needs to be
                # rebuilt).
                skip_names = ['python', 'r']
                add_recipes = []
                for line in error_str.splitlines():
                    if not line.startswith(' - '):
                        continue
                    pkg = line.lstrip(' - ').split(' -> ')[-1]
                    pkg = pkg.strip().split(' ')[0]
                    if pkg in skip_names:
                        continue
                    recipe_glob = glob(pkg + '-[v0-9][0-9.]*')
                    if os.path.exists(pkg):
                        recipe_glob.append(pkg)
                    if recipe_glob:
                        try_again = True
                        for recipe_dir in recipe_glob:
                            if pkg in to_build_recursive:
                                sys.exit(str(e))
                            print(error_str)
                            print(("Missing dependency {0}, but found" +
                                   " recipe directory, so building " +
                                   "{0} first").format(pkg))
                            add_recipes.append(recipe_dir)
                            to_build_recursive.append(pkg)
                    else:
                        raise
                recipes.appendleft(arg)
                recipes.extendleft(reversed(add_recipes))

            if try_again:
                continue

            if not args.notest:
                build.test(m)

        if need_cleanup:
            shutil.rmtree(recipe_dir)

        # outputs message, or does upload, depending on value of args.binstar_upload
        handle_binstar_upload(build.bldpkg_path(m), args)

        already_built.add(m.pkg_fn())
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    check_external()

    if args.python:
        if args.python == ['all']:
            for py in [26, 27, 33, 34]:
                args.python = [str(py)]
                execute(args, parser)
            return
        if len(args.python) > 1:
            for py in args.python[:]:
                args.python = [py]
                execute(args, parser)
        else:
            config.CONDA_PY = int(args.python[0].replace('.', ''))
    if args.perl:
        config.CONDA_PERL = args.perl
    if args.numpy:
        if args.numpy == ['all']:
            for npy in [16, 17, 18, 19]:
                args.numpy = [str(npy)]
                execute(args, parser)
            return
        if len(args.numpy) > 1:
            for npy in args.numpy[:]:
                args.numpy = [npy]
                execute(args, parser)
        else:
            config.CONDA_NPY = int(args.numpy[0].replace('.', ''))

    with Locked(config.croot):
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding())
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            m = MetaData(recipe_dir)
            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()

            if args.check:
                continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m, verbose=not args.quiet)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                try:
                    build.build(m, verbose=not args.quiet, post=post)
                except RuntimeError as e:
                    error_str = str(e)
                    if error_str.startswith('No packages found'):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(': ')[1]
                        # Handle package names that contain version deps.
                        if ' ' in dep_pkg:
                            dep_pkg = dep_pkg.split(' ')[0]
                        recipe_glob = glob(dep_pkg + '-[v0-9][0-9.]*')
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                print(("Missing dependency {0}, but found" +
                                       " recipe directory, so building " +
                                       "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                        else:
                            raise
                    else:
                        raise

                if try_again:
                    continue

                if not args.notest:
                    build.test(m, verbose=not args.quiet)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)
from conda_build.metadata import MetaData
from conda_build.build import bldpkg_path
from os.path import basename
import shutil

meta = MetaData(".")
path = bldpkg_path(meta)
shutil.copy(path, basename(path))
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    check_external()
    channel_urls = args.channel or ()

    if on_win:
        # needs to happen before any c extensions are imported that might be
        # hard-linked by files in the trash. one of those is markupsafe, used
        # by jinja2. see https://github.com/conda/conda-build/pull/520
        assert 'markupsafe' not in sys.modules
        delete_trash(None)

    conda_version = {
        'python': 'CONDA_PY',
        'numpy': 'CONDA_NPY',
        'perl': 'CONDA_PERL',
        'R': 'CONDA_R',
    }

    for lang in ['python', 'numpy', 'perl', 'R']:
        versions = getattr(args, lang)
        if not versions:
            continue
        if versions == ['all']:
            if all_versions[lang]:
                versions = all_versions[lang]
            else:
                parser.error("'all' is not supported for --%s" % lang)
        if len(versions) > 1:
            for ver in versions[:]:
                setattr(args, lang, [str(ver)])
                execute(args, parser)
                # This is necessary to make all combinations build.
                setattr(args, lang, versions)
            return
        else:
            version = int(versions[0].replace('.', ''))
            setattr(config, conda_version[lang], version)
        if not len(str(version)) == 2 and lang in ['python', 'numpy']:
            if all_versions[lang]:
                raise RuntimeError("%s must be major.minor, like %s, not %s" %
                                   (conda_version[lang],
                                    all_versions[lang][-1] / 10, version))
            else:
                raise RuntimeError("%s must be major.minor, not %s" %
                                   (conda_version[lang], version))

    if args.skip_existing:
        update_index(config.bldpkgs_dir)
        index = build.get_build_index(clear_cache=True,
                                      channel_urls=channel_urls,
                                      override_channels=args.override_channels)

    already_built = []
    to_build = args.recipe[:]
    with Locked(config.croot):
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding() or 'utf-8')
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            try:
                m = MetaData(recipe_dir)
                if m.get_value('build/noarch_python'):
                    config.noarch = True
            except exceptions.YamlParsingError as e:
                sys.stderr.write(e.error_msg())
                sys.exit(1)

            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()

            if args.check:
                continue
            if args.skip_existing:
                if m.pkg_fn() in index or m.pkg_fn() in already_built:
                    print("%s is already built, skipping." % m.dist())
                    continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m, verbose=not args.quiet,
                           channel_urls=channel_urls,
                           override_channels=args.override_channels)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                try:
                    build.build(m, verbose=not args.quiet, post=post,
                                channel_urls=channel_urls,
                                override_channels=args.override_channels,
                                include_recipe=args.include_recipe)
                except RuntimeError as e:
                    error_str = str(e)
                    if error_str.startswith('No packages found') or \
                            error_str.startswith('Could not find some'):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(': ')[1]
                        # Handle package names that contain version deps.
                        if ' ' in dep_pkg:
                            dep_pkg = dep_pkg.split(' ')[0]
                        recipe_glob = glob(dep_pkg + '-[v0-9][0-9.]*')
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                if dep_pkg in to_build:
                                    sys.exit(str(e))
                                print(("Missing dependency {0}, but found" +
                                       " recipe directory, so building " +
                                       "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                                to_build.append(dep_pkg)
                        else:
                            raise
                    else:
                        raise

                if try_again:
                    continue

                if not args.notest:
                    build.test(m, verbose=not args.quiet,
                               channel_urls=channel_urls,
                               override_channels=args.override_channels)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)

            already_built.append(m.pkg_fn())
parser.add_argument('recipe_dir', help='the conda recipe directory')
parser.add_argument('owner', help='the binstar owner/user')
parser.add_argument('--channel', help='the binstar channel', default='main')
args = parser.parse_args()
recipe_dir, owner, channel = args.recipe_dir, args.owner, args.channel

cli = get_binstar(argparse.Namespace(token=token, site=None))
meta = MetaData(recipe_dir)
exists = built_distribution_already_exists(cli, meta, owner)
if token:
    on_channel = distribution_exists_on_channel(cli, meta, owner, channel)
    if not exists:
        upload(cli, meta, owner, channel)
        print('Uploaded {}'.format(bldpkg_path(meta)))
    elif not on_channel:
        print('Adding distribution {} to {}\'s {} channel'
              ''.format(bldpkg_path(meta), owner, channel))
        add_distribution_to_channel(cli, meta, owner, channel)
    else:
        print('Distribution {} already exists on {}\'s {} channel (md5 has been '
              'checked for consistency)'.format(bldpkg_path(meta), owner, channel))
else:
    print("No BINSTAR_TOKEN present, so no upload is taking place. "
          "The distribution just built {} already available on {}'s "
          "{} channel.".format('is' if exists else 'is not', owner, channel))
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os import makedirs
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    check_external()

    # change globals in build module, see comment there as well
    build.channel_urls = args.channel or ()
    build.override_channels = args.override_channels
    build.verbose = not args.quiet

    if on_win:
        try:
            # needs to happen before any c extensions are imported that might be
            # hard-linked by files in the trash. one of those is markupsafe,
            # used by jinja2. see https://github.com/conda/conda-build/pull/520
            delete_trash(None)
        except:
            # when we can't delete the trash, don't crash on AssertionError,
            # instead inform the user and try again next time.
            # see https://github.com/conda/conda-build/pull/744
            warnings.warn("Cannot delete trash; some c extension has been "
                          "imported that is hard-linked by files in the trash. "
                          "Will try again on next run.")

    conda_version = {
        'python': 'CONDA_PY',
        'numpy': 'CONDA_NPY',
        'perl': 'CONDA_PERL',
        'R': 'CONDA_R',
    }

    for lang in ['python', 'numpy', 'perl', 'R']:
        versions = getattr(args, lang)
        if not versions:
            continue
        if versions == ['all']:
            if all_versions[lang]:
                versions = all_versions[lang]
            else:
                parser.error("'all' is not supported for --%s" % lang)
        if len(versions) > 1:
            for ver in versions[:]:
                setattr(args, lang, [str(ver)])
                execute(args, parser)
                # This is necessary to make all combinations build.
                setattr(args, lang, versions)
            return
        else:
            version = versions[0]
            if lang in ('python', 'numpy'):
                version = int(version.replace('.', ''))
            setattr(config, conda_version[lang], version)
        if not len(str(version)) in (2, 3) and lang in ['python', 'numpy']:
            if all_versions[lang]:
                raise RuntimeError("%s must be major.minor, like %s, not %s" %
                                   (conda_version[lang], all_versions[lang][-1] / 10, version))
            else:
                raise RuntimeError("%s must be major.minor, not %s" %
                                   (conda_version[lang], version))

    # Using --python, --numpy etc. is equivalent to using CONDA_PY, CONDA_NPY, etc.
    # Auto-set those env variables
    for var in conda_version.values():
        if getattr(config, var):
            # Set the env variable.
            os_environ[var] = str(getattr(config, var))

    if args.skip_existing:
        if not isdir(config.bldpkgs_dir):
            makedirs(config.bldpkgs_dir)
        update_index(config.bldpkgs_dir)
        index = build.get_build_index(clear_cache=True)

    already_built = []
    to_build_recursive = []
    with Locked(config.croot):
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding() or 'utf-8')
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            try:
                m = MetaData(recipe_dir)
                if m.get_value('build/noarch_python'):
                    config.noarch = True
            except exceptions.YamlParsingError as e:
                sys.stderr.write(e.error_msg())
                sys.exit(1)

            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.skip_existing:
                if m.pkg_fn() in index or m.pkg_fn() in already_built:
                    print("%s is already built, skipping." % m.dist())
                    continue
            if m.skip():
                print("Skipped: The %s recipe defines build/skip for this "
                      "configuration." % m.dist())
                continue
            if args.output:
                try:
                    m.parse_again(permit_undefined_jinja=False)
                except SystemExit:
                    # Something went wrong; possibly due to undefined GIT_ jinja variables.
                    # Maybe we need to actually download the source in order to resolve
                    # the build_id.
                    source.provide(m.path, m.get_section('source'))

                    # Parse our metadata again because we did not initialize the source
                    # information before.
                    m.parse_again(permit_undefined_jinja=False)

                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m, move_broken=False)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                try:
                    build.build(m, post=post,
                                include_recipe=args.include_recipe)
                except (RuntimeError, SystemExit) as e:
                    error_str = str(e)
                    if error_str.startswith('No packages found') or \
                            error_str.startswith('Could not find some'):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(': ')[1]
                        # Handle package names that contain version deps.
                        if ' ' in dep_pkg:
                            dep_pkg = dep_pkg.split(' ')[0]
                        recipe_glob = glob(dep_pkg + '-[v0-9][0-9.]*')
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                if dep_pkg in to_build_recursive:
                                    sys.exit(str(e))
                                print(("Missing dependency {0}, but found" +
                                       " recipe directory, so building " +
                                       "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                                to_build_recursive.append(dep_pkg)
                        else:
                            raise
                    elif error_str.strip().startswith("Hint:"):
                        lines = [line for line in error_str.splitlines()
                                 if line.strip().startswith('- ')]
                        pkgs = [line.lstrip('- ') for line in lines]
                        # Typically if a conflict is with one of these
                        # packages, the other package needs to be rebuilt
                        # (e.g., a conflict with 'python 3.5*' and 'x' means
                        # 'x' isn't built for Python 3.5 and needs to be
                        # rebuilt).
                        skip_names = ['python', 'r']
                        pkgs = [pkg for pkg in pkgs
                                if pkg.split(' ')[0] not in skip_names]
                        for pkg in pkgs:
                            # Handle package names that contain version deps.
                            if ' ' in pkg:
                                pkg = pkg.split(' ')[0]
                            recipe_glob = glob(pkg + '-[v0-9][0-9.]*')
                            if exists(pkg):
                                recipe_glob.append(pkg)
                            if recipe_glob:
                                recipes.appendleft(arg)
                                try_again = True
                                for recipe_dir in recipe_glob:
                                    if pkg in to_build_recursive:
                                        sys.exit(str(e))
                                    print(error_str)
                                    print(("Missing dependency {0}, but found" +
                                           " recipe directory, so building " +
                                           "{0} first").format(pkg))
                                    recipes.appendleft(recipe_dir)
                                    to_build_recursive.append(pkg)
                            else:
                                raise
                    else:
                        raise

                if try_again:
                    continue

                if not args.notest:
                    build.test(m)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)

            already_built.append(m.pkg_fn())
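
# How the missing-dependency recovery above locates a sibling recipe: the
# failed spec is reduced to a bare package name and matched against
# version-suffixed directory names. Standalone illustration (the package
# name here is hypothetical, not from the original source):
from glob import glob

dep_pkg = 'six 1.10*'.split(' ')[0]            # 'six 1.10*' -> 'six'
candidates = glob(dep_pkg + '-[v0-9][0-9.]*')  # would match e.g. './six-1.10.0'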
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    check_external()
    channel_urls = args.channel or ()

    all_versions = {
        'python': [26, 27, 33, 34],
        'numpy': [16, 17, 18, 19],
        'perl': None,
        'R': None,
    }
    conda_version = {
        'python': 'CONDA_PY',
        'numpy': 'CONDA_NPY',
        'perl': 'CONDA_PERL',
        'R': 'CONDA_R',
    }

    for lang in ['python', 'numpy', 'perl', 'R']:
        versions = getattr(args, lang)
        if not versions:
            continue
        if versions == ['all']:
            if all_versions[lang]:
                versions = all_versions[lang]
            else:
                parser.error("'all' is not supported for --%s" % lang)
        if len(versions) > 1:
            for ver in versions[:]:
                setattr(args, lang, [str(ver)])
                execute(args, parser)
                # This is necessary to make all combinations build.
                setattr(args, lang, versions)
            return
        else:
            version = int(versions[0].replace('.', ''))
            setattr(config, conda_version[lang], version)
        if not len(str(version)) == 2:
            if all_versions[lang]:
                raise RuntimeError("%s must be major.minor, like %s, not %s" %
                                   (conda_version[lang], all_versions[lang][-1] / 10, version))
            else:
                raise RuntimeError("%s must be major.minor, not %s" %
                                   (conda_version[lang], version))

    if args.skip_existing:
        update_index(config.bldpkgs_dir)
        index = build.get_build_index(clear_cache=True,
                                      channel_urls=channel_urls,
                                      override_channels=args.override_channels)

    with Locked(config.croot):
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding() or 'utf-8')
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            try:
                m = MetaData(recipe_dir)
                if m.get_value('build/noarch_python'):
                    config.noarch = True
            except exceptions.YamlParsingError as e:
                sys.stderr.write(e.error_msg())
                sys.exit(1)

            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.skip_existing:
                if m.pkg_fn() in index:
                    print("%s is already built, skipping." % m.dist())
                    continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m, verbose=not args.quiet,
                           channel_urls=channel_urls,
                           override_channels=args.override_channels)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                try:
                    build.build(m, verbose=not args.quiet, post=post,
                                channel_urls=channel_urls,
                                override_channels=args.override_channels)
                except RuntimeError as e:
                    error_str = str(e)
                    if error_str.startswith('No packages found') or \
                            error_str.startswith('Could not find some'):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(': ')[1]
                        # Handle package names that contain version deps.
                        if ' ' in dep_pkg:
                            dep_pkg = dep_pkg.split(' ')[0]
                        recipe_glob = glob(dep_pkg + '-[v0-9][0-9.]*')
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                print(("Missing dependency {0}, but found" +
                                       " recipe directory, so building " +
                                       "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                        else:
                            raise
                    else:
                        raise

                if try_again:
                    continue

                if not args.notest:
                    build.test(m, verbose=not args.quiet,
                               channel_urls=channel_urls,
                               override_channels=args.override_channels)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)
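
# The version-matrix handling above pins one version at a time and re-invokes
# execute() recursively. A toy standalone reduction of that control flow
# (hypothetical function, for illustration only):
def run_matrix(versions):
    if len(versions) > 1:
        for ver in versions[:]:
            run_matrix([str(ver)])  # recurse with a single pinned version
        return
    print('building against version %s' % versions[0])

run_matrix(['2.7', '3.4'])  # builds once per version, like --python 2.7 3.4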
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os import makedirs
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    check_external()
    channel_urls = args.channel or ()

    if on_win:
        # needs to happen before any c extensions are imported that might be
        # hard-linked by files in the trash. one of those is markupsafe, used
        # by jinja2. see https://github.com/conda/conda-build/pull/520
        assert "markupsafe" not in sys.modules
        delete_trash(None)

    conda_version = {"python": "CONDA_PY", "numpy": "CONDA_NPY", "perl": "CONDA_PERL", "R": "CONDA_R"}

    for lang in ["python", "numpy", "perl", "R"]:
        versions = getattr(args, lang)
        if not versions:
            continue
        if versions == ["all"]:
            if all_versions[lang]:
                versions = all_versions[lang]
            else:
                parser.error("'all' is not supported for --%s" % lang)
        if len(versions) > 1:
            for ver in versions[:]:
                setattr(args, lang, [str(ver)])
                execute(args, parser)
                # This is necessary to make all combinations build.
                setattr(args, lang, versions)
            return
        else:
            version = versions[0]
            if lang in ("python", "numpy"):
                version = int(version.replace(".", ""))
            setattr(config, conda_version[lang], version)
        if not len(str(version)) in (2, 3) and lang in ["python", "numpy"]:
            if all_versions[lang]:
                raise RuntimeError(
                    "%s must be major.minor, like %s, not %s"
                    % (conda_version[lang], all_versions[lang][-1] / 10, version)
                )
            else:
                raise RuntimeError("%s must be major.minor, not %s" % (conda_version[lang], version))

    # Using --python, --numpy etc. is equivalent to using CONDA_PY, CONDA_NPY, etc.
    # Auto-set those env variables
    for var in conda_version.values():
        if getattr(config, var):
            # Set the env variable.
            os_environ[var] = str(getattr(config, var))

    if args.skip_existing:
        if not isdir(config.bldpkgs_dir):
            makedirs(config.bldpkgs_dir)
        update_index(config.bldpkgs_dir)
        index = build.get_build_index(
            clear_cache=True, channel_urls=channel_urls, override_channels=args.override_channels
        )

    already_built = []
    to_build_recursive = []
    with Locked(config.croot):
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding() or "utf-8")
            if isfile(arg):
                if arg.endswith((".tar", ".tar.gz", ".tgz", ".tar.bz2")):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, "r:*")
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            try:
                m = MetaData(recipe_dir)
                if m.get_value("build/noarch_python"):
                    config.noarch = True
            except exceptions.YamlParsingError as e:
                sys.stderr.write(e.error_msg())
                sys.exit(1)

            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.skip_existing:
                if m.pkg_fn() in index or m.pkg_fn() in already_built:
                    print("%s is already built, skipping." % m.dist())
                    continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(
                    m, verbose=not args.quiet, channel_urls=channel_urls,
                    override_channels=args.override_channels
                )
            elif args.source:
                source.provide(m.path, m.get_section("source"))
                print("Source tree in:", source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                try:
                    if m.skip():
                        print("Skipped: The %s recipe defines build/skip for this "
                              "configuration." % m.dist())
                        continue
                    build.build(
                        m,
                        verbose=not args.quiet,
                        post=post,
                        channel_urls=channel_urls,
                        override_channels=args.override_channels,
                        include_recipe=args.include_recipe,
                    )
                except (RuntimeError, SystemExit) as e:
                    error_str = str(e)
                    if error_str.startswith("No packages found") or \
                            error_str.startswith("Could not find some"):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(": ")[1]
                        # Handle package names that contain version deps.
                        if " " in dep_pkg:
                            dep_pkg = dep_pkg.split(" ")[0]
                        recipe_glob = glob(dep_pkg + "-[v0-9][0-9.]*")
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                if dep_pkg in to_build_recursive:
                                    sys.exit(str(e))
                                print(
                                    ("Missing dependency {0}, but found" +
                                     " recipe directory, so building " +
                                     "{0} first").format(dep_pkg)
                                )
                                recipes.appendleft(recipe_dir)
                                to_build_recursive.append(dep_pkg)
                        else:
                            raise
                    elif error_str.strip().startswith("Hint:"):
                        lines = [line for line in error_str.splitlines()
                                 if line.strip().startswith("- ")]
                        pkgs = [line.lstrip("- ") for line in lines]
                        # Typically if a conflict is with one of these
                        # packages, the other package needs to be rebuilt
                        # (e.g., a conflict with 'python 3.5*' and 'x' means
                        # 'x' isn't built for Python 3.5 and needs to be
                        # rebuilt).
                        skip_names = ["python", "r"]
                        pkgs = [pkg for pkg in pkgs if pkg.split(" ")[0] not in skip_names]
                        for pkg in pkgs:
                            # Handle package names that contain version deps.
                            if " " in pkg:
                                pkg = pkg.split(" ")[0]
                            recipe_glob = glob(pkg + "-[v0-9][0-9.]*")
                            if exists(pkg):
                                recipe_glob.append(pkg)
                            if recipe_glob:
                                recipes.appendleft(arg)
                                try_again = True
                                for recipe_dir in recipe_glob:
                                    if pkg in to_build_recursive:
                                        sys.exit(str(e))
                                    print(error_str)
                                    print(
                                        ("Missing dependency {0}, but found" +
                                         " recipe directory, so building " +
                                         "{0} first").format(pkg)
                                    )
                                    recipes.appendleft(recipe_dir)
                                    to_build_recursive.append(pkg)
                            else:
                                raise
                    else:
                        raise

                if try_again:
                    continue

                if not args.notest:
                    build.test(
                        m, verbose=not args.quiet, channel_urls=channel_urls,
                        override_channels=args.override_channels
                    )
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)

            already_built.append(m.pkg_fn())
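
# The CONDA_PY normalization used above, shown standalone: '--python 3.5'
# becomes the integer 35 on the config object and is mirrored into the
# environment for child processes (values here are illustrative only):
import os

version = '3.5'
conda_py = int(version.replace('.', ''))  # '3.5' -> 35; len('35') == 2 passes the check
os.environ['CONDA_PY'] = str(conda_py)
assert os.environ['CONDA_PY'] == '35'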
def run(self):
    # Make sure the metadata has the conda attributes, even if the
    # distclass isn't CondaDistribution. We primarily do this to simplify
    # the code below.
    metadata = self.distribution.metadata

    for attr in CondaDistribution.conda_attrs:
        if not hasattr(metadata, attr):
            setattr(metadata, attr, CondaDistribution.conda_attrs[attr])

    # The command line takes precedence
    if self.buildnum is not None:
        metadata.conda_buildnum = self.buildnum

    with Locked(config.croot):
        d = defaultdict(dict)
        # Needs to be lowercase
        d['package']['name'] = metadata.name
        d['package']['version'] = metadata.version
        d['build']['number'] = metadata.conda_buildnum

        # MetaData does the auto stuff if the build string is None
        d['build']['string'] = metadata.conda_buildstr

        d['build']['binary_relocation'] = metadata.conda_binary_relocation
        d['build']['preserve_egg_dir'] = metadata.conda_preserve_egg_dir
        d['build']['features'] = metadata.conda_features
        d['build']['track_features'] = metadata.conda_track_features

        # XXX: I'm not really sure if it is correct to combine requires
        # and install_requires
        d['requirements']['run'] = d['requirements']['build'] = \
            [spec_from_line(i) for i in
             (metadata.requires or []) +
             (getattr(self.distribution, 'install_requires', []) or [])] + \
            ['python']
        if hasattr(self.distribution, 'tests_require'):
            # A lot of packages use extras_require['test'], but
            # tests_require is the one that is officially supported by
            # setuptools.
            d['test']['requires'] = [spec_from_line(i) for i in
                                     self.distribution.tests_require or []]

        d['about']['home'] = metadata.url
        # Don't worry about classifiers. This isn't skeleton pypi. We
        # don't need to make this work with random stuff in the wild. If
        # someone writes their setup.py wrong and this doesn't work, it's
        # their fault.
        d['about']['license'] = metadata.license
        d['about']['summary'] = self.distribution.description

        # This is similar logic from conda skeleton pypi
        entry_points = getattr(self.distribution, 'entry_points', [])
        if entry_points:
            if isinstance(entry_points, string_types):
                # makes sure it is left-shifted
                newstr = "\n".join(x.strip() for x in entry_points.split('\n'))
                c = configparser.ConfigParser()
                entry_points = {}
                try:
                    c.readfp(StringIO(newstr))
                except Exception as err:
                    # This seems to be the best error here
                    raise DistutilsGetoptError("ERROR: entry-points not understood: " +
                                               str(err) + "\nThe string was" + newstr)
                else:
                    # Walk the sections parsed from the entry_points string.
                    for section in c.sections():
                        if section in ['console_scripts', 'gui_scripts']:
                            value = ['%s=%s' % (option, c.get(section, option))
                                     for option in c.options(section)]
                            entry_points[section] = value
                        else:
                            # Make sure setuptools is added as a dependency below
                            entry_points[section] = None

            if not isinstance(entry_points, dict):
                raise DistutilsGetoptError("ERROR: Could not add entry points. They were:\n" +
                                           str(entry_points))
            else:
                cs = entry_points.get('console_scripts', [])
                gs = entry_points.get('gui_scripts', [])
                # We have *other* kinds of entry-points so we need
                # setuptools at run-time
                if not cs and not gs and len(entry_points) > 1:
                    d['requirements']['run'].append('setuptools')
                    d['requirements']['build'].append('setuptools')
                entry_list = cs + gs
                if gs and conda.config.platform == 'osx':
                    d['build']['osx_is_app'] = True
                if len(cs + gs) != 0:
                    d['build']['entry_points'] = entry_list
                    if metadata.conda_command_tests is True:
                        d['test']['commands'] = list(map(unicode,
                                                         pypi.make_entry_tests(entry_list)))

        if 'setuptools' in d['requirements']['run']:
            d['build']['preserve_egg_dir'] = True

        if metadata.conda_import_tests:
            if metadata.conda_import_tests is True:
                if self.distribution.packages:
                    d['test']['imports'] = self.distribution.packages
            else:
                d['test']['imports'] = metadata.conda_import_tests

        if (metadata.conda_command_tests and
                not isinstance(metadata.conda_command_tests, bool)):
            d['test']['commands'] = list(map(unicode, metadata.conda_command_tests))

        d = dict(d)
        m = MetaData.fromdict(d)
        # Shouldn't fail, but do you really trust the code above?
        m.check_fields()

        build.build(m, post=False)
        # Do the install
        if not PY3:
            # Command is an old-style class in Python 2
            install.run(self)
        else:
            super().run()
        build.build(m, post=True)
        build.test(m)

        if self.binstar_upload:
            class args:
                binstar_upload = self.binstar_upload
            handle_binstar_upload(build.bldpkg_path(m), args)
        else:
            no_upload_message = """\
# If you want to upload this package to binstar.org later, type:
#
# $ binstar upload %s
""" % build.bldpkg_path(m)
            print(no_upload_message)
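
# Usage sketch for the command above: conda-build documents registering the
# distclass in setup.py and then running `python setup.py bdist_conda`.
# The package name and metadata below are hypothetical placeholders.
from setuptools import setup
import distutils.command.bdist_conda

setup(
    name="mypackage",  # hypothetical package
    version="1.0",
    distclass=distutils.command.bdist_conda.CondaDistribution,
    conda_buildnum=1,  # same knob the --buildnum option above overrides
)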
                   'conda recipe with binstar. Note: The existence of the '
                   'BINSTAR_TOKEN environment variable determines '
                   'whether the upload should actually take place.')
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument('recipe_dir', help='the conda recipe directory')
    parser.add_argument('owner', help='the binstar owner/user')
    parser.add_argument('--channel', help='the binstar channel', default='main')
    args = parser.parse_args()
    recipe_dir, owner, channel = args.recipe_dir, args.owner, args.channel

    cli = get_binstar(argparse.Namespace(token=token, site=None))
    meta = MetaData(recipe_dir)
    exists = False  # built_distribution_already_exists(cli, meta, owner)
    if token:
        on_channel = distribution_exists_on_channel(cli, meta, owner, channel)
        if not exists:
            upload(cli, meta, owner, channel)
            print('Uploaded {}'.format(bldpkg_path(meta)))
        elif not on_channel:
            print('Adding distribution {} to {}\'s {} channel'
                  ''.format(bldpkg_path(meta), owner, channel))
            add_distribution_to_channel(cli, meta, owner, channel)
        else:
            print('Distribution {} already \nexists on {}\'s {} channel (md5 has been '
                  'checked for consistency)'.format(bldpkg_path(meta), owner, channel))
    else:
        print("No BINSTAR_TOKEN present, so no upload is taking place. "
              "The distribution just built {} already available on {}'s "
              "{} channel.".format('is' if exists else 'is not', owner, channel))
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os import makedirs
    from os.path import abspath, isdir, isfile

    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config

    check_external()

    # change globals in build module, see comment there as well
    build.channel_urls = args.channel or ()
    build.override_channels = args.override_channels
    build.verbose = not args.quiet

    if on_win:
        try:
            # needs to happen before any c extensions are imported that might be
            # hard-linked by files in the trash. one of those is markupsafe,
            # used by jinja2. see https://github.com/conda/conda-build/pull/520
            delete_trash(None)
        except:
            # when we can't delete the trash, don't crash on AssertionError,
            # instead inform the user and try again next time.
            # see https://github.com/conda/conda-build/pull/744
            warnings.warn("Cannot delete trash; some c extension has been "
                          "imported that is hard-linked by files in the trash. "
                          "Will try again on next run.")

    set_language_env_vars(args, parser, execute=execute)

    if args.skip_existing:
        for d in config.bldpkgs_dirs:
            if not isdir(d):
                makedirs(d)
            update_index(d)
        index = build.get_build_index(clear_cache=True)

    already_built = set()
    to_build_recursive = []
    recipes = deque(args.recipe)
    while recipes:
        arg = recipes.popleft()
        try_again = False
        # Don't use byte literals for paths in Python 2
        if not PY3:
            arg = arg.decode(getpreferredencoding() or 'utf-8')
        if isfile(arg):
            if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                recipe_dir = tempfile.mkdtemp()
                t = tarfile.open(arg, 'r:*')
                t.extractall(path=recipe_dir)
                t.close()
                need_cleanup = True
            else:
                print("Ignoring non-recipe: %s" % arg)
                continue
        else:
            recipe_dir = abspath(arg)
            need_cleanup = False
        # recurse looking for meta.yaml that is potentially not in immediate folder
        recipe_dir = find_recipe(recipe_dir)
        if not isdir(recipe_dir):
            sys.exit("Error: no such directory: %s" % recipe_dir)

        # this fully renders any jinja templating, throwing an error if any data is missing
        m, need_source_download = render_recipe(recipe_dir,
                                                no_download_source=False,
                                                verbose=False)
        if m.get_value('build/noarch_python'):
            config.noarch = True

        if args.check and len(args.recipe) > 1:
            print(m.path)
        m.check_fields()
        if args.check:
            continue
        if m.skip():
            print("Skipped: The %s recipe defines build/skip for this "
                  "configuration." % m.dist())
            continue
        if args.skip_existing:
            # 'or m.pkg_fn() in index' is for conda <4.1 and could be removed in the future.
            if ('local::' + m.pkg_fn() in index or
                    m.pkg_fn() in index or
                    m.pkg_fn() in already_built):
                print(m.dist(), "is already built, skipping.")
                continue
        if args.output:
            print(bldpkg_path(m))
            continue
        elif args.test:
            build.test(m, move_broken=False)
        elif args.source:
            source.provide(m.path, m.get_section('source'), verbose=build.verbose)
            print('Source tree in:', source.get_dir())
        else:
            # This loop recursively builds dependencies if recipes exist
            if args.build_only:
                post = False
                args.notest = True
                args.binstar_upload = False
            elif args.post:
                post = True
                args.notest = True
                args.binstar_upload = False
            else:
                post = None
            try:
                build.build(m, post=post,
                            include_recipe=args.include_recipe,
                            keep_old_work=args.keep_old_work,
                            need_source_download=need_source_download,
                            dirty=args.dirty)
            except (NoPackagesFound, Unsatisfiable) as e:
                error_str = str(e)
                # Typically if a conflict is with one of these
                # packages, the other package needs to be rebuilt
                # (e.g., a conflict with 'python 3.5*' and 'x' means
                # 'x' isn't built for Python 3.5 and needs to be
                # rebuilt).
                skip_names = ['python', 'r']
                add_recipes = []
                for line in error_str.splitlines():
                    if not line.startswith(' - '):
                        continue
                    pkg = line.lstrip(' - ').split(' -> ')[-1]
                    pkg = pkg.strip().split(' ')[0]
                    if pkg in skip_names:
                        continue
                    recipe_glob = glob(pkg + '-[v0-9][0-9.]*')
                    if os.path.exists(pkg):
                        recipe_glob.append(pkg)
                    if recipe_glob:
                        try_again = True
                        for recipe_dir in recipe_glob:
                            if pkg in to_build_recursive:
                                sys.exit(str(e))
                            print(error_str)
                            print(("Missing dependency {0}, but found" +
                                   " recipe directory, so building " +
                                   "{0} first").format(pkg))
                            add_recipes.append(recipe_dir)
                            to_build_recursive.append(pkg)
                    else:
                        raise
                recipes.appendleft(arg)
                recipes.extendleft(reversed(add_recipes))

            if try_again:
                continue

            if not args.notest:
                build.test(m)

        if need_cleanup:
            shutil.rmtree(recipe_dir)

        # outputs message, or does upload, depending on value of args.binstar_upload
        handle_binstar_upload(build.bldpkg_path(m), args)

        already_built.add(m.pkg_fn())
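
# The Unsatisfiable handler above scrapes package names out of the resolver's
# indented " - a -> b" lines. Standalone illustration with a made-up line:
line = '  - mypkg 1.0* -> somedep 2.1*'
pkg = line.lstrip(' - ').split(' -> ')[-1]  # 'somedep 2.1*'
pkg = pkg.strip().split(' ')[0]             # 'somedep'
assert pkg == 'somedep'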
                   'conda recipe with binstar. Note: The existence of the '
                   'BINSTAR_TOKEN environment variable determines '
                   'whether the upload should actually take place.')
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument('recipe_dir', help='the conda recipe directory')
    parser.add_argument('owner', help='the binstar owner/user')
    parser.add_argument('--channel', help='the binstar channel', default='main')
    args = parser.parse_args()
    recipe_dir, owner, channel = args.recipe_dir, args.owner, args.channel

    cli = get_binstar(argparse.Namespace(token=token, site=None))
    meta = MetaData(recipe_dir)
    exists = built_distribution_already_exists(cli, meta, owner)
    if token:
        on_channel = distribution_exists_on_channel(cli, meta, owner, channel)
        if not exists:
            upload(cli, meta, owner, channel)
            print('Uploaded {}'.format(bldpkg_path(meta)))
        elif not on_channel:
            print('Adding distribution {} to {}\'s {} channel'
                  ''.format(bldpkg_path(meta), owner, channel))
            add_distribution_to_channel(cli, meta, owner, channel)
        else:
            print('Distribution {} already \nexists on {}\'s {} channel.'
                  ''.format(bldpkg_path(meta), owner, channel))
    else:
        print("No BINSTAR_TOKEN present, so no upload is taking place. "
              "The distribution just built {} already available on {}'s "
              "{} channel.".format('is' if exists else 'is not', owner, channel))