# Module-level imports these functions assume (matching conda-build's environ.py;
# the conda_interface names are re-exports of conda internals). Several functions
# below are successive versions of the same routine, kept side by side for
# comparison -- in a single module the later definitions would shadow the earlier.
import contextlib
import logging
import os
import re
import sys

import yaml

from conda_build import utils
from conda_build.exceptions import DependencyNeedsBuildingError
from conda_build.features import feature_list
from conda_build.index import get_build_index
from conda_build.utils import ensure_list
from conda_build.conda_interface import (CondaError, LinkError, LockError,
                                         NoPackagesFoundError, PaddingError,
                                         ProgressiveFetchExtract, conda_43,
                                         display_actions, execute_actions,
                                         install_actions, pkgs_dirs, symlink_conda)

# Helpers such as strip_channel, _filter_run_exports, _read_specs_from_package, and
# find_pkg_dir_or_file_in_pkgs_dirs are assumed to be defined alongside these
# functions at module level.

# module-level memoization state used by get_install_actions
cached_actions = {}
last_index_ts = 0


def get_install_actions(prefix, specs, env, retries=0, subdir=None,
                        verbose=True, debug=False, locking=True,
                        bldpkgs_dirs=None, timeout=90, disable_pip=False,
                        max_env_retry=3, output_folder=None, channel_urls=None):
    global cached_actions
    global last_index_ts
    actions = {}
    log = utils.get_logger(__name__)
    conda_log_level = logging.WARN
    specs = list(specs)
    if verbose:
        capture = contextlib.contextmanager(lambda: (yield))
    elif debug:
        capture = contextlib.contextmanager(lambda: (yield))
        conda_log_level = logging.DEBUG
    else:
        capture = utils.capture
    for feature, value in feature_list:
        if value:
            specs.append('%s@' % feature)

    bldpkgs_dirs = ensure_list(bldpkgs_dirs)

    index, index_ts = get_build_index(subdir, list(bldpkgs_dirs)[0],
                                      output_folder=output_folder,
                                      channel_urls=channel_urls,
                                      debug=debug, verbose=verbose,
                                      locking=locking, timeout=timeout)
    specs = tuple(utils.ensure_valid_spec(spec) for spec in specs
                  if not str(spec).endswith('@'))

    if ((specs, env, subdir, channel_urls, disable_pip) in cached_actions and
            last_index_ts >= index_ts):
        actions = cached_actions[(specs, env, subdir, channel_urls, disable_pip)].copy()
        if "PREFIX" in actions:
            actions['PREFIX'] = prefix
    elif specs:
        # this is hiding output like:
        #    Fetching package metadata ...........
        #    Solving package specifications: ..........
        with utils.LoggingContext(conda_log_level):
            with capture():
                try:
                    actions = install_actions(prefix, index, specs, force=True)
                except NoPackagesFoundError as exc:
                    raise DependencyNeedsBuildingError(exc, subdir=subdir)
                except (SystemExit, PaddingError, LinkError,
                        DependencyNeedsBuildingError, CondaError,
                        AssertionError) as exc:
                    if 'lock' in str(exc):
                        log.warn("failed to get install actions, retrying.  "
                                 "exception was: %s", str(exc))
                    elif ('requires a minimum conda version' in str(exc) or
                            'link a source that does not' in str(exc) or
                            isinstance(exc, AssertionError)):
                        locks = utils.get_conda_operation_locks(locking, bldpkgs_dirs,
                                                                timeout)
                        with utils.try_acquire_locks(locks, timeout=timeout):
                            pkg_dir = str(exc)
                            folder = 0
                            while os.path.dirname(pkg_dir) not in pkgs_dirs and folder < 20:
                                pkg_dir = os.path.dirname(pkg_dir)
                                folder += 1
                            log.warn("I think conda ended up with a partial extraction for %s. "
                                     "Removing the folder and retrying", pkg_dir)
                            if pkg_dir in pkgs_dirs and os.path.isdir(pkg_dir):
                                utils.rm_rf(pkg_dir)
                    if retries < max_env_retry:
                        log.warn("failed to get install actions, retrying.  "
                                 "exception was: %s", str(exc))
                        actions = get_install_actions(prefix, tuple(specs), env,
                                                      retries=retries + 1,
                                                      subdir=subdir,
                                                      verbose=verbose,
                                                      debug=debug,
                                                      locking=locking,
                                                      bldpkgs_dirs=tuple(bldpkgs_dirs),
                                                      timeout=timeout,
                                                      disable_pip=disable_pip,
                                                      max_env_retry=max_env_retry,
                                                      output_folder=output_folder,
                                                      channel_urls=tuple(channel_urls))
                    else:
                        log.error("Failed to get install actions, max retries exceeded.")
                        raise
        if disable_pip:
            for pkg in ('pip', 'setuptools', 'wheel'):
                # specs are the raw specifications, not the conda-derived actual specs
                #   We're testing that pip etc. are manually specified
                # (raw string avoids the invalid '\s' escape in a plain literal)
                if not any(re.match(r'^%s(?:$|[\s=].*)' % pkg, str(dep)) for dep in specs):
                    # guard against a plan with no LINK entries, which would KeyError
                    if 'LINK' in actions:
                        actions['LINK'] = [spec for spec in actions['LINK']
                                           if spec.name != pkg]
        utils.trim_empty_keys(actions)
        cached_actions[(specs, env, subdir, channel_urls, disable_pip)] = actions.copy()
        last_index_ts = index_ts
    return actions

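# --- Hedged usage sketch (not part of the original module): how a caller might
#     drive get_install_actions directly. The prefix, specs, and dirs below are
#     hypothetical placeholders; real callers (see create_env below) pass
#     config-derived values. Hashable tuples are passed so the
#     (specs, env, subdir, channel_urls, disable_pip) cache key can memoize the plan.
def _example_get_install_actions():  # hypothetical demo helper, not in conda-build
    return get_install_actions('/tmp/_demo_prefix',        # hypothetical prefix
                               ('python 3.7.*', 'numpy'),  # raw MatchSpec strings
                               'host',                     # env name, part of cache key
                               subdir='linux-64',
                               bldpkgs_dirs=('/tmp/_demo_bldpkgs',),
                               channel_urls=('defaults',))
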
def create_env(prefix, specs_or_actions, env, config, subdir, clear_cache=True,
               retry=0, locks=None, is_cross=False, is_conda=False):
    '''
    Create a conda environment for the given prefix and specs.
    '''
    if config.debug:
        external_logger_context = utils.LoggingContext(logging.DEBUG)
    else:
        external_logger_context = utils.LoggingContext(logging.ERROR)

    with external_logger_context:
        log = utils.get_logger(__name__)

        # if os.path.isdir(prefix):
        #     utils.rm_rf(prefix)

        if specs_or_actions:  # Don't waste time if there is nothing to do
            log.debug("Creating environment in %s", prefix)
            log.debug(str(specs_or_actions))

            with utils.path_prepended(prefix):
                if not locks:
                    locks = utils.get_conda_operation_locks(config)
                try:
                    with utils.try_acquire_locks(locks, timeout=config.timeout):
                        # input is a list - it's specs in MatchSpec format
                        if not hasattr(specs_or_actions, 'keys'):
                            specs = list(set(specs_or_actions))
                            actions = get_install_actions(prefix, tuple(specs), env,
                                                          subdir=subdir,
                                                          verbose=config.verbose,
                                                          debug=config.debug,
                                                          locking=config.locking,
                                                          bldpkgs_dirs=tuple(config.bldpkgs_dirs),
                                                          timeout=config.timeout,
                                                          disable_pip=config.disable_pip,
                                                          max_env_retry=config.max_env_retry,
                                                          output_folder=config.output_folder,
                                                          channel_urls=tuple(config.channel_urls))
                        else:
                            actions = specs_or_actions
                        index, index_ts = get_build_index(subdir=subdir,
                                                          bldpkgs_dir=config.bldpkgs_dir,
                                                          output_folder=config.output_folder,
                                                          channel_urls=config.channel_urls,
                                                          debug=config.debug,
                                                          verbose=config.verbose,
                                                          locking=config.locking,
                                                          timeout=config.timeout)
                        utils.trim_empty_keys(actions)
                        display_actions(actions, index)
                        if utils.on_win:
                            for k, v in os.environ.items():
                                os.environ[k] = str(v)
                        execute_actions(actions, index, verbose=config.debug)
                except (SystemExit, PaddingError, LinkError,
                        DependencyNeedsBuildingError, CondaError) as exc:
                    if (("too short in" in str(exc) or
                            re.search('post-link failed for: (?:[a-zA-Z]*::)?openssl',
                                      str(exc)) or
                            isinstance(exc, PaddingError)) and
                            config.prefix_length > 80):
                        if config.prefix_length_fallback:
                            log.warn("Build prefix failed with prefix length %d",
                                     config.prefix_length)
                            log.warn("Error was: ")
                            log.warn(str(exc))
                            log.warn("One or more of your package dependencies needs to be "
                                     "rebuilt with a longer prefix length.")
                            log.warn("Falling back to legacy prefix length of 80 characters.")
                            log.warn("Your package will not install into prefixes > 80 "
                                     "characters.")
                            config.prefix_length = 80

                            host = '_h_env' in prefix
                            # Set this here and use to create environ
                            #   Setting this here is important because we use it below (symlink)
                            prefix = config.host_prefix if host else config.build_prefix
                            actions['PREFIX'] = prefix

                            create_env(prefix, actions, config=config, subdir=subdir,
                                       env=env, clear_cache=clear_cache, is_cross=is_cross)
                        else:
                            raise
                    elif 'lock' in str(exc):
                        if retry < config.max_env_retry:
                            log.warn("failed to create env, retrying.  exception was: %s",
                                     str(exc))
                            create_env(prefix, actions, config=config, subdir=subdir,
                                       env=env, clear_cache=clear_cache, retry=retry + 1,
                                       is_cross=is_cross)
                    elif ('requires a minimum conda version' in str(exc) or
                            'link a source that does not' in str(exc)):
                        with utils.try_acquire_locks(locks, timeout=config.timeout):
                            pkg_dir = str(exc)
                            folder = 0
                            while os.path.dirname(pkg_dir) not in pkgs_dirs and folder < 20:
                                pkg_dir = os.path.dirname(pkg_dir)
                                folder += 1
                            log.warn("I think conda ended up with a partial extraction for %s. "
                                     "Removing the folder and retrying", pkg_dir)
                            if os.path.isdir(pkg_dir):
                                utils.rm_rf(pkg_dir)
                        if retry < config.max_env_retry:
                            log.warn("failed to create env, retrying.  exception was: %s",
                                     str(exc))
                            create_env(prefix, actions, config=config, subdir=subdir,
                                       env=env, clear_cache=clear_cache, retry=retry + 1,
                                       is_cross=is_cross)
                        else:
                            log.error("Failed to create env, max retries exceeded.")
                            raise
                    else:
                        raise
                # HACK: some of the time, conda screws up somehow and incomplete packages
                #    result. Just retry.
                except (AssertionError, IOError, ValueError, RuntimeError, LockError) as exc:
                    if isinstance(exc, AssertionError):
                        with utils.try_acquire_locks(locks, timeout=config.timeout):
                            pkg_dir = os.path.dirname(os.path.dirname(str(exc)))
                            log.warn("I think conda ended up with a partial extraction for %s. "
                                     "Removing the folder and retrying", pkg_dir)
                            if os.path.isdir(pkg_dir):
                                utils.rm_rf(pkg_dir)
                    if retry < config.max_env_retry:
                        log.warn("failed to create env, retrying.  exception was: %s",
                                 str(exc))
                        create_env(prefix, actions, config=config, subdir=subdir, env=env,
                                   clear_cache=clear_cache, retry=retry + 1,
                                   is_cross=is_cross)
                    else:
                        log.error("Failed to create env, max retries exceeded.")
                        raise

    if not is_conda:
        # Symlinking conda is critical here to make sure that activate scripts are not
        #    accidentally included in packages.
        if utils.on_win:
            shell = "cmd.exe"
        else:
            shell = "bash"
        symlink_conda(prefix, sys.prefix, shell)

def get_upstream_pins(m, actions, env):
    """Download packages from specs, then inspect each downloaded package for additional
    downstream dependency specs.  Return these additional specs."""
    env_specs = m.meta.get('requirements', {}).get(env, [])
    explicit_specs = [req.split(' ')[0] for req in env_specs] if env_specs else []
    linked_packages = actions.get('LINK', [])
    linked_packages = [pkg for pkg in linked_packages if pkg.name in explicit_specs]

    # edit the plan to download all necessary packages
    for key in ('LINK', 'EXTRACT', 'UNLINK'):
        if key in actions:
            del actions[key]
    # this should be just downloading packages.  We don't need to extract them -
    #    we read contents directly
    index, index_ts = get_build_index(getattr(m.config, '{}_subdir'.format(env)),
                                      bldpkgs_dir=m.config.bldpkgs_dir,
                                      output_folder=m.config.output_folder,
                                      channel_urls=m.config.channel_urls,
                                      debug=m.config.debug, verbose=m.config.verbose,
                                      locking=m.config.locking, timeout=m.config.timeout)
    if 'FETCH' in actions or 'EXTRACT' in actions:
        # this is to force the download
        execute_actions(actions, index, verbose=m.config.debug)
    ignore_list = utils.ensure_list(m.get_value('build/ignore_run_exports'))

    additional_specs = {}
    for pkg in linked_packages:
        if hasattr(pkg, 'dist_name'):
            pkg_dist = pkg.dist_name
        else:
            pkg = strip_channel(pkg)
            pkg_dist = pkg.split(' ')[0]
        pkg_loc = find_pkg_dir_or_file_in_pkgs_dirs(pkg_dist, m)

        # ran through all pkgs_dirs, and did not find package or folder.  Download it.
        # TODO: this is a vile hack reaching into conda's internals.  Replace with
        #    proper conda API when available.
        if not pkg_loc and conda_43:
            try:
                # the conda 4.4 API uses a single `link_prefs` kwarg
                # whereas conda 4.3 used `index` and `link_dists` kwargs
                pfe = ProgressiveFetchExtract(link_prefs=(index[pkg],))
            except TypeError:
                # TypeError: __init__() got an unexpected keyword argument 'link_prefs'
                pfe = ProgressiveFetchExtract(link_dists=[pkg], index=index)
            with utils.LoggingContext():
                pfe.execute()
            for pkg_dir in pkgs_dirs:
                _loc = os.path.join(pkg_dir, index[pkg].fn)
                if os.path.isfile(_loc):
                    pkg_loc = _loc
                    break
        specs = _read_specs_from_package(pkg_loc, pkg_dist)
        additional_specs = utils.merge_dicts_of_lists(
            additional_specs, _filter_run_exports(specs, ignore_list))
    return additional_specs

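# --- The two helpers used above are referenced but not shown in this excerpt. Below
#     is a hedged reconstruction of find_pkg_dir_or_file_in_pkgs_dirs and
#     _read_specs_from_package, inferred from the older inline version of
#     get_upstream_pins that follows -- a sketch of the expected behavior, not the
#     verbatim conda-build implementation.
def _sketch_find_pkg_dir_or_file_in_pkgs_dirs(pkg_dist, m, files_only=False):
    _pkgs_dirs = pkgs_dirs + list(m.config.bldpkgs_dirs)
    for pkgs_dir in _pkgs_dirs:
        pkg_dir = os.path.join(pkgs_dir, pkg_dist)
        pkg_file = pkg_dir + '.tar.bz2'
        if not files_only and os.path.isdir(pkg_dir):
            return pkg_dir       # extracted package folder
        elif os.path.isfile(pkg_file):
            return pkg_file      # unextracted tarball
    return None


def _sketch_read_specs_from_package(pkg_loc, pkg_dist):
    # returns a dict like {'weak': [...], 'strong': [...]} of run_exports specs
    specs = {}
    if pkg_loc and os.path.isdir(pkg_loc):
        downstream_file = os.path.join(pkg_loc, 'info/run_exports')
        if os.path.isfile(downstream_file):
            with open(downstream_file) as f:
                specs = {'weak': [spec.rstrip() for spec in f.readlines()]}
        elif os.path.isfile(downstream_file + '.yaml'):
            with open(downstream_file + '.yaml') as f:
                specs = yaml.safe_load(f)
    elif pkg_loc and os.path.isfile(pkg_loc):
        legacy_specs = utils.package_has_file(pkg_loc, 'info/run_exports')
        specs_yaml = utils.package_has_file(pkg_loc, 'info/run_exports.yaml')
        if legacy_specs:
            # exclude packages pinning themselves (makes no sense)
            specs = {'weak': [spec.rstrip() for spec in legacy_specs.splitlines()
                              if not spec.startswith(pkg_dist.rsplit('-', 2)[0])]}
        elif specs_yaml:
            specs = yaml.safe_load(specs_yaml)
    return specs
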
def get_upstream_pins(m, actions, env):
    """Download packages from specs, then inspect each downloaded package for additional
    downstream dependency specs.  Return these additional specs."""
    # this attribute is added in the first pass of finalize_outputs_pass
    raw_specs = (m.original_meta.get('requirements', {}).get(env, [])
                 if hasattr(m, 'original_meta') else [])
    explicit_specs = [req.split(' ')[0] for req in raw_specs]
    linked_packages = actions.get('LINK', [])
    linked_packages = [pkg for pkg in linked_packages if pkg.name in explicit_specs]

    # edit the plan to download all necessary packages
    for key in ('LINK', 'EXTRACT', 'UNLINK'):
        if key in actions:
            del actions[key]
    # this should be just downloading packages.  We don't need to extract them -
    #    we read contents directly
    index, index_ts = get_build_index(getattr(m.config, '{}_subdir'.format(env)),
                                      bldpkgs_dir=m.config.bldpkgs_dir,
                                      output_folder=m.config.output_folder,
                                      channel_urls=m.config.channel_urls,
                                      debug=m.config.debug, verbose=m.config.verbose,
                                      locking=m.config.locking, timeout=m.config.timeout)
    if 'FETCH' in actions or 'EXTRACT' in actions:
        # this is to force the download
        execute_actions(actions, index, verbose=m.config.debug)
    ignore_list = utils.ensure_list(m.get_value('build/ignore_run_exports'))

    _pkgs_dirs = pkgs_dirs + list(m.config.bldpkgs_dirs)
    additional_specs = {}
    for pkg in linked_packages:
        pkg_loc = None
        if hasattr(pkg, 'dist_name'):
            pkg_dist = pkg.dist_name
        else:
            pkg = strip_channel(pkg)
            pkg_dist = pkg.split(' ')[0]
        for pkgs_dir in _pkgs_dirs:
            pkg_dir = os.path.join(pkgs_dir, pkg_dist)
            pkg_file = os.path.join(pkgs_dir, pkg_dist + '.tar.bz2')
            if os.path.isdir(pkg_dir):
                pkg_loc = pkg_dir
                break
            elif os.path.isfile(pkg_file):
                pkg_loc = pkg_file
                break

        # ran through all pkgs_dirs, and did not find package or folder.  Download it.
        # TODO: this is a vile hack reaching into conda's internals.  Replace with
        #    proper conda API when available.
        if not pkg_loc and conda_43:
            try:
                # the conda 4.4 API uses a single `link_prefs` kwarg
                # whereas conda 4.3 used `index` and `link_dists` kwargs
                pfe = ProgressiveFetchExtract(link_prefs=(index[pkg],))
            except TypeError:
                # TypeError: __init__() got an unexpected keyword argument 'link_prefs'
                pfe = ProgressiveFetchExtract(link_dists=[pkg], index=index)
            with utils.LoggingContext():
                pfe.execute()
            for pkg_dir in pkgs_dirs:
                _loc = os.path.join(pkg_dir, index[pkg].fn)
                if os.path.isfile(_loc):
                    pkg_loc = _loc
                    break

        specs = {}
        # guard pkg_loc: it stays None if the package was not found or fetched
        if pkg_loc and os.path.isdir(pkg_loc):
            downstream_file = os.path.join(pkg_loc, 'info/run_exports')
            if os.path.isfile(downstream_file):
                with open(downstream_file) as f:
                    specs = {'weak': [spec.rstrip() for spec in f.readlines()]}
            # a later attempt: record more info in the yaml file, to support
            #    "strong" run exports
            elif os.path.isfile(downstream_file + '.yaml'):
                with open(downstream_file + '.yaml') as f:
                    specs = yaml.safe_load(f)
        # use pkg_loc here, not the leftover loop variable pkg_file, which may point
        #    at a nonexistent candidate path when the package came from the download
        #    fallback above
        elif pkg_loc and os.path.isfile(pkg_loc):
            legacy_specs = utils.package_has_file(pkg_loc, 'info/run_exports')
            specs_yaml = utils.package_has_file(pkg_loc, 'info/run_exports.yaml')
            # test legacy_specs (not the just-initialized specs dict, which made this
            #    branch dead code)
            if legacy_specs:
                # exclude packages pinning themselves (makes no sense)
                specs = {'weak': [spec.rstrip() for spec in legacy_specs.splitlines()
                                  if not spec.startswith(pkg_dist.rsplit('-', 2)[0])]}
            elif specs_yaml:
                specs = yaml.safe_load(specs_yaml)

        additional_specs = utils.merge_dicts_of_lists(
            additional_specs, _filter_run_exports(specs, ignore_list))
    return additional_specs

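# --- Hedged sketch (an assumption, not the verbatim conda-build helper): a
#     _filter_run_exports consistent with how it is called above -- it takes the
#     {'weak': [...], 'strong': [...]} dict plus the build/ignore_run_exports names
#     and drops any spec whose package name is in the ignore list.
def _sketch_filter_run_exports(specs, ignore_list):
    filtered = {}
    for strength, spec_list in (specs or {}).items():
        kept = [spec for spec in utils.ensure_list(spec_list)
                if spec.split(' ')[0] not in ignore_list]
        if kept:
            filtered[strength] = kept
    return filtered
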
def execute_download_actions(m, actions, env, package_subset=None, require_files=False):
    # this newer get_build_index returns a 3-tuple; only the index itself is used here
    index, _, _ = get_build_index(getattr(m.config, '{}_subdir'.format(env)),
                                  bldpkgs_dir=m.config.bldpkgs_dir,
                                  output_folder=m.config.output_folder,
                                  channel_urls=m.config.channel_urls,
                                  debug=m.config.debug, verbose=m.config.verbose,
                                  locking=m.config.locking, timeout=m.config.timeout)

    # this should be just downloading packages.  We don't need to extract them -
    #    we read contents directly
    if 'FETCH' in actions or 'EXTRACT' in actions:
        # this is to force the download
        execute_actions(actions, index, verbose=m.config.debug)

    pkg_files = {}

    packages = actions.get('LINK', [])
    package_subset = utils.ensure_list(package_subset)
    selected_packages = set()
    if package_subset:
        for pkg in package_subset:
            if hasattr(pkg, 'name'):
                if pkg in packages:
                    selected_packages.add(pkg)
            else:
                pkg_name = pkg.split()[0]
                for link_pkg in packages:
                    if pkg_name == link_pkg.name:
                        selected_packages.add(link_pkg)
                        break
        packages = selected_packages

    for pkg in packages:
        if hasattr(pkg, 'dist_name'):
            pkg_dist = pkg.dist_name
        else:
            pkg = strip_channel(pkg)
            pkg_dist = pkg.split(' ')[0]
        pkg_loc = find_pkg_dir_or_file_in_pkgs_dirs(pkg_dist, m, files_only=require_files)

        # ran through all pkgs_dirs, and did not find package or folder.  Download it.
        # TODO: this is a vile hack reaching into conda's internals.  Replace with
        #    proper conda API when available.
        if not pkg_loc and conda_43:
            try:
                pkg_record = [_ for _ in index if _.dist_name == pkg_dist][0]
                # the conda 4.4 API uses a single `link_prefs` kwarg
                # whereas conda 4.3 used `index` and `link_dists` kwargs
                pfe = ProgressiveFetchExtract(link_prefs=(index[pkg_record],))
            except TypeError:
                # TypeError: __init__() got an unexpected keyword argument 'link_prefs'
                pfe = ProgressiveFetchExtract(link_dists=[pkg], index=index)
            with utils.LoggingContext():
                pfe.execute()
            for pkg_dir in pkgs_dirs:
                _loc = os.path.join(pkg_dir, index[pkg].fn)
                if os.path.isfile(_loc):
                    pkg_loc = _loc
                    break
        pkg_files[pkg] = pkg_loc, pkg_dist

    return pkg_files

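# --- Hedged usage sketch (hypothetical values): execute_download_actions returns a
#     mapping of linked package -> (pkg_loc, pkg_dist), where pkg_loc may be None if
#     the package could not be located or fetched. Callers can then read files
#     straight out of the cached folder or tarball without creating an environment.
def _example_execute_download_actions(m, actions):  # hypothetical demo helper
    pkg_files = execute_download_actions(m, actions, env='host',
                                         package_subset=['numpy'])
    for pkg, (pkg_loc, pkg_dist) in pkg_files.items():
        print(pkg_dist, '->', pkg_loc)
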
def create_env(prefix, specs_or_actions, config, subdir, clear_cache=True, retry=0,
               locks=None, is_cross=False):
    '''
    Create a conda environment for the given prefix and specs.
    '''
    if config.debug:
        utils.get_logger("conda_build").setLevel(logging.DEBUG)
        external_logger_context = utils.LoggingContext(logging.DEBUG)
    else:
        utils.get_logger("conda_build").setLevel(logging.INFO)
        external_logger_context = utils.LoggingContext(logging.ERROR)

    with external_logger_context:
        log = utils.get_logger(__name__)

        # if os.path.isdir(prefix):
        #     utils.rm_rf(prefix)

        if specs_or_actions:  # Don't waste time if there is nothing to do
            log.debug("Creating environment in %s", prefix)
            log.debug(str(specs_or_actions))

            with utils.path_prepended(prefix):
                if not locks:
                    locks = utils.get_conda_operation_locks(config)
                try:
                    with utils.try_acquire_locks(locks, timeout=config.timeout):
                        index, index_ts = get_build_index(config=config, subdir=subdir)
                        # input is a list - it's specs in MatchSpec format
                        if not hasattr(specs_or_actions, 'keys'):
                            specs = list(set(specs_or_actions))
                            actions = get_install_actions(prefix, index, specs, config,
                                                          timestamp=index_ts)
                        else:
                            actions = specs_or_actions
                        display_actions(actions, index)
                        if utils.on_win:
                            for k, v in os.environ.items():
                                os.environ[k] = str(v)
                        execute_actions(actions, index, verbose=config.debug)
                except (SystemExit, PaddingError, LinkError,
                        DependencyNeedsBuildingError, CondaError) as exc:
                    if (("too short in" in str(exc) or
                            re.search('post-link failed for: (?:[a-zA-Z]*::)?openssl',
                                      str(exc)) or
                            isinstance(exc, PaddingError)) and
                            config.prefix_length > 80):
                        if config.prefix_length_fallback:
                            log.warn("Build prefix failed with prefix length %d",
                                     config.prefix_length)
                            log.warn("Error was: ")
                            log.warn(str(exc))
                            log.warn("One or more of your package dependencies needs to be "
                                     "rebuilt with a longer prefix length.")
                            log.warn("Falling back to legacy prefix length of 80 characters.")
                            log.warn("Your package will not install into prefixes > 80 "
                                     "characters.")
                            config.prefix_length = 80

                            # Set this here and use to create environ
                            #   Setting this here is important because we use it below (symlink)
                            prefix = config.build_prefix
                            actions['PREFIX'] = prefix

                            create_env(prefix, actions, config=config, subdir=subdir,
                                       clear_cache=clear_cache, is_cross=is_cross)
                        else:
                            raise
                    elif 'lock' in str(exc):
                        if retry < config.max_env_retry:
                            log.warn("failed to create env, retrying.  exception was: %s",
                                     str(exc))
                            create_env(prefix, actions, config=config, subdir=subdir,
                                       clear_cache=clear_cache, retry=retry + 1,
                                       is_cross=is_cross)
                    elif ('requires a minimum conda version' in str(exc) or
                            'link a source that does not' in str(exc)):
                        with utils.try_acquire_locks(locks, timeout=config.timeout):
                            pkg_dir = str(exc)
                            folder = 0
                            while os.path.dirname(pkg_dir) not in pkgs_dirs and folder < 20:
                                pkg_dir = os.path.dirname(pkg_dir)
                                folder += 1
                            log.warn("I think conda ended up with a partial extraction for %s. "
                                     "Removing the folder and retrying", pkg_dir)
                            if os.path.isdir(pkg_dir):
                                utils.rm_rf(pkg_dir)
                        if retry < config.max_env_retry:
                            log.warn("failed to create env, retrying.  exception was: %s",
                                     str(exc))
                            create_env(prefix, actions, config=config, subdir=subdir,
                                       clear_cache=clear_cache, retry=retry + 1,
                                       is_cross=is_cross)
                        else:
                            log.error("Failed to create env, max retries exceeded.")
                            raise
                    else:
                        raise
                # HACK: some of the time, conda screws up somehow and incomplete packages
                #    result. Just retry.
                except (AssertionError, IOError, ValueError, RuntimeError, LockError) as exc:
                    if isinstance(exc, AssertionError):
                        with utils.try_acquire_locks(locks, timeout=config.timeout):
                            pkg_dir = os.path.dirname(os.path.dirname(str(exc)))
                            log.warn("I think conda ended up with a partial extraction for %s. "
                                     "Removing the folder and retrying", pkg_dir)
                            if os.path.isdir(pkg_dir):
                                utils.rm_rf(pkg_dir)
                    if retry < config.max_env_retry:
                        log.warn("failed to create env, retrying.  exception was: %s",
                                 str(exc))
                        create_env(prefix, actions, config=config, subdir=subdir,
                                   clear_cache=clear_cache, retry=retry + 1,
                                   is_cross=is_cross)
                    else:
                        log.error("Failed to create env, max retries exceeded.")
                        raise

    # We must not symlink conda across different platforms when cross-compiling.
    #  On second thought, I think we must, because activating the host env does
    #     the symlink for us anyway, and when activate does it, we end up with
    #     conda symlinks in every package.  =()
    # if os.path.basename(prefix) == '_build_env' or not is_cross:
    if utils.on_win:
        shell = "cmd.exe"
    else:
        shell = "bash"
    symlink_conda(prefix, sys.prefix, shell)

def get_upstream_pins(m, actions, env):
    """Download packages from specs, then inspect each downloaded package for additional
    downstream dependency specs.  Return these additional specs."""
    additional_specs = []
    linked_packages = actions.get('LINK', [])
    # edit the plan to download all necessary packages
    for key in ('LINK', 'EXTRACT', 'UNLINK'):
        if key in actions:
            del actions[key]
    # this should be just downloading packages.  We don't need to extract them -
    #    we read contents directly
    index, index_ts = get_build_index(getattr(m.config, '{}_subdir'.format(env)),
                                      bldpkgs_dir=m.config.bldpkgs_dir,
                                      output_folder=m.config.output_folder,
                                      channel_urls=m.config.channel_urls,
                                      debug=m.config.debug, verbose=m.config.verbose,
                                      locking=m.config.locking, timeout=m.config.timeout)
    if actions:
        execute_actions(actions, index, verbose=m.config.debug)

    ignore_list = utils.ensure_list(m.get_value('build/ignore_run_exports'))

    _pkgs_dirs = pkgs_dirs + list(m.config.bldpkgs_dirs)
    for pkg in linked_packages:
        for pkgs_dir in _pkgs_dirs:
            if hasattr(pkg, 'dist_name'):
                pkg_dist = pkg.dist_name
            else:
                pkg = strip_channel(pkg)
                pkg_dist = pkg.split(' ')[0]
            pkg_dir = os.path.join(pkgs_dir, pkg_dist)
            pkg_file = os.path.join(pkgs_dir, pkg_dist + '.tar.bz2')
            if os.path.isdir(pkg_dir):
                downstream_file = os.path.join(pkg_dir, 'info/run_exports')
                if os.path.isfile(downstream_file):
                    specs = open(downstream_file).read().splitlines()
                    additional_specs.extend(_filter_run_exports(specs, ignore_list))
                break
            elif os.path.isfile(pkg_file):
                extra_specs = utils.package_has_file(pkg_file, 'info/run_exports')
                if extra_specs:
                    # exclude packages pinning themselves (makes no sense)
                    extra_specs = [spec for spec in extra_specs.splitlines()
                                   if not spec.startswith(pkg_dist.rsplit('-', 2)[0])]
                    additional_specs.extend(_filter_run_exports(extra_specs, ignore_list))
                break
            elif utils.conda_43():
                # TODO: this is a vile hack reaching into conda's internals.  Replace with
                #    proper conda API when available.
                try:
                    pfe = ProgressiveFetchExtract(link_dists=[pkg], index=index)
                    with utils.LoggingContext():
                        pfe.execute()
                    for pkgs_dir in _pkgs_dirs:
                        pkg_file = os.path.join(pkgs_dir, pkg.dist_name + '.tar.bz2')
                        if os.path.isfile(pkg_file):
                            extra_specs = utils.package_has_file(pkg_file,
                                                                 'info/run_exports')
                            if extra_specs:
                                specs = extra_specs.splitlines()
                                additional_specs.extend(
                                    _filter_run_exports(specs, ignore_list))
                            break
                    break
                except KeyError:
                    raise DependencyNeedsBuildingError(packages=[pkg.name])
        else:
            # for-else: no break means no pkgs_dir candidate matched
            raise RuntimeError("Didn't find expected package {} in package cache ({})"
                               .format(pkg_dist, _pkgs_dirs))

    return additional_specs

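# --- Note the return-shape difference across the three get_upstream_pins versions
#     above: this oldest one returns a flat list of run_exports specs, while the two
#     later versions return a dict of lists keyed by strength, merged via
#     utils.merge_dicts_of_lists. A hedged illustration with hypothetical values:
_legacy_shape_example = ['libstdcxx-ng >=4.9', 'libgcc-ng >=4.9']
_current_shape_example = {'weak': ['libstdcxx-ng >=4.9'], 'strong': ['vc 14.*']}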