def _trim_None_strings(meta_dict):
    """Recursively replace strings containing 'None' with None, and drop them from lists."""
    log = utils.get_logger(__name__)
    for key, value in meta_dict.items():
        if hasattr(value, 'keys'):
            meta_dict[key] = _trim_None_strings(value)
        elif value and hasattr(value, '__iter__') or isinstance(value, string_types):
            if isinstance(value, string_types):
                meta_dict[key] = None if 'None' in value else value
            else:
                # support lists of dicts (homogeneous)
                keep = []
                if hasattr(value[0], 'keys'):
                    for d in value:
                        trimmed_dict = _trim_None_strings(d)
                        if trimmed_dict:
                            keep.append(trimmed_dict)
                # support lists of strings (homogeneous)
                else:
                    keep = [i for i in value if 'None' not in i]
                meta_dict[key] = keep
        else:
            log.debug("found unrecognized data type in dictionary: {0}, type: {1}"
                      .format(value, type(value)))
    trim_empty_keys(meta_dict)
    return meta_dict

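For orientation, a minimal, self-contained sketch of the recursive trimming idea above. The name `trim_none_strings` and the inline empty-key cleanup are hypothetical stand-ins for illustration; the real helper relies on conda-build's `utils.get_logger` and `trim_empty_keys`.

# Illustrative sketch only -- a hypothetical stand-in, not conda-build's helper.
def trim_none_strings(d):
    """Recursively drop values that contain the literal string 'None'."""
    for key, value in list(d.items()):
        if isinstance(value, dict):
            trim_none_strings(value)
        elif isinstance(value, str):
            if 'None' in value:
                d[key] = None
        elif isinstance(value, list):
            # homogeneous list of strings; a list of dicts would recurse instead
            d[key] = [i for i in value if 'None' not in i]
    # stand-in for trim_empty_keys: drop keys whose values became falsy
    for key in [k for k, v in d.items() if not v]:
        del d[key]
    return d

print(trim_none_strings({'a': 'None', 'b': ['1.0', 'None'], 'c': {'d': 'None'}}))
# -> {'b': ['1.0']}
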
def dict_of_lists_to_list_of_dicts(dict_of_lists, extend_keys=None):
    # http://stackoverflow.com/a/5228294/1170370
    # end result is a collection of dicts, like [{'python': 2.7, 'numpy': 1.11},
    #                                            {'python': 3.5, 'numpy': 1.11}]
    dicts = []
    pass_through_keys = (['extend_keys', 'zip_keys'] + list(ensure_list(extend_keys)) +
                         list(_get_zip_key_set(dict_of_lists)))
    dimensions = {k: v for k, v in dict_of_lists.items() if k not in pass_through_keys}
    # here's where we add in the zipped dimensions.  Zipped stuff is concatenated strings,
    #    to avoid being distributed in the product.
    for group in _get_zip_groups(dict_of_lists):
        dimensions.update(group)

    # in case selectors nullify any groups - or else zip reduces whole set to nil
    trim_empty_keys(dimensions)

    for x in product(*dimensions.values()):
        remapped = dict(six.moves.zip(dimensions, x))
        for col in pass_through_keys:
            v = dict_of_lists.get(col)
            if v:
                remapped[col] = v
        # split out zipped keys
        for k, v in remapped.copy().items():
            if isinstance(k, string_types) and isinstance(v, string_types):
                keys = k.split('#')
                values = v.split('#')
                for (_k, _v) in zip(keys, values):
                    remapped[_k] = _v
                if '#' in k:
                    del remapped[k]
        dicts.append(remapped)
    return dicts

def parse_config_file(path, config):
    from conda_build.metadata import select_lines, ns_cfg
    with open(path) as f:
        contents = f.read()
    contents = select_lines(contents, ns_cfg(config), variants_in_place=False)
    content = yaml.load(contents, Loader=yaml.loader.BaseLoader) or {}
    trim_empty_keys(content)
    return content

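The `BaseLoader` choice above matters: it parses every scalar as a string, so variant values like `2.70` are not coerced into numbers. A quick, self-contained illustration (assumes PyYAML is installed):

import yaml

doc = """
python:
  - 2.7
  - 3.6
"""

# BaseLoader keeps scalars as strings; SafeLoader coerces them to native types.
print(yaml.load(doc, Loader=yaml.BaseLoader))  # {'python': ['2.7', '3.6']}
print(yaml.load(doc, Loader=yaml.SafeLoader))  # {'python': [2.7, 3.6]}
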
def _get_hash_contents(self):
    sections = ['source', 'requirements', 'build']
    # make a copy of values, so that no sorting occurs in place
    composite = HashableDict({section: copy.copy(self.get_section(section))
                              for section in sections})
    outputs = self.get_section('outputs')
    if outputs:
        outs = []
        for out in outputs:
            out = copy.copy(out)
            # files are dynamically determined, and there's no way to match them at render time.
            #    we need to exclude them from the hash.
            if 'files' in out:
                del out['files']
            outs.append(out)
        composite.update({'outputs': [HashableDict(out) for out in outs]})

    # filter build requirements for ones that should not be in the hash
    requirements = composite.get('requirements', {})
    build_reqs = requirements.get('build', [])
    excludes = self.config.variant.get('exclude_from_build_hash', [])
    if excludes:
        exclude_pattern = re.compile('|'.join(r'{}[\s$]?.*'.format(exc) for exc in excludes))
        build_reqs = [req for req in build_reqs if not exclude_pattern.match(req)]
    requirements['build'] = build_reqs
    composite['requirements'] = requirements

    # remove the build number from the hash, so that we can bump it without changing the hash
    if 'number' in composite['build']:
        del composite['build']['number']
    # remove the build string, so that hashes don't affect themselves
    if 'string' in composite['build']:
        del composite['build']['string']
    if not composite['build']:
        del composite['build']
    for key in 'build', 'run':
        if key in composite['requirements'] and not composite['requirements'].get(key):
            del composite['requirements'][key]
    trim_empty_keys(composite)

    file_paths = []
    if self.path:
        recorded_input_files = os.path.join(self.path, '..', 'hash_input_files')
        if os.path.exists(recorded_input_files):
            with open(recorded_input_files) as f:
                file_paths = f.read().splitlines()
        else:
            files = utils.rec_glob(self.path, "*")
            file_paths = sorted([f.replace(self.path + os.sep, '') for f in files])
            # exclude meta.yaml and meta.yaml.template, because the json dictionary captures
            #    their content
            file_paths = [f for f in file_paths if not f.startswith('meta.yaml')]
            file_paths = sorted(filter_files(file_paths, self.path))
    return composite, file_paths

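To make the exclusion filter above concrete, here is a standalone run of the same pattern construction. The package names are hypothetical; note that `$` inside the character class is a literal character, a quirk kept from the original pattern.

import re

excludes = ['coverage', 'pytest']
# each excluded name becomes a prefix match against the raw spec strings
exclude_pattern = re.compile('|'.join(r'{}[\s$]?.*'.format(exc) for exc in excludes))

build_reqs = ['python', 'pytest 3.2.*', 'coverage', 'numpy 1.11']
print([req for req in build_reqs if not exclude_pattern.match(req)])
# -> ['python', 'numpy 1.11']
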
def dict_of_lists_to_list_of_dicts(dict_or_list_of_dicts, platform=cc_platform):
    # http://stackoverflow.com/a/5228294/1170370
    # end result is a collection of dicts, like [{'python': 2.7, 'numpy': 1.11},
    #                                            {'python': 3.5, 'numpy': 1.11}]
    if hasattr(dict_or_list_of_dicts, 'keys'):
        specs = [DEFAULT_VARIANTS, dict_or_list_of_dicts]
    else:
        specs = [DEFAULT_VARIANTS] + list(dict_or_list_of_dicts or [])
    combined, extend_keys = combine_specs(specs)

    if 'target_platform' not in combined or not combined['target_platform']:
        try:
            combined['target_platform'] = [DEFAULT_PLATFORMS[platform]]
        except KeyError:
            combined['target_platform'] = [DEFAULT_PLATFORMS[platform.split('-')[0]]]

    if 'extend_keys' in combined:
        del combined['extend_keys']

    dicts = []
    dimensions = {k: v for k, v in combined.items()
                  if k not in (['extend_keys'] + list(extend_keys) +
                               list(_get_zip_key_set(combined)))}
    # here's where we add in the zipped dimensions
    for group in _get_zip_groups(combined):
        dimensions.update(group)

    # in case selectors nullify any groups - or else zip reduces whole set to nil
    trim_empty_keys(dimensions)

    for x in product(*dimensions.values()):
        remapped = dict(six.moves.zip(dimensions, x))
        for col in list(extend_keys):
            v = combined.get(col)
            if v:
                remapped[col] = v if hasattr(v, 'keys') else list(set(v))
        # split out zipped keys
        for k, v in remapped.copy().items():
            if isinstance(k, string_types) and isinstance(v, string_types):
                keys = k.split('#')
                values = v.split('#')
                for (_k, _v) in zip(keys, values):
                    remapped[_k] = _v
                if '#' in k:
                    del remapped[k]
        dicts.append(remapped)
    return dicts

def dict_of_lists_to_list_of_dicts(dict_or_list_of_dicts, platform=cc_platform):
    # http://stackoverflow.com/a/5228294/1170370
    # end result is a collection of dicts, like [{'python': 2.7, 'numpy': 1.11},
    #                                            {'python': 3.5, 'numpy': 1.11}]
    if hasattr(dict_or_list_of_dicts, 'keys'):
        specs = [DEFAULT_VARIANTS, dict_or_list_of_dicts]
    else:
        specs = [DEFAULT_VARIANTS] + list(dict_or_list_of_dicts or [])
    combined, extend_keys = combine_specs(specs)

    # default target platform is native subdir
    # if 'target_platform' not in combined:
    #     from conda_build.config import Config
    #     combined['target_platform'] = [Config().subdir]

    if 'extend_keys' in combined:
        del combined['extend_keys']

    dicts = []
    pass_through_keys = (['extend_keys', 'zip_keys'] + list(extend_keys) +
                         list(_get_zip_key_set(combined)))
    dimensions = {k: v for k, v in combined.items() if k not in pass_through_keys}
    # here's where we add in the zipped dimensions
    for group in _get_zip_groups(combined):
        dimensions.update(group)

    # in case selectors nullify any groups - or else zip reduces whole set to nil
    trim_empty_keys(dimensions)

    for x in product(*dimensions.values()):
        remapped = dict(six.moves.zip(dimensions, x))
        for col in pass_through_keys:
            v = combined.get(col)
            if v:
                remapped[col] = v
        # split out zipped keys
        for k, v in remapped.copy().items():
            if isinstance(k, string_types) and isinstance(v, string_types):
                keys = k.split('#')
                values = v.split('#')
                for (_k, _v) in zip(keys, values):
                    remapped[_k] = _v
                if '#' in k:
                    del remapped[k]
        dicts.append(remapped)
    return dicts

def output_yaml(metadata, filename=None):
    utils.trim_empty_keys(metadata.meta)
    output = yaml.dump(_MetaYaml(metadata.meta), Dumper=_IndentDumper,
                       default_flow_style=False, indent=4)
    if filename:
        if any(sep in filename for sep in ('\\', '/')):
            try:
                os.makedirs(os.path.dirname(filename))
            except OSError:
                pass
        with open(filename, "w") as f:
            f.write(output)
        return "Wrote yaml to %s" % filename
    else:
        return output

def dict_of_lists_to_list_of_dicts(dict_of_lists, extend_keys=None):
    # http://stackoverflow.com/a/5228294/1170370
    # end result is a collection of dicts, like [{'python': 2.7, 'numpy': 1.11},
    #                                            {'python': 3.5, 'numpy': 1.11}]
    dicts = []
    if not extend_keys:
        extend_keys = set(ensure_list(dict_of_lists.get('extend_keys')))
    pass_through_keys = set(['extend_keys', 'zip_keys', 'pin_run_as_build', 'replacements'] +
                            list(ensure_list(extend_keys)) +
                            list(_get_zip_key_set(dict_of_lists)))
    dimensions = {k: v for k, v in dict_of_lists.items() if k not in pass_through_keys}
    # here's where we add in the zipped dimensions.  Zipped stuff is concatenated strings,
    #    to avoid being distributed in the product.
    for group in _get_zip_groups(dict_of_lists):
        dimensions.update(group)

    # in case selectors nullify any groups - or else zip reduces whole set to nil
    trim_empty_keys(dimensions)

    for x in product(*dimensions.values()):
        remapped = dict(six.moves.zip(dimensions, x))
        for col in pass_through_keys:
            v = dict_of_lists.get(col)
            if v or v == '':
                if isinstance(v, (OrderedDict, dict)):
                    remapped[col] = v.copy()
                else:
                    remapped[col] = v
        # split out zipped keys
        to_del = set()
        for k, v in remapped.items():
            if isinstance(k, string_types) and isinstance(v, string_types):
                keys = _split_str(k, '#')
                values = _split_str(v, '#')
                for (_k, _v) in zip(keys, values):
                    # I am unclear if I should be doing something else here!
                    if not isinstance(remapped[_k], (OrderedDict, dict)):
                        remapped[_k] = _v
                if '#' in k:
                    to_del.add(k)
        for key in to_del:
            del remapped[key]
        dicts.append(remapped)
    return dicts

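All of these `dict_of_lists_to_list_of_dicts` versions share one core trick: the dict-of-lists to list-of-dicts Cartesian product from the Stack Overflow answer cited above, with zipped keys riding through the product as single `#`-joined strings so their values are never distributed independently. A self-contained sketch of just that core, with toy values:

from itertools import product

# zipped keys travel through the product as one '#'-joined dimension, so
# python 2.7 always pairs with numpy 1.11 and never with 1.13
dims = {
    'python#numpy': ['2.7#1.11', '3.6#1.13'],
    'vc': ['9', '14'],
}

for combo in product(*dims.values()):
    variant = dict(zip(dims, combo))
    # split the zipped key back out into its member keys
    for k in list(variant):
        if '#' in k:
            variant.update(zip(k.split('#'), variant.pop(k).split('#')))
    print(variant)
# four variants, e.g. {'python': '2.7', 'numpy': '1.11', 'vc': '9'}
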
def output_yaml(metadata, filename=None, suppress_outputs=False):
    local_metadata = metadata.copy()
    utils.trim_empty_keys(local_metadata.meta)
    if suppress_outputs and local_metadata.is_output and 'outputs' in local_metadata.meta:
        del local_metadata.meta['outputs']
    output = yaml.dump(_MetaYaml(local_metadata.meta), Dumper=_IndentDumper,
                       default_flow_style=False, indent=4)
    if filename:
        if any(sep in filename for sep in ('\\', '/')):
            try:
                os.makedirs(os.path.dirname(filename))
            except OSError:
                pass
        with open(filename, "w") as f:
            f.write(output)
        return "Wrote yaml to %s" % filename
    else:
        return output

def get_install_actions(prefix, index, specs, config, retries=0, timestamp=0, subdir=None):
    global _cached_install_actions
    global _last_timestamp
    log = utils.get_logger(__name__)
    if config.verbose:
        capture = contextlib.contextmanager(lambda: (yield))
    else:
        capture = utils.capture
    actions = {'LINK': []}
    for feature, value in feature_list:
        if value:
            specs.append('%s@' % feature)
    specs = tuple(_ensure_valid_spec(spec) for spec in specs)
    if (specs, subdir, timestamp) in _cached_install_actions and timestamp > _last_timestamp:
        actions = _cached_install_actions[(specs, subdir, timestamp)]
    else:
        if specs:
            # this is hiding output like:
            #    Fetching package metadata ...........
            #    Solving package specifications: ..........
            with capture():
                try:
                    actions = install_actions(prefix, index, specs, force=True)
                    # Experimenting with getting conda to create fewer Resolve objects
                    # Experiment failed, seemingly due to conda's statefulness.  Packages
                    #    could not be found.
                    # index_timestamp=timestamp)
                except NoPackagesFoundError as exc:
                    # Attempt to skeleton packages it can't find
                    packages = [x.split(" ")[0] for x in exc.pkgs]
                    for pkg in packages:
                        if pkg.startswith("r-"):
                            api.skeletonize([pkg], "cran")
                        else:
                            api.skeletonize([pkg], "pypi")
                    raise DependencyNeedsBuildingError(exc, subdir=subdir)
                except (SystemExit, PaddingError, LinkError, DependencyNeedsBuildingError,
                        CondaError, AssertionError) as exc:
                    if 'lock' in str(exc):
                        log.warn("failed to get install actions, retrying.  exception was: %s",
                                 str(exc))
                    elif ('requires a minimum conda version' in str(exc) or
                          'link a source that does not' in str(exc) or
                          isinstance(exc, AssertionError)):
                        locks = utils.get_conda_operation_locks(config)
                        with utils.try_acquire_locks(locks, timeout=config.timeout):
                            pkg_dir = str(exc)
                            folder = 0
                            while os.path.dirname(pkg_dir) not in pkgs_dirs and folder < 20:
                                pkg_dir = os.path.dirname(pkg_dir)
                                folder += 1
                            log.warn("I think conda ended up with a partial extraction for %s.  "
                                     "Removing the folder and retrying", pkg_dir)
                            if pkg_dir in pkgs_dirs and os.path.isdir(pkg_dir):
                                utils.rm_rf(pkg_dir)
                    if retries < config.max_env_retry:
                        log.warn("failed to get install actions, retrying.  exception was: %s",
                                 str(exc))
                        actions = get_install_actions(prefix, index, specs, config,
                                                      retries=retries + 1, timestamp=timestamp)
                    else:
                        log.error("Failed to get install actions, max retries exceeded.")
                        raise
        if config.disable_pip:
            actions['LINK'] = [spec for spec in actions['LINK']
                               if not spec.startswith('pip-') and
                               not spec.startswith('setuptools-')]
        utils.trim_empty_keys(actions)
        _cached_install_actions[(specs, subdir, timestamp)] = actions
        _last_timestamp = timestamp
    return actions

def expand_variants(spec, extend_keys=None):
    """
    Helper function to expand spec into all of the variants.

    .. code-block:: python

        >>> spec = {
                # normal expansions
                "foo": [2.7, 3.7, 3.8],
                # zip_keys are the values that need to be expanded as a set
                "zip_keys": [["bar", "baz"], ["qux", "quux", "quuz"]],
                "bar": [1, 2, 3],
                "baz": [2, 4, 6],
                "qux": [4, 5],
                "quux": [8, 10],
                "quuz": [12, 15],
                # extend_keys are those values which we do not expand
                "extend_keys": ["corge"],
                "corge": 42,
            }
        >>> expand_variants(spec)
        [{
            "foo": 2.7,
            "bar": 1,
            "baz": 2,
            "qux": 4,
            "quux": 8,
            "quuz": 12,
            "corge": 42,
            "zip_keys": ...,
            "extend_keys": ...,
        }, {
            "foo": 2.7,
            "bar": 1,
            "baz": 2,
            "qux": 5,
            "quux": 10,
            "quuz": 15,
            "corge": 42,
            ...,
        }, ...]

    :param spec: Specification to expand
    :type spec: `dict`
    :param extend_keys: keys from `spec` to carry over into expanded `spec` without
        modification, providing this will ignore any `extend_keys` value in `spec`
    :type extend_keys: `list` of keys (`str`)
    :return: Expanded specification
    :rtype: `list` of `dict`
    """
    zip_keys = _get_zip_keys(spec)

    # key/values from spec that do not expand
    base_keys = {'extend_keys', 'zip_keys', 'pin_run_as_build', 'replacements'}
    base_keys.update(ensure_list(extend_keys or spec.get('extend_keys')))
    base_keys.difference_update(*zip_keys)  # keys in zip_keys are not base values
    base_keys.intersection_update(spec)  # only include keys defined in spec
    base = {k: spec[k] for k in base_keys if spec[k] or spec[k] == ""}

    # key/values from spec that do expand
    matrix = {
        tuple(ensure_list(k)): [ensure_list(v) for v in ensure_list(spec[k])]
        for k in set(spec).difference(base_keys, *zip_keys)
    }
    matrix.update({zg: list(zip(*(ensure_list(spec[k]) for k in zg))) for zg in zip_keys})
    trim_empty_keys(matrix)

    # Cartesian Product of dict of lists
    # http://stackoverflow.com/a/5228294/1170370
    # dict.keys() and dict.values() orders are the same even prior to Python 3.6
    variants = []
    for values in product(*matrix.values()):
        variant = {k: copy(v) for k, v in base.items()}
        variant.update({k: v for zg, zv in zip(matrix, values) for k, v in zip(zg, zv)})
        variants.append(variant)
    return variants

def explode_variants(spec):
    """
    Helper function to explode spec into all of the variants.

    .. code-block:: python

        >>> spec = {
                # normal expansions
                "foo": [2.7, 3.7, 3.8],
                # zip_keys are the values that need to be exploded as a set
                "zip_keys": [["bar", "baz"], ["qux", "quux", "quuz"]],
                "bar": [1, 2, 3],
                "baz": [2, 4, 6],
                "qux": [4, 5],
                "quux": [8, 10],
                "quuz": [12, 15],
                # extend_keys are those values which we do not explode
                "extend_keys": ["corge"],
                "corge": 42,
            }
        >>> explode_variants(spec)
        [{
            "foo": 2.7,
            "bar": 1,
            "baz": 2,
            "qux": 4,
            "quux": 8,
            "quuz": 12,
            "corge": 42,
            "zip_keys": ...,
            "extend_keys": ...,
        }, {
            "foo": 2.7,
            "bar": 1,
            "baz": 2,
            "qux": 5,
            "quux": 10,
            "quuz": 15,
            "corge": 42,
            ...,
        }, ...]

    :param spec: Specification to explode
    :type spec: `dict`
    :return: Exploded specification
    :rtype: `list` of `dict`
    """
    zip_keys = _get_zip_keys(spec)

    # key/values from spec that do not explode
    passthru_keys = _get_passthru_keys(spec, zip_keys)
    passthru = {k: spec[k] for k in passthru_keys if spec[k] or spec[k] == ""}

    # key/values from spec that do explode
    explode_keys = _get_explode_keys(spec, passthru_keys)
    explode = {
        (k,): [ensure_list(v, include_dict=False) for v in ensure_list(spec[k])]
        for k in explode_keys.difference(*zip_keys)
    }
    explode.update({zg: list(zip(*(ensure_list(spec[k]) for k in zg))) for zg in zip_keys})
    trim_empty_keys(explode)

    # Cartesian Product of dict of lists
    # http://stackoverflow.com/a/5228294/1170370
    # dict.keys() and dict.values() orders are the same even prior to Python 3.6
    variants = []
    for values in product(*explode.values()):
        variant = {k: copy(v) for k, v in passthru.items()}
        variant.update({k: v for zg, zv in zip(explode, values) for k, v in zip(zg, zv)})
        variants.append(variant)
    return variants

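`explode_variants` realizes the same Cartesian product with tuple-valued matrix keys: each singleton key becomes a one-element tuple, and each zip group becomes a single dimension whose values are pre-zipped tuples, so grouped keys vary together. A self-contained sketch of that shape, with toy values rather than conda-build's helpers:

from itertools import product

# one singleton dimension and one zip-group dimension of pre-zipped tuples
matrix = {
    ('foo',): [('a',), ('b',)],
    ('bar', 'baz'): list(zip([1, 2], [10, 20])),  # [(1, 10), (2, 20)]
}

variants = []
for values in product(*matrix.values()):
    variant = {}
    for key_group, value_group in zip(matrix, values):
        variant.update(zip(key_group, value_group))
    variants.append(variant)
print(variants)
# [{'foo': 'a', 'bar': 1, 'baz': 10}, {'foo': 'a', 'bar': 2, 'baz': 20},
#  {'foo': 'b', 'bar': 1, 'baz': 10}, {'foo': 'b', 'bar': 2, 'baz': 20}]
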
def get_install_actions(prefix, specs, env, retries=0, subdir=None,
                        verbose=True, debug=False, locking=True,
                        bldpkgs_dirs=None, timeout=90, disable_pip=False,
                        max_env_retry=3, output_folder=None, channel_urls=None):
    global cached_actions
    global last_index_ts
    actions = {}
    log = utils.get_logger(__name__)
    conda_log_level = logging.WARN
    if verbose:
        capture = contextlib.contextmanager(lambda: (yield))
    elif debug:
        capture = contextlib.contextmanager(lambda: (yield))
        conda_log_level = logging.DEBUG
    else:
        capture = utils.capture
    for feature, value in feature_list:
        if value:
            specs.append('%s@' % feature)

    bldpkgs_dirs = ensure_list(bldpkgs_dirs)

    index, index_ts = get_build_index(subdir, list(bldpkgs_dirs)[0],
                                      output_folder=output_folder, channel_urls=channel_urls,
                                      debug=debug, verbose=verbose, locking=locking,
                                      timeout=timeout)
    specs = tuple(_ensure_valid_spec(spec) for spec in specs)

    if (specs, env, subdir, channel_urls) in cached_actions and last_index_ts >= index_ts:
        actions = cached_actions[(specs, env, subdir, channel_urls)].copy()
        if "PREFIX" in actions:
            actions['PREFIX'] = prefix
    elif specs:
        # this is hiding output like:
        #    Fetching package metadata ...........
        #    Solving package specifications: ..........
        with utils.LoggingContext(conda_log_level):
            with capture():
                try:
                    actions = install_actions(prefix, index, specs, force=True)
                except NoPackagesFoundError as exc:
                    raise DependencyNeedsBuildingError(exc, subdir=subdir)
                except (SystemExit, PaddingError, LinkError, DependencyNeedsBuildingError,
                        CondaError, AssertionError) as exc:
                    if 'lock' in str(exc):
                        log.warn("failed to get install actions, retrying.  exception was: %s",
                                 str(exc))
                    elif ('requires a minimum conda version' in str(exc) or
                          'link a source that does not' in str(exc) or
                          isinstance(exc, AssertionError)):
                        locks = utils.get_conda_operation_locks(locking, bldpkgs_dirs, timeout)
                        with utils.try_acquire_locks(locks, timeout=timeout):
                            pkg_dir = str(exc)
                            folder = 0
                            while os.path.dirname(pkg_dir) not in pkgs_dirs and folder < 20:
                                pkg_dir = os.path.dirname(pkg_dir)
                                folder += 1
                            log.warn("I think conda ended up with a partial extraction for %s.  "
                                     "Removing the folder and retrying", pkg_dir)
                            if pkg_dir in pkgs_dirs and os.path.isdir(pkg_dir):
                                utils.rm_rf(pkg_dir)
                    if retries < max_env_retry:
                        log.warn("failed to get install actions, retrying.  exception was: %s",
                                 str(exc))
                        actions = get_install_actions(prefix, tuple(specs), env,
                                                      retries=retries + 1, subdir=subdir,
                                                      verbose=verbose, debug=debug,
                                                      locking=locking,
                                                      bldpkgs_dirs=tuple(bldpkgs_dirs),
                                                      timeout=timeout, disable_pip=disable_pip,
                                                      max_env_retry=max_env_retry,
                                                      output_folder=output_folder,
                                                      channel_urls=tuple(channel_urls))
                    else:
                        log.error("Failed to get install actions, max retries exceeded.")
                        raise
        if disable_pip:
            actions['LINK'] = [spec for spec in actions['LINK']
                               if not spec.startswith('pip-') and
                               not spec.startswith('setuptools-')]
        utils.trim_empty_keys(actions)
        cached_actions[(specs, env, subdir, channel_urls)] = actions.copy()
        last_index_ts = index_ts
    return actions

def get_install_actions(prefix, specs, env, retries=0, subdir=None,
                        verbose=True, debug=False, locking=True,
                        bldpkgs_dirs=None, timeout=900, disable_pip=False,
                        max_env_retry=3, output_folder=None, channel_urls=None):
    global cached_actions
    global last_index_ts
    actions = {}
    log = utils.get_logger(__name__)
    conda_log_level = logging.WARN
    specs = list(specs)
    if specs:
        specs.extend(create_default_packages)
    if verbose or debug:
        capture = contextlib.contextmanager(lambda: (yield))
        if debug:
            conda_log_level = logging.DEBUG
    else:
        capture = utils.capture
    for feature, value in feature_list:
        if value:
            specs.append('%s@' % feature)

    bldpkgs_dirs = ensure_list(bldpkgs_dirs)

    index, index_ts, _ = get_build_index(subdir, list(bldpkgs_dirs)[0],
                                         output_folder=output_folder,
                                         channel_urls=channel_urls,
                                         debug=debug, verbose=verbose,
                                         locking=locking, timeout=timeout)
    specs = tuple(utils.ensure_valid_spec(spec) for spec in specs
                  if not str(spec).endswith('@'))

    if ((specs, env, subdir, channel_urls, disable_pip) in cached_actions and
            last_index_ts >= index_ts):
        actions = cached_actions[(specs, env, subdir, channel_urls, disable_pip)].copy()
        if "PREFIX" in actions:
            actions['PREFIX'] = prefix
    elif specs:
        # this is hiding output like:
        #    Fetching package metadata ...........
        #    Solving package specifications: ..........
        with utils.LoggingContext(conda_log_level):
            with capture():
                try:
                    actions = install_actions(prefix, index, specs, force=True)
                except (NoPackagesFoundError, UnsatisfiableError) as exc:
                    raise DependencyNeedsBuildingError(exc, subdir=subdir)
                except (SystemExit, PaddingError, LinkError, DependencyNeedsBuildingError,
                        CondaError, AssertionError, BuildLockError) as exc:
                    if 'lock' in str(exc):
                        log.warn("failed to get install actions, retrying.  exception was: %s",
                                 str(exc))
                    elif ('requires a minimum conda version' in str(exc) or
                          'link a source that does not' in str(exc) or
                          isinstance(exc, AssertionError)):
                        locks = utils.get_conda_operation_locks(locking, bldpkgs_dirs, timeout)
                        with utils.try_acquire_locks(locks, timeout=timeout):
                            pkg_dir = str(exc)
                            folder = 0
                            while os.path.dirname(pkg_dir) not in pkgs_dirs and folder < 20:
                                pkg_dir = os.path.dirname(pkg_dir)
                                folder += 1
                            log.warn("I think conda ended up with a partial extraction for %s.  "
                                     "Removing the folder and retrying", pkg_dir)
                            if pkg_dir in pkgs_dirs and os.path.isdir(pkg_dir):
                                utils.rm_rf(pkg_dir)
                    if retries < max_env_retry:
                        log.warn("failed to get install actions, retrying.  exception was: %s",
                                 str(exc))
                        actions = get_install_actions(prefix, tuple(specs), env,
                                                      retries=retries + 1, subdir=subdir,
                                                      verbose=verbose, debug=debug,
                                                      locking=locking,
                                                      bldpkgs_dirs=tuple(bldpkgs_dirs),
                                                      timeout=timeout, disable_pip=disable_pip,
                                                      max_env_retry=max_env_retry,
                                                      output_folder=output_folder,
                                                      channel_urls=tuple(channel_urls))
                    else:
                        log.error("Failed to get install actions, max retries exceeded.")
                        raise
        if disable_pip:
            for pkg in ('pip', 'setuptools', 'wheel'):
                # specs are the raw specifications, not the conda-derived actual specs
                # We're testing that pip etc. are manually specified
                if not any(re.match(r'^%s(?:$|[\s=].*)' % pkg, str(dep)) for dep in specs):
                    actions['LINK'] = [spec for spec in actions['LINK'] if spec.name != pkg]
        utils.trim_empty_keys(actions)
        cached_actions[(specs, env, subdir, channel_urls, disable_pip)] = actions.copy()
        last_index_ts = index_ts
    return actions

def get_install_actions(prefix, index, specs, config, retries=0):
    log = utils.get_logger(__name__)
    if config.verbose:
        capture = contextlib.contextmanager(lambda: (yield))
    else:
        capture = utils.capture
    actions = {'LINK': []}
    for feature, value in feature_list:
        if value:
            specs.append('%s@' % feature)
    specs = [_ensure_valid_spec(spec) for spec in specs]
    if specs:
        # this is hiding output like:
        #    Fetching package metadata ...........
        #    Solving package specifications: ..........
        with capture():
            try:
                actions = install_actions(prefix, index, specs, force=True)
            except NoPackagesFoundError as exc:
                raise DependencyNeedsBuildingError(exc)
            except (SystemExit, PaddingError, LinkError, DependencyNeedsBuildingError,
                    CondaError, AssertionError) as exc:
                if 'lock' in str(exc):
                    log.warn("failed to get install actions, retrying.  exception was: %s",
                             str(exc))
                elif ('requires a minimum conda version' in str(exc) or
                      'link a source that does not' in str(exc) or
                      isinstance(exc, AssertionError)):
                    locks = utils.get_conda_operation_locks(config)
                    with utils.try_acquire_locks(locks, timeout=config.timeout):
                        pkg_dir = str(exc)
                        folder = 0
                        while os.path.dirname(pkg_dir) not in pkgs_dirs and folder < 20:
                            pkg_dir = os.path.dirname(pkg_dir)
                            folder += 1
                        log.warn("I think conda ended up with a partial extraction for %s.  "
                                 "Removing the folder and retrying", pkg_dir)
                        if pkg_dir in pkgs_dirs and os.path.isdir(pkg_dir):
                            utils.rm_rf(pkg_dir)
                if retries < config.max_env_retry:
                    log.warn("failed to get install actions, retrying.  exception was: %s",
                             str(exc))
                    actions = get_install_actions(prefix, index, specs, config,
                                                  retries=retries + 1)
                else:
                    log.error("Failed to get install actions, max retries exceeded.")
                    raise
    if config.disable_pip:
        actions['LINK'] = [spec for spec in actions['LINK']
                           if not spec.startswith('pip-') and
                           not spec.startswith('setuptools-')]
    utils.trim_empty_keys(actions)
    return actions

def create_env(prefix, specs_or_actions, env, config, subdir, clear_cache=True, retry=0,
               locks=None, is_cross=False):
    '''
    Create a conda environment for the given prefix and specs.
    '''
    if config.debug:
        external_logger_context = utils.LoggingContext(logging.DEBUG)
    else:
        external_logger_context = utils.LoggingContext(logging.ERROR)

    with external_logger_context:
        log = utils.get_logger(__name__)

        # if os.path.isdir(prefix):
        #     utils.rm_rf(prefix)

        if specs_or_actions:  # Don't waste time if there is nothing to do
            log.debug("Creating environment in %s", prefix)
            log.debug(str(specs_or_actions))

            with utils.path_prepended(prefix):
                if not locks:
                    locks = utils.get_conda_operation_locks(config)
                try:
                    with utils.try_acquire_locks(locks, timeout=config.timeout):
                        # input is a list - it's specs in MatchSpec format
                        if not hasattr(specs_or_actions, 'keys'):
                            specs = list(set(specs_or_actions))
                            actions = get_install_actions(
                                prefix, tuple(specs), env, subdir=subdir,
                                verbose=config.verbose, debug=config.debug,
                                locking=config.locking,
                                bldpkgs_dirs=tuple(config.bldpkgs_dirs),
                                timeout=config.timeout, disable_pip=config.disable_pip,
                                max_env_retry=config.max_env_retry,
                                output_folder=config.output_folder,
                                channel_urls=tuple(config.channel_urls))
                        else:
                            actions = specs_or_actions
                        index, index_ts = get_build_index(
                            subdir=subdir, bldpkgs_dir=config.bldpkgs_dir,
                            output_folder=config.output_folder,
                            channel_urls=config.channel_urls,
                            debug=config.debug, verbose=config.verbose,
                            locking=config.locking, timeout=config.timeout)
                        utils.trim_empty_keys(actions)
                        display_actions(actions, index)
                        if utils.on_win:
                            for k, v in os.environ.items():
                                os.environ[k] = str(v)
                        execute_actions(actions, index, verbose=config.debug)
                except (SystemExit, PaddingError, LinkError, DependencyNeedsBuildingError,
                        CondaError) as exc:
                    if (("too short in" in str(exc) or
                         re.search('post-link failed for: (?:[a-zA-Z]*::)?openssl',
                                   str(exc)) or
                         isinstance(exc, PaddingError)) and config.prefix_length > 80):
                        if config.prefix_length_fallback:
                            log.warn("Build prefix failed with prefix length %d",
                                     config.prefix_length)
                            log.warn("Error was: ")
                            log.warn(str(exc))
                            log.warn("One or more of your package dependencies needs to be "
                                     "rebuilt with a longer prefix length.")
                            log.warn("Falling back to legacy prefix length of 80 characters.")
                            log.warn("Your package will not install into prefixes > "
                                     "80 characters.")
                            config.prefix_length = 80

                            # Set this here and use to create environ
                            #    Setting this here is important because we use it below (symlink)
                            prefix = config.build_prefix
                            actions['PREFIX'] = prefix

                            create_env(prefix, actions, config=config, subdir=subdir, env=env,
                                       clear_cache=clear_cache, is_cross=is_cross)
                        else:
                            raise
                    elif 'lock' in str(exc):
                        if retry < config.max_env_retry:
                            log.warn("failed to create env, retrying.  exception was: %s",
                                     str(exc))
                            create_env(prefix, actions, config=config, subdir=subdir, env=env,
                                       clear_cache=clear_cache, retry=retry + 1,
                                       is_cross=is_cross)
                    elif ('requires a minimum conda version' in str(exc) or
                          'link a source that does not' in str(exc)):
                        with utils.try_acquire_locks(locks, timeout=config.timeout):
                            pkg_dir = str(exc)
                            folder = 0
                            while os.path.dirname(pkg_dir) not in pkgs_dirs and folder < 20:
                                pkg_dir = os.path.dirname(pkg_dir)
                                folder += 1
                            log.warn("I think conda ended up with a partial extraction for "
                                     "%s.  Removing the folder and retrying", pkg_dir)
                            if os.path.isdir(pkg_dir):
                                utils.rm_rf(pkg_dir)
                        if retry < config.max_env_retry:
                            log.warn("failed to create env, retrying.  exception was: %s",
                                     str(exc))
                            create_env(prefix, actions, config=config, subdir=subdir, env=env,
                                       clear_cache=clear_cache, retry=retry + 1,
                                       is_cross=is_cross)
                        else:
                            log.error("Failed to create env, max retries exceeded.")
                            raise
                    else:
                        raise
                # HACK: some of the time, conda screws up somehow and incomplete packages
                #    result.  Just retry.
                except (AssertionError, IOError, ValueError, RuntimeError, LockError) as exc:
                    if isinstance(exc, AssertionError):
                        with utils.try_acquire_locks(locks, timeout=config.timeout):
                            pkg_dir = os.path.dirname(os.path.dirname(str(exc)))
                            log.warn("I think conda ended up with a partial extraction for "
                                     "%s.  Removing the folder and retrying", pkg_dir)
                            if os.path.isdir(pkg_dir):
                                utils.rm_rf(pkg_dir)
                    if retry < config.max_env_retry:
                        log.warn("failed to create env, retrying.  exception was: %s", str(exc))
                        create_env(prefix, actions, config=config, subdir=subdir, env=env,
                                   clear_cache=clear_cache, retry=retry + 1, is_cross=is_cross)
                    else:
                        log.error("Failed to create env, max retries exceeded.")
                        raise

    # We must not symlink conda across different platforms when cross-compiling.
    # On second thought, I think we must, because activating the host env does
    #    the symlink for us anyway, and when activate does it, we end up with
    #    conda symlinks in every package.  =()
    # if os.path.basename(prefix) == '_build_env' or not is_cross:
    if utils.on_win:
        shell = "cmd.exe"
    else:
        shell = "bash"
    symlink_conda(prefix, sys.prefix, shell)

def create_env(prefix, specs_or_actions, env, config, subdir, clear_cache=True, retry=0,
               locks=None, is_cross=False, is_conda=False):
    '''
    Create a conda environment for the given prefix and specs.
    '''
    if config.debug:
        external_logger_context = utils.LoggingContext(logging.DEBUG)
    else:
        external_logger_context = utils.LoggingContext(logging.WARN)

    if os.path.exists(prefix):
        for entry in glob(os.path.join(prefix, "*")):
            utils.rm_rf(entry)

    with external_logger_context:
        log = utils.get_logger(__name__)

        # if os.path.isdir(prefix):
        #     utils.rm_rf(prefix)

        if specs_or_actions:  # Don't waste time if there is nothing to do
            log.debug("Creating environment in %s", prefix)
            log.debug(str(specs_or_actions))

            if not locks:
                locks = utils.get_conda_operation_locks(config)
            try:
                with utils.try_acquire_locks(locks, timeout=config.timeout):
                    # input is a list - it's specs in MatchSpec format
                    if not hasattr(specs_or_actions, 'keys'):
                        specs = list(set(specs_or_actions))
                        actions = get_install_actions(
                            prefix, tuple(specs), env, subdir=subdir,
                            verbose=config.verbose, debug=config.debug,
                            locking=config.locking,
                            bldpkgs_dirs=tuple(config.bldpkgs_dirs),
                            timeout=config.timeout, disable_pip=config.disable_pip,
                            max_env_retry=config.max_env_retry,
                            output_folder=config.output_folder,
                            channel_urls=tuple(config.channel_urls))
                    else:
                        actions = specs_or_actions
                    index, _, _ = get_build_index(
                        subdir=subdir, bldpkgs_dir=config.bldpkgs_dir,
                        output_folder=config.output_folder,
                        channel_urls=config.channel_urls,
                        debug=config.debug, verbose=config.verbose,
                        locking=config.locking, timeout=config.timeout)
                    utils.trim_empty_keys(actions)
                    display_actions(actions, index)
                    if utils.on_win:
                        for k, v in os.environ.items():
                            os.environ[k] = str(v)
                    with env_var('CONDA_QUIET', not config.verbose, reset_context):
                        with env_var('CONDA_JSON', not config.verbose, reset_context):
                            execute_actions(actions, index)
            except (SystemExit, PaddingError, LinkError, DependencyNeedsBuildingError,
                    CondaError, BuildLockError) as exc:
                if (("too short in" in str(exc) or
                     re.search('post-link failed for: (?:[a-zA-Z]*::)?openssl', str(exc)) or
                     isinstance(exc, PaddingError)) and config.prefix_length > 80):
                    if config.prefix_length_fallback:
                        log.warn("Build prefix failed with prefix length %d",
                                 config.prefix_length)
                        log.warn("Error was: ")
                        log.warn(str(exc))
                        log.warn("One or more of your package dependencies needs to be "
                                 "rebuilt with a longer prefix length.")
                        log.warn("Falling back to legacy prefix length of 80 characters.")
                        log.warn("Your package will not install into prefixes > 80 characters.")
                        config.prefix_length = 80

                        host = '_h_env' in prefix
                        # Set this here and use to create environ
                        #    Setting this here is important because we use it below (symlink)
                        prefix = config.host_prefix if host else config.build_prefix
                        actions['PREFIX'] = prefix

                        create_env(prefix, actions, config=config, subdir=subdir, env=env,
                                   clear_cache=clear_cache, is_cross=is_cross)
                    else:
                        raise
                elif 'lock' in str(exc):
                    if retry < config.max_env_retry:
                        log.warn("failed to create env, retrying.  exception was: %s", str(exc))
                        create_env(prefix, specs_or_actions, config=config, subdir=subdir,
                                   env=env, clear_cache=clear_cache, retry=retry + 1,
                                   is_cross=is_cross)
                elif ('requires a minimum conda version' in str(exc) or
                      'link a source that does not' in str(exc)):
                    with utils.try_acquire_locks(locks, timeout=config.timeout):
                        pkg_dir = str(exc)
                        folder = 0
                        while os.path.dirname(pkg_dir) not in pkgs_dirs and folder < 20:
                            pkg_dir = os.path.dirname(pkg_dir)
                            folder += 1
                        log.warn("I think conda ended up with a partial extraction for %s.  "
                                 "Removing the folder and retrying", pkg_dir)
                        if os.path.isdir(pkg_dir):
                            utils.rm_rf(pkg_dir)
                    if retry < config.max_env_retry:
                        log.warn("failed to create env, retrying.  exception was: %s", str(exc))
                        create_env(prefix, specs_or_actions, config=config, subdir=subdir,
                                   env=env, clear_cache=clear_cache, retry=retry + 1,
                                   is_cross=is_cross)
                    else:
                        log.error("Failed to create env, max retries exceeded.")
                        raise
                else:
                    raise
            # HACK: some of the time, conda screws up somehow and incomplete packages result.
            #    Just retry.
            except (AssertionError, IOError, ValueError, RuntimeError, LockError) as exc:
                if isinstance(exc, AssertionError):
                    with utils.try_acquire_locks(locks, timeout=config.timeout):
                        pkg_dir = os.path.dirname(os.path.dirname(str(exc)))
                        log.warn("I think conda ended up with a partial extraction for %s.  "
                                 "Removing the folder and retrying", pkg_dir)
                        if os.path.isdir(pkg_dir):
                            utils.rm_rf(pkg_dir)
                if retry < config.max_env_retry:
                    log.warn("failed to create env, retrying.  exception was: %s", str(exc))
                    create_env(prefix, specs_or_actions, config=config, subdir=subdir, env=env,
                               clear_cache=clear_cache, retry=retry + 1, is_cross=is_cross)
                else:
                    log.error("Failed to create env, max retries exceeded.")
                    raise

def create_env(prefix, specs_or_actions, env, config, subdir, clear_cache=True, retry=0,
               locks=None, is_cross=False, always_include_files=[]):
    '''
    Create a conda environment for the given prefix and specs.
    '''
    if config.debug:
        external_logger_context = utils.LoggingContext(logging.DEBUG)
    else:
        external_logger_context = utils.LoggingContext(logging.ERROR)

    with external_logger_context:
        log = utils.get_logger(__name__)

        # if os.path.isdir(prefix):
        #     utils.rm_rf(prefix)

        if specs_or_actions:  # Don't waste time if there is nothing to do
            log.debug("Creating environment in %s", prefix)
            log.debug(str(specs_or_actions))

            with utils.path_prepended(prefix):
                if not locks:
                    locks = utils.get_conda_operation_locks(config)
                try:
                    with utils.try_acquire_locks(locks, timeout=config.timeout):
                        # input is a list - it's specs in MatchSpec format
                        if not hasattr(specs_or_actions, 'keys'):
                            specs = list(set(specs_or_actions))
                            actions = get_install_actions(
                                prefix, tuple(specs), env, subdir=subdir,
                                verbose=config.verbose, debug=config.debug,
                                locking=config.locking,
                                bldpkgs_dirs=tuple(config.bldpkgs_dirs),
                                timeout=config.timeout, disable_pip=config.disable_pip,
                                max_env_retry=config.max_env_retry,
                                output_folder=config.output_folder,
                                channel_urls=tuple(config.channel_urls))
                        else:
                            actions = specs_or_actions
                        index, index_ts = get_build_index(
                            subdir=subdir, bldpkgs_dir=config.bldpkgs_dir,
                            output_folder=config.output_folder,
                            channel_urls=config.channel_urls,
                            debug=config.debug, verbose=config.verbose,
                            locking=config.locking, timeout=config.timeout)
                        utils.trim_empty_keys(actions)
                        display_actions(actions, index)
                        if utils.on_win:
                            for k, v in os.environ.items():
                                os.environ[k] = str(v)
                        execute_actions(actions, index, verbose=config.debug)
                except (SystemExit, PaddingError, LinkError, DependencyNeedsBuildingError,
                        CondaError) as exc:
                    if (("too short in" in str(exc) or
                         re.search('post-link failed for: (?:[a-zA-Z]*::)?openssl',
                                   str(exc)) or
                         isinstance(exc, PaddingError)) and config.prefix_length > 80):
                        if config.prefix_length_fallback:
                            log.warn("Build prefix failed with prefix length %d",
                                     config.prefix_length)
                            log.warn("Error was: ")
                            log.warn(str(exc))
                            log.warn("One or more of your package dependencies needs to be "
                                     "rebuilt with a longer prefix length.")
                            log.warn("Falling back to legacy prefix length of 80 characters.")
                            log.warn("Your package will not install into prefixes > "
                                     "80 characters.")
                            config.prefix_length = 80

                            # Set this here and use to create environ
                            #    Setting this here is important because we use it below (symlink)
                            prefix = config.build_prefix
                            actions['PREFIX'] = prefix

                            create_env(prefix, actions, config=config, subdir=subdir, env=env,
                                       clear_cache=clear_cache, is_cross=is_cross)
                        else:
                            raise
                    elif 'lock' in str(exc):
                        if retry < config.max_env_retry:
                            log.warn("failed to create env, retrying.  exception was: %s",
                                     str(exc))
                            create_env(prefix, actions, config=config, subdir=subdir, env=env,
                                       clear_cache=clear_cache, retry=retry + 1,
                                       is_cross=is_cross)
                    elif ('requires a minimum conda version' in str(exc) or
                          'link a source that does not' in str(exc)):
                        with utils.try_acquire_locks(locks, timeout=config.timeout):
                            pkg_dir = str(exc)
                            folder = 0
                            while os.path.dirname(pkg_dir) not in pkgs_dirs and folder < 20:
                                pkg_dir = os.path.dirname(pkg_dir)
                                folder += 1
                            log.warn("I think conda ended up with a partial extraction for "
                                     "%s.  Removing the folder and retrying", pkg_dir)
                            if os.path.isdir(pkg_dir):
                                utils.rm_rf(pkg_dir)
                        if retry < config.max_env_retry:
                            log.warn("failed to create env, retrying.  exception was: %s",
                                     str(exc))
                            create_env(prefix, actions, config=config, subdir=subdir, env=env,
                                       clear_cache=clear_cache, retry=retry + 1,
                                       is_cross=is_cross)
                        else:
                            log.error("Failed to create env, max retries exceeded.")
                            raise
                    else:
                        raise
                # HACK: some of the time, conda screws up somehow and incomplete packages
                #    result.  Just retry.
                except (AssertionError, IOError, ValueError, RuntimeError, LockError) as exc:
                    if isinstance(exc, AssertionError):
                        with utils.try_acquire_locks(locks, timeout=config.timeout):
                            pkg_dir = os.path.dirname(os.path.dirname(str(exc)))
                            log.warn("I think conda ended up with a partial extraction for "
                                     "%s.  Removing the folder and retrying", pkg_dir)
                            if os.path.isdir(pkg_dir):
                                utils.rm_rf(pkg_dir)
                    if retry < config.max_env_retry:
                        log.warn("failed to create env, retrying.  exception was: %s", str(exc))
                        create_env(prefix, actions, config=config, subdir=subdir, env=env,
                                   clear_cache=clear_cache, retry=retry + 1, is_cross=is_cross)
                    else:
                        log.error("Failed to create env, max retries exceeded.")
                        raise

    # We must not symlink conda across different platforms when cross-compiling.
    # On second thought, I think we must, because activating the host env does
    #    the symlink for us anyway, and when activate does it, we end up with
    #    conda symlinks in every package.  =()
    # if os.path.basename(prefix) == '_build_env' or not is_cross:

    # Hack, do not SYMLINK_CONDA when we're building conda.
    if not any(include in ('bin/deactivate', 'Scripts/deactivate.bat')
               for include in always_include_files):
        if utils.on_win:
            shell = "cmd.exe"
        else:
            shell = "bash"
        symlink_conda(prefix, sys.prefix, shell)