def _load_environment_yml(filename):
    """Load an environment.yml as an EnvSpec, or None if not loaded."""
    try:
        with codecs.open(filename, 'r', 'utf-8') as stream:
            text = stream.read()
        parsed = _load_string(text)
    except (IOError, _YAMLError):
        return None

    # Pick a name: explicit 'name' wins, then the basename of 'prefix',
    # then the basename of the file itself.
    name = None
    if 'name' in parsed:
        name = parsed['name']
    if not name and 'prefix' in parsed and parsed['prefix']:
        name = os.path.basename(parsed['prefix'])
    if not name:
        name = os.path.basename(filename)

    # We don't do too much validation here because we end up doing it
    # later if we import this into the project, and then load it from
    # the project file. We will do the import such that we don't end up
    # keeping the new project file if it's messed up.
    #
    # However we do try to avoid crashing on None or type errors here.
    deps = parsed.get('dependencies', [])
    if not isinstance(deps, list) or not deps:
        # older/alternate key for the same list
        deps = parsed.get('packages', [])
    if not isinstance(deps, list):
        deps = []

    channel_values = parsed.get('channels', [])
    if not isinstance(channel_values, list):
        channel_values = []

    # Plain strings are conda packages; a {'pip': [...]} dict holds pip packages.
    conda_packages = [dep for dep in deps if is_string(dep)]
    pip_packages = []
    for dep in deps:
        if isinstance(dep, dict) and isinstance(dep.get('pip'), list):
            pip_packages.extend(entry for entry in dep['pip'] if is_string(entry))

    channels = [channel for channel in channel_values if is_string(channel)]

    return EnvSpec(name=name,
                   conda_packages=conda_packages,
                   channels=channels,
                   pip_packages=pip_packages,
                   platforms=())
def _parse_default(self, options, env_var, problems):
    """Validate ``options['default']`` in place.

    An explicit null default is removed (treated as missing). Strings and
    numbers are accepted; anything else (including booleans) records a
    problem. Returns True when the default is acceptable, False otherwise.
    """
    assert isinstance(options, dict)
    value = options.get('default', None)

    if value is None:
        # convert null to be the same as simply missing
        if 'default' in options:
            del options['default']
        return True

    # bool is checked explicitly since it's a subclass of int.
    acceptable = not isinstance(value, bool) and \
        (is_string(value) or isinstance(value, (int, float)))
    if acceptable:
        return True

    problems.append(
        "default value for variable {env_var} must be null, a string, or a number, not {value}."
        .format(env_var=env_var, value=value))
    return False
def extras(filename, errors):
    """Scan a notebook JSON file for a fusion-function registration.

    Appends a message to ``errors`` and returns None when the file can't
    be read or parsed; otherwise returns a dict of discovered extras.
    """
    try:
        with codecs.open(filename, encoding='utf-8') as stream:
            parsed = json.loads(stream.read())
    except Exception as e:
        errors.append("Failed to read or parse %s: %s" % (filename, str(e)))
        return None

    result = dict()
    found = False
    cells = parsed['cells'] if (isinstance(parsed, dict) and 'cells' in parsed) else None
    if isinstance(cells, list):
        for cell in cells:
            if 'source' in cell and isinstance(cell['source'], list):
                # Notebook cell sources are stored as a list of line strings.
                text = "".join(piece for piece in cell['source'] if is_string(piece))
                if _has_fusion_register(text):
                    found = True
    if found:
        result['registers_fusion_function'] = True
    return result
def _parse(cls, varname, item, problems):
    """Parse an item from the services: section.

    Returns a dict describing the service, or None (after recording a
    message in ``problems``) when the item is malformed.
    """
    if is_string(item):
        # A bare string is shorthand for {type: <string>}.
        stype = item
        opts = dict(type=stype)
    elif isinstance(item, dict):
        stype = item.get('type', None)
        if stype is None:
            problems.append(
                "Service {} doesn't contain a 'type' field.".format(varname))
            return None
        opts = deepcopy(item)
    else:
        problems.append(
            "Service {} should have a service type string or a dictionary as its value."
            .format(varname))
        return None

    # Validates (and may normalize) opts['default'] in place.
    if not EnvVarRequirement._parse_default(opts, varname, problems):
        return None

    return dict(service_type=stype, env_var=varname, options=opts)
def _parse(cls, registry, varname, item, problems, requirements):
    """Parse an item from the services: section.

    Appends a ServiceRequirement to ``requirements`` on success;
    otherwise records a message in ``problems`` and returns early.
    """
    if is_string(item):
        # A bare string is shorthand for {type: <string>}.
        stype = item
        opts = dict(type=stype)
    elif isinstance(item, dict):
        stype = item.get('type', None)
        if stype is None:
            problems.append(
                "Service {} doesn't contain a 'type' field.".format(varname))
            return
        opts = deepcopy(item)
    else:
        problems.append(
            "Service {} should have a service type string or a dictionary as its value."
            .format(varname))
        return

    # Validates (and may normalize) opts['default'] in place.
    if not EnvVarRequirement._parse_default(opts, varname, problems):
        return

    requirement = registry.find_requirement_by_service_type(service_type=stype,
                                                            env_var=varname,
                                                            options=opts)
    if requirement is None:
        problems.append("Service {} has an unknown type '{}'.".format(varname, stype))
        return

    assert isinstance(requirement, ServiceRequirement)
    assert 'type' in requirement.options
    requirements.append(requirement)
def _path(cls, path):
    """Normalize ``path`` into a sequence of YAML keys.

    A single string becomes a one-element tuple; any other iterable is
    materialized into a list.

    Raises:
        ValueError: if ``path`` is neither a string nor iterable.
    """
    if is_string(path):
        return (path, )
    try:
        # list() consumes the iterable directly; the previous
        # pass-through generator expression added nothing.
        return list(path)
    except TypeError:
        raise ValueError("YAML file path must be a string or an iterable of strings")
def status_for(self, env_var_or_class):
    """Get status for the given env var or class, or None if unknown."""
    # A string matches by env var name; anything else is treated as a
    # requirement class to isinstance-check against. The type test is
    # pure, so hoist it out of the loop.
    match_by_name = is_string(env_var_or_class)
    for status in self.statuses:
        requirement = status.requirement
        if match_by_name:
            if isinstance(requirement, EnvVarRequirement) and \
                    requirement.env_var == env_var_or_class:
                return status
        elif isinstance(requirement, env_var_or_class):
            return status
    return None
def _in_provide_whitelist(provide_whitelist, requirement):
    """Return True if ``requirement`` is covered by the whitelist."""
    if provide_whitelist is None:
        # whitelist of None means "everything"
        return True
    for entry in provide_whitelist:
        if is_string(entry):
            # String entries match env var requirements by name.
            hit = isinstance(requirement, EnvVarRequirement) and \
                requirement.env_var == entry
        else:
            # Otherwise the entry is a requirement class.
            hit = isinstance(requirement, entry)
        if hit:
            return True
    return False
def __init__(self, registry, options):
    """Construct a Requirement.

    Args:
        registry (PluginRegistry): the plugin registry we came from
        options (dict): dict of requirement options from the project config
    """
    self.registry = registry
    self.options = dict() if options is None else deepcopy(options)

    # always convert the default to a string (it's allowed to be a number
    # in the config file, but env vars have to be strings), unless
    # it's a dict because we use a dict for encrypted defaults
    if 'default' in self.options:
        value = self.options['default']
        if not is_string(value) and not isinstance(value, dict):
            self.options['default'] = str(value)
def parse_spec(spec):
    """Parse a package name and version spec as conda would.

    Returns:
        ``ParsedSpec`` or None on failure
    """
    if not is_string(spec):
        raise TypeError("Expected a string not %r" % spec)

    match = _spec_pat.match(spec)
    if match is None:
        return None

    name = match.group('name').lower()

    pip_constraint = match.group('pc')
    if pip_constraint is not None:
        pip_constraint = pip_constraint.replace(' ', '')

    conda_constraint = match.group('cc')
    exact_version = None
    exact_build_string = None
    if conda_constraint is not None:
        cc_match = _conda_constraint_pat.match(conda_constraint)
        assert cc_match is not None
        version = cc_match.group('version')
        # A version containing OR/wildcard/list syntax is not "exact".
        if not any(special in version for special in ('|', '*', ',')):
            exact_version = version
            build = cc_match.group('build')
            if build is not None:
                # The build group keeps the '=' separator; strip it.
                assert build[0] == '='
                exact_build_string = build[1:]

    return ParsedSpec(name=name,
                      conda_constraint=conda_constraint,
                      pip_constraint=pip_constraint,
                      exact_version=exact_version,
                      exact_build_string=exact_build_string)
def resolve_dependencies(pkgs, channels=(), platform=None):
    """Resolve packages into a full transitive list of (name, version, build) tuples.

    Args:
        pkgs: non-empty list or tuple of package spec strings
        channels: extra conda channels to search
        platform: platform to resolve for (None means the current one)

    Raises:
        TypeError: if ``pkgs`` is not a non-empty list or tuple
        CondaError: if conda's JSON output could not be understood
    """
    if not pkgs or not isinstance(pkgs, (list, tuple)):
        # Bug fix: the message was previously passed logging-style
        # (format string and value as two separate TypeError args),
        # so '%r' was never interpolated into the message.
        raise TypeError(
            'must specify a list of one or more packages to install into existing environment, not %r'
            % (pkgs, ))

    # even with --dry-run, conda wants to create the prefix,
    # so we ensure it's somewhere out of the way.
    prefix = tempfile.mkdtemp(prefix="_anaconda_project_resolve_")

    # conda 4.1 (and possibly other versions) will complain
    # if the directory already exists. An evil attacker
    # on a multiuser system could replace this with a file
    # after we remove it, and then conda's mkdir would fail.
    os.rmdir(prefix)

    cmd_list = ['create', '--yes', '--quiet', '--json', '--dry-run', '--prefix', prefix]
    for channel in channels:
        cmd_list.extend(['--channel', channel])
    cmd_list.extend(pkgs)

    try:
        parsed = _call_and_parse_json(cmd_list, platform=platform)
    finally:
        # Best-effort cleanup of the temp prefix if conda created it.
        try:
            if os.path.isdir(prefix):
                shutil.rmtree(prefix)
        except Exception:
            pass

    results = []

    actions = parsed.get('actions', [])
    # old conda gives us one dict, newer a list of dicts
    if isinstance(actions, dict):
        actions = [actions]
    for action in actions:
        if isinstance(action, dict):
            links = action.get('LINK', [])
            for link in links:
                found = None
                # 4.1 conda gives us a string like
                # 'python-3.6.0-0 2' and 4.3 gives us a
                # dict with the fields already decomposed.
                if isinstance(link, dict):
                    name = link.get('name', None)
                    version = link.get('version', None)
                    build_string = link.get('build_string', None)
                    if name is not None and \
                            version is not None and \
                            build_string is not None:
                        found = (name, version, build_string)
                elif is_string(link):
                    # we have a string like 'python-3.6.0-0 2'
                    pieces = link.split()
                    if len(pieces) > 0:
                        # 'found' can be None if we didn't understand the string
                        found = _parse_dist(pieces[0])
                if found is not None:
                    results.append(found)

    if len(results) == 0:
        raise CondaError("Could not understand JSON from Conda, could be a problem with this Conda version.",
                         json=parsed)

    return results
def _parse(cls, varname, item, problems):
    """Parse an item from the downloads: section.

    ``item`` may be a bare URL string or a dict with 'url' plus optional
    'filename', 'unzip', 'description', and at most one checksum field.
    Returns a dict of the parsed fields, or None after appending a
    message to ``problems`` when the item is malformed.
    """
    url = None
    filename = None
    hash_algorithm = None
    hash_value = None
    unzip = None  # tri-state: None means "not specified", inferred below
    description = None
    if is_string(item):
        # Shorthand form: the value is just the URL.
        url = item
    elif isinstance(item, dict):
        url = item.get('url', None)
        if url is None:
            problems.append(
                "Download item {} doesn't contain a 'url' field.".format(varname))
            return None
        description = item.get('description', None)
        if description is not None and not is_string(description):
            problems.append(
                "'description' field for download item {} is not a string".format(varname))
            return None
        # Accept at most one checksum key; reject duplicates or non-string values.
        for method in _hash_algorithms:
            if method not in item:
                continue
            if hash_algorithm is not None:
                problems.append(
                    "Multiple checksums for download {}: {} and {}.".format(varname, hash_algorithm, method))
                return None
            else:
                hash_value = item[method]
                if is_string(hash_value):
                    hash_algorithm = method
                else:
                    problems.append(
                        "Checksum value for {} should be a string not {}.".format(varname, hash_value))
                    return None
        filename = item.get('filename', None)
        unzip = item.get('unzip', None)
        if unzip is not None and not isinstance(unzip, bool):
            problems.append(
                "Value of 'unzip' for download item {} should be a boolean, not {}.".format(varname, unzip))
            return None
    if url is None or not is_string(url):
        problems.append(("Download name {} should be followed by a URL string or a dictionary " +
                         "describing the download.").format(varname))
        return None
    if url == '':
        problems.append(
            "Download item {} has an empty 'url' field.".format(varname))
        return None
    # urlsplit doesn't seem to ever throw an exception, but it can
    # return pretty nonsensical stuff on invalid urls, in particular
    # an empty path is very possible
    url_path = os.path.basename(urlparse.urlsplit(url).path)
    url_path_is_zip = url_path.lower().endswith(".zip")
    if filename is None:
        if url_path != '':
            # No explicit filename: default to the last path segment of the URL.
            filename = url_path
            if url_path_is_zip:
                if unzip is None:
                    # url is a zip and neither filename nor unzip specified, assume unzip
                    unzip = True
                if unzip:
                    # unzip specified True, or we guessed True, and url ends in zip;
                    # take the .zip off the filename we invented based on the url.
                    filename = filename[:-4]
    elif url_path_is_zip and unzip is None and not filename.lower().endswith(".zip"):
        # URL is a zip, filename is not a zip, unzip was not specified, so assume
        # we want to unzip
        unzip = True
    if filename is None:
        # Last resort: name the download after its env var.
        filename = varname
    if unzip is None:
        unzip = False
    return dict(env_var=varname,
                url=url,
                filename=filename,
                hash_algorithm=hash_algorithm,
                hash_value=hash_value,
                unzip=unzip,
                description=description)