def explicit(urls, prefix, verbose=True):
    """
    Install a list of explicit package URLs into *prefix*.

    urls    -- iterable of package URLs; the '@EXPLICIT' marker line is
               skipped.  Each URL must end in '<dist>.tar.bz2'.
    prefix  -- environment prefix the packages are linked into.
    verbose -- passed through to force_extract_and_link().

    Exits the process with an error message when a URL's package is not
    present in its channel's repodata.
    """
    import conda.fetch as fetch
    from conda.utils import md5_file
    dists = []
    for url in urls:
        if url == '@EXPLICIT':
            continue
        print("Fetching: %s" % url)
        channel_url, fn = url.rsplit('/', 1)
        dists.append(fn[:-8])  # strip the '.tar.bz2' extension
        index = fetch.fetch_index((channel_url + '/',))
        # Fail with a clear message instead of an unhandled KeyError when
        # the channel's repodata does not list the requested package.
        try:
            info = index[fn]
        except KeyError:
            sys.exit("Error: no package '%s' in index" % fn)
        pkg_path = join(config.pkgs_dirs[0], fn)
        if isfile(pkg_path):
            # Re-download only when the cached copy's checksum disagrees
            # with the repodata record; a repodata entry without an 'md5'
            # key degrades to a warning.
            try:
                if md5_file(pkg_path) != info['md5']:
                    install.rm_rf(pkg_path)
                    fetch.fetch_pkg(info)
            except KeyError:
                sys.stderr.write('Warning: cannot lookup MD5 of: %s' % fn)
        else:
            fetch.fetch_pkg(info)
    force_extract_and_link(dists, prefix, verbose=verbose)
def fetch(info):
    # Download every distribution listed in the module-level `dists` into
    # the installer's download cache, verifying MD5 sums against repodata.
    # NOTE(review): relies on module-level state -- `dists`, `urls`, `md5s`
    # and `index` -- presumably populated earlier in this module; confirm
    # against the callers.
    download_dir = info['_download_dir']
    if not isdir(download_dir):
        os.makedirs(download_dir)
    for fn in dists:
        path = join(download_dir, fn)
        url = urls.get(fn)
        md5 = md5s.get(fn)
        if url:
            # Package pinned to an explicit channel URL: consult that
            # channel's repodata instead of the global index.
            url_index = fetch_index((url, ))
            try:
                pkginfo = url_index[fn]
            except KeyError:
                sys.exit("Error: no package '%s' in %s" % (fn, url))
        else:
            pkginfo = index[fn]
        # A user-specified MD5 must agree with the repodata record.
        if md5 and md5 != pkginfo['md5']:
            sys.exit("Error: MD5 sum for '%s' does not match in remote "
                     "repodata %s" % (fn, url))
        # Skip packages already cached with the correct checksum.
        if isfile(path) and md5_file(path) == pkginfo['md5']:
            continue
        print('fetching: %s' % fn)
        fetch_pkg(pkginfo, download_dir)
def get_index(channel_urls=(), prepend=True, platform=None,
              use_cache=False, unknown=False, offline=False,
              prefix=None):
    """
    Return the index of packages available on the channels

    If prepend=False, only the channels passed in as arguments are used.
    If platform=None, then the current platform is used.
    If prefix is supplied, then the packages installed in that prefix are added.
    """
    # NOTE(review): normalize_urls here appears to return a mapping of
    # url -> (canonical name, priority) -- confirm its value shape, since
    # pri0 below is computed as max() over those values.
    channel_urls = config.normalize_urls(channel_urls, platform, offline)
    if prepend:
        # Append the configured default channels, shifting their priority
        # values past the current maximum so explicit channels win.
        pri0 = max(itervalues(channel_urls)) if channel_urls else 0
        for url, rec in iteritems(config.get_channel_urls(platform, offline)):
            channel_urls[url] = (rec[0], rec[1] + pri0)
    index = fetch_index(channel_urls, use_cache=use_cache, unknown=unknown)
    if prefix:
        # Merge in packages already linked into the prefix so the resolver
        # can still see installed packages the channels no longer serve.
        for dist, info in iteritems(install.linked_data(prefix)):
            fn = dist + '.tar.bz2'
            channel = info.get('channel', '')
            if channel not in channel_urls:
                channel_urls[channel] = (config.canonical_channel_name(channel, True, True), 0)
            url_s, priority = channel_urls[channel]
            # Index keys are namespaced by channel name, except for the
            # default channel whose key is the bare filename.
            key = url_s + '::' + fn if url_s else fn
            if key not in index:
                # only if the package in not in the repodata, use local
                # conda-meta (with 'depends' defaulting to [])
                info.setdefault('depends', [])
                info['fn'] = fn
                info['schannel'] = url_s
                info['channel'] = channel
                info['url'] = channel + fn
                info['priority'] = priority
                index[key] = info
    return index
def get_index(channel_urls=(), prepend=True, platform=None,
              use_cache=False, unknown=False, offline=False,
              prefix=None):
    """
    Return the index of packages available on the channels

    If prepend=False, only the channels passed in as arguments are used.
    If platform=None, then the current platform is used.
    If prefix is supplied, then the packages installed in that prefix are added.
    """
    urls = config.normalize_urls(channel_urls, platform=platform)
    if prepend:
        urls = urls + config.get_channel_urls(platform=platform)
    if offline:
        # Offline mode keeps only local file:// channels.
        urls = [u for u in urls if u.startswith('file:')]
    index = fetch_index(tuple(urls), use_cache=use_cache, unknown=unknown)
    if prefix:
        # Fold in the packages already linked into the prefix.
        for dist, info in iteritems(install.linked_data(prefix)):
            fn = dist + '.tar.bz2'
            orec = index.get(fn)
            if orec is not None:
                # Repodata already has this package; keep its record when
                # the checksums agree, otherwise inherit its dependencies.
                if orec.get('md5', None) == info.get('md5', None):
                    continue
                info.setdefault('depends', orec.get('depends', []))
            index[fn] = info
    return index
def get_index(channel_urls=(), prepend=True, platform=None,
              use_cache=False, unknown=False, offline=False,
              prefix=None):
    """
    Return the index of packages available on the channels

    If prepend=False, only the channels passed in as arguments are used.
    If platform=None, then the current platform is used.
    If prefix is supplied, then the packages installed in that prefix are added.
    """
    urls = config.normalize_urls(channel_urls, platform=platform)
    if prepend:
        urls = urls + config.get_channel_urls(platform=platform)
    if offline:
        # Offline mode keeps only local file:// channels.
        urls = [u for u in urls if u.startswith('file:')]
    index = fetch_index(tuple(urls), use_cache=use_cache, unknown=unknown)
    if prefix:
        for dist, info in iteritems(install.linked_data(prefix)):
            key = dist + '.tar.bz2'
            if key in index:
                continue
            # only if the package in not in the repodata, use local
            # conda-meta (with 'depends' defaulting to [])
            info.setdefault('depends', [])
            index[key] = info
    return index
def get_index(channel_urls=(), prepend=True, platform=None,
              use_local=False, use_cache=False, unknown=False,
              offline=False, prefix=None):
    """
    Return the index of packages available on the channels

    If prepend=False, only the channels passed in as arguments are used.
    If platform=None, then the current platform is used.
    If prefix is supplied, then the packages installed in that prefix are added.
    """
    if use_local:
        # 'local' names the locally-built package channel.
        channel_urls = ['local'] + list(channel_urls)
    channel_urls = normalize_urls(channel_urls, platform, offline)
    if prepend:
        channel_urls.extend(get_channel_urls(platform, offline))
    # NOTE(review): after this call channel_urls appears to be a mapping
    # whose values are (channel, priority) pairs -- see itervalues() below.
    channel_urls = prioritize_channels(channel_urls)
    index = fetch_index(channel_urls, use_cache=use_cache, unknown=unknown)
    if prefix:
        priorities = {c: p for c, p in itervalues(channel_urls)}
        for dist, info in iteritems(install.linked_data(prefix)):
            fn = info['fn']
            schannel = info['schannel']
            # NOTE(review): this rebinds the *prefix* parameter to the key
            # namespace string; harmless here since the loop no longer needs
            # the original prefix, but worth renaming.
            prefix = '' if schannel == 'defaults' else schannel + '::'
            # Channels not in the map fall back to priority 0.
            priority = priorities.get(schannel, 0)
            key = prefix + fn
            if key in index:
                # Copy the link information so the resolver knows this is installed
                index[key]['link'] = info.get('link')
            else:
                # only if the package in not in the repodata, use local
                # conda-meta (with 'depends' defaulting to [])
                info.setdefault('depends', [])
                info['priority'] = priority
                index[key] = info
    return index
def explicit(urls, prefix, verbose=True):
    # Install a list of explicit package URLs into *prefix*.  Each entry is
    # parsed with the module-level url_pat regex (groups: 'url', 'fn' and an
    # optional 'md5'); the '@EXPLICIT' marker line is skipped.
    import conda.fetch as fetch
    from conda.utils import md5_file
    dists = []
    for url in urls:
        if url == '@EXPLICIT':
            continue
        print("Fetching: %s" % url)
        m = url_pat.match(url)
        if m is None:
            sys.exit("Error: Could not parse: %s" % url)
        fn = m.group('fn')
        dists.append(fn[:-8])  # strip the '.tar.bz2' extension
        index = fetch.fetch_index((m.group('url') + '/',))
        try:
            info = index[fn]
        except KeyError:
            sys.exit("Error: no package '%s' in index" % fn)
        # An MD5 given in the explicit file must agree with repodata.
        if m.group('md5') and m.group('md5') != info['md5']:
            sys.exit("Error: MD5 in explicit files does not match index")
        pkg_path = join(config.pkgs_dirs[0], fn)
        if isfile(pkg_path):
            # Re-download only when the cached copy's checksum disagrees
            # with repodata; a record missing 'md5' degrades to a warning.
            try:
                if md5_file(pkg_path) != info['md5']:
                    install.rm_rf(pkg_path)
                    fetch.fetch_pkg(info)
            except KeyError:
                sys.stderr.write('Warning: cannot lookup MD5 of: %s' % fn)
        else:
            fetch.fetch_pkg(info)
    force_extract_and_link(dists, prefix, verbose=verbose)
def fetch(info):
    """Download every distribution in `dists` into the download cache,
    verifying MD5 checksums against the repodata records."""
    download_dir = info['_download_dir']
    if not isdir(download_dir):
        os.makedirs(download_dir)
    for fn in dists:
        target = join(download_dir, fn)
        url = urls.get(fn)
        md5 = md5s.get(fn)
        if not url:
            pkginfo = index[fn]
        else:
            # Package pinned to an explicit channel URL: consult that
            # channel's repodata instead of the global index.
            url_index = fetch_index((url,))
            try:
                pkginfo = url_index[fn]
            except KeyError:
                sys.exit("Error: no package '%s' in %s" % (fn, url))
        if md5 and md5 != pkginfo['md5']:
            sys.exit("Error: MD5 sum for '%s' does not match in remote "
                     "repodata %s" % (fn, url))
        if isfile(target) and md5_file(target) == pkginfo['md5']:
            # Already cached with the right checksum.
            continue
        print('fetching: %s' % fn)
        fetch_pkg(pkginfo, download_dir)
def get_index(channel_urls=(), prepend=True):
    """
    Return the index of packages available on the channels.

    When prepend is False, only the channels given as arguments are
    consulted; otherwise the configured default channels are appended.
    """
    urls = config.normalize_urls(channel_urls)
    if prepend:
        urls = urls + config.get_channel_urls()
    return fetch_index(tuple(urls))
def get_index(channel_urls=(), prepend=True, platform=None):
    """
    Return the index of packages available on the channels.

    When prepend is False, only the channels given as arguments are
    consulted; platform=None selects the current platform.
    """
    urls = config.normalize_urls(channel_urls, platform=platform)
    if prepend:
        urls = urls + config.get_channel_urls(platform=platform)
    return fetch_index(tuple(urls))
def get_index(channel_urls=(), prepend=True, platform=None,
              use_cache=False):
    """
    Return the index of packages available on the channels.

    When prepend is False, only the channels given as arguments are
    consulted; platform=None selects the current platform.  use_cache is
    forwarded to fetch_index().
    """
    urls = config.normalize_urls(channel_urls, platform=platform)
    if prepend:
        urls = urls + config.get_channel_urls(platform=platform)
    return fetch_index(tuple(urls), use_cache=use_cache)
def get_index(channel_urls=(), prepend=True, platform=None,
              use_cache=False, unknown=False, offline=False):
    """
    Return the index of packages available on the channels.

    When prepend is False, only the channels given as arguments are
    consulted; platform=None selects the current platform.  With
    offline=True only local file: channels are kept.
    """
    urls = config.normalize_urls(channel_urls, platform=platform)
    if prepend:
        urls = urls + config.get_channel_urls(platform=platform)
    if offline:
        urls = [u for u in urls if u.startswith("file:")]
    return fetch_index(tuple(urls), use_cache=use_cache, unknown=unknown)
def get_index(channel_urls=(), prepend=True, platform=None,
              use_cache=False, unknown=False, offline=False):
    """
    Return the index of packages available on the channels.

    When prepend is False, only the channels given as arguments are
    consulted; platform=None selects the current platform.  With
    offline=True only local file: channels are kept.
    """
    all_urls = config.normalize_urls(channel_urls, platform=platform)
    if prepend:
        all_urls = all_urls + config.get_channel_urls(platform=platform)
    if offline:
        # Offline mode: drop everything that is not a local channel.
        all_urls = [url for url in all_urls if url.startswith('file:')]
    return fetch_index(tuple(all_urls), use_cache=use_cache, unknown=unknown)
def get_index(channel_urls=(), prepend=True, platform=None,
              use_local=False, use_cache=False, unknown=False,
              offline=False, prefix=None):
    """
    Return the index of packages available on the channels

    If prepend=False, only the channels passed in as arguments are used.
    If platform=None, then the current platform is used.
    If prefix is supplied, then the packages installed in that prefix are added.
    """
    if use_local:
        # 'local' names the locally-built package channel.
        channel_urls = ['local'] + list(channel_urls)
    channel_urls = normalize_urls(channel_urls, platform, offline)
    if prepend:
        channel_urls.extend(get_channel_urls(platform, offline))
    # NOTE(review): after this call channel_urls appears to be a mapping
    # whose values are (channel, priority) pairs -- see itervalues() below.
    channel_urls = prioritize_channels(channel_urls)
    index = fetch_index(channel_urls, use_cache=use_cache, unknown=unknown)
    if prefix:
        priorities = {c: p for c, p in itervalues(channel_urls)}
        # Unknown channels sort after every known one.
        maxp = max(itervalues(priorities)) + 1 if priorities else 1
        for dist, info in iteritems(install.linked_data(prefix)):
            fn = info['fn']
            schannel = info['schannel']
            # NOTE(review): this rebinds the *prefix* parameter to the key
            # namespace string; harmless since the original value is no
            # longer needed in the loop, but worth renaming.
            prefix = '' if schannel == 'defaults' else schannel + '::'
            priority = priorities.get(schannel, maxp)
            key = prefix + fn
            if key in index:
                # Copy the link information so the resolver knows this is installed
                index[key]['link'] = info.get('link')
            else:
                # only if the package in not in the repodata, use local
                # conda-meta (with 'depends' defaulting to [])
                info.setdefault('depends', [])
                info['priority'] = priority
                index[key] = info
    return index
def main(info, verbose=True):
    # Drive the package-resolution phase of an installer build: populate
    # the module-level index, resolve specs, order the dists, then fetch.
    # NOTE(review): depends on module-level state (`index`, `dists`) and
    # sibling helpers defined elsewhere in this module.
    if 'channels' in info:
        global index
        # Expand each channel URL with the target platform subdirectory.
        index = fetch_index(
            tuple('%s/%s/' % (url.rstrip('/'), info['_platform'])
                  for url in info['channels']))
    if 'specs' in info:
        resolve(info)
    exclude_packages(info)
    if 'packages' in info:
        handle_packages(info)
    if not info.get('install_in_dependency_order'):
        dists.sort()
    # Presumably moves python to the front of `dists` -- confirm helper.
    move_python_first()
    if verbose:
        show(info)
    check_dists()
    fetch(info)
    # Record the final distribution order for later stages of the build.
    info['_dists'] = list(dists)
def set_index(info):
    """Populate the module-level INDEX from the channels listed in *info*,
    expanding each channel URL with the target platform subdirectory."""
    global INDEX
    platform_urls = ['%s/%s/' % (url.rstrip('/'), info['platform'])
                     for url in info['channels']]
    INDEX = fetch_index(tuple(platform_urls))
def explicit(specs, prefix, verbose=False, force_extract=True, fetch_args=None):
    # Build and execute an action plan that installs a list of explicit
    # package URLs/paths into *prefix*, reusing cached downloads and
    # extractions where their MD5 checksums still match.
    actions = defaultdict(list)
    actions['PREFIX'] = prefix
    actions['op_order'] = RM_FETCHED, FETCH, RM_EXTRACTED, EXTRACT, UNLINK, LINK
    linked = {install.name_dist(dist): dist for dist in install.linked(prefix)}
    fetch_args = fetch_args or {}
    index = {}
    verifies = []   # (filename, md5) pairs still needing repodata checks
    channels = {}   # channel url -> (schannel, priority) for fetch_index
    for spec in specs:
        if spec == '@EXPLICIT':
            continue
        # Format: (url|path)(:#md5)?
        m = url_pat.match(spec)
        if m is None:
            sys.exit('Could not parse explicit URL: %s' % spec)
        url, md5 = m.group('url') + '/' + m.group('fn'), m.group('md5')
        if not is_url(url):
            # Local paths are converted to file:// URLs.
            if not isfile(url):
                sys.exit('Error: file not found: %s' % url)
            url = utils.url_path(url)
        url_p, fn = url.rsplit('/', 1)
        # See if the URL refers to a package in our cache
        prefix = pkg_path = dir_path = None
        if url_p.startswith('file://'):
            prefix = install.cached_url(url)
        # If not, determine the channel name from the URL
        # NOTE(review): schannel is only bound on this branch; the
        # channels[...] assignment below assumes the cache lookup missed.
        if prefix is None:
            _, schannel = url_channel(url)
            prefix = '' if schannel == 'defaults' else schannel + '::'
        fn = prefix + fn
        dist = fn[:-8]  # strip the '.tar.bz2' extension
        pkg_path = install.is_fetched(dist)
        dir_path = install.is_extracted(dist)
        # Don't re-fetch unless there is an MD5 mismatch
        if pkg_path and (md5 and md5_file(pkg_path) != md5):
            # This removes any extracted copies as well
            actions[RM_FETCHED].append(dist)
            pkg_path = dir_path = None
        # Don't re-extract unless forced, or if we can't check the md5
        if dir_path and (force_extract or md5 and not pkg_path):
            actions[RM_EXTRACTED].append(dist)
            dir_path = None
        if not dir_path:
            if not pkg_path:
                _, conflict = install.find_new_location(dist)
                if conflict:
                    actions[RM_FETCHED].append(conflict)
                actions[FETCH].append(dist)
                if md5:
                    # Need to verify against the package index
                    verifies.append((dist + '.tar.bz2', md5))
                    channels[url_p + '/'] = (schannel, 0)
            actions[EXTRACT].append(dist)
        # unlink any installed package with that name
        name = install.name_dist(dist)
        if name in linked:
            actions[UNLINK].append(linked[name])
        actions[LINK].append(dist)
    # Finish the MD5 verification
    if verifies:
        index = fetch_index(channels, **fetch_args)
        for fn, md5 in verifies:
            info = index.get(fn)
            if info is None:
                sys.exit("Error: no package '%s' in index" % fn)
            if 'md5' not in info:
                # NOTE(review): warning does not skip the check below, so a
                # record without 'md5' still raises KeyError -- confirm.
                sys.stderr.write('Warning: cannot lookup MD5 of: %s' % fn)
            if info['md5'] != md5:
                sys.exit(
                    'MD5 mismatch for: %s\n spec: %s\n repo: %s'
                    % (fn, md5, info['md5']))
    execute_actions(actions, index=index, verbose=verbose)
    return actions
# One-off script: fetch the win-32 repodata for the 'free' and 'pro'
# channels, strip volatile fields, and dump the merged index to index.json.
# NOTE(review): Python 2 only (`itervalues`, print statement).
import json
from conda.fetch import fetch_index

index = fetch_index(('http://repo.continuum.io/pkgs/free/win-32/',
                     'http://repo.continuum.io/pkgs/pro/win-32/',
                     ))
for info in index.itervalues():
    # Drop checksum/size so the dump is stable across repo rebuilds.
    del info['md5'], info['size']
    # Reduce the full channel URL to its short name (third-from-last
    # path component, e.g. '.../pkgs/free/win-32/' -> 'free').
    channel = info['channel']
    channel = channel.split('/')[-3]
    assert channel in ('pro', 'free', 'test-pkgs'), channel
    info['channel'] = channel

print len(index)
data = json.dumps(index, indent=2, sort_keys=True)
# Normalize trailing whitespace and ensure a final newline.
data = '\n'.join(line.rstrip() for line in data.split('\n'))
if not data.endswith('\n'):
    data += '\n'
with open('index.json', 'w') as fo:
    fo.write(data)
def explicit(specs, prefix, verbose=False, force_extract=True, fetch_args=None):
    # Build and execute an action plan that installs a list of explicit
    # package URLs/paths into *prefix*, reusing cached downloads and
    # extractions where their MD5 checksums still match.
    actions = defaultdict(list)
    actions[
        'op_order'] = RM_FETCHED, FETCH, RM_EXTRACTED, EXTRACT, UNLINK, LINK
    actions['PREFIX'] = prefix
    linked = {install.name_dist(dist): dist for dist in install.linked(prefix)}
    fetch_args = fetch_args or {}
    index = {}
    verifies = []   # (filename, md5-or-None) pairs for later verification
    channels = {}   # channel url -> (schannel, priority) for fetch_index
    for spec in specs:
        if spec == '@EXPLICIT':
            continue
        # Format: (url|path)(:#md5)?
        m = url_pat.match(spec)
        if m is None:
            sys.exit('Could not parse explicit URL: %s' % spec)
        url, md5 = m.group('url') + '/' + m.group('fn'), m.group('md5')
        if not is_url(url):
            # Local paths are converted to file:// URLs.
            if not isfile(url):
                sys.exit('Error: file not found: %s' % url)
            url = utils.url_path(url)
        url_p, fn = url.rsplit('/', 1)
        # See if the URL refers to a package in our cache
        prefix = pkg_path = dir_path = None
        if url_p.startswith('file://'):
            prefix = install.cached_url(url)
        # If not, determine the channel name from the URL
        # NOTE(review): schannel is only bound on this branch; the
        # channels[...] assignment below assumes the cache lookup missed.
        if prefix is None:
            _, schannel = url_channel(url)
            prefix = '' if schannel == 'defaults' else schannel + '::'
        fn = prefix + fn
        dist = fn[:-8]  # strip the '.tar.bz2' extension
        pkg_path = install.is_fetched(dist)
        dir_path = install.is_extracted(dist)
        # Don't re-fetch unless there is an MD5 mismatch
        if pkg_path and (md5 and md5_file(pkg_path) != md5):
            # This removes any extracted copies as well
            actions[RM_FETCHED].append(dist)
            pkg_path = dir_path = None
        # Don't re-extract unless forced, or if we can't check the md5
        if dir_path and (force_extract or md5 and not pkg_path):
            actions[RM_EXTRACTED].append(dist)
            dir_path = None
        if not dir_path:
            if not pkg_path:
                _, conflict = install.find_new_location(dist)
                if conflict:
                    actions[RM_FETCHED].append(conflict)
                channels[url_p + '/'] = (schannel, 0)
                actions[FETCH].append(dist)
                verifies.append((dist + '.tar.bz2', md5))
            actions[EXTRACT].append(dist)
        # unlink any installed package with that name
        name = install.name_dist(dist)
        if name in linked:
            actions[UNLINK].append(linked[name])
        actions[LINK].append(dist)
    # Pull the repodata for channels we are using
    if channels:
        index.update(fetch_index(channels, **fetch_args))
    # Finish the MD5 verification
    for fn, md5 in verifies:
        info = index.get(fn)
        if info is None:
            sys.exit("Error: no package '%s' in index" % fn)
        if md5 and 'md5' not in info:
            # NOTE(review): warning does not skip the check below, so a
            # record without 'md5' still raises KeyError -- confirm.
            sys.stderr.write('Warning: cannot lookup MD5 of: %s' % fn)
        if md5 and info['md5'] != md5:
            sys.exit('MD5 mismatch for: %s\n spec: %s\n repo: %s' %
                     (fn, md5, info['md5']))
    execute_actions(actions, index=index, verbose=verbose)
    return actions