def DPkg(s, **kwargs):
    """Build an IndexRecord for the dist spec string *s*.

    Extra record fields may be supplied via **kwargs; a field that collides
    with one of the defaults raises TypeError (duplicate keyword argument).
    """
    dist = Dist(s)
    build_str = dist.build_string
    # build number is the suffix after the last '_' in the build string
    number = int(build_str.rpartition('_')[2])
    return IndexRecord(
        fn=dist.to_filename(),
        name=dist.name,
        version=dist.version,
        build=build_str,
        build_number=number,
        schannel=dist.channel,
        **kwargs,
    )
def DPkg(s, **kwargs):
    """Build a PackageRecord describing the dist spec string *s*.

    Extra record fields may be supplied via **kwargs; a field that collides
    with one of the defaults raises TypeError (duplicate keyword argument).
    """
    dist = Dist(s)
    build = dist.build_string
    return PackageRecord(
        fn=dist.to_filename(),
        name=dist.name,
        version=dist.version,
        build=build,
        # build number is encoded after the final '_' of the build string
        build_number=int(build.rsplit('_', 1)[-1]),
        channel=dist.channel,
        subdir=context.subdir,
        md5="012345789",
        **kwargs,
    )
def DPkg(s, **kwargs):
    """Construct a PackageRecord from the dist spec string *s*.

    **kwargs adds further record fields; duplicating one of the defaults
    raises TypeError (duplicate keyword argument), as with any call.
    """
    d = Dist(s)
    bs = d.build_string
    # trailing '_<n>' of the build string carries the build number
    bn = int(bs.rpartition('_')[2])
    return PackageRecord(
        fn=d.to_filename(),
        name=d.name,
        version=d.version,
        build=bs,
        build_number=bn,
        channel=d.channel,
        subdir=context.subdir,
        md5="012345789",
        **kwargs,
    )
def DPkg(s, **kwargs):
    """Build an IndexRecord for the dist spec string *s*.

    Unlike the keyword-splat variants, defaults here may be overridden by
    **kwargs (later keys win in the dict merge).
    """
    dist = Dist(s)
    build = dist.build_string
    defaults = {
        'fn': dist.to_filename(),
        'name': dist.name,
        'version': dist.version,
        'build': build,
        # build number is the integer after the last '_' in the build string
        'build_number': int(build.rsplit('_', 1)[-1]),
        'channel': dist.channel,
        'subdir': context.subdir,
        'md5': "012345789",
    }
    # caller-supplied kwargs take precedence over the defaults
    return IndexRecord(**{**defaults, **kwargs})
def explicit(specs, prefix, verbose=False, force_extract=True, index_args=None, index=None):
    """Install an explicit list of package URLs/paths into *prefix*.

    Each entry of *specs* is either the literal marker '@EXPLICIT' (skipped)
    or a URL/path of a package tarball, optionally suffixed with '#<md5>'.
    Builds an actions plan (fetch / extract / unlink / link), verifies MD5s
    against the index, then runs the plan via execute_actions.

    Raises ParseError for an unparseable spec, CondaFileNotFoundError for a
    missing local directory, PackageNotFoundError / MD5MismatchError during
    verification.  Returns the actions mapping.
    """
    actions = defaultdict(list)
    actions['PREFIX'] = prefix
    actions['op_order'] = RM_FETCHED, FETCH, RM_EXTRACTED, EXTRACT, UNLINK, LINK, SYMLINK_CONDA
    # currently-installed packages in the target prefix, keyed by dist name
    linked = {dist.dist_name: dist for dist in install_linked(prefix)}
    index_args = index_args or {}
    index = index or {}
    verifies = []  # List[Tuple(filename, md5)]
    channels = set()
    for spec in specs:
        if spec == '@EXPLICIT':
            continue

        # Format: (url|path)(:#md5)?
        m = url_pat.match(spec)
        if m is None:
            raise ParseError('Could not parse explicit URL: %s' % spec)
        url_p, fn, md5 = m.group('url_p'), m.group('fn'), m.group('md5')
        if not is_url(url_p):
            # bare path: resolve relative to the current directory, then
            # normalize to a file:// URL
            if url_p is None:
                url_p = curdir
            elif not isdir(url_p):
                raise CondaFileNotFoundError(join(url_p, fn))
            url_p = path_to_url(url_p).rstrip('/')
        url = "{0}/{1}".format(url_p, fn)

        # is_local: if the tarball is stored locally (file://)
        # is_cache: if the tarball is sitting in our cache
        is_local = not is_url(url) or url.startswith('file://')
        # NOTE(review): this clobbers the *prefix* parameter for the rest of
        # the loop body — from here on 'prefix' is a channel-prefix string
        # (or None), not the install prefix.  The later try_hard_link call
        # receives this repurposed value; confirm that is intentional.
        prefix = cached_url(url) if is_local else None
        is_cache = prefix is not None
        if is_cache:
            # Channel information from the cache
            schannel = DEFAULTS if prefix == '' else prefix[:-2]
        else:
            # Channel information from the URL
            channel, schannel = Channel(url).url_channel_wtf
            prefix = '' if schannel == DEFAULTS else schannel + '::'

        # prepend the channel prefix and strip the '.tar.bz2' extension
        # (the [:-8] slice assumes that exact suffix — TODO confirm)
        fn = prefix + fn
        dist = Dist(fn[:-8])

        # Add explicit file to index so we'll be sure to see it later
        if is_local:
            index[dist] = Record(**{
                'fn': dist.to_filename(),
                'url': url,
                'md5': md5,
                'build': dist.quad[2],
                'build_number': dist.build_number(),
                'name': dist.quad[0],
                'version': dist.quad[1],
            })
            verifies.append((fn, md5))

        pkg_path = is_fetched(dist)
        dir_path = is_extracted(dist)

        # Don't re-fetch unless there is an MD5 mismatch
        # Also remove explicit tarballs from cache, unless the path *is* to the cache
        if pkg_path and not is_cache and (is_local or md5 and md5_file(pkg_path) != md5):
            # This removes any extracted copies as well
            actions[RM_FETCHED].append(dist)
            pkg_path = dir_path = None

        # Don't re-extract unless forced, or if we can't check the md5
        if dir_path and (force_extract or md5 and not pkg_path):
            actions[RM_EXTRACTED].append(dist)
            dir_path = None

        if not dir_path:
            if not pkg_path:
                # choose a cache location for the tarball; a conflicting
                # cached entry gets scheduled for removal first
                pkg_path, conflict = find_new_location(dist)
                pkg_path = join(pkg_path, dist.to_filename())
                if conflict:
                    actions[RM_FETCHED].append(Dist(conflict))
                if not is_local:
                    if dist not in index or index[dist].get('not_fetched'):
                        channels.add(schannel)
                    verifies.append((dist.to_filename(), md5))
                actions[FETCH].append(dist)
            actions[EXTRACT].append(dist)

        # unlink any installed package with that name
        name = dist.dist_name
        if name in linked:
            actions[UNLINK].append(linked[name])

        ######################################
        # copied from conda/plan.py   TODO: refactor
        ######################################

        # check for link action
        fetched_dist = dir_path or pkg_path[:-8]
        fetched_dir = dirname(fetched_dist)
        try:
            # Determine what kind of linking is necessary
            if not dir_path:
                # If not already extracted, create some dummy
                # data to test with
                rm_rf(fetched_dist)
                ppath = join(fetched_dist, 'info')
                os.makedirs(ppath)
                index_json = join(ppath, 'index.json')
                with open(index_json, 'w'):
                    pass
            if context.always_copy:
                lt = LINK_COPY
            elif try_hard_link(fetched_dir, prefix, dist):
                lt = LINK_HARD
            elif context.allow_softlinks and not on_win:
                lt = LINK_SOFT
            else:
                lt = LINK_COPY
            actions[LINK].append('%s %d' % (dist, lt))
        except (OSError, IOError):
            # fall back to copying when the link probe itself fails
            actions[LINK].append('%s %d' % (dist, LINK_COPY))
        finally:
            if not dir_path:
                # Remove the dummy data
                try:
                    rm_rf(fetched_dist)
                except (OSError, IOError):
                    pass

    ######################################
    # ^^^^^^^^^^ copied from conda/plan.py
    ######################################

    # Pull the repodata for channels we are using
    if channels:
        index_args = index_args or {}
        index_args = index_args.copy()
        index_args['prepend'] = False
        index_args['channel_urls'] = list(channels)
        index.update(get_index(**index_args))

    # Finish the MD5 verification
    for fn, md5 in verifies:
        info = index.get(Dist(fn))
        if info is None:
            raise PackageNotFoundError(fn, "no package '%s' in index" % fn)
        if md5 and 'md5' not in info:
            sys.stderr.write('Warning: cannot lookup MD5 of: %s' % fn)
        if md5 and info['md5'] != md5:
            raise MD5MismatchError(
                'MD5 mismatch for: %s\n spec: %s\n repo: %s' % (fn, md5, info['md5']))

    execute_actions(actions, index=index, verbose=verbose)
    return actions