def ensure_linked_actions(dists, prefix):
    """Build the action map needed to get every dist in `dists` linked
    into `prefix`.

    Already-linked packages are skipped entirely.  For the rest, a LINK
    action is always added; EXTRACT and FETCH actions are added only
    when the package cache does not already hold the extracted tree or
    the fetched tarball, respectively.
    """
    plan = defaultdict(list)
    plan[PREFIX] = prefix
    for dist in dists:
        # Nothing to do for packages that are already linked.
        if install.is_linked(prefix, dist):
            continue
        plan[LINK].append(dist)
        # Extract only when the cache lacks an extracted copy.
        if not install.is_extracted(config.pkgs_dir, dist):
            plan[EXTRACT].append(dist)
            # Fetch only when the tarball is not cached either.
            if not install.is_fetched(config.pkgs_dir, dist):
                plan[FETCH].append(dist)
    return plan
def ensure_linked_actions(dists, prefix):
    """Return the planner actions (LINK / EXTRACT / FETCH) required to
    make every distribution in `dists` linked into `prefix`.

    For packages already extracted somewhere in the cache, the link type
    (copy / hard / soft) is probed against the actual extracted tree.
    For packages not yet extracted, a dummy info/index.json is created
    in the first pkgs dir purely to probe what link type will work, and
    is removed again afterwards.
    """
    actions = defaultdict(list)
    actions[inst.PREFIX] = prefix
    for dist in dists:
        # Already linked: nothing to plan for this dist.
        if install.is_linked(prefix, dist):
            continue
        extracted_in = extracted_where(dist)
        if extracted_in:
            # Choose the link type based on config and a hard-link probe
            # against the real extracted package directory.
            if config.always_copy:
                lt = install.LINK_COPY
            elif install.try_hard_link(extracted_in, prefix, dist):
                lt = install.LINK_HARD
            else:
                lt = (install.LINK_SOFT if (config.allow_softlinks and
                                            sys.platform != 'win32')
                      else install.LINK_COPY)
            # LINK argument encodes "dist source-dir link-type".
            actions[inst.LINK].append('%s %s %d' % (dist, extracted_in, lt))
        else:
            # Make a guess from the first pkgs dir, which is where it will be
            # extracted
            try:
                # Create dummy data (an empty info/index.json) so the
                # hard-link probe has a real file to test against.
                os.makedirs(join(config.pkgs_dirs[0], dist, 'info'))
                index_json = join(config.pkgs_dirs[0], dist, 'info',
                                  'index.json')
                with open(index_json, 'w'):
                    pass
                if config.always_copy:
                    lt = install.LINK_COPY
                elif install.try_hard_link(config.pkgs_dirs[0], prefix,
                                           dist):
                    lt = install.LINK_HARD
                else:
                    lt = (install.LINK_SOFT if (config.allow_softlinks and
                                                sys.platform != 'win32')
                          else install.LINK_COPY)
                actions[inst.LINK].append('%s %s %d'
                                          % (dist, config.pkgs_dirs[0], lt))
            except (OSError, IOError):
                # Probe failed (e.g. pkgs dir not writable): fall back to
                # the bare-dist LINK form and let link time decide.
                actions[inst.LINK].append(dist)
            finally:
                # Always remove the dummy data, best-effort.
                try:
                    install.rm_rf(join(config.pkgs_dirs[0], dist))
                except (OSError, IOError):
                    pass
            # Not extracted yet, so an EXTRACT (and possibly FETCH) is
            # needed.  NOTE(review): indentation was lost in this file;
            # placing these inside the not-extracted branch matches the
            # sibling implementations — confirm against history.
            actions[inst.EXTRACT].append(dist)
            if install.is_fetched(config.pkgs_dirs[0], dist):
                continue
            actions[inst.FETCH].append(dist)
    return actions
def app_info_packages(fn):
    """Given the filename of a package, return which packages (and their
    sizes) still need to be downloaded in order to install it — that is,
    the package itself and its dependencies.

    Returns a list of tuples ``(pkg_name, pkg_version, size, fetched)``
    where *fetched* is True when the tarball is already in the package
    cache.
    """
    from conda.resolve import Resolve

    index = get_index()
    resolver = Resolve(index)
    result = []
    for dep_fn in resolver.solve([fn2spec(fn)]):
        meta = index[dep_fn]
        dist = dep_fn[:-8]  # strip the '.tar.bz2' suffix
        result.append((meta['name'], meta['version'], meta['size'],
                       install.is_fetched(config.pkgs_dir, dist)))
    return result
def app_info_packages(fn, prefix=config.root_dir):
    """Given the filename of a package, return which packages (and their
    sizes) still need to be downloaded in order to install it — that is,
    the package itself and its dependencies.

    Returns a list of tuples ``(pkg_name, pkg_version, size, fetched)``
    where *fetched* is True when the tarball is already present in any
    configured pkgs dir.  Entries whose index record carries a 'link'
    key are omitted.
    """
    from conda.resolve import Resolve

    index = get_index(prefix=prefix)
    resolver = Resolve(index)
    result = []
    solution = resolver.solve([_fn2fullspec(fn)],
                              installed=install.linked(prefix))
    for dep_fn in solution:
        meta = index[dep_fn]
        if 'link' in meta:
            continue
        dist = dep_fn[:-8]  # strip the '.tar.bz2' suffix
        cached = any(install.is_fetched(pkgs_dir, dist)
                     for pkgs_dir in config.pkgs_dirs)
        result.append((meta['name'], meta['version'], meta['size'], cached))
    return result
def ensure_linked_actions(dists, prefix):
    """Return the actions (LINK / EXTRACT / FETCH) required to link every
    dist in `dists` into `prefix`.

    When a package is already extracted in the cache, the LINK action
    carries the source directory and a probed link type; otherwise a
    plain LINK plus EXTRACT (and, if the tarball is missing, FETCH) is
    planned.
    """
    actions = defaultdict(list)
    actions[PREFIX] = prefix
    for dist in dists:
        if install.is_linked(prefix, dist):
            continue
        extracted_in = extracted_where(dist)
        if extracted_in:
            if install.try_hard_link(extracted_in, prefix, dist):
                lt = install.LINK_HARD
            else:
                # Fix: honor config.allow_softlinks when falling back from a
                # hard link, consistent with the sibling implementation.
                # Previously LINK_SOFT was chosen on every non-Windows
                # platform even when softlinks were disabled.
                lt = (install.LINK_SOFT
                      if (config.allow_softlinks and
                          sys.platform != 'win32')
                      else install.LINK_COPY)
            actions[LINK].append("%s %s %d" % (dist, extracted_in, lt))
            continue
        actions[LINK].append(dist)
        actions[EXTRACT].append(dist)
        if install.is_fetched(config.pkgs_dir, dist):
            continue
        actions[FETCH].append(dist)
    return actions
def ensure_linked_actions(dists, prefix):
    """Return the actions (LINK / EXTRACT / FETCH) required to link every
    dist in `dists` into `prefix`.

    For packages already extracted in the cache the LINK action encodes
    the source directory and a probed link type; other packages get a
    plain LINK plus EXTRACT, and FETCH when the tarball is missing.
    """
    plan = defaultdict(list)
    plan[PREFIX] = prefix
    for dist in dists:
        # Already linked into this prefix: nothing to do.
        if install.is_linked(prefix, dist):
            continue
        cache_dir = extracted_where(dist)
        if cache_dir:
            # Probe for the cheapest viable link type.
            if install.try_hard_link(cache_dir, prefix, dist):
                link_type = install.LINK_HARD
            elif config.allow_softlinks and sys.platform != 'win32':
                link_type = install.LINK_SOFT
            else:
                link_type = install.LINK_COPY
            plan[LINK].append('%s %s %d' % (dist, cache_dir, link_type))
        else:
            plan[LINK].append(dist)
            plan[EXTRACT].append(dist)
            if not install.is_fetched(config.pkgs_dirs[0], dist):
                plan[FETCH].append(dist)
    return plan
def explicit(specs, prefix, verbose=False, force_extract=True,
             fetch_args=None):
    """Install an explicit list of package URLs/paths into `prefix`.

    Each spec is either '@EXPLICIT' (ignored) or a URL/path of a tarball,
    optionally suffixed with '#md5'.  Builds the action plan (fetch,
    extract, unlink, link), verifies MD5s against the channel index when
    given, then executes the plan.  Exits the process on parse errors,
    missing files, or MD5 mismatches.
    """
    actions = defaultdict(list)
    actions['PREFIX'] = prefix
    actions['op_order'] = RM_FETCHED, FETCH, RM_EXTRACTED, EXTRACT, UNLINK, LINK
    # Map package name -> currently-linked dist, for UNLINK planning.
    linked = {install.name_dist(dist): dist for dist in install.linked(prefix)}
    fetch_args = fetch_args or {}
    index = {}
    verifies = []   # (filename, md5) pairs to check against the index
    channels = {}   # channel url -> (schannel, priority) for fetch_index
    for spec in specs:
        if spec == '@EXPLICIT':
            continue
        # Format: (url|path)(:#md5)?
        m = url_pat.match(spec)
        if m is None:
            sys.exit('Could not parse explicit URL: %s' % spec)
        url, md5 = m.group('url') + '/' + m.group('fn'), m.group('md5')
        if not is_url(url):
            if not isfile(url):
                sys.exit('Error: file not found: %s' % url)
            url = utils.url_path(url)
        url_p, fn = url.rsplit('/', 1)
        # See if the URL refers to a package in our cache
        # NOTE: `prefix` is deliberately rebound here as a channel-name
        # prefix string; the install prefix was consumed above.
        prefix = pkg_path = dir_path = None
        if url_p.startswith('file://'):
            prefix = install.cached_url(url)
        # If not, determine the channel name from the URL
        if prefix is None:
            _, schannel = url_channel(url)
            prefix = '' if schannel == 'defaults' else schannel + '::'
        fn = prefix + fn
        dist = fn[:-8]
        pkg_path = install.is_fetched(dist)
        dir_path = install.is_extracted(dist)
        # Don't re-fetch unless there is an MD5 mismatch
        if pkg_path and (md5 and md5_file(pkg_path) != md5):
            # This removes any extracted copies as well
            actions[RM_FETCHED].append(dist)
            pkg_path = dir_path = None
        # Don't re-extract unless forced, or if we can't check the md5
        if dir_path and (force_extract or md5 and not pkg_path):
            actions[RM_EXTRACTED].append(dist)
            dir_path = None
        if not dir_path:
            if not pkg_path:
                # If another cache location holds a conflicting copy,
                # schedule its removal before fetching.
                _, conflict = install.find_new_location(dist)
                if conflict:
                    actions[RM_FETCHED].append(conflict)
                actions[FETCH].append(dist)
                if md5:
                    # Need to verify against the package index
                    verifies.append((dist + '.tar.bz2', md5))
                # NOTE(review): `schannel` is only assigned in the
                # prefix-is-None branch above; for a cache hit this may
                # use a stale or unbound value — confirm intended.
                channels[url_p + '/'] = (schannel, 0)
            actions[EXTRACT].append(dist)
        # unlink any installed package with that name
        name = install.name_dist(dist)
        if name in linked:
            actions[UNLINK].append(linked[name])
        actions[LINK].append(dist)
    # Finish the MD5 verification
    if verifies:
        index = fetch_index(channels, **fetch_args)
        for fn, md5 in verifies:
            info = index.get(fn)
            if info is None:
                sys.exit("Error: no package '%s' in index" % fn)
            if 'md5' not in info:
                sys.stderr.write('Warning: cannot lookup MD5 of: %s' % fn)
            if info['md5'] != md5:
                sys.exit(
                    'MD5 mismatch for: %s\n spec: %s\n repo: %s'
                    % (fn, md5, info['md5']))
    execute_actions(actions, index=index, verbose=verbose)
    return actions
def ensure_linked_actions(dists, prefix, index=None, force=False,
                          always_copy=False):
    """Return the action plan required to link every dist in `dists`
    into `prefix`.

    When `index` is supplied, cached tarballs are MD5-checked against it
    and re-fetched on mismatch.  When `force` is true, already-linked
    packages are re-extracted and re-linked.  The LINK action encodes
    the probed link type ('<dist> <link_type>') when probing succeeds,
    or the bare dist on probe failure.

    NOTE(review): setting `force = True` on an MD5 mismatch persists for
    all subsequent dists in the loop — confirm that is intended.
    """
    actions = defaultdict(list)
    actions[inst.PREFIX] = prefix
    actions['op_order'] = (inst.RM_FETCHED, inst.FETCH, inst.RM_EXTRACTED,
                           inst.EXTRACT, inst.UNLINK, inst.LINK)
    for dist in dists:
        fetched_in = install.is_fetched(dist)
        extracted_in = install.is_extracted(dist)
        if fetched_in and index is not None:
            # Test the MD5, and possibly re-fetch
            fn = dist + '.tar.bz2'
            try:
                if md5_file(fetched_in) != index[fn]['md5']:
                    # RM_FETCHED now removes the extracted data too
                    actions[inst.RM_FETCHED].append(dist)
                    # Re-fetch, re-extract, re-link
                    fetched_in = extracted_in = None
                    force = True
            except KeyError:
                sys.stderr.write('Warning: cannot lookup MD5 of: %s' % fn)
        if not force and install.is_linked(prefix, dist):
            continue
        if extracted_in and force:
            # Always re-extract in the force case
            actions[inst.RM_EXTRACTED].append(dist)
            extracted_in = None
        # Otherwise we need to extract, and possibly fetch
        if not extracted_in and not fetched_in:
            # If there is a cache conflict, clean it up
            fetched_in, conflict = install.find_new_location(dist)
            if conflict is not None:
                actions[inst.RM_FETCHED].append(conflict)
            actions[inst.FETCH].append(dist)
        if not extracted_in:
            actions[inst.EXTRACT].append(dist)
        # fetched_in[:-8] strips '.tar.bz2' to get the extraction dir.
        fetched_dist = extracted_in or fetched_in[:-8]
        fetched_dir = dirname(fetched_dist)
        try:
            # Determine what kind of linking is necessary
            if not extracted_in:
                # If not already extracted, create some dummy
                # data to test with
                install.rm_rf(fetched_dist)
                ppath = join(fetched_dist, 'info')
                os.makedirs(ppath)
                index_json = join(ppath, 'index.json')
                with open(index_json, 'w'):
                    pass
            if config.always_copy or always_copy:
                lt = install.LINK_COPY
            elif install.try_hard_link(fetched_dir, prefix, dist):
                lt = install.LINK_HARD
            elif config.allow_softlinks and sys.platform != 'win32':
                lt = install.LINK_SOFT
            else:
                lt = install.LINK_COPY
            actions[inst.LINK].append('%s %d' % (dist, lt))
        except (OSError, IOError):
            # Probe failed: fall back to the bare-dist LINK form.
            actions[inst.LINK].append(dist)
        finally:
            if not extracted_in:
                # Remove the dummy data
                try:
                    install.rm_rf(fetched_dist)
                except (OSError, IOError):
                    pass
    return actions
======= >>>>>>> origin/feature/instruction-arguments ======= >>>>>>> princeofdarkness76/feature/instruction-arguments ======= >>>>>>> origin/feature/instruction-arguments except (OSError, IOError): actions[inst.LINK].append((dist,)) finally: try: install.rm_rf(join(config.pkgs_dirs[0], dist)) except (OSError, IOError): pass actions[inst.EXTRACT].append((dist,)) if install.is_fetched(config.pkgs_dirs[0], dist): continue actions[inst.FETCH].append((dist,)) return actions def force_linked_actions(dists, index, prefix): actions = defaultdict(list) <<<<<<< HEAD <<<<<<< HEAD <<<<<<< HEAD <<<<<<< HEAD <<<<<<< HEAD <<<<<<< HEAD actions[inst.PREFIX] = prefix actions['op_order'] = (inst.RM_FETCHED, inst.FETCH, inst.RM_EXTRACTED,
def ensure_linked_actions(dists, prefix, index=None, force=False,
                          always_copy=False):
    """Return the action plan required to link every dist in `dists`
    into `prefix`.

    Variant of the sibling implementation above: the FETCH destination
    is resolved to a concrete tarball path via
    `install._dist2filename`, and link-type configuration is read from
    the bare names `config_always_copy` / `allow_softlinks`.
    NOTE(review): those bare names are presumably module-level imports
    from the config module — verify they exist; the sibling version uses
    `config.always_copy` / `config.allow_softlinks`.
    """
    actions = defaultdict(list)
    actions[inst.PREFIX] = prefix
    actions['op_order'] = (inst.RM_FETCHED, inst.FETCH, inst.RM_EXTRACTED,
                           inst.EXTRACT, inst.UNLINK, inst.LINK)
    for dist in dists:
        fetched_in = install.is_fetched(dist)
        extracted_in = install.is_extracted(dist)
        if fetched_in and index is not None:
            # Test the MD5, and possibly re-fetch
            fn = dist + '.tar.bz2'
            try:
                if md5_file(fetched_in) != index[fn]['md5']:
                    # RM_FETCHED now removes the extracted data too
                    actions[inst.RM_FETCHED].append(dist)
                    # Re-fetch, re-extract, re-link
                    fetched_in = extracted_in = None
                    force = True
            except KeyError:
                sys.stderr.write('Warning: cannot lookup MD5 of: %s' % fn)
        if not force and install.is_linked(prefix, dist):
            continue
        if extracted_in and force:
            # Always re-extract in the force case
            actions[inst.RM_EXTRACTED].append(dist)
            extracted_in = None
        # Otherwise we need to extract, and possibly fetch
        if not extracted_in and not fetched_in:
            # If there is a cache conflict, clean it up
            fetched_in, conflict = install.find_new_location(dist)
            fetched_in = join(fetched_in, install._dist2filename(dist))
            if conflict is not None:
                actions[inst.RM_FETCHED].append(conflict)
            actions[inst.FETCH].append(dist)
        if not extracted_in:
            actions[inst.EXTRACT].append(dist)
        # fetched_in[:-8] strips '.tar.bz2' to get the extraction dir.
        fetched_dist = extracted_in or fetched_in[:-8]
        fetched_dir = dirname(fetched_dist)
        try:
            # Determine what kind of linking is necessary
            if not extracted_in:
                # If not already extracted, create some dummy
                # data to test with
                install.rm_rf(fetched_dist)
                ppath = join(fetched_dist, 'info')
                os.makedirs(ppath)
                index_json = join(ppath, 'index.json')
                with open(index_json, 'w'):
                    pass
            if config_always_copy or always_copy:
                lt = install.LINK_COPY
            elif install.try_hard_link(fetched_dir, prefix, dist):
                lt = install.LINK_HARD
            elif allow_softlinks and sys.platform != 'win32':
                lt = install.LINK_SOFT
            else:
                lt = install.LINK_COPY
            actions[inst.LINK].append('%s %d' % (dist, lt))
        except (OSError, IOError):
            # Probe failed: fall back to the bare-dist LINK form.
            actions[inst.LINK].append(dist)
        finally:
            if not extracted_in:
                # Remove the dummy data
                try:
                    install.rm_rf(fetched_dist)
                except (OSError, IOError):
                    pass
    return actions
def explicit(specs, prefix, verbose=False, force_extract=True,
             fetch_args=None):
    """Install an explicit list of package URLs/paths into `prefix`.

    Each spec is either '@EXPLICIT' (ignored) or a URL/path of a tarball,
    optionally suffixed with '#md5'.  Builds the action plan (fetch,
    extract, unlink, link), verifies MD5s against the channel index when
    given, then executes the plan.  Exits the process on parse errors,
    missing files, or MD5 mismatches.
    """
    actions = defaultdict(list)
    actions['PREFIX'] = prefix
    actions[
        'op_order'] = RM_FETCHED, FETCH, RM_EXTRACTED, EXTRACT, UNLINK, LINK
    # Map package name -> currently-linked dist, for UNLINK planning.
    linked = {install.name_dist(dist): dist for dist in install.linked(prefix)}
    fetch_args = fetch_args or {}
    index = {}
    verifies = []   # (filename, md5) pairs to check against the index
    channels = {}   # channel url -> (schannel, priority) for fetch_index
    for spec in specs:
        if spec == '@EXPLICIT':
            continue
        # Format: (url|path)(:#md5)?
        m = url_pat.match(spec)
        if m is None:
            sys.exit('Could not parse explicit URL: %s' % spec)
        url, md5 = m.group('url') + '/' + m.group('fn'), m.group('md5')
        if not is_url(url):
            if not isfile(url):
                sys.exit('Error: file not found: %s' % url)
            url = utils.url_path(url)
        url_p, fn = url.rsplit('/', 1)
        # See if the URL refers to a package in our cache
        # NOTE: `prefix` is deliberately rebound here as a channel-name
        # prefix string; the install prefix was consumed above.
        prefix = pkg_path = dir_path = None
        if url_p.startswith('file://'):
            prefix = install.cached_url(url)
        # If not, determine the channel name from the URL
        if prefix is None:
            _, schannel = url_channel(url)
            prefix = '' if schannel == 'defaults' else schannel + '::'
        fn = prefix + fn
        dist = fn[:-8]
        pkg_path = install.is_fetched(dist)
        dir_path = install.is_extracted(dist)
        # Don't re-fetch unless there is an MD5 mismatch
        if pkg_path and (md5 and md5_file(pkg_path) != md5):
            # This removes any extracted copies as well
            actions[RM_FETCHED].append(dist)
            pkg_path = dir_path = None
        # Don't re-extract unless forced, or if we can't check the md5
        if dir_path and (force_extract or md5 and not pkg_path):
            actions[RM_EXTRACTED].append(dist)
            dir_path = None
        if not dir_path:
            if not pkg_path:
                # If another cache location holds a conflicting copy,
                # schedule its removal before fetching.
                _, conflict = install.find_new_location(dist)
                if conflict:
                    actions[RM_FETCHED].append(conflict)
                actions[FETCH].append(dist)
                if md5:
                    # Need to verify against the package index
                    verifies.append((dist + '.tar.bz2', md5))
                # NOTE(review): `schannel` is only assigned in the
                # prefix-is-None branch above; for a cache hit this may
                # use a stale or unbound value — confirm intended.
                channels[url_p + '/'] = (schannel, 0)
            actions[EXTRACT].append(dist)
        # unlink any installed package with that name
        name = install.name_dist(dist)
        if name in linked:
            actions[UNLINK].append(linked[name])
        actions[LINK].append(dist)
    # Finish the MD5 verification
    if verifies:
        index = fetch_index(channels, **fetch_args)
        for fn, md5 in verifies:
            info = index.get(fn)
            if info is None:
                sys.exit("Error: no package '%s' in index" % fn)
            if 'md5' not in info:
                sys.stderr.write('Warning: cannot lookup MD5 of: %s' % fn)
            if info['md5'] != md5:
                sys.exit('MD5 mismatch for: %s\n spec: %s\n repo: %s' %
                         (fn, md5, info['md5']))
    execute_actions(actions, index=index, verbose=verbose)
    return actions