def explicit(urls, prefix, verbose=True):
    import conda.fetch as fetch
    from conda.utils import md5_file

    dists = []
    for url in urls:
        if url == '@EXPLICIT':
            continue
        print("Fetching: %s" % url)
        channel_url, fn = url.rsplit('/', 1)
        dists.append(fn[:-8])
        index = fetch.fetch_index((channel_url + '/',))
        info = index[fn]
        pkg_path = join(config.pkgs_dirs[0], fn)
        if isfile(pkg_path):
            try:
                if md5_file(pkg_path) != index[fn]['md5']:
                    install.rm_rf(pkg_path)
                    fetch.fetch_pkg(info)
            except KeyError:
                sys.stderr.write('Warning: cannot lookup MD5 of: %s' % fn)
        else:
            fetch.fetch_pkg(info)

    force_extract_and_link(dists, prefix, verbose=verbose)
def fetch(info):
    download_dir = info['_download_dir']
    if not isdir(download_dir):
        os.makedirs(download_dir)
    # dists, urls, md5s and index are module-level names in this context
    for fn in dists:
        path = join(download_dir, fn)
        url = urls.get(fn)
        md5 = md5s.get(fn)
        if url:
            url_index = fetch_index((url,))
            try:
                pkginfo = url_index[fn]
            except KeyError:
                sys.exit("Error: no package '%s' in %s" % (fn, url))
        else:
            pkginfo = index[fn]
        if md5 and md5 != pkginfo['md5']:
            sys.exit("Error: MD5 sum for '%s' does not match in remote "
                     "repodata %s" % (fn, url))
        if isfile(path) and md5_file(path) == pkginfo['md5']:
            continue
        print('fetching: %s' % fn)
        fetch_pkg(pkginfo, download_dir)
def explicit(urls, prefix, verbose=True):
    import conda.fetch as fetch
    from conda.utils import md5_file

    dists = []
    for url in urls:
        if url == '@EXPLICIT':
            continue
        print("Fetching: %s" % url)
        m = url_pat.match(url)
        if m is None:
            sys.exit("Error: Could not parse: %s" % url)
        fn = m.group('fn')
        dists.append(fn[:-8])
        index = fetch.fetch_index((m.group('url') + '/',))
        try:
            info = index[fn]
        except KeyError:
            sys.exit("Error: no package '%s' in index" % fn)
        if m.group('md5') and m.group('md5') != info['md5']:
            sys.exit("Error: MD5 in explicit files does not match index")
        pkg_path = join(config.pkgs_dirs[0], fn)
        if isfile(pkg_path):
            try:
                if md5_file(pkg_path) != info['md5']:
                    install.rm_rf(pkg_path)
                    fetch.fetch_pkg(info)
            except KeyError:
                sys.stderr.write('Warning: cannot lookup MD5 of: %s' % fn)
        else:
            fetch.fetch_pkg(info)

    force_extract_and_link(dists, prefix, verbose=verbose)
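# The `explicit` variant above relies on a module-level `url_pat` regex that
# is not shown in this section. A minimal sketch of what it must provide (the
# exact pattern is an assumption, not the original): named groups 'url', 'fn'
# and 'md5' for inputs of the form <channel-url>/<pkg>.tar.bz2[#<md5>].
import re

url_pat = re.compile(r'(?P<url>\S+)/(?P<fn>[^/#\s]+\.tar\.bz2)'
                     r'(?:#(?P<md5>[0-9a-f]{32}))?$')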
def find_info(fn):
    if fn not in crd:
        return None
    md5 = md5_file(join(pkgs_dir, fn))
    for info in crd[fn]:
        if md5 == info.get("md5"):
            return info
    return None
def test_repo():
    index = read_index()
    files = set()
    for fn in iter_dir(repo_path):
        files.add(fn)
        info = index[fn]
        path = join(repo_path, fn)
        assert "%(name)s-%(version)s-%(build)s.tar.bz2" % info == fn
        assert getsize(path) == info["size"]
        assert md5_file(path) == info["md5"]
    assert files == set(index)
def fetch(info):
    global REPO_DIR
    REPO_DIR = join(expanduser('~'), '.conda', 'constructor',
                    info['platform'])
    if not isdir(REPO_DIR):
        os.makedirs(REPO_DIR)
    for fn in DISTS:
        path = join(REPO_DIR, fn)
        if isfile(path) and md5_file(path) == INDEX[fn]['md5']:
            continue
        print('fetching: %s' % fn)
        fetch_pkg(INDEX[fn], REPO_DIR)
def app_meta(self):
    d = {'type': 'app'}
    if self.get_value('app/icon'):
        d['icon'] = '%s.png' % md5_file(join(
            self.path, self.get_value('app/icon')))
    for field, key in [('app/entry', 'app_entry'),
                       ('app/type', 'app_type'),
                       ('app/cli_opts', 'app_cli_opts'),
                       ('app/summary', 'summary')]:
        value = self.get_value(field)
        if value:
            d[key] = value
    return d
def app_meta(self): d = {"type": "app"} if self.get_value("app/icon"): d["icon"] = "%s.png" % md5_file(join(self.path, self.get_value("app/icon"))) for field, key in [ ("app/entry", "app_entry"), ("app/type", "app_type"), ("app/cli_opts", "app_cli_opts"), ("app/summary", "summary"), ("app/own_environment", "app_own_environment"), ]: value = self.get_value(field) if value: d[key] = value return d
def get_header(tarball, info):
    name = info['name']
    dists = [fn[:-8] for fn in info['_dists']]
    dist0 = dists[0]
    assert name_dist(dist0) == 'python'

    has_license = bool('license_file' in info)
    ppd = ns_platform(info['_platform'])
    ppd['keep_pkgs'] = bool(info.get('keep_pkgs'))
    ppd['has_license'] = has_license
    for key in 'pre_install', 'post_install':
        ppd['has_%s' % key] = bool(key in info)

    install_lines = ['install_dist %s' % d for d in dists]
    install_lines.extend(add_condarc(info))
    # Needs to happen first -- can be templated
    replace = {
        'NAME': name,
        'name': name.lower(),
        'VERSION': info['version'],
        'PLAT': info['_platform'],
        'DIST0': dist0,
        'DEFAULT_PREFIX': info.get('default_prefix',
                                   '$HOME/%s' % name.lower()),
        'MD5': md5_file(tarball),
        'INSTALL_COMMANDS': '\n'.join(install_lines),
        'pycache': '__pycache__',
    }
    if has_license:
        replace['LICENSE'] = read_ascii_only(info['license_file'])

    data = read_header_template()
    data = preprocess(data, ppd)
    data = fill_template(data, replace)

    n = data.count('\n')
    data = data.replace('@LINES@', str(n + 1))

    # note that this replacement does not change the size of the header,
    # which would otherwise result in an inconsistency
    n = len(data) + getsize(tarball)
    data = data.replace('@SIZE_BYTES@', '%12d' % n)
    assert len(data) + getsize(tarball) == n

    return data
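# `fill_template` is referenced above but not shown in this section. A
# minimal sketch, assuming it substitutes each key of `replace` for a
# '__KEY__' placeholder in the template text (the placeholder convention is
# an assumption based on the other `get_header` variant below):
def fill_template(data, replace):
    for key, value in replace.items():
        data = data.replace('__%s__' % key, value)
    return data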
def get_header(tarball, info):
    data = read_header_template()

    name = info['name']
    dists0 = common.DISTS[0][:-8]
    py_name, py_version, unused_build = dists0.rsplit('-', 2)
    if py_name != 'python':
        sys.exit("Error: a Python package needs to be part of the "
                 "specifications")

    data = preprocess(data, ns_platform(info['platform']))

    # Needs to happen first -- can be templated
    data = data.replace('__NAME__', name)
    data = data.replace('__name__', name.lower())
    data = data.replace('__VERSION__', info['version'])
    data = data.replace('__DEFAULT_PREFIX__',
                        info.get('default_prefix', '$HOME/' + name.lower()))
    data = data.replace('__PLAT__', info['platform'])
    data = data.replace('__DIST0__', dists0)
    data = data.replace('__PY_VER__', py_version[:3])

    has_license = bool('license_file' in info)
    data = data.replace('__HAS_LICENSE__', str(int(has_license)))
    if has_license:
        data = data.replace('__LICENSE__',
                            read_ascii_only(info['license_file']))

    lines = ['install_dist %s' % (fn[:-8],) for fn in common.DISTS]
    if 'conda_default_channels' in info:
        add_condarc(lines, info)
    data = data.replace('__INSTALL_COMMANDS__', '\n'.join(lines))
    data = data.replace('__MD5__', md5_file(tarball))

    n = data.count('\n')
    data = data.replace('__LINES__', str(n + 1))

    # note that this replacement does not change the size of the header,
    # which would otherwise result in an inconsistency
    n = len(data) + getsize(tarball)
    data = data.replace('___BYTES___', '%11d' % n)

    return data
def force_linked_actions(dists, index, prefix):
    actions = defaultdict(list)
    actions[PREFIX] = prefix
    actions["op_order"] = (RM_FETCHED, FETCH, RM_EXTRACTED, EXTRACT,
                           UNLINK, LINK)
    for dist in dists:
        fn = dist + ".tar.bz2"
        pkg_path = join(config.pkgs_dir, fn)
        if isfile(pkg_path):
            if md5_file(pkg_path) != index[fn]["md5"]:
                actions[RM_FETCHED].append(dist)
                actions[FETCH].append(dist)
        else:
            actions[FETCH].append(dist)
        actions[RM_EXTRACTED].append(dist)
        actions[EXTRACT].append(dist)
        if isfile(join(prefix, "conda-meta", dist + ".json")):
            actions[UNLINK].append(dist)
        actions[LINK].append(dist)
    return actions
def force_linked_actions(dists, index, prefix):
    actions = defaultdict(list)
    actions[PREFIX] = prefix
    actions['op_order'] = (RM_FETCHED, FETCH, RM_EXTRACTED, EXTRACT,
                           UNLINK, LINK)
    for dist in dists:
        fn = dist + '.tar.bz2'
        pkg_path = join(config.pkgs_dirs[0], fn)
        if isfile(pkg_path):
            if md5_file(pkg_path) != index[fn]['md5']:
                actions[RM_FETCHED].append(dist)
                actions[FETCH].append(dist)
        else:
            actions[FETCH].append(dist)
        actions[RM_EXTRACTED].append(dist)
        actions[EXTRACT].append(dist)
        if isfile(join(prefix, 'conda-meta', dist + '.json')):
            actions[UNLINK].append(dist)
        actions[LINK].append(dist)
    return actions
def force_linked_actions(dists, index, prefix):
    actions = defaultdict(list)
    actions[inst.PREFIX] = prefix
    actions["op_order"] = (inst.RM_FETCHED, inst.FETCH, inst.RM_EXTRACTED,
                           inst.EXTRACT, inst.UNLINK, inst.LINK)
    for dist in dists:
        fn = dist + ".tar.bz2"
        pkg_path = join(config.pkgs_dirs[0], fn)
        if isfile(pkg_path):
            try:
                if md5_file(pkg_path) != index[fn]["md5"]:
                    actions[inst.RM_FETCHED].append(dist)
                    actions[inst.FETCH].append(dist)
            except KeyError:
                sys.stderr.write("Warning: cannot lookup MD5 of: %s" % fn)
        else:
            actions[inst.FETCH].append(dist)
        actions[inst.RM_EXTRACTED].append(dist)
        actions[inst.EXTRACT].append(dist)
        if isfile(join(prefix, "conda-meta", dist + ".json")):
            add_unlink(actions, dist)
        actions[inst.LINK].append(dist)
    return actions
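# The variants above call an `add_unlink` helper that is not shown in this
# section. A minimal sketch of what it presumably does, assumed by analogy
# with the variants that append to actions[UNLINK] directly:
def add_unlink(actions, dist):
    actions[inst.UNLINK].append(dist)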
def find_lib(link, path=None):
    from conda_build.build import prefix_files
    files = prefix_files()
    if link.startswith(config.build_prefix):
        link = link[len(config.build_prefix) + 1:]
        if link not in files:
            sys.exit("Error: Could not find %s" % link)
        return link
    if link.startswith('/'):  # but doesn't start with the build prefix
        return
    if link.startswith('@rpath/'):
        # Assume the rpath already points to lib, so there is no need to
        # change it.
        return
    if '/' not in link or link.startswith('@executable_path/'):
        link = basename(link)
        file_names = defaultdict(list)
        for f in files:
            file_names[basename(f)].append(f)
        if link not in file_names:
            sys.exit("Error: Could not find %s" % link)
        if len(file_names[link]) > 1:
            if path and basename(path) == link:
                # The link is for the file itself, just use it
                return path
            # Allow for the possibility of the same library appearing in
            # multiple places.
            md5s = set()
            for f in file_names[link]:
                md5s.add(md5_file(join(config.build_prefix, f)))
            if len(md5s) > 1:
                sys.exit("Error: Found multiple instances of %s: %s"
                         % (link, file_names[link]))
            else:
                file_names[link].sort()
                print("Found multiple instances of %s (%s). "
                      "Choosing the first one." % (link, file_names[link]))
        return file_names[link][0]
    print("Don't know how to find %s, skipping" % link)
def force_linked_actions(dists, index, prefix):
    actions = defaultdict(list)
    actions[inst.PREFIX] = prefix
    actions['op_order'] = (inst.RM_FETCHED, inst.FETCH, inst.RM_EXTRACTED,
                           inst.EXTRACT, inst.UNLINK, inst.LINK)
    for dist in dists:
        fn = dist + '.tar.bz2'
        pkg_path = join(config.pkgs_dirs[0], fn)
        if isfile(pkg_path):
            try:
                if md5_file(pkg_path) != index[fn]['md5']:
                    actions[inst.RM_FETCHED].append(dist)
                    actions[inst.FETCH].append(dist)
            except KeyError:
                sys.stderr.write('Warning: cannot lookup MD5 of: %s' % fn)
        else:
            actions[inst.FETCH].append(dist)
        actions[inst.RM_EXTRACTED].append(dist)
        actions[inst.EXTRACT].append(dist)
        if isfile(join(prefix, 'conda-meta', dist + '.json')):
            add_unlink(actions, dist)
        actions[inst.LINK].append(dist)
    return actions
def force_linked_actions(dists, index, prefix):
    actions = defaultdict(list)
    actions[PREFIX] = prefix
    actions['op_order'] = (RM_FETCHED, FETCH, RM_EXTRACTED, EXTRACT,
                           UNLINK, LINK)
    for dist in dists:
        fn = dist + '.tar.bz2'
        pkg_path = join(config.pkgs_dirs[0], fn)
        if isfile(pkg_path):
            try:
                if md5_file(pkg_path) != index[fn]['md5']:
                    actions[RM_FETCHED].append(dist)
                    actions[FETCH].append(dist)
            except KeyError:
                sys.stderr.write('Warning: cannot lookup MD5 of: %s' % fn)
        else:
            actions[FETCH].append(dist)
        actions[RM_EXTRACTED].append(dist)
        actions[EXTRACT].append(dist)
        if isfile(join(prefix, 'conda-meta', dist + '.json')):
            actions[UNLINK].append(dist)
        actions[LINK].append(dist)
    return actions
def ensure_linked_actions(dists, prefix, index=None, force=False,
                          always_copy=False):
    actions = defaultdict(list)
    actions[inst.PREFIX] = prefix
    actions['op_order'] = (inst.RM_FETCHED, inst.FETCH, inst.RM_EXTRACTED,
                           inst.EXTRACT, inst.UNLINK, inst.LINK)
    for dist in dists:
        fetched_in = install.is_fetched(dist)
        extracted_in = install.is_extracted(dist)

        if fetched_in and index is not None:
            # Test the MD5, and possibly re-fetch
            fn = dist + '.tar.bz2'
            try:
                if md5_file(fetched_in) != index[fn]['md5']:
                    # RM_FETCHED now removes the extracted data too
                    actions[inst.RM_FETCHED].append(dist)
                    # Re-fetch, re-extract, re-link
                    fetched_in = extracted_in = None
                    force = True
            except KeyError:
                sys.stderr.write('Warning: cannot lookup MD5 of: %s' % fn)

        if not force and install.is_linked(prefix, dist):
            continue

        if extracted_in and force:
            # Always re-extract in the force case
            actions[inst.RM_EXTRACTED].append(dist)
            extracted_in = None

        # Otherwise we need to extract, and possibly fetch
        if not extracted_in and not fetched_in:
            # If there is a cache conflict, clean it up
            fetched_in, conflict = install.find_new_location(dist)
            fetched_in = join(fetched_in, install._dist2filename(dist))
            if conflict is not None:
                actions[inst.RM_FETCHED].append(conflict)
            actions[inst.FETCH].append(dist)

        if not extracted_in:
            actions[inst.EXTRACT].append(dist)

        fetched_dist = extracted_in or fetched_in[:-8]
        fetched_dir = dirname(fetched_dist)
        try:
            # Determine what kind of linking is necessary
            if not extracted_in:
                # If not already extracted, create some dummy
                # data to test with
                install.rm_rf(fetched_dist)
                ppath = join(fetched_dist, 'info')
                os.makedirs(ppath)
                index_json = join(ppath, 'index.json')
                with open(index_json, 'w'):
                    pass
            if config_always_copy or always_copy:
                lt = install.LINK_COPY
            elif install.try_hard_link(fetched_dir, prefix, dist):
                lt = install.LINK_HARD
            elif allow_softlinks and sys.platform != 'win32':
                lt = install.LINK_SOFT
            else:
                lt = install.LINK_COPY
            actions[inst.LINK].append('%s %d' % (dist, lt))
        except (OSError, IOError):
            actions[inst.LINK].append(dist)
        finally:
            if not extracted_in:
                # Remove the dummy data
                try:
                    install.rm_rf(fetched_dist)
                except (OSError, IOError):
                    pass
    return actions
# Variant passing action arguments as tuples (feature/instruction-arguments
# branch). The source fragment was wrapped in unresolved merge-conflict
# markers; the function head and the loop tail below are assumptions,
# filled in by analogy with the variants above.
def force_linked_actions(dists, index, prefix):
    actions = defaultdict(list)
    actions[inst.PREFIX] = (prefix,)
    actions['op_order'] = (inst.RM_FETCHED, inst.FETCH, inst.RM_EXTRACTED,
                           inst.EXTRACT, inst.UNLINK, inst.LINK)
    for dist in dists:
        fn = dist + '.tar.bz2'
        pkg_path = join(config.pkgs_dirs[0], fn)
        if isfile(pkg_path):
            try:
                if md5_file(pkg_path) != index[fn]['md5']:
                    actions[inst.RM_FETCHED].append((dist,))
                    actions[inst.FETCH].append((dist,))
            except KeyError:
                sys.stderr.write('Warning: cannot lookup MD5 of: %s' % fn)
        else:
            actions[inst.FETCH].append((dist,))
        actions[inst.RM_EXTRACTED].append((dist,))
        actions[inst.EXTRACT].append((dist,))
        if isfile(join(prefix, 'conda-meta', dist + '.json')):
            actions[inst.UNLINK].append((dist,))
        actions[inst.LINK].append((dist,))
    return actions
def file_info(path):
    return {'size': getsize(path),
            'md5': md5_file(path),
            'mtime': getmtime(path)}
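# What `file_info` returns for a package tarball (file name and field values
# schematic, not real data):
#
#   >>> file_info('zlib-1.2.8-0.tar.bz2')
#   {'size': <bytes>, 'md5': '<32-char hex digest>', 'mtime': <epoch seconds>}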
def update_index(dir_path, verbose=False, force=False, check_md5=False,
                 remove=True):
    """
    Update all index files in dir_path with changed packages.

    :param verbose: Should detailed status messages be output?
    :type verbose: bool
    :param force: Whether to re-index all packages (including those that
                  haven't changed) or not.
    :type force: bool
    :param check_md5: Whether to check MD5s instead of mtimes for
                      determining if a package changed.
    :type check_md5: bool
    """
    if verbose:
        print("updating index in:", dir_path)
    index_path = join(dir_path, '.index.json')
    if force:
        index = {}
    else:
        try:
            mode_dict = ({'mode': 'r', 'encoding': 'utf-8'} if PY3
                         else {'mode': 'rb'})
            with open(index_path, **mode_dict) as fi:
                index = json.load(fi)
        except (IOError, ValueError):
            index = {}

    files = set(fn for fn in os.listdir(dir_path) if fn.endswith('.tar.bz2'))
    if any(fn.startswith('_license-') for fn in files):
        sys.exit("""\
Error: Indexing a copy of the Anaconda conda package channel is neither
necessary nor supported. If you wish to add your own packages, you can do
so by adding them to a separate channel.
""")
    for fn in files:
        path = join(dir_path, fn)
        if fn in index:
            if check_md5:
                if index[fn]['md5'] == md5_file(path):
                    continue
            elif index[fn]['mtime'] == getmtime(path):
                continue
        if verbose:
            print('updating:', fn)
        d = read_index_tar(path)
        d.update(file_info(path))
        index[fn] = d

    if remove:
        # remove files from the index which are not on disk
        for fn in set(index) - files:
            if verbose:
                print("removing:", fn)
            del index[fn]

    # Deal with Python 2 and 3's different json module type reqs
    mode_dict = {'mode': 'w', 'encoding': 'utf-8'} if PY3 else {'mode': 'wb'}
    with open(index_path, **mode_dict) as fo:
        json.dump(index, fo, indent=2, sort_keys=True, default=str)

    # --- new repodata
    for fn in index:
        info = index[fn]
        for varname in 'arch', 'platform', 'mtime', 'ucs':
            try:
                del info[varname]
            except KeyError:
                pass
        if 'requires' in info and 'depends' not in info:
            info['depends'] = info['requires']

    repodata = {'packages': index, 'info': {}}
    write_repodata(repodata, dir_path)
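# `write_repodata` is not shown in this section. A minimal sketch, assuming
# it serializes the repodata dict to repodata.json (plus a bz2-compressed
# copy, which conda clients fetch by preference) next to the packages:
import bz2
import json
from os.path import join

def write_repodata(repodata, dir_path):
    data = json.dumps(repodata, indent=2, sort_keys=True)
    with open(join(dir_path, 'repodata.json'), 'w') as fo:
        fo.write(data)
    with open(join(dir_path, 'repodata.json.bz2'), 'wb') as fo:
        fo.write(bz2.compress(data.encode('utf-8')))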
def explicit(specs, prefix, verbose=False, force_extract=True,
             fetch_args=None):
    actions = defaultdict(list)
    actions['PREFIX'] = prefix
    actions['op_order'] = (RM_FETCHED, FETCH, RM_EXTRACTED, EXTRACT,
                           UNLINK, LINK)
    linked = {install.name_dist(dist): dist
              for dist in install.linked(prefix)}
    fetch_args = fetch_args or {}
    index = {}
    verifies = []
    channels = {}
    for spec in specs:
        if spec == '@EXPLICIT':
            continue

        # Format: (url|path)(:#md5)?
        m = url_pat.match(spec)
        if m is None:
            sys.exit('Could not parse explicit URL: %s' % spec)
        url, md5 = m.group('url') + '/' + m.group('fn'), m.group('md5')
        if not is_url(url):
            if not isfile(url):
                sys.exit('Error: file not found: %s' % url)
            url = utils.url_path(url)
        url_p, fn = url.rsplit('/', 1)

        # See if the URL refers to a package in our cache; note that this
        # rebinding of `prefix` shadows the function argument, which has
        # already been captured in actions['PREFIX'] above
        prefix = pkg_path = dir_path = None
        if url_p.startswith('file://'):
            prefix = install.cached_url(url)

        # If not, determine the channel name from the URL
        if prefix is None:
            _, schannel = url_channel(url)
            prefix = '' if schannel == 'defaults' else schannel + '::'
        fn = prefix + fn
        dist = fn[:-8]

        pkg_path = install.is_fetched(dist)
        dir_path = install.is_extracted(dist)

        # Don't re-fetch unless there is an MD5 mismatch
        if pkg_path and (md5 and md5_file(pkg_path) != md5):
            # This removes any extracted copies as well
            actions[RM_FETCHED].append(dist)
            pkg_path = dir_path = None

        # Don't re-extract unless forced, or if we can't check the md5
        if dir_path and (force_extract or md5 and not pkg_path):
            actions[RM_EXTRACTED].append(dist)
            dir_path = None

        if not dir_path:
            if not pkg_path:
                _, conflict = install.find_new_location(dist)
                if conflict:
                    actions[RM_FETCHED].append(conflict)
                actions[FETCH].append(dist)
                if md5:
                    # Need to verify against the package index
                    verifies.append((dist + '.tar.bz2', md5))
                    channels[url_p + '/'] = (schannel, 0)
            actions[EXTRACT].append(dist)

        # unlink any installed package with that name
        name = install.name_dist(dist)
        if name in linked:
            actions[UNLINK].append(linked[name])
        actions[LINK].append(dist)

    # Finish the MD5 verification
    if verifies:
        index = fetch_index(channels, **fetch_args)
        for fn, md5 in verifies:
            info = index.get(fn)
            if info is None:
                sys.exit("Error: no package '%s' in index" % fn)
            if 'md5' not in info:
                sys.stderr.write('Warning: cannot lookup MD5 of: %s' % fn)
            if info['md5'] != md5:
                sys.exit('MD5 mismatch for: %s\n spec: %s\n repo: %s'
                         % (fn, md5, info['md5']))

    execute_actions(actions, index=index, verbose=verbose)
    return actions
def explicit(specs, prefix, verbose=False, force_extract=True,
             fetch_args=None):
    actions = defaultdict(list)
    actions['PREFIX'] = prefix
    actions['op_order'] = (RM_FETCHED, FETCH, RM_EXTRACTED, EXTRACT,
                           UNLINK, LINK)
    linked = {install.name_dist(dist): dist
              for dist in install.linked(prefix)}
    fetch_args = fetch_args or {}
    index = {}
    verifies = []
    channels = {}
    for spec in specs:
        if spec == '@EXPLICIT':
            continue

        # Format: (url|path)(:#md5)?
        m = url_pat.match(spec)
        if m is None:
            sys.exit('Could not parse explicit URL: %s' % spec)
        url, md5 = m.group('url') + '/' + m.group('fn'), m.group('md5')
        if not is_url(url):
            if not isfile(url):
                sys.exit('Error: file not found: %s' % url)
            url = utils.url_path(url)
        url_p, fn = url.rsplit('/', 1)

        # See if the URL refers to a package in our cache
        prefix = pkg_path = dir_path = None
        if url_p.startswith('file://'):
            prefix = install.cached_url(url)

        # If not, determine the channel name from the URL
        if prefix is None:
            _, schannel = url_channel(url)
            prefix = '' if schannel == 'defaults' else schannel + '::'
        fn = prefix + fn
        dist = fn[:-8]

        pkg_path = install.is_fetched(dist)
        dir_path = install.is_extracted(dist)

        # Don't re-fetch unless there is an MD5 mismatch
        if pkg_path and (md5 and md5_file(pkg_path) != md5):
            # This removes any extracted copies as well
            actions[RM_FETCHED].append(dist)
            pkg_path = dir_path = None

        # Don't re-extract unless forced, or if we can't check the md5
        if dir_path and (force_extract or md5 and not pkg_path):
            actions[RM_EXTRACTED].append(dist)
            dir_path = None

        if not dir_path:
            if not pkg_path:
                _, conflict = install.find_new_location(dist)
                if conflict:
                    actions[RM_FETCHED].append(conflict)
                channels[url_p + '/'] = (schannel, 0)
                actions[FETCH].append(dist)
                verifies.append((dist + '.tar.bz2', md5))
            actions[EXTRACT].append(dist)

        # unlink any installed package with that name
        name = install.name_dist(dist)
        if name in linked:
            actions[UNLINK].append(linked[name])
        actions[LINK].append(dist)

    # Pull the repodata for channels we are using
    if channels:
        index.update(fetch_index(channels, **fetch_args))

    # Finish the MD5 verification
    for fn, md5 in verifies:
        info = index.get(fn)
        if info is None:
            sys.exit("Error: no package '%s' in index" % fn)
        if md5 and 'md5' not in info:
            sys.stderr.write('Warning: cannot lookup MD5 of: %s' % fn)
        if md5 and info['md5'] != md5:
            sys.exit('MD5 mismatch for: %s\n spec: %s\n repo: %s'
                     % (fn, md5, info['md5']))

    execute_actions(actions, index=index, verbose=verbose)
    return actions
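# Hypothetical usage of `explicit`: the spec list mirrors what an @EXPLICIT
# lock file contains, one package URL per line with an optional #md5 suffix.
# The channel URLs, env path, and md5 value here are illustrative only.
specs = [
    '@EXPLICIT',
    'https://repo.example.com/free/linux-64/zlib-1.2.8-0.tar.bz2',
    'https://repo.example.com/free/linux-64/'
    'python-2.7.11-0.tar.bz2#0123456789abcdef0123456789abcdef',
]
explicit(specs, '/opt/envs/myenv', verbose=True)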
def file_info(path):
    return {"size": getsize(path),
            "md5": md5_file(path),
            "mtime": getmtime(path)}
def ensure_linked_actions(dists, prefix, index=None, force=False,
                          always_copy=False):
    actions = defaultdict(list)
    actions[inst.PREFIX] = prefix
    actions['op_order'] = (inst.RM_FETCHED, inst.FETCH, inst.RM_EXTRACTED,
                           inst.EXTRACT, inst.UNLINK, inst.LINK)
    for dist in dists:
        fetched_in = install.is_fetched(dist)
        extracted_in = install.is_extracted(dist)

        if fetched_in and index is not None:
            # Test the MD5, and possibly re-fetch
            fn = dist + '.tar.bz2'
            try:
                if md5_file(fetched_in) != index[fn]['md5']:
                    # RM_FETCHED now removes the extracted data too
                    actions[inst.RM_FETCHED].append(dist)
                    # Re-fetch, re-extract, re-link
                    fetched_in = extracted_in = None
                    force = True
            except KeyError:
                sys.stderr.write('Warning: cannot lookup MD5 of: %s' % fn)

        if not force and install.is_linked(prefix, dist):
            continue

        if extracted_in and force:
            # Always re-extract in the force case
            actions[inst.RM_EXTRACTED].append(dist)
            extracted_in = None

        # Otherwise we need to extract, and possibly fetch
        if not extracted_in and not fetched_in:
            # If there is a cache conflict, clean it up
            fetched_in, conflict = install.find_new_location(dist)
            if conflict is not None:
                actions[inst.RM_FETCHED].append(conflict)
            actions[inst.FETCH].append(dist)

        if not extracted_in:
            actions[inst.EXTRACT].append(dist)

        fetched_dist = extracted_in or fetched_in[:-8]
        fetched_dir = dirname(fetched_dist)
        try:
            # Determine what kind of linking is necessary
            if not extracted_in:
                # If not already extracted, create some dummy
                # data to test with
                install.rm_rf(fetched_dist)
                ppath = join(fetched_dist, 'info')
                os.makedirs(ppath)
                index_json = join(ppath, 'index.json')
                with open(index_json, 'w'):
                    pass
            if config.always_copy or always_copy:
                lt = install.LINK_COPY
            elif install.try_hard_link(fetched_dir, prefix, dist):
                lt = install.LINK_HARD
            elif config.allow_softlinks and sys.platform != 'win32':
                lt = install.LINK_SOFT
            else:
                lt = install.LINK_COPY
            actions[inst.LINK].append('%s %d' % (dist, lt))
        except (OSError, IOError):
            actions[inst.LINK].append(dist)
        finally:
            if not extracted_in:
                # Remove the dummy data
                try:
                    install.rm_rf(fetched_dist)
                except (OSError, IOError):
                    pass
    return actions
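# Hypothetical usage of ensure_linked_actions: plan the work needed to link
# two dists into an environment, re-fetching anything whose cached MD5 no
# longer matches the channel index. Dist names, the env path, and the
# `channels` argument are illustrative only.
index = fetch_index(channels)
actions = ensure_linked_actions(['zlib-1.2.8-0', 'python-2.7.11-0'],
                                '/opt/envs/myenv', index=index)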