def main(info, verbose=True):
    if 'channels' in info:
        global index
        index = fetch_index(
            tuple('%s/%s/' % (url.rstrip('/'), platform)
                  for url in info['channels']
                  for platform in (info['_platform'], 'noarch')))

    if 'specs' in info:
        resolve(info, verbose)
    exclude_packages(info)
    if 'packages' in info:
        handle_packages(info)

    if not info.get('install_in_dependency_order'):
        dists.sort()
    move_python_first()

    all_names = set(name_dist(fn) for fn in dists)
    for name in info.get('menu_packages', []):
        if name not in all_names:
            sys.exit("Error: no such package (in menu_packages): %s" % name)

    if verbose:
        show(info)
    check_dists()
    fetch(info)
    info['_dists'] = list(dists)
def find_vs_runtimes(dists, py_version):
    vs_map = {'2.7': 'vs2008_runtime',
              '3.4': 'vs2010_runtime',
              '3.5': 'vs2015_runtime'}
    vs_runtime = vs_map.get(py_version[:3])
    return [dist for dist in dists
            if name_dist(dist) in (vs_runtime, 'msvc_runtime')]
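# Hedged usage sketch: the filenames below are invented for illustration
# only, and name_dist is assumed to strip the trailing version/build fields
# as in the test further down (e.g. 'vs2015_runtime-14.0.25123-0.tar.bz2'
# -> 'vs2015_runtime').
example_dists = ['python-3.5.2-0.tar.bz2',
                 'vs2015_runtime-14.0.25123-0.tar.bz2',
                 'numpy-1.11.1-py35_0.tar.bz2']
# A 3.5 build should pick up only the matching VS runtime package.
assert find_vs_runtimes(example_dists, '3.5.2') == \
    ['vs2015_runtime-14.0.25123-0.tar.bz2']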
def check_duplicates():
    map_name = defaultdict(list)  # map package name to list of filenames
    for fn in dists:
        map_name[name_dist(fn)].append(fn)

    for name, files in map_name.items():
        if len(files) > 1:
            sys.exit("Error: '%s' listed multiple times: %s" %
                     (name, ', '.join(files)))
def exclude_packages(info):
    check_duplicates()
    for name in info.get('exclude', []):
        for bad_char in ' =<>*':
            if bad_char in name:
                sys.exit("Error: did not expect '%s' in package name: %s" %
                         (bad_char, name))
        # find the package with name, and remove it
        for dist in list(dists):
            if name_dist(dist) == name:
                dists.remove(dist)
                break
        else:
            sys.exit("Error: no package named '%s' to remove" % name)
def get_header(tarball, info):
    name = info['name']
    dists = [fn[:-8] for fn in info['_dists']]
    dist0 = dists[0]
    assert name_dist(dist0) == 'python'

    has_license = bool('license_file' in info)
    ppd = ns_platform(info['_platform'])
    ppd['keep_pkgs'] = bool(info.get('keep_pkgs'))
    ppd['has_license'] = has_license
    for key in 'pre_install', 'post_install':
        ppd['has_%s' % key] = bool(key in info)

    install_lines = ['install_dist %s' % d for d in dists]
    install_lines.extend(add_condarc(info))
    # Needs to happen first -- can be templated
    replace = {
        'NAME': name,
        'name': name.lower(),
        'VERSION': info['version'],
        'PLAT': info['_platform'],
        'DIST0': dist0,
        'DEFAULT_PREFIX': info.get('default_prefix',
                                   '$HOME/%s' % name.lower()),
        'MD5': md5_file(tarball),
        'INSTALL_COMMANDS': '\n'.join(install_lines),
        'pycache': '__pycache__',
    }
    if has_license:
        replace['LICENSE'] = read_ascii_only(info['license_file'])

    data = read_header_template()
    data = preprocess(data, ppd)
    data = fill_template(data, replace)

    n = data.count('\n')
    data = data.replace('@LINES@', str(n + 1))

    # note that this replacement does not change the size of the header,
    # which would otherwise result in an inconsistency
    n = len(data) + getsize(tarball)
    data = data.replace('@SIZE_BYTES@', '%12d' % n)
    assert len(data) + getsize(tarball) == n

    return data
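# Why the '%12d' formatting above is safe (illustrative check, not part of
# the original function): the '@SIZE_BYTES@' placeholder is exactly 12
# characters long, and '%12d' pads the total to 12 characters as well, so
# the substitution leaves len(data) unchanged and the final assert holds.
assert len('@SIZE_BYTES@') == 12
assert len('%12d' % (5 * 10 ** 8)) == 12   # e.g. a ~500 MB installer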
def resolve(info, verbose=False):
    if not index:
        sys.exit("Error: index is empty, maybe 'channels' are missing?")
    specs = info['specs']
    r = Resolve(index)
    if not any(s.split()[0] == 'python' for s in specs):
        specs.append('python')
    if verbose:
        print("specs: %r" % specs)

    try:
        res = list(r.solve(specs))
    except NoPackagesFound as e:
        sys.exit("Error: %s" % e)
    sys.stdout.write('\n')

    if 'install_in_dependency_order' in info:
        sort_info = {name_dist(d): d[:-8] for d in res}
        dists.extend(d + '.tar.bz2' for d in r.graph_sort(sort_info))
    else:
        dists.extend(res)
def main(info, verbose=True, dry_run=False, use_conda=False):
    if 'channels' in info:
        global index
        if use_conda:
            from conda.models.channel import prioritize_channels
            from conda.exports import fetch_index
            channels = tuple('%s/%s/' % (url.rstrip('/'), platform)
                             for url in info['channels']
                             for platform in (info['_platform'], 'noarch'))
            index = fetch_index(prioritize_channels(channels))
        else:
            from libconda.fetch import fetch_index
            index = fetch_index(
                tuple('%s/%s/' % (url.rstrip('/'), platform)
                      for url in info['channels']
                      for platform in (info['_platform'], 'noarch')))

    if 'specs' in info:
        resolve(info, verbose, use_conda)
    exclude_packages(info)
    if 'packages' in info:
        handle_packages(info)

    if not info.get('install_in_dependency_order'):
        dists.sort()
    move_python_first()

    all_names = set(name_dist(fn) for fn in dists)
    for name in info.get('menu_packages', []):
        if name not in all_names:
            print("WARNING: no such package (in menu_packages): %s" % name)

    if verbose:
        show(info)
    check_dists()
    if dry_run:
        return
    fetch(info, use_conda)
    info['_dists'] = list(dists)
def create(info, verbose=False):
    global CACHE_DIR, PACKAGE_ROOT, PACKAGES_DIR

    CACHE_DIR = info['_download_dir']
    PACKAGE_ROOT = join(CACHE_DIR, "package_root")
    PACKAGES_DIR = join(CACHE_DIR, "built_pkgs")

    # See http://stackoverflow.com/a/11487658/161801 for how all this works.
    prefix = join(PACKAGE_ROOT, info['name'].lower())

    fresh_dir(PACKAGES_DIR)
    fresh_dir(PACKAGE_ROOT)
    pkgs_dir = join(prefix, 'pkgs')
    os.makedirs(pkgs_dir)
    preconda.write_files(info, pkgs_dir)
    # TODO: Refactor code such that the argument to preconda.write_files is
    # /path/to/base/env, so that such workarounds are not required.
    shutil.move(join(pkgs_dir, 'conda-meta'), prefix)
    pkgbuild('preconda')

    for dist in info['_dists']:
        if isinstance(dist, str if version_info[0] >= 3 else basestring):
            fn = dist
            dname = dist[:-8]
            ndist = name_dist(fn)
        else:
            fn = dist.fn
            dname = dist.dist_name
            ndist = dist.name
        fresh_dir(PACKAGE_ROOT)
        if bool(info.get('attempt_hardlinks')):
            t = tarfile.open(join(CACHE_DIR, fn), 'r:bz2')
            os.makedirs(join(pkgs_dir, dname))
            t.extractall(join(pkgs_dir, dname))
            t.close()
        else:
            t = tarfile.open(join(CACHE_DIR, fn), 'r:bz2')
            t.extractall(prefix)
            t.close()
            os.rename(join(prefix, 'info'), join(prefix, 'info-tmp'))
            os.mkdir(join(prefix, 'info'))
            os.rename(join(prefix, 'info-tmp'),
                      join(prefix, 'info', fn[:-8]))
        pkgbuild(ndist)

    # Create special preinstall and postinstall packages to check if Anaconda
    # is already installed, build Anaconda, and to update the shell profile.

    # First script
    pkgbuild_script('postextract', info, 'post_extract.sh')

    # Next, the script to edit bashrc with the PATH.  This is separate so it
    # can be disabled.
    pkgbuild_script('pathupdate', info, 'update_path.sh')

    # Next, the script to be run before everything, which checks if Anaconda
    # is already installed.
    pkgbuild_script('apreinstall', info, 'preinstall.sh', 'preinstall')

    # Now build the final package
    names = ['apreinstall', 'preconda']
    names.extend(name_dist(dist) for dist in info['_dists'])
    names.extend(['postextract', 'pathupdate'])

    xml_path = join(PACKAGES_DIR, 'distribution.xml')
    args = ["productbuild", "--synthesize"]
    for name in names:
        args.extend(['--package', join(PACKAGES_DIR, "%s.pkg" % name)])
    args.append(xml_path)
    check_call(args)

    modify_xml(xml_path, info)

    check_call([
        "productbuild",
        "--distribution", xml_path,
        "--package-path", PACKAGES_DIR,
        "--identifier", info['name'],
        "tmp.pkg",
    ])

    identity_name = info.get('signing_identity_name')
    if identity_name:
        check_call([
            'productsign', '--sign', identity_name,
            "tmp.pkg",
            info['_outpath'],
        ])
        os.unlink("tmp.pkg")
    else:
        os.rename('tmp.pkg', info['_outpath'])

    print("done")
def check_dists():
    if len(dists) == 0:
        sys.exit('Error: no packages specified')
    check_duplicates()
    assert name_dist(dists[0]) == 'python'
def move_python_first():
    for dist in list(dists):
        if name_dist(dist) == 'python':
            dists.remove(dist)
            dists.insert(0, dist)
            return
def test_name_dist(self):
    self.assertEqual(name_dist('pip-7.1-py27_0'), 'pip')
    self.assertEqual(name_dist('conda-build-1.21.6-py35_0'), 'conda-build')
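# name_dist itself is not defined in this section; a minimal sketch that is
# consistent with the test above (and not necessarily the exact helper used
# elsewhere in the codebase) splits off the trailing version and build fields:
def name_dist(dist):
    # 'conda-build-1.21.6-py35_0' -> ['conda-build', '1.21.6', 'py35_0']
    # A trailing '.tar.bz2' stays attached to the build field, so filenames
    # still resolve to the bare package name.
    return dist.rsplit('-', 2)[0]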
def create(info):
    global CACHE_DIR, PACKAGE_ROOT, PACKAGES_DIR

    CACHE_DIR = info['_download_dir']
    PACKAGE_ROOT = join(CACHE_DIR, "package_root")
    PACKAGES_DIR = join(CACHE_DIR, "built_pkgs")

    # See http://stackoverflow.com/a/11487658/161801 for how all this works.
    prefix = join(PACKAGE_ROOT, info['name'].lower())

    fresh_dir(PACKAGES_DIR)
    fresh_dir(PACKAGE_ROOT)
    pkgs_dir = join(prefix, 'pkgs')
    os.makedirs(pkgs_dir)
    preconda.write_files(info, pkgs_dir)
    pkgbuild('preconda')

    for fn in info['_dists']:
        fresh_dir(PACKAGE_ROOT)
        t = tarfile.open(join(CACHE_DIR, fn), 'r:bz2')
        t.extractall(prefix)
        t.close()
        os.rename(join(prefix, 'info'), join(prefix, 'info-tmp'))
        os.mkdir(join(prefix, 'info'))
        os.rename(join(prefix, 'info-tmp'),
                  join(prefix, 'info', fn[:-8]))
        pkgbuild(name_dist(fn))

    # Create special preinstall and postinstall packages to check if Anaconda
    # is already installed, build Anaconda, and to update the shell profile.

    # First script
    pkgbuild_script('postextract', info, 'post_extract.sh')

    # Next, the script to edit bashrc with the PATH.  This is separate so it
    # can be disabled.
    pkgbuild_script('pathupdate', info, 'update_path.sh')

    # Next, the script to be run before everything, which checks if Anaconda
    # is already installed.
    pkgbuild_script('apreinstall', info, 'preinstall.sh', 'preinstall')

    # Now build the final package
    names = ['apreinstall', 'preconda']
    names.extend(name_dist(dist) for dist in info['_dists'])
    names.extend(['postextract', 'pathupdate'])

    xml_path = join(PACKAGES_DIR, 'distribution.xml')
    args = ["productbuild", "--synthesize"]
    for name in names:
        args.extend(['--package', join(PACKAGES_DIR, "%s.pkg" % name)])
    args.append(xml_path)
    check_call(args)

    modify_xml(xml_path, info)

    check_call([
        "productbuild",
        "--distribution", xml_path,
        "--package-path", PACKAGES_DIR,
        "--identifier", info['name'],
        info['_outpath'],
    ])