def pkg_commands(download_dir, dists, py_version, keep_pkgs):
    vs_dists = find_vs_runtimes(dists, py_version)
    print("MSVC runtimes found: %s" % ([filename_dist(d) for d in vs_dists]))
    if len(vs_dists) != 1:
        sys.exit("Error: number of MSVC runtimes found: %d" % len(vs_dists))

    for n, dist in enumerate(vs_dists + dists):
        fn = filename_dist(dist)
        yield ''
        yield '# --> %s <--' % fn
        yield 'File %s' % str_esc(join(download_dir, fn))
        yield r'untgz::extract -d "$INSTDIR" -zbz2 "$INSTDIR\pkgs\%s"' % fn
        if n == 0:
            # only extract MSVC runtimes first, so that Python can be used
            # by _nsis postpkg
            assert 'runtime' in fn
            continue
        if n == 1:
            assert fn.startswith('python-')
        cmd = r'"$INSTDIR\pythonw.exe" -E -s "$INSTDIR\pkgs\.install.py"'
        yield "ExecWait '%s'" % cmd
        if keep_pkgs:
            continue
        yield r'Delete "$INSTDIR\pkgs\%s"' % fn

    if not keep_pkgs:
        yield ''
        yield r'RMDir "$INSTDIR\pkgs"'
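# For illustration only (made-up filename and download path, not part of the
# module): for the python package with keep_pkgs=False, the generator above
# yields NSIS lines roughly like
#
#   # --> python-3.6.5-0.tar.bz2 <--
#   File "C:\downloads\python-3.6.5-0.tar.bz2"
#   untgz::extract -d "$INSTDIR" -zbz2 "$INSTDIR\pkgs\python-3.6.5-0.tar.bz2"
#   ExecWait '"$INSTDIR\pythonw.exe" -E -s "$INSTDIR\pkgs\.install.py"'
#   Delete "$INSTDIR\pkgs\python-3.6.5-0.tar.bz2"
#
# make_nsi() joins these lines and splices them into the main.nsi template
# via the @PKG_COMMANDS@ placeholder.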
def create(info):
    tmp_dir = tempfile.mkdtemp()
    preconda.write_files(info, tmp_dir)

    tarball = join(tmp_dir, 'tmp.tar')
    t = tarfile.open(tarball, 'w')
    if 'license_file' in info:
        t.add(info['license_file'], 'LICENSE.txt')
    for dist in preconda.files:
        fn = filename_dist(dist)
        t.add(join(tmp_dir, fn), 'pkgs/' + fn)
    for dist in info['_dists']:
        fn = filename_dist(dist)
        t.add(join(info['_download_dir'], fn), 'pkgs/' + fn)
    for key in 'pre_install', 'post_install':
        if key in info:
            t.add(info[key], 'pkgs/%s.sh' % key)
    for fn in os.listdir(info['_pip_download_dir']):
        t.add(join(info['_pip_download_dir'], fn), 'pip/' + fn)
    t.close()

    header = get_header(tarball, info)
    shar_path = info['_outpath']
    with open(shar_path, 'wb') as fo:
        fo.write(header.encode('utf-8'))
        with open(tarball, 'rb') as fi:
            while True:
                chunk = fi.read(262144)
                if not chunk:
                    break
                fo.write(chunk)
    os.unlink(tarball)
    os.chmod(shar_path, 0o755)
    shutil.rmtree(tmp_dir)
def get_header(tarball, info):
    name = info['name']
    dists = [filename_dist(dist)[:-8] for dist in info['_dists']]
    dist0 = dists[0]
    assert name_dist(dist0) == 'python'

    has_license = bool('license_file' in info)
    ppd = ns_platform(info['_platform'])
    ppd['keep_pkgs'] = bool(info.get('keep_pkgs'))
    ppd['has_license'] = has_license
    for key in 'pre_install', 'post_install':
        ppd['has_%s' % key] = bool(key in info)

    install_lines = ['install_dist %s' % d for d in dists]
    install_lines.extend(add_condarc(info))
    # Needs to happen first -- can be templated
    replace = {
        'NAME': name,
        'name': name.lower(),
        'VERSION': info['version'],
        'PLAT': info['_platform'],
        'DIST0': dist0,
        'DEFAULT_PREFIX': info.get('default_prefix',
                                   '$HOME/%s' % name.lower()),
        'MD5': md5_file(tarball),
        'INSTALL_COMMANDS': '\n'.join(install_lines),
        'pycache': '__pycache__',
    }
    if has_license:
        replace['LICENSE'] = read_ascii_only(info['license_file'])

    data = read_header_template()
    data = preprocess(data, ppd)
    data = fill_template(data, replace)

    n = data.count('\n')
    data = data.replace('@LINES@', str(n + 1))

    # note that this replacement does not change the size of the header
    # ('@SIZE_BYTES@' and the '%12d'-formatted number are both 12 characters
    # wide), which would otherwise make the embedded byte count inconsistent
    # with the actual installer size
    n = len(data) + getsize(tarball)
    data = data.replace('@SIZE_BYTES@', '%12d' % n)
    assert len(data) + getsize(tarball) == n

    return data
def fetch(info, use_conda):
    # always use the libconda fetch_index function here since no
    # channel priority is needed
    from libconda.fetch import fetch_index

    download_dir = info['_download_dir']
    if not isdir(download_dir):
        os.makedirs(download_dir)

    # `dists`, `index`, `urls` and `md5s` are module-level globals that must
    # be populated elsewhere in this module before fetch() is called
    info['_urls'] = []
    for dist in dists:
        fn = filename_dist(dist)
        path = join(download_dir, fn)
        url = urls.get(dist)
        md5 = md5s.get(dist)
        if url:
            url_index = fetch_index((url,))
            try:
                pkginfo = url_index[dist]
            except KeyError:
                sys.exit("Error: no package '%s' in %s" % (dist, url))
        else:
            pkginfo = index[dist]

        if not pkginfo['channel'].endswith('/'):
            pkginfo['channel'] += '/'
        assert pkginfo['channel'].endswith('/')
        info['_urls'].append((pkginfo['channel'] + fn, pkginfo['md5']))

        if md5 and md5 != pkginfo['md5']:
            sys.exit("Error: MD5 sum for '%s' does not match in remote "
                     "repodata %s" % (fn, url))

        if isfile(path) and md5_file(path) == pkginfo['md5']:
            continue
        print('fetching: %s' % fn)
        if use_conda:
            from conda.exports import download as fetch_pkg
            pkg_url = pkginfo['channel'] + fn
            fetch_pkg(pkg_url, path)
        else:
            from libconda.fetch import fetch_pkg
            fetch_pkg(pkginfo, download_dir)
def _main(name, version, download_dir, platform, channel_urls=(), channels_remap=(), specs=(),
          exclude=(), menu_packages=(), ignore_duplicate_files=False, environment=None,
          environment_file=None, verbose=True, dry_run=False, conda_exe="conda.exe",
          transmute_file_type=''):
    # Add python to specs, since all installers need a python interpreter. In the future we'll
    # probably want to add conda too.
    specs = list(concatv(specs, ("python",)))
    if verbose:
        print("specs: %r" % specs)

    # Append channels_remap srcs to channel_urls
    channel_urls = tuple(concatv(
        channel_urls,
        (x['src'] for x in channels_remap),
    ))

    if environment_file or environment:
        # set conda to be the user's conda (what is in the environment)
        # for purposes of getting & building environments, rather
        # than the standalone conda (conda_exe). Fall back to the
        # standalone, if needed
        user_conda = os.environ.get('CONDA_EXE', '')
        if not user_conda:
            if cc_platform == platform:
                user_conda = conda_exe
            else:
                sys.exit("CONDA_EXE env variable is empty. Need to activate a conda env.")

    # make the environment, if needed
    if environment_file:
        from subprocess import check_call
        environment = tempfile.mkdtemp()
        new_env = os.environ.copy()
        new_env["CONDA_SUBDIR"] = platform
        check_call([user_conda, "env", "create", "--file", environment_file,
                    "--prefix", environment], universal_newlines=True, env=new_env)

    # obtain the package records
    if environment:
        precs = _precs_from_environment(environment, download_dir, user_conda)
    else:
        solver = Solver(
            # The Solver class doesn't do well with `None` as a prefix right now
            prefix="/constructor/no-environment",
            channels=channel_urls,
            subdirs=(platform, "noarch"),
            specs_to_add=specs,
        )
        precs = list(solver.solve_final_state())

    # move python first
    python_prec = next(prec for prec in precs if prec.name == "python")
    precs.remove(python_prec)
    precs.insert(0, python_prec)

    warn_menu_packages_missing(precs, menu_packages)
    check_duplicates(precs)
    precs = exclude_packages(precs, exclude)

    if verbose:
        more_recent_versions = _find_out_of_date_precs(precs, channel_urls, platform)
        _show(name, version, platform, download_dir, precs, more_recent_versions)

    if dry_run:
        return None, None, None, None, None

    pc_recs = _fetch(download_dir, precs)
    # Constructor cache directory can have multiple packages from different
    # installer creations. Keep only the ones the solver picked.
    precs_fns = [x.fn for x in precs]
    pc_recs = [x for x in pc_recs if x.fn in precs_fns]
    _urls = [(pc_rec.url, pc_rec.md5) for pc_rec in pc_recs]
    has_conda = any(pc_rec.name == 'conda' for pc_rec in pc_recs)

    approx_tarballs_size, approx_pkgs_size = check_duplicates_files(
        pc_recs, platform, ignore_duplicate_files
    )

    dists = list(prec.fn for prec in precs)

    if transmute_file_type != '':
        new_dists = []
        import conda_package_handling.api
        for dist in dists:
            if dist.endswith(transmute_file_type):
                new_dists.append(dist)
            elif dist.endswith(".tar.bz2"):
                dist = filename_dist(dist)
                new_file_name = "%s%s" % (dist[:-8], transmute_file_type)
                new_dists.append(new_file_name)
                new_file_name = os.path.join(download_dir, new_file_name)
                if os.path.exists(new_file_name):
                    continue
                print("transmuting %s" % dist)
                conda_package_handling.api.transmute(
                    os.path.join(download_dir, dist), transmute_file_type,
                    out_folder=download_dir)
            else:
                new_dists.append(dist)
        dists = new_dists

    if environment_file:
        import shutil
        shutil.rmtree(environment)

    return _urls, dists, approx_tarballs_size, approx_pkgs_size, has_conda
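# Illustrative call (a sketch with made-up arguments; not executed here):
# resolve and display the package set for a win-64 installer without
# downloading or transmuting anything, by passing dry_run=True.
#
#     _main("Example", "1.0", "./pkg-cache", "win-64",
#           channel_urls=("https://repo.anaconda.com/pkgs/main",),
#           specs=("numpy",), dry_run=True)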
def make_nsi(info, dir_path):
    "Creates the tmp/main.nsi from the template file"

    name = info['name']
    download_dir = info['_download_dir']
    dists = info['_dists']
    py_name, py_version, unused_build = filename_dist(dists[0]).rsplit('-', 2)
    assert py_name == 'python'
    arch = int(info['_platform'].split('-')[1])

    # these appear as __<key>__ in the template, and get escaped
    replace = {
        'NAME': name,
        'VERSION': info['version'],
        'VIPV': make_VIProductVersion(info['version']),
        'COMPANY': info.get('company', 'Unknown, Inc.'),
        'ARCH': '%d-bit' % arch,
        'PY_VER': py_version[:3],
        'PYVERSION': py_version,
        'PYVERSION_JUSTDIGITS': ''.join(py_version.split('.')),
        'OUTFILE': abspath(info['_outpath']),
        'LICENSEFILE': abspath(
            info.get('license_file', join(NSIS_DIR, 'placeholder_license.txt'))),
        'DEFAULT_PREFIX': info.get('default_prefix',
                                   join('%LOCALAPPDATA%', 'Continuum', name.lower())),
    }
    for key, fn in [('HEADERIMAGE', 'header.bmp'),
                    ('WELCOMEIMAGE', 'welcome.bmp'),
                    ('ICONFILE', 'icon.ico'),
                    ('INSTALL_PY', '.install.py'),
                    ('URLS_FILE', 'urls'),
                    ('URLS_TXT_FILE', 'urls.txt'),
                    ('POST_INSTALL', 'post_install.bat')]:
        replace[key] = join(dir_path, fn)
    for key in replace:
        replace[key] = str_esc(replace[key])

    data = read_nsi_tmpl()
    data = preprocess(data, ns_platform(info['_platform']))
    data = fill_template(data, replace)

    cmds = pkg_commands(download_dir, dists, py_version,
                        bool(info.get('keep_pkgs')))

    # these are unescaped (and unquoted)
    for key, value in [
        ('@NAME@', name),
        ('@NSIS_DIR@', NSIS_DIR),
        ('@BITS@', str(arch)),
        ('@PKG_COMMANDS@', '\n '.join(cmds)),
        ('@MENU_PKGS@', ' '.join(info.get('menu_packages', []))),
        ('@PIP_DOWNLOAD_DIR@', info['_pip_download_dir']),
    ]:
        data = data.replace(key, value)

    nsi_path = join(dir_path, 'main.nsi')
    with open(nsi_path, 'w') as fo:
        fo.write(data)

    # Copy all the NSIS header files (*.nsh)
    for fn in os.listdir(NSIS_DIR):
        if fn.endswith('.nsh'):
            shutil.copy(join(NSIS_DIR, fn), join(dir_path, fn))

    print('Created %s file' % nsi_path)
    return nsi_path
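# A possible follow-up step (a sketch, not part of this module; it assumes the
# NSIS compiler `makensis` is installed and on PATH):
#
#     import subprocess
#     nsi_path = make_nsi(info, dir_path)
#     subprocess.check_call(['makensis', nsi_path])
#
# which compiles the generated main.nsi into the final .exe installer.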