def lib_dir(self):
    """Return the path to Python's standard library directory.

    This is where the core Python modules (os, sys, logging, etc.)
    reside, alongside the site-packages/ directory.
    """
    if WIN:
        path = xjoin(self.root_dir, 'Lib')
    else:
        versioned = 'python{0}'.format(self.pyver)
        path = xjoin(self.root_dir, 'lib', versioned)
    return existing(path)
def python_exe(self):
    """Return the path to the Python interpreter executable.

    Note: we cannot use ``self.scripts_dir`` here, because on Windows
    the interpreter's location differs between a virtualenv and a
    regular installation.
    """
    if not WIN:
        return existing(xjoin(self.root_dir, 'bin', 'python'))
    # virtualenv creates the python exe inside the Scripts/ directory ...
    candidate = xjoin(self.root_dir, 'Scripts', 'Python.exe')
    if not os.path.exists(candidate):
        # ... but a normal Python installation has it in the root directory
        candidate = xjoin(self.root_dir, 'Python.exe')
    return existing(candidate)
def add_pep370_dir_on_users_consent():
    """If ~/.local/bin is not in $PATH, offer to add it (PEP 370).

    Do this only with the user's consent, and do not run this check more
    than once (i.e., run only when PyPM is *first run*); a marker file in
    the user cache directory suppresses subsequent prompts.
    """
    if sys.platform.startswith('win'):
        return

    # Proceed only when the terminal is interactive and never run before
    isatty = (sys.stdin.isatty() and sys.stdout.isatty())
    firstrun_file = join(application.locations.user_cache_dir,
                         '.firstrun-pep370')
    if (not isatty) or exists(firstrun_file):
        return

    pathenv = [xjoin(x.strip())
               for x in os.environ.get('PATH', '').split(':')]
    binpath = xjoin(site.USER_BASE, 'bin')
    profile = expanduser(
        '~/.profile' if sys.platform == 'darwin' else '~/.bashrc')
    profile_lines = [
        '# PEP 370 PATH added by PyPM on %s' % datetime.now(),
        'export PATH=%s:$PATH' % binpath,
    ]

    already_in_profile = False
    if exists(profile):
        # Use a context manager so the profile file handle is closed
        # (the original leaked it via open(...).readlines()).
        with open(profile) as f:
            already_in_profile = profile_lines[1] in [
                l.strip() for l in f.readlines()
            ]

    # Proceed only if ~/.local/bin is neither in $PATH, nor added to profile
    if binpath in pathenv or already_in_profile:
        return

    # Add to profile on the user's consent
    msg = ('It seems that "%s" is not in your $PATH which we highly '
           'recommend. According to PEP 370, this is where packages '
           'will install their script files for you to access later. '
           'Would you like PyPM to do this now (by appending to '
           'your "%s")?' % (binpath, profile))
    if askyesno(msg, default=True):
        if exists(profile):
            shutil.copy(profile, profile + '.bak')  # take a backup first
        with open(profile, 'a') as f:
            f.write('\n%s\n' % '\n'.join(profile_lines))
        print('You may now reopen your shell for the changes to take effect.')

    # Write the marker file to prevent future runs. The cache directory
    # may already exist (other features create files there too), so guard
    # os.makedirs -- the original called it unconditionally and would
    # crash with OSError (EEXIST) when the directory was already present.
    firstrun_dir = dirname(firstrun_file)
    if not exists(firstrun_dir):
        os.makedirs(firstrun_dir)
    with open(firstrun_file, 'w') as f:
        pass  # an empty marker file is all that is needed
def do_genidx(self, subcmd, opts):
    """${cmd_name}: Generate the index file for all available repositories

    ${cmd_usage}
    ${cmd_option_list}
    """
    with self.bootstrapped():
        # Archive this run's log records under <repo-set>/_logs so each
        # genidx invocation leaves an auditable trail.
        logsdir = xjoin(self.options.multi_repository_set_path, '_logs')
        with log.archivedby(logging.getLogger('pypm'), logsdir,
                            'repository_genidx', level=logging.INFO,
                            formatter=logging.Formatter(
                                '%(asctime)s %(message)s')):
            mreposet = MultiRepositorySet(
                self.options.multi_repository_set_path,
                self.options.configfile
            )
            with console.long_process(
                    LOG,
                    desc='genidx for: %s' % mreposet.path,
                    begin_info={}, final_info={}):
                for repo in mreposet:
                    # Honor the --platform filter: only repositories whose
                    # path mentions the requested platform are indexed.
                    # NOTE(review): this is a substring match on the path,
                    # not an exact platform comparison.
                    if opts.platform and opts.platform not in repo.path:
                        LOG.info('skipping %s', repo)
                        continue
                    LOG.info('')
                    LOG.info('generating index for repository: '
                             '{0.name}:{0.pyver}:{0.osarch}'.format(repo))
                    LOG.info('%s', repo.path)
                    # Delegate the actual index generation to
                    # RepositoryIndex.
                    idx = RepositoryIndex(repo)
                    idx.generate_indices()
def get_repository(self, pyver, osarch, autocreate=False):
    """Return the ``Repository`` for the given Python version and platform.

    When ``autocreate`` is true, the repository directory is created on
    disk if it does not already exist.
    """
    repo_path = xjoin(self.path, pyver, osarch)
    repo_url = '/'.join([self.url, pyver, osarch])
    if autocreate:
        # create, if does not already exist
        console.mkdirs(repo_path)
    return Repository(repo_path, self.name, pyver, osarch, repo_url)
def get_index(self):
    """Return an existing index as ``RepoPackageDatabase``.

    The on-disk index is gzip-compressed, so it is first extracted to a
    temporary location; the returned database is backed by that
    temporary copy, hence any attempts to "write" on the returned index
    database will be futile.
    """
    index_gz = xjoin(self.repository.path, 'index.gz')
    return RepoPackageDatabase(_ungzip(index_gz))
def _read_info_json(pypm_file):
    """Read the cached info.json from the ``<pypm_file>.d/`` directory.

    pypm.builder is responsible for writing ``info.json`` alongside the
    package; reading it from the tarball every time would be expensive.

    Returns the raw JSON text.
    """
    info_json_loc = xjoin(pypm_file + '.d', 'info.json')
    assert exists(info_json_loc), (
        'Missing %s. It is the responsibility of pypm.builder to create '
        'a `info.json` in the .d/ directory. We cannot afford to read it '
        'from the tarball everytime (expensive).') % info_json_loc
    # Use a context manager so the file handle is closed promptly
    # (the original leaked it via open(...).read()).
    with open(info_json_loc) as f:
        return f.read()
def __init__(self, cmd, timeout, stdout, stderr):
    """Compose the timeout message from the command context and its output."""
    details = [
        'timed out; ergo process is terminated',
        'seconds elapsed: {0}'.format(timeout),
        'command: {0}'.format(cmd),
        'pwd: {0}'.format(xjoin(os.getcwd())),
        'stderr:\n{0}'.format(stderr),
        'stdout:\n{0}'.format(stdout),
    ]
    super(TimeoutError, self).__init__('\n'.join(details))
def create(path, data_root, bpkg):
    """Create a package file at ``path`` containing files under ``data_root``

    Return the contents of info.json that was added to the package
    """
    assert type(bpkg) is BinaryPackage
    pkgroot = tempfile.mkdtemp('-pkgroot', 'pypm-')
    try:
        # generate info.json
        info_json = bpkg.to_json()

        # create the .tar.gz file (.pypm)
        compression.pack_contents(xjoin(pkgroot, 'data.tar.gz'), data_root)
        # Write info.json via a context manager so the handle is closed
        # (and the data flushed) before the staging dir is packed -- the
        # original leaked the handle via open(...).write().
        with open(xjoin(pkgroot, 'info.json'), 'w') as f:
            f.write(info_json)
        compression.pack_contents(path, pkgroot)
    finally:
        # Always remove the staging directory, even if packing raised;
        # the original leaked it on error.
        shutil.rmtree(pkgroot)
    return info_json
def _generate_index_gz(self):
    """Generate `index.gz` - the repository index

    index.gz is the compressed form of `index` which is nothing but a
    Sqlite database of succeeded packages available in the repository.
    See ``RepoPackageDatabase``

    Returns the number of packages written to the index.
    """
    assert exists(self.repository.path)
    idx_path = xjoin(self.repository.path, 'index')
    idx_gz_path = idx_path + '.gz'
    console.rm(idx_path)
    db = RepoPackageDatabase(idx_path, touch=True)

    # Tag BE packages; so client may use it to determine if a package is
    # available only to BE customers or not.
    pkgtags = 'be' if self.repository.name == 'be' else ''

    with closing(db):
        LOG.info('finding packages')
        packages = self.repository.find_packages()
        LOG.info('processing %d packages', len(packages))
        rpkg_list = [
            RepoPackage.create_from(
                BinaryPackage.from_json(self._read_info_json(pkgfile)),
                relpath=relpath(pkgfile, self.repository.path),
                tags=pkgtags)
            for pkgfile in ProgressBar.iterate(packages)
        ]

        # Optimize index size by removing the "description" field.
        # PyPI's descriptions are typically very long - see
        # http://pypi.python.org/pypi/zc.buildout for example - hence we
        # must remove them from the index.
        for rpkg in rpkg_list:
            rpkg.description = 'N/A'

        # keep only the latest version/pkg_version in index
        LOG.info('pruning older versions')
        rpkg_list = _prune_older_binary_releases(rpkg_list)

        LOG.info('writing index')
        with db.transaction() as session:
            session.add_all(rpkg_list)
            session.commit()
            session.close()

    # Typo fixed in the log message ('compresing' -> 'compressing').
    LOG.info('compressing index (%s)', idx_gz_path)
    console.rm(idx_gz_path)
    with closing(gzip.open(idx_gz_path, 'wb')) as f:
        # Read the uncompressed index via a context manager so the file
        # handle is closed promptly (the original leaked it via
        # open(...).read()).
        with open(idx_path, 'rb') as idx_file:
            f.write(idx_file.read())
    console.rm(idx_path)
    return len(rpkg_list)
def __init__(self, p, cmd, stdout, stderr):
    """Capture the process output and compose the failure message."""
    self.stdout = stdout
    self.stderr = stderr
    details = [
        'non-zero returncode: {0}'.format(p.returncode),
        'command: {0}'.format(cmd),
        'pwd: {0}'.format(xjoin(os.getcwd())),
        'stderr:\n{0}'.format(stderr),
        'stdout:\n{0}'.format(stdout),
    ]
    super(NonZeroReturnCode, self).__init__('\n'.join(details))
def get_abspath(self, relpath):
    """Resolve ``relpath`` against the per-user base directory."""
    resolved = xjoin(self.user_base_dir, relpath)
    return resolved
def get_abspath(self, relpath):
    """Resolve ``relpath`` against this environment's root directory."""
    resolved = xjoin(self.root_dir, relpath)
    return resolved
def scripts_dir(self):
    """Return the path to directory where scripts will be installed"""
    subdir = 'Scripts' if WIN else 'bin'
    return existing(xjoin(self.root_dir, subdir))
def site_packages_dir(self):
    """Return the path to this environment's site-packages directory."""
    path = xjoin(self.lib_dir, 'site-packages')
    return existing(path)
def get_local_index_path(self, remote_repository):
    """Return the local cache path for ``remote_repository``'s index file."""
    repo_id = remote_repository.get_unique_id()
    return xjoin(self.path, repo_id, 'index')
def lib_dir(self):
    """Return the lib directory, i.e. the parent of site-packages."""
    parent = xjoin(self.site_packages_dir, os.pardir)
    return existing(parent)
def scripts_dir(self):
    """Return where scripts are installed under the user base directory."""
    subdir = 'Scripts' if WIN else 'bin'
    return existing(xjoin(self.user_base_dir, subdir))
import sys import logging import tempfile from urlparse import urlparse from urllib2 import HTTPError from pypm.common import console, net, python, activestate from pypm.common.util import xjoin, existing from pypm.common.package import PackageFile from pypm.common.repository import RepoPackage from pypm.client.base import PyPMFeature, application from pypm.client import error LOG = logging.getLogger(__name__) DOWNLOAD_CACHE = xjoin(application.locations.user_cache_dir, 'download-cache') class Downloader(PyPMFeature): def download_package(self, package): assert type(package) is RepoPackage console.mkdirs(DOWNLOAD_CACHE) LOG.info('Get: [%s] %s %s', urlparse(package.download_url).netloc, package.canonical_name, package.full_version) auth = activestate.get_be_license_auth() send_license = package.requires_be_license license_installed = auth is not None