def install_editable(self, install_options, global_options=()):
    logger.notify('Running setup.py develop for %s' % self.name)
    logger.indent += 2
    try:
        # FIXME: should we do --install-headers here too?
        cwd = self.source_dir
        if self.editable_options and \
                'subdirectory' in self.editable_options:
            cwd = os.path.join(cwd, self.editable_options['subdirectory'])
        call_subprocess(
            [
                sys.executable,
                '-c',
                "import setuptools, tokenize; __file__=%r; exec(compile("
                "getattr(tokenize, 'open', open)(__file__).read().replace"
                "('\\r\\n', '\\n'), __file__, 'exec'))" % self.setup_py
            ]
            + list(global_options)
            + ['develop', '--no-deps']
            + list(install_options),
            cwd=cwd, filter_stdout=self._filter_install,
            show_stdout=False)
    finally:
        logger.indent -= 2
    self.install_succeeded = True
def cache_download(target_file, temp_location, content_type):
    logger.notify('Storing download in cache at %s'
                  % display_path(target_file))
    shutil.copyfile(temp_location, target_file)
    fp = open(target_file + '.content-type', 'w')
    fp.write(content_type)
    fp.close()
    os.unlink(temp_location)
def send(self, request, **kwargs):
    conn = S3Connection(is_secure=True)
    bucket_name = os.getenv("S3_PIP_BUCKET_NAME")
    update_indexes = os.getenv("S3_PIP_COMPARE_CACHE")
    bucket = conn.get_bucket(bucket_name)
    keyname = urlparse.urlparse(request.url).path
    html = False
    if keyname.endswith("/"):
        keyname += "index.html"
        html = True
    key = bucket.get_key(keyname)
    if html:
        content_type = "text/html"
    else:
        content_type = mimetypes.guess_type(request.url)[0] or "text/plain"
    if key is None:
        pypi_resp = HTTPAdapter().send(request, **kwargs)
        if pypi_resp.status_code != 200:
            return pypi_resp
        content = pypi_resp.content
        resp = S3Response(content, request.url, len(content), content_type)
        self.s3_sync(request.url, content, bucket,
                     pypi_resp.headers.get("x-pypi-last-serial"))
        return resp
    logger.notify("Found item in S3 {0}".format(keyname))
    resp = S3Response(key.get_contents_as_string(), request.url, key.size,
                      content_type)
    if update_indexes:
        pypi_resp = HTTPAdapter().send(request, **kwargs)
        if (pypi_resp.headers["x-pypi-last-serial"] >
                key.get_metadata("x-pypi-last-serial")):
            content = pypi_resp.content
            resp = S3Response(content, request.url, len(content),
                              content_type)
            self.s3_sync(request.url, content, bucket,
                         pypi_resp.headers["x-pypi-last-serial"])
    return resp
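# --- Usage sketch (not from the source): send() above follows the requests
# transport-adapter interface, so an adapter built around it could be mounted
# on a Session. The class name S3CacheAdapter is an assumption for
# illustration only.
import requests

session = requests.Session()
# Requests under this prefix go through the S3-backed cache adapter; any
# other URL falls back to requests' default HTTPAdapter.
session.mount("https://pypi.python.org/", S3CacheAdapter())
response = session.get("https://pypi.python.org/simple/pip/")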
def get_src_requirement(self, dist, location, find_tags=False):
    repo = self.get_url(location)
    if repo is None:
        return None
    parts = repo.split('/')
    ## FIXME: why not project name?
    egg_project_name = dist.egg_name().split('-', 1)[0]
    rev = self.get_revision(location)
    if parts[-2] in ('tags', 'tag'):
        # It's a tag, perfect!
        full_egg_name = '%s-%s' % (egg_project_name, parts[-1])
    elif parts[-2] in ('branches', 'branch'):
        # It's a branch :(
        full_egg_name = '%s-%s-r%s' % (dist.egg_name(), parts[-1], rev)
    elif parts[-1] == 'trunk':
        # Trunk :-/
        full_egg_name = '%s-dev_r%s' % (dist.egg_name(), rev)
        if find_tags:
            tag_url = '/'.join(parts[:-1]) + '/tags'
            tag_revs = self.get_tag_revs(tag_url)
            match = self.find_tag_match(rev, tag_revs)
            if match:
                logger.notify(
                    'trunk checkout %s seems to be equivalent to tag %s'
                    % match)
                repo = '%s/%s' % (tag_url, match)
                full_egg_name = '%s-%s' % (egg_project_name, match)
    else:
        # Don't know what it is
        logger.warn(
            'svn URL does not fit normal structure (tags/branches/trunk): %s'
            % repo)
        full_egg_name = '%s-dev_r%s' % (egg_project_name, rev)
    return 'svn+%s@%s#egg=%s' % (repo, rev, full_egg_name)
def run(self, options, args):
    deprecation = textwrap.dedent("""
        ###############################################
        ##                                           ##
        ##  Due to lack of interest and maintenance, ##
        ##  'pip bundle' and support for installing  ##
        ##  from *.pybundle files is now deprecated, ##
        ##  and will be removed in pip v1.5.         ##
        ##                                           ##
        ###############################################
    """)
    logger.notify(deprecation)
    if not args:
        raise InstallationError('You must give a bundle filename')
    # We have to get everything when creating a bundle:
    options.ignore_installed = True
    logger.notify(
        'Putting temporary build files in %s and source/develop files in %s'
        % (display_path(options.build_dir), display_path(options.src_dir)))
    self.bundle_filename = args.pop(0)
    requirement_set = super(BundleCommand, self).run(options, args)
    return requirement_set
def output_package_listing(self, installed_packages):
    for dist in installed_packages:
        if dist_is_editable(dist):
            line = '%s (%s, %s)' % (dist.project_name, dist.version,
                                    dist.location)
        else:
            line = '%s (%s)' % (dist.project_name, dist.version)
        logger.notify(line)
def main(self, args):
    options, args = self.parser.parse_args(args)
    if not args:
        self.parser.print_help()
        return
    level = 1  # Notify
    # level_for_integer returns a logger level; the original discarded the
    # return value, which made the call a no-op, so assign it back.
    level = logger.level_for_integer(level)
    logger.consumers.extend([(level, sys.stdout)])
    # get all files
    requirement_set = self.run(options, args)
    # trace dependencies
    logger.notify("Tracing dependencies ...")
    dependencies = []
    values = None
    if hasattr(requirement_set.requirements, 'itervalues'):
        values = list(requirement_set.requirements.itervalues())
    elif hasattr(requirement_set.requirements, 'values'):
        values = list(requirement_set.requirements.values())
    for req in values:
        trace_dependencies(req, requirement_set, dependencies)
    # output the result
    logger.notify("Output result ...")
    self.output(options, args, dependencies)
    requirement_set.cleanup_files()
def _can_uninstall(self):
    if not dist_is_local(self.dist):
        logger.notify(
            "Not uninstalling %s at %s, outside environment %s"
            % (self.dist.project_name,
               normalize_path(self.dist.location),
               sys.prefix)
        )
        return False
    return True
def run_outdated(self, options):
    for dist, remote_version_raw, remote_version_parsed in \
            self.find_packages_latests_versions(options):
        if remote_version_parsed > dist.parsed_version:
            logger.notify(
                '%s (Current: %s Latest: %s)'
                % (dist.project_name, dist.version, remote_version_raw)
            )
def output_package_listing(self, installed_packages):
    installed_packages = sorted(
        installed_packages,
        key=lambda dist: dist.project_name.lower())
    for dist in installed_packages:
        if dist_is_editable(dist):
            line = '%s (%s, %s)' % (dist.project_name, dist.version,
                                    dist.location)
        else:
            line = '%s (%s)' % (dist.project_name, dist.version)
        logger.notify(line)
def _download_url(resp, link, temp_location):
    fp = open(temp_location, 'wb')
    download_hash = None
    if link.hash and link.hash_name:
        try:
            download_hash = hashlib.new(link.hash_name)
        except ValueError:
            logger.warn("Unsupported hash name %s for package %s"
                        % (link.hash_name, link))
    try:
        total_length = int(resp.headers['content-length'])
    except (ValueError, KeyError, TypeError):
        total_length = 0
    downloaded = 0
    show_progress = total_length > 40 * 1000 or not total_length
    show_url = link.show_url
    try:
        if show_progress:
            ## FIXME: the URL can get really long in this message:
            if total_length:
                logger.start_progress(
                    'Downloading %s (%s): '
                    % (show_url, format_size(total_length)))
            else:
                logger.start_progress(
                    'Downloading %s (unknown size): ' % show_url)
        else:
            logger.notify('Downloading %s' % show_url)
        logger.info('Downloading from URL %s' % link)

        def resp_read(chunk_size):
            try:
                # Special case for urllib3.
                try:
                    for chunk in resp.raw.stream(
                            chunk_size, decode_content=False):
                        yield chunk
                except IncompleteRead as e:
                    raise ChunkedEncodingError(e)
            except AttributeError:
                # Standard file-like object.
                while True:
                    chunk = resp.raw.read(chunk_size)
                    if not chunk:
                        break
                    yield chunk

        for chunk in resp_read(4096):
            downloaded += len(chunk)
            if show_progress:
                if not total_length:
                    logger.show_progress('%s' % format_size(downloaded))
                else:
                    logger.show_progress(
                        '%3i%% %s' % (100 * downloaded / total_length,
                                      format_size(downloaded)))
            if download_hash is not None:
                download_hash.update(chunk)
            fp.write(chunk)
        fp.close()
    finally:
        if show_progress:
            logger.end_progress('%s downloaded' % format_size(downloaded))
    return download_hash
def run_egg_info(self, force_root_egg_info=False):
    assert self.source_dir
    if self.name:
        logger.notify(
            'Running setup.py (path:%s) egg_info for package %s'
            % (self.setup_py, self.name)
        )
    else:
        logger.notify(
            'Running setup.py (path:%s) egg_info for package from %s'
            % (self.setup_py, self.url)
        )
    logger.indent += 2
    try:
        # if it's distribute>=0.7, it won't contain an importable
        # setuptools, and having an egg-info dir blocks the ability of
        # setup.py to find setuptools plugins, so delete the egg-info dir
        # if no setuptools. it will get recreated by the run of egg_info
        # NOTE: this self.name check only works when installing from a
        # specifier (not archive path/urls)
        # TODO: take this out later
        if (self.name == 'distribute'
                and not os.path.isdir(
                    os.path.join(self.source_dir, 'setuptools'))):
            rmtree(os.path.join(self.source_dir, 'distribute.egg-info'))

        script = self._run_setup_py
        script = script.replace('__SETUP_PY__', repr(self.setup_py))
        script = script.replace('__PKG_NAME__', repr(self.name))
        egg_info_cmd = [sys.executable, '-c', script, 'egg_info']
        # We can't put the .egg-info files at the root, because then the
        # source code will be mistaken for an installed egg, causing
        # problems
        if self.editable or force_root_egg_info:
            egg_base_option = []
        else:
            egg_info_dir = os.path.join(self.source_dir, 'pip-egg-info')
            if not os.path.exists(egg_info_dir):
                os.makedirs(egg_info_dir)
            egg_base_option = ['--egg-base', 'pip-egg-info']
        cwd = self.source_dir
        if self.editable_options and \
                'subdirectory' in self.editable_options:
            cwd = os.path.join(cwd, self.editable_options['subdirectory'])
        call_subprocess(
            egg_info_cmd + egg_base_option,
            cwd=cwd,
            filter_stdout=self._filter_install,
            show_stdout=False,
            command_level=logger.VERBOSE_DEBUG,
            command_desc='python setup.py egg_info')
    finally:
        logger.indent -= 2
    if not self.req:
        self.req = pkg_resources.Requirement.parse(
            "%(Name)s==%(Version)s" % self.pkg_info())
    self.correct_build_location()
def check_destination(self, dest, url, rev_options, rev_display):
    """
    Prepare a location to receive a checkout/clone.

    Return True if the location is ready for (and requires) a
    checkout/clone, False otherwise.
    """
    checkout = True
    prompt = False
    if os.path.exists(dest):
        checkout = False
        if os.path.exists(os.path.join(dest, self.dirname)):
            existing_url = self.get_url(dest)
            if self.compare_urls(existing_url, url):
                logger.info('%s in %s exists, and has correct URL (%s)'
                            % (self.repo_name.title(), display_path(dest),
                               url))
                logger.notify('Updating %s %s%s'
                              % (display_path(dest), self.repo_name,
                                 rev_display))
                self.update(dest, rev_options)
            else:
                logger.warn('%s %s in %s exists with URL %s'
                            % (self.name, self.repo_name,
                               display_path(dest), existing_url))
                prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ',
                          ('s', 'i', 'w', 'b'))
        else:
            logger.warn('Directory %s already exists, '
                        'and is not a %s %s.'
                        % (dest, self.name, self.repo_name))
            prompt = ('(i)gnore, (w)ipe, (b)ackup ', ('i', 'w', 'b'))
    if prompt:
        logger.warn('The plan is to install the %s repository %s'
                    % (self.name, url))
        response = ask_path_exists('What to do? %s' % prompt[0], prompt[1])
        if response == 's':
            logger.notify('Switching %s %s to %s%s'
                          % (self.repo_name, display_path(dest), url,
                             rev_display))
            self.switch(dest, url, rev_options)
        elif response == 'i':
            # do nothing
            pass
        elif response == 'w':
            logger.warn('Deleting %s' % display_path(dest))
            rmtree(dest)
            checkout = True
        elif response == 'b':
            dest_dir = backup_dir(dest)
            logger.warn('Backing up %s to %s'
                        % (display_path(dest), dest_dir))
            shutil.move(dest, dest_dir)
            checkout = True
    return checkout
def zip_package(self, module_name, filename, no_pyc):
    orig_filename = filename
    logger.notify('Zip %s (in %s)' % (module_name, display_path(filename)))
    logger.indent += 2
    if filename.endswith('.egg'):
        dest_filename = filename
    else:
        dest_filename = filename + '.zip'
    try:
        # FIXME: I think this needs to be undoable:
        if filename == dest_filename:
            filename = backup_dir(orig_filename)
            logger.notify(
                'Moving %s aside to %s' % (orig_filename, filename)
            )
            if not self.simulate:
                shutil.move(orig_filename, filename)
        try:
            logger.info(
                'Creating zip file in %s' % display_path(dest_filename)
            )
            if not self.simulate:
                zip = zipfile.ZipFile(dest_filename, 'w')
                zip.writestr(module_name + '/', '')
                for dirpath, dirnames, filenames in os.walk(filename):
                    if no_pyc:
                        filenames = [f for f in filenames
                                     if not f.lower().endswith('.pyc')]
                    for fns, is_dir in [
                            (dirnames, True), (filenames, False)]:
                        for fn in fns:
                            full = os.path.join(dirpath, fn)
                            dest = os.path.join(
                                module_name,
                                dirpath[len(filename):].lstrip(
                                    os.path.sep
                                ),
                                fn,
                            )
                            if is_dir:
                                zip.writestr(dest + '/', '')
                            else:
                                zip.write(full, dest)
                zip.close()
            logger.info(
                'Removing old directory %s' % display_path(filename)
            )
            if not self.simulate:
                rmtree(filename)
        except:
            # FIXME: need to do an undo here
            raise
        # FIXME: should also be undone:
        self.add_filename_to_pth(dest_filename)
    finally:
        logger.indent -= 2
def run(self, options, args):
    if not args:
        raise InstallationError('You must give a bundle filename')
    # We have to get everything when creating a bundle:
    options.ignore_installed = True
    logger.notify(
        'Putting temporary build files in %s and source/develop files in %s'
        % (display_path(options.build_dir), display_path(options.src_dir)))
    self.bundle_filename = args.pop(0)
    requirement_set = super(BundleCommand, self).run(options, args)
    return requirement_set
def copy_file(self, filename, location, content_type, link):
    copy = True
    download_location = os.path.join(location, link.filename)
    if os.path.exists(download_location):
        copy = False
        logger.notify('Ignoring already existing file.')
    if copy:
        shutil.copy(filename, download_location)
        logger.indent -= 2
        logger.notify('Saved %s' % display_path(download_location))
def s3_sync(self, url, content, bucket, cache_index=None):
    parts = urlparse.urlparse(url)
    keyname = parts.path
    if keyname.endswith("/"):
        keyname += "index.html"
    key = bucket.new_key(keyname)
    if cache_index is not None:
        key.set_metadata("x-pypi-last-serial", cache_index)
    logger.notify("Synchronizing {0} to s3".format(keyname))
    key.set_contents_from_string(content)
def obtain(self, dest): url, rev = self.get_url_rev() rev_options = get_rev_options(url, rev) if rev: rev_display = " (to revision %s)" % rev else: rev_display = "" if self.check_destination(dest, url, rev_options, rev_display): logger.notify("Checking out %s%s to %s" % (url, rev_display, display_path(dest))) call_subprocess([self.cmd, "checkout", "-q"] + rev_options + [url, dest])
def output_package_listing(self, installed_packages):
    installed_packages = sorted(
        installed_packages,
        key=lambda dist: dist.project_name.lower())
    for dist in installed_packages:
        pkg_info_string = dist.get_metadata("PKG-INFO")
        pkg_info_lines = pkg_info_string.split("\n")
        for line in pkg_info_lines:
            line_split_array = line.split("Summary:")
            if len(line_split_array) == 2:
                summary = line_split_array[1]
                line = '%s<==>%s<==>%s' % (dist.project_name, dist.version,
                                           summary.strip())
                logger.notify(line)
def print_results(self, hits):
    for hit in hits:
        name, latest = hit
        if name not in self.installed_packages:
            continue
        dist = pkg_resources.get_distribution(name)
        if dist.version < latest:
            try:
                logger.notify('{0} ({1} < {2})'.format(name, dist.version,
                                                       latest))
            except UnicodeEncodeError:
                pass
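# Note: `dist.version < latest` above compares version strings lexically,
# which misorders multi-digit components ('10.0' sorts before '9.0'). A
# sketch of a semantic comparison using pkg_resources, which the surrounding
# code already imports:
from pkg_resources import parse_version

assert '10.0' < '9.0'                                # lexical order: wrong
assert parse_version('10.0') > parse_version('9.0')  # semantic order: right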
def install_requirements_txt(self, req_to_install): """If ENV is set, try to parse requirements-ENV.txt, falling back to requirements.txt if it exists.""" rtxt_candidates = ["requirements.txt"] if self.options and self.options.env: rtxt_candidates.insert(0, "requirements-{0}.txt".format(self.options.env)) for r in rtxt_candidates: fullpath = os.path.join(req_to_install.source_dir, r) if os.path.exists(fullpath): logger.notify("Found {0} in {1}, installing extra dependencies.".format(r, req_to_install.name)) return parse_requirements(fullpath, req_to_install.name, None, self.options) return []
def rollback(self): """Rollback the changes previously made by remove().""" if self.save_dir is None: logger.error("Can't roll back %s; was not uninstalled" % self.dist.project_name) return False logger.notify("Rolling back uninstall of %s" % self.dist.project_name) for path in self._moved_paths: tmp_path = self._stash(path) logger.info("Replacing %s" % path) renames(tmp_path, path) for pth in self.pth: pth.rollback()
def unpack_http_url(link, location, download_cache, only_download):
    temp_dir = tempfile.mkdtemp('-unpack', 'pip-')
    target_url = link.url.split('#', 1)[0]
    target_file = None
    download_hash = None
    if download_cache:
        target_file = os.path.join(download_cache,
                                   urllib.quote(target_url, ''))
        if not os.path.isdir(download_cache):
            create_download_cache_folder(download_cache)
    if (target_file
            and os.path.exists(target_file)
            and os.path.exists(target_file + '.content-type')):
        fp = open(target_file + '.content-type')
        content_type = fp.read().strip()
        fp.close()
        if link.md5_hash:
            download_hash = _get_md5_from_file(target_file, link)
        temp_location = target_file
        logger.notify('Using download cache from %s' % target_file)
    else:
        resp = _get_response_from_url(target_url, link)
        content_type = resp.info()['content-type']
        filename = link.filename  # fallback
        # Have a look at the Content-Disposition header for a better guess
        content_disposition = resp.info().get('content-disposition')
        if content_disposition:
            type, params = cgi.parse_header(content_disposition)
            # We use ``or`` here because we don't want to use an "empty"
            # value from the filename param.
            filename = params.get('filename') or filename
        ext = splitext(filename)[1]
        if not ext:
            ext = mimetypes.guess_extension(content_type)
            if ext:
                filename += ext
        if not ext and link.url != geturl(resp):
            ext = os.path.splitext(geturl(resp))[1]
            if ext:
                filename += ext
        temp_location = os.path.join(temp_dir, filename)
        download_hash = _download_url(resp, link, temp_location)
        if link.md5_hash:
            _check_md5(download_hash, link)
    if only_download:
        _copy_file(temp_location, location, content_type, link)
    else:
        unpack_file(temp_location, location, content_type, link)
    if target_file and target_file != temp_location:
        cache_download(target_file, temp_location, content_type)
    if target_file is None:
        os.unlink(temp_location)
    os.rmdir(temp_dir)
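# --- Illustration (not from the source) of the Content-Disposition handling
# in unpack_http_url above: cgi.parse_header splits a header into its value
# and a params dict, and the filename param, when present, beats the
# URL-derived name.
import cgi

value, params = cgi.parse_header('attachment; filename="pip-1.4.tar.gz"')
assert value == 'attachment'
assert params['filename'] == 'pip-1.4.tar.gz'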
def find_packages_latests_versions(self, options):
    index_urls = [options.index_url] + options.extra_index_urls
    if options.no_index:
        logger.notify('Ignoring indexes: %s' % ','.join(index_urls))
        index_urls = []

    if options.use_mirrors:
        logger.deprecated(
            "1.7",
            "--use-mirrors has been deprecated and will be removed"
            " in the future. Explicit uses of --index-url and/or "
            "--extra-index-url is suggested.")

    if options.mirrors:
        logger.deprecated(
            "1.7",
            "--mirrors has been deprecated and will be removed in "
            " the future. Explicit uses of --index-url and/or "
            "--extra-index-url is suggested.")
        index_urls += options.mirrors

    dependency_links = []
    for dist in get_installed_distributions(local_only=options.local,
                                            skip=self.skip):
        if dist.has_metadata('dependency_links.txt'):
            dependency_links.extend(
                dist.get_metadata_lines('dependency_links.txt'),
            )

    session = self._build_session(options)

    finder = self._build_package_finder(options, index_urls, session)
    finder.add_dependency_links(dependency_links)

    installed_packages = get_installed_distributions(
        local_only=options.local,
        include_editables=False,
        skip=self.skip)
    for dist in installed_packages:
        req = InstallRequirement.from_line(dist.key, None)
        try:
            link = finder.find_requirement(req, True)

            # If link is None, it means the installed version is already
            # the most up-to-date
            if link is None:
                continue
        except DistributionNotFound:
            continue
        except BestVersionAlreadyInstalled:
            remote_version = req.installed_version
        else:
            # it would be nice if the link or the finder exposed a public
            # method that returned the version
            remote_version = finder._link_package_versions(link, req.name)[0]
            remote_version_raw = remote_version[2]
            remote_version_parsed = remote_version[0]
        yield dist, remote_version_raw, remote_version_parsed
def obtain(self, dest): url, rev = self.get_url_rev() if rev: rev_options = [rev] rev_display = " (to revision %s)" % rev else: rev_options = [] rev_display = "" if self.check_destination(dest, url, rev_options, rev_display): logger.notify("Cloning hg %s%s to %s" % (url, rev_display, display_path(dest))) call_subprocess([self.cmd, "clone", "--noupdate", "-q", url, dest]) call_subprocess([self.cmd, "update", "-q"] + rev_options, cwd=dest)
def __init__(self, src_dir, requirements, successfully_downloaded):
    self.src_dir = src_dir
    # two maps, one does a->b, the other one does b->a
    self.comparison_cache = ({}, {})
    self.pre_installed = {}  # maps name -> PackageData
    self.repo_up_to_date = {}  # maps local git clone path -> boolean
    self.requirements = requirements
    self.successfully_downloaded = successfully_downloaded
    try:
        self.load_installed_distributions()
    except Exception, e:
        logger.notify("Exception loading installed distributions "
                      + str(e))
        raise
def obtain(self, dest):
    url, rev = self.get_url_rev()
    if rev:
        rev_options = ['-r', rev]
        rev_display = ' (to revision {0!s})'.format(rev)
    else:
        rev_options = []
        rev_display = ''
    if self.check_destination(dest, url, rev_options, rev_display):
        logger.notify('Checking out {0!s}{1!s} to {2!s}'.format(
            url, rev_display, display_path(dest)))
        call_subprocess(
            [self.cmd, 'checkout', '-q'] + rev_options + [url, dest])
def obtain(self, dest):
    url, rev = self.get_url_rev()
    rev_options = get_rev_options(url, rev)
    if rev:
        rev_display = ' (to revision %s)' % rev
    else:
        rev_display = ''
    if self.check_destination(dest, url, rev_options, rev_display):
        logger.notify('Checking out %s%s to %s'
                      % (url, rev_display, display_path(dest)))
        call_subprocess(
            [self.cmd, 'checkout', '-q'] + rev_options + [url, dest])
def run(self, options, args):
    if not options.req_repository:
        logger.notify('You need to specify a repository. This utility '
                      'does not upload to PyPI')
        return 1
    options.build_dir = os.path.abspath(options.req_cache_dir)
    options.src_dir = os.path.abspath(options.req_cache_dir)
    options.no_install = True
    options.ignore_installed = True
    install_options = options.install_options or []
    global_options = options.global_options or []
    index_urls = [options.index_url] + options.extra_index_urls
    if options.no_index:
        logger.notify('Ignoring indexes: %s' % ','.join(index_urls))
        index_urls = []
    finder = self._build_package_finder(options, index_urls)
    requirement_set = RequirementSet(
        build_dir=options.build_dir,
        src_dir=options.src_dir,
        download_dir=options.download_dir,
        download_cache=options.download_cache,
        upgrade=options.upgrade,
        ignore_installed=options.ignore_installed,
        ignore_dependencies=options.ignore_dependencies)
    req_req = open(os.path.abspath(options.req_req_requirements), 'w')
    req_req.writelines(['--index-url=%s' % self.repository_url(options),
                        '\n'])
    try:
        for filename in options.requirements:
            for req in parse_requirements(filename, finder=finder,
                                          options=options):
                logger.info('req ' + str(req.req))
                req_req.writelines([str(req.req), '\n'])
                requirement_set.add_requirement(req)
    finally:
        req_req.close()
    if not options.no_download:
        requirement_set.prepare_files(finder, force_root_egg_info=False,
                                      bundle=False)
    else:
        requirement_set.locate_files()
    if not os.path.exists(os.path.abspath(options.req_cache_dir)):
        os.mkdir(os.path.abspath(options.req_cache_dir))
    if not options.req_no_upload:
        self.upload_to_repository(options)
    if options.req_clean_cache:
        requirement_set.cleanup_files(bundle=False)
    return requirement_set
def checkout_if_necessary(self, pd):
    if pd.location is None:
        pd.location = GitVersionComparator.checkout_pkg_repo(
            pd.url, pd.clone_dir(self.src_dir))
        self.repo_up_to_date[pd.location] = True
    # self.repo_up_to_date[pd.location] is False if the git repo existed
    # before this snakebasket run, and has not yet been fetched (therefore
    # may contain old data).
    elif self.repo_up_to_date.get(pd.location, True) == False:
        # Do a git fetch for repos which were not checked out recently.
        logger.notify("Performing git fetch in pre-existing directory %s"
                      % pd.location)
        GitVersionComparator.do_fetch(pd.location)
        self.repo_up_to_date[pd.location] = True
    return pd.location
def obtain(self, dest):
    url, rev = self.get_url_rev()
    if rev:
        rev_options = [rev]
        rev_display = ' (to revision {0!s})'.format(rev)
    else:
        rev_options = []
        rev_display = ''
    if self.check_destination(dest, url, rev_options, rev_display):
        logger.notify('Cloning hg {0!s}{1!s} to {2!s}'.format(
            url, rev_display, display_path(dest)))
        call_subprocess([self.cmd, 'clone', '--noupdate', '-q', url, dest])
        call_subprocess([self.cmd, 'update', '-q'] + rev_options, cwd=dest)
def unpack_http_url(link, location, download_cache, download_dir=None):
    temp_dir = tempfile.mkdtemp('-unpack', 'pip-')
    target_url = link.url.split('#', 1)[0]
    target_file = None
    download_hash = None
    if download_cache:
        target_file = os.path.join(download_cache,
                                   urllib.quote(target_url, ''))
        if not os.path.isdir(download_cache):
            create_download_cache_folder(download_cache)
    if (target_file
            and os.path.exists(target_file)
            and os.path.exists(target_file + '.content-type')):
        fp = open(target_file + '.content-type')
        content_type = fp.read().strip()
        fp.close()
        if link.md5_hash:
            download_hash = _get_md5_from_file(target_file, link)
        temp_location = target_file
        logger.notify('Using download cache from %s' % target_file)
    else:
        resp = _get_response_from_url(target_url, link)
        content_type = resp.info()['content-type']
        filename = link.filename  # fallback
        # Have a look at the Content-Disposition header for a better guess
        content_disposition = resp.info().get('content-disposition')
        if content_disposition:
            type, params = cgi.parse_header(content_disposition)
            # We use ``or`` here because we don't want to use an "empty"
            # value from the filename param.
            filename = params.get('filename') or filename
        ext = splitext(filename)[1]
        if not ext:
            ext = mimetypes.guess_extension(content_type)
            if ext:
                filename += ext
        if not ext and link.url != geturl(resp):
            ext = os.path.splitext(geturl(resp))[1]
            if ext:
                filename += ext
        temp_location = os.path.join(temp_dir, filename)
        download_hash = _download_url(resp, link, temp_location)
        if link.md5_hash:
            _check_md5(download_hash, link)
    if download_dir:
        _copy_file(temp_location, download_dir, content_type, link)
    unpack_file(temp_location, location, content_type, link)
    if target_file and target_file != temp_location:
        cache_download(target_file, temp_location, content_type)
    if target_file is None:
        os.unlink(temp_location)
    os.rmdir(temp_dir)
def zip_package(self, module_name, filename, no_pyc):
    orig_filename = filename
    logger.notify('Zip %s (in %s)' % (module_name, display_path(filename)))
    logger.indent += 2
    if filename.endswith('.egg'):
        dest_filename = filename
    else:
        dest_filename = filename + '.zip'
    try:
        ## FIXME: I think this needs to be undoable:
        if filename == dest_filename:
            filename = backup_dir(orig_filename)
            logger.notify('Moving %s aside to %s'
                          % (orig_filename, filename))
            if not self.simulate:
                shutil.move(orig_filename, filename)
        try:
            logger.info('Creating zip file in %s'
                        % display_path(dest_filename))
            if not self.simulate:
                zip = zipfile.ZipFile(dest_filename, 'w')
                zip.writestr(module_name + '/', '')
                for dirpath, dirnames, filenames in os.walk(filename):
                    if no_pyc:
                        filenames = [
                            f for f in filenames
                            if not f.lower().endswith('.pyc')
                        ]
                    for fns, is_dir in [(dirnames, True),
                                        (filenames, False)]:
                        for fn in fns:
                            full = os.path.join(dirpath, fn)
                            dest = os.path.join(
                                module_name,
                                dirpath[len(filename):].lstrip(os.path.sep),
                                fn)
                            if is_dir:
                                zip.writestr(dest + '/', '')
                            else:
                                zip.write(full, dest)
                zip.close()
            logger.info('Removing old directory %s'
                        % display_path(filename))
            if not self.simulate:
                rmtree(filename)
        except:
            ## FIXME: need to do an undo here
            raise
        ## FIXME: should also be undone:
        self.add_filename_to_pth(dest_filename)
    finally:
        logger.indent -= 2
def unpack(self, location): """Get the bzr branch at the url to the destination location""" url, rev = self.get_url_rev() logger.notify('Checking out bzr repository %s to %s' % (url, location)) logger.indent += 2 try: if os.path.exists(location): os.rmdir(location) call_subprocess([self.cmd, 'branch', url, location], filter_stdout=self._filter, show_stdout=False) finally: logger.indent -= 2
def rollback(self): """Rollback the changes previously made by remove().""" if self.save_dir is None: logger.error("Can't roll back %s; was not uninstalled" % self.dist.project_name) return False logger.notify('Rolling back uninstall of %s' % self.dist.project_name) for path in self._moved_paths: tmp_path = self._stash(path) logger.info('Replacing %s' % path) renames(tmp_path, path) for pth in self.pth.values(): pth.rollback()
def unpack(self, location): """Clone the Git repository at the url to the destination location""" url, rev = self.get_url_rev() logger.notify('Cloning Git repository %s to %s' % (url, location)) logger.indent += 2 try: if os.path.exists(location): os.rmdir(location) call_subprocess( [self.cmd, 'clone', url, location], filter_stdout=self._filter, show_stdout=False) finally: logger.indent -= 2
def obtain(self, dest):
    url, rev = self.get_url_rev()
    if rev:
        rev_options = [rev]
        rev_display = ' (to revision %s)' % rev
    else:
        rev_options = []
        rev_display = ''
    if self.check_destination(dest, url, rev_options, rev_display):
        logger.notify('Cloning hg %s%s to %s'
                      % (url, rev_display, display_path(dest)))
        call_subprocess([self.cmd, 'clone', '--noupdate', '-q', url, dest])
        call_subprocess([self.cmd, 'update', '-q'] + rev_options, cwd=dest)
def unpack_file_url(link, location, download_dir=None):
    link_path = url_to_path(link.url_without_fragment)
    already_downloaded = False

    # If it's a url to a local directory
    if os.path.isdir(link_path):
        if os.path.isdir(location):
            rmtree(location)
        shutil.copytree(link_path, location, symlinks=True)
        return

    # if link has a hash, let's confirm it matches
    if link.hash:
        link_path_hash = _get_hash_from_file(link_path, link)
        _check_hash(link_path_hash, link)

    # If a download dir is specified, is the file already there and valid?
    if download_dir:
        download_path = os.path.join(download_dir, link.filename)
        if os.path.exists(download_path):
            content_type = mimetypes.guess_type(download_path)[0]
            logger.notify('File was already downloaded %s' % download_path)
            if link.hash:
                download_hash = _get_hash_from_file(download_path, link)
                try:
                    _check_hash(download_hash, link)
                    already_downloaded = True
                except HashMismatch:
                    logger.warn(
                        'Previously-downloaded file %s has bad hash, '
                        're-downloading.' % link_path
                    )
                    os.unlink(download_path)
            else:
                already_downloaded = True

    if already_downloaded:
        from_path = download_path
    else:
        from_path = link_path

    content_type = mimetypes.guess_type(from_path)[0]

    # unpack the archive to the build dir location. even when only
    # downloading archives, they have to be unpacked to parse dependencies
    unpack_file(from_path, location, content_type, link)

    # a download dir is specified and not already downloaded
    if download_dir and not already_downloaded:
        _copy_file(from_path, download_dir, content_type, link)
def build(self):
    """Build wheels."""

    # unpack and construct the req set
    self.requirement_set.prepare_files(self.finder)
    reqset = self.requirement_set.requirements.values()

    # make the wheelhouse
    if not os.path.exists(self.wheel_dir):
        os.makedirs(self.wheel_dir)

    # build the wheels
    logger.notify('Building wheels for collected packages: %s'
                  % ', '.join([req.name for req in reqset]))
    logger.indent += 2
    build_success, build_failure = [], []
    for req in reqset:
        if req.is_wheel:
            logger.notify("Skipping building wheel: %s", req.url)
            continue
        if self._build_one(req):
            build_success.append(req)
        else:
            build_failure.append(req)
    logger.indent -= 2

    # notify success/failure
    if build_success:
        logger.notify('Successfully built %s'
                      % ' '.join([req.name for req in build_success]))
    if build_failure:
        logger.notify('Failed to build %s'
                      % ' '.join([req.name for req in build_failure]))
def print_results(hits, name_column_width=25, terminal_width=None):
    installed_packages = [p.project_name for p in pkg_resources.working_set]
    for hit in hits:
        name = hit['name']
        summary = hit['summary'] or ''
        if terminal_width is not None:
            # wrap and indent summary to fit terminal
            summary = textwrap.wrap(summary,
                                    terminal_width - name_column_width - 5)
            summary = ('\n' + ' ' * (name_column_width + 3)).join(summary)
        line = '%s - %s' % (name.ljust(name_column_width), summary)
        try:
            logger.notify(line)
            if name in installed_packages:
                dist = pkg_resources.get_distribution(name)
                logger.indent += 2
                try:
                    latest = highest_version(hit['versions'])
                    if dist.version == latest:
                        logger.notify('INSTALLED: %s (latest)'
                                      % dist.version)
                    else:
                        logger.notify('INSTALLED: %s' % dist.version)
                        logger.notify('LATEST: %s' % latest)
                finally:
                    logger.indent -= 2
        except UnicodeEncodeError:
            pass
def check_destination(self, dest, url, rev_options, rev_display):
    """
    Prepare a location to receive a checkout/clone.

    Return True if the location is ready for (and requires) a
    checkout/clone, False otherwise.
    """
    checkout = True
    prompt = False
    if os.path.exists(dest):
        checkout = False
        if os.path.exists(os.path.join(dest, self.dirname)):
            existing_url = self.get_url(dest)
            if self.compare_urls(existing_url, url):
                logger.info('%s in %s exists, and has correct URL (%s)'
                            % (self.repo_name.title(), display_path(dest),
                               url))
                logger.notify('Updating %s %s%s'
                              % (display_path(dest), self.repo_name,
                                 rev_display))
                self.update(dest, rev_options)
            else:
                logger.warn('%s %s in %s exists with URL %s'
                            % (self.name, self.repo_name,
                               display_path(dest), existing_url))
                prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ',
                          ('s', 'i', 'w', 'b'))
        else:
            logger.warn('Directory %s already exists, and is not a %s %s.'
                        % (dest, self.name, self.repo_name))
            prompt = ('(i)gnore, (w)ipe, (b)ackup ', ('i', 'w', 'b'))
    if prompt:
        logger.warn('The plan is to install the %s repository %s'
                    % (self.name, url))
        response = ask('What to do? %s' % prompt[0], prompt[1])
        if response == 's':
            logger.notify('Switching %s %s to %s%s'
                          % (self.repo_name, display_path(dest), url,
                             rev_display))
            self.switch(dest, url, rev_options)
        elif response == 'i':
            # do nothing
            pass
        elif response == 'w':
            logger.warn('Deleting %s' % display_path(dest))
            rmtree(dest)
            checkout = True
        elif response == 'b':
            dest_dir = backup_dir(dest)
            logger.warn('Backing up %s to %s'
                        % (display_path(dest), dest_dir))
            shutil.move(dest, dest_dir)
            checkout = True
    return checkout
def locate_files(self):
    # FIXME: duplicates code from prepare_files; relevant code should
    # probably be factored out into a separate method
    unnamed = list(self.unnamed_requirements)
    reqs = list(self.requirements.values())
    while reqs or unnamed:
        if unnamed:
            req_to_install = unnamed.pop(0)
        else:
            req_to_install = reqs.pop(0)
        install_needed = True
        if not self.ignore_installed and not req_to_install.editable:
            req_to_install.check_if_exists()
            if req_to_install.satisfied_by:
                if self.upgrade:
                    # don't uninstall the conflict if it's a user install
                    # and the conflict is not a user install
                    if not (self.use_user_site
                            and not dist_in_usersite(
                                req_to_install.satisfied_by
                            )):
                        req_to_install.conflicts_with = \
                            req_to_install.satisfied_by
                    req_to_install.satisfied_by = None
                else:
                    install_needed = False
            if req_to_install.satisfied_by:
                logger.notify('Requirement already satisfied '
                              '(use --upgrade to upgrade): %s'
                              % req_to_install)

        if req_to_install.editable:
            if req_to_install.source_dir is None:
                req_to_install.source_dir = req_to_install.build_location(
                    self.src_dir
                )
        elif install_needed:
            req_to_install.source_dir = req_to_install.build_location(
                self.build_dir,
                not self.is_download,
            )

        if (req_to_install.source_dir is not None
                and not os.path.isdir(req_to_install.source_dir)):
            raise InstallationError(
                'Could not install requirement %s because source folder %s'
                ' does not exist (perhaps --no-download was used without '
                'first running an equivalent install with --no-install?)'
                % (req_to_install, req_to_install.source_dir)
            )
def _download_url(resp, link, temp_location): fp = open(temp_location, 'wb') download_hash = None if link.hash and link.hash_name: try: download_hash = hashlib.new(link.hash_name) except ValueError: logger.warn("Unsupported hash name %s for package %s" % (link.hash_name, link)) try: total_length = int(resp.info()['content-length']) except (ValueError, KeyError, TypeError): total_length = 0 downloaded = 0 show_progress = total_length > 40 * 1000 or not total_length show_url = link.show_url try: if show_progress: ## FIXME: the URL can get really long in this message: if total_length: logger.start_progress('Downloading %s (%s): ' % (show_url, format_size(total_length))) else: logger.start_progress('Downloading %s (unknown size): ' % show_url) else: logger.notify('Downloading %s' % show_url) logger.info('Downloading from URL %s' % link) while True: chunk = resp.read(4096) if not chunk: break downloaded += len(chunk) if show_progress: if not total_length: logger.show_progress('%s' % format_size(downloaded)) else: logger.show_progress('%3i%% %s' % (100 * downloaded / total_length, format_size(downloaded))) if download_hash is not None: download_hash.update(chunk) fp.write(chunk) fp.close() finally: if show_progress: logger.end_progress('%s downloaded' % format_size(downloaded)) return download_hash
def unzip_package(self, module_name, filename):
    zip_filename = os.path.dirname(filename)
    # the check must fail when the containing path is not an actual zip
    # file; the original un-parenthesized condition could never be true
    if not (os.path.isfile(zip_filename)
            and zipfile.is_zipfile(zip_filename)):
        raise InstallationError(
            'Module %s (in %s) isn\'t located in a zip file in %s'
            % (module_name, filename, zip_filename))
    package_path = os.path.dirname(zip_filename)
    if package_path not in self.paths():
        logger.warn('Unpacking %s into %s, but %s is not on sys.path'
                    % (display_path(zip_filename),
                       display_path(package_path),
                       display_path(package_path)))
    logger.notify('Unzipping %s (in %s)'
                  % (module_name, display_path(zip_filename)))
    if self.simulate:
        logger.notify('Skipping remaining operations because of --simulate')
        return
    logger.indent += 2
    try:
        ## FIXME: this should be undoable:
        zip = zipfile.ZipFile(zip_filename)
        to_save = []
        for name in zip.namelist():
            if name.startswith(module_name + os.path.sep):
                content = zip.read(name)
                dest = os.path.join(package_path, name)
                if not os.path.exists(os.path.dirname(dest)):
                    os.makedirs(os.path.dirname(dest))
                if not content and dest.endswith(os.path.sep):
                    if not os.path.exists(dest):
                        os.makedirs(dest)
                else:
                    f = open(dest, 'wb')
                    f.write(content)
                    f.close()
            else:
                to_save.append((name, zip.read(name)))
        zip.close()
        if not to_save:
            logger.info('Removing now-empty zip file %s'
                        % display_path(zip_filename))
            os.unlink(zip_filename)
            self.remove_filename_from_pth(zip_filename)
        else:
            logger.info('Removing entries in %s/ from zip file %s'
                        % (module_name, display_path(zip_filename)))
            zip = zipfile.ZipFile(zip_filename, 'w')
            for name, content in to_save:
                zip.writestr(name, content)
            zip.close()
    finally:
        logger.indent -= 2
def obtain(self, dest):
    url, rev = self.get_url_rev()
    if rev:
        rev_options = [rev]
        rev_display = ' (to %s)' % rev
    else:
        rev_options = ['origin/master']
        rev_display = ''
    if self.check_destination(dest, url, rev_options, rev_display):
        logger.notify('Cloning %s%s to %s'
                      % (url, rev_display, display_path(dest)))
        call_subprocess([self.cmd, 'clone', '-q', url, dest])
        rev_options = self.check_rev_options(rev, dest, rev_options)
        call_subprocess([self.cmd, 'checkout', '-q'] + rev_options,
                        cwd=dest)
def output_package_listing(self, installed_packages):
    installed_packages = sorted(
        installed_packages,
        key=lambda dist: dist.project_name.lower(),
    )
    for dist in installed_packages:
        if dist_is_editable(dist):
            line = '%s (%s, %s)' % (
                dist.project_name,
                dist.version,
                dist.location,
            )
        else:
            line = '%s (%s)' % (dist.project_name, dist.version)
        logger.notify(line)
def run(self, options, args):
    if not args:
        raise InstallationError('You must give a bundle filename')
    if not options.build_dir:
        options.build_dir = backup_dir(build_prefix, '-bundle')
    if not options.src_dir:
        options.src_dir = backup_dir(src_prefix, '-bundle')
    # We have to get everything when creating a bundle:
    options.ignore_installed = True
    logger.notify(
        'Putting temporary build files in %s and source/develop files in %s'
        % (display_path(options.build_dir), display_path(options.src_dir)))
    self.bundle_filename = args.pop(0)
    requirement_set = super(BundleCommand, self).run(options, args)
    return requirement_set
def export(self, location):
    """Export the svn repository at the url to the destination location"""
    url, rev = self.get_url_rev()
    logger.notify('Exporting svn repository %s to %s' % (url, location))
    logger.indent += 2
    try:
        if os.path.exists(location):
            # Subversion doesn't like to check out over an existing
            # directory --force fixes this, but was only added in svn 1.5
            rmtree(location)
        call_subprocess([self.cmd, 'export', url, location],
                        filter_stdout=self._filter, show_stdout=False)
    finally:
        logger.indent -= 2
def install_requirements_txt(self, req_to_install): """If ENV is set, try to parse requirements-ENV.txt, falling back to requirements.txt if it exists.""" rtxt_candidates = ["requirements.txt"] if self.options and self.options.env: rtxt_candidates.insert( 0, "requirements-{0}.txt".format(self.options.env)) for r in rtxt_candidates: fullpath = os.path.join(req_to_install.source_dir, r) if os.path.exists(fullpath): logger.notify( "Found {0} in {1}, installing extra dependencies.".format( r, req_to_install.name)) return parse_requirements(fullpath, req_to_install.name, None, self.options) return []
def run(self, options, args):
    if not options.build_dir:
        options.build_dir = build_prefix
    if not options.src_dir:
        options.src_dir = src_prefix
    if options.download_dir:
        options.no_install = True
        options.ignore_installed = True
    else:
        options.build_dir = os.path.abspath(options.build_dir)
        options.src_dir = os.path.abspath(options.src_dir)
    index_urls = [options.index_url] + options.extra_index_urls
    if options.no_index:
        logger.notify('Ignoring indexes: %s' % ','.join(index_urls))
        index_urls = []
    finder = PackageFinder(find_links=options.find_links,
                           index_urls=index_urls)
    requirement_set = RequirementSet(
        build_dir=options.build_dir,
        src_dir=options.src_dir,
        download_dir=options.download_dir,
        download_cache=options.download_cache,
        upgrade=options.upgrade,
        ignore_installed=options.ignore_installed,
        ignore_dependencies=False,
    )
    for name in args:
        requirement_set.add_requirement(
            InstallRequirement.from_line(name, None))
    for name in options.editables:
        requirement_set.add_requirement(
            InstallRequirement.from_editable(
                name, default_vcs=options.default_vcs))
    for filename in options.requirements:
        for req in parse_requirements(filename, finder=finder,
                                      options=options):
            requirement_set.add_requirement(req)
    requirement_set.prepare_files(
        finder,
        force_root_egg_info=self.bundle,
        bundle=self.bundle,
    )
    return requirement_set
def cleanup_files(self):
    """Clean up files, remove builds."""
    logger.notify('Cleaning up...')
    logger.indent += 2
    for req in self.reqs_to_cleanup:
        req.remove_temporary_source()
    remove_dir = []
    if self._pip_has_created_build_dir():
        remove_dir.append(self.build_dir)
    for dir in remove_dir:
        if os.path.exists(dir):
            logger.info('Removing temporary dir %s...' % dir)
            rmtree(dir)
    logger.indent -= 2
def unpack_http_url(link, location, download_cache, only_download):
    temp_dir = tempfile.mkdtemp('-unpack', 'pip-')
    target_url = link.url.split('#', 1)[0]
    target_file = None
    download_hash = None
    if download_cache:
        target_file = os.path.join(download_cache,
                                   urllib.quote(target_url, ''))
        if not os.path.isdir(download_cache):
            create_download_cache_folder(download_cache)
    if (target_file
            and os.path.exists(target_file)
            and os.path.exists(target_file + '.content-type')):
        fp = open(target_file + '.content-type')
        content_type = fp.read().strip()
        fp.close()
        if link.md5_hash:
            download_hash = _get_md5_from_file(target_file, link)
        temp_location = target_file
        logger.notify('Using download cache from %s' % target_file)
    else:
        resp = _get_response_from_url(target_url, link)
        content_type = resp.info()['content-type']
        filename = link.filename
        ext = splitext(filename)[1]
        if not ext:
            ext = mimetypes.guess_extension(content_type)
            if ext:
                filename += ext
        if not ext and link.url != geturl(resp):
            ext = os.path.splitext(geturl(resp))[1]
            if ext:
                filename += ext
        temp_location = os.path.join(temp_dir, filename)
        download_hash = _download_url(resp, link, temp_location)
        if link.md5_hash:
            _check_md5(download_hash, link)
    if only_download:
        _copy_file(temp_location, location, content_type, link)
    else:
        unpack_file(temp_location, location, content_type, link)
    if target_file and target_file != temp_location:
        cache_download(target_file, temp_location, content_type)
    if target_file is None:
        os.unlink(temp_location)
    os.rmdir(temp_dir)
def obtain(self, dest):
    url, rev = self.get_url_rev()
    if rev:
        rev_options = [rev]
        rev_display = ' (to %s)' % rev
    else:
        rev_options = ['master']
        rev_display = ''
    if self.check_destination(dest, url, rev_options, rev_display):
        logger.notify('Cloning %s%s to %s'
                      % (url, rev_display, display_path(dest)))
        call_subprocess([self.cmd, 'clone', '-q', url, dest])
        checked_rev = self.check_rev_options(rev, dest, rev_options)
        # only explicitly check out the "revision" in case the check
        # found a valid tag, commit or branch
        if rev_options != checked_rev:
            call_subprocess(
                [self.cmd, 'checkout', '-q'] + checked_rev, cwd=dest)
def run(self, options, args):
    logger.deprecated(
        '1.6',
        "DEPRECATION: 'pip bundle' and support for installing from "
        "*.pybundle files is deprecated. "
        "See https://github.com/pypa/pip/pull/1046")
    if not args:
        raise InstallationError('You must give a bundle filename')
    # We have to get everything when creating a bundle:
    options.ignore_installed = True
    logger.notify(
        'Putting temporary build files in %s and source/develop files in %s'
        % (display_path(options.build_dir), display_path(options.src_dir)))
    self.bundle_filename = args.pop(0)
    requirement_set = super(BundleCommand, self).run(options, args)
    return requirement_set
def _download_url(resp, link, temp_location):
    fp = open(temp_location, 'wb')
    download_hash = None
    if link.md5_hash:
        download_hash = md5()
    try:
        total_length = int(resp.info()['content-length'])
    except (ValueError, KeyError):
        total_length = 0
    downloaded = 0
    show_progress = total_length > 40 * 1000 or not total_length
    show_url = link.show_url
    try:
        if show_progress:
            ## FIXME: the URL can get really long in this message:
            if total_length:
                logger.start_progress(
                    'Downloading %s (%s): '
                    % (show_url, format_size(total_length)))
            else:
                logger.start_progress(
                    'Downloading %s (unknown size): ' % show_url)
        else:
            logger.notify('Downloading %s' % show_url)
        logger.debug('Downloading from URL %s' % link)
        while 1:
            chunk = resp.read(4096)
            if not chunk:
                break
            downloaded += len(chunk)
            if show_progress:
                if not total_length:
                    logger.show_progress('%s' % format_size(downloaded))
                else:
                    logger.show_progress(
                        '%3i%% %s' % (100 * downloaded / total_length,
                                      format_size(downloaded)))
            if link.md5_hash:
                download_hash.update(chunk)
            fp.write(chunk)
        fp.close()
    finally:
        if show_progress:
            logger.end_progress('%s downloaded' % format_size(downloaded))
    return download_hash
def _find_url_name(self, index_url, url_name, req):
    """Finds the true URL name of a package, when the given name isn't
    quite correct.  This is usually used to implement
    case-insensitivity."""
    if not index_url.url.endswith('/'):
        # Vaguely part of the PyPI API... weird but true.
        ## FIXME: bad to modify this?
        index_url.url += '/'
    page = self._get_page(index_url, req)
    if page is None:
        logger.fatal('Cannot fetch index base URL %s' % index_url)
        return
    norm_name = normalize_name(req.url_name)
    for link in page.links:
        base = posixpath.basename(link.path.rstrip('/'))
        if norm_name == normalize_name(base):
            logger.notify('Real name of requirement %s is %s'
                          % (url_name, base))
            return base
    return None
def obtain(self, dest):
    url, rev = self.get_url_rev()
    if rev:
        rev_options = [rev]
        rev_display = ' (to %s)' % rev
    else:
        rev_options = ['origin/master']
        rev_display = ''
    if self.check_destination(dest, url, rev_options, rev_display):
        logger.notify('Cloning %s%s to %s'
                      % (url, rev_display, display_path(dest)))
        call_subprocess([self.cmd, 'clone', '-q', url, dest])
        #: repo may contain submodules
        self.update_submodules(dest)
        if rev:
            rev_options = self.check_rev_options(rev, dest, rev_options)
            # Only do a checkout if rev_options differs from HEAD
            if not self.get_revision(dest).startswith(rev_options[0]):
                call_subprocess([self.cmd, 'checkout', '-q'] + rev_options,
                                cwd=dest)