def extract_links(self, urls, dist, found=None):
    """ Recursively extract download links from a URL.

    urls  - iterable of page URLs to scan
    dist  - distribution name; a link only counts as a download if this
            string appears in the URL path
    found - accumulator set of download URLs (created on first call)

    Returns the set of download URLs collected across all pages and any
    'rel=download' pages followed recursively.
    """
    if found is None:
        found = set()
    for url in urls:
        try:
            response = requests.get(url)
        except Exception:
            # Network errors are logged and the URL skipped - best effort crawl.
            logger.exception("Can't fetch %s", url)
            continue
        if response.status_code != 200:
            logger.info("Non-200 response from {url}: {code}".format(
                url=url,
                code=response.status_code,
            ))
            continue
        try:
            # NOTE(review): no explicit parser is passed to BeautifulSoup -
            # parser choice depends on what is installed; confirm intended.
            soup = BeautifulSoup(response.content)
        except HTMLParseError:
            logger.debug("Parse error: {0}".format(url))
            continue
        # Pages to crawl recursively (found via rel="download" anchors).
        additional = set()
        for link in soup.find_all('a'):
            if not 'href' in link.attrs:
                continue
            potential = link.attrs['href']
            # Resolve relative hrefs against the page URL.
            if not '://' in potential:
                potential = urlparse.urljoin(url, potential)
            potential = urlparse.urlparse(potential)
            # A direct archive link for this distribution, excluding Mac
            # installer artefacts.
            if (dist in potential.path and
                    is_archive_file(potential.path) and
                    not potential.path.endswith('.mpkg.zip') and
                    not ".macosx-10." in potential.path):
                potential = list(potential)
                # Index 5 is the fragment - drop e.g. '#md5=...' suffixes.
                potential[5] = ''
                found.add(urlparse.urlunparse(potential))
                continue
            # A rel="download" anchor that is not itself an archive points at
            # another page to crawl.
            if ('rel' in link.attrs and
                    link.attrs['rel'] == [u'download'] and
                    not is_archive_file(link.attrs['href'])):
                logger.debug("Download URL {0}".format(link.attrs['href']))
                additional.add(urlparse.urlunparse(potential))
        # Recurse into the download pages discovered on this page.
        found = found.union(self.extract_links(additional, dist))
    return found
def import_services_from_anywhere(self, items, base_dir, work_dir=None):
    """ Imports services from any of the supported sources, be it module names,
    individual files, directories or distutils2 packages (compressed or not).

    items    - iterable of item names (archive paths, directories or .py files)
    base_dir - base directory passed through to the per-kind import helpers
    work_dir - optional working directory used when decompressing archives
    """
    for item_name in items:
        logger.debug('About to import services from:[%s]', item_name)

        # Internal services live in the 'zato' namespace.
        is_internal = item_name.startswith('zato')

        # distutils2 archive, decompress and import from the newly created directory ..
        if is_archive_file(item_name):
            new_path = self.decompress(item_name, work_dir)
            self.import_services_from_dist2_directory(new_path)

        # .. a regular directory or a Distutils2 one ..
        elif os.path.isdir(item_name):
            try:
                self.import_services_from_directory(item_name, base_dir, True)
            except NoDistributionFound as e:
                # Fix: format_exc() takes no exception argument - passing 'e'
                # would be interpreted as the traceback *limit*. ('as e' also
                # replaces the py2-only 'except X, e' syntax.)
                msg = 'Caught an exception e=[{}]'.format(format_exc())
                logger.log(TRACE1, msg)
                self.import_services_from_directory(item_name, base_dir, False)

        # .. a .py/.pyw
        elif is_python_file(item_name):
            self.import_services_from_file(item_name, is_internal, base_dir)
def import_services_from_anywhere(self, items, base_dir, work_dir=None):
    """ Imports services from any of the supported sources, be it module names,
    individual files or distutils2 packages (compressed or not).
    """
    for item_name in items:
        logger.debug('About to import services from:[%s]', item_name)
        is_internal = item_name.startswith('zato')

        if is_archive_file(item_name):
            # A distutils2 archive - decompress first, then import from the
            # freshly created directory.
            new_path = self.decompress(item_name, work_dir)
            self.import_services_from_directory(new_path)
        elif os.path.isdir(item_name):
            # Already a distutils2 directory.
            self.import_services_from_directory(item_name)
        elif is_python_file(item_name):
            # A plain .py/.pyw file.
            self.import_services_from_file(item_name, is_internal, base_dir)
        else:
            # Anything else must be a module object.
            self.import_services_from_module(item_name, is_internal)
def hot_deploy(parallel_server, file_name, path, delete_path=True, notify=True):
    """ Hot-deploys a package if it looks like a Python module or archive.

    Returns the new package's ID on success, None when the file was ignored.
    """
    if is_python_file(file_name) or is_archive_file(file_name):
        logger.debug('About to hot-deploy [{}]'.format(path))
        now = datetime.utcnow()
        di = dumps(deployment_info('hot-deploy', file_name, now.isoformat(), path))

        # Fix: read the payload through a context manager so the file handle
        # is closed deterministically instead of being leaked.
        with open(path, 'rb') as payload_file:
            payload = payload_file.read()

        # Insert the package into the DB ..
        package_id = parallel_server.odb.hot_deploy(now, di, file_name, payload, parallel_server.id)

        # .. and optionally notify all the servers they're to pick up a delivery
        if notify:
            parallel_server.notify_new_package(package_id)

        if delete_path:
            _os_remove(path)

        return package_id
    else:
        logger.warn('Ignoring {}'.format(path))
def is_installable_file(path):
    """Determine if a path can potentially be installed"""
    from pip.utils import is_installable_dir
    from pip.utils.packaging import specifiers

    # Dict-like input: pull the actual path out of a 'file' or 'path' key.
    if hasattr(path, 'keys') and any(key for key in path.keys() if key in ['file', 'path']):
        if 'file' in path:
            path = urlparse(path['file']).path
        else:
            path = path['path']

    if not isinstance(path, six.string_types) or path == '*':
        return False

    # If the string starts with a valid specifier operator, test if it is a valid
    # specifier set before making a path object (to avoid breaking windows)
    if any(path.startswith(spec) for spec in '!=<>~'):
        try:
            specifiers.SpecifierSet(path)
        # If this is not a valid specifier, just move on and try it as a path
        except specifiers.InvalidSpecifier:
            pass
        else:
            return False

    if not os.path.exists(os.path.abspath(path)):
        return False

    candidate = Path(path)
    absolute = '{0}'.format(candidate.absolute())
    if candidate.is_dir() and is_installable_dir(absolute):
        return True
    if candidate.is_file() and is_archive_file(absolute):
        return True
    return False
def import_services_from_anywhere(self, items, base_dir, work_dir=None):
    """ Imports services from any of the supported sources, be it module names,
    individual files, directories or distutils2 packages (compressed or not).

    items    - iterable of item names (archive paths, directories or .py files)
    base_dir - base directory passed through to the per-kind import helpers
    work_dir - optional working directory used when decompressing archives
    """
    for item_name in items:
        logger.debug('About to import services from:[%s]', item_name)

        # Internal services live in the 'zato' namespace.
        is_internal = item_name.startswith('zato')

        # distutils2 archive, decompress and import from the newly created directory ..
        if is_archive_file(item_name):
            new_path = self.decompress(item_name, work_dir)
            self.import_services_from_dist2_directory(new_path)

        # .. a regular directory or a Distutils2 one ..
        elif os.path.isdir(item_name):
            try:
                self.import_services_from_directory(
                    item_name, base_dir, True)
            except NoDistributionFound as e:
                # Fix: format_exc() takes no exception argument - passing 'e'
                # would be interpreted as the traceback *limit*. ('as e' also
                # replaces the py2-only 'except X, e' syntax.)
                msg = 'Caught an exception e=[{}]'.format(format_exc())
                logger.log(TRACE1, msg)
                self.import_services_from_directory(
                    item_name, base_dir, False)

        # .. a .py/.pyw
        elif is_python_file(item_name):
            self.import_services_from_file(item_name, is_internal, base_dir)
def deploy_package(self, package_id, session):
    """ Fetches a package by ID and deploys it when its payload name looks
    like either a Python file or an archive; otherwise marks it ignored.
    """
    package = self.get_package(package_id, session)
    deployable = is_archive_file(package.payload_name) or is_python_file(package.payload_name)

    if deployable:
        self._deploy_package(session, package_id, package.payload_name, package.payload)
        return

    # This shouldn't really happen at all because the pickup notifier is to
    # filter such things out but life is full of surprises
    self._update_deployment_status(session, package_id, DEPLOYMENT_STATUS.IGNORED)
    self.logger.warn('Ignoring package id:[{}], payload_name:[{}], not a Python file nor an archive'.format(package.id, package.payload_name))
def from_line(cls, name, comes_from=None, prereleases=None):
    """Creates an InstallRequirement from a name, which might be a
    requirement, directory containing 'setup.py', filename, or URL.
    """
    url = None
    name = name.strip()
    req = None
    path = os.path.normpath(os.path.abspath(name))
    link = None

    if is_url(name):
        link = Link(name)
    elif (os.path.isdir(path) and
            (os.path.sep in name or name.startswith('.'))):
        # A local directory must contain setup.py to be installable.
        if not is_installable_dir(path):
            raise InstallationError(
                "Directory %r is not installable. File 'setup.py' not "
                "found." % name
            )
        link = Link(path_to_url(name))
    elif is_archive_file(path):
        if not os.path.isfile(path):
            # Archive-looking name with no file on disk: warn but continue,
            # the resulting link fails later with a clearer error.
            logger.warn(
                'Requirement %r looks like a filename, but the file does '
                'not exist',
                name
            )
        link = Link(path_to_url(name))

    # If the line has an egg= definition, but isn't editable, pull the
    # requirement out. Otherwise, assume the name is the req for the non
    # URL/path/archive case.
    if link and req is None:
        url = link.url_without_fragment
        # when fragment is None, this will become an 'unnamed' requirement
        req = link.egg_fragment

        # Handle relative file URLs
        if link.scheme == 'file' and re.search(r'\.\./', url):
            url = path_to_url(os.path.normpath(os.path.abspath(link.path)))

        # fail early for invalid or unsupported wheels
        if link.ext == wheel_ext:
            wheel = Wheel(link.filename)  # can raise InvalidWheelFilename
            if not wheel.supported():
                raise UnsupportedWheel(
                    "%s is not a supported wheel on this platform." %
                    wheel.filename
                )
    else:
        req = name

    return cls(req, comes_from, url=url, prereleases=prereleases)
def get_requirement(dep):
    """Pre-clean requirement strings passed to the requirements parser.

    Ensures that we can accept both local and relative paths, file and VCS URIs,
    remote URIs, and package names, and that we pass only valid requirement strings
    to the requirements parser. Performs necessary modifications to requirements
    object if the user input was a local relative path.

    :param str dep: A requirement line
    :returns: :class:`requirements.Requirement` object
    """
    path = None
    # Split out markers if they are present - similar to how pip does it
    # See pip.req.req_install.InstallRequirement.from_line
    if not any(dep.startswith(uri_prefix) for uri_prefix in SCHEME_LIST):
        marker_sep = ';'
    else:
        # For URIs a bare ';' may be part of the URI itself, so require '; '.
        marker_sep = '; '
    if marker_sep in dep:
        dep, markers = dep.split(marker_sep, 1)
        markers = markers.strip()
        if not markers:
            markers = None
    else:
        markers = None
    # Strip extras from the requirement so we can make a properly parseable req
    dep, extras = pip.req.req_install._strip_extras(dep)
    # Only operate on local, existing, non-URI formatted paths
    if (is_file(dep) and isinstance(dep, six.string_types) and
            not any(dep.startswith(uri_prefix) for uri_prefix in SCHEME_LIST)):
        dep_path = Path(dep)
        # Only parse if it is a file or an installable dir
        if dep_path.is_file() or (dep_path.is_dir() and pip.utils.is_installable_dir(dep)):
            dep_link = Link(dep_path.absolute().as_uri())
            if dep_path.is_dir() or dep_link.is_wheel or is_archive_file(dep_path.as_posix()):
                # Remember the (possibly relative) path so it can be restored
                # on the parsed requirement below.
                if dep_path.is_absolute() or dep_path.as_posix() == '.':
                    path = dep_path.as_posix()
                else:
                    path = get_converted_relative_path(dep)
                dep = dep_link.egg_fragment if dep_link.egg_fragment else dep_link.url_without_fragment
    req = [r for r in requirements.parse(dep)][0]
    # If the result is a local file with a URI and we have a local path, unset the URI
    # and set the path instead
    if path and not req.path:
        req.path = path
        req.uri = None
        req.local_file = True
    if markers:
        req.markers = markers
    if extras:
        # Bizarrely this is also what pip does...
        req.extras = [r for r in requirements.parse('fakepkg{0}'.format(extras))][0].extras
    return req
def _deploy_package(self, session, package_id, payload_name, payload):
    """ Deploy a package, either a plain Python file or an archive, and update
    the deployment status.
    """
    work_dir = self.server.hot_deploy_config.current_work_dir

    # Dispatch on the payload type - anything unrecognized counts as failure.
    deployed = False
    if is_python_file(payload_name):
        target = os.path.join(work_dir, payload_name)
        deployed = self._deploy_file(work_dir, payload, target)
    elif is_archive_file(payload_name):
        deployed = self._deploy_archive(work_dir, payload, payload_name)

    if deployed:
        self._update_deployment_status(session, package_id, DEPLOYMENT_STATUS.DEPLOYED)
        self.logger.info('Uploaded package id:[{}], payload_name:[{}]'.format(package_id, payload_name))
    else:
        self.logger.warn('Package id:[{}], payload_name:[{}] has not been deployed'.format(package_id, payload_name))
def hot_deploy(parallel_server, file_name, path, delete_path=True):
    """ Hot-deploys a package if it looks like a Python module or an archive
    which might contain a Distutils2 distribution.

    Returns True on success, None when the file was ignored.
    """
    if is_python_file(file_name) or is_archive_file(file_name):
        logger.debug("About to hot-deploy [{}]".format(path))
        now = datetime.utcnow()
        di = dumps(deployment_info("hot-deploy", file_name, now.isoformat(), path))

        # Fix: read the payload through a context manager so the file handle
        # is closed deterministically instead of being leaked.
        with open(path, "rb") as payload_file:
            payload = payload_file.read()

        # Insert the package into the DB ..
        package_id = parallel_server.odb.hot_deploy(now, di, file_name, payload, parallel_server.id)

        # .. and notify all the servers they're to pick up a delivery
        parallel_server.notify_new_package(package_id)

        if delete_path:
            _os_remove(path)

        return True
    else:
        logger.warn("Ignoring {}".format(path))
def is_installable_file(path):
    """Determine if a path can potentially be installed"""
    # Accept dict-like input carrying the path under a 'file' or 'path' key.
    if hasattr(path, 'keys') and any(key for key in path.keys() if key in ['file', 'path']):
        if 'file' in path:
            path = urlparse(path['file']).path
        else:
            path = path['path']

    if not isinstance(path, six.string_types) or path == '*':
        return False

    # If the string starts with a valid specifier operator, test if it is a valid
    # specifier set before making a path object (to avoid breaking windows)
    if any(path.startswith(spec) for spec in '!=<>~'):
        try:
            pip.utils.packaging.specifiers.SpecifierSet(path)
        # If this is not a valid specifier, just move on and try it as a path
        except pip.utils.packaging.specifiers.InvalidSpecifier:
            pass
        else:
            return False

    candidate = Path(path)
    if not candidate.exists():
        return False

    link = Link(candidate.resolve().as_uri())
    absolute = '{0}'.format(candidate.absolute())
    file_installable = candidate.is_file() and (is_archive_file(absolute) or link.is_wheel)
    dir_installable = candidate.is_dir() and pip.utils.is_installable_dir(candidate.as_posix())
    return file_installable or dir_installable
def hot_deploy(parallel_server, file_name, path, delete_path=True, notify=True):
    """ Hot-deploys a package if it looks like a Python module or archive.

    Returns the new package's ID on success, None when the file was ignored.
    """
    if is_python_file(file_name) or is_archive_file(file_name):
        logger.debug('About to hot-deploy [{}]'.format(path))
        now = datetime.utcnow()
        di = dumps(deployment_info('hot-deploy', file_name, now.isoformat(), path))

        # Fix: read the payload through a context manager so the file handle
        # is closed deterministically instead of being leaked.
        with open(path, 'rb') as payload_file:
            payload = payload_file.read()

        # Insert the package into the DB ..
        package_id = parallel_server.odb.hot_deploy(
            now, di, file_name, payload, parallel_server.id)

        # .. and optionally notify all the servers they're to pick up a delivery
        if notify:
            parallel_server.notify_new_package(package_id)

        if delete_path:
            _os_remove(path)

        return package_id
    else:
        logger.warn('Ignoring {}'.format(path))
def _deploy_package(self, session, package_id, payload_name, payload):
    """ Deploy a package, either a plain Python file or an archive, and update
    the deployment status.
    """
    work_dir = self.server.hot_deploy_config.current_work_dir

    # Only two payload kinds are deployable; everything else fails below.
    ok = False
    if is_python_file(payload_name):
        destination = os.path.join(work_dir, payload_name)
        ok = self._deploy_file(work_dir, payload, destination)
    elif is_archive_file(payload_name):
        ok = self._deploy_archive(work_dir, payload, payload_name)

    if ok:
        self._update_deployment_status(session, package_id, DEPLOYMENT_STATUS.DEPLOYED)
        msg = 'Uploaded package id:[{}], payload_name:[{}]'.format(
            package_id, payload_name)
        self.logger.info(msg)
    else:
        msg = 'Package id:[{}], payload_name:[{}] has not been deployed'.format(
            package_id, payload_name)
        self.logger.warn(msg)
def from_line(cls, name, comes_from=None, isolated=False, options=None,
              wheel_cache=None, constraint=False):
    """Creates an InstallRequirement from a name, which might be a
    requirement, directory containing 'setup.py', filename, or URL.
    """
    from pip.index import Link

    # URLs use '; ' as the marker separator because a bare ';' can occur
    # inside a URL; plain requirement lines use ';'.
    if is_url(name):
        marker_sep = '; '
    else:
        marker_sep = ';'
    if marker_sep in name:
        name, markers = name.split(marker_sep, 1)
        markers = markers.strip()
        if not markers:
            markers = None
    else:
        markers = None
    name = name.strip()
    req = None
    path = os.path.normpath(os.path.abspath(name))
    link = None
    extras = None

    if is_url(name):
        link = Link(name)
    else:
        p, extras = _strip_extras(path)
        if (os.path.isdir(p) and
                (os.path.sep in name or name.startswith('.'))):
            if not is_installable_dir(p):
                raise InstallationError(
                    "Directory %r is not installable. File 'setup.py' "
                    "not found." % name)
            link = Link(path_to_url(p))
        elif is_archive_file(p):
            if not os.path.isfile(p):
                logger.warning(
                    'Requirement %r looks like a filename, but the '
                    'file does not exist', name)
            link = Link(path_to_url(p))

    # it's a local file, dir, or url
    if link:
        # Handle relative file URLs
        if link.scheme == 'file' and re.search(r'\.\./', link.url):
            link = Link(
                path_to_url(os.path.normpath(os.path.abspath(link.path))))
        # wheel file
        if link.is_wheel:
            wheel = Wheel(link.filename)  # can raise InvalidWheelFilename
            if not wheel.supported():
                raise UnsupportedWheel(
                    "%s is not a supported wheel on this platform." %
                    wheel.filename)
            req = "%s==%s" % (wheel.name, wheel.version)
        else:
            # set the req to the egg fragment. when it's not there, this
            # will become an 'unnamed' requirement
            req = link.egg_fragment
    # a requirement specifier
    else:
        req = name

    options = options if options else {}
    res = cls(req, comes_from, link=link, markers=markers,
              isolated=isolated, options=options,
              wheel_cache=wheel_cache, constraint=constraint)

    if extras:
        # Parse the extras through a placeholder name: pkg_resources cannot
        # parse a bare '[extra1,extra2]' group on its own.
        res.extras = pkg_resources.Requirement.parse(
            '__placeholder__' + extras).extras

    return res
def from_line(
        cls, name, comes_from=None, isolated=False, options=None,
        wheel_cache=None, constraint=False):
    """Creates an InstallRequirement from a name, which might be a
    requirement, directory containing 'setup.py', filename, or URL.
    """
    from pip.index import Link

    # URLs use '; ' as the marker separator because a bare ';' can occur
    # inside a URL; plain requirement lines use ';'.
    if is_url(name):
        marker_sep = '; '
    else:
        marker_sep = ';'
    if marker_sep in name:
        name, markers = name.split(marker_sep, 1)
        markers = markers.strip()
        if not markers:
            markers = None
    else:
        markers = None
    name = name.strip()
    req = None
    path = os.path.normpath(os.path.abspath(name))
    link = None
    extras = None

    if is_url(name):
        link = Link(name)
    else:
        p, extras = _strip_extras(path)
        if (os.path.isdir(p) and
                (os.path.sep in name or name.startswith('.'))):
            if not is_installable_dir(p):
                raise InstallationError(
                    "Directory %r is not installable. File 'setup.py' "
                    "not found." % name
                )
            link = Link(path_to_url(p))
        elif is_archive_file(p):
            if not os.path.isfile(p):
                logger.warning(
                    'Requirement %r looks like a filename, but the '
                    'file does not exist',
                    name
                )
            link = Link(path_to_url(p))

    # it's a local file, dir, or url
    if link:
        # Normalize URLs
        link.normalize()

        # wheel file
        if link.is_wheel:
            wheel = Wheel(link.filename)  # can raise InvalidWheelFilename
            req = "%s==%s" % (wheel.name, wheel.version)
        else:
            # set the req to the egg fragment. when it's not there, this
            # will become an 'unnamed' requirement
            req = link.egg_fragment
    # a requirement specifier
    else:
        req = name

    options = options if options else {}
    res = cls(req, comes_from, link=link, markers=markers,
              isolated=isolated, options=options,
              wheel_cache=wheel_cache, constraint=constraint)

    if extras:
        # Parse the extras through a placeholder name: a bare extras group
        # is not a valid requirement string by itself.
        res.extras = Requirement('placeholder' + extras).extras

    return res
def from_line(cls, name, comes_from=None, isolated=False, options=None,
              wheel_cache=None, constraint=False):
    """Creates an InstallRequirement from a name, which might be a
    requirement, directory containing 'setup.py', filename, or URL.
    """
    from pip.index import Link

    # URLs use '; ' as the marker separator because a bare ';' can occur
    # inside a URL; plain requirement lines use ';'.
    if is_url(name):
        marker_sep = '; '
    else:
        marker_sep = ';'
    if marker_sep in name:
        name, markers = name.split(marker_sep, 1)
        markers = markers.strip()
        if not markers:
            markers = None
        else:
            markers = Marker(markers)
    else:
        markers = None
    name = name.strip()
    req = None
    path = os.path.normpath(os.path.abspath(name))
    link = None
    extras = None

    if is_url(name):
        link = Link(name)
    else:
        p, extras = _strip_extras(path)
        if (os.path.isdir(p) and
                (os.path.sep in name or name.startswith('.'))):
            if not is_installable_dir(p):
                raise InstallationError(
                    "Directory %r is not installable. File 'setup.py' "
                    "not found." % name)
            link = Link(path_to_url(p))
        elif is_archive_file(p):
            if not os.path.isfile(p):
                logger.warning(
                    'Requirement %r looks like a filename, but the '
                    'file does not exist', name)
            link = Link(path_to_url(p))

    # it's a local file, dir, or url
    if link:
        # Handle relative file URLs
        if link.scheme == 'file' and re.search(r'\.\./', link.url):
            link = Link(
                path_to_url(os.path.normpath(os.path.abspath(link.path))))
        # wheel file
        if link.is_wheel:
            wheel = Wheel(link.filename)  # can raise InvalidWheelFilename
            req = "%s==%s" % (wheel.name, wheel.version)
        else:
            # set the req to the egg fragment. when it's not there, this
            # will become an 'unnamed' requirement
            req = link.egg_fragment
    # a requirement specifier
    else:
        req = name

    if extras:
        # A bare extras group is not a valid requirement on its own, so parse
        # it attached to a placeholder name.
        extras = Requirement("placeholder" + extras.lower()).extras
    else:
        extras = ()

    if req is not None:
        try:
            req = Requirement(req)
        except InvalidRequirement:
            # Build a helpful message explaining the most likely mistakes.
            if os.path.sep in req:
                add_msg = "It looks like a path."
                add_msg += deduce_helpful_msg(req)
            elif '=' in req and not any(op in req for op in operators):
                add_msg = "= is not a valid operator. Did you mean == ?"
            else:
                add_msg = traceback.format_exc()
            raise InstallationError("Invalid requirement: '%s'\n%s" % (req, add_msg))

    return cls(
        req, comes_from, link=link, markers=markers,
        isolated=isolated,
        options=options if options else {},
        wheel_cache=wheel_cache,
        constraint=constraint,
        extras=extras,
    )
def index_alarm(self, event):
    """ Queue the directory of a newly observed archive and (re)arm the
    real-time indexing timer.
    """
    pathname = event.pathname
    if not is_archive_file(pathname):
        return
    logger.debug("Queuing indexing")
    self.queue.add(os.path.dirname(pathname))
    # setitimer replaces any pending timer, so rapid events coalesce into a
    # single SIGALRM 5 seconds after the last one.
    signal.setitimer(signal.ITIMER_REAL, 5)
def from_line(cls, name, comes_from=None, isolated=False, options=None, wheel_cache=None, constraint=False):
    """Creates an InstallRequirement from a name, which might be a
    requirement, directory containing 'setup.py', filename, or URL.
    """
    from pip.index import Link

    # URLs use '; ' as the marker separator because a bare ';' can occur
    # inside a URL; plain requirement lines use ';'.
    if is_url(name):
        marker_sep = "; "
    else:
        marker_sep = ";"
    if marker_sep in name:
        name, markers = name.split(marker_sep, 1)
        markers = markers.strip()
        if not markers:
            markers = None
    else:
        markers = None
    name = name.strip()
    req = None
    path = os.path.normpath(os.path.abspath(name))
    link = None
    extras = None

    if is_url(name):
        link = Link(name)
    else:
        p, extras = _strip_extras(path)
        if os.path.isdir(p) and (os.path.sep in name or name.startswith(".")):
            if not is_installable_dir(p):
                raise InstallationError("Directory %r is not installable. File 'setup.py' "
                                        "not found." % name)
            link = Link(path_to_url(p))
        elif is_archive_file(p):
            if not os.path.isfile(p):
                logger.warning("Requirement %r looks like a filename, but the "
                               "file does not exist", name)
            link = Link(path_to_url(p))

    # it's a local file, dir, or url
    if link:
        # Handle relative file URLs
        if link.scheme == "file" and re.search(r"\.\./", link.url):
            link = Link(path_to_url(os.path.normpath(os.path.abspath(link.path))))
        # wheel file
        if link.is_wheel:
            wheel = Wheel(link.filename)  # can raise InvalidWheelFilename
            if not wheel.supported():
                raise UnsupportedWheel("%s is not a supported wheel on this platform." % wheel.filename)
            req = "%s==%s" % (wheel.name, wheel.version)
        else:
            # set the req to the egg fragment. when it's not there, this
            # will become an 'unnamed' requirement
            req = link.egg_fragment
    # a requirement specifier
    else:
        req = name

    options = options if options else {}
    res = cls(
        req,
        comes_from,
        link=link,
        markers=markers,
        isolated=isolated,
        options=options,
        wheel_cache=wheel_cache,
        constraint=constraint,
    )
    if extras:
        # Parse the extras through a placeholder name: pkg_resources cannot
        # parse a bare '[extra1,extra2]' group on its own.
        res.extras = pkg_resources.Requirement.parse("__placeholder__" + extras).extras
    return res
def url_is_download_archive_url( cls, path ):
    """ True if *path* names an archive, allowing a trailing '/download' segment. """
    # Some hosts serve archives as '<archive-name>/download' - in that case
    # the archive extension is on the parent segment.
    base, last_segment = os.path.split( path )
    candidate = base if last_segment == "download" else path
    return pip_download.is_archive_file( candidate )
def url_is_download_archive_url(cls, path):
    """ True if *path* names an archive, allowing a trailing '/download' segment. """
    head, tail = os.path.split(path)
    if tail == "download":
        # '<archive-name>/download' style URL - the archive extension sits on
        # the segment before 'download'.
        path = head
    return pip_download.is_archive_file(path)
def from_line(cls, name, comes_from=None, isolated=False):
    """Creates an InstallRequirement from a name, which might be a
    requirement, directory containing 'setup.py', filename, or URL.
    """
    from pip.index import Link

    url = None
    # URLs use '; ' as the marker separator because a bare ';' can occur
    # inside a URL; plain requirement lines use ';'.
    if is_url(name):
        marker_sep = '; '
    else:
        marker_sep = ';'
    if marker_sep in name:
        name, markers = name.split(marker_sep, 1)
        markers = markers.strip()
        if not markers:
            markers = None
    else:
        markers = None
    name = name.strip()
    req = None
    path = os.path.normpath(os.path.abspath(name))
    link = None

    if is_url(name):
        link = Link(name)
    elif (os.path.isdir(path) and
            (os.path.sep in name or name.startswith('.'))):
        if not is_installable_dir(path):
            raise InstallationError(
                "Directory %r is not installable. File 'setup.py' not "
                "found." % name
            )
        link = Link(path_to_url(name))
    elif is_archive_file(path):
        if not os.path.isfile(path):
            logger.warning(
                'Requirement %r looks like a filename, but the file does '
                'not exist',
                name
            )
        link = Link(path_to_url(name))

    # it's a local file, dir, or url
    if link:
        url = link.url
        # Handle relative file URLs
        if link.scheme == 'file' and re.search(r'\.\./', url):
            url = path_to_url(os.path.normpath(os.path.abspath(link.path)))
        # wheel file
        if link.ext == wheel_ext:
            wheel = Wheel(link.filename)  # can raise InvalidWheelFilename
            if not wheel.supported():
                raise UnsupportedWheel(
                    "%s is not a supported wheel on this platform." %
                    wheel.filename
                )
            req = "%s==%s" % (wheel.name, wheel.version)
        else:
            # set the req to the egg fragment. when it's not there, this
            # will become an 'unnamed' requirement
            req = link.egg_fragment
    # a requirement specifier
    else:
        req = name

    return cls(req, comes_from, url=url, markers=markers, isolated=isolated)
def index(directory, only=None):
    """ Build simple-index style HTML pages for the archives found under
    *directory*.

    directory - root directory whose immediate subdirectories hold archives
    only      - optional subset of subdirectories to (re)scan; when None,
                every subdirectory of *directory* is scanned

    NOTE: Python 2 code (dict.iteritems, map returning a list).
    """
    logger.info("Indexing {0}".format(directory))
    # All immediate subdirectories - these become the top-level index links.
    dirs = set()
    for d in os.listdir(directory):
        full_dir = os.path.join(directory, d)
        if os.path.isdir(full_dir):
            dirs.add(full_dir)
    if only is None:
        only = dirs
    # Maps package name -> list of [version, archive_file_name] pairs.
    dist_info = {}
    for dist_dir in only:
        try:
            files = os.listdir(dist_dir)
        except OSError:
            logger.info("Unable to index {0}".format(dist_dir))
            continue
        # Collect all distributions
        for f in files:
            if is_archive_file(f):
                full_path = os.path.join(dist_dir, f)
                try:
                    name, version = metadata(full_path)
                except (tarfile.ReadError, zipfile.BadZipfile):
                    logger.info("Corrupted archive: {0}".format(full_path))
                    continue
                except ValueError:
                    logger.error(
                        "Unable to extract info: {0}".format(full_path)
                    )
                    continue
                dist_info.setdefault(name, []).append([version, f])
    # Sort by version numbers
    for package, versions in dist_info.iteritems():
        # int_maybe keeps non-numeric version parts comparable.
        dist_info[package] = sorted(
            versions,
            key=lambda d: map(int_maybe, d[0].split('.')),
        )
    # Write index
    logger.debug("Writing index")
    index_links = "\n".join((
        INDEX_LINK.format(dist=dist.rsplit('/')[-1])
        for dist in sorted(dirs)
    ))
    with open(os.path.join(directory, 'index.html'), 'w') as index:
        index.write(INDEX_PAGE.format(links=index_links))
    # One page per package listing all its archive versions.
    for package, version in dist_info.iteritems():
        logger.debug("Writing index for {0}".format(package))
        package_dir = os.path.join(directory, package)
        mkdir(package_dir)
        links = "\n".join((
            DIST.format(package=v[1])
            for v in version
        ))
        with open(os.path.join(package_dir, 'index.html'), 'w') as index:
            index.write(DIST_PAGE.format(dist=package, links=links))
    logger.info("Indexing done")
def get_local_directory( self, cuppa_env, location, sub_dir, branch, full_url ):
    """ Resolve *location* (local path, archive, HTTP download or VCS URL) to a
    local directory under the download root, fetching/extracting as needed.

    Also sets self._local_folder as a side effect. Returns the local directory
    path, or None when the location could not be resolved.
    """
    offline = cuppa_env['offline']
    local_directory = None

    base = cuppa_env['download_root']
    if not os.path.isabs( base ):
        base = os.path.join( cuppa_env['working_dir'], base )

    if location.startswith( 'file:' ):
        location = pip_download.url_to_path( location )

    if not pip_download.is_url( location ):
        # Local filesystem location: either an archive to extract or a plain
        # directory/file to use in place.
        if pip_download.is_archive_file( location ):
            self._local_folder = self.folder_name_from_path( location, cuppa_env )
            local_directory = os.path.join( base, self._local_folder )
            local_dir_with_sub_dir = os.path.join( local_directory, sub_dir and sub_dir or "" )
            if os.path.exists( local_dir_with_sub_dir ):
                try:
                    # rmdir only succeeds on an empty directory - a non-empty
                    # one means a previous extraction is already in place.
                    os.rmdir( local_dir_with_sub_dir )
                except:
                    return local_directory
            self.extract( location, local_dir_with_sub_dir )
            logger.debug( "(local archive) Location = [{}]".format( as_info( location ) ) )
            logger.debug( "(local archive) Local folder = [{}]".format( as_info( self._local_folder ) ) )
        else:
            local_directory = branch and os.path.join( location, branch ) or location
            self._local_folder = self.folder_name_from_path( location, cuppa_env )
            logger.debug( "(local file) Location = [{}]".format( as_info( location ) ) )
            logger.debug( "(local file) Local folder = [{}]".format( as_info( self._local_folder ) ) )
        return local_directory
    else:
        self._local_folder = self.folder_name_from_path( full_url, cuppa_env )
        local_directory = os.path.join( base, self._local_folder )
        if full_url.scheme.startswith( 'http' ) and self.url_is_download_archive_url( full_url.path ):
            # HTTP(S) archive download.
            logger.debug( "[{}] is an archive download".format( as_info( location ) ) )
            local_dir_with_sub_dir = os.path.join( local_directory, sub_dir and sub_dir or "" )
            # First we check to see if we already downloaded and extracted this archive before
            if os.path.exists( local_dir_with_sub_dir ):
                try:
                    # If not empty this will fail
                    os.rmdir( local_dir_with_sub_dir )
                except:
                    # Not empty so we'll return this as the local_directory
                    logger.debug( "(already present) Location = [{}]".format( as_info( location ) ) )
                    logger.debug( "(already present) Local folder = [{}]".format( as_info( str(self._local_folder) ) ) )
                    return local_directory
            if cuppa_env['dump'] or cuppa_env['clean']:
                return local_directory
            # If not we then check to see if we cached the download
            cached_archive = self.get_cached_archive( cuppa_env['cache_root'], self._local_folder )
            if cached_archive:
                logger.debug( "Cached archive [{}] found for [{}]".format( as_info( cached_archive ), as_info( location ) ) )
                self.extract( cached_archive, local_dir_with_sub_dir )
            else:
                logger.info( "Downloading [{}]...".format( as_info( location ) ) )
                try:
                    report_hook = None
                    if logger.isEnabledFor( logging.INFO ):
                        report_hook = ReportDownloadProgress()
                    filename, headers = urllib.urlretrieve( location, reporthook=report_hook )
                    name, extension = os.path.splitext( filename )
                    logger.info( "[{}] successfully downloaded to [{}]".format( as_info( location ), as_info( filename ) ) )
                    self.extract( filename, local_dir_with_sub_dir )
                    if cuppa_env['cache_root']:
                        # Keep a copy so subsequent builds skip the download.
                        cached_archive = os.path.join( cuppa_env['cache_root'], self._local_folder )
                        logger.debug( "Caching downloaded file as [{}]".format( as_info( cached_archive ) ) )
                        shutil.copyfile( filename, cached_archive )
                except urllib.ContentTooShortError as error:
                    logger.error( "Download of [{}] failed with error [{}]".format( as_error( location ), as_error( str(error) ) ) )
                    raise LocationException( error )
        elif '+' in full_url.scheme:
            # VCS URL of the form '<vcs>+<scheme>://...' - delegate to pip's
            # VCS backends to clone/checkout or update.
            vc_type = location.split('+', 1)[0]
            backend = pip_vcs.vcs.get_backend( vc_type )
            if backend:
                vcs_backend = backend( self.expand_secret( location ) )
                local_dir_with_sub_dir = os.path.join( local_directory, sub_dir and sub_dir or "" )
                if cuppa_env['dump'] or cuppa_env['clean']:
                    return local_directory
                if os.path.exists( local_directory ):
                    # Working copy already exists - update it unless offline.
                    url, repository, branch, remote, revision = self.get_info( location, local_dir_with_sub_dir, full_url, vc_type )
                    rev_options = self.get_rev_options( vc_type, vcs_backend, local_remote=remote )
                    version = self.ver_rev_summary( branch, revision, self._full_url.path )[0]
                    if not offline:
                        logger.info( "Updating [{}] in [{}]{} at [{}]".format(
                                as_info( location ),
                                as_notice( local_dir_with_sub_dir ),
                                ( rev_options and " on {}".format( as_notice( str(rev_options) ) ) or "" ),
                                as_info( version ) ) )
                        try:
                            update( vcs_backend, local_dir_with_sub_dir, rev_options )
                            logger.debug( "Successfully updated [{}]".format( as_info( location ) ) )
                        except pip_exceptions.PipError as error:
                            # A failed update is non-fatal - the existing
                            # working copy is still usable.
                            logger.warn( "Could not update [{}] in [{}]{} due to error [{}]".format(
                                    as_warning( location ),
                                    as_warning( local_dir_with_sub_dir ),
                                    ( rev_options and " at {}".format( as_warning( str(rev_options) ) ) or "" ),
                                    as_warning( str(error) ) ) )
                    else:
                        logger.debug( "Skipping update for [{}] as running in offline mode".format( as_info( location ) ) )
                else:
                    # No working copy yet - obtain one, retrying once.
                    rev_options = self.get_rev_options( vc_type, vcs_backend )
                    action = "Cloning"
                    if vc_type == "svn":
                        action = "Checking out"
                    max_attempts = 2
                    attempt = 1
                    while attempt <= max_attempts:
                        logger.info( "{} [{}] into [{}]{}".format(
                                action,
                                as_info( location ),
                                as_info( local_dir_with_sub_dir ),
                                attempt > 1 and "(attempt {})".format( str(attempt) ) or "" ) )
                        try:
                            vcs_backend.obtain( local_dir_with_sub_dir )
                            logger.debug( "Successfully retrieved [{}]".format( as_info( location ) ) )
                            break
                        except pip_exceptions.PipError as error:
                            attempt = attempt + 1
                            log_as = logger.warn
                            if attempt > max_attempts:
                                log_as = logger.error
                            log_as( "Could not retrieve [{}] into [{}]{} due to error [{}]".format(
                                    as_info( location ),
                                    as_notice( local_dir_with_sub_dir ),
                                    ( rev_options and " to {}".format( as_notice( str(rev_options) ) ) or ""),
                                    as_error( str(error) ) ) )
                            if attempt > max_attempts:
                                raise LocationException( str(error) )

        logger.debug( "(url path) Location = [{}]".format( as_info( location ) ) )
        logger.debug( "(url path) Local folder = [{}]".format( as_info( self._local_folder ) ) )
        return local_directory
def from_line(
        cls, name, comes_from=None, isolated=False, options=None,
        wheel_cache=None, constraint=False):
    """Creates an InstallRequirement from a name, which might be a
    requirement, directory containing 'setup.py', filename, or URL.

    Resolution order: environment markers are split off `name` first,
    then the remainder is classified as a URL, a local installable
    directory, a local archive file, or a plain requirement specifier.

    :param name: requirement line as typed by the user (may carry
        extras like ``pkg[extra]`` and/or a ``; marker`` suffix).
    :param comes_from: provenance passed through to the constructor
        (e.g. the requirements file this line came from).
    :param isolated: forwarded to the constructor unchanged.
    :param options: per-requirement options dict; ``None`` becomes ``{}``.
    :param wheel_cache: forwarded to the constructor unchanged.
    :param constraint: True when the line came from a constraints file.
    :raises InstallationError: for a non-installable directory or an
        unparseable requirement specifier.
    """
    from pip.index import Link

    # For URLs the marker separator must include a space ('; '), because
    # a bare ';' can legitimately appear inside a URL.
    if is_url(name):
        marker_sep = '; '
    else:
        marker_sep = ';'
    if marker_sep in name:
        # Split only on the first separator; everything after it is the
        # marker expression (empty string -> no markers at all).
        name, markers = name.split(marker_sep, 1)
        markers = markers.strip()
        if not markers:
            markers = None
        else:
            markers = Marker(markers)
    else:
        markers = None
    name = name.strip()
    req = None
    # NOTE: `path` is computed from the marker-stripped name; only used
    # in the non-URL branch below.
    path = os.path.normpath(os.path.abspath(name))
    link = None
    extras = None

    # Re-test is_url on the now-stripped name (deliberately evaluated a
    # second time after the marker split above).
    if is_url(name):
        link = Link(name)
    else:
        p, extras = _strip_extras(path)
        # Only treat it as a directory requirement when the user clearly
        # wrote a path (contains a separator or starts with '.').
        if (os.path.isdir(p) and
                (os.path.sep in name or name.startswith('.'))):
            if not is_installable_dir(p):
                raise InstallationError(
                    "Directory %r is not installable. File 'setup.py' "
                    "not found." % name
                )
            link = Link(path_to_url(p))
        elif is_archive_file(p):
            # Archive-looking names are accepted even when the file is
            # missing; we only warn, the failure surfaces later.
            if not os.path.isfile(p):
                logger.warning(
                    'Requirement %r looks like a filename, but the '
                    'file does not exist',
                    name
                )
            link = Link(path_to_url(p))

    # it's a local file, dir, or url
    if link:
        # Handle relative file URLs
        if link.scheme == 'file' and re.search(r'\.\./', link.url):
            link = Link(
                path_to_url(os.path.normpath(os.path.abspath(link.path))))
        # wheel file
        if link.is_wheel:
            wheel = Wheel(link.filename)  # can raise InvalidWheelFilename
            # Pin the requirement exactly to the wheel's own version.
            req = "%s==%s" % (wheel.name, wheel.version)
        else:
            # set the req to the egg fragment. when it's not there, this
            # will become an 'unnamed' requirement
            req = link.egg_fragment

    # a requirement specifier
    else:
        req = name

    if extras:
        # Parse the extras via a throwaway requirement string; only the
        # parsed extras set is kept.
        extras = Requirement("placeholder" + extras.lower()).extras
    else:
        extras = ()
    if req is not None:
        try:
            # Rebind `req` from the raw string to a parsed Requirement.
            req = Requirement(req)
        except InvalidRequirement:
            # Build the most helpful error message we can for the user.
            if os.path.sep in req:
                add_msg = "It looks like a path."
                add_msg += deduce_helpful_msg(req)
            elif '=' in req and not any(
                    op in req for op in operators):
                add_msg = "= is not a valid operator. Did you mean == ?"
            else:
                add_msg = traceback.format_exc()
            raise InstallationError(
                "Invalid requirement: '%s'\n%s" % (req, add_msg))

    return cls(
        req, comes_from, link=link, markers=markers,
        isolated=isolated,
        options=options if options else {},
        wheel_cache=wheel_cache,
        constraint=constraint,
        extras=extras,
    )
def get_local_directory(self, cuppa_env, location, sub_dir, branch, full_url):
    """Resolve *location* to a local directory, fetching it if needed.

    Depending on what *location* is, this method either:

    * extracts a local archive file into the download area,
    * uses a plain local path directly (joined with *branch* if given),
    * downloads (with caching) and extracts a remote http(s) archive, or
    * clones/checks out or updates a VCS working copy when the URL
      scheme contains '+' (e.g. ``git+...``, ``svn+...``).

    Side effect: ``self._local_folder`` is set to the folder name
    derived from the location.

    :param cuppa_env: mapping providing at least 'offline',
        'download_root', 'working_dir', 'dump', 'clean' and 'cache_root'.
    :param location: URL or local path identifying the dependency.
    :param sub_dir: optional sub-directory under the local directory.
    :param branch: optional branch; joined onto plain local paths, and
        rebound from ``self.get_info(...)`` in the VCS-update path.
    :param full_url: parse result for *location* (``.scheme``/``.path``
        are read) — presumably a ``urlparse`` result; confirm at caller.
    :returns: the resolved local directory path.
    :raises LocationException: when a download or VCS retrieval fails.
    """
    offline = cuppa_env['offline']
    local_directory = None

    base = cuppa_env['download_root']
    if not os.path.isabs(base):
        base = os.path.join(cuppa_env['working_dir'], base)

    # file: URLs are converted to plain paths and handled as local.
    if location.startswith('file:'):
        location = pip_download.url_to_path(location)

    if not pip_download.is_url(location):

        if pip_download.is_archive_file(location):

            self._local_folder = self.folder_name_from_path(location, cuppa_env)
            local_directory = os.path.join(base, self._local_folder)

            local_dir_with_sub_dir = os.path.join(
                    local_directory, sub_dir and sub_dir or "")

            if os.path.exists(local_dir_with_sub_dir):
                try:
                    # rmdir succeeds only on an empty directory: an empty
                    # leftover means the previous extraction did not
                    # complete, so fall through and extract again.
                    os.rmdir(local_dir_with_sub_dir)
                except OSError:
                    # Non-empty (or inaccessible): treat as already
                    # extracted.  (Was a bare `except:`, which also
                    # swallowed KeyboardInterrupt/SystemExit.)
                    return local_directory

            self.extract(location, local_dir_with_sub_dir)
            logger.debug("(local archive) Location = [{}]".format(
                    as_info(location)))
            logger.debug("(local archive) Local folder = [{}]".format(
                    as_info(self._local_folder)))

        else:
            # Plain local path: use it in place (no copy into `base`).
            local_directory = branch and os.path.join(location, branch) or location
            self._local_folder = self.folder_name_from_path(location, cuppa_env)

            logger.debug("(local file) Location = [{}]".format(
                    as_info(location)))
            logger.debug("(local file) Local folder = [{}]".format(
                    as_info(self._local_folder)))

        return local_directory
    else:

        self._local_folder = self.folder_name_from_path(full_url, cuppa_env)
        local_directory = os.path.join(base, self._local_folder)

        if full_url.scheme.startswith('http') \
                and self.url_is_download_archive_url(full_url.path):
            logger.debug("[{}] is an archive download".format(
                    as_info(location)))

            local_dir_with_sub_dir = os.path.join(
                    local_directory, sub_dir and sub_dir or "")

            # First we check to see if we already downloaded and extracted this archive before
            if os.path.exists(local_dir_with_sub_dir):
                try:
                    # If not empty this will fail
                    os.rmdir(local_dir_with_sub_dir)
                except OSError:
                    # Not empty so we'll return this as the local_directory
                    # (narrowed from a bare `except:` — rmdir raises OSError)
                    logger.debug("(already present) Location = [{}]".format(
                            as_info(location)))
                    logger.debug("(already present) Local folder = [{}]".format(
                            as_info(str(self._local_folder))))
                    return local_directory

            if cuppa_env['dump'] or cuppa_env['clean']:
                return local_directory

            # If not we then check to see if we cached the download
            cached_archive = self.get_cached_archive(
                    cuppa_env['cache_root'], self._local_folder)
            if cached_archive:
                logger.debug("Cached archive [{}] found for [{}]".format(
                        as_info(cached_archive), as_info(location)))
                self.extract(cached_archive, local_dir_with_sub_dir)
            else:
                logger.info("Downloading [{}]...".format(as_info(location)))
                try:
                    report_hook = None
                    # Only pay for progress reporting when INFO is on.
                    if logger.isEnabledFor(logging.INFO):
                        report_hook = ReportDownloadProgress()
                    # NOTE: urllib.urlretrieve is Python-2-only API.
                    filename, headers = urllib.urlretrieve(
                            location, reporthook=report_hook)
                    name, extension = os.path.splitext(filename)
                    logger.info("[{}] successfully downloaded to [{}]".format(
                            as_info(location), as_info(filename)))
                    self.extract(filename, local_dir_with_sub_dir)
                    if cuppa_env['cache_root']:
                        # Keep a copy so future builds skip the download.
                        cached_archive = os.path.join(
                                cuppa_env['cache_root'], self._local_folder)
                        logger.debug("Caching downloaded file as [{}]".format(
                                as_info(cached_archive)))
                        shutil.copyfile(filename, cached_archive)
                except urllib.ContentTooShortError as error:
                    logger.error("Download of [{}] failed with error [{}]".format(
                            as_error(location), as_error(str(error))))
                    raise LocationException(error)

        elif '+' in full_url.scheme:
            # Scheme like "git+https": split off the VCS type and hand
            # the rest to pip's VCS backend machinery.
            vc_type = location.split('+', 1)[0]
            backend = pip_vcs.vcs.get_backend(vc_type)
            if backend:
                vcs_backend = backend(self.expand_secret(location))
                local_dir_with_sub_dir = os.path.join(
                        local_directory, sub_dir and sub_dir or "")

                if cuppa_env['dump'] or cuppa_env['clean']:
                    return local_directory

                if os.path.exists(local_directory):
                    # Existing checkout: update it (unless offline).
                    url, repository, branch, remote, revision = self.get_info(
                            location, local_dir_with_sub_dir, full_url, vc_type)
                    rev_options = self.get_rev_options(
                            vc_type, vcs_backend, local_remote=remote)
                    version = self.ver_rev_summary(
                            branch, revision, self._full_url.path)[0]
                    if not offline:
                        logger.info("Updating [{}] in [{}]{} at [{}]".format(
                                as_info(location),
                                as_notice(local_dir_with_sub_dir),
                                (rev_options and " on {}".format(
                                        as_notice(str(rev_options))) or ""),
                                as_info(version)))
                        try:
                            update(vcs_backend, local_dir_with_sub_dir, rev_options)
                            logger.debug("Successfully updated [{}]".format(
                                    as_info(location)))
                        except pip_exceptions.PipError as error:
                            # A failed update is non-fatal: we still have
                            # a usable (if stale) checkout.
                            logger.warn(
                                    "Could not update [{}] in [{}]{} due to error [{}]".format(
                                    as_warning(location),
                                    as_warning(local_dir_with_sub_dir),
                                    (rev_options and " at {}".format(
                                            as_warning(str(rev_options))) or ""),
                                    as_warning(str(error))))
                    else:
                        logger.debug(
                                "Skipping update for [{}] as running in offline mode".format(
                                as_info(location)))
                else:
                    # Fresh retrieval: clone (or "check out" for svn),
                    # retrying once before giving up.
                    rev_options = self.get_rev_options(vc_type, vcs_backend)
                    action = "Cloning"
                    if vc_type == "svn":
                        action = "Checking out"
                    max_attempts = 2
                    attempt = 1
                    while attempt <= max_attempts:
                        logger.info("{} [{}] into [{}]{}".format(
                                action, as_info(location),
                                as_info(local_dir_with_sub_dir),
                                attempt > 1 and "(attempt {})".format(
                                        str(attempt)) or ""))
                        try:
                            vcs_backend.obtain(local_dir_with_sub_dir)
                            logger.debug("Successfully retrieved [{}]".format(
                                    as_info(location)))
                            break
                        except pip_exceptions.PipError as error:
                            attempt = attempt + 1
                            # Escalate to error level on the final failure.
                            log_as = logger.warn
                            if attempt > max_attempts:
                                log_as = logger.error

                            log_as("Could not retrieve [{}] into [{}]{} due to error [{}]".format(
                                    as_info(location),
                                    as_notice(local_dir_with_sub_dir),
                                    (rev_options and " to {}".format(
                                            as_notice(str(rev_options))) or ""),
                                    as_error(str(error))))

                            if attempt > max_attempts:
                                raise LocationException(str(error))

        logger.debug("(url path) Location = [{}]".format(as_info(location)))
        logger.debug("(url path) Local folder = [{}]".format(
                as_info(self._local_folder)))

        return local_directory