def finalize_options(self): # log file activation and config if self.debug == 'y': logging.basicConfig(format='%(asctime)s%(levelname)s:%(message)s', level=logging.DEBUG, filename='wss_plugin.log') # load and import config file try: sys.path.append(self.pathConfig) self.configDict = __import__('config_file').config_info logging.info('Loading config_file was successful') except Exception as err: sys.exit("Can't import the config file." + err.message) # load proxy setting if exist if 'proxy' in self.configDict: self.proxySetting = self.configDict['proxy'] if 'index_url' in self.configDict: self.pkgIndex = PackageIndex(index_url=self.configDict['index_url']) self.projectCoordinates = Coordinates.create_project_coordinates(self.distribution) self.userEnvironment = pk_res.Environment(get_python_lib(), platform=None, python=None) distribution_specification = self.distribution.get_name() + "==" + self.distribution.get_version() distribution_requirement = pk_res.Requirement.parse(distribution_specification) # resolve all dependencies try: self.distDepend = pk_res.working_set.resolve([distribution_requirement], env=self.userEnvironment) self.distDepend.pop(0) logging.info("Finished resolving dependencies") except Exception as err: print "distribution was not found on this system, and is required by this application", err.message
def fetch_requirement(req, dest_dir, force_download):
    """Download the distribution satisfying *req* into *dest_dir*.

    When *force_download* is true, any index entries cached for the
    project are discarded first so a fresh download is forced; otherwise
    a normal fetch (with a full index scan) is performed.  Falls back to
    the location of an already-installed distribution when nothing could
    be fetched.  Returns the path/location as a string.
    """
    from setuptools.package_index import PackageIndex  # @Reimport
    from pkg_resources import working_set  # @Reimport # NOQA
    index = PackageIndex()
    if force_download:
        # Fix: original used a side-effecting list comprehension over the
        # py2-only xrange(); a plain loop is clearer and version-agnostic.
        while index[req.key]:
            index.remove(index[req.key][0])
        dist = index.download(req, dest_dir)
    else:
        dist = index.fetch_distribution(req, dest_dir, force_scan=True)
    location = getattr(dist, 'location', dist) if dist else ''
    # Fall back to whatever is already installed when nothing was fetched.
    return (location if location else working_set.resolve([req])[0].location)
def cache_package(spec, own_url):
    """Download, unpack, self-register and self-upload the package named
    by *spec* (a requirement string), cleaning up the scratch directory
    afterwards.

    Raises ArgumentError when *spec* is not a valid requirement spec and
    ValidationError when zero or multiple setup.py scripts are found.
    """
    try:
        spec = Requirement.parse(spec)
    except ValueError:
        raise ArgumentError(
            "Not a URL, existing file, or requirement spec: %r" % (spec, ))
    # Fix: create the temp dir *before* entering the try block so the
    # finally-clause cleanup can never hit an unbound `path` (the original
    # would raise NameError in `finally` if mkdtemp itself failed).
    path = tempfile.mkdtemp('.spynepi')
    try:
        # download and unpack source package
        logger.info("Downloading %r" % spec)
        dist = PackageIndex().fetch_distribution(spec, path,
                                                 force_scan=True, source=True)
        archive_path = dist.location
        logger.info("Unpacking %r" % archive_path)
        unpack_archive(dist.location, path)

        # generate pypirc if possible
        # Fix: has_key() is Python-2-only; `in` works on both versions.
        if 'HOME' in os.environ:
            _generate_pypirc(own_url)
        else:
            # FIXME: ??? No idea. Hopefully setuptools knows better.
            pass
            # raise NotImplementedError("$HOME not defined, .pypirc not found.")

        # find setup.py in package. plagiarized from setuptools.
        setups = glob(os.path.join(path, '*', 'setup.py'))
        if not setups:
            raise ValidationError(
                "Couldn't find a setup script in %r editable distribution: %r"
                % (spec, os.path.join(path, '*', 'setup.py')))
        if len(setups) > 1:
            raise ValidationError(
                "Multiple setup scripts found in %r editable distribution: %r"
                % (spec, setups))

        # self-register the package.
        lib_dir = os.path.dirname(setups[0])
        command = ["python", "setup.py", "register", "-r", REPO_NAME]
        logger.info('calling %r', command)
        subprocess.call(command, cwd=lib_dir, stdout=sys.stdout)

        # self-upload the package
        command = ["python", "-m", "spynepi.util.pypi.upload", archive_path]
        logger.info('calling %r', command)
        subprocess.call(command, cwd=lib_dir, stdin=sys.stdin,
                        stdout=sys.stdout)
    finally:
        shutil.rmtree(path)
def finalize_options(self):
    """Resolve platform-specific installer settings for the Cloud SDK.

    Windows gets the NSIS installer URL and its /S silent flag; every
    other platform gets the shell bootstrap script plus the curl/bash
    binaries needed to run it.
    """
    if platform.system() == "Windows":
        self.silent = "/S"
        self.gcloud_url = ("https://dl.google.com/dl/cloudsdk/channels/"
                           "rapid/GoogleCloudSDKInstaller.exe")
    else:
        self.curl = which('curl')
        self.bash = which('bash')
        self.gcloud_url = "https://sdk.cloud.google.com"
        self.silent = "--disable-prompts"
    # Index used later for dependency lookups.
    self.package_index = PackageIndex()
def grab_distrib(req, index=None, dest='.', search_pypi=True):
    """\
    Download distributions matching the given requirement string(s) from
    the given package index(es) into *dest* (current directory by
    default).  Unless *search_pypi* is False, the Python Package Index is
    consulted as a last resort when no listed index has a match.  This
    does NOT install anything.
    """
    # Normalise `index` into a list of index URLs.
    if index is None:
        index = []
    elif isinstance(index, basestring):
        index = [index]
    # else: assume it is already an iterable of index URLs.

    # Add PyPI as the final index to search unless disabled or present.
    if search_pypi and _pypi not in index and (_pypi + '/') not in index:
        index.append(_pypi)

    def _coerce(item):
        # Accept either a requirement string or a ready-made Requirement.
        if isinstance(item, basestring):
            return Requirement.parse(item)
        if isinstance(item, Requirement):
            return item
        raise TypeError("supplied requirement arg must be a string"
                        " or a Requirement, but given type is %s" % type(item))

    # Normalise `req` into a list of Requirement objects.
    if isinstance(req, (basestring, Requirement)):
        reqs = [_coerce(req)]
    else:
        reqs = [_coerce(rr) for rr in req]

    index_list = [PackageIndex(idx, search_path=[]) for idx in index]

    for req in reqs:
        fetched = None
        for idx in index_list:
            _logger.info('Looking for %s at package index %s' % (req, idx.index_url))
            fetched = idx.download(req, dest)
            if fetched:
                _logger.info(' %s successfully downloaded' % fetched)
                break
        else:
            _logger.error("couldn't find distrib for %s" % req)

    # Result of the *last* requirement processed (original behavior).
    return fetched
def initialize_options(self):
    """Set every plugin option to its default before finalize_options()."""
    # Scalar options, unset until finalize_options() fills them in.
    self.debug = None
    self.proxySetting = None
    self.service = None
    self.configDict = None
    self.pathConfig = None
    self.token = None
    self.userEnvironment = None
    self.distDepend = None
    self.projectCoordinates = None
    # Default package index; may be replaced by a configured index_url.
    self.pkgIndex = PackageIndex()
    self.dependencyList = []
    # Scratch directory for downloaded/unpacked packages.
    self.tmpdir = tempfile.mkdtemp(prefix="wss_python_plugin-")
def initialize_options(self):
    """Initialise all bdist options to their defaults."""
    # Build/dist locations and mappings, resolved later.
    self.bdist_base = None
    self.dist_dir = None
    self.format = None
    self.name_prefix = None
    self.requirements_mapping = None
    self.selected_options = None
    # Boolean switches, all off by default.
    self.keep_temp = False
    self.use_pypi_deps = False
    self.use_wheel = False
    self.with_py_prefix = False
    # Index used to resolve dependencies from PyPI.
    self.package_index = PackageIndex()
    self.initialize_manifest_options()
def checkout_extension(name):
    """Download the named extension into a fresh sub-folder of `tdir`,
    unpack it, and return the unpacked top-level directory."""
    log('Downloading extension %s to temporary folder', name)
    root = os.path.join(tdir, name)
    os.mkdir(root)
    archive = PackageIndex().download(name, root)
    unpack_archive(archive, root)
    # Locate the first directory produced by the unpack step.
    # NOTE(review): if no directory exists, this intentionally ends up
    # pointing at the last listed entry (original behavior preserved).
    path = None
    for entry in os.listdir(root):
        path = os.path.join(root, entry)
        if os.path.isdir(path):
            break
    log('Downloaded to %s', path)
    return path
def _do_upgrade(self, dist):
    """Fetch and install the next bugfix release of *dist*, if one exists.

    The requirement asks for a version above the current one but below
    the next minor release, so API compatibility is guaranteed.  Returns
    the download result, or None when no candidate is available.
    """
    next_version = _next_version(dist.parsed_version)
    spec = '{0}>{1},<{2}'.format(DIST_NAME, dist.version, next_version)
    req = pkg_resources.Requirement.parse(spec)
    package_index = PackageIndex(index_url=self.index_url)
    upgrade = package_index.obtain(req)
    if upgrade is None:
        return None
    return self._do_download(version=upgrade.version)
def _do_upgrade(dist, index_url):
    """Fetch the next bugfix release of *dist* from *index_url*, if any.

    Builds a requirement above the current version but below the next
    minor release so API compatibility is guaranteed.  Returns the
    download result, or None when no upgrade candidate is found.
    """
    # Sketchy version parsing -- maybe come up with something a bit more
    # robust for this.  NOTE(review): assumes parsed_version[:2] yields two
    # int-convertible parts (old-style pkg_resources tuples) -- confirm.
    major, minor = (int(part) for part in dist.parsed_version[:2])
    next_minor = '.'.join([str(major), str(minor + 1), '0'])
    spec = '{0}>{1},<{2}'.format(DIST_NAME, dist.version, next_minor)
    req = pkg_resources.Requirement.parse(spec)
    package_index = PackageIndex(index_url=index_url)
    upgrade = package_index.obtain(req)
    if upgrade is None:
        return None
    return _do_download(version=upgrade.version, index_url=index_url)
, 'Flask-OpenID>=1.0.1' , 'webassets>=0.7.1' ] import os, shutil, sys from pkg_resources import Requirement from setuptools.package_index import PackageIndex def die(message): sys.stderr.write(message) sys.exit(1) if __name__ == '__main__': if len(sys.argv) < 2: print "Please supply a destination directory for the fetched packages!" sys.exit(1) dest_dir = os.path.abspath(sys.argv[1]) print "Fetching packages:" pkg_index = PackageIndex() with open(os.path.join(dest_dir, 'install_order'), 'w') as fd: for pkg in required_packages: print "(--- Processing requirement '{0}'".format(pkg) dist = pkg_index.fetch_distribution(Requirement.parse(pkg), dest_dir, True, True) if dist is None: die("Couldn't find package satisfying '{0}'!".format(pkg)) print " ---) Fetched {0} {1}".format(dist.project_name, dist.version) fd.write(os.path.basename(dist.location) + '\n')
def __init__(self, *args, **kwargs):
    # Shared PyPI index used by this command for package lookups.
    self.pypi = PackageIndex()
    # Delegate remaining setup to the base command; called directly rather
    # than via super() (original style preserved).
    LabelCommand.__init__(self, *args, **kwargs)
class LocalPyPi(HTTPTestServer):
    """ Abstract class for creating a working dir and virtualenv, setting up a PyPi
        instance in a thread, and providing an api accessor.
    """
    # Credentials presented to the local index (placeholder values).
    username = '******'
    password = '******'
    hostname = '127.0.0.1'
    # Shared package index used for lookups.
    package_index = PackageIndex()

    def __init__(self, target_python=None, **kwargs):
        # Interpreter used for builds/uploads; defaults to the current one.
        self.target_python = target_python or sys.executable
        super(LocalPyPi, self).__init__(**kwargs)

    def pre_setup(self):
        # Prepare a clean environment for the server subprocess:
        # drop PYTHONPATH and put our interpreter's directory first on PATH.
        self.env = dict(os.environ)
        if "PYTHONPATH" in self.env:
            del self.env["PYTHONPATH"]
        existing_path = self.env.get("PATH")
        self.env["PATH"] = os.path.dirname(self.python)
        if existing_path:
            self.env["PATH"] = self.env["PATH"] + os.path.pathsep + existing_path

    def get_rc(self):
        """ return a ConfigParser rc for this instance """
        config = configparser.ConfigParser()
        config.add_section('server-login')
        config.set('server-login', 'repository', self.uri)
        config.set('server-login', 'username', self.username)
        config.set('server-login', 'password', self.password)
        return config

    def build_egg_from_source(self, pkg_location, output_to, python):
        """Build a bdist_egg from the source tree at *pkg_location* and
        move it into *output_to*; returns the final egg path."""
        try:
            temp = tempfile.mkdtemp()
            self.run(('%s setup.py bdist_egg --dist-dir=' + temp) % python,
                     cd=pkg_location, capture=True)
            files = os.listdir(temp)
            # bdist_egg is expected to produce exactly one artifact.
            if len(files) != 1:
                raise RuntimeError("Error while generating egg file for: %s" % pkg_location)
            egg_file = os.path.join(temp, files[0])
            shutil.move(egg_file, output_to)
            return os.path.join(output_to, os.path.basename(egg_file))
        finally:
            shutil.rmtree(temp, ignore_errors=True)

    def create_egg_for_package(self, pkg_location, output_to, python):
        """Produce an egg for *pkg_location*: re-zip an .egg directory,
        otherwise build one from source."""
        assert os.path.isdir(pkg_location)
        if pkg_location.endswith(".egg"):
            return rezip_egg(pkg_location, output_to)
        else:
            return self.build_egg_from_source(pkg_location, output_to, python)

    def upload_requirement(self, work_dir, req, python):
        """Fetch the distribution satisfying *req* (building an egg from a
        source checkout if needed) and return its path.

        Raises RuntimeError when nothing satisfying *req* can be found.
        """
        dest_dir = self.get_file_dir(req.project_name).strip()
        if not os.path.exists(dest_dir):
            os.makedirs(dest_dir)

        def fetch_requirement(req, dest_dir, force_download):
            # Executed in a subprocess via run_in_subprocess below, hence
            # the local re-imports.
            from setuptools.package_index import PackageIndex  # @Reimport
            from pkg_resources import working_set  # @Reimport # NOQA
            i = PackageIndex()
            if force_download:
                # Purge cached entries for this project to force a download.
                [i.remove(i[req.key][0]) for _ in xrange(len(i[req.key]))]
                d = i.download(req, dest_dir)
            else:
                d = i.fetch_distribution(req, dest_dir, force_scan=True)
            d = getattr(d, 'location', d) if d else ''
            # Fall back to an already-installed distribution's location.
            return (d if d else working_set.resolve([req])[0].location)

        with set_env(COVERAGE_DISABLE_WARNINGS="1"):
            fetched = run_in_subprocess(fetch_requirement, python=python,
                                        cd=self.workspace)(req, dest_dir,
                                                           force_download=False)
        if not fetched or not os.path.exists(fetched):
            err_msg = "Unable to find requirement: %r\n%s" % (str(req), fetched)
            raise RuntimeError(err_msg)
        # Source checkouts come back as directories; package them up.
        if os.path.isdir(fetched):
            fetched = self.create_egg_for_package(fetched, work_dir, python)
        print("Fetched %r" % fetched)
        return fetched

    def post_setup(self):
        """ Upload the dependencies for pkglib so dependent tools can bootstrap
            themselves as well as run tests on generated packages
        """
        self.bootstrap_tagup(python=self.target_python)

    def bootstrap_tagup(self, python):
        """Register and upload pkglib's dependency closure to this instance."""
        work_dir = os.path.join(self.workspace, 'pkglib-deps')
        if not os.path.exists(work_dir):
            os.makedirs(work_dir)
        with open(os.path.join(work_dir, '.pypirc'), 'wt') as rc_file:
            self.get_rc().write(rc_file)
        # XXX find a better way to pass in credentials
        new_env = copy.copy(dict(os.environ))
        new_env['HOME'] = work_dir
        if "PYTHONPATH" in new_env:
            del new_env["PYTHONPATH"]

        def get_pkglib_reqs():
            # Executed in a subprocess; import locally.
            from pkglib.setuptools.dependency import get_all_requirements
            return [(dist.project_name, dist.version)
                    for dist in get_all_requirements(['pkglib', 'pytest', 'pytest-cov'],
                                                     ignore_explicit_builtins=True)
                    if dist.project_name not in ['virtualenv', 'setuptools']]

        for name, version in run_in_subprocess(get_pkglib_reqs, python=python,
                                               cd=self.workspace)():
            # Quick hack to get the built eggs into the test PyPi instance.
            # We register with an empty package file then copy the files in manually
            # We may need pip and distribute if virtualenv installed old versions.
            # (should only occur when upgrading to new virtualenv).
            with open(os.path.join(work_dir, 'setup.py'), 'wb') as fp:
                setup_py = SETUP_TMPL % {'name': name, 'version': version}
                fp.write(setup_py.encode('utf-8'))
            cmd = 'cd %s; %s setup.py register' % (work_dir, python)
            out = self.run(cmd, capture=True, env=new_env)
            logger.debug(out)
            assert '200' in out
            self.upload_requirement(work_dir,
                                    Requirement.parse('%s==%s' % (name, version)),
                                    python)
def run(self):
    """Download every requested package (and, with --deps, its dependency
    closure), unpack it, apply configured patches, and generate packaging
    output for each one.

    Failures for an individual package are recorded via self.logobj and
    the loop moves on to the next package -- the broad `except:` clauses
    are a deliberate best-effort batch policy, preserved as-is.
    """
    # Prepare for iterations.
    pkgreqmap = reqmap()
    for reqarg in self.reqarglist:
        pkgreqmap.append_arg(reqarg)
    pkgreqmap.resolve_matchlist(self.logobj, self.options['--url'],
                                self.options['--skip-logged'])
    pkgidx = PackageIndex(index_url=self.options['--url'])
    show_sepline = False

    # Main loop: each pass may queue newly discovered dependencies.
    distlist = []
    ok_packages = []
    while len(pkgreqmap) > 0:
        new_pkgreqmap = reqmap()
        for idx, total, pkgreqobj in pkgreqmap.reqobj_seq():
            pkgname = pkgreqobj.project_name
            if pkgname in ok_packages:
                continue
            ok_packages.append(pkgname)
            reqstr = str(pkgreqobj)
            if show_sepline:
                self.pkgsys.sepline()
            else:
                show_sepline = True
            self.pkgsys.info('======== %s: %d/%d ========' % \
                             (pkgname, idx + 1, total))

            if self.options['--skip-broken']:
                try:
                    self.logobj.check_broken(pkgname)
                except:
                    continue

            # Collect values into args step by step.
            args = copy.copy(self.options)
            args['self'] = self.arg0

            self.pkgsys.begin('Downloading %s' % reqstr)
            try:
                dist = pkgidx.fetch_distribution(pkgreqobj,
                                                 self.options['--download-dir'],
                                                 source=True)
                if dist is None:
                    # Fix: `raise RuntimeError, 'None'` is Python-2-only
                    # syntax; the call form is equivalent and portable.
                    raise RuntimeError('None')
            except:
                self.pkgsys.end(False)
                self.logobj.in_except(pkgname, 'Download %s failed' % reqstr)
                continue
            else:
                self.pkgsys.end(True)

            self.pkgsys.begin('Unpacking %s' % dist.location)
            try:
                smart_archive(args, dist, self.options['--unpack-dir'])
            except:
                self.pkgsys.end(False)
                self.logobj.in_except(pkgname, 'Unpack %s failed' % reqstr)
                continue
            else:
                self.pkgsys.end(True)
            unpackpath = args['unpackpath']

            # Layer per-version, then per-project config values under the
            # command-line options (options win).
            config_secs = ['%s-%s' % (dist.project_name, dist.version),
                           dist.project_name]
            for secname in config_secs:
                for name, value in config.items(secname):
                    if name not in args:
                        args[name] = value
            if 'patches' not in args:
                args['patches'] = []
            else:
                args['patches'] = args['patches'].split()

            # Apply patches.
            for patch in config.patches(config_secs):
                self.pkgsys.begin('Applying %s' % os.path.basename(patch))
                os.system('(cd %s; patch -p0 < %s) > /dev/null' % \
                          (unpackpath, patch))
                self.pkgsys.end(True)
            if os.path.isfile(os.path.join(unpackpath, 'fixsetup.py')):
                os.system('(cd %s; python fixsetup.py)' % unpackpath)

            self.pkgsys.begin('Get package args')
            try:
                get_package_args(args, dist)
            except:
                self.pkgsys.end(False)
                self.logobj.in_except(pkgname, 'Get package args failed')
                continue
            else:
                self.pkgsys.end(True)

            self.pkgsys.begin('Setup args')
            try:
                self.pkgsys.setup_args(args)
            except:
                self.pkgsys.end(False)
                self.logobj.in_except(pkgname, 'pkgsys.setup_args failed')
                continue
            else:
                self.pkgsys.end(True)

            self.pkgsys.begin('Writing %s' % args['output'])
            try:
                ensure_dir(os.path.dirname(args['output']))
                if smart_write(args['output'],
                               os.path.join(pkgroot, args['template']),
                               args):
                    updated = True
                if smart_symlink(args['pkgpath'],
                                 os.path.join(args['filedir'], args['pkgfile'])):
                    updated = True
                if args['patches'] != []:
                    ensure_dir(args['patchdir'])
                    for patch in config.patches(config_secs):
                        tgtpatch = os.path.join(args['patchdir'],
                                                os.path.basename(patch))
                        if smart_symlink(patch, tgtpatch):
                            updated = True
            except:
                self.pkgsys.end(False)
                self.logobj.in_except(pkgname, 'write failed')
                continue
            else:
                self.pkgsys.end(True)

            # Fix: progress-message typo "Postproess" -> "Postprocess".
            self.pkgsys.begin('Postprocess %s' % args['output'])
            try:
                self.pkgsys.process(args)
            except:
                self.pkgsys.end(False)
                self.logobj.in_except(pkgname, 'process failed')
                continue
            else:
                self.pkgsys.end(True)

            # Queue install_requires (and all extras) for the next pass.
            if self.options['--deps']:
                reqstrlist = args['install_requires']
                for k in args['extras_require'].keys():
                    reqstrlist.extend(args['extras_require'][k])
                for reqstr in reqstrlist:
                    new_pkgreqmap.add(reqstr2obj(reqstr))

            self.logobj.pkgname_ok(pkgname)
            if self.options['--cache-root'] != '':
                distlist.append(dist)
            # Process of a single package is finished.
        pkgreqmap = new_pkgreqmap

    # Optionally publish everything that was downloaded into a local cache.
    if self.options['--cache-root']:
        cache = pypicache(self.pkgsys, self.options['--cache-root'],
                          self.options['--cache-url'])
        cache.add_packages(distlist)
        del (cache)
import warnings
warnings.filterwarnings('ignore')
import os
import argparse
import itertools
from paver.easy import path
from pip.req import parse_requirements
from setuptools.package_index import PackageIndex

# Local download cache directory (~/.pycache), created at import time.
__cache__ = path("~/.pycache").expanduser().abspath()
if not __cache__.exists():
    __cache__.makedirs()

# Package index to scan; search_path=[] disables consulting locally
# installed packages.
index = PackageIndex(index_url="http://pypi.python.org/simple/", search_path=[])

# Template for the generated per-project index page.
html = """<html>
<head><title>Index - {project}</title></head>
<body>
<h1>{project}</h1>
{body}
</body>
</html>
"""


def main(requirements):
    # NOTE(review): this definition is truncated in this chunk; only the
    # cache-directory guard is visible here.
    if not __cache__.exists():
        __cache__.makedirs()