def download_release(self):
    """Fetch the uWSGI release selected by the "version" option.

    Returns the filesystem path of the downloaded archive.
    """
    if self.cache_dir is None:
        self.log.warning("not using a download cache for uwsgi")
        fetch = Download()
    else:
        fetch = Download(cache=self.cache_dir)
    url_template = self.options.get("download-url", DOWNLOAD_URL)
    # NOTE(review): the is_temp flag is discarded, so when no cache is
    # configured the temporary file is never removed by this method.
    path, _is_temp = fetch(url_template.format(self.uwsgi_version),
                           md5sum=self.md5sum)
    return path
def mp_download(url):
    """Resolve *url* as a requirement against the global working set and,
    if the matching distribution has a location, download it into the
    download cache ``dc``.

    :param url: a requirement specifier string (e.g. ``"foo==1.0"``).
    """
    global ws
    # FIX: Requirement.parse() takes a single requirement string.  Passing
    # a list relied on an old setuptools implementation detail and raises
    # on modern (packaging-based) setuptools; the plain string works on both.
    req = pkg_resources.Requirement.parse(url)
    pkg = ws._obtain(req)  # NOTE(review): private API of WorkingSet — confirm
    if pkg.location:
        download = Download(cache=dc)
        download(pkg.location)
def install(self):
    """Install Jython by running its installer jar in silent mode.

    Returns the installation directory (the ``location`` option).
    """
    log = logging.getLogger(self.name)
    fetch = Download(self.buildout['buildout'],
                     namespace='sk.recipe.jython', logger=log)
    url = self.options['url']
    # An empty md5sum option means "no checksum verification".
    md5sum = self.options['md5sum'] or None
    installer_path, _is_temp = fetch(url, md5sum)
    java = self.options['java']
    jre = self.options['jre']
    destination = self.options['location']
    if not os.path.isdir(destination):
        os.makedirs(destination)
    cmd = [java, '-jar', installer_path, '--silent',
           '--directory', destination]
    if jre:
        cmd += ['--jre', jre]
    if self.parts:
        cmd.append('--include')
        cmd.extend(self.parts)
    status = subprocess.call(cmd)
    if status != 0:
        raise SystemError(
            'Jython installer return nonzero (%d) status; invoked with %r'
            % (status, cmd))
    return destination
def download_release(self):
    """Fetch the uWSGI tarball (``url`` option, or the project default)
    and return the path to the local copy, reusing buildout's
    download-cache when one is configured.
    """
    buildout_cfg = self.buildout['buildout']
    fetch = Download(cache=buildout_cfg.get('download-cache', None))
    url = self.options.get(
        'url', 'http://projects.unbit.it/downloads/uwsgi-latest.tar.gz')
    path, _is_temp = fetch(url)
    return path
def download_release(self):
    """Download the uWSGI release named by the "version" option (default
    "latest") and return the path to the downloaded file.
    """
    # NOTE(review): mkdtemp creates a brand-new directory on every call and
    # nothing removes it, so this "cache" is neither reused nor cleaned up.
    fetch = Download(cache=tempfile.mkdtemp("download-cache"))
    url_template = self.options.get("download-url", DOWNLOAD_URL)
    version = self.options.get("version", "latest")
    path, _is_temp = fetch(url_template.format(version))
    return path
def download(self):
    """Download option-url, letting Download verify the MD5 sum, then
    additionally verify the SHA1 sum when one is configured.

    Returns ``(path, is_temp)`` as produced by the downloader.
    Raises ChecksumError on a SHA1 mismatch.
    """
    fetcher = Download(self.buildout['buildout'],
                       hash_name=self.option_hash_name)
    path, temporary = fetcher(self.option_url, md5sum=self.option_md5sum)
    expected_sha1 = self.option_sha1sum
    if expected_sha1 and expected_sha1 != get_checksum(path):
        raise ChecksumError('SHA1 checksum mismatch for cached download '
                            'from %r at %r' % (self.option_url, path))
    return path, temporary
def download_and_extract(self, url, md5sum, dest,
                         extract_filter='*', strip_dirs=1):
    """Download an archive and unpack the entries matching
    *extract_filter* into *dest*, stripping the first *strip_dirs* path
    components of each entry.  Returns the list of extracted paths.
    """
    archive_path, _is_temp = Download(self.buildout['buildout'])(url, md5sum)
    extracted = []

    def progress_filter(src, dst):
        # Returning None tells unpack_archive to skip the entry.
        if not fnmatch(src, extract_filter):
            return None
        parts = os.path.normpath(src).split(os.sep)[strip_dirs:]
        if not parts:
            return None
        target = os.path.join(dest, os.path.join(*parts))
        extracted.append(target)
        return target

    archive_util.unpack_archive(archive_path, dest, progress_filter)
    return extracted
def install(self):
    """Download the configured URL and either copy it verbatim into the
    destination (``download-only``) or extract it there.

    Returns the list of created paths so buildout can remove them on
    uninstall.  Raises zc.buildout.UserError on extraction failure or on
    a pre-existing target (unless ``ignore-existing`` is set).
    """
    log = logging.getLogger(self.name)
    destination = self.options.get('destination')
    # hash-name controls whether the cached filename is the URL's hash.
    download = Download(self.buildout['buildout'],
                        hash_name=self.options['hash-name'].strip()
                        in TRUE_VALUES)
    path, is_temp = download(self.options['url'],
                             md5sum=self.options.get('md5sum'))
    parts = []
    try:
        # Create destination directory
        if not os.path.isdir(destination):
            os.makedirs(destination)
            # Only record the directory if we created it, so uninstall
            # does not remove a pre-existing directory.
            parts.append(destination)
        download_only = self.options['download-only'].strip().lower(
        ) in TRUE_VALUES
        if download_only:
            if self.options['filename']:
                # Use an explicit filename from the section configuration
                filename = self.options['filename']
            else:
                # Use the original filename of the downloaded file regardless
                # whether download filename hashing is enabled.
                # See http://github.com/hexagonit/hexagonit.recipe.download/issues#issue/2
                filename = os.path.basename(
                    urlparse.urlparse(self.options['url'])[2])
            # Copy the file to destination without extraction
            target_path = os.path.join(destination, filename)
            shutil.copy(path, target_path)
            if self.options.get('mode'):
                # mode is an octal string, e.g. "0755".
                os.chmod(target_path, int(self.options['mode'], 8))
            if not destination in parts:
                parts.append(target_path)
        else:
            # Extract the package
            extract_dir = tempfile.mkdtemp("buildout-" + self.name)
            # progress_filter (defined elsewhere on this class) counts
            # entries it excludes in self.excluded_count.
            self.excluded_count = 0
            try:
                try:
                    setuptools.archive_util.unpack_archive(
                        path, extract_dir,
                        progress_filter=self.progress_filter)
                except setuptools.archive_util.UnrecognizedFormat:
                    log.error(
                        'Unable to extract the package %s. Unknown format.',
                        path)
                    raise zc.buildout.UserError('Package extraction error')
                if self.excluded_count > 0:
                    log.info(
                        "Excluding %s file(s) matching the exclusion pattern."
                        % self.excluded_count)
                # calculate_base handles optional top-level dir stripping.
                base = self.calculate_base(extract_dir)
                if not os.path.isdir(destination):
                    os.makedirs(destination)
                    parts.append(destination)
                log.info('Extracting package to %s' % destination)
                ignore_existing = self.options['ignore-existing'].strip(
                ).lower() in TRUE_VALUES
                for filename in os.listdir(base):
                    dest = os.path.join(destination, filename)
                    if os.path.exists(dest):
                        if ignore_existing:
                            log.info('Ignoring existing target: %s' % dest)
                        else:
                            log.error(
                                'Target %s already exists. Either remove it or set '
                                '``ignore-existing = true`` in your buildout.cfg to ignore existing '
                                'files and directories.', dest)
                            raise zc.buildout.UserError(
                                'File or directory already exists.')
                    else:
                        # Only add the file/directory to the list of installed
                        # parts if it does not already exist. This way it does
                        # not get accidentally removed when uninstalling.
                        parts.append(dest)
                        shutil.move(os.path.join(base, filename), dest)
            finally:
                # Always remove the scratch extraction directory.
                shutil.rmtree(extract_dir)
    finally:
        # Remove the downloaded file when it was not cached.
        if is_temp:
            os.unlink(path)
    return parts
def install(self):
    """Installer.

    Obtains a node binary (pre-existing, binary download, or source
    build), installs the configured global npms under the part's HOME,
    rewrites installed npm scripts' shebangs to use this node, validates
    requested script names, then generates buildout console scripts.
    """
    logger = logging.getLogger(self.name)
    options = self.options
    parts = self.buildout['buildout']['parts-directory']
    name = 'buildout-node'
    node_dir = os.path.join(parts, self.name)
    if not os.path.isdir(node_dir):
        os.makedirs(node_dir)
    node_binary = self.get_binary(options)
    if node_binary is None:
        # No usable node yet: build the download URL substitution args.
        args = {}
        if 'url' not in options:
            args = dict(
                v=self.get_version(options),
                # os.uname() machine field decides 64- vs 32-bit archive.
                a='x86_64' in os.uname() and 'x64' or 'x86',
            )
            if sys.platform.startswith('linux'):
                args['p'] = 'linux'
            elif sys.platform == 'darwin':
                args['p'] = 'darwin'
        if 'p' in args:
            # Known platform: use the prebuilt binary distribution.
            options['url'] = url = self.binary_format.format(**args)
            logger.info('Using binary distribution at %s', url)
            from zc.buildout.download import Download
            from archive import extract
            # Use the buildout download infrastructure
            manager = Download(options=self.buildout['buildout'])
            # The buildout download utility expects us to know whether or
            # not we have a download cache, which causes fun errors. This
            # is probably a bug, but this test should be safe regardless.
            if manager.download_cache:
                filename = manager.download_cached(url)[0]
            else:
                filename = manager.download(url)[0]
            destination = os.path.join(
                self.buildout['buildout']['parts-directory'], name)
            # Finally, extract the archive. The binary distribution urls
            # are defined in this file, so we can safely assume they're
            # gzipped tarballs. This prevents an error when downloaded
            # into a temporary file.
            extract(filename,destination,ext=".tar.gz")
        else:
            # Unknown platform: fall back to building node from source.
            if 'url' not in options:
                options['url'] = url = self.source_format.format(**args)
            logger.info('Using source distribution at %s', options['url'])
            import hexagonit.recipe.cmmi
            options['environment'] = (
                'PYTHONPATH=tools:deps/v8/tools:../../deps/v8/tools'
            )
            node = hexagonit.recipe.cmmi.Recipe(self.buildout, name, options)
            node.install()
        node_binary = self.get_binary(options)
    node_bin = os.path.dirname(node_binary)
    scripts = options.get('scripts', '').split()
    scripts = [script.strip() for script in scripts if script.strip()]
    npms = options.get('npms', '')
    if npms:
        npms = ' '.join([npm.strip() for npm in npms.split()
                         if npm.strip()])
        # Install the npms globally, but with HOME pointed at the part
        # directory so "prefix=$HOME" keeps them inside this part.
        p = subprocess.Popen((
            'export HOME=%(node_dir)s;'
            'export PATH=%(node_bin)s:$PATH;'
            'echo "prefix=$HOME\n" > $HOME/.npmrc;'
            '%(node_bin)s/npm set color false;'
            '%(node_bin)s/npm set unicode false;'
            '%(node_bin)s/npm install -sg %(npms)s') % {
                'node_dir': shell_quote(node_dir),
                'node_bin': shell_quote(node_bin),
                'npms': npms}, shell=True)
        p.wait()
    for script in scripts:
        if script in ['node']:
            continue
        filename = os.path.join(node_bin, script)
        if os.path.isfile(filename):
            # Rewrite the script's shebang line to point at this part's
            # node binary.
            fd = open(filename)
            data = fd.read()
            fd.close()
            fd = open(filename, 'w')
            fd.seek(0)
            data = data.split('\n')
            data[0] = '#!%s' % node_binary
            fd.write('\n'.join(data))
            fd.close()
    # node and npm wrappers are always generated.
    for script in ('node', 'npm'):
        if script not in scripts:
            scripts.append(script)
    node_path = options.get('node-path', '').split()
    node_path.insert(0, os.path.join(node_dir, 'lib', 'node_modules'))
    node_path = ':'.join(node_path)
    # Generated scripts export NODE_PATH before running.
    options['initialization'] = (
        'import os;\nos.environ["NODE_PATH"] = %r' % node_path
    )
    paths = [os.path.join(node_dir, 'bin'), node_bin]
    all_scripts = []
    for p in paths:
        if os.path.isdir(p):
            all_scripts.extend(os.listdir(p))
    # Fail early, with the available names, on misspelled script options.
    typos = []
    for script in scripts:
        if script not in all_scripts:
            typos.append(script)
    if typos:
        import zc.buildout
        typos = ', '.join([repr(s) for s in typos])
        all_scripts = [repr(s) for s in all_scripts]
        all_scripts = ', '.join(sorted(all_scripts))
        raise zc.buildout.UserError((
            'Script(s) {0} not found in {1[0]};{1[1]}.\n'
            'You may have a typo in your buildout config.\n'
            'Available scripts are: {2}'
        ).format(typos, paths, all_scripts))
    # Delegate console-script generation to zc.recipe.egg.
    options['eggs'] = 'gp.recipe.node'
    options['arguments'] = '%r, (%r, %r), sys.argv[0]' % (
        node_binary,
        os.path.join(node_dir, 'bin'),
        node_bin,
    )
    options['scripts'] = '\n'.join(scripts)
    options['entry-points'] = '\n'.join([
        '%s=gp.recipe.node.script:main' % s for s in scripts
    ])
    from zc.recipe.egg import Scripts
    rscripts = Scripts(self.buildout, self.name, options)
    return rscripts.install()
def _download(self):
    """Fetch self.url via buildout's download machinery and remember the
    resulting local path on self.download_path.
    """
    from zc.buildout.download import Download
    fetch = Download(self.buildout.get('buildout'))
    path, _is_temp = fetch(self.url)
    self.download_path = path
def install(self):
    """Installer.

    Obtains a node binary (pre-existing, binary download, or source
    build via zc.recipe.cmmi), then installs the configured global
    npms — verifying an existing install instead when buildout runs in
    offline mode — and finally generates the part's scripts.
    """
    logger = logging.getLogger(self.name)
    options = self.options
    parts = self.buildout['buildout']['parts-directory']
    name = 'buildout-node'
    node_dir = os.path.join(parts, self.name)
    if not os.path.isdir(node_dir):
        os.makedirs(node_dir)
    node_binary = self.get_binary(options)
    if node_binary is None:
        # No usable node yet: build the download URL substitution args.
        args = {}
        if 'url' not in options:
            args = dict(
                v=self.get_version(options),
                # os.uname() machine field decides 64- vs 32-bit archive.
                a='x86_64' in os.uname() and 'x64' or 'x86',
            )
            if sys.platform.startswith('linux'):
                args['p'] = 'linux'
            elif sys.platform == 'darwin':
                args['p'] = 'darwin'
        if 'p' in args:
            # Known platform: use the prebuilt binary distribution; the
            # URL template is overridable via the binary-url option.
            binary_url = options.get('binary-url', self.binary_format)
            options['url'] = url = binary_url.format(**args)
            logger.info('Using binary distribution at %s', url)
            from zc.buildout.download import Download
            from archive import extract
            # Use the buildout download infrastructure
            manager = Download(
                options=self.buildout['buildout'],
                offline=self.buildout['buildout'].get('offline') == 'true')
            # The buildout download utility expects us to know whether or
            # not we have a download cache, which causes fun errors. This
            # is probably a bug, but this test should be safe regardless.
            if manager.download_cache:
                filename = manager.download_cached(url)[0]
            else:
                filename = manager.download(url)[0]
            destination = self.get_node_directory(options)
            # Finally, extract the archive. The binary distribution urls
            # are defined in this file, so we can safely assume they're
            # gzipped tarballs. This prevents an error when downloaded
            # into a temporary file.
            extract(filename, destination, ext=".tar.gz")
        else:
            # Unknown platform: fall back to building node from source.
            if 'url' not in options:
                options['url'] = url = self.source_format.format(**args)
            logger.info('Using source distribution at %s', options['url'])
            import zc.recipe.cmmi
            options['environment'] = (
                'PYTHONPATH=tools:deps/v8/tools:../../deps/v8/tools'
            )
            node = zc.recipe.cmmi.Recipe(
                self.buildout, name, options)
            node.install()
        node_binary = self.get_binary(options)
    node_bin = os.path.dirname(node_binary)
    npms = options.get('npms', '')
    if npms:
        npms = ' '.join([npm.strip() for npm in npms.split()
                        if npm.strip()])
        cmd_data = {'node_dir': shell_quote(node_dir),
                    'node_bin': shell_quote(node_bin),
                    'cache': os.path.expanduser('~/.npm'),
                    'npms': npms}
        # Shared shell prelude: HOME is the part dir so "prefix=$HOME"
        # keeps global npm installs inside this part.
        cmd_prefix = (
            'export HOME=%(node_dir)s;'
            'export PATH=%(node_bin)s:"$PATH";'
            'echo "prefix=$HOME" > $HOME/.npmrc;'
            'echo "cache=%(cache)s" >> $HOME/.npmrc;'
            '%(node_bin)s/npm set color false;'
            '%(node_bin)s/npm set unicode false;') % cmd_data
        if self.buildout['buildout'].get('offline') == 'true':
            # Offline: don't install — verify the npms are already there
            # by asking npm for its global dependency tree as JSON.
            cmd = cmd_prefix + \
                '%(node_bin)s/npm ls %(npms)s --global --json' % cmd_data
            import zc.buildout
            try:
                output = subprocess.check_output(cmd, shell=True)
                output_json = json.loads(output)
                installed_npms = output_json.get('dependencies')
                # if npm reports a discrepancy, error out
                if not installed_npms or \
                        len(installed_npms) != len(npms.split()):
                    raise zc.buildout.UserError(
                        "Couldn't install %r npms in offline mode" % npms)
                logger.debug('Using existing npm install for %r' % npms)
            except subprocess.CalledProcessError:
                # npm fails if install has not yet happened
                raise zc.buildout.UserError(
                    "Couldn't install %r npms in offline mode" % npms)
        else:
            cmd = cmd_prefix + \
                '%(node_bin)s/npm install -g %(npms)s' % cmd_data
            p = subprocess.Popen(cmd, shell=True)
            p.wait()
    return self.install_scripts()
def install(self):
    """Installer.

    Obtains a node binary (pre-existing, binary download, or source
    build via zc.recipe.cmmi), installs the configured global npms under
    the part's HOME, and generates the part's scripts.
    """
    logger = logging.getLogger(self.name)
    options = self.options
    parts = self.buildout['buildout']['parts-directory']
    name = 'buildout-node'
    node_dir = os.path.join(parts, self.name)
    if not os.path.isdir(node_dir):
        os.makedirs(node_dir)
    node_binary = self.get_binary(options)
    if node_binary is None:
        # No usable node yet: build the download URL substitution args.
        args = {}
        if 'url' not in options:
            args = dict(
                v=self.get_version(options),
                # os.uname() machine field decides 64- vs 32-bit archive.
                a='x86_64' in os.uname() and 'x64' or 'x86',
            )
            if sys.platform.startswith('linux'):
                args['p'] = 'linux'
            elif sys.platform == 'darwin':
                args['p'] = 'darwin'
        if 'p' in args:
            # Known platform: use the prebuilt binary distribution.
            options['url'] = url = self.binary_format.format(**args)
            logger.info('Using binary distribution at %s', url)
            from zc.buildout.download import Download
            from archive import extract
            # Use the buildout download infrastructure
            manager = Download(options=self.buildout['buildout'])
            # The buildout download utility expects us to know whether or
            # not we have a download cache, which causes fun errors. This
            # is probably a bug, but this test should be safe regardless.
            if manager.download_cache:
                filename = manager.download_cached(url)[0]
            else:
                filename = manager.download(url)[0]
            destination = self.get_node_directory(options)
            # Finally, extract the archive. The binary distribution urls
            # are defined in this file, so we can safely assume they're
            # gzipped tarballs. This prevents an error when downloaded
            # into a temporary file.
            extract(filename, destination, ext=".tar.gz")
        else:
            # Unknown platform: fall back to building node from source.
            if 'url' not in options:
                options['url'] = url = self.source_format.format(**args)
            logger.info('Using source distribution at %s', options['url'])
            import zc.recipe.cmmi
            options['environment'] = (
                'PYTHONPATH=tools:deps/v8/tools:../../deps/v8/tools'
            )
            node = zc.recipe.cmmi.Recipe(
                self.buildout, name, options)
            node.install()
        node_binary = self.get_binary(options)
    node_bin = os.path.dirname(node_binary)
    npms = options.get('npms', '')
    if npms:
        npms = ' '.join([npm.strip() for npm in npms.split()
                         if npm.strip()])
        # Install the npms globally, with HOME pointed at the part dir so
        # "prefix=$HOME" keeps them inside this part.
        cmd = (
            'export HOME=%(node_dir)s;'
            'export PATH=%(node_bin)s:$PATH;'
            'echo "prefix=$HOME\n" > $HOME/.npmrc;'
            'echo "cache=%(cache)s\n" >> $HOME/.npmrc;'
            '%(node_bin)s/npm set color false;'
            '%(node_bin)s/npm set unicode false;'
            '%(node_bin)s/npm install -g %(npms)s') % {
                'node_dir': shell_quote(node_dir),
                'node_bin': shell_quote(node_bin),
                'cache': os.path.expanduser('~/.npm'),
                'npms': npms}
        p = subprocess.Popen(cmd, shell=True)
        p.wait()
    return self.install_scripts()
def downloadExtract(self, targetFolder, srcRepo, srcList):
    """ download all sources in srcList, extract them and create symlink
    in target folder. all sources will be saved in folder
    parts/PART-NAME. Each source is saved in srcList as the following
    format: (id, version).

    Returns the list of created paths so buildout can remove them on
    uninstall.  Raises zc.buildout.UserError on extraction failure, on a
    package without exactly one top-level entry, or on pre-existing
    targets (unless ``ignore-existing`` is set).
    """
    log = logging.getLogger(self.name)
    # the zc.buildout download facility will save everything in download
    # cache. We need make sure it is exist.
    if not os.path.exists(self.buildout['buildout']['download-cache']):
        os.makedirs(self.buildout['buildout']['download-cache'])
    # get a zc.buildout download instance
    download = Download(self.buildout['buildout'])
    parts = []
    # add the base directory name to parts, so it will be removed during
    # uninstalling.
    partdir = os.path.join(self.buildout['buildout']['parts-directory'],
                           self.name)
    parts.append(partdir)
    # process the sources one by one.
    for srcId, srcVersion in srcList:
        # the download url.
        url = srcRepo + '/' + srcId + '.' + srcVersion + '.zip'
        path, is_temp = download(url)
        # destination is parts/PART-NAME/PLUGIN_ID-PLUGIN_VERSION
        dest = os.path.join(self.buildout['buildout']['parts-directory'],
                            self.name, srcId + '-' + srcVersion)
        if not os.path.isdir(dest):
            os.makedirs(dest)
            parts.append(dest)
        # Extract the package
        extract_dir = tempfile.mkdtemp("buildout-" + self.name)
        try:
            setuptools.archive_util.unpack_archive(path, extract_dir)
        except setuptools.archive_util.UnrecognizedFormat:
            log.error('Unable to extract the package %s. Unknown format.',
                      path)
            raise zc.buildout.UserError('Package extraction error')
        # The archive must contain exactly one top-level directory,
        # which is stripped below.
        top_level_contents = os.listdir(extract_dir)
        if len(top_level_contents) != 1:
            log.error('Unable to strip top level directory because there are more '
                      'than one element in the root of the package.')
            raise zc.buildout.UserError('Invalid package contents')
        base = os.path.join(extract_dir, top_level_contents[0])
        log.info('Extracting package to %s' % dest)
        ignore_existing = self.options['ignore-existing'].strip().lower() \
            in TRUE_VALUES
        for filename in os.listdir(base):
            filenameDest = os.path.join(dest, filename)
            if os.path.exists(filenameDest):
                if ignore_existing:
                    log.info('Ignoring existing target: %s' % filenameDest)
                else:
                    log.error('Target %s already exists. Either remove it or set '
                              '``ignore-existing = true`` in your buildout.cfg to ignore existing '
                              'files and directories.', filenameDest)
                    raise zc.buildout.UserError(
                        'File or directory already exists.')
            else:
                # Only add the file/directory to the list of installed
                # parts if it does not already exist. This way it does
                # not get accidentally removed when uninstalling.
                parts.append(filenameDest)
                shutil.move(os.path.join(base, filename), filenameDest)
        # create the symlink or copy for this srouce
        targetPath = os.path.join(targetFolder, srcId)
        # add the dest folder to parts, so it will be removed during
        # uninstalling.
        parts.append(targetPath)
        if self.options['action'].strip().lower() == 'copy':
            # "copy" action: replace any existing target with the
            # extracted tree itself.
            log.info('Rename to %s' % targetPath)
            if os.path.islink(targetPath):
                os.unlink(targetPath)
            elif os.path.exists(targetPath):
                shutil.rmtree(targetPath)
            shutil.move(dest, targetPath)
        else:
            # Default action: symlink the target to the extracted tree.
            log.info('Create symlink to %s' % targetPath)
            if os.path.lexists(targetPath):
                os.unlink(targetPath)
            os.symlink(dest, targetPath)
            # add the dest folder to parts, so it will be removed during
            # uninstalling.
            parts.append(dest)
        # NOTE(review): this cleanup is skipped when an earlier error
        # raised out of the loop — extract_dir then leaks; confirm.
        shutil.rmtree(extract_dir)
    return parts
def __init__(self, buildout, name, options):
    """Keep a reference to the buildout state and set up a shared
    downloader for this recipe.  *name* is accepted for the standard
    recipe signature but not stored.
    """
    super(PackagingRecipe, self).__init__()
    self.buildout = buildout
    self.options = options
    self.downloader = Download(buildout)