def check_ff_safety(self, rev='HEAD', ignore_permissions=True):
    """Return True if it is safe to fast-forward the work tree to ``rev``.

    Safe means: no uncommitted changes in the index or work tree, and the
    local head has not diverged from (is not ahead of) ``rev``.
    """
    # Check the status of the work tree and index.
    status_ok = True
    for idx, tree, name in self.status(ignore_permissions=ignore_permissions):
        if idx or tree:
            # Fixed typo in user-facing message ("uncomitted").
            log.error('uncommitted changes:')
            self.git('status')
            status_ok = False
            break

    # Make sure we haven't forked.
    ahead, behind = self.distance(self.head, rev)
    if ahead and behind:
        log.error('you and the repo have forked')
        status_ok = False
    elif ahead:
        log.warning('you are %s commits ahead of the remote repo; please `vee push`' % ahead)
        status_ok = False
    elif behind:
        # Being behind is fine; that is exactly what a fast-forward fixes.
        log.info('You are %d commits behind.' % behind)

    return status_ok
def link(self, env, force=False):
    """Link this package's installed tree into the environment ``env``.

    Unless ``force`` is true, refuses to link into an environment that
    already has this package linked.
    """
    self._assert_paths(install=True)
    if not force:
        self._assert_unlinked(env)
    log.info(style_note('Linking into %s' % env.name))
    env.link_directory(self.install_path)
    # Remember the link so future _assert_unlinked calls can see it.
    self._record_link(env)
def clone_if_not_exists(self, remote_url=None, shallow=True):
    """Assert that the repo has been cloned. Return True if it did not exist."""
    self.remote_url = remote_url or self.remote_url
    if self.exists:
        return False
    if not self.remote_url:
        raise ValueError('git repo %r does not exist; need remote url' % self.git_dir)
    if os.path.exists(self.work_tree):
        # The work tree directory already has files in it, so we cannot
        # `git clone` into it: init a bare repo, attach the remote, flip
        # off core.bare, and pull into the existing tree instead.
        call(['git', 'init', '--bare', self.git_dir])
        self.git('remote', 'add', 'origin', self.remote_url)
        self.git('config', '--unset', 'core.bare')
        if shallow:
            # TODO: non-master
            self.git('pull', '--ff-only', '--depth=1', 'origin', get_default_branch())
        else:
            # TODO: non-master
            self.git('pull', '--ff-only', 'origin', get_default_branch())
    elif shallow:
        # Fresh shallow clone (history truncated to one commit).
        log.info(style_note('Cloning shallow', self.remote_url))
        call(['git', 'clone', '--depth=1', self.remote_url, self.work_tree])
    else:
        # Fresh full clone.
        log.info(style_note('Cloning', self.remote_url))
        call(['git', 'clone', self.remote_url, self.work_tree])
    return True
def develop(self):
    # Expose the package's executable directories ("bin", "scripts") on
    # $PATH for development sessions.
    pkg = self.package
    for name in ('bin', 'scripts'):
        path = os.path.join(pkg.build_path, name)
        if os.path.exists(path):
            log.info(style_note("Adding ./%s to $PATH" % name))
            # NOTE(review): the '@' default presumably acts as a placeholder
            # for the inherited $PATH inside join_env_path — confirm against
            # join_env_path's implementation.
            pkg.environ['PATH'] = join_env_path('./' + name, pkg.environ.get('PATH', '@'))
def relocate_pkgconfig(root):
    """Rewrite the ``prefix=`` line of every ``*.pc`` file under
    ``root/lib/pkgconfig`` so that it points at ``root``.

    Files with no recognisable prefix line are skipped with a warning.
    """
    # Do trivial rewrites of pkgconfig files.
    pkg_config = os.path.join(root, 'lib', 'pkgconfig')
    if not os.path.exists(pkg_config):
        return
    for name in os.listdir(pkg_config):
        if not name.endswith('.pc'):
            continue
        path = os.path.join(pkg_config, name)
        log.info(path)

        # Read via a context manager so the handle is closed promptly
        # (the original `list(open(path))` leaked it until GC).
        with open(path) as fh:
            lines = fh.readlines()

        for i, line in enumerate(lines):
            if re.match(r'^prefix=([^\$]+)\s*$', line):
                lines[i] = 'prefix=%s\n' % root
                break
        else:
            with log.indent():
                log.warning('No obvious prefix to replace')
            continue

        # As silly as this seems, *.pc files we have seen have their
        # write flag removed, but we still own them (since we just installed
        # them). Quickest way to fix: delete them.
        if not os.access(path, os.W_OK):
            os.unlink(path)
        with open(path, 'w') as fh:
            fh.writelines(lines)
def checkout(self, revision, branch=None, force=False, fetch=False, ignore_permissions=True):
    """Check out ``revision``, optionally (re)creating ``branch`` at it,
    then update submodules. No-op when already at that commit."""
    commit = self.rev_parse(revision, fetch=fetch)
    if self.head == commit:
        return

    log.info(style_note('Checking out', '%s [%s]' % (revision, commit)))

    # Assemble the git argv piece by piece.
    cmd = ['-c', 'core.fileMode=false'] if ignore_permissions else []
    cmd.append('checkout')
    if force:
        cmd.append('--force')
    if branch:
        # Make this branch if it doesn't exist.
        cmd.extend(('-B', branch))
    cmd.append(revision)
    self.git(*cmd)

    self.git('submodule', 'update', '--init', '--checkout', '--recursive', silent=False)
    # Cache the new head so later checks don't have to ask git.
    self._head = commit
def create_if_not_exists(self):
    # Create the Python virtualenv on first use, and make sure a working
    # python-config script exists inside it.
    python = os.path.join(self.path, 'bin', 'python')
    if not os.path.exists(python):
        makedirs(self.path)
        log.info(style_note('Creating Python virtualenv', self.path))
        if hasattr(virtualenv, 'cli_run'):
            # New API (in which there isn't really any API)
            virtualenv.cli_run(['--no-pip', '--no-wheel', '--no-setuptools', self.path])
        else:
            # Old API
            virtualenv.create_environment(self.path, no_setuptools=True, no_pip=True)
    if not os.path.exists(python + '-config'):
        # virtualenv does not ship python-config; pull one in from the
        # interpreter the environment was created from, trying the most
        # specific name first.
        version = get_default_python().version
        names = (
            'python{}.{}-config'.format(*version),
            'python{}-config'.format(*version),
            'python-config',
        )
        # sys.real_prefix exists when running inside an old-style virtualenv.
        prefix = getattr(sys, 'real_prefix', sys.prefix)
        for name in names:
            old_path = os.path.join(prefix, 'bin', name)
            if os.path.exists(old_path):
                # Found a source script: install it under every
                # conventional name, then stop searching.
                for name in names:
                    new_path = os.path.join(self.path, 'bin', name)
                    self.rewrite_shebang_or_link(old_path, new_path)
                break
        else:
            log.warning('Could not find python-config')
def inspect(self):
    """Build egg-info (if there is a setup.py but no egg yet) and register
    every PyPI dependency listed in the egg's ``requires.txt``.

    Raises RuntimeError when ``setup.py egg_info`` fails.
    """
    pkg = self.package

    if self.setup_path and not self.egg_path:
        log.info(style_note('Building Python egg-info'))
        res = call_setup_py(self.setup_path, ['egg_info'], env=pkg.fresh_environ(), indent=True, verbosity=1)
        if res:
            raise RuntimeError('Could not build Python package')
        self.egg_path = find_in_tree(pkg.build_path, '*.egg-info', 'dir')
        if not self.egg_path:
            log.warning('Could not find newly created *.egg-info')

    if self.egg_path:
        requires_path = os.path.join(self.egg_path, 'requires.txt')
        if os.path.exists(requires_path):
            # Open in text mode (was 'rb'): the lines are compared against
            # str literals and fed to a str regex, which raises TypeError
            # on bytes under Python 3. Context manager closes the handle.
            with open(requires_path) as fh:
                for line in fh:
                    line = line.strip()
                    if not line:
                        continue
                    # "[extras]" sections mark optional requirements;
                    # stop at the first one.
                    if line.startswith('['):
                        break
                    name = re.split(r'\W', line)[0].lower()
                    log.debug('%s depends on %s' % (pkg.name, name))
                    pkg.dependencies.append(Package(name=name, url='pypi:%s' % name))
def extract(self):
    """Extract the package into the (cleaned) build directory."""
    pkg = self.package
    pkg._assert_paths(build=True)

    if pkg.checksum:
        log.info(style_note('Verifying checksum', 'of ' + pkg.package_path), verbosity=1)
        assert_file_checksum(pkg.package_path, pkg.checksum)

    log.info(style_note('Expanding %s to' % self.archive_type, pkg.build_path))
    pkg._clean_build_path()

    # Dispatch on archive flavour; unknown types are silently ignored.
    extractors = {
        'tar+gzip': ['tar', 'xzf'],  # gzip-ed tarballs
        'tar+bzip': ['tar', 'xjf'],  # bzip-ed tarballs
        'zip': ['unzip'],            # zip files (and Python wheels)
    }
    argv = extractors.get(self.archive_type)
    if argv:
        call(argv + [pkg.package_path], cwd=pkg.build_path)
def build(self):
    """Source the package's build script in bash and capture the ``VEE_*``
    variables it exports (build subdir, install prefix).

    The script runs with VEE paths injected into its environment; its
    resulting VEE_* variables are written to a temp file and read back.
    """
    import binascii

    log.info(style_note('source %s' % os.path.basename(self.build_sh)))

    pkg = self.package
    pkg._assert_paths(build=True, install=True)

    env = pkg.fresh_environ()
    env.update(
        VEE=pkg.home.root,
        VEE_BUILD_PATH=pkg.build_path,
        VEE_INSTALL_NAME=pkg.install_name,
        VEE_INSTALL_PATH=pkg.install_path,
    )

    # TODO: somehow derive this from --build-sh provided script.
    cwd = os.path.dirname(self.build_sh)
    # binascii.hexlify works on Python 2 and 3; the original
    # bytes.encode('hex') is Python-2-only and crashes on Python 3.
    envfile = os.path.join(cwd, 'vee-env-' + binascii.hexlify(os.urandom(8)).decode('ascii'))
    call([
        'bash', '-c',
        '. %s; env | grep VEE > %s' % (os.path.basename(self.build_sh), envfile)
    ], env=env, cwd=cwd)

    # Read back the variables the script exported; close the handle
    # deterministically (the original `list(open(...))` leaked it).
    with open(envfile) as fh:
        env = dict(line.strip().split('=', 1) for line in fh)
    os.unlink(envfile)

    pkg.build_subdir = env.get('VEE_BUILD_SUBDIR') or ''
    pkg.install_prefix = env.get('VEE_INSTALL_PREFIX') or ''
def relocate(root, con, spec=None, dry_run=False, target_cache=None):
    # Relocate every shared library found under ``root`` according to
    # ``spec`` (a flags/include/exclude specification string).
    target_cache = {} if target_cache is None else target_cache
    flags, include, exclude = _parse_spec(spec, root)

    # Specs may be restricted to one platform; skip silently elsewhere.
    if 'linux' in flags and not sys.platform.startswith('linux'):
        return
    if 'darwin' in flags and sys.platform != 'darwin':
        return

    if not (include or 'auto' in flags):
        raise ValueError('no libraries to include')

    # Find everything in include on OS X, since we need to actually find the
    # individual dependencies.
    if sys.platform == 'darwin':
        for path in include:
            for found in find_shared_libraries(path):
                log.debug('found %s' % found)
                # Index the library under each of its name variants so
                # dependency lookups by any alias resolve to it.
                for name in name_variants(os.path.basename(found)):
                    target_cache.setdefault(name, []).append(found)

    for lib_path in find_shared_libraries(root):
        log.info(lib_path)
        with log.indent():
            if sys.platform == 'darwin':
                _relocate_darwin_library(lib_path, con, flags, include, exclude, dry_run, target_cache)
            else:
                _relocate_linux_library(lib_path, include, dry_run)

    if 'pkgconfig' in flags:
        relocate_pkgconfig(root)
def git(args, *command):
    """Run ``command`` with git in one or more development packages.

    Returns a process-style exit code: 0 on success, 1 for usage errors
    or a failing git invocation, 2 for an unknown package name.
    """
    # BUG FIX: the original tested ``args.name``, but every other read in
    # this function uses ``args.names`` — the check could never pass
    # consistently with the loop below.
    if not (args.all or args.names):
        log.error("Please provide -n NAME or --all.")
        return 1
    if not command:
        log.error('Please provide a git command.')
        return 1

    home = args.assert_home()
    retcode = 0

    # Resolve the set of development packages to operate on.
    if args.all:
        dev_pkgs = home.iter_development_packages()
    else:
        dev_pkgs = []
        for name in args.names:
            dev_pkg = home.find_development_package(name)
            if not dev_pkg:
                log.error("Could not find dev package: {!r}.".format(name))
                return 2
            dev_pkgs.append(dev_pkg)

    # Run the command in each package, remembering any failure.
    for dev_pkg in dev_pkgs:
        log.info(style_note(dev_pkg.name, ' '.join(command)))
        try:
            dev_pkg.git(*command, verbosity=0, indent=False)
        except Exception as e:
            print_cli_exc(e)
            retcode = 1

    return retcode
def fetch(self, pkg):
    """Download the package archive, skipping if it is already cached."""
    pkg._assert_paths(package=True)
    if os.path.exists(pkg.package_path):
        log.info(style_note('Already downloaded', pkg.url))
    else:
        log.info(style_note('Downloading', pkg.url))
        download(pkg.url, pkg.package_path)
def optlink(self, pkg):
    """(Re)create the ``opt/<name>`` symlink pointing at the install path."""
    if not pkg.name:
        return
    opt_link = pkg.home._abs_path('opt', pkg.name)
    log.info(style_note('Linking to opt/%s' % pkg.name))
    # lexists (not exists) so stale/dangling symlinks are replaced too.
    if os.path.lexists(opt_link):
        os.unlink(opt_link)
    makedirs(os.path.dirname(opt_link))
    os.symlink(pkg.install_path, opt_link)
def _relocate_linux_library(lib_path, include, dry_run):
    # On Linux, relocation is just rewriting the ELF RPATH with patchelf.
    rpath = ':'.join(include)
    log.info('set rpath to %s' % rpath)
    if not dry_run:
        call(['patchelf', '--set-rpath', rpath, lib_path])
def inspect(self):
    """Parse ``requirements.txt`` and register each entry as a dependency
    of this package."""
    log.info(style_note('Inspecting %s' % os.path.basename(self.requirements_txt)))
    pkg = self.package
    # Context manager closes the handle deterministically (the original
    # `for line in open(...)` leaked it until GC).
    with open(self.requirements_txt) as fh:
        for line in fh:
            line = line.strip()
            # Skip blank lines and comments.
            if not line or line[0] == '#':
                continue
            pkg.dependencies.append(Package(line, home=pkg.home))
def install(self):
    """Run ``make install`` for the package; raise on a non-zero exit."""
    pkg = self.package
    pkg._assert_paths(install=True)
    log.info(style_note('make install'))
    res = call(
        ['make', 'install', '-j4'],
        cwd=os.path.dirname(self.makefile_path),
        env=pkg.fresh_environ(),
    )
    if res:
        raise RuntimeError('Could not `make install` package')
def relocate(self):
    """Finish --pseudo-homebrew handling by activating our install."""
    pkg = self.package
    if not pkg.pseudo_homebrew:
        return
    # --pseudo-homebrew is first handled by the generic.install, which
    # sets the install_path to be in the Homebrew cellar. We finish the
    # job by switching to that version.
    log.info(style_note('Switching Homebrew to %s %s' % (pkg.name, self.version)))
    self.brew('switch', pkg.name, self.version)
def _get_meta(self, pkg):
    """Fetch and decode this package's metadata from PyPI.

    (Removed an unused ``path`` local that was computed but never read —
    the on-disk cache lives in ``_meta``, not here.)
    """
    log.info(style_note('Looking up %s on PyPI' % self.name))
    url = PYPI_URL_PATTERN % self.name
    res = http_request('GET', url)
    meta = json.loads(res.data)
    return meta
def _meta(self):
    """Return this package's PyPI metadata, caching the raw JSON on disk."""
    pkg = self.package
    path = pkg.home._abs_path('packages', 'pypi', self.name, 'meta.json')
    if not os.path.exists(path):
        log.info(style_note('Looking up %s on PyPI' % self.name))
        url = PYPI_URL_PATTERN % self.name
        # NOTE(review): urllib2 is Python-2-only; urllib.request on Py3.
        res = urllib2.urlopen(url)
        makedirs(os.path.dirname(path))
        with open(path, 'wb') as fh:
            fh.write(res.read())
    # Context manager closes the cache file deterministically (the
    # original `json.load(open(path, 'rb'))` leaked the handle).
    with open(path, 'rb') as fh:
        return json.load(fh)
def init(args):
    """Initialize the structures on disk before any other commands,
    and optionally setup the first environment repository. E.g.:

        vee init [email protected]:vfxetc/vee-repo primary

    This is the same as:

        vee init
        vee repo clone [email protected]:vfxetc/vee-repo primary

    """
    try:
        args.home.init()
        log.info('Initialized %s' % args.home.root)
    except ValueError:
        # The home already exists; don't touch it, just tell the user how
        # to add the requested repo manually, and stop.
        log.error('Home already exists.')
        if args.url:
            log.info('Create a new repository via:')
            log.info('\tvee repo clone --default %s %s' % (args.url, args.name or ''))
        return
    # Fresh home: optionally create the first repository right away.
    if args.url:
        repo = args.home.create_repo(url=args.url, name=args.name)
        log.info('Created repo %s at %s' % (repo.name, repo.work_tree))
def develop(self):
    """Source ``develop.sh``, capturing each ``vee_develop_setenv`` call
    it makes into the package's environment."""
    log.info(style_note('source %s' % os.path.basename(self.develop_sh)))
    pkg = self.package

    def setenv(name, value):
        # Echo and record every variable the script exports.
        log.info('vee develop setenv %s "%s"' % (name, value))
        pkg.environ[name] = value

    with log.indent():
        bash_source(
            os.path.basename(self.develop_sh),
            callbacks=dict(vee_develop_setenv=setenv),
            cwd=os.path.dirname(self.develop_sh),
        )
def relocate(self, pkg):
    """Relocate the package, then finish --pseudo-homebrew activation."""
    # Standard --relocate and --set-rpath
    relocate_package(pkg)

    if not pkg.pseudo_homebrew:
        return

    # --pseudo-homebrew is first handled by the generic.install, which
    # sets the install_path to be in the Homebrew cellar. We finish the
    # job by switching to that version.
    log.info(style_note('Switching Homebrew to %s %s' % (self.untapped_name, self.version)))
    if self.tap_name:
        self.brew.assert_tapped(self.tap_name)
    self.brew('switch', self.untapped_name, self.version)
def _install_setup(self, pkg):
    # Install a setup.py-based package into ``pkg.install_path`` via
    # ``setup.py install``. Any existing install is wiped first.
    # Raises RuntimeError if setup.py fails.
    pkg._assert_paths(install=True)

    site_packages = get_default_python().rel_site_packages
    install_site_packages = os.path.join(pkg.install_path, site_packages)

    # Setup the PYTHONPATH to point to the "install" directory.
    env = pkg.fresh_environ()
    env['PYTHONPATH'] = join_env_path(install_site_packages, env.get('PYTHONPATH'))

    if os.path.exists(pkg.install_path):
        log.warning('Removing existing install: ' + pkg.install_path)
        shutil.rmtree(pkg.install_path)
    os.makedirs(install_site_packages)

    log.info(style_note('Installing Python package', 'to ' + install_site_packages))

    cmd = [
        'install',
        '--root', pkg.install_path,  # Better than prefix
        '--prefix', '.',
        # At one point we forced everything into `lib`, so we don't get a
        # `lib64`. Virtualenv symlinked them together anyways. But then we
        # switched to using pip's internals to unpack wheels, and it would
        # place stuff into both `lib` and `lib64`. So we don't really
        # know where we stand on this anymore.
        '--install-lib', site_packages,
        '--single-version-externally-managed',
    ]
    if not pkg.defer_setup_build:
        # The build step already ran; tell setup.py not to repeat it.
        cmd.append('--skip-build')

    res = call_setup_py(self.setup_path, cmd, env=env, indent=True, verbosity=1)
    if res:
        raise RuntimeError('Could not install Python package')
def _install_wheel(self, pkg):
    # Install a wheel (or bare dist-info tree) into ``pkg.install_path``
    # using pip's own wheel installer, remapping pythonX.Y directory names
    # when the target interpreter version differs from the running one.
    pkg._assert_paths(install=True)

    if pkg.package_path.endswith('.whl'):
        log.info(style_note("Found Python Wheel", os.path.basename(self.dist_info_dir)))
    else:
        log.info(style_note("Found dist-info", os.path.basename(self.dist_info_dir)))
        log.warning("Bare dist-info does not appear to be a wheel.")

    wheel_dir, dist_info_name = os.path.split(self.dist_info_dir)
    wheel_name = os.path.splitext(dist_info_name)[0]

    # Lets just take advantage of pip!
    # The only reason we're reading into pip like this is because we
    # would rather just do this part, rather than have it go through
    # the full process with the *.whl file. If this breaks, feel
    # free to do something like:
    #     pip install --force-reinstall --prefix {pkg.install_path} --no-deps {pkg.package_path}
    # along with:
    #     --no-warn-script-location
    #     --disable-pip-version-check
    # We delay the import just in case the bootstrap is borked.
    from pip._internal.operations.install.wheel import install_wheel
    from pip._internal.locations import get_scheme

    # We may to trick pip into installing into another version's directories.
    scheme = get_scheme(self.name, prefix=pkg.install_path)
    version = get_default_python().version
    src_python = '{}python{}.{}{}'.format(os.path.sep, sys.version_info[0], sys.version_info[1], os.path.sep)
    dst_python = '{}python{}.{}{}'.format(os.path.sep, version[0], version[1], os.path.sep)
    if src_python != dst_python:
        # Rewrite every scheme path from the running interpreter's
        # pythonX.Y to the target interpreter's.
        for k in 'platlib', 'purelib', 'headers', 'scripts', 'data':
            setattr(scheme, k, getattr(scheme, k).replace(src_python, dst_python))

    # install_wheel historically needed a requirement object; a stub with
    # only a name suffices here.
    req = DummyPipRequirement()
    req.name = wheel_name
    install_wheel(pkg.name, pkg.package_path, scheme, '<VEE dummy request>')
def list_(args):
    """List the configured environment repositories with their remote info."""
    home = args.assert_home()
    rows = list(home.db.execute('SELECT * FROM repositories'))
    if not rows:
        log.warning('No repositories.')
        return
    # (Removed an unused ``max_len`` computation left over from an earlier
    # column-alignment scheme.)
    for row in rows:
        repo = EnvironmentRepo(row, home=home)
        if repo.exists:
            # NOTE(review): as written, the ternary binds as
            # ``(url + ' --default') if is_default else ''`` — non-default
            # repos show no URL at all. Confirm whether the intent was
            # ``url + (' --default' if is_default else '')``.
            log.info(style_note(
                repo.name,
                '%s/%s' % (repo.remote_name, repo.branch_name),
                repo.remotes().get(repo.remote_name, '') + ' --default' if row['is_default'] else '',
            ))
def rewrite_shebang(self, old_path, new_path):
    # Copy an executable script to ``new_path``, rewriting a python-style
    # shebang to point at this environment's bin/ directory.
    # Returns True when a rewrite happened; None (falsy) otherwise.

    # Only care if it is at all executable.
    stat = os.stat(old_path)
    if not (stat.st_mode & 0o111):
        return

    # If it starts with a Python shebang, rewrite it.
    with open(old_path, 'rb') as old_fh:
        old_shebang = old_fh.readline()
        # NOTE(review): the file is opened in binary mode but matched with
        # a str pattern and the new file receives str data — Python 2
        # semantics; confirm before running under Python 3.
        m = re.match(r'#!(|\S+/)([^\s/]+)', old_shebang)
        if not m:
            return

        # Only rewrite if the interpreter actually exists in our bin/.
        new_bin = os.path.join(self.path, 'bin', m.group(2))
        if not os.path.exists(new_bin):
            return

        new_shebang = '#!%s%s' % (new_bin, old_shebang[m.end(2):])

        log.info('Rewriting shebang of %s' % old_path, verbosity=1)
        log.debug('New shebang: %s' % new_shebang.strip(), verbosity=1)

        self._assert_real_dir(os.path.dirname(new_path))

        # Due to the way the _assert_real_dir works, we may have already
        # created a symlink in the location of the new_path which points to
        # the old_path. If we don't delete it first, then we will be
        # reading and writing to the same time, and will only get the
        # shebang + 1024 bytes (the buffer size on my machine).
        if os.path.lexists(new_path):
            os.unlink(new_path)

        with open(new_path, 'wb') as new_fh:
            new_fh.write(new_shebang)
            # Stream the remainder of the source file (old_fh is still
            # positioned just past the original shebang line).
            new_fh.writelines(old_fh)

    try:
        shutil.copystat(old_path, new_path)
    except OSError as e:
        # These often come up when you are not the owner
        # of the file.
        log.exception('Could not copystat to %s' % new_path)
        if e.errno != errno.EPERM:
            raise

    return True
def update(self, force=False):
    """Fetch and fast-forward this repo. Return True on success (or when
    the configured remote is missing), False when a fast-forward is unsafe."""
    log.info(style_note('Updating repo', self.name))
    self.clone_if_not_exists()

    if self.remote_name not in self.remotes():
        log.warning('"%s" does not have remote "%s"' % (self.name, self.remote_name))
        return True

    rev = self.fetch()

    # Forced updates skip the safety check entirely.
    if force or self.check_ff_safety(rev):
        self.checkout(force=force)
        return True

    log.error('Cannot fast-forward; skipping.')
    return False
def relocate_package(pkg):
    """Apply the package's --relocate and --set-rpath directives."""
    if pkg.relocate:
        log.info(style_note('Relocating'))
        with log.indent():
            libs.relocate(
                pkg.install_path,
                con=pkg.home.db.connect(),
                spec=pkg.render_template(pkg.relocate),
            )

    # RPATH rewriting applies only to ELF binaries, i.e. on Linux.
    if pkg.set_rpath and sys.platform.startswith('linux'):
        rpath = pkg.render_template(pkg.set_rpath)
        log.info(style_note('Setting RPATH to', rpath))
        with log.indent():
            libs.relocate(
                pkg.install_path,
                con=pkg.home.db.connect(),
                spec=rpath,
            )
def install(self):
    # Install a setup.py-based Python package; fall back to the generic
    # builder's install when there is no setup.py.
    if not self.setup_path:
        return super(PythonBuilder, self).install()

    pkg = self.package
    pkg._assert_paths(install=True)

    # NOTE(review): ``site_packages`` is not defined in this function —
    # presumably a module-level value in the original file (compare
    # _install_setup, which derives it from get_default_python()); confirm.
    install_site_packages = os.path.join(pkg.install_path, site_packages)

    # Setup the PYTHONPATH to point to the "install" directory.
    env = pkg.fresh_environ()
    env['PYTHONPATH'] = '%s:%s' % (install_site_packages, env.get('PYTHONPATH', ''))

    if os.path.exists(pkg.install_path):
        log.warning('Removing existing install', pkg.install_path)
        shutil.rmtree(pkg.install_path)
    os.makedirs(install_site_packages)

    log.info(style_note('Installing Python package', 'to ' + install_site_packages))

    cmd = [
        'install',
        '--root', pkg.install_path,  # Better than prefix
        '--prefix', '.',
        '--install-lib', site_packages,  # So that we don't get lib64; virtualenv symlinks them together anyways.
        '--single-version-externally-managed',
    ]
    if not pkg.defer_setup_build:
        # The build step already ran; tell setup.py not to repeat it.
        cmd.append('--skip-build')

    res = call_setup_py(self.setup_path, cmd, env=env, indent=True, verbosity=1)
    if res:
        raise RuntimeError('Could not install Python package')