def create_if_not_exists(self):
    """Create this environment's Python virtualenv if it is missing, and
    make sure a ``python-config`` script is available inside it."""
    python = os.path.join(self.path, 'bin', 'python')
    if not os.path.exists(python):
        makedirs(self.path)
        log.info(style_note('Creating Python virtualenv', self.path))
        if hasattr(virtualenv, 'cli_run'):
            # New API (in which there isn't really any API)
            virtualenv.cli_run(['--no-pip', '--no-wheel', '--no-setuptools', self.path])
        else:
            # Old API
            virtualenv.create_environment(self.path, no_setuptools=True, no_pip=True)
    if not os.path.exists(python + '-config'):
        # virtualenv does not provide python-config; locate one beside the
        # real interpreter and expose it inside this environment.
        version = get_default_python().version
        names = (
            'python{}.{}-config'.format(*version),
            'python{}-config'.format(*version),
            'python-config',
        )
        # real_prefix exists when we ourselves run inside a virtualenv;
        # it points at the underlying interpreter's prefix.
        prefix = getattr(sys, 'real_prefix', sys.prefix)
        for name in names:
            old_path = os.path.join(prefix, 'bin', name)
            if os.path.exists(old_path):
                # Link the first config found under every expected name.
                # (The inner loop deliberately reuses ``name``.)
                for name in names:
                    new_path = os.path.join(self.path, 'bin', name)
                    self.rewrite_shebang_or_link(old_path, new_path)
                break
        else:
            log.warning('Could not find python-config')
def git(args, *command): """Run a ``git`` command on a environment repository's git repository. (Sorry for the name collision.) e.g.:: $ vee git -r primary status On branch master Your branch is behind 'origin/master' by 1 commit, and can be fast-forwarded. (use "git pull" to update your local branch) nothing to commit, working directory clean """ home = args.assert_home() repo = home.get_env_repo(args.repo) if args.stree: call(['stree', repo.work_tree]) return if not command: print style_error('please provide a git command') return 1 makedirs(repo.work_tree) repo.git(*command, verbosity=0, indent=False)
def create_if_not_exists(self): python = os.path.join(self.path, 'bin', 'python') if not os.path.exists(python): makedirs(self.path) print style('Creating Python virtualenv', 'blue', bold=True), style(self.path, bold=True) virtualenv.create_environment(self.path, no_setuptools=True, no_pip=True) if not os.path.exists(python + '-config'): names = ( 'python%d.%d-config' % sys.version_info[:2], 'python%d-config' % sys.version_info[0], 'python-config', ) prefix = getattr(sys, 'real_prefix', sys.prefix) for name in names: old_path = os.path.join(prefix, 'bin', name) if os.path.exists(old_path): for name in names: new_path = os.path.join(self.path, 'bin', name) self.rewrite_shebang_or_link(old_path, new_path) break else: log.warning('Could not find python-config')
def render(self, **kwargs):
    """Render this package template into ``self.path``.

    Each call bumps ``self.rev_count``. Template parameters come from
    ``self.defaults`` overridden by *kwargs*, plus ``REVNO``. Tokens of
    the form ``MOCK<PARAM>`` in both relative paths and file contents
    are replaced with the matching parameter (empty string when falsy
    or missing). Files matching patterns in an optional ``mockignore``
    file (and ``mockignore`` itself) are skipped.
    """
    self.rev_count += 1

    params = self.defaults.copy()
    params.update(kwargs)
    params.update(REVNO=self.rev_count)

    def render_contents(contents):
        # Replace MOCKFOO with str(params['FOO']), or '' when unset/falsy.
        return re.sub(r'MOCK([A-Z0-9]+)', lambda m: str(params.get(m.group(1)) or ''), contents)

    ignore_path = os.path.join(self.template, 'mockignore')
    if os.path.exists(ignore_path):
        # was: ``open(ignore_path)`` iterated without ever closing the
        # handle — read the patterns inside a context manager instead.
        with open(ignore_path) as fh:
            patterns = [x.strip() for x in fh] + ['mockignore']
        pattern = re.compile('|'.join(fnmatch.translate(x) for x in patterns if x))
    else:
        pattern = None

    for dir_path, dir_names, file_names in os.walk(self.template):
        for file_name in file_names:
            if pattern and pattern.match(file_name):
                continue
            src_path = os.path.join(dir_path, file_name)
            rel_path = os.path.relpath(src_path, self.template)
            # Relative paths may themselves contain MOCK tokens.
            dst_path = os.path.join(self.path, render_contents(rel_path))
            makedirs(os.path.dirname(dst_path))
            # was: ``open(src_path, 'r').read()`` leaked the handle.
            with open(src_path, 'r') as fh:
                contents = render_contents(fh.read())
            with open(dst_path, 'w') as fh:
                fh.write(contents)
            # Preserve the template file's mode and timestamps.
            shutil.copystat(src_path, dst_path)
def _makedirs(self, create_parents=False):
    """Create the standard directory layout under ``self.root``.

    :param bool create_parents: allow missing parents of the root to be
        created implicitly.
    :raises ValueError: if the root's parent does not exist and
        ``create_parents`` is false.
    """
    parent = os.path.dirname(self.root)
    if not (create_parents or os.path.exists(parent)):
        raise ValueError('parent of %s does not exist' % self.root)
    for subdir in ('builds', 'environments', 'installs', 'packages', 'repos'):
        makedirs(self._abs_path(subdir))
def home(self, path=None, init=None):
    """Return a ``Home`` rooted at *path* (default: the sandbox).

    :param init: force initialization on/off; when ``None``, initialize
        only if the home's database does not exist yet.
    """
    target = Home(path or self.sandbox())
    makedirs(os.path.dirname(target.root))
    should_init = (not target.db.exists) if init is None else init
    if should_init:
        target.init(create_parents=True)
    return target
def _backup(self):
    """Copy ``self.path`` into a sibling ``backups`` directory, with the
    current UTC timestamp appended to the filename."""
    parent = os.path.dirname(self.path)
    backup_dir = os.path.join(parent, 'backups')
    stamp = datetime.datetime.utcnow().isoformat('T')
    backup_path = os.path.join(backup_dir, os.path.basename(self.path) + '.' + stamp)
    makedirs(backup_dir)
    shutil.copyfile(self.path, backup_path)
def optlink(self, pkg):
    """Point the ``opt/<name>`` symlink at the package's install path.

    No-op for packages without a name.
    """
    if not pkg.name:
        return
    link_path = pkg.home._abs_path('opt', pkg.name)
    log.info(style_note('Linking to opt/%s' % pkg.name))
    # Replace any existing link (or dangling link) in place.
    if os.path.lexists(link_path):
        os.unlink(link_path)
    makedirs(os.path.dirname(link_path))
    os.symlink(pkg.install_path, link_path)
def install(self, pkg):
    """Install a Ruby gem into the package's install path via ``gem``,
    passing any package config through after ``--``."""
    pkg._assert_paths(install=True)

    # TODO: Find the Ruby version.
    gem_home = os.path.join(pkg.install_path, 'lib/ruby/2.0.0')
    makedirs(gem_home)

    cmd = ['gem', 'install', pkg.name]
    if pkg.config:
        cmd.append('--')
        for arg in pkg.config:
            cmd.append(pkg.render_template(arg))

    call(cmd, env={'GEM_HOME': gem_home})
def _meta(self):
    """Return this package's PyPI metadata as a dict.

    Fetches ``PYPI_URL_PATTERN % self.name`` on first use and caches the
    raw JSON at ``packages/pypi/<name>/meta.json`` under the home;
    subsequent calls read the cached file.
    """
    pkg = self.package
    path = pkg.home._abs_path('packages', 'pypi', self.name, 'meta.json')
    if not os.path.exists(path):
        log.info(style_note('Looking up %s on PyPI' % self.name))
        url = PYPI_URL_PATTERN % self.name
        res = urllib2.urlopen(url)
        try:
            makedirs(os.path.dirname(path))
            with open(path, 'wb') as fh:
                fh.write(res.read())
        finally:
            # was: the HTTP response was never closed.
            res.close()
    # was: ``json.load(open(path, 'rb'))`` leaked the file handle.
    with open(path, 'rb') as fh:
        return json.load(fh)
def __init__(self, name, home=None):
    """Create (and ``git init``) a sandbox repository named *name*.

    When *home* is omitted, the shared test home is used.
    """
    self.name = name
    sandbox_repo = os.path.join(__file__, '..', '..', '..', 'sandbox', 'repos', name)
    self.path = os.path.abspath(sandbox_repo)
    makedirs(self.path)
    self.repo = GitRepo(self.path)
    self.repo.git('init', silent=True, stdout=True)
    if home is None:
        from tests import home
    self.home = home
    self._rev_count = None
def create_repo(self, path=None, url=None, name=None, remote=None, branch=None, is_default=None):
    """Register a new environment repository in the home's database.

    :param path: existing local repository to register; must exist.
    :param url: remote URL to clone when no local repo exists yet.
    :param name: repo name; derived from *url*/*path* basename when omitted.
    :param remote: git remote name; defaults to ``'origin'``.
    :param branch: branch to track; defaults to the global default branch.
    :param is_default: whether this repo becomes the default.
    :returns: the new :class:`EnvironmentRepo`.
    :raises ValueError: if *path* does not exist, or a repo with this
        name is already registered.
    """
    if path:
        path = os.path.abspath(path)
        if not os.path.exists(path):
            raise ValueError('no repo at %s' % path)
    if url or path:
        # Derive a name from the basename, dropping any ".git" suffix.
        name = name or re.sub(r'\.git$', '', os.path.basename(url or path))
    else:
        name = name or self.default_repo_name or PRIMARY_REPO
    # Make sure it doesn't exist.
    try:
        repo = self.get_repo(name)
    except ValueError:
        pass
    else:
        raise ValueError('%r repo already exists' % name)
    branch = branch or get_default_branch()
    con = self.db.connect()
    cur = con.execute('INSERT OR REPLACE INTO repositories (name, path, remote, branch, is_default) VALUES (?, ?, ?, ?, ?)', [name, path, remote or 'origin', branch, bool(is_default)])
    # Re-read the row so the EnvironmentRepo sees DB defaults too.
    row = con.execute('SELECT * FROM repositories WHERE id = ?', [cur.lastrowid]).fetchone()
    repo = EnvironmentRepo(row, home=self)
    if url:
        repo.clone_if_not_exists(url)
    elif not repo.exists:
        makedirs(repo.work_tree)
        repo.git('init')
    # Record the branch actually checked out, which may differ from the
    # one requested (e.g. a fresh clone's default branch).
    if branch != repo.get_current_branch():
        con.execute('UPDATE repositories SET branch = ? WHERE id = ?', [repo.get_current_branch(), repo.id])
    return repo
def __init__(self, name, template, defaults=None, path=None):
    """Set up a sandboxed mock package rendered from *template*.

    The package directory is created and ``git init``-ed immediately.
    """
    self.name = name

    base = os.path.join(__file__, '..', '..')
    self.template = os.path.abspath(os.path.join(base, 'package-templates', template))
    self.path = os.path.abspath(os.path.join(base, '..', 'sandbox', 'packages', path or name))

    makedirs(self.path)
    self.repo = GitRepo(self.path)
    self.repo.git('init', silent=True, stdout=True)

    # NOTE: intentionally aliases the caller's dict (not a copy), so
    # setdefault only fills in NAME/VERSION when absent.
    self.defaults = defaults or {}
    self.defaults.setdefault('NAME', self.name)
    self.defaults.setdefault('VERSION', '1.0.0')

    self.rev_count = 0
def fetch(self, pkg):
    """Copy this source (file or directory) into ``pkg.package_path``,
    replacing whatever is already there."""
    pkg._assert_paths(package=True)

    # Clear out any previous fetch, whichever kind it was.
    if os.path.exists(pkg.package_path):
        if os.path.isdir(pkg.package_path):
            shutil.rmtree(pkg.package_path)
        else:
            os.unlink(pkg.package_path)

    makedirs(os.path.dirname(pkg.package_path))
    log.info(style_note('Copying', 'to ' + pkg.package_path))

    src = os.path.expanduser(self._path)
    if os.path.isdir(src):
        shutil.copytree(src, pkg.package_path, symlinks=True)
    else:
        shutil.copyfile(src, pkg.package_path)
def download(url, dst):
    """Download *url* to *dst*, staging through ``dst + '.downloading'``
    so a partial transfer never replaces the destination."""
    makedirs(os.path.dirname(dst))
    partial = dst + '.downloading'

    response = None
    out = None
    try:
        response = urllib2.urlopen(url)
        out = open(partial, 'wb')
        # TODO: Indicate progress.
        while True:
            chunk = response.read(16384)
            if chunk == '':
                break
            out.write(chunk)
    finally:
        if response:
            response.close()
        if out:
            out.close()

    shutil.move(partial, dst)
def download(url, dst):
    """Download *url* to *dst*, staging through ``dst + '.downloading'``
    so a partial transfer never replaces the destination."""
    makedirs(os.path.dirname(dst))
    partial = dst + '.downloading'

    response = None
    out = None
    try:
        # preload_content=False streams the body instead of buffering it.
        response = http_request('GET', url, preload_content=False)
        out = open(partial, 'wb')
        # TODO: Indicate progress.
        while True:
            chunk = response.read(16384)
            if chunk == b'':
                break
            out.write(chunk)
    finally:
        if response:
            response.close()
        if out:
            out.close()

    shutil.move(partial, dst)
def dump(self, path, recurse=True):
    """Write this manifest to *path*, atomically via a ``.tmp`` file.

    When *recurse* is true, every included sub-manifest is written as
    well, relative to *path*'s directory.

    :returns: list of all paths written.
    """
    paths = [path]
    tmp = path + '.tmp'
    with open(tmp, 'w') as fh:
        for line in self.iter_dump():
            fh.write(line)
    os.rename(tmp, path)
    if not recurse:
        # was: bare ``return`` (None) — inconsistent with the recursive
        # path below; always return the list of written paths.
        return paths
    for item in self._items:
        if not item.is_include:
            continue
        include = item.value
        req_set = include.manifest
        # Includes are written relative to this manifest's directory.
        sub_path = os.path.join(os.path.dirname(path), include.path)
        makedirs(os.path.dirname(sub_path))
        paths.extend(req_set.dump(sub_path))
    return paths
def repackage(args):
    """Re-archive installed packages as ``.tgz`` files in ``args.dir``.

    Walks ``args.packages`` (and, unless ``--no-deps``, their
    dependencies), tars each package's install tree, embeds a
    ``vee-requirements.txt`` listing its dependencies, records an MD5
    checksum, and finally prints requirement lines in (roughly) the
    order they should be added.
    """
    home = args.assert_home()
    con = home.db.connect()
    makedirs(args.dir)
    todo = args.packages
    seen = set()
    in_order = []
    checksums = {}
    while todo:
        desc = todo.pop(0)
        if isinstance(desc, Package):
            pkg = desc
            # Resolve lazily; a package that already has an id is known.
            pkg.id or pkg.resolve_existing()
        else:
            # Try by name first ("weak" URL), then by treating the
            # descriptor itself as a URL.
            pkg = Package(name=desc, url='weak', home=home)
            if not pkg.resolve_existing(weak=True):
                pkg = Package(url=desc, home=home)
                if not pkg.resolve_existing():
                    log.error('cannot find package %s' % desc)
                    continue
        if pkg.name in seen:
            continue
        seen.add(pkg.name)
        print style_note(str(pkg))
        if not args.no_deps:
            todo.extend(pkg.dependencies)
        # A package containing any platform-dependent file (by extension)
        # is tagged with the current platform instead of 'any'.
        platform_dependent = False
        for dir_path, dir_names, file_names in os.walk(pkg.install_path):
            for file_name in file_names:
                _, ext = os.path.splitext(file_name)
                if ext in PLATFORM_DEPENDENT_EXTS:
                    platform_dependent = True
                    break
            if platform_dependent:
                break
        name = '%s-%s-%s.tgz' % (pkg.name, pkg.revision, PLATFORM_TAG if platform_dependent else 'any')
        path = os.path.join(args.dir, name)
        in_order.append((pkg, path))
        if os.path.exists(path):
            if args.force:
                os.unlink(path)
            else:
                print '%s already exists' % name
                continue
        if args.verbose:
            print name
        # Hash the archive as it is written so we get the checksum for free.
        writer = HashingWriter(open(path, 'wb'), hashlib.md5())
        archive = tarfile.open(fileobj=writer, mode='w|gz')
        for dir_path, dir_names, file_names in os.walk(pkg.install_path):
            for dir_name in dir_names:
                path = os.path.join(dir_path, dir_name)
                rel_path = os.path.relpath(path, pkg.install_path)
                if args.verbose:
                    print ' ' + rel_path + '/'
                # Directories are added non-recursively; files follow below.
                archive.add(path, rel_path, recursive=False)
            for file_name in file_names:
                path = os.path.join(dir_path, file_name)
                mode = os.lstat(path).st_mode
                # Skip anything that is not a regular file, dir, or symlink.
                if not (stat.S_ISREG(mode) or stat.S_ISDIR(mode) or stat.S_ISLNK(mode)):
                    continue
                rel_path = os.path.relpath(path, pkg.install_path)
                if args.verbose:
                    print ' ' + rel_path
                archive.add(path, rel_path)
        if pkg.dependencies:
            # Embed the dependency list so installs can recurse.
            requirements = []
            for dep in pkg.dependencies:
                dep.resolve_existing()
                requirements.append(str(dep))
            buf = StringIO('\n'.join(requirements))
            info = tarfile.TarInfo('vee-requirements.txt')
            info.size = len(buf.getvalue())
            archive.addfile(info, buf)
        archive.close()
        checksums[pkg.name] = 'md5:' + writer.hexdigest()
    print
    print 'Add as requirements in (roughly) the following order:'
    print
    # Reverse so dependencies come before their dependents.
    for pkg, path in reversed(in_order):
        checksum = checksums.get(pkg.name)
        url = (args.url.rstrip('/') + '/' + os.path.basename(path) if args.url else os.path.abspath(path))
        parts = [url, '--name', pkg.name, '--revision', pkg.revision or '""']
        if checksum:
            parts.extend(('--checksum', checksum))
        print ' '.join(parts)
def upgrade(self, dirty=False, subset=None, reinstall=False, relink=False, no_deps=False, force_branch_link=True):
    """Install/link this environment repo's requirements, then refresh
    the by-branch and by-version environment symlinks.

    :returns: ``False`` on a dirty repo or (when not forced) on install
        errors; ``True`` otherwise.
    """
    self.clone_if_not_exists()
    try:
        head = self.head
    except CalledProcessError:
        log.warning(style_warning('no commits in repository'))
        head = None
    try:
        remote_head = self.rev_parse('%s/%s' % (self.remote_name, self.branch_name))
    except ValueError:
        log.warning(style_warning('tracked %s/%s does not exist in self' % (self.remote_name, self.branch_name)))
        remote_head = None
    if remote_head and head != remote_head:
        log.warning(style_warning('%s repo not checked out to %s/%s' % (self.name, self.remote_name, self.branch_name)))
    # NOTE(review): this overwrites the ``dirty`` argument with the
    # repo's actual status, so the parameter never reaches the check
    # below — confirm whether ``dirty = dirty or ...`` was intended.
    dirty = bool(list(self.status()))
    if not dirty and self.is_dirty():
        log.error('%s repo is dirty; force with --dirty' % self.name)
        return False
    env = self.get_environment()
    req_set = self.load_requirements()
    pkg_set = PackageSet(env=env, home=self.home)
    # Register the whole set, so that dependencies are pulled from here instead
    # of weakly resolved from installed packages.
    # TODO: This blanket reinstalls things, even if no_deps is set.
    pkg_set.resolve_set(req_set, check_existing=not reinstall)
    # Install and/or link.
    pkg_set.install(subset or None, link_env=env, reinstall=reinstall, relink=relink, no_deps=no_deps)
    if pkg_set._errored and not force_branch_link:
        log.warning(style_warning("Not creating branch or version links; force with --force-branch-link"))
        return False
    # Create a symlink by branch.
    path_by_branch = self.home._abs_path('environments', self.name, self.branch_name)
    if os.path.lexists(path_by_branch):
        os.unlink(path_by_branch)
    makedirs(os.path.dirname(path_by_branch))
    os.symlink(env.path, path_by_branch)
    # Create a symlink by version.
    version = req_set.headers.get('Version')
    if version:
        path_by_version = self.home._abs_path('environments', self.name, 'versions', version.value + ('-dirty' if dirty else ''))
        if os.path.lexists(path_by_version):
            os.unlink(path_by_version)
        makedirs(os.path.dirname(path_by_version))
        os.symlink(env.path, path_by_version)
    return True
def init(args, do_clone=False, do_install=False, do_add=False, is_find=False):
    """Set up a development package: init, clone, install-from-manifest,
    or add an existing checkout, then run its ``develop`` pipeline and
    record it as a dev package.

    Exactly one of *do_clone*/*do_install*/*do_add* selects the mode;
    when none is set, a fresh repository is initialized.
    """
    do_init = not (do_clone or do_install or do_add)
    name = args.name
    home = args.assert_home()
    con = home.db.connect()
    path = os.path.abspath(args.path or os.path.join(home.dev_root, name))
    dev_repo = GitRepo(path)
    if do_init:
        log.info(style_note('Initing %s' % dev_repo.work_tree))
        makedirs(dev_repo.work_tree)
        dev_repo.git('init')
    elif do_clone:
        log.info(style_note('Cloning %s' % args.url))
        makedirs(dev_repo.work_tree)
        dev_repo.clone_if_not_exists(args.url)
    elif do_install:
        # Find an existing tool.
        # TODO: put more of this into EnvironmentRepo or Manifest
        repo = home.get_repo(args.repo)
        manifest_path = os.path.join(repo.work_tree, 'manifest.txt')
        manifest = Manifest(manifest_path, home=home)
        for req in manifest.iter_packages():
            if req.name.lower() == name.lower():
                # Make sure it is a Git package.
                url = normalize_git_url(req.url, prefix=False)
                if url:
                    break
        else:
            log.error('Could not find git-based "%s" in "%s" repo.' % (name, repo.name))
            return 2
        log.info(style_note('Found %s in %s' % (name, repo.name), str(req)))
        makedirs(dev_repo.work_tree)
        # Full clone (not shallow) since this is a working checkout.
        dev_repo.clone_if_not_exists(url, shallow=False)
    elif do_add:
        log.info(style_note('Adding %s from %s' % (name, path)))
        if not os.path.exists(path):
            log.error('%s does not exist' % path)
            return 1
    # All modes fall through to the develop pipeline and registration.
    package = Package([path], home=home, dev=True)
    try:
        package.pipeline.run_to('develop')
    except Exception as e:
        print_cli_exc(e)
        return 1
    log.info(style_note('Linking dev package', name, path))
    dev_pkg = DevPackage({'name': name, 'path': path, 'environ': package.environ}, home=home)
    dev_pkg.save_tag()
def init(args, do_clone=False, do_install=False, do_add=False, is_find=False):
    """Set up a development package: init, clone, install-from-requirements,
    or add an existing checkout, then run its ``develop`` pipeline and
    register it in the ``development_packages`` table.

    Exactly one of *do_clone*/*do_install*/*do_add* selects the mode;
    when none is set, a fresh repository is initialized. *is_find*
    softens the "already exists" case into a non-error.
    """
    do_init = not (do_clone or do_install or do_add)
    name = args.name
    home = args.assert_home()
    con = home.db.connect()
    # Make sure there are no other packages already, and clear out old ones
    # which no longer exist.
    for row in con.execute('SELECT * FROM development_packages WHERE name = ?', [name]):
        if not args.force and os.path.exists(os.path.join(row['path'], '.git')):
            if is_find:
                print style_note('"%s" already exists:' % name, row['path'])
                return
            else:
                print style_error('"%s" already exists:' % name, row['path'])
                return 1
        else:
            # Stale record (checkout gone, or --force): drop it.
            con.execute('DELETE FROM development_packages WHERE id = ?', [row['id']])
    path = os.path.abspath(args.path or os.path.join(home.dev_root, name))
    dev_repo = GitRepo(path)
    if do_init:
        print style_note('Initing %s' % dev_repo.work_tree)
        makedirs(dev_repo.work_tree)
        dev_repo.git('init')
    elif do_clone:
        print style_note('Cloning %s' % args.url)
        makedirs(dev_repo.work_tree)
        dev_repo.clone_if_not_exists(args.url)
    elif do_install:
        # Find an existing tool.
        # TODO: put more of this into EnvironmentRepo or Requirements
        env_repo = home.get_env_repo(args.repo)
        req_path = os.path.join(env_repo.work_tree, 'requirements.txt')
        reqs = Requirements(req_path, home=home)
        for req in reqs.iter_packages():
            if req.name.lower() == name.lower():
                # Make sure it is a Git package.
                url = normalize_git_url(req.url, prefix=False)
                if url:
                    break
        else:
            print style_error('Could not find git-based "%s" in "%s" repo.' % (name, env_repo.name))
            return 2
        print style_note('Found %s in %s' % (name, env_repo.name), str(req))
        makedirs(dev_repo.work_tree)
        # Full clone (not shallow) since this is a working checkout.
        dev_repo.clone_if_not_exists(url, shallow=False)
    elif do_add:
        print style_note('Adding %s from %s' % (name, path))
        if not os.path.exists(path):
            log.error('%s does not exist' % path)
            return 1
    # All modes fall through to the develop pipeline and registration.
    package = Package([path], home=home, dev=True)
    try:
        package.pipeline.run_to('develop')
    except Exception as e:
        print_cli_exc(e)
        return 1
    print style_note('Linking dev package', name, path)
    con.execute('INSERT INTO development_packages (name, path, environ) VALUES (?, ?, ?)', [name, path, json.dumps(package.environ)])
    dev_pkg = DevPackage({'name': name, 'path': path, 'environ': package.environ}, home=home)
    dev_pkg.save_tag()