def init(args):
    """Set up VEE's on-disk structures; run this before any other command.

    Optionally also creates the first environment repository, e.g.::

        vee init [email protected]:vfxetc/vee-repo primary

    which is the same as running ``vee init`` followed by
    ``vee repo clone [email protected]:vfxetc/vee-repo primary``.
    """
    try:
        args.home.init()
        log.info('Initialized %s' % args.home.root)
    except ValueError:
        # The home was already initialized. Leave it alone, but tell the
        # user how to add the repository they asked for.
        log.error('Home already exists.')
        if args.url:
            log.info('Create a new repository via:')
            log.info('\tvee repo clone --default %s %s' % (args.url, args.name or ''))
        return

    # Fresh home: optionally create the first repository right away.
    if args.url:
        repo = args.home.create_repo(url=args.url, name=args.name)
        log.info('Created repo %s at %s' % (repo.name, repo.work_tree))
def check_ff_safety(self, rev='HEAD', ignore_permissions=True):
    """Return True when the work tree can safely fast-forward to ``rev``.

    Safe means: nothing is staged or modified in the work tree/index, and
    the local head has not diverged from (or moved ahead of) ``rev``.

    :param rev: The revision we would fast-forward to.
    :param ignore_permissions: Passed through to :meth:`status`; when true,
        permission-only changes are not reported as dirty.
    :returns: bool; True when fast-forwarding is safe.
    """
    status_ok = True

    # Check the status of the work tree and index.
    for idx, tree, name in self.status(ignore_permissions=ignore_permissions):
        if idx or tree:
            # Fixed typo in user-facing message (was "uncomitted").
            log.error('uncommitted changes:')
            self.git('status')
            status_ok = False
            break

    # Make sure we haven't forked from the target revision.
    ahead, behind = self.distance(self.head, rev)
    if ahead and behind:
        log.error('you and the repo have forked')
        status_ok = False
    elif ahead:
        # %d for consistency with the "behind" message; ahead is a count.
        log.warning('you are %d commits ahead of the remote repo; please `vee push`' % ahead)
        status_ok = False
    elif behind:
        log.info('You are %d commits behind.' % behind)

    return status_ok
def update(self, force=False):
    """Fetch from the tracked remote and check out the fetched revision.

    Returns True on success (or when there is no matching remote to pull
    from), and False when a non-forced update is not fast-forward safe.
    """
    log.info(style_note('Updating repo', self.name))
    self.clone_if_not_exists()

    known_remotes = self.remotes()
    if self.remote_name not in known_remotes:
        # Nothing to pull from; treat as a no-op success.
        log.warning('"%s" does not have remote "%s"' % (self.name, self.remote_name))
        return True

    fetched_rev = self.fetch()

    # A forced update skips the safety check entirely.
    safe = force or self.check_ff_safety(fetched_rev)
    if not safe:
        log.error('Cannot fast-forward; skipping.')
        return False

    self.checkout(force=force)
    return True
def git(args, *command):
    """Run a git command inside one or more development package checkouts.

    :param args: Parsed CLI namespace; uses ``all``, ``names`` and
        ``assert_home``.
    :param command: The git sub-command and its arguments.
    :returns: 0 on success, 1 on usage error or a failing git invocation,
        2 when a named development package cannot be found.
    """
    # BUG FIX: this guard previously tested ``args.name``, but the CLI
    # collects package names into ``args.names`` (consumed below), so the
    # guard never matched the real option.
    if not (args.all or args.names):
        log.error("Please provide -n NAME or --all.")
        return 1
    if not command:
        log.error('Please provide a git command.')
        return 1

    home = args.assert_home()
    retcode = 0

    # Resolve which dev packages to operate on.
    if args.all:
        dev_pkgs = home.iter_development_packages()
    else:
        dev_pkgs = []
        for name in args.names:
            dev_pkg = home.find_development_package(name)
            if not dev_pkg:
                log.error("Could not find dev package: {!r}.".format(name))
                return 2
            dev_pkgs.append(dev_pkg)

    for dev_pkg in dev_pkgs:
        log.info(style_note(dev_pkg.name, ' '.join(command)))
        try:
            dev_pkg.git(*command, verbosity=0, indent=False)
        except Exception as e:
            # Keep going so one failing checkout doesn't stop the rest,
            # but remember that something failed.
            print_cli_exc(e)
            retcode = 1

    return retcode
def install(self, names=None, link_env=None, reinstall=False, relink=False, no_deps=False):
    """Install (and optionally link) the named packages in this set.

    :param names: A name or list of names to install; defaults to every
        package in the set.
    :param link_env: Environment to link installed packages into, passed
        through to ``_install_one``.
    :param reinstall: Truthy to force reinstalls; may also be a pre-built
        set of names.
    :param relink: Truthy to force relinking; may also be a pre-built set
        of names.
    :param no_deps: When true, restrict reinstall/relink expansion to the
        requested names instead of the whole set.
    :raises KeyError: If a requested name is not in this set.
    """
    # I'd love to split this method into an "install" and "link" step, but
    # then we'd need to reimplement the dependency resolution. That would
    # be a good idea to do anyways, but... meh.

    if isinstance(names, str):
        names = [names]
    names = list(names if names else self.keys())

    # Validate up front so we fail before doing any work.
    for name in names:
        if name not in self:
            raise KeyError(name)

    # Normalize the boolean flags into sets of names; callers may also pass
    # in sets directly, which are used as-is.
    if not isinstance(reinstall, set):
        reinstall = set(names if no_deps else self.keys()) if reinstall else set()
    if not isinstance(relink, set):
        relink = set(names if no_deps else self.keys()) if relink else set()

    # ``names`` acts as a work queue; _install_one may push dependencies
    # onto it as they are discovered.
    while names:
        name = names.pop(0)

        self._parent_names.setdefault(name, None)

        # Walk the recorded parent links to show why this package is being
        # processed (the "a < b < c" breadcrumb). The cycle guard
        # (``tip not in parent_chain``) stops on self-referential chains.
        parent_chain = []
        tip = name
        while tip and tip not in parent_chain:
            parent_chain.append(tip)
            tip = self._parent_names.get(tip)
        parent_chain = parent_chain[1:]

        print(
            '==>',
            style(name, 'blue'),
            style('(%s)' % ' < '.join(parent_chain), faint=True) if parent_chain else '')

        with log.indent():

            # Avoid infinite error loops: a package that already failed is
            # skipped rather than retried.
            if name in self._errored:
                log.warning('Skipping due to previous error.')
                continue

            try:
                self._install_one(names, name, link_env, reinstall, relink, no_deps)
            except PipelineError as e:
                # Expected pipeline failures: record and report briefly.
                self._errored.add(name)
                log.error(str(e))
                continue
            except Exception as e:
                # Unexpected failures: record, and dump the full traceback.
                self._errored.add(name)
                print_cli_exc(e, verbose=True)
                log.exception('Exception while processing %s' % name)
                continue

    if self._errored:
        log.warning('There were errors in: %s' % ', '.join(sorted(self._errored)))
def repackage(args):
    """Re-archive installed packages (and optionally their dependencies)
    into ``.tgz`` files under ``args.dir``, then print requirement lines
    (in reverse discovery order) suitable for pasting into a manifest.
    """
    home = args.assert_home()
    # NOTE(review): ``con`` is never used below — presumably left over from
    # an earlier revision; confirm before removing.
    con = home.db.connect()
    makedirs(args.dir)

    # Work queue of package descriptions; dependencies are appended as they
    # are discovered, so this is a breadth-ish traversal.
    todo = args.packages
    seen = set()
    in_order = []
    checksums = {}

    while todo:
        desc = todo.pop(0)

        # ``desc`` may already be a Package, or a string to resolve: first
        # weakly by name, then by URL.
        if isinstance(desc, Package):
            pkg = desc
            pkg.id or pkg.resolve_existing()
        else:
            pkg = Package(name=desc, url='weak', home=home)
            if not pkg.resolve_existing(weak=True):
                pkg = Package(url=desc, home=home)
                if not pkg.resolve_existing():
                    log.error('cannot find package %s' % desc)
                    continue

        # De-dupe by name so shared dependencies are only archived once.
        if pkg.name in seen:
            continue
        seen.add(pkg.name)

        print style_note(str(pkg))

        if not args.no_deps:
            todo.extend(pkg.dependencies)

        # A package is platform-dependent if any installed file has one of
        # the known platform-specific extensions (e.g. compiled objects).
        platform_dependent = False
        for dir_path, dir_names, file_names in os.walk(pkg.install_path):
            for file_name in file_names:
                _, ext = os.path.splitext(file_name)
                if ext in PLATFORM_DEPENDENT_EXTS:
                    platform_dependent = True
                    break
            if platform_dependent:
                break

        name = '%s-%s-%s.tgz' % (pkg.name, pkg.revision, PLATFORM_TAG if platform_dependent else 'any')
        path = os.path.join(args.dir, name)
        in_order.append((pkg, path))

        # Skip (or force-rebuild) archives that already exist.
        if os.path.exists(path):
            if args.force:
                os.unlink(path)
            else:
                print '%s already exists' % name
                continue

        if args.verbose:
            print name

        # Wrap the file in a hashing writer so the MD5 is computed while
        # streaming the gzipped tar out.
        writer = HashingWriter(open(path, 'wb'), hashlib.md5())
        archive = tarfile.open(fileobj=writer, mode='w|gz')

        for dir_path, dir_names, file_names in os.walk(pkg.install_path):
            # Add directories explicitly (non-recursive) so empty dirs and
            # permissions are preserved.
            for dir_name in dir_names:
                path = os.path.join(dir_path, dir_name)
                rel_path = os.path.relpath(path, pkg.install_path)
                if args.verbose:
                    print ' ' + rel_path + '/'
                archive.add(path, rel_path, recursive=False)
            for file_name in file_names:
                path = os.path.join(dir_path, file_name)
                mode = os.lstat(path).st_mode
                # Only archive regular files, dirs, and symlinks; skip
                # sockets, FIFOs, devices, etc.
                if not (stat.S_ISREG(mode) or stat.S_ISDIR(mode) or stat.S_ISLNK(mode)):
                    continue
                rel_path = os.path.relpath(path, pkg.install_path)
                if args.verbose:
                    print ' ' + rel_path
                archive.add(path, rel_path)

        # Embed this package's requirements so the archive is self-describing.
        if pkg.dependencies:
            requirements = []
            for dep in pkg.dependencies:
                dep.resolve_existing()
                requirements.append(str(dep))
            buf = StringIO('\n'.join(requirements))
            info = tarfile.TarInfo('vee-requirements.txt')
            info.size = len(buf.getvalue())
            archive.addfile(info, buf)

        archive.close()
        checksums[pkg.name] = 'md5:' + writer.hexdigest()

    # Dependencies were discovered after their dependents, so reversed order
    # is (roughly) a safe install order.
    print
    print 'Add as requirements in (roughly) the following order:'
    print
    for pkg, path in reversed(in_order):
        checksum = checksums.get(pkg.name)
        url = (args.url.rstrip('/') + '/' + os.path.basename(path) if args.url else os.path.abspath(path))
        parts = [url, '--name', pkg.name, '--revision', pkg.revision or '""']
        if checksum:
            parts.extend(('--checksum', checksum))
        print ' '.join(parts)
def delete(args):
    """Remove the named repository row from the home database.

    Logs an error (but does not raise) when no row matched.
    """
    home = args.assert_home()
    cursor = home.db.execute(
        'DELETE FROM repositories WHERE name = ?', [args.name])
    if not cursor.rowcount:
        log.error('No %r repository.' % args.name)
def upgrade(self, dirty=False, subset=None, reinstall=False, relink=False, no_deps=False, force_branch_link=True):
    """Install/link this environment repo's requirements, then refresh the
    by-branch and by-version environment symlinks.

    :param dirty: Used as the '-dirty' suffix flag for the version link;
        see the review note on its reassignment below.
    :param subset: Optional iterable of package names to restrict the
        install to.
    :param reinstall: Force reinstalling packages.
    :param relink: Force relinking packages.
    :param no_deps: Passed through to :meth:`PackageSet.install`.
    :param force_branch_link: Create the branch/version symlinks even when
        some packages errored.
    :returns: False when aborted (dirty repo, or errors without
        ``force_branch_link``), True otherwise.
    """
    self.clone_if_not_exists()

    # No commits at all is survivable; we just can't compare heads.
    try:
        head = self.head
    except CalledProcessError:
        log.warning(style_warning('no commits in repository'))
        head = None

    # The tracked remote branch may not exist either.
    try:
        remote_head = self.rev_parse('%s/%s' % (self.remote_name, self.branch_name))
    except ValueError:
        log.warning(style_warning('tracked %s/%s does not exist in self' % (self.remote_name, self.branch_name)))
        remote_head = None

    if remote_head and head != remote_head:
        log.warning(style_warning('%s repo not checked out to %s/%s' % (self.name, self.remote_name, self.branch_name)))

    # NOTE(review): this overwrites the ``dirty`` parameter with the status
    # of the work tree, which makes the --dirty flag dead on this path and
    # makes the following condition read oddly (clean status but is_dirty()
    # aborts). Looks suspicious — confirm against VCS history before
    # changing.
    dirty = bool(list(self.status()))
    if not dirty and self.is_dirty():
        log.error('%s repo is dirty; force with --dirty' % self.name)
        return False

    env = self.get_environment()
    req_set = self.load_requirements()
    pkg_set = PackageSet(env=env, home=self.home)

    # Register the whole set, so that dependencies are pulled from here instead
    # of weakly resolved from installed packages.
    # TODO: This blanket reinstalls things, even if no_deps is set.
    pkg_set.resolve_set(req_set, check_existing=not reinstall)

    # Install and/or link.
    pkg_set.install(subset or None, link_env=env, reinstall=reinstall, relink=relink, no_deps=no_deps)

    if pkg_set._errored and not force_branch_link:
        log.warning(style_warning("Not creating branch or version links; force with --force-branch-link"))
        return False

    # Create a symlink by branch.
    path_by_branch = self.home._abs_path('environments', self.name, self.branch_name)
    if os.path.lexists(path_by_branch):
        os.unlink(path_by_branch)
    makedirs(os.path.dirname(path_by_branch))
    os.symlink(env.path, path_by_branch)

    # Create a symlink by version (from the manifest's Version header),
    # suffixed with '-dirty' when the work tree had changes.
    version = req_set.headers.get('Version')
    if version:
        path_by_version = self.home._abs_path('environments', self.name, 'versions', version.value + ('-dirty' if dirty else ''))
        if os.path.lexists(path_by_version):
            os.unlink(path_by_version)
        makedirs(os.path.dirname(path_by_version))
        os.symlink(env.path, path_by_version)

    return True
def commit(args):
    """Commit the environment repo, building a default commit message from
    the git logs of any pinned dev packages when none was given.

    :returns: 1 when there is nothing to commit; None when the editor
        failed or the message was emptied (commit aborted).
    """
    home = args.assert_home()
    repo = home.get_repo(args.repo)

    if not repo.status():
        log.error('Nothing to commit.')
        return 1

    # Default the semver bump level: micro bump (0) when --micro was given,
    # otherwise major-ish (2).
    if args.semver_level is None:
        args.semver_level = 0 if args.micro else 2

    if args.message is None:

        dev_pkgs = {pkg.name: pkg for pkg in home.iter_development_packages()}
        pkg_set = PackageSet(home=home)

        # Collect each git package's requirement from both the work tree
        # ('work', revision=None) and HEAD ('head') manifests so we can
        # diff the pinned versions.
        by_name = {}
        for revision, name in [
            (None, 'work'),
            ('HEAD', 'head'),
        ]:
            for req in repo.load_manifest(revision=revision).iter_packages():
                pkg = pkg_set.resolve(req, check_existing=False)
                if pkg.fetch_type != 'git':
                    continue
                by_name.setdefault(pkg.name, {})[name] = req

        # For every package whose pinned version changed, pull the commit
        # subjects between the two versions from the dev checkout.
        commits = []
        for pkg_name, reqs in sorted(by_name.items()):
            new = reqs['work']
            old = reqs['head']
            if new.version == old.version:
                continue
            dev = dev_pkgs.get(pkg_name)
            if not dev:
                continue
            for line in dev.git('log', '--pretty=%cI %h %s', '{}...{}'.format(old.version, new.version), stdout=True).splitlines():
                line = line.strip()
                if not line:
                    continue
                # Format is "<iso-time> <short-hash> <subject>".
                time, hash_, subject = line.split(' ', 2)
                commits.append((time, pkg_name, '[{} {}] {}'.format(pkg_name, hash_, subject)))

        # Build the default message: a single subject line, or a summary
        # header followed by one line per commit.
        if commits:
            if len(commits) == 1:
                default_message = [commits[0][2]]
            else:
                pkg_names = set(c[1] for c in commits)
                default_message = [
                    '{} commit{} in {} package{}: {}.'.format(
                        len(commits),
                        's' if len(commits) != 1 else '',
                        len(pkg_names),
                        's' if len(pkg_names) != 1 else '',
                        ', '.join(sorted(pkg_names)),
                    ),
                    ''
                ]
                for c in commits:
                    default_message.append(c[2])
        else:
            default_message = [default_messages[args.semver_level]]

        # Write the draft message to a temp file and hand it to the user's
        # editor, git-style.
        fd, path = tempfile.mkstemp('.txt', 'vee-commit-msg.')
        with open(path, 'w') as fh:
            fh.write('\n'.join(default_message))
            fh.write('''

# Please enter the commit message for your changes. Lines starting
# with '#' will be ignored, and an empty message aborts the commit.
''')

        editor = os.environ.get('EDITOR', 'vim')
        editor_args = [editor, path]
        if editor == 'vim':
            # Highlight '#' lines as comments even in a .txt buffer.
            editor_args.insert(1, r'+syntax match Comment "^\s*#.*$"')
        code = subprocess.call(editor_args)

        message = open(path).readlines()
        os.close(fd)
        os.unlink(path)

        if code:
            log.error("Editor ({}) failed".format(editor), "and returned code {}".format(code))
            return

        # Strip comment lines; an empty result aborts the commit.
        message = [
            line.rstrip()
            for line in message
            if not line.lstrip().startswith('#')
        ]
        message = '\n'.join(message).strip()
        if not message:
            return
        args.message = message

    repo.commit(args.message, args.semver_level)
def init(args, do_clone=False, do_install=False, do_add=False, is_find=False):
    """Create (or adopt) a development package checkout and register it.

    Exactly one mode runs: plain init (the default), clone from a URL,
    install from a manifest entry, or add an existing path.

    :param do_clone: Clone ``args.url`` into the dev root.
    :param do_install: Locate a git-based package in the repo manifest and
        clone it.
    :param do_add: Register an existing checkout at ``args.path``.
    :param is_find: Unused here — presumably consumed by a sibling variant
        of this command; confirm before removing.
    :returns: 1 on develop/pipeline failure or missing path, 2 when no
        matching manifest entry exists.
    """
    do_init = not (do_clone or do_install or do_add)

    name = args.name
    home = args.assert_home()
    # NOTE(review): ``con`` is unused below in this variant.
    con = home.db.connect()

    path = os.path.abspath(args.path or os.path.join(home.dev_root, name))
    dev_repo = GitRepo(path)

    if do_init:
        log.info(style_note('Initing %s' % dev_repo.work_tree))
        makedirs(dev_repo.work_tree)
        dev_repo.git('init')

    elif do_clone:
        log.info(style_note('Cloning %s' % args.url))
        makedirs(dev_repo.work_tree)
        dev_repo.clone_if_not_exists(args.url)

    elif do_install:
        # Find an existing tool.
        # TODO: put more of this into EnvironmentRepo or Manifest
        repo = home.get_repo(args.repo)
        manifest_path = os.path.join(repo.work_tree, 'manifest.txt')
        manifest = Manifest(manifest_path, home=home)
        for req in manifest.iter_packages():
            if req.name.lower() == name.lower():
                # Make sure it is a Git package.
                url = normalize_git_url(req.url, prefix=False)
                if url:
                    break
        else:
            # for/else: no manifest entry matched by name AND was git-based.
            log.error('Could not find git-based "%s" in "%s" repo.' % (name, repo.name))
            return 2
        log.info(style_note('Found %s in %s' % (name, repo.name), str(req)))
        makedirs(dev_repo.work_tree)
        dev_repo.clone_if_not_exists(url, shallow=False)

    elif do_add:
        log.info(style_note('Adding %s from %s' % (name, path)))
        if not os.path.exists(path):
            log.error('%s does not exist' % path)
            return 1

    # All modes then run the develop pipeline step and register the result
    # as a dev package tag.
    package = Package([path], home=home, dev=True)
    try:
        package.pipeline.run_to('develop')
    except Exception as e:
        print_cli_exc(e)
        return 1

    log.info(style_note('Linking dev package', name, path))
    dev_pkg = DevPackage(
        {
            'name': name,
            'path': path,
            'environ': package.environ
        }, home=home)
    dev_pkg.save_tag()
def main(argv=None, environ=None, as_main=__name__ == "__main__"):
    """CLI entry point: parse args, acquire the home lock when needed, and
    dispatch to the selected sub-command.

    :param argv: Argument list; defaults to ``sys.argv[1:]``.
    :param environ: Environment mapping; defaults to ``os.environ``.
    :param as_main: When true, exceptions are printed and converted to an
        exit code instead of propagating.
    :returns: Integer exit code.
    """
    try:

        if argv is None:
            argv = sys.argv[1:]

        # Split up args in shebangs if we seem to be in a situation where
        # it wasn't done for us (some kernels pass the whole shebang tail
        # as a single argument).
        raw_argv = argv
        argv = []
        for arg in raw_argv:
            if '--shebang ' in arg:
                argv.extend(arg.split())
            else:
                argv.append(arg)
        argv = [x for x in argv if x != '--shebang']

        parser = get_parser()
        args, unparsed = parser.parse_known_args(argv, namespace=Namespace())
        func = get_func(args)
        # If the sub-command does not accept unknown args, re-parse
        # strictly so argparse reports the error.
        if func and unparsed and not func.__parse_known_args:
            args = parser.parse_args(argv, namespace=Namespace())
            func = get_func(args)

        args.environ = os.environ if environ is None else environ
        args.home_path = default_home_path(environ=args.environ)

        # Propagate a virtualenv-style real_prefix when requested.
        if args.real_prefix and args.real_prefix != getattr(sys, 'real_prefix', None):
            sys.real_prefix = args.real_prefix

        if args.log:
            root = logging.getLogger('vee')
            # '-' logs to stdout; anything else appends to a file.
            stream = sys.stdout if args.log == '-' else open(args.log, 'ab')
            handler = logging.StreamHandler(stream)
            handler.setFormatter(
                _LogFormatter(
                    '%(asctime)-15s %(name)s %(levelname)s: %(message)s'))
            root.addHandler(handler)

        # When called recursively, we want to maintain at least the previous
        # level of verbosity.
        log.config.verbosity = max(log.config.verbosity, args.verbose or 0)

        # TODO: Move this to a $VEE_UMASK envvar or something.
        # For now, just leave all permissions open.
        os.umask(0)

        if func:
            lock = None
            try:
                # Don't grab the lock if we dont need it (or if the home isn't set)
                if func.__acquire_lock and args.home_path and os.path.exists(args.home_path):
                    try:
                        # Reuse a lock already held by this process.
                        lock = _global_locks[args.home_path]
                    except KeyError:
                        # Identify the lock holder as user@host/pid for the
                        # "VEE is locked" message shown to others.
                        lock_content = (
                            os.environ.get('VEE_LOCK_CONTENT') or '%s@%s/%s' % (
                                os.environ.get('LOGNAME', '<unknown>'),
                                os.environ.get('SSH_CLIENT', 'localhost').split()[0],
                                os.getpid(),
                            ))
                        lock = RLockfile(os.path.join(args.home_path, '.vee-lock'), blocking=False, content=lock_content)
                        _global_locks[args.home_path] = lock
                    lock.acquire()
            except IOError as e:
                # Non-blocking acquire signals contention via EWOULDBLOCK.
                if e.errno == errno.EWOULDBLOCK:
                    content = lock.get_content()
                    log.error('VEE is locked%s' % (': ' + content if content else '', ))
                    res = 1
                else:
                    raise
            else:
                # Lock acquired (or not needed): run the sub-command,
                # optionally under cProfile.
                if args.cprofile_path:
                    res = cProfile.runctx('func(args, *unparsed)', locals(), globals(), filename=args.cprofile_path) or 0
                else:
                    res = func(args, *unparsed) or 0
                if func.__acquire_lock and lock is not None:
                    lock.release()
        else:
            parser.print_help()
            res = 1

    except Exception as e:
        if as_main:
            print_cli_exc(e, verbose=True)
            res = cli_errno(e)
        else:
            raise

    return res
def init(args, do_clone=False, do_install=False, do_add=False, is_find=False):
    """Create (or adopt) a development package checkout and register it in
    the development_packages table (Python 2 variant of this command).

    :param do_clone: Clone ``args.url`` into the dev root.
    :param do_install: Locate a git-based package in the repo requirements
        and clone it.
    :param do_add: Register an existing checkout at ``args.path``.
    :param is_find: When an entry already exists, report it as a successful
        find instead of an error.
    :returns: 1 on conflict/missing path/pipeline failure, 2 when no
        matching requirement exists.
    """
    do_init = not (do_clone or do_install or do_add)

    name = args.name
    home = args.assert_home()
    con = home.db.connect()

    # Make sure there are no other packages already, and clear out old ones
    # which no longer exist.
    for row in con.execute('SELECT * FROM development_packages WHERE name = ?', [name]):
        if not args.force and os.path.exists(os.path.join(row['path'], '.git')):
            if is_find:
                print style_note('"%s" already exists:' % name, row['path'])
                return
            else:
                print style_error('"%s" already exists:' % name, row['path'])
                return 1
        else:
            # Stale row: its checkout is gone (or --force was given).
            con.execute('DELETE FROM development_packages WHERE id = ?', [row['id']])

    path = os.path.abspath(args.path or os.path.join(home.dev_root, name))
    dev_repo = GitRepo(path)

    if do_init:
        print style_note('Initing %s' % dev_repo.work_tree)
        makedirs(dev_repo.work_tree)
        dev_repo.git('init')

    elif do_clone:
        print style_note('Cloning %s' % args.url)
        makedirs(dev_repo.work_tree)
        dev_repo.clone_if_not_exists(args.url)

    elif do_install:
        # Find an existing tool.
        # TODO: put more of this into EnvironmentRepo or Requirements
        env_repo = home.get_env_repo(args.repo)
        req_path = os.path.join(env_repo.work_tree, 'requirements.txt')
        reqs = Requirements(req_path, home=home)
        for req in reqs.iter_packages():
            if req.name.lower() == name.lower():
                # Make sure it is a Git package.
                url = normalize_git_url(req.url, prefix=False)
                if url:
                    break
        else:
            # for/else: nothing matched by name AND was git-based.
            print style_error('Could not find git-based "%s" in "%s" repo.'
                              % (name, env_repo.name))
            return 2
        print style_note('Found %s in %s' % (name, env_repo.name), str(req))
        makedirs(dev_repo.work_tree)
        dev_repo.clone_if_not_exists(url, shallow=False)

    elif do_add:
        print style_note('Adding %s from %s' % (name, path))
        if not os.path.exists(path):
            log.error('%s does not exist' % path)
            return 1

    # All modes then run the develop pipeline step and record the package.
    package = Package([path], home=home, dev=True)
    try:
        package.pipeline.run_to('develop')
    except Exception as e:
        print_cli_exc(e)
        return 1

    print style_note('Linking dev package', name, path)
    con.execute(
        'INSERT INTO development_packages (name, path, environ) VALUES (?, ?, ?)',
        [name, path, json.dumps(package.environ)])
    dev_pkg = DevPackage(
        {
            'name': name,
            'path': path,
            'environ': package.environ
        }, home=home)
    dev_pkg.save_tag()
def add(args):
    """Add or update a development package's pin in the repo manifest.

    With --update / --bake-installed / --checksum, batch-edit the manifest
    instead; otherwise pin ``args.package``'s dev checkout head, optionally
    appending a new requirement with --init.

    :returns: 1 on usage/lookup errors, None otherwise.
    :raises ValueError: When ``args.package`` is not a known dev package.
    """
    home = args.assert_home()
    repo = home.get_repo(args.repo)
    req_set = repo.load_manifest()
    pkg_set = PackageSet(home=home)

    # None means "no batch mode ran"; False/True track whether a batch mode
    # actually changed anything.
    baked_any = None

    if args.update:
        baked_any = False
        for req in req_set.iter_packages():
            pkg = pkg_set.resolve(req, check_existing=False)
            if pkg.fetch_type != 'git':
                continue
            log.info(style_note('Fetching', str(req)))
            branch = get_default_branch()
            pkg.repo.fetch('origin', branch)  # TODO: track these another way?
            if pkg.repo.check_ff_safety('origin/' + branch):
                pkg.repo.checkout('origin/' + branch)
                head = pkg.repo.head[:8]
                if head != req.version:
                    req.version = pkg.repo.head[:8]
                    log.info(style_note('Updated', str(req)))
                    baked_any = True

    if args.bake_installed:
        baked_any = False
        for req in req_set.iter_packages():
            pkg = pkg_set.resolve(req)
            if pkg.fetch_type != 'git':
                continue
            # BUG FIX: this local was previously named ``repo``, clobbering
            # the environment repo used by dump_manifest() below.
            pkg_repo = pkg.pipeline.steps['fetch'].repo
            if req.name and req.name == guess_name(req.url):
                # BUG FIX: log before clearing, so we report the name that
                # was unset instead of None.
                log.info(style_note('Unset redundant name', req.name))
                req.name = None
                baked_any = True
            if pkg.installed and req.version != pkg_repo.head[:8]:
                req.version = pkg_repo.head[:8]
                baked_any = True
                log.info(style_note('Pinned', req.name, req.version))

    if args.checksum:
        baked_any = False
        for req in req_set.iter_packages():
            pkg = pkg_set.resolve(req)
            if pkg.checksum:
                continue
            if not pkg.package_path or not os.path.isfile(pkg.package_path):
                continue
            req.checksum = checksum_file(pkg.package_path)
            log.info(style_note('Checksummed', pkg.name, req.checksum))
            baked_any = True

    # A batch mode ran: persist (or report no-op) and stop.
    if baked_any is not None:
        if baked_any:
            repo.dump_manifest(req_set)
        else:
            log.info(style_note('No changes.'))
        return

    dev_repo = home.find_development_package(args.package)
    if not dev_repo:
        raise ValueError('No development package %r' % args.package)

    # Get the normalized origin.
    dev_remote_urls = set()
    for url in dev_repo.remotes().values():
        url = normalize_git_url(url, prefer='scp') or url
        log.debug('adding dev remote url: %s' % url)
        dev_remote_urls.add(url)
    if not dev_remote_urls:
        # BUG FIX: this referenced ``row['path']``, which is undefined in
        # this scope (leftover from a DB-row-based version) and raised a
        # NameError; report the package name instead.
        log.info(style_error('No git remotes for %s' % args.package))
        return 1

    # Find the manifest entry whose URL matches one of the dev remotes and
    # pin it to the dev checkout's head.
    for req in req_set.iter_packages(eval_control=False):

        # We only deal with git packages.
        pkg = pkg_set.resolve(req, check_existing=False)
        if pkg.fetch_type != 'git':
            continue

        req_url = normalize_git_url(req.url, prefer='scp')
        log.debug('does match package url?: %s' % req_url)

        if req_url in dev_remote_urls:
            if req.version == dev_repo.head[:8]:
                log.info(style_note('No change to', str(req)))
            else:
                req.version = dev_repo.head[:8]
                log.info(style_note('Updated', str(req)))
            break

    else:
        # for/else: no manifest entry matched any dev remote.
        if not args.init:
            log.error(
                '{error}: No required package {name}; would match one of:'.
                format(error=style('Error', 'red'), name=style(args.package, bold=True)))
            for url in sorted(dev_remote_urls):
                log.info(' {}'.format(url))
            log.info('Use {} to setup: git+{} --version {}'.format(
                style('vee add --init %s' % args.package, 'green'),
                dev_repo.remotes()['origin'], dev_repo.head[:8]))
            return 1
        # --init: append a brand-new requirement for the origin remote.
        req = Package(
            url=normalize_git_url(dev_repo.remotes()['origin'], prefix=True),
            version=dev_repo.head[:8],
            home=home,
        )
        req_set.append(('', req, ''))

    repo.dump_manifest(req_set)