def create_or_update(args):
    "Creates or updates a code review case."
    creds = new_credentials_config()
    g = GitRepo()
    cr_data_file = get_cr_data_file(g)
    safe_create_code_review_data_file(cr_data_file)

    with CodeReviewConfiguration.persisting(cr_data_file) as data:
        if g.current_branch() in data.branches:
            cr_data = data.branches[g.current_branch()]

            if len(cr_data.commits) > 1 and g.sha("HEAD~") == cr_data.commits[-2]:
                # the last commit was amended, so replace it:
                if g.sha("HEAD") != cr_data.commits[-1]:
                    cr_data.commits[-1] = g.sha("HEAD")
            elif g.sha() not in cr_data.commits:
                cr_data.commits.append(g.sha())

            if len(cr_data.commits) >= 2:
                use_hash = str("..".join([cr_data.commits[0][0:8],
                                          cr_data.commits[-1][0:8]]))
            else:
                use_hash = str(cr_data.commits[-1])

            logger.info("updating an existing code review.")
            update_code_review(cr_data, g, use_hash)
        else:
            data.set_branch(g.current_branch(),
                            {"original_name": g.commit_messages()[0],
                             "commits": [g.sha("HEAD~"), g.sha()]})

            logger.info("creating new code review.")
            create_code_review(data, g, creds)
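# Illustrative sketch (not part of the original module): the bookkeeping above
# keeps one SHA per tracked commit and collapses an amended HEAD into the last
# slot. The helper below reproduces that list-update logic on plain strings
# with made-up SHAs; its name and signature are assumptions for illustration.
def _example_track_commit(commits, head_sha, head_parent_sha):
    """Return an updated copy of ``commits`` after a new or amended HEAD."""
    commits = list(commits)
    if len(commits) > 1 and head_parent_sha == commits[-2]:
        # HEAD~ still matches the second-to-last tracked commit, so the tip
        # was amended in place: replace the stale tip SHA.
        if head_sha != commits[-1]:
            commits[-1] = head_sha
    elif head_sha not in commits:
        # otherwise this is a genuinely new commit: append it.
        commits.append(head_sha)
    return commits

# e.g. amending the tip "b2..." into "c3..." on top of "a1...":
# _example_track_commit(["a1" * 20, "b2" * 20], "c3" * 20, "a1" * 20)
# -> ["a1a1...", "c3c3..."]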
def repo(self, path=None):
    # Accept an existing GitRepo instance, a path to a repository, or None
    # (which falls back to the project root); anything else falls back to
    # the current working directory.
    if isinstance(path, GitRepo):
        self._repo = path
    elif path is None:
        self._repo = GitRepo(self.conf.paths.projectroot)
    elif os.path.isdir(path):
        self._repo = GitRepo(path)
    else:
        self._repo = GitRepo(os.getcwd())
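# Sketch (hypothetical, for illustration only): a setter written this way is
# typically paired with a lazy read-only accessor via ``property``. The class
# and attribute names below are assumptions, not part of the original code;
# it reuses the module-level ``GitRepo`` and ``os`` imports.
class _ExampleRepoHolder(object):
    def __init__(self, conf):
        self.conf = conf
        self._repo = None

    @property
    def repo(self):
        # lazily fall back to the project root, mirroring the
        # ``path is None`` branch of the setter above.
        if self._repo is None:
            self._repo = GitRepo(self.conf.paths.projectroot)
        return self._repo

    @repo.setter
    def repo(self, path=None):
        # same type dispatch as the setter above: GitRepo instance,
        # None (project root), existing directory, or cwd fallback.
        if isinstance(path, GitRepo):
            self._repo = path
        elif path is None:
            self._repo = GitRepo(self.conf.paths.projectroot)
        elif os.path.isdir(path):
            self._repo = GitRepo(path)
        else:
            self._repo = GitRepo(os.getcwd())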
def cherry_pick(args):
    c = fetch_config(args)
    g = GitRepo(c.paths.projectroot)

    if c.runstate.git_branch is None:
        c.runstate.git_branch = [g.current_branch()]

    for branch in c.runstate.git_branch:
        with g.branch(branch):
            g.cherry_pick(c.runstate.git_objects)
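# Sketch of the branch-switching contract that this loop (and the pull/apply
# operations below) relies on: a context manager that checks out the requested
# branch and restores the original one on exit. This is an assumption about
# GitRepo.branch()'s behavior, shown as a standalone helper rather than the
# library's actual implementation.
import contextlib

@contextlib.contextmanager
def _example_branch(repo, name):
    starting_branch = repo.current_branch()
    repo.checkout(name)
    try:
        yield
    finally:
        # always return to the branch we started on, even if the
        # cherry-pick (or other operation) inside the block fails.
        repo.checkout(starting_branch)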
def create_branch(args):
    """
    Takes a single branch name and (if necessary) creates a new branch. Then,
    populates the ``build/<branch>`` directory for the new branch using either
    the parent branch or ``master``. Safe to run multiple times (after a
    rebase) to update the build cache from master.

    Also calls :method:`~giza.operations.build_env.fix_build_environment()`
    to tweak the new build output to update hashes and on-disk copies of the
    environment to prevent unnecessary full rebuilds from sphinx.
    """
    conf = fetch_config(args)
    g = GitRepo(conf.paths.projectroot)

    branch = conf.runstate.git_branch
    base_branch = g.current_branch()

    if base_branch == branch:
        base_branch = 'master'
        logger.warning('seeding build data for branch "{0}" from "master"'.format(branch))

    branch_builddir = os.path.join(conf.paths.projectroot, conf.paths.output, branch)
    base_builddir = os.path.join(conf.paths.projectroot, conf.paths.output, base_branch)

    if g.branch_exists(branch):
        logger.info('checking out branch "{0}"'.format(branch))
    else:
        logger.info('creating and checking out a branch named "{0}"'.format(branch))

    g.checkout_branch(branch)

    cmd = "rsync -r --times --checksum {0}/ {1}".format(base_builddir, branch_builddir)

    logger.info('seeding build directory for "{0}" from "{1}"'.format(branch, base_branch))
    try:
        subprocess.check_call(args=cmd.split())
        logger.info('branch creation complete.')
    except subprocess.CalledProcessError:
        logger.error(cmd)

    # get a new config here for the new branch
    conf = fetch_config(args)
    builders = get_existing_builders(conf)

    with BuildApp.new(pool_type='process',
                      pool_size=conf.runstate.pool_size,
                      force=conf.runstate.force).context() as app:
        app.extend_queue(fix_build_env_tasks(builders, conf))
def pull_rebase(args):
    c = fetch_config(args)
    g = GitRepo(c.paths.projectroot)

    if c.runstate.git_branch is None:
        c.runstate.git_branch = [g.current_branch()]

    for branch in c.runstate.git_branch:
        with g.branch(branch):
            g.update()
            logger.info('updated: ' + branch)
def apply_patch(args):
    c = fetch_config(args)
    g = GitRepo(c.paths.projectroot)

    if c.runstate.git_branch is None:
        c.runstate.git_branch = [g.current_branch()]

    for branch in c.runstate.git_branch:
        with g.branch(branch):
            g.am(patches=c.runstate.git_objects,
                 repo='/'.join(['https://github.com', c.git.remote.upstream]),
                 sign=c.runstate.git_sign_patch)
def __init__(self, args, destage, dry_run=False):
    self.args = args
    self.conf = fetch_config(args)
    self.destage = destage
    self.dry_run = dry_run
    self.branch = GitRepo().current_branch()
def setup_branches(args):
    conf = fetch_config(args)
    g = GitRepo(conf.paths.projectroot)

    if 'upstream' in g.remotes():
        remote = 'upstream'
    else:
        remote = 'origin'

    for pbranch in conf.git.branches.published:
        if g.branch_exists(pbranch):
            continue

        tracking_branch = '/'.join([remote, pbranch])
        g.create_branch(pbranch, tracking=tracking_branch)
        logger.info('created branch "{0}" tracking "{1}"'.format(pbranch, tracking_branch))
def checkout(args):
    "Checks out a tracked code review branch."
    g = GitRepo()
    cr_data_file = get_cr_data_file(g)
    safe_create_code_review_data_file(cr_data_file)

    crconf = CodeReviewConfiguration(cr_data_file)

    if args._branch_name in crconf.branches:
        try:
            g.checkout(args._branch_name)
            logger.info('checked out: ' + args._branch_name)
        except Exception:
            logger.error('could not checkout branch: ' + args._branch_name)
    else:
        m = 'no branch named {0} is tracked. Please use another method to check out this branch'
        logger.warning(m.format(args._branch_name))
def list_reviews(args):
    "Lists tracked code reviews."
    g = GitRepo()
    cr_data_file = get_cr_data_file(g)
    safe_create_code_review_data_file(cr_data_file)

    crconf = CodeReviewConfiguration(cr_data_file)

    print(json.dumps(list(crconf.branches.keys()),
                     indent=3, sort_keys=True))
def close(args):
    "Removes a tracked code review."
    g = GitRepo()
    cr_data_file = get_cr_data_file(g)
    safe_create_code_review_data_file(cr_data_file)

    with CodeReviewConfiguration.persisting(cr_data_file) as data:
        branches = data.branches.keys()

        for to_delete in args._branch_name:
            if to_delete in branches:
                del data.branches[to_delete]
                logger.info('removed tracked code review for: ' + to_delete)
            else:
                logger.info('not tracking a code review for: ' + to_delete)

            try:
                g.remove_branch(to_delete, args.force)
                logger.info('removed branch: ' + to_delete)
            except Exception:
                logger.error('could not remove branch: ' + to_delete)
def make_project(args):
    """
    Generate a project skeleton. Prefer this operation over
    ``sphinx-quickstart``. Also builds skeleton HTML artifacts.
    """
    if args.quickstart_git is True:
        logger.info('creating a new git repository')
        g = GitRepo(os.getcwd())
        g.create_repo()
        build_sphinx = True
    else:
        try:
            GitRepo().sha()
            build_sphinx = True
        except GitError:
            build_sphinx = False

    mod_path = os.path.dirname(inspect.getfile(giza))
    qstart_path = os.path.join(mod_path, 'quickstart')

    cmd = 'rsync --ignore-existing --recursive {0}/. {1}'.format(qstart_path, os.getcwd())
    r = subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT)
    logger.info('migrated new site files')

    if args.quickstart_git is True:
        if not r.startswith('Reinitialized'):
            g.cmd('add', '-A')

            try:
                g.cmd('commit', '-m', '"initial commit"')
            except GitError:
                build_sphinx = False

    if build_sphinx is True:
        test_build_site(args)
def merge(args):
    c = fetch_config(args)
    g = GitRepo(c.paths.projectroot)

    from_branch = g.current_branch()
    # rebase on a throwaway branch (named from the id() of the branch string)
    # so the source branch itself is never rewritten.
    branch_name = str(id(c.runstate.git_branch))
    g.checkout_branch(branch_name, c.runstate.git_branch)

    try:
        g.checkout(branch_name)
        g.rebase(from_branch)
        g.checkout(from_branch)
        g.merge(c.runstate.git_branch)
        logger.info('rebased and merged {0} into {1}'.format(c.runstate.git_branch, from_branch))
    except Exception as e:
        logger.warning('error attempting to merge branch: ' + c.runstate.git_branch)
        logger.error(e)
    finally:
        if g.current_branch() != from_branch:
            g.checkout(from_branch)

        g.remove_branch(branch_name, force=False)