Example 1
    def __init__(self, contributor_count):
        super(CentralisedWithWorkers, self).__init__()
        if contributor_count < 1:
            raise(
                Exception("contributor_count must be 1 or more, got {}".format(
                    contributor_count)))

        self._central_repo = phlsys_git.Repo(tempfile.mkdtemp())
        self._central_repo("init", "--bare")

        self._workers = []
        for i in xrange(contributor_count):
            self._workers.append(
                Worker(phlsys_git.Repo(tempfile.mkdtemp())))
            self.workers[-1].repo("init")
            self.workers[-1].repo(
                "remote", "add", "origin", self._central_repo.working_dir)
            self.workers[-1].repo("fetch")

            if i == 0:
                self.workers[0].commit_new_file('initial commit', 'README')
                phlgit_push.push(self.workers[0].repo, 'master', 'origin')
            else:
                self.workers[i].repo('fetch')
                self.workers[i].repo('checkout', 'master')
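All of the examples on this page share one calling convention: phlsys_git.Repo wraps a directory, and the instance is then invoked like a function with git subcommand arguments. The sketch below isolates that pattern; it assumes only what the snippets themselves show (a Repo constructed from a path and then called with git arguments), and the commit message is an arbitrary placeholder.

import tempfile

import phlsys_git

# create an empty repository in a fresh temporary directory, as Example 1 does
repo = phlsys_git.Repo(tempfile.mkdtemp())
repo("init")

# any git subcommand can be issued through the callable repo object
repo("commit", "--allow-empty", "-m", "placeholder commit")
repo("status")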
Example 2
    def test_A_Breathing(self):
        with phlsys_fs.chtmpdir_context():
            fetch_config = str('remote.origin.fetch=+refs/arcyd/landinglog'
                               ':refs/arcyd/origin/landinglog')

            run = phlsys_subprocess.run_commands

            run('git init --bare origin')
            run('git clone origin dev --config ' + fetch_config)

            with phlsys_fs.chdir_context('dev'):

                # make an initial commit on the master branch
                run('touch README')
                run('git add README')
                run('git commit README -m initial_commit')
                run('git push origin master')
                run('git checkout -b myfeature')

                # create a new branch with unique content
                with open('README', 'w') as f:
                    f.write('myfeature content')
                run('git add README')
                run('git commit README -m myfeature_content')
                run('git push -u origin myfeature')

            dev = phlsys_git.Repo('dev')

            # make sure we can prepend a branch to the landinglog when empty
            abdt_landinglog.prepend(dev, '1234', 'myfeature', '4567')
            log = abdt_landinglog.get_log(dev)
            self.assertEqual(1, len(log))
            self.assertEqual(log[0].review_sha1, "1234")
            self.assertEqual(log[0].name, "myfeature")
            self.assertEqual(log[0].landed_sha1, "4567")

            # make sure we can prepend another branch
            abdt_landinglog.prepend(dev, '5678', 'newfeature', '8901')
            log = abdt_landinglog.get_log(dev)
            self.assertEqual(2, len(log))
            self.assertEqual(log[0].review_sha1, "5678")
            self.assertEqual(log[0].name, "newfeature")
            self.assertEqual(log[0].landed_sha1, "8901")
            self.assertEqual(log[1].review_sha1, "1234")
            self.assertEqual(log[1].name, "myfeature")
            self.assertEqual(log[1].landed_sha1, "4567")

            # make a new, independent clone and make sure we get the same log
            abdt_landinglog.push_log(dev, 'origin')
            run('git clone origin dev2 --config ' + fetch_config)
            with phlsys_fs.chdir_context('dev2'):
                run('git fetch')
            dev2 = phlsys_git.Repo('dev2')
            self.assertListEqual(abdt_landinglog.get_log(dev),
                                 abdt_landinglog.get_log(dev2))
Example 3
def get_revision_generator(args):

    repo = phlsys_git.Repo('.')

    commit_list = []
    commits_to_follow = []
    did_specify_something = False

    if args.commits:
        did_specify_something = True
        commits_to_follow = args.commits

    if args.list_file:
        did_specify_something = True
        # the list file is assumed to contain whitespace-separated commit names
        commit_list += args.list_file.read().split()

    if not did_specify_something:
        commits_to_follow = ['HEAD']

    if commits_to_follow:
        commit_list += phlgit_revlist.commits(repo, *commits_to_follow)

    make_rev = phlgit_log.make_revision_from_hash
    revision_generator = (make_rev(repo, commit) for commit in commit_list)

    return revision_generator
Example 4
def initialise_here():
    """Return a new default Accessor after initialising the current directory.

    :returns: a new Accessor, mounted at the current directory

    """
    layout = Layout()

    phlsys_subprocess.run('git', 'init')
    repo = phlsys_git.Repo('.')

    # create filesystem hierarchy
    phlsys_fs.write_text_file(layout.arcydroot, 'this dir is an arcydroot')
    phlsys_fs.write_text_file('README', _README)
    phlsys_fs.write_text_file('config/README', _CONFIG_README)
    phlsys_fs.write_text_file(
        'config/phabricator/README', _CONFIG_PHABRICATOR_README)
    phlsys_fs.write_text_file(
        'config/repository/README', _CONFIG_REPOSITORY_README)
    phlsys_fs.write_text_file('var/README', _VAR_README)
    phlsys_fs.write_text_file('var/repo/README', _VAR_REPO_README)
    phlsys_fs.write_text_file('var/log/README', _VAR_LOG_README)
    phlsys_fs.write_text_file('var/status/README', _VAR_STATUS_README)
    phlsys_fs.write_text_file('var/command/README', _VAR_COMMAND_README)
    phlsys_fs.write_text_file('var/run/README', _VAR_RUN_README)

    repo('add', '.')
    phlsys_fs.write_text_file('.gitignore', 'var\n')
    repo('add', '.')
    phlgit_commit.index(repo, 'Initialised new Arcyd instance')

    return Accessor(Layout(), '.')
Example 5
def process(args):
    repo_path = os.path.abspath(os.curdir)
    base = args.base
    head = args.head

    repo = phlsys_git.Repo(repo_path)
    # TODO: do not use private variable abdt_branch._MAX_DIFF_SIZE
    diff = abdt_differ.make_raw_diff(repo, base, head,
                                     abdt_branch._MAX_DIFF_SIZE)
    sys.stdout.write(diff.diff)
Example 6
    def __init__(self, root_dir, barc_cmd_path, arcyon_cmd_path, phab_uri,
                 alice, bob):

        self._root_dir = root_dir
        self.central_path = os.path.join(self._root_dir, 'central')
        os.makedirs(self.central_path)
        self._central_repo = phlsys_git.Repo(self.central_path)
        self._central_repo("init", "--bare")
        self.web_port = phlsys_web.pick_free_port()
        shutil.move(
            os.path.join(self.central_path, 'hooks/post-update.sample'),
            os.path.join(self.central_path, 'hooks/post-update'))

        self._command_hold_path = os.path.join(self.central_path,
                                               'command/hold_dev_arcyd_refs')

        pre_receive_path = os.path.join(self.central_path, 'hooks/pre-receive')
        phlsys_fs.write_text_file(pre_receive_path,
                                  _PRE_RECEIVE_HOLD_DEV_ARCYD_REFS)
        mode = os.stat(pre_receive_path).st_mode
        os.chmod(pre_receive_path, mode | stat.S_IEXEC)

        self._web = phlsys_web.SimpleWebServer(self.central_path,
                                               self.web_port)

        self._workers = []
        for account in (alice, bob):
            account_user = account[0]
            account_email = account[1]
            account_cert = account[2]
            worker_path = os.path.join(self._root_dir, account_user)
            os.makedirs(worker_path)
            self._workers.append(
                atet_worker.Worker(phlsys_git.Repo(worker_path), worker_path,
                                   barc_cmd_path, account_user, account_email,
                                   account_cert, arcyon_cmd_path, phab_uri))
            self._workers[-1].setup(self._central_repo.working_dir)

            if len(self._workers) == 1:
                self._workers[0].push_initial_commit()
            else:
                self._workers[-1].repo('checkout', 'master')
Example 7
    def test_can_commit(self):
        # TODO: make this more portable with shutil etc.
        run = phlsys_subprocess.run
        runCommands = phlsys_subprocess.run_commands
        path = "phlsys_git_TestGitContext"
        runCommands("mkdir " + path)
        run("git", "init", workingDir=path)
        repo = phlsys_git.Repo(path)
        runCommands("touch " + path + "/README")
        repo("add", "README")
        repo("commit", "-m", "initial commit")
        runCommands("rm -rf " + path)
def process(args):

    _ = args  # NOQA
    fs = abdt_fs.make_default_accessor()

    with fs.lockfile_context():
        pid = fs.get_pid_or_none()
        if pid is not None and phlsys_pid.is_running(pid):
            raise Exception("cannot fetch whilst arcyd is running.")

        repo_config_path_list = fs.repo_config_path_list()
        repo_name_config_list = abdi_repoargs.parse_config_file_list(
            repo_config_path_list)

        url_watcher_wrapper = phlurl_watcher.FileCacheWatcherWrapper(
            fs.layout.urlwatcher_cache_path)

        # Let the user know what's happening before potentially blocking for a
        # while.
        print('Refreshing repository snoop status ..', end=' ')
        # Make sure that the output is actually visible by flushing stdout
        # XXX: Will use 'flush' parameter to 'print()' in Python 3.3
        sys.stdout.flush()
        print("done")

        url_watcher_wrapper.watcher.refresh()

        for repo_name, repo_config in repo_name_config_list:
            print(repo_name + ' ..', end=' ')

            # Make sure that the output is actually visible by flushing stdout
            # XXX: Will use 'flush' parameter to 'print()' in Python 3.3
            sys.stdout.flush()

            snoop_url = abdi_repoargs.get_repo_snoop_url(repo_config)

            sys_repo = phlsys_git.Repo(repo_config.repo_path)
            refcache_repo = phlgitx_refcache.Repo(sys_repo)
            differ_cache = abdt_differresultcache.Cache(refcache_repo)
            abd_repo = abdt_git.Repo(refcache_repo, differ_cache, "origin",
                                     repo_config.repo_desc)

            did_fetch = abdi_processrepoarglist.fetch_if_needed(
                url_watcher_wrapper.watcher, snoop_url, abd_repo,
                repo_config.repo_desc)

            if did_fetch:
                print('fetched')
            else:
                print('skipped')

            url_watcher_wrapper.save()
Example 9
def ensure_repo_ignoring(repo_path):
    """Make sure the .gitattributes override is set up.

    Note that this function will perform a clean checkout of all files
    in the working copy from the index, so any non-staged changes will
    be lost.

    :repo_path: repository to set up

    """
    if is_repo_definitely_ignoring(repo_path):
        # nothing to do
        return

    repo = phlsys_git.Repo(repo_path)
    repo_attributes_path = os.path.join(repo_path, _REPO_ATTRIBUTES_PATH)

    # Files in our working copy might have been 'smudged' by some
    # filters. After the repo-wide attributes override is written, those
    # smudged files might be considered 'modified' because the
    # appropriate clean filter is no longer applied.
    #
    # To fix that side effect we need to rebuild the working copy
    # after the attributes are modified.

    # check that any existing file is compatible with the new contents we will
    # write, i.e. it is a subset of the new content
    if os.path.exists(repo_attributes_path):
        contents = phlsys_fs.read_text_file(repo_attributes_path)
        lines = contents.splitlines()
        for l in lines:
            stripped = l.strip()
            if stripped and stripped not in _REPO_ATTRIBUTES_TUPLE:
                # we won't try to do any sort of merging, just escalate
                raise Exception(
                    "cannot merge attributes in existing file: {}".format(
                        repo_attributes_path))

    # the file is exactly one of the existing attributes, we can merge
    # correctly by overwriting it with our superset of attributes
    phlsys_fs.write_text_file(repo_attributes_path, _REPO_ATTRIBUTES_CONTENT)

    # overwrite working copy with files from index
    repo("checkout-index", "-afqu")
Example 10
def process(args):
    # XXX: only supports 'origin' remote at present

    print("""
::DEPRECATION NOTICE::
the 'refs/arcyd/landinglog' ref is no longer being updated, for new
branches do:

    git fetch origin refs/arcyd/landed:refs/arcyd/landed
    git branch --merged refs/arcyd/landed | grep -v '*' | xargs git branch -D
    """.strip())
    print()

    repo = phlsys_git.Repo('.')

    _fetch_log(repo, args.update, args.no_update, args.prompt_update)

    log = abdt_landinglog.get_log(repo)
    log_dict = {i.review_sha1: (i.name, i.landed_sha1) for i in log}

    local_branches = phlgit_branch.get_local_with_sha1(repo)

    if args.force:
        did_something = _prune_branches(
            repo, args, prune_force, log_dict, local_branches)
        if not did_something:
            print("nothing to do.")
        else:
            print("done.")
    else:
        assert args.interactive
        would_do_something = _prune_branches(
            repo, args, prune_dryrun, log_dict, local_branches)
        if not would_do_something:
            print("nothing to do.")
        else:
            choice = phlsys_choice.yes_or_no("perform the pruning?", 'no')
            print()
            if choice:
                _prune_branches(
                    repo, args, prune_force, log_dict, local_branches)
                print("done.")
            else:
                print("stopped.")
Example 11
@contextlib.contextmanager
def setup_repo_context(repo_url, repo_path, repo_push_url=None):
    """Set up a repository; if an exception is raised then remove the repo.

    :repo_url: string url of the repo to clone
    :repo_path: string path to clone the repo to
    :repo_push_url: string url to push to, or None
    :returns: None

    """
    # if there's any failure after cloning then we should remove the repo
    if repo_push_url is not None:
        phlsys_subprocess.run('git', 'clone', repo_url, repo_path, '--config',
                              'remote.origin.pushurl=' + repo_push_url)
    else:
        phlsys_subprocess.run('git', 'clone', repo_url, repo_path)

    try:
        repo = phlsys_git.Repo(repo_path)

        # make sure we have no problems with 'ident' strings, we won't build
        # from arcyd so it shouldn't be externally visible that we don't expand
        # them.
        phlgitx_ignoreattributes.ensure_repo_ignoring(repo_path)

        # test pushing to master
        repo('checkout', 'origin/master')
        phlgit_commit.allow_empty(repo, 'test commit for pushing')
        repo('push', 'origin', '--dry-run', 'HEAD:refs/heads/master')
        repo('checkout', '-')

        try_push_special_refs(repo)

        # fetch the 'landed' and 'abandoned' refs if they exist
        abdt_git.checkout_master_fetch_special_refs(repo, 'origin')

        ensure_reserve_branch(repo)

        # success, allow the caller to do work
        yield
    except Exception:
        # clean up the git repo on any exception
        shutil.rmtree(repo_path)
        raise
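A minimal usage sketch for the context manager above, relying on the contextlib.contextmanager wrapping that the bare yield implies; the URL and path here are placeholder values, not taken from the examples.

# placeholder values for illustration only
repo_url = 'ssh://git@example.test/project.git'
repo_path = 'var/repo/project'

with setup_repo_context(repo_url, repo_path):
    # if anything in this block raises, the clone at repo_path is removed
    pass  # follow-up setup work would go here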
Example 12
def process(args):
    repo = phlsys_git.Repo('.')

    #
    # First, gather all the data
    #

    # XXX: only supports 'origin' remote at present
    remote = 'origin'

    hash_ref_pairs = phlgit_showref.hash_ref_pairs(repo)
    remote_branch_to_hash = _remote_branches_as_short_local(
        hash_ref_pairs, remote)
    # local_branch_to_hash = _short_local_branches(hash_ref_pairs)

    branch_naming = abdt_compositenaming.Naming(abdt_classicnaming.Naming(),
                                                abdt_rbranchnaming.Naming())

    branch_pairs = abdt_naming.get_branch_pairs(remote_branch_to_hash.keys(),
                                                branch_naming)

    managed_review_branches = _get_managed_review_branches(
        remote_branch_to_hash, branch_pairs)

    #
    # Finally, decide how to display it
    #

    if args.format_json:
        print(json.dumps(managed_review_branches, sort_keys=True, indent=2))
    elif args.format_python:
        pprint.pprint(managed_review_branches)
    elif args.format_string:
        for branch in managed_review_branches:
            print(args.format_string.format(**branch))
    else:  # args.format_summary
        if managed_review_branches:
            print("{:6} {:14} {}".format("ID", "status", "tracked name"))
            for branch in managed_review_branches:
                print("{review_id:6} {status:14} {tracked_name}".format(
                    **branch))
Example 13
    def __init__(self, repo_name, repo_args, conduit_manager,
                 url_watcher_wrapper, sys_admin_emails, mail_sender):

        self._active_state = _RepoActiveRetryState(
            retry_timestr_list=["10 seconds", "10 minutes", "1 hours"])
        sys_repo = phlsys_git.Repo(repo_args.repo_path)
        self._refcache_repo = phlgitx_refcache.Repo(sys_repo)
        self._differ_cache = abdt_differresultcache.Cache(self._refcache_repo)
        self._abd_repo = abdt_git.Repo(self._refcache_repo, self._differ_cache,
                                       "origin", repo_args.repo_desc)
        self._name = repo_name
        self._args = repo_args
        self._conduit_manager = conduit_manager

        conduit_cache = conduit_manager.get_conduit_and_cache_for_args(
            repo_args)
        self._arcyd_conduit, self._review_cache = conduit_cache

        self._mail_sender = mail_sender
        self._url_watcher_wrapper = url_watcher_wrapper
        self._on_exception = abdt_exhandlers.make_exception_delay_handler(
            sys_admin_emails, repo_name)
Example 14
    def __init__(self, layout, path):
        self._layout = layout
        self._root = os.path.abspath(path)
        self._repo = phlsys_git.Repo(path)

        self._check_arcydroot()
Example 15
    def __init__(self):
        super(TempRepo, self).__init__()
        self._tmp_dir = tempfile.mkdtemp()
        self._repo = phlsys_git.Repo(self._tmp_dir)
        self._repo("init")
Example 16
    def setUp(self):
        # TODO: make this more portable with shutil etc.
        phlsys_subprocess.run_commands("mkdir " + self.path)
        phlsys_subprocess.run("git", "init", workingDir=self.path)
        self.repo = phlsys_git.Repo(self.path)
Example 17
def _check_repo_remote(args, repo_name, repo_config):
    """Return False if the supplied repo has problems with it's remote.

    Will print details of errors found. Will continue when errors are found,
    unless they interfere with the operation of fsck.

    :args: argparse arguments to arcyd fsck
    :repo_name: string name of the repository
    :repo_config: argparse namespace of the repo's config
    :returns: True or False

    """
    all_ok = True
    repo = phlsys_git.Repo(repo_config.repo_path)

    # check that we can read from the remote
    try:
        repo("ls-remote")
    except phlsys_subprocess.CalledProcessError as e:
        all_ok = False
        print("error reading remote for {repo}".format(repo=repo_name))
        _print_indented(4, e.stdout)
        _print_indented(4, e.stderr)
        print()

    # check that we can write to the remote
    try:
        abdi_repo.try_push_special_refs(repo)
    except phlsys_subprocess.CalledProcessError as e:
        all_ok = False
        print("error writing remote for {repo}".format(repo=repo_name))
        _print_indented(4, e.stdout)
        _print_indented(4, e.stderr)
        print()

    # ensure the reserve branch
    try:
        if not abdi_repo.is_remote_reserve_branch_present(repo):
            print("'{repo}' has no reserve branch".format(repo=repo_name))
            if args.fix:
                print("ensuring reserve branch for '{repo}'..".format(
                    repo=repo_name))
                abdi_repo.ensure_reserve_branch(repo)
            else:
                all_ok = False
    except phlsys_subprocess.CalledProcessError as e:
        all_ok = False
        print(
            "error ensuring reserve branch for {repo}".format(repo=repo_name))
        _print_indented(4, e.stdout)
        _print_indented(4, e.stderr)
        print()

    # ensure the vestigial landinglog ref is not present
    try:
        if abdi_repo.is_legacy_landinglog_branch_present(repo):
            print("'{repo}' has legacy landinglog".format(repo=repo_name))
            if args.fix:
                print("removing landinglog for '{repo}'..".format(
                    repo=repo_name))
                abdi_repo.remove_landinglog(repo)
            else:
                all_ok = False
    except phlsys_subprocess.CalledProcessError as e:
        all_ok = False
        print("error removing landinglog for {repo}".format(repo=repo_name))
        _print_indented(4, e.stdout)
        _print_indented(4, e.stderr)
        print()

    return all_ok