def copy_p2g_with_start(view_name, start, view_lock):
    """Invoked 'p4gf_init_repo.py --start=NNN': copy changes from @NNN to @now."""
    ctx = p4gf_context.create_context(view_name, view_lock)
    LOG.debug("connected to P4, p4gf=%s", ctx.p4gf)

    # Copy any recent changes from Perforce to Git.
    p4gf_copy_p2g.copy_p2g_ctx(ctx, start)
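# Hedged usage sketch for the helper above, assuming the p4gf_create_p4 and
# p4gf_lock modules shown elsewhere in this listing; the repo name and start
# change number are illustrative, not part of the original example.
import p4gf_create_p4
import p4gf_lock


def _example_copy_from_start(view_name='my_repo', start='1'):
    """Sketch: acquire the view lock, then copy changes from @start onward."""
    p4 = p4gf_create_p4.create_p4()
    with p4gf_lock.view_lock(p4, view_name) as view_lock:
        copy_p2g_with_start(view_name, start, view_lock)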
def main():
    """create Perforce user and client for Git Fusion"""
    p4gf_version.print_and_exit_if_argv()
    p4gf_util.reset_git_enviro()

    p4 = connect_p4()
    if not p4:
        return 2

    view_name = p4gf_util.cwd_to_view_name()
    view_lock = p4gf_lock.view_lock_heartbeat_only(p4, view_name)
    ctx       = p4gf_context.create_context(view_name, view_lock)

    # Read each input line (usually only one unless pushing multiple branches)
    # and pass to git-to-p4 copier.
    while True:
        line = sys.stdin.readline()
        if not line:
            break

        old_new_ref = line.strip().split()
        try:
            _copy( ctx
                 , old_sha1     = old_new_ref[0]
                 , new_sha1     = old_new_ref[1]
                 , ref          = old_new_ref[2])
        except RuntimeError as err:
            # bleed the input
            sys.stdin.readlines()
            # display the error message
            print(str(err))
            return 1
    return 0
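# Illustrative note in code form: git hands the hook one
# "<old-sha1> <new-sha1> <ref>" triple per stdin line, which is what the
# split() above unpacks. The SHA1 values below are made up.
def _example_parse_hook_line():
    line = '0' * 40 + ' ' + 'f' * 40 + ' refs/heads/master\n'
    old_sha1, new_sha1, ref = line.strip().split()
    return old_sha1, new_sha1, ref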
def __do_trees(view_name, path):
    '''Process any and all files associated with this view'''
    # don't leave a mess: clean up file even if there's a problem processing it
    atexit.register(os.unlink, path)
    with p4gf_context.create_context(view_name, None) as ctx:
        # we don't create any temp clients here, so don't try deleting them either.
        # leave that to processes that actually use them.
        ctx.cleanup_client_pool = False
        os.chdir(ctx.view_dirs.GIT_WORK_TREE)
        LOG.debug("processing trees for view {}".format(view_name))

        with open(path, "r") as f:
            with Timer(p4gf_gitmirror.ADD_SUBMIT):
                trees = set()
                last_tree = None
                while True:
                    line = f.readline().strip()
                    LOG.debug("processing line '{}'".format(line))
                    if line == "end":
                        break
                    elif line == '---':
                        last_tree = None
                    else:
                        if not last_tree:
                            last_tree = __get_snapshot_trees(line, trees)
                        else:
                            last_tree = __get_delta_trees(last_tree, line, trees)
                if trees:
                    LOG.debug("submitting trees for {}".format(view_name))
                    __add_trees_to_p4(ctx, trees)
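# Hedged sketch of the input file format the reader loop above expects: groups
# separated by '---', where the first line of each group goes to
# __get_snapshot_trees() and later lines to __get_delta_trees(), and a final
# 'end' line stops the loop. The SHA1-like values are invented.
def _example_write_tree_input(path):
    with open(path, 'w') as f:
        f.write('a' * 40 + '\n')   # first line of a group: snapshot
        f.write('b' * 40 + '\n')   # subsequent lines: deltas
        f.write('---\n')           # start a new group
        f.write('c' * 40 + '\n')
        f.write('end\n')           # terminator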
    def context(self):
        """Get the context used by this initializer, creating one if needed."""
        assert self.repo_name
        if self._context is None:
            self._context = p4gf_context.create_context(self.repo_name)
            self._context.repo_config = self.repo_config
            self._context.repo_lock = self.repo_lock
        return self._context
def create_context(args):
    """Create a p4gf_context for accessing Git Fusion data."""
    ctx = p4gf_context.create_context(args.repo)
    ctx.connect_cli(LOG)
    p4gf_proc.init()
    p4gf_branch.init_case_handling(ctx.p4gf)
    if DEBUG:
        print(_("create_context: ctx.p4.port='{}'  ctx.p4.user='******' .").format(
            ctx.p4.port, ctx.p4.user))
    return (ctx.p4gf, ctx)
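# Hedged usage sketch for the wrapper above: it only reads args.repo, so an
# argparse.Namespace stands in; the repo name is illustrative. Note the
# function returns a (p4gf connection, context) pair.
import argparse


def _example_create_context():
    args = argparse.Namespace(repo='my_repo')
    p4gf, ctx = create_context(args)
    return p4gf, ctx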
def _create_ctx(p4port, p4user, repo_name):
    """
    Connect to Perforce using environment with optional
    p4port/p4user overrides. Set to None if no override.
    """
    ctx = p4gf_context.create_context(repo_name)
    if p4port:
        ctx.config.p4port = p4port
    if p4user:
        ctx.config.p4user = p4user
    ctx.connect()
    return ctx
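# Hedged usage sketch: pass None to keep P4PORT/P4USER from the environment,
# or strings to override them. All values are illustrative.
def _example_create_ctx_overrides():
    ctx_from_env = _create_ctx(None, None, 'my_repo')
    ctx_explicit = _create_ctx('perforce:1666', 'git-fusion-user', 'my_repo')
    return ctx_from_env, ctx_explicit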
def _test_write_branch(repo_name, branch, key, value):
    '''
    Unit test hook to see if we actually write the correct values to
    the correct files.
    '''
    with p4gf_context.create_context(NTR('p4gf_repo'), None) as ctx:
        config = p4gf_config.get_repo(ctx.p4gf, repo_name)
    for section in config.sections():
        if config.has_option(section, p4gf_config.KEY_GIT_BRANCH_NAME) and \
           config.get(section, p4gf_config.KEY_GIT_BRANCH_NAME) == branch:
            _test_write(repo_name, section, key, value)
            return
    print(NTR('branch not found: {}').format(branch))
def _test_write(repo_name, section, key, value):
    '''
    Unit test hook to see if we actually write the correct values to
    the correct files.
    '''
    if repo_name == 'global':
        print(NTR('write to global config not implemented.'))
        return
    with p4gf_context.create_context(NTR('p4gf_repo'), None) as ctx:
        config = p4gf_config.get_repo(ctx.p4gf, repo_name)
        if not config.has_section(section):
            config.add_section(section)
        config.set(section, key, value)
        p4gf_config.write_repo_if(ctx.p4gf, ctx.client_spec_gf, repo_name, config)
def main():
    """Set up repo for a view."""
    p4gf_util.has_server_id_or_exit()
    args = _parse_argv()
    p4gf_version_3.log_version_extended(include_checksum=True)
    log_l10n()
    if args.enablemismatchedrhs:
        # Git Fusion should never modify the customer's config file, and
        # use of this option resulted in the config file losing all of the
        # comments and formatting the customer had put in place.
        sys.stderr.write(
            _('The --enablemismatchedrhs option is deprecated,'
              ' please use enable-mismatched-rhs config file'
              ' option instead.\n'))
        sys.exit(1)
    repo_name_p4client = None
    if args.p4client:
        repo_name_p4client = p4gf_util.argv_to_repo_name(args.p4client)
    repo_name = _argv_to_repo_name(args.repo_name)
    p4gf_util.reset_git_enviro()

    p4 = p4gf_create_p4.create_p4_temp_client()
    if not p4:
        raise RuntimeError(_('error connecting to Perforce'))

    LOG.debug("connected to P4 at %s", p4.port)
    p4gf_proc.init()

    try:
        with ExitStack() as stack:
            stack.enter_context(p4gf_create_p4.Closer())
            p4gf_version_3.version_check()
            p4gf_branch.init_case_handling(p4)
            repo_lock = p4gf_lock.RepoLock(p4, repo_name)
            stack.enter_context(repo_lock)
            ctx = p4gf_context.create_context(repo_name)
            ctx.p4gf = p4
            ctx.repo_lock = repo_lock
            initer = InitRepo(p4, repo_lock).set_repo_name(repo_name)
            initer.context = ctx
            initer.set_config_file_path(args.config)
            initer.set_charset(args.charset)
            initer.set_noclone(args.noclone)
            initer.set_start(args.start)
            stack.enter_context(ctx)
            initer.full_init(repo_name_p4client)
    except P4.P4Exception as e:
        _print_stderr(_('Error occurred: {exception}').format(exception=e))
        sys.exit(1)
def main():
    """Do the thing."""
    args = parse_argv()
    ctx = p4gf_context.create_context(args.repo)
    ctx.create_config_if_missing(False)
    ctx.config.p4user = args.p4user
    ctx.config.p4port = args.p4port
    ctx.connect_cli(LOG)
    p4gf_proc.init()
    p4gf_branch.init_case_handling(ctx.p4gf)
    rollback = Rollback( ctx           = ctx
                       , change_num    = args.change_num
                       , is_preview    = not args.execute
                       , is_obliterate = args.obliterate )
    rollback.rollback()
def _create_ctx(p4port, p4user, server_id, repo_name):
    """Connect to Perforce using environment with optional p4port/p4user overrides.

    Set to None if no override.
    """
    ctx = p4gf_context.create_context(repo_name)
    if p4port:
        ctx.config.p4port = p4port
    if p4user:
        ctx.config.p4user = p4user
    if server_id:
        ctx.p4gf_client = p4gf_const.P4GF_OBJECT_CLIENT.format(
            server_id=server_id)
    ctx.connect()
    return ctx
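# Hedged usage sketch for the variant above: the extra server_id argument
# retargets the context at another Git Fusion instance's object client. All
# values are illustrative.
def _example_create_ctx_for_other_server():
    return _create_ctx('perforce:1666', 'git-fusion-user', 'gf-instance-2',
                       'my_repo')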
def init_repo(p4,
              view_name,
              view_lock,
              charset=None,
              enable_mismatched_rhs=False,
              view_name_p4client=None,
              handle_imports=True):
    '''
    Create view and repo if necessary. Does NOT copy p4 to the repo
    (that's p4gf_copy_p2g's job). Returns one of the INIT_REPO_* constants.

    This is p4gf_auth_server's entry point into init_repo, called in response
    to a 'git clone'.

    view_name is the internal view_name with special chars already translated

    view_lock           CounterLock mutex that prevents other processes from
                        touching this repo, whether on this Git Fusion server
                        or another.
    '''
    LOG.debug("init_repo : view_name {1} view_name_p4client {1}".format(
        view_name, view_name_p4client))
    client_name = p4gf_util.view_to_client_name(view_name)
    p4gf_dir = p4gf_util.p4_to_p4gf_dir(p4)
    view_dirs = p4gf_view_dirs.from_p4gf_dir(p4gf_dir, view_name)

    result = _create_p4_client(p4, view_name, client_name, view_dirs.p4root,
                               enable_mismatched_rhs, view_name_p4client,
                               handle_imports)
    if result > INIT_REPO_OK:
        return result
    create_perm_groups(p4, view_name)
    with p4gf_context.create_context(view_name, view_lock) as ctx:
        p4gf_config.create_file_repo(ctx, view_name, charset)
        if ctx.client_exclusions_added:
            _print_stderr(
                _("The referenced client view contains implicit exclusions."
                  "\nThe Git Fusion config will contain these as explicit exclusions."
                  ))
        p4gf_copy_p2g.create_git_repo(ctx, view_dirs.GIT_DIR)
    create_p4_client_root(view_dirs.p4root)
    p4gf_rc.update_file(view_dirs.rcfile, client_name, view_name)
    if result == INIT_REPO_OK:
        LOG.debug("repository creation for %s complete", view_name)
    # return the result of creating the client, to indicate if the client
    # had already been set up or not
    return result
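# Hedged sketch of the typical call sequence around init_repo(): take the view
# lock, create the view/repo, then populate it with copy_p2g_with_start()
# shown earlier in this listing. Module imports and names are illustrative.
import p4gf_create_p4
import p4gf_lock


def _example_init_and_populate(view_name='my_repo'):
    p4 = p4gf_create_p4.create_p4()
    with p4gf_lock.view_lock(p4, view_name) as view_lock:
        result = init_repo(p4, view_name, view_lock)
        if result > INIT_REPO_OK:
            return result   # propagate failure, matching the check above
        copy_p2g_with_start(view_name, '1', view_lock)
        return result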
    def create_ctx(self):
        """
        Connect to Perforce using environment.
        """
        p4 = p4gf_create_p4.create_p4_temp_client(port=self.p4port,
                                                  user=self.p4user)
        p4gf_branch.init_case_handling(p4)
        self.repo_name = 'estimate_repo_size_' + p4gf_util.uuid()
        self.repo_config = p4gf_config.RepoConfig.from_local_files(
            self.repo_name, p4, self.config_path, None)
        ctx = p4gf_context.create_context(self.repo_name)
        ctx.p4gf = p4
        ctx.repo_config = self.repo_config
        ctx.config.p4user = self.p4user
        ctx.config.p4port = self.p4port
        ctx.connect()
        return ctx
def _test_read(repo_name, section, key):
    '''
    Unit test hook to see if we actually read the correct values from
    the correct files.
    '''
    with p4gf_context.create_context(NTR('p4gf_repo'), None) as ctx:
        if repo_name == 'global':
            config = p4gf_config.get_global(ctx.p4gf)
        else:
            config = p4gf_config.get_repo(ctx.p4gf, repo_name)
    if not config.has_section(section):
        print(NTR('section not found: {}').format(section))
    elif not config.has_option(section, key):
        print(NTR('option not found: [{section}] {key}')
             .format(section=section, key=key))
    else:
        value = config.get(section, key)
        print(value)
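# Hedged usage sketch: read one option from the global config and one from a
# per-repo config. The section and key names are invented for illustration.
def _example_read_config_values():
    _test_read('global', 'example-section', 'example-key')
    _test_read('my_repo', 'example-section', 'example-key')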
def main():
    """Do the thing."""
    args = parse_argv()

    ctx = p4gf_context.create_context(args.repo_name)
    ctx.config.p4user = args.p4user
    ctx.config.p4port = args.p4port
    ctx.connect_cli(log=LOG)

    p4gf_branch.init_case_handling(ctx.p4)

    rc = RepoCompareSingle(ctx, args)
    for change_num in args.change_num:
        rc.compare_change_num(change_num)
    rc.report_summary()

    if rc.how_ct:
        sys.exit(1)
def copy_p2g_with_start(view_name, start, view_lock, ctx=None):
    """Invoked 'p4gf_init_repo.py --start=NNN': copy changes from @NNN to #head."""
    if ctx is None:
        ctx = p4gf_context.create_context(view_name, view_lock)
    with ctx:
        LOG.debug("connected to P4, p4gf=%s", ctx.p4gf)
        # Check that there are changes to be copied from any branch.
        ctx.switch_client_view_to_union()
        path = ctx.client_view_path()
        changes_result = ctx.p4.run("changes", "-m1", "{}@{},#head".format(path, start))
        if len(changes_result):
            # Copy any recent changes from Perforce to Git.
            print(_("Copying changes from '{}'...").format(start))
            p4gf_copy_p2g.copy_p2g_ctx(ctx, start)
            print(_('Copying completed.'))
        else:
            msg = _("No changes above '{}'.").format(start)
            if int(start) == 1:
                LOG.debug(msg)
            else:
                LOG.info(msg)
                raise IndexError(msg)
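# Hedged sketch: when the caller already holds a context it can pass it in, in
# which case view_lock goes unused; an IndexError signals "no changes at or
# above start" for a start greater than 1. Names and values are illustrative.
def _example_copy_with_existing_ctx(ctx, view_name='my_repo'):
    try:
        copy_p2g_with_start(view_name, '100', None, ctx=ctx)
    except IndexError:
        pass  # nothing to copy at or above change 100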
def delete_client(args, p4, client_name, metrics, prune_objs=True):
    """Delete the named Perforce client and its workspace.

    Raise P4Exception if the client is not present, or the client configuration
    is not set up as expected.

    Keyword arguments:
    args        -- parsed command line arguments
    p4          -- Git user's Perforce client
    client_name -- name of client to be deleted
    metrics     -- DeletionMetrics for collecting resulting metrics
    prune_objs  -- if True, delete associated objects from cache

    """
    p4.user = p4gf_const.P4GF_USER
    repo_name = p4gf_util.client_to_repo_name(client_name)
    has_main = __name__ == "__main__"
    repo_config = check_repo_exists_and_get_repo_config(
        args, p4, client_name, has_main)
    # The repo client is required only to remove gitmodules from a stream.
    # Since the repo client does not exist we re-construct it only if we have a repo_config.
    # For any repo, the first call to p4gf_delete_repo can use the repo_config, but
    # then deletes it afterward.
    # For the second+ calls to p4gf_delete_repo for other GF instances
    # with a deleted p4gf_config, it is neither possible
    # nor necessary to remove a stream's gitmodules.
    git_modules = None
    stream_name = None
    if repo_config:
        repo_stream_name = get_stream_name_from_repo_config(p4, repo_config)
        if repo_stream_name and repo_stream_name.endswith('_p4gfv'):
            stream_name = repo_stream_name
            git_modules = _find_gitmodules(p4, stream_name)
            if git_modules and args.delete:
                ctx = p4gf_context.create_context(repo_name)
                ctx.repo_config = repo_config
                with ctx:
                    # Temporarily map in the stream so we can delete the file(s).
                    ctx.p4gfrun('client', '-f', '-s', '-S', stream_name,
                                ctx.p4.client)
                    _init_repo(p4, repo_config)
                    ctx.p4.run('sync', '-f', git_modules)
                    ctx.p4.run('delete', git_modules)
                    ctx.p4.run('submit', '-d',
                               "Delete .gitmodules for {0}".format(repo_name),
                               git_modules)
    # Delete the no-client data for this repo
    delete_non_client_repo_data(args, p4, client_name, metrics, prune_objs)
    if stream_name:
        if args.delete:
            p4.run('stream', '-d', stream_name)
        else:
            print(NTR('p4 stream -d {}').format(stream_name))

    if args.delete:
        if __name__ == "__main__":
            server_id_dict = p4gf_util.serverid_dict_for_repo(p4, repo_name)
            if server_id_dict:
                print(
                    _('You must delete this repo from these other Git Fusion instances'
                      ))
                for k, v in server_id_dict.items():
                    print(
                        _("  {server_id} on host {host}").format(server_id=k,
                                                                 host=v))
def main():
    """Copy incoming Git commits to Perforce changelists."""
    _log_environ(os.environ)
    log_l10n()
    LOG.debug("main() running, pid={}".format(os.getpid()))
    p4gf_proc.install_stack_dumper()
    for h in ['-?', '-h', '--help']:
        if h in sys.argv:
            print(_('Git Fusion pre-receive hook.'))
            return 2
    with p4gf_create_p4.Closer():
        p4gf_version.print_and_exit_if_argv()
        p4 = p4gf_create_p4.create_p4()
        if not p4:
            return 2
        p4gf_util.reset_git_enviro(p4)

        view_name = p4gf_util.cwd_to_view_name()
        view_lock = p4gf_lock.view_lock_heartbeat_only(p4, view_name)
        with p4gf_context.create_context(view_name, view_lock) as ctx:

            # this script is called by git while a context and temp clients
            # are already in use.  Don't sabotage that context by deleting
            # the temp clients from here.
            ctx.cleanup_client_pool = False

            # Read each input line (usually only one unless pushing multiple branches)
            # and convert to a list of "tuples" from which we can assign branches.
            prl = []
            delete_prl = []
            while True:
                line = sys.stdin.readline()
                if not line:
                    break
                LOG.debug('main() raw pre-receive-tuple: {}'.format(line))
                prt = PreReceiveTuple.from_line(line)
                if int(prt.new_sha1, 16) == 0:
                    delete_prl.append(prt)
                else:
                    prl.append(prt)

            # Initialize the external process launcher early, before allocating lots
            # of memory, and just after all other conditions have been checked.
            p4gf_proc.init()
            # Prepare for possible spawn of GitMirror worker process by forking
            # now before allocating lots of memory.
            p4gf_gitmirror.setup_spawn(view_name)
            # Kick off garbage collection debugging, if enabled.
            p4gf_gc.init_gc()

            # Reject attempt to delete any fully populated branch defined in
            # p4gf_config. Git Fusion never edits p4gf_config, so Git Fusion never
            # deletes fully populated branches. Edit p4gf_config yourself if you
            # want to remove a branch from history.
            for prt in delete_prl:
                git_branch_name = prt.git_branch_name()
                if not git_branch_name:
                    continue
                branch = ctx.git_branch_name_to_branch(git_branch_name)
                if not branch:
                    LOG.debug(
                        'attempt to delete branch {} which does not exist'.
                        format(git_branch_name))
                    break
                if not branch.is_lightweight:
                    raise RuntimeError(
                        _('Cannot delete branches defined in'
                          ' Git Fusion repo config file: {}').format(
                              git_branch_name))

            # Swarm review creates new Git merge commits. Must occur before branch
            # assignment so that the review reference can be moved to the new merge
            # commit.
            gsreview_coll = GSReviewCollection.from_prl(ctx, prl)
            if gsreview_coll:
                gsreview_coll.pre_copy_to_p4(prl)

            # Assign branches to each of the received commits for pushed branches  - skip deletes.
            if prl:
                assigner = Assigner(ctx.branch_dict(), prl, ctx)
                assigner.assign()

            # For each of the heads being pushed, copy their commits to Perforce.
            if prl:
                try:
                    err = _copy(ctx,
                                prl=prl,
                                assigner=assigner,
                                gsreview_coll=gsreview_coll)  # branch push
                    if err:
                        return _clean_exit(err)
                except RuntimeError as err:
                    # Log the error. The return call below eats the error and stack trace.
                    LOG.exception(NTR("_copy() raised exception."))
                    return _clean_exit(err)
            # For each of the heads being deleted, remove the branch definition from p4gf_config2
            if delete_prl:
                p4gf_call_git.prohibit_interrupt(view_name, os.getpid())
                try:
                    err = _delete(ctx, delete_prl)  # branch delete
                    if err:
                        return _clean_exit(err)
                except RuntimeError as err:
                    # Log the error. The return call below eats the error and stack trace.
                    LOG.exception(NTR("_delete() raised exception."))
                    return _clean_exit(err)
            # Process all of the tags at once.
            err = p4gf_tag.process_tags(ctx, prl + delete_prl)
            if err:
                return _clean_exit(err)

            # If we have any new Git Swarm review references that
            # auth/http_server must rename, send a list of such
            # references across process boundary, via a file.
            if gsreview_coll:
                gsreview_coll.to_file()

            p4gf_gc.process_garbage("at end of pre_receive_hook")
            p4gf_gc.report_objects(NTR("at end of pre_receive_hook"))

        return 0
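# Illustrative note in code form: git marks a branch deletion by sending an
# all-zero new SHA1, which is why the loop above routes such tuples into
# delete_prl. The default value below is made up.
def _example_is_branch_delete(new_sha1='0' * 40):
    return int(new_sha1, 16) == 0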
def import_submodules(ctx, view, change_view, import_paths):
    """For stream clients, create a submodule for each import.

    Arguments:
        ctx -- parent repo context.
        view -- the parent stream's 'View'.
        change_view -- the parent stream's 'ChangeView'.
        import_paths -- result from p4gf_streams.match_import_paths() on the
                        virtual stream's paths and the parent stream's paths.

    """
    usermap = p4gf_usermap.UserMap(ctx.p4gf)
    user_3tuple = usermap.lookup_by_p4user(p4gf_const.P4GF_USER)
    if not user_3tuple:
        LOG.error('Missing Perforce user {}'.format(p4gf_const.P4GF_USER))
        return
    client_name = ctx.config.p4client
    LOG.debug('processing imports for {}'.format(client_name))
    LOG.debug3(
        'import_submodules() view={}, change_view={}, import_paths={}'.format(
            view, change_view, import_paths))
    change_views = p4gf_streams.stream_imports_with_changes(
        view, change_view, import_paths)
    LOG.debug2('import_submodules() change_views={}'.format(change_views))
    if not change_views and LOG.isEnabledFor(logging.DEBUG2):
        LOG.debug2(
            'import_submodules() view={} change_view={} import_paths={}'.
            format(view, change_view, import_paths))
    # initialize and populate the submodules
    old_sha1 = ctx.view_repo.lookup_reference('HEAD').resolve().hex
    for depot_path, change_num, local_path in change_views:
        # avoid double-nesting by excluding the local path from the client path
        client_path = "//{}/...".format(client_name)
        LOG.debug('import_submodules() for {} => {}'.format(
            depot_path, client_path))
        stream_name = depot_path[:-4]
        if p4gf_util.spec_exists(ctx.p4, 'stream', stream_name):
            # convert stream name to repo name by pruning leading slashes
            repo_name = p4gf_streams.repo_name_from_depot_path(stream_name)
            config = None
            LOG.debug('initializing stream import for {}'.format(depot_path))
        else:
            # create a repo configuration file for this 1-line view
            repo_name = p4gf_streams.repo_name_from_depot_path(depot_path)
            client_less_path = CLIENT_LESS_REGEX.match(client_path).group(1)
            if client_path and client_path[0] == '"':
                client_less_path = '"' + client_less_path
            repo_view = depot_path + " " + client_less_path
            LOG.debug('creating config for {}'.format(repo_name))
            config = p4gf_config.default_config_repo_for_view_plain(
                ctx.p4, repo_name, repo_view)
        # prepare to initialize the repository
        p4 = p4gf_create_p4.create_p4()
        if not p4:
            LOG.error('unable to create P4 instance for {}'.format(repo_name))
            return
        with p4gf_lock.view_lock(p4, repo_name) as view_lock:
            if config:
                p4gf_config.create_file_repo_from_config(
                    ctx, repo_name, config)
            LOG.debug('initializing repo for {}'.format(repo_name))
            result = init_repo(p4, repo_name, view_lock, handle_imports=False)
            if result > INIT_REPO_OK:
                return result
            with p4gf_context.create_context(repo_name, view_lock) as subtxt:
                # set up gitmirror for child repos
                p4gf_gitmirror.setup_spawn(repo_name)
                # populate the submodule
                shared_in_progress = p4gf_lock.shared_host_view_lock_exists(
                    subtxt.p4, repo_name)
                if not shared_in_progress:
                    copy_submodule(ctx, repo_name, subtxt, local_path,
                                   change_num, user_3tuple)
        p4gf_create_p4.p4_disconnect(p4)
    # Remove any submodules controlled by Git Fusion that no longer match
    # any of the current import paths.
    deport_submodules(ctx, import_paths, user_3tuple)
    #
    # Ensure the Git commits we just created are copied back to Perforce by
    # faking a 'push' from the client. Roll the HEAD reference ('master')
    # back to the old SHA1, assign the commits to Perforce branches, then
    # move the reference back to the latest commit and copy everything to
    # the depot as usual.
    #
    new_head = ctx.view_repo.lookup_reference('HEAD').resolve()
    ctx.view_repo.git_reference_create(new_head.name, old_sha1, True)
    prt = p4gf_branch_id.PreReceiveTuple(old_sha1, new_head.hex, new_head.name)
    LOG.debug('Copying modules to depot: {}'.format(prt))
    assigner = p4gf_branch_id.Assigner(ctx.branch_dict(), [prt], ctx)
    assigner.assign()
    ctx.view_repo.git_reference_create(new_head.name, new_head.hex, True)
    err = p4gf_copy_to_p4.copy_git_changes_to_p4(ctx, prt, assigner, None)
    if err:
        LOG.error(err)
    def do_it(self):
        """Perform all of the setup, processing, and clean up.

        :rtype: int
        :return: status code for the process upon exit.

        """
        p4gf_util.log_environ(LOG, os.environ, self.label)
        log_l10n()
        p4gf_proc.install_stack_dumper()
        # Kick off garbage collection debugging, if enabled.
        p4gf_mem_gc.init_gc()

        # Use ExitStack to avoid deeply nested code.
        with ExitStack() as stack:
            stack.enter_context(p4gf_create_p4.Closer())
            p4 = p4gf_create_p4.create_p4_temp_client()
            if not p4:
                return 2
            repo_name = p4gf_path.cwd_to_repo_name()
            p4gf_util.reset_git_enviro()

            # Initialize the external process launcher early, before
            # allocating lots of memory, and just after all other
            # conditions have been checked.
            p4gf_proc.init()

            # Assume that something bad will happen (especially with preflight).
            exit_code = os.EX_SOFTWARE
            try:
                p4gf_log.configure_for_repo(repo_name)
                gid = os.environ[p4gf_const.P4GF_FORK_PUSH]
                self.before_p4key_lock(repo_name)
                with p4gf_lock.RepoLock(p4, repo_name,
                                        group_id=gid) as repo_lock:
                    # Work to be done with the p4key lock...
                    self.context = p4gf_context.create_context(repo_name)
                    self.context.p4gf = p4
                    self.context.repo_lock = repo_lock
                    self.context.foruser = os.getenv(p4gf_const.P4GF_FORUSER)
                    stack.enter_context(self.context)
                    self.before()
                    exit_code = self.process()
                if self.after_requires_write_lock():
                    # Work to be done without the p4key lock, but with the
                    # write lock. Note that we release the p4key lock
                    # before acquiring the write lock to avoid deadlock
                    # with the foreground process, which always gets the
                    # repo read/write lock _before_ acquiring the p4key
                    # lock. Hence all this complication with the locks.
                    with p4gf_git_repo_lock.write_lock(repo_name):
                        self.after(exit_code)
                else:
                    # The after() method does not need a write lock...
                    self.after(exit_code)
            finally:
                self.cleanup()
                p4gf_proc.stop()

        # Random tasks after all of the locks have been released.
        msg = NTR("at end of {hook}").format(hook=self.label)
        p4gf_mem_gc.process_garbage(msg)
        p4gf_mem_gc.report_objects(msg)
        return exit_code
def _wsgi_app(environ, start_response):
    """
    WSGI application to process the incoming Git client request. This is
    nearly equivalent to p4gf_auth_server.main() with the exception of
    input validation and error handling.
    """
    p4gf_log.record_http(environ)
    p4gf_version.log_version()
    _log_environ(environ)
    p4gf_version.version_check()
    LOG.debug("processing HTTP request, pid={}".format(os.getpid()))
    # Keep the content type to exactly 'text/plain' so there is at least
    # the remote chance that Git might show our error messages (does not
    # appear to work in practice, however).
    headers = [('Content-Type', 'text/plain')]

    encoding = sys.getfilesystemencoding()
    if encoding == 'ascii':
        # This encoding is wrong and will eventually lead to problems.
        LOG.error("Using 'ascii' file encoding will ultimately result in errors, "
            "please set LANG/LC_ALL to 'utf-8' in web server configuration.")
        start_response(_('500 Internal Server Error'), headers)
        return [b"Filesystem encoding not set to acceptable value.\n"]

    # Sanity check the request.
    for (name, status, msg) in _REQUIRED_HTTP_PARAMS:
        if name not in environ:
            start_response(status, headers)
            return [msg.encode('UTF-8')]

    input_name = environ['wsgi.input']
    # Extract the view_name_git by removing the expected git request suffixes
    path_info = environ['PATH_INFO']
    git_suffixes = ['/info/refs', '/HEAD', '/git-upload-pack', '/git-receive-pack']
    path_end = len(path_info)
    for suffix in git_suffixes:
        try:
            path_end = path_info.index(suffix)
            break
        except ValueError:
            pass
    # slice away the leading slash and the trailing git request suffixes
    view_name_git  = path_info[1:path_end]
    # and remove the view_name_git from the front of PATH_INFO
    environ['PATH_INFO'] = path_info[path_end:]
    LOG.debug("new PATH_INFO {0} view_name_git {1}".format(environ['PATH_INFO'], view_name_git))

    if not view_name_git:
        start_response(_('400 Bad Request'), headers)
        msg = _('Missing required repository name in URL\n')
        return [msg.encode('UTF-8')]
    # translate '/' ':' ' ' .. etc .. for internal view_name
    view_name = p4gf_translate.TranslateReponame.git_to_repo(view_name_git)
    LOG.debug("public view_name: {0}   internal view_name: {1}".format(view_name_git, view_name))

    audit_logger = p4gf_server_common.ExceptionAuditLogger()
    p4_closer = p4gf_create_p4.Closer()
    sink = OutputSink()
    temp_deleter = deleting(input_name)
    mirror_closer = unmirror(view_name)
    with audit_logger   \
        , p4_closer     \
        , sink          \
        , temp_deleter  \
        , mirror_closer:
        LOG.debug(p4gf_log.memory_usage())
        start_time = time.time()

        p4gf_util.reset_git_enviro()
        p4 = p4gf_create_p4.create_p4()
        if not p4:
            start_response(_('500 Internal Server Error'), headers)
            return [b"Perforce connection failed\n"]
        LOG.debug("connected to P4: %s", p4)

        p4gf_server_common.check_readiness(p4)
        p4gf_server_common.check_lock_perm(p4)
        if not p4gf_server_common.check_protects(p4):
            p4gf_server_common.raise_p4gf_perm()

        user = environ['REMOTE_USER']
        if p4gf_server_common.run_special_command(view_name, p4, user):
            start_response(_('200 OK'), headers)
            return [sink.readall()]
        command = _get_command(environ)
        if not command:
            start_response(_('400 Bad Request'), headers)
            return [b"Unrecognized service\n"]
        # Other places in the Perforce-to-Git phase will need to know the
        # name of client user, so set that here. As for Git-to-Perforce,
        # that is handled later by setting the REMOTE_USER envar. Notice
        # also that we're setting os.environ and not 'environ'.
        os.environ[p4gf_const.P4GF_AUTH_P4USER] = user
        # Likewise, some code needs a hint that the request is coming over
        # one protocol (HTTP) or the other (SSH).
        os.environ['REMOTE_ADDR'] = environ['REMOTE_ADDR']

        # Initialize the external process launcher early, before allocating lots
        # of memory, and just after all other conditions have been checked.
        p4gf_proc.init()
        # Prepare for possible spawn of GitMirror worker process by forking
        # now before allocating lots of memory.
        p4gf_gitmirror.setup_spawn(view_name)
        # Kick off garbage collection debugging, if enabled.
        p4gf_gc.init_gc()

        # Go no further, create NOTHING, if user not authorized.
        # We use the translated internal view name here for perm authorization
        required_perm = p4gf_server_common.COMMAND_TO_PERM[command]
        view_perm = p4gf_group.ViewPerm.for_user_and_view(p4, user, view_name, required_perm)
        try:
            p4gf_server_common.check_authorization(p4, view_perm, user, command, view_name)
        except p4gf_server_common.CommandError as ce:
            start_response(_('403 Forbidden'), headers)
            return [str(ce).encode('UTF-8')]

        # Create Git Fusion server depot, user, config. NOPs if already created.
        p4gf_init.init(p4)

        before_lock_time = time.time()
        with p4gf_lock.view_lock(p4, view_name) as view_lock:
            after_lock_time = time.time()

            # Create Git Fusion per-repo client view mapping and config.
            init_repo_status = p4gf_init_repo.init_repo(p4, view_name, view_lock)
            if init_repo_status == p4gf_init_repo.INIT_REPO_OK:
                repo_created = True
            elif init_repo_status == p4gf_init_repo.INIT_REPO_EXISTS:
                repo_created = False
            elif init_repo_status == p4gf_init_repo.INIT_REPO_NOVIEW:
                start_response(_('404 Not Found'), headers)
                return [sink.readall()]
            else:
                start_response(_('500 Internal Server Error'), headers)
                return [b"Repository initialization failed\n"]

            # If authorization came from default, not explicit group
            # membership, copy that authorization to a group now. Could
            # not do this until after p4gf_init_repo() has a chance to
            # create not-yet-existing groups.
            if view_perm:
                view_perm.write_if(p4)

            # Now that we have valid git-fusion-user and
            # git-fusion-<view> client, replace our temporary P4
            # connection with a more permanent Context, shared for the
            # remainder of this process.
            with p4gf_context.create_context(view_name, view_lock) as ctx:
                LOG.debug("reconnected to P4, p4gf=%s", ctx.p4gf)
                ctx.log_context()

                # cd into the work directory. Not all git functions react well
                # to --work-tree=xxxx.
                cwd = os.getcwd()
                os.chdir(ctx.view_dirs.GIT_WORK_TREE)

                # Only copy from Perforce to Git if no other process is cloning
                # from this Git repo right now.
                shared_in_progress = p4gf_lock.shared_host_view_lock_exists(ctx.p4, view_name)
                if not shared_in_progress:
                    # Copy any recent changes from Perforce to Git.
                    try:
                        LOG.debug("bare: No git-upload-pack in progress, force non-bare"
                                  " before update Git from Perforce.")
                        p4gf_git.set_bare(False)
                        p4gf_copy_p2g.copy_p2g_ctx(ctx)
                        p4gf_init_repo.process_imports(ctx)

                        # Now is also an appropriate time to clear out any stale Git
                        # Swarm reviews. We're pre-pull, pre-push, time when we've
                        # got exclusive write access to the Git repo,
                        GSReviewCollection.delete_refs_for_closed_reviews(ctx)

                    except p4gf_lock.LockCanceled as lc:
                        LOG.warning(str(lc))
                    except:
                        # Dump failure to log, BEFORE cleanup, just in case
                        # cleanup ALSO fails and throws its own error (which
                        # happens if we're out of memory).
                        LOG.error(traceback.format_exc())

                        if repo_created:
                            # Return to the original working directory to allow the
                            # config code to call os.getcwd() without dying, since
                            # we are about to delete the current working directory.
                            os.chdir(cwd)
                            p4gf_server_common.cleanup_client(ctx, view_name)
                        raise

                try:
                    exclusive = 'upload' not in command
                    is_push   = 'upload' not in command
                    git_caller = functools.partial(_call_git, input_name, environ, ctx)
                    p4gf_call_git.call_git(git_caller, ctx, view_name, view_lock, exclusive)
                    if is_push:
                        GSReviewCollection.post_push(ctx)
                except p4gf_atomic_lock.LockConflict as lc:
                    start_response(_('500 Internal Server Error'), headers)
                    return ["{}".format(lc).encode('UTF-8')]

        p4gf_gc.process_garbage('at end of auth_server')
        if LOG.isEnabledFor(logging.DEBUG):
            end_time = time.time()
            frm = NTR('Runtime: preparation {} ms, lock acquisition {} ms, processing {} ms')
            LOG.debug(frm.format(before_lock_time - start_time,
                                after_lock_time - before_lock_time,
                                end_time - after_lock_time))
        return []
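# Hedged hosting sketch: the function above is a plain WSGI application, so
# any WSGI container can serve it; wsgiref from the standard library is used
# here purely for illustration, and the port is arbitrary.
from wsgiref.simple_server import make_server


def _example_serve(port=8080):
    httpd = make_server('', port, _wsgi_app)
    httpd.serve_forever()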
def delete_client(args, p4, client_name):
    """Delete the named Perforce client and its workspace. Raises
    P4Exception if the client is not present, or the client configuration is
    not set up as expected.

    Keyword arguments:
    args        -- parsed command line arguments
    p4          -- Git user's Perforce client
    client_name -- name of client to be deleted

    """
    group_list = [p4gf_const.P4GF_GROUP_VIEW_PULL, p4gf_const.P4GF_GROUP_VIEW_PUSH]
    p4.user = p4gf_const.P4GF_USER

    print_verbose(args, "Checking for client {}...".format(client_name))
    if not p4gf_util.spec_exists(p4, 'client', client_name):
        raise P4.P4Exception('No such client "{}" defined'
                             .format(client_name))

    view_name = client_name[len(p4gf_const.P4GF_CLIENT_PREFIX):]
    view_lock = None  # We're clobbering and deleting. Overrule locks.
    try:
        ctx = p4gf_context.create_context(view_name, view_lock)
    except RuntimeError:
        # not a conforming Git Fusion client, ignore it
        return
    command_path = ctx.client_view_path()

    p4gf_dir = p4gf_util.p4_to_p4gf_dir(p4)
    view_dirs = p4gf_view_dirs.from_p4gf_dir(p4gf_dir, view_name)
    rm_list = [view_dirs.view_container]
    homedir = os.path.expanduser('~')
    raise_if_homedir(homedir, view_name, rm_list)

    # Scan for objects associated only with this view so we can either remove
    # them completely or update their 'views' attribute appropriately.
    p4.handler = FilterViewFstatHandler(view_name)
    p4.run("fstat", "-Oa", "-T", "depotFile, attr-views", "//.git-fusion/objects/...")
    objects_to_delete = p4.handler.files_to_delete
    objects_to_modify = p4.handler.files_to_modify
    p4.handler = None

    if not args.delete:
        print("p4 sync -f {}#none".format(command_path))
        print("p4 client -f -d {}".format(client_name))
        for d in rm_list:
            print("rm -rf {}".format(d))
        for to_delete in objects_to_delete:
            print("p4 obliterate -y {}".format(to_delete))
        if objects_to_modify:
            for (fname, views) in objects_to_modify:
                print("attribute -p -n views -v {} {}".format(views, fname))
        for group_template in group_list:
            group = group_template.format(view=view_name)
            print("p4 group -a -d {}".format(group))
        print('p4 counter -u -d {}'.format(p4gf_lock.view_lock_name(view_name)))

    else:
        print_verbose(args, "Removing client files for {}...".format(client_name))
        ctx.p4.run('sync', '-fq', command_path + '#none')
        print_verbose(args, "Deleting client {}...".format(client_name))
        p4.run('client', '-df', client_name)
        for d in rm_list:
            remove_file_or_dir(args, view_name, d)
        bite_size = 1000
        while len(objects_to_delete):
            to_delete = objects_to_delete[:bite_size]
            objects_to_delete = objects_to_delete[bite_size:]
            p4.run("obliterate", "-y", to_delete)
        if objects_to_modify:
            for (fname, views) in objects_to_modify:
                p4.run("edit", fname)
                p4.run("attribute", "-p", "-n", "views", "-v", views, fname)
            p4.run("submit", "-d", "'Removing {} from views attribute'".format(view_name))
        for group_template in group_list:
            delete_group(args, p4, group_template.format(view=view_name))
        _delete_counter(p4, p4gf_lock.view_lock_name(view_name))
def main():
    """set up repo for a view"""
    with ExceptionAuditLogger():
        args = parse_args(sys.argv[1:])
        if not args:
            return 1

        # Record the p4 user in environment. We use environment to pass to
        # git-invoked hook. We don't have to set ctx.authenticated_p4user because
        # Context.__init__() reads it from environment, which we set here.
        os.environ[p4gf_const.P4GF_AUTH_P4USER_ENVAR] = args.user

        # print "args={}".format(args)
        view_name = args.options[-1]

        p4gf_util.reset_git_enviro()
        p4 = connect_p4()
        if not p4:
            return 2
        LOG.debug("connected to P4: %s", p4)

        _check_lock_perm(p4)

        if not check_protects(p4):
            _raise_p4gf_perm()

        if run_special_command(view_name, p4, args.user):
            return 0

        # Go no further, create NOTHING, if user not authorized.
        view_perm = p4gf_group.ViewPerm.for_user_and_view(
            p4, args.user, view_name)
        _check_authorization(view_perm, args.user, args.command[0], view_name)
        # Create Git Fusion server depot, user, config. NOPs if already created.
        p4gf_init.init(p4)

        with p4gf_lock.view_lock(p4, view_name) as view_lock:

            # Create Git Fusion per-repo client view mapping and config.
            #
            # NOPs if already created.
            # Create the empty directory that will hold the git repo.
            init_repo_status = p4gf_init_repo.init_repo(p4, view_name)
            if init_repo_status == p4gf_init_repo.INIT_REPO_OK:
                repo_created = True
            elif init_repo_status == p4gf_init_repo.INIT_REPO_EXISTS:
                repo_created = False
            else:
                return 1

            # If authorization came from default, not explicit group
            # membership, copy that authorization to a group now. Could
            # not do this until after p4gf_init_repo() has a chance to
            # create not-yet-existing groups.
            view_perm.write_if(p4)

            # Now that we have valid git-fusion-user and
            # git-fusion-<view> client, replace our temporary P4
            # connection with a more permanent Context, shared for the
            # remainder of this process.
            ctx = p4gf_context.create_context(view_name, view_lock)
            del p4
            LOG.debug("reconnected to P4, p4gf=%s", ctx.p4gf)

            # Find directory paths to feed to git.
            ctx.view_dirs = p4gf_view_dirs.from_p4gf_dir(
                ctx.gitrootdir, view_name)
            ctx.log_context()

            # cd into the work directory. Not all git functions react well
            # to --work-tree=xxxx.
            cwd = os.getcwd()
            os.chdir(ctx.view_dirs.GIT_WORK_TREE)

            # Copy any recent changes from Perforce to Git.
            try:
                p4gf_copy_p2g.copy_p2g_ctx(ctx)
            except:
                # Dump failure to log, BEFORE cleanup, just in case
                # cleanup ALSO fails and throws its own error (which
                # happens if we're out of memory).
                LOG.error(traceback.format_exc())

                if repo_created:
                    # Return to the original working directory to allow the
                    # config code to call os.getcwd() without dying, since
                    # we are about to delete the current working directory.
                    os.chdir(cwd)
                    cleanup_client(ctx, view_name)
                raise

            # Detach git repo's workspace from master before calling
            # original git, otherwise we won't be able to push master.
            p4gf_util.checkout_detached_master()

            # Flush stderr before returning control to Git.
            # Otherwise Git's own output might interrupt ours.
            sys.stderr.flush()

            return _call_original_git(ctx, args)
def main():
    """set up repo for a view"""
    with ExceptionAuditLogger():
        args = parse_args(sys.argv[1:])
        if not args:
            return 1

        # Record the p4 user in environment. We use environment to pass to
        # git-invoked hook. We don't have to set ctx.authenticated_p4user because
        # Context.__init__() reads it from environment, which we set here.
        os.environ[p4gf_const.P4GF_AUTH_P4USER_ENVAR] = args.user

        # print "args={}".format(args)
        view_name = args.options[-1]

        p4gf_util.reset_git_enviro()
        p4 = connect_p4()
        if not p4:
            return 2
        LOG.debug("connected to P4: %s", p4)

        _check_lock_perm(p4)

        if not check_protects(p4):
            _raise_p4gf_perm()

        if run_special_command(view_name, p4, args.user):
            return 0

        # Go no further, create NOTHING, if user not authorized.
        view_perm = p4gf_group.ViewPerm.for_user_and_view(p4, args.user, view_name)
        _check_authorization(view_perm, args.user, args.command[0], view_name)
        # Create Git Fusion server depot, user, config. NOPs if already created.
        p4gf_init.init(p4)

        with p4gf_lock.view_lock(p4, view_name) as view_lock:

            # Create Git Fusion per-repo client view mapping and config.
            #
            # NOPs if already created.
            # Create the empty directory that will hold the git repo.
            init_repo_status = p4gf_init_repo.init_repo(p4, view_name)
            if init_repo_status == p4gf_init_repo.INIT_REPO_OK:
                repo_created = True
            elif init_repo_status == p4gf_init_repo.INIT_REPO_EXISTS:
                repo_created = False
            else:
                return 1

            # If authorization came from default, not explicit group
            # membership, copy that authorization to a group now. Could
            # not do this until after p4gf_init_repo() has a chance to
            # create not-yet-existing groups.
            view_perm.write_if(p4)

            # Now that we have valid git-fusion-user and
            # git-fusion-<view> client, replace our temporary P4
            # connection with a more permanent Context, shared for the
            # remainder of this process.
            ctx = p4gf_context.create_context(view_name, view_lock)
            del p4
            LOG.debug("reconnected to P4, p4gf=%s", ctx.p4gf)

            # Find directory paths to feed to git.
            ctx.view_dirs = p4gf_view_dirs.from_p4gf_dir(ctx.gitrootdir, view_name)
            ctx.log_context()

            # cd into the work directory. Not all git functions react well
            # to --work-tree=xxxx.
            cwd = os.getcwd()
            os.chdir(ctx.view_dirs.GIT_WORK_TREE)

            # Copy any recent changes from Perforce to Git.
            try:
                p4gf_copy_p2g.copy_p2g_ctx(ctx)
            except:
                # Dump failure to log, BEFORE cleanup, just in case
                # cleanup ALSO fails and throws its own error (which
                # happens if we're out of memory).
                LOG.error(traceback.format_exc())

                if repo_created:
                    # Return to the original working directory to allow the
                    # config code to call os.getcwd() without dying, since
                    # we are about to delete the current working directory.
                    os.chdir(cwd)
                    cleanup_client(ctx, view_name)
                raise

            # Detach git repo's workspace from master before calling
            # original git, otherwise we won't be able to push master.
            p4gf_util.checkout_detached_master()

            # Flush stderr before returning control to Git.
            # Otherwise Git's own output might interrupt ours.
            sys.stderr.flush()

            return _call_original_git(ctx, args)
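# --- Illustrative sketch, not part of Git Fusion ---------------------------
# main() above serializes all work on a repo behind
# "with p4gf_lock.view_lock(p4, view_name) as view_lock:".  Below is a
# minimal, hypothetical file-based version of that lock-as-context-manager
# pattern; the names and locking strategy are assumptions, not the p4gf_lock
# implementation.
import contextlib
import errno
import os
import time


@contextlib.contextmanager
def hypothetical_view_lock(lock_dir, view_name, poll_secs=0.5):
    """Hold an exclusive per-view lock for the duration of a 'with' block."""
    lock_path = os.path.join(lock_dir, view_name + '.lock')
    while True:
        try:
            # O_EXCL makes creation atomic: only one process wins.
            fd = os.open(lock_path, os.O_CREAT | os.O_EXCL | os.O_WRONLY)
            break
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise
            time.sleep(poll_secs)      # somebody else holds the lock; wait
    try:
        os.write(fd, str(os.getpid()).encode())
        yield lock_path                # caller does its work here
    finally:
        os.close(fd)
        os.unlink(lock_path)           # release for the next process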
def import_submodules(ctx, view, change_view, import_paths):
    """For stream clients, create a submodule for each import.

    Arguments:
        ctx -- parent repo context.
        view -- the parent stream's 'View'.
        change_view -- the parent stream's 'ChangeView'.
        import_paths -- result from p4gf_streams.match_import_paths() on the
                        virtual stream's paths and the parent stream's paths.

    """
    usermap = p4gf_usermap.UserMap(ctx.p4gf)
    user_3tuple = usermap.lookup_by_p4user(p4gf_const.P4GF_USER)
    if not user_3tuple:
        LOG.error('Missing Perforce user {}'.format(p4gf_const.P4GF_USER))
        return
    client_name = ctx.config.p4client
    LOG.debug('processing imports for {}'.format(client_name))
    LOG.debug3('import_submodules() view={}, change_view={}, import_paths={}'.format(
        view, change_view, import_paths))
    change_views = p4gf_streams.stream_imports_with_changes(view, change_view, import_paths)
    LOG.debug2('import_submodules() change_views={}'.format(change_views))
    if not change_views and LOG.isEnabledFor(logging.DEBUG2):
        LOG.debug2('import_submodules() view={} change_view={} import_paths={}'.format(
            view, change_view, import_paths))
    # initialize and populate the submodules
    old_sha1 = ctx.view_repo.lookup_reference('HEAD').resolve().hex
    for depot_path, change_num, local_path in change_views:
        # avoid double-nesting by excluding the local path from the client path
        client_path = "//{}/...".format(client_name)
        LOG.debug('import_submodules() for {} => {}'.format(depot_path, client_path))
        stream_name = depot_path[:-4]   # strip the trailing '/...' wildcard
        if p4gf_util.spec_exists(ctx.p4, 'stream', stream_name):
            # convert stream name to repo name by pruning leading slashes
            repo_name = p4gf_streams.repo_name_from_depot_path(stream_name)
            config = None
            LOG.debug('initializing stream import for {}'.format(depot_path))
        else:
            # create a repo configuration file for this 1-line view
            repo_name = p4gf_streams.repo_name_from_depot_path(depot_path)
            client_less_path = CLIENT_LESS_REGEX.match(client_path).group(1)
            if client_path and client_path[0] == '"':
                client_less_path = '"' + client_less_path
            repo_view = depot_path + " " + client_less_path
            LOG.debug('creating config for {}'.format(repo_name))
            config = p4gf_config.default_config_repo_for_view_plain(ctx.p4, repo_name, repo_view)
        # prepare to initialize the repository
        p4 = p4gf_create_p4.create_p4()
        if not p4:
            LOG.error('unable to create P4 instance for {}'.format(repo_name))
            return
        with p4gf_lock.view_lock(p4, repo_name) as view_lock:
            if config:
                p4gf_config.create_file_repo_from_config(ctx, repo_name, config)
            LOG.debug('initializing repo for {}'.format(repo_name))
            result = init_repo(p4, repo_name, view_lock, handle_imports=False)
            if result > INIT_REPO_OK:
                return result
            with p4gf_context.create_context(repo_name, view_lock) as subtxt:
                # set up gitmirror for child repos
                p4gf_gitmirror.setup_spawn(repo_name)
                # populate the submodule
                shared_in_progress = p4gf_lock.shared_host_view_lock_exists(subtxt.p4, repo_name)
                if not shared_in_progress:
                    copy_submodule(ctx, repo_name, subtxt, local_path, change_num, user_3tuple)
        p4gf_create_p4.p4_disconnect(p4)
    # Remove any submodules controlled by Git Fusion that no longer match
    # any of the current import paths.
    deport_submodules(ctx, import_paths, user_3tuple)
    #
    # Ensure the Git commits we just created are copied back to Perforce by
    # faking a 'push' from the client. Roll the HEAD reference ('master')
    # back to the old SHA1, assign the commits to Perforce branches, then
    # move the reference back to the latest commit and copy everything to
    # the depot as usual.
    #
    new_head = ctx.view_repo.lookup_reference('HEAD').resolve()
    ctx.view_repo.git_reference_create(new_head.name, old_sha1, True)
    prt = p4gf_branch_id.PreReceiveTuple(old_sha1, new_head.hex, new_head.name)
    LOG.debug('Copying modules to depot: {}'.format(prt))
    assigner = p4gf_branch_id.Assigner(ctx.branch_dict(), [prt], ctx)
    assigner.assign()
    ctx.view_repo.git_reference_create(new_head.name, new_head.hex, True)
    err = p4gf_copy_to_p4.copy_git_changes_to_p4(ctx, prt, assigner, None)
    if err:
        LOG.error(err)
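# --- Illustrative sketch, not part of Git Fusion ---------------------------
# import_submodules() above relies on CLIENT_LESS_REGEX (defined elsewhere in
# the module) to strip the leading "//<client>/" from a client path when
# building the one-line repo view.  A hypothetical pattern with the same
# shape, offered as an assumption rather than the real regex:
import re

HYPOTHETICAL_CLIENT_LESS_REGEX = re.compile(r'^"?//[^/]+/(.+)$')


def client_less(client_path):
    """Return the path portion after the //client/ prefix, or None."""
    m = HYPOTHETICAL_CLIENT_LESS_REGEX.match(client_path)
    return m.group(1) if m else None

# client_less('//git-fusion-repo/...')      -> '...'
# client_less('//git-fusion-repo/dir/...')  -> 'dir/...'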
Exemple #32
0
def delete_client(args, p4, client_name, metrics, prune_objs=True):
    """Delete the named Perforce client and its workspace. Raises
    P4Exception if the client is not present, or the client configuration is
    not set up as expected.

    Keyword arguments:
    args        -- parsed command line arguments
    p4          -- Git user's Perforce client
    client_name -- name of client to be deleted
    metrics     -- DeletionMetrics for collecting resulting metrics
    prune_objs  -- if True, delete associated objects from cache

    """
    # pylint: disable=R0912,R0915
    group_list = [
        p4gf_const.P4GF_GROUP_VIEW_PULL, p4gf_const.P4GF_GROUP_VIEW_PUSH
    ]
    p4.user = p4gf_const.P4GF_USER

    print_verbose(args, _("Checking for client '{}'...").format(client_name))
    if not p4gf_util.spec_exists(p4, 'client', client_name):
        raise P4.P4Exception(
            _("No such client '{}' defined").format(client_name))
    view_name = p4gf_util.client_to_view_name(client_name)
    p4gf_dir = p4gf_util.p4_to_p4gf_dir(p4)
    view_dirs = p4gf_view_dirs.from_p4gf_dir(p4gf_dir, view_name)
    p4gf_util.ensure_spec_values(p4, 'client', client_name,
                                 {'Root': view_dirs.p4root})

    view_lock = None  # We're clobbering and deleting. Overrule locks.
    with p4gf_context.create_context(view_name, view_lock) as ctx:
        command_path = ctx.client_view_path()

        homedir = os.path.expanduser('~')
        raise_if_homedir(homedir, view_name, view_dirs.view_container)

        # Scan for objects associated only with this view so we can remove them.
        objects_to_delete = []
        if prune_objs:
            objects_to_delete = _find_client_commit_objects(
                args, p4, view_name)

        # Do we have a repo config file to delete?
        config_file = p4gf_config.depot_path_repo(view_name) + '*'
        config_file_exists = p4gf_util.depot_file_exists(p4, config_file)

        # What counters shall we delete?
        counter_list = []
        counter_list.append(
            p4gf_context.calc_last_copied_change_counter_name(
                view_name, p4gf_util.get_server_id()))
        for spec in p4.run('counters', '-u', '-e',
                           "git-fusion-index-last-{},*".format(view_name)):
            counter_list.append(spec['counter'])
        for spec in p4.run('counters', '-u', '-e',
                           "git-fusion-index-branch-{},*".format(view_name)):
            counter_list.append(spec['counter'])

        if not args.delete:
            print(NTR('p4 sync -f {}#none').format(command_path))
            print(NTR('p4 client -f -d {}').format(client_name))
            print(NTR('rm -rf {}').format(view_dirs.view_container))
            print(
                NTR('Deleting {} objects from //{}/objects/...').format(
                    len(objects_to_delete), p4gf_const.P4GF_DEPOT))
            for group_template in group_list:
                group = group_template.format(view=view_name)
                print(NTR('p4 group -a -d {}').format(group))
            for c in counter_list:
                print(NTR('p4 counter -u -d {}').format(c))

            if config_file_exists:
                print(NTR('p4 sync -f {}').format(config_file))
                print(NTR('p4 delete {}').format(config_file))
                print(
                    NTR('p4 submit -d "Delete repo config for {view_name}" {config_file}'
                        ).format(view_name=view_name, config_file=config_file))
        else:
            print_verbose(
                args,
                NTR('Removing client files for {}...').format(client_name))
            ctx.p4.run('sync', '-fq', command_path + '#none')
            print_verbose(args,
                          NTR('Deleting client {}...').format(client_name))
            p4.run('client', '-df', client_name)
            metrics.clients += 1
            print_verbose(
                args,
                NTR("Deleting repo {0}'s directory {1}...").format(
                    view_name, view_dirs.view_container))
            _remove_tree(view_dirs.view_container, contents_only=False)
            metrics.files += _delete_files(p4, objects_to_delete, view_name)
            for group_template in group_list:
                _delete_group(args, p4, group_template.format(view=view_name),
                              metrics)
            for c in counter_list:
                _delete_counter(p4, c, metrics)

            if config_file_exists:
                p4gf_util.p4run_logged(p4, ['sync', '-fq', config_file])
                with p4gf_util.NumberedChangelist(
                        p4=p4,
                        description=_("Delete repo config for '{}'").format(
                            view_name)) as nc:
                    nc.p4run(["delete", config_file])
                    nc.submit()
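# --- Illustrative sketch, not part of Git Fusion ---------------------------
# delete_client() above follows a dry-run convention: without the delete flag
# it only prints the p4 commands it would run, otherwise it executes them.
# A stripped-down, hypothetical illustration of that pattern (names are
# assumptions, not Git Fusion helpers):
import subprocess


def run_or_print(cmd_argv, really_run):
    """Execute cmd_argv when really_run is True; otherwise just echo it."""
    printable = ' '.join(cmd_argv)
    if not really_run:
        print(printable)               # preview mode: show what would happen
        return None
    print('running: ' + printable)
    return subprocess.call(cmd_argv)

# run_or_print(['p4', 'client', '-f', '-d', 'git-fusion-example'], really_run=False)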
def main():
    """Copy incoming Git commits to Perforce changelists."""
    _log_environ(os.environ)
    log_l10n()
    LOG.debug("main() running, pid={}".format(os.getpid()))
    p4gf_proc.install_stack_dumper()
    for h in ['-?', '-h', '--help']:
        if h in sys.argv:
            print(_('Git Fusion pre-receive hook.'))
            return 2
    with p4gf_create_p4.Closer():
        p4gf_version.print_and_exit_if_argv()
        p4 = p4gf_create_p4.create_p4()
        if not p4:
            return 2
        p4gf_util.reset_git_enviro(p4)

        view_name = p4gf_util.cwd_to_view_name()
        view_lock = p4gf_lock.view_lock_heartbeat_only(p4, view_name)
        with p4gf_context.create_context(view_name, view_lock) as ctx:

            # this script is called by git while a context and temp clients
            # are already in use.  Don't sabotage that context by deleting
            # the temp clients from here.
            ctx.cleanup_client_pool = False

            # Read each input line (usually only one unless pushing multiple branches)
            # and convert to a list of "tuples" from which we can assign branches.
            prl = []
            delete_prl = []
            while True:
                line = sys.stdin.readline()
                if not line:
                    break
                LOG.debug('main() raw pre-receive-tuple: {}'.format(line))
                prt = PreReceiveTuple.from_line(line)
                if int(prt.new_sha1, 16) == 0:
                    delete_prl.append(prt)
                else:
                    prl.append(prt)

            # Initialize the external process launcher early, before allocating lots
            # of memory, and just after all other conditions have been checked.
            p4gf_proc.init()
            # Prepare for possible spawn of GitMirror worker process by forking
            # now before allocating lots of memory.
            p4gf_gitmirror.setup_spawn(view_name)
            # Kick off garbage collection debugging, if enabled.
            p4gf_gc.init_gc()

            # Reject attempt to delete any fully populated branch defined in
            # p4gf_config. Git Fusion never edits p4gf_config, so Git Fusion never
            # deletes fully populated branches. Edit p4gf_config yourself if you
            # want to remove a branch from history.
            for prt in delete_prl:
                git_branch_name = prt.git_branch_name()
                if not git_branch_name:
                    continue
                branch = ctx.git_branch_name_to_branch(git_branch_name)
                if not branch:
                    LOG.debug('attempt to delete branch {} which does not exist'
                              .format(git_branch_name))
                    break
                if not branch.is_lightweight:
                    raise RuntimeError(_('Cannot delete branches defined in'
                                         ' Git Fusion repo config file: {}')
                                       .format(git_branch_name))

            # Swarm review creates new Git merge commits. Must occur before branch
            # assignment so that the review reference can be moved to the new merge
            # commit.
            gsreview_coll = GSReviewCollection.from_prl(ctx, prl)
            if gsreview_coll:
                gsreview_coll.pre_copy_to_p4(prl)

            # Assign branches to each of the received commits for pushed branches; skip deletes.
            if prl:
                assigner = Assigner(ctx.branch_dict(), prl, ctx)
                assigner.assign()

            # For each of the heads being pushed, copy their commits to Perforce.
            if prl:
                try:
                    err = _copy( ctx
                               , prl           = prl
                               , assigner      = assigner
                               , gsreview_coll = gsreview_coll)   # branch push
                    if err:
                        return _clean_exit(err)
                except RuntimeError as err:
                    # Log the error. The return call below eats the error and stack trace.
                    LOG.exception(NTR("_copy() raised exception."))
                    return _clean_exit(err)
            # For each of the heads being deleted, remove the branch definition from p4gf_config2
            if delete_prl:
                p4gf_call_git.prohibit_interrupt(view_name, os.getpid())
                try:
                    err = _delete(ctx, delete_prl)     # branch delete
                    if err:
                        return _clean_exit(err)
                except RuntimeError as err:
                    # Log the error. The return call below eats the error and stack trace.
                    LOG.exception(NTR("_delete() raised exception."))
                    return _clean_exit(err)
            # Process all of the tags at once.
            err = p4gf_tag.process_tags(ctx, prl + delete_prl)
            if err:
                return _clean_exit(err)

            # If we have any new Git Swarm review references that
            # auth/http_server must rename, send a list of such
            # references across the process boundary via a file.
            if gsreview_coll:
                gsreview_coll.to_file()

            p4gf_gc.process_garbage("at end of pre_receive_hook")
            p4gf_gc.report_objects(NTR("at end of pre_receive_hook"))

        return 0
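# --- Illustrative sketch, not part of Git Fusion ---------------------------
# The pre-receive main() above reads lines of "old-sha1 new-sha1 ref-name"
# from stdin and treats an all-zero new-sha1 as a branch deletion.  A
# self-contained sketch of that classification step; HypotheticalPRT is a
# stand-in namedtuple, not the Git Fusion PreReceiveTuple class.
import sys
from collections import namedtuple

HypotheticalPRT = namedtuple('HypotheticalPRT', ['old_sha1', 'new_sha1', 'ref'])


def split_updates_and_deletes(lines):
    """Split pre-receive input lines into (updates, deletes) lists."""
    updates, deletes = [], []
    for line in lines:
        parts = line.strip().split()
        if len(parts) != 3:
            continue                    # ignore malformed input
        prt = HypotheticalPRT(*parts)
        if int(prt.new_sha1, 16) == 0:  # 0000...0 means "delete this ref"
            deletes.append(prt)
        else:
            updates.append(prt)
    return updates, deletes

# updates, deletes = split_updates_and_deletes(sys.stdin)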
def main(poll_only=False):
    """set up repo for a view
       view_name_git    is the untranslated repo name
       view_name        is the translated repo name
    """
    p4gf_proc.install_stack_dumper()
    _log_environ(os.environ)
    with p4gf_server_common.ExceptionAuditLogger()\
    , p4gf_create_p4.Closer():
        LOG.debug(p4gf_log.memory_usage())
        start_time = time.time()
        args = parse_args(sys.argv[1:])
        if not args:
            return 1

        is_push = 'upload' not in args.command[0]

        # Record the p4 user in environment. We use environment to pass to
        # git-invoked hook. We don't have to set ctx.authenticated_p4user because
        # Context.__init__() reads it from environment, which we set here.
        os.environ[p4gf_const.P4GF_AUTH_P4USER] = args.user

        # view_name_git    is the untranslated repo name
        # view_name        is the translated repo name

        # print "args={}".format(args)
        view_name_git = args.options[-1]
        # translate '/' ':' ' '  .. etc .. for internal view_name
        view_name = p4gf_translate.TranslateReponame.git_to_repo(view_name_git)
        LOG.debug("public view_name: {0}   internal view_name: {1}".format(
            view_name_git, view_name))

        p4gf_util.reset_git_enviro()
        p4 = p4gf_create_p4.create_p4()
        if not p4:
            return 2
        LOG.debug("connected to P4: %s", p4)

        p4gf_server_common.check_readiness(p4)

        p4gf_server_common.check_lock_perm(p4)

        if not p4gf_server_common.check_protects(p4):
            p4gf_server_common.raise_p4gf_perm()

        if p4gf_server_common.run_special_command(view_name, p4, args.user):
            return 0

        # Initialize the external process launcher early, before allocating lots
        # of memory, and just after all other conditions have been checked.
        p4gf_proc.init()
        # Prepare for possible spawn of GitMirror worker process by forking
        # now before allocating lots of memory.
        p4gf_gitmirror.setup_spawn(view_name)
        # Kick off garbage collection debugging, if enabled.
        p4gf_gc.init_gc()

        if poll_only:
            view_perm = None
        else:
            # Go no further, create NOTHING, if user not authorized.
            # We use the translated internal view name here for perm authorization
            required_perm = p4gf_server_common.COMMAND_TO_PERM[args.command[0]]
            view_perm = p4gf_group.ViewPerm.for_user_and_view(
                p4, args.user, view_name, required_perm)
            p4gf_server_common.check_authorization(p4, view_perm, args.user,
                                                   args.command[0], view_name)

        # Create Git Fusion server depot, user, config. NOPs if already created.
        p4gf_init.init(p4)

        write_motd()

        # view_name is the internal view name (identical to the Git name when no special characters exist)
        before_lock_time = time.time()
        with p4gf_lock.view_lock(p4, view_name) as view_lock:
            after_lock_time = time.time()

            # Create Git Fusion per-repo client view mapping and config.
            #
            # NOPs if already created.
            # Create the empty directory that will hold the git repo.
            init_repo_status = p4gf_init_repo.init_repo(
                p4, view_name, view_lock)
            if init_repo_status == p4gf_init_repo.INIT_REPO_OK:
                repo_created = True
            elif init_repo_status == p4gf_init_repo.INIT_REPO_EXISTS:
                repo_created = False
            else:
                return 1

            # If authorization came from default, not explicit group
            # membership, copy that authorization to a group now. Could
            # not do this until after p4gf_init_repo() has a chance to
            # create not-yet-existing groups.
            if view_perm:
                view_perm.write_if(p4)

            # Now that we have valid git-fusion-user and
            # git-fusion-<view> client, replace our temporary P4
            # connection with a more permanent Context, shared for the
            # remainder of this process.
            with p4gf_context.create_context(view_name, view_lock) as ctx:
                LOG.debug("reconnected to P4, p4gf=%s", ctx.p4gf)

                # Find directory paths to feed to git.
                ctx.log_context()

                # cd into the work directory. Not all git functions react well
                # to --work-tree=xxxx.
                cwd = os.getcwd()
                os.chdir(ctx.view_dirs.GIT_WORK_TREE)

                # Only copy from Perforce to Git if no other process is cloning
                # from this Git repo right now.
                shared_in_progress = p4gf_lock.shared_host_view_lock_exists(
                    ctx.p4, view_name)
                if not shared_in_progress:
                    # Copy any recent changes from Perforce to Git.
                    try:
                        LOG.debug(
                            "bare: No git-upload-pack in progress, force non-bare"
                            " before update Git from Perforce.")
                        p4gf_git.set_bare(False)
                        p4gf_copy_p2g.copy_p2g_ctx(ctx)
                        p4gf_init_repo.process_imports(ctx)

                        # Now is also an appropriate time to clear out any stale Git
                        # Swarm reviews: we're pre-pull, pre-push, when we have
                        # exclusive write access to the Git repo.
                        GSReviewCollection.delete_refs_for_closed_reviews(ctx)

                    except p4gf_lock.LockCanceled as lc:
                        LOG.warning(str(lc))
                    except:
                        # Dump failure to log, BEFORE cleanup, just in case
                        # cleanup ALSO fails and throws its own error (which
                        # happens if we're out of memory).
                        LOG.error(traceback.format_exc())

                        if repo_created:
                            # Return to the original working directory to allow the
                            # config code to call os.getcwd() without dying, since
                            # we are about to delete the current working directory.
                            os.chdir(cwd)
                            p4gf_server_common.cleanup_client(ctx, view_name)
                        raise

                if poll_only:
                    code = os.EX_OK
                else:

                    git_caller = functools.partial(_call_git, args, ctx)
                    try:

                        # Deep in call_git(), we grab a 'p4 reviews' lock on
                        # ctx.clientmap's LHS. Switch that clientmap to our
                        # full union view to prevent simultaneous 'git push'es
                        # from clobbering each other in some shared depot
                        # branch. Must include all lightweight branches, too.
                        ctx.switch_client_view_to_union()

                        exclusive = 'upload' not in args.command[0]
                        code = p4gf_call_git.call_git(git_caller, ctx,
                                                      view_name, view_lock,
                                                      exclusive)
                        if is_push:
                            GSReviewCollection.post_push(ctx)
                    except p4gf_atomic_lock.LockConflict as lc:
                        sys.stderr.write("{}\n".format(lc))
                        code = os.EX_SOFTWARE

            p4gf_gc.process_garbage(NTR('at end of auth_server'))
            if LOG.isEnabledFor(logging.DEBUG):
                end_time = time.time()
                frm = NTR("Runtime: preparation {} ms, lock acquisition {} ms,"
                          " processing {} ms")
                LOG.debug(
                    frm.format(before_lock_time - start_time,
                               after_lock_time - before_lock_time,
                               end_time - after_lock_time))
        return code
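# --- Illustrative sketch, not part of Git Fusion ---------------------------
# main() above wraps the eventual git invocation with
# functools.partial(_call_git, args, ctx) so that p4gf_call_git.call_git()
# can run it later, under the right locks, without knowing its arguments.
# The same pattern in miniature; every name below is hypothetical.
import functools


def call_git_stub(args, ctx):
    """Pretend to run git for the given arguments and context."""
    return 'git {} (repo={})'.format(' '.join(args), ctx)


def run_under_lock(git_caller):
    """Acquire whatever locks are needed, then invoke the zero-argument callable."""
    # ... lock acquisition would happen here ...
    return git_caller()

git_caller = functools.partial(call_git_stub, ['upload-pack', '.'], 'example-repo')
# run_under_lock(git_caller) -> "git upload-pack . (repo=example-repo)"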
Exemple #37
0
def main():
    """Update one or more repository hook scripts."""
    parser = p4gf_util.create_arg_parser(
        _('Updates the hook scripts in one or more Git Fusion repositories.'))
    parser.add_argument('-a',
                        '--all',
                        action='store_true',
                        help=_('process all known Git Fusion repositories'))
    parser.add_argument(NTR('repos'),
                        metavar=NTR('repo'),
                        nargs='*',
                        help=_('name of repository to be updated'))
    args = parser.parse_args()

    # Check that either --all, or a repo was named.
    if not args.all and len(args.repos) == 0:
        sys.stderr.write(_('Missing repo names; try adding --all option.\n'))
        sys.exit(2)
    if args.all and len(args.repos) > 0:
        sys.stderr.write(
            _('Ambiguous arguments. Choose --all or a repo name.\n'))
        sys.exit(2)

    p4 = p4gf_create_p4.create_p4_temp_client()
    if not p4:
        sys.exit(2)

    # Sanity check the connection (e.g. user logged in?) before proceeding.
    try:
        p4.fetch_client()
    except P4.P4Exception as e:
        sys.stderr.write(
            _('P4 exception occurred: {exception}').format(exception=e))
        sys.exit(1)

    if args.all:
        repos = p4gf_util.repo_config_list(p4)
        if not repos:
            print(_("No repos exist yet."))
    else:
        repos = args.repos
    p4gf_create_p4.p4_disconnect(p4)

    have_error = False
    for git_view in repos:
        repo_name = p4gf_translate.TranslateReponame.git_to_repo(git_view)
        print(_("Processing repository {repo_name}...").format(
            repo_name=repo_name),
              end='')
        try:
            ctx = p4gf_context.create_context(repo_name)
            ctx.create_config_if_missing(False)
            with ExitStack() as stack:
                stack.enter_context(ctx)
                ctx.repo_lock = p4gf_lock.RepoLock(ctx.p4gf,
                                                   repo_name,
                                                   blocking=False)
                stack.enter_context(ctx.repo_lock)
                # If __file__ contains a symlink, decoding at this top level
                # will cause Python to retain it, for use in the hook paths.
                p4gf_init_host.install_hook(ctx.repo_dirs.GIT_DIR,
                                            overwrite=True,
                                            hook_abs_path=__file__)
            print(_(" successful."))
        except p4gf_config.ConfigLoadError as e:
            import logging
            # cannot use __name__ since it will be "__main__"
            logging.getLogger("p4gf_update_hooks").exception(
                "failed to update hooks")
            print(_(" failed."))
            sys.stderr.write(
                _("\n{exception}\nHook scripts not updated for repo '{repo_name}'."
                  ).format(exception=e, repo_name=repo_name))
            have_error = True
    if have_error:
        sys.exit(1)
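# --- Illustrative sketch, not part of Git Fusion ---------------------------
# main() above uses contextlib.ExitStack so the repo context and its lock can
# be entered one after the other and still unwind in reverse order if either
# step fails.  A minimal stand-alone version of that pattern; the contexts
# here are hypothetical stand-ins.
import contextlib


@contextlib.contextmanager
def named_context(name):
    """Trivial context manager that announces enter/exit."""
    print('enter ' + name)
    try:
        yield name
    finally:
        print('exit ' + name)


def do_work_with_two_contexts():
    with contextlib.ExitStack() as stack:
        repo = stack.enter_context(named_context('repo-context'))
        # The second context can depend on the first already being open.
        lock = stack.enter_context(named_context(repo + '-lock'))
        return 'working with ' + lock

# do_work_with_two_contexts() prints the enter/exit pairs in LIFO order.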
    def _import_submodules(self):
        """For stream clients, create a submodule for each import."""
        # pylint:disable=too-many-statements, too-many-branches
        view = self.paths_parent.parent['View']  # the parent stream's 'View'
        change_view = self.paths_parent.parent.get('ChangeView')  # the parent stream's 'ChangeView'
        import_paths = self.paths_parent.import_paths

        # have already split this function several times...
        usermap = p4gf_usermap.UserMap(self.ctx.p4gf, self.ctx.email_case_sensitivity)
        user_3tuple = usermap.lookup_by_p4user(p4gf_const.P4GF_USER)
        if not user_3tuple:
            LOG.error('Missing Perforce user %s', p4gf_const.P4GF_USER)
            return
        client_name = self.ctx.config.p4client
        LOG.debug('processing imports for %s', client_name)
        LOG.debug3('_import_submodules() view=%s, change_view=%s, import_paths=%s',
                   view, change_view, import_paths)
        change_views = p4gf_streams.stream_imports_with_changes(view, change_view, import_paths)
        LOG.debug2('_import_submodules() change_views=%s', change_views)
        if not change_views and LOG.isEnabledFor(logging.DEBUG2):
            LOG.debug2('_import_submodules() view=%s change_view=%s import_paths=%s',
                       view, change_view, import_paths)
        # initialize and populate the submodules
        old_head = p4gf_pygit2.head_ref(self.ctx.repo)
        for depot_path, change_num, local_path in change_views:
            # avoid double-nesting by excluding the local path from the client path
            client_path = "//{}/...".format(client_name)
            LOG.debug('_import_submodules() for %s => %s', depot_path, client_path)
            stream_name = depot_path[:-4]   # strip the trailing '/...' wildcard
            if p4gf_p4spec.spec_exists(self.ctx.p4, 'stream', stream_name):
                # convert stream name to repo name by pruning leading slashes
                repo_name = p4gf_streams.repo_name_from_depot_path(stream_name)
                config = None
                LOG.debug('initializing stream import for %s', depot_path)
            else:
                # create a repo configuration file for this 1-line view
                repo_name = p4gf_streams.repo_name_from_depot_path(depot_path)
                client_less_path = CLIENT_LESS_REGEX.match(client_path).group(1)
                if client_path and client_path[0] == '"':
                    client_less_path = '"' + client_less_path
                repo_view = depot_path + " " + client_less_path
                LOG.debug('creating config for %s', repo_name)
                config = p4gf_config.default_config_repo_for_view_plain(self.ctx.p4,
                                                                        repo_name,
                                                                        repo_view)
            # prepare to initialize the repository
            #
            # Note that we skip the temp client counting mechanism in this
            # case because it is rather difficult to avoid.
            p4 = p4gf_create_p4.create_p4_temp_client(skip_count=True)
            if not p4:
                LOG.error('unable to create P4 instance for %s', repo_name)
                return
            if p4gf_const.READ_ONLY:
                try:
                    repo_config = p4gf_config.RepoConfig.from_depot_file(repo_name, p4)
                    subtxt = p4gf_context.create_context(repo_name)
                    subtxt.p4gf = p4
                    ir = InitRepo(p4, None).set_repo_config(repo_config)
                    ir.context = subtxt
                    ir.init_repo(handle_imports=False)
                    with subtxt:
                        # populate the submodule
                        self._copy_submodule(subtxt, local_path, change_num, user_3tuple)
                except p4gf_config.ConfigLoadError:
                    raise ReadOnlyException(_("Read-only instance cannot initialize repositories."))
            else:
                with p4gf_lock.RepoLock(p4, repo_name) as repo_lock:
                    if config:
                        p4gf_config.create_file_repo_from_config(self.ctx, repo_name, config)
                    LOG.debug('initializing repo for %s', repo_name)
                    repo_config = p4gf_config.RepoConfig.from_depot_file(repo_name, p4,
                                                                         create_if_missing=True)
                    subtxt = p4gf_context.create_context(repo_name)
                    subtxt.p4gf = p4
                    subtxt.repo_lock = repo_lock
                    ir = InitRepo(p4, repo_lock).set_repo_config(repo_config)
                    ir.context = subtxt
                    ir.init_repo(handle_imports=False)
                    with subtxt:
                        # populate the submodule
                        self._copy_submodule(subtxt, local_path, change_num, user_3tuple)
            if p4.connected():
                p4gf_create_p4.p4_disconnect(p4)
        # Remove any submodules controlled by Git Fusion that no longer match
        # any of the current import paths.
        self._deport_submodules(import_paths, user_3tuple)

        if not p4gf_const.READ_ONLY:
            # The process() method above sets 'enable-git-submodules' in the
            # parent repo's p4gf_config to disable submodule updates, but
            # self.ctx.submodules is left at True so the import of the
            # submodule itself is not rejected. However, if the import fails,
            # the next pull attempt would fail now that
            # 'enable-git-submodules' has been written as False. So, bypass
            # the submodules protection for the fake push, which may be
            # creating the imported submodule itself in the parent repo.

            submodules = self.ctx.submodules
            self.ctx.submodules = True    # temporary
            self._ensure_commits_copied(old_head)
            self.ctx.submodules = submodules
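# --- Illustrative sketch, not part of Git Fusion ---------------------------
# The end of _import_submodules() temporarily forces ctx.submodules to True
# around the fake push and then restores the saved value.  Wrapping that
# save/override/restore dance in a context manager is one way to keep it
# exception-safe; this is a sketch under assumed names, not the Git Fusion
# code.
import contextlib


@contextlib.contextmanager
def temporarily(obj, attr, value):
    """Set obj.attr to value for the duration of the 'with' block."""
    saved = getattr(obj, attr)
    setattr(obj, attr, value)
    try:
        yield obj
    finally:
        setattr(obj, attr, saved)      # restore even if the body raises

# with temporarily(ctx, 'submodules', True):        # hypothetical usage
#     ensure_commits_copied(old_head)               # hypothetical call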
def main():
    """Update the disk usage p4 keys for one or more repositories."""
    desc = _("Set/reset the total and pending p4 keys.")
    epilog = _("Without the -y/--reset option, only displays current values.")
    parser = p4gf_util.create_arg_parser(desc, epilog=epilog)
    parser.add_argument('-a', '--all', action='store_true',
                        help=_('process all known Git Fusion repositories'))
    parser.add_argument('-y', '--reset', action='store_true',
                        help=_('perform the reset of the p4 keys'))
    parser.add_argument(NTR('repos'), metavar=NTR('repo'), nargs='*',
                        help=_('name of repository to be updated'))
    args = parser.parse_args()

    # Check that either --all, or 'repos' was specified.
    if not args.all and len(args.repos) == 0:
        sys.stderr.write(_('Missing repo names; try adding --all option.\n'))
        sys.exit(2)
    if args.all and len(args.repos) > 0:
        sys.stderr.write(_('Ambiguous arguments. Choose --all or a repo name.\n'))
        sys.exit(2)

    with p4gf_create_p4.Closer():
        p4 = p4gf_create_p4.create_p4_temp_client()
        if not p4:
            sys.exit(2)
        # Sanity check the connection (e.g. user logged in?) before proceeding.
        try:
            p4.fetch_client()
        except P4.P4Exception as e:
            sys.stderr.write(_('P4 exception occurred: {exception}').format(exception=e))
            sys.exit(1)

        if args.all:
            repos = p4gf_util.repo_config_list(p4)
            if len(repos) == 0:
                print(_('No Git Fusion repositories found, nothing to do.'))
                sys.exit(0)
        else:
            repos = args.repos
        p4gf_create_p4.p4_disconnect(p4)

        for repo in repos:
            repo_name = p4gf_translate.TranslateReponame.git_to_repo(repo)
            print(_("Processing repository {repo_name}... ").format(repo_name=repo_name), end='')
            ctx = p4gf_context.create_context(repo_name)
            with ExitStack() as stack:
                stack.enter_context(ctx)
                ctx.repo_lock = p4gf_lock.RepoLock(ctx.p4gf, repo_name, blocking=False)
                stack.enter_context(ctx.repo_lock)
                limits = PushLimits(ctx)
                if args.reset:
                    # Copy any Perforce changes down to this Git repository.
                    p4gf_copy_p2g.copy_p2g_ctx(ctx)
                    # Attempt to trim any unreferenced objects.
                    p4gf_proc.popen(['git', '--git-dir=' + ctx.repo.path, 'prune'])
                    limits.post_copy()
                # Display current key values and disk usage.
                pending_mb = limits.get_pending_mb()
                total_mb = limits.get_total_mb()
                current_mb = limits.space_total
                print(
                    _('{total_mb:.2f}M total, {pending_mb:.2f}M pending, '
                      '{current_mb:.2f}M current')
                    .format(total_mb=total_mb,
                            pending_mb=pending_mb,
                            current_mb=current_mb), end='')
            print("")
def delete_client(args, p4, client_name):
    """Delete the named Perforce client and its workspace. Raises
    P4Exception if the client is not present, or the client configuration is
    not set up as expected.

    Keyword arguments:
    args        -- parsed command line arguments
    p4          -- Git user's Perforce client
    client_name -- name of client to be deleted

    """
    group_list = [
        p4gf_const.P4GF_GROUP_VIEW_PULL, p4gf_const.P4GF_GROUP_VIEW_PUSH
    ]
    p4.user = p4gf_const.P4GF_USER

    print_verbose(args, "Checking for client {}...".format(client_name))
    if not p4gf_util.spec_exists(p4, 'client', client_name):
        raise P4.P4Exception('No such client "{}" defined'.format(client_name))

    view_name = client_name[len(p4gf_const.P4GF_CLIENT_PREFIX):]
    view_lock = None  # We're clobbering and deleting. Overrule locks.
    try:
        ctx = p4gf_context.create_context(view_name, view_lock)
    except RuntimeError:
        # not a conforming Git Fusion client, ignore it
        return
    command_path = ctx.client_view_path()

    p4gf_dir = p4gf_util.p4_to_p4gf_dir(p4)
    view_dirs = p4gf_view_dirs.from_p4gf_dir(p4gf_dir, view_name)
    rm_list = [view_dirs.view_container]
    homedir = os.path.expanduser('~')
    raise_if_homedir(homedir, view_name, rm_list)

    # Scan for objects associated only with this view so we can either remove
    # them completely or update their 'views' attribute appropriately.
    p4.handler = FilterViewFstatHandler(view_name)
    p4.run("fstat", "-Oa", "-T", "depotFile, attr-views",
           "//.git-fusion/objects/...")
    objects_to_delete = p4.handler.files_to_delete
    objects_to_modify = p4.handler.files_to_modify
    p4.handler = None

    if not args.delete:
        print("p4 sync -f {}#none".format(command_path))
        print("p4 client -f -d {}".format(client_name))
        for d in rm_list:
            print("rm -rf {}".format(d))
        for to_delete in objects_to_delete:
            print("p4 obliterate -y {}".format(to_delete))
        if objects_to_modify:
            for (fname, views) in objects_to_modify:
                print("attribute -p -n views -v {} {}".format(views, fname))
        for group_template in group_list:
            group = group_template.format(view=view_name)
            print("p4 group -a -d {}".format(group))
        print('p4 counter -u -d {}'.format(
            p4gf_lock.view_lock_name(view_name)))

    else:
        print_verbose(args,
                      "Removing client files for {}...".format(client_name))
        ctx.p4.run('sync', '-fq', command_path + '#none')
        print_verbose(args, "Deleting client {}...".format(client_name))
        p4.run('client', '-df', client_name)
        for d in rm_list:
            remove_file_or_dir(args, view_name, d)
        bite_size = 1000
        while len(objects_to_delete):
            to_delete = objects_to_delete[:bite_size]
            objects_to_delete = objects_to_delete[bite_size:]
            p4.run("obliterate", "-y", to_delete)
        if objects_to_modify:
            for (fname, views) in objects_to_modify:
                p4.run("edit", fname)
                p4.run("attribute", "-p", "-n", "views", "-v", views, fname)
            p4.run("submit", "-d",
                   "'Removing {} from views attribute'".format(view_name))
        for group_template in group_list:
            delete_group(args, p4, group_template.format(view=view_name))
        _delete_counter(p4, p4gf_lock.view_lock_name(view_name))
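# --- Illustrative sketch, not part of Git Fusion ---------------------------
# delete_client() above obliterates cached objects in slices of 1000 so a
# single 'p4 obliterate' never receives an enormous argument list.  The same
# chunking idiom as a reusable generator; this is a sketch, not the original
# helper.
def in_batches(items, bite_size=1000):
    """Yield successive slices of at most bite_size items."""
    for start in range(0, len(items), bite_size):
        yield items[start:start + bite_size]

# for batch in in_batches(objects_to_delete):
#     p4.run("obliterate", "-y", batch)        # hypothetical call per batch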
def _wsgi_app(environ, start_response):
    """
    WSGI application to process the incoming Git client request. This is
    nearly equivalent to p4gf_auth_server.main() with the exception of
    input validation and error handling.
    """
    p4gf_log.record_http(environ)
    p4gf_version.log_version()
    _log_environ(environ)
    p4gf_version.version_check()
    LOG.debug("processing HTTP request, pid={}".format(os.getpid()))
    # Keep the content type to exactly 'text/plain' so there is at least
    # the remote chance that Git might show our error messages (does not
    # appear to work in practice, however).
    headers = [('Content-Type', 'text/plain')]

    encoding = sys.getfilesystemencoding()
    if encoding == 'ascii':
        # This encoding is wrong and will eventually lead to problems.
        LOG.error(
            "Using 'ascii' file encoding will ultimately result in errors, "
            "please set LANG/LC_ALL to 'utf-8' in web server configuration.")
        start_response(_('500 Internal Server Error'), headers)
        return [b"Filesystem encoding not set to acceptable value.\n"]

    # Sanity check the request.
    for (name, status, msg) in _REQUIRED_HTTP_PARAMS:
        if name not in environ:
            start_response(status, headers)
            return [msg.encode('UTF-8')]

    input_name = environ['wsgi.input']
    # Extract the view_name_git by removing the expected git request suffixes
    path_info = environ['PATH_INFO']
    git_suffixes = [
        '/info/refs', '/HEAD', '/git-upload-pack', '/git-receive-pack'
    ]
    path_end = len(path_info)
    for suffix in git_suffixes:
        try:
            path_end = path_info.index(suffix)
            break
        except ValueError:
            pass
    # slice away the leading slash and the trailing git request suffixes
    view_name_git = path_info[1:path_end]
    # and remove the view_name_git from the front of PATH_INFO
    environ['PATH_INFO'] = path_info[path_end:]
    LOG.debug("new PATH_INFO {0} view_name_git {1}".format(
        environ['PATH_INFO'], view_name_git))

    if not view_name_git:
        start_response(_('400 Bad Request'), headers)
        msg = _('Missing required repository name in URL\n')
        return [msg.encode('UTF-8')]
    # translate '/' ':' ' ' .. etc .. for internal view_name
    view_name = p4gf_translate.TranslateReponame.git_to_repo(view_name_git)
    LOG.debug("public view_name: {0}   internal view_name: {1}".format(
        view_name_git, view_name))

    audit_logger = p4gf_server_common.ExceptionAuditLogger()
    p4_closer = p4gf_create_p4.Closer()
    sink = OutputSink()
    temp_deleter = deleting(input_name)
    mirror_closer = unmirror(view_name)
    with audit_logger   \
        , p4_closer     \
        , sink          \
        , temp_deleter  \
        , mirror_closer:
        LOG.debug(p4gf_log.memory_usage())
        start_time = time.time()

        p4gf_util.reset_git_enviro()
        p4 = p4gf_create_p4.create_p4()
        if not p4:
            start_response(_('500 Internal Server Error'), headers)
            return [b"Perforce connection failed\n"]
        LOG.debug("connected to P4: %s", p4)

        p4gf_server_common.check_readiness(p4)
        p4gf_server_common.check_lock_perm(p4)
        if not p4gf_server_common.check_protects(p4):
            p4gf_server_common.raise_p4gf_perm()

        user = environ['REMOTE_USER']
        if p4gf_server_common.run_special_command(view_name, p4, user):
            start_response(_('200 OK'), headers)
            return [sink.readall()]
        command = _get_command(environ)
        if not command:
            start_response(_('400 Bad Request'), headers)
            return [b"Unrecognized service\n"]
        # Other places in the Perforce-to-Git phase will need to know the
        # name of client user, so set that here. As for Git-to-Perforce,
        # that is handled later by setting the REMOTE_USER envar. Notice
        # also that we're setting os.environ and not 'environ'.
        os.environ[p4gf_const.P4GF_AUTH_P4USER] = user
        # Likewise, some code needs a hint that the request is coming over
        # one protocol (HTTP) or the other (SSH).
        os.environ['REMOTE_ADDR'] = environ['REMOTE_ADDR']

        # Initialize the external process launcher early, before allocating lots
        # of memory, and just after all other conditions have been checked.
        p4gf_proc.init()
        # Prepare for possible spawn of GitMirror worker process by forking
        # now before allocating lots of memory.
        p4gf_gitmirror.setup_spawn(view_name)
        # Kick off garbage collection debugging, if enabled.
        p4gf_gc.init_gc()

        # Go no further, create NOTHING, if the user is not authorized.
        # We use the translated internal view name here for permission checks.
        required_perm = p4gf_server_common.COMMAND_TO_PERM[command]
        view_perm = p4gf_group.ViewPerm.for_user_and_view(
            p4, user, view_name, required_perm)
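        # COMMAND_TO_PERM maps the requested service to the Git Fusion
        # permission it needs (presumably pull for git-upload-pack and push
        # for git-receive-pack; the exact names are defined in
        # p4gf_server_common).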
        try:
            p4gf_server_common.check_authorization(p4, view_perm, user,
                                                   command, view_name)
        except p4gf_server_common.CommandError as ce:
            start_response(_('403 Forbidden'), headers)
            return [str(ce).encode('UTF-8')]

        # Create Git Fusion server depot, user, config. NOPs if already created.
        p4gf_init.init(p4)

        before_lock_time = time.time()
        with p4gf_lock.view_lock(p4, view_name) as view_lock:
            after_lock_time = time.time()

            # Create Git Fusion per-repo client view mapping and config.
            init_repo_status = p4gf_init_repo.init_repo(
                p4, view_name, view_lock)
            if init_repo_status == p4gf_init_repo.INIT_REPO_OK:
                repo_created = True
            elif init_repo_status == p4gf_init_repo.INIT_REPO_EXISTS:
                repo_created = False
            elif init_repo_status == p4gf_init_repo.INIT_REPO_NOVIEW:
                start_response(_('404 Not Found'), headers)
                return [sink.readall()]
            else:
                start_response(_('500 Internal Server Error'), headers)
                return [b"Repository initialization failed\n"]

            # If authorization came from the default, not from explicit group
            # membership, copy that authorization to a group now. We could not
            # do this until p4gf_init_repo() had a chance to create any
            # not-yet-existing groups.
            if view_perm:
                view_perm.write_if(p4)

            # Now that we have a valid git-fusion-user and a
            # git-fusion-<view> client, replace our temporary P4
            # connection with a more permanent Context, shared for the
            # remainder of this process.
            with p4gf_context.create_context(view_name, view_lock) as ctx:
                LOG.debug("reconnected to P4, p4gf=%s", ctx.p4gf)
                ctx.log_context()

                # cd into the work directory. Not all git functions react well
                # to --work-tree=xxxx.
                cwd = os.getcwd()
                os.chdir(ctx.view_dirs.GIT_WORK_TREE)

                # Only copy from Perforce to Git if no other process is cloning
                # from this Git repo right now.
                shared_in_progress = p4gf_lock.shared_host_view_lock_exists(
                    ctx.p4, view_name)
                if not shared_in_progress:
                    # Copy any recent changes from Perforce to Git.
                    try:
                        LOG.debug(
                            "bare: No git-upload-pack in progress, forcing"
                            " non-bare before updating Git from Perforce.")
                        p4gf_git.set_bare(False)
                        p4gf_copy_p2g.copy_p2g_ctx(ctx)
                        p4gf_init_repo.process_imports(ctx)

                        # Now is also an appropriate time to clear out any
                        # stale Git Swarm reviews. We're pre-pull and pre-push,
                        # a time when we have exclusive write access to the
                        # Git repo.
                        GSReviewCollection.delete_refs_for_closed_reviews(ctx)

                    except p4gf_lock.LockCanceled as lc:
                        LOG.warning(str(lc))
                    except:
                        # Dump failure to log, BEFORE cleanup, just in case
                        # cleanup ALSO fails and throws its own error (which
                        # happens if we're out of memory).
                        LOG.error(traceback.format_exc())

                        if repo_created:
                            # Return to the original working directory to allow the
                            # config code to call os.getcwd() without dying, since
                            # we are about to delete the current working directory.
                            os.chdir(cwd)
                            p4gf_server_common.cleanup_client(ctx, view_name)
                        raise

                try:
                    # A push (git-receive-pack) needs the exclusive lock and
                    # post-push processing; a fetch (git-upload-pack) does not.
                    is_push = 'upload' not in command
                    exclusive = is_push
                    git_caller = functools.partial(_call_git, input_name,
                                                   environ, ctx)
                    p4gf_call_git.call_git(git_caller, ctx, view_name,
                                           view_lock, exclusive)
                    if is_push:
                        GSReviewCollection.post_push(ctx)
                except p4gf_atomic_lock.LockConflict as lc:
                    start_response(_('500 Internal Server Error'), headers)
                    return ["{}".format(lc).encode('UTF-8')]

        p4gf_gc.process_garbage('at end of auth_server')
        if LOG.isEnabledFor(logging.DEBUG):
            end_time = time.time()
            frm = NTR(
                'Runtime: preparation {} ms, lock acquisition {} ms, processing {} ms'
            )
            LOG.debug(
                frm.format(1000 * (before_lock_time - start_time),
                           1000 * (after_lock_time - before_lock_time),
                           1000 * (end_time - after_lock_time)))
        return []
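
The path-splitting logic near the top of this handler is small enough to exercise on its own. The sketch below is illustrative only: the helper name split_git_path is made up, and Git Fusion itself performs this work inline as shown above.

# Standalone sketch of the PATH_INFO splitting shown above. The helper name
# 'split_git_path' is hypothetical; Git Fusion does this inline.
GIT_SUFFIXES = ['/info/refs', '/HEAD', '/git-upload-pack', '/git-receive-pack']


def split_git_path(path_info):
    """Return (repo_name, remaining_path) for a Git smart HTTP request path."""
    path_end = len(path_info)
    for suffix in GIT_SUFFIXES:
        try:
            path_end = path_info.index(suffix)
            break
        except ValueError:
            pass
    # Drop the leading slash from the repo name; keep the suffix as the new
    # PATH_INFO handed to the Git backend.
    return path_info[1:path_end], path_info[path_end:]


if __name__ == '__main__':
    # Hypothetical repository names, for illustration:
    print(split_git_path('/myrepo/info/refs'))            # ('myrepo', '/info/refs')
    print(split_git_path('/team/proj/git-receive-pack'))  # ('team/proj', '/git-receive-pack')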