Ejemplo n.º 1
0
def create_repo_client(p4, view_name, client_name, client_root, view, stream):
    '''Create a Git Fusion repo client.'''
    desc = (_("Created by Perforce Git Fusion for work in '{view}'.").format(
        view=p4gf_translate.TranslateReponame.repo_to_git(view_name)))

    # Build the spec values once. A stream client gets 'Stream' but no
    # 'View'; a classic client gets 'View' but no 'Stream'. The update()
    # calls preserve the key insertion order of the original per-branch
    # dict literals.
    values = {
        'Owner': p4gf_const.P4GF_USER,
        'LineEnd': NTR('unix'),
    }
    if stream:
        values.update({
            'Root': client_root,
            'Options': CLIENT_OPTIONS,
            'Host': None,
            'Stream': stream,
            'Description': desc,
        })
    else:
        values.update({
            'View': view,
            'Root': client_root,
            'Options': CLIENT_OPTIONS,
            'Host': None,
            'Description': desc,
        })
    p4gf_util.set_spec(p4, 'client', spec_id=client_name, values=values)

    LOG.debug("Successfully created Git Fusion client %s", client_name)
    def calc_cmd_line_vars(ctx, fe_commit, branch_id, jobs, spec_file_path):
        '''
        Return our mapping of command-line variable substitutions,
        populating if necessary.

        DANGER: Little Bobby Tables! Sanitize your shell inputs!

        These become strings in a command. Watch out for any input
        that a user can control such as repo name or jobs list.
        '''
        branch = ctx.branch_dict()[branch_id]
        git_branch_name = _or_space(branch.git_branch_name)
        client_host = _or_space(p4gf_protect.get_remote_client_addr())
        # Jobs list is user-controlled: sanitize before it reaches a shell.
        jobs_str = _sanitize(' '.join(jobs)) if jobs else ''

        return {
            '%repo%': _sanitize(ctx.config.view_name),
            '%sha1%': fe_commit['sha1'],
            '%branch_id%': branch_id,
            '%git-branch-name%': _sanitize(git_branch_name),
            '%client%': ctx.p4.client,
            '%clienthost%': client_host,
            '%serverport%': ctx.p4.port,
            '%quote%': '"',
            '%user%': fe_commit['owner'],
            '%formfile%': spec_file_path,
            '%formname%': NTR('new'),
            '%formtype%': NTR('change'),
            '%jobs%': jobs_str,
        }
def delete_all_local(args, p4, metrics):
    """Remove "everything" as if from a read-only Git Fusion instance.

    :param args: parsed command line arguments
    :param p4: Git user's Perforce client
    :param metrics: for counting delete actions.

    Similar to deleting everything from the master server, except that very
    little is removed from the Perforce server (e.g. counters and files).
    In short, only the client and local directories are removed.

    """
    p4.user = p4gf_const.P4GF_USER
    print(_('Connected to {P4PORT}').format(P4PORT=p4.port))
    client_name = p4gf_util.get_object_client_name()
    localroot = get_p4gf_localroot(p4)
    # Nothing to report or delete without a local root.
    if not localroot:
        return
    if not args.delete:
        # Preview mode: only report what would be done.
        if args.no_obliterate:
            print(NTR('p4 sync -f {}...#none').format(localroot))
        else:
            print(NTR('p4 client -f -d {}').format(client_name))
            print(NTR('rm -rf {}').format(localroot))
    elif not args.no_obliterate:
        # Need this in order to use --gc later on
        p4gf_util.p4_client_df(p4, client_name)
        metrics.clients += 1
        print_verbose(
            args,
            _("Deleting client '{client_name}'s workspace...").format(
                client_name=client_name))
        _remove_local_root(localroot)
Ejemplo n.º 4
0
    def list_for_user(p4, user):
        '''build list of repos visible to user'''
        result = RepoList()

        for view in p4gf_util.view_list(p4):
            # Check user permissions for this view. PERM_PUSH avoids
            # checking the repo config file for
            # read-permission-check = user.
            view_perm = p4gf_group.ViewPerm.for_user_and_view(
                p4, user, view, p4gf_group.PERM_PUSH)
            if view_perm.can_push():
                perm = NTR('push')
            elif view_perm.can_pull():
                perm = NTR('pull')
            else:
                # No access at all: this repo is not visible to the user.
                continue

            repo_cfg = p4gf_config.get_repo(p4, view)
            charset = repo_cfg.get(p4gf_config.SECTION_REPO,
                                   p4gf_config.KEY_CHARSET,
                                   fallback='')
            desc = repo_cfg.get(p4gf_config.SECTION_REPO,
                                p4gf_config.KEY_DESCRIPTION,
                                fallback='')
            result.repos.append((view, perm, charset, desc))

        # Sort by repo/view name for stable output.
        result.repos.sort(key=lambda entry: entry[0])
        return result
Ejemplo n.º 5
0
def _parse_argv():
    '''Convert argv into a usable dict. Dump usage/help and exit if necessary.'''
    help_txt = p4gf_util.read_bin_file('p4gf_init_repo.help.txt')
    if help_txt is False:
        help_txt = _("Missing '{}' file!").format(
            NTR('p4gf_init_repo.help.txt'))
    parser = p4gf_util.create_arg_parser(
        desc=_('Configure and populate Git Fusion repo.'),
        epilog=None,
        usage=_('p4gf_init_repo.py [options] <name>'),
        help_custom=help_txt)
    parser.add_argument('--start', metavar="")
    parser.add_argument('--noclone', action=NTR('store_true'))
    parser.add_argument('--config')
    parser.add_argument('--p4client')
    parser.add_argument(NTR('view'), metavar=NTR('view'))
    parser.add_argument('--charset')
    parser.add_argument('--enablemismatchedrhs', action=NTR('store_true'))
    args = parser.parse_args()
    # Mutually exclusive option pairs.
    if args.noclone and args.start:
        _print_stderr(_('Cannot use both --start and --noclone'))
        sys.exit(1)
    if args.config and args.charset:
        _print_stderr(_('Cannot use both --config and --charset'))
        # BUG FIX: previously printed the error but fell through and
        # returned args anyway; exit non-zero like the check above.
        sys.exit(1)
    if args.config and args.p4client:
        _print_stderr(_('Cannot use both --config and --p4client'))
        sys.exit(1)  # BUG FIX: same missing exit as above.
    LOG.debug("args={}".format(args))
    return args
Ejemplo n.º 6
0
def depot_branch_info_from_config(config):
    """Return a DepotBranchInfo built from a configparser object.

    Reads the depot branch id section: root depot path, the first parent
    (KEY_PARENT_BRANCH_ID / KEY_PARENT_CHANGELIST), and any additional
    'parent-N-branch-id' / 'parent-N-changelist' style options, which are
    paired up in alpha-numeric order.
    """
    dbi = DepotBranchInfo()
    dbi.depot_branch_id = _dbid_section(config)
    dbi.root_depot_path = config.get(dbi.depot_branch_id, "root-depot-path")
    firstbranch = None
    firstcl     = None
    branch      = []
    cl          = []
    for option in config.options(dbi.depot_branch_id):
        value = config.get(dbi.depot_branch_id, option)
        if option == KEY_PARENT_BRANCH_ID:
            firstbranch = value
        elif option == KEY_PARENT_CHANGELIST:
            firstcl = value
        elif option.endswith(NTR('branch-id')):
            branch.append(option + ':' + value)
        elif option.endswith(NTR('changelist')):
            cl.append(option + ':' + value)

    branch = p4gf_util.alpha_numeric_sort(branch)
    cl     = p4gf_util.alpha_numeric_sort(cl)

    if firstbranch and firstcl:
        dbi.parent_depot_branch_id_list.append(firstbranch)
        dbi.parent_changelist_list.append(firstcl)

    # BUG FIX: split on the FIRST ':' only (split(':', 1)) so a value that
    # itself contains a colon is not truncated at that colon. zip() pairs
    # each branch-id with its changelist.
    for branch_entry, cl_entry in zip(branch, cl):
        dbi.parent_depot_branch_id_list.append(branch_entry.split(':', 1)[1])
        dbi.parent_changelist_list.append(cl_entry.split(':', 1)[1])

    return dbi
Ejemplo n.º 7
0
def main():
    '''
    Parse the command-line arguments and print a configuration.
    '''
    p4gf_util.has_server_id_or_exit()
    p4gf_client = p4gf_util.get_object_client_name()
    p4 = p4gf_create_p4.create_p4(client=p4gf_client)
    if not p4:
        sys.exit(1)
    desc = _("""Display the effective global or repository configuration.
All comment lines are elided and formatting is normalized per the
default behavior of the configparser Python module.
The default configuration options will be produced if either of the
configuration files is missing.
""")
    parser = p4gf_util.create_arg_parser(desc=desc)
    parser.add_argument(
        NTR('repo'),
        metavar=NTR('R'),
        nargs='?',
        default='',
        help=_('name of the repository, or none to display global.'))
    args = parser.parse_args()
    if args.repo:
        cfg = get_repo(p4, args.repo)
    else:
        cfg = get_global(p4)
    if not cfg:
        print(_('Unable to read configuration file!'))
        # BUG FIX: previously fell through and called cfg.write() on a
        # falsy/None cfg, raising AttributeError. Exit non-zero instead.
        sys.exit(1)
    cfg.write(sys.stdout)
Ejemplo n.º 8
0
def ssh2_key_generator(itr):
    """A generator function that produces (fingerprint, username, data)
    tuples suitable for writing to the authorized keys file. Reads lines
    from the given line generator, which is assumed to yield results in
    a format common to several SSH2 implementations.
    """
    # Typical "SSH2" authorization file stores related information on separate
    # lines, need to piece it back together again.
    try:
        # Ugly code, but I want to iterate the file line by line while also
        # having look-ahead behavior since user information may be split
        # across multiple lines. What's more, not all lines are managed by
        # this script, so must allow for arbitrary lines of text.
        while True:
            line = next(itr)
            # Defaults for lines this script does not manage: no
            # fingerprint, no user.
            fp = NO_FP
            user = ''
            if line.lower().startswith(NTR('key ')):
                try:
                    # read the next line, possibly finding "Options"
                    # (assumes the Options line, when present, immediately
                    # follows its Key line — TODO confirm against writers)
                    ln = next(itr)
                    if ln and ln.lower().startswith(NTR('options ')):
                        fp, user = extract_fp_and_user(ln)
                    # Both the Key line and its look-ahead line are yielded
                    # with the same (fp, user) so they stay associated.
                    yield (fp, user, line)
                    yield (fp, user, ln)
                except StopIteration:
                    # File ended right after the Key line; emit it alone.
                    yield (fp, user, line)
            else:
                yield (fp, user, line)
    except StopIteration:
        # Input exhausted by the outer next(); end the generator normally
        # (required since PEP 479 — StopIteration may not escape a generator).
        return
    def to_log_level(self, level):
        '''Return a debugging dump of this row: a one-line summary, plus
        one line per cell when `level` is at or below DEBUG2.'''

        # Single line dump
        fmt = NTR('Row: {sha1:<7} {mode:<6} {p4_request:<6} {p4filetype:<10}'
               ' {gwt_path:<10} {depot_path:<10}')

        # Placeholder '0000000' stands in for a missing sha1.
        topline = fmt.format(
                           sha1       = p4gf_util.abbrev(self.sha1) \
                                        if self.sha1 else '0000000'
                         , mode       = p4gf_util.quiet_none(
                                        p4gf_util.mode_str(  self.mode))
                         , gwt_path   = self.gwt_path
                         , depot_path = self.depot_path
                         , p4_request = p4gf_util.quiet_none(self.p4_request)
                         , p4filetype = p4gf_util.quiet_none(self.p4filetype)
                         )

        # Detail each cell at DEBUG2 not DEBUG3. DEBUG2 produces one-
        # line dumps for each cell, which should be useful. DEBUG3 will
        # produce multi-line dumps of each cell, which is VERY noisy.
        # NOTE(review): DEBUG2/DEBUG3 are custom levels presumably added
        # to the logging module elsewhere in this project — confirm.
        if level <= logging.DEBUG2:
            # Multi-line dump.
            lines = [ topline ]
            for i, cell in enumerate(self.cells):
                if not cell:
                    # Empty cell: show its raw (falsy) value.
                    lines.append(NTR('  {i}: {cell}').format(i=i, cell=cell))
                else:
                    lines.append(NTR('  {i}: {cell}')
                            .format( i=i
                                   , cell=cell.to_log_level(level)))
            return '\n'.join(lines)
        else:
            return topline
Ejemplo n.º 10
0
def install_hook(git_dir):
    """Install Git Fusion's pre-receive hook"""
    # Write the hook script into the repo's hooks directory and make it
    # executable by everyone (-rwxr-xr-x).
    hook_path = os.path.join(git_dir, NTR('hooks'), NTR('pre-receive'))
    with open(hook_path, 'w') as hook_file:
        hook_file.write(hook_file_content())
    os.chmod(hook_path, 0o755)
Ejemplo n.º 11
0
def as_string_extended(*, p4=None, args=None, include_checksum=False):
    """Return a page-long dump of Git Fusion, P4D, and uname info."""
    # Git Fusion version info, including Git and P4Python.
    version_text = as_string(include_checksum)

    lines = []
    # Git Fusion server OS version: uname -a
    lines.append(NTR('uname: {}').format(uname()))
    lines.append(NTR('Git Fusion path: {}').format(
        os.path.dirname(os.path.realpath(__file__))))
    lines.append(_get_lsb_release())

    # P4PORT, if supplied
    if p4:
        lines.append(_('Perforce server address: {p4port}').format(p4port=p4.port))

    # 'p4 info', if we can get it.
    try:
        _p4 = _create_p4(p4=p4, args=args)

        # Run 'p4 info' un-tagged to get human-friendly
        # server info labels.
        lines.append(NTR("p4 info:"))
        lines.extend(p4gf_p4cache.fetch_info(_p4, tagged=False))

        # Run 'p4 info' a SECOND time, tagged, to get
        # the "unicode" setting that untagged omits.
        unicode_setting = p4gf_p4cache.fetch_info(_p4, tagged=True).get(
            "unicode", _("disabled"))
        lines.append(_("Unicode: {value}").format(value=unicode_setting))
    except P4Exception:
        # Best effort: report what we have even without a server connection.
        pass
    return version_text + "\n".join(lines) + "\n"
def _masked_bin(x, mask):
    '''0b000111000 ==> "111" '''
    mask_str = NTR('{:b}').format(mask)
    bit_str  = NTR('{:b}').format(mask & x)
    first_1_index = mask_str.find('1')
    last_1_index  = mask_str.find('1')
    return bit_str[first_1_index:last_1_index]
Ejemplo n.º 13
0
def _apply_default_config(parser):
    """Given a ConfigParser instance, merge with the default logging settings.

    Produce the effective logging configuration and return as a tuple of
    the general, audit, and auth_keys settings.
    """
    # General logging defaults, merged with whatever the parser provides.
    general_defaults = NTR({
        # New default is to write to separate files (GF-2729).
        # 'filename': os.environ['HOME'] + '/p4gf_log.txt',
        'format': '%(asctime)s %(name)-10s %(levelname)-8s %(message)s',
        'datefmt': '%m-%d %H:%M:%S',
        'root': 'WARNING',
    })
    general_config = _effective_config(parser, _general_section,
                                       general_defaults)

    # Audit logging: destination defaults to the standard syslog when no
    # explicit file or handler was configured.
    audit_config = _effective_config(parser, _audit_section,
                                     {'root': NTR('warning')})
    if 'filename' not in audit_config and 'handler' not in audit_config:
        audit_config['handler'] = NTR('syslog')

    # authorized_keys logging: likewise defaults to syslog.
    auth_keys_config = _effective_config(parser, _auth_keys_section,
                                         {'root': NTR('warning')})
    if 'filename' not in auth_keys_config and 'handler' not in auth_keys_config:
        auth_keys_config['handler'] = NTR('syslog')

    return (general_config, audit_config, auth_keys_config)
Ejemplo n.º 14
0
def parse_argv():
    """Convert command line into a usable dict."""
    usage = _("""p4gf_rollback.py [options] --change-num NNN --repo <repo-name>
options:
    --p4port/-p     Perforce server
    --p4user/-u     Perforce user
    --execute/-y    yes, do it (normally just previews/reports)
    --obliterate    delete history from Perforce
    --verbose/-v    write more to console
    --quiet/-q      write nothing but errors to console
""")
    parser = p4gf_util.create_arg_parser(
        help_file=NTR('p4gf_rollback.help.txt'),
        usage=usage,
        add_p4_args=True,
        add_log_args=True,
        add_debug_arg=True)
    parser.add_argument('--change-num', metavar="NNN", required=True)
    parser.add_argument('--repo', metavar="<repo-name>", required=True)
    parser.add_argument('--execute', '-y', action=NTR("store_true"))
    parser.add_argument('--obliterate', action=NTR("store_true"))

    args = parser.parse_args()
    p4gf_util.apply_log_args(args, LOG)
    LOG.debug("args={}".format(args))
    # argparse hands us a string; downstream code wants an int.
    args.change_num = int(args.change_num)
    return args
    def to_log_level(self, level):
        """Debugging dump: one summary line, plus one line per cell at
        DEBUG2 and below."""
        # Single-line summary of this row; '0000000' stands in for a
        # missing sha1.
        fmt = NTR('Row: {sha1:<7} {mode:<6} {p4_request:<6} {p4filetype:<10}'
                  ' {gwt_path:<10} {depot_path:<10}')
        sha1_text = p4gf_util.abbrev(self.sha1) if self.sha1 else '0000000'
        topline = fmt.format(
            sha1=sha1_text,
            mode=p4gf_util.quiet_none(p4gf_util.mode_str(self.mode)),
            gwt_path=self.gwt_path,
            depot_path=self.depot_path,
            p4_request=p4gf_util.quiet_none(self.p4_request),
            p4filetype=p4gf_util.quiet_none(self.p4filetype))

        # Detail each cell at DEBUG2 not DEBUG3. DEBUG2 produces one-
        # line dumps for each cell, which should be useful. DEBUG3 will
        # produce multi-line dumps of each cell, which is VERY noisy.
        if level > logging.DEBUG2:
            return topline

        # Multi-line dump.
        cell_fmt = NTR('  {i}: {cell}')
        lines = [topline]
        for i, cell in enumerate(self.cells):
            detail = cell.to_log_level(level) if cell else cell
            lines.append(cell_fmt.format(i=i, cell=detail))
        return '\n'.join(lines)
Ejemplo n.º 16
0
def _masked_bin(x, mask):
    '''0b000111000 ==> "111" '''
    mask_str = NTR('{:b}').format(mask)
    bit_str = NTR('{:b}').format(mask & x)
    first_1_index = mask_str.find('1')
    last_1_index = mask_str.find('1')
    return bit_str[first_1_index:last_1_index]
Ejemplo n.º 17
0
def _log_cmd_result(result, expect_error):
    """
    Record the command results in the log.

    If command completed successfully, record output at DEBUG level so that
    folks can suppress it with cmd:INFO. But if command completed with error
    (non-zero return code), then record its output at ERROR level so that
    cmd:INFO users still see it.
    """
    ec = result['ec']
    out = result['out']
    err = result['err']

    if ec and not expect_error:
        # Unexpected non-zero exit: log loudly.
        log_level = logging.ERROR
        cmd_log = logging.getLogger('cmd.cmd')
        if not cmd_log.isEnabledFor(logging.DEBUG):
            # We did not log the command. Do so now.
            cmd_log.log(log_level, result['cmd'])
    else:
        # Success, or an error the caller expected: log only if the
        # caller is REALLY interested.
        log_level = logging.DEBUG

    logging.getLogger('cmd.exit').log(log_level, NTR("exit: {0}").format(ec))
    out_log = logging.getLogger('cmd.out')
    out_log.debug(NTR("out : ct={0}").format(len(out)))
    if len(out) and out_log.isEnabledFor(logging.DEBUG3):
        out_log.debug3(NTR("out :\n{0}").format(out))
    if len(err):
        logging.getLogger('cmd.err').log(log_level,
                                         NTR("err :\n{0}").format(err))
Ejemplo n.º 18
0
def _install_signal_handler(handler):
    '''
    Install the given signal handler (either a function or one of the
    signal module constants) for all of the terminating signals.
    It is probably a good idea to use _signal_restorer to preserve and
    later restore any existing signal handlers.
    '''
    if LOG.isEnabledFor(logging.DEBUG):
        # Derive a human-readable label for the handler being installed.
        if callable(handler):
            label = handler.__qualname__
        elif not isinstance(handler, int):
            label = str(handler)
        elif handler == signal.SIG_DFL:
            label = NTR('default')
        elif handler == signal.SIG_IGN:
            label = NTR('ignore')
        else:
            label = str(handler)
        LOG.debug("_install_signal_handler({}) for pid={}".format(
            label, os.getpid()))
    # Install for every terminating signal we care about.
    for sig in (signal.SIGHUP, signal.SIGINT, signal.SIGQUIT,
                signal.SIGTERM, signal.SIGTSTP):
        signal.signal(sig, handler)
Ejemplo n.º 19
0
def main():
    """Validate the configuration for one or more repositories."""
    # pylint:disable=too-many-branches
    desc = _("Report on the validity of a repository configuration.")
    parser = p4gf_util.create_arg_parser(desc)
    parser.add_argument('-a', '--all', action='store_true',
                        help=_('process all known Git Fusion repositories'))
    parser.add_argument(NTR('repos'), metavar=NTR('repo'), nargs='*',
                        help=_('name of repository or file to be validated'))
    args = parser.parse_args()

    # Check that either --all, or 'repos' was specified, but not both.
    if not args.all and len(args.repos) == 0:
        sys.stderr.write(_('Missing repo names; try adding --all option.\n'))
        sys.exit(2)
    if args.all and len(args.repos) > 0:
        sys.stderr.write(_('Ambiguous arguments. Choose --all or a repo name.\n'))
        sys.exit(2)

    with p4gf_create_p4.Closer():
        p4 = p4gf_create_p4.create_p4_temp_client()
        if not p4:
            sys.exit(2)
        # Sanity check the connection (e.g. user logged in?) before proceeding.
        try:
            p4.fetch_client()
        except P4.P4Exception as e:
            sys.stderr.write(_('P4 exception occurred: {exception}').format(exception=e))
            sys.exit(1)

        p4gf_branch.init_case_handling(p4)

        if args.all:
            repos = p4gf_util.repo_config_list(p4)
            if len(repos) == 0:
                print(_('No Git Fusion repositories found, nothing to do.'))
                sys.exit(0)
        else:
            repos = args.repos

        for repo in repos:
            if os.path.exists(repo):
                # Argument names a local file: load the config from disk.
                print(_("Processing file {repo_name}...").format(repo_name=repo))
                try:
                    config = p4gf_config.RepoConfig.from_local_file(repo, p4, repo)
                except p4gf_config.ConfigLoadError as e:
                    # BUG FIX: sys.stderr.write() takes a single string; the
                    # old two-argument call write("{}\n", e) raised TypeError.
                    sys.stderr.write("{}\n".format(e))
                    # BUG FIX: skip validation when the config failed to load
                    # (previously fell through with config unbound or stale).
                    continue
                except p4gf_config.ConfigParseError as e:
                    sys.stderr.write("{}\n".format(e))
                    continue
            else:
                # Otherwise treat the argument as a repo name in the depot.
                repo_name = p4gf_translate.TranslateReponame.git_to_repo(repo)
                print(_("Processing repository {repo_name}...").format(repo_name=repo_name))
                try:
                    config = p4gf_config.RepoConfig.from_depot_file(repo_name, p4)
                except p4gf_config.ConfigLoadError as err:
                    sys.stderr.write("{}\n".format(err))
                    continue
            if Validator(config, p4).is_valid():
                print(_("ok"))
            print("")
Ejemplo n.º 20
0
def main():
    """Parse the command-line arguments, then estimate and report the size
    of the repository described by the given p4gf_config file."""
    desc = _(DESCRIPTION)
    parser = p4gf_util.create_arg_parser(desc=desc)
    parser.add_argument(
        NTR('--config'),
        metavar=NTR('config'),
        help=_('Path to Git Fusion p4gf_config file (required)'),
        required=True)
    parser.add_argument('-u',
                        '--p4user',
                        metavar='p4user',
                        help=_('Perforce user'))
    parser.add_argument('-p',
                        '--p4port',
                        metavar='p4port',
                        help=_('Perforce server'))
    parser.add_argument('--locale',
                        metavar='locale',
                        default='en_US.UTF-8',
                        help=_('system locale setting'))
    args = parser.parse_args()

    # Remember whether a server id is configured; RepoSize is told so it
    # can behave accordingly.
    need_serverid = False
    try:
        p4gf_util.get_server_id()
    except:  # pylint: disable=W0702
        need_serverid = True

    # If connect args not passed, check that the environment is set.
    # NOTE: order matters below — the explicit command-line value wins and
    # is pushed INTO os.environ so child code sees the same setting.
    if not args.p4port:
        if 'P4PORT' not in os.environ and 'P4GF_ENV' not in os.environ:
            print(
                _("Neither --p4port is an argument nor are P4GF_ENV and P4PORT in the environment."
                  ))
            sys.exit(0)
        if 'P4PORT' in os.environ:
            args.p4port = os.environ['P4PORT']
    else:
        # Set the requested port for Git Fusion's environment
        os.environ['P4PORT'] = args.p4port

    if not args.p4user:
        if 'P4USER' not in os.environ:
            print(
                _("Neither --p4user is an argument nor is P4USER in the environment."
                  ))
            sys.exit(0)
        else:
            args.p4user = os.environ['P4USER']
    else:
        # Set the requested user for Git Fusion's environment
        os.environ['P4USER'] = args.p4user

    # Estimate and report the repo size for the configured repo.
    repo_size = RepoSize(args.config, args.p4port, args.p4user, args.locale,
                         need_serverid)
    repo_size.estimate_size()
    repo_size.report()
Ejemplo n.º 21
0
 def _add_parent(self, parent_commit, keyword=NTR('from')):
     '''Add one parent to the commit we're currently building.'''
     # Parent is either SHA1 of an existing commit or mark of a commit
     # created earlier in this import operation. Assume a length of
     # 40 indicates the former and mark ids will always be shorter.
     is_sha1 = isinstance(parent_commit, str) and len(parent_commit) == 40
     if is_sha1:
         self.__append(NTR('{keyword} {sha1}\n').format(keyword=keyword, sha1=parent_commit))
     else:
         self.__append(NTR('{keyword} :{mark}\n').format(keyword=keyword, mark=parent_commit))
Ejemplo n.º 22
0
def _deb_gitact(x):
    '''Debugging converter from int to P4S string.'''
    # Mask off the ghost bit, then show one column per action flag:
    # the flag's letter when set, '.' when clear.
    masked = x & ~GHOST_BIT
    flags = ((A, NTR('A')), (M, NTR('M')), (D, NTR('D')),
             (T, NTR('T')), (N, NTR('N')))
    return ''.join(letter if bit & masked else '.'
                   for bit, letter in flags)
Ejemplo n.º 23
0
def _test_dump_result_to_stdout(assigner):
    '''
    Dump all assignments to stdout in a format that a test script would enjoy.
    '''
    fmt = NTR("{sha1:<7.7}\t{branch_id}\t{subject}")
    for rev in assigner.rev_list:
        # The commit subject is the first line of 'git log --pretty=format:%s'.
        git_result = p4gf_proc.popen(
            ['git', 'log', '-1', '--pretty=format:%s', rev])
        subject = git_result['out'].splitlines()[0]
        print(fmt.format(sha1=rev,
                         branch_id=assigner.assign_dict[rev].branch_id_str(),
                         subject=subject))
Ejemplo n.º 24
0
    def filter_paths(self, blobs):
        """Run list of paths through filter and set list of paths that don't pass."""
        # check against one map for read, one for write
        # if check fails, figure out if it was the view map or the protects
        # that caused the problem and report accordingly
        self.author_denied = []
        self.pusher_denied = []
        self.foruser_denied = []
        self.fusion_denied = []
        self.unmapped = []
        # Translate client-syntax paths to depot syntax via the view map.
        c2d = P4.Map.RIGHT2LEFT

        LOG.debug('filter_paths() write_filter: %s', self.write_filter)
        for blob in blobs:
            gwt_path = self.ctx.gwt_path(blob['path'])
            topath_c = gwt_path.to_client()
            topath_d = gwt_path.to_depot()

            LOG.debug('filter_paths() topath_d: %s', topath_d)
            # for all actions, need to check write access for dest path
            result = "  "  # for logging
            # Paths under //.git-fusion/objects are Git Fusion's own
            # bookkeeping; exempt them from the user's write check.
            if topath_d and P4GF_DEPOT_OBJECTS_RE.match(topath_d):
                LOG.debug('filter_paths() topath_d in //.git-fusion/objects')
                continue
            # do not require user write access to //.git-fusion/branches
            if topath_d and P4GF_DEPOT_BRANCHES_RE.match(topath_d):
                LOG.debug('filter_paths() topath_d in //.git-fusion/branches')
                continue
            if not self.write_filter.includes(topath_c, c2d):
                # Write check failed: cascade through the possible causes,
                # most specific first, and record the path in the matching
                # denial list.
                if not self.view_map.includes(topath_c, c2d):
                    self.unmapped.append(topath_c)
                    result = NTR('unmapped')
                elif not (self.ignore_author_perms
                          or self.write_protect_author.includes(topath_d)):
                    self.author_denied.append(topath_c)
                    result = NTR('author denied')
                elif (self.write_protect_pusher
                      and not self.write_protect_pusher.includes(topath_d)):
                    self.pusher_denied.append(topath_c)
                    result = NTR('pusher denied')
                elif (self.write_protect_foruser
                      and not self.write_protect_foruser.includes(topath_d)):
                    self.foruser_denied.append(topath_c)
                    result = NTR('foruser denied')
                elif not self.write_protect_fusion.includes(topath_d):
                    self.fusion_denied.append(topath_c)
                    result = NTR('Git Fusion denied')
                else:
                    # write_filter rejected the path but no individual map
                    # explains why — should not happen.
                    result = "?"
                LOG.error('filter_paths() {:<13} {}, {}, {}'.format(
                    result, blob['path'], topath_d, topath_c))
            elif LOG.isEnabledFor(logging.DEBUG):
                LOG.debug('filter_paths() topath_c in write_filter: %s',
                          topath_c)
 def _protect_dict_to_str(cls, pdict):
     """
     Format one protection line as dictionary to string.
     """
     # Leading '-' marks an exclusion line.
     excl = '-' if 'unmap' in pdict else ''
     # Protection lines name either a user or a group.
     if NTR('user') in pdict:
         who = NTR('user ') + pdict['user']
     else:
         who = NTR('group ') + pdict['group']
     return "{0} {1} {2} {3}{4}".format(pdict['perm'], who, pdict['host'],
                                        excl, pdict['depotFile'])
Ejemplo n.º 26
0
    def _write_ancestor_commit_list(self, f):
        """If we have any ancestor commits, write a section listing all of them."""
        if not self.ancestor_commit_otl:
            return

        f.write(NTR('[ancestor-list]\n'))
        line_fmt = NTR('{sha1} {change_num}\n')
        for ot in self.ancestor_commit_otl:
            f.write(line_fmt.format(sha1=ot.sha1, change_num=ot.change_num))
        # Blank line terminates the section.
        f.write('\n')
Ejemplo n.º 27
0
def _effective_config(parser, section, defaults):
    """Build the effective configuration for a logger.

    Uses a combination of the configparser instance and default options.
    Returns a dict with only the relevant settings for configuring a Logger
    instance.

    It is here the 'handler' over 'filename' and other such precedence
    rules are enforced.

    Side effect: records `section` in the module-level REPO_LOG_FILES set
    when its log file path contains a %(repo)s placeholder.

    :param parser: instance of ConfigParser providing configuration.
    :param section: section name from which to take logging configuration.
    :param defaults: dict of default settings.

    """
    # 'file' must arrive only via the parser, never via defaults — it is
    # the user-facing alias that overrides 'filename' below.
    assert 'file' not in defaults
    config = defaults.copy()
    # Section settings (when present) win over the parser's DEFAULT section.
    fallback = parser.defaults()
    if parser.has_section(section):
        fallback = parser[section]
    config.update(fallback)
    # Allow configuration 'file' setting to take precedence over 'filename'
    # since it is not one of our defaults.
    if 'file' in config:
        config['filename'] = config.pop('file')
    if 'handler' in config:
        val = config['handler']
        if val.startswith('syslog'):
            # Logging to syslog means no format support.
            config.pop('format', None)
            config.pop('datefmt', None)
        # Logging to a handler means no filename
        config.pop('filename', None)
    elif 'filename' in config:
        # perform variable substitution on file path:
        # %(user)s -> home dir, %(tmp)s -> temp dir, %(repo)s -> repo name
        # from the P4GF_LOG_REPO environment variable ("norepo" when unset).
        fnargs = {}
        fnargs['user'] = os.path.expanduser('~')
        fnargs['tmp'] = tempfile.gettempdir()
        if '%(repo)s' in config['filename']:
            fnargs['repo'] = os.environ.get(p4gf_const.P4GF_LOG_REPO,
                                            NTR("norepo"))
            REPO_LOG_FILES.add(section)
        config['filename'] %= fnargs
    else:
        # default for these is syslog - rather than xml file
        if section in [_auth_keys_section, _audit_section]:
            config['handler'] = NTR('syslog')
        else:
            fpath = _generate_default_name(section)
            config['filename'] = fpath
    # Ensure format/datefmt keys always exist for downstream consumers.
    config.setdefault(NTR('format'), logging.BASIC_FORMAT)
    config.setdefault(NTR('datefmt'), None)
    return config
Ejemplo n.º 28
0
def ssh_key_add(p4, depot_path, keys, action=None):
    """Fingerprint the SSH key stored at depot_path and register it.

    Reads the named depot file, derives the owning user and a key
    fingerprint, formats a line suitable for the SSH configuration
    file in use (SSH2 or OpenSSH), and stores that line in the key
    set under the generated fingerprint.

    Keyword arguments:
    p4         -- P4 API object
    depot_path -- path to keys file
    keys       -- instance of KeyKeeper
    action     -- string describing the action being performed (e.g. 'edit'),
                  defaults to ADD. For debug log only.
    """
    user, key, fp = extract_key_data(p4, depot_path)
    # Without a user name there is nothing to register.
    if not user:
        _print_warn(
            _('Could not extract user name from unrecognized depot path: {depot_path}'
              ).format(depot_path=depot_path))
        return
    # No fingerprint means the file content is not a usable key; only
    # warn if the depot file actually exists.
    if not fp:
        if p4gf_util.depot_file_exists(p4, depot_path):
            _print_warn(
                _("File '{depot_path}' does not conform to a valid SSH key, ignoring..."
                  ).format(depot_path=depot_path))
        return
    action = action or _ADD
    _print_debug(
        _('action {}, user {}, key {}, FP {}').format(action, user, key, fp))
    # $SSH[2]_ORIGINAL_COMMAND is there to get the command being invoked
    # by the client via SSH (e.g. git-upload-pack 'foo') -- we need that
    # in order to take the appropriate action, and for auditing purposes.
    if Ssh2:
        key_file = os.path.join(KEYS_DIR, user, fp.replace(':', '') + NTR('.pub'))
        key_path = os.path.join(SshDirectory, key_file)
        key_dir = os.path.dirname(key_path)
        if not os.path.exists(key_dir):
            os.makedirs(key_dir)
        # Write the key body wrapped at 72 characters, between the
        # SSH2 header and footer lines.
        segments = [key[i:i + 72] for i in range(0, len(key), 72)]
        with open(key_path, 'w') as out:
            out.write(SSH2_HEADER_LINE + '\n')
            for segment in segments:
                out.write(segment + '\n')
            out.write(SSH2_FOOTER_LINE + '\n')
        ln = NTR(
            'Key {file}\nOptions command="p4gf_auth_server.py --user={user} --keyfp={keyfp}'
            ' $SSH2_ORIGINAL_COMMAND"').format(file=key_file, user=user, keyfp=fp)
        # No options are included since not all SSH2 implementations support them.
    else:
        ln = generate_openssh_key(user, fp, key)
    keys.add(fp, user, ln)
Ejemplo n.º 29
0
def _test_write_branch(repo_name, branch, key, value):
    '''
    Unit test hook to see if we actually write the correct values to
    the correct files: locate the repo config section whose
    git-branch-name matches branch, then write key/value there.
    '''
    with p4gf_context.create_context(NTR('p4gf_repo'), None) as ctx:
        config = p4gf_config.get_repo(ctx.p4gf, repo_name)
    name_key = p4gf_config.KEY_GIT_BRANCH_NAME
    # First section whose git-branch-name option equals branch, if any.
    matches = (s for s in config.sections()
               if config.has_option(s, name_key)
               and config.get(s, name_key) == branch)
    section = next(matches, None)
    if section is None:
        print(NTR('branch not found: {}').format(branch))
        return
    _test_write(repo_name, section, key, value)
Ejemplo n.º 30
0
def ensure_depot_gf(p4):
    """Create depot P4GF_DEPOT if not already exists.

    Requires that connection p4 has super permissions.

    Return True if created, False if already exists.
    """
    depot_map = '{depot}/...'.format(depot=p4gf_const.P4GF_DEPOT)
    return ensure_spec(
        p4,
        NTR('depot'),
        spec_id=p4gf_const.P4GF_DEPOT,
        values={
            'Owner':       p4gf_const.P4GF_USER,
            'Description': _('Git Fusion data storage.'),
            'Type':        NTR('local'),
            'Map':         depot_map,
        })
Ejemplo n.º 31
0
def check_and_create_default_p4gf_env_config():
    """If p4gf_env_config threw the MissingConfigPath exception,
    because P4GF_ENV names a non-existing filepath
    then save the required (two) default items
    into the user configured P4GF_ENV environment config file.

    Writes a header (from p4gf_env_config.txt when available) followed
    by an [environment] section containing P4GF_HOME and P4PORT.
    """
    if not Create_P4GF_CONFIG:
        LOG.debug('not creating configuration file')
        return
    LOG.debug('creating missing configuration file')
    Verbosity.report(
        Verbosity.INFO, _("Git Fusion environment var P4GF_ENV = {path} names a non-existing file.")
        .format(path=p4gf_const.P4GF_ENV))
    Verbosity.report(
        Verbosity.INFO, _("Creating {path} with the default required items.")
        .format(path=p4gf_const.P4GF_ENV))
    Verbosity.report(
        Verbosity.INFO, _("Review the file's comments and edit as needed."))
    # This message has no format placeholders; the previous spurious
    # .format(p4gf_const.P4GF_ENV) call was a no-op and has been removed.
    Verbosity.report(
        Verbosity.INFO, _("You may unset P4GF_ENV to use no config file."))
    # interpolation=None leaves '%' in values untouched; optionxform=str
    # preserves option-name case (e.g. P4PORT) instead of lowercasing.
    config = configparser.ConfigParser(interpolation=None,
                                       allow_no_value=True)
    config.optionxform = str
    config.add_section(p4gf_const.SECTION_ENVIRONMENT)
    config.set(p4gf_const.SECTION_ENVIRONMENT, p4gf_const.P4GF_HOME_NAME, p4gf_const.P4GF_HOME)
    Verbosity.report(
        Verbosity.INFO, _("Setting {home_name} = {home} in {env}.")
        .format(home_name=p4gf_const.P4GF_HOME_NAME,
                home=p4gf_const.P4GF_HOME,
                env=p4gf_const.P4GF_ENV))
    config.set(p4gf_const.SECTION_ENVIRONMENT, NTR('P4PORT'), P4PORT)
    Verbosity.report(
        Verbosity.INFO, _("Setting {p4port} = {p4port_value} in {env}.")
        .format(p4port=NTR('P4PORT'),
                p4port_value=P4PORT,
                env=p4gf_const.P4GF_ENV))
    # read_bin_file returns False when the header template is missing;
    # fall back to a placeholder comment rather than failing.
    header = p4gf_util.read_bin_file(NTR('p4gf_env_config.txt'))
    if header is False:
        sys.stderr.write(_('no p4gf_env_config.txt found\n'))
        header = _('# Missing p4gf_env_config.txt file!')
    out = io.StringIO()
    out.write(header)
    config.write(out)
    file_content = out.getvalue()
    out.close()
    p4gf_util.ensure_dir(p4gf_util.parent_dir(p4gf_const.P4GF_ENV))
    with open(p4gf_const.P4GF_ENV, 'w') as f:
        f.write(file_content)
    LOG.debug('created configuration file %s', p4gf_const.P4GF_ENV)
def depot_branch_info_from_config(config):
    """ Return DepotBranchInfo from configparser object."""
    dbi = DepotBranchInfo(_dbid_section(config))
    section = dbi.depot_branch_id
    dbi.root_depot_path = config.get(section, KEY_ROOT_DEPOT_PATH)
    first_parent_branch = None
    first_parent_cl = None
    parent_branches = []
    parent_cls = []
    basis_change_num = None
    basis_map_lines = None
    for option in config.options(section):
        value = config.get(section, option)
        if option == KEY_PARENT_BRANCH_ID:
            first_parent_branch = value
        elif option == KEY_PARENT_CHANGELIST:
            first_parent_cl = value
        elif option == KEY_BASIS_CHANGE_NUM:
            basis_change_num = value
        elif option == KEY_BASIS_MAP_LINES:
            basis_map_lines = p4gf_config.get_view_lines(
                config[section], KEY_BASIS_MAP_LINES)
            # Being clever with parent lists here: parent_branches and
            # parent_cls hold strings carrying their numbered
            # "parent-{}-branch-id/changelist:" option-name prefix, so the
            # two lists alphanumeric-sort identically, keeping each branch
            # id in step with its changelist.
        elif option.endswith(NTR('branch-id')):
            parent_branches.append(option + ':' + value)
        elif option.endswith(NTR('changelist')):
            parent_cls.append(option + ':' + value)

    parent_branches = p4gf_util.alpha_numeric_sort(parent_branches)
    parent_cls = p4gf_util.alpha_numeric_sort(parent_cls)

    # The un-numbered parent entry, if present, comes first.
    if first_parent_branch and first_parent_cl:
        dbi.parent_depot_branch_id_list.append(first_parent_branch)
        dbi.parent_changelist_list.append(first_parent_cl)

    # Strip the option-name prefix back off each sorted entry.
    for idx in range(len(parent_branches)):
        dbi.parent_depot_branch_id_list.append(parent_branches[idx].split(':')[1])
        dbi.parent_changelist_list.append(parent_cls[idx].split(':')[1])

    if basis_change_num is not None:
        dbi.fp_basis_known = True
        dbi.fp_basis_change_num = int(basis_change_num)
        dbi.fp_basis_map_line_list = basis_map_lines

    return dbi
Ejemplo n.º 33
0
    def get_changelists(p4, path, callback, limit=None):
        """Run 'p4 changes' over path and invoke callback per change found.

        p4: initialized P4 object
        path: path + revision specifier, e.g. //depot/main/p4/...@1,#head
        callback: function taking P4Changelist
        limit: optional cap on the number of changes reported (-m flag)

        Returns the number of changelists handled.
        """
        options = ["-m", str(limit)] if limit else []
        cmd = NTR(["changes", "-l"]) + options + [path]
        handler = ChangesHandler(callback)
        with p4gf_util.Handler(p4, handler):
            p4.run(cmd)
        return handler.count
Ejemplo n.º 34
0
def _dict_to_string(d):
    """Convert as_dict()'s result into a multiline string suitable for user display.

    The Rev. line includes the release codeline component only when
    'release-codeline' is present in d; all other lines are identical
    in both variants, so the shared parts are defined once.
    """
    if 'release-codeline' in d:
        release = NTR('{release_year}.{release_sub}.{release_codeline}')
    else:
        release = NTR('{release_year}.{release_sub}')
    template = NTR("""{company}
{copyright}
Rev. {product_abbrev}/""") + release + NTR("""/{patchlevel} ({date_year}/{date_month}/{date_day}).
SHA1: {bin_sha1}
Git: {git}
Python: {python}
P4Python: {p4python}
""")
    return template.format(**d)
Ejemplo n.º 35
0
def _wsgi_app(environ, start_response):
    """
    WSGI application to process the incoming Git client request. This is
    nearly equivalent to p4gf_auth_server.main() with the exception of
    input validation and error handling.

    Reports status and headers via start_response() and returns the
    WSGI response body as a list of bytes.
    """
    p4gf_log.record_http(environ)
    p4gf_version.log_version()
    _log_environ(environ)
    p4gf_version.version_check()
    LOG.debug("processing HTTP request, pid={}".format(os.getpid()))
    # Keep the content type to exactly 'text/plain' so there is at least
    # the remote chance that Git might show our error messages (does not
    # appear to work in practice, however).
    headers = [('Content-Type', 'text/plain')]

    encoding = sys.getfilesystemencoding()
    if encoding == 'ascii':
        # This encoding is wrong and will eventually lead to problems.
        LOG.error("Using 'ascii' file encoding will ultimately result in errors, "
            "please set LANG/LC_ALL to 'utf-8' in web server configuration.")
        start_response(_('500 Internal Server Error'), headers)
        return [b"Filesystem encoding not set to acceptable value.\n"]

    # Sanity check the request.
    for (name, status, msg) in _REQUIRED_HTTP_PARAMS:
        if name not in environ:
            start_response(status, headers)
            return [msg.encode('UTF-8')]

    input_name = environ['wsgi.input']
    # Extract the view_name_git by removing the expected git request suffixes
    path_info = environ['PATH_INFO']
    git_suffixes = ['/info/refs', '/HEAD', '/git-upload-pack', '/git-receive-pack']
    path_end = len(path_info)
    for suffix in git_suffixes:
        try:
            path_end = path_info.index(suffix)
            break
        except ValueError:
            pass
    # slice away the leading slash and the trailing git request suffixes
    view_name_git = path_info[1:path_end]
    # and remove the view_name_git from the front of PATH_INFO
    environ['PATH_INFO'] = path_info[path_end:]
    LOG.debug("new PATH_INFO {0} view_name_git {1}".format(environ['PATH_INFO'], view_name_git))

    if not view_name_git:
        start_response(_('400 Bad Request'), headers)
        msg = _('Missing required repository name in URL\n')
        return [msg.encode('UTF-8')]
    # translate '/' ':' ' ' .. etc .. for internal view_name
    view_name = p4gf_translate.TranslateReponame.git_to_repo(view_name_git)
    LOG.debug("public view_name: {0}   internal view_name: {1}".format(view_name_git, view_name))

    audit_logger = p4gf_server_common.ExceptionAuditLogger()
    p4_closer = p4gf_create_p4.Closer()
    sink = OutputSink()
    temp_deleter = deleting(input_name)
    mirror_closer = unmirror(view_name)
    with audit_logger   \
        , p4_closer     \
        , sink          \
        , temp_deleter  \
        , mirror_closer:
        LOG.debug(p4gf_log.memory_usage())
        start_time = time.time()

        p4gf_util.reset_git_enviro()
        p4 = p4gf_create_p4.create_p4()
        if not p4:
            start_response(_('500 Internal Server Error'), headers)
            return [b"Perforce connection failed\n"]
        LOG.debug("connected to P4: %s", p4)

        p4gf_server_common.check_readiness(p4)
        p4gf_server_common.check_lock_perm(p4)
        if not p4gf_server_common.check_protects(p4):
            p4gf_server_common.raise_p4gf_perm()

        user = environ['REMOTE_USER']
        if p4gf_server_common.run_special_command(view_name, p4, user):
            start_response(_('200 OK'), headers)
            return [sink.readall()]
        command = _get_command(environ)
        if not command:
            start_response(_('400 Bad Request'), headers)
            return [b"Unrecognized service\n"]
        # Other places in the Perforce-to-Git phase will need to know the
        # name of client user, so set that here. As for Git-to-Perforce,
        # that is handled later by setting the REMOTE_USER envar. Notice
        # also that we're setting os.environ and not 'environ'.
        os.environ[p4gf_const.P4GF_AUTH_P4USER] = user
        # Likewise, some code needs a hint that the request is coming over
        # one protocol (HTTP) or the other (SSH).
        os.environ['REMOTE_ADDR'] = environ['REMOTE_ADDR']

        # Initialize the external process launcher early, before allocating lots
        # of memory, and just after all other conditions have been checked.
        p4gf_proc.init()
        # Prepare for possible spawn of GitMirror worker process by forking
        # now before allocating lots of memory.
        p4gf_gitmirror.setup_spawn(view_name)
        # Kick off garbage collection debugging, if enabled.
        p4gf_gc.init_gc()

        # Go no further, create NOTHING, if user not authorized.
        # We use the translated internal view name here for perm authorization
        required_perm = p4gf_server_common.COMMAND_TO_PERM[command]
        view_perm = p4gf_group.ViewPerm.for_user_and_view(p4, user, view_name, required_perm)
        try:
            p4gf_server_common.check_authorization(p4, view_perm, user, command, view_name)
        except p4gf_server_common.CommandError as ce:
            start_response(_('403 Forbidden'), headers)
            return [str(ce).encode('UTF-8')]

        # Create Git Fusion server depot, user, config. NOPs if already created.
        p4gf_init.init(p4)

        before_lock_time = time.time()
        with p4gf_lock.view_lock(p4, view_name) as view_lock:
            after_lock_time = time.time()

            # Create Git Fusion per-repo client view mapping and config.
            init_repo_status = p4gf_init_repo.init_repo(p4, view_name, view_lock)
            if init_repo_status == p4gf_init_repo.INIT_REPO_OK:
                repo_created = True
            elif init_repo_status == p4gf_init_repo.INIT_REPO_EXISTS:
                repo_created = False
            elif init_repo_status == p4gf_init_repo.INIT_REPO_NOVIEW:
                start_response(_('404 Not Found'), headers)
                return [sink.readall()]
            else:
                start_response(_('500 Internal Server Error'), headers)
                return [b"Repository initialization failed\n"]

            # If authorization came from default, not explicit group
            # membership, copy that authorization to a group now. Could
            # not do this until after p4gf_init_repo() has a chance to
            # create not-yet-existing groups.
            if view_perm:
                view_perm.write_if(p4)

            # Now that we have valid git-fusion-user and
            # git-fusion-<view> client, replace our temporary P4
            # connection with a more permanent Context, shared for the
            # remainder of this process.
            with p4gf_context.create_context(view_name, view_lock) as ctx:
                LOG.debug("reconnected to P4, p4gf=%s", ctx.p4gf)
                ctx.log_context()

                # cd into the work directory. Not all git functions react well
                # to --work-tree=xxxx.
                cwd = os.getcwd()
                os.chdir(ctx.view_dirs.GIT_WORK_TREE)

                # Only copy from Perforce to Git if no other process is cloning
                # from this Git repo right now.
                shared_in_progress = p4gf_lock.shared_host_view_lock_exists(ctx.p4, view_name)
                if not shared_in_progress:
                    # Copy any recent changes from Perforce to Git.
                    try:
                        LOG.debug("bare: No git-upload-pack in progress, force non-bare"
                                  " before update Git from Perforce.")
                        p4gf_git.set_bare(False)
                        p4gf_copy_p2g.copy_p2g_ctx(ctx)
                        p4gf_init_repo.process_imports(ctx)

                        # Now is also an appropriate time to clear out any stale Git
                        # Swarm reviews. We're pre-pull, pre-push, time when we've
                        # got exclusive write access to the Git repo,
                        GSReviewCollection.delete_refs_for_closed_reviews(ctx)

                    except p4gf_lock.LockCanceled as lc:
                        LOG.warning(str(lc))
                    except:
                        # Dump failure to log, BEFORE cleanup, just in case
                        # cleanup ALSO fails and throws its own error (which
                        # happens if we're out of memory).
                        LOG.error(traceback.format_exc())

                        if repo_created:
                            # Return to the original working directory to allow the
                            # config code to call os.getcwd() without dying, since
                            # we are about to delete the current working directory.
                            os.chdir(cwd)
                            p4gf_server_common.cleanup_client(ctx, view_name)
                        raise

                try:
                    exclusive = 'upload' not in command
                    is_push   = 'upload' not in command
                    git_caller = functools.partial(_call_git, input_name, environ, ctx)
                    p4gf_call_git.call_git(git_caller, ctx, view_name, view_lock, exclusive)
                    if is_push:
                        GSReviewCollection.post_push(ctx)
                except p4gf_atomic_lock.LockConflict as lc:
                    start_response(_('500 Internal Server Error'), headers)
                    return ["{}".format(lc).encode('UTF-8')]

        p4gf_gc.process_garbage('at end of auth_server')
        if LOG.isEnabledFor(logging.DEBUG):
            end_time = time.time()
            frm = NTR('Runtime: preparation {} ms, lock acquisition {} ms, processing {} ms')
            # BUG FIX: time.time() differences are in seconds; scale by
            # 1000 so the logged values match the 'ms' units in the message.
            LOG.debug(frm.format(1000 * (before_lock_time - start_time),
                                 1000 * (after_lock_time - before_lock_time),
                                 1000 * (end_time - after_lock_time)))
        return []
Ejemplo n.º 36
0
def main(poll_only=False):
    """set up repo for a view
       view_name_git    is the untranslated repo name
       view_name        is the translated repo name

    Returns a process exit code (0 on success).
    """
    p4gf_proc.install_stack_dumper()
    _log_environ(os.environ)
    with p4gf_server_common.ExceptionAuditLogger()\
    , p4gf_create_p4.Closer():
        LOG.debug(p4gf_log.memory_usage())
        start_time = time.time()
        args = parse_args(sys.argv[1:])
        if not args:
            return 1

        is_push = 'upload' not in args.command[0]

        # Record the p4 user in environment. We use environment to pass to
        # git-invoked hook. We don't have to set ctx.authenticated_p4user because
        # Context.__init__() reads it from environment, which we set here.
        os.environ[p4gf_const.P4GF_AUTH_P4USER] = args.user

        # view_name_git    is the untranslated repo name
        # view_name        is the translated repo name

        # print "args={}".format(args)
        view_name_git = args.options[-1]
        # translate '/' ':' ' '  .. etc .. for internal view_name
        view_name = p4gf_translate.TranslateReponame.git_to_repo(view_name_git)
        LOG.debug("public view_name: {0}   internal view_name: {1}".
                format(view_name_git, view_name))


        p4gf_util.reset_git_enviro()
        p4 = p4gf_create_p4.create_p4()
        if not p4:
            return 2
        LOG.debug("connected to P4: %s", p4)

        p4gf_server_common.check_readiness(p4)

        p4gf_server_common.check_lock_perm(p4)

        if not p4gf_server_common.check_protects(p4):
            p4gf_server_common.raise_p4gf_perm()

        if p4gf_server_common.run_special_command(view_name, p4, args.user):
            return 0

        # Initialize the external process launcher early, before allocating lots
        # of memory, and just after all other conditions have been checked.
        p4gf_proc.init()
        # Prepare for possible spawn of GitMirror worker process by forking
        # now before allocating lots of memory.
        p4gf_gitmirror.setup_spawn(view_name)
        # Kick off garbage collection debugging, if enabled.
        p4gf_gc.init_gc()

        if poll_only:
            view_perm = None
        else:
            # Go no further, create NOTHING, if user not authorized.
            # We use the translated internal view name here for perm authorization
            required_perm = p4gf_server_common.COMMAND_TO_PERM[args.command[0]]
            view_perm = p4gf_group.ViewPerm.for_user_and_view(p4, args.user,
                        view_name, required_perm)
            p4gf_server_common.check_authorization(p4, view_perm, args.user, args.command[0],
                                                   view_name)

        # Create Git Fusion server depot, user, config. NOPs if already created.
        p4gf_init.init(p4)

        write_motd()

        # view_name is the internal view_name (identical when notExist special chars)
        before_lock_time = time.time()
        with p4gf_lock.view_lock(p4, view_name) as view_lock:
            after_lock_time = time.time()

            # Create Git Fusion per-repo client view mapping and config.
            #
            # NOPs if already created.
            # Create the empty directory that will hold the git repo.
            init_repo_status = p4gf_init_repo.init_repo(p4, view_name, view_lock)
            if init_repo_status == p4gf_init_repo.INIT_REPO_OK:
                repo_created = True
            elif init_repo_status == p4gf_init_repo.INIT_REPO_EXISTS:
                repo_created = False
            else:
                return 1

            # If authorization came from default, not explicit group
            # membership, copy that authorization to a group now. Could
            # not do this until after p4gf_init_repo() has a chance to
            # create not-yet-existing groups.
            if view_perm:
                view_perm.write_if(p4)

            # Now that we have valid git-fusion-user and
            # git-fusion-<view> client, replace our temporary P4
            # connection with a more permanent Context, shared for the
            # remainder of this process.
            with p4gf_context.create_context(view_name, view_lock) as ctx:
                LOG.debug("reconnected to P4, p4gf=%s", ctx.p4gf)

                # Find directory paths to feed to git.
                ctx.log_context()

                # cd into the work directory. Not all git functions react well
                # to --work-tree=xxxx.
                cwd = os.getcwd()
                os.chdir(ctx.view_dirs.GIT_WORK_TREE)

                # Only copy from Perforce to Git if no other process is cloning
                # from this Git repo right now.
                shared_in_progress = p4gf_lock.shared_host_view_lock_exists(ctx.p4, view_name)
                if not shared_in_progress:
                    # Copy any recent changes from Perforce to Git.
                    try:
                        LOG.debug("bare: No git-upload-pack in progress, force non-bare"
                                  " before update Git from Perforce.")
                        p4gf_git.set_bare(False)
                        p4gf_copy_p2g.copy_p2g_ctx(ctx)
                        p4gf_init_repo.process_imports(ctx)

                        # Now is also an appropriate time to clear out any stale Git
                        # Swarm reviews. We're pre-pull, pre-push, time when we've
                        # got exclusive write access to the Git repo,
                        GSReviewCollection.delete_refs_for_closed_reviews(ctx)

                    except p4gf_lock.LockCanceled as lc:
                        LOG.warning(str(lc))
                    except:
                        # Dump failure to log, BEFORE cleanup, just in case
                        # cleanup ALSO fails and throws its own error (which
                        # happens if we're out of memory).
                        LOG.error(traceback.format_exc())

                        if repo_created:
                            # Return to the original working directory to allow the
                            # config code to call os.getcwd() without dying, since
                            # we are about to delete the current working directory.
                            os.chdir(cwd)
                            p4gf_server_common.cleanup_client(ctx, view_name)
                        raise

                if poll_only:
                    code = os.EX_OK
                else:

                    git_caller = functools.partial(_call_git, args, ctx)
                    try:

                        # Deep in call_git(), we grab an 'p4 reviews' lock on
                        # ctx.clientmap's LHS. Switch that clientmap to our
                        # full union view to prevent simultaneous 'git push'es
                        # from clobbering each other in some shared depot
                        # branch. Must include all lightweight branches, too.
                        ctx.switch_client_view_to_union()

                        exclusive = 'upload' not in args.command[0]
                        code = p4gf_call_git.call_git(
                                git_caller, ctx, view_name, view_lock, exclusive)
                        if is_push:
                            GSReviewCollection.post_push(ctx)
                    except p4gf_atomic_lock.LockConflict as lc:
                        sys.stderr.write("{}\n".format(lc))
                        code = os.EX_SOFTWARE

            p4gf_gc.process_garbage(NTR('at end of auth_server'))
            if LOG.isEnabledFor(logging.DEBUG):
                end_time = time.time()
                frm = NTR("Runtime: preparation {} ms, lock acquisition {} ms,"
                          " processing {} ms")
                # BUG FIX: time.time() differences are in seconds; scale by
                # 1000 so the logged values match the 'ms' units in the message.
                LOG.debug(frm.format(1000 * (before_lock_time - start_time),
                                     1000 * (after_lock_time - before_lock_time),
                                     1000 * (end_time - after_lock_time)))
        return code