def create_repo_client(p4, view_name, client_name, client_root, view, stream):
    '''Create a Git Fusion repo client.'''
    desc = (_("Created by Perforce Git Fusion for work in '{view}'.").format(
        view=p4gf_translate.TranslateReponame.repo_to_git(view_name)))
    # if creating from a stream, set 'Stream' but not 'View'
    # otherwise, set 'View' but not 'Stream'
    if stream:
        p4gf_util.set_spec(p4,
                           'client',
                           spec_id=client_name,
                           values={
                               'Owner': p4gf_const.P4GF_USER,
                               'LineEnd': NTR('unix'),
                               'Root': client_root,
                               'Options': CLIENT_OPTIONS,
                               'Host': None,
                               'Stream': stream,
                               'Description': desc
                           })
    else:
        p4gf_util.set_spec(p4,
                           'client',
                           spec_id=client_name,
                           values={
                               'Owner': p4gf_const.P4GF_USER,
                               'LineEnd': NTR('unix'),
                               'View': view,
                               'Root': client_root,
                               'Options': CLIENT_OPTIONS,
                               'Host': None,
                               'Description': desc
                           })

    LOG.debug("Successfully created Git Fusion client %s", client_name)
    def calc_cmd_line_vars(ctx, fe_commit, branch_id, jobs, spec_file_path):
        '''
        Return our mapping of command-line variable substitutions,
        populating if necessary.

        DANGER: Little Bobby Tables! Sanitize your shell inputs!

        These become strings in a command. Watch out for any input
        that a user can control such as repo name or jobs list.
        '''
        branch = ctx.branch_dict()[branch_id]
        git_branch_name = _or_space(branch.git_branch_name)
        client_host = _or_space(p4gf_protect.get_remote_client_addr())

        r = {
            '%repo%': _sanitize(ctx.config.view_name),
            '%sha1%': fe_commit['sha1'],
            '%branch_id%': branch_id,
            '%git-branch-name%': _sanitize(git_branch_name),
            '%client%': ctx.p4.client,
            '%clienthost%': client_host,
            '%serverport%': ctx.p4.port,
            '%quote%': '"',
            '%user%': fe_commit['owner'],
            '%formfile%': spec_file_path,
            '%formname%': NTR('new'),
            '%formtype%': NTR('change'),
            '%jobs%': _sanitize(' '.join(jobs)) if jobs else ''
        }

        return r
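# Usage sketch (not from the original module; template text and helper name
# are illustrative assumptions): applying a substitution map like the one
# returned above to a trigger-style command template.
def _expand_cmd_line(template, subs):
    '''Replace every %var% key in template with its value.'''
    for key, value in subs.items():
        template = template.replace(key, str(value))
    return template

# _expand_cmd_line('notify.sh %quote%%repo%%quote% --by %user%',
#                  {'%repo%': 'team/proj', '%user%': 'alice', '%quote%': '"'})
# -> 'notify.sh "team/proj" --by alice'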
def _parse_argv():
    '''Convert argv into a usable dict. Dump usage/help and exit if necessary.'''
    help_txt = p4gf_util.read_bin_file('p4gf_init_repo.help.txt')
    if help_txt is False:
        help_txt = _("Missing '{}' file!").format(
            NTR('p4gf_init_repo.help.txt'))
    parser = p4gf_util.create_arg_parser(
        desc=_('Configure and populate Git Fusion repo.'),
        epilog=None,
        usage=_('p4gf_init_repo.py [options] <name>'),
        help_custom=help_txt)
    parser.add_argument('--start', metavar="")
    parser.add_argument('--noclone', action=NTR('store_true'))
    parser.add_argument('--config')
    parser.add_argument('--p4client')
    parser.add_argument(NTR('view'), metavar=NTR('view'))
    parser.add_argument('--charset')
    parser.add_argument('--enablemismatchedrhs', action=NTR('store_true'))
    args = parser.parse_args()
    if args.noclone and args.start:
        _print_stderr(_('Cannot use both --start and --noclone'))
        sys.exit(1)
    if args.config and args.charset:
        _print_stderr(_('Cannot use both --config and --charset'))
        sys.exit(1)
    if args.config and args.p4client:
        _print_stderr(_('Cannot use both --config and --p4client'))
        sys.exit(1)
    LOG.debug("args={}".format(args))
    return args
def delete_all_local(args, p4, metrics):
    """Remove "everything" as if from a read-only Git Fusion instance.

    :param args: parsed command line arguments
    :param p4: Git user's Perforce client
    :param metrics: for counting delete actions.

    Similar to deleting everything from the master server, except that very
    little is removed from the Perforce server (e.g. counters and files).
    In short, only the client and local directories are removed.

    """
    p4.user = p4gf_const.P4GF_USER
    print(_('Connected to {P4PORT}').format(P4PORT=p4.port))
    client_name = p4gf_util.get_object_client_name()
    localroot = get_p4gf_localroot(p4)
    if not args.delete:
        if localroot:
            if args.no_obliterate:
                print(NTR('p4 sync -f {}...#none').format(localroot))
            else:
                print(NTR('p4 client -f -d {}').format(client_name))
                print(NTR('rm -rf {}').format(localroot))
    else:
        if localroot:
            if not args.no_obliterate:
                # Need this in order to use --gc later on
                p4gf_util.p4_client_df(p4, client_name)
                metrics.clients += 1
                print_verbose(
                    args,
                    _("Deleting client '{client_name}'s workspace...").format(
                        client_name=client_name))
                _remove_local_root(localroot)
def parse_argv():
    """Convert command line into a usable dict."""
    usage = _("""p4gf_rollback.py [options] --change-num NNN --repo <repo-name>
options:
    --p4port/-p     Perforce server
    --p4user/-u     Perforce user
    --execute/-y    yes, do it (normally just previews/reports)
    --obliterate    delete history from Perforce
    --verbose/-v    write more to console
    --quiet/-q      write nothing but errors to console
""")
    parser = p4gf_util.create_arg_parser(
          help_file    = NTR('p4gf_rollback.help.txt')
        , usage        = usage
        , add_p4_args  = True
        , add_log_args = True
        , add_debug_arg= True
        )
    parser.add_argument('--change-num', metavar="NNN",         required=True)
    parser.add_argument('--repo',       metavar="<repo-name>", required=True)
    parser.add_argument('--execute', '-y', action=NTR("store_true"))
    parser.add_argument('--obliterate', action=NTR("store_true"))

    args = parser.parse_args()
    p4gf_util.apply_log_args(args, LOG)
    LOG.debug("args={}".format(args))
    args.change_num = int(args.change_num)
    return args
Example #6
def main():
    '''
    Parse the command-line arguments and print a configuration.
    '''
    p4gf_util.has_server_id_or_exit()
    p4gf_client = p4gf_util.get_object_client_name()
    p4 = p4gf_create_p4.create_p4(client=p4gf_client)
    if not p4:
        sys.exit(1)
    desc = _("""Display the effective global or repository configuration.
All comment lines are elided and formatting is normalized per the
default behavior of the configparser Python module.
The default configuration options will be produced if either of the
configuration files is missing.
""")
    parser = p4gf_util.create_arg_parser(desc=desc)
    parser.add_argument(
        NTR('repo'),
        metavar=NTR('R'),
        nargs='?',
        default='',
        help=_('name of the repository, or none to display global.'))
    args = parser.parse_args()
    if args.repo:
        cfg = get_repo(p4, args.repo)
    else:
        cfg = get_global(p4)
    if not cfg:
        print(_('Unable to read configuration file!'))
        sys.exit(1)
    cfg.write(sys.stdout)
def _install_signal_handler(handler):
    '''
    Install the given signal handler (either a function or one of the
    signal module constants) for all of the terminating signals.
    It is probably a good idea to use _signal_restorer to preserve and
    later restore any existing signal handlers.
    '''
    if LOG.isEnabledFor(logging.DEBUG):
        if callable(handler):
            label = handler.__qualname__
        elif isinstance(handler, int):
            if handler == signal.SIG_DFL:
                label = NTR('default')
            elif handler == signal.SIG_IGN:
                label = NTR('ignore')
            else:
                label = str(handler)
        else:
            label = str(handler)
        LOG.debug("_install_signal_handler({}) for pid={}".format(
            label, os.getpid()))
    signal.signal(signal.SIGHUP, handler)
    signal.signal(signal.SIGINT, handler)
    signal.signal(signal.SIGQUIT, handler)
    signal.signal(signal.SIGTERM, handler)
    signal.signal(signal.SIGTSTP, handler)
Example #8
def _masked_bin(x, mask):
    '''0b000111000 ==> "111" '''
    mask_str = NTR('{:b}').format(mask)
    bit_str = NTR('{:b}').format(mask & x)
    first_1_index = mask_str.find('1')
    last_1_index = mask_str.rfind('1')
    return bit_str[first_1_index:last_1_index + 1]
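# Illustrative check (not part of the original module) of the corrected
# _masked_bin() above; inputs are chosen so the top masked bit of x is set,
# keeping bit_str and mask_str the same length.
assert _masked_bin(0b000111000, 0b000111000) == '111'
assert _masked_bin(0b000101000, 0b000111000) == '101'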
def depot_branch_info_from_config(config):
    """ Return DepotBranchInfo from configparser object"""
    dbi = DepotBranchInfo()
    dbi.depot_branch_id = _dbid_section(config)
    dbi.root_depot_path = config.get(dbi.depot_branch_id, "root-depot-path")
    firstbranch = None
    firstcl     = None
    branch      = []
    cl          = []
    for option in config.options(dbi.depot_branch_id):
        value = config.get(dbi.depot_branch_id, option)
        if option == KEY_PARENT_BRANCH_ID:
            firstbranch = value
        elif option == KEY_PARENT_CHANGELIST:
            firstcl = value
        elif option.endswith(NTR('branch-id')):
            branch.append(option + ':' + value)
        elif option.endswith(NTR('changelist')):
            cl.append(option + ':' + value)

    branch = p4gf_util.alpha_numeric_sort(branch)
    cl     = p4gf_util.alpha_numeric_sort(cl)

    if firstbranch and firstcl:
        dbi.parent_depot_branch_id_list.append(firstbranch)
        dbi.parent_changelist_list.append(firstcl)

    for i in range(len(branch)):
        dbi.parent_depot_branch_id_list.append(branch[i].split(':')[1])
        dbi.parent_changelist_list.append(cl[i].split(':')[1])

    return dbi
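# Illustrative sketch of the kind of configparser section consumed above.
# Only 'root-depot-path' is taken from the code; the parent option names are
# assumptions based on the 'branch-id'/'changelist' suffix checks, which keep
# the two sorted lists in step with each other.
import configparser

_SAMPLE = '''
[depot-branch-id-sample]
root-depot-path = //.git-fusion/branches/sample
parent-1-branch-id = dbid-parent-one
parent-1-changelist = 101
parent-2-branch-id = dbid-parent-two
parent-2-changelist = 202
'''
_config = configparser.ConfigParser()
_config.read_string(_SAMPLE)
_section = _config['depot-branch-id-sample']
# sorted options ending in 'branch-id'  -> ['parent-1-branch-id', 'parent-2-branch-id']
# sorted options ending in 'changelist' -> ['parent-1-changelist', 'parent-2-changelist']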
Example #10
def ssh2_key_generator(itr):
    """A generator function that produces (fingerprint, username, data)
    tuples suitable for writing to the authorized keys file. Reads lines
    from the given line generator, which is assumed to yield results in
    a format common to several SSH2 implementations.
    """
    # Typical "SSH2" authorization file stores related information on separate
    # lines, need to piece it back together again.
    try:
        # Ugly code, but I want to iterate the file line by line while also
        # having look-ahead behavior since user information may be split
        # across multiple lines. What's more, not all lines are managed by
        # this script, so must allow for arbitrary lines of text.
        while True:
            line = next(itr)
            fp = NO_FP
            user = ''
            if line.lower().startswith(NTR('key ')):
                try:
                    # read the next line, possibly finding "Options"
                    ln = next(itr)
                    if ln and ln.lower().startswith(NTR('options ')):
                        fp, user = extract_fp_and_user(ln)
                    yield (fp, user, line)
                    yield (fp, user, ln)
                except StopIteration:
                    yield (fp, user, line)
            else:
                yield (fp, user, line)
    except StopIteration:
        return
Example #11
def _apply_default_config(parser):
    """Given a ConfigParser instance, merge with the default logging settings.

    Produce the effective logging configuration and return as a tuple of
    the general, audit, and auth_keys settings.
    """
    # Configure the general logging
    general_config = NTR({
        # New default is to write to separate files (GF-2729).
        # 'filename': os.environ['HOME'] + '/p4gf_log.txt',
        'format': '%(asctime)s %(name)-10s %(levelname)-8s %(message)s',
        'datefmt': '%m-%d %H:%M:%S',
        'root': 'WARNING',
    })
    general_config = _effective_config(parser, _general_section,
                                       general_config)

    # Configure the audit logging (defaults to standard syslog)
    audit_config = {'root': NTR('warning')}
    audit_config = _effective_config(parser, _audit_section, audit_config)
    if not ('filename' in audit_config or 'handler' in audit_config):
        audit_config['handler'] = NTR('syslog')

    # Configure the authorized_keys logging (defaults to standard syslog)
    auth_keys_config = {'root': NTR('warning')}
    auth_keys_config = _effective_config(parser, _auth_keys_section,
                                         auth_keys_config)
    if not ('filename' in auth_keys_config or 'handler' in auth_keys_config):
        auth_keys_config['handler'] = NTR('syslog')
    return (general_config, audit_config, auth_keys_config)
    def to_log_level(self, level):
        """Debugging dump."""

        # Single line dump
        fmt = NTR('Row: {sha1:<7} {mode:<6} {p4_request:<6} {p4filetype:<10}'
                  ' {gwt_path:<10} {depot_path:<10}')

        topline = fmt.format(
                           sha1       = p4gf_util.abbrev(self.sha1) \
                                        if self.sha1 else '0000000'
                         , mode       = p4gf_util.quiet_none(
                                        p4gf_util.mode_str(  self.mode))
                         , gwt_path   = self.gwt_path
                         , depot_path = self.depot_path
                         , p4_request = p4gf_util.quiet_none(self.p4_request)
                         , p4filetype = p4gf_util.quiet_none(self.p4filetype)
                         )

        # Detail each cell at DEBUG2 not DEBUG3. DEBUG2 produces one-
        # line dumps for each cell, which should be useful. DEBUG3 will
        # produce multi-line dumps of each cell, which is VERY noisy.
        if level <= logging.DEBUG2:
            # Multi-line dump.
            lines = [topline]
            for i, cell in enumerate(self.cells):
                if not cell:
                    lines.append(NTR('  {i}: {cell}').format(i=i, cell=cell))
                else:
                    lines.append(
                        NTR('  {i}: {cell}').format(
                            i=i, cell=cell.to_log_level(level)))
            return '\n'.join(lines)
        else:
            return topline
Example #13
def _log_cmd_result(result, expect_error):
    """
    Record the command results in the log.

    If command completed successfully, record output at DEBUG level so that
    folks can suppress it with cmd:INFO. But if command completed with error
    (non-zero return code), then record its output at ERROR level so that
    cmd:INFO users still see it.
    """
    ec = result['ec']
    out = result['out']
    err = result['err']
    if (not ec) or expect_error:
        # Things going well? Don't care if not?
        # Then log only if caller is REALLY interested.
        log_level = logging.DEBUG
    else:
        # Things going unexpectedly poorly? Log almost all of the time.
        log_level = logging.ERROR
        log = logging.getLogger('cmd.cmd')
        if not log.isEnabledFor(logging.DEBUG):
            # We did not log the command. Do so now.
            log.log(log_level, result['cmd'])
    logging.getLogger('cmd.exit').log(log_level, NTR("exit: {0}").format(ec))
    out_log = logging.getLogger('cmd.out')
    out_log.debug(NTR("out : ct={0}").format(len(out)))
    if len(out) and out_log.isEnabledFor(logging.DEBUG3):
        out_log.debug3(NTR("out :\n{0}").format(out))
    if len(err):
        logging.getLogger('cmd.err').log(log_level,
                                         NTR("err :\n{0}").format(err))
def install_hook(git_dir):
    """Install Git Fusion's pre-receive hook"""

    hook_path = os.path.join(git_dir, NTR('hooks'), NTR('pre-receive'))
    with open(hook_path, 'w') as f:
        f.write(hook_file_content())
    os.chmod(hook_path, 0o755)  # -rwxr-xr-x
Example #15
def as_string_extended(*, p4=None, args=None, include_checksum=False):
    """Return a page-long dump of Git Fusion, P4D, and uname info."""
                        # Git Fusion version info, including Git and P4Python.
    a = as_string(include_checksum)
    l = []
                        # Git Fusion server OS version: uname -a
    l.append(NTR('uname: {}').format(uname()))
    l.append(NTR('Git Fusion path: {}').format(os.path.dirname(os.path.realpath(__file__))))
    l.append(_get_lsb_release())

                        # P4PORT, if supplied
    if p4:
        l.append(_('Perforce server address: {p4port}').format(p4port=p4.port))

                        # 'p4 info', if we can get it.
    try:
        _p4 = _create_p4(p4=p4, args=args)

                    # Run 'p4 info' un-tagged to get human-friendly
                    # server info labels.
        l.append(NTR("p4 info:"))
        l.extend(p4gf_p4cache.fetch_info(_p4, tagged=False))

                    # Run 'p4 info' a SECOND time, tagged, to get
                    # the "unicode" setting that untagged omits.
        u = p4gf_p4cache.fetch_info(_p4, tagged=True).get("unicode", _("disabled"))
        l.append(_("Unicode: {value}").format(value=u))
    except P4Exception:
        pass
    return a + "\n".join(l) + "\n"
    def list_for_user(p4, user):
        '''build list of repos visible to user'''
        result = RepoList()

        for view in p4gf_util.view_list(p4):
            #check user permissions for view
            # PERM_PUSH will avoid checking the repo config file for read-permission-check = user
            view_perm = p4gf_group.ViewPerm.for_user_and_view(
                p4, user, view, p4gf_group.PERM_PUSH)
            #sys.stderr.write("view: {}, user: {}, perm: {}".format(view, user, view_perm))
            if view_perm.can_push():
                perm = NTR('push')
            elif view_perm.can_pull():
                perm = NTR('pull')
            else:
                continue

            config = p4gf_config.get_repo(p4, view)
            charset = config.get(p4gf_config.SECTION_REPO,
                                 p4gf_config.KEY_CHARSET,
                                 fallback='')
            desc = config.get(p4gf_config.SECTION_REPO,
                              p4gf_config.KEY_DESCRIPTION,
                              fallback='')
            result.repos.append((view, perm, charset, desc))

        result.repos.sort(key=lambda tup: tup[0])
        return result
Example #17
def main():
    """Validate the configuration for one or more repositories."""
    # pylint:disable=too-many-branches
    desc = _("Report on the validity of a repository configuration.")
    parser = p4gf_util.create_arg_parser(desc)
    parser.add_argument('-a', '--all', action='store_true',
                        help=_('process all known Git Fusion repositories'))
    parser.add_argument(NTR('repos'), metavar=NTR('repo'), nargs='*',
                        help=_('name of repository or file to be validated'))
    args = parser.parse_args()

    # Check that either --all, or 'repos' was specified, but not both.
    if not args.all and len(args.repos) == 0:
        sys.stderr.write(_('Missing repo names; try adding --all option.\n'))
        sys.exit(2)
    if args.all and len(args.repos) > 0:
        sys.stderr.write(_('Ambiguous arguments. Choose --all or a repo name.\n'))
        sys.exit(2)

    with p4gf_create_p4.Closer():
        p4 = p4gf_create_p4.create_p4_temp_client()
        if not p4:
            sys.exit(2)
        # Sanity check the connection (e.g. user logged in?) before proceeding.
        try:
            p4.fetch_client()
        except P4.P4Exception as e:
            sys.stderr.write(_('P4 exception occurred: {exception}').format(exception=e))
            sys.exit(1)

        p4gf_branch.init_case_handling(p4)

        if args.all:
            repos = p4gf_util.repo_config_list(p4)
            if len(repos) == 0:
                print(_('No Git Fusion repositories found, nothing to do.'))
                sys.exit(0)
        else:
            repos = args.repos

        for repo in repos:
            if os.path.exists(repo):
                print(_("Processing file {repo_name}...").format(repo_name=repo))
                try:
                    config = p4gf_config.RepoConfig.from_local_file(repo, p4, repo)
                except p4gf_config.ConfigLoadError as e:
                    sys.stderr.write("{}\n".format(e))
                    continue
                except p4gf_config.ConfigParseError as e:
                    sys.stderr.write("{}\n".format(e))
                    continue
            else:
                repo_name = p4gf_translate.TranslateReponame.git_to_repo(repo)
                print(_("Processing repository {repo_name}...").format(repo_name=repo_name))
                try:
                    config = p4gf_config.RepoConfig.from_depot_file(repo_name, p4)
                except p4gf_config.ConfigLoadError as err:
                    sys.stderr.write("{}\n".format(err))
                    continue
            if Validator(config, p4).is_valid():
                print(_("ok"))
            print("")
Example #18
def main():
    """Parse the command-line arguments and report on locks."""
    desc = _(DESCRIPTION)
    parser = p4gf_util.create_arg_parser(desc=desc)
    parser.add_argument(
        NTR('--config'),
        metavar=NTR('config'),
        help=_('Path to Git Fusion p4gf_config file (required)'),
        required=True)
    parser.add_argument('-u',
                        '--p4user',
                        metavar='p4user',
                        help=_('Perforce user'))
    parser.add_argument('-p',
                        '--p4port',
                        metavar='p4port',
                        help=_('Perforce server'))
    parser.add_argument('--locale',
                        metavar='locale',
                        default='en_US.UTF-8',
                        help=_('system locale setting'))
    args = parser.parse_args()

    need_serverid = False
    try:
        p4gf_util.get_server_id()
    except:  # pylint: disable=W0702
        need_serverid = True

    # If connect args not passed, check that the environment is set.
    if not args.p4port:
        if 'P4PORT' not in os.environ and 'P4GF_ENV' not in os.environ:
            print(
                _("Neither --p4port is an argument nor are P4GF_ENV and P4PORT in the environment."
                  ))
            sys.exit(0)
        if 'P4PORT' in os.environ:
            args.p4port = os.environ['P4PORT']
    else:
        # Set the requested port for Git Fusion's environment
        os.environ['P4PORT'] = args.p4port

    if not args.p4user:
        if 'P4USER' not in os.environ:
            print(
                _("Neither --p4user is an argument nor is P4USER in the environment."
                  ))
            sys.exit(0)
        else:
            args.p4user = os.environ['P4USER']
    else:
        # Set the requested user for Git Fusion's environment
        os.environ['P4USER'] = args.p4user

    repo_size = RepoSize(args.config, args.p4port, args.p4user, args.locale,
                         need_serverid)
    repo_size.estimate_size()
    repo_size.report()
Example #19
def _deb_gitact(x):
    '''Debugging converter from int to P4S string.'''
    bits = []
    bits.append(NTR('A') if A & x & ~GHOST_BIT else '.')
    bits.append(NTR('M') if M & x & ~GHOST_BIT else '.')
    bits.append(NTR('D') if D & x & ~GHOST_BIT else '.')
    bits.append(NTR('T') if T & x & ~GHOST_BIT else '.')
    bits.append(NTR('N') if N & x & ~GHOST_BIT else '.')
    return ''.join(bits)
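# Hypothetical standalone illustration (flag values assumed; the real module
# defines A, M, D, T, N, and GHOST_BIT elsewhere) of the output format: one
# character per Git action, '.' where the bit is clear.
_A, _M, _D, _T, _N, _GHOST = 0x01, 0x02, 0x04, 0x08, 0x10, 0x80000000

def _deb_gitact_demo(x):
    flags = (('A', _A), ('M', _M), ('D', _D), ('T', _T), ('N', _N))
    return ''.join(c if bit & x & ~_GHOST else '.' for c, bit in flags)

# _deb_gitact_demo(_A | _D) -> 'A.D..'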
Example #20
    def _add_parent(self, parent_commit, keyword=NTR('from')):
        '''Add one parent to the commit we're currently building.'''
        # Parent is either SHA1 of an existing commit or mark of a commit
        # created earlier in this import operation. Assume a length of
        # 40 indicates the former and mark ids will always be shorter.
        if isinstance(parent_commit, str) and len(parent_commit) == 40:
            self.__append(NTR('{keyword} {sha1}\n').format(keyword=keyword, sha1=parent_commit))
        else:
            self.__append(NTR('{keyword} :{mark}\n').format(keyword=keyword, mark=parent_commit))
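# Standalone illustration (helper name invented) of the two git-fast-import
# parent forms emitted above: a 40-character string is treated as a SHA-1,
# anything else as a fast-import mark.
def _parent_line(parent_commit, keyword='from'):
    if isinstance(parent_commit, str) and len(parent_commit) == 40:
        return '{} {}\n'.format(keyword, parent_commit)
    return '{} :{}\n'.format(keyword, parent_commit)

# _parent_line('a' * 40)    -> 'from aaaa...aaaa\n'
# _parent_line(17, 'merge') -> 'merge :17\n'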
Example #21
    def filter_paths(self, blobs):
        """Run list of paths through filter and set list of paths that don't pass."""
        # check against one map for read, one for write
        # if check fails, figure out if it was the view map or the protects
        # that caused the problem and report accordingly
        self.author_denied = []
        self.pusher_denied = []
        self.foruser_denied = []
        self.fusion_denied = []
        self.unmapped = []
        c2d = P4.Map.RIGHT2LEFT

        LOG.debug('filter_paths() write_filter: %s', self.write_filter)
        for blob in blobs:
            gwt_path = self.ctx.gwt_path(blob['path'])
            topath_c = gwt_path.to_client()
            topath_d = gwt_path.to_depot()

            LOG.debug('filter_paths() topath_d: %s', topath_d)
            # for all actions, need to check write access for dest path
            result = "  "  # zum loggen
            if topath_d and P4GF_DEPOT_OBJECTS_RE.match(topath_d):
                LOG.debug('filter_paths() topath_d in //.git-fusion/objects')
                continue
            # do not require user write access to //.git-fusion/branches
            if topath_d and P4GF_DEPOT_BRANCHES_RE.match(topath_d):
                LOG.debug('filter_paths() topath_d in //.git-fusion/branches')
                continue
            if not self.write_filter.includes(topath_c, c2d):
                if not self.view_map.includes(topath_c, c2d):
                    self.unmapped.append(topath_c)
                    result = NTR('unmapped')
                elif not (self.ignore_author_perms
                          or self.write_protect_author.includes(topath_d)):
                    self.author_denied.append(topath_c)
                    result = NTR('author denied')
                elif (self.write_protect_pusher
                      and not self.write_protect_pusher.includes(topath_d)):
                    self.pusher_denied.append(topath_c)
                    result = NTR('pusher denied')
                elif (self.write_protect_foruser
                      and not self.write_protect_foruser.includes(topath_d)):
                    self.foruser_denied.append(topath_c)
                    result = NTR('foruser denied')
                elif not self.write_protect_fusion.includes(topath_d):
                    self.fusion_denied.append(topath_c)
                    result = NTR('Git Fusion denied')
                else:
                    result = "?"
                LOG.error('filter_paths() {:<13} {}, {}, {}'.format(
                    result, blob['path'], topath_d, topath_c))
            elif LOG.isEnabledFor(logging.DEBUG):
                LOG.debug('filter_paths() topath_c in write_filter: %s',
                          topath_c)
    def _protect_dict_to_str(cls, pdict):
        """
        Format one protection line as dictionary to string.
        """
        excl = '-' if 'unmap' in pdict else ''
        if NTR('user') in pdict:
            user = NTR('user ') + pdict['user']
        else:
            user = NTR('group ') + pdict['group']
        return "{0} {1} {2} {3}{4}".format(pdict['perm'], user, pdict['host'],
                                           excl, pdict['depotFile'])
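    # Illustrative call (not in the original module), with a dict shaped like
    # a 'p4 protects' tagged result:
    #   _protect_dict_to_str(None, {'perm': 'write', 'user': 'alice',
    #                               'host': '*', 'depotFile': '//depot/...'})
    # returns 'write user alice * //depot/...'.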
Example #23
    def _write_ancestor_commit_list(self, f):
        """If we have any ancestor commits, write a section listing all of them."""
        if not self.ancestor_commit_otl:
            return

        f.write(NTR('[ancestor-list]\n'))
        for ot in self.ancestor_commit_otl:
            f.write(
                NTR('{sha1} {change_num}\n').format(sha1=ot.sha1,
                                                    change_num=ot.change_num))
        f.write('\n')
Example #24
def _effective_config(parser, section, defaults):
    """Build the effective configuration for a logger.

    Uses a combination of the configparser instance and default options.
    Returns a dict with only the relevant settings for configuring a Logger
    instance.

    It is here the 'handler' over 'filename' and other such precedence
    rules are enforced.

    :param parser: instance of ConfigParser providing configuration.
    :param section: section name from which to take logging configuration.
    :param defaults: dict of default settings.

    """
    assert 'file' not in defaults
    config = defaults.copy()
    fallback = parser.defaults()
    if parser.has_section(section):
        fallback = parser[section]
    config.update(fallback)
    # Allow configuration 'file' setting to take precedence over 'filename'
    # since it is not one of our defaults.
    if 'file' in config:
        config['filename'] = config.pop('file')
    if 'handler' in config:
        val = config['handler']
        if val.startswith('syslog'):
            # Logging to syslog means no format support.
            config.pop('format', None)
            config.pop('datefmt', None)
        # Logging to a handler means no filename
        config.pop('filename', None)
    elif 'filename' in config:
        # perform variable substitution on file path
        fnargs = {}
        fnargs['user'] = os.path.expanduser('~')
        fnargs['tmp'] = tempfile.gettempdir()
        if '%(repo)s' in config['filename']:
            fnargs['repo'] = os.environ.get(p4gf_const.P4GF_LOG_REPO,
                                            NTR("norepo"))
            REPO_LOG_FILES.add(section)
        config['filename'] %= fnargs
    else:
        # default for these is syslog - rather than xml file
        if section in [_auth_keys_section, _audit_section]:
            config['handler'] = NTR('syslog')
        else:
            fpath = _generate_default_name(section)
            config['filename'] = fpath
    config.setdefault(NTR('format'), logging.BASIC_FORMAT)
    config.setdefault(NTR('datefmt'), None)
    return config
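# Minimal sketch (section and values invented) of the precedence rules above,
# using only configparser: a 'handler' entry wins over 'filename', and a
# syslog handler drops the format options.
import configparser

_demo = configparser.ConfigParser()
_demo.read_string('[audit]\nhandler = syslog\nfilename = /var/log/audit.log\n')
_cfg = {'format': '%(message)s', 'root': 'warning'}
_cfg.update(_demo['audit'])
if _cfg.get('handler', '').startswith('syslog'):
    _cfg.pop('format', None)
    _cfg.pop('datefmt', None)
if 'handler' in _cfg:
    _cfg.pop('filename', None)
# _cfg -> {'root': 'warning', 'handler': 'syslog'}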
Example #25
def ssh_key_add(p4, depot_path, keys, action=None):
    """Read the contents of the named file and use it to produce a
    fingerprint of the presumed SSH key, formatting the results into
    a line suitable for adding to the SSH configuration file. The line
    is added to the set of keys, keyed by a generated fingerprint.

    Keyword arguments:
    p4         -- P4 API object
    depot_path -- path to keys file
    keys       -- instance of KeyKeeper
    action     -- string describing the action being performed (e.g. 'edit'),
                  defaults to ADD. For debug log only.
    """
    user, key, fp = extract_key_data(p4, depot_path)
    if not user:
        _print_warn(
            _('Could not extract user name from unrecognized depot path: {depot_path}'
              ).format(depot_path=depot_path))
        return
    if not fp:
        if p4gf_util.depot_file_exists(p4, depot_path):
            _print_warn(
                _("File '{depot_path}' does not conform to a valid SSH key, ignoring..."
                  ).format(depot_path=depot_path))
        return
    if not action:
        action = _ADD
    _print_debug(
        _('action {}, user {}, key {}, FP {}').format(action, user, key, fp))
    # $SSH[2]_ORIGINAL_COMMAND is there to get the command being invoked
    # by the client via SSH (e.g. git-upload-pack 'foo') -- we need that
    # in order to take the appropriate action, and for auditing purposes.
    if Ssh2:
        fname = os.path.join(KEYS_DIR, user, fp.replace(':', '') + NTR('.pub'))
        fpath = os.path.join(SshDirectory, fname)
        fdir = os.path.dirname(fpath)
        if not os.path.exists(fdir):
            os.makedirs(fdir)
        with open(fpath, 'w') as f:
            f.write(SSH2_HEADER_LINE + '\n')
            keydata = key
            while keydata:
                f.write(keydata[:72] + '\n')
                keydata = keydata[72:]
            f.write(SSH2_FOOTER_LINE + '\n')
        ln = NTR(
            'Key {file}\nOptions command="p4gf_auth_server.py --user={user} --keyfp={keyfp}'
            ' $SSH2_ORIGINAL_COMMAND"').format(file=fname, user=user, keyfp=fp)
        # No options are included since not all SSH2 implementations support them.
    else:
        ln = generate_openssh_key(user, fp, key)
    keys.add(fp, user, ln)
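# Standalone sketch of the 72-column SSH2 body wrapping performed above.
# The header/footer text follows RFC 4716; the real module takes them from
# SSH2_HEADER_LINE and SSH2_FOOTER_LINE, defined elsewhere.
def _wrap_ssh2_body(keydata, width=72):
    lines = ['---- BEGIN SSH2 PUBLIC KEY ----']
    while keydata:
        lines.append(keydata[:width])
        keydata = keydata[width:]
    lines.append('---- END SSH2 PUBLIC KEY ----')
    return '\n'.join(lines) + '\n'

# _wrap_ssh2_body('A' * 100) produces the header, a 72-character line,
# a 28-character line, and the footer.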
def _test_write_branch(repo_name, branch, key, value):
    '''
    Unit test hook to see if we actually write the correct values to
    the correct files.
    '''
    with p4gf_context.create_context(NTR('p4gf_repo'), None) as ctx:
        config = p4gf_config.get_repo(ctx.p4gf, repo_name)
    for section in config.sections():
        if config.has_option(section, p4gf_config.KEY_GIT_BRANCH_NAME) and \
           config.get(section, p4gf_config.KEY_GIT_BRANCH_NAME) == branch:
            _test_write(repo_name, section, key, value)
            return
    print(NTR('branch not found: {}').format(branch))
Example #27
def ensure_depot_gf(p4):
    """Create depot P4GF_DEPOT if not already exists.

    Requires that connection p4 has super permissions.

    Return True if created, False if already exists.
    """
    spec = {
        'Owner':       p4gf_const.P4GF_USER,
        'Description': _('Git Fusion data storage.'),
        'Type':        NTR('local'),
        'Map':         '{depot}/...'.format(depot=p4gf_const.P4GF_DEPOT)
    }
    return ensure_spec(p4, NTR('depot'), spec_id=p4gf_const.P4GF_DEPOT, values=spec)
Example #28
def check_and_create_default_p4gf_env_config():
    """If p4gf_env_config threw the MissingConfigPath exception,
    because P4GF_ENV names a non-existing filepath
    then save the required (two) default items
    into the user configured P4GF_ENV environment config file.
    """
    if not Create_P4GF_CONFIG:
        LOG.debug('not creating configuration file')
        return
    LOG.debug('creating missing configuration file')
    Verbosity.report(
        Verbosity.INFO, _("Git Fusion environment var P4GF_ENV = {path} names a non-existing file.")
        .format(path=p4gf_const.P4GF_ENV))
    Verbosity.report(
        Verbosity.INFO, _("Creating {path} with the default required items.")
        .format(path=p4gf_const.P4GF_ENV))
    Verbosity.report(
        Verbosity.INFO, _("Review the file's comments and edit as needed."))
    Verbosity.report(
        Verbosity.INFO, _("You may unset P4GF_ENV to use no config file."))
    config = configparser.ConfigParser(interpolation  = None,
                                       allow_no_value = True)
    config.optionxform = str
    config.add_section(p4gf_const.SECTION_ENVIRONMENT)
    config.set(p4gf_const.SECTION_ENVIRONMENT, p4gf_const.P4GF_HOME_NAME, p4gf_const.P4GF_HOME)
    Verbosity.report(
        Verbosity.INFO, _("Setting {home_name} = {home} in {env}.")
        .format(home_name=p4gf_const.P4GF_HOME_NAME,
                home=p4gf_const.P4GF_HOME,
                env=p4gf_const.P4GF_ENV))
    config.set(p4gf_const.SECTION_ENVIRONMENT, NTR('P4PORT'), P4PORT)
    Verbosity.report(
        Verbosity.INFO, _("Setting {p4port} = {p4port_value} in {env}.")
        .format(p4port=NTR('P4PORT'),
                p4port_value=P4PORT,
                env=p4gf_const.P4GF_ENV))
    header = p4gf_util.read_bin_file(NTR('p4gf_env_config.txt'))
    if header is False:
        sys.stderr.write(_('no p4gf_env_config.txt found\n'))
        header = _('# Missing p4gf_env_config.txt file!')
    out = io.StringIO()
    out.write(header)
    config.write(out)
    file_content = out.getvalue()
    out.close()
    p4gf_util.ensure_dir(p4gf_util.parent_dir(p4gf_const.P4GF_ENV))
    with open(p4gf_const.P4GF_ENV, 'w') as f:
        f.write(file_content)
    LOG.debug('created configuration file %s', p4gf_const.P4GF_ENV)
Example #29
    def _view_valid_depots(self, depot_list, branch_id, view_p4map, allow_p4gf_depot):
        """Prohibit remote, spec, and other changelist-impaired depot types."""
        valid = True

        # Extract unique list of referenced depots. Only want to warn about
        # each depot once per branch, even if referred to over and over.
        lhs = view_p4map.lhs()
        referenced_depot_name_list = []
        for line in lhs:
            if line.startswith('-'):
                continue
            depot_name = depot_from_view_lhs(line)
            if not depot_name:
                self._report_error(_("branch '{branch_id}': badly formed depot "
                                     "syntax in view: '{line}' not permitted.\n'")
                                   .format(branch_id=branch_id, line=line))
                valid = False
                continue
            if depot_name not in referenced_depot_name_list:
                referenced_depot_name_list.append(depot_name)

        # check each referenced depot for problems
        for depot_name in referenced_depot_name_list:
            if not allow_p4gf_depot and depot_name == p4gf_const.P4GF_DEPOT:
                self._report_error(_("branch '{branch_id}': Git Fusion internal"
                                     " depot '{depot_name}' not permitted.\n'")
                                   .format(branch_id=branch_id, depot_name=depot_name))
                valid = False
                continue

            if depot_name not in depot_list:
                self._report_error(_("branch '{branch_id}': undefined "
                                     "depot '{depot_name}' not permitted "
                                     "(possibly due to lack of permissions).\n'")
                                   .format(branch_id=branch_id, depot_name=depot_name))
                valid = False
                continue

            depot = depot_list[depot_name]
            if depot['type'] not in [NTR('local'), NTR('stream')]:
                self._report_error(_("branch '{branch_id}': depot '{depot_name}'"
                                     " type '{depot_type}' not permitted.\n'")
                                   .format(branch_id=branch_id,
                                           depot_name=depot_name,
                                           depot_type=depot['type']))
                valid = False
                continue

        return valid
def depot_branch_info_from_config(config):
    """ Return DepotBranchInfo from configparser object."""
    dbi = DepotBranchInfo(_dbid_section(config))
    dbi.root_depot_path = config.get(dbi.depot_branch_id, KEY_ROOT_DEPOT_PATH)
    firstbranch = None
    firstcl = None
    branch = []
    cl = []
    fp_basis_change_num = None
    fp_basis_map_line_list = None
    for option in config.options(dbi.depot_branch_id):
        value = config.get(dbi.depot_branch_id, option)
        if option == KEY_PARENT_BRANCH_ID:
            firstbranch = value
        elif option == KEY_PARENT_CHANGELIST:
            firstcl = value
        elif option == KEY_BASIS_CHANGE_NUM:
            fp_basis_change_num = value
        elif option == KEY_BASIS_MAP_LINES:
            fp_basis_map_line_list = p4gf_config.get_view_lines(
                config[dbi.depot_branch_id], KEY_BASIS_MAP_LINES)
            # We're being clever with parent lists here. variables
            # branch and cl are lists of strings that include a
            # numbered "parent -{}-branch-id/changelist:" prefix,
            # so that the two lists will alphanumeric sort
            # identically, keeping branch and cl in step with each
            # other.
        elif option.endswith(NTR('branch-id')):
            branch.append(option + ':' + value)
        elif option.endswith(NTR('changelist')):
            cl.append(option + ':' + value)

    branch = p4gf_util.alpha_numeric_sort(branch)
    cl = p4gf_util.alpha_numeric_sort(cl)

    if firstbranch and firstcl:
        dbi.parent_depot_branch_id_list.append(firstbranch)
        dbi.parent_changelist_list.append(firstcl)

    for i in range(len(branch)):
        dbi.parent_depot_branch_id_list.append(branch[i].split(':')[1])
        dbi.parent_changelist_list.append(cl[i].split(':')[1])

    if fp_basis_change_num is not None:
        dbi.fp_basis_known = True
        dbi.fp_basis_change_num = int(fp_basis_change_num)
        dbi.fp_basis_map_line_list = fp_basis_map_line_list

    return dbi