def main():
    """Set up repo for a view.

    Parses command-line arguments, connects to Perforce via a temporary
    client, acquires the repo lock, and drives InitRepo to perform the
    full repository initialization.  Exits with status 1 on error.
    """
    p4gf_util.has_server_id_or_exit()
    args = _parse_argv()
    p4gf_version_3.log_version_extended(include_checksum=True)
    log_l10n()
    if args.enablemismatchedrhs:
        # Git Fusion should never modify the customer's config file, and
        # use of this option resulted in the config file losing all of the
        # comments and formatting the customer had put in place.
        sys.stderr.write(
            _('The --enablemismatchedrhs option is deprecated,'
              ' please use enable-mismatched-rhs config file'
              ' option instead.\n'))
        sys.exit(1)
    repo_name_p4client = None
    if args.p4client:
        repo_name_p4client = p4gf_util.argv_to_repo_name(args.p4client)
    repo_name = _argv_to_repo_name(args.repo_name)
    p4gf_util.reset_git_enviro()

    p4 = p4gf_create_p4.create_p4_temp_client()
    if not p4:
        raise RuntimeError(_('error connecting to Perforce'))

    LOG.debug("connected to P4 at %s", p4.port)
    p4gf_proc.init()

    try:
        # ExitStack keeps the connection/lock/context teardown flat
        # instead of nesting several 'with' statements.
        with ExitStack() as stack:
            stack.enter_context(p4gf_create_p4.Closer())
            p4gf_version_3.version_check()
            p4gf_branch.init_case_handling(p4)
            repo_lock = p4gf_lock.RepoLock(p4, repo_name)
            stack.enter_context(repo_lock)
            ctx = p4gf_context.create_context(repo_name)
            ctx.p4gf = p4
            ctx.repo_lock = repo_lock
            initer = InitRepo(p4, repo_lock).set_repo_name(repo_name)
            initer.context = ctx
            initer.set_config_file_path(args.config)
            initer.set_charset(args.charset)
            initer.set_noclone(args.noclone)
            initer.set_start(args.start)
            # Enter the context only after InitRepo is fully configured.
            stack.enter_context(ctx)
            initer.full_init(repo_name_p4client)
    except P4.P4Exception as e:
        _print_stderr(_('Error occurred: {exception}').format(exception=e))
        sys.exit(1)
Exemple #2
0
def repo_is_locked(repo_name):
    """Determine if the repo is locked by GF.

    :param repo_name: URL-encoded repository name to check.
    :return: True if the repo lock is currently held, False otherwise.
    """
    #
    # We need to open a new connection every time, because the server
    # common code closes all connections after each poll.
    #
    p4 = p4gf_create_p4.create_p4_temp_client()
    is_locked = False
    try:
        repo_name_tx = p4gf_translate.TranslateReponame.url_to_repo(repo_name, p4)
        # Do not block on repo lock - this will cause issues when called from cron
        try:
            with p4gf_lock.RepoLock(p4, repo_name_tx, blocking=False):
                pass
        except p4gf_lock.LockBusy:
            # Translate first, then format: calling .format() inside _()
            # would look up the already-formatted string in the message
            # catalog and defeat localization.
            sys.stdout.write(
                _("View '{repo_name}' is locked. Skipping poll update.\n").format(
                    repo_name=repo_name))
            is_locked = True
    finally:
        # Always release the temporary client, even if translation or
        # lock construction raised an unexpected error.
        p4gf_create_p4.destroy(p4)
    return is_locked
def _lock_all_repos(p4):
    """Quickly acquire locks on all Git Fusion repositories.

    Fail immediately (raise LockBusy) if any repos are currently locked.
    Waiting would only increase the chance of getting blocked on another repo,
    so scan and fail fast instead.

    Return a list of the P4KeyLock instances acquired.
    """
    repo_names = p4gf_util.repo_config_list(p4)
    if not repo_names:
        print(_('No Git Fusion clients found.'))
        return []
    acquired = []
    for name in repo_names:
        repo_lock = p4gf_lock.RepoLock(p4, name, blocking=False)
        # acquire() raises if someone else holds the lock; only locks
        # that were successfully taken make it into the result list.
        repo_lock.acquire()
        acquired.append(repo_lock)
    return acquired
Exemple #4
0
def main():
    """Do the post-receive work.

    Returns 2 when help was requested, 0 otherwise.  The actual push
    processing happens in a double-forked background process so that git
    is not kept waiting on the hook.
    """
    for h in ['-?', '-h', '--help']:
        if h in sys.argv:
            print(_('Git Fusion post-receive hook.'))
            return 2
    p4gf_version_3.print_and_exit_if_argv()

    # If P4GF_FORK_PUSH is not set, then this is not the genuine push
    # payload and simply a preliminary request made by the HTTP client.
    # In the case of SSH, it will always be set.
    if p4gf_const.P4GF_FORK_PUSH not in os.environ:
        return 0

    # Run this now to avoid the warning of p4gf_proc.init() when ps is
    # invoked in the lock acquisition code (and just because).
    p4gf_proc.init()

    # Indicate that the lock is about to be acquired by the upcoming
    # background process; the main server process will wait until the lock
    # acquisition is completed by the background process.
    LOG.debug('main() setting up forked process')
    with p4gf_create_p4.Closer():
        p4 = p4gf_create_p4.create_p4_temp_client()
        repo_name = p4gf_path.cwd_to_repo_name()
        p4gf_log.configure_for_repo(repo_name)
        group_id = os.environ[p4gf_const.P4GF_FORK_PUSH]
        with p4gf_lock.RepoLock(p4, repo_name, group_id=group_id) as lock:
            lock.set_acquire_pending()

    # Spawn a process to do the work that pre-receive hook could not do
    # before git updated the references. This is an attempt to prevent
    # any timing issues with respect to git.
    LOG.debug('main() starting processing in forked process')
    func = functools.partial(forked_main)
    p4gf_proc.double_fork(func)
    LOG.debug('main() forked process initiated')

    return 0
    def _import_submodules(self):
        """For stream clients, create a submodule for each import.

        Scans the parent stream's view for import paths, and for each one
        initializes (or reuses) a repo and populates it as a git submodule
        of the parent repo.  Submodules that no longer match any import
        path are removed afterwards.
        """
        # pylint:disable=too-many-statements, too-many-branches
        view = self.paths_parent.parent['View']  # the parent stream's 'View'
        change_view = self.paths_parent.parent.get('ChangeView')  # the parent stream's 'ChangeView'
        import_paths = self.paths_parent.import_paths

        # have already split this function several times...
        usermap = p4gf_usermap.UserMap(self.ctx.p4gf, self.ctx.email_case_sensitivity)
        user_3tuple = usermap.lookup_by_p4user(p4gf_const.P4GF_USER)
        if not user_3tuple:
            # Without the Git Fusion user we cannot attribute the copy; bail out.
            LOG.error('Missing Perforce user %s', p4gf_const.P4GF_USER)
            return
        client_name = self.ctx.config.p4client
        LOG.debug('processing imports for %s', client_name)
        LOG.debug3('_import_submodules() view=%s, change_view=%s, import_paths=%s',
                   view, change_view, import_paths)
        change_views = p4gf_streams.stream_imports_with_changes(view, change_view, import_paths)
        LOG.debug2('_import_submodules() change_views=%s', change_views)
        if not change_views and LOG.isEnabledFor(logging.DEBUG2):
            LOG.debug2('_import_submodules() view=%s change_view=%s import_paths=%s',
                       view, change_view, import_paths)
        # initialize and populate the submodules
        old_head = p4gf_pygit2.head_ref(self.ctx.repo)
        for depot_path, change_num, local_path in change_views:
            # avoid double-nesting by excluding the local path from the client path
            client_path = "//{}/...".format(client_name)
            LOG.debug('_import_submodules() for %s => %s', depot_path, client_path)
            # drop the trailing 4 characters (presumably '/...') to get the
            # stream spec name -- TODO confirm against depot path format
            stream_name = depot_path[:-4]
            if p4gf_p4spec.spec_exists(self.ctx.p4, 'stream', stream_name):
                # convert stream name to repo name by pruning leading slashes
                repo_name = p4gf_streams.repo_name_from_depot_path(stream_name)
                config = None
                LOG.debug('initializing stream import for %s', depot_path)
            else:
                # create a repo configuration file for this 1-line view
                repo_name = p4gf_streams.repo_name_from_depot_path(depot_path)
                client_less_path = CLIENT_LESS_REGEX.match(client_path).group(1)
                if client_path and client_path[0] == '"':
                    # preserve the opening quote stripped by the regex match
                    client_less_path = '"' + client_less_path
                repo_view = depot_path + " " + client_less_path
                LOG.debug('creating config for %s', repo_name)
                config = p4gf_config.default_config_repo_for_view_plain(self.ctx.p4,
                                                                        repo_name,
                                                                        repo_view)
            # prepare to initialize the repository
            #
            # Note that we skip the temp client counting mechanism in this
            # case because it is rather difficult to avoid.
            p4 = p4gf_create_p4.create_p4_temp_client(skip_count=True)
            if not p4:
                LOG.error('unable to create P4 instance for %s', repo_name)
                return
            if p4gf_const.READ_ONLY:
                # Read-only instances may only use configs that already exist.
                try:
                    repo_config = p4gf_config.RepoConfig.from_depot_file(repo_name, p4)
                    subtxt = p4gf_context.create_context(repo_name)
                    subtxt.p4gf = p4
                    ir = InitRepo(p4, None).set_repo_config(repo_config)
                    ir.context = subtxt
                    ir.init_repo(handle_imports=False)
                    with subtxt:
                        # populate the submodule
                        self._copy_submodule(subtxt, local_path, change_num, user_3tuple)
                except p4gf_config.ConfigLoadError:
                    raise ReadOnlyException(_("Read-only instance cannot initialize repositories."))
            else:
                with p4gf_lock.RepoLock(p4, repo_name) as repo_lock:
                    if config:
                        p4gf_config.create_file_repo_from_config(self.ctx, repo_name, config)
                    LOG.debug('initializing repo for %s', repo_name)
                    repo_config = p4gf_config.RepoConfig.from_depot_file(repo_name, p4,
                                                                         create_if_missing=True)
                    subtxt = p4gf_context.create_context(repo_name)
                    subtxt.p4gf = p4
                    subtxt.repo_lock = repo_lock
                    ir = InitRepo(p4, repo_lock).set_repo_config(repo_config)
                    ir.context = subtxt
                    ir.init_repo(handle_imports=False)
                    with subtxt:
                        # populate the submodule
                        self._copy_submodule(subtxt, local_path, change_num, user_3tuple)
            if p4.connected():
                p4gf_create_p4.p4_disconnect(p4)
        # Remove any submodules controlled by Git Fusion that no longer match
        # any of the current import paths.
        self._deport_submodules(import_paths, user_3tuple)

        if not p4gf_const.READ_ONLY:
            # The process() method above configures 'enable-git-submodules'
            # in the parent repo to disable submodule updates. This is written to p4gf_config,
            # but self.ctx.submodules is not set to False, and remains = True.
            # This so the import of the submodule itself is not rejected.
            # However, if the import fails, the next pull attempt would
            # fail now that 'enable-git-submodules' has been set to False.
            # So .. bypass the submodules protection for the fake push which
            # may be creating the imported submodule itself in the parent repo.

            submodules = self.ctx.submodules
            self.ctx.submodules = True    # temporary
            self._ensure_commits_copied(old_head)
            self.ctx.submodules = submodules
def main():
    """Parse the command-line arguments and report on locks.

    With --test or --test2, first acquires a set of locks (and, for
    --test2, plants a dead process as a lock owner) so the report code
    can be exercised, then cleans up the test keys.
    """
    # pylint: disable=too-many-statements
    desc = _("Report the currently held locks in Git Fusion.")
    parser = p4gf_util.create_arg_parser(desc=desc)
    parser.add_argument('--test',
                        action='store_true',
                        help=_('invoke test mode, acquire locks and report'))
    parser.add_argument(
        '--test2',
        action='store_true',
        help=_(
            'invoke test mode, acquire locks and report, set dead processes.'))
    args = parser.parse_args()

    p4gf_util.has_server_id_or_exit()
    server_id = p4gf_util.get_server_id()
    p4 = p4gf_create_p4.create_p4_temp_client()
    if not p4:
        sys.exit(1)
    print("Connecting to P4PORT={} as P4USER={}".format(p4.port, p4.user))
    if args.test or args.test2:
        repo_name = "p4gf_test_status_repo"
        status_key_name = p4gf_p4key.calc_repo_status_p4key_name(
            repo_name, None)
        p4gf_p4key.set(p4, status_key_name, 'Push 1 completed successfully')
        pushid_key_name = p4gf_p4key.calc_repo_push_id_p4key_name(repo_name)
        p4gf_p4key.set(p4, pushid_key_name, '1')
        # create a process and kill it and set its dead pid as a RepoLock owner below.

        if args.test:
            # A test with nothing stale
            with ExitStack() as stack:
                stack.enter_context(p4gf_lock.ReviewsLock(p4))
                stack.enter_context(p4gf_lock.RepoLock(p4, repo_name))
                stack.enter_context(p4gf_git_repo_lock.read_lock(repo_name))
                stack.enter_context(
                    p4gf_git_repo_lock.write_lock(repo_name, upgrade=True))
                print_lock_status(p4, server_id)

        else:  # if args.test2
            # Now a test with some DEAD processes and a stale view Lock
            dead_process = subprocess.Popen(['echo', 'x'],
                                            stdout=subprocess.DEVNULL)
            dead_process.kill()
            # communicate() reaps the child so returncode gets populated.
            while dead_process.returncode is None:
                dead_process.communicate()
            lock2 = None
            with ExitStack() as stack:
                stack.enter_context(p4gf_lock.ReviewsLock(p4))
                # first lock owner
                lock1 = p4gf_lock.RepoLock(p4, repo_name)
                # second lock owner with same group_id and a dead pid
                lock2 = p4gf_lock.RepoLock(p4,
                                           repo_name,
                                           group_id=lock1.group_id)
                lock2.process_id = dead_process.pid
                # acquire the first RepoLock
                stack.enter_context(lock1)
                # Use low level method to add this DEAD pid to the group's lock owners
                lock2.do_acquire()
                stack.enter_context(p4gf_git_repo_lock.read_lock(repo_name))
                stack.enter_context(
                    p4gf_git_repo_lock.write_lock(repo_name, upgrade=True))
                print("Test 1:")
                print_lock_status(p4, server_id)
                p4gf_p4key.set(p4, pushid_key_name, '2')
                p4gf_p4key.set(p4, status_key_name,
                               'Push 2 failed: some error')
                # Finally lets set the P4GF_P4KEY_LOCK_VIEW - the least likely to be stale
                p4gf_p4key.set(p4, lock2.lock_key, '1')
                print("Test 2:")
                print_lock_status(p4, server_id)
                # Can't exit the ExitStack unless we clean this
                p4gf_p4key.delete(p4, lock2.lock_key)
            # Clean up this lock so the test may be run again
            p4gf_p4key.delete(p4, lock2.owners_key)
        # remove test keys
        p4gf_p4key.delete(p4, status_key_name)
        p4gf_p4key.delete(p4, pushid_key_name)
    else:
        print_lock_status(p4, server_id)
def main():
    """Update the disk usage p4 keys for one or more repositories.

    Without -y/--reset this only reports the current total/pending key
    values and disk usage; with it, the repo is re-synced from Perforce
    and the keys are recomputed.
    """
    desc = _("Set/reset the total and pending p4 keys.")
    epilog = _("Without the -y/--reset option, only displays current values.")
    parser = p4gf_util.create_arg_parser(desc, epilog=epilog)
    parser.add_argument('-a', '--all', action='store_true',
                        help=_('process all known Git Fusion repositories'))
    parser.add_argument('-y', '--reset', action='store_true',
                        help=_('perform the reset of the p4 keys'))
    parser.add_argument(NTR('repos'), metavar=NTR('repo'), nargs='*',
                        help=_('name of repository to be updated'))
    args = parser.parse_args()

    # Check that either --all, or 'repos' was specified.
    if not args.all and len(args.repos) == 0:
        sys.stderr.write(_('Missing repo names; try adding --all option.\n'))
        sys.exit(2)
    if args.all and len(args.repos) > 0:
        sys.stderr.write(_('Ambiguous arguments. Choose --all or a repo name.\n'))
        sys.exit(2)

    with p4gf_create_p4.Closer():
        p4 = p4gf_create_p4.create_p4_temp_client()
        if not p4:
            sys.exit(2)
        # Sanity check the connection (e.g. user logged in?) before proceeding.
        try:
            p4.fetch_client()
        except P4.P4Exception as e:
            sys.stderr.write(_('P4 exception occurred: {exception}').format(exception=e))
            sys.exit(1)

        if args.all:
            repos = p4gf_util.repo_config_list(p4)
            if len(repos) == 0:
                print(_('No Git Fusion repositories found, nothing to do.'))
                sys.exit(0)
        else:
            repos = args.repos
        # Done with this connection; each repo context makes its own.
        p4gf_create_p4.p4_disconnect(p4)

        for repo in repos:
            repo_name = p4gf_translate.TranslateReponame.git_to_repo(repo)
            print(_("Processing repository {repo_name}... ").format(repo_name=repo_name), end='')
            ctx = p4gf_context.create_context(repo_name)
            with ExitStack() as stack:
                stack.enter_context(ctx)
                # Non-blocking: skip (by raising) rather than wait on a busy repo.
                ctx.repo_lock = p4gf_lock.RepoLock(ctx.p4gf, repo_name, blocking=False)
                stack.enter_context(ctx.repo_lock)
                limits = PushLimits(ctx)
                if args.reset:
                    # Copy any Perforce changes down to this Git repository.
                    p4gf_copy_p2g.copy_p2g_ctx(ctx)
                    # Attempt to trim any unreferenced objects.
                    p4gf_proc.popen(['git', '--git-dir=' + ctx.repo.path, 'prune'])
                    limits.post_copy()
                # Display current key values and disk usage.
                pending_mb = limits.get_pending_mb()
                total_mb = limits.get_total_mb()
                current_mb = limits.space_total
                print(
                    _('{total_mb:.2f}M total, {pending_mb:.2f}M pending, '
                      '{current_mb:.2f}M current')
                    .format(total_mb=total_mb,
                            pending_mb=pending_mb,
                            current_mb=current_mb), end='')
            print("")
Exemple #8
0
def main():
    """Update one or more repository hook scripts.

    Re-installs the Git Fusion hook scripts into the named repositories
    (or all repositories with --all).  Exits with status 1 if any repo
    failed to update, 2 on argument errors.
    """
    parser = p4gf_util.create_arg_parser(
        _('Updates the hook scripts in one or more Git Fusion repositories.'))
    parser.add_argument('-a',
                        '--all',
                        action='store_true',
                        help=_('process all known Git Fusion repositories'))
    parser.add_argument(NTR('repos'),
                        metavar=NTR('repo'),
                        nargs='*',
                        help=_('name of repository to be updated'))
    args = parser.parse_args()

    # Check that either --all, or a repo was named.
    if not args.all and len(args.repos) == 0:
        sys.stderr.write(_('Missing repo names; try adding --all option.\n'))
        sys.exit(2)
    if args.all and len(args.repos) > 0:
        sys.stderr.write(
            _('Ambiguous arguments. Choose --all or a repo name.\n'))
        sys.exit(2)

    p4 = p4gf_create_p4.create_p4_temp_client()
    if not p4:
        sys.exit(2)

    # Sanity check the connection (e.g. user logged in?) before proceeding.
    try:
        p4.fetch_client()
    except P4.P4Exception as e:
        sys.stderr.write(
            _('P4 exception occurred: {exception}').format(exception=e))
        sys.exit(1)

    if args.all:
        repos = p4gf_util.repo_config_list(p4)
        if not repos:
            print(_("No repos exist yet."))
    else:
        repos = args.repos
    # Done with this connection; each repo context makes its own.
    p4gf_create_p4.p4_disconnect(p4)

    have_error = False
    for git_view in repos:
        repo_name = p4gf_translate.TranslateReponame.git_to_repo(git_view)
        print(_("Processing repository {repo_name}...").format(
            repo_name=repo_name),
              end='')
        try:
            ctx = p4gf_context.create_context(repo_name)
            ctx.create_config_if_missing(False)
            with ExitStack() as stack:
                stack.enter_context(ctx)
                # Non-blocking: a busy repo raises instead of stalling the tool.
                ctx.repo_lock = p4gf_lock.RepoLock(ctx.p4gf,
                                                   repo_name,
                                                   blocking=False)
                stack.enter_context(ctx.repo_lock)
                # If __file__ contains a symlink, decoding at this top level
                # will cause Python to retain it, for use in the hook paths.
                p4gf_init_host.install_hook(ctx.repo_dirs.GIT_DIR,
                                            overwrite=True,
                                            hook_abs_path=__file__)
            print(_(" successful."))
        except p4gf_config.ConfigLoadError as e:
            import logging
            # cannot use __name__ since it will be "__main__"
            logging.getLogger("p4gf_update_hooks").exception(
                "failed to update hooks")
            print(_(" failed."))
            sys.stderr.write(
                _("\n{exception}\nHook scripts not updated for repo '{repo_name}'."
                  ).format(exception=e, repo_name=repo_name))
            have_error = True
    if have_error:
        sys.exit(1)
    def do_it(self):
        """Perform all of the setup, processing, and clean up.

        Acquires the p4key repo lock, runs before()/process()/after() in
        order, and guarantees cleanup()/p4gf_proc.stop() run even when
        processing raises.

        :rtype: int
        :return: status code for the process upon exit.

        """
        p4gf_util.log_environ(LOG, os.environ, self.label)
        log_l10n()
        p4gf_proc.install_stack_dumper()
        # Kick off garbage collection debugging, if enabled.
        p4gf_mem_gc.init_gc()

        # Use ExitStack to avoid deeply nested code.
        with ExitStack() as stack:
            stack.enter_context(p4gf_create_p4.Closer())
            p4 = p4gf_create_p4.create_p4_temp_client()
            if not p4:
                return 2
            repo_name = p4gf_path.cwd_to_repo_name()
            p4gf_util.reset_git_enviro()

            # Initialize the external process launcher early, before
            # allocating lots of memory, and just after all other
            # conditions have been checked.
            p4gf_proc.init()

            # Assume that something bad will happen (especially with preflight).
            exit_code = os.EX_SOFTWARE
            try:
                p4gf_log.configure_for_repo(repo_name)
                # group_id set by the post-receive hook that forked us.
                gid = os.environ[p4gf_const.P4GF_FORK_PUSH]
                self.before_p4key_lock(repo_name)
                with p4gf_lock.RepoLock(p4, repo_name,
                                        group_id=gid) as repo_lock:
                    # Work to be done with the p4key lock...
                    self.context = p4gf_context.create_context(repo_name)
                    self.context.p4gf = p4
                    self.context.repo_lock = repo_lock
                    self.context.foruser = os.getenv(p4gf_const.P4GF_FORUSER)
                    stack.enter_context(self.context)
                    self.before()
                    exit_code = self.process()
                if self.after_requires_write_lock():
                    # Work to be done without the p4key lock, but with the
                    # write lock. Note that we release the p4key lock
                    # before acquiring the write lock to avoid deadlock
                    # with the foreground process, which always gets the
                    # repo read/write lock _before_ acquiring the p4key
                    # lock. Hence all this complication with the locks.
                    with p4gf_git_repo_lock.write_lock(repo_name):
                        self.after(exit_code)
                else:
                    # The after() method does not need a write lock...
                    self.after(exit_code)
            finally:
                self.cleanup()
                p4gf_proc.stop()

        # Random tasks after all of the locks have been released.
        msg = NTR("at end of {hook}").format(hook=self.label)
        p4gf_mem_gc.process_garbage(msg)
        p4gf_mem_gc.report_objects(msg)
        return exit_code
def main():
    """Process command line arguments and call functions to do the real
    work of cleaning up the Git mirror and Perforce workspaces.

    Exits 2 on argument errors, 1 on lock/Perforce errors.  Without -y
    this is report mode and nothing is deleted.
    """
    # pylint:disable=too-many-branches, too-many-statements
    log_l10n()
    # Set up argument parsing.
    desc = _("""Deletes Git Fusion repositories and workspaces. When you
include the -a or --all option, Git Fusion finds and deletes the following
for all repos on the current server disregarding specified views:
1) All git-fusion-view clients. 2) Client git-fusion--p4 workspace files.
3) Objects in //.git-fusion/objects/...
""")
    epilog = _("""It is recommended to run 'p4gf_delete_repo.py' without
the '-y' flag to preview changes that will be made to the depot before
using the '-y' flag for permanent removal. Use -a or --all to permanently
delete all repo data for all repos on the Perforce server; be aware that
this may take some time, depending on the number and size of the objects.
Use -N, --no-obliterate to quickly delete most of the repo's data and
continue working. This minimizes the impact to server performance.
""")
    parser = p4gf_util.create_arg_parser(desc, epilog=epilog)
    parser.add_argument(
        '-a',
        '--all',
        action='store_true',
        help=_('remove all known Git mirrors on the current server'))
    parser.add_argument('-y',
                        '--delete',
                        action='store_true',
                        help=_('perform the deletion'))
    parser.add_argument('-v',
                        '--verbose',
                        action='store_true',
                        help=_('print details of deletion process'))
    parser.add_argument(
        '-N',
        '--no-obliterate',
        action='store_true',
        help=_('with the --all option, do not obliterate object cache'))
    parser.add_argument(NTR('views'),
                        metavar=NTR('view'),
                        nargs='*',
                        help=_('name of view to be deleted'))
    args = parser.parse_args()
    p4gf_util.has_server_id_or_exit()

    # Check that either --all, or 'views' was specified.
    if not args.all and len(args.views) == 0:
        sys.stderr.write(_('Missing view names; try adding --all option.\n'))
        sys.exit(2)
    if args.all and len(args.views) > 0:
        sys.stderr.write(
            _('Ambiguous arguments. Choose --all or a view name.\n'))
        sys.exit(2)

    # Check that --no-obliterate occurs only with --all
    if not args.all and args.no_obliterate:
        sys.stderr.write(
            _('--no-obliterate permitted only with the --all option.\n'))
        sys.exit(2)

    with p4gf_create_p4.Closer():
        p4 = p4gf_create_p4.create_p4_temp_client()
        if not p4:
            return 2
        # Sanity check the connection (e.g. user logged in?) before proceeding.
        p4gf_branch.init_case_handling(p4)
        try:
            p4.fetch_client()
        except P4.P4Exception as e:
            sys.stderr.write(
                _('P4 exception occurred: {exception}').format(exception=e))
            sys.exit(1)

        metrics = DeletionMetrics()
        if args.all:
            try:
                # Read-only instances only clean up local state.
                if p4gf_const.READ_ONLY:
                    delete_all_local(args, p4, metrics)
                else:
                    delete_all(args, p4, metrics)
            except (p4gf_lock.LockBusy, P4.P4Exception) as e:
                sys.stderr.write("{exception}\n".format(exception=e))
                sys.exit(1)
        else:
            # Delete the client(s) for the named view(s).
            for git_view in args.views:
                repo_name = p4gf_translate.TranslateReponame.git_to_repo(
                    git_view)
                client_name = p4gf_util.repo_to_client_name(repo_name)
                try:
                    if p4gf_const.READ_ONLY:
                        delete_client_local(args, p4, client_name, metrics)
                    else:
                        # Non-blocking: fail fast if the repo is in use.
                        with p4gf_lock.RepoLock(p4, repo_name, blocking=False):
                            delete_client(args, p4, client_name, metrics)
                except (p4gf_lock.LockBusy, P4.P4Exception) as e:
                    sys.stderr.write("{exception}\n".format(exception=e))
                    sys.exit(1)
        if not args.delete:
            print(_('This was report mode. Use -y to make changes.'))
        else:
            print(
                _('Deleted {num_files:d} files, {num_groups:d} groups, '
                  '{num_clients:d} clients, and {num_keys:d} p4keys.').format(
                      num_files=metrics.files,
                      num_groups=metrics.groups,
                      num_clients=metrics.clients,
                      num_keys=metrics.p4keys))
            if args.all:
                print(_('Successfully deleted all repos\n'))
            else:
                print(
                    _('Successfully deleted repos:\n{repos}').format(
                        repos="\n".join(args.views)))