def main():
    """Search the user map for a given email address.

    Parses the command line, makes sure Git Fusion is initialized, then
    reports the Perforce user (if any) whose email matches the argument.
    Exits 0 on a match, 1 otherwise.
    """
    p4gf_util.has_server_id_or_exit()
    log_l10n()

    # Build the argument parser: a single positional email argument.
    arg_parser = p4gf_util.create_arg_parser(
        _("Searches for an email address in the user map."))
    arg_parser.add_argument(NTR('email'), metavar='E',
                            help=_('email address to find'))
    parsed = arg_parser.parse_args()

    # Make sure the world is sane before consulting the user map.
    init_status = p4gf_init.main()
    if init_status:
        print(_("p4gf_usermap initialization failed"))
        sys.exit(init_status)

    with p4gf_create_p4.Closer():
        p4 = p4gf_create_p4.create_p4(
            client=p4gf_util.get_object_client_name())
        if not p4:
            sys.exit(1)

        match = UserMap(p4).lookup_by_email(parsed.email)
        # Guard clause: no match means report-and-fail.
        if not match:
            sys.stderr.write(
                _("No such user found: '{}'\n").format(parsed.email))
            sys.exit(1)
        print(_("Found user '{}' <{}>").format(match[0], match[2]))
        sys.exit(0)
def main():
    """Create Perforce user and client for Git Fusion.

    Returns 0 on success, 2 if no Perforce connection could be made;
    exits the process on version-check failure or permission errors.
    """
    p4gf_version_3.log_version_extended(include_checksum=True)
    try:
        log_l10n()
        p4gf_version_3.version_check()
    except Exception as e:  # pylint: disable=broad-except
        # Version/locale failures are fatal; report and bail.
        sys.stderr.write(e.args[0] + '\n')
        sys.exit(1)
    # To fetch the object client below we need to ensure there is a server
    # ID available on this system, and since we require that anyway, may as
    # well check it now, when we need it.
    p4gf_util.has_server_id_or_exit()
    with p4gf_create_p4.Closer():
        p4 = p4gf_create_p4.create_p4_temp_client()
        if not p4:
            return 2
        # Echo connection details so the admin can confirm the target server.
        Verbosity.report(Verbosity.INFO, "P4PORT : {}".format(p4.port))
        Verbosity.report(Verbosity.INFO, "P4USER : {}".format(p4.user))
        p4gf_util.reset_git_enviro()
        p4gf_proc.init()
        try:
            init(p4)
            remove_old_temp_and_repo_clients(p4)
        except PermissionError:
            # Most likely ~/.git-fusion is owned by the wrong user or has
            # too-restrictive mode bits; tell the admin what to check.
            LOG.exception("unable to initialize Git Fusion")
            sys.stderr.write(_("File permissions error, please check ownership"
                               " and mode of ~/.git-fusion directory.\n"))
            sys.exit(os.EX_NOPERM)
    return 0
def main():
    """create Perforce user and client for Git Fusion"""
    p4gf_version.log_version()
    try:
        log_l10n()
        p4gf_version.version_check()
    except Exception as exc:  # pylint: disable=W0703
        # Locale/version problems are fatal: report the first message and quit.
        sys.stderr.write(exc.args[0] + '\n')
        sys.exit(1)

    with p4gf_create_p4.Closer():
        connection = p4gf_create_p4.create_p4()
        if not connection:
            return 2

        # Let the admin see which server and user we are operating as.
        Verbosity.report(Verbosity.INFO, "P4PORT : {}".format(connection.port))
        Verbosity.report(Verbosity.INFO, "P4USER : {}".format(connection.user))

        p4gf_util.reset_git_enviro()
        p4gf_proc.init()
        init(connection)

    return 0
def main():
    """Validate the configuration for one or more repositories.

    Each positional argument is either a path to a local config file or a
    repository name whose config is fetched from the depot. With --all,
    every known repository's config is checked instead.
    """
    # pylint:disable=too-many-branches
    desc = _("Report on the validity of a repository configuration.")
    parser = p4gf_util.create_arg_parser(desc)
    parser.add_argument('-a', '--all', action='store_true',
                        help=_('process all known Git Fusion repositories'))
    parser.add_argument(NTR('repos'), metavar=NTR('repo'), nargs='*',
                        help=_('name of repository or file to be validated'))
    args = parser.parse_args()

    # Check that either --all, or 'repos' was specified, but not both.
    if not args.all and len(args.repos) == 0:
        sys.stderr.write(_('Missing repo names; try adding --all option.\n'))
        sys.exit(2)
    if args.all and len(args.repos) > 0:
        sys.stderr.write(_('Ambiguous arguments. Choose --all or a repo name.\n'))
        sys.exit(2)

    with p4gf_create_p4.Closer():
        p4 = p4gf_create_p4.create_p4_temp_client()
        if not p4:
            sys.exit(2)
        # Sanity check the connection (e.g. user logged in?) before proceeding.
        try:
            p4.fetch_client()
        except P4.P4Exception as e:
            sys.stderr.write(
                _('P4 exception occurred: {exception}').format(exception=e))
            sys.exit(1)
        p4gf_branch.init_case_handling(p4)

        if args.all:
            repos = p4gf_util.repo_config_list(p4)
            if len(repos) == 0:
                print(_('No Git Fusion repositories found, nothing to do.'))
                sys.exit(0)
        else:
            repos = args.repos

        for repo in repos:
            if os.path.exists(repo):
                # Argument names an on-disk file: validate the local copy.
                print(_("Processing file {repo_name}...").format(repo_name=repo))
                try:
                    config = p4gf_config.RepoConfig.from_local_file(repo, p4, repo)
                except (p4gf_config.ConfigLoadError,
                        p4gf_config.ConfigParseError) as e:
                    # BUG FIX: write() takes a single string; the original
                    # passed ("{}\n", e) which raised TypeError and masked
                    # the real error. Also 'continue' so we never reach the
                    # validator with an unbound (or previous repo's) config.
                    sys.stderr.write("{}\n".format(e))
                    continue
            else:
                # Argument names a repo: fetch its config from the depot.
                repo_name = p4gf_translate.TranslateReponame.git_to_repo(repo)
                print(_("Processing repository {repo_name}...").format(
                    repo_name=repo_name))
                try:
                    config = p4gf_config.RepoConfig.from_depot_file(repo_name, p4)
                except p4gf_config.ConfigLoadError as err:
                    # Same write()/unbound-config fix as the local-file branch.
                    sys.stderr.write("{}\n".format(err))
                    continue
            if Validator(config, p4).is_valid():
                print(_("ok"))
            print("")
def main():
    """Set up repo for a view.

    Parses arguments, acquires the repo lock, and runs the full repo
    initialization (optionally cloning from Perforce). Exits non-zero on
    deprecated-option use or Perforce errors.
    """
    p4gf_util.has_server_id_or_exit()
    args = _parse_argv()
    p4gf_version_3.log_version_extended(include_checksum=True)
    log_l10n()
    if args.enablemismatchedrhs:
        # Git Fusion should never modify the customer's config file, and
        # use of this option resulted in the config file losing all of the
        # comments and formatting the customer had put in place.
        sys.stderr.write(
            _('The --enablemismatchedrhs option is deprecated,'
              ' please use enable-mismatched-rhs config file'
              ' option instead.\n'))
        sys.exit(1)
    repo_name_p4client = None
    if args.p4client:
        repo_name_p4client = p4gf_util.argv_to_repo_name(args.p4client)
    repo_name = _argv_to_repo_name(args.repo_name)
    p4gf_util.reset_git_enviro()
    p4 = p4gf_create_p4.create_p4_temp_client()
    if not p4:
        raise RuntimeError(_('error connecting to Perforce'))
    LOG.debug("connected to P4 at %s", p4.port)
    p4gf_proc.init()
    try:
        # ExitStack keeps the connection closer, repo lock, and context
        # nested in acquisition order without deeply indented code.
        with ExitStack() as stack:
            stack.enter_context(p4gf_create_p4.Closer())
            p4gf_version_3.version_check()
            p4gf_branch.init_case_handling(p4)
            repo_lock = p4gf_lock.RepoLock(p4, repo_name)
            stack.enter_context(repo_lock)
            ctx = p4gf_context.create_context(repo_name)
            ctx.p4gf = p4
            ctx.repo_lock = repo_lock
            # Configure the initializer from the command line before
            # entering the context (which connects to Perforce).
            initer = InitRepo(p4, repo_lock).set_repo_name(repo_name)
            initer.context = ctx
            initer.set_config_file_path(args.config)
            initer.set_charset(args.charset)
            initer.set_noclone(args.noclone)
            initer.set_start(args.start)
            stack.enter_context(ctx)
            initer.full_init(repo_name_p4client)
    except P4.P4Exception as e:
        _print_stderr(_('Error occurred: {exception}').format(exception=e))
        sys.exit(1)
def main():
    """Do the post-receive work."""
    if any(flag in sys.argv for flag in ('-?', '-h', '--help')):
        print(_('Git Fusion post-receive hook.'))
        return 2
    p4gf_version_3.print_and_exit_if_argv()

    # If P4GF_FORK_PUSH is not set, then this is not the genuine push
    # payload, merely a preliminary request made by the HTTP client; for
    # SSH it is always set. Nothing to do in that case.
    if p4gf_const.P4GF_FORK_PUSH not in os.environ:
        return 0

    # Initialize the process launcher now to avoid p4gf_proc.init()'s
    # warning when ps is invoked inside the lock acquisition code.
    p4gf_proc.init()

    # Mark the lock as about to be acquired by the upcoming background
    # process; the main server process will wait until that background
    # process completes the acquisition.
    LOG.debug('main() setting up forked process')
    with p4gf_create_p4.Closer():
        connection = p4gf_create_p4.create_p4_temp_client()
        repo = p4gf_path.cwd_to_repo_name()
        p4gf_log.configure_for_repo(repo)
        fork_group = os.environ[p4gf_const.P4GF_FORK_PUSH]
        with p4gf_lock.RepoLock(connection, repo,
                                group_id=fork_group) as repo_lock:
            repo_lock.set_acquire_pending()

    # Hand the remaining work off to a daemonized child now that git has
    # updated the references; this sidesteps timing issues with git.
    LOG.debug('main() starting processing in forked process')
    p4gf_proc.double_fork(functools.partial(forked_main))
    LOG.debug('main() forked process initiated')
    return 0
def main():
    """Copy the SSH keys from Perforce to the authorized keys file.

    Returns 2 when no Perforce connection can be made; exits 1 on P4
    errors during the initial sanity check.
    """
    p4gf_util.has_server_id_or_exit()
    log_l10n()
    # Set up argument parsing.
    parser = p4gf_util.create_arg_parser(
        _("""Copies SSH public keys from Perforce depot to current user's directory. This script assumes OpenSSH is the SSH implementation in use, and as such, writes to 'authorized_keys' in the ~/.ssh directory. If --ssh2 is used, then writes to 'authorization' in the ~/.ssh2 directory, writing the SSH2 formatted public keys in the 'keys' directory under ~/.ssh2, using the Perforce user names to avoid name collisions. If public keys read from the depot are the wrong format (OpenSSH vs. SSH2), they will be converted when written to disk. """))
    parser.add_argument('-r', '--rebuild', action=NTR('store_true'),
                        help=_('rebuild keys file'))
    parser.add_argument('-v', '--verbose', action=NTR('store_true'),
                        help=_('print details of update process'))
    parser.add_argument('-2', '--ssh2', action=NTR('store_true'),
                        help=_("produce 'SSH2' output"))
    parser.add_argument('-f', '--file', help=_('path to authorized keys file'))
    args = parser.parse_args()

    # Since this script is called often (by cron), try to reduce the lines
    # that appear in the log by raising the log level for the p4gf_create_p4
    # module.
    logging.getLogger('p4gf_create_p4').setLevel('WARN')

    with p4gf_create_p4.Closer():
        p4 = p4gf_create_p4.create_p4_temp_client()
        if not p4:
            return 2
        # Sanity check the connection (e.g. user logged in?) before proceeding.
        try:
            p4.fetch_client()
        except P4Exception as e:
            _print_warn(_('P4 exception occurred: {error}').format(error=e),
                        error=True)
            sys.exit(1)

        # Update global settings based on command line arguments.
        # These module-level globals are read by the helper functions
        # that write the key files.
        global Verbose
        Verbose = args.verbose
        global Ssh2
        Ssh2 = args.ssh2
        global SshKeysFile
        SshKeysFile = args.file
        if not SshKeysFile:
            # Default location depends on the OpenSSH-vs-SSH2 flavor.
            SshKeysFile = NTR('~/.ssh2/authorization') if Ssh2 else NTR(
                '~/.ssh/authorized_keys')
        if SshKeysFile[0] == '~':
            SshKeysFile = os.path.expanduser(SshKeysFile)
        global SshDirectory
        SshDirectory = os.path.dirname(SshKeysFile)

        # Update the keys file based either on latest changes or existing files.
        try:
            if args.rebuild:
                rebuild_all_keys(p4)
            else:
                update_by_changes(p4)
        except P4Exception as e:
            _print_warn(_('P4 exception occurred: {error}').format(error=e),
                        error=True)
'View': view } p4gf_util.ensure_spec_values(p4, 'client', client_name, spec) except P4.P4Exception as e: sys.stderr.write("P4 exception occurred: {}".format(e)) sys.exit(1) try: convert(args, p4) except P4.P4Exception as e: sys.stderr.write("{}\n".format(e)) sys.exit(1) if not args.convert: print("This was report mode. Use -y to make changes.") else: print("Commands run were logged to p4gf_convert_v12_2.log.") if args.delete: print("You must now run: p4 delete //.git-fusion/objects/...") print(" p4 submit") print(" Use a client which has this location in its view") LOG_FILE.write( "Need to run: p4 delete //.git-fusion/objects/...\n") LOG_FILE.write(" p4 submit\n") LOG_FILE.close() if __name__ == "__main__": with p4gf_create_p4.Closer(): main()
def main():
    """
    Process command line arguments and call functions to do the real work
    of cleaning up the Git mirror and Perforce workspaces.
    """
    log_l10n()
    p4gf_util.has_server_id_or_exit()
    # pylint:disable=C0301
    # Line too long? Too bad. Keep tabular code tabular.
    # Set up argument parsing.
    parser = p4gf_util.create_arg_parser(
        _('Deletes Git Fusion repositories and workspaces.'))
    parser.add_argument('-a', '--all', action='store_true',
                        help=_('remove all known Git mirrors'))
    parser.add_argument('-y', '--delete', action='store_true',
                        help=_('perform the deletion'))
    parser.add_argument('-v', '--verbose', action='store_true',
                        help=_('print details of deletion process'))
    parser.add_argument('-N', '--no-obliterate', action='store_true',
                        help=_('with the --all option, do not obliterate object cache'))
    parser.add_argument(NTR('views'), metavar=NTR('view'), nargs='*',
                        help=_('name of view to be deleted'))
    args = parser.parse_args()
    # pylint:enable=C0301

    # Check that either --all, or 'views' was specified.
    if not args.all and len(args.views) == 0:
        sys.stderr.write(_('Missing view names; try adding --all option.\n'))
        sys.exit(2)
    # Check that --no-obliterate occurs only with --all
    if not args.all and args.no_obliterate:
        sys.stderr.write(
            _('--no-obliterate permitted only with the --all option.\n'))
        sys.exit(2)

    with p4gf_create_p4.Closer():
        p4 = p4gf_create_p4.create_p4(
            client=p4gf_util.get_object_client_name())
        if not p4:
            return 2
        # Sanity check the connection (e.g. user logged in?) before proceeding.
        try:
            p4.fetch_client()
        except P4.P4Exception as e:
            sys.stderr.write(_('P4 exception occurred: {}').format(e))
            sys.exit(1)

        metrics = DeletionMetrics()
        if args.all:
            try:
                delete_all(args, p4, metrics)
            except P4.P4Exception as e:
                sys.stderr.write("{}\n".format(e))
                sys.exit(1)
        else:
            # Delete the client(s) for the named view(s).
            # Errors on one view are reported but do not stop the loop.
            for git_view in args.views:
                view_name = p4gf_translate.TranslateReponame.git_to_repo(
                    git_view)
                client_name = p4gf_util.view_to_client_name(view_name)
                try:
                    with p4gf_lock.view_lock(p4, view_name, -1):
                        delete_client(args, p4, client_name, metrics)
                except P4.P4Exception as e:
                    sys.stderr.write("{}\n".format(e))

        # Summarize: either a dry-run notice or the deletion totals.
        if not args.delete:
            print(_('This was report mode. Use -y to make changes.'))
        else:
            print(
                _('Deleted {:d} files, {:d} groups, {:d} clients, and {:d} counters.'
                  ).format(metrics.files, metrics.groups, metrics.clients,
                           metrics.counters))
            if args.all:
                print(_('Successfully deleted all repos\n'))
            else:
                print(
                    _('Successfully deleted repos:\n{}').format("\n".join(
                        args.views)))
def main():
    """Copy incoming Git commits to Perforce changelists.

    This is the pre-receive hook entry point: it reads the pushed ref
    tuples from stdin, assigns commits to branches, and copies pushes /
    processes deletes and tags. Returns 0 on success, non-zero otherwise.
    """
    _log_environ(os.environ)
    log_l10n()
    LOG.debug("main() running, pid={}".format(os.getpid()))
    p4gf_proc.install_stack_dumper()
    for h in ['-?', '-h', '--help']:
        if h in sys.argv:
            print(_('Git Fusion pre-receive hook.'))
            return 2
    with p4gf_create_p4.Closer():
        p4gf_version.print_and_exit_if_argv()
        p4 = p4gf_create_p4.create_p4()
        if not p4:
            return 2
        p4gf_util.reset_git_enviro(p4)
        view_name = p4gf_util.cwd_to_view_name()
        view_lock = p4gf_lock.view_lock_heartbeat_only(p4, view_name)
        with p4gf_context.create_context(view_name, view_lock) as ctx:
            # this script is called by git while a context and temp clients
            # are already in use. Don't sabotage that context by deleting
            # the temp clients from here.
            ctx.cleanup_client_pool = False

            # Read each input line (usually only one unless pushing multiple
            # branches) and convert to a list of "tuples" from which we can
            # assign branches. A new_sha1 of all zeros marks a ref delete.
            prl = []
            delete_prl = []
            while True:
                line = sys.stdin.readline()
                if not line:
                    break
                LOG.debug('main() raw pre-receive-tuple: {}'.format(line))
                prt = PreReceiveTuple.from_line(line)
                if int(prt.new_sha1, 16) == 0:
                    delete_prl.append(prt)
                else:
                    prl.append(prt)

            # Initialize the external process launcher early, before
            # allocating lots of memory, and just after all other
            # conditions have been checked.
            p4gf_proc.init()
            # Prepare for possible spawn of GitMirror worker process by
            # forking now before allocating lots of memory.
            p4gf_gitmirror.setup_spawn(view_name)
            # Kick off garbage collection debugging, if enabled.
            p4gf_gc.init_gc()

            # Reject attempt to delete any fully populated branch defined in
            # p4gf_config. Git Fusion never edits p4gf_config, so Git Fusion
            # never deletes fully populated branches. Edit p4gf_config
            # yourself if you want to remove a branch from history.
            for prt in delete_prl:
                git_branch_name = prt.git_branch_name()
                if not git_branch_name:
                    continue
                branch = ctx.git_branch_name_to_branch(git_branch_name)
                if not branch:
                    LOG.debug('attempt to delete branch {} which does not exist'
                              .format(git_branch_name))
                    break
                if not branch.is_lightweight:
                    raise RuntimeError(
                        _('Cannot delete branches defined in'
                          ' Git Fusion repo config file: {}').format(
                              git_branch_name))

            # Swarm review creates new Git merge commits. Must occur before
            # branch assignment so that the review reference can be moved to
            # the new merge commit.
            gsreview_coll = GSReviewCollection.from_prl(ctx, prl)
            if gsreview_coll:
                gsreview_coll.pre_copy_to_p4(prl)

            # Assign branches to each of the received commits for pushed
            # branches - skip deletes.
            if prl:
                assigner = Assigner(ctx.branch_dict(), prl, ctx)
                assigner.assign()

            # For each of the heads being pushed, copy their commits to
            # Perforce.
            if prl:
                try:
                    err = _copy(ctx, prl=prl, assigner=assigner,
                                gsreview_coll=gsreview_coll)  # branch push
                    if err:
                        return _clean_exit(err)
                except RuntimeError as err:
                    # Log the error. The return call below eats the error
                    # and stack trace.
                    LOG.exception(NTR("_copy() raised exception."))
                    return _clean_exit(err)

            # For each of the heads being deleted, remove the branch
            # definition from p4gf_config2
            if delete_prl:
                p4gf_call_git.prohibit_interrupt(view_name, os.getpid())
                try:
                    err = _delete(ctx, delete_prl)  # branch delete
                    if err:
                        return _clean_exit(err)
                except RuntimeError as err:
                    # Log the error. The return call below eats the error
                    # and stack trace.
                    LOG.exception(NTR("_delete() raised exception."))
                    return _clean_exit(err)

            # Process all of the tags at once.
            err = p4gf_tag.process_tags(ctx, prl + delete_prl)
            if err:
                return _clean_exit(err)

            # If we have any new Git Swarm review references that
            # auth/http_server must rename, send a list of such
            # references across process boundary, via a file.
            if gsreview_coll:
                gsreview_coll.to_file()

            p4gf_gc.process_garbage("at end of pre_receive_hook")
            p4gf_gc.report_objects(NTR("at end of pre_receive_hook"))
            return 0
def main():
    """Update the disk usage p4 keys for one or more repositories.

    Without -y/--reset this only displays the current totals; with it,
    Perforce changes are copied down and the keys recomputed.
    """
    desc = _("Set/reset the total and pending p4 keys.")
    epilog = _("Without the -y/--reset option, only displays current values.")
    parser = p4gf_util.create_arg_parser(desc, epilog=epilog)
    parser.add_argument('-a', '--all', action='store_true',
                        help=_('process all known Git Fusion repositories'))
    parser.add_argument('-y', '--reset', action='store_true',
                        help=_('perform the reset of the p4 keys'))
    parser.add_argument(NTR('repos'), metavar=NTR('repo'), nargs='*',
                        help=_('name of repository to be updated'))
    args = parser.parse_args()

    # Check that either --all, or 'repos' was specified.
    if not args.all and len(args.repos) == 0:
        sys.stderr.write(_('Missing repo names; try adding --all option.\n'))
        sys.exit(2)
    if args.all and len(args.repos) > 0:
        sys.stderr.write(_('Ambiguous arguments. Choose --all or a repo name.\n'))
        sys.exit(2)

    with p4gf_create_p4.Closer():
        p4 = p4gf_create_p4.create_p4_temp_client()
        if not p4:
            sys.exit(2)
        # Sanity check the connection (e.g. user logged in?) before proceeding.
        try:
            p4.fetch_client()
        except P4.P4Exception as e:
            sys.stderr.write(
                _('P4 exception occurred: {exception}').format(exception=e))
            sys.exit(1)

        if args.all:
            repos = p4gf_util.repo_config_list(p4)
            if len(repos) == 0:
                print(_('No Git Fusion repositories found, nothing to do.'))
                sys.exit(0)
        else:
            repos = args.repos
        # Each repo context below makes its own connection; release ours.
        p4gf_create_p4.p4_disconnect(p4)

        for repo in repos:
            repo_name = p4gf_translate.TranslateReponame.git_to_repo(repo)
            print(_("Processing repository {repo_name}... ").format(
                repo_name=repo_name), end='')
            ctx = p4gf_context.create_context(repo_name)
            with ExitStack() as stack:
                stack.enter_context(ctx)
                # Non-blocking: skip repos that are currently locked rather
                # than waiting on them.
                ctx.repo_lock = p4gf_lock.RepoLock(ctx.p4gf, repo_name,
                                                   blocking=False)
                stack.enter_context(ctx.repo_lock)
                limits = PushLimits(ctx)
                if args.reset:
                    # Copy any Perforce changes down to this Git repository.
                    p4gf_copy_p2g.copy_p2g_ctx(ctx)
                    # Attempt to trim any unreferenced objects.
                    p4gf_proc.popen(['git', '--git-dir=' + ctx.repo.path,
                                     'prune'])
                    limits.post_copy()
                # Display current key values and disk usage.
                pending_mb = limits.get_pending_mb()
                total_mb = limits.get_total_mb()
                current_mb = limits.space_total
                print(_('{total_mb:.2f}M total, {pending_mb:.2f}M pending, '
                        '{current_mb:.2f}M current')
                      .format(total_mb=total_mb, pending_mb=pending_mb,
                              current_mb=current_mb), end='')
            print("")
def main(poll_only=False):
    """set up repo for a view

    view_name_git is the untranslated repo name
    view_name is the translated repo name

    :param poll_only: when True, only sync the repo from Perforce and skip
        authorization and the git command invocation.
    :return: process exit code (0 on success).
    """
    p4gf_proc.install_stack_dumper()
    _log_environ(os.environ)
    with p4gf_server_common.ExceptionAuditLogger()\
            , p4gf_create_p4.Closer():
        LOG.debug(p4gf_log.memory_usage())
        start_time = time.time()
        args = parse_args(sys.argv[1:])
        if not args:
            return 1
        # 'upload' (git-upload-pack) means a fetch/clone; anything else is
        # a push.
        is_push = 'upload' not in args.command[0]

        # Record the p4 user in environment. We use environment to pass to
        # git-invoked hook. We don't have to set ctx.authenticated_p4user
        # because Context.__init__() reads it from environment, which we
        # set here.
        os.environ[p4gf_const.P4GF_AUTH_P4USER] = args.user

        # view_name_git    is the untranslated repo name
        # view_name        is the translated repo name

        # print "args={}".format(args)
        view_name_git = args.options[-1]
        # translate '/' ':' ' ' .. etc .. for internal view_name
        view_name = p4gf_translate.TranslateReponame.git_to_repo(view_name_git)
        LOG.debug("public view_name: {0}   internal view_name: {1}".format(
            view_name_git, view_name))

        p4gf_util.reset_git_enviro()
        p4 = p4gf_create_p4.create_p4()
        if not p4:
            return 2
        LOG.debug("connected to P4: %s", p4)

        p4gf_server_common.check_readiness(p4)
        p4gf_server_common.check_lock_perm(p4)
        if not p4gf_server_common.check_protects(p4):
            p4gf_server_common.raise_p4gf_perm()
        if p4gf_server_common.run_special_command(view_name, p4, args.user):
            return 0

        # Initialize the external process launcher early, before allocating
        # lots of memory, and just after all other conditions have been
        # checked.
        p4gf_proc.init()
        # Prepare for possible spawn of GitMirror worker process by forking
        # now before allocating lots of memory.
        p4gf_gitmirror.setup_spawn(view_name)
        # Kick off garbage collection debugging, if enabled.
        p4gf_gc.init_gc()

        if poll_only:
            view_perm = None
        else:
            # Go no further, create NOTHING, if user not authorized.
            # We use the translated internal view name here for perm
            # authorization
            required_perm = p4gf_server_common.COMMAND_TO_PERM[args.command[0]]
            view_perm = p4gf_group.ViewPerm.for_user_and_view(
                p4, args.user, view_name, required_perm)
            p4gf_server_common.check_authorization(
                p4, view_perm, args.user, args.command[0], view_name)

        # Create Git Fusion server depot, user, config. NOPs if already
        # created.
        p4gf_init.init(p4)
        write_motd()

        # view_name is the internal view_name (identical when notExist
        # special chars)
        before_lock_time = time.time()
        with p4gf_lock.view_lock(p4, view_name) as view_lock:
            after_lock_time = time.time()

            # Create Git Fusion per-repo client view mapping and config.
            #
            # NOPs if already created.
            # Create the empty directory that will hold the git repo.
            init_repo_status = p4gf_init_repo.init_repo(
                p4, view_name, view_lock)
            if init_repo_status == p4gf_init_repo.INIT_REPO_OK:
                repo_created = True
            elif init_repo_status == p4gf_init_repo.INIT_REPO_EXISTS:
                repo_created = False
            else:
                return 1

            # If authorization came from default, not explicit group
            # membership, copy that authorization to a group now. Could
            # not do this until after p4gf_init_repo() has a chance to
            # create not-yet-existing groups.
            if view_perm:
                view_perm.write_if(p4)

            # Now that we have valid git-fusion-user and
            # git-fusion-<view> client, replace our temporary P4
            # connection with a more permanent Context, shared for the
            # remainder of this process.
            with p4gf_context.create_context(view_name, view_lock) as ctx:
                LOG.debug("reconnected to P4, p4gf=%s", ctx.p4gf)

                # Find directory paths to feed to git.
                ctx.log_context()

                # cd into the work directory. Not all git functions react
                # well to --work-tree=xxxx.
                cwd = os.getcwd()
                os.chdir(ctx.view_dirs.GIT_WORK_TREE)

                # Only copy from Perforce to Git if no other process is
                # cloning from this Git repo right now.
                shared_in_progress = p4gf_lock.shared_host_view_lock_exists(
                    ctx.p4, view_name)
                if not shared_in_progress:
                    # Copy any recent changes from Perforce to Git.
                    try:
                        LOG.debug(
                            "bare: No git-upload-pack in progress, force non-bare"
                            " before update Git from Perforce.")
                        p4gf_git.set_bare(False)
                        p4gf_copy_p2g.copy_p2g_ctx(ctx)
                        p4gf_init_repo.process_imports(ctx)

                        # Now is also an appropriate time to clear out any
                        # stale Git Swarm reviews. We're pre-pull, pre-push,
                        # time when we've got exclusive write access to the
                        # Git repo,
                        GSReviewCollection.delete_refs_for_closed_reviews(ctx)

                    except p4gf_lock.LockCanceled as lc:
                        LOG.warning(str(lc))

                    except:
                        # Dump failure to log, BEFORE cleanup, just in case
                        # cleanup ALSO fails and throws its own error (which
                        # happens if we're out of memory).
                        LOG.error(traceback.format_exc())

                        if repo_created:
                            # Return to the original working directory to
                            # allow the config code to call os.getcwd()
                            # without dying, since we are about to delete
                            # the current working directory.
                            os.chdir(cwd)
                            p4gf_server_common.cleanup_client(ctx, view_name)
                        raise

                if poll_only:
                    code = os.EX_OK
                else:
                    git_caller = functools.partial(_call_git, args, ctx)
                    try:
                        # Deep in call_git(), we grab an 'p4 reviews' lock on
                        # ctx.clientmap's LHS. Switch that clientmap to our
                        # full union view to prevent simultaneous 'git push'es
                        # from clobbering each other in some shared depot
                        # branch. Must include all lightweight branches, too.
                        ctx.switch_client_view_to_union()
                        exclusive = 'upload' not in args.command[0]
                        code = p4gf_call_git.call_git(
                            git_caller, ctx, view_name, view_lock, exclusive)
                        if is_push:
                            GSReviewCollection.post_push(ctx)
                    except p4gf_atomic_lock.LockConflict as lc:
                        sys.stderr.write("{}\n".format(lc))
                        code = os.EX_SOFTWARE

        p4gf_gc.process_garbage(NTR('at end of auth_server'))
        if LOG.isEnabledFor(logging.DEBUG):
            end_time = time.time()
            frm = NTR("Runtime: preparation {} ms, lock acquisition {} ms,"
                      " processing {} ms")
            LOG.debug(frm.format(before_lock_time - start_time,
                                 after_lock_time - before_lock_time,
                                 end_time - after_lock_time))
        return code
def main():
    """set up repo for a view

    Returns one of the INIT_REPO_* status codes.
    """
    p4gf_util.has_server_id_or_exit()
    args = _parse_argv()
    p4gf_version.log_version()
    log_l10n()
    # !!! view_name_git    the untranslated repo name
    # !!! view_name        the translated repo name
    view_name_p4client = None
    if args.p4client:
        view_name_p4client = p4gf_util.argv_to_view_name(args.p4client)
    view_name_git = p4gf_util.argv_to_view_name(args.view)
    # strip leading '/' to conform with p4gf_auth_server behavior
    if view_name_git[0] == '/':
        view_name_git = view_name_git[1:]
    view_name = p4gf_translate.TranslateReponame.git_to_repo(view_name_git)
    p4gf_gitmirror.setup_spawn(view_name)
    p4gf_util.reset_git_enviro()

    p4 = p4gf_create_p4.create_p4()
    if not p4:
        return INIT_REPO_NOVIEW
    LOG.debug("connected to P4 at %s", p4.port)
    p4gf_proc.init()

    try:
        with p4gf_create_p4.Closer():
            p4gf_version.version_check()
            with p4gf_lock.view_lock(p4, view_name) as view_lock:
                # Ensure we have a sane environment.
                p4gf_init.init(p4)
                # Now that we can trust that the git-fusion--p4 client
                # exists, switch to that. Change takes effect immediately,
                # don't need to re-run p4.connect().
                p4.client = p4gf_util.get_object_client_name()

                # If local config file specified, validate it and store in
                # Perforce now. Even if client exists (aka repo was already
                # inited), this is one way for an admin to modify an
                # existing repo's config.
                if args.config:
                    if not os.path.exists(args.config):
                        _print_stderr(
                            _("error: missing config file '{}'").format(
                                args.config))
                        return INIT_REPO_CONFIG_FILE_MISSING
                    with Validator.from_local_file(
                            view_name, p4, args.config) as validator:
                        if not validator.is_valid(args.enablemismatchedrhs):
                            return INIT_REPO_CONFIG_FILE_BAD
                    p4gf_config.create_file_repo_with_contents(
                        p4, view_name, args.config)
                elif args.charset and not Validator.valid_charset(
                        args.charset):
                    _print_stderr(
                        _("error: invalid charset: {}").format(args.charset))
                    return INIT_REPO_BAD_CHARSET

                # Initialize the repository if necessary.
                print(_("Initializing '{}'...").format(view_name))
                r = init_repo(p4, view_name, view_lock, args.charset,
                              args.enablemismatchedrhs, view_name_p4client)
                if r > INIT_REPO_OK:
                    return r
                print(_("Initialization complete."))

                # Write --enablemismatchedrhs to config file
                if args.enablemismatchedrhs:
                    config = p4gf_config.read_repo(p4, view_name)
                    config[p4gf_config.SECTION_REPO]\
                        [p4gf_config.KEY_ENABLE_MISMATCHED_RHS] = str(True)
                    p4gf_config.write_repo_if(
                        p4, p4.fetch_client(), view_name, config)

                # Populate the repo from Perforce unless --noclone.
                if not args.noclone:
                    return populate_repo(view_name, view_lock, args.start)
    except P4.P4Exception as e:
        _print_stderr(_('Error occurred: {}').format(e))

    return INIT_REPO_EXISTS
def do_it(self):
    """Perform all of the setup, processing, and clean up.

    :rtype: int
    :return: status code for the process upon exit.
    """
    p4gf_util.log_environ(LOG, os.environ, self.label)
    log_l10n()
    p4gf_proc.install_stack_dumper()
    # Kick off garbage collection debugging, if enabled.
    p4gf_mem_gc.init_gc()

    # Use ExitStack to avoid deeply nested code.
    with ExitStack() as stack:
        stack.enter_context(p4gf_create_p4.Closer())
        p4 = p4gf_create_p4.create_p4_temp_client()
        if not p4:
            return 2
        repo_name = p4gf_path.cwd_to_repo_name()
        p4gf_util.reset_git_enviro()

        # Initialize the external process launcher early, before
        # allocating lots of memory, and just after all other
        # conditions have been checked.
        p4gf_proc.init()

        # Assume that something bad will happen (especially with preflight).
        exit_code = os.EX_SOFTWARE
        try:
            p4gf_log.configure_for_repo(repo_name)
            gid = os.environ[p4gf_const.P4GF_FORK_PUSH]
            self.before_p4key_lock(repo_name)
            with p4gf_lock.RepoLock(p4, repo_name, group_id=gid) as repo_lock:
                # Work to be done with the p4key lock...
                self.context = p4gf_context.create_context(repo_name)
                self.context.p4gf = p4
                self.context.repo_lock = repo_lock
                self.context.foruser = os.getenv(p4gf_const.P4GF_FORUSER)
                stack.enter_context(self.context)
                self.before()
                exit_code = self.process()
            if self.after_requires_write_lock():
                # Work to be done without the p4key lock, but with the
                # write lock. Note that we release the p4key lock
                # before acquiring the write lock to avoid deadlock
                # with the foreground process, which always gets the
                # repo read/write lock _before_ acquiring the p4key
                # lock. Hence all this complication with the locks.
                with p4gf_git_repo_lock.write_lock(repo_name):
                    self.after(exit_code)
            else:
                # The after() method does not need a write lock...
                self.after(exit_code)
        finally:
            # Always run subclass cleanup and stop the process launcher,
            # even when before()/process()/after() raise.
            self.cleanup()
            p4gf_proc.stop()

    # Random tasks after all of the locks have been released.
    msg = NTR("at end of {hook}").format(hook=self.label)
    p4gf_mem_gc.process_garbage(msg)
    p4gf_mem_gc.report_objects(msg)
    return exit_code
def main():
    """Process command line arguments and call functions to do the real work
    of cleaning up the Git mirror and Perforce workspaces.
    """
    # pylint:disable=too-many-branches, too-many-statements
    log_l10n()

    # Set up argument parsing.
    desc = _("""Deletes Git Fusion repositories and workspaces. When you include the -a or --all option, Git Fusion finds and deletes the following for all repos on the current server disregarding specified views: 1) All git-fusion-view clients. 2) Client git-fusion--p4 workspace files. 3) Objects in //.git-fusion/objects/...
""")
    epilog = _("""It is recommended to run 'p4gf_delete_repo.py' without the '-y' flag to preview changes that will be made to the depot before using the '-y' flag for permanent removal. Use -a or --all to permanently delete all repo data for all repos on the Perforce server; be aware that this may take some time, depending on the number and size of the objects. Use -N, --no-obliterate to quickly delete most of the repo's data and continue working. This minimizes the impact to server performance.
""")
    parser = p4gf_util.create_arg_parser(desc, epilog=epilog)
    parser.add_argument(
        '-a', '--all', action='store_true',
        help=_('remove all known Git mirrors on the current server'))
    parser.add_argument('-y', '--delete', action='store_true',
                        help=_('perform the deletion'))
    parser.add_argument('-v', '--verbose', action='store_true',
                        help=_('print details of deletion process'))
    parser.add_argument(
        '-N', '--no-obliterate', action='store_true',
        help=_('with the --all option, do not obliterate object cache'))
    parser.add_argument(NTR('views'), metavar=NTR('view'), nargs='*',
                        help=_('name of view to be deleted'))
    args = parser.parse_args()
    p4gf_util.has_server_id_or_exit()

    # Check that either --all, or 'views' was specified.
    if not args.all and len(args.views) == 0:
        sys.stderr.write(_('Missing view names; try adding --all option.\n'))
        sys.exit(2)
    if args.all and len(args.views) > 0:
        sys.stderr.write(
            _('Ambiguous arguments. Choose --all or a view name.\n'))
        sys.exit(2)
    # Check that --no-obliterate occurs only with --all
    if not args.all and args.no_obliterate:
        sys.stderr.write(
            _('--no-obliterate permitted only with the --all option.\n'))
        sys.exit(2)

    with p4gf_create_p4.Closer():
        p4 = p4gf_create_p4.create_p4_temp_client()
        if not p4:
            return 2
        # Sanity check the connection (e.g. user logged in?) before proceeding.
        p4gf_branch.init_case_handling(p4)
        try:
            p4.fetch_client()
        except P4.P4Exception as e:
            sys.stderr.write(
                _('P4 exception occurred: {exception}').format(exception=e))
            sys.exit(1)

        metrics = DeletionMetrics()
        if args.all:
            try:
                # READ_ONLY instances only clean local artifacts; they must
                # not write to the shared depot.
                if p4gf_const.READ_ONLY:
                    delete_all_local(args, p4, metrics)
                else:
                    delete_all(args, p4, metrics)
            except (p4gf_lock.LockBusy, P4.P4Exception) as e:
                sys.stderr.write("{exception}\n".format(exception=e))
                sys.exit(1)
        else:
            # Delete the client(s) for the named view(s).
            for git_view in args.views:
                repo_name = p4gf_translate.TranslateReponame.git_to_repo(
                    git_view)
                client_name = p4gf_util.repo_to_client_name(repo_name)
                try:
                    if p4gf_const.READ_ONLY:
                        delete_client_local(args, p4, client_name, metrics)
                    else:
                        # Non-blocking: fail fast if the repo is busy rather
                        # than deleting out from under an active operation.
                        with p4gf_lock.RepoLock(p4, repo_name, blocking=False):
                            delete_client(args, p4, client_name, metrics)
                except (p4gf_lock.LockBusy, P4.P4Exception) as e:
                    sys.stderr.write("{exception}\n".format(exception=e))
                    sys.exit(1)

        # Summarize: either a dry-run notice or the deletion totals.
        if not args.delete:
            print(_('This was report mode. Use -y to make changes.'))
        else:
            print(
                _('Deleted {num_files:d} files, {num_groups:d} groups, '
                  '{num_clients:d} clients, and {num_keys:d} p4keys.').format(
                      num_files=metrics.files, num_groups=metrics.groups,
                      num_clients=metrics.clients, num_keys=metrics.p4keys))
            if args.all:
                print(_('Successfully deleted all repos\n'))
            else:
                print(
                    _('Successfully deleted repos:\n{repos}').format(
                        repos="\n".join(args.views)))
def _wsgi_app(environ, start_response):
    """
    WSGI application to process the incoming Git client request. This is
    nearly equivalent to p4gf_auth_server.main() with the exception of
    input validation and error handling.

    :param environ: WSGI environment dict (PEP 3333) for this request.
    :param start_response: WSGI callable used to emit status and headers.
    :return: iterable of bytes for the response body (empty list when the
             Git command's output has already been streamed).
    """
    p4gf_log.record_http(environ)
    p4gf_version.log_version()
    _log_environ(environ)
    p4gf_version.version_check()
    LOG.debug("processing HTTP request, pid={}".format(os.getpid()))
    # Keep the content type to exactly 'text/plain' so there is at least
    # the remote chance that Git might show our error messages (does not
    # appear to work in practice, however).
    headers = [('Content-Type', 'text/plain')]
    encoding = sys.getfilesystemencoding()
    if encoding == 'ascii':
        # This encoding is wrong and will eventually lead to problems.
        LOG.error("Using 'ascii' file encoding will ultimately result in errors, "
                  "please set LANG/LC_ALL to 'utf-8' in web server configuration.")
        start_response(_('500 Internal Server Error'), headers)
        return [b"Filesystem encoding not set to acceptable value.\n"]
    # Sanity check the request: reject early if any required CGI/WSGI
    # variable is absent, using the status and message paired with it.
    for (name, status, msg) in _REQUIRED_HTTP_PARAMS:
        if name not in environ:
            start_response(status, headers)
            return [msg.encode('UTF-8')]
    # NOTE(review): despite the name, this holds the wsgi.input stream
    # object, not a file name; it is passed through to _call_git below.
    input_name = environ['wsgi.input']
    # Extract the view_name_git by removing the expected git request suffixes
    path_info = environ['PATH_INFO']
    git_suffixes = ['/info/refs', '/HEAD', '/git-upload-pack', '/git-receive-pack']
    path_end = len(path_info)
    for suffix in git_suffixes:
        try:
            path_end = path_info.index(suffix)
            break
        except ValueError:
            pass
    # slice away the leading slash and the trailing git request suffixes
    view_name_git = path_info[1:path_end]
    # and remove the view_name_git from the front of PATH_INFO
    environ['PATH_INFO'] = path_info[path_end:]
    LOG.debug("new PATH_INFO {0} view_name_git {1}".format(
        environ['PATH_INFO'], view_name_git))

    if not view_name_git:
        start_response(_('400 Bad Request'), headers)
        msg = _('Missing required repository name in URL\n')
        return [msg.encode('UTF-8')]
    # translate '/' ':' ' ' .. etc .. for internal view_name
    view_name = p4gf_translate.TranslateReponame.git_to_repo(view_name_git)
    LOG.debug("public view_name: {0} internal view_name: {1}".format(
        view_name_git, view_name))

    # Set up the stack of context managers that holds for the rest of the
    # request: audit logging, P4 disconnect, captured output, temp-file
    # cleanup, and mirror cleanup for this view.
    audit_logger = p4gf_server_common.ExceptionAuditLogger()
    p4_closer = p4gf_create_p4.Closer()
    sink = OutputSink()
    temp_deleter = deleting(input_name)
    mirror_closer = unmirror(view_name)
    with audit_logger   \
        , p4_closer     \
        , sink          \
        , temp_deleter  \
        , mirror_closer:
        LOG.debug(p4gf_log.memory_usage())
        start_time = time.time()
        p4gf_util.reset_git_enviro()
        p4 = p4gf_create_p4.create_p4()
        if not p4:
            start_response(_('500 Internal Server Error'), headers)
            return [b"Perforce connection failed\n"]
        LOG.debug("connected to P4: %s", p4)
        # Verify the server/user are ready before doing any real work.
        p4gf_server_common.check_readiness(p4)
        p4gf_server_common.check_lock_perm(p4)
        if not p4gf_server_common.check_protects(p4):
            p4gf_server_common.raise_p4gf_perm()
        user = environ['REMOTE_USER']
        # Special commands (e.g. info/status pseudo-repos) short-circuit the
        # normal repo processing and return whatever OutputSink captured.
        if p4gf_server_common.run_special_command(view_name, p4, user):
            start_response(_('200 OK'), headers)
            return [sink.readall()]
        command = _get_command(environ)
        if not command:
            start_response(_('400 Bad Request'), headers)
            return [b"Unrecognized service\n"]
        # Other places in the Perforce-to-Git phase will need to know the
        # name of client user, so set that here. As for Git-to-Perforce,
        # that is handled later by setting the REMOTE_USER envar. Notice
        # also that we're setting os.environ and not 'environ'.
        os.environ[p4gf_const.P4GF_AUTH_P4USER] = user
        # Likewise, some code needs a hint that the request is coming over
        # one protocol (HTTP) or the other (SSH).
        os.environ['REMOTE_ADDR'] = environ['REMOTE_ADDR']

        # Initialize the external process launcher early, before allocating lots
        # of memory, and just after all other conditions have been checked.
        p4gf_proc.init()
        # Prepare for possible spawn of GitMirror worker process by forking
        # now before allocating lots of memory.
        p4gf_gitmirror.setup_spawn(view_name)
        # Kick off garbage collection debugging, if enabled.
        p4gf_gc.init_gc()

        # Go no further, create NOTHING, if user not authorized.
        # We use the translated internal view name here for perm authorization
        required_perm = p4gf_server_common.COMMAND_TO_PERM[command]
        view_perm = p4gf_group.ViewPerm.for_user_and_view(
            p4, user, view_name, required_perm)
        try:
            p4gf_server_common.check_authorization(p4, view_perm, user, command, view_name)
        except p4gf_server_common.CommandError as ce:
            start_response(_('403 Forbidden'), headers)
            return [str(ce).encode('UTF-8')]
        # Create Git Fusion server depot, user, config. NOPs if already created.
        p4gf_init.init(p4)

        # Timed for the debug report at the end of the request.
        before_lock_time = time.time()
        with p4gf_lock.view_lock(p4, view_name) as view_lock:
            after_lock_time = time.time()

            # Create Git Fusion per-repo client view mapping and config.
            init_repo_status = p4gf_init_repo.init_repo(p4, view_name, view_lock)
            if init_repo_status == p4gf_init_repo.INIT_REPO_OK:
                repo_created = True
            elif init_repo_status == p4gf_init_repo.INIT_REPO_EXISTS:
                repo_created = False
            elif init_repo_status == p4gf_init_repo.INIT_REPO_NOVIEW:
                start_response(_('404 Not Found'), headers)
                return [sink.readall()]
            else:
                start_response(_('500 Internal Server Error'), headers)
                return [b"Repository initialization failed\n"]

            # If authorization came from default, not explicit group
            # membership, copy that authorization to a group now. Could
            # not do this until after p4gf_init_repo() has a chance to
            # create not-yet-existing groups.
            if view_perm:
                view_perm.write_if(p4)

            # Now that we have valid git-fusion-user and
            # git-fusion-<view> client, replace our temporary P4
            # connection with a more permanent Context, shared for the
            # remainder of this process.
            with p4gf_context.create_context(view_name, view_lock) as ctx:
                LOG.debug("reconnected to P4, p4gf=%s", ctx.p4gf)
                ctx.log_context()

                # cd into the work directory. Not all git functions react well
                # to --work-tree=xxxx.
                cwd = os.getcwd()
                os.chdir(ctx.view_dirs.GIT_WORK_TREE)

                # Only copy from Perforce to Git if no other process is cloning
                # from this Git repo right now.
                shared_in_progress = p4gf_lock.shared_host_view_lock_exists(ctx.p4, view_name)
                if not shared_in_progress:
                    # Copy any recent changes from Perforce to Git.
                    try:
                        LOG.debug("bare: No git-upload-pack in progress, force non-bare"
                                  " before update Git from Perforce.")
                        p4gf_git.set_bare(False)
                        p4gf_copy_p2g.copy_p2g_ctx(ctx)
                        p4gf_init_repo.process_imports(ctx)

                        # Now is also an appropriate time to clear out any stale Git
                        # Swarm reviews. We're pre-pull, pre-push, time when we've
                        # got exclusive write access to the Git repo,
                        GSReviewCollection.delete_refs_for_closed_reviews(ctx)

                    except p4gf_lock.LockCanceled as lc:
                        # A canceled lock is an expected interruption; warn
                        # and carry on rather than failing the request.
                        LOG.warning(str(lc))

                    except:
                        # Dump failure to log, BEFORE cleanup, just in case
                        # cleanup ALSO fails and throws its own error (which
                        # happens if we're out of memory).
                        # (Deliberate bare except: the exception is always
                        # re-raised below after cleanup.)
                        LOG.error(traceback.format_exc())

                        if repo_created:
                            # Return to the original working directory to allow the
                            # config code to call os.getcwd() without dying, since
                            # we are about to delete the current working directory.
                            os.chdir(cwd)
                            p4gf_server_common.cleanup_client(ctx, view_name)
                        raise

                try:
                    # Both flags derive from the same test: a fetch
                    # ('upload' in command) is shared/non-push; anything
                    # else is an exclusive push.
                    exclusive = 'upload' not in command
                    is_push = 'upload' not in command
                    git_caller = functools.partial(_call_git, input_name, environ, ctx)
                    p4gf_call_git.call_git(git_caller, ctx, view_name, view_lock, exclusive)
                    if is_push:
                        GSReviewCollection.post_push(ctx)
                except p4gf_atomic_lock.LockConflict as lc:
                    start_response(_('500 Internal Server Error'), headers)
                    return ["{}".format(lc).encode('UTF-8')]

        # NOTE(review): original indentation lost in formatting; these
        # trailing statements are placed after the view lock is released,
        # matching the timing variables they reference — confirm.
        p4gf_gc.process_garbage('at end of auth_server')
        if LOG.isEnabledFor(logging.DEBUG):
            end_time = time.time()
            frm = NTR('Runtime: preparation {} ms, lock acquisition {} ms, processing {} ms')
            LOG.debug(frm.format(before_lock_time - start_time,
                                 after_lock_time - before_lock_time,
                                 end_time - after_lock_time))
        # Git's output was already streamed by _call_git; nothing left to send.
        return []