def _create_client(p4, client_name, p4gf_dir): """Create the host-specific Perforce client to enable working with the object cache in the P4GF_DEPOT depot. """ # to prevent the mirrored git commit/tree objects from being retained in the # git-fusion workspace, set client option 'rmdir' and sync #none in p4gf_gitmirror # Assume the usual P4 client default options are being used so that # these options below differ ONLY with normdir -> rmdir # if this assumption proves troublesome, then read/write cycle will be needed. options = NTR('allwrite clobber nocompress unlocked nomodtime rmdir') view = ['//{depot}/... //{client}/...'.format(depot=p4gf_const.P4GF_DEPOT, client=client_name)] spec_created = False if not p4gf_util.spec_exists(p4, 'client', client_name): # See if the old object clients exist, in which case we will remove them. for old_client_name in [OLD_OBJECT_CLIENT, OLDER_OBJECT_CLIENT]: if p4gf_util.spec_exists(p4, 'client', old_client_name): p4.run('client', '-df', old_client_name) _info(_("Old client '{}' deleted.").format(old_client_name)) spec_created = p4gf_util.ensure_spec( p4, 'client', spec_id=client_name, values={'Host': None, 'Root': p4gf_dir, 'Description': _('Created by Perforce Git Fusion'), 'Options': options, 'View': view}) if spec_created: _info(_("Client '{}' created.").format(client_name)) if not spec_created: modified = p4gf_util.ensure_spec_values(p4, 'client', client_name, {'Root': p4gf_dir, 'View': view, 'Options': options}) if modified: _info(_("Client '{}' updated.").format(client_name)) else: _info(_("Client '{}' already exists.").format(client_name))
def main():
    """Do the thing."""
    global DEBUG, REPORT_FILE
    args = parse_argv()
    if args.debug:
        print(_("args: {} .").format(args))
    DEBUG = args.debug
    # argparse delivers repo as a one-element list; collapse to a string.
    if args.repo:
        args.repo = args.repo[0]
    # Either --repo is given, or the two positional files are required.
    if args.repo:
        REPORT_FILE = REPORT_FILE.format(repo=args.repo)
        if args.gitlogs:
            # Positional arguments may not be combined with --repo.
            print(_("You must provide either --repo <repo> OR the required positional arguments."))
            sys.exit(2)
    else:
        REPORT_FILE = REPORT_FILE.format(repo='from_files')
        if not (args.gitlogs and args.p4files):
            print(_("You must provide both gitlogs and p4files."))
            sys.exit(2)
        verify_file_args_exist(args)
    annotate(args)
def _repo_from_stream_pre(self, stream_name, handle_imports=True):
    """Create a new repo config from the named stream, with initial validation.

    Create a new Perforce client spec <client_name> using existing Perforce
    stream spec <stream_name> as a template (just use its View).
    Returns one of the INIT_REPO_* constants.
    """
    # stream_name names a stream, e.g. '//depot/stream'; repo_name is the
    # gfinternal repo name.
    if not p4gf_p4spec.spec_exists(self.p4, 'stream', stream_name):
        raise InitRepoMissingView(
            _("Stream {stream_name} does not exist.").format(
                stream_name=stream_name))
    repo_config = p4gf_config.RepoConfig.from_stream(
        self.repo_name, self.p4, stream_name)
    self.repo_config = repo_config
    if not self._validate_config(repo_config):
        raise InitRepoBadConfigFile(
            _("Invalid config file for {repo_name}").format(
                repo_name=self.repo_name))
    # Seed a new client using the stream's view as a template.
    LOG.info(
        "Git Fusion repo %s does not exist, creating from existing Perforce stream %s",
        self.repo_name, stream_name)
    self._repo_from_stream(stream_name, repo_config, handle_imports)
def _upgrade_p4gf(p4):
    """Perform upgrade from earlier versions of P4GF.

    This should be invoked using _maybe_perform_init() to avoid race
    conditions across hosts.

    :param p4: connected P4 instance running as the Git Fusion user.
    """
    # If updating from 12.2 to 13.1 we need to create global config file
    # (this does nothing if file already exists)
    c = p4gf_config.create_file_global(p4)
    if c:
        _info(_("Global config file '{}' created.")
              .format(p4gf_config.depot_path_global()))
    else:
        _info(_("Global config file '{}' already exists.")
              .format(p4gf_config.depot_path_global()))
    # Ensure the time zone name has been set, else default to something sensible.
    r = p4.run('counter', '-u', p4gf_const.P4GF_COUNTER_TIME_ZONE_NAME)
    tzname = p4gf_util.first_value_for_key(r, 'value')
    if tzname == "0" or tzname is None:
        msg = _("Counter '{}' not set, using UTC as default."
                " Change this to your Perforce server's time zone.") \
            .format(p4gf_const.P4GF_COUNTER_TIME_ZONE_NAME)
        # FIX: Logger.warn() is a deprecated alias; use warning().
        LOG.warning(msg)
        sys.stderr.write(NTR('Git Fusion: {}\n').format(msg))
        tzname = None
    else:
        # Sanity check the time zone name.
        try:
            pytz.timezone(tzname)
        except pytz.exceptions.UnknownTimeZoneError:
            LOG.warning("Time zone name '{}' unrecognized, using UTC as default".format(tzname))
            tzname = None
    if tzname is None:
        p4.run('counter', '-u', p4gf_const.P4GF_COUNTER_TIME_ZONE_NAME, 'UTC')
def _get_config(self):
    """Read or create repo_config if necessary."""
    if self.repo_config:
        return
    if self.config_file_path:
        # A local config file was supplied: validate it and store it in
        # Perforce now. Even if the client exists (aka the repo was
        # already inited), this lets an admin modify an existing repo's
        # config.
        try:
            self.repo_config = p4gf_config.RepoConfig.from_local_file(
                self.repo_name, self.p4, self.config_file_path)
        except p4gf_config.ConfigLoadError as e:
            raise InitRepoMissingConfigFile(
                _("error: {exception}").format(exception=e))
        except p4gf_config.ConfigParseError as e:
            raise InitRepoBadConfigFile(
                _("error: {exception}").format(exception=e))
        if not self._validate_config(self.repo_config):
            raise InitRepoBadConfigFile(
                _('error: invalid config file {path}').format(
                    path=self.config_file_path))
        self.repo_config.write_repo_if(self.p4)
        return
    if self.charset and not Validator.valid_charset(self.charset):
        raise InitRepoBadCharset(
            _("error: invalid charset: {charset}").format(
                charset=self.charset))
    # No local file and charset is acceptable: fall back to defaults.
    self.repo_config = p4gf_config.RepoConfig.make_default(
        self.repo_name, self.p4)
def _repo_from_template_client(self, repo_name_p4client):
    """Create a new repo configuration from a template client.

    repo_name_p4client is the p4client; repo_name is the gfinternal repo
    name. The two differ when the latter contains special characters or
    the repo was configured with the --p4client argument.
    """
    if not p4gf_p4spec.spec_exists(self.p4, 'client', repo_name_p4client):
        raise InitRepoMissingView(
            _("Template client {p4client} does not exist.").format(
                p4client=repo_name_p4client))
    template_spec = p4gf_p4spec.fetch_client(
        self.p4, repo_name_p4client, routeless=True)
    if 'Stream' in template_spec:
        # Stream clients delegate to the stream-based setup path.
        if 'StreamAtChange' in template_spec:
            raise InitRepoBadView(_("StreamAtChange not supported"))
        self._repo_from_stream_pre(template_spec['Stream'])
        return
    if 'ChangeView' in template_spec:
        raise InitRepoBadView(_("ChangeView not supported"))
    self.repo_config = p4gf_config.RepoConfig.from_template_client(
        self.repo_name, self.p4, template_spec, repo_name_p4client)
    if not self._validate_config(self.repo_config):
        raise InitRepoBadConfigFile(
            _("Invalid config file for {repo_name}").format(
                repo_name=self.repo_name))
    # Seed a new client using the view's view as a template.
    LOG.info("Git Fusion repo %s does not exist, creating from existing client %s",
             self.repo_name, repo_name_p4client)
def full_init(self, repo_name_p4client=None):
    """Initialize the repo.

    :param repo_name_p4client: optional name of a template Perforce client.
    :raises RuntimeError: if --start is not an integer, or no changes at
        or after the requested starting change exist.
    """
    # Ensure we have a sane environment.
    p4gf_init.init(self.p4)
    self._get_config()
    # Initialize the repository if necessary.
    print(
        _("Initializing '{repo_name}'...").format(
            repo_name=self.repo_name))
    self.init_repo(repo_name_p4client)
    print(_("Initialization complete."))
    if not self.noclone:
        try:
            start_at = int(self.start.lstrip('@')) if self.start else 1
            self._copy_p2g_with_start(start_at)
        except ValueError as exc:
            # FIX: chain the cause ('from exc') so the original parse
            # failure is preserved in the traceback.
            raise RuntimeError(
                _('Invalid --start value: {start}').format(
                    start=self.start)) from exc
        except IndexError as exc:
            raise RuntimeError(
                _("Could not find changes >= '{start_at}'").format(
                    start_at=start_at)) from exc
def delete_all_local(args, p4, metrics):
    """Remove "everything" as if from a read-only Git Fusion instance.

    :param args: parsed command line arguments
    :param p4: Git user's Perforce client
    :param metrics: for counting delete actions.

    Similar to deleting everything from the master server, except that
    very little is removed from the Perforce server (e.g. counters and
    files). In short, only the client and local directories are removed.
    """
    p4.user = p4gf_const.P4GF_USER
    print(_('Connected to {P4PORT}').format(P4PORT=p4.port))
    client_name = p4gf_util.get_object_client_name()
    localroot = get_p4gf_localroot(p4)
    if not args.delete:
        # Preview mode: print the commands that a real run would perform,
        # without executing any of them.
        if localroot:
            if args.no_obliterate:
                print(NTR('p4 sync -f {}...#none').format(localroot))
            else:
                print(NTR('p4 client -f -d {}').format(client_name))
            print(NTR('rm -rf {}').format(localroot))
    else:
        if localroot:
            if not args.no_obliterate:
                # Need this in order to use --gc later on
                p4gf_util.p4_client_df(p4, client_name)
                metrics.clients += 1
            print_verbose(
                args,
                _("Deleting client '{client_name}'s workspace...").format(
                    client_name=client_name))
            _remove_local_root(localroot)
def delete_client_local(args, p4, client_name, metrics):
    """Delete the named Perforce client and its workspace.

    :param args: parsed command line arguments
    :param p4: Git user's Perforce client
    :param client_name: name of client to be deleted
    :param metrics: DeletionMetrics for collecting resulting metrics

    Very little else is removed since this is presumed to be a read-only
    instance, and as such, submodules, config files, streams, keys, etc
    are not removed from the Perforce server.
    """
    p4.user = p4gf_const.P4GF_USER
    repo_name = p4gf_util.client_to_repo_name(client_name)
    # FIX: compute the "running as a script" flag once and reuse it below
    # instead of re-testing __name__ a second time.
    has_main = __name__ == "__main__"
    check_repo_exists_and_get_repo_config(args, p4, client_name, has_main)
    delete_non_client_repo_data(args, p4, client_name, metrics, read_only=True)
    if args.delete and has_main:
        server_id_dict = p4gf_util.serverid_dict_for_repo(p4, repo_name)
        if server_id_dict:
            print(
                _('You must delete this repo from these other Git Fusion instances'
                  ))
            for k, v in server_id_dict.items():
                print(
                    _(" {server_id} on host {host}").format(server_id=k,
                                                            host=v))
def set_user_passwd_if_created(created, user):
    """If creating the user, conditionally prompt for and set the passwd."""
    global P4_PASSWD, PROMPT_FOR_PASSWD
    if not created:
        return
    if PROMPT_FOR_PASSWD:
        prompt_msg = _("Set one password for Perforce users 'git-fusion-user'"
                       "\nand 'git-fusion-reviews-*'.")
        # When creating additional Git Fusion instance only the new reviews will be created.
        # Catch this case and avoid a misleading prompt.
        if user == p4gf_util.gf_reviews_user_name():
            prompt_msg = _("Enter a new password for Perforce user '{0}'.").format(user)
        try:
            P4_PASSWD = get_passwd(prompt_msg)
        except KeyboardInterrupt:
            Verbosity.report(Verbosity.INFO, _("\n Stopping. Passwords not set."))
            sys.exit(1)
        # Prompt only once; reuse the answer for all the service users,
        # even if the user enters no password at all.
        PROMPT_FOR_PASSWD = False
        if not P4_PASSWD:
            Verbosity.report(Verbosity.INFO, _("Empty password. Not setting passwords."))
    # passwd may be suppressed with --nopasswd option, which also suppresses the prompt.
    if P4_PASSWD:
        set_passwd(user, P4_PASSWD)
        Verbosity.report(Verbosity.INFO,
                         _("Password set for Perforce user '{}'.").format(user))
def main():
    """Parse the command line and search the user map for an email address."""
    p4gf_util.has_server_id_or_exit()
    log_l10n()
    # Set up argument parsing.
    parser = p4gf_util.create_arg_parser(
        _("Searches for an email address in the user map."))
    parser.add_argument(NTR('email'), metavar='E',
                        help=_('email address to find'))
    args = parser.parse_args()
    # make sure the world is sane
    ec = p4gf_init.main()
    if ec:
        print(_("p4gf_usermap initialization failed"))
        sys.exit(ec)
    with p4gf_create_p4.Closer():
        p4 = p4gf_create_p4.create_p4(client=p4gf_util.get_object_client_name())
        if not p4:
            sys.exit(1)
        usermap = UserMap(p4)
        user = usermap.lookup_by_email(args.email)
        if not user:
            sys.stderr.write(_("No such user found: '{}'\n").format(args.email))
            sys.exit(1)
        print(_("Found user '{}' <{}>").format(user[0], user[2]))
        sys.exit(0)
def main():
    """Parse the command-line arguments and print a configuration."""
    p4gf_util.has_server_id_or_exit()
    p4gf_client = p4gf_util.get_object_client_name()
    p4 = p4gf_create_p4.create_p4(client=p4gf_client)
    if not p4:
        sys.exit(1)
    desc = _("""Display the effective global or repository configuration.
All comment lines are elided and formatting is normalized per the
default behavior of the configparser Python module.
The default configuration options will be produced if either of the
configuration files is missing.
""")
    parser = p4gf_util.create_arg_parser(desc=desc)
    parser.add_argument(
        NTR('repo'), metavar=NTR('R'), nargs='?', default='',
        help=_('name of the repository, or none to display global.'))
    args = parser.parse_args()
    if args.repo:
        cfg = get_repo(p4, args.repo)
    else:
        cfg = get_global(p4)
    if not cfg:
        print(_('Unable to read configuration file!'))
        # FIX: previously fell through and called cfg.write() on None,
        # raising AttributeError; exit with an error status instead.
        sys.exit(1)
    cfg.write(sys.stdout)
def set_user_passwd_if_created(created, user):
    '''If creating the user, conditionally prompt for and set the passwd.'''
    # Module-level state: P4_PASSWD caches the entered password so one
    # prompt covers every service user; PROMPT_FOR_PASSWD gates the prompt.
    global P4_PASSWD, PROMPT_FOR_PASSWD
    if created:
        if PROMPT_FOR_PASSWD:
            prompt_msg = _(
                "Set one password for Perforce users 'git-fusion-user'"
                "\nand 'git-fusion-reviews-*'.")
            # When creating additional Git Fusion instance only the new reviews will be created.
            # Catch this case and avoid a misleading prompt.
            if user == p4gf_util.gf_reviews_user_name():
                prompt_msg = _("Enter a new password for Perforce user '{0}'."
                               ).format(user)
            try:
                P4_PASSWD = get_passwd(prompt_msg)
            except KeyboardInterrupt:
                # Ctrl-C during the prompt: abort without touching passwords.
                Verbosity.report(Verbosity.INFO,
                                 _("\n Stopping. Passwords not set."))
                sys.exit(1)
            # If we prompted, do so once and use for all the service users,
            # even if the user enters no password at all.
            PROMPT_FOR_PASSWD = False
            if not P4_PASSWD:
                Verbosity.report(Verbosity.INFO,
                                 _("Empty password. Not setting passwords."))
        # passwd may be suppressed with --nopasswd option, which also suppresses the prompt.
        if P4_PASSWD:
            set_passwd(user, P4_PASSWD)
            Verbosity.report(
                Verbosity.INFO,
                _("Password set for Perforce user '{}'.").format(user))
def check_valid_filename(name, ctx):
    """Test the given name for illegal characters.

    Return None if okay, otherwise an error message.
    Illegal characters and sequences include: [...]

    :param name: file path to validate.
    :param ctx: Git Fusion context; supplies server_version for the
        Windows-specific check.
    """
    for idx, c in enumerate(name):
        if not is_p4d_printable(c):
            # Splice a hex escape in place of the offending character so
            # the message can show where it occurred.
            fullname = name[:idx] + "x{ch:02X}".format(ch=ord(c)) + name[idx:]
            # FIX: message lacked the {filename} placeholder, so the
            # offending path was silently dropped by .format().
            return _("Perforce: Non-printable characters not allowed in Perforce: "
                     "character x{ch:02X} in filepath: {filename}").format(
                         filename=fullname, ch=ord(c))
    if '...' in name:
        # FIX: restored {filename} placeholder.
        return _("Perforce: bad filename (...): '{filename}'").format(
            filename=name)
    if 'P4D/NT' in ctx.server_version:
        if ':' in name:
            # FIX: restored {filename} placeholder.
            return _("Perforce: unsupported filename on windows: {filename}"
                     ).format(filename=name)
    # This should usually be en_US.UTF-8 which also needs to be defined
    # on the os
    encoding = sys.getfilesystemencoding()
    try:
        name.encode(encoding, "strict")
    except UnicodeEncodeError:
        # FIX: restored {filename} placeholder.
        return _(
            "Perforce: Cannot convert filename to '{encoding}': {filename}"
        ).format(encoding=encoding, filename=name)
    return None
def _enforce_disk_usage(self):
    """Enforce the total and received megabytes push limits, if any are defined.

    Raises a PushLimitException if a limit has been exceeded.
    """
    if not (self.space_limit or self.received_limit):
        return

    def prune_unreferenced():
        # Remove the newly introduced, unreferenced, commits so
        # that the next push has a chance of succeeding.
        # (FIX: extracted; this was duplicated in both limit branches.)
        p4gf_proc.popen(['git', '--git-dir=' + self.repo.path, 'prune'])

    pending_mb = self.get_pending_mb()
    if self.space_limit:
        LOG.debug('enforce() measured {0:.2f}M disk usage'.format(self.space_total))
        if (self.space_total + pending_mb) > self.space_limit:
            prune_unreferenced()
            raise PushLimitException(
                _("Push to repo {repo_name} rejected, space limit exceeded")
                .format(repo_name=self.repo_name))
    if self.received_limit:
        previous_total = self.get_total_mb()
        space_total = self.space_total
        if space_total == 0:
            # unable to enforce limits, already logged
            return
        # FIX: renamed misspelled local 'recieved_mb' -> 'received_mb'.
        received_mb = space_total - pending_mb - previous_total
        LOG.debug('enforce() measured {0:.2f}M received'.format(received_mb))
        if received_mb > self.received_limit:
            prune_unreferenced()
            raise PushLimitException(
                _("Push to repo {repo_name} rejected, received limit exceeded")
                .format(repo_name=self.repo_name))
def _validate_view_lines_using_p4_client(self): '''Attempt to create a temp client with the union of all branch views. This will validate all views. Return None if valid, or Error message otherwise. ''' # This client root path will not be used by P4 client_root = '/tmp/git-fusion/validate_config' desc = (_("Created by Perforce Git Fusion for config_validation'.")) # Attributes common to all sorts of clients. spec = { 'Owner': p4gf_const.P4GF_USER, 'LineEnd': NTR('unix'), 'Root': client_root, 'Options': p4gf_const.CLIENT_OPTIONS, 'Host': None, 'Description': desc } spec['View'] = self.branches_union_view_lines try: p4gf_p4spec.set_spec(self.p4, 'client', spec_id=self.tmp_client_name, values=spec) p4gf_util.p4_client_df(self.p4, self.tmp_client_name) return None except P4.P4Exception as e : # Extract the interesting portion of the P4 error message errmsg = [] for line in str(e).splitlines(): if P4CLIENTERROR.search(line): errmsg.append(line) errmsg = "\n".join(errmsg) LOG.debug3("_validate_view_lines_using_p4_client: p4 exception: tmpclient:%s: %s", self.tmp_client_name, errmsg) return _("There is an error in some branch Views.\n{error}").format(error=errmsg)
def outputText(self, h):
    """Assemble file content, then pass it to hasher via temp file.

    Either str or bytearray can be passed to outputText. Since we need
    to write this to a file and calculate a SHA1, we need bytes.

    For unicode servers, we have a charset specified which is used to
    convert a str to bytes.

    For a nonunicode server, we will have specified "raw" encoding to
    P4Python, so we should never see a str.
    """
    if self.p4.charset:
        try:
            # self.p4.__convert() doesn't work correctly here
            if isinstance(h, str):
                b = getattr(self.p4, '__convert')(self.p4.charset, h)
            else:
                b = getattr(self.p4, '__convert')(self.p4.charset, h.decode())
        except Exception as exc:
            # FIX: narrowed the bare 'except:' (which also trapped
            # KeyboardInterrupt/SystemExit) and chained the cause so the
            # original conversion failure is not lost.
            msg = _("error: failed '{}' conversion for '{}#{}'").format(
                self.p4.charset, self.rev.depot_path, self.rev.revision)
            raise P4Exception(msg) from exc
    else:
        if isinstance(h, str):
            raise RuntimeError(_('unexpected outputText'))
        b = h
    self.appendContent(b)
    return OutputHandler.HANDLED
def _enforce_commits_and_files(self, prl):
    """Enforce the file and commits push limits, if any are defined.

    :param prl: list of pre-receive tuples.

    Raises a PushLimitException if a limit has been exceeded.
    """
    # Do we require any push commit or file limit enforcement?
    if not (self.commit_limit or self.file_limit):
        return
    # Yes: tally commits across every pushed ref, and files too when a
    # file limit is configured.
    count_files = self.file_limit is not None
    commit_total = 0
    file_total = 0
    for prt in prl:
        commits, files = self._count_commits(
            prt.old_sha1, prt.new_sha1, count_files)
        commit_total += commits
        file_total += files
    if self.commit_limit:
        LOG.debug('enforce() found {} commits'.format(commit_total))
        if commit_total > self.commit_limit:
            raise PushLimitException(
                _("Push to repo {repo_name} rejected, commit limit exceeded")
                .format(repo_name=self.repo_name))
    if self.file_limit:
        LOG.debug('enforce() found {} files'.format(file_total))
        if file_total > self.file_limit:
            raise PushLimitException(
                _("Push to repo {repo_name} rejected, file limit exceeded")
                .format(repo_name=self.repo_name))
def check_lfs_enabled(self):
    """Validate repo configuration if processing an LFS request.

    If we're processing a Git LFS request, but the current repo is not
    configured to allow Git LFS requests, reject.

    :raises p4gf_server_common.BadRequestException: if the repo is not
        initialized or LFS is not enabled for it.
    """
    method = self.environ.get("REQUEST_METHOD")
    path_info = self.environ['PATH_INFO']
    if method == 'POST' and path_info == BATCH_PATH_INFO:
        # Special case batch requests as a pass-through at this point,
        # to be responded to appropriately later in the process. The
        # client is not expecting a 400 for batch requests.
        return
    # Ensure the repository has already been initialized as
    # initialization via LFS request is forbidden.
    # NOTE(review): the loaded config is discarded; only the
    # load-failure side effect is used, while the enabled check below
    # reads self.repo_config — confirm that is populated elsewhere.
    try:
        p4gf_config.RepoConfig.from_depot_file(self.repo_name, self.p4,
                                               create_if_missing=False)
    except p4gf_config.ConfigLoadError:
        raise p4gf_server_common.BadRequestException(
            _("Repo not yet initialized\n"))
    enabled = self.repo_config.getboolean(
        p4gf_config.SECTION_PERFORCE_TO_GIT,
        p4gf_config.KEY_GIT_LFS_ENABLE)
    if not enabled:
        raise p4gf_server_common.BadRequestException(
            _("Git LFS not enabled for this repo.\n"))
    http_url = self._get_lfs_url()
    LOG.debug("check_lfs_enabled() HTTP URL=%s", http_url)
def _upgrade_p4gf(p4):
    """Perform upgrade from earlier versions of P4GF.

    This should be invoked using _maybe_perform_init() to avoid race
    conditions across hosts.

    :param p4: connected P4 instance running as the Git Fusion user.
    """
    # If updating from 12.2 to 13.1 we need to create global config file
    # (this does nothing if file already exists)
    c = p4gf_config.create_file_global(p4)
    if c:
        _info(
            _("Global config file '{}' created.").format(
                p4gf_config.depot_path_global()))
    else:
        _info(
            _("Global config file '{}' already exists.").format(
                p4gf_config.depot_path_global()))
    # Ensure the time zone name has been set, else default to something sensible.
    r = p4.run('counter', '-u', p4gf_const.P4GF_COUNTER_TIME_ZONE_NAME)
    tzname = p4gf_util.first_value_for_key(r, 'value')
    if tzname == "0" or tzname is None:
        msg = _("Counter '{}' not set, using UTC as default."
                " Change this to your Perforce server's time zone.") \
            .format(p4gf_const.P4GF_COUNTER_TIME_ZONE_NAME)
        # FIX: Logger.warn() is a deprecated alias; use warning().
        LOG.warning(msg)
        sys.stderr.write(NTR('Git Fusion: {}\n').format(msg))
        tzname = None
    else:
        # Sanity check the time zone name.
        try:
            pytz.timezone(tzname)
        except pytz.exceptions.UnknownTimeZoneError:
            LOG.warning("Time zone name '{}' unrecognized, using UTC as default".
                        format(tzname))
            tzname = None
    if tzname is None:
        p4.run('counter', '-u', p4gf_const.P4GF_COUNTER_TIME_ZONE_NAME, 'UTC')
def _upgrade_p4gf(p4):
    """Perform upgrade from earlier versions of P4GF.

    This should be invoked using _maybe_perform_init() to avoid race
    conditions across hosts.
    """
    # Updating from 12.2 to 13.1 requires the global config file; this
    # is a no-op when the file already exists.
    p4gf_config.GlobalConfig.init(p4)
    with p4.at_exception_level(p4.RAISE_ERROR):
        wrote = p4gf_config.GlobalConfig.write_if(p4)
        if wrote:
            _info(_("Global config file '{path}' created/updated.")
                  .format(path=p4gf_config.depot_path_global()))
        else:
            _info(_("Global config file '{path}' already exists.")
                  .format(path=p4gf_config.depot_path_global()))
    # Ensure the time zone name has been set, else default to something sensible.
    tzname = P4Key.get(p4, p4gf_const.P4GF_P4KEY_TIME_ZONE_NAME)
    if tzname is None or tzname == "0":
        msg = _("p4 key '{key}' not set, using UTC as default."
                " Change this to your Perforce server's time zone.") \
            .format(key=p4gf_const.P4GF_P4KEY_TIME_ZONE_NAME)
        LOG.warning(msg)
        sys.stderr.write(_('Git Fusion: {message}\n').format(message=msg))
        tzname = None
    else:
        # Sanity check the time zone name.
        try:
            pytz.timezone(tzname)
        except pytz.exceptions.UnknownTimeZoneError:
            LOG.warning("Time zone name '{}' unrecognized, using UTC as default".format(tzname))
            tzname = None
    if tzname is None:
        P4Key.set(p4, p4gf_const.P4GF_P4KEY_TIME_ZONE_NAME, 'UTC')
def _create_file(p4, client_name, local_path, file_content):
    """Create and submit a file.

    Write a file to the local Git Fusion workspace and then add and
    submit to Perforce. NOP if file already exists in Perforce after a
    'p4 sync'.

    :param p4: connected P4 instance.
    :param client_name: client to switch to while syncing/submitting.
    :param local_path: absolute workspace path of the file to create.
    :param file_content: text content written when the file is absent.
    """
    filename = os.path.basename(local_path)
    with p4gf_util.restore_client(p4, client_name):
        try:
            with p4.at_exception_level(p4.RAISE_NONE):
                # Sync the file and ensure we really have it.
                p4.run('sync', '-q', local_path)
                results = p4.run('have', local_path)
            if not results:
                # FIX: log messages said "_write_file()" — a leftover
                # from an earlier name of this function.
                LOG.debug("_create_file(): {} does not exist, will create...".format(local_path))
                # Perms are probably read-only, need to remove before writing.
                if os.path.exists(local_path):
                    os.remove(local_path)
                else:
                    p4gf_util.ensure_parent_dir(local_path)
                with open(local_path, 'w') as mf:
                    mf.write(file_content)
                # FIX: restored the '{filename}' placeholder so the
                # changelist description names the file being created.
                desc = _("Creating initial '{filename}' file via p4gf_init.py")\
                    .format(filename=filename)
                with p4gf_util.NumberedChangelist(p4=p4, description=desc) as nc:
                    nc.p4run('add', local_path)
                    nc.submit()
                LOG.debug("_create_file(): successfully created {}".format(local_path))
                _info(_("File '{path}' created.").format(path=local_path))
            else:
                _info(_("File '{path}' already exists.").format(path=local_path))
        except P4.P4Exception as e:
            LOG.warning('error setting up {file} file: {e}'
                        .format(file=filename, e=str(e)))
def _validate_submodule_url(key, url, args):
    """Validate within reason.

    Handle ssh and http handling of errors differently.
    """
    LOG.debug("_validate_submodule_url {0}".format(url))
    have_error = False
    try:
        u = url.format(**args)
    except (ValueError, KeyError):
        have_error = True
    if not url.endswith('{repo}'):
        have_error = True
    is_http = key == p4gf_config.KEY_HTTP_URL
    pattern = HTTP_URL_REGEX if is_http else SSH_URL_REGEX
    # Short-circuit keeps 'u' from being read when formatting failed.
    matched = not have_error and pattern.match(u)
    if matched:
        return u
    if is_http:
        LOG.error(_('Stream imports require a valid http-url'
                    ' be configured. Contact your administrator.'))
        # http_auth_server will report the error
        return None
    msg = _('Stream imports require a valid ssh-url'
            ' be configured. Contact your administrator.')
    LOG.error(msg)
    raise RuntimeError(msg)
def read_key_type(key):
    """Decode the SSH key and returns the key format (e.g. ssh-dss).

    The input is expected to be a single line of base64 encoded data.

    :param key: base64-encoded SSH public key material.
    :return: key format string, or None if the key cannot be decoded.
    """
    try:
        # Based on RFC 4253 section 6.6 "Public Key Algorithms"
        keydata = base64.b64decode(key.encode())
        parts = []
        # Decode the entire string to ensure it is valid base64 and not something
        # nefarious (e.g. control characters, shell escapes, etc).
        while keydata:
            # read the length of the data
            dlen = struct.unpack(NTR('>I'), keydata[:4])[0]
            # read in <length> bytes
            data, keydata = keydata[4:dlen + 4], keydata[4 + dlen:]
            parts.append(data)
        # only need the first part, the format specifier
        # FIX: guard against empty input (b64decode('') yields b'', which
        # previously crashed on parts[0]).
        return parts[0].decode('utf-8') if parts else None
    except (binascii.Error, struct.error) as e:
        # FIX: also catch struct.error — valid base64 whose payload is
        # truncated (fewer than 4 length bytes) crashed unpack().
        _print_warn(
            _("apparently invalid SSH key '{key}' caused '{error}'").format(
                key=key, error=e))
    except UnicodeDecodeError:
        _print_warn(
            _("error decoding SSH key type for key '{key}'").format(key=key),
            error=True)
    return None
def rebuild_all_keys(p4):
    """Rebuild the set of keys by reading all active files from the depot."""
    latest_change = get_keys_latest_change(p4)
    if not latest_change:
        _print_warn(_("No files found in '{path}'").format(path=KEYS_PATH))
        return
    _print_debug(
        _('rebuilding all keys through change {change}').format(
            change=latest_change))
    keys = read_ssh_configuration()
    # Keep only the lines our script does not manage.
    custom_keys = keys.get(NO_FP, '')
    keys.clear()
    if custom_keys:
        keys.add(NO_FP, '', custom_keys)
    # Fetch all current keys and add them to the mapping.
    files = p4.run('files', '-e', '{}@{}'.format(KEYS_PATH, latest_change))
    # Wipe out the ~/.ssh2/git-user-keys directory tree.
    keypath = os.path.join(SshDirectory, KEYS_DIR)
    if os.path.exists(keypath):
        shutil.rmtree(keypath)
    for fi in files or []:
        _print_debug(
            _('adding file {depot_file}').format(
                depot_file=fi['depotFile']))
        ssh_key_add(p4, fi['depotFile'], keys, _REBUILD)
    write_ssh_configuration(keys)
    update_last_change_num(p4, latest_change)
def _preflight_check(ctx, prl, gsreview_coll):
    """Perform a sanity check before inadvertently creating files.

    :param ctx: Git Fusion context.
    :type prl: :class:`p4gf_pre_receive_hook.PreReceiveTupleLists`
    :param prl: list of pushed PreReceiveTuple elements to be set
    :param gsreview_coll: Git-Swarm review meta data
    """
    LOG.debug('pre-receive preflight check for %s', ctx.config.repo_name)
    branch_dict = ctx.branch_dict()
    for prt in prl:
        existing_branch = _is_gitref_in_gf(prt.ref, branch_dict,
                                           is_lightweight=False)
        is_review = gsreview_coll and gsreview_coll.ref_in_review_list(prt.ref)
        if is_review:
            # Swarm review refs must be explicitly enabled for the repo.
            if not ctx.swarm_reviews:
                raise RuntimeError(
                    _("Swarm reviews are not authorized for this repo."
                      "\nRejecting push of '{ref}'.").format(ref=prt.ref))
        elif not ctx.branch_creation and not existing_branch:
            # Unknown ref and the repo forbids creating new branches.
            raise RuntimeError(
                _("Branch creation is not authorized for this repo."
                  "\nRejecting push of '{ref}'.").format(ref=prt.ref))
def main():
    """Parse command line arguments and decide what should be done."""
    desc = _("""p4gf_lfs_http_server.py handles LFS requests over HTTP.
Typically it is run via a web server and protected by some form of user
authentication. The environment variable REMOTE_USER must be set to the name
of a valid Perforce user, which is taken to be the user performing a pull or
push operation.
""")
    epilog = _("""If the --port argument is given then a simple HTTP server
will be started, listening on the specified port. In lieu of REMOTE_USER, the
user name is extracted from the URI, which starts with "/~", followed by the
user name. To stop the server, send a terminating signal to the process.
""")
    log_l10n()
    parser = p4gf_util.create_arg_parser(desc, epilog=epilog)
    # FIX: corrected "reqeuests" typo in the user-visible help text.
    parser.add_argument('-p', '--port', type=int,
                        help=_('port on which to listen for LFS requests'))
    args = parser.parse_args()
    if args.port:
        LOG.info("Listening for LFS-HTTP requests on port %s, pid=%s",
                 args.port, os.getpid())
        httpd = wsgiref.simple_server.make_server('', args.port, app_wrapper)
        print(_('Serving on port {port}...').format(port=args.port))
        p4gf_http_common.wsgi_install_signal_handler(httpd)
        p4gf_proc.install_stack_dumper()
        httpd.serve_forever()
    else:
        # Assume we are running inside a web server...
        p4gf_proc.install_stack_dumper()
        _handle_cgi()
def as_string_extended(*, p4=None, args=None, include_checksum=False):
    """Return a page-long dump of Git Fusion, P4D, and uname info.

    :param p4: optional connected P4 instance; supplies P4PORT and 'p4 info'.
    :param args: optional parsed arguments forwarded to _create_p4().
    :param include_checksum: forwarded to as_string().
    """
    # Git Fusion version info, including Git and P4Python.
    a = as_string(include_checksum)
    # FIX: renamed ambiguous single-letter local 'l' (PEP 8 E741).
    lines = []
    # Git Fusion server OS version: uname -a
    lines.append(NTR('uname: {}').format(uname()))
    lines.append(NTR('Git Fusion path: {}').format(
        os.path.dirname(os.path.realpath(__file__))))
    lines.append(_get_lsb_release())
    # P4PORT, if supplied
    if p4:
        lines.append(_('Perforce server address: {p4port}').format(p4port=p4.port))
    # 'p4 info', if we can get it.
    try:
        _p4 = _create_p4(p4=p4, args=args)
        # Run 'p4 info' un-tagged to get human-friendly
        # server info labels.
        lines.append(NTR("p4 info:"))
        lines.extend(p4gf_p4cache.fetch_info(_p4, tagged=False))
        # Run 'p4 info' a SECOND time, tagged, to get
        # the "unicode" setting that untagged omits.
        # FIX: dropped redundant parentheses around the "unicode" key.
        u = p4gf_p4cache.fetch_info(_p4, tagged=True).get("unicode", _("disabled"))
        lines.append(_("Unicode: {value}").format(value=u))
    except P4Exception:
        # Best effort: server info is simply omitted when unavailable.
        pass
    return a + "\n".join(lines) + "\n"
def check_readiness(p4):
    """Check that P4GF is ready for accepting connections from clients.

    :param p4: P4 instance, connected or not, for the counter queries.
    :raises RuntimeError: if Git Fusion is shutting down, or the submit
        triggers are missing or outdated.
    """
    # Note the "clever" use of counter names with a shared prefix that
    # just happen to be the two counters we are interested in retrieving.
    # (wanted to avoid another call to p4 counter, but without retrieving
    # _all_ P4GF counters, which could be millions).
    def fetch_counters(p4):
        # FIX: def instead of a lambda bound to a name (PEP 8 E731).
        return p4.run('counters', '-u', '-e', 'git-fusion-pre*')
    if p4.connected():
        counters = fetch_counters(p4)
    else:
        with p4gf_create_p4.p4_connect(p4):
            counters = fetch_counters(p4)
    # Check if the "prevent further access" counter has been set, and raise an
    # error if the counter is anything other than zero.
    value = fetch_counter_value(counters, p4gf_const.P4GF_COUNTER_PREVENT_NEW_SESSIONS)
    if value and value != '0':
        raise RuntimeError(_('Git Fusion is shutting down. Please contact your admin.'))
    # Check that GF submit trigger is installed and has a compatible version.
    value = fetch_counter_value(counters, p4gf_const.P4GF_COUNTER_PRE_TRIGGER_VERSION)
    trigger_version_counter = value.split(":")[0].strip() if value else '0'
    if int(trigger_version_counter) != int(p4gf_const.P4GF_TRIGGER_VERSION):
        LOG.error("Incompatible trigger version: {0} should be {1} but got {2}".format(
            p4gf_const.P4GF_COUNTER_PRE_TRIGGER_VERSION,
            p4gf_const.P4GF_TRIGGER_VERSION, trigger_version_counter))
        if int(trigger_version_counter) == 0:
            raise RuntimeError(_('Git Fusion submit triggers are not installed.'
                                 ' Please contact your admin.'))
        else:
            raise RuntimeError(_('Git Fusion submit triggers need updating.'
                                 ' Please contact your admin.'))
    p4gf_util.has_server_id_or_exit(log=LOG)
def cont_init(self):
    """Resume processing using an existing sqlite database and git repo.

    This continue init is called when the P4 syncs, and database inserts
    etc are done but the reachable computation or the final report is not.
    Thus we reuse the existing database, and initialize from data stored
    therein.

    Exits the process (status 1) if either the database or the git repo
    is missing.
    """
    # Both artifacts from the earlier run must exist to continue.
    if not os.path.exists(self.sql_db_abspath):
        print(_("-the --cont option is set but the sql database '{}' does not exist.").
              format(self.sql_db_abspath))
        sys.exit(1)
    if not os.path.exists(self.git_dir_abspath):
        print(_("-the --cont option is set but the git repo '{}' does not exist.").
              format(self.git_dir_abspath))
        sys.exit(1)
    self.print_quiet(_("Continuing with already retrieved objects from Helix.\n"
                       "Using existing sql database: '{}'.\n").
                     format(self.sql_db_abspath))
    self.git_repo = pygit2.Repository(self.git_dir_abspath)
    # EXCLUSIVE isolation: this process is the sole writer of the database.
    self.db = sqlite3.connect(
        database=self.sql_db_abspath
        , isolation_level="EXCLUSIVE")
    # 'status' records how far the previous run progressed.
    self.status = self.sql_get_status()
    if self.status >= OBJECTS_MOVED:
        # Restore pickled table bookkeeping saved by the previous run.
        # NOTE(review): pickle.loads on locally-written admin rows; assumed
        # trusted since this process wrote them.
        table_names = self.sql_get_admin('table_names')
        if table_names:
            self.table_names = pickle.loads(table_names)
        table_counts = self.sql_get_admin('table_type_counts')
        if table_counts:
            self.table_type_counts = pickle.loads(table_counts)
    if self.status == REPORTED and not self.doreport:
        # Already fully reported; nothing left to do unless forced.
        print(_("Nothing to do. database 'status' table reports status is REPORTED."))
        print(_("Use the '--doreport' option to re-create the report."))
def check_for_localhost(id_from_file, server_id):
    """Validate that server_id is not being set to 'localhost'.

    Reports every applicable complaint, then exits once if any fired.
    """
    needs_exit = False
    # Existing server_id file already says 'localhost'.
    if id_from_file == 'localhost' and server_id == 'localhost':
        Verbosity.report(
            Verbosity.INFO,
            _("Your server_id file '{path}' is set to 'localhost'."
              " Use the --id argument to choose another id.").
            format(path=p4gf_util.server_id_file_path()))
        needs_exit = True
    # No server_id file yet, and the proposed id is 'localhost'.
    if server_id == 'localhost' and not id_from_file:
        if ID_FROM_ARGV:
            # User explicitly asked for 'localhost' on the command line.
            Verbosity.report(
                Verbosity.INFO,
                _("server_id 'localhost' is not permitted. "
                  " Use the --id argument to choose another id."))
        else:
            # Defaulted from the hostname, which happens to be 'localhost'.
            Verbosity.report(
                Verbosity.INFO,
                _("Git Fusion is attempting to use the default hostname "
                  "'localhost' as the server_id which is not permitted.\n"
                  "Use the --id argument to choose another id."))
        needs_exit = True
    # A server_id file exists; refuse to clobber it with 'localhost'.
    if server_id == 'localhost' and id_from_file:
        Verbosity.report(
            Verbosity.INFO,
            _("Your server_id file '{path}' is already set to '{server_id}'."
              "\nYou may not override it with 'localhost'."
              " Use the --id argument to choose another id.").
            format(path=p4gf_util.server_id_file_path(), server_id=id_from_file))
        needs_exit = True
    if needs_exit:
        sys.exit(1)
def ensure_group():
    """Create Perforce group git-fusion-group if not already exists.

    Returns True if the group was created, False if it already existed
    (whether or not its membership was updated).
    """
    users = []
    # Keep the order of the users in the same order that P4 insists on
    # (if the order doesn't match then the group is updated repeatedly).
    users.append(p4gf_const.P4GF_REVIEWS__ALL_GF)
    users.append(p4gf_const.P4GF_REVIEWS__NON_GF)
    users.append(p4gf_util.gf_reviews_user_name())
    users.append(p4gf_const.P4GF_USER)
    args = [p4, NTR("group")]
    spec = {'Timeout': NTR('unlimited'), 'Users': users}
    kwargs = {'spec_id': p4gf_const.P4GF_GROUP, 'values': spec}
    if p4gf_util.ensure_spec(*args, **kwargs):
        Verbosity.report(Verbosity.INFO,
                         _("Group '{}' created.").format(p4gf_const.P4GF_GROUP))
        return True
    # Group already exists. We change the list of users in the group from
    # time to time, so ensure the membership is up to date.
    users = p4gf_util.first_dict(p4.run('group', '-o', p4gf_const.P4GF_GROUP))['Users']
    # Add the gf_reviews_user_name if not already in the group.
    # This avoids removing already existing reviews users from multiple GF instances.
    if p4gf_util.gf_reviews_user_name() not in users:
        users.append(p4gf_util.gf_reviews_user_name())
        spec = {'Timeout': NTR('unlimited'), 'Users': users}
        kwargs = {'spec_id': p4gf_const.P4GF_GROUP, 'values': spec}
        if p4gf_util.ensure_spec_values(*args, **kwargs):
            Verbosity.report(Verbosity.INFO,
                             _("Group '{}' updated.").format(p4gf_const.P4GF_GROUP))
        else:
            Verbosity.report(Verbosity.INFO, _("Group '{}' already up to date.")
                             .format(p4gf_const.P4GF_GROUP))
        # BUG FIX: this path previously fell off the end and returned None;
        # return False explicitly (group existed, was not created).
        return False
    Verbosity.report(Verbosity.INFO, _("Group '{}' already up to date.")
                     .format(p4gf_const.P4GF_GROUP))
    return False
def main():
    """Read the log files and report on lock contention."""
    desc = _("""Examine lock related log entries and report.
How to use:
1) Configure logging to have `p4gf_git_repo_lock` and `p4gf_lock` set to `debug` level.
2) Comment out any `handler` and `filename` (or `file`) entries in logging configuration, such that one XML formatted log file per process will be created.
3) Run the pull or push operations that are of concern.
4) Run this lock_analyze.py script.
""")
    # Build and parse the command line.
    parser = p4gf_util.create_arg_parser(desc=desc)
    parser.add_argument('repo', metavar="REPO",
                        help=_("name of repository to be analyzed"))
    parser.add_argument('-d', '--logs', metavar="DIR",
                        help=_("path to log files to be processed"))
    args = parser.parse_args()
    logging.basicConfig(format="%(levelname)-7s %(message)s",
                        stream=sys.stdout, level=logging.INFO)
    if args.logs is None:
        # default args.logs to GFHOME/.git-fusion/logs
        args.logs = os.path.join(p4gf_const.P4GF_HOME, '.git-fusion', 'logs')
    # Feed every matching log file to one examiner, then summarize.
    examiner = LockExaminer(args.repo)
    for name in retrieve_log_files(args.logs, args.repo):
        examiner.examine_log(os.path.join(args.logs, name))
    examiner.print_summary()
def main():
    """Parse the command-line arguments and print a configuration.

    Exits non-zero if the server-id is missing, the P4 connection fails,
    or the configuration cannot be read.
    """
    p4gf_util.has_server_id_or_exit()
    p4gf_client = p4gf_util.get_object_client_name()
    p4 = p4gf_create_p4.create_p4(client=p4gf_client)
    if not p4:
        sys.exit(1)
    desc = _("""Display the effective global or repository configuration.
All comment lines are elided and formatting is normalized per the
default behavior of the configparser Python module.
The default configuration options will be produced if either of the
configuration files is missing.
""")
    parser = p4gf_util.create_arg_parser(desc=desc)
    parser.add_argument(NTR('repo'), metavar=NTR('R'), nargs='?', default='',
                        help=_('name of the repository, or none to display global.'))
    args = parser.parse_args()
    if args.repo:
        cfg = get_repo(p4, args.repo)
    else:
        cfg = get_global(p4)
    if not cfg:
        print(_('Unable to read configuration file!'))
        # BUG FIX: previously fell through and crashed calling
        # cfg.write() on a falsy cfg; exit with an error instead.
        sys.exit(1)
    cfg.write(sys.stdout)
def _valid_branches(self): """Check if branch definitions in config file are valid.""" # validation requires use of some settings merged in from the global config # for example [@features] config = self.config # Does the config contain any branch sections? sections = self.config.branch_sections() if not sections: self._report_error(_('repository configuration missing branch ID\n')) return False self._check_duplicate_branches(sections) if LOG.isEnabledFor(logging.DEBUG3): # config contents are too lengthy for debug level cfg_text = p4gf_config.to_text("", p4gf_config.GlobalConfig.instance()) LOG.debug3('global config: %s', cfg_text) # check branch creation option try: config.getboolean(p4gf_config.SECTION_GIT_TO_PERFORCE, p4gf_config.KEY_ENABLE_BRANCH_CREATION) except ValueError: self._report_error(_("repository configuration option '{key}' has illegal value\n") .format(key=p4gf_config.KEY_ENABLE_BRANCH_CREATION)) # check merge commits option try: config.getboolean(p4gf_config.SECTION_GIT_TO_PERFORCE, p4gf_config.KEY_ENABLE_MERGE_COMMITS) except ValueError: self._report_error(_("repository configuration option '{key}' has illegal value\n") .format(key=p4gf_config.KEY_ENABLE_MERGE_COMMITS)) # check read-only option try: config.getboolean(p4gf_config.SECTION_REPO, p4gf_config.KEY_READ_ONLY, fallback=False) except ValueError: self._report_error(_("repository configuration option '{key}' has illegal value\n") .format(key=p4gf_config.KEY_READ_ONLY)) # Examine them and confirm they have branch views and all RHS match enable_mismatched_rhs = \ config.getboolean(p4gf_config.SECTION_REPO, p4gf_config.KEY_ENABLE_MISMATCHED_RHS, fallback=False) first_branch = None for section in sections: branch = self._valid_branch(config.repo_config, section, first_branch) if not branch: return False if not enable_mismatched_rhs and not first_branch: first_branch = branch error_msg = self._validate_view_lines_using_p4_client() if error_msg: self._report_error(error_msg) return False return 
True
def main():
    """Program to check for product updates at updates.perforce.com.

    Returns a human-readable message describing whether an update exists,
    or an error message if the query failed.
    """
    desc = _(
        "Report if updates are available by checking at updates.perforce.com.")
    parser = p4gf_util.create_arg_parser(desc=desc, add_debug_arg=True)
    parser.add_argument(
        '--p4port', '-p', metavar='P4PORT',
        help=_('P4PORT of server - optional - also report P4D version '
               'data to Perforce'))
    args = parser.parse_args()
    # get the Git Fusion and P4D product version strings
    (gf_version, server_version) = get_product_version_strings(args)
    # Get the local GF version info as dict
    this_version = p4gf_version_3.as_dict(include_checksum=True)
    # Munge version strings into url paramters required by updates.perforce.com
    # add NOARCH to gf version
    gf_version = gf_version.replace('Git Fusion', 'Git%20Fusion/NOARCH')
    url = GF_PRODUCT_URL + '?product=' + gf_version
    if server_version:
        url = url + '%26product=' + server_version
    # Ensure all spaces are encoded
    url = url.replace(' ', '%20')
    # BUG FIX: was "if 'debug' in args" -- argparse.Namespace __contains__
    # tests attribute EXISTENCE, and 'debug' always exists when the parser
    # is built with add_debug_arg=True, so debug output always printed.
    if args.debug:
        print("debug: url:{}".format(url))
    try:
        webfile = urllib.request.urlopen(url)
    except (urllib.error.URLError, urllib.error.HTTPError) as e:
        return URL_ERROR_MSG.format(url=url, error=str(e))
    # make the query to the url
    data = webfile.read()
    if not data:
        return ERROR_MSG
    product_version = json.loads(data.decode())
    if args.debug:   # BUG FIX: same existence-vs-value fix as above
        print("debug: json data:{}".format(product_version))
    if 'current' not in product_version:
        return JSON_KEY_ERROR_MSG
    # Parse the data and compare
    c = product_version['current']
    current_year_sub = year_sub_to_float(c['major'], c['minor'])
    this_version_year_sub = year_sub_to_float(this_version['release_year'],
                                              this_version['release_sub'])
    message = NO_UPDATES_EXISTS.format(version=current_year_sub)
    if this_version_year_sub < current_year_sub:
        message = UPDATES_EXIST.format(have_version=this_version_year_sub,
                                       current_version=current_year_sub)
    elif this_version_year_sub == current_year_sub and this_version[
            'patchlevel'] < c['build']:
        # NOTE(review): assumes 'patchlevel' and 'build' are comparable
        # types (both ints or both strings) -- confirm against the feed.
        message = PATCH_EXISTS.format(version=current_year_sub)
    return message
def log_info(created, user):
    """Report whether Perforce user `user` was created or already existed.

    NOTE: this function only reports; the creation itself happens in the
    caller. `created` is the boolean result of that creation attempt,
    which is passed through as the return value for chaining.
    """
    if created:
        Verbosity.report(Verbosity.INFO, _("User '{}' created.").format(user))
    else:
        Verbosity.report(Verbosity.INFO, _("User '{}' already exists. Not creating.")
                         .format(user))
    return created
def check_triggers():
    """Check all of the GF triggers are installed and the trigger version is correct.

    Reports findings via Verbosity; sets the trigger version counters when
    all triggers are present but the counters were never initialized.
    """
    # pylint: disable=R0912
    # Too many branches
    triggers = fetch_triggers()
    if not triggers:
        Verbosity.report(Verbosity.INFO, 'Git Fusion Triggers are not installed.')
        return
    # Collect the trigger names (6th field) of lines referencing our script.
    gf_triggers = set()
    for trig in triggers:
        words = trig.split()
        if P4_TRIGGER_FILE in trig:
            gf_triggers.add(words[5])
    # The expected trigger set differs by P4D version.
    if p4gf_version.p4d_version(p4) < P4D_VERSION_2014_1:
        trigger_names = P4_TRIGGER_NAMES
    else:
        trigger_names = P4_TRIGGER_NAMES_14
    have_all_triggers = 0
    for trig in trigger_names:
        if trig in gf_triggers:
            have_all_triggers += 1
    if have_all_triggers == 0:
        Verbosity.report(Verbosity.INFO, 'Git Fusion Triggers are not installed.')
    # BUG FIX: was hard-coded "< 4"; the expected count depends on which
    # trigger-name list applies, so compare against its actual length.
    elif have_all_triggers < len(trigger_names):
        Verbosity.report(Verbosity.INFO, 'Git Fusion Triggers are not all installed.')
    else:
        # check counter -- value format is "<version> : <timestamp>"
        counter = p4.run('counter', '-u', p4gf_const.P4GF_COUNTER_PRE_TRIGGER_VERSION)[0]
        version = counter['value']
        if version != '0':
            version = version.split(":")[0].strip()
        version = int(version)
        if version and version != int(p4gf_const.P4GF_TRIGGER_VERSION):
            Verbosity.report(Verbosity.INFO, 'Git Fusion Triggers are not up to date.')
        elif not version:
            # set the version counter since we detected
            # that all the triggers are installed
            _version = "{0} : {1}".format(p4gf_const.P4GF_TRIGGER_VERSION,
                                          datetime.datetime.now())
            p4.run('counter', '-u', p4gf_const.P4GF_COUNTER_PRE_TRIGGER_VERSION, _version)
            p4.run('counter', '-u', p4gf_const.P4GF_COUNTER_POST_TRIGGER_VERSION, _version)
            Verbosity.report(Verbosity.INFO, _("Setting '{0}' = '{1}'").format(
                p4gf_const.P4GF_COUNTER_PRE_TRIGGER_VERSION, _version))
            Verbosity.report(Verbosity.INFO, _("Setting '{0}' = '{1}'").format(
                p4gf_const.P4GF_COUNTER_POST_TRIGGER_VERSION, _version))
        else:
            Verbosity.report(Verbosity.INFO, _('Git Fusion triggers are up to date.'))
def ensure_depot():
    """Create depot P4GF_DEPOT if not already exists.

    Returns True if the depot was created, False if it already existed.
    """
    created = p4gf_util.ensure_depot_gf(p4)
    message = (_("Depot '{}' created.") if created
               else _("Depot '{}' already exists. Not creating."))
    Verbosity.report(Verbosity.INFO, message.format(p4gf_const.P4GF_DEPOT))
    return created
def add_parse_opts(parser):
    """Add --verbose/-v and --quiet/-q options to the given parser."""
    # pylint:disable=C0301
    # line too long? Too bad. Keep tabular code tabular.
    parser.add_argument(
        '--verbose', '-v', metavar=NTR('level'), nargs='?',
        default='INFO', help=_('Reporting verbosity.'))
    parser.add_argument(
        '--quiet', '-q', action='store_true',
        help=_('Report only errors. Same as --verbose QUIET'))
def git_version_check():
    """Raise RuntimeError if git is missing or too old."""
    detected = git_version()
    if not detected:
        raise RuntimeError(_("Unable to determine Git version"))
    if git_version_acceptable(detected):
        return
    # Render the minimum required version tuple as "X.Y.Z".
    required = ".".join(str(part) for part in _GIT_VERSION)
    raise RuntimeError(_("Git version {0} or greater required.").format(required))
def comment_header_repo():
    """Return the text dump that goes at the top of a newly created
    per-repo config file.

    Falls back to a one-line placeholder if the template file is missing.
    """
    header = p4gf_util.read_bin_file(NTR('p4gf_config.repo.txt'))
    # read_bin_file signals a missing file with the value False.
    if header is False:
        sys.stderr.write(_("no 'p4gf_config.repo.txt' found\n"))
        return _('# Missing p4gf_config.repo.txt file!')
    return header
def _report_error(self, msg):
    """Report error message, including path to offending file.

    The first error for a file also emits a header naming the file and
    dumps the config contents to the debug log.
    """
    if not self.report_count:
        sys.stderr.write(_("error: invalid configuration file: '{}'\n")
                         .format(self.config_file_path))
        contents = p4gf_config.to_text('', self.config) if self.config else ''
        # BUG FIX: was LOG.debug('config {} contents: ' + contents) --
        # the '{}' placeholder was never filled and the string was built
        # eagerly; use lazy %-style args instead.
        LOG.debug('config %s contents: %s', self.config_file_path, contents)
    self.report_count += 1
    # Lazy logging args avoid formatting when the level is disabled.
    LOG.error("Config %s has error: %s", self.config_file_path, msg)
    sys.stderr.write(_('error: {}').format(msg))
def has_server_id_or_exit(log=None):
    """Check if the server-id file is present, exiting if not."""
    if read_server_id_from_file() is not None:
        return
    # Missing server-id: complain on stderr (and optionally the log),
    # then exit with a software-error status.
    formed = _("Git Fusion is missing '{0}' file '{1}'.").format(
        p4gf_const.P4GF_ID_FILE, server_id_file_path())
    message = formed + _(' Please contact your administrator.\n')
    sys.stderr.write(message)
    if log is not None:
        log.error(message)
    sys.exit(os.EX_SOFTWARE)
def _delete_group(args, p4, group_name, metrics):
    """Delete one group, but only if it exists and git-fusion-user owns it."""
    LOG.debug("_delete_group() {}".format(group_name))
    group_spec = p4.fetch_group(group_name)
    owners = group_spec.get('Owners') if group_spec else None
    if not owners or p4gf_const.P4GF_USER not in owners:
        # Either the group is absent or it belongs to someone else; leave it.
        print_verbose(args, _("Not deleting group '{group}':"
                              " Does not exist or '{user}' is not an owner.")
                      .format(group=group_name, user=p4gf_const.P4GF_USER))
        return
    print_verbose(args, _("Deleting group '{}'...").format(group_name))
    p4.run('group', '-a', '-d', group_name)
    metrics.groups += 1
def ensure_protects_configurable():
    """Grant 'p4 protects -u' permission to admin users.

    Returns True if the configurable was changed, False if already set.
    """
    current = p4gf_util.first_value_for_key(
        p4.run('configure', 'show', CONFIGURABLE_ALLOW_ADMIN),
        KEY_VALUE)
    if current == '1':
        # Nothing to do; report and bail.
        Verbosity.report(Verbosity.INFO,
                         _("Configurable '{}' already set to 1. Not setting.")
                         .format(CONFIGURABLE_ALLOW_ADMIN))
        return False
    p4.run('configure', 'set', '{}=1'.format(CONFIGURABLE_ALLOW_ADMIN))
    Verbosity.report(Verbosity.INFO,
                     _("Configurable '{}' set to 1.")
                     .format(CONFIGURABLE_ALLOW_ADMIN))
    return True
def main():
    """Parse command line arguments and decide what should be done.

    With --user, runs a standalone HTTP server; otherwise assumes CGI.
    """
    desc = _("""p4gf_http_server.py handles http(s) requests. Typically it
is run via a web server and protected by some form of user
authentication. The environment variable REMOTE_USER must be set to
the name of a valid Perforce user, which is taken to be the user
performing a pull or push operation.
""")
    epilog = _("""If the --user argument is given then a simple HTTP server
will be started, listening on the port specified by --port. The
REMOTE_USER value will be set to the value given to the --user
argument. To stop the server, send a terminating signal to the process.
""")
    log_l10n()
    parser = p4gf_util.create_arg_parser(desc, epilog=epilog)
    parser.add_argument('-u', '--user',
                        help=_('value for REMOTE_USER variable'))
    parser.add_argument('-p', '--port', type=int, default=8000,
                        help=_('port on which to listen (default 8000)'))
    args = parser.parse_args()
    if not args.user:
        # Assume we are running inside a web server...
        _handle_cgi()
        return
    LOG.debug("Listening for HTTP requests on port {} as user {}, pid={}".format(
        args.port, args.user, os.getpid()))
    # Bind REMOTE_USER into the WSGI app via partial application.
    wrapper = functools.partial(_app_wrapper, args.user)
    httpd = wsgiref.simple_server.make_server(
        '', args.port, wrapper, handler_class=GitFusionRequestHandler)
    print(_('Serving on port {}...').format(args.port))

    def _signal_handler(signum, _frame):
        """Ensure the web server is shutdown properly."""
        LOG.info("received signal {}, pid={}, exiting".format(signum, os.getpid()))
        httpd.server_close()
        sys.exit(0)

    LOG.debug("installing HTTP server signal handler, pid={}".format(os.getpid()))
    # Shut down cleanly on any of the usual termination signals.
    for sig in (signal.SIGHUP, signal.SIGINT, signal.SIGQUIT,
                signal.SIGTERM, signal.SIGTSTP):
        signal.signal(sig, _signal_handler)
    p4gf_proc.install_stack_dumper()
    httpd.serve_forever()
def _remove_tree(tree, contents_only=True):
    """Delete a directory tree.

    With contents_only=True (the default) the top-level directory itself
    is kept; otherwise it is removed too (or unlinked if it is a file or
    symlink). Errors are reported to stderr, not raised.
    """
    if not os.path.exists(tree):
        return
    try:
        p4gf_util.rm_dir_contents(tree)
        if contents_only:
            return
        # Remove the (now empty) top level; symlinks and plain files
        # are unlinked rather than rmdir'd.
        if os.path.isdir(tree) and not os.path.islink(tree):
            os.rmdir(tree)
        else:
            os.remove(tree)
    except FileNotFoundError as e:
        sys.stderr.write(_('File not found error while removing tree: {}\n').format(e))
    except PermissionError as e:
        sys.stderr.write(_('Permission error while removing tree: {}\n').format(e))
def raise_if_homedir(homedir, view_name, rm_list):
    """Refuse to delete the user's home directory.

    If any path in rm_list is the user's home directory, fail with an
    exception rather than delete the home directory.
    """
    if any(path == homedir for path in rm_list):
        raise P4.P4Exception(_("One of view '{}'s directories is"
                               " user's home directory!").format(view_name))
def repo_from_config(p4, view_name, client_name, client_root, enable_mismatched_rhs):
    """Create a new Git Fusion repo client spec for this repo.

    The branch_id section should not matter since we now support all
    listed branches, but we need to initially set the client view to
    _something_, so pick one from config.branch_section_list[0].

    Returns INIT_REPO_OK on success, INIT_REPO_CONFIG_FILE_BAD if the
    repo's p4gf_config fails validation. Re-raises P4Exception from
    client creation with added context.
    """
    with Validator.from_depot_p4gf_config(view_name, p4) as validator:
        if not validator.is_valid(enable_mismatched_rhs):
            return INIT_REPO_CONFIG_FILE_BAD
        # borrow the validator's config just a little while longer...
        section_name = p4gf_config.branch_section_list(validator.config)[0]
        branch = p4gf_branch.Branch.from_config(validator.config, section_name, p4)
        branch.set_rhs_client(client_name)
    try:
        create_repo_client(p4, view_name, client_name, client_root,
                           branch.view_lines, branch.stream_name)
    except P4.P4Exception as e:
        # Prefix error message with additional context.
        # NOTE: mutates e.value in place before re-raising the same exception.
        config_path = NTR('{P4GF_DEPOT}/repos/{repo}/p4gf_config')\
                      .format(P4GF_DEPOT=p4gf_const.P4GF_DEPOT, repo=view_name)
        e.value = (_("\nError while creating Git branch '{branch}' for repo '{repo}'"
                     "\nCheck the branch view specifications in Git Fusion: {config_path}"
                     "\nDetails: {error_details}")
                   .format(branch=branch.git_branch_name
                           , repo=view_name
                           , config_path=config_path
                           , error_details=e.value))
        raise
    return INIT_REPO_OK
def raise_rejection(sha1, msg):
    """preflight-commit hook rejected. Tell the Git pusher."""
    short_sha1 = p4gf_util.abbrev(sha1)
    raise RuntimeError(_('preflight-commit rejected: {sha1} {msg}\n')
                       .format(sha1=short_sha1, msg=msg))
def default_config_repo(p4, name):
    """Return a ConfigParser instance loaded with default values for a
    single repo.

    Default values for a repo include a placeholder description and the
    charset which is copied from the default charset in the global config.
    """
    global_config = get_global(p4)
    config = configparser.ConfigParser(interpolation=None, allow_no_value=True)
    config.add_section(SECTION_REPO)
    config.set(SECTION_REPO, KEY_DESCRIPTION, _("Created from '{}'").format(name))
    config.set(SECTION_REPO, KEY_IGNORE_AUTHOR_PERMS, VALUE_NO)
    # Copy default values from global config file: charset first...
    config.set(SECTION_REPO, KEY_CHARSET,
               global_config.get(SECTION_REPO_CREATION, KEY_CHARSET))
    # ...then the git-to-perforce behavioral knobs.
    for key in (KEY_ENABLE_BRANCH_CREATION,
                KEY_ENABLE_MERGE_COMMITS,
                KEY_ENABLE_SUBMODULES,
                KEY_CHANGE_OWNER,
                KEY_PREFLIGHT_COMMIT):
        config.set(SECTION_REPO, key,
                   global_config.get(SECTION_GIT_TO_PERFORCE, key))
    return config
def _assign_branches_named(self):
    """For each pushed branch reference, find a path from its new head
    location to its old head location (if any). Assign commits to the
    branch as we find the path.
    """
    LOG.debug('_assign_branches_named')
    for branch in self._pushed_branch_sequence():
        LOG.debug2('_assign_branches_named branch={}'.format(p4gf_branch.abbrev(branch)))
        old_head_sha1 = _branch_to_old_head_sha1(branch)
        # ? Can we trust the old ref
        # ? in the PreReceiveTuple if
        # ? non-0000000?
        new_head_sha1 = self._branch_to_pushed_new_head_sha1(branch)
        if new_head_sha1 is None:
            raise RuntimeError(_("BUG: _pushed_branch_sequence() returned a branch"
                                 " '{branch}' with no corresponding PreReceiveTuple")
                               .format(branch=branch.to_log(LOG)))
        if new_head_sha1 == p4gf_const.NULL_COMMIT_SHA1:
            LOG.debug('_assign_branches_named(): skipping branch={}:'
                      ' new sha1 is 0000000'.format(branch.to_log(LOG)))
            # BUG FIX: was 'return', which aborted assignment for ALL
            # remaining pushed branches; the log message says "skipping
            # branch", so skip just this one (e.g. a branch deletion).
            continue
        if old_head_sha1:
            # Known previous head: walk new -> old.
            self._assign_branch_named_old_to_new(
                branch
                , old_head_sha1=old_head_sha1
                , new_head_sha1=new_head_sha1)
        else:
            # Brand-new branch: no old head to anchor the walk.
            self._assign_branch_named_any_to_new(
                branch
                , new_head_sha1=new_head_sha1)
def create_repo_client(p4, view_name, client_name, client_root, view, stream):
    """Create a Git Fusion repo client."""
    desc = (_("Created by Perforce Git Fusion for work in '{view}'.")
            .format(view=p4gf_translate.TranslateReponame.repo_to_git(view_name)))
    # if creating from a stream, set 'Stream' but not 'View'
    # otherwise, set 'View' but not 'Stream'
    values = {'Owner': p4gf_const.P4GF_USER,
              'LineEnd': NTR('unix'),
              'Root': client_root,
              'Options': CLIENT_OPTIONS,
              'Host': None,
              'Description': desc}
    if stream:
        values['Stream'] = stream
    else:
        values['View'] = view
    p4gf_util.set_spec(p4, 'client', spec_id=client_name, values=values)
    LOG.debug("Successfully created Git Fusion client %s", client_name)
def unpack_objects():
    """Find all existing pack objects in the Git repository, unpack them,
    and then remove the now defunct pack and index files.

    Returns True if successful, False otherwise.
    """
    pack_dir = os.path.join(".git", "objects", "pack")
    if not os.path.exists(pack_dir):
        return True
    pack_files = [os.path.join(pack_dir, f)
                  for f in os.listdir(pack_dir) if f.endswith('.pack')]
    if not pack_files:
        return True
    tmprepo, tmp_pack = _setup_temp_repo()
    if not tmp_pack:
        return False
    cmd = ['git', 'unpack-objects', '-q']
    for pack in pack_files:
        basename = os.path.basename(pack)
        # Move the .pack and its companion .idx out of the live repo
        # before unpacking ('foo.pack' -> 'foo.idx').
        moved_pack = os.path.join(tmp_pack, basename)
        os.rename(pack, moved_pack)
        os.rename(pack[:-4] + "idx",
                  os.path.join(tmp_pack, basename[:-4] + "idx"))
        ec = p4gf_proc.wait(cmd, stdin=moved_pack)
        if ec:
            raise RuntimeError(_("git-unpack-objects failed with '{}'").format(ec))
    shutil.rmtree(tmprepo)
    return True
def _copy(ctx         # P4GF context
          , prl           # list of pushed PreReceiveTuple elements
          , assigner      # commit-to-branch assignments
          , gsreview_coll  # git-swarm review collection GSReviewCollection
          ):
    """Copy a sequence of commits from git to Perforce.

    Returns error message, or None if okay.
    """
    branch_dict = ctx.branch_dict()
    LOG.debug2('allowing branch creation: {}'.format(ctx.branch_creation))
    for prt in prl:
        LOG.debug("copy: current branch_dict {0}".format(branch_dict))
        LOG.debug("copy {0}".format(prt))
        if not prt.ref.startswith('refs/heads/'):
            # Do not process tags at this point.
            continue
        branch = is_gitref_in_gf(prt.ref, branch_dict, is_lightweight=False)
        # Reject pushes that would create a branch when creation is disabled.
        if not branch and not ctx.branch_creation:
            return (_("Branch creation is not authorized for this repo."
                      "\nRejecting push of '{0}'.")
                    .format(prt.ref))
        err = p4gf_copy_to_p4.copy_git_changes_to_p4(
            ctx
            , prt=prt
            , assigner=assigner
            , gsreview_coll=gsreview_coll)
        if err:
            return err
    return None
def _lazy_init(debug=False):
    """If we have not yet configured the logging system, do so now,
    using a default set of configuration settings.
    """
    global _configured
    if _configured:
        return
    try:
        # Prefer an on-disk configuration file if one can be found.
        config_file_path = _find_config_file()
        if config_file_path:
            parser = _read_configuration(config_file_path)
        else:
            parser = configparser.ConfigParser()
        general, audit = _apply_default_config(parser)
        if debug:
            _print_config(_general_section, general)
            _print_config(_audit_section, audit)
        _configure_logger(general, ident=_syslog_ident)
        _configure_logger(audit, _audit_logger_name, _syslog_audit_ident)
        _configured = True
    # pylint:disable=W0703
    except Exception:
        # pylint:enable=W0703
        # Unable to open log file for write? Some other random error?
        # Printf and squelch.
        sys.stderr.write(_('Git Fusion: Unable to configure log.\n'))
        sys.stderr.write(traceback.format_exc())
def main():
    """Main entry point.

    Recursively rename mixed-case directory names under each given root
    to lower case, reporting (and exiting 1 on) any case collisions.
    """
    collisions = dict()
    # BUG FIX: was 'root_list = sys.argv', which (a) included argv[0]
    # (the script path itself) as a root to process, and (b) made the
    # "no roots given, default to '.'" branch unreachable because argv
    # always contains at least the program name.
    root_list = sys.argv[1:]
    if not root_list:
        root_list = ['.']
    for root in root_list:
        for (dir_path, dir_name) in dir_iter(root):
            dir_name_lower = dir_name.lower()
            if dir_name == dir_name_lower:
                continue  # already lower case, nothing to do
            src = os.path.join(dir_path, dir_name)
            dst = os.path.join(dir_path, dir_name_lower)
            if os.path.lexists(dst):
                # Cannot rename, there's already something
                # there at dst.
                collisions[src] = dst
                continue
            shutil.move(src, dst)
    if collisions:
        for src in sorted(collisions.keys()):
            sys.stderr.write(_("Case collision: {src:<30} {dst}")
                             .format(src=src, dst=collisions[src]) + "\n")
        sys.exit(1)