def delete(self, p4):
    """Delete our temporary branch spec from the server.

    No-op when the spec was never written.
    """
    if self.written:
        # Ignore errors: the spec may already have been removed.
        with p4.at_exception_level(p4.RAISE_NONE):
            p4gf_util.p4run_logged(p4, ['branch', '-d', self.name])
def _run_p4files(p4, path):
    """Run 'p4 files' on path and return depot paths of any files reported.

    Non-dict results (e.g. info/error messages) and entries without a
    'depotFile' key are skipped.
    """
    depot_paths = []
    for result in p4gf_util.p4run_logged(p4, ['files', path]):
        if isinstance(result, dict) and result.get('depotFile'):
            depot_paths.append(result.get('depotFile'))
    return depot_paths
def p4run(self, cmd, log_warnings=logging.WARNING, log_errors=logging.ERROR):
    """Run a Perforce command on our connection, with logging.

    Thin wrapper that forwards to p4gf_util.p4run_logged().
    """
    return p4gf_util.p4run_logged(
        self.p4, cmd, log_warnings=log_warnings, log_errors=log_errors)
def p4run(self, cmd, log_warnings=logging.WARNING, log_errors=logging.ERROR):
    """Run cmd against our Perforce connection, logging output.

    Delegates to p4gf_util.p4run_logged() with the given log levels.
    """
    log_levels = {'log_warnings': log_warnings, 'log_errors': log_errors}
    return p4gf_util.p4run_logged(self.p4, cmd, **log_levels)
def update_last_change(ctx, commit):
    """Update p4 keys that track the last change on a branch.

    Always records the change -> commit sha1 index entry; updates the
    per-branch "last" key only when this commit's change number is not
    lower than the cached one.
    """
    details = commit.details
    branch_id = details.branch_id
    # Unconditionally add a counter mapping change -> commit sha1.
    index_key = "git-fusion-index-branch-{repo},{change},{branch}".format(
        repo=details.viewname,
        change=details.changelist,
        branch=branch_id)
    p4gf_util.p4run_logged(ctx.p4gf, ['counter', '-u', index_key, commit.sha1])
    # Skip the "last" key when the cache already holds a higher change.
    cached = ObjectType.last_commits_cache.get(branch_id)
    if cached is not None and int(cached.split(',')[0]) > int(details.changelist):
        return
    last_key = "git-fusion-index-last-{repo},{branch_id}".format(
        repo=details.viewname, branch_id=branch_id)
    value = "{},{}".format(details.changelist, commit.sha1)
    p4gf_util.p4run_logged(ctx.p4gf, ['counter', '-u', last_key, value])
    ObjectType.last_commits_cache[branch_id] = value
def _p4run(p4, numbered_change, run_history, cmd, log_warnings, log_errors):
    """Record the command in history, then perform it.

    When a numbered pending changelist is active, the command is rewritten
    to target that changelist before being recorded and run.
    """
    full_cmd = numbered_change.add_change_option(cmd) if numbered_change else cmd
    run_history.append(full_cmd)
    return p4gf_util.p4run_logged(p4, full_cmd,
                                  log_warnings=log_warnings,
                                  log_errors=log_errors)
def last_change_for_branches(ctx, branch_ids, must_exist_local=False): ''' Returns highest numbered change for all branches which exists in p4. Searches //P4GF_DEPOT/objects/... for commits and returns ObjectType for commit with highest change_number, or None if no matching commit. If must_exist_local is True, only commits which also exist in the repo are considered in the search. ''' # if only one branch_id given, don't fetch them all if len(branch_ids) == 1: branch_id = branch_ids[0] if not branch_id in ObjectType.last_commits_cache: key = "git-fusion-index-last-{repo},{branch_id}".format( repo=ctx.config.view_name, branch_id=branch_id) r = p4gf_util.p4run_logged(ctx.p4gf, ['counters', '-u', '-e', key]) if r: ObjectType.last_commits_cache[branch_id] = r[0]['value'] if not branch_id in ObjectType.last_commits_cache: return None change, sha1 = ObjectType.last_commits_cache[branch_id].split(',') if must_exist_local and not p4gf_util.sha1_exists(sha1): return None return ObjectType.create_commit(sha1, ctx.config.view_name, int(change), branch_id) # if more than one branch, load up all branches into the cache ObjectType._load_last_commits_cache(ctx) highest = {} k = None for branch_id, v in ObjectType.last_commits_cache.items(): if not branch_id in branch_ids: continue change, sha1 = v.split(',') if branch_id in highest: if int(change) > highest[branch_id][0]: if must_exist_local and not p4gf_util.sha1_exists(sha1): continue highest[branch_id] = (int(change), sha1) elif not branch_ids or branch_id in branch_ids: if must_exist_local and not p4gf_util.sha1_exists(sha1): continue highest[branch_id] = (int(change), sha1) else: continue if not k or int(change) > highest[k][0]: k = branch_id if not k: return None return ObjectType.create_commit(highest[k][1], ctx.config.view_name, highest[k][0], k)
def write_map(self, p4, p4map):
    """Write our branch spec, with p4map as its view, to Perforce."""
    spec = p4gf_util.p4run_logged(p4, ['branch', '-o', self.name])[0]
    # 'locked' complicates cleanup/delete, so keep the spec unlocked.
    spec['Options'] = NTR('unlocked')
    spec['View'] = p4map.as_array()
    spec['Description'] = _("Temporary mapping created during 'git push'.")
    if not LOG.isEnabledFor(logging.DEBUG3):
        LOG.debug2('write_map() name={}'.format(self.name))
    else:
        LOG.debug3('write_map() name={} view={}'.format(self.name, spec['View']))
    p4.save_branch(spec)
    self.written = True
def _p4run(p4, numbered_change, run_history, cmd, log_warnings, log_errors):
    """Append the command to run_history, then execute it with logging."""
    if numbered_change:
        # Redirect the command into the active numbered changelist.
        cmd = numbered_change.add_change_option(cmd)
    run_history.append(cmd)
    result = p4gf_util.p4run_logged(
        p4, cmd, log_warnings=log_warnings, log_errors=log_errors)
    return result
def write_map(self, p4, p4map):
    """Save our branch spec to Perforce using p4map as the view."""
    spec = p4gf_util.p4run_logged(p4, ['branch', '-o', self.name])[0]
    spec.update({
        'Options': NTR('unlocked'),  # 'locked' complicates cleanup/delete.
        'View': p4map.as_array(),
        'Description': _("Temporary mapping created during 'git push'."),
    })
    if LOG.isEnabledFor(logging.DEBUG3):
        LOG.debug3('write_map() name={} view={}'.format(self.name, spec['View']))
    else:
        LOG.debug2('write_map() name={}'.format(self.name))
    p4.save_branch(spec)
    self.written = True
def is_locked_by_review(p4, clientmap, check_for_self=False):
    """Check whether any other GF/submit users have my views under Review.

    Returns (True, user) naming the holder, or (False, None).
    With check_for_self=True, our own reviews user also counts as a lock.
    """
    my_user = p4gf_util.gf_reviews_user_name()
    views = remove_exclusionary_maps(clientmap.lhs())
    result = p4gf_util.p4run_logged(
        p4, [NTR('reviews')] + [p4gf_util.dequote(v) for v in views])
    for row in result:
        reviewer = row['user']
        if not reviewer.startswith(p4gf_const.P4GF_REVIEWS_GF):
            continue
        if reviewer == p4gf_const.P4GF_REVIEWS__ALL_GF:
            # skip the union Reviews - used only by trigger
            continue
        if check_for_self and reviewer == my_user:
            return True, my_user
        if reviewer != my_user:
            # always check if another user has this view locked
            return True, reviewer
    return False, None
def commit_for_change(ctx, change, branch_id=None):
    ''' If a commit exists as specified, returns an ObjectType for the
    commit, else None.

    If no branch_id specified, returns first found matching commit.
    '''
    if not change:
        return None
    # First, try the in-memory cache; entries are (branch_id, sha1).
    from_cache = ObjectType.change_to_commit_cache.get(change, branch_id)
    if from_cache:
        return ObjectType.create_commit(from_cache[1], ctx.config.view_name,
                                        change, from_cache[0])
    # Not in cache; use the index counters to find matching commit(s).
    # '*' is a server-side wildcard matching any branch id.
    if not branch_id:
        branch_id = '*'
    key = "git-fusion-index-branch-{repo},{change},{branch}".format(
        repo=ctx.config.view_name, change=change, branch=branch_id)
    result_sha1 = None
    result_branch = None
    r = p4gf_util.p4run_logged(ctx.p4gf, ['counters', '-u', '-e', key])
    for rr in r:
        if not 'counter' in rr:
            # Skip info/error rows that aren't counter results.
            continue
        # NOTE(review): assumes every counter name matches
        # KEY_BRANCH_REGEX; if not, m is None and .group() raises.
        m = KEY_BRANCH_REGEX.search(rr['counter'])
        found_branch = m.group('branch_id')
        found_sha1 = rr['value']
        # Cache every result seen, even ones we don't return.
        ObjectType.change_to_commit_cache.append(change, found_branch,
                                                 found_sha1)
        if not branch_id == '*':
            if not found_branch == branch_id:
                continue
        # When searching with '*', the last match wins.
        result_sha1 = found_sha1
        result_branch = found_branch
    if not result_sha1:
        return None
    return ObjectType.create_commit(result_sha1, ctx.config.view_name,
                                    change, result_branch)
def _load_last_commits_cache(ctx):
    ''' If this is the first time called, load the cache of last commits.

    Fetches every "git-fusion-index-last-<repo>,*" counter for this repo
    and stores branch_id -> "<change>,<sha1>" in
    ObjectType.last_commits_cache, then marks the cache complete.
    '''
    if ObjectType.last_commits_cache_complete:
        return
    r = p4gf_util.p4run_logged(ctx.p4gf, [
        'counters', '-u', '-e',
        'git-fusion-index-last-{repo},*'.format(repo=ctx.config.view_name)])
    for rr in r:
        mk = KEY_LAST_REGEX.search(rr['counter'])
        if not mk:
            LOG.debug("ignoring unexpected p4 counter: {}".format(rr))
            continue
        mv = VALUE_LAST_REGEX.search(rr['value'])
        if not mv:
            LOG.debug("ignoring invalid p4 counter value: {}".format(rr))
            # Bug fix: previously fell through and cached the invalid
            # value anyway, contradicting the "ignoring" log message.
            continue
        ObjectType.last_commits_cache[mk.group('branch_id')] = rr['value']
        LOG.debug2('last change,commit for branch {} is {}'.format(
            mk.group('branch_id'), rr['value']))
    ObjectType.last_commits_cache_complete = True
def from_config(config, branch_id, p4=None):
    ''' Factory to seed from a config file.

    Returns None if config file lacks a complete and correct branch
    definition.

    Raises RuntimeError when the section defines both 'view' and
    'stream', or the stream is missing/invalid/a task stream.
    '''
    is_deleted = False
    if config.has_option(branch_id, p4gf_config.KEY_GIT_BRANCH_DELETED):
        is_deleted = config.getboolean(branch_id,
                                       p4gf_config.KEY_GIT_BRANCH_DELETED)
    result = Branch()
    result.branch_id = branch_id
    branch_config = config[branch_id]
    result.git_branch_name = branch_config.get(p4gf_config.KEY_GIT_BRANCH_NAME)
    result.depot_branch = branch_config.get(p4gf_config.KEY_DEPOT_BRANCH_ID)
    result.deleted = is_deleted
    # 'view' and 'stream' are mutually exclusive ways to define a branch.
    if p4gf_config.KEY_STREAM in branch_config and \
            p4gf_config.KEY_VIEW in branch_config:
        f = _("repository configuration section [{}] may not contain both"
              " 'view' and 'stream'")
        raise RuntimeError(f.format(branch_id))
    if p4gf_config.KEY_STREAM in branch_config:
        # Stream-based branch: derive the view from the stream spec.
        result.stream_name = branch_config.get(p4gf_config.KEY_STREAM)
        stream = p4gf_util.p4run_logged(p4,
                                        ['stream', '-ov', result.stream_name])
        LOG.debug("stream for branch:\n{}\n".format(stream))
        if not 'View' in stream[0]:
            f = _("repository configuration section [{}] '{}' does not refer"
                  " to a valid stream")
            raise RuntimeError(f.format(branch_id, result.stream_name))
        if stream[0]['Type'] == 'task':
            f = _("repository configuration section [{}] '{}' refers to a task stream")
            raise RuntimeError(f.format(branch_id, result.stream_name))
        # Virtual streams aren't writable; submit to the base parent.
        if stream[0]['Type'] == 'virtual':
            result.writable_stream_name = stream[0]['baseParent']
        else:
            result.writable_stream_name = result.stream_name
        view_lines = stream[0]['View']
        LOG.debug("View lines:\n{}\n".format(view_lines))
        # if this is a config2, stream branches will have stored
        # a snapshot of the stream's view at branch create time
        if p4gf_config.KEY_ORIGINAL_VIEW in branch_config:
            original_view_lines = branch_config.get(
                p4gf_config.KEY_ORIGINAL_VIEW)
            if isinstance(original_view_lines, str):
                original_view_lines = original_view_lines.splitlines()
            # Common: first line blank, view starts on second line.
            if original_view_lines and not len(original_view_lines[0].strip()):
                del original_view_lines[0]
            result.original_view_lines = original_view_lines
    else:
        # View-based branch: read the view lines straight from config.
        view_lines = branch_config.get(p4gf_config.KEY_VIEW)
        if isinstance(view_lines, str):
            view_lines = view_lines.splitlines()
        # Common: first line blank, view starts on second line.
        if view_lines and not len(view_lines[0].strip()):
            del view_lines[0]
        LOG.debug2("view_lines={}".format(view_lines))
    if not view_lines:
        return None
    # Convert tabs to spaces before handing the lines to P4.Map.
    if isinstance(view_lines, str):
        view_lines = view_lines.replace('\t', ' ')
    elif isinstance(view_lines, list):
        view_lines = [ln.replace('\t', ' ') for ln in view_lines]
    result.view_p4map = P4.Map(view_lines)
    result.view_lines = view_lines
    return result
def from_config(config, branch_id, p4=None):
    ''' Factory to seed a Branch from a config file section.

    Returns None if config file lacks a complete and correct branch
    definition; raises RuntimeError for contradictory or invalid
    stream/view settings.
    '''
    is_deleted = False
    if config.has_option(branch_id, p4gf_config.KEY_GIT_BRANCH_DELETED):
        is_deleted = config.getboolean(branch_id,
                                       p4gf_config.KEY_GIT_BRANCH_DELETED)
    result = Branch()
    result.branch_id = branch_id
    branch_config = config[branch_id]
    result.git_branch_name = branch_config.get(
        p4gf_config.KEY_GIT_BRANCH_NAME)
    result.depot_branch = branch_config.get(
        p4gf_config.KEY_DEPOT_BRANCH_ID)
    result.deleted = is_deleted
    # A branch is defined by EITHER a view OR a stream, never both.
    if p4gf_config.KEY_STREAM in branch_config and \
            p4gf_config.KEY_VIEW in branch_config:
        f = _("repository configuration section [{}] may not contain both"
              " 'view' and 'stream'")
        raise RuntimeError(f.format(branch_id))
    if p4gf_config.KEY_STREAM in branch_config:
        # Stream-backed branch: fetch the stream spec for its view.
        result.stream_name = branch_config.get(p4gf_config.KEY_STREAM)
        stream = p4gf_util.p4run_logged(
            p4, ['stream', '-ov', result.stream_name])
        LOG.debug("stream for branch:\n{}\n".format(stream))
        if not 'View' in stream[0]:
            f = _(
                "repository configuration section [{}] '{}' does not refer"
                " to a valid stream")
            raise RuntimeError(f.format(branch_id, result.stream_name))
        if stream[0]['Type'] == 'task':
            f = _(
                "repository configuration section [{}] '{}' refers to a task stream"
            )
            raise RuntimeError(f.format(branch_id, result.stream_name))
        # Virtual streams can't take submits; use the base parent instead.
        if stream[0]['Type'] == 'virtual':
            result.writable_stream_name = stream[0]['baseParent']
        else:
            result.writable_stream_name = result.stream_name
        view_lines = stream[0]['View']
        LOG.debug("View lines:\n{}\n".format(view_lines))
        # if this is a config2, stream branches will have stored
        # a snapshot of the stream's view at branch create time
        if p4gf_config.KEY_ORIGINAL_VIEW in branch_config:
            original_view_lines = branch_config.get(
                p4gf_config.KEY_ORIGINAL_VIEW)
            if isinstance(original_view_lines, str):
                original_view_lines = original_view_lines.splitlines()
            # Common: first line blank, view starts on second line.
            if original_view_lines and not len(
                    original_view_lines[0].strip()):
                del original_view_lines[0]
            result.original_view_lines = original_view_lines
    else:
        # View-backed branch: view lines come straight from the config.
        view_lines = branch_config.get(p4gf_config.KEY_VIEW)
        if isinstance(view_lines, str):
            view_lines = view_lines.splitlines()
        # Common: first line blank, view starts on second line.
        if view_lines and not len(view_lines[0].strip()):
            del view_lines[0]
        LOG.debug2("view_lines={}".format(view_lines))
    if not view_lines:
        return None
    # Normalize tabs to spaces before building the P4.Map.
    if isinstance(view_lines, str):
        view_lines = view_lines.replace('\t', ' ')
    elif isinstance(view_lines, list):
        view_lines = [ln.replace('\t', ' ') for ln in view_lines]
    result.view_p4map = P4.Map(view_lines)
    result.view_lines = view_lines
    return result
def _get_change_counter(p4):
    """Fetch and return the server's current 'p4 counter change' value."""
    return p4gf_util.p4run_logged(p4, ['counter', 'change'])[0]['value']
def delete_client(args, p4, client_name, metrics, prune_objs=True):
    """Delete the named Perforce client and its workspace. Raises
    P4Exception if the client is not present, or the client configuration
    is not set up as expected.

    Keyword arguments:
    args        -- parsed command line arguments
    p4          -- Git user's Perforce client
    client_name -- name of client to be deleted
    metrics     -- DeletionMetrics for collecting resulting metrics
    prune_objs  -- if True, delete associated objects from cache
    """
    # pylint: disable=R0912,R0915
    group_list = [
        p4gf_const.P4GF_GROUP_VIEW_PULL, p4gf_const.P4GF_GROUP_VIEW_PUSH
    ]
    p4.user = p4gf_const.P4GF_USER
    print_verbose(args, _("Checking for client '{}'...").format(client_name))
    if not p4gf_util.spec_exists(p4, 'client', client_name):
        raise P4.P4Exception(
            _("No such client '{}' defined").format(client_name))
    view_name = p4gf_util.client_to_view_name(client_name)
    p4gf_dir = p4gf_util.p4_to_p4gf_dir(p4)
    view_dirs = p4gf_view_dirs.from_p4gf_dir(p4gf_dir, view_name)
    # Make sure the client root is where we expect before removing trees.
    p4gf_util.ensure_spec_values(p4, 'client', client_name,
                                 {'Root': view_dirs.p4root})
    view_lock = None  # We're clobbering and deleting. Overrule locks.
    with p4gf_context.create_context(view_name, view_lock) as ctx:
        command_path = ctx.client_view_path()
        homedir = os.path.expanduser('~')
        # Refuse to delete anything that would clobber the home directory.
        raise_if_homedir(homedir, view_name, view_dirs.view_container)
        # Scan for objects associated only with this view so we can remove them.
        objects_to_delete = []
        if prune_objs:
            objects_to_delete = _find_client_commit_objects(
                args, p4, view_name)
        # Do we have a repo config file to delete?
        config_file = p4gf_config.depot_path_repo(view_name) + '*'
        config_file_exists = p4gf_util.depot_file_exists(p4, config_file)
        # What counters shall we delete?
        counter_list = []
        counter_list.append(
            p4gf_context.calc_last_copied_change_counter_name(
                view_name, p4gf_util.get_server_id()))
        for spec in p4.run('counters', '-u', '-e',
                           "git-fusion-index-last-{},*".format(view_name)):
            counter_list.append(spec['counter'])
        for spec in p4.run('counters', '-u', '-e',
                           "git-fusion-index-branch-{},*".format(view_name)):
            counter_list.append(spec['counter'])
        if not args.delete:
            # Dry run: print the commands that WOULD be executed.
            print(NTR('p4 sync -f {}#none').format(command_path))
            print(NTR('p4 client -f -d {}').format(client_name))
            print(NTR('rm -rf {}').format(view_dirs.view_container))
            print(
                NTR('Deleting {} objects from //{}/objects/...').format(
                    len(objects_to_delete), p4gf_const.P4GF_DEPOT))
            for group_template in group_list:
                group = group_template.format(view=view_name)
                print(NTR('p4 group -a -d {}').format(group))
            for c in counter_list:
                print(NTR('p4 counter -u -d {}').format(c))
            if config_file_exists:
                print(NTR('p4 sync -f {}').format(config_file))
                print(NTR('p4 delete {}').format(config_file))
                print(
                    NTR('p4 submit -d "Delete repo config for {view_name}" {config_file}'
                        ).format(view_name=view_name, config_file=config_file))
        else:
            # Real deletion: client files, client spec, local directory,
            # cached objects, groups, counters, and the repo config file.
            print_verbose(
                args, NTR('Removing client files for {}...').format(client_name))
            ctx.p4.run('sync', '-fq', command_path + '#none')
            print_verbose(args, NTR('Deleting client {}...').format(client_name))
            p4.run('client', '-df', client_name)
            metrics.clients += 1
            print_verbose(
                args,
                NTR("Deleting repo {0}'s directory {1}...").format(
                    view_name, view_dirs.view_container))
            _remove_tree(view_dirs.view_container, contents_only=False)
            metrics.files += _delete_files(p4, objects_to_delete, view_name)
            for group_template in group_list:
                _delete_group(args, p4, group_template.format(view=view_name),
                              metrics)
            for c in counter_list:
                _delete_counter(p4, c, metrics)
            if config_file_exists:
                p4gf_util.p4run_logged(p4, ['sync', '-fq', config_file])
                # Delete and submit the config file in its own numbered
                # changelist.
                with p4gf_util.NumberedChangelist(
                        p4=p4,
                        description=_("Delete repo config for '{}'").format(
                            view_name)) as nc:
                    nc.p4run(["delete", config_file])
                    nc.submit()
def delete_client(args, p4, client_name, metrics, prune_objs=True):
    """Delete the named Perforce client and its workspace. Raises
    P4Exception if the client is not present, or the client configuration
    is not set up as expected.

    Keyword arguments:
    args        -- parsed command line arguments
    p4          -- Git user's Perforce client
    client_name -- name of client to be deleted
    metrics     -- DeletionMetrics for collecting resulting metrics
    prune_objs  -- if True, delete associated objects from cache
    """
    # pylint: disable=R0912,R0915
    group_list = [p4gf_const.P4GF_GROUP_VIEW_PULL,
                  p4gf_const.P4GF_GROUP_VIEW_PUSH]
    p4.user = p4gf_const.P4GF_USER
    print_verbose(args, _("Checking for client '{}'...").format(client_name))
    if not p4gf_util.spec_exists(p4, 'client', client_name):
        raise P4.P4Exception(_("No such client '{}' defined")
                             .format(client_name))
    view_name = p4gf_util.client_to_view_name(client_name)
    p4gf_dir = p4gf_util.p4_to_p4gf_dir(p4)
    view_dirs = p4gf_view_dirs.from_p4gf_dir(p4gf_dir, view_name)
    # Verify the client root matches our expectation before any rm -rf.
    p4gf_util.ensure_spec_values(p4, 'client', client_name,
                                 {'Root': view_dirs.p4root})
    view_lock = None  # We're clobbering and deleting. Overrule locks.
    with p4gf_context.create_context(view_name, view_lock) as ctx:
        command_path = ctx.client_view_path()
        homedir = os.path.expanduser('~')
        # Guard against deleting the user's home directory.
        raise_if_homedir(homedir, view_name, view_dirs.view_container)
        # Scan for objects associated only with this view so we can remove them.
        objects_to_delete = []
        if prune_objs:
            objects_to_delete = _find_client_commit_objects(args, p4,
                                                            view_name)
        # Do we have a repo config file to delete?
        config_file = p4gf_config.depot_path_repo(view_name) + '*'
        config_file_exists = p4gf_util.depot_file_exists(p4, config_file)
        # What counters shall we delete?
        counter_list = []
        counter_list.append(p4gf_context.calc_last_copied_change_counter_name(
            view_name, p4gf_util.get_server_id()))
        for spec in p4.run('counters', '-u', '-e',
                           "git-fusion-index-last-{},*"
                           .format(view_name)):
            counter_list.append(spec['counter'])
        for spec in p4.run('counters', '-u', '-e',
                           "git-fusion-index-branch-{},*"
                           .format(view_name)):
            counter_list.append(spec['counter'])
        if not args.delete:
            # Dry run: only report what would be deleted.
            print(NTR('p4 sync -f {}#none').format(command_path))
            print(NTR('p4 client -f -d {}').format(client_name))
            print(NTR('rm -rf {}').format(view_dirs.view_container))
            print(NTR('Deleting {} objects from //{}/objects/...').format(
                len(objects_to_delete), p4gf_const.P4GF_DEPOT))
            for group_template in group_list:
                group = group_template.format(view=view_name)
                print(NTR('p4 group -a -d {}').format(group))
            for c in counter_list:
                print(NTR('p4 counter -u -d {}').format(c))
            if config_file_exists:
                print(NTR('p4 sync -f {}').format(config_file))
                print(NTR('p4 delete {}').format(config_file))
                print(NTR('p4 submit -d "Delete repo config for {view_name}" {config_file}')
                      .format(view_name=view_name, config_file=config_file))
        else:
            # Actually delete: workspace files, client spec, local tree,
            # cached objects, protection groups, counters, config file.
            print_verbose(args,
                          NTR('Removing client files for {}...').format(client_name))
            ctx.p4.run('sync', '-fq', command_path + '#none')
            print_verbose(args, NTR('Deleting client {}...').format(client_name))
            p4.run('client', '-df', client_name)
            metrics.clients += 1
            print_verbose(args,
                          NTR("Deleting repo {0}'s directory {1}...").format(view_name,
                                                                             view_dirs.view_container))
            _remove_tree(view_dirs.view_container, contents_only=False)
            metrics.files += _delete_files(p4, objects_to_delete, view_name)
            for group_template in group_list:
                _delete_group(args, p4, group_template.format(view=view_name),
                              metrics)
            for c in counter_list:
                _delete_counter(p4, c, metrics)
            if config_file_exists:
                p4gf_util.p4run_logged(p4, ['sync', '-fq', config_file])
                # Delete + submit the config file in a numbered changelist.
                with p4gf_util.NumberedChangelist(
                        p4=p4, description=_("Delete repo config for '{}'")
                        .format(view_name)) as nc:
                    nc.p4run(["delete", config_file])
                    nc.submit()