def _copy_print(self):
    """p4 print all revs and git-hash-object them into the git repo.

    Installs a PrintHandler as the P4 output handler, prints every
    revision in the configured path range (plus, when grafting, every
    revision in existence at the graft point), then records the set of
    printed revisions in self.printed_revs.
    """
    # Server levels above 32 can skip keyword expansion themselves via
    # 'print -k'; otherwise the handler must unexpand client-side.
    server_can_unexpand = self.ctx.p4.server_level > 32
    printhandler = PrintHandler(need_unexpand=not server_can_unexpand,
                                tempdir=self.ctx.tempdir.name)
    self.ctx.p4.handler = printhandler
    try:
        args = ["-a"]          # all revisions within the specified range
        if server_can_unexpand:
            args.append("-k")  # suppress keyword expansion on the server
        self.ctx.p4.run("print", args, self._path_range())
        printhandler.flush()
        printhandler.progress.progress_finish()

        # If also grafting, print all revs in existence at time of graft.
        if self.graft_change:
            args = []
            if server_can_unexpand:
                args.append("-k")
            path = self._graft_path()
            LOG.debug("Printing for grafted history: {}".format(path))
            self.ctx.p4.run("print", args, path)
            printhandler.flush()

            # If grafting, we just printed revs that refer to changelists
            # that have no P4Changelist counterpart in self.changes. Make
            # some skeletal versions now so that FstatHandler will have
            # someplace to hang its outputStat() P4File instances.
            for (_key, p4file) in printhandler.revs.revs:
                if p4file.change not in self.changes:
                    cl = P4Changelist()
                    cl.change = p4file.change
                    self.changes[p4file.change] = cl
    finally:
        # Always detach the handler -- even when a print raises --
        # so later p4 runs don't feed output into this PrintHandler.
        self.ctx.p4.handler = None
    self.printed_revs = printhandler.revs
def get_keys_changes(p4, low, high):
    """Retrieve the set of changes made to the user keys between the two changes.

    Keyword arguments:
    p4 -- P4 API
    low -- earliest change for which to retrieve changes
    high -- latest change for which to retrieve changes

    """
    rev_range = '@{},{}'.format(low, high)
    # Fetch all changelists touching the keys path within the range.
    change_dict = P4Changelist.create_changelist_list_as_dict(
        p4, KEYS_PATH + rev_range)
    root = '//{}/users'.format(p4gf_const.P4GF_DEPOT)
    # Describe each change, in ascending changelist-number order.
    described = []
    for change_num in sorted(change_dict.keys()):
        described.append(
            P4Changelist.create_using_describe(p4, change_num, root))
    return described
def _setup(self, start_at, stop_at):
    """Set RevRange rev_range and collect the changelists to copy."""
    self.rev_range = RevRange.from_start_stop(self.ctx, start_at, stop_at)
    LOG.debug(
        "Revision range to copy to Git: {rr}".format(rr=self.rev_range))

    # Build the dict of changelists that fall within the range.
    self.changes = P4Changelist.create_changelist_list_as_dict(
        self.ctx.p4, self._path_range())

    graft_num = self.rev_range.graft_change_num
    if not graft_num:
        return

    # Grafting: fetch the graft changelist itself. Its depotFile output
    # is ignored -- only change/desc/time/user are wanted.
    self.graft_change = P4Changelist.create_using_describe(
        self.ctx.p4, graft_num, "ignore_depot_files")
    suffix = '\n[grafted history before {start_at}]'.format(start_at=start_at)
    self.graft_change.description += suffix
def _setup(self, start_at, stop_at):
    """Compute self.rev_range and the dict of changelists to copy."""
    rev_range = RevRange.from_start_stop(self.ctx, start_at, stop_at)
    self.rev_range = rev_range
    LOG.debug("Revision range to copy to Git: {rr}".format(rr=rev_range))

    # All changelists within the configured path range become candidates.
    self.changes = P4Changelist.create_changelist_list_as_dict(
        self.ctx.p4, self._path_range())

    if rev_range.graft_change_num:
        # Only change/desc/time/user matter; depotFile elements are ignored.
        graft = P4Changelist.create_using_describe(
            self.ctx.p4, rev_range.graft_change_num, "ignore_depot_files")
        graft.description += (
            '\n[grafted history before {start_at}]'.format(start_at=start_at))
        self.graft_change = graft
def _copy_one(self, cnob):
    '''
    Copy one ChangeNumOnBranch element from Perforce to Git.

    p4 print all of its file revisions directly into .git/objects as blobs
    add them to the git-fast-import script

    cnob -- carries change_num, branch_id and path for one
            (changelist, branch) pair; presumably built by the
            surrounding copy loop -- TODO confirm against caller.
    '''
    _debug2('_copy_one {}', cnob)
    # .get() yields None for an unknown branch_id; switched_to_branch()
    # is then entered with None.
    branch = self.ctx.branch_dict().get(cnob.branch_id)
    with self.ctx.switched_to_branch(branch):
        # Keep track of the highest changelist number we've
        # copied. Can't rely on
        # self.change_num_on_branch_list[-1] starting with our
        # highest changelist number because we might discover
        # new branches during later calls to _copy_one().
        change_num = int(cnob.change_num)
        if self.highest_copied_change_num < change_num:
            self.highest_copied_change_num = change_num
        self.cnob_count += 1

        # p4 changes -l -m1 @nnn
        #
        # Gets changelist description (including possible DescInfo),
        # owner, time.
        with self.p2g.perf.timer[CHANGES1]:
            r = self.ctx.p4run([
                'changes',
                '-l',    # include full changelist description
                '-m1',   # just this one changelist
                '@{}'.format(cnob.change_num)])
            p4changelist = P4Changelist.create_using_changes(r[0])

        # p4 filelog -c nnnn -m1 //change_path/...
        #
        # Gets integration sources for parent calculations.
        # Gets files deleted at this rev (which 'p4 print' won't on 11.1).
        # Gets file list for this changelist.
        #
        # Cannot use p4 filelog //{client}/...@=nnn
        # That does request does not return one fstat for each file
        # in changelist nnn.
        with self.p2g.perf.timer[FILELOG]:
            cmd = ['filelog', '-c', cnob.change_num, '-m1', cnob.path]
            filelog_results = self.ctx.p4run(cmd)

        ### Detect lightweight integration sources not yet known.
        ### Create new Branch views to map them into this repo,
        ### run 'p4 changes' on them to add their history to our
        ### change_num_on_branch_list work queue, sorted.
        dbil = self.p2g.to_depot_branch_list_mc(change_num, filelog_results)
        new_dbi_set = set(dbil) - self.known_dbi_set
        if new_dbi_set:
            ### push_front cnob
            for dbi in new_dbi_set:
                LOG.error('AHA detected new integ source: {}'.format(dbi))
                ### process dbi into branch, branch into more cnobs
                ### mergesort new cnobs into cnob deque
            self.known_dbi_set.update(new_dbi_set)
            ### return, we'll deal with this later
            ### +++ save changes and filelog work

        # p4 print every revision modified by this changelist.
        #
        # +++ Also print every revision AFTER this changelist. There's a
        # +++ high probability that we'll need those revisons later.
        # +++ Printing them all now _greatly_ reduces the total number of
        # +++ 'p4 print' requests, reduces the Perforce server's workload
        # +++ (and thus repsponse time) in generating incremental file
        # +++ revisions from any files stored using RCS deltas (aka most
        # +++ files).
        with self.p2g.perf.timer[CALC_PRINT]:
            depot_path_rev_list = []
            for rr in filelog_results:
                # Skip non-dict info/message results and entries that
                # lack the expected tagged fields.
                if ((not isinstance(rr, dict)) or
                        ('depotFile' not in rr) or
                        ('rev' not in rr)):
                    continue
                p4file = P4File.create_from_filelog(rr)
                p4changelist.files.append(p4file)
                depot_path = rr['depotFile']
                # filelog reports 'rev' as a list; -m1 limits it to the
                # single newest revision, hence [0].
                rev = rr['rev'][0]
                if self._already_printed(depot_path, rev):
                    continue
                # '#rev,head' also fetches all later revisions -- see
                # the +++ note above.
                depot_path_rev_list.append('{}#{},head'.format(
                    depot_path, rev))
            rev_total = len(p4changelist.files)
        _debug2(
            'Printing files.'
            ' change: {change_num}'
            ' total: {rev_total}'
            ' need_print: {rev_need_print}'
            ' already_printed: {rev_already_printed}',
            change_num=cnob.change_num,
            rev_need_print=len(depot_path_rev_list),
            rev_already_printed=rev_total - len(depot_path_rev_list),
            rev_total=rev_total)

        if depot_path_rev_list:
            with self.p2g.perf.timer[PRINT2]:
                printhandler = self._print_handler()
                server_can_unexpand = self.ctx.p4.server_level > 32
                args = ["-a"]           # all revisions in the given ranges
                if server_can_unexpand:
                    args.append("-k")   # server skips keyword expansion
                cmd = ['print'] + args + depot_path_rev_list
                with p4gf_util.RawEncoding(self.ctx.p4) \
                   , p4gf_util.Handler(self.ctx.p4, printhandler) \
                   , self.ctx.p4.at_exception_level(P4.RAISE_ALL):
                    self.ctx.p4run(cmd)
                printhandler.flush()

        # Find each file revision's blob sha1.
        # NOTE(review): runs for all files, including already-printed
        # revs whose symlinks were created by an earlier print.
        for p4file in p4changelist.files:
            symlink_path = _depot_rev_to_symlink(
                depot_path=p4file.depot_path,
                rev=p4file.revision,
                symlink_dir=self.symlink_dir)
            blob_path = os.readlink(symlink_path)
            p4file.sha1 = _blob_path_to_sha1(blob_path)

        # If we can copy the Git commit and its tree objects from
        # our gitmirror, do so.

        # Non-MemCapped code calls all FI functions with
        # timer[FAST_IMPORT] as outer container, so must we.
        with self.p2g.perf.timer[FAST_IMPORT]:
            if self.p2g._fast_import_from_gitmirror(p4changelist, branch):
                LOG.debug2('@{} fast-imported from gitmirror.'.format(
                    cnob.change_num))
                return

            # Build a git-fast-import commit object.
            ### _fast_import_from_p4() runs its own filelog to
            ### discover integ sources. That needs to be hoisted up
            ### to our own filelog and passed down to avoid
            ### duplicate work.
            LOG.debug2('@{} fast-importing from p4 changelist.'.format(
                cnob.change_num))
            self.p2g._fast_import_from_p4_mc(
                change=p4changelist,
                branch=branch,
                filelog_results=filelog_results,
                mark_to_branch_id=self.mark_to_branch_id,
                branch_id_to_temp_name=self.branch_id_to_temp_name)
def _copy_one(self, cnob):
    '''
    Copy one ChangeNumOnBranch element from Perforce to Git.

    p4 print all of its file revisions directly into .git/objects as blobs
    add them to the git-fast-import script

    cnob -- one (changelist, branch) work item exposing change_num,
            branch_id and path; assumed supplied by the surrounding
            copy loop -- verify against caller.
    '''
    _debug2('_copy_one {}', cnob)
    # Unknown branch_id gives None; switched_to_branch() receives it as-is.
    branch = self.ctx.branch_dict().get(cnob.branch_id)
    with self.ctx.switched_to_branch(branch):
        # Keep track of the highest changelist number we've
        # copied. Can't rely on
        # self.change_num_on_branch_list[-1] starting with our
        # highest changelist number because we might discover
        # new branches during later calls to _copy_one().
        change_num = int(cnob.change_num)
        if self.highest_copied_change_num < change_num:
            self.highest_copied_change_num = change_num
        self.cnob_count += 1

        # p4 changes -l -m1 @nnn
        #
        # Gets changelist description (including possible DescInfo),
        # owner, time.
        with self.p2g.perf.timer[CHANGES1]:
            r = self.ctx.p4run([
                'changes',
                '-l',    # include full changelist description
                '-m1',   # just this one changelist
                '@{}'.format(cnob.change_num)])
            p4changelist = P4Changelist.create_using_changes(r[0])

        # p4 filelog -c nnnn -m1 //change_path/...
        #
        # Gets integration sources for parent calculations.
        # Gets files deleted at this rev (which 'p4 print' won't on 11.1).
        # Gets file list for this changelist.
        #
        # Cannot use p4 filelog //{client}/...@=nnn
        # That does request does not return one fstat for each file
        # in changelist nnn.
        with self.p2g.perf.timer[FILELOG]:
            cmd = ['filelog', '-c', cnob.change_num, '-m1', cnob.path]
            filelog_results = self.ctx.p4run(cmd)

        ### Detect lightweight integration sources not yet known.
        ### Create new Branch views to map them into this repo,
        ### run 'p4 changes' on them to add their history to our
        ### change_num_on_branch_list work queue, sorted.
        dbil = self.p2g.to_depot_branch_list_mc(change_num, filelog_results)
        new_dbi_set = set(dbil) - self.known_dbi_set
        if new_dbi_set:
            ### push_front cnob
            for dbi in new_dbi_set:
                LOG.error('AHA detected new integ source: {}'.format(dbi))
                ### process dbi into branch, branch into more cnobs
                ### mergesort new cnobs into cnob deque
            self.known_dbi_set.update(new_dbi_set)
            ### return, we'll deal with this later
            ### +++ save changes and filelog work

        # p4 print every revision modified by this changelist.
        #
        # +++ Also print every revision AFTER this changelist. There's a
        # +++ high probability that we'll need those revisons later.
        # +++ Printing them all now _greatly_ reduces the total number of
        # +++ 'p4 print' requests, reduces the Perforce server's workload
        # +++ (and thus repsponse time) in generating incremental file
        # +++ revisions from any files stored using RCS deltas (aka most
        # +++ files).
        with self.p2g.perf.timer[CALC_PRINT]:
            depot_path_rev_list = []
            for rr in filelog_results:
                # Non-dict results (info/messages) and malformed
                # entries carry no file revision: skip them.
                if ((not isinstance(rr, dict)) or
                        ('depotFile' not in rr) or
                        ('rev' not in rr)):
                    continue
                p4file = P4File.create_from_filelog(rr)
                p4changelist.files.append(p4file)
                depot_path = rr['depotFile']
                # 'rev' arrives as a list (one element per revision);
                # with -m1 only the newest is present.
                rev = rr['rev'][0]
                if self._already_printed(depot_path, rev):
                    continue
                # The ',head' suffix pre-fetches later revisions too
                # (see +++ comment above).
                depot_path_rev_list.append('{}#{},head'.format(
                    depot_path, rev))
            rev_total = len(p4changelist.files)
        _debug2(
            'Printing files.'
            ' change: {change_num}'
            ' total: {rev_total}'
            ' need_print: {rev_need_print}'
            ' already_printed: {rev_already_printed}',
            change_num=cnob.change_num,
            rev_need_print=len(depot_path_rev_list),
            rev_already_printed=rev_total - len(depot_path_rev_list),
            rev_total=rev_total)

        if depot_path_rev_list:
            with self.p2g.perf.timer[PRINT2]:
                printhandler = self._print_handler()
                server_can_unexpand = self.ctx.p4.server_level > 32
                args = ["-a"]           # every revision in each range
                if server_can_unexpand:
                    args.append("-k")   # no keyword expansion server-side
                cmd = ['print'] + args + depot_path_rev_list
                with p4gf_util.RawEncoding(self.ctx.p4) \
                   , p4gf_util.Handler(self.ctx.p4, printhandler) \
                   , self.ctx.p4.at_exception_level(P4.RAISE_ALL):
                    self.ctx.p4run(cmd)
                printhandler.flush()

        # Find each file revision's blob sha1.
        # NOTE(review): also covers revs skipped above as already
        # printed -- their symlinks exist from a previous print.
        for p4file in p4changelist.files:
            symlink_path = _depot_rev_to_symlink(
                depot_path=p4file.depot_path,
                rev=p4file.revision,
                symlink_dir=self.symlink_dir)
            blob_path = os.readlink(symlink_path)
            p4file.sha1 = _blob_path_to_sha1(blob_path)

        # If we can copy the Git commit and its tree objects from
        # our gitmirror, do so.

        # Non-MemCapped code calls all FI functions with
        # timer[FAST_IMPORT] as outer container, so must we.
        with self.p2g.perf.timer[FAST_IMPORT]:
            if self.p2g._fast_import_from_gitmirror(p4changelist, branch):
                LOG.debug2('@{} fast-imported from gitmirror.'.format(
                    cnob.change_num))
                return

            # Build a git-fast-import commit object.
            ### _fast_import_from_p4() runs its own filelog to
            ### discover integ sources. That needs to be hoisted up
            ### to our own filelog and passed down to avoid
            ### duplicate work.
            LOG.debug2('@{} fast-importing from p4 changelist.'.format(
                cnob.change_num))
            self.p2g._fast_import_from_p4_mc(
                change=p4changelist,
                branch=branch,
                filelog_results=filelog_results,
                mark_to_branch_id=self.mark_to_branch_id,
                branch_id_to_temp_name=self.branch_id_to_temp_name)