def add_directory(self, path, parent_baton, copyfrom_path,
                  copyfrom_revision, dir_pool=None):
    self._checkparentdir(parent_baton)
    baton = self._opendir(path)

    br_path, branch = self.meta.split_branch_path(path)[:2]
    if br_path is not None:
        if not copyfrom_path and not br_path:
            # This handles the case where a branch root is
            # replaced without copy info. It will show up as a
            # deletion and then an add.
            self.meta.closebranches.discard(branch)
            self.current.emptybranches[branch] = True
        else:
            self.current.emptybranches[branch] = False
    if br_path is None or not copyfrom_path:
        return baton

    if self.meta.get_path_tag(path):
        del self.current.emptybranches[branch]
        return baton

    tag = self.meta.get_path_tag(copyfrom_path)
    if tag not in self.meta.tags:
        tag = None
        if not self.meta.is_path_valid(copyfrom_path, existing=False):
            # The source path only exists at copyfrom_revision, use
            # existing=False to guess a possible branch location and
            # test it against the filemap. The actual path and
            # revision will be resolved below if necessary.
            self.addmissing(path, isdir=True)
            return baton
    if tag:
        changeid = self.meta.tags[tag]
        source_rev, source_branch = self.meta.get_source_rev(changeid)[:2]
        frompath = ''
    else:
        source_rev = copyfrom_revision
        frompath, source_branch = self.meta.split_branch_path(
            copyfrom_path)[:2]
    new_hash = self.meta.get_parent_revision(source_rev + 1,
                                             source_branch, True)
    if frompath is None or new_hash == node.nullid:
        self.addmissing(path, isdir=True)
        return baton

    fromctx = self._getctx(new_hash)
    if frompath != '/' and frompath != '':
        frompath = '%s/' % frompath
    else:
        frompath = ''

    copyfromparent = False
    if frompath == '' and br_path == '':
        pnode = self.meta.get_parent_revision(
            self.current.rev.revnum, branch)
        if pnode == new_hash:
            # Data parent is topological parent and relative paths
            # are the same, no need to do anything but restore
            # files marked as deleted.
            copyfromparent = True
        # Get the parent which would have been used for this branch
        # without the replace action.
        oldpnode = self.meta.get_parent_revision(
            self.current.rev.revnum, branch, exact=True)
        if (oldpnode != revlog.nullid
            and util.isancestor(self._getctx(oldpnode), fromctx)):
            # Branch-wide replacement, unmark the branch as deleted
            self.meta.closebranches.discard(branch)

    svncopies = {}
    copies = {}
    for f in fromctx:
        if not f.startswith(frompath):
            continue
        dest = path + '/' + f[len(frompath):]
        if not self.meta.is_path_valid(dest):
            continue
        if dest in self._deleted:
            self._deleted.remove(dest)
        if copyfromparent:
            continue
        svncopies[dest] = CopiedFile(new_hash, f, None)
        if branch == source_branch:
            copies[dest] = f
    if copies:
        # Preserve the directory copy records if no file was changed
        # between the source and destination revisions, or discard
        # them completely.
        parentid = self.meta.get_parent_revision(
            self.current.rev.revnum, branch)
        if parentid != revlog.nullid:
            parentctx = self._getctx(parentid)
            for k, v in copies.iteritems():
                if util.issamefile(parentctx, fromctx, v):
                    svncopies[k].copypath = v
    self._svncopies.update(svncopies)

    # Copy the externals definitions of copied directories
    fromext = svnexternals.parse(self.ui, fromctx)
    for p, v in fromext.iteritems():
        pp = p and (p + '/') or ''
        if pp.startswith(frompath):
            dest = (path + '/' + pp[len(frompath):]).rstrip('/')
            self.current.externals[dest] = v
    return baton
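
# The loop over fromctx above boils down to a prefix rewrite: every file under
# the copy source prefix is mapped onto the copy destination. The following is
# a minimal standalone sketch of that idea; remap_copied_paths is a
# hypothetical helper used only for illustration, not part of hgsubversion.
def remap_copied_paths(files, frompath, destpath):
    """Map files under the copy source prefix onto the copy destination.

    frompath is '' when a whole branch was copied, otherwise a prefix
    ending in '/', matching how add_directory() normalizes it above.
    """
    copies = {}
    for f in files:
        if not f.startswith(frompath):
            continue
        copies[destpath + '/' + f[len(frompath):]] = f
    return copies

# Example: copying 'subdir' to 'newdir' remaps 'subdir/a.txt' to 'newdir/a.txt'
# and leaves 'other.txt' alone:
#   remap_copied_paths(['subdir/a.txt', 'other.txt'], 'subdir/', 'newdir')
#   -> {'newdir/a.txt': 'subdir/a.txt'}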
def update_branch_tag_map_for_rev(self, revision):
    """Given a revision object, determine changes to branches.

    Returns: a dict of {
        'branches': (added_branches, self.closebranches),
    } where added_branches is a dict whose keys are branch names and
    whose values are the place each branch came from, and the
    deletions are a set of the deleted branch names.
    """
    paths = revision.paths
    added_branches = {}

    # Reset the tags delta before detecting the new one, and take
    # care not to fill them until done since split_branch_path()
    # uses them.
    self.addedtags, self.deletedtags = {}, {}
    addedtags, deletedtags = {}, {}
    self.closebranches = set()
    for p in sorted(paths):
        t_name = self.get_path_tag(p)
        if t_name:
            src_p, src_rev = paths[p].copyfrom_path, paths[p].copyfrom_rev
            if src_p is not None and src_rev is not None:
                file, branch = self.split_branch_path(src_p)[:2]
                from_tag = self.get_path_tag(src_p)
                if file is None and not from_tag:
                    continue
                if from_tag and from_tag not in self.tags:
                    # Ignore copies from unknown tags
                    continue
                if not file:
                    # Direct branch or tag copy
                    if from_tag:
                        changeid = self.tags[from_tag]
                        src_rev, branch = self.get_source_rev(changeid)[:2]
                    if t_name not in addedtags:
                        addedtags[t_name] = branch, src_rev
                else:
                    # Subbranch or subtag copy
                    t_name = t_name[:-(len(file)+1)]
                    found = t_name in addedtags
                    if found and src_rev > addedtags[t_name][1]:
                        addedtags[t_name] = branch, src_rev
            elif (paths[p].action == 'D' and p.endswith(t_name)
                  and t_name in self.tags):
                branch = self.get_source_rev(self.tags[t_name])[1]
                deletedtags[t_name] = branch, None
            continue

        # At this point we know the path is not a tag. In that
        # case, we only care if it is the root of a new branch (in
        # this function). This is determined by the following
        # checks:
        # 1. Is the file located inside any currently known
        #    branch? If yes, then we're done with it, this isn't
        #    interesting.
        # 2. Does the file have copyfrom information? If yes, and
        #    the branch is being replaced by what would be an
        #    ancestor, treat it as a regular revert. Otherwise,
        #    we're done: this is a new branch, and we record the
        #    copyfrom in added_branches if it comes from the root
        #    of another branch, or create it from scratch.
        # 3. Neither of the above. This could be a branch, but it
        #    might never work out for us. It's only ever a branch
        #    (as far as we're concerned) if it gets committed to,
        #    which we have to detect at file-write time anyway. So
        #    we do nothing here.
        # 4. It's the root of an already-known branch, with an
        #    action of 'D'. We mark the branch as deleted.
        # 5. It's the parent directory of one or more
        #    already-known branches, so we mark them as deleted.
        # 6. It's a branch being replaced by another branch or a new
        #    directory - the action will be 'R'.
        fi, br = self.split_branch_path(p)[:2]
        if fi is not None:
            if fi == '':
                if paths[p].action == 'D':
                    self.closebranches.add(br)  # case 4
                elif paths[p].action == 'R':
                    # Check the replacing source is not an ancestor
                    # branch of the branch being replaced; this
                    # would just be a revert.
                    if paths[p].copyfrom_path:
                        cfi, cbr = self.split_branch_path(
                            paths[p].copyfrom_path,
                            paths[p].copyfrom_rev)[:2]
                        if cfi == '':
                            cctx = self.repo[self.get_parent_revision(
                                paths[p].copyfrom_rev + 1, cbr)]
                            ctx = self.repo[self.get_parent_revision(
                                revision.revnum, br)]
                            if cctx and util.isancestor(ctx, cctx):
                                continue
                    parent = self._determine_parent_branch(
                        p, paths[p].copyfrom_path, paths[p].copyfrom_rev,
                        revision.revnum)
                    added_branches.update(parent)
            continue  # case 1
        if paths[p].action == 'D':
            for known in self.branches:
                if self.remotename(known).startswith(p):
                    self.closebranches.add(known)  # case 5
        parent = self._determine_parent_branch(
            p, paths[p].copyfrom_path, paths[p].copyfrom_rev,
            revision.revnum)
        if not parent and paths[p].copyfrom_path:
            bpath, branch = self.split_branch_path(p, False)[:2]
            if (bpath is not None
                and branch not in self.branches
                and branch not in added_branches):
                parent = {branch: (None, 0, revision.revnum)}
            elif bpath is None:
                srcpath = paths[p].copyfrom_path
                srcrev = paths[p].copyfrom_rev
                parent = {}
                for br in self.branches:
                    rn = self.remotename(br)
                    if rn.startswith(srcpath[1:] + '/'):
                        bname = posixpath.basename(rn)
                        newbr = posixpath.join(p, bname)
                        parent.update(self._determine_parent_branch(
                            newbr, rn, srcrev, revision.revnum))
        added_branches.update(parent)

    self.addedtags, self.deletedtags = addedtags, deletedtags
    return {
        'branches': (added_branches, self.closebranches),
    }
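
# A standalone sketch of the path classification the numbered comment above
# describes: given a changed path and the currently known branch paths, decide
# whether it is a branch root (cases 4/6), a file inside a branch (case 1), or
# a parent directory of known branches (case 5). classify_path is hypothetical
# and only illustrates the decision, it is not hgsubversion API.
def classify_path(path, known_branch_paths):
    for b in known_branch_paths:
        if path == b:
            return 'branch-root'         # acted on directly (cases 4 and 6)
        if path.startswith(b + '/'):
            return 'inside-branch'       # case 1: nothing to do here
    if any(b.startswith(path + '/') for b in known_branch_paths):
        return 'parent-of-branches'      # case 5: close the nested branches
    return 'possible-new-branch'         # cases 2 and 3

# Example:
#   classify_path('trunk', ['trunk', 'branches/stable'])
#   -> 'branch-root'
#   classify_path('branches', ['trunk', 'branches/stable'])
#   -> 'parent-of-branches'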
def convert_rev(ui, meta, svn, r, tbdelta, firstrun):
    if svnwrap.subversion_version >= (1, 9, 0):
        raise hgerror.Abort(
            "hgsubversion doesn't support stupid mode with Subversion 1.9."
            ' Please email [email protected] and let us know you'
            ' saw this, otherwise we may remove stupid mode entirely.')

    # this server fails at replay
    if meta.filemap:
        raise hgerror.Abort('filemaps currently unsupported with stupid replay.')

    branches = branches_in_paths(meta, tbdelta, r.paths, r.revnum,
                                 svn.checkpath, svn.list_files, firstrun)
    bad_branch_paths = {}
    for br, bp in branches.iteritems():
        bad_branch_paths[br] = []

        # This next block might be needed, but for now I'm omitting it until it
        # can be proven necessary.
        # for bad in branches.values():
        #     if bad.startswith(bp) and len(bad) > len(bp):
        #         bad_branch_paths[br].append(bad[len(bp)+1:])

        # We've got a branch that contains other branches. We have to be careful
        # to get results similar to real replay in this case.
        for existingbr in meta.branches:
            bad = meta.remotename(existingbr)
            if bad.startswith(bp) and len(bad) > len(bp):
                bad_branch_paths[br].append(bad[len(bp)+1:])

    deleted_branches = {}
    for p in r.paths:
        tag = meta.get_path_tag(p)
        if tag and tag not in meta.tags:
            continue
        branch = meta.localname(p)
        if not (r.paths[p].action == 'R' and branch in meta.branches):
            continue
        # Check the branch is not being replaced by one of its
        # ancestors, it happens a lot with project-wide reverts.
        frompath = r.paths[p].copyfrom_path
        frompath, frombranch = meta.split_branch_path(
            frompath, existing=False)[:2]
        if frompath == '':
            fromnode = meta.get_parent_revision(
                r.paths[p].copyfrom_rev + 1, frombranch, exact=True)
            if fromnode != node.nullid:
                fromctx = meta.repo[fromnode]
                pctx = meta.repo[meta.get_parent_revision(
                    r.revnum, branch, exact=True)]
                if util.isancestor(pctx, fromctx):
                    continue
        closed = checkbranch(meta, r, branch)
        if closed is not None:
            deleted_branches[branch] = closed

    date = meta.fixdate(r.date)
    check_deleted_branches = set(tbdelta['branches'][1])
    for b in branches:
        if meta.skipbranch(b):
            continue
        parentctx = meta.repo[meta.get_parent_revision(r.revnum, b)]
        tag = meta.get_path_tag(meta.remotename(b))
        kind = svn.checkpath(branches[b], r.revnum)
        if kind != 'd':
            if not tag:
                # Branch does not exist at this revision. Get parent
                # revision and remove everything.
                deleted_branches[b] = parentctx.node()
            continue

        # The nullrev check might not be necessary in theory but svn <
        # 1.7 failed to diff branch creation so the diff_branchrev()
        # path does not support this case with svn >= 1.7. We can fix
        # it, or we can force the existing fetch_branchrev() path. Do
        # the latter for now.
        incremental = (meta.revmap.firstpulled > 0 and
                       parentctx.rev() != node.nullrev and
                       not firstrun)

        if incremental:
            try:
                files_touched, filectxfn2 = diff_branchrev(
                    ui, svn, meta, b, branches[b], r, parentctx)
            except BadPatchApply, e:
                # Either this revision or the previous one does not exist.
                ui.note("Fetching entire revision: %s.\n" % e.args[0])
                incremental = False
        if not incremental:
            files_touched, filectxfn2 = fetch_branchrev(
                svn, meta, b, branches[b], r, parentctx)

        externals = {}
        if meta.layout != 'single':
            externals = fetch_externals(ui, svn, branches[b], r, parentctx)
            externals = svnexternals.getchanges(ui, meta.repo, parentctx,
                                                externals)
            files_touched.extend(externals)

        def filectxfn(repo, memctx, path):
            if path in externals:
                if externals[path] is None:
                    raise IOError(errno.ENOENT, 'no externals')
                return compathacks.makememfilectx(repo, memctx=memctx,
                                                  path=path,
                                                  data=externals[path],
                                                  islink=False,
                                                  isexec=False,
                                                  copied=None)
            for bad in bad_branch_paths[b]:
                if path.startswith(bad):
                    raise IOError(errno.ENOENT, 'Path %s is bad' % path)
            return filectxfn2(repo, memctx, path)

        if '' in files_touched:
            files_touched.remove('')
        excluded = [f for f in files_touched if f not in meta.filemap]
        for f in excluded:
            files_touched.remove(f)

        if b:
            # Regular tag without modifications, it will be committed by
            # svnmeta.committag(), we can skip the whole branch for now
            if (tag and tag not in meta.tags
                and b not in meta.branches
                and b not in meta.repo.branchmap()
                and not files_touched):
                continue

        if parentctx.node() == node.nullid and not files_touched:
            meta.repo.ui.debug('skipping commit since parent is null and no files touched.\n')
            continue

        for f in files_touched:
            if f:
                # this is a case that really shouldn't ever happen, it means
                # something is very wrong
                assert f[0] != '/'

        extra = meta.genextra(r.revnum, b)
        if tag:
            if parentctx.node() == node.nullid:
                continue
            extra.update({'branch': parentctx.extra().get('branch', None),
                          'close': 1})
        origbranch = extra.get('branch', None)
        meta.mapbranch(extra)

        current_ctx = context.memctx(
            meta.repo,
            [parentctx.node(), revlog.nullid],
            util.forceutf8(meta.getmessage(r)),
            [util.forceutf8(f) for f in files_touched],
            filectxfn,
            util.forceutf8(meta.authors[r.author]),
            date,
            extra)

        ha = meta.repo.svn_commitctx(current_ctx)

        if not tag:
            if (not origbranch in meta.branches
                and not meta.get_path_tag(meta.remotename(origbranch))):
                meta.branches[origbranch] = None, 0, r.revnum
            meta.revmap[r.revnum, b] = ha
        else:
            meta.movetag(tag, ha, r, date)
            meta.addedtags.pop(tag, None)
        util.describe_commit(ui, ha, b)
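
# A minimal sketch of the bad_branch_paths computation above: when one branch
# path contains other branch paths, files living under the nested branches
# must be reported as missing so stupid mode matches what real replay would
# produce. nested_branch_subpaths is a hypothetical helper used only for
# illustration, not part of hgsubversion.
def nested_branch_subpaths(branch_path, all_branch_paths):
    """Return the subpaths (relative to branch_path) of branches nested
    inside branch_path."""
    nested = []
    for other in all_branch_paths:
        if other != branch_path and other.startswith(branch_path + '/'):
            nested.append(other[len(branch_path) + 1:])
    return nested

# Example: converting 'trunk' when 'trunk/vendor/lib' is itself a branch means
# paths under 'vendor/lib' should raise ENOENT in filectxfn:
#   nested_branch_subpaths('trunk', ['trunk', 'trunk/vendor/lib'])
#   -> ['vendor/lib']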
def convert_rev(ui, meta, svn, r, tbdelta, firstrun):
    # this server fails at replay
    if meta.filemap:
        raise hgutil.Abort('filemaps currently unsupported with stupid replay.')

    branches = branches_in_paths(meta, tbdelta, r.paths, r.revnum,
                                 svn.checkpath, svn.list_files, firstrun)
    brpaths = branches.values()
    bad_branch_paths = {}
    for br, bp in branches.iteritems():
        bad_branch_paths[br] = []

        # This next block might be needed, but for now I'm omitting it until it
        # can be proven necessary.
        # for bad in brpaths:
        #     if bad.startswith(bp) and len(bad) > len(bp):
        #         bad_branch_paths[br].append(bad[len(bp)+1:])

        # We've got a branch that contains other branches. We have to be careful
        # to get results similar to real replay in this case.
        for existingbr in meta.branches:
            bad = meta.remotename(existingbr)
            if bad.startswith(bp) and len(bad) > len(bp):
                bad_branch_paths[br].append(bad[len(bp)+1:])

    deleted_branches = {}
    for p in r.paths:
        tag = meta.get_path_tag(p)
        if tag and tag not in meta.tags:
            continue
        branch = meta.localname(p)
        if not (r.paths[p].action == 'R' and branch in meta.branches):
            continue
        # Check the branch is not being replaced by one of its
        # ancestors, it happens a lot with project-wide reverts.
        frompath = r.paths[p].copyfrom_path
        frompath, frombranch = meta.split_branch_path(
            frompath, existing=False)[:2]
        if frompath == '':
            fromnode = meta.get_parent_revision(
                r.paths[p].copyfrom_rev + 1, frombranch, exact=True)
            if fromnode != node.nullid:
                fromctx = meta.repo[fromnode]
                pctx = meta.repo[meta.get_parent_revision(
                    r.revnum, branch, exact=True)]
                if util.isancestor(pctx, fromctx):
                    continue
        closed = checkbranch(meta, r, branch)
        if closed is not None:
            deleted_branches[branch] = closed

    date = meta.fixdate(r.date)
    check_deleted_branches = set(tbdelta['branches'][1])
    for b in branches:
        parentctx = meta.repo[meta.get_parent_revision(r.revnum, b)]
        tag = meta.get_path_tag(meta.remotename(b))
        kind = svn.checkpath(branches[b], r.revnum)
        if kind != 'd':
            if not tag:
                # Branch does not exist at this revision. Get parent
                # revision and remove everything.
                deleted_branches[b] = parentctx.node()
            continue

        # The nullrev check might not be necessary in theory but svn <
        # 1.7 failed to diff branch creation so the diff_branchrev()
        # path does not support this case with svn >= 1.7. We can fix
        # it, or we can force the existing fetch_branchrev() path. Do
        # the latter for now.
        incremental = (meta.firstpulled > 0 and
                       parentctx.rev() != node.nullrev and
                       not firstrun)

        if incremental:
            try:
                files_touched, filectxfn2 = diff_branchrev(
                    ui, svn, meta, b, branches[b], r, parentctx)
            except BadPatchApply, e:
                # Either this revision or the previous one does not exist.
                ui.note("Fetching entire revision: %s.\n" % e.args[0])
                incremental = False
        if not incremental:
            files_touched, filectxfn2 = fetch_branchrev(
                svn, meta, b, branches[b], r, parentctx)

        externals = {}
        if meta.layout != 'single':
            externals = fetch_externals(ui, svn, branches[b], r, parentctx)
            externals = svnexternals.getchanges(ui, meta.repo, parentctx,
                                                externals)
            files_touched.extend(externals)

        def filectxfn(repo, memctx, path):
            if path in externals:
                if externals[path] is None:
                    raise IOError(errno.ENOENT, 'no externals')
                return compathacks.makememfilectx(repo, path=path,
                                                  data=externals[path],
                                                  islink=False,
                                                  isexec=False,
                                                  copied=None)
            for bad in bad_branch_paths[b]:
                if path.startswith(bad):
                    raise IOError(errno.ENOENT, 'Path %s is bad' % path)
            return filectxfn2(repo, memctx, path)

        if '' in files_touched:
            files_touched.remove('')
        excluded = [f for f in files_touched if f not in meta.filemap]
        for f in excluded:
            files_touched.remove(f)

        if b:
            # Regular tag without modifications, it will be committed by
            # svnmeta.committag(), we can skip the whole branch for now
            if (tag and tag not in meta.tags
                and b not in meta.branches
                and b not in compathacks.branchset(meta.repo)
                and not files_touched):
                continue

        if parentctx.node() == node.nullid and not files_touched:
            meta.repo.ui.debug('skipping commit since parent is null and no files touched.\n')
            continue

        for f in files_touched:
            if f:
                # this is a case that really shouldn't ever happen, it means
                # something is very wrong
                assert f[0] != '/'

        extra = meta.genextra(r.revnum, b)
        if tag:
            if parentctx.node() == node.nullid:
                continue
            extra.update({'branch': parentctx.extra().get('branch', None),
                          'close': 1})
        origbranch = extra.get('branch', None)
        meta.mapbranch(extra)

        current_ctx = context.memctx(meta.repo,
                                     [parentctx.node(), revlog.nullid],
                                     meta.getmessage(r),
                                     files_touched,
                                     filectxfn,
                                     meta.authors[r.author],
                                     date,
                                     extra)
        ha = meta.repo.svn_commitctx(current_ctx)

        if not tag:
            if (not origbranch in meta.branches
                and not meta.get_path_tag(meta.remotename(origbranch))):
                meta.branches[origbranch] = None, 0, r.revnum
            meta.revmap[r.revnum, b] = ha
        else:
            meta.movetag(tag, ha, r, date)
            meta.addedtags.pop(tag, None)
        util.describe_commit(ui, ha, b)
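
# Both convert_rev() variants skip a branch replacement when the copy source
# is an ancestor of the branch's current parent, treating it as a project-wide
# revert rather than a new branch head. Below is a tiny standalone sketch of
# that ancestor walk, assuming a simple node -> parent mapping with a single
# parent per node; is_linear_ancestor is hypothetical and only stands in for
# util.isancestor() used above.
def is_linear_ancestor(parents, ancestor, descendant):
    """Return True if ancestor is reachable from descendant by following
    the single-parent chain in the parents mapping (None marks the root)."""
    cur = descendant
    while cur is not None:
        if cur == ancestor:
            return True
        cur = parents.get(cur)
    return False

# Example: replacing a branch whose current parent is 'r3' with a copy of 'r2'
# is a revert, because 'r2' is an ancestor of 'r3':
#   is_linear_ancestor({'r3': 'r2', 'r2': 'r1', 'r1': None}, 'r2', 'r3')
#   -> True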