def overlay(ui, repo, sourceurl, revs=None, dest=None, into=None,
            noncontiguous=False, notify=None):
    """Integrate contents of another repository.

    This command essentially replays changesets from another repository into
    this one. Unlike a simple pull + rebase, the files from the remote
    repository are "overlayed" or unioned with the contents of the destination
    repository.

    The functionality of this command is nearly identical to what ``hg
    transplant`` provides. However, the internal mechanism varies
    substantially.

    There are currently several restrictions to what can be imported:

    * The imported changesets must be in a single DAG head
    * The imported changesets (as evaluated by ``REVS``) must be a contiguous
      DAG range (Unless --noncontiguous is passed).
    * Importing merges is not supported.
    * The state of the files in the destination directory/changeset must
      exactly match the last imported changeset.

    That last point is important: it means that this command can effectively
    only be used for unidirectional syncing. In other words, the source
    repository must be the single source of all changes to the destination
    directory.

    The restriction of states being identical is to ensure that changesets
    in the source and destination are as similar as possible. For example,
    if the file content in the destination did not match the source, then
    the ``hg diff`` output for the next overlayed changeset would differ
    from the source.

    This command supports sending human readable notifications in the event
    that an overlay failed. Set --notify to an command that handles delivery
    of these errors. The message will be piped to the command via STDIN.
    """
    # We could potentially support this later.
    if not into:
        raise error.Abort(_('--into must be specified'))

    # Default to overlaying every changeset in the source repository.
    if not revs:
        revs = 'all()'

    # Mirror the remote repository locally, then evaluate the revset against
    # that mirror.
    sourcerepo = _mirrorrepo(ui, repo, sourceurl)
    sourcerevs = scmutil.revrange(sourcerepo, [revs])

    if not sourcerevs:
        raise error.Abort(_('unable to determine source revisions'))

    # Overlay onto the requested destination changeset, defaulting to tip.
    if dest:
        destctx = scmutil.revsymbol(repo, dest)
    else:
        destctx = scmutil.revsymbol(repo, 'tip')

    # Backdoor for testing to force static URL.
    sourceurl = ui.config('overlay', 'sourceurl', sourceurl)

    _dooverlay(sourcerepo, sourceurl, sourcerevs, repo, destctx, into,
               noncontiguous, notify)
def get_ctx(repo, ref):
    """Return the change context for *ref*.

    Tries a direct ``repo[ref]`` lookup first and falls back to the slower,
    backward-compatible ``revsymbol`` lookup when that fails.
    """
    try:
        return repo[ref]
    except ProgrammingError:
        # Direct lookup is unsupported for this ref form; fall back to the
        # slower, backward-compatible revsymbol path.
        return revsymbol(repo, ref)
    except (LookupError, RepoLookupError):
        # A numeric ref that fails direct lookup is genuinely missing, so
        # re-raise; only symbolic refs get the revsymbol fallback.
        if isinstance(ref, (int, long)):
            raise
        return revsymbol(repo, ref)
def infowebcommand(web):
    """Get information about the specified changeset(s).

    This is a legacy API from before the days of Mercurial's built-in JSON
    API. It is used by unidentified parts of automation. Over time these
    consumers should transition to the modern/native JSON API.
    """
    params = web.req.qsparams
    if 'node' not in params:
        return web.sendtemplate('error',
                                error={'error': "missing parameter 'node'"})

    def describe(ctx):
        # Flatten one changectx into the legacy info payload.
        return {
            'rev': ctx.rev(),
            'node': ctx.hex(),
            'user': ctx.user(),
            'date': ctx.date(),
            'description': ctx.description(),
            'branch': ctx.branch(),
            'tags': ctx.tags(),
            'parents': [p.hex() for p in ctx.parents()],
            'children': [c.hex() for c in ctx.children()],
            'files': ctx.files(),
        }

    csets = [describe(scmutil.revsymbol(web.repo, node))
             for node in params.getall('node')]

    return web.sendtemplate('info', csets=templateutil.mappinglist(csets))
def backoutsgen(_context):
    '''Generator for backouts list'''
    # Closure: reads `description` and `repo` from the enclosing scope.
    parsed = commitparser.parse_backouts(description)
    if not parsed:
        return
    for node in parsed[0]:
        try:
            # Unresolvable backout targets are silently skipped.
            yield {b'node': scmutil.revsymbol(repo, node).hex()}
        except error.RepoLookupError:
            pass
def get_filechanges(repo, revision, parents, mleft):
    """Given some repository and revision, find all changed/deleted files."""
    added, changed, removed = [], [], []
    for parent in parents:
        # Negative parent revs are "no parent"; skip them.
        if parent >= 0:
            mright = revsymbol(repo, b"%d" % parent).manifest()
            added, changed, removed = split_dict(mleft, mright,
                                                 added, changed, removed)
    added.sort()
    changed.sort()
    removed.sort()
    return added, changed, removed
def get_changeset(ui, repo, revision, authors={}, encoding=''):
    """Read changeset metadata for *revision*.

    Returns (node, manifest, user, (time, tz), files, desc, branch, extra),
    transcoding user/desc to UTF-8 when *encoding* is given.
    """
    # Starting with Mercurial 4.6 lookup no longer accepts raw hashes for
    # lookups. Work around it by changing our behaviour depending on how it
    # fails.
    try:
        node = repo.lookup(revision)
    except (TypeError, hgerror.ProgrammingError):
        # We were given a numeric rev
        node = binnode(revsymbol(repo, b"%d" % revision))
    except hgerror.RepoLookupError:
        # We got a raw hash
        node = revision

    manifest, user, (time, timezone), files, desc, extra = \
        repo.changelog.read(node)

    if encoding:
        user = user.decode(encoding).encode('utf8')
        desc = desc.decode(encoding).encode('utf8')

    # Render the offset (seconds west of UTC) as a git-style "+HHMM" string.
    tz = b"%+03d%02d" % (-timezone // 3600, ((-timezone % 3600) // 60))
    branch = get_branch(extra.get(b'branch', b'master'))

    return (node, manifest, fixup_user(user, authors), (time, tz), files,
            desc, branch, extra)
def get_changeset(ui, repo, revision, authors={}, encoding=''):
    """Read changeset metadata for *revision* (str-based variant).

    Returns (node, manifest, user, (time, tz), files, desc, branch, extra),
    transcoding user/desc to UTF-8 when *encoding* is given.
    """
    # Starting with Mercurial 4.6 lookup no longer accepts raw hashes for
    # lookups. Work around it by changing our behaviour depending on how it
    # fails.
    try:
        node = repo.lookup(revision)
    except hgerror.ProgrammingError:
        # We were given a numeric rev
        node = binnode(revsymbol(repo, str(revision)))
    except hgerror.RepoLookupError:
        # We got a raw hash
        node = revision

    manifest, user, (time, timezone), files, desc, extra = \
        repo.changelog.read(node)

    if encoding:
        user = user.decode(encoding).encode('utf8')
        desc = desc.decode(encoding).encode('utf8')

    # Render the offset (seconds west of UTC) as a git-style "+HHMM" string.
    tz = "%+03d%02d" % (-timezone / 3600, ((-timezone % 3600) / 60))
    branch = get_branch(extra.get('branch', 'master'))

    return (node, manifest, fixup_user(user, authors), (time, tz), files,
            desc, branch, extra)
def export_note(ui, repo, revision, count, authors, encoding, is_first):
    """Emit a fast-import note commit on refs/notes/hg for *revision*.

    Attaches the Mercurial hex hash of *revision* as a git note on the
    commit exported with mark ``:revision+1``. Hidden changesets are
    skipped. Returns the (possibly checkpointed) command counter.
    """
    (revnode, _, user, (time, timezone), _, _, _,
     _) = get_changeset(ui, repo, revision, authors, encoding)

    # Hidden (obsolete) changesets are not exported, so emit no note.
    if repo[revnode].hidden():
        return count

    # NOTE: the original computed a `parents` list here via
    # repo.changelog.parentrevs() but never used it; that dead work
    # has been removed.
    wr(b'commit refs/notes/hg')
    wr(b'committer %s %d %s' % (user, time, timezone))
    wr(b'data 0')
    if is_first:
        # Continue from the existing notes ref on incremental runs.
        wr(b'from refs/notes/hg^0')
    # Attach the note to the commit exported with mark :<rev+1>.
    wr(b'N inline :%d' % (revision + 1))
    hg_hash = revsymbol(repo, b"%d" % revision).hex()
    wr(b'data %d' % (len(hg_hash)))
    wr_no_nl(hg_hash)
    wr()

    return checkpoint(count)
def get_changeset(ui, repo, revision, authors={}, encoding=''):
    """Read changeset metadata for *revision*, detecting text encodings.

    Returns (node, manifest, user, (time, tz), files, desc, branch, extra).
    When *encoding* is truthy, user/desc are transcoded to UTF-8 — but note
    the actual codec comes from chardet detection, not from *encoding*
    itself; the parameter only acts as an on/off switch here.
    """
    # Starting with Mercurial 4.6 lookup no longer accepts raw hashes
    # for lookups. Work around it by changing our behaviour depending on
    # how it fails
    try:
        node = repo.lookup(revision)
    except hgerror.ProgrammingError:
        node = binnode(revsymbol(repo, str(revision)))  # We were given a numeric rev
    except hgerror.RepoLookupError:
        node = revision  # We got a raw hash
    (manifest, user, (time, timezone), files, desc, extra) = repo.changelog.read(node)
    if encoding:
        # NOTE(review): chardet.detect() can return None for 'encoding',
        # which would make decode() raise TypeError — not caught by the
        # UnicodeDecodeError handlers below. Verify against real inputs.
        try:
            user_codec = chardet.detect(user)['encoding']
            user = user.decode(user_codec).encode('utf8')
        except UnicodeDecodeError:
            sys.stdout.write(' "user" decode error' + "\n")
        try:
            desc_codec = chardet.detect(desc)['encoding']
            desc = desc.decode(desc_codec).encode('utf8')
        except UnicodeDecodeError:
            sys.stdout.write(' "desc" decode error' + "\n")
    # Git-style "+HHMM" offset; '/' division — presumably Python 2 integer
    # division is intended here ('%d' also truncates floats on Python 3).
    tz = "%+03d%02d" % (-timezone / 3600, ((-timezone % 3600) / 60))
    branch = get_branch(extra.get('branch', 'master'))
    return (node, manifest, fixup_user(user, authors), (time, tz), files, desc, branch, extra)
def ancestor(self, c2, warn=False):
    # type: (gitchangectx, bool) -> gitchangectx
    """return the "best" ancestor context of self and c2

    If there are multiple candidates, it will show a message and check
    merge.preferancestor configuration before falling back to the revlog
    ancestor."""
    # deal with workingctxs
    n2 = c2._node
    if n2 is None:
        n2 = c2._parents[0]._node
    cahs = self._repo.changelog.commonancestorsheads(self._node, n2)
    if not cahs:
        # No common ancestor at all.
        anc = nullid
    elif len(cahs) == 1:
        # Unambiguous: single common-ancestor head.
        anc = cahs[0]
    else:
        # Multiple candidates: let configuration pick a preferred one.
        # experimental config: merge.preferancestor
        for r in self._repo.ui.configlist('merge', 'preferancestor'):
            try:
                ctx = scmutil.revsymbol(self._repo, r)
            except error.RepoLookupError:
                # Unknown preference entry; try the next one.
                continue
            anc = ctx.node()
            if anc in cahs:
                break
        else:
            # No configured preference matched; use the revlog's choice.
            anc = self._repo.changelog.ancestor(self._node, n2)
        if warn:
            self._repo.ui.status(
                (_("note: using %s as ancestor of %s and %s\n") %
                 (short(anc), short(self._node), short(n2))) +
                ''.join(_(" alternatively, use --config "
                          "merge.preferancestor=%s\n") %
                        short(n) for n in sorted(cahs) if n != anc))
    return gitchangectx(self._repo, anc)
def export_commit(ui, repo, revision, old_marks, max, count, authors,
                  branchesmap, sob, brmap, hgtags, encoding='', fn_encoding='',
                  plugins={}):
    """Export changeset *revision* as a git fast-import commit stream.

    Writes reset/commit/from/merge/file commands via wr(), applying any
    registered commit-message filter plugins first. Hidden changesets are
    skipped. Returns the updated command counter from checkpoint().

    NOTE: the parameters `max` and `filter` (loop var below) shadow
    builtins — kept for interface compatibility.
    """
    def get_branchname(name):
        # Sanitize once and memoize the result in brmap.
        if name in brmap:
            return brmap[name]
        n = sanitize_name(name, "branch", branchesmap)
        brmap[name] = n
        return n

    (revnode, _, user, (time, timezone), files, desc, branch,
     _) = get_changeset(ui, repo, revision, authors, encoding)

    if repo[revnode].hidden():
        return count

    branch = get_branchname(branch)

    parents = [p for p in repo.changelog.parentrevs(revision) if p >= 0]
    author = get_author(desc, user, authors)
    hg_hash = revsymbol(repo, b"%d" % revision).hex()

    # Let plugins rewrite branch/parents/author/desc before emitting.
    if plugins and plugins['commit_message_filters']:
        commit_data = {'branch': branch, 'parents': parents,
                       'author': author, 'desc': desc,
                       'revision': revision, 'hg_hash': hg_hash}
        for filter in plugins['commit_message_filters']:
            filter(commit_data)
        branch = commit_data['branch']
        parents = commit_data['parents']
        author = commit_data['author']
        desc = commit_data['desc']

    # A parentless, non-initial revision starts a fresh branch head.
    if len(parents) == 0 and revision != 0:
        wr(b'reset refs/heads/%s' % branch)

    wr(b'commit refs/heads/%s' % branch)
    wr(b'mark :%d' % (revision + 1))
    if sob:
        wr(b'author %s %d %s' % (author, time, timezone))
    wr(b'committer %s %d %s' % (user, time, timezone))
    wr(b'data %d' % (len(desc) + 1))  # wtf?
    wr(desc)
    wr()

    ctx = revsymbol(repo, b"%d" % revision)
    man = ctx.manifest()
    added, changed, removed, type = [], [], [], ''

    if len(parents) == 0:
        # first revision: feed in full manifest
        added = man.keys()
        added.sort()
        type = 'full'
    else:
        wr(b'from %s' % revnum_to_revref(parents[0], old_marks))
        if len(parents) == 1:
            # later non-merge revision: feed in changed manifest
            # if we have exactly one parent, just take the changes from the
            # manifest without expensively comparing checksums
            f = repo.status(parents[0], revnode)
            added, changed, removed = f.added, f.modified, f.removed
            type = 'simple delta'
        else:
            # a merge with two parents
            wr(b'merge %s' % revnum_to_revref(parents[1], old_marks))
            # later merge revision: feed in changed manifest
            # for many files comparing checksums is expensive so only do it
            # for merges where we really need it due to hg's revlog logic
            added, changed, removed = get_filechanges(repo, revision,
                                                      parents, man)
            type = 'thorough delta'

    stderr_buffer.write(
        b'%s: Exporting %s revision %d/%d with %d/%d/%d added/changed/removed files\n'
        % (branch, type.encode(), revision + 1, max,
           len(added), len(changed), len(removed)))

    for filename in removed:
        if fn_encoding:
            filename = filename.decode(fn_encoding).encode('utf8')
        filename = strip_leading_slash(filename)
        # Dropping .hgsub means the submodule config is gone; mirror that.
        if filename == b'.hgsub':
            remove_gitmodules(ctx)
        wr(b'D %s' % filename)

    export_file_contents(ctx, man, added, hgtags, fn_encoding, plugins)
    export_file_contents(ctx, man, changed, hgtags, fn_encoding, plugins)
    wr()

    return checkpoint(count)
def hg2git(repourl, m, marksfile, mappingfile, headsfile, tipfile,
           authors={}, branchesmap={}, tagsmap={},
           sob=False, force=False, ignore_unnamed_heads=False, hgtags=False,
           notes=False, encoding='', fn_encoding='', plugins={}):
    """Drive an hg -> git fast-import conversion of *repourl*.

    Loads the marks/mapping/heads/tip caches, verifies repository heads,
    exports commits (and optionally notes) for the revision range since the
    cached tip, exports tags, then saves the updated caches.
    Returns 0 on success, 1 on verification/submodule-mapping errors.

    NOTE: the locals `min` and `max` shadow builtins — preserved as-is.
    """
    def check_cache(filename, contents):
        # Warn about empty caches: an incremental run depends on them.
        if len(contents) == 0:
            sys.stderr.write(
                'Warning: %s does not contain any data, this will probably make an incremental import fail\n'
                % filename)

    _max = int(m)

    old_marks = load_cache(marksfile, lambda s: int(s) - 1)
    mapping_cache = load_cache(mappingfile)
    heads_cache = load_cache(headsfile)
    state_cache = load_cache(tipfile)

    # Only sanity-check the caches on incremental runs (state exists).
    if len(state_cache) != 0:
        for (name, data) in [(marksfile, old_marks),
                             (mappingfile, mapping_cache),
                             (headsfile, state_cache)]:
            check_cache(name, data)

    ui, repo = setup_repo(repourl)

    if not verify_heads(ui, repo, heads_cache, force, ignore_unnamed_heads,
                        branchesmap):
        return 1

    # Older Mercurial exposed changelog.count(); newer uses len(repo).
    try:
        tip = repo.changelog.count()
    except AttributeError:
        tip = len(repo)

    min = int(state_cache.get('tip', 0))
    max = _max
    if _max < 0 or max > tip:
        max = tip

    # Rebuild the hg-hash -> rev mapping, skipping hidden changesets.
    for rev in range(0, max):
        (revnode, _, _, _, _, _, _, _) = get_changeset(ui, repo, rev, authors)
        if repo[revnode].hidden():
            continue
        mapping_cache[hexlify(revnode)] = b"%d" % rev

    if submodule_mappings:
        # Make sure that all mercurial submodules are registered in the
        # submodule-mappings file
        for rev in range(0, max):
            ctx = revsymbol(repo, b"%d" % rev)
            if ctx.hidden():
                continue
            if ctx.substate:
                for key in ctx.substate:
                    if ctx.substate[key][2] == 'hg' and key not in submodule_mappings:
                        sys.stderr.write(
                            "Error: %s not found in submodule-mappings\n"
                            % (key))
                        return 1

    c = 0
    brmap = {}
    for rev in range(min, max):
        c = export_commit(ui, repo, rev, old_marks, max, c, authors,
                          branchesmap, sob, brmap, hgtags, encoding,
                          fn_encoding, plugins)
    if notes:
        for rev in range(min, max):
            c = export_note(ui, repo, rev, c, authors, encoding,
                            rev == min and min != 0)

    state_cache['tip'] = max
    state_cache['repo'] = repourl
    save_cache(tipfile, state_cache)
    save_cache(mappingfile, mapping_cache)

    c = export_tags(ui, repo, old_marks, mapping_cache, c, authors, tagsmap)

    sys.stderr.write('Issued %d commands\n' % c)

    return 0
except AttributeError: tip=len(repo) min=int(state_cache.get('tip',0)) max=_max if _max<0 or max>tip: max=tip for rev in range(0,max): (revnode,_,_,_,_,_,_,_)=get_changeset(ui,repo,rev,authors) mapping_cache[revnode.encode('hex_codec')] = str(rev) if submodule_mappings: # Make sure that all submodules are registered in the submodule-mappings file for rev in range(0,max): ctx=revsymbol(repo,str(rev)) if ctx.substate: for key in ctx.substate: if key not in submodule_mappings: sys.stderr.write("Error: %s not found in submodule-mappings\n" % (key)) return 1 c=0 brmap={} for rev in range(min,max): c=export_commit(ui,repo,rev,old_marks,max,c,authors,branchesmap, sob,brmap,hgtags,encoding,fn_encoding, plugins) if notes: for rev in range(min,max): c=export_note(ui,repo,rev,c,authors, encoding, rev == min and min != 0)
def export_commit(ui, repo, revision, old_marks, max, count, authors,
                  branchesmap, sob, brmap, hgtags, encoding='', fn_encoding='',
                  filter_contents=None):
    """Export changeset *revision* as a git fast-import commit (LFS variant).

    In addition to the commit/file commands emitted via wr(), this variant
    tracks ``.hglf`` largefile pointers, updating LFS bookkeeping and
    emitting a fresh .gitattributes when pointers change.
    Returns the updated command counter from checkpoint().

    NOTE: Python 2 code — uses dict.has_key() and relies on map() being
    eager for its side effects (both would break on Python 3).
    """
    def get_branchname(name):
        # Sanitize once and memoize the result in brmap.
        if brmap.has_key(name):
            return brmap[name]
        n = sanitize_name(name, "branch", branchesmap)
        brmap[name] = n
        return n

    (revnode, _, user, (time, timezone), files, desc, branch,
     _) = get_changeset(ui, repo, revision, authors, encoding)

    branch = get_branchname(branch)

    parents = [p for p in repo.changelog.parentrevs(revision) if p >= 0]

    # A parentless, non-initial revision starts a fresh branch head.
    if len(parents) == 0 and revision != 0:
        wr('reset refs/heads/%s' % branch)

    wr('commit refs/heads/%s' % branch)
    wr('mark :%d' % (revision + 1))
    if sob:
        wr('author %s %d %s' % (get_author(desc, user, authors), time,
                                timezone))
    wr('committer %s %d %s' % (user, time, timezone))
    wr('data %d' % (len(desc) + 1))  # wtf?
    wr(desc)
    wr()

    ctx = revsymbol(repo, str(revision))
    man = ctx.manifest()
    added, changed, removed, type = [], [], [], ''

    if len(parents) == 0:
        # first revision: feed in full manifest
        added = man.keys()
        added.sort()
        type = 'full'
    else:
        wr('from %s' % revnum_to_revref(parents[0], old_marks))
        if len(parents) == 1:
            # later non-merge revision: feed in changed manifest
            # if we have exactly one parent, just take the changes from the
            # manifest without expensively comparing checksums
            f = repo.status(parents[0], revnode)[:3]
            added, changed, removed = f[1], f[0], f[2]
            type = 'simple delta'
        else:
            # a merge with two parents
            wr('merge %s' % revnum_to_revref(parents[1], old_marks))
            # later merge revision: feed in changed manifest
            # for many files comparing checksums is expensive so only do it
            # for merges where we really need it due to hg's revlog logic
            added, changed, removed = get_filechanges(repo, revision,
                                                      parents, man)
            type = 'thorough delta'
            # in case we have added a gitattribute generated file
            # we need to copy the merged branch
            merged_branch = get_branchname(repo.changectx(parents[1]).branch())
            merge_lfs_attributes(branch, merged_branch)

    sys.stderr.write(
        '%s: Exporting %s revision %d/%d with %d/%d/%d added/changed/removed files\n'
        % (branch, type, revision + 1, max,
           len(added), len(changed), len(removed)))

    if fn_encoding:
        removed = [r.decode(fn_encoding).encode('utf8') for r in removed]

    # Track .hglf largefile pointers entering/leaving the LFS store.
    new_lfs_entry = False
    for entry in added + changed:
        if len(entry) > 5 and entry[:5] == ".hglf":
            add_to_lfs(branch, strip_leading_slash(entry[5:]))
            new_lfs_entry = True
    for entry in removed:
        if len(entry) > 5 and entry[:5] == ".hglf":
            remove_from_lfs(branch, strip_leading_slash(entry[5:]))
            new_lfs_entry = True

    # if the pointer is removed, remove the git pointer
    removed = [x[5:] if len(x) > 5 and x[:5] == ".hglf" else x
               for x in removed]
    removed = [strip_leading_slash(x) for x in removed]

    # Python 2: map() is eager, so this emits a D command per removal.
    map(lambda r: wr('D %s' % r), removed)

    export_file_contents(ctx, man, added, hgtags, fn_encoding,
                         filter_contents)
    export_file_contents(ctx, man, changed, hgtags, fn_encoding,
                         filter_contents)

    if new_lfs_entry:
        gitAttributes = build_lfs_attributes(branch)
        if gitAttributes is not None:
            wr('M %s inline %s' % ("100644", ".gitattributes"))
            wr('data %d' % len(gitAttributes))
            wr(gitAttributes)
            sys.stderr.write('Exported .gitattributes\n')
    wr()

    return checkpoint(count)
def lookup(self, key):
    """Resolve *key* to its binary node id via revsymbol."""
    ctx = scmutil.revsymbol(self, key)
    return ctx.node()
def headdivergencewebcommand(web):
    """Get information about divergence between this repo and a changeset.

    This API was invented to be used by MozReview to obtain information
    about how a repository/head has progressed/diverged since a commit
    was submitted for review.

    It is assumed that this is running on the canonical/mainline
    repository. Changes in other repositories must be rebased onto or
    merged into this repository.
    """
    req = web.req

    if b'node' not in req.qsparams:
        # TRACKING hg48 — error template payload shape changed in hg 4.8.
        if util.versiontuple(n=2) >= (4, 8):
            return web.sendtemplate(b'error',
                                    error=b"missing parameter 'node'")
        else:
            return web.sendtemplate(
                b'error', error={b'error': b"missing parameter 'node'"})

    repo = web.repo
    # Optional 'p' query params: file paths to report merges/changes for.
    paths = set(req.qsparams.getall(b'p'))
    basectx = scmutil.revsymbol(repo, req.qsparams[b'node'])

    # Find how much this repo has changed since the requested changeset.
    # Our heuristic is to find the descendant head with the highest revision
    # number. Most (all?) repositories we care about for this API should have
    # a single head per branch. And we assume the newest descendant head is
    # the one we care about the most. We don't care about branches because
    # if a descendant is on different branch, then the repo has likely
    # transitioned to said branch.
    #
    # If we ever consolidate Firefox repositories, we'll need to reconsider
    # this logic, especially if release repos with their extra branches/heads
    # are involved.

    # Specifying "start" only gives heads that are descendants of "start."
    headnodes = repo.changelog.heads(start=basectx.node())
    headrev = max(repo[n].rev() for n in headnodes)
    headnode = repo[headrev].node()

    betweennodes, outroots, outheads = \
        repo.changelog.nodesbetween([basectx.node()], [headnode])

    # nodesbetween returns base node. So prune.
    betweennodes = betweennodes[1:]

    commitsbehind = len(betweennodes)

    # If rev 0 or a really old revision is passed in, we could DoS the server
    # by having to iterate nearly all changesets. Establish a cap for number
    # of changesets to examine.
    maxnodes = repo.ui.configint(b'hgmo', b'headdivergencemaxnodes', 1000)
    filemergesignored = False
    if len(betweennodes) > maxnodes:
        betweennodes = []
        filemergesignored = True

    # Map each requested path to the hex nodes that touched it.
    filemerges = {}
    for node in betweennodes:
        ctx = repo[node]
        files = set(ctx.files())
        for p in files & paths:
            filemerges.setdefault(p, []).append(ctx.hex())

    return web.sendtemplate(b'headdivergence', commitsbehind=commitsbehind,
                            filemerges=filemerges,
                            filemergesignored=filemergesignored)
def lookupbranch(self, key):
    """Return *key* if it already names a branch; otherwise the branch of
    the changeset *key* resolves to."""
    if key not in self.branchmap():
        return scmutil.revsymbol(self, key).branch()
    return key
def export_commit(ui, repo, revision, old_marks, max, count, authors,
                  branchesmap, sob, brmap, hgtags, encoding='', fn_encoding='',
                  filter_contents=None):
    """Export changeset *revision* as a git fast-import commit stream.

    Emits reset/commit/from/merge/file-deletion commands via wr() and
    delegates file contents to export_file_contents(). Returns the updated
    command counter from checkpoint().

    NOTE: Python 2 code — uses dict.has_key() and relies on map() being
    eager for its side effects (both would break on Python 3).
    """
    def get_branchname(name):
        # Sanitize once and memoize the result in brmap.
        if brmap.has_key(name):
            return brmap[name]
        n = sanitize_name(name, "branch", branchesmap)
        brmap[name] = n
        return n

    (revnode, _, user, (time, timezone), files, desc, branch,
     _) = get_changeset(ui, repo, revision, authors, encoding)

    branch = get_branchname(branch)

    parents = [p for p in repo.changelog.parentrevs(revision) if p >= 0]

    # A parentless, non-initial revision starts a fresh branch head.
    if len(parents) == 0 and revision != 0:
        wr('reset refs/heads/%s' % branch)

    wr('commit refs/heads/%s' % branch)
    wr('mark :%d' % (revision + 1))
    if sob:
        wr('author %s %d %s' % (get_author(desc, user, authors), time,
                                timezone))
    wr('committer %s %d %s' % (user, time, timezone))
    wr('data %d' % (len(desc) + 1))  # wtf?
    wr(desc)
    wr()

    ctx = revsymbol(repo, str(revision))
    man = ctx.manifest()
    added, changed, removed, type = [], [], [], ''

    if len(parents) == 0:
        # first revision: feed in full manifest
        added = man.keys()
        added.sort()
        type = 'full'
    else:
        wr('from %s' % revnum_to_revref(parents[0], old_marks))
        if len(parents) == 1:
            # later non-merge revision: feed in changed manifest
            # if we have exactly one parent, just take the changes from the
            # manifest without expensively comparing checksums
            f = repo.status(parents[0], revnode)[:3]
            added, changed, removed = f[1], f[0], f[2]
            type = 'simple delta'
        else:
            # a merge with two parents
            wr('merge %s' % revnum_to_revref(parents[1], old_marks))
            # later merge revision: feed in changed manifest
            # for many files comparing checksums is expensive so only do it
            # for merges where we really need it due to hg's revlog logic
            added, changed, removed = get_filechanges(repo, revision,
                                                      parents, man)
            type = 'thorough delta'

    sys.stderr.write(
        '%s: Exporting %s revision %d/%d with %d/%d/%d added/changed/removed files\n'
        % (branch, type, revision + 1, max,
           len(added), len(changed), len(removed)))

    if fn_encoding:
        removed = [r.decode(fn_encoding).encode('utf8') for r in removed]
    removed = [strip_leading_slash(x) for x in removed]

    # Python 2: map() is eager, so this emits a D command per removal.
    map(lambda r: wr('D %s' % r), removed)

    export_file_contents(ctx, man, added, hgtags, fn_encoding,
                         filter_contents)
    export_file_contents(ctx, man, changed, hgtags, fn_encoding,
                         filter_contents)
    wr()

    return checkpoint(count)
def export_commit(ui, repo, revision, old_marks, max, count, authors,
                  branchesmap, sob, brmap, hgtags, encoding='', fn_encoding='',
                  plugins={}):
    """Export changeset *revision* as a git fast-import commit stream.

    Emits the commit via wr(), applying commit-message filter plugins, then
    queries fast-import ('get-mark') for the resulting git hash and records
    it in the rev->git-hash maps. Returns the updated command counter.

    NOTE: Python 2 code — uses dict.has_key() (would break on Python 3).
    """
    def get_branchname(name):
        # Sanitize once and memoize the result in brmap.
        if brmap.has_key(name):
            return brmap[name]
        n = sanitize_name(name, "branch", branchesmap)
        brmap[name] = n
        return n

    (revnode, _, user, (time, timezone), files, desc, branch,
     _) = get_changeset(ui, repo, revision, authors, encoding)

    branch = get_branchname(branch)

    parents = [p for p in repo.changelog.parentrevs(revision) if p >= 0]
    author = get_author(desc, user, authors)

    # Let plugins rewrite branch/parents/author/desc before emitting.
    if plugins and plugins['commit_message_filters']:
        commit_data = {'branch': branch, 'parents': parents,
                       'author': author, 'desc': desc}
        for filter in plugins['commit_message_filters']:
            filter(commit_data)
        branch = commit_data['branch']
        parents = commit_data['parents']
        author = commit_data['author']
        desc = commit_data['desc']

    # A parentless, non-initial revision starts a fresh branch head.
    if len(parents) == 0 and revision != 0:
        wr('reset refs/heads/%s' % branch)

    wr('commit refs/heads/%s' % branch)
    wr('mark :%d' % (revision + 1))
    if sob:
        wr('author %s %d %s' % (author, time, timezone))
    wr('committer %s %d %s' % (user, time, timezone))
    wr('data %d' % (len(desc) + 1))  # wtf?
    wr(desc)
    wr()

    ctx = revsymbol(repo, str(revision))
    man = ctx.manifest()
    added, changed, removed, type = [], [], [], ''

    if len(parents) == 0:
        # first revision: feed in full manifest
        added = man.keys()
        added.sort()
        type = 'full'
    else:
        wr('from %s' % revnum_to_revref(parents[0], old_marks))
        if len(parents) == 1:
            # later non-merge revision: feed in changed manifest
            # if we have exactly one parent, just take the changes from the
            # manifest without expensively comparing checksums
            f = repo.status(parents[0], revnode)[:3]
            added, changed, removed = f[1], f[0], f[2]
            type = 'simple delta'
        else:
            # a merge with two parents
            wr('merge %s' % revnum_to_revref(parents[1], old_marks))
            # later merge revision: feed in changed manifest
            # for many files comparing checksums is expensive so only do it
            # for merges where we really need it due to hg's revlog logic
            added, changed, removed = get_filechanges(repo, revision,
                                                      parents, man)
            type = 'thorough delta'

    sys.stderr.write(
        '%s: Exporting %s revision %d/%d with %d/%d/%d added/changed/removed files\n'
        % (branch, type, revision + 1, max,
           len(added), len(changed), len(removed)))

    for filename in removed:
        if fn_encoding:
            filename = filename.decode(fn_encoding).encode('utf8')
        filename = strip_leading_slash(filename)
        # Dropping .hgsubstate: mirror the submodule removal on the git side.
        if filename == '.hgsubstate':
            remove_gitmodules(ctx)
        wr('D %s' % filename)

    export_file_contents(ctx, man, added, hgtags, fn_encoding, plugins)
    export_file_contents(ctx, man, changed, hgtags, fn_encoding, plugins)
    wr()

    result = checkpoint(count)

    # Ask for the git hash of the last commit
    wr('get-mark :%d' % (revision + 1))
    sys.stdout.flush()
    # Read the hash of the last commit
    git_hash = cat_blob_fd.readline()
    rev_number_to_git_hash[str(revision)] = git_hash
    rev_hash_to_git_hash[ctx.hex()] = git_hash
    write_hg2git_map(str(revision), ctx.hex(), git_hash)

    return result