def _changesetForRevision(self, revision):
    """
    Build a Changeset describing the given git revision.

    Runs ``git rev-list --pretty=raw`` to collect the parents, author,
    timestamp and log message, then ``git diff-tree --name-status`` to
    collect the entries touched by the commit.
    """
    from datetime import datetime
    from vcpx.changes import Changeset, ChangesetEntry
    # Bug fix: FixedOffset was used below but never imported in this
    # method; import it like the other backends do.
    from vcpx.tzinfo import FixedOffset

    action_map = {'A': ChangesetEntry.ADDED, 'D': ChangesetEntry.DELETED,
                  'M': ChangesetEntry.UPDATED, 'R': ChangesetEntry.RENAMED}

    # find parent
    lines = self.repository.runCommand(['rev-list', '--pretty=raw',
                                        '--max-count=1', revision],
                                       GetUpstreamChangesetsFailure)
    parents = []
    user = Changeset.ANONYMOUS_USER
    loglines = []
    date = None
    for line in lines:
        if line.startswith('parent'):
            parents.append(line.split(' ').pop())
        if line.startswith('author'):
            # The header looks like "author Name <email> <ts> <tz>":
            # the last two fields are the timestamp and the timezone,
            # everything in between is the user identity.
            author_fields = line.split(' ')[1:]
            tz = int(author_fields.pop())
            dt = int(author_fields.pop())
            # Bug fix: re-join the identity parts with a plain space
            # (the separator had been corrupted), reconstructing
            # "Name <email>".
            user = ' '.join(author_fields)
            # tz is in +-HHMM form; convert it to seconds.
            tzsecs = abs(tz)
            tzsecs = (tzsecs / 100 * 60 + tzsecs % 100) * 60
            if tz < 0:
                tzsecs = -tzsecs
            date = datetime.fromtimestamp(dt, FixedOffset(tzsecs / 60))
        if line.startswith(' '):
            # Log message lines are indented in --pretty=raw output.
            loglines.append(line.lstrip(' '))
    message = '\n'.join(loglines)

    entries = []
    cmd = ['diff-tree', '--root', '-r', '-M', '--name-status']
    # haven't thought about merges yet...
    if parents:
        cmd.append(parents[0])
    cmd.append(revision)
    files = self.repository.runCommand(cmd,
                                       GetUpstreamChangesetsFailure)[:-1]
    if not parents:
        # git lets us know what it's diffing against if we omit parent
        if len(files) > 0:
            files.pop(0)
    for line in files:
        fields = line.split('\t')
        state = fields.pop(0)
        name = fields.pop()
        e = ChangesetEntry(name)
        e.action_kind = action_map[state[0]]
        if e.action_kind == ChangesetEntry.RENAMED:
            # Rename lines carry "old<TAB>new": the remaining field
            # is the source path.
            e.old_name = fields.pop()
        entries.append(e)

    return Changeset(revision, date, user, message, entries)
def _changesetForRevision(self, revision):
    """
    Build a Changeset describing the given git revision.

    Parses ``git rev-list --pretty=raw`` output for parents, author,
    timestamp and message, then ``git diff-tree --name-status`` for
    the touched files.
    """
    from datetime import datetime
    from vcpx.changes import Changeset, ChangesetEntry
    # Bug fix: FixedOffset was used below but never imported in this
    # method; import it like the other backends do.
    from vcpx.tzinfo import FixedOffset

    action_map = {'A': ChangesetEntry.ADDED, 'D': ChangesetEntry.DELETED,
                  'M': ChangesetEntry.UPDATED, 'R': ChangesetEntry.RENAMED}

    # find parent
    lines = self.repository.runCommand(['rev-list', '--pretty=raw',
                                        '--max-count=1', revision],
                                       GetUpstreamChangesetsFailure)
    parents = []
    user = Changeset.ANONYMOUS_USER
    loglines = []
    date = None
    for line in lines:
        if line.startswith('parent'):
            parents.append(line.split(' ').pop())
        if line.startswith('author'):
            # "author Name <email> <ts> <tz>": pop timestamp and tz,
            # the rest is the user identity.
            author_fields = line.split(' ')[1:]
            tz = int(author_fields.pop())
            dt = int(author_fields.pop())
            # Bug fix: join the identity parts back with a space (the
            # separator literal had been corrupted).
            user = ' '.join(author_fields)
            # Convert the +-HHMM timezone field into seconds.
            tzsecs = abs(tz)
            tzsecs = (tzsecs / 100 * 60 + tzsecs % 100) * 60
            if tz < 0:
                tzsecs = -tzsecs
            date = datetime.fromtimestamp(dt, FixedOffset(tzsecs/60))
        if line.startswith(' '):
            loglines.append(line.lstrip(' '))
    message = '\n'.join(loglines)

    entries = []
    cmd = ['diff-tree', '--root', '-r', '-M', '--name-status']
    # haven't thought about merges yet...
    if parents:
        cmd.append(parents[0])
    cmd.append(revision)
    files = self.repository.runCommand(cmd,
                                       GetUpstreamChangesetsFailure)[:-1]
    if not parents:
        # git lets us know what it's diffing against if we omit parent
        if len(files) > 0:
            files.pop(0)
    for line in files:
        fields = line.split('\t')
        state = fields.pop(0)
        name = fields.pop()
        e = ChangesetEntry(name)
        e.action_kind = action_map[state[0]]
        if e.action_kind == ChangesetEntry.RENAMED:
            # For renames the remaining field is the old path.
            e.old_name = fields.pop()
        entries.append(e)

    return Changeset(revision, date, user, message, entries)
def _changesetFromRevision(self, branch, revision_id):
    """
    Generate changeset for the given Bzr revision
    """
    from datetime import datetime
    from vcpx.changes import ChangesetEntry, Changeset
    from vcpx.tzinfo import FixedOffset, UTC

    rev = branch.repository.get_revision(revision_id)
    revno = branch.revision_id_to_revno(revision_id)
    delta = branch.get_revision_delta(revno)

    def make_entry(path, kind, old=None):
        # Build one ChangesetEntry with the given action kind and,
        # for renames, the previous name.
        item = ChangesetEntry(path)
        item.action_kind = kind
        if old is not None:
            item.old_name = old
        return item

    # Collect entries in the same order as the delta sections:
    # added, removed, renamed, modified.
    entries = []
    for added in delta.added:
        entries.append(make_entry(added[0], ChangesetEntry.ADDED))
    for removed in delta.removed:
        entries.append(make_entry(removed[0], ChangesetEntry.DELETED))
    for renamed in delta.renamed:
        entries.append(make_entry(renamed[1], ChangesetEntry.RENAMED,
                                  old=renamed[0]))
    for modified in delta.modified:
        entries.append(make_entry(modified[0], ChangesetEntry.UPDATED))

    # Bzr stores the timezone as a seconds offset, possibly None.
    if rev.timezone is not None:
        tzinfo = FixedOffset(rev.timezone / 60)
    else:
        tzinfo = UTC

    return Changeset(rev.revision_id,
                     datetime.fromtimestamp(rev.timestamp, tzinfo),
                     rev.committer,
                     rev.message,
                     entries)
def endElement(self, name):
    """SAX handler: close an element of the darcs changes XML stream."""
    if name == 'patch':
        # A whole <patch> element is finished: materialize it.
        info = self.current
        cset = DarcsChangeset(info['name'],
                              info['date'],
                              info['author'],
                              info['comment'],
                              info['entries'],
                              tags=info.get('tags', []))
        cset.darcs_hash = info['hash']
        if self.darcsdiff:
            # Fetch the patch's unified diff, forcing UTC timestamps.
            stream = self.darcsdiff.execute(TZ='UTC', stdout=PIPE,
                                            patchname=cset.revision)[0]
            cset.unidiff = stream.read()
        self.changesets.append(cset)
        self.current = None
    elif name == 'name' or name == 'comment':
        text = ''.join(self.current_field)
        # A name prefixed with "TAG " marks a darcs tag.
        if text[:4] == 'TAG ':
            self.current.setdefault('tags', []).append(text[4:])
        self.current[name] = text
    elif name == 'move':
        rename = ChangesetEntry(self.new_name)
        rename.action_kind = rename.RENAMED
        rename.old_name = self.old_name
        self.current['entries'].append(rename)
    elif name in ('add_file', 'add_directory', 'modify_file',
                  'remove_file', 'remove_directory'):
        entry = ChangesetEntry(''.join(self.current_field).strip())
        # Map the XML element name onto the entry's action kind.
        actions = {'add_file': entry.ADDED,
                   'add_directory': entry.ADDED,
                   'modify_file': entry.UPDATED,
                   'remove_file': entry.DELETED,
                   'remove_directory': entry.DELETED}
        entry.action_kind = actions[name]
        self.current['entries'].append(entry)
def endElement(self, name):
    """SAX handler: finish an element of the darcs changes XML stream."""
    if name == 'patch':
        # The <patch> element is complete: build the changeset object.
        cset = DarcsChangeset(self.current['name'],
                              self.current['date'],
                              self.current['author'],
                              self.current['comment'],
                              self.current['entries'],
                              tags=self.current.get('tags',[]),
                              darcs_hash=self.current['hash'])
        if self.darcsdiff:
            # Collect the patch's unified diff, forcing UTC timestamps.
            cset.unidiff = self.darcsdiff.execute(
                TZ='UTC', stdout=PIPE,
                patchname=cset.revision)[0].read()
        self.changesets.append(cset)
        self.current = None
    elif name in ['name', 'comment']:
        val = ''.join(self.current_field)
        # A name prefixed with "TAG " marks a darcs tag.
        if val[:4] == 'TAG ':
            self.current.setdefault('tags',[]).append(val[4:])
        self.current[name] = val
    elif name == 'move':
        entry = ChangesetEntry(self.new_name)
        entry.action_kind = entry.RENAMED
        entry.old_name = self.old_name
        self.current['entries'].append(entry)
    elif name in ['add_file', 'add_directory', 'modify_file',
                  'remove_file', 'remove_directory']:
        current_field = ''.join(self.current_field).strip()
        if self.inverted:
            # the filenames in file modifications are outdated
            # if there are renames
            # NOTE(review): str.replace substitutes every occurrence
            # of the old name, not only the leading path component —
            # presumably paths are distinctive enough; verify.
            for i in self.current['entries']:
                if i.action_kind == i.RENAMED and current_field.startswith(i.old_name):
                    current_field = current_field.replace(i.old_name, i.name)
        entry = ChangesetEntry(current_field)
        # Map the XML element name onto the entry's action kind.
        entry.action_kind = {'add_file': entry.ADDED,
                             'add_directory': entry.ADDED,
                             'modify_file': entry.UPDATED,
                             'remove_file': entry.DELETED,
                             'remove_directory': entry.DELETED}[name]
        entry.is_directory = name.endswith('directory')
        self.current['entries'].append(entry)
def endElement(self, name):
    """SAX callback fired when an element of the darcs XML log closes."""
    if name == 'patch':
        # Whole patch parsed: turn the accumulated state into a changeset.
        data = self.current
        changeset = DarcsChangeset(data['name'], data['date'],
                                   data['author'], data['comment'],
                                   data['entries'],
                                   tags=data.get('tags', []))
        changeset.darcs_hash = data['hash']
        if self.darcsdiff:
            pipe = self.darcsdiff.execute(TZ='UTC', stdout=PIPE,
                                          patchname=changeset.revision)[0]
            changeset.unidiff = pipe.read()
        self.changesets.append(changeset)
        self.current = None
        return

    if name in ('name', 'comment'):
        value = ''.join(self.current_field)
        # "TAG xyz" names identify darcs tags.
        if value[:4] == 'TAG ':
            self.current.setdefault('tags', []).append(value[4:])
        self.current[name] = value
        return

    if name == 'move':
        moved = ChangesetEntry(self.new_name)
        moved.action_kind = moved.RENAMED
        moved.old_name = self.old_name
        self.current['entries'].append(moved)
        return

    if name in ('add_file', 'add_directory', 'modify_file',
                'remove_file', 'remove_directory'):
        touched = ChangesetEntry(''.join(self.current_field).strip())
        # Translate the element name into the action kind.
        if name in ('add_file', 'add_directory'):
            touched.action_kind = touched.ADDED
        elif name == 'modify_file':
            touched.action_kind = touched.UPDATED
        else:
            touched.action_kind = touched.DELETED
        self.current['entries'].append(touched)
def endElement(self, name):
    """SAX handler: close an element of ``svn log --xml`` output."""
    if name == 'logentry':
        # Sort the paths to make tests easier
        self.current['entries'].sort(lambda a, b: cmp(a.name, b.name))

        # Eliminate "useless" entries: SVN does not have atomic
        # renames, but rather uses a ADD+RM duo.
        #
        # So cycle over all entries of this patch, discarding
        # the deletion of files that were actually renamed, and
        # at the same time change related entry from ADDED to
        # RENAMED.

        # When copying a directory from another location in the
        # repository (outside the tracked tree), SVN will report files
        # below this dir that are not being committed as being
        # removed.
        # We thus need to change the action_kind for all entries
        # that are below a dir that was "copyfrom" from a path
        # outside of this module:
        #   D -> Remove entry completely (it's not going to be in here)
        #   (M,A,R) -> A

        # Map rename/copy *source* path -> the ADDED entry carrying it.
        mv_or_cp = {}
        for e in self.current['entries']:
            if e.action_kind == e.ADDED and e.old_name is not None:
                mv_or_cp[e.old_name] = e

        def parent_was_copied(n):
            # True when n lives under one of the copied directories.
            for p in self.copies:
                if n.startswith(p + '/'):
                    return True
            return False

        # Find renames from deleted directories:
        # $ svn mv dir/a.txt a.txt
        # $ svn del dir
        def check_renames_from_dir(name):
            # NOTE: 'name' here shadows the method parameter on purpose.
            for e in mv_or_cp.values():
                if e.old_name.startswith(name + '/'):
                    e.action_kind = e.RENAMED

        entries = []   # MODIFY | ADD | RENAME entries
        entries2 = []  # REPLACE | DELETE entries, appended last
        for e in self.current['entries']:
            if e.action_kind == e.DELETED:
                if mv_or_cp.has_key(e.name):
                    # The deletion is half of a rename: drop it and
                    # promote the matching ADDED entry.
                    mv_or_cp[e.name].action_kind = e.RENAMED
                else:
                    check_renames_from_dir(e.name)
                    entries2.append(e)
            elif e.action_kind == 'R':
                # In svn parlance, 'R' means Replaced: a typical
                # scenario is
                #   $ svn mv a.txt b.txt
                #   $ touch a.txt
                #   $ svn add a.txt
                if mv_or_cp.has_key(e.name):
                    mv_or_cp[e.name].action_kind = e.RENAMED
                else:
                    check_renames_from_dir(e.name)
                e.action_kind = e.ADDED
                entries2.append(e)
            elif parent_was_copied(e.name):
                # Entries under a copied dir: deletions vanish,
                # everything else becomes a plain ADD.
                if e.action_kind != e.DELETED:
                    e.action_kind = e.ADDED
                    entries.append(e)
            else:
                entries.append(e)

        # Changes sort: first MODIFY|ADD|RENAME, than REPLACE|DELETE
        for e in entries2:
            entries.append(e)

        svndate = self.current['date']
        # 2004-04-16T17:12:48.000000Z
        y, m, d = map(int, svndate[:10].split('-'))
        hh, mm, ss = map(int, svndate[11:19].split(':'))
        ms = int(svndate[20:-1])
        timestamp = datetime(y, m, d, hh, mm, ss, ms, UTC)

        changeset = Changeset(self.current['revision'],
                              timestamp,
                              self.current.get('author'),
                              self.current['msg'],
                              entries)
        self.changesets.append(changeset)
        self.current = None
    elif name in ['author', 'date', 'msg']:
        self.current[name] = ''.join(self.current_field)
    elif name == 'path':
        path = ''.join(self.current_field)
        entrypath = get_entry_from_path(path)
        if entrypath:
            entry = ChangesetEntry(entrypath)
            # A tuple action means (action, copyfrom-path): a copy or
            # rename recorded by svn.
            if type(self.current_path_action) == type(()):
                self.copies.append(entry.name)
                old = get_entry_from_path(self.current_path_action[1])
                if old:
                    entry.action_kind = self.ACTIONSMAP[
                        self.current_path_action[0]]
                    entry.old_name = old
                    self.renamed[entry.old_name] = True
                else:
                    # Copied from outside the tracked tree: plain ADD.
                    entry.action_kind = entry.ADDED
            else:
                entry.action_kind = self.ACTIONSMAP[
                    self.current_path_action]
            self.current['entries'].append(entry)
def _changesetFromRevision(self, branch, revision_id):
    """
    Generate changeset for the given Bzr revision
    """
    from datetime import datetime
    from vcpx.tzinfo import FixedOffset, UTC

    rev = branch.repository.get_revision(revision_id)
    delta = branch.get_revision_delta(
        branch.revision_id_to_revno(revision_id))

    def build(path, action, kind=None, old=None):
        # Assemble one ChangesetEntry; kind (when given) determines
        # the is_directory flag, old the rename source.
        item = ChangesetEntry(path)
        item.action_kind = action
        if old is not None:
            item.old_name = old
        if kind is not None:
            item.is_directory = kind == 'directory'
        return item

    # Keep the original ordering: renamed, added, removed, modified.
    entries = []
    for d in delta.renamed:
        entries.append(build(d[1], ChangesetEntry.RENAMED,
                             kind=d[3], old=d[0]))
    for d in delta.added:
        entries.append(build(d[0], ChangesetEntry.ADDED, kind=d[2]))
    for d in delta.removed:
        entries.append(build(d[0], ChangesetEntry.DELETED, kind=d[2]))
    for d in delta.modified:
        entries.append(build(d[0], ChangesetEntry.UPDATED))

    # Bzr reports the timezone as a seconds offset, possibly None.
    if rev.timezone is not None:
        tzinfo = FixedOffset(rev.timezone / 60)
    else:
        tzinfo = UTC
    when = datetime.fromtimestamp(rev.timestamp, tzinfo)
    author = rev.get_apparent_authors()[0]
    return BzrChangeset(rev.revision_id, when, author,
                        rev.message, entries)
def changesets_from_cvsps(log, sincerev=None):
    """
    Parse CVSps log.

    Generator yielding one Changeset per cvsps PatchSet read from the
    file-like object `log`; patchsets numbered <= `sincerev` (when
    given) are skipped.
    """
    from datetime import datetime
    from vcpx.changes import Changeset, ChangesetEntry
    from vcpx.repository.cvs import compare_cvs_revs

    # cvsps output sample:
    ## ---------------------
    ## PatchSet 1500
    ## Date: 2004/05/09 17:54:22
    ## Author: grubert
    ## Branch: HEAD
    ## Tag: (none)
    ## Log:
    ## Tell the reason for using mbox (not wrapping long lines).
    ##
    ## Members:
    ##         docutils/writers/latex2e.py:1.78->1.79

    l = None
    while 1:
        l = log.readline()
        # Each patchset opens with a dashed separator; anything else
        # (including EOF's '') terminates the parse.
        if l <> '---------------------\n':
            break

        l = log.readline()
        assert l.startswith('PatchSet '), "Parse error: %s" % l

        pset = {}
        pset['revision'] = l[9:-1].strip()
        l = log.readline()
        # Header fields ("Date:", "Author:", ...) until the Log: marker.
        while not l.startswith('Log:'):
            field, value = l.split(':', 1)
            pset[field.lower()] = value.strip()
            l = log.readline()

        # Collect the log message lines up to the Members: marker.
        msg = []
        l = log.readline()
        msg.append(l)
        l = log.readline()
        while l <> 'Members: \n':
            msg.append(l)
            l = log.readline()

        assert l.startswith('Members:'), "Parse error: %s" % l

        entries = []
        l = log.readline()
        seen = {}
        # Member lines are tab-indented "path:fromrev->torev".
        while l.startswith('\t'):
            if not sincerev or (sincerev < int(pset['revision'])):
                # Cannot use split here, file may contain ':'
                cpos = l.rindex(':')
                file = l[1:cpos]
                revs = l[cpos + 1:-1]

                fromrev, torev = revs.strip().split('->')

                # Due to the fuzzy mechanism, cvsps may group
                # together two commits on a single entry, thus
                # giving something like:
                #
                #   Normalizer.py:1.12->1.13
                #   Registry.py:1.22->1.23
                #   Registry.py:1.21->1.22
                #   Stopwords.py:1.9->1.10
                #
                # Collapse those into a single one.
                e = seen.get(file)
                if not e:
                    e = ChangesetEntry(file)
                    e.old_revision = fromrev
                    e.new_revision = torev
                    seen[file] = e
                    entries.append(e)
                else:
                    # Widen the revision span of the collapsed entry.
                    if compare_cvs_revs(e.old_revision, fromrev) > 0:
                        e.old_revision = fromrev
                    if compare_cvs_revs(e.new_revision, torev) < 0:
                        e.new_revision = torev

                if fromrev == 'INITIAL':
                    e.action_kind = e.ADDED
                elif "(DEAD)" in torev:
                    e.action_kind = e.DELETED
                    e.new_revision = torev[:torev.index('(DEAD)')]
                else:
                    e.action_kind = e.UPDATED

            l = log.readline()

        if not sincerev or (sincerev < int(pset['revision'])):
            cvsdate = pset['date']
            # Date format: 2004/05/09 17:54:22, assumed UTC.
            y, m, d = map(int, cvsdate[:10].split('/'))
            hh, mm, ss = map(int, cvsdate[11:19].split(':'))
            timestamp = datetime(y, m, d, hh, mm, ss, 0, UTC)
            pset['date'] = timestamp

            yield Changeset(pset['revision'], timestamp, pset['author'],
                            ''.join(msg), entries)
def endElement(self, name):
    """SAX handler: close an element of ``svn log --xml`` output."""
    if name == 'logentry':
        # Sort the paths to make tests easier
        self.current['entries'].sort(lambda a,b: cmp(a.name, b.name))

        # Eliminate "useless" entries: SVN does not have atomic
        # renames, but rather uses a ADD+RM duo.
        #
        # So cycle over all entries of this patch, discarding
        # the deletion of files that were actually renamed, and
        # at the same time change related entry from ADDED to
        # RENAMED.

        # When copying a directory from another location in the
        # repository (outside the tracked tree), SVN will report files
        # below this dir that are not being committed as being
        # removed.
        # We thus need to change the action_kind for all entries
        # that are below a dir that was "copyfrom" from a path
        # outside of this module:
        #   D -> Remove entry completely (it's not going to be in here)
        #   (M,A,R) -> A

        # Map rename/copy *source* path -> the ADDED entry carrying it.
        mv_or_cp = {}
        for e in self.current['entries']:
            if e.action_kind == e.ADDED and e.old_name is not None:
                mv_or_cp[e.old_name] = e

        def parent_was_copied(n):
            # True when n lives under one of the copied directories.
            for p in self.copies:
                if n.startswith(p+'/'):
                    return True
            return False

        # Find renames from deleted directories:
        # $ svn mv dir/a.txt a.txt
        # $ svn del dir
        def check_renames_from_dir(name):
            # NOTE: 'name' here shadows the method parameter on purpose.
            for e in mv_or_cp.values():
                if e.old_name.startswith(name+'/'):
                    e.action_kind = e.RENAMED

        entries = []   # MODIFY | ADD | RENAME entries
        entries2 = []  # REPLACE | DELETE entries, appended last
        for e in self.current['entries']:
            if e.action_kind==e.DELETED:
                if mv_or_cp.has_key(e.name):
                    # The deletion is half of a rename: drop it and
                    # promote the matching ADDED entry.
                    mv_or_cp[e.name].action_kind = e.RENAMED
                else:
                    check_renames_from_dir(e.name)
                    entries2.append(e)
            elif e.action_kind=='R':
                # In svn parlance, 'R' means Replaced: a typical
                # scenario is
                #   $ svn mv a.txt b.txt
                #   $ touch a.txt
                #   $ svn add a.txt
                if mv_or_cp.has_key(e.name):
                    mv_or_cp[e.name].action_kind = e.RENAMED
                else:
                    check_renames_from_dir(e.name)
                e.action_kind = e.ADDED
                entries2.append(e)
            elif parent_was_copied(e.name):
                # Entries under a copied dir: deletions vanish,
                # everything else becomes a plain ADD.
                if e.action_kind != e.DELETED:
                    e.action_kind = e.ADDED
                    entries.append(e)
            else:
                entries.append(e)

        # Changes sort: first MODIFY|ADD|RENAME, than REPLACE|DELETE
        for e in entries2:
            entries.append(e)

        svndate = self.current['date']
        # 2004-04-16T17:12:48.000000Z
        y,m,d = map(int, svndate[:10].split('-'))
        hh,mm,ss = map(int, svndate[11:19].split(':'))
        ms = int(svndate[20:-1])
        timestamp = datetime(y, m, d, hh, mm, ss, ms, UTC)

        changeset = Changeset(self.current['revision'],
                              timestamp,
                              self.current.get('author'),
                              self.current['msg'],
                              entries)
        self.changesets.append(changeset)
        self.current = None
    elif name in ['author', 'date', 'msg']:
        self.current[name] = ''.join(self.current_field)
    elif name == 'path':
        path = ''.join(self.current_field)
        entrypath = get_entry_from_path(path)
        if entrypath:
            entry = ChangesetEntry(entrypath)
            # A tuple action means (action, copyfrom-path): a copy or
            # rename recorded by svn.
            if type(self.current_path_action) == type( () ):
                self.copies.append(entry.name)
                old = get_entry_from_path(self.current_path_action[1])
                if old:
                    entry.action_kind = self.ACTIONSMAP[self.current_path_action[0]]
                    entry.old_name = old
                    self.renamed[entry.old_name] = True
                else:
                    # Copied from outside the tracked tree: plain ADD.
                    entry.action_kind = entry.ADDED
            else:
                entry.action_kind = self.ACTIONSMAP[self.current_path_action]
            self.current['entries'].append(entry)
def changesets_from_cvsps(log, sincerev=None):
    """
    Parse CVSps log.

    Generator yielding one Changeset per cvsps PatchSet read from the
    file-like object `log`; patchsets numbered <= `sincerev` (when
    given) are skipped.
    """
    from datetime import datetime
    from vcpx.changes import Changeset, ChangesetEntry
    from vcpx.repository.cvs import compare_cvs_revs

    # cvsps output sample:
    ## ---------------------
    ## PatchSet 1500
    ## Date: 2004/05/09 17:54:22
    ## Author: grubert
    ## Branch: HEAD
    ## Tag: (none)
    ## Log:
    ## Tell the reason for using mbox (not wrapping long lines).
    ##
    ## Members:
    ##         docutils/writers/latex2e.py:1.78->1.79

    l = None
    while 1:
        l = log.readline()
        # Each patchset opens with a dashed separator; anything else
        # (including EOF's '') terminates the parse.
        if l <> '---------------------\n':
            break

        l = log.readline()
        assert l.startswith('PatchSet '), "Parse error: %s"%l

        pset = {}
        pset['revision'] = l[9:-1].strip()
        l = log.readline()
        # Header fields ("Date:", "Author:", ...) until the Log: marker.
        while not l.startswith('Log:'):
            field,value = l.split(':',1)
            pset[field.lower()] = value.strip()
            l = log.readline()

        # Collect the log message lines up to the Members: marker.
        msg = []
        l = log.readline()
        msg.append(l)
        l = log.readline()
        while l <> 'Members: \n':
            msg.append(l)
            l = log.readline()

        assert l.startswith('Members:'), "Parse error: %s" % l

        entries = []
        l = log.readline()
        seen = {}
        # Member lines are tab-indented "path:fromrev->torev".
        while l.startswith('\t'):
            if not sincerev or (sincerev<int(pset['revision'])):
                # Cannot use split here, file may contain ':'
                cpos = l.rindex(':')
                file = l[1:cpos]
                revs = l[cpos+1:-1]

                fromrev,torev = revs.strip().split('->')

                # Due to the fuzzy mechanism, cvsps may group
                # together two commits on a single entry, thus
                # giving something like:
                #
                #   Normalizer.py:1.12->1.13
                #   Registry.py:1.22->1.23
                #   Registry.py:1.21->1.22
                #   Stopwords.py:1.9->1.10
                #
                # Collapse those into a single one.
                e = seen.get(file)
                if not e:
                    e = ChangesetEntry(file)
                    e.old_revision = fromrev
                    e.new_revision = torev
                    seen[file] = e
                    entries.append(e)
                else:
                    # Widen the revision span of the collapsed entry.
                    if compare_cvs_revs(e.old_revision, fromrev)>0:
                        e.old_revision = fromrev
                    if compare_cvs_revs(e.new_revision, torev)<0:
                        e.new_revision = torev

                if fromrev=='INITIAL':
                    e.action_kind = e.ADDED
                elif "(DEAD)" in torev:
                    e.action_kind = e.DELETED
                    e.new_revision = torev[:torev.index('(DEAD)')]
                else:
                    e.action_kind = e.UPDATED

            l = log.readline()

        if not sincerev or (sincerev<int(pset['revision'])):
            cvsdate = pset['date']
            # Date format: 2004/05/09 17:54:22, assumed UTC.
            y,m,d = map(int, cvsdate[:10].split('/'))
            hh,mm,ss = map(int, cvsdate[11:19].split(':'))
            timestamp = datetime(y, m, d, hh, mm, ss, 0, UTC)
            pset['date'] = timestamp

            yield Changeset(pset['revision'], timestamp, pset['author'],
                            ''.join(msg), entries)
def _changesetForRevision(self, repo, revision):
    """
    Build a Changeset for the given Mercurial revision.

    Classifies every touched file as added/updated/renamed/deleted by
    comparing the revision's manifest against its parents' manifests.
    """
    from datetime import datetime
    from vcpx.changes import Changeset, ChangesetEntry
    from vcpx.tzinfo import FixedOffset

    entries = []
    node = self._getNode(repo, revision)
    parents = repo.changelog.parents(node)
    nodecontent = repo.changelog.read(node)
    # hg 0.9.5+ returns a tuple of six elements, last seems useless for us
    (manifest, user, date, files, message) = nodecontent[:5]
    # hg stores (timestamp, tz-offset-in-seconds); the offset sign is
    # inverted with respect to what FixedOffset expects.
    dt, tz = date
    date = datetime.fromtimestamp(dt,
                                  FixedOffset(-tz / 60))  # note the minus sign!

    manifest = repo.manifest.read(manifest)

    # To find adds, we get the manifests of any parents. If a file doesn't
    # occur there, it's new.
    pms = {}
    for parent in repo.changelog.parents(node):
        pms.update(repo.manifest.read(repo.changelog.read(parent)[0]))

    # if files contains only '.hgtags', this is probably a tag cset.
    # Tailor appears to only support tagging the current version, so only
    # pass on tags that are for the immediate parents of the current node
    tags = None
    if files == ['.hgtags']:
        tags = [tag for (tag, tagnode) in repo.tags().iteritems()
                if tagnode in parents]

    # Don't include the file itself in the changeset. It's only useful
    # to mercurial, and if we do end up making a tailor round trip
    # the nodes will be wrong anyway.
    if '.hgtags' in files:
        files.remove('.hgtags')
    if pms.has_key('.hgtags'):
        del pms['.hgtags']

    for f in files:
        e = ChangesetEntry(f)
        # find renames
        fl = repo.file(f)
        oldname = f in manifest and fl.renamed(manifest[f])
        if oldname:
            e.action_kind = ChangesetEntry.RENAMED
            e.old_name = oldname[0]
            # hg copy can copy the same file to multiple destinations
            # Currently this is handled as multiple renames. It would
            # probably be better to have ChangesetEntry.COPIED.
            if pms.has_key(oldname[0]):
                # Drop the source from the parent manifests so it is
                # not reported as deleted below.
                pms.pop(oldname[0])
        else:
            if pms.has_key(f):
                e.action_kind = ChangesetEntry.UPDATED
            else:
                e.action_kind = ChangesetEntry.ADDED
        entries.append(e)

    # Whatever remains in the parent manifests but not in this
    # revision's manifest was deleted.
    for df in [file for file in pms.iterkeys()
               if not manifest.has_key(file)]:
        e = ChangesetEntry(df)
        e.action_kind = ChangesetEntry.DELETED
        entries.append(e)

    from mercurial.node import hex
    revision = hex(node)
    return Changeset(revision, date, user, message, entries, tags=tags)
def _applyChangeset(self, changeset):
    """
    Run ``svn update`` to the changeset's revision, retrying on
    transient failures, and return the list of conflicted paths.

    Also completes the changeset's entries with is_directory flags and
    with any files implicitly added below added directories.
    """
    from os import walk
    from os.path import join, isdir
    from time import sleep

    # Complete changeset information, determining the is_directory
    # flag of the removed entries, before updating to the given revision
    for entry in changeset.entries:
        if entry.action_kind == entry.DELETED:
            entry.is_directory = isdir(
                join(self.repository.basedir, entry.name))

    cmd = self.repository.command("update")
    if self.repository.ignore_externals:
        cmd.append("--ignore-externals")
    cmd.extend(["--revision", changeset.revision])
    svnup = ExternalCommand(cwd=self.repository.basedir, command=cmd)

    # Retry the update up to 3 times with exponential backoff when svn
    # exits with status 1 (typically a transient lock/network issue).
    retry = 0
    while True:
        out, err = svnup.execute(".", stdout=PIPE, stderr=PIPE)
        if svnup.exit_status == 1:
            retry += 1
            if retry > 3:
                break
            delay = 2**retry
            self.log.error("%s returned status %s saying\n%s",
                           str(svnup), svnup.exit_status, err.read())
            self.log.warning("Retrying in %d seconds...", delay)
            sleep(delay)
        else:
            break

    if svnup.exit_status:
        # NOTE(review): err was already read in the loop above, so on
        # the final failure err.read() likely returns '' — confirm.
        raise ChangesetApplicationFailure(
            "%s returned status %s saying\n%s" %
            (str(svnup), svnup.exit_status, err.read()))

    self.log.debug("%s updated to %s",
                   ','.join([e.name for e in changeset.entries]),
                   changeset.revision)

    # Complete changeset information, determining the is_directory
    # flag of the added entries
    implicitly_added_entries = []
    known_added_entries = set()
    for entry in changeset.entries:
        if entry.action_kind == entry.ADDED:
            known_added_entries.add(entry.name)
            fullname = join(self.repository.basedir, entry.name)
            entry.is_directory = isdir(fullname)
            # If it is a directory, extend the entries of the
            # changeset with all its contents, if not already there.
            if entry.is_directory:
                for root, subdirs, files in walk(fullname):
                    # Never descend into svn's metadata directories.
                    if '.svn' in subdirs:
                        subdirs.remove('.svn')
                    for f in files:
                        name = join(root, f)[len(self.repository.basedir) + 1:]
                        newe = ChangesetEntry(name)
                        newe.action_kind = newe.ADDED
                        implicitly_added_entries.append(newe)
                    for d in subdirs:
                        name = join(root, d)[len(self.repository.basedir) + 1:]
                        newe = ChangesetEntry(name)
                        newe.action_kind = newe.ADDED
                        newe.is_directory = True
                        implicitly_added_entries.append(newe)

    for e in implicitly_added_entries:
        if not e.name in known_added_entries:
            changeset.entries.append(e)

    # Collect conflicted paths from the "C <path>" lines of the
    # update output.
    result = []
    for line in out:
        if len(line) > 2 and line[0] == 'C' and line[1] == ' ':
            self.log.warning("Conflict after svn update: %r", line)
            result.append(line[2:-1])

    return result
def _changesetForRevision(self, repo, revision):
    """
    Build a Changeset for the given Mercurial revision.

    Classifies every touched file as added/updated/renamed/deleted by
    comparing the revision's manifest against its parents' manifests.
    """
    from datetime import datetime
    from vcpx.changes import Changeset, ChangesetEntry
    from vcpx.tzinfo import FixedOffset

    entries = []
    node = self._getNode(repo, revision)
    parents = repo.changelog.parents(node)
    nodecontent = repo.changelog.read(node)
    # hg 0.9.5+ returns a tuple of six elements, last seems useless for us
    (manifest, user, date, files, message) = nodecontent[:5]
    # hg stores (timestamp, tz-offset-in-seconds); the offset sign is
    # inverted with respect to what FixedOffset expects.
    dt, tz = date
    date = datetime.fromtimestamp(dt, FixedOffset(-tz/60))  # note the minus sign!

    manifest = repo.manifest.read(manifest)

    # To find adds, we get the manifests of any parents. If a file doesn't
    # occur there, it's new.
    pms = {}
    for parent in repo.changelog.parents(node):
        pms.update(repo.manifest.read(repo.changelog.read(parent)[0]))

    # if files contains only '.hgtags', this is probably a tag cset.
    # Tailor appears to only support tagging the current version, so only
    # pass on tags that are for the immediate parents of the current node
    tags = None
    if files == ['.hgtags']:
        tags = [tag for (tag, tagnode) in repo.tags().iteritems()
                if tagnode in parents]

    # Don't include the file itself in the changeset. It's only useful
    # to mercurial, and if we do end up making a tailor round trip
    # the nodes will be wrong anyway.
    if '.hgtags' in files:
        files.remove('.hgtags')
    if pms.has_key('.hgtags'):
        del pms['.hgtags']

    for f in files:
        e = ChangesetEntry(f)
        # find renames
        fl = repo.file(f)
        oldname = f in manifest and fl.renamed(manifest[f])
        if oldname:
            e.action_kind = ChangesetEntry.RENAMED
            e.old_name = oldname[0]
            # hg copy can copy the same file to multiple destinations
            # Currently this is handled as multiple renames. It would
            # probably be better to have ChangesetEntry.COPIED.
            if pms.has_key(oldname[0]):
                # Drop the source from the parent manifests so it is
                # not reported as deleted below.
                pms.pop(oldname[0])
        else:
            if pms.has_key(f):
                e.action_kind = ChangesetEntry.UPDATED
            else:
                e.action_kind = ChangesetEntry.ADDED
        entries.append(e)

    # Whatever remains in the parent manifests but not in this
    # revision's manifest was deleted.
    for df in [file for file in pms.iterkeys()
               if not manifest.has_key(file)]:
        e = ChangesetEntry(df)
        e.action_kind = ChangesetEntry.DELETED
        entries.append(e)

    from mercurial.node import hex
    revision = hex(node)
    return Changeset(revision, date, user, message, entries, tags=tags)
def _applyChangeset(self, changeset):
    """
    Run ``svn update`` to the changeset's revision, retrying on
    transient failures, and return the list of conflicted paths.

    Also completes the changeset's entries with is_directory flags and
    with any files implicitly added below added directories.
    """
    from os import walk
    from os.path import join, isdir
    from time import sleep

    # Complete changeset information, determining the is_directory
    # flag of the removed entries, before updating to the given revision
    for entry in changeset.entries:
        if entry.action_kind == entry.DELETED:
            entry.is_directory = isdir(join(self.repository.basedir,
                                            entry.name))

    cmd = self.repository.command("update")
    if self.repository.ignore_externals:
        cmd.append("--ignore-externals")
    cmd.extend(["--revision", changeset.revision])
    svnup = ExternalCommand(cwd=self.repository.basedir, command=cmd)

    # Retry the update up to 3 times with exponential backoff when svn
    # exits with status 1 (typically a transient lock/network issue).
    retry = 0
    while True:
        out, err = svnup.execute(".", stdout=PIPE, stderr=PIPE)
        if svnup.exit_status == 1:
            retry += 1
            if retry>3:
                break
            delay = 2**retry
            self.log.error("%s returned status %s saying\n%s",
                           str(svnup), svnup.exit_status, err.read())
            self.log.warning("Retrying in %d seconds...", delay)
            sleep(delay)
        else:
            break

    if svnup.exit_status:
        # NOTE(review): err was already read in the loop above, so on
        # the final failure err.read() likely returns '' — confirm.
        raise ChangesetApplicationFailure(
            "%s returned status %s saying\n%s" %
            (str(svnup), svnup.exit_status, err.read()))

    self.log.debug("%s updated to %s",
                   ','.join([e.name for e in changeset.entries]),
                   changeset.revision)

    # Complete changeset information, determining the is_directory
    # flag of the added entries
    implicitly_added_entries = []
    known_added_entries = set()
    for entry in changeset.entries:
        if entry.action_kind == entry.ADDED:
            known_added_entries.add(entry.name)
            fullname = join(self.repository.basedir, entry.name)
            entry.is_directory = isdir(fullname)
            # If it is a directory, extend the entries of the
            # changeset with all its contents, if not already there.
            if entry.is_directory:
                for root, subdirs, files in walk(fullname):
                    # Never descend into svn's metadata directories.
                    if '.svn' in subdirs:
                        subdirs.remove('.svn')
                    for f in files:
                        name = join(root, f)[len(self.repository.basedir)+1:]
                        newe = ChangesetEntry(name)
                        newe.action_kind = newe.ADDED
                        implicitly_added_entries.append(newe)
                    for d in subdirs:
                        name = join(root, d)[len(self.repository.basedir)+1:]
                        newe = ChangesetEntry(name)
                        newe.action_kind = newe.ADDED
                        newe.is_directory = True
                        implicitly_added_entries.append(newe)

    for e in implicitly_added_entries:
        if not e.name in known_added_entries:
            changeset.entries.append(e)

    # Collect conflicted paths from the "C <path>" lines of the
    # update output.
    result = []
    for line in out:
        if len(line)>2 and line[0] == 'C' and line[1] == ' ':
            self.log.warning("Conflict after svn update: %r", line)
            result.append(line[2:-1])

    return result