def get_changesets(self, start, stop):
    self.log.debug('PerforceRepository.get_changesets(%r,%r)' % (start, stop))

    import datetime
    start = datetime.datetime.fromtimestamp(start)
    stop = datetime.datetime.fromtimestamp(stop)
    startDate = start.strftime('%Y/%m/%d:%H:%M:%S')
    stopDate = stop.strftime('%Y/%m/%d:%H:%M:%S')

    from p4trac.repos import _P4ChangesOutputConsumer
    output = _P4ChangesOutputConsumer(self._repos)
    self._connection.run('changes', '-l', '-s', 'submitted',
                         '@>=%s,@<=%s' % (startDate, stopDate),
                         output=output)
    if output.errors:
        from p4trac.repos import PerforceError
        raise PerforceError(output.errors)

    for change in output.changes:
        yield self.get_changeset(change)

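# Illustration (annotation, not part of the original module): the revision
# range passed to 'p4 changes' above is built from Perforce date revision
# specifiers using the strftime format shown. For example:
#
#   import datetime
#   start = datetime.datetime(2008, 3, 1)
#   stop = datetime.datetime(2008, 3, 2)
#   spec = '@>=%s,@<=%s' % (start.strftime('%Y/%m/%d:%H:%M:%S'),
#                           stop.strftime('%Y/%m/%d:%H:%M:%S'))
#   # spec == '@>=2008/03/01:00:00:00,@<=2008/03/02:00:00:00'
#
# which limits the listed submitted changelists to that time window.
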
def sync(self):
    youngest_stored = self.repos.get_youngest_rev_in_cache(self.db)
    if youngest_stored != str(self.repos.youngest_rev):
        # Need to cache all information for changes since the last
        # sync operation.
        if youngest_stored is None:
            youngest_stored = '0'

        # Obtain a list of changes since the last cache sync
        from p4trac.repos import _P4ChangesOutputConsumer
        output = _P4ChangesOutputConsumer(self.repos._repos)
        self.repos._connection.run('changes', '-l', '-s', 'submitted',
                                   '@>%s' % youngest_stored,
                                   output=output)
        if output.errors:
            from p4trac.repos import PerforceError
            raise PerforceError(output.errors)

        changes = output.changes
        changes.reverse()

        # Perform the precaching of the file history for files in these
        # changes.
        self.repos._repos.precacheFileHistoryForChanges(changes)

    # Call on to the default implementation now that we've cached
    # enough information to make it run a bit faster.
    CachedRepository.sync(self)

def get_tags(self, rev):
    results = self._connection.run('labels')
    if results.errors:
        from p4trac.repos import PerforceError
        raise PerforceError(results.errors)

    for rec in results.records:
        name = self._repos.toUnicode(rec['label'])
        yield (name, u'@%s' % name)

def updateCache(self, fromChange):
    from perforce import ConnectionDropped

    # Update the database in batches of 1000 changes so that we don't
    # overload the virtual memory system by trying to store information
    # about every change in the repository at once during the initial
    # cache population.
    batchSize = 1000
    lowerBound = fromChange
    upperBound = self.repos.youngest_rev + 1

    self.log.debug("Updating cache with changes [%i,%i]" % (lowerBound,
                                                            upperBound))
    try:
        while lowerBound < upperBound:
            batchUpperBound = min(lowerBound + batchSize, upperBound)

            # Get the next batch of changes to cache
            from p4trac.repos import _P4ChangesOutputConsumer
            output = _P4ChangesOutputConsumer(self.repos._repos)
            self.repos._connection.run(
                'changes', '-l', '-s', 'submitted',
                '@>=%i,@<%i' % (lowerBound, batchUpperBound),
                output=output)
            if output.errors:
                from p4trac.repos import PerforceError
                raise PerforceError(output.errors)

            changes = output.changes
            changes.reverse()

            # Pre-cache all information about these changes in memory
            # before caching in the database. Clear the in-memory cache
            # afterwards to save on memory usage.
            self.repos._repos.precacheFileInformationForChanges(changes)
            self.storeChangesInDB(changes)
            self.repos._repos.clearFileInformationCache()

            lowerBound += batchSize
    except ConnectionDropped, e:
        self.log.debug('Rolling back uncommitted cache updates')
        self.db.rollback()
        raise TracError('Connection to Perforce server lost')

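# Worked example (annotation): with fromChange=1 and youngest_rev=2500 the
# loop above issues three queries that cover every submitted change exactly
# once:
#
#   '@>=1,@<1001'      (changes 1..1000)
#   '@>=1001,@<2001'   (changes 1001..2000)
#   '@>=2001,@<2501'   (changes 2001..2500)
#
# Each batch is precached in memory, written to the database via
# storeChangesInDB(), and then dropped from the in-memory cache.
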
def get_properties(self):
    import p4trac.repos
    try:
        results = self._repos._connection.run('fixes', '-c',
                                              str(self._change))
        if results.errors:
            raise p4trac.repos.PerforceError(results.errors)

        fixes = ''
        for record in results.records:
            tktid = int(record['Job'][self._job_prefix_length:])
            fixes += '#%d ' % tktid
        if fixes != '':
            yield ('Tickets', fixes, True, 'message')
    except p4trac.repos.NoSuchChangelist, e:
        raise NoSuchChangeset(e.change)

def get_properties(self):
    import p4trac.repos
    try:
        results = self._repos._connection.run('fixes', '-c',
                                              str(self._change))
        if results.errors:
            raise p4trac.repos.PerforceError(results.errors)

        props = {}
        fixes = ''
        for record in results.records:
            tktid = int(record['Job'][self._job_prefix_length:])
            self._log.debug("get_properties %d " % tktid)
            fixes += ' %d' % tktid
        if fixes != '':
            props['Tickets'] = to_unicode(fixes)
        return props
    except p4trac.repos.NoSuchChangelist, e:
        raise NoSuchChangeset(e.change)

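# Illustration (annotation; the 'ticket-' prefix is purely hypothetical): the
# ticket number is recovered by stripping a configured job-name prefix whose
# length is self._job_prefix_length. For instance, with a prefix of length 7
# such as 'ticket-', a 'p4 fixes' record naming job 'ticket-123' gives
#
#   int('ticket-123'[7:])   # -> 123
#
# so the changeset is reported as fixing ticket #123.
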
def previous_rev(self, rev, path=''):
    self.log.debug('previous_rev(%r)' % rev)

    if not isinstance(rev, int):
        rev = self.short_rev(rev)
    if not isinstance(rev, int):
        raise NoSuchChangeset(rev)

    from p4trac.repos import P4ChangesOutputConsumer
    output = P4ChangesOutputConsumer(self._repos)
    self._connection.run('changes', '-l', '-s', 'submitted', '-m', '1',
                         '@<%i' % rev, output=output)
    if output.errors:
        from p4trac.repos import PerforceError
        raise PerforceError(output.errors)

    if output.changes:
        return max(output.changes)
    else:
        return None

def get_history(self, limit=None):
    self._log.debug('PerforceNode.get_history(%r)' % limit)

    if self._node.isFile:
        # Force population of the filelog history for efficiency
        from p4trac.repos import _P4FileLogOutputConsumer
        output = _P4FileLogOutputConsumer(self._repos)
        if limit is None:
            self._repos._connection.run(
                'filelog', '-i', '-l',
                self._repos.fromUnicode(self._nodePath.fullPath),
                output=output)
        else:
            self._repos._connection.run(
                'filelog', '-i', '-l', '-m', str(limit),
                self._repos.fromUnicode(self._nodePath.fullPath),
                output=output)

        from p4trac.repos import NodePath
        currentNode = self._node
        i = 0
        # Walk back through the file's revisions, following integrations
        # where appropriate. A limit of None means no limit.
        while (limit is None or i < limit) and currentNode is not None:
            if currentNode.action in [u'add', u'branch', u'import']:
                if currentNode.integrations:
                    nodePath, how = currentNode.integrations[0]
                    # TODO: Detect whether the copy was really a move
                    yield (normalisePath(currentNode.nodePath.path),
                           currentNode.change,
                           Changeset.COPY)
                    currentNode = self._repos.getNode(nodePath)
                else:
                    yield (normalisePath(currentNode.nodePath.path),
                           currentNode.change,
                           Changeset.ADD)
                    if currentNode.fileRevision > 1:
                        # Get the previous revision
                        nodePath = NodePath(
                            currentNode.nodePath.path,
                            '#%i' % (currentNode.fileRevision - 1))
                        currentNode = self._repos.getNode(nodePath)
                    else:
                        currentNode = None
            elif currentNode.action in [u'edit', u'integrate']:
                nextNode = None
                if currentNode.integrations:
                    nodePath, how = currentNode.integrations[0]
                    if how == 'copy':
                        yield (normalisePath(currentNode.nodePath.path),
                               currentNode.change,
                               Changeset.COPY)
                        nextNode = self._repos.getNode(nodePath)
                    else:
                        yield (normalisePath(currentNode.nodePath.path),
                               currentNode.change,
                               Changeset.EDIT)
                else:
                    yield (normalisePath(currentNode.nodePath.path),
                           currentNode.change,
                           Changeset.EDIT)
                if nextNode is None:
                    if currentNode.fileRevision > 1:
                        currentNode = self._repos.getNode(
                            NodePath(currentNode.nodePath.path,
                                     '#%i' % (currentNode.fileRevision - 1)))
                    else:
                        currentNode = None
                else:
                    currentNode = nextNode
            elif currentNode.action in [u'delete']:
                yield (normalisePath(currentNode.nodePath.path),
                       currentNode.change,
                       Changeset.DELETE)
                if currentNode.fileRevision > 1:
                    currentNode = self._repos.getNode(
                        NodePath(currentNode.nodePath.path,
                                 '#%i' % (currentNode.fileRevision - 1)))
                else:
                    currentNode = None
            i += 1

    elif self._node.isDirectory:
        # List all changelists that have affected this directory
        from p4trac.repos import _P4ChangesOutputConsumer
        output = _P4ChangesOutputConsumer(self._repos)
        if self._nodePath.isRoot:
            queryPath = '//...%s' % self._nodePath.rev
        else:
            queryPath = '%s/...%s' % (self._nodePath.path,
                                      self._nodePath.rev)
        if limit is None:
            self._repos._connection.run(
                'changes', '-l', '-s', 'submitted',
                self._repos.fromUnicode(queryPath),
                output=output)
        else:
            self._repos._connection.run(
                'changes', '-l', '-s', 'submitted', '-m', str(limit),
                self._repos.fromUnicode(queryPath),
                output=output)
        if output.errors:
            from p4trac.repos import PerforceError
            raise PerforceError(output.errors)

        changes = output.changes

        # And describe the contents of those changelists
        from p4trac.repos import _P4DescribeOutputConsumer
        output = _P4DescribeOutputConsumer(self._repos)
        self._repos._connection.run('describe', '-s', output=output,
                                    *[str(c) for c in changes])

        from p4trac.repos import NodePath
        for i in xrange(len(changes)):
            change = changes[i]
            nodePath = NodePath(self._nodePath.path, change)
            if i < len(changes) - 1:
                prevChange = changes[i + 1]
            else:
                prevChange = change - 1
            prevNodePath = NodePath(self._nodePath.path, prevChange)

            node = self._repos.getNode(nodePath)
            prevNode = self._repos.getNode(prevNodePath)

            if node.isDirectory:
                if prevNode.isDirectory:
                    yield (normalisePath(self._nodePath.path),
                           change,
                           Changeset.EDIT)
                else:
                    yield (normalisePath(self._nodePath.path),
                           change,
                           Changeset.ADD)
            elif prevNode.isDirectory:
                yield (normalisePath(self._nodePath.path),
                       change,
                       Changeset.DELETE)
    else:
        raise NoSuchNode(self._nodePath.path, self._nodePath.rev)

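# Note (annotation, not part of the original module): both branches above
# yield (path, change, action) tuples, where `path` is the normalised node
# path, `change` is the Perforce changelist number, and `action` is one of
# Trac's Changeset constants (ADD, COPY, EDIT or DELETE). A hypothetical
# caller might consume it as:
#
#   for path, change, action in node.get_history(limit=10):
#       print '%s changed in @%s (%s)' % (path, change, action)
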
def get_changes(self, old_path, old_rev, new_path, new_rev,
                ignore_ancestry=1):
    self._log.debug('PerforceRepository.get_changes(%r,%r,%r,%r)' % (
        old_path, old_rev, new_path, new_rev))

    from p4trac.repos import NodePath
    oldNodePath = NodePath(NodePath.normalisePath(old_path), old_rev)
    oldNode = self._repos.getNode(oldNodePath)
    newNodePath = NodePath(NodePath.normalisePath(new_path), new_rev)
    newNode = self._repos.getNode(newNodePath)

    if (newNode.isFile and oldNode.isDirectory) or \
       (newNode.isDirectory and oldNode.isFile):
        raise TracError("Cannot view changes between directory and file")

    if newNode.isDirectory or oldNode.isDirectory:
        if oldNodePath.isRoot:
            oldQueryPath = u'//...%s' % oldNodePath.rev
        else:
            oldQueryPath = u'%s/...%s' % (oldNodePath.path,
                                          oldNodePath.rev)
        if newNodePath.isRoot:
            newQueryPath = u'//...%s' % newNodePath.rev
        else:
            newQueryPath = u'%s/...%s' % (newNodePath.path,
                                          newNodePath.rev)
    elif newNode.isFile or oldNode.isFile:
        oldQueryPath = oldNodePath.fullPath
        newQueryPath = newNodePath.fullPath
    else:
        raise TracError("Cannot diff two non-existent nodes")

    from p4trac.repos import _P4Diff2OutputConsumer
    output = _P4Diff2OutputConsumer(self._repos)
    self._connection.run('diff2', '-ds',
                         self._repos.fromUnicode(oldQueryPath),
                         self._repos.fromUnicode(newQueryPath),
                         output=output)
    if output.errors:
        from p4trac.repos import PerforceError
        raise PerforceError(output.errors)

    for change in output.changes:
        oldFileNodePath, newFileNodePath = change
        if oldFileNodePath is not None:
            oldFileNode = PerforceNode(oldFileNodePath, self._repos,
                                       self._log)
        else:
            oldFileNode = None
        if newFileNodePath is not None:
            newFileNode = PerforceNode(newFileNodePath, self._repos,
                                       self._log)
        else:
            newFileNode = None

        if newFileNode and oldFileNode:
            yield (oldFileNode, newFileNode, Node.FILE, Changeset.EDIT)
        elif newFileNode:
            yield (oldFileNode, newFileNode, Node.FILE, Changeset.ADD)
        elif oldFileNode:
            yield (oldFileNode, newFileNode, Node.FILE, Changeset.DELETE)

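# Note (annotation): each entry in output.changes from 'p4 diff2 -ds' is an
# (oldFileNodePath, newFileNodePath) pair. A pair with both sides present is
# reported as an EDIT, a pair with only the new side as an ADD, and a pair
# with only the old side as a DELETE, matching the
# (old_node, new_node, kind, change) tuples Trac expects from get_changes().
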
def next_rev(self, rev, path=''):
    # Finding the next revision is a little more difficult in Perforce
    # as we can only ask for the n most recent changes according to a
    # given criteria. We query batches of changes using a binary search
    # technique so that the number of changes queried is of the order of
    # log N where N is the number of changes greater than rev. This way
    # it is still fairly efficient whether the next change is 1 or 1000
    # changes later.
    self.log.debug('next_rev(%r,%r)' % (rev, path))

    from p4trac.repos import P4NodePath
    if not path:
        path = u'//'
    else:
        path = P4NodePath.normalisePath(path)

    node = self._repos.getNode(P4NodePath(path, rev))
    if node.isDirectory:
        if node.nodePath.isRoot:
            # Handle the root path specially since it encompasses all
            # changes and so can use the repository's internal cache.
            return self._repos.getNextChange(int(rev))
        else:
            queryPath = u'%s/...' % node.nodePath.path
    else:
        queryPath = node.nodePath.path
    queryPath = self._repos.fromUnicode(queryPath)

    self.log.debug(u'Looking for next_rev after change %i for %s'
                   % (rev, path))

    # Perform a binary-search of sorts for the next revision
    batchSize = 50
    lowerBound = rev + 1
    upperBound = self.youngest_rev

    while lowerBound <= upperBound:
        if lowerBound + batchSize > upperBound:
            batchUpperBound = upperBound
        else:
            middle = (upperBound + lowerBound) / 2
            if middle - lowerBound < batchSize:
                batchUpperBound = lowerBound + batchSize
            else:
                batchUpperBound = middle

        self.log.debug('Looking for changes in range [%i, %i]'
                       % (lowerBound, batchUpperBound))

        from p4trac.repos import P4ChangesOutputConsumer
        output = P4ChangesOutputConsumer(self._repos)
        depot_path = '%s%s@>=%i,@<=%i' % (rootPath(self._connection),
                                          queryPath, lowerBound,
                                          batchUpperBound)
        self._connection.run('changes', '-l', '-s', 'submitted',
                             '-m', str(batchSize), depot_path,
                             output=output)
        if output.errors:
            from p4trac.repos import PerforceError
            raise PerforceError(output.errors)

        if output.changes:
            lowest = min(output.changes)
            assert lowest >= lowerBound
            assert lowest <= batchUpperBound
            if lowerBound + batchSize >= batchUpperBound:
                # There are no earlier changes
                self.log.debug('next_rev is %i' % lowest)
                return lowest
            else:
                # There may be an earlier change still, but we know it
                # can't be any later than lowest.
                upperBound = lowest
        else:
            # Didn't find any changes in (lowerBound, batchUpperBound)
            # Try searching from batchUpperBound + 1 onwards
            lowerBound = batchUpperBound + 1
    return None

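# Worked example (annotation): with rev=1000, youngest_rev=5000 and
# batchSize=50, the first iteration has lowerBound=1001 and upperBound=5000;
# middle = (5000 + 1001) / 2 = 3000, and since 3000 - 1001 >= 50 the query is
#
#   p4 changes -l -s submitted -m 50 <path>@>=1001,@<=3000
#
# If that window contains changes, upperBound shrinks to the lowest change
# found; if it is empty, lowerBound jumps to 3001. Either way the search
# range roughly halves per query, so the next change is found in O(log N)
# 'p4 changes' invocations.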