def putfile(fname, rpath, uname="", commitmsg=""):
    """Commit the contents of local file `fname` to the path `fname`
    inside the repository located at `rpath`.

    fname     -- local file to read; also used as the in-repository path
    rpath     -- filesystem path of the repository
    uname     -- author recorded on the new revision
    commitmsg -- log message recorded on the new revision

    Creates the file if it does not exist; refuses (and aborts the
    transaction) if the path is a directory.
    """
    rpath = core.svn_path_canonicalize(rpath)
    repos_ptr = repos.open(rpath)
    fsob = repos.fs(repos_ptr)

    # open a transaction against HEAD
    rev = fs.youngest_rev(fsob)
    txn = repos.fs_begin_txn_for_commit(repos_ptr, rev, uname, commitmsg)
    root = fs.txn_root(txn)

    kind = fs.check_path(root, fname)
    if kind == core.svn_node_none:
        print("file '%s' does not exist, creating..." % fname)
        fs.make_file(root, fname)
    elif kind == core.svn_node_dir:
        print("File '%s' is a dir." % fname)
        # fix: abort the open transaction instead of leaving it dangling
        fs.abort_txn(txn)
        return
    else:
        print("Updating file '%s'" % fname)

    handler, baton = fs.apply_textdelta(root, fname, None, None)

    ### it would be nice to get an svn_stream_t. for now, just load in the
    ### whole file and shove it into the FS.
    # fix: close the file handle (was opened and never closed)
    with open(fname, 'rb') as fp:
        delta.svn_txdelta_send_string(fp.read(), handler, baton)

    newrev = repos.fs_commit_txn(repos_ptr, txn)
    print("revision: %s" % newrev)
def idKeyUpdate(self, deltas): """ Convert the SCCS keywords inside of the supplied deltas to subversion keywords. """ # Split up the deltas array into smaller sub-arrays, otherwise # we choke running out of memory due to really large changesets # like the CDDL 2005/06/08 putback in ON that touched every file while len(deltas): if len(deltas) > 1000: print "partitioning deltas into smaller new_deltas" new_deltas = deltas[:1000] deltas = deltas[1000:] """ Convert the SCCS keywords inside of the supplied deltas to subversion keywords. """ subpool = core.svn_pool_create(self.pool) (revision, transaction, root) = self._revisionSetup(subpool, options.userid, "Automated keyword replacement") for delta in new_deltas: if isTextFilename(delta.getFilename()): originalContents = delta.getFileContents("-k") updatedContents = keywordSubstitution(originalContents) if originalContents != updatedContents: handler, baton = fs.apply_textdelta(root, delta.getRepositoryName(), None, None, subpool) svn.delta.svn_txdelta_send_string(updatedContents, handler, baton, subpool) print "sending ", delta.getRepositoryName() print "committing version ", print self._commit(revision, delta.getDate(), transaction, subpool) core.svn_pool_destroy(subpool)
def putfile(fname, rpath, home='.'): _util.apr_initialize() pool = _util.svn_pool_create(None) db_path = os.path.join(home, 'db') if not os.path.exists(db_path): db_path = home fsob = fs.new(pool) fs.open_berkeley(fsob, db_path) # open a transaction against HEAD rev = fs.youngest_rev(fsob, pool) txn = fs.begin_txn(fsob, rev, pool) print ` txn ` root = fs.txn_root(txn, pool) fs.make_file(root, rpath, pool) handler, baton = fs.apply_textdelta(root, rpath, pool) ### it would be nice to get an svn_stream_t. for now, just load in the ### whole file and shove it into the FS. _delta.svn_txdelta_send_string( open(fname, 'rb').read(), handler, baton, pool) conflicts, new_rev = fs.commit_txn(txn) if conflicts: print 'conflicts:', conflicts print 'New revision:', new_rev _util.svn_pool_destroy(pool) _util.apr_terminate()
def write(self, data, uname='', commitmsg='', istext=False):
    """Commit `data` as the contents of this file (self.path) on top of
    revision self.revno.

    data      -- new file contents
    uname     -- author recorded on the new revision
    commitmsg -- log message recorded on the new revision
    istext    -- if true, set svn:eol-style=native on the node

    Returns the result of repos.fs_commit_txn.  On any error the open
    transaction is aborted and the exception re-raised.
    """
    txn = repos.fs_begin_txn_for_commit(self._repo.repos_ptr, self.revno,
                                        uname, commitmsg)
    r = None
    try:
        txn_root = fs.txn_root(txn)
        kind = fs.check_path(txn_root, self.path)
        if kind == core.svn_node_none:
            if not _create_file(txn_root, self.path):
                # fix: was a string exception (illegal since Python 2.6)
                # that also referenced an undefined name 'path'
                raise RuntimeError(
                    'attempt to create file, but file creation error: %s'
                    % self.path)
        elif kind == core.svn_node_dir:
            # fix: was a string exception (illegal since Python 2.6)
            raise RuntimeError(
                'attempt to create file, but directory already exists: %s'
                % self.path)
        if istext:
            # give text files platform-native line endings on checkout
            fs.change_node_prop(txn_root, self.path,
                                'svn:eol-style', 'native')
        handler, baton = fs.apply_textdelta(txn_root, self.path, None, None)
        delta.svn_txdelta_send_string(data, handler, baton)
        r = repos.fs_commit_txn(self._repo.repos_ptr, txn)
    except Exception:
        # roll back the open transaction before propagating the error
        fs.abort_txn(txn)
        raise
    # fix: the commit result was computed but never returned
    return r
def add(self, deltas): """ Add the supplied set of deltas to the repository. They will all be added with the same user name, date, and comment. """ # Split up the deltas array into smaller sub-arrays, otherwise # we choke running out of memory due to really large changesets # like the CDDL 2005/06/08 putback in ON that touched every file while len(deltas): if len(deltas) > 1000: print "partitioning deltas into smaller new_deltas" new_deltas = deltas[:1000] deltas = deltas[1000:] # Add all of the directories first, or we will be trying # to cross transactions, which is bad. for delta in new_deltas: self._addDirectories(delta) print "preparing %s version %s (%s) by %s" % (delta.pathname, delta.version, delta.date, delta.author) subpool = core.svn_pool_create(self.pool) (revision, transaction, root) = self._revisionSetup(subpool, new_deltas[0].author, new_deltas[0].comment) for delta in new_deltas: subversionPath = delta.getRepositoryName() kind = fs.check_path(root, subversionPath, subpool) if kind == core.svn_node_none: fs.make_file(root, subversionPath, subpool) elif kind == core.svn_node_dir: raise EnvironmentError(subversionPath + " already present as a directory.") handler, baton = fs.apply_textdelta(root, subversionPath, None, None, subpool) svn.delta.svn_txdelta_send_string(delta.getFileContents(), handler, baton, subpool) if delta.version.isdigit: fs.change_node_prop(root, subversionPath, 'sccs:sid', delta.version, subpool) print "sending ", subversionPath, delta.getDate(), "by", delta.author print "committing version ", print self._commit(revision, delta.getDate(), transaction, subpool) core.svn_pool_destroy(subpool)
def keywordPropertyUpdate(self, files): """ Does the following to text files: 1) Sets svn:keywords property 2) Replace SCCS keywords with SVN equivalents Note that while this will be treated as a separate transaction, the transaction date will be set to the last known date for the given file """ # Break up files into groups in order to avoid # potential "Too many open files" errors thrown when # committing large changesets counter = 0 filesPerCommit = 256 for filename, version in files.iteritems(): if counter%filesPerCommit == 0: if counter > 1: print "committing version ", print self._commit(revision, subversionTime(localtz.localize(datetime.now())), transaction, subpool) core.svn_pool_destroy(subpool) subpool = core.svn_pool_create(self.pool) (revision, transaction, root ) = \ self._revisionSetup(subpool, options.userid, "Automated SCCS keyword -> svn:keyword conversion\n") if isTextFilename(filename): print filename + ":" print " ... Setting svn:keywords property" fs.change_node_prop(root, filename, core.SVN_PROP_KEYWORDS, "LastChangedDate LastChangedRevision LastChangedBy HeadURL Id", subpool) fs.change_node_prop(root, filename, core.SVN_PROP_EOL_STYLE, "native", subpool) oldFileContents = version.getFileContents("-k") newFileContents = keywordSubstitution(oldFileContents) if oldFileContents != newFileContents: print " ... keywords converted" handler, baton = fs.apply_textdelta(root, version.getRepositoryName(), None, None, subpool) svn.delta.svn_txdelta_send_string(newFileContents, handler, baton, subpool) print " ... sending" else: print "skipping ", filename # Note we must unset sccs:sid since it no longer applies fs.change_node_prop(root, filename, 'sccs:sid', None, subpool) counter += 1 # Commit any stragglers print "committing version ", print self._commit(revision, subversionTime(localtz.localize(datetime.now())), transaction, subpool) core.svn_pool_destroy(subpool)
def commit(self, t_fs, ctx):
    """Flush this changeset into the Subversion filesystem t_fs.

    Replays every (file, cvs-revision) pair in self.changes and every
    pair in self.deletes inside one svn transaction, pulling file
    contents from the CVS repository via the RCS 'co' command, then
    commits and back-dates the resulting revision.

    NOTE(review): assumes ctx provides .pool and .cvsroot, and that
    self.get_metadata(pool) returns (author, log, date) -- confirm
    against the enclosing class.
    """
    # commit this transaction
    print 'committing: %s, over %d seconds' % (time.ctime(self.t_min),
                                               self.t_max - self.t_min)

    # create a pool for the entire commit
    c_pool = util.svn_pool_create(ctx.pool)

    # open a transaction against the current HEAD
    rev = fs.youngest_rev(t_fs, c_pool)
    txn = fs.begin_txn(t_fs, rev, c_pool)
    root = fs.txn_root(txn, c_pool)

    # (repos_path, cvs_rev) handled on the previous iteration; used to
    # diff against the right base when this commit touches the same file
    # more than once
    lastcommit = (None, None)

    # create a pool for each file; it will be cleared on each iteration
    f_pool = util.svn_pool_create(c_pool)

    for f, r in self.changes:
        # compute a repository path. ensure we have a leading "/" and drop
        # the ,v from the file name
        repos_path = '/' + relative_name(ctx.cvsroot, f[:-2])
        #print 'DEBUG:', repos_path

        print ' changing %s : %s' % (r, repos_path)

        ### hmm. need to clarify OS path separators vs FS path separators
        dirname = os.path.dirname(repos_path)
        if dirname != '/':
            # create any missing parent directories, shallowest first
            # get the components of the path (skipping the leading '/')
            parts = string.split(dirname[1:], os.sep)
            for i in range(1, len(parts) + 1):
                # reassemble the pieces, adding a leading slash
                parent_dir = '/' + string.join(parts[:i], '/')
                if fs.check_path(root, parent_dir, f_pool) == svn_node_none:
                    print ' making dir:', parent_dir
                    fs.make_dir(root, parent_dir, f_pool)

        if fs.check_path(root, repos_path, f_pool) == svn_node_none:
            created_file = 1
            fs.make_file(root, repos_path, f_pool)
        else:
            created_file = 0

        handler, baton = fs.apply_textdelta(root, repos_path, f_pool)

        # figure out the real file path for "co"
        try:
            statcache.stat(f)
        except os.error:
            # not on the trunk path; CVS keeps removed files in Attic/
            dirname, fname = os.path.split(f)
            f = os.path.join(dirname, 'Attic', fname)
            statcache.stat(f)

        # stream revision r of the file out of CVS
        pipe = os.popen('co -q -p%s %s' % (r, f), 'r', 102400)

        # if we just made the file, we can send it in one big hunk, rather
        # than streaming it in.
        ### we should watch out for file sizes here; we don't want to yank
        ### in HUGE files...
        if created_file:
            _delta.svn_txdelta_send_string(pipe.read(), handler, baton,
                                           f_pool)
        else:
            # open an SVN stream onto the pipe
            stream2 = util.svn_stream_from_stdio(pipe, f_pool)

            # Get the current file contents from the repo, or, if we have
            # multiple CVS revisions to the same file being done in this
            # single commit, then get the contents of the previous
            # revision from co, or else the delta won't be correct because
            # the contents in the repo won't have changed yet.
            if repos_path == lastcommit[0]:
                infile2 = os.popen("co -q -p%s %s" % (lastcommit[1], f),
                                   "r", 102400)
                stream1 = util.svn_stream_from_stdio(infile2, f_pool)
            else:
                stream1 = fs.file_contents(root, repos_path, f_pool)

            # compute base->new delta and push it through the handler
            txstream = _delta.svn_txdelta(stream1, stream2, f_pool)
            _delta.svn_txdelta_send_txstream(txstream, handler, baton,
                                             f_pool)

            # shut down the previous-rev pipe, if we opened it
            infile2 = None

        # shut down the current-rev pipe
        pipe.close()

        # wipe the pool. this will get rid of the pipe streams and the delta
        # stream, and anything the FS may have done.
        util.svn_pool_clear(f_pool)

        # remember what we just did, for the next iteration
        lastcommit = (repos_path, r)

    for f, r in self.deletes:
        # compute a repository path. ensure we have a leading "/" and drop
        # the ,v from the file name
        repos_path = '/' + relative_name(ctx.cvsroot, f[:-2])

        print ' deleting %s : %s' % (r, repos_path)

        # If the file was initially added on a branch, the first mainline
        # revision will be marked dead, and thus, attempts to delete it will
        # fail, since it doesn't really exist.
        if r != '1.1':
            ### need to discriminate between OS paths and FS paths
            fs.delete(root, repos_path, f_pool)

        # wipe the pool, in case the delete loads it up
        util.svn_pool_clear(f_pool)

    # get the metadata for this commit
    author, log, date = self.get_metadata(c_pool)
    fs.change_txn_prop(txn, 'svn:author', author, c_pool)
    fs.change_txn_prop(txn, 'svn:log', log, c_pool)

    conflicts, new_rev = fs.commit_txn(txn)

    # set the time to the proper (past) time
    fs.change_rev_prop(t_fs, new_rev, 'svn:date', date, c_pool)

    ### how come conflicts is a newline?
    if conflicts != '\n':
        print ' CONFLICTS:', ` conflicts `
    print ' new revision:', new_rev

    # done with the commit and file pools
    util.svn_pool_destroy(c_pool)