def add_history(self, path, revision, pool):
    # If filtering, only add the path and revision to the histories
    # list if they were actually changed in this revision (where
    # change means the path itself was changed, or one of its parents
    # was copied).  This is useful for omitting bubble-up directory
    # changes.
    if not self.show_all_logs:
        rev_root = fs.revision_root(self.fs_ptr, revision, pool)
        changed_paths = fs.paths_changed(rev_root, pool)
        paths = changed_paths.keys()
        if path not in paths:
            # Look for a copied parent
            test_path = path
            found = 0
            subpool = core.svn_pool_create(pool)
            while 1:
                core.svn_pool_clear(subpool)
                off = string.rfind(test_path, '/')
                if off < 0:
                    break
                test_path = test_path[0:off]
                if test_path in paths:
                    copyfrom_rev, copyfrom_path = \
                        fs.copied_from(rev_root, test_path, subpool)
                    if copyfrom_rev >= 0 and copyfrom_path:
                        found = 1
                        break
            core.svn_pool_destroy(subpool)
            if not found:
                return
    self.histories[revision] = _cleanup_path(path)
def close_file(self, file_baton, text_checksum):
    changed, path = file_baton
    if len(path) < 3 or path[-3:] != '.po' or not changed:
        # This is not a .po file, or it hasn't changed
        return
    try:
        # Read the file contents through a validating UTF-8 decoder
        subpool = core.svn_pool_create(self.pool)
        checker = MsgFmtChecker()
        try:
            stream = core.Stream(
                fs.file_contents(self.txn_root, path, subpool))
            reader = codecs.getreader('UTF-8')(stream, 'strict')
            writer = codecs.getwriter('UTF-8')(checker, 'strict')
            while 1:
                data = reader.read(core.SVN_STREAM_CHUNK_SIZE)
                if not data:
                    break
                writer.write(data)
            if not checker.close():
                sys.exit("PO format check failed for '" + path + "'")
        except UnicodeError:
            sys.exit("PO file is not in UTF-8: '" + path + "'")
    finally:
        core.svn_pool_destroy(subpool)
def _close(self):
    if self.pool:
        core.svn_pool_destroy(self.pool)
        self.pool = None
    if self.apr_init:
        core.apr_terminate()
        self.apr_init = 0
def setUp(self):
    dumpfile = open(
        os.path.join(os.path.split(__file__)[0], 'svnrepos.dump'))
    svn_fs._import_svn()
    core.apr_initialize()
    pool = core.svn_pool_create(None)
    dumpstream = None
    try:
        if os.path.exists(REPOS_PATH):
            print 'trouble ahead with db/rep-cache.db... see #8278'
        r = repos.svn_repos_create(REPOS_PATH, '', '', None, None, pool)
        if hasattr(repos, 'svn_repos_load_fs2'):
            repos.svn_repos_load_fs2(r, dumpfile, StringIO(),
                                     repos.svn_repos_load_uuid_default,
                                     '', 0, 0, None, pool)
        else:
            dumpstream = core.svn_stream_from_aprfile(dumpfile, pool)
            repos.svn_repos_load_fs(r, dumpstream, None,
                                    repos.svn_repos_load_uuid_default,
                                    '', None, None, pool)
    finally:
        if dumpstream:
            core.svn_stream_close(dumpstream)
        core.svn_pool_destroy(pool)
        core.apr_terminate()
def propertyUpdate(self, filenames):
    """ Set the keywords property for the supplied filenames. """
    # Split up the filenames array into smaller sub-arrays, otherwise
    # we choke running out of memory due to a really large SCCS
    # repository like ON
    while len(filenames):
        if len(filenames) > 3000:
            print "partitioning filenames into smaller new_filenames"
            new_filenames = filenames[:3000]
            filenames = filenames[3000:]
        else:
            # last (or only) chunk -- take everything that remains so the
            # loop terminates
            new_filenames = filenames
            filenames = []
        subpool = core.svn_pool_create(self.pool)
        (revision, transaction, root) = self._revisionSetup(subpool,
                                                            options.userid,
                                                            "Automated property set")
        for filename in new_filenames:
            if isTextFilename(filename):
                print "property set for ", filename
                fs.change_node_prop(root, filename, core.SVN_PROP_KEYWORDS,
                                    "LastChangedDate LastChangedRevision LastChangedBy HeadURL Id",
                                    subpool)
                fs.change_node_prop(root, filename, core.SVN_PROP_EOL_STYLE,
                                    "native", subpool)
            else:
                print "skipping property set for ", filename
        self._commit(revision, subversionTime(time.localtime()), transaction,
                     subpool)
        core.svn_pool_destroy(subpool)
def setupClass(cls):
    svn_fs._import_svn()
    core.apr_initialize()
    pool = core.svn_pool_create(None)
    dumpstream = None
    cls.repos_path = tempfile.mkdtemp(prefix='svn-tmp')
    shutil.rmtree(cls.repos_path)
    dumpfile = open(os.path.join(os.path.split(__file__)[0], 'svn.dump'))
    try:
        r = repos.svn_repos_create(cls.repos_path, '', '', None, None, pool)
        if hasattr(repos, 'svn_repos_load_fs2'):
            repos.svn_repos_load_fs2(r, dumpfile, StringIO(),
                                     repos.svn_repos_load_uuid_default,
                                     '', 0, 0, None, pool)
        else:
            dumpstream = core.svn_stream_from_aprfile(dumpfile, pool)
            repos.svn_repos_load_fs(r, dumpstream, None,
                                    repos.svn_repos_load_uuid_default,
                                    '', None, None, pool)
    finally:
        if dumpstream:
            core.svn_stream_close(dumpstream)
        core.svn_pool_destroy(pool)
        core.apr_terminate()
def idKeyUpdate(self, deltas):
    """ Convert the SCCS keywords inside of the supplied deltas to
    subversion keywords. """
    # Split up the deltas array into smaller sub-arrays, otherwise
    # we choke running out of memory due to really large changesets
    # like the CDDL 2005/06/08 putback in ON that touched every file
    while len(deltas):
        if len(deltas) > 1000:
            print "partitioning deltas into smaller new_deltas"
            new_deltas = deltas[:1000]
            deltas = deltas[1000:]
        else:
            # last (or only) chunk -- take everything that remains so the
            # loop terminates
            new_deltas = deltas
            deltas = []
        subpool = core.svn_pool_create(self.pool)
        (revision, transaction, root) = self._revisionSetup(subpool,
                                                            options.userid,
                                                            "Automated keyword replacement")
        for delta in new_deltas:
            if isTextFilename(delta.getFilename()):
                originalContents = delta.getFileContents("-k")
                updatedContents = keywordSubstitution(originalContents)
                if originalContents != updatedContents:
                    handler, baton = fs.apply_textdelta(root,
                                                        delta.getRepositoryName(),
                                                        None, None, subpool)
                    svn.delta.svn_txdelta_send_string(updatedContents,
                                                      handler, baton, subpool)
                    print "sending ", delta.getRepositoryName()
        print "committing version ",
        print
        self._commit(revision, delta.getDate(), transaction, subpool)
        core.svn_pool_destroy(subpool)
def svn_uri(path):
    # init
    core.apr_initialize()
    pool = core.svn_pool_create(None)
    core.svn_config_ensure(None, pool)

    # get the repository URI for the path
    uri = client.svn_client_url_from_path(path, pool)

    # cleanup
    core.svn_pool_destroy(pool)
    core.apr_terminate()

    return uri
def remove(self, filenames):
    """ Remove the supplied files from the repository. """
    subpool = core.svn_pool_create(self.pool)
    (revision, transaction, root) = self._revisionSetup(subpool,
                                                        options.userid,
                                                        "Automated SCCS conversion removal")
    for file in filenames:
        print "removing ", file
        fs.delete(root, file, subpool)
    self._commit(revision, subversionTime(time.localtime()), transaction,
                 subpool)
    core.svn_pool_destroy(subpool)
def get_logs(svnrepos, full_name, rev, files):
    fsroot = svnrepos._getroot(rev)
    subpool = core.svn_pool_create(svnrepos.pool)
    for file in files:
        core.svn_pool_clear(subpool)
        path = _fs_path_join(full_name, file.name)
        rev = _get_last_history_rev(fsroot, path, subpool)
        datestr, author, msg = _fs_rev_props(svnrepos.fs_ptr, rev, subpool)
        date = _datestr_to_date(datestr, subpool)
        file.rev = str(rev)
        file.date = date
        file.author = author
        file.log = msg
        if file.kind == vclib.FILE:
            file.size = fs.file_length(fsroot, path, subpool)
    core.svn_pool_destroy(subpool)
def _addDirectories(self, delta):
    """ Make sure that all of the directories needed for delta are added. """
    directoriesToAdd = self._directoriesToAdd(delta)
    if len(directoriesToAdd) == 0:
        return
    subpool = core.svn_pool_create(self.pool)
    (revision, transaction, root) = \
        self._revisionSetup(subpool, options.userid,
                            "Automatic directory addition")
    for directory in directoriesToAdd:
        print "adding directory", directory
        print delta.getDate()
        fs.make_dir(root, directory, subpool)
        self.addedDirectories[directory] = 1
    self._commit(revision, delta.getDate(), transaction, subpool)
    core.svn_pool_destroy(subpool)
def add(self, deltas):
    """ Add the supplied set of deltas to the repository.  They will all
    be added with the same user name, date, and comment. """
    # Split up the deltas array into smaller sub-arrays, otherwise
    # we choke running out of memory due to really large changesets
    # like the CDDL 2005/06/08 putback in ON that touched every file
    while len(deltas):
        if len(deltas) > 1000:
            print "partitioning deltas into smaller new_deltas"
            new_deltas = deltas[:1000]
            deltas = deltas[1000:]
        else:
            # last (or only) chunk -- take everything that remains so the
            # loop terminates
            new_deltas = deltas
            deltas = []
        # Add all of the directories first, or we will be trying
        # to cross transactions, which is bad.
        for delta in new_deltas:
            self._addDirectories(delta)
            print "preparing %s version %s (%s) by %s" % (delta.pathname,
                                                          delta.version,
                                                          delta.date,
                                                          delta.author)
        subpool = core.svn_pool_create(self.pool)
        (revision, transaction, root) = self._revisionSetup(subpool,
                                                            new_deltas[0].author,
                                                            new_deltas[0].comment)
        for delta in new_deltas:
            subversionPath = delta.getRepositoryName()
            kind = fs.check_path(root, subversionPath, subpool)
            if kind == core.svn_node_none:
                fs.make_file(root, subversionPath, subpool)
            elif kind == core.svn_node_dir:
                raise EnvironmentError(subversionPath +
                                       " already present as a directory.")
            handler, baton = fs.apply_textdelta(root, subversionPath, None,
                                                None, subpool)
            svn.delta.svn_txdelta_send_string(delta.getFileContents(),
                                              handler, baton, subpool)
            if delta.version.isdigit():
                fs.change_node_prop(root, subversionPath, 'sccs:sid',
                                    delta.version, subpool)
            print "sending ", subversionPath, delta.getDate(), "by", delta.author
        print "committing version ",
        print
        self._commit(revision, delta.getDate(), transaction, subpool)
        core.svn_pool_destroy(subpool)
def get_logs(svnrepos, full_name, rev, files):
    dirents = svnrepos._get_dirents(full_name, rev)
    subpool = core.svn_pool_create(svnrepos.pool)
    rev_info_cache = {}
    for file in files:
        core.svn_pool_clear(subpool)
        entry = dirents[file.name]
        if rev_info_cache.has_key(entry.created_rev):
            rev, author, date, log = rev_info_cache[entry.created_rev]
        else:
            ### i think this needs some get_last_history action to be accurate
            rev, author, date, log, changes = \
                _get_rev_details(svnrepos, entry.created_rev, subpool)
            rev_info_cache[entry.created_rev] = rev, author, date, log
        file.rev = rev
        file.author = author
        file.date = _datestr_to_date(date, subpool)
        file.log = log
        file.size = entry.size
    core.svn_pool_destroy(subpool)
def _fetch_log(svnrepos, full_name, which_rev, options, pool):
    revs = []

    if options.get('svn_latest_log', 0):
        rev = _log_helper(svnrepos, which_rev, full_name, pool)
        if rev:
            revs.append(rev)
    else:
        history_set = _get_history(svnrepos, full_name, which_rev, options)
        history_revs = history_set.keys()
        history_revs.sort()
        history_revs.reverse()
        subpool = core.svn_pool_create(pool)
        for history_rev in history_revs:
            core.svn_pool_clear(subpool)
            rev = _log_helper(svnrepos, history_rev, history_set[history_rev],
                              subpool)
            if rev:
                revs.append(rev)
        core.svn_pool_destroy(subpool)

    return revs
def close_file(self, file_baton, text_checksum):
    changed, path = file_baton
    if len(path) < 3 or path.lower()[-3:] != '.py' or not changed:
        # This is not a .py file, don't care about tabs
        # TODO - only look inside trunk
        return

    # Read the file contents through a tab-finder
    subpool = core.svn_pool_create(self.pool)
    stream = core.Stream(fs.file_contents(self.txn_root, path, subpool))
    data = stream.read()  # core.SVN_STREAM_CHUNK_SIZE)
    for line in data.splitlines():
        if _tabs.match(line):
            core.svn_pool_destroy(subpool)
            msg = ("Python file contains lines that begin with tabs: '%s'\n"
                   "There may be others as well." % (path,))
            sys.stderr.write(msg)
            sys.exit(1)
    core.svn_pool_destroy(subpool)
def setUp(self):
    dumpfile = open(
        os.path.join(os.path.split(__file__)[0], 'svnrepos.dump'))
    core.apr_initialize()
    pool = core.svn_pool_create(None)
    dumpstream = None
    try:
        r = repos.svn_repos_create(REPOS_PATH, '', '', None, None, pool)
        if hasattr(repos, 'svn_repos_load_fs2'):
            repos.svn_repos_load_fs2(r, dumpfile, StringIO(),
                                     repos.svn_repos_load_uuid_default,
                                     '', 0, 0, None, pool)
        else:
            dumpstream = core.svn_stream_from_aprfile(dumpfile, pool)
            repos.svn_repos_load_fs(r, dumpstream, None,
                                    repos.svn_repos_load_uuid_default,
                                    '', None, None, pool)
    finally:
        if dumpstream:
            core.svn_stream_close(dumpstream)
        core.svn_pool_destroy(pool)
        core.apr_terminate()
def sync(db, repos, fs_ptr, pool):
    """ updates the revision and node_change tables to be in sync with
    the repository.
    """
    if core.SVN_VER_MAJOR < 1:
        raise EnvironmentError, \
              "Subversion >= 1.0 required: Found %d.%d.%d" % \
              (core.SVN_VER_MAJOR, core.SVN_VER_MINOR, core.SVN_VER_MICRO)

    cursor = db.cursor()
    cursor.execute('SELECT ifnull(max(rev), 0) FROM revision')
    youngest_stored = int(cursor.fetchone()[0])
    max_rev = fs.youngest_rev(fs_ptr, pool)
    num = max_rev - youngest_stored
    offset = youngest_stored + 1

    subpool = core.svn_pool_create(pool)
    for rev in range(num):
        message = fs.revision_prop(fs_ptr, rev + offset,
                                   core.SVN_PROP_REVISION_LOG, subpool)
        author = fs.revision_prop(fs_ptr, rev + offset,
                                  core.SVN_PROP_REVISION_AUTHOR, subpool)
        date = fs.revision_prop(fs_ptr, rev + offset,
                                core.SVN_PROP_REVISION_DATE, subpool)
        date = core.svn_time_from_cstring(date, subpool) / 1000000
        cursor.execute('INSERT INTO revision (rev, time, author, message) '
                       'VALUES (%s, %s, %s, %s)',
                       rev + offset, date, author, message)
        insert_change(subpool, fs_ptr, rev + offset, cursor)
        core.svn_pool_clear(subpool)
    core.svn_pool_destroy(subpool)
    db.commit()
def export_revision(rev, repo, fs, pool):
    sys.stderr.write("Exporting revision %s... " % rev)

    revpool = svn_pool_create(pool)
    svn_pool_clear(revpool)

    # Open a root object representing the youngest (HEAD) revision.
    root = svn_fs_revision_root(fs, rev, revpool)

    # And the list of what changed in this revision.
    changes = svn_fs_paths_changed(root, revpool)
    i = 1
    marks = {}
    file_changes = []

    for path, change_type in changes.iteritems():
        c_t = ct_short[change_type.change_kind]
        if svn_fs_is_dir(root, path, revpool):
            continue
        if not path.startswith(trunk_path):
            # We don't handle branches. Or tags. Yet.
            pass
        else:
            if c_t == 'D':
                file_changes.append("D %s" % path.replace(trunk_path, ''))
            else:
                marks[i] = path.replace(trunk_path, '')
                file_changes.append("M 644 :%s %s" % (i, marks[i]))
                sys.stdout.write("blob\nmark :%s\n" % i)
                dump_file_blob(root, path, revpool)
                i += 1

    # Get the commit author and message
    props = svn_fs_revision_proplist(fs, rev, revpool)

    # Do the recursive crawl.
    if props.has_key('svn:author'):
        author = "%s <%s@localhost>" % (props['svn:author'],
                                        props['svn:author'])
    else:
        author = 'nobody <nobody@localhost>'

    if len(file_changes) == 0:
        svn_pool_destroy(revpool)
        sys.stderr.write("skipping.\n")
        return

    svndate = props['svn:date'][0:-8]
    commit_time = mktime(strptime(svndate, '%Y-%m-%dT%H:%M:%S'))
    sys.stdout.write("commit refs/heads/master\n")
    sys.stdout.write("committer %s %s -0000\n" % (author, int(commit_time)))
    sys.stdout.write("data %s\n" % len(props['svn:log']))
    sys.stdout.write(props['svn:log'])
    sys.stdout.write("\n")
    sys.stdout.write('\n'.join(file_changes))
    sys.stdout.write("\n\n")

    svn_pool_destroy(revpool)
    sys.stderr.write("done!\n")
except OSError:
    cli_parser.error("Can't create password file")
lchown(options.authdir + options.repo + '-passwdfile', 0, apachegid)
chmod(options.authdir + options.repo + '-passwdfile', 0640)
try:
    # we create the repository using the <rant>undocumented</rant> swig bindings.
    # took me a while to figure out how to do this.
    # thanks to folks at #subversion-dev for giving me some guidelines.
    core.apr_initialize()
    pool = core.svn_pool_create(None)
    repos.svn_repos_create(options.location + options.repo, '', '', None,
                           {fs.SVN_FS_CONFIG_FS_TYPE: options.filesystem},
                           pool)
    core.svn_pool_destroy(pool)
    core.apr_terminate()
except OSError:
    cli_parser.error("Failed to create the repository")
else:
    for dire in ['dav', 'db', 'locks']:
        lchown(options.location + options.repo + sep + dire,
               apacheuid, apachegid)
        for root, dirs, files in walk(options.location + options.repo + sep + dire):
            for name in files:
                lchown(join(root, name), apacheuid, apachegid)
            for name in dirs:
                lchown(join(root, name), apacheuid, apachegid)
    print "Reloading apache.."
    # there is no webserver.apache.reload() function yet :-)
def delete_pool(self, pool):
    """Deletes the passed-in pool.  Returns None, to assign to pool in caller."""
    core.svn_pool_destroy(pool)
    return None
def close(self):
    """Close the repository, aborting any uncommitted transactions"""
    core.svn_pool_destroy(self.pool)
    core.apr_terminate()
def __del__(self):
    core.svn_pool_destroy(self.pool)
    core.apr_terminate()
def __del__(self):
    core.svn_pool_destroy(self._pool)
def keywordPropertyUpdate(self, files):
    """ Does the following to text files:
        1) Sets svn:keywords property
        2) Replace SCCS keywords with SVN equivalents
        Note that while this will be treated as a separate transaction,
        the transaction date will be set to the last known date for the
        given file
    """
    # Break up files into groups in order to avoid
    # potential "Too many open files" errors thrown when
    # committing large changesets
    counter = 0
    filesPerCommit = 256
    for filename, version in files.iteritems():
        if counter % filesPerCommit == 0:
            if counter > 1:
                print "committing version ",
                print
                self._commit(revision,
                             subversionTime(localtz.localize(datetime.now())),
                             transaction, subpool)
                core.svn_pool_destroy(subpool)
            subpool = core.svn_pool_create(self.pool)
            (revision, transaction, root) = \
                self._revisionSetup(subpool, options.userid,
                                    "Automated SCCS keyword -> svn:keyword conversion\n")
        if isTextFilename(filename):
            print filename + ":"
            print " ... Setting svn:keywords property"
            fs.change_node_prop(root, filename, core.SVN_PROP_KEYWORDS,
                                "LastChangedDate LastChangedRevision LastChangedBy HeadURL Id",
                                subpool)
            fs.change_node_prop(root, filename, core.SVN_PROP_EOL_STYLE,
                                "native", subpool)
            oldFileContents = version.getFileContents("-k")
            newFileContents = keywordSubstitution(oldFileContents)
            if oldFileContents != newFileContents:
                print " ... keywords converted"
                handler, baton = fs.apply_textdelta(root,
                                                    version.getRepositoryName(),
                                                    None, None, subpool)
                svn.delta.svn_txdelta_send_string(newFileContents, handler,
                                                  baton, subpool)
                print " ... sending"
        else:
            print "skipping ", filename
        # Note we must unset sccs:sid since it no longer applies
        fs.change_node_prop(root, filename, 'sccs:sid', None, subpool)
        counter += 1

    # Commit any stragglers
    print "committing version ",
    print
    self._commit(revision,
                 subversionTime(localtz.localize(datetime.now())),
                 transaction, subpool)
    core.svn_pool_destroy(subpool)