# NOTE(review): tail of an option-parsing importer script; "parser", sys/os,
# run_app, crawl_revisions and the trunk/branches/tags/final_rev defaults are
# defined in an earlier chunk.  The opening "parser.add_option(" of the first
# call below was cut off by the chunk boundary and is restored here (its shape
# is unambiguous from the sibling calls).
parser.add_option("-f", "--final-rev", help="Final revision to import",
                  dest="final_rev", metavar="FINAL_REV", type="int")
parser.add_option("-t", "--trunk-path", help="Path in repo to /trunk",
                  dest="trunk_path", metavar="TRUNK_PATH")
parser.add_option("-b", "--branches-path", help="Path in repo to /branches",
                  dest="branches_path", metavar="BRANCHES_PATH")
parser.add_option("-T", "--tags-path", help="Path in repo to /tags",
                  dest="tags_path", metavar="TAGS_PATH")
(options, args) = parser.parse_args()

# Command-line values override the module-level defaults.
if options.trunk_path is not None:
    trunk_path = options.trunk_path
if options.branches_path is not None:
    branches_path = options.branches_path
if options.tags_path is not None:
    tags_path = options.tags_path
if options.final_rev is not None:
    final_rev = options.final_rev

# Exactly one positional argument (the repository path) is required.
if len(args) != 1:
    parser.print_help()
    sys.exit(2)

# Canonicalize (enough for Subversion, at least) the repository path.
repos_path = os.path.normpath(args[0])
if repos_path == ".":
    repos_path = ""

# Call the app-wrapper, which takes care of APR initialization/shutdown
# and the creation and cleanup of our top-level memory pool.
run_app(crawl_revisions, repos_path)
writer = codecs.getwriter("UTF-8")(checker, "strict") while 1: data = reader.read(core.SVN_STREAM_CHUNK_SIZE) if not data: break writer.write(data) if not checker.close(): sys.exit("PO format check failed for '" + path + "'") except UnicodeError: sys.exit("PO file is not in UTF-8: '" + path + "'") finally: core.svn_pool_destroy(subpool) def check_po(pool, repos_path, txn): def authz_cb(root, path, pool): return 1 fs_ptr = repos.fs(repos.open(repos_path, pool)) txn_ptr = fs.open_txn(fs_ptr, txn, pool) txn_root = fs.txn_root(txn_ptr, pool) base_root = fs.revision_root(fs_ptr, fs.txn_base_revision(txn_ptr), pool) editor = ChangeReceiver(txn_root, base_root, pool) e_ptr, e_baton = delta.make_editor(editor, pool) repos.dir_delta(base_root, "", "", txn_root, "", e_ptr, e_baton, authz_cb, 0, 1, 0, 0, pool) if __name__ == "__main__": assert len(sys.argv) == 3 core.run_app(check_po, sys.argv[1], sys.argv[2])
data = stream.read() # core.SVN_STREAM_CHUNK_SIZE) for line in data.splitlines(): if _tabs.match(line): core.svn_pool_destroy(subpool) msg = ("Python file contains lines that begin with tabs: '%s'\n" "There may be others as well." % (path,)) sys.stderr.write(msg) sys.exit(1) core.svn_pool_destroy(subpool) def check_tabs(pool, repos_path, txn): def authz_cb(root, path, pool): return 1 fs_ptr = repos.svn_repos_fs(repos.svn_repos_open(repos_path, pool)) txn_ptr = fs.open_txn(fs_ptr, txn, pool) txn_root = fs.txn_root(txn_ptr, pool) base_root = fs.revision_root(fs_ptr, fs.txn_base_revision(txn_ptr), pool) editor = ChangeReceiver(txn_root, base_root, pool) e_ptr, e_baton = delta.make_editor(editor, pool) repos.svn_repos_dir_delta(base_root, '', '', txn_root, '', e_ptr, e_baton, authz_cb, 0, 1, 0, 0, pool) if __name__ == '__main__': assert len(sys.argv) == 3 core.run_app(check_tabs, sys.argv[1], sys.argv[2])
while 1: data = reader.read(core.SVN_STREAM_CHUNK_SIZE) if not data: break writer.write(data) if not checker.close(): sys.exit("PO format check failed for '" + path + "'") except UnicodeError: sys.exit("PO file is not in UTF-8: '" + path + "'") finally: core.svn_pool_destroy(subpool) def check_po(pool, repos_path, txn): def authz_cb(root, path, pool): return 1 fs_ptr = repos.fs(repos.open(repos_path, pool)) txn_ptr = fs.open_txn(fs_ptr, txn, pool) txn_root = fs.txn_root(txn_ptr, pool) base_root = fs.revision_root(fs_ptr, fs.txn_base_revision(txn_ptr), pool) editor = ChangeReceiver(txn_root, base_root, pool) e_ptr, e_baton = delta.make_editor(editor, pool) repos.dir_delta(base_root, '', '', txn_root, '', e_ptr, e_baton, authz_cb, 0, 1, 0, 0, pool) if __name__ == '__main__': assert len(sys.argv) == 3 core.run_app(check_po, sys.argv[1], sys.argv[2])
print >> sys.stderr, ' %s != %s' % (dir, dir2) same = False pathpos += 1 if pathpos > 10: same = False return '/'.join(firstone[:pathpos-1]) def showfile(self, path, changedroot, changed): """Find the path which conflicts""" if changedroot == '': changedpath = path else: changedpath = changedroot + '/' + path for added in changed.added: if (string.lower(added) == string.lower(changedpath)): return added for added in changed.addeddir: if (string.lower(added) == string.lower(changedpath)): return added return '' if __name__ == "__main__": # Check for sane usage. if len(sys.argv) != 3: sys.stderr.write("Usage: %s REPOS TXN\n" % (os.path.basename(sys.argv[0]))) sys.exit(1) core.run_app(CheckCase, os.path.normpath(sys.argv[1]), sys.argv[2]) sys.exit(exitstat)
root = fs.svn_fs_revision_root(fs_ptr, youngest_rev, pool) if not fs.svn_fs_node_prop(root, path, core.SVN_PROP_NEEDS_LOCK, pool): sys.stderr.write( """Locking of path '%s' prohibited by repository policy (must have %s property set) """ % (path, core.SVN_PROP_NEEDS_LOCK) ) return 1 return 0 def _usage_and_exit(): sys.stderr.write( """ Usage: %s REPOS-DIR PATH This script, intended for use as a Subversion pre-lock hook, verifies that the PATH that USER is attempting to lock has the %s property set on it, returning success iff it does. """ % (os.path.basename(sys.argv[0]), core.SVN_PROP_NEEDS_LOCK) ) sys.exit(1) if __name__ == "__main__": if len(sys.argv) < 3: _usage_and_exit() sys.exit(core.run_app(main, sys.argv[1], sys.argv[2]))
def main(): kill_preds = 1 ### Until this thing learns to purge the 'changes', it ise ### basically useless (because dumps/loads are entirely ### 'changes'-table driven). So just bail. print "This script will, at the moment, destroy your repository." print "You don't really want that, right?" sys.exit(0) # Parse the commandline arguments. argc = len(sys.argv) if argc < 4: print __doc__ sys.exit(1) repos_path, path, revision = sys.argv[1:4] # Fetch the NODE-REV-ID of the PATH@REV which holds our interest. sys.stdout.write('Harvesting info for "%s" in r%s.\n' % \ (path, revision)) sys.stdout.write('-- Determining node revision ID... ') sys.stdout.flush() node_id = core.run_app(get_node_id, repos_path, path, revision) sys.stdout.write('done. [%s]\n' % node_id) # Scan the nodes table, parsing skels and building a node tree. nodes = {} sys.stdout.write('-- Building node tree... ') sys.stdout.flush() nodes_table = os.path.join(repos_path, 'db', 'nodes') nodes_db = bsddb3.btopen(nodes_table, 'w') for key in nodes_db.keys(): if key == 'next-key': continue value = nodes_db[key] prev_id, is_dir = parse_node_skel(value) nodes[key] = [prev_id, is_dir, []] for key in nodes.keys(): value = nodes[key] if value[0]: prev_value = nodes[value[0]] prev_value[2].append(key) nodes[value[0]] = prev_value sys.stdout.write('done. [found %d node(s)]\n' % len(nodes.keys())) # Determine the nodes we wish to purge. affected_nodes = [] sys.stdout.write('-- Building node purge list... ') sys.stdout.flush() if kill_preds: prev_id = node_id while nodes[prev_id][0]: prev_id = nodes[prev_id][0] append_successors(nodes, prev_id, affected_nodes) sys.stdout.write('done. [found %d node(s)]\n' % len(affected_nodes)) for id in affected_nodes: sys.stdout.write(' -- %s\n' % id) # Now, the hard part. We need to find every directory listing # that contains one of our to-be-purge nodes, and then remove # those nodes from the entries list. 
dirlists = [] sys.stdout.write('-- Fixing affected directory entries lists... ') sys.stdout.flush() strings_table = os.path.join(repos_path, 'db', 'strings') strings_db = bsddb3.btopen(strings_table, 'w') reps_table = os.path.join(repos_path, 'db', 'representations') reps_db = bsddb3.btopen(reps_table, 'w') dirs_fixed = 0 entries_fixed = 0 for key in nodes.keys(): value = nodes[key] if value[1]: node = nodes_db[key] kill_count = fix_affected_dirlists(node, reps_db, strings_db, affected_nodes, dirlists) if kill_count: sys.stdout.write(' -- %s\n' % key) dirs_fixed = dirs_fixed + 1 entries_fixed = entries_fixed + kill_count sys.stdout.write('done. [fixed %d entries in %d dirs]\n' \ % (entries_fixed, dirs_fixed)) sys.stdout.write('-- Removing deleted nodes... ') sys.stdout.flush() for key in affected_nodes: del (nodes_db[key]) sys.stdout.write('done. [removed %d nodes]\n' % len(affected_nodes)) # Cleanup after ourselves. strings_db.sync() nodes_db.sync() reps_db.sync() strings_db.close() reps_db.close() nodes_db.close()
def main(): kill_preds = 1 ### Until this thing learns to purge the 'changes', it ise ### basically useless (because dumps/loads are entirely ### 'changes'-table driven). So just bail. print "This script will, at the moment, destroy your repository." print "You don't really want that, right?" sys.exit(0) # Parse the commandline arguments. argc = len(sys.argv) if argc < 4: print __doc__ sys.exit(1) repos_path, path, revision = sys.argv[1:4] # Fetch the NODE-REV-ID of the PATH@REV which holds our interest. sys.stdout.write('Harvesting info for "%s" in r%s.\n' % (path, revision)) sys.stdout.write("-- Determining node revision ID... ") sys.stdout.flush() node_id = core.run_app(get_node_id, repos_path, path, revision) sys.stdout.write("done. [%s]\n" % node_id) # Scan the nodes table, parsing skels and building a node tree. nodes = {} sys.stdout.write("-- Building node tree... ") sys.stdout.flush() nodes_table = os.path.join(repos_path, "db", "nodes") nodes_db = bsddb3.btopen(nodes_table, "w") for key in nodes_db.keys(): if key == "next-key": continue value = nodes_db[key] prev_id, is_dir = parse_node_skel(value) nodes[key] = [prev_id, is_dir, []] for key in nodes.keys(): value = nodes[key] if value[0]: prev_value = nodes[value[0]] prev_value[2].append(key) nodes[value[0]] = prev_value sys.stdout.write("done. [found %d node(s)]\n" % len(nodes.keys())) # Determine the nodes we wish to purge. affected_nodes = [] sys.stdout.write("-- Building node purge list... ") sys.stdout.flush() if kill_preds: prev_id = node_id while nodes[prev_id][0]: prev_id = nodes[prev_id][0] append_successors(nodes, prev_id, affected_nodes) sys.stdout.write("done. [found %d node(s)]\n" % len(affected_nodes)) for id in affected_nodes: sys.stdout.write(" -- %s\n" % id) # Now, the hard part. We need to find every directory listing # that contains one of our to-be-purge nodes, and then remove # those nodes from the entries list. 
dirlists = [] sys.stdout.write("-- Fixing affected directory entries lists... ") sys.stdout.flush() strings_table = os.path.join(repos_path, "db", "strings") strings_db = bsddb3.btopen(strings_table, "w") reps_table = os.path.join(repos_path, "db", "representations") reps_db = bsddb3.btopen(reps_table, "w") dirs_fixed = 0 entries_fixed = 0 for key in nodes.keys(): value = nodes[key] if value[1]: node = nodes_db[key] kill_count = fix_affected_dirlists(node, reps_db, strings_db, affected_nodes, dirlists) if kill_count: sys.stdout.write(" -- %s\n" % key) dirs_fixed = dirs_fixed + 1 entries_fixed = entries_fixed + kill_count sys.stdout.write("done. [fixed %d entries in %d dirs]\n" % (entries_fixed, dirs_fixed)) sys.stdout.write("-- Removing deleted nodes... ") sys.stdout.flush() for key in affected_nodes: del (nodes_db[key]) sys.stdout.write("done. [removed %d nodes]\n" % len(affected_nodes)) # Cleanup after ourselves. strings_db.sync() nodes_db.sync() reps_db.sync() strings_db.close() reps_db.close() nodes_db.close()
def getRevisionInfo(revision):
    """Run the 'inner' app under an SVN pool wrapper and return its result.

    Expects REVISION to be a mapping with revision['repository']['path']
    (repository location) and revision['revision'] (revision number).
    """
    # path = '/Users/richard/tmp/test_repo'
    # rev = 2
    return core.run_app(inner,
                        str(revision['repository']['path']),
                        int(revision['revision']))
def main():
    """Entry point: validate argv and run tweak_dates on the repository.

    Usage: script REPOS.  Exits 1 with a usage message on bad arguments.
    """
    if len(sys.argv) != 2:
        print('USAGE: %s REPOS' % sys.argv[0])
        sys.exit(1)
    core.run_app(tweak_dates, sys.argv[1])
same = False pathpos += 1 if pathpos > 10: same = False return '/'.join(firstone[:pathpos - 1]) def showfile(self, path, changedroot, changed): """Find the path which conflicts""" if changedroot == '': changedpath = path else: changedpath = changedroot + '/' + path for added in changed.added: if (string.lower(added) == string.lower(changedpath)): return added for added in changed.addeddir: if (string.lower(added) == string.lower(changedpath)): return added return '' if __name__ == "__main__": # Check for sane usage. if len(sys.argv) != 3: sys.stderr.write("Usage: REPOS TXN\n" % (os.path.basename(sys.argv[0]))) sys.exit(1) core.run_app(CheckCase, os.path.normpath(sys.argv[1]), sys.argv[2]) sys.exit(exitstat)
metavar="sccs timezone", help="The timezone of the SCCS repository (default:UTC)") (options, args) = parser.parse_args() # Make sure that we have all of the options we need. if options.userid == None: parser.error("You must supply a user id with --user") if options.svn_repository == None: parser.error("You must supply a Subversion repository with --svn-repository") if options.sccs_repository == None: parser.error("You must supply a SCCS repository with --sccs-repository") if os.path.exists(options.svn_repository): print "Repository directory %s already exists!" % options.svn_repository print "Exiting." sys.exit(1) if options.sccs_timezone != None: localtz = timezone(options.sccs_timezone) svnadminResult = os.system("svnadmin create " + options.svn_repository) if svnadminResult != 0: print "svnadmin returned %s instead of 0" % svnadminResult sys.exit(2) core.run_app(run)
% (VIEWCVS_URL, urllib.quote(change.path), rev, urllib.quote(change.base_path[1:]), change.base_rev, urllib.quote(change.path), rev) else: ### (without history, show new file) action = MSG_ACTION_ADDED diff_url = '%s/%s?view=auto&rev=%d' \ % (VIEWCVS_URL, urllib.quote(change.path), rev) elif change.text_changed: ### Modified action = MSG_ACTION_MODIFIED diff_url = '%s/%s?view=diff&rev=%d&p1=%s&r1=%d&p2=%s&r2=%d' \ % (VIEWCVS_URL, urllib.quote(change.path), rev, urllib.quote(change.base_path[1:]), change.base_rev, urllib.quote(change.path), rev) if action: comment = comment + "%s: %s\n %s\n" % (action, path, diff_url) # Connect to the xmlrpc server, and transmit our data. Server(SCARAB_XMLRPC_URL).simple.addComment(log, author, comment, DISABLE_EMAILS) if __name__ == '__main__': if len(sys.argv) < 3: sys.stderr.write("Publish Subversion commits into to Scarab.") sys.stderr.write("\nUSAGE: %s REPOS-DIR REVISION\n" % (sys.argv[0])) sys.exit(1) sys.exit(core.run_app(main, sys.argv[1], int(sys.argv[2]), None))
def main():
    """Entry point: run the rsvn app under the SVN pool/APR wrapper."""
    core.run_app(rsvn)
metavar="svn repository directory", help="The location of the Subversion repository; this location will be destroyed!") parser.add_option("-i", "--sccs-repository", dest="sccs_repository", metavar="sccs root directory", help="The location of the SCCS repository") (options, args) = parser.parse_args() # Make sure that we have all of the options we need. if options.userid == None: parser.error("You must supply a user id with --user") if options.svn_repository == None: parser.error("You must supply a Subversion repository with --svn-repository") if options.sccs_repository == None: parser.error("You must supply a SCCS repository with --sccs-repository") if os.path.exists(options.svn_repository): print "Repository directory %s already exists!" % options.svn_repository print "Exiting." sys.exit(1) svnadminResult = os.system("svnadmin create --fs-type bdb " + options.svn_repository) if svnadminResult != 0: print "svnadmin returned %s instead of 0" % svnadminResult sys.exit(2) core.run_app(run)
# NOTE(review): tail of an option-parsing importer script (single-quote
# variant); "parser", sys/os, run_app, crawl_revisions and the option
# default globals are defined in an earlier chunk.
parser.add_option('-T', '--tags-path', help='Path in repo to /tags',
                  dest='tags_path', metavar='TAGS_PATH')
(options, args) = parser.parse_args()

# Command-line values override the module-level defaults.
if options.trunk_path is not None:
    trunk_path = options.trunk_path
if options.branches_path is not None:
    branches_path = options.branches_path
if options.tags_path is not None:
    tags_path = options.tags_path
if options.final_rev is not None:
    final_rev = options.final_rev

# Exactly one positional argument (the repository path) is required.
if len(args) != 1:
    parser.print_help()
    sys.exit(2)

# Canonicalize (enough for Subversion, at least) the repository path.
repos_path = os.path.normpath(args[0])
if repos_path == '.':
    repos_path = ''

# Call the app-wrapper, which takes care of APR initialization/shutdown
# and the creation and cleanup of our top-level memory pool.
run_app(crawl_revisions, repos_path)

# NOTE(review): concatenation residue from the extraction that produced
# this file -- kept verbatim.
########NEW FILE########
retval = test_props(cc.get_root_props()) if retval: return retval # Generate the path-based changes list. e_ptr, e_baton = delta.make_editor(cc, pool) repos.svn_repos_replay(root, e_ptr, e_baton, pool) # Call the path change validator. changes = cc.get_changes() paths = changes.keys() paths.sort(lambda a, b: core.svn_path_compare_paths(a, b)) for path in paths: change = changes[path] retval = test_path_change(path, change) if retval: return retval return 0 def _usage_and_exit(): sys.stderr.write("USAGE: %s REPOS-DIR TXN-NAME\n" % (sys.argv[0])) sys.exit(1) if __name__ == '__main__': if len(sys.argv) < 3: _usage_and_exit() sys.exit(core.run_app(main, sys.argv[1], sys.argv[2]))
data = stream.read() # core.SVN_STREAM_CHUNK_SIZE) for line in data.splitlines(): if _tabs.match(line): core.svn_pool_destroy(subpool) msg = ( "Python file contains lines that begin with tabs: '%s'\n" "There may be others as well." % (path, )) sys.stderr.write(msg) sys.exit(1) core.svn_pool_destroy(subpool) def check_tabs(pool, repos_path, txn): def authz_cb(root, path, pool): return 1 fs_ptr = repos.svn_repos_fs(repos.svn_repos_open(repos_path, pool)) txn_ptr = fs.open_txn(fs_ptr, txn, pool) txn_root = fs.txn_root(txn_ptr, pool) base_root = fs.revision_root(fs_ptr, fs.txn_base_revision(txn_ptr), pool) editor = ChangeReceiver(txn_root, base_root, pool) e_ptr, e_baton = delta.make_editor(editor, pool) repos.svn_repos_dir_delta(base_root, '', '', txn_root, '', e_ptr, e_baton, authz_cb, 0, 1, 0, 0, pool) if __name__ == '__main__': assert len(sys.argv) == 3 core.run_app(check_tabs, sys.argv[1], sys.argv[2])