def __init__(self, pool, path, txn):
    """Check transaction `txn` for case-insensitive filename clashes
    among newly added paths, reporting conflicts on stderr and setting
    the module-level `exitstat` flag."""
    self.pool = pool
    repos_ptr = repos.open(path, pool)
    self.fs_ptr = repos.fs(repos_ptr)
    self.look = SVNLook(self.pool, path, 'changed', None, txn)
    # Get the list of files and directories which have been added.
    changed = self.look.cmd_changed()
    if debug:
        for item in changed.added + changed.addeddir:
            print >> sys.stderr, 'Adding: ' + item.encode('utf-8')
    if self.numadded(changed) != 0:
        # Find the part of the file tree which they live in.
        changedroot = self.findroot(changed)
        if debug:
            print >> sys.stderr, 'Changedroot is ' + changedroot.encode('utf-8')
        # Get that part of the file tree.
        tree = self.look.cmd_tree(changedroot)
        if debug:
            print >> sys.stderr, 'File tree:'
            for path in tree.paths.keys():
                print >> sys.stderr, ' [%d] %s len %d' % (tree.paths[path],
                                                          path.encode('utf-8'),
                                                          len(path))
        # If a member of the paths hash has a count of more than one there is a
        # case conflict.
        for path in tree.paths.keys():
            if tree.paths[path] > 1:
                # Find out if this is one of the files being added, if not ignore it.
                addedfile = self.showfile(path, changedroot, changed)
                # Fix: replaced the `<>` inequality operator (removed in
                # Python 3, long deprecated) with `!=`.
                if addedfile != '':
                    print >> sys.stderr, "Case conflict: " + addedfile.encode('utf-8') \
                        + "\nA file with same filename but different cases already exist!"
                    globals()["exitstat"] = 1
def __init__(self, path, rev=None, txn=None, cmd=None):
    """Open a repository for inspection.

    path -- path to repository
    rev  -- revision number
    txn  -- name of transaction (usually the one about to be committed)
    cmd  -- if set, names the cmd_* method to execute

    txn takes precedence over rev; if both are None, the head revision
    is inspected.
    """
    repository = repos.open(core.svn_path_canonicalize(path))
    self.fs_ptr = repos.fs(repository)
    # An open transaction, when given, wins over any revision number.
    self.txn_ptr = fs.open_txn(self.fs_ptr, txn) if txn else None
    self.rev = fs.youngest_rev(self.fs_ptr) if rev is None else int(rev)
    if cmd is not None:
        getattr(self, 'cmd_' + cmd)()
def __init__(self, env=None, strict=False, enable=(), enable_agilo=True,
             env_key='agilo'):
    """Set up a test environment, creating a stub environment when none
    is supplied; opens the configured SVN repository if there is one."""
    self.env = env
    if env is None:
        self.env = self._create_stub_environment(enable, strict,
                                                 enable_agilo=enable_agilo,
                                                 env_key=env_key)
        # NOTE(review): purpose of this delay is not evident from the code
        # (possibly lets the new environment settle) — confirm before removing.
        import time
        time.sleep(0.5)
    else:
        # Prevent programmer errors: the stub-creation options are only
        # meaningful when we create the environment ourselves.
        assert strict == False
        assert enable == ()
        assert enable_agilo == True
    self.env_path = self.env.path
    self.objects = list()
    self.files = list()
    self._ticket_counter = 0
    try:
        self._upgrade_environment(self.env)
    except Exception:
        # Fix: narrowed from a bare `except:` which would also swallow
        # SystemExit/KeyboardInterrupt. Upgrade might have already happened.
        pass
    self.svn_repos = None
    try:
        repo_path = RepositoryManager(self.env).repository_dir
        self.svn_repos = repos.open(repo_path)
    except Exception:
        # Fix: narrowed from a bare `except:`. No repo configured.
        pass
def putfile(fname, rpath, uname="", commitmsg=""):
    """Create or update file `fname` in the repository at `rpath`.

    uname/commitmsg are recorded as the commit's author and log message.
    """
    rpath = core.svn_path_canonicalize(rpath)
    repos_ptr = repos.open(rpath)
    fsob = repos.fs(repos_ptr)
    # open a transaction against HEAD
    rev = fs.youngest_rev(fsob)
    txn = repos.fs_begin_txn_for_commit(repos_ptr, rev, uname, commitmsg)
    root = fs.txn_root(txn)
    kind = fs.check_path(root, fname)
    if kind == core.svn_node_none:
        print("file '%s' does not exist, creating..." % fname)
        fs.make_file(root, fname)
    elif kind == core.svn_node_dir:
        print("File '%s' is a dir." % fname)
        return
    else:
        print("Updating file '%s'" % fname)
    handler, baton = fs.apply_textdelta(root, fname, None, None)
    ### it would be nice to get an svn_stream_t. for now, just load in the
    ### whole file and shove it into the FS.
    # Fix: close the local file handle deterministically (the original
    # leaked it via open(...).read()); also dropped the unused rev_root.
    with open(fname, 'rb') as src:
        delta.svn_txdelta_send_string(src.read(), handler, baton)
    newrev = repos.fs_commit_txn(repos_ptr, txn)
    print("revision: %s" % newrev)
def blame(path, filename, rev=None): annotresult = {} path = core.svn_path_canonicalize(path) repos_ptr = repos.open(path) fsob = repos.fs(repos_ptr) if rev is None: rev = fs.youngest_rev(fsob) filedata = '' for i in xrange(0, rev + 1): root = fs.revision_root(fsob, i) if fs.check_path(root, filename) != core.svn_node_none: first = i break print "First revision is %d" % first print "Last revision is %d" % rev for i in xrange(first, rev + 1): previousroot = root root = fs.revision_root(fsob, i) if i != first: if not fs.contents_changed(root, filename, previousroot, filename): continue file = fs.file_contents(root, filename) previousdata = filedata filedata = '' while 1: data = core.svn_stream_read(file, CHUNK_SIZE) if not data: break filedata = filedata + data print "Current revision is %d" % i diffresult = difflib.ndiff(previousdata.splitlines(1), filedata.splitlines(1)) # print ''.join(diffresult) k = 0 for j in diffresult: if j[0] == ' ': if annotresult.has_key(k): k = k + 1 continue else: annotresult[k] = (i, j[2:]) k = k + 1 continue elif j[0] == '?': continue annotresult[k] = (i, j[2:]) if j[0] != '-': k = k + 1 # print ''.join(diffresult) # print annotresult for x in xrange(len(annotresult.keys())): sys.stdout.write("Line %d (rev %d):%s" % (x, annotresult[x][0], annotresult[x][1]))
def load(self, repo_path):
    """Dump the subtree at self.root as of the most recent revision not
    committed by 'svnadmin', recording that revision's author and date."""
    repo_path = core.svn_path_canonicalize(repo_path)
    repos_ptr = repos.open(repo_path)
    fs_ptr = repos.fs(repos_ptr)
    rev = fs.youngest_rev(fs_ptr)
    base_root = fs.revision_root(fs_ptr, 0)
    root = fs.revision_root(fs_ptr, rev)
    hist = fs.node_history(root, self.root)
    # Walk the node's history backwards; the dump itself runs at most once,
    # for the first non-'svnadmin' commit found (note the trailing break).
    while hist is not None:
        hist = fs.history_prev(hist, 0)
        dummy, rev = fs.history_location(hist)
        d = fs.revision_prop(fs_ptr, rev, core.SVN_PROP_REVISION_DATE)
        author = fs.revision_prop(fs_ptr, rev,
                                  core.SVN_PROP_REVISION_AUTHOR)
        if author == 'svnadmin':
            # Administrative commit: keep walking further back in history.
            continue
        self.last_author = author
        # Dividing by 1e6 — presumably converting microseconds to seconds;
        # TODO(review): confirm against svn_time_from_cstring's contract.
        self.last_date = core.svn_time_from_cstring(d) / 1000000
        self.last_rev = rev

        def authz_cb(root, path, pool):
            # Authorize every path for the delta walk.
            return 1

        editor = SvnDumperEditor(self)
        e_ptr, e_baton = delta.make_editor(editor)
        # Emit the whole subtree as a delta from the empty revision 0.
        repos.dir_delta(base_root, '', '', root, self.root, e_ptr, e_baton,
                        authz_cb, 0, 1, 0, 0)
        break
def load(self, repo_path):
    """Dump the subtree at self.root as of the most recent revision not
    committed by 'svnadmin', recording that revision's author and date."""
    repo_path = core.svn_path_canonicalize(repo_path)
    repos_ptr = repos.open(repo_path)
    fs_ptr = repos.fs(repos_ptr)
    rev = fs.youngest_rev(fs_ptr)
    base_root = fs.revision_root(fs_ptr, 0)
    root = fs.revision_root(fs_ptr, rev)
    hist = fs.node_history(root, self.root)
    # Walk the node's history backwards; the dump itself runs at most once,
    # for the first non-'svnadmin' commit found (note the trailing break).
    while hist is not None:
        hist = fs.history_prev(hist, 0)
        dummy, rev = fs.history_location(hist)
        d = fs.revision_prop(fs_ptr, rev, core.SVN_PROP_REVISION_DATE)
        author = fs.revision_prop(fs_ptr, rev,
                                  core.SVN_PROP_REVISION_AUTHOR)
        if author == 'svnadmin':
            # Administrative commit: keep walking further back in history.
            continue
        self.last_author = author
        # Dividing by 1e6 — presumably converting microseconds to seconds;
        # TODO(review): confirm against svn_time_from_cstring's contract.
        self.last_date = core.svn_time_from_cstring(d) / 1000000
        self.last_rev = rev

        def authz_cb(root, path, pool):
            # Authorize every path for the delta walk.
            return 1

        editor = SvnDumperEditor(self)
        e_ptr, e_baton = delta.make_editor(editor)
        # Emit the whole subtree as a delta from the empty revision 0.
        repos.dir_delta(base_root, '', '', root, self.root, e_ptr, e_baton,
                        authz_cb, 0, 1, 0, 0)
        break
def blame(path, filename, rev=None):
    """Print a per-line annotation (blame) of `filename` up to `rev`
    (defaulting to the youngest revision)."""
    annotresult = {}
    path = core.svn_path_canonicalize(path)
    repos_ptr = repos.open(path)
    fsob = repos.fs(repos_ptr)
    if rev is None:
        rev = fs.youngest_rev(fsob)
    # Find the first revision in which the file exists.
    first = None
    for i in range(0, rev + 1):
        root = fs.revision_root(fsob, i)
        if fs.check_path(root, filename) != core.svn_node_none:
            first = i
            break
    # Fix: `first` was left unbound (NameError below) when the file never
    # existed in 0..rev; fail with a clear message instead.
    if first is None:
        raise ValueError("'%s' not found in revisions 0..%d" % (filename, rev))
    print("First revision is %d" % first)
    print("Last revision is %d" % rev)
    filedata = ''
    for i in range(first, rev + 1):
        previousroot = root
        root = fs.revision_root(fsob, i)
        # Skip revisions where the contents did not change.
        if i != first:
            if not fs.contents_changed(root, filename, previousroot, filename):
                continue
        # Renamed from `file` to avoid shadowing the builtin.
        stream = fs.file_contents(root, filename)
        previousdata = filedata
        filedata = ''
        while True:
            data = core.svn_stream_read(stream, CHUNK_SIZE)
            if not data:
                break
            filedata = filedata + data
        print("Current revision is %d" % i)
        diffresult = difflib.ndiff(previousdata.splitlines(1),
                                   filedata.splitlines(1))
        k = 0
        for j in diffresult:
            if j[0] == ' ':
                # Unchanged line: keep the earliest annotation for it.
                if k in annotresult:
                    k = k + 1
                    continue
                else:
                    annotresult[k] = (i, j[2:])
                    k = k + 1
                    continue
            elif j[0] == '?':
                # ndiff hint lines carry no file content; ignore.
                continue
            annotresult[k] = (i, j[2:])
            if j[0] != '-':
                k = k + 1
    for x in range(len(annotresult.keys())):
        sys.stdout.write("Line %d (r%d):%s" % (x, annotresult[x][0],
                                               annotresult[x][1]))
def get_node_id(pool, repos_path, path, revision):
    """Return the unparsed node-revision ID of `path` at `revision`."""
    # Open the repository and its underlying filesystem.
    fs_ptr = repos.fs(repos.open(repos_path, pool))
    # Resolve the node ID within the requested revision's root.
    rev_root = fs.revision_root(fs_ptr, int(revision), pool)
    node = fs.node_id(rev_root, path, pool)
    return fs.unparse_id(node, pool)
def duplicate_ephemeral_txnprops(repos_path, txn_name):
    """Copy each svn:txn-* transaction property onto the corresponding
    svn:revision-prefixed property name, preserving its value."""
    fs_ptr = repos.fs(repos.open(repos_path))
    txn_t = fs.open_txn(fs_ptr, txn_name)
    prefix_len = len(core.SVN_PROP_TXN_PREFIX)
    for name, value in fs.txn_proplist(txn_t).items():
        if not name.startswith(core.SVN_PROP_TXN_PREFIX):
            continue
        new_name = core.SVN_PROP_REVISION_PREFIX + name[prefix_len:]
        fs.change_txn_prop(txn_t, new_name, value)
def connect(self, repository):
    """Connect with repository

    @todo: Create validation for repository url
    """
    # Open the repository and keep a handle on its filesystem object.
    self.repository = repos.open(repository)
    self.fs_ptr = repos.fs(self.repository)
def check_po(pool, repos_path, txn):
    """Drive a ChangeReceiver over the delta between txn `txn` and the
    revision it is based on."""

    def authz_cb(root, path, pool):
        # Authorize every path.
        return 1

    fs_ptr = repos.fs(repos.open(repos_path, pool))
    txn_ptr = fs.open_txn(fs_ptr, txn, pool)
    txn_root = fs.txn_root(txn_ptr, pool)
    base_rev = fs.txn_base_revision(txn_ptr)
    base_root = fs.revision_root(fs_ptr, base_rev, pool)
    editor = ChangeReceiver(txn_root, base_root, pool)
    e_ptr, e_baton = delta.make_editor(editor, pool)
    repos.dir_delta(base_root, "", "", txn_root, "",
                    e_ptr, e_baton, authz_cb, 0, 1, 0, 0, pool)
def dumpprops(path, filename='', rev=None):
    """Print the properties of `filename` at `rev` (default: youngest),
    walking the whole tree when it is a directory."""
    fsob = repos.fs(repos.open(core.svn_path_canonicalize(path)))
    if rev is None:
        rev = fs.youngest_rev(fsob)
    root = fs.revision_root(fsob, rev)
    print_props(root, filename)
    if fs.is_dir(root, filename):
        walk_tree(root, filename)
def check_po(pool, repos_path, txn):
    """Run a ChangeReceiver over the changes the transaction introduces
    relative to its base revision."""

    def authz_cb(root, path, pool):
        # Grant access to everything.
        return 1

    repository = repos.open(repos_path, pool)
    fs_ptr = repos.fs(repository)
    txn_ptr = fs.open_txn(fs_ptr, txn, pool)
    txn_root = fs.txn_root(txn_ptr, pool)
    base_root = fs.revision_root(fs_ptr,
                                 fs.txn_base_revision(txn_ptr), pool)
    editor = ChangeReceiver(txn_root, base_root, pool)
    e_ptr, e_baton = delta.make_editor(editor, pool)
    repos.dir_delta(base_root, '', '', txn_root, '', e_ptr, e_baton,
                    authz_cb, 0, 1, 0, 0, pool)
def __init__(self, path):
    """initialize an SVNShell object"""
    Cmd.__init__(self)
    canonical = core.svn_path_canonicalize(path)
    self.fs_ptr = repos.fs(repos.open(canonical))
    # Start out browsing the youngest revision (not a transaction).
    self.is_rev = 1
    self.txn = None
    self.rev = fs.youngest_rev(self.fs_ptr)
    self.root = fs.revision_root(self.fs_ptr, self.rev)
    self.path = "/"
    self._setup_prompt()
    self.cmdloop()
def __init__(self, pool, path, cmd, rev, txn):
    """Open repository `path`; bind to transaction `txn` when given,
    otherwise to revision `rev` (defaulting to the youngest)."""
    self.pool = pool
    self.fs_ptr = repos.fs(repos.open(path, pool))
    # A named transaction takes precedence over a revision number.
    self.txn_ptr = fs.open_txn(self.fs_ptr, txn, pool) if txn else None
    self.rev = fs.youngest_rev(self.fs_ptr, pool) if rev is None else rev
def __init__(self, path, cmd, rev, txn):
    """Open the repository, select a txn or revision, then dispatch to
    the cmd_<cmd> method."""
    self.fs_ptr = repos.fs(repos.open(path))
    # Prefer a named transaction; fall back to a revision (HEAD default).
    self.txn_ptr = fs.open_txn(self.fs_ptr, txn) if txn else None
    self.rev = fs.youngest_rev(self.fs_ptr) if rev is None else rev
    getattr(self, 'cmd_' + cmd)()
def main():
    """Entry point: verify the history of PATH-IN-REPOS at an optional
    REVISION (default: youngest)."""
    argc = len(sys.argv)
    if not 3 <= argc <= 4:
        print("Usage: %s PATH-TO-REPOS PATH-IN-REPOS [REVISION]" % sys.argv[0])
        sys.exit(1)
    fs_ptr = repos.fs(repos.open(sys.argv[1]))
    # Default to the youngest revision when none was supplied.
    revision = fs.youngest_rev(fs_ptr) if argc == 3 else int(sys.argv[3])
    check_history(fs_ptr, sys.argv[2], revision)
    sys.exit(0)
def __init__(self, path, cmd, rev, txn):
    """Canonicalize and open `path`, bind to `txn` or `rev`, then run
    the cmd_<cmd> method."""
    canonical = core.svn_path_canonicalize(path)
    self.fs_ptr = repos.fs(repos.open(canonical))
    # A named transaction wins over a revision number; HEAD is the
    # default revision.
    self.txn_ptr = fs.open_txn(self.fs_ptr, txn) if txn else None
    self.rev = fs.youngest_rev(self.fs_ptr) if rev is None else rev
    getattr(self, 'cmd_' + cmd)()
def setUp(self): """Load a Subversion repository""" # Isolate each test from the others with a fresh repository. # Eventually, we should move this into a shared TestCase base # class that all test cases in this directory can use. SubversionRepositoryTestSetup().setUp() ra.initialize() # Open repository directly for cross-checking self.repos = repos.open(REPOS_PATH) self.fs = repos.fs(self.repos) self.callbacks = ra.Callbacks() self.ra_ctx = ra.open2(REPOS_URL, self.callbacks, None, None)
def getfile(path, filename, rev=None):
    """Write the contents of `filename` at `rev` (default: youngest)
    to stdout."""
    fsob = repos.fs(repos.open(core.svn_path_canonicalize(path)))
    if rev is None:
        rev = fs.youngest_rev(fsob)
        print("Using youngest revision %s" % rev)
    root = fs.revision_root(fsob, rev)
    stream = fs.file_contents(root, filename)
    # Stream the file out in CHUNK_SIZE pieces.
    while 1:
        chunk = core.svn_stream_read(stream, CHUNK_SIZE)
        if not chunk:
            break
        sys.stdout.write(chunk)
def getfile(path, filename, rev=None):
    """Dump `filename`'s contents at `rev` (default: youngest) to stdout."""
    repository = repos.open(core.svn_path_canonicalize(path))
    filesystem = repos.fs(repository)
    if rev is None:
        rev = fs.youngest_rev(filesystem)
        print("Using youngest revision %s" % rev)
    root = fs.revision_root(filesystem, rev)
    contents = fs.file_contents(root, filename)
    # Copy the stream to stdout one CHUNK_SIZE piece at a time.
    while True:
        piece = core.svn_stream_read(contents, CHUNK_SIZE)
        if not piece:
            break
        sys.stdout.write(piece)
def setUp(self): """Load a Subversion repository""" # Isolate each test from the others with a fresh repository. # Eventually, we should move this into a shared TestCase base # class that all test cases in this directory can use. SubversionRepositoryTestSetup().setUp() # Open repository directly for cross-checking self.repos = repos.open(REPOS_PATH) self.fs = repos.fs(self.repos) self.path = core.svn_path_canonicalize(tempfile.mktemp()) client_ctx = client.create_context() rev = core.svn_opt_revision_t() rev.kind = core.svn_opt_revision_head client.checkout2(REPOS_URL, self.path, rev, rev, True, True, client_ctx) self.wc = wc.adm_open3(None, self.path, True, -1, None)
def validate_added_extensions(repos_path, txn_name, extensions, action):
    """Exit(1) if the transaction adds a file whose extension violates the
    configured policy; `action` is 'allow' (whitelist) or 'deny' (blacklist)."""
    # Open the repository and transaction.
    fs_ptr = repos.fs(repos.open(repos_path))
    txn_t = fs.open_txn(fs_ptr, txn_name)
    txn_root = fs.txn_root(txn_t)
    # Fetch the changes made in this transaction.
    changes = fs.svn_fs_paths_changed(txn_root)
    for path, change in changes.items():
        # Always allow deletions.
        if change.change_kind == fs.path_change_delete:
            continue
        # Always allow non-files.
        if fs.check_path(txn_root, path) != core.svn_node_file:
            continue
        # Only newly added (without history) files are checked.
        if change.change_kind not in (fs.path_change_replace,
                                      fs.path_change_add):
            continue
        copyfrom_rev, copyfrom_path = fs.copied_from(txn_root, path)
        if copyfrom_rev != core.SVN_INVALID_REVNUM:
            continue
        # ... then check it for a valid extension.
        base, ext = os.path.splitext(path)
        if not ext:
            continue
        ext = ext[1:].lower()
        if ((ext in extensions) and (action == 'deny')) \
           or ((ext not in extensions) and (action == 'allow')):
            sys.stderr.write(
                "Path '%s' has an extension disallowed by server "
                "configuration.\n" % (path))
            sys.exit(1)
def __init__(self, path, rev, txn):
    """Collect the sorted list of changes in `txn` (or revision `rev`)
    of the repository at `path`."""
    self.repo_root_path = core.svn_path_canonicalize(path)
    self.fs_ptr = repos.fs(repos.open(self.repo_root_path))
    # Select the transaction or revision; a txn takes precedence.
    self.txn_ptr = fs.open_txn(self.fs_ptr, txn) if txn else None
    self.rev = fs.youngest_rev(self.fs_ptr) if rev is None else rev
    # Root being examined, and the base it is compared against.
    if self.txn_ptr:
        self.root = fs.txn_root(self.txn_ptr)
        self.base_rev = fs.txn_base_revision(self.txn_ptr)
    else:
        self.root = fs.revision_root(self.fs_ptr, self.rev)
        self.base_rev = self.rev - 1
    self.base_root = fs.revision_root(self.fs_ptr, self.base_rev)
    # Replay the tree to collect every change, then sort them by path.
    editor = repos.ChangeCollector(self.fs_ptr, self.root)
    e_ptr, e_baton = delta.make_editor(editor)
    repos.replay(self.root, e_ptr, e_baton)
    self.changelist = editor.get_changes().items()
    self.changelist.sort()
def validate_added_extensions(repos_path, txn_name, extensions, action):
    """Abort the commit (exit 1) when the transaction adds a file whose
    extension is rejected by the 'allow'/'deny' policy in `extensions`."""
    # Open the repository and transaction.
    fs_ptr = repos.fs(repos.open(repos_path))
    txn_t = fs.open_txn(fs_ptr, txn_name)
    txn_root = fs.txn_root(txn_t)
    # Fetch the changes made in this transaction.
    changes = fs.svn_fs_paths_changed(txn_root)
    for path in changes.keys():
        change = changes[path]
        # Always allow deletions.
        if change.change_kind == fs.path_change_delete:
            continue
        # Always allow non-files.
        kind = fs.check_path(txn_root, path)
        if kind != core.svn_node_file:
            continue
        # If this was a newly added (without history) file ...
        if change.change_kind in (fs.path_change_replace,
                                  fs.path_change_add):
            copyfrom_rev, copyfrom_path = fs.copied_from(txn_root, path)
            if copyfrom_rev == core.SVN_INVALID_REVNUM:
                # ... then check it for a valid extension.
                base, ext = os.path.splitext(path)
                if ext:
                    ext = ext[1:].lower()
                    denied = (ext in extensions) and (action == 'deny')
                    not_allowed = (ext not in extensions) and (action == 'allow')
                    if denied or not_allowed:
                        sys.stderr.write("Path '%s' has an extension disallowed by server "
                                         "configuration.\n" % (path))
                        sys.exit(1)
def __init__(self, pool, path, txn):
    """Check transaction `txn` for case-insensitive filename clashes among
    newly added paths, reporting conflicts on stderr and setting the
    module-level `exitstat` flag."""
    self.pool = pool
    repos_ptr = repos.open(path, pool)
    self.fs_ptr = repos.fs(repos_ptr)
    self.look = SVNLook(self.pool, path, 'changed', None, txn)
    # Get the list of files and directories which have been added.
    changed = self.look.cmd_changed()
    if debug:
        for item in changed.added + changed.addeddir:
            print >> sys.stderr, 'Adding: ' + item.encode('utf-8')
    if self.numadded(changed) != 0:
        # Find the part of the file tree which they live in.
        changedroot = self.findroot(changed)
        if debug:
            print >> sys.stderr, 'Changedroot is ' + changedroot.encode(
                'utf-8')
        # Get that part of the file tree.
        tree = self.look.cmd_tree(changedroot)
        if debug:
            print >> sys.stderr, 'File tree:'
            for path in tree.paths.keys():
                print >> sys.stderr, ' [%d] %s len %d' % (
                    tree.paths[path], path.encode('utf-8'), len(path))
        # If a member of the paths hash has a count of more than one there is a
        # case conflict.
        for path in tree.paths.keys():
            if tree.paths[path] > 1:
                # Find out if this is one of the files being added, if not ignore it.
                addedfile = self.showfile(path, changedroot, changed)
                # Fix: replaced the `<>` inequality operator (removed in
                # Python 3, long deprecated) with `!=`.
                if addedfile != '':
                    print >> sys.stderr, "Case conflict: " + addedfile.encode(
                        'utf-8')
                    globals()["exitstat"] = 1
def __init__(self, env=None, strict=False, enable=(), enable_agilo=True,
             env_key='agilo'):
    """Set up a test environment, creating a stub environment when none
    is supplied; opens the configured SVN repository if there is one."""
    self.env = env
    if env is None:
        self.env = self._create_stub_environment(enable, strict,
                                                 enable_agilo=enable_agilo,
                                                 env_key=env_key)
        # NOTE(review): purpose of this delay is not evident from the code
        # (possibly lets the new environment settle) — confirm before removing.
        import time
        time.sleep(0.5)
    else:
        # Prevent programmer errors: the stub-creation options are only
        # meaningful when we create the environment ourselves.
        assert strict == False
        assert enable == ()
        assert enable_agilo == True
    self.env_path = self.env.path
    self.objects = list()
    self.files = list()
    self._ticket_counter = 0
    try:
        self._upgrade_environment(self.env)
    except Exception:
        # Fix: narrowed from a bare `except:` which would also swallow
        # SystemExit/KeyboardInterrupt. Upgrade might have already happened.
        pass
    self.svn_repos = None
    try:
        repo_path = RepositoryManager(self.env).repository_dir
        self.svn_repos = repos.open(repo_path)
    except Exception:
        # Fix: narrowed from a bare `except:`. No repo configured.
        pass
def __init__(self, repository_path):
    """Open the repository at `repository_path` and keep handles to both
    the repository object and its filesystem."""
    self.repos_ptr = repos.open(repository_path)
    self.fs_ptr = repos.fs(self.repos_ptr)
def setUp(self):
    """Load a Subversion repository"""
    self.repos = repos.open(REPOS_PATH)
    self.fs = repos.fs(self.repos)
    # Cache the head revision for the tests to use.
    self.rev = fs.youngest_rev(self.fs)
def MakeRecordsFromPath(srcrepo, srcrev, srcpath, dstpath, record_source):
    """Generate Records adding the contents of a given repo/rev/path.

    Args:
      srcrepo: path to the source repository
      srcrev: revision number
      srcpath: path within the source repository
      dstpath: destination path in the repository being filtered
      record_source: the source attribute of the Records generated

    Returns:
      a list of Records

    Raises:
      RuntimeError: if svnrdump seems to have failed

    This is the fundamental feature of a working svndumpfilter replacement:
    copyfrom operations referencing filtered-out paths are turned into a
    series of explicit 'add' Records. Earlier approaches (shelling out to
    svnlook per tree/file/property, or using svnrdump) were too slow or
    produced delta-only format-3 dumps that some consumers cannot read, so
    the tree is read directly through the SVN SWIG bindings. The same
    mechanism 'internalizes' svn:externals: the referenced files are added
    as though they had always been present; further filtering of these
    generated Records handles removal and revision changes (see
    externals.FromRev, externals.Diff, Diff).
    """
    srcrepo = svn_core.svn_path_canonicalize(srcrepo)
    fs = svn_repos.fs(svn_repos.open(srcrepo))
    root = svn_fs.revision_root(fs, srcrev)
    output = []
    # Depth-first walk of the subtree rooted at srcpath.
    stack = [srcpath]
    while stack:
        path = stack.pop()
        # Map the source path to its location under dstpath.
        if srcpath:
            node_path = dstpath + path[len(srcpath):]
        else:
            node_path = (dstpath + '/' + path) if path else dstpath
        if svn_fs.is_dir(root, path):
            record = Record(action='add', kind='dir', path=node_path,
                            source=record_source)
            # Queue the directory's children for the walk.
            prefix = (path + '/') if path else ''
            for name in svn_fs.dir_entries(root, path).keys():
                stack.append(prefix + name)
        else:
            record = Record(action='add', kind='file', path=node_path,
                            source=record_source)
            # Full (non-delta) file content plus its MD5 checksum header.
            stream = svn_fs.file_contents(root, path)
            record.text = _ReadSVNStream(stream)
            checksum = svn_fs.file_md5_checksum(root, path)
            record.headers['Text-content-md5'] = checksum.encode('hex_codec')
        # Stringify the node's property values.
        props = svn_fs.node_proplist(root, path)
        record.props = {key: str(value) for key, value in props.iteritems()}
        output.append(record)
    return output
return dir + '/' + name def ensure_names(path, names, txn_root): if (not names.has_key(path)): names[path] = [] for name, dirent in fs.dir_entries(txn_root, path).iteritems(): names[path].append([canonicalize(name), name]) names = {} # map of: key - path, value - list of two element lists of names clashes = {} # map of: key - path, value - map of: key - path, value - dummy native = locale.getlocale()[1] if not native: native = 'ascii' repos_handle = repos.open(sys.argv[1].decode(native).encode('utf-8')) fs_handle = repos.fs(repos_handle) txn_handle = fs.open_txn(fs_handle, sys.argv[2].decode(native).encode('utf-8')) txn_root = fs.txn_root(txn_handle) new_paths = get_new_paths(txn_root) for path in new_paths: dir, name = split_path(path) canonical = canonicalize(name) ensure_names(dir, names, txn_root) for name_pair in names[dir]: if (name_pair[0] == canonical and name_pair[1] != name): canonical_path = join_path(dir, canonical) if (not clashes.has_key(canonical_path)): clashes[canonical_path] = {} clashes[canonical_path][join_path(dir, name)] = True
def ensure_names(path, names, txn_root):
    """Populate names[path] once with [canonical, original] name pairs for
    every entry of that directory in the transaction tree."""
    if path not in names:
        names[path] = []
        for name, dirent in fs.dir_entries(txn_root, path).items():
            name = name.decode('utf-8')
            names[path].append([canonicalize(name), name])


names = {}    # map of: key - path, value - list of two element lists of names
clashes = {}  # map of: key - path, value - map of: key - path, value - dummy

# Locale is looked up but only used as a fallback indicator; repository
# APIs receive UTF-8 bytes.
native = locale.getlocale()[1]
if not native:
    native = 'ascii'

repos_handle = repos.open(sys.argv[1].encode('utf-8'))
fs_handle = repos.fs(repos_handle)
txn_handle = fs.open_txn(fs_handle, sys.argv[2].encode('utf-8'))
txn_root = fs.txn_root(txn_handle)

# Record every newly added path whose canonical form collides with a
# differently-cased sibling.
for path in get_new_paths(txn_root):
    dir, name = split_path(path)
    canonical = canonicalize(name)
    ensure_names(dir, names, txn_root)
    for name_pair in names[dir]:
        if name_pair[0] == canonical and name_pair[1] != name:
            canonical_path = join_path(dir, canonical)
            if canonical_path not in clashes:
                clashes[canonical_path] = {}
            clashes[canonical_path][join_path(dir, name)] = True
from svn import core, fs, delta, repos import codecs #argvs[1]:repository path #argvs[2]:changeset folder #argvs[3]:start revision #argvs[4]:end revision argvs = sys.argv #コマンドライン引数リスト argc = len(argvs) #引数の個数 if (argc != 5): #5でなければ出る usage(1) path = argvs[1] path = core.svn_path_canonicalize(path) repos_ptr = repos.open(path) fs_ptr = repos.fs(repos_ptr) changeset_folder = argvs[2] start_rev = int(argvs[3]) end_rev = int(argvs[4]) if start_rev > end_rev: sys.exit(exit) rev = fs.youngest_rev(fs_ptr) if start_rev > rev and end_rev > rev: sys.exit(exit) if start_rev > rev:
if (dir == '/'): return '/' + name return dir + '/' + name def ensure_names(path, names, txn_root): if (not names.has_key(path)): names[path] = [] for name, dirent in fs.dir_entries(txn_root, path).iteritems(): names[path].append([canonicalize(name), name]) names = {} # map of: key - path, value - list of two element lists of names clashes = {} # map of: key - path, value - map of: key - path, value - dummy native = locale.getlocale()[1] if not native: native = 'ascii' repos_handle = repos.open(sys.argv[1].decode(native).encode('utf-8')) fs_handle = repos.fs(repos_handle) txn_handle = fs.open_txn(fs_handle, sys.argv[2].decode(native).encode('utf-8')) txn_root = fs.txn_root(txn_handle) new_paths = get_new_paths(txn_root) for path in new_paths: dir, name = split_path(path) canonical = canonicalize(name) ensure_names(dir, names, txn_root) for name_pair in names[dir]: if (name_pair[0] == canonical and name_pair[1] != name): canonical_path = join_path(dir, canonical) if (not clashes.has_key(canonical_path)): clashes[canonical_path] = {} clashes[canonical_path][join_path(dir, name)] = True