def test_file_node_stat(self):
    """A fresh FileNode defaults to mode 0100644 (rw-r--r--)."""
    node = FileNode('foobar', 'empty... almost')
    mode = node.mode  # default should be 0100644

    flags_set = (stat.S_IRUSR, stat.S_IWUSR, stat.S_IRGRP, stat.S_IROTH)
    flags_clear = (stat.S_IWGRP, stat.S_IWOTH,
                   stat.S_IXUSR, stat.S_IXGRP, stat.S_IXOTH)
    for flag in flags_set:
        self.assertTrue(mode & flag)
    for flag in flags_clear:
        self.assertFalse(mode & flag)
def test_new_branch(self):
    """Committing with branch='foobar' creates and registers the branch."""
    # This check must not be removed to ensure the 'branches' LazyProperty
    # gets hit *before* the new 'foobar' branch got created:
    assert 'foobar' not in self.repo.branches

    self.imc.add(FileNode('docs/index.txt', content='Documentation\n'))
    new_tip = self.imc.commit(
        message=u'New branch: foobar', author=u'joe', branch='foobar')

    assert 'foobar' in self.repo.branches
    assert new_tip.branch == 'foobar'
def _get_commits(cls):
    """Fixture: three commits that add, change and remove foobar files."""
    return [
        {
            'message': 'Initial commit',
            'author': 'Joe Doe <*****@*****.**>',
            'date': datetime.datetime(2010, 1, 1, 20),
            'added': [
                FileNode('foobar', content='foobar'),
                FileNode('foobar2', content='foobar2'),
            ],
        },
        {
            'message': 'Changed foobar, added foobar3',
            'author': 'Jane Doe <*****@*****.**>',
            'date': datetime.datetime(2010, 1, 1, 21),
            'added': [FileNode('foobar3', content='foobar3')],
            'changed': [FileNode('foobar', 'FOOBAR')],
        },
        {
            'message': 'Removed foobar, changed foobar3',
            'author': 'Jane Doe <*****@*****.**>',
            'date': datetime.datetime(2010, 1, 1, 22),
            'changed': [FileNode('foobar3', content='FOOBAR\nFOOBAR\nFOOBAR\n')],
            'removed': [FileNode('foobar')],
        },
    ]
def test_modify_and_undo_modification_diff(self, pr_util): commits = [ { 'message': 'a' }, { 'message': 'b', 'added': [FileNode('file_b', 'test_content b\n')] }, { 'message': 'c', 'changed': [FileNode('file_b', 'test_content b modified\n')] }, { 'message': 'd', 'changed': [FileNode('file_b', 'test_content b\n')] }, ] # open a PR from a to b, adding file_b pull_request = pr_util.create_pull_request( commits=commits, target_head='a', source_head='b', revisions=['b'], name_suffix='per-file-review') # modify PR modifying file file_b pr_util.add_one_commit(head='c') assert_pr_file_changes(pull_request, added=[], modified=['file_b'], removed=[]) # move the head again to d, which rollbacks change, # meaning we should indicate no changes pr_util.add_one_commit(head='d') assert_pr_file_changes(pull_request, added=[], modified=[], removed=[])
def test_new_branch(self):
    """Committing on a new branch registers it and sets the tip's branch."""
    self.imc.add(FileNode('docs/index.txt', content='Documentation\n'))
    foobar_tip = self.imc.commit(
        message=u'New branch: foobar',
        author=u'joe',
        branch='foobar',
    )
    assert 'foobar' in self.repo.branches
    assert foobar_tip.branch == 'foobar'
    # 'foobar' should be the only branch that contains the new commit
    # NOTE(review): this only asserts that the first two branch heads
    # differ; it presumably relies on the repo having exactly two
    # branches and on a stable values() ordering — TODO confirm.
    branch = self.repo.branches.values()
    assert branch[0] != branch[1]
def test_comment_stays_unflagged_on_unchanged_diff(self, pr_util):
    """An inline comment on an untouched file survives a PR update."""
    commits = [
        {'message': 'a'},
        {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
        {'message': 'c', 'added': [FileNode('file_c', 'test_content\n')]},
    ]
    pull_request = pr_util.create_pull_request(
        commits=commits, target_head='a', source_head='b', revisions=['b'])
    pr_util.create_inline_comment(file_path='file_b')

    # Updating the PR to 'c' only adds file_c; file_b is unchanged, so
    # the comment must stay visible and not be marked outdated.
    pr_util.add_one_commit(head='c')

    assert_inline_comments(pull_request, visible=1, outdated=0)
def _get_file_node(self, commit_id, f_path):
    """
    Resolve ``f_path`` to a FileNode at ``commit_id``.

    Falls back to an empty FileNode attached to an EmptyCommit when the
    commit id looks empty/null or when the path does not exist in the
    commit.  Raises NodeError when the path resolves to a directory.
    """
    # '', None, 'None' and all-zero hashes (short and full form) all
    # mean "no real commit".
    if commit_id not in ['', None, 'None', '0' * 12, '0' * 40]:
        commit = c.rhodecode_repo.get_commit(commit_id=commit_id)
        try:
            node = commit.get_node(f_path)
            if node.is_dir():
                raise NodeError('%s path is a %s not a file'
                                % (node, type(node)))
        except NodeDoesNotExistError:
            # Path missing in this commit: serve an empty file node, but
            # carry over the real commit's metadata for display purposes.
            commit = EmptyCommit(
                commit_id=commit_id,
                idx=commit.idx,
                repo=commit.repository,
                alias=commit.repository.alias,
                message=commit.message,
                author=commit.author,
                date=commit.date)
            node = FileNode(f_path, '', commit=commit)
    else:
        # Empty repository / null commit: synthesize both commit and node.
        commit = EmptyCommit(
            repo=c.rhodecode_repo,
            alias=c.rhodecode_repo.alias)
        node = FileNode(f_path, '', commit=commit)
    return node
def test_merge_rebase_source_is_updated_bookmark(self, vcsbackend_hg):
    """
    Merging a bookmarked source into a target with rebase enabled
    should rebase (new commit ids) and end up with exactly 4 commits.
    """
    target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
    source_repo = vcsbackend_hg.clone_repo(target_repo)
    vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
    vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
    imc = source_repo.in_memory_commit
    imc.add(FileNode('file_x', content=source_repo.name))
    imc.commit(
        message=u'Automatic commit from repo merge test',
        author=u'Automatic')
    target_commit = target_repo.get_commit()
    source_commit = source_repo.get_commit()

    # Extra commit on the source after the bookmark's target commit was
    # captured above.
    vcsbackend_hg.add_file(source_repo, 'LICENSE', 'LICENSE Info')

    default_branch = target_repo.DEFAULT_BRANCH_NAME
    bookmark_name = 'bookmark'
    source_repo._update(default_branch)
    source_repo.bookmark(bookmark_name)

    target_ref = Reference('branch', default_branch, target_commit.raw_id)
    source_ref = Reference('book', bookmark_name, source_commit.raw_id)
    workspace = 'test-merge'

    # Force the rebase strategy for this merge.
    with mock.patch.object(rhodecode.lib.vcs.conf.settings,
                           'HG_USE_REBASE_FOR_MERGING', return_value=True):
        merge_response = target_repo.merge(
            target_ref, source_repo, source_ref, workspace,
            'test user', '*****@*****.**', 'merge message 1',
            dry_run=False)
    expected_merge_response = MergeResponse(
        True, True, merge_response.merge_commit_id,
        MergeFailureReason.NONE)
    assert merge_response == expected_merge_response

    # Re-open the target to see the post-merge state.
    target_repo = backends.get_backend(vcsbackend_hg.alias)(
        target_repo.path)
    last_commit = target_repo.get_commit()
    assert last_commit.message == source_commit.message
    assert last_commit.author == source_commit.author
    # This checks that we effectively did a rebase
    assert last_commit.raw_id != source_commit.raw_id

    # Check the target has only 4 commits: 2 were already in target and
    # only two should have been added
    assert len(target_repo.commit_ids) == 2 + 2
def wrapped_diff(filenode_old, filenode_new, cut_off_limit=None,
                 ignore_whitespace=True, line_context=3,
                 enable_comments=False):
    """
    returns a wrapped diff into a table, checks for cut_off_limit
    and presents proper message

    :param filenode_old: previous file node; ``None`` means the file is
        new and an empty node at EmptyChangeset is substituted.
    :param cut_off_limit: max file size to diff; ``-1`` disables the
        limit, ``None`` means "no limit configured".
    :returns: tuple of (size, cs1, cs2, diff html, stats)
    """
    if filenode_old is None:
        filenode_old = FileNode(filenode_new.path, '', EmptyChangeset())

    if filenode_old.is_binary or filenode_new.is_binary:
        # Binary content: never diffed, just a placeholder message.
        diff = wrap_to_table(_('Binary file'))
        stats = (0, 0)
        size = 0

    elif cut_off_limit != -1 and (cut_off_limit is None or
        (filenode_old.size < cut_off_limit
         and filenode_new.size < cut_off_limit)):
        # Within limits (or no limit set): produce the real diff.
        f_gitdiff = get_gitdiff(filenode_old, filenode_new,
                                ignore_whitespace=ignore_whitespace,
                                context=line_context)
        diff_processor = DiffProcessor(f_gitdiff, format='gitdiff')

        diff = diff_processor.as_html(enable_comments=enable_comments)
        stats = diff_processor.stat()
        size = len(diff or '')
    else:
        # Too big to render inline.
        diff = wrap_to_table(_('Changeset was too big and was cut off, use '
                               'diff menu to display this diff'))
        stats = (0, 0)
        size = 0
    if not diff:
        # Empty diff: either a submodule change or genuinely no changes.
        submodules = filter(lambda o: isinstance(o, SubModuleNode),
                            [filenode_new, filenode_old])
        if submodules:
            diff = wrap_to_table(escape('Submodule %r' % submodules[0]))
        else:
            diff = wrap_to_table(_('No changes detected'))

    cs1 = filenode_old.changeset.raw_id
    cs2 = filenode_new.changeset.raw_id

    return size, cs1, cs2, diff, stats
def test_comment_flagged_on_change(self, pr_util, change, content):
    """
    Any kind of change to the commented file (``change`` is the
    parametrized op key, e.g. 'changed'/'removed') must flag the inline
    comment as outdated.
    """
    commits = [
        {'message': 'a'},
        {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
        # The parametrized op is the dict key; content varies with it.
        {'message': 'c', change: [FileNode('file_b', content)]},
    ]
    pull_request = pr_util.create_pull_request(
        commits=commits, target_head='a', source_head='b', revisions=['b'])
    pr_util.create_inline_comment(file_path='file_b')

    with outdated_comments_patcher():
        pr_util.add_one_commit(head='c')

    assert_inline_comments(pull_request, visible=0, outdated=1)
def _get_commits(cls):
    """Fixture: two commits, the first containing a symlink node."""
    return [
        {
            'message': 'Initial',
            'author': 'Joe Doe <*****@*****.**>',
            'date': datetime.datetime(2010, 1, 1, 20),
            'added': [
                FileNode('foobar/static/js/admin/base.js', content='base'),
                # mode 0120000 (Python 2 octal) marks a symlink entry
                FileNode(
                    'foobar/static/admin', content='admin',
                    mode=0120000),  # this is a link
                FileNode('foo', content='foo'),
            ],
        },
        {
            'message': 'Second',
            'author': 'Joe Doe <*****@*****.**>',
            'date': datetime.datetime(2010, 1, 1, 22),
            'added': [
                FileNode('foo2', content='foo2'),
            ],
        },
    ]
def _get_commits(cls):
    """Fixture: a single initial commit adding ``cls.fname``."""
    return [{
        'message': 'Initial commit',
        'author': 'Joe Doe <*****@*****.**>',
        'date': datetime.datetime(2010, 1, 1, 20),
        'added': [FileNode(cls.fname, content='Foobar')],
    }]
def test_file_node_is_executable(self):
    """is_executable() reflects the execute bits of the node mode."""
    node = FileNode('foobar', 'empty... almost', mode=0100755)  # rwxr-xr-x
    self.assertTrue(node.is_executable())

    node = FileNode('foobar', 'empty... almost', mode=0100500)  # r-x------
    self.assertTrue(node.is_executable())

    node = FileNode('foobar', 'empty... almost', mode=0100644)  # rw-r--r--
    self.assertFalse(node.is_executable())
def _get_commits(cls):
    """Yield five commits, 12 hours apart, each adding one numbered file."""
    base_date = datetime.datetime(2010, 1, 1, 20)
    for idx in xrange(5):
        yield {
            'message': 'Commit %d' % idx,
            'author': 'Joe Doe <*****@*****.**>',
            'date': base_date + datetime.timedelta(hours=12 * idx),
            'added': [
                FileNode('%d/file_%d.txt' % (idx, idx),
                         content='Foobar %d' % idx),
            ],
        }
def test_comment_stays_unflagged_on_change_below(self, pr_util):
    """Appending lines below a commented hunk must not outdate the comment."""
    base_content = ''.join('line {}\n'.format(i) for i in range(10))
    extended_content = base_content + 'new_line_at_end\n'
    commits = [
        {'message': 'a'},
        {'message': 'b', 'added': [FileNode('file_b', base_content)]},
        {'message': 'c', 'changed': [FileNode('file_b', extended_content)]},
    ]
    pull_request = pr_util.create_pull_request(
        commits=commits, target_head='a', source_head='b', revisions=['b'])
    pr_util.create_inline_comment(file_path='file_b')

    pr_util.add_one_commit(head='c')

    assert_inline_comments(pull_request, visible=1, outdated=0)
def test_get_changeset(self):
    """
    Checking out a branch switches the workdir changeset; switching
    back to the default branch restores the previous head.
    """
    old_head = self.repo.get_changeset()
    self.imc.add(FileNode('docs/index.txt', content='Documentation\n'))
    head = self.imc.commit(
        message=u'New branch: foobar',
        author=u'joe',
        branch='foobar',
    )
    # Committing on 'foobar' must not implicitly switch the workdir.
    self.assertEqual(self.repo.workdir.get_branch(), self.default_branch)
    self.repo.workdir.checkout_branch('foobar')
    self.assertEqual(self.repo.workdir.get_changeset(), head)

    # Make sure that old head is still there after update to default branch
    self.repo.workdir.checkout_branch(self.default_branch)
    self.assertEqual(self.repo.workdir.get_changeset(), old_head)
def test_checkout_branch(self): from rhodecode.lib.vcs.exceptions import BranchDoesNotExistError # first, 'foobranch' does not exist. self.assertRaises(BranchDoesNotExistError, self.repo.workdir.checkout_branch, branch='foobranch') # create new branch 'foobranch'. self.imc.add(FileNode('file1', content='blah')) self.imc.commit(message=u'asd', author=u'john', branch='foobranch') # go back to the default branch self.repo.workdir.checkout_branch() self.assertEqual(self.repo.workdir.get_branch(), self.backend_class.DEFAULT_BRANCH_NAME) # checkout 'foobranch' self.repo.workdir.checkout_branch('foobranch') self.assertEqual(self.repo.workdir.get_branch(), 'foobranch')
def test_merge_target_is_bookmark(self, vcsbackend_hg):
    """
    Merging into a bookmarked target creates a merge commit and moves
    the bookmark to it.
    """
    target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
    source_repo = vcsbackend_hg.clone_repo(target_repo)
    vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
    vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
    imc = source_repo.in_memory_commit
    imc.add(FileNode('file_x', content=source_repo.name))
    imc.commit(
        message=u'Automatic commit from repo merge test',
        author=u'Automatic')
    target_commit = target_repo.get_commit()
    source_commit = source_repo.get_commit()

    default_branch = target_repo.DEFAULT_BRANCH_NAME
    bookmark_name = 'bookmark'
    target_repo._update(default_branch)
    target_repo.bookmark(bookmark_name)

    # The target ref is the bookmark, the source ref a plain branch.
    target_ref = Reference('book', bookmark_name, target_commit.raw_id)
    source_ref = Reference('branch', default_branch, source_commit.raw_id)
    workspace = 'test-merge'

    merge_response = target_repo.merge(
        target_ref, source_repo, source_ref, workspace,
        'test user', '*****@*****.**', 'merge message 1',
        dry_run=False)
    expected_merge_response = MergeResponse(
        True, True, merge_response.merge_commit_id,
        MergeFailureReason.NONE)
    assert merge_response == expected_merge_response

    # Re-open the target to inspect the post-merge history.
    target_repo = backends.get_backend(vcsbackend_hg.alias)(
        target_repo.path)
    target_commits = list(target_repo.get_commits())
    commit_ids = [c.raw_id for c in target_commits[:-1]]
    assert source_ref.commit_id in commit_ids
    assert target_ref.commit_id in commit_ids

    merge_commit = target_commits[-1]
    assert merge_commit.raw_id == merge_response.merge_commit_id
    assert merge_commit.message.strip() == 'merge message 1'
    assert merge_commit.author == 'test user <*****@*****.**>'

    # Check the bookmark was updated in the target repo
    assert (
        target_repo.bookmarks[bookmark_name] ==
        merge_response.merge_commit_id)
def test_mimetype(self):
    """Mimetype detection for known extensions and manual overrides."""
    py_node = FileNode('test.py')
    tar_node = FileNode('test.tar.gz')

    self.assertEqual(py_node.mimetype, 'text/x-python')
    self.assertEqual(py_node.get_mimetype(), ('text/x-python', None))
    self.assertEqual(tar_node.mimetype, 'application/x-tar')
    self.assertEqual(tar_node.get_mimetype(), ('application/x-tar', 'gzip'))

    custom = 'CustomExtension'

    # A malformed one-element override makes get_mimetype() raise.
    bad_override = FileNode('myfile2')
    bad_override._mimetype = [custom]
    self.assertRaises(NodeError, bad_override.get_mimetype)

    # A two-element override is honoured and returned as-is.
    good_override = FileNode('myfile3')
    good_override._mimetype = [custom, custom]
    self.assertEqual(good_override.mimetype, custom)
    self.assertEqual(good_override.get_mimetype(), [custom, custom])
def test_multiple_commits(self):
    """N sequential commits yield N commits, also after re-opening the repo."""
    num_commits = 3
    previous = None
    for idx in xrange(num_commits):
        fname = 'file%s' % str(idx).rjust(5, '0')
        self.imc.add(FileNode(fname, content='foobar\n' * idx))
        commit = self.imc.commit(
            u"Commit no. %s" % (idx + 1), author=u'vcs')
        assert previous != commit
        previous = commit

    # Check commit number for same repo
    assert len(self.repo.commit_ids) == num_commits

    # Check commit number for recreated repo
    reopened = self.Backend(self.repo_path)
    assert len(reopened.commit_ids) == num_commits
def prepare_for_success(self, vcsbackend):
    """
    Set up a target repo and a diverged clone as merge source, plus
    branch refs and a workspace, for a merge that should succeed.
    """
    self.target_repo = vcsbackend.create_repo(number_of_commits=1)
    self.source_repo = vcsbackend.clone_repo(self.target_repo)
    # Diverge both sides: one new file in each repo.
    vcsbackend.add_file(self.target_repo, 'README_MERGE1', 'Version 1')
    vcsbackend.add_file(self.source_repo, 'README_MERGE2', 'Version 2')
    imc = self.source_repo.in_memory_commit
    imc.add(FileNode('file_x', content=self.source_repo.name))
    imc.commit(
        message=u'Automatic commit from repo merge test',
        author=u'Automatic')
    self.target_commit = self.target_repo.get_commit()
    self.source_commit = self.source_repo.get_commit()
    # This only works for Git and Mercurial
    default_branch = self.target_repo.DEFAULT_BRANCH_NAME
    self.target_ref = Reference(
        'branch', default_branch, self.target_commit.raw_id)
    self.source_ref = Reference(
        'branch', default_branch, self.source_commit.raw_id)
    self.workspace = 'test-merge'
def test_add_in_bulk(self):
    """Adding several nodes in one imc.add() call commits them all at once."""
    rev_count = len(self.repo.revisions)
    to_add = [
        FileNode(node.path, content=node.content)
        for node in self.nodes
    ]
    # Bulk add: all nodes in a single add() call.
    self.imc.add(*to_add)
    message = u'Added: %s' % ', '.join((node.path for node in self.nodes))
    author = unicode(self.__class__)
    changeset = self.imc.commit(message=message, author=author)

    newtip = self.repo.get_changeset()
    self.assertEqual(changeset, newtip)
    self.assertEqual(rev_count + 1, len(self.repo.revisions))
    self.assertEqual(newtip.message, message)
    self.assertEqual(newtip.author, author)
    # Committing must reset the in-memory staging areas.
    self.assertTrue(not any((
        self.imc.added,
        self.imc.changed,
        self.imc.removed
    )))
    for node in to_add:
        self.assertEqual(newtip.get_node(node.path).content, node.content)
def test_multiple_commits(self): N = 3 # number of commits to perform last = None for x in xrange(N): fname = 'file%s' % str(x).rjust(5, '0') content = 'foobar\n' * x node = FileNode(fname, content=content) self.imc.add(node) commit = self.imc.commit(u"Commit no. %s" % (x + 1), author=u'vcs') self.assertTrue(last != commit) last = commit # Check commit number for same repo self.assertEqual(len(self.repo.revisions), N) # Check commit number for recreated repo backend = self.get_backend() repo = backend(self.repo_path) self.assertEqual(len(repo.revisions), N)
def create_node(self, repo, repo_name, cs, user, author, message, content,
                f_path):
    """
    Commit a new file node at ``f_path`` on top of changeset ``cs`` and
    log a local push action for it.

    :param content: str/unicode or an open file / cStringIO object.
    :raises Exception: when ``content`` is of an unsupported type.
    """
    if repo.alias == 'hg':
        from rhodecode.lib.vcs.backends.hg import MercurialInMemoryChangeset as IMC
    elif repo.alias == 'git':
        from rhodecode.lib.vcs.backends.git import GitInMemoryChangeset as IMC
    # decoding here will force that we have proper encoded values
    # in any other case this will throw exceptions and deny commit
    if isinstance(content, (basestring,)):
        content = safe_str(content)
    elif isinstance(content, (file, cStringIO.OutputType,)):
        content = content.read()
    else:
        raise Exception('Content is of unrecognized type %s' % (
            type(content)
        ))

    message = safe_unicode(message)
    author = safe_unicode(author)
    path = safe_str(f_path)
    m = IMC(repo)

    if isinstance(cs, EmptyChangeset):
        # EmptyChangeset means we're editing an empty (new) repository
        parents = None
    else:
        parents = [cs]

    m.add(FileNode(path, content=content))
    tip = m.commit(message=message,
                   author=author,
                   parents=parents, branch=cs.branch)
    new_cs = tip.short_id
    action = 'push_local:%s' % new_cs
    action_logger(user, action, repo_name)

    self.mark_for_invalidation(repo_name)
    # NOTE(review): unlike the sibling create_node implementation, this
    # variant does not return ``tip`` — presumably callers ignore the
    # result; confirm before relying on a return value.
def create_node(self, repo, repo_name, cs, user, author, message, content,
                f_path):
    """
    Commit a new file node at ``f_path`` on top of changeset ``cs``,
    invalidate caches and fire the local-push handler.

    :param content: str/unicode or an open file / cStringIO object.
    :returns: the new tip changeset.
    :raises Exception: when ``content`` is of an unsupported type.
    """
    user = self._get_user(user)
    IMC = self._get_IMC_module(repo.alias)

    # decoding here will force that we have proper encoded values
    # in any other case this will throw exceptions and deny commit
    if isinstance(content, (basestring,)):
        content = safe_str(content)
    elif isinstance(content, (file, cStringIO.OutputType,)):
        content = content.read()
    else:
        raise Exception('Content is of unrecognized type %s' % (
            type(content)
        ))

    message = safe_unicode(message)
    author = safe_unicode(author)
    path = safe_str(f_path)
    m = IMC(repo)

    if isinstance(cs, EmptyChangeset):
        # EmptyChangeset means we're editing an empty (new) repository
        parents = None
    else:
        parents = [cs]

    m.add(FileNode(path, content=content))
    tip = m.commit(message=message,
                   author=author,
                   parents=parents, branch=cs.branch)

    self.mark_for_invalidation(repo_name)
    self._handle_push(repo,
                      username=user.username,
                      action='push_local',
                      repo_name=repo_name,
                      revisions=[tip.raw_id])
    return tip
def get_node(self, path):
    """
    Return the :class:`Node` object at ``path``, caching the instance.

    If there is no node at the given ``path``,
    ``NodeDoesNotExistError`` is raised.
    """
    path = self._fix_path(path)
    if path in self.nodes:
        return self.nodes[path]

    if path in self._file_paths:
        node = FileNode(path, commit=self)
    elif path in self._dir_paths:
        node = RootNode(commit=self) if path == '' \
            else DirNode(path, commit=self)
    else:
        raise self.no_node_at_path(path)

    # cache node
    self.nodes[path] = node
    return node
def commit_change(self, repo, repo_name, commit, user, author, message,
                  content, f_path):
    """
    Commits changes

    :param repo: SCM instance
    :raises IMCCommitError: when the in-memory commit fails.
    :returns: the new tip commit.
    """
    user = self._get_user(user)

    # decoding here will force that we have proper encoded values
    # in any other case this will throw exceptions and deny commit
    content = safe_str(content)
    path = safe_str(f_path)
    # message and author needs to be unicode
    # proper backend should then translate that into required type
    message = safe_unicode(message)
    author = safe_unicode(author)
    imc = repo.in_memory_commit
    # Preserve the file's original mode when changing its content.
    imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
    try:
        # TODO: handle pre-push action !
        tip = imc.commit(
            message=message, author=author,
            parents=[commit], branch=commit.branch)
    except Exception as e:
        log.error(traceback.format_exc())
        raise IMCCommitError(str(e))
    finally:
        # always clear caches, if commit fails we want fresh object also
        self.mark_for_invalidation(repo_name)

    # We trigger the post-push action
    hooks_utils.trigger_post_push_hook(
        username=user.username, action='push_local', repo_name=repo_name,
        repo_alias=repo.alias, commit_ids=[tip.raw_id])
    return tip
def get_node(self, path):
    """
    Return the node (file, directory, submodule or root) at ``path`` for
    this git changeset, caching created nodes on ``self.nodes``.

    :raises NodeDoesNotExistError: when no object exists at ``path`` in
        this revision.
    """
    if isinstance(path, unicode):
        path = path.encode('utf-8')
    path = self._fix_path(path)
    # Idiomatic membership test (was `not path in self.nodes`).
    if path not in self.nodes:
        try:
            id_ = self._get_id_for_path(path)
        except ChangesetError:
            raise NodeDoesNotExistError("Cannot find one of parents' "
                                        "directories for a given path: %s"
                                        % path)

        # Gitlink entries (submodules) are identified by their stat mode.
        # Lookup hoisted once instead of the former `_GL` lambda
        # (PEP 8 E731) plus a second dict access.
        stat_mode = self._stat_modes.get(path)
        if stat_mode and objects.S_ISGITLINK(stat_mode):
            node = SubModuleNode(path, url=None, changeset=id_,
                                 alias=self.repository.alias)
        else:
            obj = self.repository._repo.get_object(id_)

            if isinstance(obj, objects.Tree):
                if path == '':
                    node = RootNode(changeset=self)
                else:
                    node = DirNode(path, changeset=self)
                node._tree = obj
            elif isinstance(obj, objects.Blob):
                node = FileNode(path, changeset=self)
                node._blob = obj
            else:
                raise NodeDoesNotExistError("There is no file nor directory "
                                            "at the given path '%s' at revision %s"
                                            % (path, self.short_id))
        # cache node
        self.nodes[path] = node
    return self.nodes[path]
def get_nodes(self, path):
    """
    Return sorted DirNode/FileNode (and SubModuleNode) objects for the
    directory at ``path`` in this commit, populating the node cache.

    :raises CommitError: when ``path`` is not a directory, or a tree
        entry has an unexpected type.
    """
    if self._get_kind(path) != NodeKind.DIR:
        raise CommitError(
            "Directory does not exist for commit %s at "
            " '%s'" % (self.raw_id, path))
    path = self._fix_path(path)
    id_, _ = self._get_id_for_path(path)
    tree_id = self._remote[id_]['id']
    dirnodes = []
    filenodes = []
    alias = self.repository.alias
    for name, stat_, id_, type_ in self._remote.tree_items(tree_id):
        if type_ == 'link':
            # Submodule entry ('link' type in the tree listing).
            url = self._get_submodule_url('/'.join((path, name)))
            dirnodes.append(
                SubModuleNode(name, url=url, commit=id_, alias=alias))
            continue

        if path != '':
            obj_path = '/'.join((path, name))
        else:
            obj_path = name
        # Remember stat modes so later single-node lookups can reuse them.
        if obj_path not in self._stat_modes:
            self._stat_modes[obj_path] = stat_

        if type_ == 'tree':
            dirnodes.append(DirNode(obj_path, commit=self))
        elif type_ == 'blob':
            filenodes.append(FileNode(obj_path, commit=self, mode=stat_))
        else:
            raise CommitError(
                "Requested object should be Tree or Blob, is %s", type_)

    nodes = dirnodes + filenodes
    for node in nodes:
        if node.path not in self.nodes:
            self.nodes[node.path] = node
    nodes.sort()
    return nodes
def get_node(self, path):
    """
    Returns ``Node`` object from the given ``path``.

    If there is no node at the given ``path``,
    ``NodeDoesNotExistError`` is raised (the old docstring incorrectly
    claimed ``ChangesetError``).
    """
    path = self._fix_path(path)
    # Idiomatic membership test (was `not path in self.nodes`).
    if path not in self.nodes:
        if path in self._file_paths:
            node = FileNode(path, changeset=self)
        # Fixed: the condition was accidentally duplicated
        # (`path in self._dir_paths or path in self._dir_paths`).
        elif path in self._dir_paths:
            if path == '':
                node = RootNode(changeset=self)
            else:
                node = DirNode(path, changeset=self)
        else:
            raise NodeDoesNotExistError("There is no file nor directory "
                                        "at the given path: %r at revision %r"
                                        % (path, self.short_id))
        # cache node
        self.nodes[path] = node
    return self.nodes[path]
def get_nodes(self, path):
    """
    Returns combined ``DirNode`` and ``FileNode`` objects list
    representing state of changeset at the given ``path``. If node at the
    given ``path`` is not instance of ``DirNode``, ChangesetError would
    be raised.
    """
    if self._get_kind(path) != NodeKind.DIR:
        raise ChangesetError("Directory does not exist for revision %s at "
                             " '%s'" % (self.revision, path))
    path = self._fix_path(path)

    filenodes = [FileNode(f, changeset=self) for f in self._file_paths
                 if os.path.dirname(f) == path]
    # Fixed: the original used the broken `cond and '' or [...]` ternary;
    # since '' is falsy, the '' branch could never be selected and the
    # expression always evaluated to the list — so this plain
    # comprehension preserves the actual behavior exactly.
    dirs = [d for d in self._dir_paths
            if d and posixpath.dirname(d) == path]
    dirnodes = [DirNode(d, changeset=self) for d in dirs
                if os.path.dirname(d) == path]

    als = self.repository.alias
    for k, vals in self._extract_submodules().iteritems():
        # vals = url, rev, type
        loc = vals[0]
        cs = vals[1]
        dirnodes.append(SubModuleNode(k, url=loc, changeset=cs, alias=als))

    nodes = dirnodes + filenodes
    # cache nodes
    for node in nodes:
        self.nodes[node.path] = node
    nodes.sort()
    return nodes
def get_nodes(self, path):
    """
    Returns combined ``DirNode`` and ``FileNode`` objects list
    representing state of commit at the given ``path``. If node at the
    given ``path`` is not instance of ``DirNode``, CommitError would be
    raised.
    """
    if self._get_kind(path) != NodeKind.DIR:
        raise CommitError(
            "Directory does not exist for idx %s at '%s'" %
            (self.idx, path))
    path = self._fix_path(path)

    filenodes = [FileNode(f, commit=self) for f in self._file_paths
                 if os.path.dirname(f) == path]
    # Fixed (resolves the original TODO): the `path == '' and '' or [...]`
    # expression always chose the list branch because '' is falsy, so a
    # plain comprehension is exactly equivalent and far clearer.
    dirs = [d for d in self._dir_paths
            if d and vcspath.dirname(d) == path]
    dirnodes = [DirNode(d, commit=self) for d in dirs
                if os.path.dirname(d) == path]

    alias = self.repository.alias
    for k, vals in self._submodules.iteritems():
        loc = vals[0]
        commit = vals[1]
        dirnodes.append(
            SubModuleNode(k, url=loc, commit=commit, alias=alias))

    nodes = dirnodes + filenodes
    # cache nodes
    for node in nodes:
        self.nodes[node.path] = node
    nodes.sort()
    return nodes