def test_lookup_tree(self):
    """Each directory prefix of 'ad/bd/' must resolve to a Tree object."""
    for tree_path in ('ad', 'ad/bd', 'ad/bd/'):
        o_id = tree_lookup_path(self.get_object, self.tree_id, tree_path)[1]
        self.assertTrue(isinstance(self.store[o_id], Tree))
def test_lookup_tree(self):
    """Each byte-path prefix of b"ad/bd/" must resolve to a Tree object."""
    for tree_path in (b"ad", b"ad/bd", b"ad/bd/"):
        o_id = tree_lookup_path(self.get_object, self.tree_id, tree_path)[1]
        self.assertIsInstance(self.store[o_id], Tree)
def find_last_change_revision(self, path, commit_id):
    """Walk first-parent history from ``commit_id`` and return the most
    recent commit in which ``path`` changed.

    :param path: Path in the tree, as bytes (b'' means the tree root).
    :param commit_id: SHA of the commit to start walking from.
    :return: Tuple of (path, commit id) for the last change.
    :raises TypeError: if ``path`` is not bytes.
    """
    if not isinstance(path, bytes):
        raise TypeError(path)
    commit = self.store[commit_id]
    target_mode, target_sha = tree_lookup_path(self.store.__getitem__, commit.tree, path)
    # The empty path denotes the root tree, which is always a directory.
    if path == b'':
        target_mode = stat.S_IFDIR
    if target_mode is None:
        raise AssertionError("sha %r for %r in %r" % (target_sha, path, commit_id))
    while True:
        parent_commits = []
        for parent_commit in [self.store[c] for c in commit.parents]:
            try:
                mode, sha = tree_lookup_path(self.store.__getitem__, parent_commit.tree, path)
            except (NotTreeError, KeyError):
                # Path does not exist in this parent; skip it.
                continue
            else:
                parent_commits.append(parent_commit)
            if path == b'':
                mode = stat.S_IFDIR
            # Candidate found iff, mode or text changed,
            # or is a directory that didn't previously exist.
            if mode != target_mode or (not stat.S_ISDIR(target_mode) and sha != target_sha):
                return (path, commit.id)
        if parent_commits == []:
            # Reached a root commit without finding a change.
            break
        # NOTE(review): only the first parent is followed — merges from
        # other parents are not explored.
        commit = parent_commits[0]
    return (path, commit.id)
def test_commit_modified(self):
    """A modified file and a new symlink must both appear in the new commit."""
    r = self._repo
    # Use a context manager so the handle is closed even on failure, and
    # write bytes: the file is opened in binary mode (str fails on Python 3).
    with open(os.path.join(r.path, "a"), "wb") as f:
        f.write(b"new contents")
    os.symlink("a", os.path.join(self._repo_dir, "b"))
    r.stage(["a", "b"])
    commit_sha = r.do_commit(
        "modified a",
        committer="Test Committer <*****@*****.**>",
        author="Test Author <*****@*****.**>",
        commit_timestamp=12395,
        commit_timezone=0,
        author_timestamp=12395,
        author_timezone=0,
    )
    self.assertEqual([self._root_commit], r[commit_sha].parents)
    a_mode, a_id = tree_lookup_path(r.get_object, r[commit_sha].tree, "a")
    self.assertEqual(stat.S_IFREG | 0o644, a_mode)
    # Blob contents are bytes; compare against bytes literals.
    self.assertEqual(b"new contents", r[a_id].data)
    b_mode, b_id = tree_lookup_path(r.get_object, r[commit_sha].tree, "b")
    self.assertTrue(stat.S_ISLNK(b_mode))
    self.assertEqual(b"a", r[b_id].data)
def test_lookup_tree(self):
    """Each byte-path prefix of b'ad/bd/' must resolve to a Tree object."""
    for tree_path in (b'ad', b'ad/bd', b'ad/bd/'):
        o_id = tree_lookup_path(self.get_object, self.tree_id, tree_path)[1]
        self.assertTrue(isinstance(self.store[o_id], Tree))
def __nonzero__(self):
    """Truthy iff ``self.filename`` exists in the tree of commit ``self.sha``."""
    repo = self.wiki.repo
    try:
        # KeyError covers both an unknown commit sha and a path that is
        # absent from the commit's tree.
        tree_lookup_path(repo.get_object, repo[self.sha].tree, self.filename)
    except KeyError:
        return False
    return True
def test_commit_no_encode_decode(self):
    """File names staged as raw bytes must round-trip through a commit unchanged."""
    r = self._repo
    # os.fsencode applies the filesystem encoding with the surrogateescape
    # handler, which is more robust than a direct .encode() against
    # sys.getfilesystemencoding() for undecodable names.
    repo_path_bytes = os.fsencode(r.path)
    encodings = ('utf8', 'latin1')
    names = [u'À'.encode(encoding) for encoding in encodings]
    for name, encoding in zip(names, encodings):
        full_path = os.path.join(repo_path_bytes, name)
        with open(full_path, 'wb') as f:
            # Store the encoding name as the body so each file is distinct.
            f.write(encoding.encode('ascii'))
        # These files break tear_down_repo, so clean them up ourselves.
        self.addCleanup(os.remove, full_path)
    r.stage(names)
    commit_sha = r.do_commit(
        b'Files with different encodings',
        committer=b'Test Committer <*****@*****.**>',
        author=b'Test Author <*****@*****.**>',
        commit_timestamp=12395, commit_timezone=0,
        author_timestamp=12395, author_timezone=0,
        ref=None, merge_heads=[self._root_commit])
    for name, encoding in zip(names, encodings):
        mode, id = tree_lookup_path(r.get_object, r[commit_sha].tree, name)
        self.assertEqual(stat.S_IFREG | 0o644, mode)
        self.assertEqual(encoding.encode('ascii'), r[id].data)
def get_record_stream(self, keys, ordering, include_delta_closure):
    """Yield content factories for the given (path, text_revision) keys.

    :param keys: Iterable of (path, text_revision) tuples.
    :param ordering: If 'topological', keys are re-ordered topologically
        before streaming; other values leave the order unchanged.
    :param include_delta_closure: Unused by this implementation.
    :return: Iterator of GitBlobContentFactory for resolvable keys and
        GitAbsentContentFactory for keys whose revision, commit or path
        cannot be found.
    """
    if ordering == 'topological':
        graph = Graph(self)
        keys = graph.iter_topo_order(keys)
    store = self.change_scanner.repository._git.object_store
    for (path, text_revision) in keys:
        try:
            commit_id, mapping = (self.change_scanner.repository.
                                  lookup_bzr_revision_id(text_revision))
        except NoSuchRevision:
            # Unknown revision: emit an "absent" record rather than failing.
            yield GitAbsentContentFactory(store, path, text_revision)
            continue
        try:
            tree_id = store[commit_id].tree
        except KeyError:
            # Commit object missing from the object store.
            yield GitAbsentContentFactory(store, path, text_revision)
            continue
        try:
            (mode, blob_sha) = tree_lookup_path(store.__getitem__, tree_id,
                                                encode_git_path(path))
        except KeyError:
            # Path not present in this commit's tree.
            yield GitAbsentContentFactory(store, path, text_revision)
        else:
            yield GitBlobContentFactory(store, path, text_revision, blob_sha)
def make_template_context(self, repo, rev, path):
    """Build the template context for rendering a submodule page.

    Resolves the submodule entry at ``path`` in ``rev`` and, if possible,
    its URL/path from the superproject's configuration.

    :raises NotFound: if ``path`` does not exist in the commit's tree.
    """
    repo, rev, path, commit = _get_repo_and_rev(repo, rev, path)
    try:
        # The tree entry for a submodule records the submodule's commit sha.
        submodule_rev = tree_lookup_path(
            repo.__getitem__, commit.tree, encode_for_git(path))[1]
    except KeyError:
        raise NotFound("Parent path for submodule missing")
    try:
        (submodule_url, submodule_path) = _get_submodule(
            repo, commit, encode_for_git(path))
    except KeyError:
        # .gitmodules entry missing; render the page without URL/path info.
        submodule_url = None
        submodule_path = None
    # TODO(jelmer): Rather than printing an information page,
    # redirect to the page in klaus for the repository at
    # submodule_path, revision submodule_rev.
    self.context = {
        'view': self.view_name,
        'repo': repo,
        'rev': rev,
        'commit': commit,
        'branches': repo.get_branch_names(exclude=rev),
        'tags': repo.get_tag_names(),
        'path': path,
        'subpaths': list(subpaths(path)) if path else None,
        'submodule_url': force_unicode(submodule_url),
        'submodule_path': force_unicode(submodule_path),
        'submodule_rev': force_unicode(submodule_rev),
        'base_href': None,
    }
def list_directory(self, directory_path):
    """Return a list of File objects for the given directory path.

    If the path doesn't exist, returns None.  If the path exists but is
    empty, an empty list is returned.  Otherwise a list of File objects
    in that directory.
    """
    if directory_path is None:
        directory_path = ''
    else:
        # Normalize: strip leading/trailing path separators.
        directory_path = directory_path.strip(posixpath.sep)
    commit_id, root_id = self._get_root()
    if directory_path == '':
        # Empty path means the root tree itself.
        sha = root_id
        mode = stat.S_IFDIR
    else:
        if root_id is None:
            return None
        try:
            (mode, sha) = tree_lookup_path(self.store.__getitem__,
                                           root_id, directory_path)
        except KeyError:
            # Path not present in the tree.
            return None
    if mode is not None and stat.S_ISDIR(mode):
        ret = []
        for (name, mode, sha) in self.store[sha].iteritems():
            ret.append(
                File(self.store, mode, sha,
                     posixpath.join(directory_path, name), commit_id))
        return ret
    else:
        # Path exists but is not a directory.
        return None
def make_template_context(self, repo, rev, path):
    """Build the template context for rendering a submodule page.

    :raises NotFound: if ``path`` does not exist in the commit's tree.
    """
    repo, rev, path, commit = _get_repo_and_rev(repo, rev, path)
    try:
        # The tree entry for a submodule records the submodule's commit sha.
        submodule_rev = tree_lookup_path(repo.__getitem__, commit.tree,
                                         encode_for_git(path))[1]
    except KeyError:
        raise NotFound("Parent path for submodule missing")
    try:
        (submodule_url, submodule_path) = _get_submodule(repo, commit,
                                                         encode_for_git(path))
    except KeyError:
        # .gitmodules entry missing; render the page without URL/path info.
        submodule_url = None
        submodule_path = None
    # TODO(jelmer): Rather than printing an information page,
    # redirect to the page in klaus for the repository at
    # submodule_path, revision submodule_rev.
    self.context = {
        'view': self.view_name,
        'repo': repo,
        'rev': rev,
        'commit': commit,
        'branches': repo.get_branch_names(exclude=rev),
        'tags': repo.get_tag_names(),
        'path': path,
        'subpaths': list(subpaths(path)) if path else None,
        'submodule_url': force_unicode(submodule_url),
        'submodule_path': force_unicode(submodule_path),
        'submodule_rev': force_unicode(submodule_rev),
        'base_href': None,
    }
def make_template_context(self, repo, namespace, rev, path):
    """Build the template context for rendering a submodule page.

    :raises NotFound: if ``path`` does not exist in the commit's tree.
    """
    repo, rev, path, commit = _get_repo_and_rev(repo, namespace, rev, path)
    try:
        # The tree entry for a submodule records the submodule's commit sha.
        submodule_rev = tree_lookup_path(repo.__getitem__, commit.tree,
                                         encode_for_git(path))[1]
    except KeyError:
        raise NotFound("Parent path for submodule missing")
    try:
        (submodule_url, submodule_path) = _get_submodule(repo, commit,
                                                         encode_for_git(path))
    except KeyError:
        # .gitmodules entry missing; render the page without URL/path info.
        submodule_url = None
        submodule_path = None
    # TODO(jelmer): Rather than printing an information page,
    # redirect to the page in klaus for the repository at
    # submodule_path, revision submodule_rev.
    self.context = {
        "view": self.view_name,
        "repo": repo,
        "rev": rev,
        "commit": commit,
        "branches": repo.get_branch_names(exclude=rev),
        "tags": repo.get_tag_names(),
        "path": path,
        "subpaths": list(subpaths(path)) if path else None,
        "submodule_url": force_unicode(submodule_url),
        "submodule_path": force_unicode(submodule_path),
        "submodule_rev": force_unicode(submodule_rev),
        "base_href": None,
    }
def test_commit_no_encode_decode(self):
    """Byte file names in several encodings survive staging and committing."""
    r = self._repo
    base = os.fsencode(r.path)
    cases = [(u'À'.encode(enc), enc) for enc in ('utf8', 'latin1')]
    for name, enc in cases:
        full_path = os.path.join(base, name)
        with open(full_path, 'wb') as f:
            f.write(enc.encode('ascii'))
        # These files are break tear_down_repo, so cleanup these files
        # ourselves.
        self.addCleanup(os.remove, full_path)
    names = [name for name, _ in cases]
    r.stage(names)
    commit_sha = r.do_commit(
        b'Files with different encodings',
        committer=b'Test Committer <*****@*****.**>',
        author=b'Test Author <*****@*****.**>',
        commit_timestamp=12395, commit_timezone=0,
        author_timestamp=12395, author_timezone=0,
        ref=None, merge_heads=[self._root_commit])
    tree = r[commit_sha].tree
    for name, enc in cases:
        mode, id = tree_lookup_path(r.get_object, tree, name)
        self.assertEqual(stat.S_IFREG | 0o644, mode)
        self.assertEqual(enc.encode('ascii'), r[id].data)
def _get_object(self, key, branch='master', commit_sha=None):
    """Look up ``key`` in the tree of ``commit_sha`` (or the head of
    ``branch`` when no sha is given); return None when anything is missing."""
    try:
        head = commit_sha if commit_sha else self.branch_head(branch)
        _mode, sha = tree_lookup_path(
            self.repo.get_object, self._repo_tree(head), key)
        return self.repo[sha]
    except KeyError:
        # Unknown branch/commit, or path absent from the tree.
        return None
def test_commit_modified(self):
    """A modified file and a new symlink are both recorded in the commit."""
    r = self._repo
    with open(os.path.join(r._path_bytes, b'a'), 'wb') as f:
        f.write(b'new contents')
    os.symlink('a', os.path.join(r._path_bytes, b'b'))
    r.stage(['a', 'b'])
    commit_sha = r.do_commit(
        b'modified a',
        committer=b'Test Committer <*****@*****.**>',
        author=b'Test Author <*****@*****.**>',
        commit_timestamp=12395,
        commit_timezone=0,
        author_timestamp=12395,
        author_timezone=0)
    self.assertEqual([self._root_commit], r[commit_sha].parents)
    tree = r[commit_sha].tree
    a_mode, a_id = tree_lookup_path(r.get_object, tree, b'a')
    self.assertEqual(stat.S_IFREG | 0o644, a_mode)
    self.assertEqual(b'new contents', r[a_id].data)
    b_mode, b_id = tree_lookup_path(r.get_object, tree, b'b')
    self.assertTrue(stat.S_ISLNK(b_mode))
    self.assertEqual(b'a', r[b_id].data)
def get_blob_or_tree(self, commit, path):
    """Return the Git tree or blob object for `path` at `commit`."""
    git_path = encode_for_git(path)
    try:
        lookup = tree_lookup_path(self.__getitem__, commit.tree, git_path)
    except NotTreeError:
        # Some part of the path was a file where a folder was expected.
        # Example: path="/path/to/foo.txt" but "to" is a file in "/path".
        raise KeyError
    return self[lookup[1]]
def data(self):
    """Blob contents of this file at commit ``self.sha``, memoized in the cache."""
    cache_key = self._cache_key('data')
    cached = cache.get(cache_key)
    if cached:
        return cached
    repo = self.wiki.repo
    _mode, blob_sha = tree_lookup_path(
        repo.get_object, repo[self.sha].tree, self.filename.encode())
    contents = repo[blob_sha].data
    cache.set(cache_key, contents)
    return contents
def get_file(self, path):
    """Return an object representing the file, or None if unavailable."""
    commit_id, root_id = self._get_root()
    if root_id is None:
        return None
    try:
        file_mode, file_sha = tree_lookup_path(
            self.store.__getitem__, root_id, path)
    except KeyError:
        # Path not present in the tree.
        return None
    return File(self.store, file_mode, file_sha, path, commit_id)
def dulwich_get_file_revision(repo, commit, path):
    """Return the contents of ``path`` as of ``commit``.

    repo: absolute path to a git repository, e.g. '/Users/mahoney/Desktop/testrepo'
    commit: sha1 hash of a commit, e.g. 'd2ecde1b475eafa1293f4827de656d40ee13aaac'
    path: relative path of the file beneath the repo, e.g. 'folder/file1.txt'

    Adapted from http://www.aaronheld.com/post/using-python-dulwich-to-load-any-version-of-a-file-from-a-local-git-repo
    """
    repository = Repo(repo)  # maps sha keys to objects (commit, tree, blob, tag)
    root_tree = repository[commit].tree  # same as repository.get_object(commit)
    file_sha = tree_lookup_path(repository.get_object, root_tree, path)[1]
    return repository[file_sha].data
def geojson(self):
    """GeoJSON sidecar contents for this file, memoized in the cache."""
    cache_key = self._cache_key('geojson')
    cached = cache.get(cache_key)
    if cached:
        return cached
    # Drop the last three characters of the name and append '.geojson'
    # to get the sidecar file's name.
    sidecar = self.filename[:-3] + '.geojson'
    repo = self.wiki.repo
    _mode, blob_sha = tree_lookup_path(
        repo.get_object, repo[self.sha].tree, sidecar)
    contents = repo[blob_sha].data
    cache.set(cache_key, contents)
    return contents
def iter_files_bytes(self, desired_files):
    """Iterate through file versions.

    Files will not necessarily be returned in the order they occur in
    desired_files.  No specific order is guaranteed.

    Yields pairs of identifier, bytes_iterator.  identifier is an opaque
    value supplied by the caller as part of desired_files.  It should
    uniquely identify the file version in the caller's context.  (Examples:
    an index number or a TreeTransform trans_id.)

    bytes_iterator is an iterable of bytestrings for the file.  The
    kind of iterable and length of the bytestrings are unspecified, but for
    this implementation, it is a list of bytes produced by
    VersionedFile.get_record_stream().

    :param desired_files: a list of (file_id, revision_id, identifier)
        triples
    """
    # Group requested files by revision so each commit is resolved once.
    per_revision = {}
    for (file_id, revision_id, identifier) in desired_files:
        per_revision.setdefault(revision_id, []).append(
            (file_id, identifier))
    for revid, files in viewitems(per_revision):
        try:
            (commit_id, mapping) = self.lookup_bzr_revision_id(revid)
        except errors.NoSuchRevision:
            raise errors.RevisionNotPresent(revid, self)
        try:
            commit = self._git.object_store[commit_id]
        except KeyError:
            raise errors.RevisionNotPresent(revid, self)
        root_tree = commit.tree
        for fileid, identifier in files:
            try:
                path = mapping.parse_file_id(fileid)
            except ValueError:
                raise errors.RevisionNotPresent((fileid, revid), self)
            try:
                obj = tree_lookup_path(
                    self._git.object_store.__getitem__, root_tree,
                    path.encode('utf-8'))
                # tree_lookup_path returns a (mode, sha) tuple; dereference
                # the sha to get the actual object.
                if isinstance(obj, tuple):
                    (mode, item_id) = obj
                    obj = self._git.object_store[item_id]
            except KeyError:
                raise errors.RevisionNotPresent((fileid, revid), self)
            else:
                if obj.type_name == b"tree":
                    # Directories have no byte content.
                    yield (identifier, [])
                elif obj.type_name == b"blob":
                    yield (identifier, obj.chunked)
                else:
                    raise AssertionError("file text resolved to %r" % obj)
def test_commit_symlink(self):
    """A staged symlink is committed with link mode and its target as blob data."""
    r = self._repo
    os.symlink('a', os.path.join(r.path, 'b'))
    r.stage(['a', 'b'])
    commit_sha = r.do_commit(
        b'Symlink b',
        committer=b'Test Committer <*****@*****.**>',
        author=b'Test Author <*****@*****.**>',
        commit_timestamp=12395,
        commit_timezone=0,
        author_timestamp=12395,
        author_timezone=0)
    self.assertEqual([self._root_commit], r[commit_sha].parents)
    link_mode, link_id = tree_lookup_path(
        r.get_object, r[commit_sha].tree, b'b')
    self.assertTrue(stat.S_ISLNK(link_mode))
    self.assertEqual(b'a', r[link_id].data)
def test_commit_modified(self):
    """Committing a modified file stores the new blob contents."""
    r = self._repo
    target = os.path.join(r.path, 'a')
    with open(target, 'wb') as f:
        f.write(b'new contents')
    r.stage(['a'])
    commit_sha = r.do_commit(
        'modified a',
        committer='Test Committer <*****@*****.**>',
        author='Test Author <*****@*****.**>',
        commit_timestamp=12395,
        commit_timezone=0,
        author_timestamp=12395,
        author_timezone=0)
    self.assertEqual([self._root_commit], r[commit_sha].parents)
    blob_id = tree_lookup_path(r.get_object, r[commit_sha].tree, 'a')[1]
    self.assertEqual(b'new contents', r[blob_id].data)
def _get_object(self, key, branch='master', commit_sha=None, bypass_head_cache=True):
    """Fetch the object at ``key``, optionally consulting the head cache
    for master-branch lookups; return None when anything is missing."""
    if branch == 'master' and not bypass_head_cache:
        cached = self.head_cache.get(self._head_cache_key(key), None)
        if cached:
            return cached
    try:
        head = commit_sha if commit_sha else self.branch_head(branch)
        _mode, sha = tree_lookup_path(
            self.repo.get_object, self._repo_tree(head), key)
        return self.repo[sha]
    except KeyError:
        # Unknown branch/commit, or path absent from the tree.
        return None
    except NotTreeError:
        # A path component resolved to a blob rather than a tree.
        return None
def get_file_contents(tree, path):
    """Gets contents of a file.

    Args:
        tree: A Tree instance containing the file.
        path: An encoded path to the file

    Returns:
        A string containing all contents of the file
    """
    repos_folder = app.config['GIT_REPOS_FOLDER']
    rep = Repo(repos_folder + '/terraform')
    blob_sha = tree_lookup_path(rep.get_object, tree, path)[1]
    return rep[blob_sha].data.decode()
def data(self):
    """Blob contents of this file at commit ``self.sha``, memoized in the cache."""
    cache_key = self._cache_key('data')
    cached = cache.get(cache_key)
    if cached:
        return cached
    repo = self.wiki.repo
    _mode, blob_sha = tree_lookup_path(
        repo.get_object, repo[self.sha].tree, self.filename)
    contents = repo[blob_sha].data
    cache.set(cache_key, contents)
    return contents
def test_commit_symlink(self):
    """A staged symlink is committed with link mode and its target as blob data."""
    r = self._repo
    os.symlink("a", os.path.join(r.path, "b"))
    r.stage(["a", "b"])
    commit_sha = r.do_commit(
        b"Symlink b",
        committer=b"Test Committer <*****@*****.**>",
        author=b"Test Author <*****@*****.**>",
        commit_timestamp=12395,
        commit_timezone=0,
        author_timestamp=12395,
        author_timezone=0,
    )
    self.assertEqual([self._root_commit], r[commit_sha].parents)
    link_mode, link_id = tree_lookup_path(
        r.get_object, r[commit_sha].tree, b"b")
    self.assertTrue(stat.S_ISLNK(link_mode))
    self.assertEqual(b"a", r[link_id].data)
def remove(repo=".", paths=None, cached=False):
    """Remove files from the staging area.

    Args:
      repo: Repository for the files
      paths: Paths to remove
      cached: If True, only remove from the index and leave the working
        file in place.
    """
    with open_repo_closing(repo) as r:
        index = r.open_index()
        for p in paths:
            full_path = os.path.abspath(p).encode(sys.getfilesystemencoding())
            tree_path = path_to_tree_path(r.path, p)
            try:
                index_sha = index[tree_path].sha
            except KeyError:
                raise Exception('%s did not match any files' % p)
            if not cached:
                try:
                    st = os.lstat(full_path)
                except OSError:
                    # File already gone from disk; nothing to safety-check.
                    pass
                else:
                    try:
                        blob = blob_from_path_and_stat(full_path, st)
                    except IOError:
                        pass
                    else:
                        try:
                            committed_sha = tree_lookup_path(
                                r.__getitem__, r[r.head()].tree, tree_path)[1]
                        except KeyError:
                            # Path not in HEAD (e.g. newly added file).
                            committed_sha = None
                        # Refuse to delete when the staged content differs
                        # from both the working file and HEAD.
                        if blob.id != index_sha and index_sha != committed_sha:
                            raise Exception(
                                'file has staged content differing '
                                'from both the file and head: %s' % p)
                        # Refuse to delete when there are staged changes
                        # relative to HEAD.
                        if index_sha != committed_sha:
                            raise Exception(
                                'file has staged changes: %s' % p)
                    os.remove(full_path)
            del index[tree_path]
        index.write()
def test_commit_modified(self):
    """Committing a modified file stores the new blob with regular-file mode."""
    r = self._repo
    target = os.path.join(r.path, "a")
    with open(target, "wb") as f:
        f.write(b"new contents")
    r.stage(["a"])
    commit_sha = r.do_commit(
        b"modified a",
        committer=b"Test Committer <*****@*****.**>",
        author=b"Test Author <*****@*****.**>",
        commit_timestamp=12395,
        commit_timezone=0,
        author_timestamp=12395,
        author_timezone=0,
    )
    self.assertEqual([self._root_commit], r[commit_sha].parents)
    a_mode, a_id = tree_lookup_path(r.get_object, r[commit_sha].tree, b"a")
    self.assertEqual(stat.S_IFREG | 0o644, a_mode)
    self.assertEqual(b"new contents", r[a_id].data)
def remove(repo=".", paths=None, cached=False):
    """Remove files from the staging area.

    :param repo: Repository for the files
    :param paths: Paths to remove
    :param cached: If True, only remove from the index and leave the
        working file in place.
    """
    with open_repo_closing(repo) as r:
        index = r.open_index()
        for p in paths:
            full_path = os.path.abspath(p).encode(sys.getfilesystemencoding())
            tree_path = path_to_tree_path(r.path, p)
            try:
                index_sha = index[tree_path].sha
            except KeyError:
                raise Exception('%s did not match any files' % p)
            if not cached:
                try:
                    st = os.lstat(full_path)
                except OSError:
                    # File already gone from disk; nothing to safety-check.
                    pass
                else:
                    try:
                        blob = blob_from_path_and_stat(full_path, st)
                    except IOError:
                        pass
                    else:
                        try:
                            committed_sha = tree_lookup_path(
                                r.__getitem__, r[r.head()].tree, tree_path)[1]
                        except KeyError:
                            # Path not in HEAD (e.g. newly added file).
                            committed_sha = None
                        # Refuse to delete when the staged content differs
                        # from both the working file and HEAD.
                        if blob.id != index_sha and index_sha != committed_sha:
                            raise Exception(
                                'file has staged content differing '
                                'from both the file and head: %s' % p)
                        # Refuse to delete when there are staged changes
                        # relative to HEAD.
                        if index_sha != committed_sha:
                            raise Exception(
                                'file has staged changes: %s' % p)
                    os.remove(full_path)
            del index[tree_path]
        index.write()
def test_commit_modified(self):
    """Committing a modified file stores the new blob contents."""
    r = self._repo
    # Context manager closes the handle even if the write fails; the file
    # is opened in binary mode, so write bytes (str fails on Python 3).
    with open(os.path.join(r.path, 'a'), 'wb') as f:
        f.write(b'new contents')
    r.stage(['a'])
    commit_sha = r.do_commit(
        'modified a', committer='Test Committer <*****@*****.**>',
        author='Test Author <*****@*****.**>',
        commit_timestamp=12395, commit_timezone=0,
        author_timestamp=12395, author_timezone=0)
    self.assertEqual([self._root_commit], r[commit_sha].parents)
    _, blob_id = tree_lookup_path(r.get_object, r[commit_sha].tree, 'a')
    # Blob data is bytes; compare against a bytes literal.
    self.assertEqual(b'new contents', r[blob_id].data)
def get_object_by_path(repo, path, committish=None):
    """Get an object by path.

    :param repo: A path to the repository
    :param path: Path to look up
    :param committish: Commit to look up path in
    :return: A `ShaFile` object
    """
    if committish is None:
        committish = "HEAD"
    # Get the repository
    with open_repo_closing(repo) as r:
        # Fix: parse_commit must receive the opened repository object ``r``,
        # not the ``repo`` path/identifier that was passed in.
        commit = parse_commit(r, committish)
        base_tree = commit.tree
        if not isinstance(path, bytes):
            path = path.encode(commit.encoding or DEFAULT_ENCODING)
        (mode, sha) = tree_lookup_path(
            r.object_store.__getitem__, base_tree, path)
        return r[sha]
def test_commit_modified(self):
    """Committing a modified file stores the new blob contents."""
    r = self._repo
    # Context manager closes the handle even if the write fails; the file
    # is opened in binary mode, so write bytes (str fails on Python 3).
    with open(os.path.join(r.path, "a"), "wb") as f:
        f.write(b"new contents")
    r.stage(["a"])
    commit_sha = r.do_commit(
        "modified a",
        committer="Test Committer <*****@*****.**>",
        author="Test Author <*****@*****.**>",
        commit_timestamp=12395,
        commit_timezone=0,
        author_timestamp=12395,
        author_timezone=0,
    )
    self.assertEqual([self._root_commit], r[commit_sha].parents)
    _, blob_id = tree_lookup_path(r.get_object, r[commit_sha].tree, "a")
    # Blob data is bytes; compare against a bytes literal.
    self.assertEqual(b"new contents", r[blob_id].data)
def get_object_by_path(repo, path, committish=None):
    """Get an object by path.

    :param repo: A path to the repository
    :param path: Path to look up
    :param committish: Commit to look up path in
    :return: A `ShaFile` object
    """
    committish = "HEAD" if committish is None else committish
    # Open (and on exit close) the repository.
    with open_repo_closing(repo) as r:
        commit = parse_commit(r, committish)
        lookup_path = path
        if not isinstance(lookup_path, bytes):
            lookup_path = lookup_path.encode(commit.encoding or DEFAULT_ENCODING)
        _mode, sha = tree_lookup_path(
            r.object_store.__getitem__, commit.tree, lookup_path)
        return r[sha]
def get_object_by_path(repo, path, committish=None):
    """Get an object by path.

    Args:
      repo: A path to the repository
      path: Path to look up
      committish: Commit to look up path in
    Returns:
      A `ShaFile` object
    """
    if committish is None:
        committish = "HEAD"
    # Open (and on exit close) the repository.
    with open_repo_closing(repo) as r:
        commit = parse_commit(r, committish)
        lookup_path = path if isinstance(path, bytes) else commit_encode(commit, path)
        _mode, sha = tree_lookup_path(
            r.object_store.__getitem__, commit.tree, lookup_path)
        return r[sha]
def verify_commit_reconstruction(target_git_object_retriever, lookup_object,
                                 o, rev, ret_tree, parent_trees, mapping,
                                 unusual_modes, verifiers):
    """Check that commit ``o`` can be reconstructed byte-identically from
    the Bazaar revision ``rev``; raise AssertionError on any mismatch,
    descending into trees to pinpoint the first differing object.
    """
    new_unusual_modes = mapping.export_unusual_file_modes(rev)
    if new_unusual_modes != unusual_modes:
        raise AssertionError("unusual modes don't match: %r != %r" % (
            unusual_modes, new_unusual_modes))
    # Verify that we can reconstruct the commit properly
    rec_o = target_git_object_retriever._reconstruct_commit(rev, o.tree, True,
                                                            verifiers)
    if rec_o != o:
        raise AssertionError("Reconstructed commit differs: %r != %r" % (
            rec_o, o))
    diff = []
    new_objs = {}
    # Compare every reconstructed tree/blob against the object recorded in
    # the original commit's tree; collect any mismatches.
    for path, obj, ie in _tree_to_objects(
            ret_tree, parent_trees, target_git_object_retriever._cache.idmap,
            unusual_modes, mapping.BZR_DUMMY_FILE):
        old_obj_id = tree_lookup_path(lookup_object, o.tree, path)[1]
        new_objs[path] = obj
        if obj.id != old_obj_id:
            diff.append((path, lookup_object(old_obj_id), obj))
    for (path, old_obj, new_obj) in diff:
        # While both sides are trees with the same entry names, descend to
        # find the child entry responsible for the difference.
        while (old_obj.type_name == "tree" and new_obj.type_name == "tree" and
               sorted(old_obj) == sorted(new_obj)):
            for name in old_obj:
                if old_obj[name][0] != new_obj[name][0]:
                    raise AssertionError(
                        "Modes for %s differ: %o != %o" % (
                            path, old_obj[name][0], new_obj[name][0]))
                if old_obj[name][1] != new_obj[name][1]:
                    # Found a differing child, delve deeper
                    path = posixpath.join(path, name)
                    old_obj = lookup_object(old_obj[name][1])
                    new_obj = new_objs[path]
                    break
        raise AssertionError(
            "objects differ for %s: %r != %r" % (path, old_obj, new_obj))
def test_lookup_blob(self):
    """Looking up b'a' must yield a Blob object."""
    _mode, o_id = tree_lookup_path(self.get_object, self.tree_id, b'a')
    self.assertTrue(isinstance(self.store[o_id], Blob))
def test_lookup_submodule(self):
    """The submodule entry itself resolves; descending into it raises."""
    tree_lookup_path(self.get_object, self.tree_id, b"d")[1]
    self.assertRaises(
        SubmoduleEncountered,
        tree_lookup_path,
        self.get_object,
        self.tree_id,
        b"d/a",
    )
def test_lookup_blob(self):
    """Looking up b"a" must yield a Blob object."""
    _mode, o_id = tree_lookup_path(self.get_object, self.tree_id, b"a")
    self.assertIsInstance(self.store[o_id], Blob)
def get_file(self, r, tree, path):
    """Return the blob contents of ``path`` within ``tree`` of repo ``r``."""
    blob_sha = tree_lookup_path(r.get_object, tree, path)[1]
    return r[blob_sha].data
def test_lookup_blob(self):
    """Looking up 'a' must yield a Blob object."""
    _mode, o_id = tree_lookup_path(self.get_object, self.tree_id, 'a')
    self.assertTrue(isinstance(self.store[o_id], Blob))