Example #1
    def stage(self, paths):
        """Stage a set of paths.

        :param paths: List of paths, relative to the repository path
        """
        if isinstance(paths, basestring):
            paths = [paths]
        from dulwich.index import index_entry_from_stat
        index = self.open_index()
        for path in paths:
            full_path = os.path.join(self.path, path)
            try:
                st = os.lstat(full_path)
                is_symbolic_link = stat.S_ISLNK(st.st_mode)
            except OSError:
                # File no longer exists
                try:
                    del index[path]
                except KeyError:
                    pass  # already removed
            else:
                blob = Blob()
                if not is_symbolic_link:
                    f = open(full_path, 'rb')
                    try:
                        blob.data = f.read()
                    finally:
                        f.close()
                else:
                    blob.data = os.readlink(full_path)
                self.object_store.add_object(blob)
                index[path] = index_entry_from_stat(st, blob.id, 0)
        index.write()
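
For context, a minimal usage sketch of this method (a hedged illustration; the repository path and file names here are hypothetical):

from dulwich.repo import Repo

repo = Repo('/path/to/repo')   # hypothetical existing repository
repo.stage('README')           # a single path relative to the repository root
repo.stage(['docs/a.txt', 'src/b.py'])  # or a list of such paths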
Example #2
 def test_delta_medium_object(self):
     # This tests an object set that will have a copy operation
     # 2**20 in size.
     with self.get_pack(pack1_sha) as orig_pack:
         orig_blob = orig_pack[a_sha]
         new_blob = Blob()
         new_blob.data = orig_blob.data + (b'x' * 2 ** 20)
         new_blob_2 = Blob()
         new_blob_2.data = new_blob.data + b'y'
         all_to_pack = list(orig_pack.pack_tuples()) + [(new_blob, None),
                                                        (new_blob_2, None)]
     pack_path = os.path.join(self._tempdir, b'pack_with_deltas')
     write_pack(pack_path, all_to_pack, deltify=True)
     output = run_git_or_fail(['verify-pack', '-v', pack_path])
     self.assertEqual(set(x[0].id for x in all_to_pack),
                      _git_verify_pack_object_list(output))
     # We specifically made a new blob that should be a delta
     # against the blob a_sha, so make sure we really got only 3
     # non-delta objects:
     got_non_delta = int(_NON_DELTA_RE.search(output).group('non_delta'))
     self.assertEqual(
         3, got_non_delta,
         'Expected 3 non-delta objects, got %d' % got_non_delta)
     # We expect one object to have a delta chain length of two
     # (new_blob_2), so let's verify that actually happens:
     self.assertIn(b'chain length = 2', output)
Example #3
 def test_delta_medium_object(self):
     # This tests an object set that will have a copy operation
     # 2**20 in size.
     with self.get_pack(pack1_sha) as orig_pack:
         orig_blob = orig_pack[a_sha]
         new_blob = Blob()
         new_blob.data = orig_blob.data + (b'x' * 2**20)
         new_blob_2 = Blob()
         new_blob_2.data = new_blob.data + b'y'
         all_to_pack = list(orig_pack.pack_tuples()) + [(new_blob, None),
                                                        (new_blob_2, None)]
     pack_path = os.path.join(self._tempdir, 'pack_with_deltas')
     write_pack(pack_path, all_to_pack, deltify=True)
     output = run_git_or_fail(['verify-pack', '-v', pack_path])
     self.assertEqual(set(x[0].id for x in all_to_pack),
                      _git_verify_pack_object_list(output))
     # We specifically made a new blob that should be a delta
     # against the blob a_sha, so make sure we really got only 3
     # non-delta objects:
     got_non_delta = int(_NON_DELTA_RE.search(output).group('non_delta'))
     self.assertEqual(
         3, got_non_delta,
         'Expected 3 non-delta objects, got %d' % got_non_delta)
     # We expect one object to have a delta chain length of two
     # (new_blob_2), so let's verify that actually happens:
     self.assertIn(b'chain length = 2', output)
Example #4
 def test_delta_large_object(self):
     # This tests an object set that will have a copy operation
     # 2**25 in size. This is a copy large enough that it requires
     # two copy operations in git's binary delta format.
     raise SkipTest("skipping slow, large test")
     with self.get_pack(pack1_sha) as orig_pack:
         new_blob = Blob()
         new_blob.data = "big blob" + ("x" * 2**25)
         new_blob_2 = Blob()
         new_blob_2.data = new_blob.data + "y"
         all_to_pack = list(orig_pack.pack_tuples()) + [
             (new_blob, None),
             (new_blob_2, None),
         ]
     pack_path = os.path.join(self._tempdir, "pack_with_deltas")
     write_pack(pack_path, all_to_pack, deltify=True)
     output = run_git_or_fail(["verify-pack", "-v", pack_path])
     self.assertEqual(
         {x[0].id
          for x in all_to_pack},
         _git_verify_pack_object_list(output),
     )
     # We specifically made a new blob that should be a delta
     # against the blob a_sha, so make sure we really got only 4
     # non-delta objects:
     got_non_delta = int(_NON_DELTA_RE.search(output).group("non_delta"))
     self.assertEqual(
         4,
         got_non_delta,
         "Expected 4 non-delta objects, got %d" % got_non_delta,
     )
Example #5
 def test_delta_large_object(self):
     # This tests an object set that will have a copy operation
     # 2**25 in size. This is a copy large enough that it requires
     # two copy operations in git's binary delta format.
     raise SkipTest('skipping slow, large test')
     orig_pack = self.get_pack(pack1_sha)
     orig_blob = orig_pack[a_sha]
     new_blob = Blob()
     new_blob.data = 'big blob' + ('x' * 2**25)
     new_blob_2 = Blob()
     new_blob_2.data = new_blob.data + 'y'
     all_to_pack = list(orig_pack.pack_tuples()) + [(new_blob, None),
                                                    (new_blob_2, None)]
     pack_path = os.path.join(self._tempdir, "pack_with_deltas")
     write_pack(pack_path, all_to_pack, deltify=True)
     output = run_git_or_fail(['verify-pack', '-v', pack_path])
     self.assertEqual(set(x[0].id for x in all_to_pack),
                      _git_verify_pack_object_list(output))
     # We specifically made a new blob that should be a delta
     # against the blob a_sha, so make sure we really got only 4
     # non-delta objects:
     got_non_delta = int(_NON_DELTA_RE.search(output).group('non_delta'))
     self.assertEqual(
         4, got_non_delta,
         'Expected 4 non-delta objects, got %d' % got_non_delta)
Example #6
 def test_delta_large_object(self):
     # This tests an object set that will have a copy operation
     # 2**25 in size. This is a copy large enough that it requires
     # two copy operations in git's binary delta format.
     raise SkipTest('skipping slow, large test')
     with self.get_pack(pack1_sha) as orig_pack:
         orig_blob = orig_pack[a_sha]
         new_blob = Blob()
         new_blob.data = 'big blob' + ('x' * 2 ** 25)
         new_blob_2 = Blob()
         new_blob_2.data = new_blob.data + 'y'
         all_to_pack = list(orig_pack.pack_tuples()) + [(new_blob, None),
                                                        (new_blob_2, None)]
     pack_path = os.path.join(self._tempdir, "pack_with_deltas")
     write_pack(pack_path, all_to_pack, deltify=True)
     output = run_git_or_fail(['verify-pack', '-v', pack_path])
     self.assertEqual(set(x[0].id for x in all_to_pack),
                      _git_verify_pack_object_list(output))
     # We specifically made a new blob that should be a delta
     # against the blob a_sha, so make sure we really got only 4
     # non-delta objects:
     got_non_delta = int(_NON_DELTA_RE.search(output).group('non_delta'))
     self.assertEqual(
         4, got_non_delta,
         'Expected 4 non-delta objects, got %d' % got_non_delta)
Example #7
def blob_from_path_and_stat(path, st):
    """Create a blob from a path and a stat object.

    :param path: Full path to file
    :param st: A stat object
    :return: A `Blob` object
    """
    blob = Blob()
    if not stat.S_ISLNK(st.st_mode):
        with open(path, 'rb') as f:
            blob.data = f.read()
    else:
        blob.data = os.readlink(path)
    return blob
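
A hedged usage sketch for this helper (the file path is hypothetical; lstat is used so symlinks are not followed):

import os

path = '/path/to/file'   # hypothetical path
st = os.lstat(path)      # lstat: do not follow symlinks
blob = blob_from_path_and_stat(path, st)
print(blob.id)           # SHA-1 of the resulting blob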
Example #8
def blob_from_path_and_stat(fs_path, st):
    """Create a blob from a path and a stat object.

    :param fs_path: Full file system path to file
    :param st: A stat object
    :return: A `Blob` object
    """
    assert isinstance(fs_path, bytes)
    blob = Blob()
    if not stat.S_ISLNK(st.st_mode):
        with open(fs_path, 'rb') as f:
            blob.data = f.read()
    else:
        blob.data = os.readlink(fs_path)
    return blob
Example #9
    def stage(self, paths):
        """Stage a set of paths.

        :param paths: List of paths, relative to the repository path
        """
        from dulwich.index import cleanup_mode
        index = self.open_index()
        for path in paths:
            blob = Blob()
            try:
                st = os.stat(path)
            except OSError:
                # File no longer exists
                del index[path]
            else:
                f = open(path, 'rb')
                try:
                    blob.data = f.read()
                finally:
                    f.close()
                self.object_store.add_object(blob)
                # XXX: Cleanup some of the other file properties as well?
                index[path] = (st.st_ctime, st.st_mtime, st.st_dev, st.st_ino,
                    cleanup_mode(st.st_mode), st.st_uid, st.st_gid, st.st_size,
                    blob.id, 0)
        index.write()
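
The ten-field tuple written into the index above matches what later dulwich versions build via dulwich.index.index_entry_from_stat (see Example #1); a rough sketch of the equivalence, assuming st and blob as in the loop body:

from dulwich.index import index_entry_from_stat

# Equivalent, in spirit, to the hand-built tuple:
# (ctime, mtime, dev, ino, cleanup_mode(mode), uid, gid, size, sha, flags)
entry = index_entry_from_stat(st, blob.id, 0)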
Example #10
 def ie_to_hexsha(path, ie):
     try:
         return shamap[path]
     except KeyError:
         pass
     # FIXME: Should be the same as in parent
     if ie.kind in ("file", "symlink"):
         try:
             return idmap.lookup_blob_id(ie.file_id, ie.revision)
         except KeyError:
             # no-change merge ?
             blob = Blob()
             blob.data = tree.get_file_text(path)
             if add_cache_entry is not None:
                 add_cache_entry(blob, (ie.file_id, ie.revision), path)
             return blob.id
     elif ie.kind == "directory":
         # Not all cache backends store the tree information,
         # calculate again from scratch
         ret = directory_to_tree(path, ie.children.values(), ie_to_hexsha,
                                 unusual_modes, dummy_file_name,
                                 ie.parent_id is None)
         if ret is None:
             return ret
         return ret.id
     else:
         raise AssertionError
Example #11
    def _tree_from_structure(self, structure):
        # TODO : Support directories
        tree = Tree()

        for file_info in structure:

            # str only
            try:
                data = file_info["data"].encode("ascii")
                name = file_info["name"].encode("ascii")
                mode = file_info["mode"]
            except (KeyError, UnicodeEncodeError):
                # Skip file on missing fields or encoding errors
                continue

            blob = Blob()

            blob.data = data

            # Store file's contents
            self.repo.object_store.add_object(blob)

            # Add blob entry
            tree.add(name, mode, blob.id)

        # Store tree
        self.repo.object_store.add_object(tree)

        return tree.id
Example #12
def symlink_to_blob(symlink_target):
    from dulwich.objects import Blob
    blob = Blob()
    if isinstance(symlink_target, str):
        symlink_target = symlink_target.encode('utf-8')
    blob.data = symlink_target
    return blob
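
A quick sketch of what this helper produces: the blob's data is simply the encoded link target.

blob = symlink_to_blob('target/file')
assert blob.data == b'target/file'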
Example #13
    def stage(self, paths):
        """Stage a set of paths.

        :param paths: List of paths, relative to the repository path
        """
        from dulwich.index import cleanup_mode
        index = self.open_index()
        for path in paths:
            full_path = os.path.join(self.path, path)
            blob = Blob()
            try:
                st = os.stat(full_path)
            except OSError:
                # File no longer exists
                try:
                    del index[path]
                except KeyError:
                    pass  # Doesn't exist in the index either
            else:
                f = open(full_path, 'rb')
                try:
                    blob.data = f.read()
                finally:
                    f.close()
                self.object_store.add_object(blob)
                # XXX: Cleanup some of the other file properties as well?
                index[path] = (st.st_ctime, st.st_mtime, st.st_dev, st.st_ino,
                    cleanup_mode(st.st_mode), st.st_uid, st.st_gid, st.st_size,
                    blob.id, 0)
        index.write()
Example #14
    def test_emit_commit(self):
        b = Blob()
        b.data = "FOO"
        t = Tree()
        t.add(stat.S_IFREG | 0644, "foo", b.id)
        c = Commit()
        c.committer = c.author = "Jelmer <jelmer@host>"
        c.author_time = c.commit_time = 1271345553
        c.author_timezone = c.commit_timezone = 0
        c.message = "msg"
        c.tree = t.id
        self.store.add_objects([(b, None), (t, None), (c, None)])
        self.fastexporter.emit_commit(c, "refs/heads/master")
        self.assertEquals("""blob
mark :1
data 3
FOO
commit refs/heads/master
mark :2
author Jelmer <jelmer@host> 1271345553 +0000
committer Jelmer <jelmer@host> 1271345553 +0000
data 3
msg
M 644 1 foo
""", self.stream.getvalue())
Example #15
    def _tree_from_structure(self, structure):
        # TODO : Support directories
        tree = Tree()

        for file_info in structure:

            # str only
            try:
                data = file_info['data'].encode('ascii')
                name = file_info['name'].encode('ascii')
                mode = file_info['mode']
            except (KeyError, UnicodeEncodeError):
                # Skip file on missing fields or encoding errors
                continue

            blob = Blob()

            blob.data = data

            # Store file's contents
            self.repo.object_store.add_object(blob)

            # Add blob entry
            tree.add(
                name,
                mode,
                blob.id
            )

        # Store tree
        self.repo.object_store.add_object(tree)

        return tree.id
Example #16
def symlink_to_blob(symlink_target):
    from dulwich.objects import Blob
    blob = Blob()
    if isinstance(symlink_target, str):
        symlink_target = encode_git_path(symlink_target)
    blob.data = symlink_target
    return blob
Example #17
    def test_emit_commit(self):
        b = Blob()
        b.data = "FOO"
        t = Tree()
        t.add("foo", stat.S_IFREG | 0o644, b.id)
        c = Commit()
        c.committer = c.author = "Jelmer <jelmer@host>"
        c.author_time = c.commit_time = 1271345553
        c.author_timezone = c.commit_timezone = 0
        c.message = "msg"
        c.tree = t.id
        self.store.add_objects([(b, None), (t, None), (c, None)])
        self.fastexporter.emit_commit(c, "refs/heads/master")
        self.assertEqual(
            """blob
mark :1
data 3
FOO
commit refs/heads/master
mark :2
author Jelmer <jelmer@host> 1271345553 +0000
committer Jelmer <jelmer@host> 1271345553 +0000
data 3
msg
M 644 1 foo
""", self.stream.getvalue())
Example #18
    def stage(self, paths):
        """Stage a set of paths.

        :param paths: List of paths, relative to the repository path
        """
        from dulwich.index import cleanup_mode
        index = self.open_index()
        for path in paths:
            full_path = os.path.join(self.path, path)
            blob = Blob()
            try:
                st = os.stat(full_path)
            except OSError:
                # File no longer exists
                try:
                    del index[path]
                except KeyError:
                    pass  # Doesn't exist in the index either
            else:
                with open(full_path, 'rb') as f:
                    blob.data = f.read()
                self.object_store.add_object(blob)
                # XXX: Cleanup some of the other file properties as well?
                index[path] = (st.st_ctime, st.st_mtime, st.st_dev, st.st_ino,
                    cleanup_mode(st.st_mode), st.st_uid, st.st_gid, st.st_size,
                    blob.id, 0)
        index.write()
Example #19
def blob_from_path(basepath, path):
    """Returns a tuple of (sha_id, mode, blob)
    """
    fullpath = os.path.join(basepath, path)
    with open(fullpath, 'rb') as working_file:
        blob = Blob()
        blob.data = working_file.read()
    return (path, os.stat(fullpath).st_mode, blob)
Example #20
def blob_from_path_and_stat(path, st):
    """Create a blob from a path and a stat object.

    :param path: Full path to file
    :param st: A stat object
    :return: A `Blob` object
    """
    blob = Blob()
    if not stat.S_ISLNK(st.st_mode):
        with open(path, 'rb') as f:
            blob.data = f.read()
    else:
        if not isinstance(path, bytes):
            blob.data = os.readlink(path.encode(sys.getfilesystemencoding()))
        else:
            blob.data = os.readlink(path)

    return blob
Example #21
 def test_single_blob(self):
     blob = Blob()
     blob.data = b"foo"
     self.store.add_object(blob)
     blobs = [(b"bla", blob.id, stat.S_IFREG)]
     rootid = commit_tree(self.store, blobs)
     self.assertEqual(rootid, b"1a1e80437220f9312e855c37ac4398b68e5c1d50")
     self.assertEqual((stat.S_IFREG, blob.id), self.store[rootid][b"bla"])
     self.assertEqual(set([rootid, blob.id]), set(self.store._data.keys()))
Example #22
 def test_git_dir(self):
     obj = Tree()
     a = Blob()
     a.data = b"foo"
     obj.add(b".git", 0o100644, a.id)
     self.repo.object_store.add_objects(
         [(a, None), (obj, None)])
     self.assertEqual(
             [(obj.id, 'invalid name .git')],
             [(sha, str(e)) for (sha, e) in porcelain.fsck(self.repo)])
Example #23
def blob_from_path_and_mode(fs_path, mode, tree_encoding="utf-8"):
    """Create a blob from a path and a stat object.

    Args:
      fs_path: Full file system path to file
      st: A stat object
    Returns: A `Blob` object
    """
    assert isinstance(fs_path, bytes)
    blob = Blob()
    if stat.S_ISLNK(mode):
        if sys.platform == "win32":
            # os.readlink on Python3 on Windows requires a unicode string.
            fs_path = os.fsdecode(fs_path)
            blob.data = os.readlink(fs_path).encode(tree_encoding)
        else:
            blob.data = os.readlink(fs_path)
    else:
        with open(fs_path, "rb") as f:
            blob.data = f.read()
    return blob
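
A hedged usage sketch (the path is hypothetical and must be bytes, as the assert above requires; the mode typically comes from os.lstat):

import os

fs_path = b'/path/to/entry'      # hypothetical path (must be bytes)
mode = os.lstat(fs_path).st_mode
blob = blob_from_path_and_mode(fs_path, mode)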
Example #24
 def test_nested(self):
     blob = Blob()
     blob.data = "foo"
     self.store.add_object(blob)
     blobs = [("bla/bar", blob.id, stat.S_IFREG)]
     rootid = commit_tree(self.store, blobs)
     self.assertEqual(rootid, "d92b959b216ad0d044671981196781b3258fa537")
     dirid = self.store[rootid]["bla"][1]
     self.assertEqual(dirid, "c1a1deb9788150829579a8b4efa6311e7b638650")
     self.assertEqual((stat.S_IFDIR, dirid), self.store[rootid]["bla"])
     self.assertEqual((stat.S_IFREG, blob.id), self.store[dirid]["bar"])
     self.assertEqual(set([rootid, dirid, blob.id]), set(self.store._data.keys()))
Example #25
    def commit(self):
        # XXX: evidence suggests the rest of this function
        # is not supposed to exist
        # XXX: generate all objects at once and
        #     add them as pack instead of legacy objects
        r = self.repo.repo
        store = r.object_store
        new_objects = []
        names = sorted(self.contents)
        nametree = defaultdict(list)
        for name in names:
            base = name.strip('/')
            while base:
                nbase = os.path.dirname(base)
                nametree[nbase].append(base)
                base = nbase

        if self.base_commit:
            tree = r.tree(self.base_commit.commit.tree)
            tree._ensure_parsed()
            print tree._entries
        else:
            tree = Tree()

        for src, dest in self.renames:
            src = src.strip('/')
            dest = dest.strip('/')
            tree[dest] = tree[src]
            del tree[src]

        for name in names:
            blob = Blob()
            blob.data = self.contents[name]
            new_objects.append((blob, name))
            tree.add(0555, os.path.basename(name), blob.id)

        new_objects.append((tree, ''))
        commit = Commit()
        if self.base_commit:
            commit.parents = [self.base_commit.commit.id]
        commit.tree = tree.id
        commit.message = self.extra['message']
        commit.committer = self.author
        commit.commit_time = int(self.time_unix)
        commit.commit_timezone = self.time_offset
        commit.author = self.author
        commit.author_time = int(self.time_unix)
        commit.author_timezone = self.time_offset
        new_objects.append((commit, ''))
        store.add_objects(new_objects)
        self.repo.repo.refs['HEAD'] = commit.id
Example #26
def blob_from_path_and_stat(fs_path, st):
    """Create a blob from a path and a stat object.

    :param fs_path: Full file system path to file
    :param st: A stat object
    :return: A `Blob` object
    """
    assert isinstance(fs_path, bytes)
    blob = Blob()
    if not stat.S_ISLNK(st.st_mode):
        with open(fs_path, 'rb') as f:
            blob.data = f.read()
    else:
        if sys.platform == 'win32' and sys.version_info[0] == 3:
            # os.readlink on Python3 on Windows requires a unicode string.
            # TODO(jelmer): Don't assume tree_encoding == fs_encoding
            tree_encoding = sys.getfilesystemencoding()
            fs_path = fs_path.decode(tree_encoding)
            blob.data = os.readlink(fs_path).encode(tree_encoding)
        else:
            blob.data = os.readlink(fs_path)
    return blob
Example #27
 def test_nested(self):
     blob = Blob()
     blob.data = b"foo"
     self.store.add_object(blob)
     blobs = [(b"bla/bar", blob.id, stat.S_IFREG)]
     rootid = commit_tree(self.store, blobs)
     self.assertEqual(rootid, b"d92b959b216ad0d044671981196781b3258fa537")
     dirid = self.store[rootid][b"bla"][1]
     self.assertEqual(dirid, b"c1a1deb9788150829579a8b4efa6311e7b638650")
     self.assertEqual((stat.S_IFDIR, dirid), self.store[rootid][b"bla"])
     self.assertEqual((stat.S_IFREG, blob.id), self.store[dirid][b"bar"])
     self.assertEqual(set([rootid, dirid, blob.id]),
                      set(self.store._data.keys()))
Example #28
 def test_blob(self):
     self.map.start_write_group()
     updater = self.cache.get_updater(Revision(b"myrevid"))
     updater.add_object(self._get_test_commit(),
                        {"testament3-sha1": b"Test"}, None)
     b = Blob()
     b.data = b"TEH BLOB"
     updater.add_object(b, (b"myfileid", b"myrevid"), None)
     updater.finish()
     self.map.commit_write_group()
     self.assertEqual([("blob", (b"myfileid", b"myrevid"))],
                      list(self.map.lookup_git_sha(b.id)))
     self.assertEqual(b.id, self.map.lookup_blob_id(b"myfileid",
                                                    b"myrevid"))
Example #29
 def test_simple(self):
     c1, c2, c3 = build_commit_graph(self.repo.object_store, [[1], [2, 1],
         [3, 1, 2]])
     b = Blob()
     b.data = b"foo the bar"
     t = Tree()
     t.add(b"somename", 0o100644, b.id)
     self.repo.object_store.add_object(t)
     self.repo.object_store.add_object(b)
     sha = porcelain.commit_tree(
         self.repo.path, t.id, message=b"Withcommit.",
         author=b"Joe <*****@*****.**>",
         committer=b"Jane <*****@*****.**>")
     self.assertTrue(isinstance(sha, bytes))
     self.assertEqual(len(sha), 40)
Example #30
 def test_deltas_work(self):
     orig_pack = self.get_pack(pack1_sha)
     orig_blob = orig_pack[a_sha]
     new_blob = Blob()
     new_blob.data = orig_blob.data + "x"
     all_to_pack = list(orig_pack.pack_tuples()) + [(new_blob, None)]
     pack_path = os.path.join(self._tempdir, "pack_with_deltas")
     write_pack(pack_path, all_to_pack, deltify=True)
     output = run_git_or_fail(["verify-pack", "-v", pack_path])
     self.assertEqual(set(x[0].id for x in all_to_pack), _git_verify_pack_object_list(output))
     # We specifically made a new blob that should be a delta
     # against the blob a_sha, so make sure we really got only 3
     # non-delta objects:
     got_non_delta = int(_NON_DELTA_RE.search(output).group("non_delta"))
     self.assertEqual(3, got_non_delta, "Expected 3 non-delta objects, got %d" % got_non_delta)
Example #31
 def test_get_raw(self):
     b = Blob()
     b.data = b'a\nb\nc\nd\ne\n'
     self.store.lock_read()
     self.addCleanup(self.store.unlock)
     self.assertRaises(KeyError, self.store.get_raw, b.id)
     bb = BranchBuilder(branch=self.branch)
     bb.start_series()
     bb.build_snapshot(None, [
         ('add', ('', None, 'directory', None)),
         ('add', ('foo', b'foo-id', 'file', b'a\nb\nc\nd\ne\n')),
     ])
     bb.finish_series()
     # read locks cache
     self.assertRaises(KeyError, self.store.get_raw, b.id)
     self.store.unlock()
     self.store.lock_read()
     self.assertEqual(b.as_raw_string(), self.store.get_raw(b.id)[1])
Example #32
 def test_contains(self):
     b = Blob()
     b.data = b'a\nb\nc\nd\ne\n'
     self.store.lock_read()
     self.addCleanup(self.store.unlock)
     self.assertFalse(b.id in self.store)
     bb = BranchBuilder(branch=self.branch)
     bb.start_series()
     bb.build_snapshot(None, [
         ('add', ('', None, 'directory', None)),
         ('add', ('foo', b'foo-id', 'file', b'a\nb\nc\nd\ne\n')),
     ])
     bb.finish_series()
     # read locks cache
     self.assertFalse(b.id in self.store)
     self.store.unlock()
     self.store.lock_read()
     self.assertTrue(b.id in self.store)
Example #33
 def test_deltas_work(self):
     with self.get_pack(pack1_sha) as orig_pack:
         orig_blob = orig_pack[a_sha]
         new_blob = Blob()
         new_blob.data = orig_blob.data + b'x'
         all_to_pack = list(orig_pack.pack_tuples()) + [(new_blob, None)]
     pack_path = os.path.join(self._tempdir, 'pack_with_deltas')
     write_pack(pack_path, all_to_pack, deltify=True)
     output = run_git_or_fail(['verify-pack', '-v', pack_path])
     self.assertEqual(set(x[0].id for x in all_to_pack),
                      _git_verify_pack_object_list(output))
     # We specifically made a new blob that should be a delta
     # against the blob a_sha, so make sure we really got only 3
     # non-delta objects:
     got_non_delta = int(_NON_DELTA_RE.search(output).group('non_delta'))
     self.assertEqual(
         3, got_non_delta,
         'Expected 3 non-delta objects, got %d' % got_non_delta)
Example #34
 def test_get_blob(self):
     self.branch.lock_write()
     self.addCleanup(self.branch.unlock)
     b = Blob()
     b.data = b'a\nb\nc\nd\ne\n'
     self.store.lock_read()
     self.addCleanup(self.store.unlock)
     self.assertRaises(KeyError, self.store.__getitem__, b.id)
     bb = BranchBuilder(branch=self.branch)
     bb.start_series()
     bb.build_snapshot(None,
                       [('add', ('', None, 'directory', None)),
                        ('add', ('foo', b'foo-id', 'file', b'a\nb\nc\nd\ne\n')),
                        ])
     bb.finish_series()
     # read locks cache
     self.assertRaises(KeyError, self.store.__getitem__, b.id)
     self.store.unlock()
     self.store.lock_read()
     self.assertEqual(b, self.store[b.id])
Example #35
 def test_directory_converted_to_symlink(self):
     self.requireFeature(SymlinkFeature(self.test_dir))
     b = Blob()
     b.data = b'trgt'
     self.store.lock_read()
     self.addCleanup(self.store.unlock)
     self.assertRaises(KeyError, self.store.__getitem__, b.id)
     tree = self.branch.controldir.create_workingtree()
     self.build_tree_contents([
         ('foo/', ),
         ('foo/bar', b'a\nb\nc\nd\ne\n')])
     tree.add(['foo', 'foo/bar'])
     revid1 = tree.commit('commit 1')
     shutil.rmtree('foo')
     os.symlink('trgt', 'foo')
     revid2 = tree.commit('commit 2')
     # read locks cache
     self.assertRaises(KeyError, self.store.__getitem__, b.id)
     self.store.unlock()
     self.store.lock_read()
     self.assertEqual(b, self.store[b.id])
Example #36
def normalize_blob(blob, conversion, binary_detection):
    """ Takes a blob as input returns either the original blob if
    binary_detection is True and the blob content looks like binary, else
    return a new blob with converted data
    """
    # Read the original blob
    data = blob.data

    # If binary detection is requested and the file is detected as binary,
    # do not apply the conversion function and return the original blob
    if binary_detection is True:
        if is_binary(data):
            return blob

    # Now apply the conversion
    converted_data = conversion(data)

    new_blob = Blob()
    new_blob.data = converted_data

    return new_blob
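
A hedged usage sketch, with an illustrative CRLF-to-LF conversion callable:

from dulwich.objects import Blob

def crlf_to_lf(data):          # illustrative conversion function
    return data.replace(b'\r\n', b'\n')

blob = Blob()
blob.data = b'one\r\ntwo\r\n'
normalized = normalize_blob(blob, crlf_to_lf, binary_detection=True)
assert normalized.data == b'one\ntwo\n'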
Example #37
    def stage_data(self, name, data):
        name = name.lstrip(os.path.sep)
        f = open(os.path.join(self.path, name), 'wb')
        try:
            f.write(data)
        finally:
            f.close()
        return self.stage(name)

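        # NOTE: the method returns above, so everything below this
        # point is unreachable dead code.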
        from dulwich.index import index_entry_from_stat
        index = self.open_index()

        blob = Blob()
        blob.data = data
        self.object_store.add_object(blob)

        time_now = int(time.time())
        index[name] = (time_now, time_now, 0,
                        0, 0644, 0,
                        0, len(data), blob.id, 0)
        #index[name] = (stat_val.st_ctime, stat_val.st_mtime, stat_val.st_dev,
        #                stat_val.st_ino, 0644, stat_val.st_uid,
        #                stat_val.st_gid, stat_val.st_size, blob.id, 0)
        index.write()
Example #38
 def test_emit_blob(self):
     b = Blob()
     b.data = "fooBAR"
     self.fastexporter.emit_blob(b)
     self.assertEquals('blob\nmark :1\ndata 6\nfooBAR\n',
         self.stream.getvalue())
Example #39
from dulwich.objects import Blob
from dulwich.objects import Commit

spam = Blob.from_string(b"My new file content\n")
print(spam.id)

blob = Blob()
blob.data = b"My new file content\n"
print(blob.id)

c1 = Commit()
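
Both constructions above yield byte-identical content and therefore the same object id; a minimal check:

assert spam.id == blob.id  # same content, same SHA-1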
Example #40
from unittest import TestCase

from dulwich.objects import (
    Blob,
    )
from dulwich.object_store import (
    DiskObjectStore,
    MemoryObjectStore,
    )
import os
import shutil
import tempfile


testobject = Blob()
testobject.data = "yummy data"


class SpecificDiskObjectStoreTests(TestCase):

    def setUp(self):
        self.store_dir = tempfile.mkdtemp()

    def tearDown(self):
        shutil.rmtree(self.store_dir)

    def test_pack_dir(self):
        o = DiskObjectStore(self.store_dir)
        self.assertEquals(os.path.join(self.store_dir, "pack"), o.pack_dir)

    def test_empty_packs(self):
Example #41
 def record_iter_changes(self, workingtree, basis_revid, iter_changes):
     seen_root = False
     for (file_id, path, changed_content, versioned, parent, name, kind,
          executable) in iter_changes:
         if kind[1] in ("directory", ):
             self._inv_delta.append(
                 (path[0], path[1], file_id,
                  entry_factory[kind[1]](file_id, name[1], parent[1])))
             if kind[0] in ("file", "symlink"):
                 self._blobs[path[0].encode("utf-8")] = None
                 self._any_changes = True
             if path[1] == "":
                 seen_root = True
             continue
         self._any_changes = True
         if path[1] is None:
             self._inv_delta.append((path[0], path[1], file_id, None))
             self._blobs[path[0].encode("utf-8")] = None
             continue
         try:
             entry_kls = entry_factory[kind[1]]
         except KeyError:
             raise KeyError("unknown kind %s" % kind[1])
         entry = entry_kls(file_id, name[1], parent[1])
         if kind[1] == "file":
             entry.executable = executable[1]
             blob = Blob()
             f, st = workingtree.get_file_with_stat(path[1])
             try:
                 blob.data = f.read()
             finally:
                 f.close()
             entry.text_size = len(blob.data)
             entry.text_sha1 = osutils.sha_string(blob.data)
             self.store.add_object(blob)
             sha = blob.id
         elif kind[1] == "symlink":
             symlink_target = workingtree.get_symlink_target(path[1])
             blob = Blob()
             blob.data = symlink_target.encode("utf-8")
             self.store.add_object(blob)
             sha = blob.id
             entry.symlink_target = symlink_target
             st = None
         elif kind[1] == "tree-reference":
             sha = read_submodule_head(workingtree.abspath(path[1]))
             reference_revision = workingtree.get_reference_revision(
                 path[1])
             entry.reference_revision = reference_revision
             st = None
         else:
             raise AssertionError("Unknown kind %r" % kind[1])
         mode = object_mode(kind[1], executable[1])
         self._inv_delta.append((path[0], path[1], file_id, entry))
         encoded_new_path = path[1].encode("utf-8")
         self._blobs[encoded_new_path] = (mode, sha)
         if st is not None:
             yield file_id, path[1], (entry.text_sha1, st)
         if self._mapping.generate_file_id(encoded_new_path) != file_id:
             self._override_fileids[encoded_new_path] = file_id
         else:
             self._override_fileids[encoded_new_path] = None
     if not seen_root and len(self.parents) == 0:
         raise RootMissing()
     if getattr(workingtree, "basis_tree", False):
         basis_tree = workingtree.basis_tree()
     else:
         if len(self.parents) == 0:
             basis_revid = _mod_revision.NULL_REVISION
         else:
             basis_revid = self.parents[0]
         basis_tree = self.repository.revision_tree(basis_revid)
     # Fill in entries that were not changed
     for entry in basis_tree._iter_tree_contents(include_trees=False):
         if entry.path in self._blobs:
             continue
         self._blobs[entry.path] = (entry.mode, entry.sha)
     if not self._lossy:
         try:
             fileid_map = dict(basis_tree._fileid_map.file_ids)
         except AttributeError:
             fileid_map = {}
         for path, file_id in viewitems(self._override_fileids):
             if not isinstance(path, bytes):
                 raise TypeError(path)
             if file_id is None:
                 if path in fileid_map:
                     del fileid_map[path]
             else:
                 if not isinstance(file_id, bytes):
                     raise TypeError(file_id)
                 fileid_map[path] = file_id
         if fileid_map:
             fileid_blob = self._mapping.export_fileid_map(fileid_map)
         else:
             fileid_blob = None
         if fileid_blob is not None:
             if self._mapping.BZR_FILE_IDS_FILE is None:
                 raise SettingCustomFileIdsUnsupported(fileid_map)
             self.store.add_object(fileid_blob)
             self._blobs[self._mapping.BZR_FILE_IDS_FILE] = (stat.S_IFREG
                                                             | 0o644,
                                                             fileid_blob.id)
         else:
             self._blobs[self._mapping.BZR_FILE_IDS_FILE] = None
     self.new_inventory = None
Example #42
 def test_emit_blob(self):
     b = Blob()
     b.data = "fooBAR"
     self.fastexporter.emit_blob(b)
     self.assertEqual('blob\nmark :1\ndata 6\nfooBAR\n',
                      self.stream.getvalue())