def blob_handler(self, cmd):
    """Process a BlobCommand."""
    if cmd.mark is not None:
        dataref = cmd.id
    else:
        dataref = osutils.sha_strings(cmd.data)
    self.cache_mgr.store_blob(dataref, cmd.data)
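For context, osutils.sha_strings hashes an iterable of strings as one continuous byte stream and returns the hex digest. A minimal sketch of that behaviour, assuming it is equivalent to feeding each chunk into a single SHA-1 object (sha_strings_sketch is our name, not bzrlib API):

import hashlib

def sha_strings_sketch(strings):
    """Return the hex SHA-1 of the concatenation of the given strings.

    A stand-in for bzrlib's osutils.sha_strings, assuming it hashes
    the chunks as one continuous byte stream.
    """
    s = hashlib.sha1()
    for chunk in strings:
        s.update(chunk)
    return s.hexdigest()

# Hashing chunk-by-chunk matches hashing the joined content in one go:
assert (sha_strings_sketch([b"foo\n", b"bar\n"])
        == hashlib.sha1(b"foo\nbar\n").hexdigest())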
def _install_inventory_records(self, records):
    if self._info['serializer'] == self._repository._serializer.format_num:
        return self._install_mp_records_keys(self._repository.inventories,
            records)
    for key, metadata, bytes in records:
        revision_id = key[-1]
        parent_ids = metadata['parents']
        parents = [self._repository.get_inventory(p)
                   for p in parent_ids]
        p_texts = [self._source_serializer.write_inventory_to_string(p)
                   for p in parents]
        target_lines = multiparent.MultiParent.from_patch(bytes).to_lines(
            p_texts)
        sha1 = osutils.sha_strings(target_lines)
        if sha1 != metadata['sha1']:
            raise errors.BadBundle("Can't convert to target format")
        target_inv = self._source_serializer.read_inventory_from_string(
            ''.join(target_lines))
        self._handle_root(target_inv, parent_ids)
        try:
            self._repository.add_inventory(revision_id, target_inv,
                                           parent_ids)
        except errors.UnsupportedInventoryKind:
            raise errors.IncompatibleRevision(repr(self._repository))
def test_pack_preserves_chk_bytes_store(self):
    leaf_lines = ["chkleaf:\n", "0\n", "1\n", "0\n", "\n"]
    leaf_sha1 = osutils.sha_strings(leaf_lines)
    node_lines = ["chknode:\n", "0\n", "1\n", "1\n", "foo\n",
                  "\x00sha1:%s\n" % (leaf_sha1,)]
    node_sha1 = osutils.sha_strings(node_lines)
    expected_set = set([('sha1:' + leaf_sha1,), ('sha1:' + node_sha1,)])
    repo = self.make_repository('.')
    repo.lock_write()
    try:
        repo.start_write_group()
        try:
            # Internal node pointing at a leaf.
            repo.chk_bytes.add_lines((None,), None, node_lines,
                                     random_id=True)
        except:
            repo.abort_write_group()
            raise
        else:
            repo.commit_write_group()
        repo.start_write_group()
        try:
            # Leaf in a separate pack.
            repo.chk_bytes.add_lines((None,), None, leaf_lines,
                                     random_id=True)
        except:
            repo.abort_write_group()
            raise
        else:
            repo.commit_write_group()
        repo.pack()
        self.assertEqual(expected_set, repo.chk_bytes.keys())
    finally:
        repo.unlock()
    # and reopening
    repo = repo.bzrdir.open_repository()
    repo.lock_read()
    try:
        self.assertEqual(expected_set, repo.chk_bytes.keys())
    finally:
        repo.unlock()
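The test above relies on content-hash-key (CHK) addressing: a node is keyed by the SHA-1 of its own serialized lines, and an internal node refers to a leaf by embedding "sha1:<digest>". A toy illustration of that dependency, assuming this reading of the key scheme (chk_key is our name, not bzrlib API):

import hashlib

def chk_key(lines):
    # Content-hash key: a node's identity is the SHA-1 of its bytes.
    return "sha1:" + hashlib.sha1(b"".join(lines)).hexdigest()

leaf_lines = [b"chkleaf:\n", b"0\n", b"1\n", b"0\n", b"\n"]
leaf_key = chk_key(leaf_lines)
# The internal node embeds the leaf's key, so its own key depends on
# the leaf's content -- a Merkle-style dependency between packs.
node_lines = [b"chknode:\n", b"0\n", b"1\n", b"1\n", b"foo\n",
              b"\x00" + leaf_key.encode("ascii") + b"\n"]
node_key = chk_key(node_lines)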
def test_get_file_sha1(self):
    work_tree = self.make_branch_and_tree('tree')
    self.build_tree_contents([('tree/file', 'file content')])
    work_tree.add('file', 'file-id')
    tree = self._convert_tree(work_tree)
    tree.lock_read()
    self.addCleanup(tree.unlock)
    expected = osutils.sha_strings('file content')
    self.assertEqual(expected, tree.get_file_sha1('file-id'))
def get_lines(self, version_id):
    """See VersionedFile.get_lines()."""
    int_index = self._maybe_lookup(version_id)
    result = [line for (origin, lineno, line)
              in self._extract([int_index])]
    expected_sha1 = self._sha1s[int_index]
    measured_sha1 = sha_strings(result)
    if measured_sha1 != expected_sha1:
        raise errors.WeaveInvalidChecksum(
            'file %s, revision %s, expected: %s, measured %s'
            % (self._weave_name, version_id,
               expected_sha1, measured_sha1))
    return result
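The pattern here -- record a SHA-1 at write time, recompute it on every read, and fail loudly on mismatch -- can be shown in isolation. A minimal sketch under that assumption (ChecksummedStore is ours, not bzrlib API):

import hashlib

class ChecksummedStore:
    """Toy store that verifies content integrity on every read."""

    def __init__(self):
        self._lines = {}
        self._sha1s = {}

    def add(self, version_id, lines):
        # Record the digest alongside the content at write time.
        self._lines[version_id] = list(lines)
        self._sha1s[version_id] = hashlib.sha1(b"".join(lines)).hexdigest()

    def get_lines(self, version_id):
        # Recompute and compare before handing the content back.
        result = self._lines[version_id]
        measured = hashlib.sha1(b"".join(result)).hexdigest()
        if measured != self._sha1s[version_id]:
            raise ValueError("checksum mismatch for %r" % version_id)
        return result

store = ChecksummedStore()
store.add("rev-1", [b"hello\n", b"world\n"])
assert store.get_lines("rev-1") == [b"hello\n", b"world\n"]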
def test_get_file_verifier(self):
    work_tree = self.make_branch_and_tree('tree')
    self.build_tree_contents([
        ('tree/file1', 'file content'),
        ('tree/file2', 'file content')])
    work_tree.add(['file1', 'file2'], ['file-id-1', 'file-id-2'])
    tree = self._convert_tree(work_tree)
    tree.lock_read()
    self.addCleanup(tree.unlock)
    (kind, data) = tree.get_file_verifier('file-id-1')
    self.assertEqual(
        tree.get_file_verifier('file-id-1'),
        tree.get_file_verifier('file-id-2'))
    if kind == "SHA1":
        expected = osutils.sha_strings('file content')
        self.assertEqual(expected, data)
def _add(self, version_id, lines, parents, sha1=None, nostore_sha=None):
    """Add a single text on top of the weave.

    Returns the index number of the newly added version.

    version_id
        Symbolic name for this version.
        (Typically the revision-id of the revision that added it.)

    parents
        List or set of direct parent version numbers.

    lines
        Sequence of lines to be added in the new version.

    :param nostore_sha: See VersionedFile.add_lines.
    """
    self._check_lines_not_unicode(lines)
    self._check_lines_are_lines(lines)
    if not sha1:
        sha1 = sha_strings(lines)
    if sha1 == nostore_sha:
        raise errors.ExistingContent
    if version_id in self._name_map:
        return self._check_repeated_add(version_id, parents, lines, sha1)

    self._check_versions(parents)
    ## self._check_lines(lines)
    new_version = len(self._parents)

    # if we abort after here the (in-memory) weave will be corrupt because
    # only some fields are updated
    # XXX: FIXME implement a succeed-or-fail of the rest of this routine.
    #      - Robert Collins 20060226
    self._parents.append(parents[:])
    self._sha1s.append(sha1)
    self._names.append(version_id)
    self._name_map[version_id] = new_version

    if not parents:
        # special case; adding with no parents revision; can do
        # this more quickly by just appending unconditionally.
        # even more specially, if we're adding an empty text we
        # need do nothing at all.
        if lines:
            self._weave.append(('{', new_version))
            self._weave.extend(lines)
            self._weave.append(('}', None))
        return new_version

    if len(parents) == 1:
        pv = list(parents)[0]
        if sha1 == self._sha1s[pv]:
            # special case: same as the single parent
            return new_version

    ancestors = self._inclusions(parents)

    l = self._weave

    # basis a list of (origin, lineno, line)
    basis_lineno = []
    basis_lines = []
    for origin, lineno, line in self._extract(ancestors):
        basis_lineno.append(lineno)
        basis_lines.append(line)

    # another small special case: a merge, producing the same text
    # as auto-merge
    if lines == basis_lines:
        return new_version

    # add a sentinel, because we can also match against the final line
    basis_lineno.append(len(self._weave))

    # XXX: which line of the weave should we really consider matches
    # the end of the file?  the current code says it's the last line
    # of the weave?

    #print 'basis_lines:', basis_lines
    #print 'new_lines:  ', lines

    s = self._matcher(None, basis_lines, lines)

    # offset gives the number of lines that have been inserted
    # into the weave up to the current point; if the original edit
    # instruction says to change line A then we actually change (A+offset)
    offset = 0

    for tag, i1, i2, j1, j2 in s.get_opcodes():
        # i1,i2 are given in offsets within basis_lines; we need to map
        # them back to offsets within the entire weave
        #print 'raw match', tag, i1, i2, j1, j2
        if tag == 'equal':
            continue
        i1 = basis_lineno[i1]
        i2 = basis_lineno[i2]
        # the deletion and insertion are handled separately.
        # first delete the region.
        if i1 != i2:
            self._weave.insert(i1 + offset, ('[', new_version))
            self._weave.insert(i2 + offset + 1, (']', new_version))
            offset += 2

        if j1 != j2:
            # there may have been a deletion spanning up to
            # i2; we want to insert after this region to make sure
            # we don't destroy ourselves
            i = i2 + offset
            self._weave[i:i] = ([('{', new_version)]
                                + lines[j1:j2]
                                + [('}', None)])
            offset += 2 + (j2 - j1)

    return new_version
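The opcode loop at the end of _add is easier to follow with a concrete diff. The weave's self._matcher is pluggable; we assume a SequenceMatcher-compatible interface (which the get_opcodes() call implies), so the standard library's difflib.SequenceMatcher serves as a stand-in:

import difflib

basis_lines = ["a\n", "b\n", "c\n"]
new_lines = ["a\n", "B\n", "c\n", "d\n"]

s = difflib.SequenceMatcher(None, basis_lines, new_lines)
for tag, i1, i2, j1, j2 in s.get_opcodes():
    # 'equal' regions are skipped by _add; 'replace' produces both a
    # deletion ([ ... ]) and an insertion ({ ... }); 'insert' only the latter.
    print(tag, basis_lines[i1:i2], new_lines[j1:j2])
# Prints:
#   equal   ['a\n'] ['a\n']
#   replace ['b\n'] ['B\n']
#   equal   ['c\n'] ['c\n']
#   insert  []      ['d\n']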
def _add_lines(self, version_id, parents, lines, parent_texts,
               left_matching_blocks, nostore_sha, random_id,
               check_content):
    """See VersionedFile.add_lines."""
    idx = self._add(version_id, lines, map(self._lookup, parents),
                    nostore_sha=nostore_sha)
    return sha_strings(lines), sum(map(len, lines)), idx
def test_get_sha1s(self):
    self.assertEqual({("A",): osutils.sha_strings(["FOO"])},
                     self.texts.get_sha1s([("A",)]))
def as_sha1(self):
    return sha_strings(self.as_text_lines())