def _combine_journals(self, start, stop):
    """Combine a number of journals to get a tree model."""
    model = journals.Combiner()
    journaldir = self._journaldir()
    for journal_id in range(start, stop + 1):
        model.add(journals.parse(journaldir.get_bytes(str(journal_id))))
    return model.as_tree()
def test_parse_v2(self):
    journal = journals.parse("""l-mirror-journal-2
1234\0replace\0symlink\0foo bar/baz\0file\0e935b6eefc78846802e12039d6dd9a7e27622301\0000\x000\x00abc\0new\0file\00012039d6dd9a7e27622301e935b6eefc78846802e\00011\x002\0abc/def\0del\0dir""")
    expected = journals.Journal()
    expected.add('abc', 'new', journals.FileContent(
        '12039d6dd9a7e27622301e935b6eefc78846802e', 11, 2.0))
    expected.add('abc/def', 'del', journals.DirContent())
    expected.add('1234', 'replace', (
        journals.SymlinkContent('foo bar/baz'),
        journals.FileContent(
            'e935b6eefc78846802e12039d6dd9a7e27622301', 0, 0.0)))
    self.assertEqual(expected.paths, journal.paths)
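# The wire format exercised above, as inferred from the fixture (a
# descriptive note only, not an authoritative spec): a v2 journal starts
# with the header line 'l-mirror-journal-2\n', followed by NUL-separated
# fields grouped as
#     path NUL action NUL content
# where action is 'new', 'del' or 'replace'; content is 'dir', or
# 'symlink' NUL target, or 'file' NUL sha1 NUL length NUL mtime; and a
# 'replace' record carries two content groups back to back (the pair
# passed to Journal.add above).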
def get_generator(self, from_journal, to_journal):
    """Get a ReplayGenerator for some journals.

    Signatures are not checked here - the client should be doing the
    cross checking and signature checking.

    :param from_journal: The first journal to include.
    :param to_journal: The last journal to include.
    """
    needed = range(from_journal, to_journal + 1)
    combiner = journals.Combiner()
    journal_dir = self._journaldir()
    for journal_id in needed:
        journal_bytes = journal_dir.get_bytes(str(journal_id))
        journal = journals.parse(journal_bytes)
        combiner.add(journal)
    return journals.ReplayGenerator(
        combiner.journal, self._contentdir(), self.ui)
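# Usage sketch: this is how receive() further down drives the generator
# when replaying remote journals locally; `source`, `target` and
# `combined_journal` are illustrative stand-ins for two MirrorSet
# objects and their combined journal, not part of the module API.
#
#     generator = source.get_generator(latest + 1, source_latest)
#     replayer = journals.TransportReplay(combined_journal, generator,
#         target.base, target.ui)
#     replayer.replay()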
def test_parse_empty(self):
    journal = journals.parse('l-mirror-journal-1\n')
    self.assertEqual({}, journal.paths)
def receive(self, another_mirrorset):
    """Perform a receive from another_mirrorset."""
    # XXX: check it's a mirror of the same set. UUID or convergence?
    self.ui.output_log(5, 'l_mirror.mirrorset',
        'Starting transmission from mirror %s at %s to %s at %s' % (
        another_mirrorset.name, another_mirrorset.base.base, self.name,
        self.base.base))
    metadata = self._get_metadata()
    source_meta = another_mirrorset._get_metadata()
    latest = int(metadata.get('metadata', 'latest'))
    source_latest = int(source_meta.get('metadata', 'latest'))
    signed = self._is_signed()
    # XXX: BASIS: basis handling needed here (and that's when we
    # need non-overlapping syncing).
    if source_latest > latest:
        needed = range(latest + 1, source_latest + 1)
        new_journals = len(needed)
        combiner = journals.Combiner()
        source_journaldir = another_mirrorset._journaldir()
        journal_dir = self._journaldir()
        for journal_id in needed:
            journal_bytes = source_journaldir.get_bytes(str(journal_id))
            if signed:
                # Copy the sig, check it's valid in the current keyring.
                sig_name = "%s.sig" % journal_id
                sig_bytes = source_journaldir.get_bytes(sig_name)
                journal_dir.put_bytes(sig_name, sig_bytes)
                self.gpgv_strategy.verify(journal_dir, sig_name,
                    journal_bytes)
            journal_dir.put_bytes(str(journal_id), journal_bytes)
            journal = journals.parse(journal_bytes)
            combiner.add(journal)
        changed_paths = len(combiner.journal.paths)
        # If the keyring was changed and we were not signed before, copy
        # the keyring and check that all signed journals validate.
        keyringpath = '.lmirror/sets/%s/lmirror.gpg' % self.name
        if keyringpath in combiner.journal.paths:
            minijournal = journals.Journal()
            minijournal.paths[keyringpath] = combiner.journal.paths[
                keyringpath]
            generator = journals.ReplayGenerator(minijournal,
                another_mirrorset._contentdir(), self.ui)
            replayer = journals.TransportReplay(minijournal, generator,
                self.base, self.ui)
            replayer.replay()
            for journal_id in needed:
                try:
                    sig_name = "%s.sig" % journal_id
                    sig_bytes = source_journaldir.get_bytes(sig_name)
                    journal_dir.put_bytes(sig_name, sig_bytes)
                except NoSuchFile:
                    continue
                self.gpgv_strategy.verify(journal_dir, sig_name,
                    journal_dir.get_bytes(str(journal_id)))
        # Now we have a journal that is GPG checked representing what we
        # want to receive.
        replayer = journals.TransportReplay(combiner.journal,
            another_mirrorset.get_generator(latest + 1, source_latest),
            self.base, self.ui)
        replayer.replay()
        metadata.set('metadata', 'latest', str(source_latest))
        metadata.set('metadata', 'timestamp',
            source_meta.get('metadata', 'timestamp'))
        self._set_metadata(metadata)
    else:
        changed_paths = 0
        new_journals = 0
    self.ui.output_log(5, 'l_mirror.mirrorset',
        'Received %d path changes in %d journals from mirror %s at '
        '%s to %s at %s' % (changed_paths, new_journals,
        another_mirrorset.name, another_mirrorset.base.base, self.name,
        self.base.base))
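# Usage sketch for receive(): given two MirrorSet objects for the same
# set (construction is outside this excerpt, so `local_set` and
# `upstream_set` are hypothetical names), a pull is simply:
#
#     local_set.receive(upstream_set)
#
# which copies and verifies any journals newer than the local 'latest'
# marker and replays their content into local_set.base.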