def push(self, db_branch_id, bzr_branch, required_format,
         stacked_on_url=None):
    """Push up `bzr_branch` as the Bazaar branch for `code_import`.

    :param db_branch_id: Database id of the branch; used to derive the
        mirror URL to push to.
    :param bzr_branch: The local Bazaar branch whose revisions are pushed.
    :param required_format: The control-dir format the remote branch must
        end up in; triggers an in-place format upgrade if the existing
        remote branch needs conversion.
    :param stacked_on_url: If not None, the remote branch is stacked on
        this URL after (re)creation.
    :return: A boolean that is true if the push was non-trivial
        (i.e. actually transferred revisions).
    """
    # Make sure the parent directories of the mirror area exist.
    self.transport.create_prefix()
    target_url = self._getMirrorURL(db_branch_id, push=True)
    try:
        remote_branch = Branch.open(target_url)
    except NotBranchError:
        # First push: create a fresh branch/repo in the required format.
        remote_branch = BzrDir.create_branch_and_repo(
            target_url, format=required_format)
        old_branch = None
    else:
        if remote_branch.bzrdir.needs_format_conversion(required_format):
            # For upgrades, push to a new branch in
            # the new format. When done pushing,
            # retire the old .bzr directory and rename
            # the new one in place.
            old_branch = remote_branch
            upgrade_url = urljoin(target_url, "backup.bzr")
            try:
                # Remove leftovers from a previous interrupted upgrade.
                remote_branch.bzrdir.root_transport.delete_tree(
                    'backup.bzr')
            except NoSuchFile:
                pass
            remote_branch = BzrDir.create_branch_and_repo(
                upgrade_url, format=required_format)
        else:
            old_branch = None
    # This can be done safely, since only modern formats are used to
    # import to.
    if stacked_on_url is not None:
        remote_branch.set_stacked_on_url(stacked_on_url)
    # Overwrite: the mirror always mirrors exactly what was imported.
    pull_result = remote_branch.pull(bzr_branch, overwrite=True)
    # Because of the way we do incremental imports, there may be revisions
    # in the branch's repo that are not in the ancestry of the branch tip.
    # We need to transfer them too.
    remote_branch.repository.fetch(bzr_branch.repository)
    if old_branch is not None:
        # The format has changed; move the new format
        # branch in place: drop the old .bzr, then promote the
        # freshly-pushed backup.bzr/.bzr into its position.
        base_transport = old_branch.bzrdir.root_transport
        base_transport.delete_tree('.bzr')
        base_transport.rename("backup.bzr/.bzr", ".bzr")
        base_transport.rmdir("backup.bzr")
    return pull_result.old_revid != pull_result.new_revid
def push(self, db_branch_id, bzr_branch, required_format,
         stacked_on_url=None):
    """Push up `bzr_branch` as the Bazaar branch for `code_import`.

    :param db_branch_id: Database id of the branch; used to derive the
        mirror URL to push to.
    :param bzr_branch: The local Bazaar branch whose revisions are pushed.
    :param required_format: The control-dir format the remote branch must
        end up in; triggers an in-place format upgrade if the existing
        remote branch needs conversion.
    :param stacked_on_url: If not None, the remote branch is stacked on
        this URL after (re)creation.
    :return: A boolean that is true if the push was non-trivial
        (i.e. actually transferred revisions).
    """
    # Make sure the parent directories of the mirror area exist.
    self.transport.create_prefix()
    target_url = self._getMirrorURL(db_branch_id)
    try:
        remote_branch = Branch.open(target_url)
    except NotBranchError:
        # First push: create a fresh branch/repo in the required format.
        remote_branch = BzrDir.create_branch_and_repo(
            target_url, format=required_format)
        old_branch = None
    else:
        if remote_branch.bzrdir.needs_format_conversion(
                required_format):
            # For upgrades, push to a new branch in
            # the new format. When done pushing,
            # retire the old .bzr directory and rename
            # the new one in place.
            old_branch = remote_branch
            upgrade_url = urljoin(target_url, "backup.bzr")
            try:
                # Remove leftovers from a previous interrupted upgrade.
                remote_branch.bzrdir.root_transport.delete_tree(
                    'backup.bzr')
            except NoSuchFile:
                pass
            remote_branch = BzrDir.create_branch_and_repo(
                upgrade_url, format=required_format)
        else:
            old_branch = None
    # This can be done safely, since only modern formats are used to
    # import to.
    if stacked_on_url is not None:
        remote_branch.set_stacked_on_url(stacked_on_url)
    # Overwrite: the mirror always mirrors exactly what was imported.
    pull_result = remote_branch.pull(bzr_branch, overwrite=True)
    # Because of the way we do incremental imports, there may be revisions
    # in the branch's repo that are not in the ancestry of the branch tip.
    # We need to transfer them too.
    remote_branch.repository.fetch(bzr_branch.repository)
    if old_branch is not None:
        # The format has changed; move the new format
        # branch in place: drop the old .bzr, then promote the
        # freshly-pushed backup.bzr/.bzr into its position.
        base_transport = old_branch.bzrdir.root_transport
        base_transport.delete_tree('.bzr')
        base_transport.rename("backup.bzr/.bzr", ".bzr")
        base_transport.rmdir("backup.bzr")
    return pull_result.old_revid != pull_result.new_revid
def test_weaves_are_retrieved_once(self):
    """Fetching over a readonly (HTTP) transport should read each
    storage file at most once, as shown by the web server's request log.
    """
    self.build_tree(("source/", "source/file", "target/"))
    # This test depends on knit data storage.
    wt = self.make_branch_and_tree('source', format='dirstate-tags')
    branch = wt.branch
    wt.add(["file"], ["id"])
    wt.commit("added file")
    open("source/file", 'w').write("blah\n")
    wt.commit("changed file")
    target = BzrDir.create_branch_and_repo("target/")
    source = Branch.open(self.get_readonly_url("source/"))
    # Two revisions should be fetched, with no failures.
    self.assertEqual(target.fetch(source), (2, []))
    # this is the path to the literal file. As format changes
    # occur it needs to be updated. FIXME: ask the store for the
    # path.
    self.log("web server logs are:")
    http_logs = self.get_readonly_server().logs
    self.log('\n'.join(http_logs))
    # unfortunately this log entry is branch format specific. We could
    # factor out the 'what files does this format use' to a method on the
    # repository, which would let us do this generically. RBC 20060419
    # RBC 20080408: Or perhaps we can assert that no files are fully read
    # twice?
    self.assertEqual(1, self._count_log_matches('/ce/id.kndx', http_logs))
    self.assertEqual(1, self._count_log_matches('/ce/id.knit', http_logs))
    self.assertEqual(1, self._count_log_matches('inventory.kndx',
                                                http_logs))
    # this r-h check test will prevent regressions, but it currently
    # already passes, before the patch to cache-rh is applied :[
    self.assertTrue(1 >= self._count_log_matches('revision-history',
                                                 http_logs))
    self.assertTrue(1 >= self._count_log_matches('last-revision',
                                                 http_logs))
    # FIXME naughty poking in there.
    self.get_readonly_server().logs = []
    # check there is nothing more to fetch. We take care to re-use the
    # existing transport so that the request logs we're about to examine
    # aren't cluttered with redundant probes for a smart server.
    # XXX: Perhaps this further parameterisation: test http with smart
    # server, and test http without smart server?
    source = Branch.open(
        self.get_readonly_url("source/"),
        possible_transports=[source.bzrdir.root_transport])
    # Nothing new to fetch this time.
    self.assertEqual(target.fetch(source), (0, []))
    # should make just two requests
    http_logs = self.get_readonly_server().logs
    self.log("web server logs are:")
    self.log('\n'.join(http_logs))
    self.assertEqual(1, self._count_log_matches('branch-format',
                                                http_logs))
    self.assertEqual(1, self._count_log_matches('branch/format',
                                                http_logs))
    self.assertEqual(1, self._count_log_matches('repository/format',
                                                http_logs))
    self.assertTrue(1 >= self._count_log_matches('revision-history',
                                                 http_logs))
    self.assertTrue(1 >= self._count_log_matches('last-revision',
                                                 http_logs))
    # No other requests should have been made at all.
    self.assertEqual(4, len(http_logs))
def pull(self, db_branch_id, target_path, required_format,
         needs_tree=False, stacked_on_url=None):
    """Pull down the Bazaar branch of an import to `target_path`.

    :param db_branch_id: Database id of the branch; used to derive the
        mirror URL to pull from.
    :param target_path: Local filesystem path to materialize the branch
        at.
    :param required_format: Format the local branch must be in; a fresh
        branch is created in it, and an existing copy is upgraded to it
        if needed.
    :param needs_tree: If true, a working tree is created as well.
    :param stacked_on_url: If set, a freshly created branch is stacked
        on this URL.
    :return: A Bazaar branch for the code import corresponding to the
        database branch with id `db_branch_id`.
    """
    remote_url = self._getMirrorURL(db_branch_id)
    try:
        remote_bzr_dir = BzrDir.open(remote_url)
    except NotBranchError:
        # Nothing mirrored yet: start from an empty local branch.
        local_branch = BzrDir.create_branch_and_repo(
            target_path, format=required_format)
        if needs_tree:
            local_branch.bzrdir.create_workingtree()
        if stacked_on_url:
            local_branch.set_stacked_on_url(stacked_on_url)
        return local_branch
    # The proper thing to do here would be to call
    # "remote_bzr_dir.sprout()". But 2a fetch slowly checks which
    # revisions are in the ancestry of the tip of the remote branch, which
    # we strictly don't care about, so we just copy the whole thing down
    # at the vfs level.
    # control_dir is the relative path of the remote control directory
    # (typically '.bzr') under the branch root.
    control_dir = remote_bzr_dir.root_transport.relpath(
        remote_bzr_dir.transport.abspath('.'))
    target = get_transport_from_path(target_path)
    target_control = target.clone(control_dir)
    target_control.create_prefix()
    # Raw byte-for-byte copy of the control directory contents.
    remote_bzr_dir.transport.copy_tree_to_transport(target_control)
    local_bzr_dir = BzrDir.open_from_transport(target)
    if local_bzr_dir.needs_format_conversion(format=required_format):
        try:
            # Clear any backup left over from a previous upgrade attempt
            # so the upgrade below does not trip over it.
            local_bzr_dir.root_transport.delete_tree('backup.bzr')
        except NoSuchFile:
            pass
        upgrade(target_path, required_format, clean_up=True)
    if needs_tree:
        local_bzr_dir.create_workingtree()
    return local_bzr_dir.open_branch()
def test_new_files(self):
    """Files created by pulling over SFTP should keep the permission
    modes already set on the target's .bzr tree, across repeated
    commits and pulls.
    """
    if sys.platform == 'win32':
        raise TestSkipped('chmod has no effect on win32')
    # Though it would be nice to test that SFTP to a server
    # which does support chmod has the right effect

    # bodge around for stubsftpserver not letting use connect
    # more than once
    _t = get_transport(self.get_url())

    os.mkdir('local')
    t_local = self.make_branch_and_tree('local')
    b_local = t_local.branch
    open('local/a', 'wb').write('foo\n')
    t_local.add('a')
    t_local.commit('foo')

    # Delete them because we are modifying the filesystem underneath them
    chmod_r('local/.bzr', 0644, 0755)
    check_mode_r(self, 'local/.bzr', 0644, 0755)

    t = WorkingTree.open('local')
    b_local = t.branch
    self.assertEqualMode(0755, b_local.control_files._dir_mode)
    self.assertEqualMode(0644, b_local.control_files._file_mode)
    self.assertEqualMode(0755, b_local.bzrdir._get_dir_mode())
    self.assertEqualMode(0644, b_local.bzrdir._get_file_mode())

    os.mkdir('sftp')
    sftp_url = self.get_url('sftp')
    b_sftp = BzrDir.create_branch_and_repo(sftp_url)

    b_sftp.pull(b_local)
    # Drop the branch object so the chmod below is not fighting
    # cached mode state.
    del b_sftp
    chmod_r('sftp/.bzr', 0644, 0755)
    check_mode_r(self, 'sftp/.bzr', 0644, 0755)

    b_sftp = Branch.open(sftp_url)
    self.assertEqualMode(0755, b_sftp.control_files._dir_mode)
    self.assertEqualMode(0644, b_sftp.control_files._file_mode)
    self.assertEqualMode(0755, b_sftp.bzrdir._get_dir_mode())
    self.assertEqualMode(0644, b_sftp.bzrdir._get_file_mode())

    open('local/a', 'wb').write('foo2\n')
    t_local.commit('foo2')
    b_sftp.pull(b_local)
    # The mode should be maintained after commit
    check_mode_r(self, 'sftp/.bzr', 0644, 0755)

    # A brand new file must also pick up the existing modes.
    open('local/b', 'wb').write('new b\n')
    t_local.add('b')
    t_local.commit('new b')
    b_sftp.pull(b_local)
    check_mode_r(self, 'sftp/.bzr', 0644, 0755)

    del b_sftp
    # Recursively update the modes of all files
    chmod_r('sftp/.bzr', 0664, 0775)
    check_mode_r(self, 'sftp/.bzr', 0664, 0775)

    # Re-open: the new, group-writable modes should now be detected.
    b_sftp = Branch.open(sftp_url)
    self.assertEqualMode(0775, b_sftp.control_files._dir_mode)
    self.assertEqualMode(0664, b_sftp.control_files._file_mode)
    self.assertEqualMode(0775, b_sftp.bzrdir._get_dir_mode())
    self.assertEqualMode(0664, b_sftp.bzrdir._get_file_mode())

    open('local/a', 'wb').write('foo3\n')
    t_local.commit('foo3')
    b_sftp.pull(b_local)
    check_mode_r(self, 'sftp/.bzr', 0664, 0775)

    # New files after the mode change inherit the new modes too.
    open('local/c', 'wb').write('new c\n')
    t_local.add('c')
    t_local.commit('new c')
    b_sftp.pull(b_local)
    check_mode_r(self, 'sftp/.bzr', 0664, 0775)