def merge_bundle(reader, tree, check_clean, merge_type,
                 reprocess, show_base, change_reporter=None):
    """Merge a revision bundle into the current tree.

    :param reader: bundle reader supplying the revisions to merge
    :param tree: working tree to merge into
    :param check_clean: if True, refuse to merge when the tree has
        uncommitted changes
    :param merge_type: merger class to use for the merge
    :param reprocess: whether to reprocess the merge result
    :param show_base: whether to include base text in conflict markers
    :param change_reporter: optional reporter passed to the Merger
    :return: the number of conflicts, or 0 if there was nothing to do
    """
    pb = ui.ui_factory.nested_progress_bar()
    try:
        pp = ProgressPhase("Merge phase", 6, pb)
        pp.next_phase()
        install_bundle(tree.branch.repository, reader)
        merger = Merger(tree.branch, this_tree=tree,
                        change_reporter=change_reporter)
        merger.pp = pp
        merger.pp.next_phase()
        if check_clean and tree.has_changes():
            # BUG FIX: the original raised UncommittedChanges(self), but
            # this is a module-level function with no 'self' in scope,
            # which would raise NameError instead of the intended error.
            raise errors.UncommittedChanges(tree)
        merger.other_rev_id = reader.target
        merger.other_tree = merger.revision_tree(reader.target)
        merger.other_basis = reader.target
        merger.pp.next_phase()
        merger.find_base()
        if merger.base_rev_id == merger.other_rev_id:
            note(gettext("Nothing to do."))
            return 0
        merger.merge_type = merge_type
        merger.show_base = show_base
        merger.reprocess = reprocess
        conflicts = merger.do_merge()
        merger.set_pending()
    finally:
        pb.clear()
    return conflicts
def gssapi_login(self, user):
    """Attempt GSSAPI (Kerberos) authentication against the FTP server.

    On success, patches this instance's putcmd/getline with the MIC
    variants and sends the USER command; returns the last server
    response.  On failure, logs via mutter() and implicitly returns
    None so the caller can fall back to another login method.

    NOTE(review): uses Python 2 ``raise E, arg`` syntax — this module
    is Python 2 only.
    """
    # Try GSSAPI login first

    # Used FTP response codes:
    # 235 [ADAT=base64data] - indicates that the security data exchange
    #     completed successfully.
    # 334 [ADAT=base64data] - indicates that the requested security
    #     mechanism is ok, and includes security data to be used by the
    #     client to construct the next command.
    # 335 [ADAT=base64data] - indicates that the security data is
    #     acceptable, and more is required to complete the security
    #     data exchange.
    resp = self.sendcmd('AUTH GSSAPI')
    if resp.startswith('334 '):
        rc, self.vc = kerberos.authGSSClientInit("ftp@%s" % self.host)
        if kerberos.authGSSClientStep(self.vc, "") != 1:
            # Keep exchanging ADAT tokens until the server stops asking
            # for more security data (i.e. replies neither 334 nor 335).
            while resp[:4] in ('334 ', '335 '):
                authdata = kerberos.authGSSClientResponse(self.vc)
                resp = self.sendcmd('ADAT ' + authdata)
                if resp[:9] in ('235 ADAT=', '335 ADAT='):
                    rc = kerberos.authGSSClientStep(self.vc, resp[9:])
            # 235 must come with a completed client step (rc == 1);
            # 335 with a continuing one (rc == 0). Anything else is an
            # inconsistent exchange.
            if not ((resp.startswith('235 ') and rc == 1) or
                    (resp.startswith('335 ') and rc == 0)):
                raise ftplib.error_reply, resp
        note(
            gettext("Authenticated as %s") %
            kerberos.authGSSClientUserName(self.vc))
        # Monkey patch ftplib
        self.putcmd = self.mic_putcmd
        self.getline = self.mic_getline
        self.sendcmd('USER ' + user)
        return resp
    mutter("Unable to use GSSAPI authentication: %s", resp)
def redirected_transport(transport, exception, redirection_notice):
    """Follow a redirection and return a transport for the new location.

    :param transport: the transport that was redirected (unused here,
        kept for the callback signature)
    :param exception: the redirection exception carrying the target URL
    :param redirection_notice: human-readable notice to report
    :raises errors.NotABundle: if the target is a directory rather than
        a bundle file
    :return: a transport pointing at the redirected URL's directory
    """
    note(redirection_notice)
    target = exception.target
    url, filename = urlutils.split(target, exclude_trailing_slash=False)
    if not filename:
        raise errors.NotABundle('A directory cannot be a bundle')
    return get_transport(url)
def _set_revision(self, revision):
    """Populate the revision-view widgets from *revision*.

    A None revision is ignored.  Updates committer, author, timestamp,
    branch nick, parent links and per-file commit messages, showing or
    hiding the corresponding widgets as data is available.
    """
    if revision is None:
        return
    self._revision = revision
    if revision.committer is not None:
        self.committer.set_text(revision.committer)
    else:
        self.committer.set_text("")
    # The author is only shown when it differs from the committer
    # (i.e. when the 'author' revision property is set).
    author = revision.properties.get('author', '')
    if author != '':
        self.author.set_text(author)
        self.author.show()
        self.author_label.show()
    else:
        self.author.hide()
        self.author_label.hide()
    if revision.timestamp is not None:
        self.timestamp.set_text(format_date(revision.timestamp,
                                            revision.timezone))
    # EAFP: a missing 'branch-nick' property hides the widgets again.
    try:
        self.branchnick.show()
        self.branchnick_label.show()
        self.branchnick.set_text(revision.properties['branch-nick'])
    except KeyError:
        self.branchnick.hide()
        self.branchnick_label.hide()
    self._add_parents_or_children(revision.parent_ids,
                                  self.parents_widgets,
                                  self.parents_table)
    file_info = revision.properties.get('file-info', None)
    if file_info is not None:
        try:
            file_info = bdecode(file_info.encode('UTF-8'))
        except ValueError:
            trace.note('Invalid per-file info for revision:%s, value: %r',
                       revision.revision_id, file_info)
            file_info = None
    if file_info:
        if self._file_id is None:
            # No file selected: show every per-file message.
            text = []
            for fi in file_info:
                text.append('%(path)s\n%(message)s' % fi)
            self.file_info_buffer.set_text('\n'.join(text))
            self.file_info_box.show()
        else:
            # Show only the messages for the selected file id.
            text = []
            for fi in file_info:
                if fi['file_id'] == self._file_id:
                    text.append(fi['message'])
            if text:
                self.file_info_buffer.set_text('\n'.join(text))
                self.file_info_box.show()
            else:
                self.file_info_box.hide()
    else:
        self.file_info_box.hide()
def get_branch(repo, relpath, format=None):
    """Return existing branch in destination repo. Create new if don't exist.

    @param format: force create new branch in specified format.
    """
    repo_trans = repo.bzrdir.root_transport
    try:
        br_dir = BzrDir.open(repo_trans.abspath(relpath))
        branch = br_dir.open_branch()
    except errors.NotBranchError:
        # create destination branch directory, creating parents as needed.
        # Works bottom-up: push each missing parent onto 'needed' until a
        # mkdir succeeds, then unwind.
        needed = [relpath]
        while needed:
            try:
                repo_trans.mkdir(needed[-1])
                needed.pop()
            except errors.NoSuchFile:
                parent = urlutils.dirname(needed[-1])
                if parent == '':
                    # Reached the transport root without success — give up.
                    raise errors.BzrCommandError('Could not create branch dir')
                needed.append(parent)
        br_dir = BzrDir.create(repo_trans.abspath(relpath))
        if format is None:
            format = BranchFormat.get_default_format()
        branch = format.initialize(br_dir)
        note('Created destination branch %s' % relpath)
    # Sanity check: the opened/created branch must live in 'repo'.
    if branch.repository.bzrdir.root_transport.base != repo_trans.base:
        raise errors.BzrCommandError('Branch %s does not use repository %s'
                                     % (relpath, repo_trans.base))
    # XXX: hack to make sure the branch is using the same repository
    # instance, for locking purposes
    branch.repository = repo
    return branch
def run(self, path=None, perms=None, preload=True,
        children_timeout=LPForkingService.WAIT_FOR_CHILDREN_TIMEOUT,
        pid_file=None):
    """Run the forking service until its main loop exits.

    :param path: socket path (defaults to LPForkingService.DEFAULT_PATH)
    :param perms: socket permissions (defaults to DEFAULT_PERMISSIONS)
    :param preload: preload commonly-used libraries before serving
    :param children_timeout: how long to wait for children on shutdown
    :param pid_file: if given, daemonize and write the pid there; the
        file is removed (best-effort) once the loop finishes
    """
    if pid_file is not None:
        self._daemonize(pid_file)
    path = LPForkingService.DEFAULT_PATH if path is None else path
    perms = LPForkingService.DEFAULT_PERMISSIONS if perms is None else perms
    if preload:
        # We 'note' this because it often takes a fair amount of time.
        trace.note("Preloading %d modules" % (len(libraries_to_preload),))
        self._preload_libraries()
    service = LPForkingService(path, perms)
    service.WAIT_FOR_CHILDREN_TIMEOUT = children_timeout
    service.main_loop()
    if pid_file is None:
        return
    try:
        os.remove(pid_file)
    except (OSError, IOError) as e:
        trace.mutter("Failed to cleanup pid_file: %s\n%s" % (pid_file, e))
def should_run(self):
    """Determine whether we can and should do anything.

    We only update the remote branch if it is local or accessible by
    ssh, which means URLs which start with bzr+ssh, ssh or sftp.
    """
    location = self.target()
    if location is None:
        return False
    if '://' not in location:
        # Looks like a plain filesystem path rather than a URL: local,
        # so always OK to update.
        return True
    # URL form: only schemes we can push to directly are accepted.
    if location.startswith(('sftp://', 'ssh://', 'bzr+ssh://', 'file://')):
        return True
    trace.note('Not updating post_commit_mirror location %s' % location)
    return False
def upgrade_repository(repository, generate_rebase_map, determine_new_revid,
                       revision_id=None, allow_changes=False, verbose=False):
    """Upgrade the revisions in repository until the specified stop revision.

    :param repository: Repository in which to upgrade.
    :param foreign_repository: Repository to fetch new revisions from.
    :param new_mapping: New mapping.
    :param revision_id: Revision id up until which to upgrade, or None for
                        all revisions.
    :param allow_changes: Allow changes to mappings.
    :param verbose: Whether to print list of rewrites
    :return: Dictionary of mapped revisions
    """
    # Find revisions that need to be upgraded, create
    # dictionary with revision ids in key, new parents in value
    # BUG FIX: acquire the lock *before* entering the try block.  The
    # original called lock_write() inside the try, so a failed lock
    # attempt still ran the finally clause and called unlock() on a
    # repository that was never locked.
    repository.lock_write()
    try:
        (plan, revid_renames) = create_upgrade_plan(
            repository, generate_rebase_map, determine_new_revid,
            revision_id=revision_id, allow_changes=allow_changes)
        if verbose:
            for revid in rebase_todo(repository, plan):
                trace.note("%s -> %s" % (revid, plan[revid][0]))
        rebase(repository, plan, CommitBuilderRevisionRewriter(repository))
        return revid_renames
    finally:
        repository.unlock()
def gssapi_login(self, user):
    """Attempt GSSAPI (Kerberos) login; fall through quietly on failure.

    Returns the final server response on success (after patching
    putcmd/getline to the MIC variants and sending USER); otherwise
    logs the unusable response with mutter() and returns None.

    NOTE(review): Python 2 ``raise E, arg`` syntax — Python 2 only.
    """
    # Try GSSAPI login first

    # Used FTP response codes:
    # 235 [ADAT=base64data] - indicates that the security data exchange
    #     completed successfully.
    # 334 [ADAT=base64data] - indicates that the requested security
    #     mechanism is ok, and includes security data to be used by the
    #     client to construct the next command.
    # 335 [ADAT=base64data] - indicates that the security data is
    #     acceptable, and more is required to complete the security
    #     data exchange.
    resp = self.sendcmd('AUTH GSSAPI')
    if resp.startswith('334 '):
        rc, self.vc = kerberos.authGSSClientInit("ftp@%s" % self.host)
        if kerberos.authGSSClientStep(self.vc, "") != 1:
            # Exchange ADAT tokens until the server stops continuing
            # the handshake (replies neither 334 nor 335).
            while resp[:4] in ('334 ', '335 '):
                authdata = kerberos.authGSSClientResponse(self.vc)
                resp = self.sendcmd('ADAT ' + authdata)
                if resp[:9] in ('235 ADAT=', '335 ADAT='):
                    rc = kerberos.authGSSClientStep(self.vc, resp[9:])
            # Validate the terminal state: 235 requires a completed
            # client step (rc == 1), 335 a continuing one (rc == 0).
            if not ((resp.startswith('235 ') and rc == 1) or
                    (resp.startswith('335 ') and rc == 0)):
                raise ftplib.error_reply, resp
        note(gettext("Authenticated as %s") %
             kerberos.authGSSClientUserName(self.vc))
        # Monkey patch ftplib
        self.putcmd = self.mic_putcmd
        self.getline = self.mic_getline
        self.sendcmd('USER ' + user)
        return resp
    mutter("Unable to use GSSAPI authentication: %s", resp)
def _update_branch(self, br, last_mark):
    """Update a branch with last revision and tag information.

    :param br: branch to update
    :param last_mark: fastimport mark naming the branch tip
    :return: whether the branch was changed or not
    """
    last_rev_id = self.cache_mgr.lookup_committish(last_mark)
    self.repo.lock_read()
    try:
        graph = self.repo.get_graph()
        # Distance to null == revno of the tip revision.
        revno = graph.find_distance_to_null(last_rev_id, [])
    finally:
        self.repo.unlock()
    existing_revno, existing_last_rev_id = br.last_revision_info()
    changed = False
    if revno != existing_revno or last_rev_id != existing_last_rev_id:
        br.set_last_revision_info(revno, last_rev_id)
        changed = True
    # apply tags known in this branch
    my_tags = {}
    if self.tags:
        graph = self.repo.get_graph()
        # iter_ancestry yields (rev, parents); parents is None for
        # ghosts, which are excluded here.
        ancestry = [r for (r, ps) in graph.iter_ancestry([last_rev_id])
                    if ps is not None]
        for tag, rev in self.tags.items():
            if rev in ancestry:
                my_tags[tag] = rev
        if my_tags:
            br.tags._set_tag_dict(my_tags)
            changed = True
    if changed:
        tagno = len(my_tags)
        note("\t branch %s now has %d %s and %d %s", br.nick,
             revno, single_plural(revno, "revision", "revisions"),
             tagno, single_plural(tagno, "tag", "tags"))
    return changed
def _note_lock(self, lock_type):
    """Record the lock type taken, noting repeated same-type locks.

    When the 'relock' debug flag is active and the object is locked
    with the same type ('r' or 'w') as the previous lock, emit a note.
    """
    relocked = ('relock' in debug.debug_flags
                and self._prev_lock == lock_type)
    if relocked:
        type_name = 'read' if lock_type == 'r' else 'write'
        trace.note(gettext('{0!r} was {1} locked again'), self, type_name)
    self._prev_lock = lock_type
def attempt_lock(self):
    """Take the lock, then release the test's check lock.

    The finally clause runs whether or not the superclass call
    succeeds, so the waiting checker is always unblocked.
    """
    # Once we have acquired the lock, it is okay for
    # the other lock to check it
    try:
        return super(LockDir1, self).attempt_lock()
    finally:
        note('lock1: releasing check lock')
        # 'wait_to_check_lock' comes from the enclosing test scope.
        wait_to_check_lock.release()
def redirected(transport, e, redirection_notice):
    """Redirection callback: validate the target and follow it.

    Note: 'self' is captured from the enclosing scope (closure); the
    policy check may veto the redirect by raising.
    """
    self.policy.checkOneURL(e.target)
    redirected_transport = transport._redirected_to(e.source, e.target)
    if redirected_transport is None:
        raise errors.NotBranchError(e.source)
    # e.permanently is interpolated into 'is%s' (e.g. ' permanently').
    trace.note('%s is%s redirected to %s',
               transport.base, e.permanently, redirected_transport.base)
    return redirected_transport
def redirected(transport, e, redirection_notice):
    """Redirection callback: check policy, then build the new transport.

    'self' is a closure capture from the enclosing method.
    """
    self.policy.checkOneURL(e.target)
    redirected_transport = transport._redirected_to(
        e.source, e.target)
    if redirected_transport is None:
        raise errors.NotBranchError(e.source)
    # e.permanently fills the 'is%s' slot (e.g. ' permanently').
    trace.note('%s is%s redirected to %s',
               transport.base, e.permanently, redirected_transport.base)
    return redirected_transport
def report(self, to_file):
    """Write a human-readable description of the result."""
    if self.branch_push_result is not None:
        # An actual push happened; delegate the reporting to it.
        self.branch_push_result.report(to_file)
    elif self.stacked_on is not None:
        note(gettext('Created new stacked branch referring to %s.') %
             self.stacked_on)
    else:
        note(gettext('Created new branch.'))
def _get_submit_location(self, branch):
    """Return the submit location, falling back to the parent branch.

    :raises errors.NoSubmitBranch: if neither location is configured.
    """
    location = branch.get_submit_branch()
    location_type = 'submit branch'
    if location is None:
        location_type = 'parent branch'
        location = branch.get_parent()
    if location is None:
        raise errors.NoSubmitBranch(branch)
    trace.note('Using %s %s', location_type, location)
    return location
def run(self, location=None, dry_run=False):
    """Open the Launchpad page for the branch in a web browser."""
    from bzrlib.plugins.launchpad.lp_registration import (LaunchpadService)
    web_url = self._get_web_url(LaunchpadService(),
                                u'.' if location is None else location)
    trace.note(gettext('Opening %s in web browser') % web_url)
    if dry_run:
        return
    import webbrowser   # this import should not be lazy
                        # otherwise bzr.exe lacks this module
    webbrowser.open(web_url)
def report(self, to_file):
    """Write a human-readable description of the result."""
    result = self.branch_push_result
    if result is not None:
        result.report(to_file)
        return
    # No push result: a branch was freshly created.
    if self.stacked_on is not None:
        message = (gettext('Created new stacked branch referring to %s.')
                   % self.stacked_on)
    else:
        message = gettext('Created new branch.')
    note(message)
def run(self, location=None, dry_run=False):
    """Show the branch's Launchpad page in the default web browser."""
    from bzrlib.plugins.launchpad.lp_registration import (
        LaunchpadService)
    if location is None:
        location = u'.'
    web_url = self._get_web_url(LaunchpadService(), location)
    trace.note(gettext('Opening %s in web browser') % web_url)
    if not dry_run:
        # this import should not be lazy
        # otherwise bzr.exe lacks this module
        import webbrowser
        webbrowser.open(web_url)
def _get_submit_location(self, branch):
    """Return where merge directives for *branch* should be submitted.

    Prefers the configured submit branch, then the parent branch.

    :raises errors.NoSubmitBranch: when neither is configured.
    """
    where = branch.get_submit_branch()
    kind = 'submit branch'
    if where is None:
        kind = 'parent branch'
        where = branch.get_parent()
    if where is None:
        raise errors.NoSubmitBranch(branch)
    trace.note(
        gettext('Using {0} {1}').format(kind, where))
    return where
def _get_submit_location(self, branch):
    """Pick the submit location: submit branch first, parent as fallback.

    :raises errors.NoSubmitBranch: if neither location exists.
    """
    chosen = branch.get_submit_branch()
    label = 'submit branch'
    if chosen is None:
        chosen = branch.get_parent()
        label = 'parent branch'
    if chosen is None:
        raise errors.NoSubmitBranch(branch)
    trace.note(gettext('Using {0} {1}').format(label, chosen))
    return chosen
def run(self, public_url=None, project='', product=None, branch_name='',
        branch_title='', branch_description='', author='', link_bug=None,
        dry_run=False):
    """Register a branch (and optionally a bug link) with Launchpad.

    With --dry-run the requests are exercised against an in-memory
    service instead of the real one.
    """
    from bzrlib.plugins.launchpad.lp_registration import (
        BranchRegistrationRequest, BranchBugLinkRequest,
        DryRunLaunchpadService, LaunchpadService)
    if public_url is None:
        # Default to the public location of the branch in '.'.
        try:
            b = _mod_branch.Branch.open_containing('.')[0]
        except NotBranchError:
            raise BzrCommandError(gettext(
                'register-branch requires a public '
                'branch url - see bzr help register-branch.'))
        public_url = b.get_public_branch()
        if public_url is None:
            raise NoPublicBranch(b)
    if product is not None:
        # Legacy option: --product is an alias for --project.
        project = product
        trace.note(gettext(
            '--product is deprecated; please use --project.'))
    rego = BranchRegistrationRequest(branch_url=public_url,
                                     branch_name=branch_name,
                                     branch_title=branch_title,
                                     branch_description=branch_description,
                                     product_name=project,
                                     author_email=author,
                                     )
    linko = BranchBugLinkRequest(branch_url=public_url, bug_id=link_bug)
    if not dry_run:
        service = LaunchpadService()
        # This gives back the xmlrpc url that can be used for future
        # operations on the branch. It's not so useful to print to the
        # user since they can't do anything with it from a web browser; it
        # might be nice for the server to tell us about an html url as
        # well.
    else:
        # Run on service entirely in memory
        service = DryRunLaunchpadService()
    service.gather_user_credentials()
    rego.submit(service)
    if link_bug:
        linko.submit(service)
    self.outf.write('Branch registered.\n')
def _remove_pending_dir(self, tmpname):
    """Remove the pending directory

    This is called if we failed to rename into place, so that the
    pending dirs don't clutter up the lockdir.
    """
    self._trace("remove %s", tmpname)
    try:
        self.transport.delete(tmpname + self.__INFO_NAME)
        self.transport.rmdir(tmpname)
    # Python 2 except syntax; cleanup is deliberately best-effort —
    # a failure here is only noted, not raised.
    except PathError, e:
        note("error removing pending lock: %s", e)
def run(self, public_url=None, project='', product=None, branch_name='',
        branch_title='', branch_description='', author='', link_bug=None,
        dry_run=False):
    """Register a public branch with Launchpad, optionally linking a bug.

    --dry-run runs the same flow against an in-memory service.
    """
    from bzrlib.plugins.launchpad.lp_registration import (
        BranchRegistrationRequest, BranchBugLinkRequest,
        DryRunLaunchpadService, LaunchpadService)
    if public_url is None:
        # No URL given: derive it from the branch containing '.'.
        try:
            b = _mod_branch.Branch.open_containing('.')[0]
        except NotBranchError:
            raise BzrCommandError(
                gettext('register-branch requires a public '
                        'branch url - see bzr help register-branch.'))
        public_url = b.get_public_branch()
        if public_url is None:
            raise NoPublicBranch(b)
    if product is not None:
        # --product is the deprecated spelling of --project.
        project = product
        trace.note(
            gettext('--product is deprecated; please use --project.'))
    rego = BranchRegistrationRequest(
        branch_url=public_url,
        branch_name=branch_name,
        branch_title=branch_title,
        branch_description=branch_description,
        product_name=project,
        author_email=author,
        )
    linko = BranchBugLinkRequest(branch_url=public_url, bug_id=link_bug)
    if not dry_run:
        service = LaunchpadService()
        # This gives back the xmlrpc url that can be used for future
        # operations on the branch. It's not so useful to print to the
        # user since they can't do anything with it from a web browser; it
        # might be nice for the server to tell us about an html url as
        # well.
    else:
        # Run on service entirely in memory
        service = DryRunLaunchpadService()
    service.gather_user_credentials()
    rego.submit(service)
    if link_bug:
        linko.submit(service)
    self.outf.write('Branch registered.\n')
def _resolve(self, url, _request_factory=ResolveLaunchpadPathRequest,
             _lp_login=None):
    """Resolve the base URL for this transport.

    Resolves locally when the Launchpad login is known (comparing with
    the XML-RPC answer under the 'launchpad' debug flag), otherwise via
    XML-RPC, then picks the first usable URL from the candidates.
    """
    url, path = self._update_url_scheme(url)
    if _lp_login is None:
        _lp_login = get_lp_login()
    path = path.strip('/')
    path = self._expand_user(path, url, _lp_login)
    if _lp_login is not None:
        result = self._resolve_locally(path, url, _request_factory)
        if 'launchpad' in debug.debug_flags:
            # Debug: also ask the server and report both resolutions.
            local_res = result
            result = self._resolve_via_xmlrpc(path, url, _request_factory)
            trace.note(
                gettext('resolution for {0}\n local: {1}\n remote: {2}').
                format(url, local_res['urls'], result['urls']))
    else:
        result = self._resolve_via_xmlrpc(path, url, _request_factory)
    if 'launchpad' in debug.debug_flags:
        trace.mutter("resolve_lp_path(%r) == %r", url, result)
    _warned_login = False
    # Take the first candidate URL we can actually use; the for/else
    # raises when none qualifies.
    for url in result['urls']:
        scheme, netloc, path, query, fragment = urlsplit(url)
        if self._requires_launchpad_login(scheme, netloc, path, query,
                                          fragment):
            # Only accept launchpad.net bzr+ssh URLs if we know
            # the user's Launchpad login:
            if _lp_login is not None:
                break
            if _lp_login is None:
                if not _warned_login:
                    trace.warning(
                        'You have not informed bzr of your Launchpad ID, and you must do this to\n'
                        'write to Launchpad or access private data. See "bzr help launchpad-login".')
                    _warned_login = True
        else:
            # Use the URL if we can create a transport for it.
            try:
                transport.get_transport(url)
            except (errors.PathError, errors.TransportError):
                pass
            else:
                break
    else:
        raise errors.InvalidURL(path=url, extra='no supported schemes')
    return url
def repo_push(src_repo, dst_repo, pb, overwrite=False):
    """Push every branch in src_repo to the same relpath in dst_repo.

    Fetches all revisions first, then updates each destination
    branch's history and tags.  Raises BzrCommandError if a branch
    has diverged and overwrite is False.
    """
    src_repo.lock_read()
    try:
        dst_repo.lock_write()
        try:
            src_repo_trans = src_repo.bzrdir.root_transport
            dst_repo_trans = dst_repo.bzrdir.root_transport
            pb.update('Getting list of branches', 0, 1)
            branches = list_branches(src_repo)
            note('Pushing %d branches from %s to %s' %
                 (len(branches), src_repo_trans.base, dst_repo_trans.base))
            # XXX: ideally this would only fetch the tips of the
            # branches we found previously.
            pb.update('Fetching entire repo', 0, 1)
            dst_repo.fetch(src_repo, pb=pb)
            # Now synchronise the revision histories of the local and
            # remote branches. The previous fetch() call has made
            # sure that the corresponding revisions exist in dst_repo.
            for index, src_branch in enumerate(branches):
                pb.update('Updating branches', index, len(branches))
                relpath = src_repo_trans.relpath(
                    src_branch.bzrdir.root_transport.base)
                format = BranchFormat.find_format(src_branch.bzrdir)
                dst_branch = get_branch(dst_repo, relpath, format)
                src_history = src_branch.revision_history()
                dst_history = dst_branch.revision_history()
                # If we aren't overwriting and the destination history
                # is not a subset of the source history, error out.
                # XXX this implementation is buggy in some cases
                if not overwrite and (src_history[:len(dst_history)] !=
                                      dst_history):
                    raise errors.BzrCommandError('Branch %s has diverged' %
                                                 relpath)
                # push tags
                src_branch.tags.merge_to(dst_branch.tags)
                if src_history != dst_history:
                    dst_branch.set_revision_history(src_history)
                    note('%d revision(s) pushed to %s' % (
                        len(src_history) - len(dst_history), relpath))
        finally:
            dst_repo.unlock()
    finally:
        src_repo.unlock()
def scan_branch(branch, needed_refs, to_unlock):
    """Scan a branch for refs.

    :param branch: The branch to schedule for checking.
    :param needed_refs: Refs we are accumulating.
    :param to_unlock: The unlock list accumulating.
    """
    note(gettext("Checking branch at '%s'.") % (branch.base,))
    branch.lock_read()
    to_unlock.append(branch)
    for ref in branch._get_check_refs():
        needed_refs.setdefault(ref, []).append(branch)
def _wait_for_clients_to_disconnect(self):
    """Block until all active client connections have finished.

    Polls the connection list, logging a reminder every
    _LOG_WAITING_TIMEOUT seconds while clients remain.
    """
    self._poll_active_connections()
    if not self._active_connections:
        return
    trace.note(gettext('Waiting for %d client(s) to finish')
               % (len(self._active_connections),))
    next_log_at = self._timer() + self._LOG_WAITING_TIMEOUT
    while self._active_connections:
        now = self._timer()
        if now >= next_log_at:
            trace.note(gettext('Still waiting for %d client(s) to finish')
                       % (len(self._active_connections),))
            next_log_at = now + self._LOG_WAITING_TIMEOUT
        self._poll_active_connections(self._SHUTDOWN_POLL_TIMEOUT)
def do_write(self):
    """Write all data to the bundle"""
    count = len(self.revision_ids)
    trace.note(ngettext('Bundling %d revision.', 'Bundling %d revisions.',
                        count), count)
    self.repository.lock_read()
    try:
        self.bundle.begin()
        self.write_info()
        self.write_files()
        self.write_revisions()
        self.bundle.end()
    finally:
        self.repository.unlock()
    return self.revision_ids
def run(self, from_location, to_location, revision=None):
    """Add an external branch: fetch it and record it in .bzrmeta.

    Chooses update/pull vs checkout/branch depending on whether the
    target already exists and whether the outer branch is bound.
    """
    branch = Branch.open_containing('.')[0]
    root = local_path_from_url(branch.base)
    # select what do it
    if isdir(pathjoin(root, to_location, '.bzr')) or isdir(pathjoin(root, to_location, '.svn')):
        # Target already exists: refresh it.
        if branch.get_bound_location():
            cmd = ['update', to_location]
        else:
            cmd = ['pull', from_location, '--directory', to_location]
    else:
        # Target missing: create it.
        if branch.get_bound_location():
            cmd = ['checkout', from_location, to_location]
        else:
            cmd = ['branch', from_location, to_location]
    # command branch don't create recursive directory
    dirs = to_location.rpartition('/')
    if dirs[0] != '' and not isdir(dirs[0]):
        os.makedirs(dirs[0].encode(get_user_encoding()))
    # if use revision options but not for 'update'
    if revision is not None:  # and cmd[0] != 'update':
        cmd += ['--revision', revision[0].user_spec]
    note('Add external ' + ' '.join(cmd))
    run_bzr_catch_user_errors(cmd)
    bzrmeta = pathjoin(root, '.bzrmeta')
    if not isdir(bzrmeta):
        os.mkdir(bzrmeta)
    # add new branch to config and snapshot files
    line = from_location + ' ' + self._quoted_if_need(to_location)
    if revision:
        line += ' ' + revision[0].user_spec
    self._add_to_file(root, externals.CONFIG_PATH, line)
    self._add_to_file(root, externals.SNAPSHOT_PATH, line)
    # add ignore mask
    from bzrlib import IGNORE_FILENAME
    self._add_to_file(root, IGNORE_FILENAME, './' + to_location)
    # add config files to repository
    cmd = ['add', '.bzrignore', '.bzrmeta/externals',
           '.bzrmeta/externals-snapshot']
    run_bzr_catch_user_errors(cmd)
def _resolve(self, url, _request_factory=ResolveLaunchpadPathRequest,
             _lp_login=None):
    """Resolve the base URL for this transport.

    Local resolution is used when the Launchpad login is known;
    otherwise the XML-RPC service is consulted.  The first usable
    candidate URL is returned.
    """
    url, path = self._update_url_scheme(url)
    if _lp_login is None:
        _lp_login = get_lp_login()
    path = path.strip('/')
    path = self._expand_user(path, url, _lp_login)
    if _lp_login is not None:
        result = self._resolve_locally(path, url, _request_factory)
        if 'launchpad' in debug.debug_flags:
            # Debug mode: compare local and remote resolutions.
            local_res = result
            result = self._resolve_via_xmlrpc(path, url, _request_factory)
            trace.note(gettext(
                'resolution for {0}\n local: {1}\n remote: {2}').format(
                    url, local_res['urls'], result['urls']))
    else:
        result = self._resolve_via_xmlrpc(path, url, _request_factory)
    if 'launchpad' in debug.debug_flags:
        trace.mutter("resolve_lp_path(%r) == %r", url, result)
    _warned_login = False
    # for/else: raise only if no candidate URL was accepted.
    for url in result['urls']:
        scheme, netloc, path, query, fragment = urlsplit(url)
        if self._requires_launchpad_login(scheme, netloc, path, query,
                                          fragment):
            # Only accept launchpad.net bzr+ssh URLs if we know
            # the user's Launchpad login:
            if _lp_login is not None:
                break
            if _lp_login is None:
                if not _warned_login:
                    trace.warning(
                        'You have not informed bzr of your Launchpad ID, and you must do this to\n'
                        'write to Launchpad or access private data. See "bzr help launchpad-login".')
                    _warned_login = True
        else:
            # Use the URL if we can create a transport for it.
            try:
                transport.get_transport(url)
            except (errors.PathError, errors.TransportError):
                pass
            else:
                break
    else:
        raise errors.InvalidURL(path=url, extra='no supported schemes')
    return url
def _wait_for_clients_to_disconnect(self):
    """Poll until every active client connection has gone away.

    Emits a note immediately, then a reminder every
    _LOG_WAITING_TIMEOUT seconds while connections remain.
    """
    self._poll_active_connections()
    if not self._active_connections:
        return
    trace.note(
        gettext('Waiting for %d client(s) to finish')
        % (len(self._active_connections), ))
    t_next_log = self._timer() + self._LOG_WAITING_TIMEOUT
    while self._active_connections:
        now = self._timer()
        if now >= t_next_log:
            trace.note(
                gettext('Still waiting for %d client(s) to finish')
                % (len(self._active_connections), ))
            t_next_log = now + self._LOG_WAITING_TIMEOUT
        # Each poll blocks for at most _SHUTDOWN_POLL_TIMEOUT.
        self._poll_active_connections(self._SHUTDOWN_POLL_TIMEOUT)
def get_set_encoding(encoding, branch):
    """Return encoding value from branch config if encoding is None,
    otherwise store encoding value in branch config.
    """
    if encoding is not None:
        # Store mode. Truthiness check keeps support for the two fake
        # branch cases: branch is None, branch is FakeBranch.
        if branch:
            branch.get_config().set_user_option("encoding", encoding)
        return encoding
    # Read mode: fall back to utf-8 for missing or invalid values.
    config = get_branch_config(branch)
    encoding = config.get_user_option("encoding") or 'utf-8'
    if is_valid_encoding(encoding):
        return encoding
    from bzrlib.trace import note
    note(('NOTE: Invalid encoding value in branch config: %s\n'
          'utf-8 will be used instead') % encoding)
    return 'utf-8'
def _flush_blobs_to_disk(self):
    """Spill sticky blobs to disk until memory use drops below the limit.

    Largest blobs are flushed first; blobs under the small-blob
    threshold are appended to one shared temporary file, larger ones
    each get their own temp file.  Python 2 only (list-returning
    dict.keys(), list.sort on it).
    """
    blobs = self._sticky_blobs.keys()
    sticky_blobs = self._sticky_blobs
    total_blobs = len(sticky_blobs)
    # Ascending size order, so pop() below yields the largest first.
    blobs.sort(key=lambda k: len(sticky_blobs[k]))
    if self._tempdir is None:
        tempdir = tempfile.mkdtemp(prefix='fastimport_blobs-')
        self._tempdir = tempdir
        self._cleanup.tempdir = self._tempdir
        self._cleanup.small_blobs = tempfile.TemporaryFile(
            prefix='small-blobs-', dir=self._tempdir)
        small_blob_ref = weakref.ref(self._cleanup.small_blobs)
        # Even though we add it to _Cleanup it seems that the object can be
        # destroyed 'too late' for cleanup to actually occur. Probably a
        # combination of bzr's "die directly, don't clean up" and how
        # exceptions close the running stack.
        def exit_cleanup():
            small_blob = small_blob_ref()
            if small_blob is not None:
                small_blob.close()
            shutil.rmtree(tempdir, ignore_errors=True)
        atexit.register(exit_cleanup)
    count = 0
    bytes = 0
    n_small_bytes = 0
    while self._sticky_memory_bytes > self._sticky_flushed_size:
        id = blobs.pop()
        blob = self._sticky_blobs.pop(id)
        n_bytes = len(blob)
        self._sticky_memory_bytes -= n_bytes
        if n_bytes < self._small_blob_threshold:
            # Small blob: append to the shared file, record its offset.
            f = self._cleanup.small_blobs
            f.seek(0, os.SEEK_END)
            self._disk_blobs[id] = (f.tell(), n_bytes, None)
            f.write(blob)
            n_small_bytes += n_bytes
        else:
            # Large blob: dedicated temp file, recorded by name.
            fd, name = tempfile.mkstemp(prefix='blob-', dir=self._tempdir)
            os.write(fd, blob)
            os.close(fd)
            self._disk_blobs[id] = (0, n_bytes, name)
        bytes += n_bytes
        del blob
        count += 1
    trace.note('flushed %d/%d blobs w/ %.1fMB (%.1fMB small) to disk'
               % (count, total_blobs, bytes / 1024. / 1024,
                  n_small_bytes / 1024. / 1024))
def _update_branches(self, old_revno, old_revid, new_revno):
    """Update the master and local branch to the new revision.

    This will try to make sure that the master branch is updated
    before the local branch.

    :param old_revno: Revision number of master branch before the commit
    :param old_revid: Tip of master branch before the commit
    :param new_revno: Revision number of the new commit
    """
    if not self.builder.updates_branch:
        self._process_pre_hooks(old_revno, new_revno)
        # Upload revision data to the master.
        # this will propagate merged revisions too if needed.
        if self.bound_branch:
            self._set_progress_stage("Uploading data to master branch")
            # 'commit' to the master first so a timeout here causes the
            # local branch to be out of date
            (new_revno, self.rev_id
             ) = self.master_branch.import_last_revision_info_and_tags(
                self.branch, new_revno, self.rev_id, lossy=self._lossy)
            if self._lossy:
                self.branch.fetch(self.master_branch, self.rev_id)
        # and now do the commit locally.
        self.branch.set_last_revision_info(new_revno, self.rev_id)
    else:
        try:
            self._process_pre_hooks(old_revno, new_revno)
        except:
            # The commit builder will already have updated the branch,
            # revert it.
            self.branch.set_last_revision_info(old_revno, old_revid)
            raise
    # Merge local tags to remote
    if self.bound_branch:
        self._set_progress_stage("Merging tags to master branch")
        tag_updates, tag_conflicts = self.branch.tags.merge_to(
            self.master_branch.tags)
        if tag_conflicts:
            warning_lines = [' ' + name for name, _, _ in tag_conflicts]
            note(gettext("Conflicting tags in bound branch:\n{0}".format(
                "\n".join(warning_lines))))
def _set_state(self, revspec, state):
    """Set the state of the given revspec and bisecting.

    Returns boolean indicating if bisection is done."""
    log = BisectLog()
    if log.is_done():
        # Nothing left to narrow down; show where we ended up.
        note("No further bisection is possible.\n")
        log._current.show_rev_log(self.outf)
        return True
    if revspec:
        log.set_status_from_revspec(revspec, state)
    else:
        log.set_current(state)
    log.bisect(self.outf)
    log.save()
    return False
def _update_branches(self, old_revno, old_revid, new_revno):
    """Update the master and local branch to the new revision.

    This will try to make sure that the master branch is updated
    before the local branch.

    :param old_revno: Revision number of master branch before the commit
    :param old_revid: Tip of master branch before the commit
    :param new_revno: Revision number of the new commit
    """
    if not self.builder.updates_branch:
        self._process_pre_hooks(old_revno, new_revno)
        # Upload revision data to the master.
        # this will propagate merged revisions too if needed.
        if self.bound_branch:
            self._set_progress_stage("Uploading data to master branch")
            # 'commit' to the master first so a timeout here causes the
            # local branch to be out of date
            (new_revno,
             self.rev_id) = self.master_branch.import_last_revision_info_and_tags(
                self.branch, new_revno, self.rev_id, lossy=self._lossy)
            if self._lossy:
                self.branch.fetch(self.master_branch, self.rev_id)
        # and now do the commit locally.
        self.branch.set_last_revision_info(new_revno, self.rev_id)
    else:
        try:
            self._process_pre_hooks(old_revno, new_revno)
        except:
            # The commit builder will already have updated the branch,
            # revert it.
            self.branch.set_last_revision_info(old_revno, old_revid)
            raise
    # Merge local tags to remote
    if self.bound_branch:
        self._set_progress_stage("Merging tags to master branch")
        tag_updates, tag_conflicts = self.branch.tags.merge_to(
            self.master_branch.tags)
        if tag_conflicts:
            warning_lines = [' ' + name for name, _, _ in tag_conflicts]
            note(gettext("Conflicting tags in bound branch:\n{0}".format(
                "\n".join(warning_lines))))
def _error_messages(exporter):
    """Extract fmt string from bzrlib.errors."""
    context = exporter.get_context(errors)
    base_klass = errors.BzrError
    for name in dir(errors):
        klass = getattr(errors, name)
        # Skip anything that is not a user-visible BzrError subclass.
        if (not inspect.isclass(klass)
                or not issubclass(klass, base_klass)
                or klass is base_klass
                or klass.internal_error):
            continue
        fmt = getattr(klass, "_fmt", None)
        if fmt:
            note(gettext("Exporting message from error: %s"), name)
            exporter.poentry_in_context(context, fmt)
def run(self, revision=None):
    """Open the merge proposals that merged the given revision."""
    from bzrlib import ui
    from bzrlib.plugins.launchpad import lp_api
    import webbrowser
    b = _mod_branch.Branch.open_containing('.')[0]
    pb = ui.ui_factory.nested_progress_bar()
    b.lock_read()
    try:
        revno = self._find_merged_revno(revision, b, pb)
        merged = self._find_proposals(revno, b, pb)
        if not merged:
            raise BzrCommandError(gettext('No review found.'))
        trace.note(gettext('%d proposals(s) found.') % len(merged))
        for mp in merged:
            webbrowser.open(lp_api.canonical_url(mp))
    finally:
        b.unlock()
        pb.finished()
def _update(tree, source_repository):
    """Update a working tree to the latest revision of its branch.

    :param tree: the working tree
    :param source_repository: repository holding the revisions
    """
    tree.lock_tree_write()
    try:
        to_branch = tree.branch
        if tree.last_revision() == to_branch.last_revision():
            note("Tree is up to date at revision %d.", to_branch.revno())
            return
        base_tree = source_repository.revision_tree(tree.last_revision())
        # Three-way merge: current tree vs branch tip, based on the
        # tree's last revision.
        merge.Merge3Merger(tree, tree, base_tree, to_branch.basis_tree())
        tree.set_last_revision(to_branch.last_revision())
        note('Updated to revision %d.' % to_branch.revno())
    finally:
        tree.unlock()
def scan_tree(base_tree, tree, needed_refs, to_unlock):
    """Schedule *tree* for checking and record the refs it requires.

    :param base_tree: The original tree check opened, used to detect
        duplicate tree checks.
    :param tree: The tree to schedule for checking.
    :param needed_refs: Refs we are accumulating.
    :param to_unlock: The unlock list accumulating.
    """
    # Skip a tree that is the same as the one already opened for checking.
    duplicate = (base_tree is not None
                 and tree.basedir == base_tree.basedir)
    if duplicate:
        return
    note(gettext("Checking working tree at '%s'.") % (tree.basedir,))
    tree.lock_read()
    to_unlock.append(tree)
    # Register this tree against every check ref it asks for.
    for ref in tree._get_check_refs():
        needed_refs.setdefault(ref, []).append(tree)
def _make_smart_server(self, host, port, inet, timeout):
    """Create the smart server medium and store it as self.smart_server.

    Serves over this process's stdin/stdout pipes when *inet* is set,
    otherwise over TCP on *host*:*port* (defaulting to the medium's
    standard interface and port).
    """
    if timeout is None:
        # No explicit timeout: read the configured client timeout.
        timeout = config.GlobalStack().get('serve.client_timeout')
    if inet:
        in_stream, out_stream = self._get_stdin_stdout()
        server = medium.SmartServerPipeStreamMedium(
            in_stream, out_stream, self.transport, timeout=timeout)
    else:
        server = SmartTCPServer(self.transport, client_timeout=timeout)
        server.start_server(
            medium.BZR_DEFAULT_INTERFACE if host is None else host,
            medium.BZR_DEFAULT_PORT if port is None else port)
        trace.note(gettext('listening on port: %s') % server.port)
    self.smart_server = server
def update_lp(self):
    """Update the Launchpad copy of this branch."""
    if not self._check_update:
        # Caller asked us not to update; nothing to do.
        return
    self.bzr.lock_read()
    try:
        if self.lp.last_scanned_id is not None:
            # Launchpad already scanned a revision: skip the push when it
            # is current, and refuse to push over a diverged branch.
            # NOTE(review): the flattened source made the nesting of these
            # checks ambiguous — confirm this scope against upstream.
            if self.bzr.last_revision() == self.lp.last_scanned_id:
                trace.note(gettext('%s is already up-to-date.') %
                           self.lp.bzr_identity)
                return
            graph = self.bzr.repository.get_graph()
            if not graph.is_ancestor(self.lp.last_scanned_id,
                                     self.bzr.last_revision()):
                raise errors.DivergedBranches(self.bzr, self.push_bzr)
        trace.note(gettext('Pushing to %s') % self.lp.bzr_identity)
        self.bzr.push(self.push_bzr)
    finally:
        self.bzr.unlock()
def run_bisect(self, script):
    """Automate the bisection by repeatedly running *script* in a shell.

    Exit status 0 records 'yes' for the current revision and any other
    status records 'no'; status 125 ends the run immediately
    (NOTE(review): presumably "cannot test", mirroring git-bisect —
    confirm).  The loop stops once _set_state reports completion.
    """
    import subprocess
    note("Starting bisect.")
    self.start()
    while True:
        try:
            proc = subprocess.Popen(script, shell=True)
            proc.wait()
            status = proc.returncode
            if status == 125:
                break
            # _set_state returns truthy when the bisection has converged.
            if self._set_state(None, 'yes' if status == 0 else 'no'):
                break
        except RuntimeError:
            break
def get_lp_login(_config=None):
    """Return the user's Launchpad username.

    :raises: MismatchedUsername if authentication.conf and bazaar.conf
        disagree about username.
    """
    cfg = GlobalStack() if _config is None else _config
    username = cfg.get('launchpad_username')
    if username is None:
        return None
    auth = AuthenticationConfig()
    stored = _get_auth_user(auth)
    if stored is None:
        # Auto-upgrading: seed authentication.conf from bazaar.conf.
        trace.note(gettext('Setting ssh/sftp usernames for launchpad.net.'))
        _set_auth_user(username, auth)
    elif stored != username:
        raise MismatchedUsernames()
    return username
def _make_smart_server(self, host, port, inet, timeout):
    """Create the smart server medium and store it as self.smart_server."""
    # Fall back to the configured client timeout when none was supplied.
    if timeout is None:
        c = config.GlobalStack()
        timeout = c.get('serve.client_timeout')
    if inet:
        # Serve a single client over this process's stdin/stdout pipes.
        stdin, stdout = self._get_stdin_stdout()
        smart_server = medium.SmartServerPipeStreamMedium(
            stdin, stdout, self.transport, timeout=timeout)
    else:
        # Serve over TCP, defaulting to the medium's standard
        # interface/port when none were given.
        if host is None:
            host = medium.BZR_DEFAULT_INTERFACE
        if port is None:
            port = medium.BZR_DEFAULT_PORT
        smart_server = SmartTCPServer(self.transport,
                                      client_timeout=timeout)
        smart_server.start_server(host, port)
        # Report the port actually bound by the server.
        trace.note(gettext('listening on port: %s') % smart_server.port)
    self.smart_server = smart_server
def run(self, revision=None):
    """Open in a web browser the merge proposal(s) for a revision.

    :param revision: optional single revision spec; the branch tip is
        used when omitted.
    :raises BzrCommandError: if no merge proposal is found.
    """
    from bzrlib import ui
    from bzrlib.plugins.launchpad import lp_api
    import webbrowser

    b = _mod_branch.Branch.open_containing('.')[0]
    pb = ui.ui_factory.nested_progress_bar()
    b.lock_read()
    try:
        if revision is None:
            revision_id = b.last_revision()
        else:
            revision_id = revision[0].as_revision_id(b)
        merged = self._find_proposals(revision_id, pb)
        if len(merged) == 0:
            raise BzrCommandError(gettext('No review found.'))
        # BUG FIX: message previously read '%d proposals(s) found.' —
        # note the msgid change means old translations fall back to English.
        trace.note(gettext('%d proposal(s) found.') % len(merged))
        for mp in merged:
            webbrowser.open(lp_api.canonical_url(mp))
    finally:
        b.unlock()
        pb.finished()
def run(self):
    """Perform the unshelving operation."""
    self.tree.lock_tree_write()
    # Cleanups run in reverse registration order, so the tree unlock
    # (registered first) always runs last.
    cleanups = [self.tree.unlock]
    try:
        if self.read_shelf:
            trace.note(
                gettext('Using changes with id "%d".') % self.shelf_id)
            unshelver = self.manager.get_unshelver(self.shelf_id)
            cleanups.append(unshelver.finalize)
            if unshelver.message is not None:
                trace.note(gettext('Message: %s') % unshelver.message)
            change_reporter = delta._ChangeReporter()
            merger = unshelver.make_merger(None)
            merger.change_reporter = change_reporter
            # Exactly one of: apply the changes, show a diff, or preview
            # the changes without applying.
            if self.apply_changes:
                merger.do_merge()
            elif self.show_diff:
                self.write_diff(merger)
            else:
                self.show_changes(merger)
        if self.delete_shelf:
            self.manager.delete_shelf(self.shelf_id)
            trace.note(
                gettext('Deleted changes with id "%d".') % self.shelf_id)
    finally:
        for cleanup in reversed(cleanups):
            cleanup()