def find_existing_proposed(main_branch, hoster, name):
    """Find an existing derived branch with the specified name, and proposal.

    Args:
      main_branch: Main branch
      hoster: The hoster
      name: Name of the derived branch
    Returns:
      Tuple with (base_branch, existing_branch, existing_proposal)
      Base branch won't be None; the existing_branch and existing_proposal
      can be None.
    """
    try:
        existing_branch = hoster.get_derived_branch(main_branch, name=name)
    except errors.NotBranchError:
        # No derived branch with this name yet: start from scratch.
        return (main_branch, None, None)
    else:
        note('Branch %s already exists (branch at %s)', name,
             existing_branch.user_url)
        # If there is an open or rejected merge proposal, resume that.
        merged_proposal = None
        for mp in hoster.iter_proposals(
                existing_branch, main_branch, status='all'):
            if not mp.is_merged():
                # Unmerged proposal found: resume from the existing branch.
                return (existing_branch, existing_branch, mp)
            else:
                merged_proposal = mp
        else:
            # for/else: there is no break in the loop above, so this always
            # runs once the proposals are exhausted.
            if merged_proposal is not None:
                note('There is a proposal that has already been merged at '
                     '%s.', merged_proposal.url)
                return (main_branch, existing_branch, None)
            else:
                # No related merge proposals found
                return (main_branch, None, None)
def run(self, location, working_directory, manifest=None,
        if_changed_from=None, revision=None):
    """Build the recipe at `location` into `working_directory`.

    :param location: Location of the recipe (branch or file).
    :param working_directory: Directory to build the tree into.
    :param manifest: Optional path to write the manifest to; defaults to
        <working_directory>/bzr-builder.manifest.
    :param if_changed_from: Optional older recipe; if nothing changed
        since it, do nothing and return 0.
    :param revision: Optional single revision spec to build.
    """
    if revision is not None and len(revision) > 0:
        if len(revision) != 1:
            raise errors.BzrCommandError("only a single revision can be "
                "specified")
        revspec = revision[0]
    else:
        revspec = None
    possible_transports = []
    base_branch = get_prepared_branch_from_location(
        location, possible_transports=possible_transports, revspec=revspec)
    if if_changed_from is not None:
        old_recipe = get_old_recipe(if_changed_from, possible_transports)
    else:
        old_recipe = None
    # resolve_revisions returns falsy when the recipe is unchanged
    # relative to old_recipe.
    changed = resolve_revisions(base_branch, if_changed_from=old_recipe)
    if not changed:
        trace.note("Unchanged")
        return 0
    manifest_path = manifest or os.path.join(working_directory,
        "bzr-builder.manifest")
    build_tree(base_branch, working_directory)
    write_manifest_to_transport(manifest_path, base_branch,
        possible_transports)
def create_cache_dir(self):
    """Return this repository's metadata cache directory, creating it on
    first use."""
    # The module-level create_cache_dir() gives the shared cache root;
    # each repository gets a subdirectory keyed by its UUID.
    repo_cache = os.path.join(create_cache_dir(), self.uuid)
    if not os.path.exists(repo_cache):
        trace.note("Initialising Subversion metadata cache in %s.",
                   repo_cache)
        os.mkdir(repo_cache)
    return repo_cache
def find_repo_url(data):
    """Find a repository URL in a pypi project's JSON metadata.

    Prefers an explicitly labelled 'Repository' project URL; otherwise
    accepts any project URL that points at a top-level GitHub repository
    (github.com/<owner>/<repo>).

    Args:
      data: Decoded pypi JSON document for a project.
    Returns:
      A repository URL string, or None if none was found.
    """
    # Bug fix: the original note() call referenced an undefined name
    # `name`, raising NameError whenever a 'Repository' URL was found.
    project_name = data['info']['name']
    for key, value in data['info']['project_urls'].items():
        if key == 'Repository':
            note('Found repository URL %s for pypi project %s',
                 value, project_name)
            return value
        parsed_url = urlparse(value)
        if (parsed_url.hostname == 'github.com'
                and parsed_url.path.strip('/').count('/') == 1):
            return value
def should_create_proposal(self):
    """Decide whether the applied fixes warrant a new merge proposal.

    Returns True when at least one fixed lintian tag is not in the
    add-on-only set; otherwise logs and returns False.
    """
    # Collect every lintian tag fixed by the runs recorded in self.applied.
    fixed = set()
    for outcome, _summary in self.applied:
        fixed.update(outcome.fixed_lintian_tags)
    # Only worth proposing if something beyond the add-on fixers changed.
    if fixed - self._propose_addon_only:
        return True
    note('%r: only add-on fixers found', self)
    return False
def pre_check(local_tree):
    """Run the user-supplied pre-check shell command in the tree.

    Returns False (after logging) when the command exits non-zero,
    True otherwise.
    """
    try:
        subprocess.check_call(
            args.pre_check, shell=True, cwd=local_tree.basedir)
    except subprocess.CalledProcessError:
        note('%r: pre-check failed, skipping', pkg)
        return False
    else:
        return True
def create_cache_dir(self):
    """Return this repository's metadata cache directory, creating it on
    first use."""
    cache_dir = create_cache_dir()
    assert isinstance(cache_dir, str)
    dir = os.path.join(cache_dir, str(self.uuid))
    if not os.path.exists(dir):
        # Bug fix: `dir` is already a text string here (os.path.join of
        # str operands, as the assert above guarantees), so calling
        # .decode() on it raised AttributeError on Python 3.
        trace.note("Initialising Subversion metadata cache in %s.", dir)
        os.mkdir(dir)
    return dir
def post_check(local_tree, since_revid):
    """Run the user-supplied post-check shell command in the tree.

    The revision processing started from is exported to the command via
    the SINCE_REVID environment variable.  Returns False (after logging)
    when the command exits non-zero, True otherwise.
    """
    env = {'SINCE_REVID': since_revid}
    try:
        subprocess.check_call(
            args.post_check, shell=True, cwd=local_tree.basedir, env=env)
    except subprocess.CalledProcessError:
        note('%r: post-check failed, skipping', pkg)
        return False
    else:
        return True
def _run_command(command, basedir, msg, error_msg,
        not_installed_msg=None, env=None, success_exit_codes=None,
        indata=None):
    """ Run a command in a subprocess.

    :param command: list with command and parameters
    :param basedir: directory to run the command in
    :param msg: message to display to the user
    :param error_msg: message to display if something fails.
    :param not_installed_msg: the message to display if the command
        isn't available.
    :param env: Optional environment to use rather than os.environ.
    :param success_exit_codes: Exit codes to consider succesfull, defaults
        to [0].
    :param indata: Data to write to standard input
    :raises MissingDependency: if the command binary is missing and
        not_installed_msg was given.
    :raises errors.BzrCommandError: if the command exits with a
        non-success code.
    """
    def subprocess_setup():
        # Restore default SIGPIPE handling in the child so pipelines
        # terminate normally instead of raising in the child.
        signal.signal(signal.SIGPIPE, signal.SIG_DFL)
    trace.note(msg)
    # Hide output if -q is in use.
    quiet = trace.is_quiet()
    if quiet:
        kwargs = {"stderr": subprocess.STDOUT, "stdout": subprocess.PIPE}
    else:
        kwargs = {}
    if env is not None:
        kwargs["env"] = env
    trace.mutter("running: %r", command)
    try:
        proc = subprocess.Popen(command, cwd=basedir,
            stdin=subprocess.PIPE, preexec_fn=subprocess_setup, **kwargs)
    except OSError as e:
        # ENOENT means the executable was not found; anything else is
        # unexpected and re-raised.
        if e.errno != errno.ENOENT:
            raise
        if not_installed_msg is None:
            raise
        raise MissingDependency(msg=not_installed_msg)
    # NOTE(review): communicate() returns a (stdout, stderr) tuple, so
    # the error message below interpolates the tuple's repr — confirm
    # whether output[0] was intended.
    output = proc.communicate(indata)
    if success_exit_codes is None:
        success_exit_codes = [0]
    if proc.returncode not in success_exit_codes:
        if quiet:
            raise errors.BzrCommandError("%s: %s" % (error_msg, output))
        else:
            raise errors.BzrCommandError(error_msg)
def get_set_encoding(encoding, branch):
    """Return encoding value from branch config if encoding is None,
    otherwise store encoding value in branch config.
    """
    if encoding is not None:
        # Store mode.  Truthiness (not `is not None`) is checked on
        # purpose: callers may pass None or a fake branch object.
        if branch:
            branch.get_config().set_user_option("encoding", encoding)
        return encoding
    # Lookup mode: consult the branch configuration, defaulting to utf-8.
    stored = get_branch_config(branch).get_user_option("encoding") or 'utf-8'
    if is_valid_encoding(stored):
        return stored
    from breezy.trace import note
    note(('NOTE: Invalid encoding value in branch config: %s\n'
          'utf-8 will be used instead') % stored)
    return 'utf-8'
def create_or_update_proposal(
        local_branch, main_branch, hoster, name, mp_description,
        existing_branch=None, existing_proposal=None, overwrite=False,
        labels=None, dry_run=False):
    """Create or update a merge proposal.

    Args:
      local_branch: Local branch with changes to propose
      main_branch: Target branch to propose against
      hoster: Associated hoster for main branch
      name: Name for the derived branch
      mp_description: Merge proposal description
      existing_branch: Existing derived branch
      existing_proposal: Existing merge proposal
      overwrite: Whether to overwrite changes
      labels: Labels to add
      dry_run: Whether to just dry-run the change
    Returns:
      Tuple with (proposal, is_new)
    """
    if not dry_run:
        # Publish the local changes: push into the existing derived
        # branch when there is one, otherwise publish a new one.
        if existing_branch is not None:
            local_branch.push(existing_branch, overwrite=overwrite)
            remote_branch = existing_branch
        else:
            # public_branch_url is unused here; publish_derived returns
            # (branch, url).
            remote_branch, public_branch_url = hoster.publish_derived(
                local_branch, main_branch, name=name, overwrite=overwrite)
    if existing_proposal is not None:
        # Resume the existing proposal, only refreshing its description.
        if not dry_run:
            existing_proposal.set_description(mp_description)
        return (existing_proposal, False)
    else:
        if not dry_run:
            proposal_builder = hoster.get_proposer(
                remote_branch, main_branch)
            try:
                mp = proposal_builder.create_proposal(
                    description=mp_description, labels=labels)
            except errors.PermissionDenied:
                note('Permission denied while trying to create '
                     'proposal.')
                raise
        else:
            # Dry run: stand in a fake proposal object instead of
            # touching the hoster.
            mp = DryRunProposal(
                local_branch, main_branch, labels=labels,
                description=mp_description)
        return (mp, True)
def run_main(args):
    """Run a user script against a package's packaging branch and
    propose or push the result, reporting the outcome.

    :param args: Parsed command-line arguments (package, script, name,
        refresh, label, dry_run, mode, diff).
    :return: 1 on error, None on success.
    """
    import os
    from breezy import osutils
    from breezy.plugins.propose import propose as _mod_propose
    from breezy.trace import note, show_error
    from ..proposal import (
        propose_or_push,
        )
    from ..run import (
        ScriptBranchChanger,
        ScriptMadeNoChanges,
        )
    from . import (
        open_packaging_branch,
        )
    main_branch = open_packaging_branch(args.package)
    if args.name is None:
        # Default the branch name to the script's basename sans extension.
        name = os.path.splitext(
            osutils.basename(args.script.split(' ')[0]))[0]
    else:
        name = args.name
    # TODO(jelmer): Check that ScriptBranchChanger updates upstream version
    # if it touches anything outside of debian/.
    try:
        result = propose_or_push(
            main_branch, name, ScriptBranchChanger(args.script),
            refresh=args.refresh, labels=args.label, dry_run=args.dry_run,
            mode=args.mode)
    except _mod_propose.UnsupportedHoster as e:
        show_error('No known supported hoster for %s. Run \'svp login\'?',
                   e.branch.user_url)
        return 1
    except _mod_propose.HosterLoginRequired as e:
        show_error(
            'Credentials for hosting site at %r missing. Run \'svp login\'?',
            e.hoster.base_url)
        return 1
    except ScriptMadeNoChanges:
        show_error('Script did not make any changes.')
        return 1
    if result.merge_proposal:
        if result.is_new:
            note('Merge proposal created.')
        else:
            note('Merge proposal updated.')
        if result.merge_proposal.url:
            note('URL: %s', result.merge_proposal.url)
        note('Description: %s', result.merge_proposal.get_description())
        if args.diff:
            result.show_base_diff(sys.stdout.buffer)
def make_changes(self, local_tree):
    """Run the configured lintian fixers against the packaging tree.

    Records the applied fixes on self.applied.  Returns early (making no
    proposal-worthy changes) when there is no debian/control file, the
    pre-check fails, or no fixers applied.

    :param local_tree: Working tree of the packaging branch.
    :raises PostCheckFailed: if the post-check command fails.
    """
    with local_tree.lock_write():
        if not local_tree.has_filename('debian/control'):
            note('%r: missing control file', self)
            return
        # Remember where we started so the post-check can diff against it.
        since_revid = local_tree.last_revision()
        if self._pre_check:
            if not self._pre_check(local_tree):
                return
        if self._update_changelog is None:
            # Not configured explicitly: guess from branch history.
            update_changelog = should_update_changelog(local_tree.branch)
        else:
            update_changelog = self._update_changelog
        self.applied, failed = run_lintian_fixers(
            local_tree, self._fixers, committer=self._committer,
            update_changelog=update_changelog,
            compat_release=self._compat_release)
        if failed:
            note('%r: some fixers failed to run: %r', self, failed)
        if not self.applied:
            note('%r: no fixers to apply', self)
            return
        if self._post_check:
            if not self._post_check(local_tree, since_revid):
                raise PostCheckFailed()
        if self._build_verify:
            build(local_tree.basedir)
def export_threads(self, root_transport):
    """Export the threads in this loom as branches.

    :param root_transport: Transport for the directory to place branches
        under.  Defaults to branch root transport.
    """
    threads = self.get_loom_state().get_threads()
    for thread_name, thread_revision, _parents in threads:
        thread_transport = root_transport.clone(thread_name)
        user_location = urlutils.unescape_for_display(
            thread_transport.base, 'utf-8')
        try:
            control_dir = controldir.ControlDir.open(
                thread_transport.base,
                possible_transports=[thread_transport])
            tree, branch = control_dir._get_tree_branch()
        except errors.NotBranchError:
            # No branch there yet: create one and reopen to get the tree.
            trace.note('Creating branch at %s' % user_location)
            branch = controldir.ControlDir.create_branch_convenience(
                thread_transport.base,
                possible_transports=[thread_transport])
            tree, branch = branch.controldir.open_tree_or_branch(
                thread_transport.base)
        else:
            if thread_revision == branch.last_revision():
                trace.note('Skipping up-to-date branch at %s'
                           % user_location)
                continue
            else:
                trace.note('Updating branch at %s' % user_location)
        # Pull through the working tree when there is one, so it stays in
        # sync; otherwise pull the branch directly.
        if tree is not None:
            tree.pull(self, stop_revision=thread_revision)
        else:
            branch.pull(self, stop_revision=thread_revision)
def run(self, location=".", set=False, repository_wide=False):
    """Show or edit the branching scheme of a Subversion repository.

    :param location: Location of the Subversion repository.
    :param set: If True, open an editor to change the scheme.
    :param repository_wide: Operate on the repository-wide property
        scheme rather than the per-mapping configured scheme.
    """
    from breezy import errors as bzr_errors
    from breezy.controldir import ControlDir
    from breezy.msgeditor import edit_commit_message
    from breezy.trace import note
    from ..repository import SvnRepository
    from ..mapping3.base import (
        BzrSvnMappingv3,
        config_set_scheme,
        get_property_scheme,
        set_property_scheme,
        )
    from .scheme import (
        scheme_from_branch_list,
        )

    def scheme_str(scheme):
        # Render a scheme as newline-terminated lines; empty for None.
        if scheme is None:
            return ""
        return "".join(map(lambda x: x+"\n", scheme.to_lines()))
    dir = ControlDir.open_containing(location)[0]
    repos = dir.find_repository()
    if not isinstance(repos, SvnRepository):
        raise bzr_errors.BzrCommandError(
            "Not a Subversion repository: %s" % location)
    if repository_wide:
        scheme = get_property_scheme(repos)
    else:
        scheme = BzrSvnMappingv3.from_repository(repos).scheme
    if set:
        # Let the user edit the current scheme, then store the result.
        schemestr = edit_commit_message(
            "", start_message=scheme_str(scheme))
        scheme = scheme_from_branch_list(
            map(lambda x: x.strip("\n"), schemestr.splitlines()))
        if repository_wide:
            set_property_scheme(repos, scheme)
        else:
            config_set_scheme(repos, scheme, None, mandatory=True)
    elif scheme is not None:
        note(scheme_str(scheme))
def load_tests(loader, basic_tests, pattern):
    """Load the QBrz test modules into basic_tests, skipping any that
    cannot be imported because PyQt5 is missing."""
    testmod_names = [
        'mock',
        'test_annotate',
        'test_autocomplete',
        'test_bugs',
        'test_cat',
        'test_commit',
        'test_commit_data',
        # 'test_diffview',  # - broken by API changes
        'test_extra_isignored',
        'test_extra_isversioned',
        'test_i18n',
        'test_log',
        'test_loggraphviz',
        'test_logmodel',
        'test_revisionmessagebrowser',
        # RJLRJL ignore spellcheck for now
        'test_spellcheck',
        'test_subprocess',
        'test_tree_branch',
        'test_treewidget',
        'test_util',
        'test_decorator',
        'test_guidebar',
        'test_extdiff',
        ]
    for modname in testmod_names:
        qualified = "%s.%s" % (__name__, modname)
        try:
            loaded = loader.loadTestsFromModuleName(qualified)
        except ImportError as e:
            # Only PyQt5-dependent modules are optional; anything else
            # indicates a real problem and is re-raised.
            if not str(e).endswith('PyQt5'):
                raise
            trace.note(
                'QBrz: skip module %s because PyQt5 is not installed'
                % qualified)
        else:
            basic_tests.addTests(loaded)
    return basic_tests
async def open_guessed_salsa_branch(conn, pkg, vcs_type, vcs_url,
                                    possible_transports=None):
    """Try candidate salsa URLs for a package and return the first branch
    that opens.

    :param conn: asyncpg database connection.
    :param pkg: Package name to look up.
    :param vcs_type: VCS type of the original URL.
    :param vcs_url: Original (e.g. alioth) VCS URL.
    :param possible_transports: Optional transports to reuse.
    :return: An opened branch, or None if no candidate worked.
    """
    # Don't do this as a top-level export, since it imports asyncpg, which
    # isn't available on jenkins.debian.net.
    package = await conn.fetchrow(
        'SELECT name, maintainer_email FROM package WHERE name = $1', pkg)
    probers = select_probers("git")
    vcs_url, params = urlutils.split_segment_parameters_raw(vcs_url)
    # Bug fix: `set(vcs_url)` built a set of the URL's *characters*, so
    # the "already tried" check below could never match a full URL and
    # the original URL was not excluded.  Use a one-element set instead.
    tried = {vcs_url}
    for salsa_url in itertools.chain(
            possible_urls_from_alioth_url(vcs_type, vcs_url),
            possible_salsa_urls_from_package_name(
                package['name'], package['maintainer_email'])):
        if not salsa_url or salsa_url in tried:
            continue
        tried.add(salsa_url)
        # Re-attach the segment parameters stripped from the original URL.
        salsa_url = urlutils.join_segment_parameters_raw(salsa_url, *params)
        note("Trying to access salsa URL %s instead.", salsa_url)
        try:
            branch = open_branch_ext(
                salsa_url, possible_transports=possible_transports,
                probers=probers)
        except BranchOpenFailure:
            pass
        else:
            note("Converting alioth URL: %s -> %s", vcs_url, salsa_url)
            return branch
    return None
def main(args):
    """Run a user script against a branch and propose or push the result.

    :param args: Parsed command-line arguments (url, script, name,
        commit_pending, refresh, label, mode, dry_run, diff).
    :return: 1 on error, None on success.
    """
    main_branch = _mod_branch.Branch.open(args.url)
    if args.name is None:
        # Default the branch name to the script's basename sans extension.
        name = os.path.splitext(
            osutils.basename(args.script.split(' ')[0]))[0]
    else:
        name = args.name
    # Map the tri-state CLI option to the commit_pending flag.
    commit_pending = {
        'auto': None, 'yes': True, 'no': False}[args.commit_pending]
    try:
        result = propose_or_push(
            main_branch, name,
            ScriptBranchChanger(args.script, commit_pending),
            refresh=args.refresh, labels=args.label, mode=args.mode,
            dry_run=args.dry_run)
    except _mod_propose.UnsupportedHoster as e:
        show_error('No known supported hoster for %s. Run \'svp login\'?',
                   e.branch.user_url)
        return 1
    except _mod_propose.HosterLoginRequired as e:
        show_error(
            'Credentials for hosting site at %r missing. Run \'svp login\'?',
            e.hoster.base_url)
        return 1
    except ScriptMadeNoChanges:
        show_error('Script did not make any changes.')
        return 1
    if result.merge_proposal:
        if result.is_new:
            note('Merge proposal created.')
        else:
            note('Merge proposal updated.')
        if result.merge_proposal.url:
            note('URL: %s', result.merge_proposal.url)
        note('Description: %s', result.merge_proposal.get_description())
        if args.diff:
            result.show_base_diff(sys.stdout.buffer)
def main(args):
    """Propose or push a new-upstream merge for each requested package.

    :param args: Parsed command-line arguments (packages, snapshot, mode,
        dry_run).
    :return: 1 if an upstream version was already imported, None otherwise.
    """
    for package in args.packages:
        main_branch = open_packaging_branch(package)
        # TODO(jelmer): Work out how to propose pristine-tar changes for
        # merging upstream.
        try:
            result = propose_or_push(
                main_branch, "new-upstream",
                NewUpstreamMerger(args.snapshot),
                mode=args.mode, dry_run=args.dry_run)
        except UpstreamAlreadyImported as e:
            note('Last upstream version %s already imported', e.version)
            # NOTE(review): this aborts processing of any remaining
            # packages in args.packages; a `continue` may have been
            # intended — confirm.
            return 1
        if result.merge_proposal:
            if result.is_new:
                note('%s: Created new merge proposal %s.',
                     package, result.merge_proposal.url)
            else:
                note('%s: Updated merge proposal %s.',
                     package, result.merge_proposal.url)
def report(self, to_file):
    """Write a human-readable description of the result."""
    push_result = self.branch_push_result
    if push_result is not None:
        # Pushed into an existing branch: delegate reporting to the
        # push result itself.
        push_result.report(to_file)
    else:
        trace.note('Created new branch at %s.', self.target_branch_path)
def run(self, location, working_basedir=None, manifest=None,
        if_changed_from=None, package=None, distribution=None, dput=None,
        key_id=None, no_build=None, watch_ppa=False, append_version=None,
        safe=False, allow_fallback_to_native=False):
    """Build a Debian source package from the recipe at `location`.

    Builds the recipe tree, adds an autobuild changelog entry when the
    recipe carries a deb-version header, and optionally builds, signs and
    dputs the resulting source package.
    """
    try:
        try:
            import debian
        except ImportError:
            # In older versions of python-debian the main package was named
            # debian_bundle
            import debian_bundle
    except ImportError:
        raise errors.BzrCommandError(
            "The 'debian' python module "
            "is required for 'bzr dailydeb'. Install the "
            "python-debian package.")
    from breezy.plugins.builder.deb_util import (
        add_autobuild_changelog_entry,
        build_source_package,
        calculate_package_dir,
        changelog,
        debian_source_package_name,
        dput_source_package,
        extract_upstream_tarball,
        force_native_format,
        get_source_format,
        sign_source_package,
        target_from_dput,
        )
    from breezy.plugins.builder.deb_version import (
        check_expanded_deb_version,
        substitute_branch_vars,
        substitute_time,
        )
    if dput is not None and key_id is None:
        raise errors.BzrCommandError("You must specify --key-id if you "
            "specify --dput.")
    if watch_ppa:
        if not dput:
            raise errors.BzrCommandError(
                "cannot watch a ppa without doing dput.")
        else:
            # Check we can calculate a PPA url.
            target_from_dput(dput)
    possible_transports = []
    base_branch = get_prepared_branch_from_location(
        location, safe=safe, possible_transports=possible_transports)
    # Save the unsubstituted version
    template_version = base_branch.deb_version
    if if_changed_from is not None:
        old_recipe = get_old_recipe(if_changed_from, possible_transports)
    else:
        old_recipe = None
    if base_branch.deb_version is not None:
        time = datetime.datetime.utcnow()
        substitute_time(base_branch, time)
        changed = resolve_revisions(
            base_branch, if_changed_from=old_recipe,
            substitute_branch_vars=substitute_branch_vars)
        check_expanded_deb_version(base_branch)
    else:
        changed = resolve_revisions(
            base_branch, if_changed_from=old_recipe)
    if not changed:
        trace.note("Unchanged")
        return 0
    if working_basedir is None:
        temp_dir = tempfile.mkdtemp(prefix="bzr-builder-")
        working_basedir = temp_dir
    else:
        temp_dir = None
        if not os.path.exists(working_basedir):
            os.makedirs(working_basedir)
    package_name = self._calculate_package_name(location, package)
    if template_version is None:
        working_directory = os.path.join(
            working_basedir, "%s-direct" % (package_name, ))
    else:
        working_directory = os.path.join(
            working_basedir, "%s-%s" % (package_name, template_version))
    try:
        # we want to use a consistent package_dir always to support
        # updates in place, but debuild etc want PACKAGE-UPSTREAMVERSION
        # on disk, so we build_tree with the unsubstituted version number
        # and do a final rename-to step before calling into debian build
        # tools. We then rename the working dir back.
        manifest_path = os.path.join(
            working_directory, "debian", "bzr-builder.manifest")
        build_tree(base_branch, working_directory)
        control_path = os.path.join(working_directory, "debian", "control")
        if not os.path.exists(control_path):
            if package is None:
                raise errors.BzrCommandError(
                    "No control file to "
                    "take the package name from, and --package not "
                    "specified.")
        else:
            package = debian_source_package_name(control_path)
        write_manifest_to_transport(manifest_path, base_branch,
            possible_transports)
        autobuild = (base_branch.deb_version is not None)
        if autobuild:
            # Add changelog also substitutes {debupstream}.
            add_autobuild_changelog_entry(
                base_branch, working_directory, package,
                distribution=distribution, append_version=append_version)
        else:
            if append_version:
                raise errors.BzrCommandError(
                    "--append-version only "
                    "supported for autobuild recipes (with a 'deb-version' "
                    "header)")
        with open(os.path.join(
                working_directory, "debian", "changelog")) as cl_f:
            contents = cl_f.read()
        cl = changelog.Changelog(file=contents)
        package_name = cl.package
        package_version = cl.version
        package_dir = calculate_package_dir(
            package_name, package_version, working_basedir)
        # working_directory -> package_dir: after this debian stuff works.
        os.rename(working_directory, package_dir)
        try:
            current_format = get_source_format(package_dir)
            if (package_version.debian_version is not None or
                    current_format == "3.0 (quilt)"):
                # Non-native package
                try:
                    extract_upstream_tarball(
                        base_branch.branch, package_name,
                        package_version.upstream_version, working_basedir)
                # Bug fix: the original used Python 2 `except ..., e:`
                # syntax, which is a SyntaxError on Python 3.
                except errors.NoSuchTag as e:
                    if not allow_fallback_to_native:
                        raise errors.BzrCommandError(
                            "Unable to find the upstream source. Import it "
                            "as tag %s or build with "
                            "--allow-fallback-to-native."
                            % e.tag_name)
                    else:
                        force_native_format(package_dir, current_format)
            if not no_build:
                build_source_package(
                    package_dir, tgz_check=not allow_fallback_to_native)
                if key_id is not None:
                    sign_source_package(package_dir, key_id)
                if dput is not None:
                    dput_source_package(package_dir, dput)
        finally:
            if not no_build:
                # package_dir -> working_directory
                # FIXME: may fail in error unwind, masking the
                # original exception.
                os.rename(package_dir, working_directory)
        # Note that this may write a second manifest.
        if manifest is not None:
            write_manifest_to_transport(manifest, base_branch,
                possible_transports)
    finally:
        # The outer try in the original had no matching cleanup and the
        # mkdtemp() directory above leaked; remove it here.
        if temp_dir is not None:
            import shutil
            shutil.rmtree(temp_dir)
def main(args):
    """Run lintian fixers over each requested package and propose/push
    the results, logging per-package failures without aborting the run.

    :param args: Parsed command-line arguments (packages, fixers,
        pre_check, post_check, update_changelog, build_verify,
        propose_addon_only, committer, mode, refresh, dry_run, diff).
    """
    import distro_info
    import socket
    import subprocess
    import silver_platter  # noqa: F401
    from . import (
        propose_or_push,
        BuildFailedError,
        MissingUpstreamTarball,
        NoSuchPackage,
        )
    from breezy import (
        errors,
        )
    from breezy.trace import note
    from breezy.plugins.propose.propose import (
        NoSuchProject,
        UnsupportedHoster,
        )
    possible_transports = []
    possible_hosters = []
    # Map every lintian tag to the fixer script that handles it.
    fixer_scripts = {}
    for fixer in available_lintian_fixers():
        for tag in fixer.lintian_tags:
            fixer_scripts[tag] = fixer
    available_fixers = set(fixer_scripts)
    if args.fixers:
        available_fixers = available_fixers.intersection(set(args.fixers))
    debian_info = distro_info.DebianDistroInfo()
    for pkg in args.packages:
        if args.pre_check:
            # Shell command run before fixing; a non-zero exit skips the
            # package.
            def pre_check(local_tree):
                try:
                    subprocess.check_call(
                        args.pre_check, shell=True,
                        cwd=local_tree.basedir)
                except subprocess.CalledProcessError:
                    note('%r: pre-check failed, skipping', pkg)
                    return False
                return True
        else:
            pre_check = None
        if args.post_check:
            # Shell command run after fixing, with SINCE_REVID exported.
            def post_check(local_tree, since_revid):
                try:
                    subprocess.check_call(
                        args.post_check, shell=True,
                        cwd=local_tree.basedir,
                        env={'SINCE_REVID': since_revid})
                except subprocess.CalledProcessError:
                    note('%r: post-check failed, skipping', pkg)
                    return False
                return True
        else:
            post_check = None
        note('Processing: %s', pkg)
        # Opening the branch can fail in many ways; each is logged and
        # the package is skipped (no `else` body runs).
        try:
            main_branch = open_packaging_branch(
                pkg, possible_transports=possible_transports)
        except NoSuchPackage:
            note('%s: no such package', pkg)
        except socket.error:
            note('%s: ignoring, socket error', pkg)
        except errors.NotBranchError as e:
            note('%s: Branch does not exist: %s', pkg, e)
        except errors.UnsupportedProtocol:
            note('%s: Branch available over unsupported protocol', pkg)
        except errors.ConnectionError as e:
            note('%s: %s', pkg, e)
        except errors.PermissionDenied as e:
            note('%s: %s', pkg, e)
        except errors.InvalidHttpResponse as e:
            note('%s: %s', pkg, e)
        except errors.TransportError as e:
            note('%s: %s', pkg, e)
        else:
            # If it's unknown which fixers are relevant, just try all of
            # them.
            if args.fixers:
                fixers = args.fixers
            else:
                fixers = available_fixers
            branch_changer = LintianFixer(
                pkg, fixers=[fixer_scripts[fixer] for fixer in fixers],
                update_changelog=args.update_changelog,
                compat_release=debian_info.stable(),
                build_verify=args.build_verify,
                pre_check=pre_check, post_check=post_check,
                propose_addon_only=args.propose_addon_only,
                committer=args.committer)
            try:
                result = propose_or_push(
                    main_branch, "lintian-fixes", branch_changer,
                    args.mode,
                    possible_transports=possible_transports,
                    possible_hosters=possible_hosters,
                    refresh=args.refresh, dry_run=args.dry_run)
            except UnsupportedHoster:
                note('%s: Hoster unsupported', pkg)
                continue
            except NoSuchProject as e:
                note('%s: project %s was not found', pkg, e.project)
                continue
            except BuildFailedError:
                note('%s: build failed', pkg)
                continue
            except MissingUpstreamTarball:
                note('%s: unable to find upstream source', pkg)
                continue
            except errors.PermissionDenied as e:
                note('%s: %s', pkg, e)
                continue
            except PostCheckFailed as e:
                note('%s: %s', pkg, e)
                continue
            else:
                if result.merge_proposal:
                    # Summarise which lintian tags the proposal fixes.
                    tags = set()
                    for brush_result, unused_summary in \
                            branch_changer.applied:
                        tags.update(brush_result.fixed_lintian_tags)
                    if result.is_new:
                        note('%s: Proposed fixes %r: %s', pkg, tags,
                             result.merge_proposal.url)
                    elif tags:
                        note('%s: Updated proposal %s with fixes %r', pkg,
                             result.merge_proposal.url, tags)
                    else:
                        note('%s: No new fixes for proposal %s', pkg,
                             result.merge_proposal.url)
                if args.diff:
                    result.show_base_diff(sys.stdout.buffer)
def watch(owner_name, archive_name, package_name, version):
    """Watch a package build.

    Polls Launchpad once a minute until the source package publishes and
    its builds reach an end state.

    :return: True once the package built and published, or False if it
        fails or there is a timeout waiting.
    """
    version = str(version)
    trace.note("Logging into Launchpad")
    launchpad = get_lp()
    owner = launchpad.people[owner_name]
    archive = owner.getPPAByName(name=archive_name)
    end_states = ['FAILEDTOBUILD', 'FULLYBUILT']
    # Architectures whose build results we actually care about.
    important_arches = ['amd64', 'i386', 'armel']
    trace.note("Waiting for version %s of %s to build." %
        (version, package_name))
    start = time.time()
    while True:
        sourceRecords = list(archive.getPublishedSources(
            source_name=package_name, version=version))
        if not sourceRecords:
            # time.time() - 900 > start  <=>  more than 900s elapsed.
            if time.time() - 900 > start:
                # Over 15 minutes and no source yet, upload FAIL.
                raise errors.BzrCommandError(
                    "No source record in %s/%s for "
                    "package %s=%s after 15 minutes." % (owner_name,
                        archive_name, package_name, version))
                # NOTE(review): unreachable after the raise above.
                return False
            trace.note("Source not available yet - waiting.")
            time.sleep(60)
            continue
        pkg = sourceRecords[0]
        if pkg.status.lower() not in ('published', 'pending'):
            trace.note("Package status: %s" % (pkg.status, ))
            time.sleep(60)
            continue
        # FIXME: LP should export this as an attribute.
        source_id = pkg.self_link.rsplit('/', 1)[1]
        buildSummaries = archive.getBuildSummariesForSourceIds(
            source_ids=[source_id])[source_id]
        if buildSummaries['status'] in end_states:
            break
        if buildSummaries['status'] == 'NEEDSBUILD':
            # We ignore non-virtual PPA architectures that are sparsely
            # supplied with buildds.
            missing = []
            for build in buildSummaries['builds']:
                arch = build['arch_tag']
                if arch in important_arches:
                    missing.append(arch)
            if not missing:
                # All the arches we care about are done.
                break
            extra = ' on ' + ', '.join(missing)
        else:
            extra = ''
        trace.note("%s is still in %s%s" % (pkg.display_name,
            buildSummaries['status'], extra))
        time.sleep(60)
    trace.note("%s is now %s" % (pkg.display_name,
        buildSummaries['status']))
    result = True
    if pkg.status.lower() != 'published':
        result = False  # should this perhaps keep waiting?
    if buildSummaries['status'] != 'FULLYBUILT':
        if buildSummaries['status'] == 'NEEDSBUILD':
            # We're stopping early cause the important_arches are built.
            builds = pkg.getBuilds()
            for build in builds:
                if build.arch_tag in important_arches:
                    if build.buildstate != 'Successfully built':
                        result = False
        else:
            result = False
    return result
def report(text, *args, **kwargs):
    """Log a note with the changer's repr prefixed to the message."""
    # Prepend the changer so it fills the leading %r placeholder.
    note('%r: ' + text, changer, *args, **kwargs)
def run(self, from_location, to_location=None, format=None, trees=False,
        standalone=False, layout=None, all=False, prefix=None, keep=False,
        restore=False, until=None, colocated=False):
    """Import branches from a Subversion repository (or dump file) into
    Bazaar branches under to_location.
    """
    from breezy import (
        osutils,
        trace,
        urlutils,
        )
    from breezy.controldir import ControlDir
    from breezy.errors import (
        BzrCommandError,
        NoRepositoryPresent,
        )
    from . import gettext
    from .convert import convert_repository
    from .remote import SvnRemoteAccess
    from .repository import SvnRepository
    from .workingtree import SvnCheckout
    import os
    from subvertpy import NODE_NONE
    if to_location is None:
        to_location = os.path.basename(from_location.rstrip("/\\"))
    if all:
        # All implies shared repository
        # (otherwise there is no repository to store revisions in)
        standalone = False
    if os.path.isfile(from_location):
        # A file argument is a Subversion dump: load it into a temporary
        # repository first.
        from .convert import load_dumpfile
        import tempfile
        tmp_repos = tempfile.mkdtemp(prefix='bzr-svn-dump-')
        load_dumpfile(from_location, tmp_repos)
        from_location = tmp_repos
    else:
        tmp_repos = None
    from_dir = ControlDir.open(from_location)
    if not (isinstance(from_dir, SvnRemoteAccess) or
            isinstance(from_dir, SvnCheckout)):
        raise BzrCommandError(gettext(
            "Source repository is not a Subversion repository."))
    try:
        from_repos = from_dir.open_repository()
    except NoRepositoryPresent:
        if prefix is not None:
            raise BzrCommandError(
                gettext("Path inside repository specified "
                        "and --prefix specified"))
        from_repos = from_dir.find_repository(_ignore_branch_path=True)
        assert from_dir.root_transport.base.startswith(from_repos.base)
        # Derive the prefix from the path below the repository root.
        prefix = from_dir.root_transport.base[
            len(from_repos.base):].strip("/")
        # NOTE(review): this yields bytes, but prefix.strip("/") below
        # passes a str — on Python 3 that path would raise TypeError.
        # Confirm intended str/bytes handling.
        prefix = prefix.encode("utf-8")
    if not isinstance(from_repos, SvnRepository):
        raise BzrCommandError(
            gettext("Not a Subversion repository: %s") % from_location)
    if until is None:
        to_revnum = from_repos.get_latest_revnum()
    else:
        to_revnum = min(until, from_repos.get_latest_revnum())
    with from_repos.lock_read():
        if prefix is not None:
            if layout is None:
                overall_layout = from_repos.get_guessed_layout()
            else:
                overall_layout = layout
            prefix = prefix.strip("/") + "/"
            if overall_layout.is_branch(prefix):
                raise BzrCommandError(
                    gettext("%s appears to contain a branch. "
                            "For individual branches, use 'bzr branch'.") %
                    from_location)
            # FIXME: Hint about is_tag()
            elif overall_layout.is_branch_parent(prefix):
                self.outf.write(
                    gettext("Importing branches with prefix %s\n") %
                    ("/" + urlutils.unescape_for_display(prefix,
                        self.outf.encoding)))
            else:
                raise BzrCommandError(
                    gettext("The specified path is inside a branch. "
                            "Specify a different URL or a different "
                            "repository layout (see also "
                            "'bzr help svn-layout')."))
        if (prefix is not None and
                from_repos.transport.check_path(prefix, to_revnum)
                == NODE_NONE):
            raise BzrCommandError("Prefix %s does not exist" % prefix)

        def filter_branch(branch):
            # Only convert branches living under the requested prefix.
            if (prefix is not None and
                    not branch.get_branch_path().startswith(prefix)):
                return False
            return True
        trace.note(gettext("Using repository layout: %s"),
                   layout or from_repos.get_layout())
        convert_repository(
            from_repos, to_location, layout,
            not standalone, trees, all, format=format,
            filter_branch=filter_branch, keep=keep,
            incremental=not restore, to_revnum=to_revnum, prefix=prefix,
            colocated=colocated, remember_parent=(tmp_repos is None))
        if tmp_repos is not None:
            osutils.rmtree(tmp_repos)
        if not trees:
            trace.note(
                gettext("Use 'bzr checkout' to create a working tree in "
                        "the newly created branches."))
def run(self, svn_repository, directory=".", no_reconcile=False):
    """Rebuild a repository from correct Subversion data.

    Renames the existing repository to repository.backup, creates a fresh
    one, re-fetches the SVN-derived revisions from the correct Subversion
    repository, then copies the remaining revisions from the backup.

    :param svn_repository: Location of the authoritative SVN repository.
    :param directory: Location of the repository to fix.
    :param no_reconcile: Skip the final reconcile step.
    """
    from breezy.controldir import ControlDir
    from breezy.repository import InterRepository, Repository
    from breezy import trace
    correct_dir = ControlDir.open_containing(svn_repository)[0]
    correct_repo = correct_dir.find_repository()
    repo_to_fix = Repository.open(directory)
    revids = repo_to_fix.all_revision_ids()
    # Revisions from the broken repo that the correct repo also knows.
    present_revisions = correct_repo.has_revisions(revids)
    dir_to_fix = repo_to_fix.controldir
    old_repo_format = repo_to_fix._format
    # Drop our reference before renaming the repository out of the way.
    del repo_to_fix
    trace.note("Renaming existing repository to repository.backup.")
    dir_to_fix.control_transport.rename('repository', 'repository.backup')
    backup_transport = dir_to_fix.control_transport.clone(
        'repository.backup')
    old_repo = old_repo_format.open(dir_to_fix, _found=True,
        _override_transport=backup_transport)
    new_repo = dir_to_fix.create_repository(
        shared=old_repo.is_shared())
    working_trees = old_repo.make_working_trees()
    if working_trees is not None:
        new_repo.set_make_working_trees(working_trees)
    interrepo = InterRepository.get(correct_repo, new_repo)
    revisionfinder = interrepo.get_revision_finder(True)
    trace.note("Finding revisions to fetch from SVN")
    for revid in present_revisions:
        foreign_revid, mapping = correct_repo.lookup_bzr_revision_id(
            revid)
        revisionfinder.find_until(foreign_revid, mapping,
            find_ghosts=False, exclude_non_mainline=False)
    trace.note("Fetching correct SVN revisions")
    interrepo.fetch(needed=revisionfinder.get_missing())
    trace.note("Fetching other revisions")
    # Anything not re-fetched from SVN is copied over from the backup.
    new_repo.fetch(old_repo)
    if not no_reconcile:
        from breezy.reconcile import reconcile
        trace.note("Reconciling new repository.")
        reconcile(dir_to_fix)
    trace.note('Removing backup')
    dir_to_fix.control_transport.delete_tree('repository.backup')
def debug_memory_win32api(message='', short=True):
    """Use trace.note() to dump the running memory info.

    Queries the Win32 ``GetProcessMemoryInfo()`` API via ctypes and
    logs the current process's memory counters.

    :param message: Optional message logged alongside the figures.
    :param short: If True, emit one compact line (working set and peak
        working set only); otherwise emit one line per counter.
    """
    import ctypes
    from breezy import trace

    class PROCESS_MEMORY_COUNTERS_EX(ctypes.Structure):
        """Used by GetProcessMemoryInfo"""
        _fields_ = [('cb', ctypes.c_ulong),
                    ('PageFaultCount', ctypes.c_ulong),
                    ('PeakWorkingSetSize', ctypes.c_size_t),
                    ('WorkingSetSize', ctypes.c_size_t),
                    ('QuotaPeakPagedPoolUsage', ctypes.c_size_t),
                    ('QuotaPagedPoolUsage', ctypes.c_size_t),
                    ('QuotaPeakNonPagedPoolUsage', ctypes.c_size_t),
                    ('QuotaNonPagedPoolUsage', ctypes.c_size_t),
                    ('PagefileUsage', ctypes.c_size_t),
                    ('PeakPagefileUsage', ctypes.c_size_t),
                    ('PrivateUsage', ctypes.c_size_t),
                    ]
    cur_process = ctypes.windll.kernel32.GetCurrentProcess()
    mem_struct = PROCESS_MEMORY_COUNTERS_EX()
    ret = ctypes.windll.psapi.GetProcessMemoryInfo(
        cur_process, ctypes.byref(mem_struct),
        ctypes.sizeof(mem_struct))
    if not ret:
        trace.note(gettext('Failed to GetProcessMemoryInfo()'))
        return
    info = {'PageFaultCount': mem_struct.PageFaultCount,
            'PeakWorkingSetSize': mem_struct.PeakWorkingSetSize,
            'WorkingSetSize': mem_struct.WorkingSetSize,
            'QuotaPeakPagedPoolUsage': mem_struct.QuotaPeakPagedPoolUsage,
            'QuotaPagedPoolUsage': mem_struct.QuotaPagedPoolUsage,
            'QuotaPeakNonPagedPoolUsage':
                mem_struct.QuotaPeakNonPagedPoolUsage,
            'QuotaNonPagedPoolUsage': mem_struct.QuotaNonPagedPoolUsage,
            'PagefileUsage': mem_struct.PagefileUsage,
            'PeakPagefileUsage': mem_struct.PeakPagefileUsage,
            'PrivateUsage': mem_struct.PrivateUsage,
            }
    if short:
        # using base-2 units (see HACKING.txt).
        # Floor division: '/' yields floats on Python 3, which would
        # render the KiB columns as e.g. '1024.0'.
        trace.note(gettext('WorkingSize {0:>7}KiB'
                           '\tPeakWorking {1:>7}KiB\t{2}').format(
                   info['WorkingSetSize'] // 1024,
                   info['PeakWorkingSetSize'] // 1024,
                   message))
        return
    if message:
        trace.note('%s', message)
    trace.note(gettext('WorkingSize %8d KiB'),
               info['WorkingSetSize'] // 1024)
    trace.note(gettext('PeakWorking %8d KiB'),
               info['PeakWorkingSetSize'] // 1024)
    trace.note(gettext('PagefileUsage %8d KiB'),
               info.get('PagefileUsage', 0) // 1024)
    trace.note(gettext('PeakPagefileUsage %8d KiB'),
               info.get('PeakPagefileUsage', 0) // 1024)
    trace.note(gettext('PrivateUsage %8d KiB'),
               info.get('PrivateUsage', 0) // 1024)
    trace.note(gettext('PageFaultCount %8d'),
               info.get('PageFaultCount', 0))
def redirected(transport, e, redirection_notice):
    """Follow a transport redirection, announcing it to the user.

    :param transport: The transport that hit the redirection.
    :param e: The redirection event, carrying ``source`` and
        ``target`` URLs.
    :param redirection_notice: Human-readable message to log before
        following the redirect.
    :return: A transport pointing at the redirection target.
    """
    trace.note(redirection_notice)
    followed = transport._redirected_to(e.source, e.target)
    return followed
def up_thread(self, merge_type=None):
    """Move one thread up in the loom.

    Merges the work of the current thread into the next thread above
    it, makes that thread current, and updates the branch nick and
    revision history accordingly.

    :param merge_type: Merger class to use; defaults to
        ``breezy.merge.Merge3Merger``.
    :return: 0 on success, 1 if the merge reported conflicts.
    :raises breezy.errors.BzrCommandError: if already on the highest
        thread, or if the target thread shares no common ancestor with
        the current one.
    """
    with self.lock_write():
        self._check_switch()
        # set it up:
        # NOTE(review): current_revision is assigned but never used
        # below — looks vestigial; confirm before removing.
        current_revision = self.tree.last_revision()
        threadname = self.tree.branch.nick
        threads = self.tree.branch.get_loom_state().get_threads()
        old_thread_rev = None
        new_thread_name = None
        new_thread_rev = None
        # TODO: Factor this out into a search routine.
        # Walk the threads top-down; the thread seen just before the
        # current one is the thread immediately above it.
        for thread, rev, parents in reversed(threads):
            if thread == threadname:
                # found the current thread.
                old_thread_rev = rev
                break
            new_thread_name = thread
            new_thread_rev = rev
        if new_thread_rev is None:
            # The loop found the current thread first: nothing above it.
            raise breezy.errors.BzrCommandError(
                'Cannot move up from the highest thread.')
        graph = self.tree.branch.repository.get_graph()
        # special case no-change condition.
        if new_thread_rev == old_thread_rev:
            self.tree.branch._set_nick(new_thread_name)
            return 0
        # Map the loom's "empty thread" marker onto bzr's null revision
        # so the merge machinery can handle it.
        if new_thread_rev == EMPTY_REVISION:
            new_thread_rev = breezy.revision.NULL_REVISION
        if old_thread_rev == EMPTY_REVISION:
            old_thread_rev = breezy.revision.NULL_REVISION
        # merge the tree up into the new patch:
        if merge_type is None:
            merge_type = breezy.merge.Merge3Merger
        try:
            merge_controller = breezy.merge.Merger.from_revision_ids(
                self.tree, new_thread_rev, revision_graph=graph)
        except breezy.errors.UnrelatedBranches:
            raise breezy.errors.BzrCommandError(
                'corrupt loom: thread %s'
                ' has no common ancestor with thread %s'
                % (new_thread_name, threadname))
        merge_controller.merge_type = merge_type
        result = merge_controller.do_merge()
        # change the tree to the revision of the new thread.
        parent_trees = []
        if new_thread_rev != breezy.revision.NULL_REVISION:
            parent_trees.append(
                (new_thread_rev, merge_controller.other_tree))
        # record the merge if:
        # the old thread != new thread (we have something to record)
        # and the new thread is not a descendant of old thread
        if (old_thread_rev != new_thread_rev and
                not graph.is_ancestor(old_thread_rev, new_thread_rev)):
            basis_tree = self.tree.basis_tree()
            basis_tree.lock_read()
            parent_trees.append((old_thread_rev, basis_tree))
        else:
            basis_tree = None
        try:
            self.tree.set_parent_trees(parent_trees)
        finally:
            # basis_tree must stay locked while referenced by
            # set_parent_trees; release it afterwards in all cases.
            if basis_tree is not None:
                basis_tree.unlock()
        # The first parent (if any) becomes the new branch tip; with no
        # parents the branch history is emptied.
        if len(parent_trees) == 0:
            new_thread_rev = breezy.revision.NULL_REVISION
        else:
            new_thread_rev = parent_trees[0][0]
        # change the branch
        self.tree.branch.generate_revision_history(new_thread_rev)
        # update the branch nick.
        self.tree.branch._set_nick(new_thread_name)
        trace.note("Moved to thread '%s'." % new_thread_name)
        # Hint at cleanup when the merged-up thread adds no changes.
        if (basis_tree is not None and not result and
                not self.tree.changes_from(basis_tree).has_changed()):
            trace.note("This thread is now empty, you may wish to "
                       'run "bzr combine-thread" to remove it.')
        # result is the number of conflicts from do_merge().
        if result != 0:
            return 1
        else:
            return 0