Example #1
    def checkout_sources(self):
        dirs = self.query_abs_dirs()
        gecko_config = self.load_gecko_config()
        b2g_manifest_intree = gecko_config.get('b2g_manifest_intree')

        if gecko_config.get('config_version', 0) >= 2:
            repos = [
                {'vcs': 'gittool', 'repo': 'https://git.mozilla.org/b2g/B2G.git', 'dest': dirs['work_dir']},
            ]

            if b2g_manifest_intree:
                # Checkout top-level B2G repo now
                self.vcs_checkout_repos(repos)
                b2g_manifest_branch = 'master'

                # That may have blown away our build-tools checkout. It would
                # be better if B2G were checked out into a subdirectory, but
                # for now, just redo it.
                self.checkout_tools()

                # Now checkout gecko inside the build directory
                self.checkout_gecko()
                conf_dir = os.path.join(dirs['gecko_src'], os.path.dirname(self.query_gecko_config_path()))
                manifest_filename = os.path.join(conf_dir, 'sources.xml')
                self.info("Using manifest at %s" % manifest_filename)
                have_gecko = True
            else:
                # Checkout B2G and b2g-manifests. We'll do gecko later
                b2g_manifest_branch = gecko_config.get('b2g_manifest_branch', 'master')
                repos.append(
                    {'vcs': 'gittool',
                     'repo': 'https://git.mozilla.org/b2g/b2g-manifest.git',
                     'dest': os.path.join(dirs['work_dir'], 'b2g-manifest'),
                     'branch': b2g_manifest_branch},
                )
                manifest_filename = gecko_config.get('b2g_manifest', self.config['target'] + '.xml')
                manifest_filename = os.path.join(dirs['work_dir'], 'b2g-manifest', manifest_filename)
                self.vcs_checkout_repos(repos)
                have_gecko = False

            manifest = load_manifest(manifest_filename)

            if not b2g_manifest_intree:
                # Now munge the manifest by mapping remotes to local remotes
                mapping_func = functools.partial(map_remote, mappings=self.config['repo_remote_mappings'])

                rewrite_remotes(manifest, mapping_func)
                # Remove gecko, since we'll be checking that out ourselves
                gecko_node = remove_project(manifest, path='gecko')
                if not gecko_node:
                    self.fatal("couldn't remove gecko from manifest")

            # Write out our manifest locally
            manifest_dir = os.path.join(dirs['work_dir'], 'tmp_manifest')
            self.rmtree(manifest_dir)
            self.mkdir_p(manifest_dir)
            manifest_filename = os.path.join(manifest_dir, self.config['target'] + '.xml')
            self.info("Writing manifest to %s" % manifest_filename)
            with open(manifest_filename, 'w') as manifest_file:
                manifest.writexml(manifest_file)

            # Set up repo
            repo_link = os.path.join(dirs['work_dir'], '.repo')
            if 'repo_mirror_dir' in self.config:
                # Make our local .repo directory a symlink to the shared repo
                # directory
                repo_mirror_dir = self.config['repo_mirror_dir']
                self.mkdir_p(repo_mirror_dir)
                if not os.path.exists(repo_link) or not os.path.islink(repo_link):
                    self.rmtree(repo_link)
                    self.info("Creating link from %s to %s" % (repo_link, repo_mirror_dir))
                    os.symlink(repo_mirror_dir, repo_link)

            # Checkout the repo tool
            if 'repo_repo' in self.config:
                repo_dir = os.path.join(dirs['work_dir'], '.repo', 'repo')
                self.checkout_repotool(repo_dir)

                cmd = ['./repo', '--version']
                if self.run_command(cmd, cwd=dirs['work_dir']) != 0:
                    # Set return code to RETRY
                    self.fatal("repo is broken", exit_code=4)

            # Check it out!
            max_tries = 5
            sleep_time = 60
            max_sleep_time = 300
            for _ in range(max_tries):
                # If .repo points somewhere, then try and reset our state
                # before running config.sh
                if os.path.isdir(repo_link):
                    # Delete any projects with broken HEAD references
                    self.info("Deleting broken projects...")
                    cmd = ['./repo', 'forall', '-c', 'git show-ref -q --head HEAD || rm -rfv $PWD']
                    self.run_command(cmd, cwd=dirs['work_dir'])

                config_result = self.run_command([
                    './config.sh', '-q', self.config['target'], manifest_filename,
                ], cwd=dirs['work_dir'], output_timeout=55 * 60)

                # TODO: Check return code from these? retry?
                # Run git reset --hard to make sure we're in a clean state
                self.info("Resetting all git projects")
                cmd = ['./repo', 'forall', '-c', 'git reset --hard']
                self.run_command(cmd, cwd=dirs['work_dir'])

                self.info("Cleaning all git projects")
                cmd = ['./repo', 'forall', '-c', 'git clean -f -x -d']
                self.run_command(cmd, cwd=dirs['work_dir'])

                if config_result == 0:
                    break
                else:
                    # We may have died due to left-over lock files. Make sure
                    # we clean those up before trying again.
                    self.info("Deleting stale lock files")
                    cmd = ['find', '.repo/', '-name', '*.lock', '-print', '-delete']
                    self.run_command(cmd, cwd=dirs['work_dir'])

                    # Try again in a bit. Broken clones should be deleted and
                    # re-tried above
                    self.info("config.sh failed; sleeping %i and retrying" % sleep_time)
                    time.sleep(sleep_time)
                    # Exponential backoff with random jitter
                    sleep_time = min(sleep_time * 1.5, max_sleep_time) + random.randint(1, 60)
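            # Note: this 'else' belongs to the for loop, not an if; it runs
            # only when the loop finished without a break, i.e. every
            # attempt at config.sh failed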
            else:
                self.fatal("failed to run config.sh")

            # Workaround bug 985837
            if self.config['target'] == 'emulator-kk':
                self.info("Forcing -j4 for emulator-kk")
                dotconfig_file = os.path.join(dirs['abs_work_dir'], '.config')
                with open(dotconfig_file, "a+") as f:
                    f.write("\nMAKE_FLAGS=-j1\n")

            # output our sources.xml, make a copy for update_sources_xml()
            self.run_command(
                ["./gonk-misc/add-revision.py", "-o", "sources.xml", "--force",
                 ".repo/manifest.xml"], cwd=dirs["work_dir"],
                halt_on_failure=True, fatal_exit_code=3)
            self.run_command(["cat", "sources.xml"], cwd=dirs['work_dir'], halt_on_failure=True, fatal_exit_code=3)
            self.run_command(["cp", "-p", "sources.xml", "sources.xml.original"], cwd=dirs['work_dir'], halt_on_failure=True, fatal_exit_code=3)

            manifest = load_manifest(os.path.join(dirs['work_dir'], 'sources.xml'))
            gaia_node = get_project(manifest, path="gaia")
            gaia_rev = gaia_node.getAttribute("revision")
            gaia_remote = get_remote(manifest, gaia_node.getAttribute('remote'))
            gaia_repo = "%s/%s" % (gaia_remote.getAttribute('fetch'), gaia_node.getAttribute('name'))
            gaia_url = self.query_gitweb_url(gaia_repo, gaia_rev)
            self.set_buildbot_property("gaia_revision", gaia_rev, write_to_file=True)
            self.info("TinderboxPrint: gaia_revlink: %s" % gaia_url)

            # Now we can checkout gecko and other stuff
            if not have_gecko:
                self.checkout_gecko()
            return

        # Old behaviour
        self.checkout_gecko()
        self.checkout_gaia()
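
A pattern worth pulling out of this example is the config.sh retry loop: capped exponential backoff with random jitter, using Python's for/else so that exhausting every attempt falls through to a fatal error. Here is a minimal, self-contained sketch of the same control flow; run_with_backoff and its use of subprocess.call are illustrative stand-ins for the harness's run_command and fatal:

import random
import subprocess
import time

def run_with_backoff(cmd, max_tries=5, sleep_time=60, max_sleep_time=300):
    for _ in range(max_tries):
        # subprocess.call returns the exit code; 0 means success
        if subprocess.call(cmd) == 0:
            break
        print("command failed; sleeping %d and retrying" % sleep_time)
        time.sleep(sleep_time)
        # Grow the delay up to a cap, plus jitter so parallel builders
        # don't all retry in lockstep
        sleep_time = min(sleep_time * 1.5, max_sleep_time) + random.randint(1, 60)
    else:
        # Reached only if the loop never hit `break`, i.e. every attempt failed
        raise RuntimeError("command failed after %d tries" % max_tries)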
Example #2
    def resolve_refs(self, manifest):
        worker_pool = ThreadPool(20)
        lookup_threads_by_project = {}
        lookup_threads_by_parameters = {}

        # Resolve refnames
        for p in manifest.getElementsByTagName('project'):
            name = p.getAttribute('name')
            remote_url = repo_manifest.get_project_remote_url(manifest, p)
            revision = repo_manifest.get_project_revision(manifest, p)

            # commit ids are already done
            if repo_manifest.is_commitid(revision):
                self.debug("%s is already locked to %s; skipping" %
                           (name, revision))
                continue

            # gaia is special - make sure we're using the same revision we used
            # for gaia.json
            if self.gaia_hg_revision and p.getAttribute('path') == 'gaia' and revision == self.config['gaia_git_branch']:
                git_rev = self.query_gaia_git_rev()
                self.info("Using %s for gaia to match %s in gaia.json" % (git_rev, self.gaia_hg_revision))
                p.setAttribute('revision', git_rev)
                continue

            # If there's no '/' in the revision, assume it's a head
            if '/' not in revision:
                revision = 'refs/heads/%s' % revision

            cache_key = "%s:%s" % (remote_url, revision)

            # Check to see if we've looked up this revision on this remote
            # before. If we have, reuse the previous value rather than looking
            # it up again. This will make sure revisions for the same ref name
            # are consistent between devices, as long as they use the same
            # remote/refname.
            if cache_key in self._git_ref_cache:
                abs_revision = self._git_ref_cache[cache_key]
                self.debug(
                    "Reusing previous lookup %s -> %s" %
                    (cache_key, abs_revision))
                p.setAttribute('revision', abs_revision)
                continue

            # Maybe a thread already exists for this lookup, even if the result has not
            # yet been retrieved and placed in _git_ref_cache...
            # Please note result.get() can be called multiple times without problems;
            # the git command will only be executed once. Therefore we can associate many
            # projects to the same thread result, without problems later when we call
            # get() multiple times against the same thread result.
            if cache_key in lookup_threads_by_parameters:
                self.debug("Reusing currently running thread to look up %s" % cache_key)
                lookup_threads_by_project[p] = lookup_threads_by_parameters.get(cache_key)
            else:
                async_result = worker_pool.apply_async(self.resolve_git_ref,
                                                       (remote_url, revision))
                lookup_threads_by_parameters[cache_key] = async_result
                lookup_threads_by_project[p] = async_result

        # TODO: alert/notify on missing repositories
        abort = False
        failed = []
        for p, result in lookup_threads_by_project.iteritems():
            abs_revision = result.get(timeout=300)
            remote_url = repo_manifest.get_project_remote_url(manifest, p)
            revision = repo_manifest.get_project_revision(manifest, p)
            if not abs_revision:
                abort = True
                self.error("Couldn't resolve reference %s %s" % (remote_url, revision))
                failed.append(p)
                # Don't write a None revision into the manifest; the abort
                # path below reports all failures before exiting
                continue
            p.setAttribute('revision', abs_revision)
        if abort:
            # Write message about how to set up syncing
            default = repo_manifest.get_default(manifest)
            for p in failed:
                if p.hasAttribute('remote'):
                    remote = repo_manifest.get_remote(manifest, p.getAttribute('remote'))
                else:
                    remote = repo_manifest.get_remote(manifest, default.getAttribute('remote'))

                new_fetch_url = remote.getAttribute('fetch')
                orig_fetch_url = self._remote_mappings[new_fetch_url]
                name = p.getAttribute('name')
                self.info("needs sync? %s/%s -> %s/%s" % (orig_fetch_url, name, new_fetch_url, name))

            self.fatal("couldn't resolve some refs; exiting")
Example #3
    def resolve_refs(self, manifest):
        worker_pool = ThreadPool(20)
        results = []

        # Resolve refnames
        for p in manifest.getElementsByTagName('project'):
            name = p.getAttribute('name')
            remote_url = repo_manifest.get_project_remote_url(manifest, p)
            revision = repo_manifest.get_project_revision(manifest, p)

            # commit ids are already done
            if repo_manifest.is_commitid(revision):
                self.debug("%s is already locked to %s; skipping" %
                           (name, revision))
                continue

            # gaia is special - make sure we're using the same revision we used
            # for gaia.json
            if self.gaia_hg_revision and p.getAttribute('path') == 'gaia' and revision == self.config['gaia_git_branch']:
                if not self.gaia_git_rev:
                    self.gaia_git_rev = self.query_mapper_git_revision(
                        self.config['mapper_url'],
                        self.config['gaia_mapper_project'],
                        self.gaia_hg_revision,
                    )
                self.info("Using %s for gaia to match %s in gaia.json" % (self.gaia_git_rev, self.gaia_hg_revision))
                p.setAttribute('revision', self.gaia_git_rev)
                continue

            # Check to see if we've looked up this revision on this remote
            # before. If we have, reuse the previous value rather than looking
            # it up again. This will make sure revisions for the same ref name
            # are consistent between devices, as long as they use the same
            # remote/refname.
            if (remote_url, revision) in self._git_ref_cache:
                abs_revision = self._git_ref_cache[remote_url, revision]
                self.info(
                    "Re-using previous lookup %s:%s -> %s" %
                    (remote_url, revision, abs_revision))
                p.setAttribute('revision', abs_revision)
                continue

            # If there's no '/' in the revision, assume it's a head
            if '/' not in revision:
                revision = 'refs/heads/%s' % revision

            self.debug("Getting revision for %s (currently %s)" %
                       (name, revision))
            async_result = worker_pool.apply_async(self.resolve_git_ref,
                                                   (remote_url, revision))
            results.append((p, async_result))

        # TODO: alert/notify on missing repositories
        # TODO: Add external caching
        abort = False
        failed = []
        for p, result in results:
            abs_revision = result.get()
            remote_url = repo_manifest.get_project_remote_url(manifest, p)
            revision = repo_manifest.get_project_revision(manifest, p)
            if not abs_revision:
                abort = True
                self.error("Couldn't resolve %s %s" % (remote_url, revision))
                failed.append(p)
                # Don't cache the failure or write a None revision into the
                # manifest; the abort path below reports it before exiting
                continue
            # Save the successful lookup to our cache
            self._git_ref_cache[remote_url, revision] = abs_revision
            p.setAttribute('revision', abs_revision)
        if abort:
            # Write message about how to set up syncing
            default = repo_manifest.get_default(manifest)
            for p in failed:
                if p.hasAttribute('remote'):
                    remote = repo_manifest.get_remote(manifest, p.getAttribute('remote'))
                else:
                    remote = repo_manifest.get_remote(manifest, default.getAttribute('remote'))

                new_fetch_url = remote.getAttribute('fetch')
                orig_fetch_url = self._remote_mappings[new_fetch_url]
                name = p.getAttribute('name')
                self.info("needs sync? %s/%s -> %s/%s" % (orig_fetch_url, name, new_fetch_url, name))

            self.fatal("couldn't resolve some refs; exiting")
Example #4
    def checkout_sources(self):
        dirs = self.query_abs_dirs()
        gecko_config = self.load_gecko_config()
        b2g_manifest_intree = gecko_config.get('b2g_manifest_intree')
        b2g_repo = gecko_config.get('b2g_repo',
                                    'https://github.com/mozilla-b2g/B2G.git')
        b2g_branch = gecko_config.get('b2g_branch', 'master')

        if gecko_config.get('config_version', 0) >= 2:
            repos = [
                {
                    'vcs': 'gittool',
                    'repo': b2g_repo,
                    'branch': b2g_branch,
                    'dest': dirs['work_dir']
                },
            ]

            if b2g_manifest_intree:
                # Checkout top-level B2G repo now
                self.vcs_checkout_repos(repos)
                b2g_manifest_branch = 'master'

                # That may have blown away our build-tools checkout. It would
                # be better if B2G were checked out into a subdirectory, but
                # for now, just redo it.
                self.checkout_tools()

                # Now checkout gecko inside the build directory
                self.checkout_gecko()
                conf_dir = os.path.join(
                    dirs['gecko_src'],
                    os.path.dirname(self.query_gecko_config_path()))
                manifest_filename = os.path.join(conf_dir, 'sources.xml')
                self.info("Using manifest at %s" % manifest_filename)
                have_gecko = True
            else:
                # Checkout B2G and b2g-manifests. We'll do gecko later
                b2g_manifest_branch = gecko_config.get('b2g_manifest_branch',
                                                       'master')
                repos.append(
                    {
                        'vcs': 'gittool',
                        'repo': 'https://github.com/mozilla-b2g/b2g-manifest',
                        'dest': os.path.join(dirs['work_dir'], 'b2g-manifest'),
                        'branch': b2g_manifest_branch
                    })
                manifest_filename = gecko_config.get(
                    'b2g_manifest', self.config['target'] + '.xml')
                manifest_filename = os.path.join(dirs['work_dir'],
                                                 'b2g-manifest',
                                                 manifest_filename)
                self.vcs_checkout_repos(repos)
                have_gecko = False

            manifest = load_manifest(manifest_filename)

            if not b2g_manifest_intree:
                # Now munge the manifest by mapping remotes to local remotes
                mapping_func = functools.partial(
                    map_remote, mappings=self.config['repo_remote_mappings'])

                rewrite_remotes(manifest, mapping_func)
                # Remove gecko, since we'll be checking that out ourselves
                gecko_node = remove_project(manifest, path='gecko')
                if not gecko_node:
                    self.fatal("couldn't remove gecko from manifest")

            # Write out our manifest locally
            manifest_dir = os.path.join(dirs['work_dir'], 'tmp_manifest')
            self.rmtree(manifest_dir)
            self.mkdir_p(manifest_dir)
            manifest_filename = os.path.join(manifest_dir,
                                             self.config['target'] + '.xml')
            self.info("Writing manifest to %s" % manifest_filename)
            with open(manifest_filename, 'w') as manifest_file:
                manifest.writexml(manifest_file)

            # Set up repo
            repo_link = os.path.join(dirs['work_dir'], '.repo')
            if 'repo_mirror_dir' in self.config:
                # Make our local .repo directory a symlink to the shared repo
                # directory
                repo_mirror_dir = self.config['repo_mirror_dir']
                self.mkdir_p(repo_mirror_dir)
                if not os.path.exists(repo_link) or not os.path.islink(
                        repo_link):
                    self.rmtree(repo_link)
                    self.info("Creating link from %s to %s" %
                              (repo_link, repo_mirror_dir))
                    os.symlink(repo_mirror_dir, repo_link)

            # Checkout the repo tool
            if 'repo_repo' in self.config:
                repo_dir = os.path.join(dirs['work_dir'], '.repo', 'repo')
                self.checkout_repotool(repo_dir)

                cmd = ['./repo', '--version']
                if self.run_command(cmd, cwd=dirs['work_dir']) != 0:
                    # Set return code to RETRY
                    self.fatal("repo is broken", exit_code=4)

            # Check it out!
            max_tries = 5
            sleep_time = 60
            max_sleep_time = 300
            for _ in range(max_tries):
                # If .repo points somewhere, then try and reset our state
                # before running config.sh
                if os.path.isdir(repo_link):
                    # Delete any projects with broken HEAD references
                    self.info("Deleting broken projects...")
                    cmd = [
                        './repo', 'forall', '-c',
                        'git show-ref -q --head HEAD || rm -rfv $PWD'
                    ]
                    self.run_command(cmd, cwd=dirs['work_dir'])

                # bug https://bugzil.la/1177190 - workaround - change
                # timeout from 55 to 10 min, based on "normal" being
                # about 7.5 minutes
                config_result = self.run_command([
                    './config.sh', '-q', self.config['target'], manifest_filename,
                ], cwd=dirs['work_dir'], output_timeout=10 * 60)

                # TODO: Check return code from these? retry?
                # Run git reset --hard to make sure we're in a clean state
                self.info("Resetting all git projects")
                cmd = ['./repo', 'forall', '-c', 'git reset --hard']
                self.run_command(cmd, cwd=dirs['work_dir'])

                self.info("Cleaning all git projects")
                cmd = ['./repo', 'forall', '-c', 'git clean -f -x -d']
                self.run_command(cmd, cwd=dirs['work_dir'])

                if config_result == 0:
                    break
                else:
                    # We may have died due to left-over lock files. Make sure
                    # we clean those up before trying again.
                    self.info("Deleting stale lock files")
                    cmd = [
                        'find', '.repo/', '-name', '*.lock', '-print',
                        '-delete'
                    ]
                    self.run_command(cmd, cwd=dirs['work_dir'])

                    # Try again in a bit. Broken clones should be deleted and
                    # re-tried above
                    self.info("config.sh failed; sleeping %i and retrying" %
                              sleep_time)
                    time.sleep(sleep_time)
                    # Exponential backoff with random jitter
                    sleep_time = min(sleep_time * 1.5,
                                     max_sleep_time) + random.randint(1, 60)
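            # Note: this 'else' belongs to the for loop and runs only when
            # the loop finished without a break, i.e. every attempt failed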
            else:
                self.fatal("failed to run config.sh")

            # output our sources.xml, make a copy for update_sources_xml()
            self.run_command(
                ["./gonk-misc/add-revision.py", "-o", "sources.xml", "--force",
                 ".repo/manifest.xml"], cwd=dirs["work_dir"],
                halt_on_failure=True, fatal_exit_code=3)
            self.run_command(["cat", "sources.xml"], cwd=dirs['work_dir'],
                             halt_on_failure=True, fatal_exit_code=3)
            self.run_command(["cp", "-p", "sources.xml", "sources.xml.original"],
                             cwd=dirs['work_dir'], halt_on_failure=True,
                             fatal_exit_code=3)

            manifest = load_manifest(
                os.path.join(dirs['work_dir'], 'sources.xml'))
            gaia_node = get_project(manifest, path="gaia")
            gaia_rev = gaia_node.getAttribute("revision")
            gaia_remote = get_remote(manifest,
                                     gaia_node.getAttribute('remote'))
            gaia_repo = "%s/%s" % (gaia_remote.getAttribute('fetch'),
                                   gaia_node.getAttribute('name'))
            gaia_url = self.query_gitweb_url(gaia_repo, gaia_rev)
            self.set_buildbot_property("gaia_revision",
                                       gaia_rev,
                                       write_to_file=True)
            self.info("TinderboxPrint: gaia_revlink: %s" % gaia_url)

            # Now we can checkout gecko and other stuff
            if not have_gecko:
                self.checkout_gecko()
            return

        # Old behaviour
        self.checkout_gecko()
        self.checkout_gaia()
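
rewrite_remotes and map_remote are imported helpers whose source isn't part of this example; the manifest-munging step they implement is walking the manifest's <remote> elements and swapping each fetch URL for a configured local mirror. A rough reimplementation under that assumption (the real helpers may differ in detail, e.g. by returning the rewritten node):

import functools
from xml.dom.minidom import parseString

def map_remote(remote, mappings):
    # Swap the fetch URL when a local mirror is configured for it
    fetch = remote.getAttribute('fetch')
    if fetch in mappings:
        remote.setAttribute('fetch', mappings[fetch])

def rewrite_remotes(manifest, mapping_func):
    for remote in manifest.getElementsByTagName('remote'):
        mapping_func(remote)

manifest = parseString(
    '<manifest><remote name="b2g" fetch="https://git.mozilla.org/b2g"/></manifest>')
rewrite_remotes(manifest, functools.partial(
    map_remote, mappings={'https://git.mozilla.org/b2g': 'http://mirror.local/b2g'}))
print(manifest.toxml())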
Example #5
    def resolve_refs(self, manifest):
        worker_pool = ThreadPool(20)
        lookup_threads_by_project = {}
        lookup_threads_by_parameters = {}

        # Resolve refnames
        for p in manifest.getElementsByTagName('project'):
            name = p.getAttribute('name')
            remote_url = repo_manifest.get_project_remote_url(manifest, p)
            revision = repo_manifest.get_project_revision(manifest, p)

            # commit ids are already done
            if repo_manifest.is_commitid(revision):
                self.debug("%s is already locked to %s; skipping" %
                           (name, revision))
                continue

            # gaia is special - make sure we're using the same revision we used
            # for gaia.json
            if self.gaia_hg_revision and p.getAttribute('path') == 'gaia' and revision == self.config['gaia_git_branch']:
                git_rev = self.query_gaia_git_rev()
                self.info("Using %s for gaia to match %s in gaia.json" % (git_rev, self.gaia_hg_revision))
                p.setAttribute('revision', git_rev)
                continue

            # If there's no '/' in the revision, assume it's a head
            if '/' not in revision:
                revision = 'refs/heads/%s' % revision

            cache_key = "%s:%s" % (remote_url, revision)

            # Check to see if we've looked up this revision on this remote
            # before. If we have, reuse the previous value rather than looking
            # it up again. This will make sure revisions for the same ref name
            # are consistent between devices, as long as they use the same
            # remote/refname.
            if cache_key in self._git_ref_cache:
                abs_revision = self._git_ref_cache[cache_key]
                self.info(
                    "Reusing previous lookup %s -> %s" %
                    (cache_key, abs_revision))
                p.setAttribute('revision', abs_revision)
                continue

            # Maybe a thread already exists for this lookup, even if the result has not
            # yet been retrieved and placed in _git_ref_cache...
            # Please note result.get() can be called multiple times without problems;
            # the git command will only be executed once. Therefore we can associate many
            # projects to the same thread result, without problems later when we call
            # get() multiple times against the same thread result.
            if cache_key in lookup_threads_by_parameters:
                self.info("Reusing currently running thread to look up %s" % cache_key)
                lookup_threads_by_project[p] = lookup_threads_by_parameters.get(cache_key)
            else:
                async_result = worker_pool.apply_async(self.resolve_git_ref,
                                                       (remote_url, revision))
                lookup_threads_by_parameters[cache_key] = async_result
                lookup_threads_by_project[p] = async_result

        # TODO: alert/notify on missing repositories
        abort = False
        failed = []
        for p, result in lookup_threads_by_project.iteritems():
            abs_revision = result.get(timeout=300)
            remote_url = repo_manifest.get_project_remote_url(manifest, p)
            revision = repo_manifest.get_project_revision(manifest, p)
            if not abs_revision:
                abort = True
                self.error("Couldn't resolve reference %s %s" % (remote_url, revision))
                failed.append(p)
                # Don't write a None revision into the manifest; the abort
                # path below reports all failures before exiting
                continue
            p.setAttribute('revision', abs_revision)
        if abort:
            # Write message about how to set up syncing
            default = repo_manifest.get_default(manifest)
            for p in failed:
                if p.hasAttribute('remote'):
                    remote = repo_manifest.get_remote(manifest, p.getAttribute('remote'))
                else:
                    remote = repo_manifest.get_remote(manifest, default.getAttribute('remote'))

                new_fetch_url = remote.getAttribute('fetch')
                orig_fetch_url = self._remote_mappings[new_fetch_url]
                name = p.getAttribute('name')
                self.info("needs sync? %s/%s -> %s/%s" % (orig_fetch_url, name, new_fetch_url, name))

            self.fatal("couldn't resolve some refs; exiting")