def resolve_refs(self):
        manifest = self.query_manifest()

        worker_pool = ThreadPool(8)
        results = []

        # Resolve refnames
        for p in manifest.getElementsByTagName('project'):
            name = p.getAttribute('name')
            remote_url = repo_manifest.get_project_remote_url(manifest, p)
            revision = repo_manifest.get_project_revision(manifest, p)
            # commit ids are already done
            if repo_manifest.is_commitid(revision):
                self.debug("%s is already locked to %s; skipping" %
                           (name, revision))
                continue

            # If there's no '/' in the revision, assume it's a head
            if '/' not in revision:
                revision = 'refs/heads/%s' % revision

            self.debug("Getting revision for %s (currently %s)" %
                       (name, revision))
            async_result = worker_pool.apply_async(self.resolve_git_ref,
                                                   (remote_url, revision))
            results.append((p, async_result))

        for p, result in results:
            revision = result.get()
            p.setAttribute('revision', revision)
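
The pool workers above call self.resolve_git_ref(remote_url, revision), which none of these snippets define (ThreadPool is presumably multiprocessing.pool.ThreadPool). As a rough sketch only: a helper along these lines could resolve a refname to a commit id by shelling out to git ls-remote. The function name, return convention, and error handling here are assumptions, not the mozharness implementation.

import subprocess

def resolve_git_ref(remote_url, refname):
    """Return the commit id that refname points to on remote_url,
    or an empty string if it can't be resolved (illustrative sketch only)."""
    try:
        output = subprocess.check_output(['git', 'ls-remote', remote_url, refname])
    except subprocess.CalledProcessError:
        return ''
    for line in output.decode('utf-8').splitlines():
        # ls-remote prints "<sha>\t<refname>" per matching ref
        sha, _, ref = line.partition('\t')
        if ref == refname:
            return sha
    return ''
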
Example #3
    def resolve_refs(self, manifest):
        worker_pool = ThreadPool(20)
        lookup_threads_by_project = {}
        lookup_threads_by_parameters = {}

        # Resolve refnames
        for p in manifest.getElementsByTagName('project'):
            name = p.getAttribute('name')
            remote_url = repo_manifest.get_project_remote_url(manifest, p)
            revision = repo_manifest.get_project_revision(manifest, p)

            # commit ids are already done
            if repo_manifest.is_commitid(revision):
                self.debug("%s is already locked to %s; skipping" %
                           (name, revision))
                continue

            # gaia is special - make sure we're using the same revision we used
            # for gaia.json
            if self.gaia_hg_revision and p.getAttribute('path') == 'gaia' and revision == self.config['gaia_git_branch']:
                git_rev = self.query_gaia_git_rev()
                self.info("Using %s for gaia to match %s in gaia.json" % (git_rev, self.gaia_hg_revision))
                p.setAttribute('revision', git_rev)
                continue

            # If there's no '/' in the revision, assume it's a head
            if '/' not in revision:
                revision = 'refs/heads/%s' % revision

            cache_key = "%s:%s" % (remote_url, revision)

            # Check to see if we've looked up this revision on this remote
            # before. If we have, reuse the previous value rather than looking
            # it up again. This will make sure revisions for the same ref name
            # are consistent between devices, as long as they use the same
            # remote/refname.
            if cache_key in self._git_ref_cache:
                abs_revision = self._git_ref_cache[cache_key]
                self.debug(
                    "Reusing previous lookup %s -> %s" %
                    (cache_key, abs_revision))
                p.setAttribute('revision', abs_revision)
                continue

            # Maybe a thread already exists for this lookup, even if the result has not
            # yet been retrieved and placed in _git_ref_cache...
            # Please note result.get() can be called multiple times without problems;
            # the git command will only be executed once. Therefore we can associate many
            # projects to the same thread result, without problems later when we call
            # get() multiple times against the same thread result.
            if cache_key in lookup_threads_by_parameters:
                self.debug("Reusing currently running thread to look up %s" % cache_key)
                lookup_threads_by_project[p] = lookup_threads_by_parameters.get(cache_key)
            else:
                async_result = worker_pool.apply_async(self.resolve_git_ref,
                                                       (remote_url, revision))
                lookup_threads_by_parameters[cache_key] = async_result
                lookup_threads_by_project[p] = async_result

        # TODO: alert/notify on missing repositories
        abort = False
        failed = []
        for p, result in lookup_threads_by_project.iteritems():
            abs_revision = result.get(timeout=300)
            remote_url = repo_manifest.get_project_remote_url(manifest, p)
            revision = repo_manifest.get_project_revision(manifest, p)
            if not abs_revision:
                abort = True
                self.error("Couldn't resolve reference %s %s" % (remote_url, revision))
                failed.append(p)
                continue  # don't write an empty revision into the manifest
            p.setAttribute('revision', abs_revision)
        if abort:
            # Write message about how to set up syncing
            default = repo_manifest.get_default(manifest)
            for p in failed:
                if p.hasAttribute('remote'):
                    remote = repo_manifest.get_remote(manifest, p.getAttribute('remote'))
                else:
                    remote = repo_manifest.get_remote(manifest, default.getAttribute('remote'))

                new_fetch_url = remote.getAttribute('fetch')
                orig_fetch_url = self._remote_mappings[new_fetch_url]
                name = p.getAttribute('name')
                self.info("needs sync? %s/%s -> %s/%s" % (orig_fetch_url, name, new_fetch_url, name))

            self.fatal("couldn't resolve some refs; exiting")
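
The deduplication above leans on the fact that a multiprocessing.pool AsyncResult can have get() called any number of times: the task runs once and every caller receives the same value. Here is a stripped-down sketch of that pattern outside the class; the lookup function and the request list are made up for illustration.

from multiprocessing.pool import ThreadPool

def lookup(cache_key):
    # stand-in for an expensive remote lookup (illustrative only)
    return "resolved:%s" % cache_key

pool = ThreadPool(4)
pending = {}   # cache_key -> AsyncResult
requests = [("git://example.invalid/a", "refs/heads/master"),
            ("git://example.invalid/a", "refs/heads/master"),
            ("git://example.invalid/b", "refs/heads/master")]

results = []
for remote_url, refname in requests:
    cache_key = "%s:%s" % (remote_url, refname)
    if cache_key not in pending:
        # only one pool task per unique remote/refname pair
        pending[cache_key] = pool.apply_async(lookup, (cache_key,))
    results.append(pending[cache_key])

# get() blocks until the task finishes and can be called repeatedly
print([r.get(timeout=300) for r in results])
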
Example #4
    def munge_manifests(self):
        """ Switch the branched repos to the new branch; lock down third
            party revisions.
            """
        branch_repos = self.query_branch_repos()
        dirs = self.query_abs_dirs()
        new_branch = self.config['branch_name']
        unused_manifests = []
        if not self.check_existing_branch(new_branch, cwd=dirs['abs_manifest_dir'])[0]:
            self.fatal("b2g-manifest isn't branched properly!  Run --clean-repos --branch-repos")
        for manifest in self.query_manifests():
            self.info("Munging %s..." % manifest)
            doc = repo_manifest.load_manifest(manifest)
            try:
                repo_manifest.get_default(doc).getAttribute("revision")
            except IndexError:
                self.info("No default revision; skipping.")
                unused_manifests.append(manifest)
                continue
            for p in doc.getElementsByTagName('project'):
                name = self._query_repo_name(p.getAttribute('name'))
                fetch = repo_manifest.get_project_remote_url(doc, p)
                self.debug("Remote %s Name %s" % (fetch, name))
                current_revision = repo_manifest.get_project_revision(doc, p)
                if repo_manifest.is_commitid(current_revision):
                    self.info("%s: %s is already locked to %s; skipping." % (manifest, name, current_revision))
                    # I could setAttribute() here, but I very much doubt the
                    # default_revision is a commitid.
                    continue
                # We've branched this repo; do we set the revision to
                # new_branch or not?  ('fetch' needs to match, since we have
                # same-named repos with different urls =P )
                if name in branch_repos and branch_repos[name]['fetch'] == fetch:
                    orig_branch = branch_repos[name]['branch_revisions'].keys()[0]
                    if manifest in branch_repos[name]['branch_revisions'][orig_branch]:
                        if current_revision != orig_branch:
                            self.fatal("I don't know how we got here, but %s in %s's revision %s is not the branching point %s." %
                                       (name, manifest, current_revision, orig_branch))
                        self.info("Setting %s (%s) to %s (was %s)" % (name, manifest, new_branch, current_revision))
                        p.setAttribute('revision', new_branch)
                        continue
                    # Should we keep the old branch or lock revision?  Doing
                    # the former for now.
                    self.info("%s %s is off a different branch point (%s, not %s).  Keeping the old branch..." %
                              (manifest, name, current_revision, orig_branch))
                    continue
                if name in self.config['extra_branch_manifest_repos']:
                    p.setAttribute('revision', new_branch)
                    continue
                # Lock revision?
                if not self.config["lock_manifest_revisions"]:
                    self.info("%s: Not locking revision for %s due to config." % (manifest, name))
                    continue
                lock_revision = self._query_remote_branch_revision(fetch, current_revision, manifest)
                if lock_revision is not None:
                    p.setAttribute('revision', lock_revision)
            with self.opened(manifest, open_mode='w') as (fh, err):
                if err:
                    self.fatal("Can't open %s for writing!" % manifest)
                else:
                    doc.writexml(fh)
            fh.close()
        if self.config["delete_unused_manifests"]:
            self._delete_unused_manifests(unused_manifests)
        self.info("TODO: diff, commit, --push!")
Example #5
    def query_branch_repos(self):
        """ Parse all manifests and build a dictionary of repos with
            expected revisions and/or branches.

            The format will be {
                name: {
                    'revision': branch,
                    'fetch': git_url,
                    'branch_revisions': {
                        revision: [manifest list]  # This should only have one key/value pair
                    },
                    'all_revisions': {
                        revision: [manifest list]  # This will have all key/value pairs
                    },
                },
            }

            This depends on the pull action having run at least once.
            """
        if self.branch_repo_dict is not None:
            return self.branch_repo_dict
        self.info("Building branch_repo_dict...")
        branch_repo_dict = {
            'b2g-manifest': {
                'fetch': self.config['manifest_repo_url'],
                'revision': self.config['manifest_repo_revision'],
            },
        }
        for manifest in self.query_manifests():
            self.info("Processing %s" % manifest)
            doc = repo_manifest.load_manifest(manifest)
            try:
                default_revision = repo_manifest.get_default(doc).getAttribute("revision")
            except IndexError:
                self.info("Skipping %s (no defaults)" % manifest)
                continue

            for p in doc.getElementsByTagName('project'):
                name = self._query_repo_name(p.getAttribute('name'))
                fetch = repo_manifest.get_project_remote_url(doc, p)
                self.debug("Remote %s Name %s" % (fetch, name))
                # We branch github.com/mozilla repos only
                if not self._query_do_branch(fetch, name):
                    continue
                # Now start building the branch info
                branch_repo_dict.setdefault(name, {}).setdefault("fetch", fetch)
                revision = p.getAttribute("revision")
                if not revision:
                    # fall back to default revision
                    if default_revision:
                        self.info("%s: %s, using default revision (%s)" % (manifest, name, default_revision))
                        self._add_branch_revision(branch_repo_dict[name], name, default_revision, manifest)
                    else:
                        self.warning("Can't determine revision for %s in %s" % (name, manifest))
                elif revision and branch_repo_dict.get(name, {}).get("revision", revision) != revision:
                    self._add_branch_revision(branch_repo_dict[name], name, revision, manifest)
                else:
                    self._add_branch_revision(branch_repo_dict[name], name, revision, manifest)
        self.info("Outputting branch_repo_dict:")
        self.info(pprint.pformat(branch_repo_dict))
        message = ""
        for name, r in branch_repo_dict.iteritems():
            if r.get("revision") is None:
                self.warning("Sanity: No revision set for %s %s; we'll fall back to master" % (name, r["fetch"]))
            if len(r.get("branch_revisions", {})) > 1:
                message += "Sanity: Not clear where to branch for %s %s %s\n" % (name, r["fetch"], pprint.pformat(r["branch_revisions"]))
        if message:
            self.fatal(message + "Use --branch-order or self.config['no_branch_repos'] to fix!")
        self.branch_repo_dict = branch_repo_dict
        return branch_repo_dict
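
The _add_branch_revision and _query_do_branch helpers referenced above aren't shown in these examples. For orientation only, here is what a single entry of the returned dictionary might look like after a couple of manifests have been processed, following the layout described in the docstring; the repo name, fetch URL, and manifest file names are placeholders.

example_branch_repo_dict = {
    'example-repo': {
        'fetch': 'https://github.com/example-org/',
        'revision': 'master',
        'branch_revisions': {
            # should end up with exactly one branching point
            'master': ['emulator.xml', 'device.xml'],
        },
        'all_revisions': {
            # every revision seen for this repo across all manifests
            'master': ['emulator.xml', 'device.xml'],
        },
    },
}
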
    def resolve_refs(self, manifest):
        worker_pool = ThreadPool(20)
        results = []

        # Resolve refnames
        for p in manifest.getElementsByTagName('project'):
            name = p.getAttribute('name')
            remote_url = repo_manifest.get_project_remote_url(manifest, p)
            revision = repo_manifest.get_project_revision(manifest, p)

            # commit ids are already done
            if repo_manifest.is_commitid(revision):
                self.debug("%s is already locked to %s; skipping" %
                           (name, revision))
                continue

            # gaia is special - make sure we're using the same revision we used
            # for gaia.json
            if self.gaia_hg_revision and p.getAttribute('path') == 'gaia' and revision == self.config['gaia_git_branch']:
                if not self.gaia_git_rev:
                    self.gaia_git_rev = self.query_mapper_git_revision(
                        self.config['mapper_url'],
                        self.config['gaia_mapper_project'],
                        self.gaia_hg_revision,
                    )
                self.info("Using %s for gaia to match %s in gaia.json" % (self.gaia_git_rev, self.gaia_hg_revision))
                p.setAttribute('revision', self.gaia_git_rev)
                continue

            # Check to see if we've looked up this revision on this remote
            # before. If we have, reuse the previous value rather than looking
            # it up again. This will make sure revisions for the same ref name
            # are consistent between devices, as long as they use the same
            # remote/refname.
            if (remote_url, revision) in self._git_ref_cache:
                abs_revision = self._git_ref_cache[remote_url, revision]
                self.info(
                    "Re-using previous lookup %s:%s -> %s" %
                    (remote_url, revision, abs_revision))
                p.setAttribute('revision', abs_revision)
                continue

            # If there's no '/' in the revision, assume it's a head
            if '/' not in revision:
                revision = 'refs/heads/%s' % revision

            self.debug("Getting revision for %s (currently %s)" %
                       (name, revision))
            async_result = worker_pool.apply_async(self.resolve_git_ref,
                                                   (remote_url, revision))
            results.append((p, async_result))

        # TODO: alert/notify on missing repositories
        # TODO: Add external caching
        abort = False
        failed = []
        for p, result in results:
            abs_revision = result.get()
            remote_url = repo_manifest.get_project_remote_url(manifest, p)
            revision = repo_manifest.get_project_revision(manifest, p)
            if not abs_revision:
                abort = True
                self.error("Couldn't resolve %s %s" % (remote_url, revision))
                failed.append(p)
                continue  # don't cache or write an empty revision
            # Save to our cache
            self._git_ref_cache[remote_url, revision] = abs_revision
            p.setAttribute('revision', abs_revision)
        if abort:
            # Write message about how to set up syncing
            default = repo_manifest.get_default(manifest)
            for p in failed:
                if p.hasAttribute('remote'):
                    remote = repo_manifest.get_remote(manifest, p.getAttribute('remote'))
                else:
                    remote = repo_manifest.get_remote(manifest, default.getAttribute('remote'))

                new_fetch_url = remote.getAttribute('fetch')
                orig_fetch_url = self._remote_mappings[new_fetch_url]
                name = p.getAttribute('name')
                self.info("needs sync? %s/%s -> %s/%s" % (orig_fetch_url, name, new_fetch_url, name))

            self.fatal("couldn't resolve some refs; exiting")
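
This variant keys self._git_ref_cache on (remote_url, revision) tuples so that later manifests processed in the same run reuse lookups made for earlier ones. Detached from the class, the pattern is plain memoisation over a tuple key; the dummy resolver below merely stands in for resolve_git_ref.

_git_ref_cache = {}

def dummy_resolver(remote_url, revision):
    # stand-in for resolve_git_ref(); always "resolves" to the same fake sha
    return 'deadbeef' * 5

def cached_resolve(remote_url, revision):
    key = (remote_url, revision)
    if key not in _git_ref_cache:
        _git_ref_cache[key] = dummy_resolver(remote_url, revision)
    return _git_ref_cache[key]

# the first call performs the lookup; the second reuses the cached value
cached_resolve('git://example.invalid/repo', 'refs/heads/master')
cached_resolve('git://example.invalid/repo', 'refs/heads/master')
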
Example #7
    def resolve_refs(self, manifest):
        worker_pool = ThreadPool(20)
        results = []

        # Resolve refnames
        for p in manifest.getElementsByTagName("project"):
            name = p.getAttribute("name")
            remote_url = repo_manifest.get_project_remote_url(manifest, p)
            revision = repo_manifest.get_project_revision(manifest, p)

            # commit ids are already done
            if repo_manifest.is_commitid(revision):
                self.debug("%s is already locked to %s; skipping" % (name, revision))
                continue

            # gaia is special - make sure we're using the same revision we used
            # for gaia.json
            if (
                self.gaia_hg_revision
                and p.getAttribute("path") == "gaia"
                and revision == self.config["gaia_git_branch"]
            ):
                if not self.gaia_git_rev:
                    self.gaia_git_rev = self.query_mapper_git_revision(
                        self.config["mapper_url"], self.config["gaia_mapper_project"], self.gaia_hg_revision
                    )
                self.info("Using %s for gaia to match %s in gaia.json" % (self.gaia_git_rev, self.gaia_hg_revision))
                p.setAttribute("revision", self.gaia_git_rev)
                continue

            # Check to see if we've looked up this revision on this remote
            # before. If we have, reuse the previous value rather than looking
            # it up again. This will make sure revisions for the same ref name
            # are consistent between devices, as long as they use the same
            # remote/refname.
            if (remote_url, revision) in self._git_ref_cache:
                abs_revision = self._git_ref_cache[remote_url, revision]
                self.info("Re-using previous lookup %s:%s -> %s" % (remote_url, revision, abs_revision))
                p.setAttribute("revision", abs_revision)
                continue

            # If there's no '/' in the revision, assume it's a head
            if "/" not in revision:
                revision = "refs/heads/%s" % revision

            self.debug("Getting revision for %s (currently %s)" % (name, revision))
            async_result = worker_pool.apply_async(self.resolve_git_ref, (remote_url, revision))
            results.append((p, async_result))

        # TODO: alert/notify on missing repositories
        # TODO: Add external caching
        abort = False
        for p, result in results:
            abs_revision = result.get()
            remote_url = repo_manifest.get_project_remote_url(manifest, p)
            revision = repo_manifest.get_project_revision(manifest, p)
            if not abs_revision:
                abort = True
                self.error("Couldn't resolve %s %s" % (remote_url, revision))
            # Save to our cache
            self._git_ref_cache[remote_url, revision] = abs_revision
            p.setAttribute("revision", abs_revision)
        if abort:
            self.fatal("couldn't resolve some refs; exiting")
Example #8
    def resolve_refs(self, manifest):
        worker_pool = ThreadPool(20)
        lookup_threads_by_project = {}
        lookup_threads_by_parameters = {}

        # Resolve refnames
        for p in manifest.getElementsByTagName('project'):
            name = p.getAttribute('name')
            remote_url = repo_manifest.get_project_remote_url(manifest, p)
            revision = repo_manifest.get_project_revision(manifest, p)

            # commit ids are already done
            if repo_manifest.is_commitid(revision):
                self.debug("%s is already locked to %s; skipping" %
                           (name, revision))
                continue

            # gaia is special - make sure we're using the same revision we used
            # for gaia.json
            if self.gaia_hg_revision and p.getAttribute('path') == 'gaia' and revision == self.config['gaia_git_branch']:
                git_rev = self.query_gaia_git_rev()
                self.info("Using %s for gaia to match %s in gaia.json" % (git_rev, self.gaia_hg_revision))
                p.setAttribute('revision', git_rev)
                continue

            # If there's no '/' in the revision, assume it's a head
            if '/' not in revision:
                revision = 'refs/heads/%s' % revision

            cache_key = "%s:%s" % (remote_url, revision)

            # Check to see if we've looked up this revision on this remote
            # before. If we have, reuse the previous value rather than looking
            # it up again. This will make sure revisions for the same ref name
            # are consistent between devices, as long as they use the same
            # remote/refname.
            if cache_key in self._git_ref_cache:
                abs_revision = self._git_ref_cache[cache_key]
                self.info(
                    "Reusing previous lookup %s -> %s" %
                    (cache_key, abs_revision))
                p.setAttribute('revision', abs_revision)
                continue

            # Maybe a thread already exists for this lookup, even if the result has not
            # yet been retrieved and placed in _git_ref_cache...
            # Please note result.get() can be called multiple times without problems;
            # the git command will only be executed once. Therefore we can associate many
            # projects to the same thread result, without problems later when we call
            # get() multiple times against the same thread result.
            if cache_key in lookup_threads_by_parameters:
                self.info("Reusing currently running thread to look up %s" % cache_key)
                lookup_threads_by_project[p] = lookup_threads_by_parameters.get(cache_key)
            else:
                async_result = worker_pool.apply_async(self.resolve_git_ref,
                                                       (remote_url, revision))
                lookup_threads_by_parameters[cache_key] = async_result
                lookup_threads_by_project[p] = async_result

        # TODO: alert/notify on missing repositories
        abort = False
        failed = []
        for p, result in lookup_threads_by_project.iteritems():
            abs_revision = result.get(timeout=300)
            remote_url = repo_manifest.get_project_remote_url(manifest, p)
            revision = repo_manifest.get_project_revision(manifest, p)
            if not abs_revision:
                abort = True
                self.error("Couldn't resolve reference %s %s" % (remote_url, revision))
                failed.append(p)
                continue  # don't write an empty revision into the manifest
            p.setAttribute('revision', abs_revision)
        if abort:
            # Write message about how to set up syncing
            default = repo_manifest.get_default(manifest)
            for p in failed:
                if p.hasAttribute('remote'):
                    remote = repo_manifest.get_remote(manifest, p.getAttribute('remote'))
                else:
                    remote = repo_manifest.get_remote(manifest, default.getAttribute('remote'))

                new_fetch_url = remote.getAttribute('fetch')
                orig_fetch_url = self._remote_mappings[new_fetch_url]
                name = p.getAttribute('name')
                self.info("needs sync? %s/%s -> %s/%s" % (orig_fetch_url, name, new_fetch_url, name))

            self.fatal("couldn't resolve some refs; exiting")