def _query_remote_branch_revision(self, fetch, branch, manifest, retry=True): """ Helper method for munge_manifests(). Since we want to lock revisions, we need to know what revision goes with a specific git url + branch. """ sleep_time = 10 if self.remote_branch_revisions.get(fetch, {}).get(branch): return self.remote_branch_revisions[fetch][branch] git = self.query_exe('git', return_type='list') output = self.get_output_from_command( git + ["ls-remote", fetch, branch] ) if not output: if retry: self.info("Trying again in %d seconds..." % sleep_time) time.sleep(sleep_time) return self._query_remote_branch_revision(manifest, fetch, branch, retry=False) # TODO figure out what to do here # self.fatal("Can't find revision for %s %s!" % (fetch, branch)) self.critical("%s: Can't find revision for %s %s!" % (fetch, branch, manifest)) return None r = output[0:40] if not repo_manifest.is_commitid(r): self.fatal("Can't grok ls-remote output:\n %s" % output) self.info("Found revision %s for %s %s" % (r, fetch, branch)) self.remote_branch_revisions.setdefault(fetch, {})[branch] = r return r
def _add_branch_revision(self, repo_config, name, revision, manifest):
    """ query_branch_repos() helper.

    Pass in branch-repos['name'] for 'repo', name, revision, manifest.

    Records `revision` for `manifest` in repo_config['all_revisions'] and
    repo_config['branch_revisions'], and picks the single winning
    repo_config['revision'] when manifests disagree, preferring whichever
    revision appears earliest in [config['branch_name']] +
    config['branch_order'].
    """
    # Commit ids aren't branch names, so they can't be branch points.
    if repo_manifest.is_commitid(revision):
        self.debug("%s is a commit id! Skipping for %s %s" % (revision, name, manifest))
        return
    new_branch = self.config['branch_name']
    # The new branch name always ranks first in the preference order.
    branch_order = [new_branch] + list(self.config.get('branch_order', []))
    # Track every revision seen for this repo, keyed by revision ->
    # list of manifests that use it.
    repo_config.setdefault('all_revisions', {}).setdefault(revision, []).append(manifest)
    if "revision" not in repo_config:
        # First revision seen wins by default.
        repo_config['revision'] = revision
    else:
        previous_revision = repo_config["revision"]
        if previous_revision != revision:
            if revision in branch_order:
                # Lower index in branch_order == higher preference.
                if previous_revision not in branch_order or \
                   branch_order.index(revision) < branch_order.index(previous_revision):
                    # New revision outranks the old winner: replace it and
                    # drop the old winner's branch_revisions entry.
                    self.info("Prefering %s over %s for %s" % (revision, previous_revision, name))
                    del(repo_config["branch_revisions"][previous_revision])
                    repo_config["revision"] = revision
                else:
                    # The existing winner outranks this revision; skip it.
                    self.info("%s: not branching off %s since %s is in branch_order" % (name, revision, previous_revision))
                    return
            elif previous_revision in branch_order:
                # The existing winner is in branch_order and the new
                # revision isn't, so the new revision is ignored.
                self.info("%s: not keeping %s since %s is in branch_order" % (name, revision, previous_revision))
                self.debug(pprint.pformat(repo_config['branch_revisions']))
                return
            else:
                # We shouldn't ever hit this line
                self.critical("WAT %s %s %s" % (name, revision, previous_revision))
    # Record this manifest under the (possibly newly chosen) revision.
    repo_config.setdefault('branch_revisions', {}).setdefault(revision, []).append(manifest)
    self.debug("%s %s %s" % (name, repo_config['revision'], pprint.pformat(repo_config['branch_revisions'])))
def resolve_refs(self):
    """Replace symbolic refs in the current manifest with commit ids.

    Every project whose revision is not already a commit id gets its ref
    resolved via resolve_git_ref() on a small thread pool, and the
    resolved revision is written back onto the project node.
    """
    manifest = self.query_manifest()
    pool = ThreadPool(8)
    pending = []
    # Kick off one async lookup per unresolved project.
    for project in manifest.getElementsByTagName('project'):
        name = project.getAttribute('name')
        remote_url = repo_manifest.get_project_remote_url(manifest, project)
        revision = repo_manifest.get_project_revision(manifest, project)
        # commit ids are already done
        if repo_manifest.is_commitid(revision):
            self.debug("%s is already locked to %s; skipping" % (name, revision))
            continue
        # If there's no '/' in the revision, assume it's a head
        if '/' not in revision:
            revision = 'refs/heads/%s' % revision
        self.debug("Getting revision for %s (currently %s)" % (name, revision))
        handle = pool.apply_async(self.resolve_git_ref, (remote_url, revision))
        pending.append((project, handle))
    # Collect results and write them back into the manifest DOM.
    for project, handle in pending:
        project.setAttribute('revision', handle.get())
def resolve_refs(self, manifest):
    """Resolve symbolic refs in *manifest* to absolute commit ids.

    Lookups run on a thread pool, with at most one thread per distinct
    (remote_url, revision) pair; projects needing the same lookup share
    the same AsyncResult. Previously-resolved pairs are reused from
    self._git_ref_cache. gaia is pinned to query_gaia_git_rev() to match
    gaia.json. If any ref fails to resolve, logs which repos may need
    syncing and calls self.fatal().
    """
    worker_pool = ThreadPool(20)
    # project node -> AsyncResult for its lookup
    lookup_threads_by_project = {}
    # cache key "remote_url:revision" -> AsyncResult, to dedupe lookups
    lookup_threads_by_parameters = {}
    # Resolve refnames
    for p in manifest.getElementsByTagName('project'):
        name = p.getAttribute('name')
        remote_url = repo_manifest.get_project_remote_url(manifest, p)
        revision = repo_manifest.get_project_revision(manifest, p)
        # commit ids are already done
        if repo_manifest.is_commitid(revision):
            self.debug("%s is already locked to %s; skipping" % (name, revision))
            continue
        # gaia is special - make sure we're using the same revision we used
        # for gaia.json
        if self.gaia_hg_revision and p.getAttribute('path') == 'gaia' and revision == self.config['gaia_git_branch']:
            git_rev = self.query_gaia_git_rev()
            self.info("Using %s for gaia to match %s in gaia.json" % (git_rev, self.gaia_hg_revision))
            p.setAttribute('revision', git_rev)
            continue
        # If there's no '/' in the revision, assume it's a head
        if '/' not in revision:
            revision = 'refs/heads/%s' % revision
        cache_key = "%s:%s" % (remote_url, revision)
        # Check to see if we've looked up this revision on this remote
        # before. If we have, reuse the previous value rather than looking
        # it up again. This will make sure revisions for the same ref name
        # are consistent between devices, as long as they use the same
        # remote/refname.
        if cache_key in self._git_ref_cache:
            abs_revision = self._git_ref_cache[cache_key]
            self.debug(
                "Reusing previous lookup %s -> %s" % (cache_key, abs_revision))
            p.setAttribute('revision', abs_revision)
            continue
        # Maybe a thread already exists for this lookup, even if the result has not
        # yet been retrieved and placed in _git_ref_cache...
        # Please note result.get() can be called multiple times without problems;
        # the git command will only be executed once. Therefore we can associate many
        # projects to the same thread result, without problems later when we call
        # get() multiple times against the same thread result.
        if cache_key in lookup_threads_by_parameters:
            self.debug("Reusing currently running thread to look up %s" % cache_key)
            lookup_threads_by_project[p] = lookup_threads_by_parameters.get(cache_key)
        else:
            async_result = worker_pool.apply_async(self.resolve_git_ref, (remote_url, revision))
            lookup_threads_by_parameters[cache_key] = async_result
            lookup_threads_by_project[p] = async_result
    # TODO: alert/notify on missing repositories
    abort = False
    failed = []
    # Collect all the async results and write them onto the project nodes.
    for p, result in lookup_threads_by_project.iteritems():
        abs_revision = result.get(timeout=300)
        remote_url = repo_manifest.get_project_remote_url(manifest, p)
        revision = repo_manifest.get_project_revision(manifest, p)
        if not abs_revision:
            abort = True
            self.error("Couldn't resolve reference %s %s" % (remote_url, revision))
            failed.append(p)
        p.setAttribute('revision', abs_revision)
    if abort:
        # Write message about how to set up syncing
        default = repo_manifest.get_default(manifest)
        for p in failed:
            if p.hasAttribute('remote'):
                remote = repo_manifest.get_remote(manifest, p.getAttribute('remote'))
            else:
                remote = repo_manifest.get_remote(manifest, default.getAttribute('remote'))
            new_fetch_url = remote.getAttribute('fetch')
            # Map the new fetch url back to its original upstream url.
            orig_fetch_url = self._remote_mappings[new_fetch_url]
            name = p.getAttribute('name')
            self.info("needs sync? %s/%s -> %s/%s" % (orig_fetch_url, name, new_fetch_url, name))
        self.fatal("couldn't resolve some refs; exiting")
def munge_manifests(self):
    """ Switch the branched repos to the new branch; lock down third party revisions.

    For every manifest from query_manifests(): set branched repos'
    revisions to config['branch_name'] (when branched from the right
    point), optionally lock other repos' revisions to commit ids via
    _query_remote_branch_revision(), then write the manifest back out.
    Manifests with no default revision are collected and optionally
    deleted afterward.
    """
    branch_repos = self.query_branch_repos()
    dirs = self.query_abs_dirs()
    new_branch = self.config['branch_name']
    unused_manifests = []
    # Sanity check: the manifest repo itself must already be branched.
    if not self.check_existing_branch(new_branch, cwd=dirs['abs_manifest_dir'])[0]:
        self.fatal("b2g-manifest isn't branched properly! Run --clean-repos --branch-repos")
    for manifest in self.query_manifests():
        self.info("Munging %s..." % manifest)
        doc = repo_manifest.load_manifest(manifest)
        try:
            repo_manifest.get_default(doc).getAttribute("revision")
        except IndexError:
            # No <default> revision: treat the manifest as unused.
            self.info("No default revision; skipping.")
            unused_manifests.append(manifest)
            continue
        for p in doc.getElementsByTagName('project'):
            name = self._query_repo_name(p.getAttribute('name'))
            fetch = repo_manifest.get_project_remote_url(doc, p)
            self.debug("Remote %s Name %s" % (fetch, name))
            current_revision = repo_manifest.get_project_revision(doc, p)
            if repo_manifest.is_commitid(current_revision):
                self.info("%s: %s is already locked to %s; skipping." % (manifest, name, current_revision))
                # I could setAttribute() here, but I very much doubt the
                # default_revision is a commitid.
                continue
            # We've branched this repo; do we set the revision to
            # new_branch or not? ('fetch' needs to match, since we have
            # same-named repos with different urls =P )
            if name in branch_repos and branch_repos[name]['fetch'] == fetch:
                # NOTE(review): assumes branch_revisions has exactly one
                # key at this point — confirm against query_branch_repos().
                orig_branch = branch_repos[name]['branch_revisions'].keys()[0]
                if manifest in branch_repos[name]['branch_revisions'][orig_branch]:
                    if current_revision != orig_branch:
                        self.fatal("I don't know how we got here, but %s in %s's revision %s is not the branching point %s." % (name, manifest, current_revision, orig_branch))
                    self.info("Setting %s (%s) to %s (was %s)" % (name, manifest, new_branch, current_revision))
                    p.setAttribute('revision', new_branch)
                    continue
                # Should we keep the old branch or lock revision? Doing
                # the former for now.
                self.info("%s %s is off a different branch point (%s, not %s). Keeping the old branch..." % (manifest, name, current_revision, orig_branch))
                continue
            # Repos configured for extra branching also get the new branch.
            if name in self.config['extra_branch_manifest_repos']:
                p.setAttribute('revision', new_branch)
                continue
            # Lock revision?
            if not self.config["lock_manifest_revisions"]:
                self.info("%s: Not locking revision for %s due to config." % (manifest, name))
                continue
            lock_revision = self._query_remote_branch_revision(fetch, current_revision, manifest)
            if lock_revision is not None:
                p.setAttribute('revision', lock_revision)
        # Write the munged DOM back over the manifest file.
        with self.opened(manifest, open_mode='w') as (fh, err):
            if err:
                self.fatal("Can't open %s for writing!" % manifest)
            else:
                doc.writexml(fh)
                fh.close()
    if self.config["delete_unused_manifests"]:
        self._delete_unused_manifests(unused_manifests)
    self.info("TODO: diff, commit, --push!")
def resolve_refs(self, manifest):
    """Resolve every symbolic ref in *manifest* to an absolute commit id.

    Lookups are fanned out over a thread pool and stored in
    self._git_ref_cache keyed on (remote_url, revision). gaia is pinned
    to the git revision mapped from self.gaia_hg_revision. If any ref
    fails to resolve, log which repos may need syncing and abort via
    self.fatal().
    """
    pool = ThreadPool(20)
    pending = []
    for project in manifest.getElementsByTagName('project'):
        name = project.getAttribute('name')
        remote_url = repo_manifest.get_project_remote_url(manifest, project)
        revision = repo_manifest.get_project_revision(manifest, project)
        # Revisions that are already commit ids need no lookup.
        if repo_manifest.is_commitid(revision):
            self.debug("%s is already locked to %s; skipping" % (name, revision))
            continue
        # gaia is special - make sure we're using the same revision we used
        # for gaia.json
        if self.gaia_hg_revision and project.getAttribute('path') == 'gaia' and revision == self.config['gaia_git_branch']:
            if not self.gaia_git_rev:
                self.gaia_git_rev = self.query_mapper_git_revision(
                    self.config['mapper_url'],
                    self.config['gaia_mapper_project'],
                    self.gaia_hg_revision,
                )
            self.info("Using %s for gaia to match %s in gaia.json" % (self.gaia_git_rev, self.gaia_hg_revision))
            project.setAttribute('revision', self.gaia_git_rev)
            continue
        # Reuse a previous lookup for this remote/revision pair if we have
        # one, so identical refs resolve consistently. (Membership test, not
        # .get(): failed lookups are cached as None and must still hit here.)
        if (remote_url, revision) in self._git_ref_cache:
            known = self._git_ref_cache[remote_url, revision]
            self.info(
                "Re-using previous lookup %s:%s -> %s" % (remote_url, revision, known))
            project.setAttribute('revision', known)
            continue
        # If there's no '/' in the revision, assume it's a head
        if '/' not in revision:
            revision = 'refs/heads/%s' % revision
        self.debug("Getting revision for %s (currently %s)" % (name, revision))
        handle = pool.apply_async(self.resolve_git_ref, (remote_url, revision))
        pending.append((project, handle))
    # TODO: alert/notify on missing repositories
    # TODO: Add external caching
    had_failures = False
    unresolved = []
    for project, handle in pending:
        resolved = handle.get()
        remote_url = repo_manifest.get_project_remote_url(manifest, project)
        revision = repo_manifest.get_project_revision(manifest, project)
        if not resolved:
            had_failures = True
            self.error("Couldn't resolve %s %s" % (remote_url, revision))
            unresolved.append(project)
        # Save to our cache
        self._git_ref_cache[remote_url, revision] = resolved
        project.setAttribute('revision', resolved)
    if had_failures:
        # Write message about how to set up syncing
        default = repo_manifest.get_default(manifest)
        for project in unresolved:
            if project.hasAttribute('remote'):
                remote = repo_manifest.get_remote(manifest, project.getAttribute('remote'))
            else:
                remote = repo_manifest.get_remote(manifest, default.getAttribute('remote'))
            new_fetch_url = remote.getAttribute('fetch')
            orig_fetch_url = self._remote_mappings[new_fetch_url]
            name = project.getAttribute('name')
            self.info("needs sync? %s/%s -> %s/%s" % (orig_fetch_url, name, new_fetch_url, name))
        self.fatal("couldn't resolve some refs; exiting")
def resolve_refs(self, manifest):
    """Lock every non-commitid project revision in *manifest* to an
    absolute commit id.

    Ref lookups are fanned out over a thread pool and stored in
    self._git_ref_cache keyed on (remote_url, revision), so repeated refs
    resolve consistently. gaia is pinned to the git revision mapped from
    self.gaia_hg_revision. Any unresolved ref triggers self.fatal().
    """
    pool = ThreadPool(20)
    pending = []
    for project in manifest.getElementsByTagName("project"):
        name = project.getAttribute("name")
        remote_url = repo_manifest.get_project_remote_url(manifest, project)
        revision = repo_manifest.get_project_revision(manifest, project)
        # Already an absolute commit id: nothing to resolve.
        if repo_manifest.is_commitid(revision):
            self.debug("%s is already locked to %s; skipping" % (name, revision))
            continue
        # gaia is special - make sure we're using the same revision we used
        # for gaia.json
        is_gaia = (
            self.gaia_hg_revision
            and project.getAttribute("path") == "gaia"
            and revision == self.config["gaia_git_branch"]
        )
        if is_gaia:
            if not self.gaia_git_rev:
                self.gaia_git_rev = self.query_mapper_git_revision(
                    self.config["mapper_url"],
                    self.config["gaia_mapper_project"],
                    self.gaia_hg_revision,
                )
            self.info("Using %s for gaia to match %s in gaia.json" % (self.gaia_git_rev, self.gaia_hg_revision))
            project.setAttribute("revision", self.gaia_git_rev)
            continue
        # Reuse an earlier lookup for the same remote/ref if available.
        # (Membership test, not .get(): failed lookups are cached as None
        # and must still short-circuit here.)
        if (remote_url, revision) in self._git_ref_cache:
            known = self._git_ref_cache[remote_url, revision]
            self.info("Re-using previous lookup %s:%s -> %s" % (remote_url, revision, known))
            project.setAttribute("revision", known)
            continue
        # If there's no '/' in the revision, assume it's a head
        if "/" not in revision:
            revision = "refs/heads/%s" % revision
        self.debug("Getting revision for %s (currently %s)" % (name, revision))
        pending.append(
            (project, pool.apply_async(self.resolve_git_ref, (remote_url, revision))))
    # TODO: alert/notify on missing repositories
    # TODO: Add external caching
    had_failures = False
    for project, handle in pending:
        resolved = handle.get()
        remote_url = repo_manifest.get_project_remote_url(manifest, project)
        revision = repo_manifest.get_project_revision(manifest, project)
        if not resolved:
            had_failures = True
            self.error("Couldn't resolve %s %s" % (remote_url, revision))
        # Save to our cache
        self._git_ref_cache[remote_url, revision] = resolved
        project.setAttribute("revision", resolved)
    if had_failures:
        self.fatal("couldn't resolve some refs; exiting")
def resolve_refs(self, manifest):
    """Resolve symbolic refs in *manifest* to absolute commit ids.

    Lookups run on a thread pool, with at most one thread per distinct
    (remote_url, revision) pair; projects needing the same lookup share
    the same AsyncResult. Previously-resolved pairs are reused from
    self._git_ref_cache. gaia is pinned to query_gaia_git_rev() to match
    gaia.json. If any ref fails to resolve, logs which repos may need
    syncing and calls self.fatal().
    """
    worker_pool = ThreadPool(20)
    # project node -> AsyncResult for its lookup
    lookup_threads_by_project = {}
    # cache key "remote_url:revision" -> AsyncResult, to dedupe lookups
    lookup_threads_by_parameters = {}
    # Resolve refnames
    for p in manifest.getElementsByTagName('project'):
        name = p.getAttribute('name')
        remote_url = repo_manifest.get_project_remote_url(manifest, p)
        revision = repo_manifest.get_project_revision(manifest, p)
        # commit ids are already done
        if repo_manifest.is_commitid(revision):
            self.debug("%s is already locked to %s; skipping" % (name, revision))
            continue
        # gaia is special - make sure we're using the same revision we used
        # for gaia.json
        if self.gaia_hg_revision and p.getAttribute('path') == 'gaia' and revision == self.config['gaia_git_branch']:
            git_rev = self.query_gaia_git_rev()
            self.info("Using %s for gaia to match %s in gaia.json" % (git_rev, self.gaia_hg_revision))
            p.setAttribute('revision', git_rev)
            continue
        # If there's no '/' in the revision, assume it's a head
        if '/' not in revision:
            revision = 'refs/heads/%s' % revision
        cache_key = "%s:%s" % (remote_url, revision)
        # Check to see if we've looked up this revision on this remote
        # before. If we have, reuse the previous value rather than looking
        # it up again. This will make sure revisions for the same ref name
        # are consistent between devices, as long as they use the same
        # remote/refname.
        if cache_key in self._git_ref_cache:
            abs_revision = self._git_ref_cache[cache_key]
            self.info(
                "Reusing previous lookup %s -> %s" % (cache_key, abs_revision))
            p.setAttribute('revision', abs_revision)
            continue
        # Maybe a thread already exists for this lookup, even if the result has not
        # yet been retrieved and placed in _git_ref_cache...
        # Please note result.get() can be called multiple times without problems;
        # the git command will only be executed once. Therefore we can associate many
        # projects to the same thread result, without problems later when we call
        # get() multiple times against the same thread result.
        if cache_key in lookup_threads_by_parameters:
            self.info("Reusing currently running thread to look up %s" % cache_key)
            lookup_threads_by_project[p] = lookup_threads_by_parameters.get(cache_key)
        else:
            async_result = worker_pool.apply_async(self.resolve_git_ref, (remote_url, revision))
            lookup_threads_by_parameters[cache_key] = async_result
            lookup_threads_by_project[p] = async_result
    # TODO: alert/notify on missing repositories
    abort = False
    failed = []
    # Collect all the async results and write them onto the project nodes.
    for p, result in lookup_threads_by_project.iteritems():
        abs_revision = result.get(timeout=300)
        remote_url = repo_manifest.get_project_remote_url(manifest, p)
        revision = repo_manifest.get_project_revision(manifest, p)
        if not abs_revision:
            abort = True
            self.error("Couldn't resolve reference %s %s" % (remote_url, revision))
            failed.append(p)
        p.setAttribute('revision', abs_revision)
    if abort:
        # Write message about how to set up syncing
        default = repo_manifest.get_default(manifest)
        for p in failed:
            if p.hasAttribute('remote'):
                remote = repo_manifest.get_remote(manifest, p.getAttribute('remote'))
            else:
                remote = repo_manifest.get_remote(manifest, default.getAttribute('remote'))
            new_fetch_url = remote.getAttribute('fetch')
            # Map the new fetch url back to its original upstream url.
            orig_fetch_url = self._remote_mappings[new_fetch_url]
            name = p.getAttribute('name')
            self.info("needs sync? %s/%s -> %s/%s" % (orig_fetch_url, name, new_fetch_url, name))
        self.fatal("couldn't resolve some refs; exiting")