def run(self, platform, productName, appVersion, version, build_number, locale, hashFunction, extVersion, buildID, **updateKwargs):
    """Submit update data for a single locale of a release build.

    Resolves the buildbot platform to its update platform(s), assembles the
    per-locale blob data, and pushes it to Balrog through either the v2 REST
    API or the legacy SingleLocale API, depending on ``self.backend_version``.
    """
    update_platforms = buildbot2updatePlatforms(platform)
    # Some platforms may have alias', but those are set-up elsewhere
    # for release blobs; only the primary target is used here.
    primary_target = update_platforms[0]

    blob_name = get_release_blob_name(productName, version, build_number, self.suffix)

    build_data = {"buildID": buildID, "appVersion": appVersion, "displayVersion": getPrettyVersion(version)}
    build_data.update(self._get_update_data(productName, version, build_number, **updateKwargs))

    if self.backend_version != 2:
        log.info("Using legacy backend version...")
        single_locale = SingleLocale(
            name=blob_name,
            build_target=primary_target,
            locale=locale,
            auth0_secrets=self.auth0_secrets,
            api_root=self.api_root,
        )
        _, data_version = single_locale.get_data()
        single_locale.update_build(
            data_version=data_version,
            product=productName,
            hashFunction=hashFunction,
            buildData=json.dumps(build_data),
            schemaVersion=9,
        )
        return

    log.info("Using backend version 2...")
    # XXX Check for existing data_version for this locale
    payload = {
        "blob": {"platforms": {primary_target: {"locales": {locale: build_data}}}},
        # XXX old_data_versions here is currently required but shouldn't be
        "old_data_versions": {"platforms": {primary_target: {"locales": {}}}},
    }
    endpoint = self.api_root + "/v2/releases/" + blob_name
    session = get_balrog_session(auth0_secrets=self.auth0_secrets)
    balrog_request(session, "post", endpoint, json=payload)
def run(self, appVersion, productName, version, buildNumber, updateChannels, ftpServer, bouncerServer, enUSPlatforms, hashFunction, updateLine, **updateKwargs):
    """Generate a top-level release blob and submit it to Balrog.

    The freshly generated blob is merged into any blob already stored under
    the same release name, then written back through either the v2 REST API
    or the legacy Release API, depending on ``self.backend_version``.
    """
    generated = self.generate_data(
        appVersion, productName, version, buildNumber, updateChannels, ftpServer, bouncerServer, enUSPlatforms, updateLine, **updateKwargs
    )
    release_name = get_release_blob_name(productName, version, buildNumber, self.suffix)

    if self.backend_version == 2:
        log.info("Using backend version 2...")
        endpoint = self.api_root + "/v2/releases/" + release_name
        payload = {"product": productName, "blob": {}}
        session = get_balrog_session(auth0_secrets=self.auth0_secrets)
        try:
            existing = balrog_request(session, "get", endpoint)
            if existing:
                # Start from the stored blob and remember its data versions
                # so the server can detect conflicting writes.
                payload["blob"] = existing["blob"]
                payload["old_data_versions"] = existing["data_versions"]
        except HTTPError as e:
            # A 404 just means no release exists yet; anything else is fatal.
            if e.response.status_code != 404:
                raise
        payload["blob"] = always_merger.merge(payload["blob"], generated)
        payload["blob"]["schema_version"] = self.schemaVersion
        payload["blob"]["hashFunction"] = hashFunction
        payload["blob"]["name"] = release_name
        balrog_request(session, "put", endpoint, json=payload)
    else:
        log.info("Using legacy backend version...")
        release_api = Release(name=release_name, auth0_secrets=self.auth0_secrets, api_root=self.api_root)
        try:
            current_data, data_version = release_api.get_data()
        except HTTPError as e:
            if e.response.status_code != 404:
                raise
            log.warning("Release blob doesn't exist, using empty data...")
            current_data, data_version = {}, None
        merged = recursive_update(current_data, generated)
        release_api.update_release(
            product=productName,
            hashFunction=hashFunction,
            releaseData=json.dumps(merged),
            schemaVersion=self.schemaVersion,
            data_version=data_version,
        )
def update_data(url, existing_release, existing_locale_data):
    """POST this build's locale data to *url*, skipping no-op submissions.

    NOTE(review): relies on ``data``, ``build_target``, ``locale`` and
    ``session`` from an enclosing scope — this reads like an extracted
    closure; confirm those names are bound where it is used.
    """
    # If the partials are already a subset of the blob and the
    # complete MAR is the same, skip the submission
    if existing_locale_data:
        same_complete = existing_locale_data.get("completes") == data.get("completes")
        known_partials = existing_locale_data.get("partials", [])
        partials_covered = all(p in known_partials for p in data.get("partials", []))
        if same_complete and partials_covered:
            log.warning("Dated data didn't change, skipping update")
            return

    # explicitly pass data version
    payload = {
        "blob": {"platforms": {build_target: {"locales": {locale: data}}}},
        "old_data_versions": {"platforms": {build_target: {"locales": {}}}},
    }
    locale_versions = (
        existing_release.get("data_versions", {})
        .get("platforms", {})
        .get(build_target, {})
        .get("locales", {})
    )
    if locale_versions.get(locale):
        payload["old_data_versions"]["platforms"][build_target]["locales"][locale] = locale_versions[locale]
    balrog_request(session, "post", url, json=payload)
def run_backend2(
    self, platform, buildID, productName, branch, appVersion, locale, hashFunction, extVersion, schemaVersion, isOSUpdate=None, **updateKwargs
):
    """Submit nightly locale data through the Balrog v2 REST API.

    Both the buildID-specific release blob and the "latest" blob are
    created on demand (with an empty top level) and then updated with
    this build's per-locale data.
    """
    log.info("Using backend version 2...")
    session = get_balrog_session(auth0_secrets=self.auth0_secrets)

    update_platforms = buildbot2updatePlatforms(platform)
    build_target = update_platforms[0]
    # Extra platform entries are aliases; they are not submitted here.
    if len(update_platforms) > 1:
        log.debug("alias entry of %s ignored...", json.dumps(update_platforms[1:]))

    data = {"buildID": buildID, "appVersion": appVersion, "platformVersion": extVersion, "displayVersion": appVersion}
    data.update(self._get_update_data(productName, branch, **updateKwargs))

    build_type = self.build_type

    # wrap operations into "atomic" functions that can be retried
    def update_data(url, existing_release, existing_locale_data):
        # If the partials are already a subset of the blob and the
        # complete MAR is the same, skip the submission
        if existing_locale_data:
            same_complete = existing_locale_data.get("completes") == data.get("completes")
            known_partials = existing_locale_data.get("partials", [])
            if same_complete and all(p in known_partials for p in data.get("partials", [])):
                log.warning("Dated data didn't change, skipping update")
                return

        # explicitly pass data version
        payload = {
            "blob": {"platforms": {build_target: {"locales": {locale: data}}}},
            "old_data_versions": {"platforms": {build_target: {"locales": {}}}},
        }
        locale_versions = (
            existing_release.get("data_versions", {})
            .get("platforms", {})
            .get(build_target, {})
            .get("locales", {})
        )
        if locale_versions.get(locale):
            payload["old_data_versions"]["platforms"][build_target]["locales"][locale] = locale_versions[locale]
        balrog_request(session, "post", url, json=payload)

    for identifier in (buildID, "latest"):
        name = get_nightly_blob_name(productName, branch, build_type, identifier, self.dummy)
        url = self.api_root + "/v2/releases/" + name
        try:
            existing_release = balrog_request(session, "get", url)
        except HTTPError as excp:
            if excp.response.status_code != 404:
                raise
            log.info("No existing release %s, creating it...", name)
            # TODO: we should also submit alias' here.
            # not doing so will cause issues with dated blobs if we
            # point rules at them
            # in reality this isn't a problem 99% of the time so it's
            # being ignored for new in favour of expediency
            toplevel_data = {
                "blob": {
                    "name": name,
                    "hashFunction": hashFunction,
                    "schema_version": 4,
                },
                "product": productName,
            }
            # In theory multiple requests can race against each other on this
            # but since they're all submitting the same data they'll all get 200s
            balrog_request(session, "put", url, json=toplevel_data)
            existing_release = {"blob": {}}
        # NOTE(review): this lookup keys the blob on build_type rather than
        # "platforms"/build_target — confirm that is the intended schema.
        existing_locale_data = existing_release["blob"].get(build_type, {}).get("locales", {}).get(locale)
        update_data(url, existing_release, existing_locale_data)