def run(self, platform, productName, appVersion, version, build_number,
        locale, hashFunction, extVersion, buildID, schemaVersion,
        **updateKwargs):
    """Submit one locale of a release build to Balrog as a schema 2/3 blob.

    Builds the per-locale build data (version fields plus whatever the
    subclass' _get_update_data hook returns) and pushes it via SingleLocale.
    """
    assert schemaVersion in (2, 3), 'Unhandled schema version %s' % schemaVersion
    targets = buildbot2updatePlatforms(platform)
    # Some platforms may have alias', but those are set-up elsewhere
    # for release blobs.
    build_target = targets[0]
    name = get_release_blob_name(productName, version, build_number,
                                 self.dummy)
    data = {
        'buildID': buildID,
    }
    data['appVersion'] = appVersion
    data['platformVersion'] = extVersion
    data['displayVersion'] = getPrettyVersion(version)
    # Merge in complete/partial update info from the subclass hook.
    data.update(self._get_update_data(productName, version, build_number,
                                      **updateKwargs))
    data = json.dumps(data)
    api = SingleLocale(auth=self.auth, api_root=self.api_root)
    # schemaVersion is sent JSON-encoded, matching what update_build expects.
    schemaVersion = json.dumps(schemaVersion)
    api.update_build(name=name, product=productName,
                     build_target=build_target, version=appVersion,
                     locale=locale, hashFunction=hashFunction,
                     buildData=data, schemaVersion=schemaVersion)
def generate_data(self, appVersion, productName, version, buildNumber,
                  updateChannels, ftpServer, bouncerServer, enUSPlatforms,
                  schemaVersion, openURL=None, **updateKwargs):
    """Build the top-level (non-locale) release blob data, schema 3/4.

    Returns a dict with details URL, version fields, per-channel file URLs,
    optional "showURL" action, update data from the subclass hook, and the
    platform/alias map derived from enUSPlatforms.
    """
    assert schemaVersion in (
        3, 4), 'Unhandled schema version %s' % schemaVersion
    details_product = productName.lower()
    # devedition shares its product-details pages with firefox
    if details_product == "devedition":
        details_product = "firefox"
    data = {
        'detailsUrl': getProductDetails(details_product, appVersion),
        'platforms': {},
        'fileUrls': {},
        'appVersion': appVersion,
        'platformVersion': appVersion,
        'displayVersion': getPrettyVersion(version)
    }
    actions = []
    if openURL:
        actions.append("showURL")
        data["openURL"] = openURL
    if actions:
        # actions is a space-separated string in the blob
        data["actions"] = " ".join(actions)
    fileUrls = self._getFileUrls(productName, version, buildNumber,
                                 updateChannels, ftpServer, bouncerServer,
                                 **updateKwargs)
    if fileUrls:
        data.update(fileUrls)
    updateData = self._get_update_data(productName, version, **updateKwargs)
    if updateData:
        data.update(updateData)
    for platform in enUSPlatforms:
        updatePlatforms = buildbot2updatePlatforms(platform)
        bouncerPlatform = buildbot2bouncer(platform)
        ftpPlatform = buildbot2ftp(platform)
        # First entry is the real build target; the rest are aliases of it.
        data['platforms'][updatePlatforms[0]] = {
            'OS_BOUNCER': bouncerPlatform,
            'OS_FTP': ftpPlatform
        }
        for aliasedPlatform in updatePlatforms[1:]:
            data['platforms'][aliasedPlatform] = {
                'alias': updatePlatforms[0]
            }
    return data
def run(self, platform, productName, appVersion, version, build_number,
        locale, hashFunction, extVersion, buildID, schemaVersion,
        **updateKwargs):
    """Submit one locale of a release build to Balrog (schema 3/4).

    Unlike the older variant, this fetches the blob's current data first and
    merges new partials into it (merge_partial_updates), passing the fetched
    data_version back so the server can detect concurrent modification.
    """
    assert schemaVersion in (
        3, 4), 'Unhandled schema version %s' % schemaVersion
    targets = buildbot2updatePlatforms(platform)
    # Some platforms may have alias', but those are set-up elsewhere
    # for release blobs.
    build_target = targets[0]
    name = get_release_blob_name(productName, version, build_number,
                                 self.dummy)
    data = {
        'buildID': buildID,
        'appVersion': appVersion,
        'platformVersion': extVersion,
        'displayVersion': getPrettyVersion(version)
    }
    data.update(
        self._get_update_data(productName, version, build_number,
                              **updateKwargs))
    api = SingleLocale(name=name, build_target=build_target, locale=locale,
                       auth=self.auth, api_root=self.api_root)
    # Read-modify-write: merge our update data into the existing blob.
    current_data, data_version = api.get_data()
    api.update_build(data_version=data_version, product=productName,
                     hashFunction=hashFunction,
                     buildData=json.dumps(
                         merge_partial_updates(current_data, data)),
                     schemaVersion=schemaVersion)
def run(self, platform, productName, appVersion, version, build_number,
        locale, hashFunction, extVersion, buildID, completeMarSize,
        completeMarHash):
    """Submit one locale of a release build as a schema-1 style blob
    (appv/extv keys) containing only the complete MAR details.
    """
    # Only the primary update platform matters here; platform aliases
    # are configured elsewhere for release blobs.
    primary_target = buildbot2updatePlatforms(platform)[0]
    blob_name = get_release_blob_name(productName, version, build_number,
                                      self.dummy)
    payload = {
        'appv': appVersion,
        'extv': extVersion,
        'buildID': buildID,
        'complete': {
            'from': '*',
            'filesize': completeMarSize,
            'hashValue': completeMarHash,
        },
    }
    client = SingleLocale(auth=self.auth, api_root=self.api_root)
    client.update_build(name=blob_name, product=productName,
                        build_target=primary_target, version=appVersion,
                        locale=locale, hashFunction=hashFunction,
                        buildData=json.dumps(payload))
def run(self, platform, buildID, productName, branch, appVersion, locale,
        hashFunction, extVersion, schemaVersion, isOSUpdate=None,
        **updateKwargs):
    """Submit one locale of a nightly build to Balrog (schema 2/3).

    The dated blob (keyed by buildID) is also copied to the corresponding
    "latest" blob via copyTo.
    """
    assert schemaVersion in (2, 3), 'Unhandled schema version %s' % schemaVersion
    targets = buildbot2updatePlatforms(platform)
    build_target = targets[0]
    # Any extra targets are submitted as aliases of the first.
    alias = None
    if len(targets) > 1:
        alias = targets[1:]
    data = {
        'buildID': buildID,
    }
    data['appVersion'] = appVersion
    data['platformVersion'] = extVersion
    data['displayVersion'] = appVersion
    if isOSUpdate:
        data['isOSUpdate'] = isOSUpdate
    # Merge in complete/partial update info from the subclass hook.
    data.update(self._get_update_data(productName, branch, **updateKwargs))
    name = get_nightly_blob_name(productName, branch, self.build_type,
                                 buildID, self.dummy)
    data = json.dumps(data)
    api = SingleLocale(auth=self.auth, api_root=self.api_root)
    copyTo = [get_nightly_blob_name(
        productName, branch, self.build_type, 'latest', self.dummy)]
    # copyTo/alias are sent JSON-encoded.
    copyTo = json.dumps(copyTo)
    alias = json.dumps(alias)
    api.update_build(name=name, product=productName,
                     build_target=build_target, version=appVersion,
                     locale=locale, hashFunction=hashFunction,
                     buildData=data, copyTo=copyTo, alias=alias,
                     schemaVersion=schemaVersion)
def generate_data(self, appVersion, productName, version, buildNumber,
                  updateChannels, stagingServer, bouncerServer,
                  enUSPlatforms, schemaVersion, **updateKwargs):
    """Build the top-level release blob data (schema 2/3), including
    per-channel fileUrls (FTP for test channels, Bouncer otherwise) and
    the platform/alias map.
    """
    assert schemaVersion in (2, 3), 'Unhandled schema version %s' % schemaVersion
    self.name = get_release_blob_name(productName, version, buildNumber)
    data = {
        'name': self.name,
        'detailsUrl': getProductDetails(productName.lower(), appVersion),
        'platforms': {},
        'fileUrls': {},
        'ftpFilenames': {},
        'bouncerProducts': {},
    }
    data['appVersion'] = appVersion
    data['platformVersion'] = appVersion
    data['displayVersion'] = getPrettyVersion(version)

    # XXX: This is a hack for bug 1045583. We should remove it, and always
    # use "candidates" for nightlyDir after the switch to Balrog is complete.
    if productName.lower() == "mobile":
        nightlyDir = "candidates"
    else:
        nightlyDir = "nightly"

    for channel in updateChannels:
        if channel in ('betatest', 'esrtest') or "localtest" in channel:
            # Test channels point straight at the staging candidates dir.
            dir_ = makeCandidatesDir(productName.lower(), version,
                                     buildNumber, server=stagingServer,
                                     protocol='http', nightlyDir=nightlyDir)
            data['fileUrls'][channel] = '%supdate/%%OS_FTP%%/%%LOCALE%%/%%FILENAME%%' % dir_
        else:
            url = 'http://%s/?product=%%PRODUCT%%&os=%%OS_BOUNCER%%&lang=%%LOCALE%%' % bouncerServer
            data['fileUrls'][channel] = url

        # XXX: quick hack for bug 1021026. We should be using Bouncer for this
        # after we implement better solution talked about in comments 2
        # through 4
        if channel == 'release':
            dir_ = makeCandidatesDir(productName.lower(), version,
                                     buildNumber,
                                     server='download.cdn.mozilla.net',
                                     protocol='http', nightlyDir=nightlyDir)
            url = '%supdate/%%OS_FTP%%/%%LOCALE%%/%%FILENAME%%' % dir_
            data['fileUrls']['beta'] = url
            data['fileUrls']['beta-cdntest'] = url

    # Merge in complete/partial update info from the subclass hook.
    data.update(self._get_update_data(productName, version, **updateKwargs))

    for platform in enUSPlatforms:
        updatePlatforms = buildbot2updatePlatforms(platform)
        bouncerPlatform = buildbot2bouncer(platform)
        ftpPlatform = buildbot2ftp(platform)
        # First entry is the real build target; the rest are aliases of it.
        data['platforms'][updatePlatforms[0]] = {
            'OS_BOUNCER': bouncerPlatform,
            'OS_FTP': ftpPlatform
        }
        for aliasedPlatform in updatePlatforms[1:]:
            data['platforms'][aliasedPlatform] = {
                'alias': updatePlatforms[0]
            }
    return data
def run(self, platform, buildID, productName, branch, appVersion, locale,
        hashFunction, extVersion, completeMarSize, completeMarHash,
        completeMarUrl, schemaVersion, isOSUpdate=None, partialMarSize=None,
        partialMarHash=None, partialMarUrl=None, previous_buildid=None):
    """Submit one locale of a nightly build (schema 1 or 2) with explicit
    complete (and optional partial) MAR details, copying the dated blob to
    "latest" as well.
    """
    targets = buildbot2updatePlatforms(platform)
    build_target = targets[0]
    # Any extra targets are submitted as aliases of the first.
    alias = None
    if len(targets) > 1:
        alias = targets[1:]
    name = get_nightly_blob_name(productName, branch, self.build_type,
                                 buildID, self.dummy)
    data = {
        'buildID': buildID,
    }
    assert schemaVersion in (1, 2), 'Unhandled schema version %s' % schemaVersion
    # Schema 1 uses appv/extv keys; schema 2 renamed and extended them.
    if schemaVersion == 1:
        data['appv'] = appVersion
        data['extv'] = extVersion
    elif schemaVersion == 2:
        data['appVersion'] = appVersion
        data['platformVersion'] = extVersion
        data['displayVersion'] = appVersion
        if isOSUpdate:
            data['isOSUpdate'] = isOSUpdate
    data['complete'] = {
        'from': '*',
        'filesize': completeMarSize,
        'hashValue': completeMarHash,
        'fileUrl': completeMarUrl
    }
    if partialMarSize:
        data['partial'] = {
            # Partials are "from" the previous nightly's dated blob.
            'from': get_nightly_blob_name(productName, branch,
                                          self.build_type,
                                          previous_buildid, self.dummy),
            'filesize': partialMarSize,
            'hashValue': partialMarHash,
            'fileUrl': partialMarUrl
        }
    data = json.dumps(data)
    api = SingleLocale(auth=self.auth, api_root=self.api_root)
    copyTo = [get_nightly_blob_name(
        productName, branch, self.build_type, 'latest', self.dummy)]
    # copyTo/alias/schemaVersion are sent JSON-encoded.
    copyTo = json.dumps(copyTo)
    alias = json.dumps(alias)
    schemaVersion = json.dumps(schemaVersion)
    api.update_build(name=name, product=productName,
                     build_target=build_target, version=appVersion,
                     locale=locale, hashFunction=hashFunction,
                     buildData=data, copyTo=copyTo, alias=alias,
                     schemaVersion=schemaVersion)
def generate_data(self, appVersion, productName, version, buildNumber,
                  partialUpdates, updateChannels, stagingServer,
                  bouncerServer, enUSPlatforms, schemaVersion):
    """Build the top-level release blob data (schema 1 or 2), including
    ftpFilenames/bouncerProducts entries for the complete MAR and a single
    partial against the newest version in partialUpdates.
    """
    # TODO: Multiple partial support. Probably as a part of bug 797033.
    previousVersion = str(max(StrictVersion(v) for v in partialUpdates))
    self.name = get_release_blob_name(productName, version, buildNumber)
    data = {
        'name': self.name,
        'detailsUrl': getProductDetails(productName.lower(), appVersion),
        'platforms': {},
        'fileUrls': {},
        'ftpFilenames': {},
        'bouncerProducts': {},
    }
    assert schemaVersion in (1, 2), 'Unhandled schema version %s' % schemaVersion
    # Schema 1 uses appv/extv keys; schema 2 renamed and extended them.
    if schemaVersion == 1:
        data['appv'] = appVersion
        data['extv'] = appVersion
    elif schemaVersion == 2:
        data['appVersion'] = appVersion
        data['platformVersion'] = appVersion
        data['displayVersion'] = getPrettyVersion(version)
    for channel in updateChannels:
        if channel in ('betatest', 'esrtest'):
            # Test channels point straight at the staging candidates dir.
            dir_ = makeCandidatesDir(productName.lower(), version,
                                     buildNumber, server=stagingServer,
                                     protocol='http')
            data['fileUrls'][channel] = '%supdate/%%OS_FTP%%/%%LOCALE%%/%%FILENAME%%' % dir_
        else:
            url = 'http://%s/?product=%%PRODUCT%%&os=%%OS_BOUNCER%%&lang=%%LOCALE%%' % bouncerServer
            data['fileUrls'][channel] = url
    data['ftpFilenames']['complete'] = '%s-%s.complete.mar' % (productName.lower(), version)
    data['ftpFilenames']['partial'] = '%s-%s-%s.partial.mar' % (productName.lower(), previousVersion, version)
    data['bouncerProducts']['complete'] = '%s-%s-Complete' % (productName.capitalize(), version)
    data['bouncerProducts']['partial'] = '%s-%s-Partial-%s' % (productName.capitalize(), version, previousVersion)
    for platform in enUSPlatforms:
        updatePlatforms = buildbot2updatePlatforms(platform)
        bouncerPlatform = buildbot2bouncer(platform)
        ftpPlatform = buildbot2ftp(platform)
        # First entry is the real build target; the rest are aliases of it.
        data['platforms'][updatePlatforms[0]] = {
            'OS_BOUNCER': bouncerPlatform,
            'OS_FTP': ftpPlatform
        }
        for aliasedPlatform in updatePlatforms[1:]:
            data['platforms'][aliasedPlatform] = {
                'alias': updatePlatforms[0]
            }
    return data
def generate_data(self, appVersion, productName, version, buildNumber,
                  updateChannels, ftpServer, bouncerServer, enUSPlatforms,
                  **updateKwargs):
    """Build the top-level release blob data in the "updateLine" format.

    Returns a dict with version fields, an unconditional updateLine entry
    carrying detailsURL/type, per-channel file URLs from _getFileUrls, and
    the platform/alias map derived from enUSPlatforms.
    """
    details_product = productName.lower()
    # devedition shares its product-details pages with firefox
    if details_product == "devedition":
        details_product = "firefox"
    data = {
        'platforms': {},
        'fileUrls': {},
        'appVersion': appVersion,
        'displayVersion': getPrettyVersion(version),
        'updateLine': [
            {
                # An empty "for" clause applies this entry unconditionally.
                'for': {},
                'fields': {
                    'detailsURL': getProductDetails(details_product,
                                                    appVersion),
                    'type': 'minor',
                },
            },
        ]
    }
    # NOTE(review): an unused `actions = []` local (dead code, apparently
    # left over from the schema 3/4 variant) was removed here.
    fileUrls = self._getFileUrls(productName, version, buildNumber,
                                 updateChannels, ftpServer, bouncerServer,
                                 **updateKwargs)
    if fileUrls:
        data.update(fileUrls)
    for platform in enUSPlatforms:
        updatePlatforms = buildbot2updatePlatforms(platform)
        bouncerPlatform = buildbot2bouncer(platform)
        ftpPlatform = buildbot2ftp(platform)
        # First entry is the real build target; the rest are aliases of it.
        data['platforms'][updatePlatforms[0]] = {
            'OS_BOUNCER': bouncerPlatform,
            'OS_FTP': ftpPlatform
        }
        for aliasedPlatform in updatePlatforms[1:]:
            data['platforms'][aliasedPlatform] = {
                'alias': updatePlatforms[0]
            }
    return data
def generate_data(self, appVersion, productName, version, buildNumber,
                  updateChannels, ftpServer, bouncerServer, enUSPlatforms,
                  schemaVersion, openURL=None, **updateKwargs):
    """Build the top-level release blob data (schema 3/4): details URL,
    version fields, optional "showURL" action, file URLs, update data from
    the subclass hook, and the platform/alias map.
    """
    assert schemaVersion in (
        3, 4), 'Unhandled schema version %s' % schemaVersion
    data = {
        'detailsUrl': getProductDetails(productName.lower(), appVersion),
        'platforms': {},
        'fileUrls': {},
        'appVersion': appVersion,
        'platformVersion': appVersion,
        'displayVersion': getPrettyVersion(version)
    }
    actions = []
    if openURL:
        actions.append("showURL")
        data["openURL"] = openURL
    if actions:
        # actions is a space-separated string in the blob
        data["actions"] = " ".join(actions)
    fileUrls = self._getFileUrls(productName, version, buildNumber,
                                 updateChannels, ftpServer, bouncerServer,
                                 **updateKwargs)
    if fileUrls:
        data.update(fileUrls)
    updateData = self._get_update_data(
        productName, version, **updateKwargs)
    if updateData:
        data.update(updateData)
    for platform in enUSPlatforms:
        updatePlatforms = buildbot2updatePlatforms(platform)
        bouncerPlatform = buildbot2bouncer(platform)
        ftpPlatform = buildbot2ftp(platform)
        # First entry is the real build target; the rest are aliases of it.
        data['platforms'][updatePlatforms[0]] = {
            'OS_BOUNCER': bouncerPlatform,
            'OS_FTP': ftpPlatform
        }
        for aliasedPlatform in updatePlatforms[1:]:
            data['platforms'][aliasedPlatform] = {
                'alias': updatePlatforms[0]
            }
    return data
def run(self, platform, buildID, productName, branch, appVersion, locale,
        hashFunction, extVersion, schemaVersion, isOSUpdate=None,
        **updateKwargs):
    """Submit one locale of a nightly build (schema 3/4), with special-cased
    build types for flame-kk and android-api-9 split builds; the dated blob
    is copied to "latest" via copyTo.
    """
    assert schemaVersion in (3, 4), 'Unhandled schema version %s' % schemaVersion
    targets = buildbot2updatePlatforms(platform)
    build_target = targets[0]
    # Any extra targets are submitted as aliases of the first.
    alias = None
    if len(targets) > 1:
        alias = targets[1:]
    data = {
        'buildID': buildID,
    }
    data['appVersion'] = appVersion
    data['platformVersion'] = extVersion
    data['displayVersion'] = appVersion
    if isOSUpdate:
        data['isOSUpdate'] = isOSUpdate
    # Merge in complete/partial update info from the subclass hook.
    data.update(self._get_update_data(productName, branch, **updateKwargs))

    if build_target == 'flame-kk':
        # Bug 1055305 - a hack so that we can have JB and KK OTA for flame.
        # They both query with buildTarget of flame, but differ in OS
        # Version, so we need separate release blobs and rule to do the
        # right thing
        build_type = 'kitkat-%s' % self.build_type
    elif platform == 'android-api-9':
        # Bug 1080749 - a hack to support api-9 and api-10+ split builds.
        # Like 1055305 above, this is a hack to support two builds with the
        # same build target that require different release blobs and rules
        build_type = 'api-9-%s' % self.build_type
    else:
        build_type = self.build_type

    name = get_nightly_blob_name(productName, branch, build_type, buildID,
                                 self.dummy)
    data = json.dumps(data)
    api = SingleLocale(auth=self.auth, api_root=self.api_root)
    copyTo = [get_nightly_blob_name(
        productName, branch, build_type, 'latest', self.dummy)]
    # copyTo/alias are sent JSON-encoded.
    copyTo = json.dumps(copyTo)
    alias = json.dumps(alias)
    api.update_build(name=name, product=productName,
                     build_target=build_target, version=appVersion,
                     locale=locale, hashFunction=hashFunction,
                     buildData=data, copyTo=copyTo, alias=alias,
                     schemaVersion=schemaVersion)
def run(self, platform, buildID, productName, branch, appVersion, locale,
        hashFunction, extVersion, completeMarSize, completeMarHash,
        completeMarUrl, partialMarSize=None, partialMarHash=None,
        partialMarUrl=None, previous_buildid=None):
    """Submit one locale of a nightly build (schema-1 style appv/extv keys)
    with explicit complete (and optional partial) MAR details, copying the
    dated blob to "latest" as well.

    partialMarSize now defaults to None for consistency with the other
    partial* parameters and the schema-aware variant of this method; the
    body already treats a falsy value as "no partial".
    """
    targets = buildbot2updatePlatforms(platform)
    build_target = targets[0]
    # Any extra targets are submitted as aliases of the first.
    alias = None
    if len(targets) > 1:
        alias = targets[1:]
    name = get_nightly_blob_name(productName, branch, self.build_type,
                                 buildID, self.dummy)
    data = {
        'appv': appVersion,
        'extv': extVersion,
        'buildID': buildID,
    }
    data['complete'] = {
        'from': '*',
        'filesize': completeMarSize,
        'hashValue': completeMarHash,
        'fileUrl': completeMarUrl
    }
    if partialMarSize:
        data['partial'] = {
            # Partials are "from" the previous nightly's dated blob.
            'from': get_nightly_blob_name(productName, branch,
                                          self.build_type,
                                          previous_buildid, self.dummy),
            'filesize': partialMarSize,
            'hashValue': partialMarHash,
            'fileUrl': partialMarUrl
        }
    data = json.dumps(data)
    api = SingleLocale(auth=self.auth, api_root=self.api_root)
    copyTo = [get_nightly_blob_name(
        productName, branch, self.build_type, 'latest', self.dummy)]
    # copyTo/alias are sent JSON-encoded.
    copyTo = json.dumps(copyTo)
    alias = json.dumps(alias)
    api.update_build(name=name, product=productName,
                     build_target=build_target, version=appVersion,
                     locale=locale, hashFunction=hashFunction,
                     buildData=data, copyTo=copyTo, alias=alias)
def generate_data(self):
    """Read the buildbot properties JSON file and build a schema-1 style
    nightly blob (appv/extv keys) from it.

    Side effects: stores build_target, alias, appName, branch, appVersion,
    name, locale and hashFunction on self for later use by the submitter.
    """
    # Context manager guarantees the file is closed even if json.load
    # raises (the original open/close pair leaked the handle on error).
    with open(self.buildbprops_file) as fp:
        bp = json.load(fp)
    props = bp['properties']
    targets = buildbot2updatePlatforms(props['platform'])
    self.build_target = targets[0]
    # Any extra targets are treated as aliases of the first.
    self.alias = None
    if len(targets) > 1:
        self.alias = targets[1:]
    buildID = props['buildid']
    self.appName = props['appName']
    self.branch = props['branch']
    self.appVersion = props['appVersion']
    self.name = get_nightly_blob_name(self.appName, self.branch,
                                      self.build_type, buildID, self.dummy)
    self.locale = props.get('locale', 'en-US')
    self.hashFunction = props['hashType']
    data = {
        'appv': self.appVersion,
        'extv': props.get('extVersion', self.appVersion),
        'buildID': props['buildid'],
    }
    data['complete'] = {
        'from': '*',
        'filesize': props['completeMarSize'],
        'hashValue': props['completeMarHash'],
        'fileUrl': props['completeMarUrl']
    }
    if props.get('partialMarFilename'):
        data['partial'] = {
            # Partials are "from" the previous nightly's dated blob.
            'from': get_nightly_blob_name(self.appName, self.branch,
                                          self.build_type,
                                          props['previous_buildid'],
                                          self.dummy),
            'filesize': props['partialMarSize'],
            'hashValue': props['partialMarHash'],
            'fileUrl': props['partialMarUrl']
        }
    return data
def generate_data(self, appVersion, productName, version, buildNumber,
                  updateChannels, stagingServer, bouncerServer,
                  enUSPlatforms, schemaVersion, **updateKwargs):
    """Build the top-level release blob data (schema 2/3): per-channel file
    URLs (FTP for betatest/esrtest, Bouncer otherwise), update data from the
    subclass hook, and the platform/alias map.
    """
    assert schemaVersion in (2, 3), 'Unhandled schema version %s' % schemaVersion
    self.name = get_release_blob_name(productName, version, buildNumber)
    data = {
        'name': self.name,
        'detailsUrl': getProductDetails(productName.lower(), appVersion),
        'platforms': {},
        'fileUrls': {},
        'ftpFilenames': {},
        'bouncerProducts': {},
    }
    data['appVersion'] = appVersion
    data['platformVersion'] = appVersion
    data['displayVersion'] = getPrettyVersion(version)
    for channel in updateChannels:
        if channel in ('betatest', 'esrtest'):
            # Test channels point straight at the staging candidates dir.
            dir_ = makeCandidatesDir(productName.lower(), version,
                                     buildNumber, server=stagingServer,
                                     protocol='http')
            data['fileUrls'][channel] = '%supdate/%%OS_FTP%%/%%LOCALE%%/%%FILENAME%%' % dir_
        else:
            url = 'http://%s/?product=%%PRODUCT%%&os=%%OS_BOUNCER%%&lang=%%LOCALE%%' % bouncerServer
            data['fileUrls'][channel] = url
    # Merge in complete/partial update info from the subclass hook.
    data.update(self._get_update_data(productName, version, **updateKwargs))
    for platform in enUSPlatforms:
        updatePlatforms = buildbot2updatePlatforms(platform)
        bouncerPlatform = buildbot2bouncer(platform)
        ftpPlatform = buildbot2ftp(platform)
        # First entry is the real build target; the rest are aliases of it.
        data['platforms'][updatePlatforms[0]] = {
            'OS_BOUNCER': bouncerPlatform,
            'OS_FTP': ftpPlatform
        }
        for aliasedPlatform in updatePlatforms[1:]:
            data['platforms'][aliasedPlatform] = {
                'alias': updatePlatforms[0]
            }
    return data
def run(self, platform, buildID, productName, branch, appVersion, locale,
        hashFunction, extVersion, schemaVersion, isOSUpdate=None,
        **updateKwargs):
    """Submit one locale of a nightly build (schema 2/3), special-casing the
    flame-kk build target; the dated blob is copied to "latest" via copyTo.
    """
    assert schemaVersion in (2, 3), 'Unhandled schema version %s' % schemaVersion
    targets = buildbot2updatePlatforms(platform)
    build_target = targets[0]
    # Any extra targets are submitted as aliases of the first.
    alias = None
    if len(targets) > 1:
        alias = targets[1:]
    data = {
        'buildID': buildID,
    }
    data['appVersion'] = appVersion
    data['platformVersion'] = extVersion
    data['displayVersion'] = appVersion
    if isOSUpdate:
        data['isOSUpdate'] = isOSUpdate
    # Merge in complete/partial update info from the subclass hook.
    data.update(self._get_update_data(productName, branch, **updateKwargs))
    # Bug 1055305 - a hack so that we can have JB and KK OTA for flame.
    # They both query with buildTarget of flame, but differ in OS Version,
    # so we need separate release blobs and rule to do the right thing
    build_type = self.build_type
    if build_target == 'flame-kk':
        build_type = 'kitkat-%s' % build_type
    name = get_nightly_blob_name(productName, branch, build_type, buildID,
                                 self.dummy)
    data = json.dumps(data)
    api = SingleLocale(auth=self.auth, api_root=self.api_root)
    copyTo = [get_nightly_blob_name(
        productName, branch, build_type, 'latest', self.dummy)]
    # copyTo/alias are sent JSON-encoded.
    copyTo = json.dumps(copyTo)
    alias = json.dumps(alias)
    api.update_build(name=name, product=productName,
                     build_target=build_target, version=appVersion,
                     locale=locale, hashFunction=hashFunction,
                     buildData=data, copyTo=copyTo, alias=alias,
                     schemaVersion=schemaVersion)
def run(self, platform, productName, appVersion, version, build_number, locale, hashFunction, extVersion, buildID, completeMarSize, completeMarHash, schemaVersion=2): targets = buildbot2updatePlatforms(platform) # Some platforms may have alias', but those are set-up elsewhere # for release blobs. build_target = targets[0] name = get_release_blob_name(productName, version, build_number, self.dummy) data = { 'buildID': buildID, } assert schemaVersion in (1, 2), 'Unhandled schema version %s' % schemaVersion if schemaVersion == 1: data['appv'] = appVersion data['extv'] = extVersion elif schemaVersion == 2: data['appVersion'] = appVersion data['platformVersion'] = extVersion data['displayVersion'] = getPrettyVersion(version) data['complete'] = { 'from': '*', 'filesize': completeMarSize, 'hashValue': completeMarHash, } data = json.dumps(data) api = SingleLocale(auth=self.auth, api_root=self.api_root) schemaVersion = json.dumps(schemaVersion) api.update_build(name=name, product=productName, build_target=build_target, version=appVersion, locale=locale, hashFunction=hashFunction, buildData=data, schemaVersion=schemaVersion)
updatePaths[fromVersion] = [] updatePaths[fromVersion].append(locale) completes_only_index = 0 for fromVersion in reversed(sorted(updatePaths, key=LooseVersion)): locales = updatePaths[fromVersion] from_ = pc["release"][fromVersion] appVersion = from_["extension-version"] build_id = from_["platforms"][ftp_platform] mar_channel_IDs = from_.get('mar-channel-ids') # Use new build targets for Windows, but only on compatible # versions (42+). See bug 1185456 for additional context. if args.platform not in ("win32", "win64") or \ LooseVersion(fromVersion) < LooseVersion("42.0"): update_platform = buildbot2updatePlatforms(args.platform)[0] else: update_platform = buildbot2updatePlatforms(args.platform)[1] path_ = makeReleaseRepackUrls( product_name, app_name, fromVersion, args.platform, locale='%locale%', signed=True, exclude_secondary=True ).values()[0] release_dir = makeReleasesDir(stage_product_name, fromVersion, ftp_root='/') from_path = "%s%s" % (release_dir, path_) # Exclude locales being full checked quick_check_locales = [l for l in locales if l not in full_check_locales] # Get the intersection of from and to full_check_locales this_full_check_locales = [l for l in full_check_locales
def generate_data(self, appVersion, productName, version, buildNumber,
                  updateChannels, stagingServer, bouncerServer,
                  enUSPlatforms, schemaVersion, openURL=None,
                  **updateKwargs):
    """Build the top-level release blob data (schema 2/3), including the
    optional "showURL" action, per-channel file URLs (with the mobile
    nightlyDir and release-channel CDN hacks), and the platform/alias map.
    """
    assert schemaVersion in (2, 3), 'Unhandled schema version %s' % schemaVersion
    self.name = get_release_blob_name(productName, version, buildNumber)
    data = {
        'name': self.name,
        'detailsUrl': getProductDetails(productName.lower(), appVersion),
        'platforms': {},
        'fileUrls': {},
        'ftpFilenames': {},
        'bouncerProducts': {},
    }
    data['appVersion'] = appVersion
    data['platformVersion'] = appVersion
    data['displayVersion'] = getPrettyVersion(version)

    actions = []
    if openURL:
        actions.append("showURL")
        data["openURL"] = openURL
    if actions:
        # actions is a space-separated string in the blob
        data["actions"] = " ".join(actions)

    # XXX: This is a hack for bug 1045583. We should remove it, and always
    # use "candidates" for nightlyDir after the switch to Balrog is complete.
    if productName.lower() == "mobile":
        nightlyDir = "candidates"
    else:
        nightlyDir = "nightly"

    for channel in updateChannels:
        if channel in ('betatest', 'esrtest') or "localtest" in channel:
            # Test channels point straight at the staging candidates dir.
            dir_ = makeCandidatesDir(productName.lower(), version,
                                     buildNumber, server=stagingServer,
                                     protocol='http', nightlyDir=nightlyDir)
            data['fileUrls'][channel] = '%supdate/%%OS_FTP%%/%%LOCALE%%/%%FILENAME%%' % dir_
        else:
            url = 'http://%s/?product=%%PRODUCT%%&os=%%OS_BOUNCER%%&lang=%%LOCALE%%' % bouncerServer
            data['fileUrls'][channel] = url

        # XXX: quick hack for bug 1021026. We should be using Bouncer for
        # this after we implement better solution talked about in comments 2
        # through 4
        if channel == 'release':
            dir_ = makeCandidatesDir(productName.lower(), version,
                                     buildNumber,
                                     server='download.cdn.mozilla.net',
                                     protocol='http', nightlyDir=nightlyDir)
            url = '%supdate/%%OS_FTP%%/%%LOCALE%%/%%FILENAME%%' % dir_
            data['fileUrls']['beta'] = url
            data['fileUrls']['beta-cdntest'] = url

    # Merge in complete/partial update info from the subclass hook.
    data.update(self._get_update_data(productName, version, **updateKwargs))

    for platform in enUSPlatforms:
        updatePlatforms = buildbot2updatePlatforms(platform)
        bouncerPlatform = buildbot2bouncer(platform)
        ftpPlatform = buildbot2ftp(platform)
        # First entry is the real build target; the rest are aliases of it.
        data['platforms'][updatePlatforms[0]] = {
            'OS_BOUNCER': bouncerPlatform,
            'OS_FTP': ftpPlatform
        }
        for aliasedPlatform in updatePlatforms[1:]:
            data['platforms'][aliasedPlatform] = {
                'alias': updatePlatforms[0]
            }
    return data
def createSnippets(brandName, product, appName, version, appVersion,
                   oldVersion, oldAppVersion, buildNumber, oldBuildNumber,
                   platforms, channels, oldBaseSnippetDir, stageServer, hg,
                   sourceRepo, generatePartials):
    """Backfill update snippets for every candidate build/locale/channel,
    cloning the old version's complete snippet into the new directories.

    Fixes vs. the original: py2-only ``except X, e`` syntax replaced with
    ``except X as e`` (valid on Python 2.6+ and 3), and the snippet file is
    now read via a context manager so the handle is always closed.
    """
    errs = []
    snippets = ['complete.txt']
    if generatePartials:
        snippets.append('partial.txt')
    previousCandidateIDs = findOldBuildIDs(product, version, buildNumber,
                                           platforms, server=stageServer)
    oldShippedLocales = getShippedLocales(product, appName, oldVersion,
                                          oldBuildNumber, sourceRepo, hg)
    shippedLocales = getShippedLocales(product, appName, version,
                                       buildNumber, sourceRepo, hg)
    for platform in previousCandidateIDs.keys():
        update_platforms = buildbot2updatePlatforms(platform)
        oldVersionBuildID = getBuildID(platform, product, oldVersion,
                                       oldBuildNumber, server=stageServer)
        oldPlatformLocales = getPlatformLocales(oldShippedLocales,
                                                (platform,))[platform]
        platformLocales = getPlatformLocales(shippedLocales,
                                             (platform,))[platform]
        # Only locales shipped in both versions can get snippets.
        commonLocales = getCommonLocales(platformLocales, oldPlatformLocales)
        for chan in channels:
            baseSnippetDir = getSnippetDirname(oldBaseSnippetDir, chan)
            if not os.path.exists(baseSnippetDir):
                errs.append("Can't generate snippets for %s because %s doesn't exist" % (chan, baseSnippetDir))
                continue
            for buildID in previousCandidateIDs[platform]:
                for locale in commonLocales:
                    for update_platform in update_platforms:
                        try:
                            oldFile = os.path.join(
                                baseSnippetDir, brandName, oldAppVersion,
                                update_platform, oldVersionBuildID, locale,
                                chan, 'complete.txt')
                            with open(oldFile) as f:
                                oldCompleteSnippet = f.read()
                        except Exception as e:
                            errs.append("Error reading from %s\n%s" % (oldFile, e))
                            continue
                        newDir = os.path.join(baseSnippetDir, brandName,
                                              appVersion, update_platform,
                                              buildID, locale, chan)
                        try:
                            os.makedirs(newDir)
                            log.info("Creating snippets for %s" % newDir)
                            for f in snippets:
                                newFile = os.path.join(newDir, f)
                                log.info(" %s" % f)
                                writeSnippet(newFile, oldCompleteSnippet)
                        except OSError as e:
                            errs.append("Error creating %s\n%s" % (newDir, e))
                        except Exception as e:
                            errs.append("Hit error creating %s\n%s" % (newFile, e))
        # NOTE(review): placement of this warning loop reconstructed from
        # mangled source; it uses per-platform state, so it belongs inside
        # the platform loop — confirm against upstream history.
        for l in [l for l in platformLocales if l not in commonLocales]:
            log.debug("WARNING: %s not in oldVersion for %s, did not generate snippets for it" % (l, platform))
def run(self, platform, buildID, productName, branch, appVersion, locale,
        hashFunction, extVersion, schemaVersion, isOSUpdate=None,
        **updateKwargs):
    """Submit one locale of a nightly build (schema 3/4) to both the dated
    blob and the "latest" blob, retrying each submission on failure.

    Both submissions are read-modify-write with an explicit data_version so
    the server can reject concurrent modification; no-op updates are skipped.
    """
    assert schemaVersion in (3, 4), 'Unhandled schema version %s' % schemaVersion
    targets = buildbot2updatePlatforms(platform)
    build_target = targets[0]
    # Any extra targets are submitted as aliases of the first.
    alias = None
    if len(targets) > 1:
        alias = targets[1:]
    data = {
        'buildID': buildID,
        'appVersion': appVersion,
        'platformVersion': extVersion,
        'displayVersion': appVersion,
    }
    if isOSUpdate:
        data['isOSUpdate'] = isOSUpdate
    # Merge in complete/partial update info from the subclass hook.
    data.update(self._get_update_data(productName, branch, **updateKwargs))

    if platform == 'android-api-9':
        # Bug 1080749 - a hack to support api-9 and api-10+ split builds.
        # Like 1055305, this is a hack to support two builds with same
        # build target that
        # require different release blobs and rules
        build_type = 'api-9-%s' % self.build_type
    else:
        build_type = self.build_type

    name = get_nightly_blob_name(productName, branch, build_type, buildID,
                                 self.dummy)
    api = SingleLocale(name=name, build_target=build_target, locale=locale,
                       auth=self.auth, api_root=self.api_root)

    # wrap operations into "atomic" functions that can be retried
    def update_dated():
        current_data, data_version = api.get_data()
        # If the partials are already a subset of the blob and the
        # complete MAR is the same, skip the submission
        skip_submission = bool(
            current_data and
            current_data.get("completes") == data.get("completes") and
            all(p in current_data.get("partials", [])
                for p in data.get("partials", [])))
        if skip_submission:
            log.warn("Dated data didn't change, skipping update")
            return
        # explicitly pass data version
        api.update_build(
            product=productName,
            version=appVersion,
            hashFunction=hashFunction,
            buildData=json.dumps(merge_partial_updates(current_data, data)),
            alias=json.dumps(alias),
            schemaVersion=schemaVersion,
            data_version=data_version)

    retry(update_dated, sleeptime=10)

    latest = SingleLocale(
        api_root=self.api_root, auth=self.auth,
        name=get_nightly_blob_name(productName, branch, build_type,
                                   'latest', self.dummy),
        build_target=build_target, locale=locale)

    def update_latest():
        # copy everything over using target release's data version
        latest_data, latest_data_version = latest.get_data()
        source_data, _ = api.get_data()
        if source_data == latest_data:
            log.warn("Latest data didn't change, skipping update")
            return
        latest.update_build(
            product=productName, version=appVersion,
            hashFunction=hashFunction, buildData=json.dumps(source_data),
            alias=json.dumps(alias), schemaVersion=schemaVersion,
            data_version=latest_data_version)

    retry(update_latest, sleeptime=10)
updatePaths[fromVersion] = [] updatePaths[fromVersion].append(locale) for fromVersion in reversed(sorted(updatePaths, key=LooseVersion)): locales = updatePaths[fromVersion] from_ = pc["release"][fromVersion] appVersion = from_["extension-version"] build_id = from_["platforms"][ftp_platform] mar_channel_IDs = from_.get('mar-channel-ids') # Use new build targets for Windows, but only on compatible versions (42+) # See bug 1185456 for additional context. if options.platform not in ( "win32", "win64") or LooseVersion(fromVersion) < LooseVersion("42.0"): update_platform = buildbot2updatePlatforms(options.platform)[0] else: update_platform = buildbot2updatePlatforms(options.platform)[1] path_ = makeReleaseRepackUrls(product_name, app_name, fromVersion, options.platform, locale='%locale%', signed=True, exclude_secondary=True).values()[0] release_dir = makeReleasesDir(product_name, fromVersion, ftp_root='/') from_path = "%s%s" % (release_dir, path_) # Exclude locales being full checked quick_check_locales = [
def createSnippets(brandName, product, appName, version, appVersion,
                   oldVersion, oldAppVersion, buildNumber, oldBuildNumber,
                   platforms, channels, oldBaseSnippetDir, stageServer, hg,
                   sourceRepo, generatePartials):
    """Generate update snippets for each platform/channel/buildID/locale.

    For every candidate buildID of the previous release, the existing
    complete.txt snippet for the old version is copied into the new
    version's snippet directory (and, when generatePartials is set, also
    written as partial.txt).  Errors are collected and returned rather
    than raised, so one bad snippet doesn't abort the whole run.

    NOTE(review): Python 2 code ("except Exception, e" syntax).
    """
    # Accumulates human-readable error strings; processing continues
    # past individual failures.
    errs = []
    snippets = ['complete.txt']
    if generatePartials:
        # partial.txt gets the same content as complete.txt here;
        # presumably downstream tooling overwrites or validates it —
        # TODO confirm.
        snippets.append('partial.txt')
    previousCandidateIDs = findOldBuildIDs(product, version, buildNumber,
                                           platforms, server=stageServer)
    oldShippedLocales = getShippedLocales(product, appName, oldVersion,
                                          oldBuildNumber, sourceRepo, hg)
    shippedLocales = getShippedLocales(product, appName, version,
                                       buildNumber, sourceRepo, hg)
    for platform in previousCandidateIDs.keys():
        # One buildbot platform can map to several update platforms.
        update_platforms = buildbot2updatePlatforms(platform)
        oldVersionBuildID = getBuildID(platform, product, oldVersion,
                                       oldBuildNumber, server=stageServer)
        oldPlatformLocales = getPlatformLocales(oldShippedLocales,
                                                (platform, ))[platform]
        platformLocales = getPlatformLocales(shippedLocales,
                                             (platform, ))[platform]
        # Only locales shipped in BOTH versions get snippets.
        commonLocales = getCommonLocales(platformLocales, oldPlatformLocales)
        for chan in channels:
            baseSnippetDir = getSnippetDirname(oldBaseSnippetDir, chan)
            if not os.path.exists(baseSnippetDir):
                errs.append(
                    "Can't generate snippets for %s because %s doesn't exist"
                    % (chan, baseSnippetDir))
                continue
            for buildID in previousCandidateIDs[platform]:
                for locale in commonLocales:
                    for update_platform in update_platforms:
                        try:
                            # Read the old version's complete snippet; its
                            # content is reused verbatim for the new dirs.
                            oldFile = os.path.join(
                                baseSnippetDir, brandName, oldAppVersion,
                                update_platform, oldVersionBuildID, locale,
                                chan, 'complete.txt')
                            oldCompleteSnippet = open(oldFile).read()
                        except Exception, e:
                            errs.append("Error reading from %s\n%s" %
                                        (oldFile, e))
                            continue
                        newDir = os.path.join(baseSnippetDir, brandName,
                                              appVersion, update_platform,
                                              buildID, locale, chan)
                        try:
                            os.makedirs(newDir)
                            log.info("Creating snippets for %s" % newDir)
                            for f in snippets:
                                newFile = os.path.join(newDir, f)
                                log.info(" %s" % f)
                                writeSnippet(newFile, oldCompleteSnippet)
                        except OSError, e:
                            # Most likely the directory already exists.
                            errs.append("Error creating %s\n%s" %
                                        (newDir, e))
                        except Exception, e:
                            errs.append("Hit error creating %s\n%s" %
                                        (newFile, e))
        # Log locales present in the new version but missing from the old
        # one — no update path exists for them, so no snippets are made.
        for l in [l for l in platformLocales if l not in commonLocales]:
            log.debug(
                "WARNING: %s not in oldVersion for %s, did not generate snippets for it"
                % (l, platform))
completes_only_index = 0 for fromVersion in reversed(sorted(updatePaths, key=LooseVersion)): locales = updatePaths[fromVersion] from_ = pc["release"][fromVersion] appVersion = from_["extension-version"] build_id = from_["platforms"][ftp_platform] mar_channel_IDs = from_.get('mar-channel-ids') if not updater_platform: updater_platform = args.platform # Use new build targets for Windows, but only on compatible # versions (42+). See bug 1185456 for additional context. if args.platform not in ("win32", "win64") or \ LooseVersion(fromVersion) < LooseVersion("42.0"): update_platform = buildbot2updatePlatforms(args.platform)[0] else: update_platform = buildbot2updatePlatforms(args.platform)[1] path_ = makeReleaseRepackUrls(product_name, app_name, fromVersion, args.platform, locale='%locale%', signed=True, exclude_secondary=True).values()[0] release_dir = makeReleasesDir(stage_product_name, fromVersion, ftp_root='/') from_path = "%s%s" % (release_dir, path_) updater_package = "%s%s" % (release_dir,
def run(self, platform, buildID, productName, branch, appVersion, locale,
        hashFunction, extVersion, schemaVersion, isOSUpdate=None,
        **updateKwargs):
    """Submit one locale's nightly build to Balrog.

    Creates/updates the dated blob (keyed by buildID), then copies it to
    the "latest" blob.  Both steps retry quickly and often, since most
    failures come from losing a data_version race with another submitter.
    """
    assert schemaVersion in (3, 4), \
        'Unhandled schema version %s' % schemaVersion

    targets = buildbot2updatePlatforms(platform)
    build_target = targets[0]
    # Extra update platforms become aliases of the primary target.
    alias = targets[1:] if len(targets) > 1 else None

    data = {
        'buildID': buildID,
        'appVersion': appVersion,
        'platformVersion': extVersion,
        'displayVersion': appVersion,
    }
    if isOSUpdate:
        data['isOSUpdate'] = isOSUpdate
    data.update(self._get_update_data(productName, branch, **updateKwargs))

    # bug 1366034: support old-id builds.  Like 1055305, this is a hack
    # to support two builds with the same build target that require
    # different release blobs and rules.
    if 'old-id' in platform:
        build_type = 'old-id-%s' % self.build_type
    else:
        build_type = self.build_type

    name = get_nightly_blob_name(productName, branch, build_type, buildID,
                                 self.dummy)
    api = SingleLocale(name=name, build_target=build_target, locale=locale,
                       auth=self.auth, api_root=self.api_root)

    # "Atomic" operation, retried as a whole on failure.
    def update_dated():
        current_data, data_version = api.get_data()
        if current_data:
            same_completes = \
                current_data.get("completes") == data.get("completes")
            partials_covered = all(
                p in current_data.get("partials", [])
                for p in data.get("partials", []))
            # Nothing to do when the complete MAR matches and our
            # partials are already a subset of the existing blob.
            if same_completes and partials_covered:
                log.warn("Dated data didn't change, skipping update")
                return
        merged = merge_partial_updates(current_data, data)
        # data_version is passed explicitly so the server can detect
        # a concurrent modification.
        api.update_build(product=productName,
                         hashFunction=hashFunction,
                         buildData=json.dumps(merged),
                         alias=json.dumps(alias),
                         schemaVersion=schemaVersion,
                         data_version=data_version)

    # Most retries are caused by losing a data race.  In these cases,
    # there's no point in waiting a long time to retry, so we reduce
    # sleeptime and increase the number of attempts instead.
    retry(update_dated, sleeptime=2, max_sleeptime=2, attempts=10)

    latest = SingleLocale(api_root=self.api_root, auth=self.auth,
                          name=get_nightly_blob_name(
                              productName, branch, build_type, 'latest',
                              self.dummy),
                          build_target=build_target, locale=locale)

    def update_latest():
        # Copy the dated blob over wholesale, using the target
        # release's data version.
        latest_data, latest_data_version = latest.get_data()
        source_data, _ = api.get_data()
        if source_data == latest_data:
            log.warn("Latest data didn't change, skipping update")
            return
        latest.update_build(product=productName,
                            hashFunction=hashFunction,
                            buildData=json.dumps(source_data),
                            alias=json.dumps(alias),
                            schemaVersion=schemaVersion,
                            data_version=latest_data_version)

    retry(update_latest, sleeptime=2, max_sleeptime=2, attempts=10)
# --- Script setup: parse CLI options and load the release config. ---
# NOTE(review): this fragment depends on `parser`, `log`, `validate`,
# `mercurial`, `update`, and FTP_SERVER_TEMPLATE defined earlier in the
# file (not visible here).
options, args = parser.parse_args()
# These options have no usable defaults; abort early if any is missing.
required_options = ['config', 'platform', 'release_config',
                    'buildbot_configs', 'release_tag']
options_dict = vars(options)
for opt in required_options:
    if not options_dict[opt]:
        parser.error("Required option %s not present" % opt)
if options.verbose:
    log.setLevel(logging.DEBUG)
else:
    log.setLevel(logging.INFO)
# Map the buildbot platform name to its update/FTP equivalents.
update_platform = buildbot2updatePlatforms(options.platform)[0]
ftp_platform = buildbot2ftp(options.platform)
full_check_locales = options.full_check_locales
# Variables from release config
# Clone/update buildbot-configs at the release tag, then validate and
# read the release configuration from it.
retry(mercurial, args=(options.buildbot_configs, 'buildbot-configs'))
update('buildbot-configs', revision=options.release_tag)
release_config = validate(options)
product_name = release_config['productName']
staging_server = FTP_SERVER_TEMPLATE % release_config['stagingServer']
aus_server_url = release_config['ausServerUrl']
build_number = release_config['buildNumber']
# Older releases may live on a different staging server; fall back to
# the current one when the config doesn't name a previous server.
previous_releases_staging_server = FTP_SERVER_TEMPLATE % \
    release_config.get('previousReleasesStagingServer',
                       release_config['stagingServer'])