def generate_data(self, appVersion, productName, version, buildNumber,
                  updateChannels, stagingServer, bouncerServer,
                  enUSPlatforms, schemaVersion, **updateKwargs):
    assert schemaVersion in (2, 3), 'Unhandled schema version %s' % schemaVersion
    self.name = get_release_blob_name(productName, version, buildNumber)
    data = {
        'name': self.name,
        'detailsUrl': getProductDetails(productName.lower(), appVersion),
        'platforms': {},
        'fileUrls': {},
        'ftpFilenames': {},
        'bouncerProducts': {},
    }
    data['appVersion'] = appVersion
    data['platformVersion'] = appVersion
    data['displayVersion'] = getPrettyVersion(version)

    # XXX: This is a hack for bug 1045583. We should remove it, and always
    # use "candidates" for nightlyDir after the switch to Balrog is complete.
    if productName.lower() == "mobile":
        nightlyDir = "candidates"
    else:
        nightlyDir = "nightly"

    for channel in updateChannels:
        if channel in ('betatest', 'esrtest') or "localtest" in channel:
            dir_ = makeCandidatesDir(productName.lower(), version,
                                     buildNumber, server=stagingServer,
                                     protocol='http', nightlyDir=nightlyDir)
            data['fileUrls'][channel] = '%supdate/%%OS_FTP%%/%%LOCALE%%/%%FILENAME%%' % dir_
        else:
            url = 'http://%s/?product=%%PRODUCT%%&os=%%OS_BOUNCER%%&lang=%%LOCALE%%' % bouncerServer
            data['fileUrls'][channel] = url

        # XXX: quick hack for bug 1021026. We should be using Bouncer for this
        # after we implement better solution talked about in comments 2 through 4
        if channel == 'release':
            dir_ = makeCandidatesDir(productName.lower(), version,
                                     buildNumber,
                                     server='download.cdn.mozilla.net',
                                     protocol='http', nightlyDir=nightlyDir)
            url = '%supdate/%%OS_FTP%%/%%LOCALE%%/%%FILENAME%%' % dir_
            data['fileUrls']['beta'] = url
            data['fileUrls']['beta-cdntest'] = url

    data.update(self._get_update_data(productName, version, **updateKwargs))

    for platform in enUSPlatforms:
        updatePlatforms = buildbot2updatePlatforms(platform)
        bouncerPlatform = buildbot2bouncer(platform)
        ftpPlatform = buildbot2ftp(platform)
        data['platforms'][updatePlatforms[0]] = {
            'OS_BOUNCER': bouncerPlatform,
            'OS_FTP': ftpPlatform
        }
        for aliasedPlatform in updatePlatforms[1:]:
            data['platforms'][aliasedPlatform] = {
                'alias': updatePlatforms[0]
            }

    return data
def _getFileUrls(self, productName, version, buildNumber, updateChannels,
                 stagingServer, bouncerServer, partialUpdates):
    data = {"fileUrls": {}}

    # TODO: comment about *
    uniqueChannels = ["*"]
    for c in updateChannels:
        # Channels that aren't localtest all use the same URLs, which are
        # added in the catch all. To avoid duplication, we simply don't
        # add them explicitly.
        if c in ("betatest", "esrtest") or "localtest" in c:
            uniqueChannels.append(c)

    for channel in uniqueChannels:
        data["fileUrls"][channel] = {
            "completes": {}
        }
        if channel in ('betatest', 'esrtest') or "localtest" in channel:
            dir_ = makeCandidatesDir(productName.lower(), version,
                                     buildNumber, server=stagingServer,
                                     protocol='http')
            filename = "%s-%s.complete.mar" % (productName.lower(), version)
            data["fileUrls"][channel]["completes"]["*"] = \
                "%supdate/%%OS_FTP%%/%%LOCALE%%/%s" % (dir_, filename)
        else:
            if productName.lower() == "fennec":
                bouncerProduct = "%s-%s" % (productName.lower(), version)
            else:
                bouncerProduct = "%s-%s-complete" % (productName.lower(), version)
            url = 'http://%s/?product=%s&os=%%OS_BOUNCER%%&lang=%%LOCALE%%' % (bouncerServer, bouncerProduct)
            data["fileUrls"][channel]["completes"]["*"] = url

    if not partialUpdates:
        return data

    for channel in uniqueChannels:
        data["fileUrls"][channel]["partials"] = {}
        for previousVersion, previousInfo in partialUpdates.iteritems():
            from_ = get_release_blob_name(productName, previousVersion,
                                          previousInfo["buildNumber"],
                                          self.dummy)
            if channel in ('betatest', 'esrtest') or "localtest" in channel:
                dir_ = makeCandidatesDir(productName.lower(), version,
                                         buildNumber, server=stagingServer,
                                         protocol='http')
                filename = "%s-%s-%s.partial.mar" % (productName.lower(),
                                                     previousVersion, version)
                data["fileUrls"][channel]["partials"][from_] = \
                    "%supdate/%%OS_FTP%%/%%LOCALE%%/%s" % (dir_, filename)
            else:
                bouncerProduct = "%s-%s-partial-%s" % (productName.lower(),
                                                       version, previousVersion)
                url = 'http://%s/?product=%s&os=%%OS_BOUNCER%%&lang=%%LOCALE%%' % (bouncerServer, bouncerProduct)
                data["fileUrls"][channel]["partials"][from_] = url

    return data
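# For reference only: a hand-written sketch (not produced by the repo code) of
# the nested structure _getFileUrls() above returns. All values are made up: a
# hypothetical "foo" 99.0 build1 release with one partial from 98.0 build2,
# invented server names, and an assumed blob-name format for the partial key.
# The "*" entry is the catch-all most channels share; localtest-style channels
# get explicit candidates-directory URLs instead of Bouncer ones.
expected = {
    "fileUrls": {
        "*": {
            "completes": {
                "*": "http://download.example.com/?product=foo-99.0-complete"
                     "&os=%OS_BOUNCER%&lang=%LOCALE%",
            },
            "partials": {
                "Foo-98.0-build2": "http://download.example.com/"
                                   "?product=foo-99.0-partial-98.0"
                                   "&os=%OS_BOUNCER%&lang=%LOCALE%",
            },
        },
        "release-localtest": {
            "completes": {
                "*": "http://stage.example.com/pub/foo/candidates/"
                     "99.0-candidates/build1/update/%OS_FTP%/%LOCALE%/"
                     "foo-99.0.complete.mar",
            },
            "partials": {
                "Foo-98.0-build2": "http://stage.example.com/pub/foo/"
                                   "candidates/99.0-candidates/build1/update/"
                                   "%OS_FTP%/%LOCALE%/foo-98.0-99.0.partial.mar",
            },
        },
    },
}
assert sorted(expected["fileUrls"].keys()) == ["*", "release-localtest"]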
def doSyncPartnerBundles(productName, version, buildNumber, stageServer,
                         stageUsername, stageSshKey):
    candidates_dir = makeCandidatesDir(productName, version, buildNumber)
    for dest, src in PARTNER_BUNDLE_MAPPINGS.iteritems():
        full_dest = path.join(PARTNER_BUNDLE_DIR, dest)
        full_src = path.join(candidates_dir, 'partner-repacks', src)
        full_src = full_src % {'version': version}
        run_remote_cmd(['cp', '-f', full_src, full_dest],
                       server=stageServer, username=stageUsername,
                       sshKey=stageSshKey)

    # And fix the permissions...
    run_remote_cmd(['find', PARTNER_BUNDLE_DIR, '-type', 'd',
                    '-exec', 'chmod', '775', '{}', '\\;'],
                   server=stageServer, username=stageUsername,
                   sshKey=stageSshKey)
    run_remote_cmd(['find', PARTNER_BUNDLE_DIR, '-name', '"*.exe"',
                    '-exec', 'chmod', '775', '{}', '\\;'],
                   server=stageServer, username=stageUsername,
                   sshKey=stageSshKey)
    run_remote_cmd(['find', PARTNER_BUNDLE_DIR, '-name', '"*.dmg"',
                    '-exec', 'chmod', '775', '{}', '\\;'],
                   server=stageServer, username=stageUsername,
                   sshKey=stageSshKey)
def downloadRelease(productName, version, buildNumber, stageServer,
                    stageUsername=None, stageSshKey=None,
                    stageUrlPrefix='http://stage.mozilla.org'):
    candidatesDir = makeCandidatesDir(productName, version, buildNumber).rstrip('/')
    releasesDir = makeReleasesDir(productName, version).rstrip('/')

    commands = [
        'rm -rf %s' % candidatesDir,
        'rm -rf %s' % releasesDir,
        'mkdir -p %s' % candidatesDir,
        'cd %(candidatesDir)s && \
         wget -nv -r -np -nH --cut-dirs=6 -R index.html* \
         -X %(candidatesDir)s/unsigned \
         -X %(candidatesDir)s/contrib* \
         -X %(candidatesDir)s/partner-repacks \
         -X %(candidatesDir)s/win32-EUballot \
         %(stageUrlPrefix)s%(candidatesDir)s/' %
        (dict(candidatesDir=candidatesDir, stageUrlPrefix=stageUrlPrefix)),
        'ln -s %s %s' % (candidatesDir, releasesDir),
    ]

    for command in commands:
        run_remote_cmd(command, server=stageServer, username=stageUsername,
                       sshKey=stageSshKey)
def checkStagePermissions(productName, version, buildNumber, stageServer,
                          stageUsername=None, stageSshKey=None):
    # The following commands should return 0 lines output and exit code 0
    tests = [
        "find %s ! -user ffxbld ! -path '*/contrib*'",
        "find %s ! -group firefox ! -path '*/contrib*'",
        "find %s -type f ! -perm 644",
        "find %s -mindepth 1 -type d ! -perm 755 ! -path '*/contrib*' ! -path '*/partner-repacks*'",
        "find %s -maxdepth 1 -type d ! -perm 2775 -path '*/contrib*'",
    ]
    candidates_dir = makeCandidatesDir(productName, version, buildNumber)

    errors = False
    for test_template in tests:
        test = test_template % candidates_dir
        cmd = 'test "0" = "$(%s | wc -l)"' % test
        try:
            run_remote_cmd(cmd, server=stageServer, username=stageUsername,
                           sshKey=stageSshKey)
        except CalledProcessError:
            errors = True
            print 'Error while running: %s' % test

    if errors:
        raise Exception("Stage permission check failed")
def pushToMirrors(productName, version, buildNumber, stageServer,
                  stageUsername=None, stageSshKey=None, excludes=None,
                  extra_excludes=None, dryRun=False):
    """ excludes overrides DEFAULT_RSYNC_EXCLUDES, extra_excludes will be
    appended to DEFAULT_RSYNC_EXCLUDES. """
    source_dir = makeCandidatesDir(productName, version, buildNumber)
    target_dir = makeReleasesDir(productName, version)

    if not excludes:
        excludes = DEFAULT_RSYNC_EXCLUDES
    if extra_excludes:
        excludes += extra_excludes

    # fail/warn if target directory exists depending on dry run mode
    try:
        run_remote_cmd(['test', '!', '-d', target_dir], server=stageServer,
                       username=stageUsername, sshKey=stageSshKey)
    except CalledProcessError:
        if not dryRun:
            raise
        else:
            log.warning('WARN: target directory %s exists', target_dir)

    if not dryRun:
        run_remote_cmd(['mkdir', '-p', target_dir], server=stageServer,
                       username=stageUsername, sshKey=stageSshKey)
        run_remote_cmd(['chmod', 'u=rwx,g=rxs,o=rx', target_dir],
                       server=stageServer, username=stageUsername,
                       sshKey=stageSshKey)
    rsync_cmd = ['rsync', '-av']
    if dryRun:
        rsync_cmd.append('-n')
    run_remote_cmd(rsync_cmd + excludes + [source_dir, target_dir],
                   server=stageServer, username=stageUsername,
                   sshKey=stageSshKey)
def makeIndexFiles(productName, version, buildNumber, stageServer,
                   stageUsername, stageSshKey):
    candidates_dir = makeCandidatesDir(productName, version, buildNumber)
    indexFile = NamedTemporaryFile()
    indexFile.write(indexFileTemplate % {"version": version})
    indexFile.flush()

    scp(indexFile.name,
        "%s@%s:%s/index.html" % (stageUsername, stageServer, candidates_dir),
        sshKey=stageSshKey)
    run_remote_cmd(
        ["chmod", "644", "%s/index.html" % candidates_dir],
        server=stageServer, username=stageUsername, sshKey=stageSshKey,
    )
    run_remote_cmd(
        ["find", candidates_dir, "-mindepth", "1", "-type", "d",
         "-not", "-regex", ".*contrib.*", "-exec", "cp", "-pv",
         "%s/index.html" % candidates_dir, "{}", "\\;"],
        server=stageServer, username=stageUsername, sshKey=stageSshKey,
    )
def doSyncPartnerBundles(productName, version, buildNumber, stageServer,
                         stageUsername, stageSshKey):
    candidates_dir = makeCandidatesDir(productName, version, buildNumber)
    for dest, src in PARTNER_BUNDLE_MAPPINGS.iteritems():
        full_dest = path.join(PARTNER_BUNDLE_DIR, dest)
        full_src = path.join(candidates_dir, "partner-repacks", src)
        full_src = full_src % {"version": version}
        run_remote_cmd(
            ["cp", "-f", full_src, full_dest],
            server=stageServer, username=stageUsername, sshKey=stageSshKey
        )

    # And fix the permissions...
    run_remote_cmd(
        ["find", PARTNER_BUNDLE_DIR, "-type", "d",
         "-exec", "chmod", "775", "{}", "\\;"],
        server=stageServer, username=stageUsername, sshKey=stageSshKey,
    )
    run_remote_cmd(
        ["find", PARTNER_BUNDLE_DIR, "-name", '"*.exe"',
         "-exec", "chmod", "775", "{}", "\\;"],
        server=stageServer, username=stageUsername, sshKey=stageSshKey,
    )
    run_remote_cmd(
        ["find", PARTNER_BUNDLE_DIR, "-name", '"*.dmg"',
         "-exec", "chmod", "775", "{}", "\\;"],
        server=stageServer, username=stageUsername, sshKey=stageSshKey,
    )
def _getFileUrls(self, productName, version, buildNumber, updateChannels,
                 stagingServer, bouncerServer, partialUpdates):
    data = {"fileUrls": {}}

    # XXX: This is a hack for bug 1045583. We should remove it, and always
    # use "candidates" for nightlyDir after the switch to Balrog is complete.
    if productName.lower() == "mobile":
        nightlyDir = "candidates"
    else:
        nightlyDir = "nightly"

    for channel in updateChannels:
        if channel in ('betatest', 'esrtest') or "localtest" in channel:
            dir_ = makeCandidatesDir(productName.lower(), version,
                                     buildNumber, server=stagingServer,
                                     protocol='http', nightlyDir=nightlyDir)
            data["fileUrls"][channel] = '%supdate/%%OS_FTP%%/%%LOCALE%%/%%FILENAME%%' % dir_
        else:
            url = 'http://%s/?product=%%PRODUCT%%&os=%%OS_BOUNCER%%&lang=%%LOCALE%%' % bouncerServer
            data["fileUrls"][channel] = url

    return data
def downloadReleaseBuilds(stageServer, productName, brandName, version,
                          buildNumber, platform, candidatesDir=None,
                          signed=False, usePymake=False):
    if candidatesDir is None:
        candidatesDir = makeCandidatesDir(productName, version, buildNumber,
                                          protocol='http', server=stageServer)
    files = makeReleaseRepackUrls(productName, brandName, version, platform,
                                  signed=signed)

    env = {}
    for fileName, remoteFile in files.iteritems():
        url = '/'.join([p.strip('/')
                        for p in [candidatesDir, urllib.quote(remoteFile)]])
        log.info("Downloading %s to %s", url, fileName)
        urlretrieve(url, fileName)
        if fileName.endswith('exe'):
            if usePymake:
                env['WIN32_INSTALLER_IN'] = msys2windows(path.join(os.getcwd(), fileName))
            else:
                env['WIN32_INSTALLER_IN'] = windows2msys(path.join(os.getcwd(), fileName))
        else:
            if platform.startswith('win') and not usePymake:
                env['ZIP_IN'] = windows2msys(path.join(os.getcwd(), fileName))
            else:
                env['ZIP_IN'] = msys2windows(path.join(os.getcwd(), fileName))

    return env
def downloadUpdate(stageServer, productName, version, buildNumber,
                   platform, locale, candidatesDir=None):
    if candidatesDir is None:
        candidatesDir = makeCandidatesDir(productName, version, buildNumber,
                                          protocol='http', server=stageServer)
    fileName = '%s-%s.complete.mar' % (productName, version)
    destFileName = '%s-%s.%s.complete.mar' % (productName, version, locale)
    platformDir = buildbot2ftp(platform)
    url = '/'.join([p.strip('/') for p in [
        candidatesDir, 'update', platformDir, locale, fileName]])
    log.info("Downloading %s to %s", url, destFileName)
    remote_f = urlopen(url)
    local_f = open(destFileName, "wb")
    local_f.write(remote_f.read())
    local_f.close()
    return destFileName
def downloadUpdate(stageServer, productName, version, buildNumber,
                   platform, locale, candidatesDir=None):
    if candidatesDir is None:
        candidatesDir = makeCandidatesDir(productName, version, buildNumber,
                                          protocol='http', server=stageServer)
    fileName = '%s-%s.complete.mar' % (productName, version)
    destFileName = '%s-%s.%s.complete.mar' % (productName, version, locale)
    platformDir = buildbot2ftp(platform)
    url = '/'.join([p.strip('/') for p in [
        candidatesDir, 'update', platformDir, locale, fileName]])
    log.info("Downloading %s to %s", url, destFileName)
    remote_f = urlopen(url, timeout=20)
    local_f = open(destFileName, "wb")
    local_f.write(remote_f.read())
    local_f.close()
    expected_size = int(remote_f.info()['Content-Length'])
    actual_size = os.path.getsize(destFileName)
    if expected_size != actual_size:
        log.info("File is truncated, got %s of %s bytes" %
                 (actual_size, expected_size))
        raise HTTPError
    return destFileName
def test_remote(self):
    expected = "http://foo.bar/pub/bbb/candidates/1.0-candidates/build5/"
    got = makeCandidatesDir('bbb', '1.0', 5, protocol="http",
                            server='foo.bar')
    self.assertEquals(expected, got)
def pushToMirrors(productName, version, buildNumber, stageServer,
                  stageUsername=None, stageSshKey=None, excludes=None,
                  extra_excludes=None, dryRun=False, overwrite=False):
    """ excludes overrides DEFAULT_RSYNC_EXCLUDES, extra_excludes will be
    appended to DEFAULT_RSYNC_EXCLUDES. """
    source_dir = makeCandidatesDir(productName, version, buildNumber)
    target_dir = makeReleasesDir(productName, version)

    if not excludes:
        excludes = DEFAULT_RSYNC_EXCLUDES
    if extra_excludes:
        excludes += ['--exclude=%s' % ex for ex in extra_excludes]

    # fail/warn if target directory exists depending on dry run mode
    try:
        run_remote_cmd(['test', '!', '-d', target_dir], server=stageServer,
                       username=stageUsername, sshKey=stageSshKey)
    except CalledProcessError:
        if overwrite:
            log.info('target directory %s exists, but overwriting files '
                     'as requested' % target_dir)
        elif dryRun:
            log.warning('WARN: target directory %s exists', target_dir)
        else:
            raise

    if not dryRun:
        run_remote_cmd(['mkdir', '-p', target_dir], server=stageServer,
                       username=stageUsername, sshKey=stageSshKey)
        run_remote_cmd(['chmod', 'u=rwx,g=rxs,o=rx', target_dir],
                       server=stageServer, username=stageUsername,
                       sshKey=stageSshKey)
    rsync_cmd = ['rsync', '-av']
    if dryRun:
        rsync_cmd.append('-n')
    # use hardlinks
    rsync_cmd.append('--link-dest=%s' % source_dir)
    run_remote_cmd(rsync_cmd + excludes + [source_dir, target_dir],
                   server=stageServer, username=stageUsername,
                   sshKey=stageSshKey)
def downloadReleaseBuilds(stageServer, productName, brandName, version,
                          buildNumber, platform, candidatesDir=None,
                          signed=False, usePymake=False):
    if candidatesDir is None:
        candidatesDir = makeCandidatesDir(productName, version, buildNumber,
                                          protocol='http', server=stageServer)
    files = makeReleaseRepackUrls(productName, brandName, version, platform,
                                  signed=signed)

    env = {}
    for fileName, remoteFile in files.iteritems():
        url = '/'.join(
            [p.strip('/') for p in [candidatesDir, urllib.quote(remoteFile)]])
        log.info("Downloading %s to %s", url, fileName)
        for _ in retrier():
            with open(fileName, "wb") as f:
                try:
                    r = requests.get(url, stream=True, timeout=15)
                    r.raise_for_status()
                    for chunk in r.iter_content(chunk_size=5 * 1024**2):
                        f.write(chunk)
                    r.close()
                    break
                except (requests.HTTPError, requests.ConnectionError,
                        requests.Timeout):
                    log.exception("Caught exception downloading")
        if fileName.endswith('exe'):
            if usePymake:
                env['WIN32_INSTALLER_IN'] = msys2windows(
                    path.join(os.getcwd(), fileName))
            else:
                env['WIN32_INSTALLER_IN'] = windows2msys(
                    path.join(os.getcwd(), fileName))
        else:
            if platform.startswith('win') and not usePymake:
                env['ZIP_IN'] = windows2msys(path.join(os.getcwd(), fileName))
            else:
                env['ZIP_IN'] = msys2windows(path.join(os.getcwd(), fileName))

    return env
def runAntivirusCheck(productName, version, buildNumber, stageServer,
                      stageUsername=None, stageSshKey=None):
    candidates_dir = makeCandidatesDir(productName, version, buildNumber)
    cmd = VIRUS_SCAN_CMD + [candidates_dir]
    run_remote_cmd(cmd, server=stageServer, username=stageUsername,
                   sshKey=stageSshKey)
def createRepacks(sourceRepo, revision, l10nRepoDir, l10nBaseRepo,
                  mozconfigPath, objdir, makeDirs, locales, ftpProduct,
                  appName, version, appVersion, buildNumber, stageServer,
                  stageUsername, stageSshKey, compareLocalesRepo, merge,
                  platform, stage_platform, brand, mobileDirName):
    sourceRepoName = path.split(sourceRepo)[-1]
    nightlyDir = "candidates"
    localeSrcDir = path.join(sourceRepoName, objdir, mobileDirName, "locales")
    # Even on Windows we need to use "/" as a separator for this because
    # compare-locales doesn't work any other way
    l10nIni = "/".join([sourceRepoName, mobileDirName, "locales", "l10n.ini"])

    env = {
        "MOZ_OBJDIR": objdir,
        "MOZ_PKG_VERSION": version,
        "UPLOAD_HOST": stageServer,
        "UPLOAD_USER": stageUsername,
        "UPLOAD_SSH_KEY": stageSshKey,
        "UPLOAD_TO_TEMP": "1",
        # Android signing
        "JARSIGNER": os.path.join(os.getcwd(), "scripts", "release",
                                  "signing", "mozpass.py")
    }

    build.misc.cleanupObjdir(sourceRepoName, objdir, mobileDirName)
    retry(mercurial, args=(sourceRepo, sourceRepoName))
    update(sourceRepoName, revision=revision)
    l10nRepackPrep(sourceRepoName, objdir, mozconfigPath, l10nRepoDir,
                   makeDirs, localeSrcDir, env)
    fullCandidatesDir = makeCandidatesDir(appName, version, buildNumber,
                                          protocol='http', server=stageServer,
                                          nightlyDir=nightlyDir)
    input_env = retry(downloadReleaseBuilds,
                      args=(stageServer, ftpProduct, brand, version,
                            buildNumber, stage_platform, fullCandidatesDir))
    env.update(input_env)
    print "env pre-locale: %s" % str(env)

    failed = []
    for l in locales:
        try:
            # adding locale into builddir
            env["POST_UPLOAD_CMD"] = postUploadCmdPrefix(
                to_mobile_candidates=True,
                product=appName,
                version=version,
                builddir='%s/%s' % (stage_platform, l),
                buildNumber=buildNumber,
                nightly_dir=nightlyDir,)
            print "env post-locale: %s" % str(env)
            repackLocale(str(l), l10nRepoDir, l10nBaseRepo, revision,
                         localeSrcDir, l10nIni, compareLocalesRepo, env, merge)
        except Exception, e:
            failed.append((l, format_exc()))
def generate_data(self, appVersion, productName, version, buildNumber,
                  partialUpdates, updateChannels, stagingServer,
                  bouncerServer, enUSPlatforms, schemaVersion):
    # TODO: Multiple partial support. Probably as a part of bug 797033.
    previousVersion = str(max(StrictVersion(v) for v in partialUpdates))
    self.name = get_release_blob_name(productName, version, buildNumber)
    data = {
        'name': self.name,
        'detailsUrl': getProductDetails(productName.lower(), appVersion),
        'platforms': {},
        'fileUrls': {},
        'ftpFilenames': {},
        'bouncerProducts': {},
    }

    assert schemaVersion in (1, 2), 'Unhandled schema version %s' % schemaVersion
    if schemaVersion == 1:
        data['appv'] = appVersion
        data['extv'] = appVersion
    elif schemaVersion == 2:
        data['appVersion'] = appVersion
        data['platformVersion'] = appVersion
        data['displayVersion'] = getPrettyVersion(version)

    for channel in updateChannels:
        if channel in ('betatest', 'esrtest'):
            dir_ = makeCandidatesDir(productName.lower(), version,
                                     buildNumber, server=stagingServer,
                                     protocol='http')
            data['fileUrls'][channel] = '%supdate/%%OS_FTP%%/%%LOCALE%%/%%FILENAME%%' % dir_
        else:
            url = 'http://%s/?product=%%PRODUCT%%&os=%%OS_BOUNCER%%&lang=%%LOCALE%%' % bouncerServer
            data['fileUrls'][channel] = url

    data['ftpFilenames']['complete'] = '%s-%s.complete.mar' % (productName.lower(), version)
    data['ftpFilenames']['partial'] = '%s-%s-%s.partial.mar' % (productName.lower(), previousVersion, version)
    data['bouncerProducts']['complete'] = '%s-%s-Complete' % (productName.capitalize(), version)
    data['bouncerProducts']['partial'] = '%s-%s-Partial-%s' % (productName.capitalize(), version, previousVersion)

    for platform in enUSPlatforms:
        updatePlatforms = buildbot2updatePlatforms(platform)
        bouncerPlatform = buildbot2bouncer(platform)
        ftpPlatform = buildbot2ftp(platform)
        data['platforms'][updatePlatforms[0]] = {
            'OS_BOUNCER': bouncerPlatform,
            'OS_FTP': ftpPlatform
        }
        for aliasedPlatform in updatePlatforms[1:]:
            data['platforms'][aliasedPlatform] = {
                'alias': updatePlatforms[0]
            }

    return data
def _getFileUrls(self, productName, version, buildNumber, updateChannels,
                 ftpServer, bouncerServer, partialUpdates):
    data = {"fileUrls": {}}

    for channel in updateChannels:
        if channel in ('betatest', 'esrtest') or "localtest" in channel:
            dir_ = makeCandidatesDir(productName.lower(), version,
                                     buildNumber, server=ftpServer,
                                     protocol='http')
            data["fileUrls"][channel] = '%supdate/%%OS_FTP%%/%%LOCALE%%/%%FILENAME%%' % dir_
        else:
            url = 'http://%s/?product=%%PRODUCT%%&os=%%OS_BOUNCER%%&lang=%%LOCALE%%' % bouncerServer
            data["fileUrls"][channel] = url

    return data
def getBuildID(platform, product, version, buildNumber,
               nightlyDir="nightly", server="stage.mozilla.org"):
    infoTxt = (
        makeCandidatesDir(product, version, buildNumber, nightlyDir,
                          protocol="http", server=server)
        + "%s_info.txt" % platform
    )
    try:
        buildInfo = urlopen(infoTxt).read()
    except:
        log.error("Failed to retrieve %s" % infoTxt)
        raise
    for line in buildInfo.splitlines():
        key, value = line.rstrip().split("=", 1)
        if key == "buildID":
            return value
def _getFileUrls(self, productName, version, buildNumber, updateChannels,
                 ftpServer, bouncerServer, partialUpdates):
    data = {"fileUrls": {}}

    for channel in updateChannels:
        if channel in ('betatest', 'esrtest') or "localtest" in channel:
            dir_ = makeCandidatesDir(productName.lower(), version,
                                     buildNumber, server=ftpServer,
                                     protocol='http')
            data["fileUrls"][channel] = _FTP % dir_
        else:
            url = _URL % bouncerServer
            data["fileUrls"][channel] = url

    return data
def getBuildID(platform, product, version, buildNumber, nightlyDir='nightly',
               server='stage.mozilla.org'):
    infoTxt = makeCandidatesDir(product, version, buildNumber, nightlyDir,
                                protocol='http', server=server) + \
        '%s_info.txt' % platform
    try:
        buildInfo = urlopen(infoTxt).read()
    except:
        log.error("Failed to retrieve %s" % infoTxt)
        raise
    for line in buildInfo.splitlines():
        key, value = line.rstrip().split('=', 1)
        if key == 'buildID':
            return value
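# The <platform>_info.txt files fetched by getBuildID() are plain key=value
# listings. A self-contained sketch of the same parsing loop, run against an
# inline sample; the buildID and locale values here are made up.
sample_info_txt = "buildID=20990101000000\nlocale=en-US\n"

build_id = None
for sample_line in sample_info_txt.splitlines():
    sample_key, sample_value = sample_line.rstrip().split("=", 1)
    if sample_key == "buildID":
        build_id = sample_value
assert build_id == "20990101000000"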
def makeIndexFiles(productName, version, buildNumber, stageServer,
                   stageUsername, stageSshKey):
    candidates_dir = makeCandidatesDir(productName, version, buildNumber)
    indexFile = NamedTemporaryFile()
    indexFile.write(indexFileTemplate % {'version': version})
    indexFile.flush()

    scp(indexFile.name,
        '%s@%s:%s/index.html' % (stageUsername, stageServer, candidates_dir),
        sshKey=stageSshKey)
    run_remote_cmd(['chmod', '644', '%s/index.html' % candidates_dir],
                   server=stageServer, username=stageUsername,
                   sshKey=stageSshKey)
    run_remote_cmd(
        ['find', candidates_dir, '-mindepth', '1', '-type', 'd',
         '-not', '-regex', '.*contrib.*', '-exec', 'cp', '-pv',
         '%s/index.html' % candidates_dir, '{}', '\\;'],
        server=stageServer, username=stageUsername, sshKey=stageSshKey)
def pushToMirrors(productName, version, buildNumber, stageServer,
                  stageUsername=None, stageSshKey=None, excludes=None,
                  extra_excludes=None, dryRun=False, overwrite=False):
    """ excludes overrides DEFAULT_RSYNC_EXCLUDES, extra_excludes will be
    appended to DEFAULT_RSYNC_EXCLUDES. """
    source_dir = makeCandidatesDir(productName, version, buildNumber)
    target_dir = makeReleasesDir(productName, version)

    if not excludes:
        excludes = DEFAULT_RSYNC_EXCLUDES
    if extra_excludes:
        excludes += ["--exclude=%s" % ex for ex in extra_excludes]

    # fail/warn if target directory exists depending on dry run mode
    try:
        run_remote_cmd(["test", "!", "-d", target_dir], server=stageServer,
                       username=stageUsername, sshKey=stageSshKey)
    except CalledProcessError:
        if overwrite:
            log.info("target directory %s exists, but overwriting files "
                     "as requested" % target_dir)
        elif dryRun:
            log.warning("WARN: target directory %s exists", target_dir)
        else:
            raise

    if not dryRun:
        run_remote_cmd(["mkdir", "-p", target_dir], server=stageServer,
                       username=stageUsername, sshKey=stageSshKey)
        run_remote_cmd(["chmod", "u=rwx,g=rxs,o=rx", target_dir],
                       server=stageServer, username=stageUsername,
                       sshKey=stageSshKey)
    rsync_cmd = ["rsync", "-av"]
    if dryRun:
        rsync_cmd.append("-n")
    # use hardlinks
    rsync_cmd.append("--link-dest=%s" % source_dir)
    run_remote_cmd(rsync_cmd + excludes + [source_dir, target_dir],
                   server=stageServer, username=stageUsername,
                   sshKey=stageSshKey)
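# A rough, standalone illustration (not taken from the repo) of the rsync
# command the newer pushToMirrors variants assemble. The paths and excludes
# below are placeholders standing in for DEFAULT_RSYNC_EXCLUDES and the real
# candidates/releases directories. --link-dest makes the releases copy
# hardlink back to the candidates files, and -n turns the run into a dry run.
example_excludes = ["--exclude=*.checksums", "--exclude=partner-repacks"]
example_source_dir = "/pub/foo/candidates/99.0-candidates/build1/"
example_target_dir = "/pub/foo/releases/99.0/"
example_dry_run = True

example_rsync_cmd = ["rsync", "-av"]
if example_dry_run:
    example_rsync_cmd.append("-n")
example_rsync_cmd.append("--link-dest=%s" % example_source_dir)
example_full_cmd = example_rsync_cmd + example_excludes + \
    [example_source_dir, example_target_dir]
assert example_full_cmd[:4] == ["rsync", "-av", "-n",
                                "--link-dest=%s" % example_source_dir]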
def generate_data(self, appVersion, productName, version, buildNumber,
                  updateChannels, stagingServer, bouncerServer,
                  enUSPlatforms, schemaVersion, **updateKwargs):
    assert schemaVersion in (2, 3), 'Unhandled schema version %s' % schemaVersion
    self.name = get_release_blob_name(productName, version, buildNumber)
    data = {
        'name': self.name,
        'detailsUrl': getProductDetails(productName.lower(), appVersion),
        'platforms': {},
        'fileUrls': {},
        'ftpFilenames': {},
        'bouncerProducts': {},
    }
    data['appVersion'] = appVersion
    data['platformVersion'] = appVersion
    data['displayVersion'] = getPrettyVersion(version)

    for channel in updateChannels:
        if channel in ('betatest', 'esrtest'):
            dir_ = makeCandidatesDir(productName.lower(), version,
                                     buildNumber, server=stagingServer,
                                     protocol='http')
            data['fileUrls'][channel] = '%supdate/%%OS_FTP%%/%%LOCALE%%/%%FILENAME%%' % dir_
        else:
            url = 'http://%s/?product=%%PRODUCT%%&os=%%OS_BOUNCER%%&lang=%%LOCALE%%' % bouncerServer
            data['fileUrls'][channel] = url

    data.update(self._get_update_data(productName, version, **updateKwargs))

    for platform in enUSPlatforms:
        updatePlatforms = buildbot2updatePlatforms(platform)
        bouncerPlatform = buildbot2bouncer(platform)
        ftpPlatform = buildbot2ftp(platform)
        data['platforms'][updatePlatforms[0]] = {
            'OS_BOUNCER': bouncerPlatform,
            'OS_FTP': ftpPlatform
        }
        for aliasedPlatform in updatePlatforms[1:]:
            data['platforms'][aliasedPlatform] = {
                'alias': updatePlatforms[0]
            }

    return data
def doSyncPartnerBundles(productName, version, buildNumber, stageServer,
                         stageUsername, stageSshKey):
    candidates_dir = makeCandidatesDir(productName, version, buildNumber)

    # Sync the Bing packages...
    bing_dir = '%s/partner-repacks/bing' % candidates_dir
    mac_bing_src = '%s/mac/en-US/Firefox\\ %s.dmg' % (bing_dir, version)
    mac_bing_dst = '%s/bing/mac/en-US/Firefox-Bing.dmg' % PARTNER_BUNDLE_DIR
    win32_bing_src = '%s/win32/en-US/Firefox\\ Setup\\ %s.exe' % (bing_dir, version)
    win32_bing_dst = '%s/bing/win32/en-US/Firefox-Bing\\ Setup.exe' % PARTNER_BUNDLE_DIR
    run_remote_cmd(['cp', '-f', mac_bing_src, mac_bing_dst],
                   server=stageServer, username=stageUsername,
                   sshKey=stageSshKey)
    run_remote_cmd(['cp', '-f', win32_bing_src, win32_bing_dst],
                   server=stageServer, username=stageUsername,
                   sshKey=stageSshKey)

    # Sync the MSN packages...
    run_remote_cmd(
        ['rsync', '-av', '%s/partner-repacks/msn*' % candidates_dir,
         PARTNER_BUNDLE_DIR],
        server=stageServer, username=stageUsername, sshKey=stageSshKey
    )

    # And fix the permissions...
    run_remote_cmd(
        ['find', PARTNER_BUNDLE_DIR, '-type', 'd',
         '-exec', 'chmod', '775', '{}', '\\;'],
        server=stageServer, username=stageUsername, sshKey=stageSshKey
    )
    run_remote_cmd(
        ['find', PARTNER_BUNDLE_DIR, '-name', '"*.exe"',
         '-exec', 'chmod', '775', '{}', '\\;'],
        server=stageServer, username=stageUsername, sshKey=stageSshKey
    )
    run_remote_cmd(
        ['find', PARTNER_BUNDLE_DIR, '-name', '"*.dmg"',
         '-exec', 'chmod', '775', '{}', '\\;'],
        server=stageServer, username=stageUsername, sshKey=stageSshKey
    )
def checkStagePermissions(productName, version, buildNumber, stageServer,
                          stageUsername, stageSshKey):
    # The following commands should return 0 lines output and exit code 0
    tests = [
        "find %%s ! -user %s ! -path '*/contrib*'" % stageUsername,
        "find %%s ! -group `id -g -n %s` ! -path '*/contrib*'" % stageUsername,
        "find %s -type f ! -perm 644",
        "find %s -mindepth 1 -type d ! -perm 755 ! -path '*/contrib*' ! -path '*/partner-repacks*'",
        "find %s -maxdepth 1 -type d ! -perm 2775 -path '*/contrib*'",
    ]
    candidates_dir = makeCandidatesDir(productName, version, buildNumber)

    errors = False
    for test_template in tests:
        test = test_template % candidates_dir
        cmd = 'test "0" = "$(%s | wc -l)"' % test
        try:
            run_remote_cmd(cmd, server=stageServer, username=stageUsername,
                           sshKey=stageSshKey)
        except CalledProcessError:
            errors = True

    if errors:
        raise Exception("Stage permission check failed")
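# A standalone sketch of how the permission checks above turn a find
# expression into a remote shell test: the command exits 0 only when the find
# prints zero lines. The directory and user name here are hypothetical.
example_candidates_dir = "/pub/foo/candidates/99.0-candidates/build1/"
example_test_template = "find %%s ! -user %s ! -path '*/contrib*'" % "ffxbld"
example_test = example_test_template % example_candidates_dir
example_cmd = 'test "0" = "$(%s | wc -l)"' % example_test
assert example_cmd.startswith('test "0" = "$(find ')
assert example_cmd.endswith(' | wc -l)"')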
def test_base(self):
    expected = "/pub/bbb/candidates/1.0-candidates/build2/"
    got = makeCandidatesDir('bbb', '1.0', 2)
    self.assertEquals(expected, got)
def repackLocale(locale, l10nRepoDir, l10nBaseRepo, revision, localeSrcDir,
                 l10nIni, compareLocalesRepo, env, absObjdir, merge=True,
                 productName=None, platform=None, version=None,
                 partialUpdates=None, buildNumber=None, stageServer=None,
                 mozillaDir=None, mozillaSrcDir=None):
    repo = "/".join([l10nBaseRepo, locale])
    localeDir = path.join(l10nRepoDir, locale)
    mercurial(repo, localeDir)
    update(localeDir, revision=revision)

    # It's a bad assumption to make, but the source dir is currently always
    # one level above the objdir.
    absSourceRepoPath = path.split(absObjdir)[0]
    use_pymake = env.get("USE_PYMAKE", False)
    make = getMakeCommand(use_pymake, absSourceRepoPath)

    env["AB_CD"] = locale
    env["LOCALE_MERGEDIR"] = path.abspath(path.join(localeSrcDir, "merged"))
    if sys.platform.startswith('win'):
        if use_pymake:
            env["LOCALE_MERGEDIR"] = msys2windows(env["LOCALE_MERGEDIR"])
        else:
            env["LOCALE_MERGEDIR"] = windows2msys(env["LOCALE_MERGEDIR"])
    if sys.platform.startswith('darwin'):
        env["MOZ_PKG_PLATFORM"] = "mac"
    UPLOAD_EXTRA_FILES = []
    if mozillaDir:
        nativeDistDir = path.normpath(path.abspath(
            path.join(localeSrcDir, '../../%s/dist' % mozillaDir)))
    else:
        nativeDistDir = path.normpath(path.abspath(
            path.join(localeSrcDir, '../../dist')))
    posixDistDir = windows2msys(nativeDistDir)
    mar = '%s/host/bin/mar' % posixDistDir
    mbsdiff = '%s/host/bin/mbsdiff' % posixDistDir
    if platform.startswith('win'):
        mar += ".exe"
        mbsdiff += ".exe"
    current = '%s/current' % posixDistDir
    previous = '%s/previous' % posixDistDir
    updateDir = 'update/%s/%s' % (buildbot2ftp(platform), locale)
    updateAbsDir = '%s/%s' % (posixDistDir, updateDir)
    current_mar = '%s/%s-%s.complete.mar' % (updateAbsDir, productName, version)
    unwrap_full_update = '../../../tools/update-packaging/unwrap_full_update.pl'
    make_incremental_update = '../../tools/update-packaging/make_incremental_update.sh'
    prevMarDir = '../../../../'
    if mozillaSrcDir:
        # Compensate for having the objdir or not.
        additionalParent = ''
        if mozillaDir:
            additionalParent = '../'
        unwrap_full_update = '../../../%s%s/tools/update-packaging/unwrap_full_update.pl' % (
            additionalParent, mozillaSrcDir)
        make_incremental_update = '../../%s%s/tools/update-packaging/make_incremental_update.sh' % (
            additionalParent, mozillaSrcDir)
        prevMarDir = '../../../../%s' % additionalParent
    env['MAR'] = mar
    env['MBSDIFF'] = mbsdiff

    log.info("Download mar tools")
    if stageServer:
        candidates_dir = makeCandidatesDir(productName, version, buildNumber,
                                           protocol="http", server=stageServer)
        if not path.isfile(msys2windows(mar)):
            marUrl = "%(c_dir)s/mar-tools/%(platform)s/%(mar)s" % \
                dict(c_dir=candidates_dir, platform=platform,
                     mar=path.basename(mar))
            run_cmd(['mkdir', '-p', path.dirname(mar)])
            log.info("Downloading %s to %s", marUrl, mar)
            urlretrieve(marUrl, msys2windows(mar))
            if not sys.platform.startswith('win'):
                run_cmd(['chmod', '755', mar])
        if not path.isfile(msys2windows(mbsdiff)):
            mbsdiffUrl = "%(c_dir)s/mar-tools/%(platform)s/%(mbsdiff)s" % \
                dict(c_dir=candidates_dir, platform=platform,
                     mbsdiff=path.basename(mbsdiff))
            run_cmd(['mkdir', '-p', path.dirname(mbsdiff)])
            log.info("Downloading %s to %s", mbsdiffUrl, mbsdiff)
            urlretrieve(mbsdiffUrl, msys2windows(mbsdiff))
            if not sys.platform.startswith('win'):
                run_cmd(['chmod', '755', mbsdiff])
    else:
        log.warning('stageServer not set. mar tools will *not* be downloaded.')

    compareLocales(compareLocalesRepo, locale, l10nRepoDir, localeSrcDir,
                   l10nIni, revision=revision, merge=merge)
    run_cmd(make + ["installers-%s" % locale], cwd=localeSrcDir, env=env)

    # Our Windows-native rm from bug 727551 requires Windows-style paths
    run_cmd(['rm', '-rf', msys2windows(current)])
    run_cmd(['mkdir', current])
    run_cmd(['perl', unwrap_full_update, current_mar],
            cwd=path.join(nativeDistDir, 'current'), env=env)
    for oldVersion in partialUpdates:
        prevMar = partialUpdates[oldVersion]['mar']
        if prevMar:
            partial_mar_name = '%s-%s-%s.partial.mar' % (productName,
                                                         oldVersion, version)
            partial_mar = '%s/%s' % (updateAbsDir, partial_mar_name)
            UPLOAD_EXTRA_FILES.append('%s/%s' % (updateDir, partial_mar_name))
            # Our Windows-native rm from bug 727551 requires Windows-style paths
            run_cmd(['rm', '-rf', msys2windows(previous)])
            run_cmd(['mkdir', previous])
            run_cmd(
                ['perl', unwrap_full_update, '%s/%s' % (prevMarDir, prevMar)],
                cwd=path.join(nativeDistDir, 'previous'), env=env)
            run_cmd(['bash', make_incremental_update, partial_mar, previous,
                     current], cwd=nativeDistDir, env=env)
            if os.environ.get('MOZ_SIGN_CMD'):
                run_cmd(['bash', '-c',
                         '%s -f mar -f gpg "%s"' % (os.environ['MOZ_SIGN_CMD'],
                                                    partial_mar)], env=env)
                UPLOAD_EXTRA_FILES.append(
                    '%s/%s.asc' % (updateDir, partial_mar_name))
        else:
            log.warning(
                "Skipping partial MAR creation for %s %s" % (oldVersion, locale))

    env['UPLOAD_EXTRA_FILES'] = ' '.join(UPLOAD_EXTRA_FILES)
    retry(run_cmd, args=(make + ["upload", "AB_CD=%s" % locale], ),
          kwargs={'cwd': localeSrcDir, 'env': env})

    # return the location of the checksums file, because consumers may want
    # some information about the files that were generated.
    # Some versions of make that we use (at least pymake) imply --print-directory
    # We need to turn it off to avoid getting extra output that mess up our
    # parsing of the checksum file path.
    curdir = os.getcwd()
    try:
        os.chdir(localeSrcDir)
        relative_checksums = get_output(
            make + ["--no-print-directory", "echo-variable-CHECKSUM_FILE",
                    "AB_CD=%s" % locale], env=env).strip("\"'\n")
        return path.normpath(path.join(localeSrcDir, relative_checksums))
    finally:
        os.chdir(curdir)
def repackLocale(locale, l10nRepoDir, l10nBaseRepo, revision, localeSrcDir,
                 l10nIni, compareLocalesRepo, env, merge=True,
                 productName=None, platform=None, version=None,
                 partialUpdates=None, buildNumber=None, stageServer=None):
    repo = "/".join([l10nBaseRepo, locale])
    localeDir = path.join(l10nRepoDir, locale)
    retry(mercurial, args=(repo, localeDir))
    update(localeDir, revision=revision)

    mozillaDir = ''
    if 'thunderbird' in productName:
        mozillaDir = 'mozilla/'

    # split on \\ since we care about the absSourceRepoPath for pymake, which
    # is windows.
    absSourceRepoPath = os.path.join(os.getcwd(), localeSrcDir.split("\\")[0])
    use_pymake = env.get("USE_PYMAKE", False)
    make = getMakeCommand(use_pymake, absSourceRepoPath)

    env["AB_CD"] = locale
    env["LOCALE_MERGEDIR"] = path.abspath(path.join(localeSrcDir, "merged"))
    if sys.platform.startswith('win'):
        if use_pymake:
            env["LOCALE_MERGEDIR"] = msys2windows(env["LOCALE_MERGEDIR"])
        else:
            env["LOCALE_MERGEDIR"] = windows2msys(env["LOCALE_MERGEDIR"])
    if sys.platform.startswith('darwin'):
        env["MOZ_PKG_PLATFORM"] = "mac"
    UPLOAD_EXTRA_FILES = []
    nativeDistDir = path.normpath(path.abspath(
        path.join(localeSrcDir, '../../%sdist' % mozillaDir)))
    posixDistDir = windows2msys(nativeDistDir)
    mar = '%s/host/bin/mar' % posixDistDir
    mbsdiff = '%s/host/bin/mbsdiff' % posixDistDir
    if platform.startswith('win'):
        mar += ".exe"
        mbsdiff += ".exe"
    current = '%s/current' % posixDistDir
    previous = '%s/previous' % posixDistDir
    updateDir = 'update/%s/%s' % (buildbot2ftp(platform), locale)
    updateAbsDir = '%s/%s' % (posixDistDir, updateDir)
    current_mar = '%s/%s-%s.complete.mar' % (updateAbsDir, productName, version)
    unwrap_full_update = '../../../tools/update-packaging/unwrap_full_update.pl'
    make_incremental_update = '../../tools/update-packaging/make_incremental_update.sh'
    prevMarDir = '../../../../'
    if mozillaDir:
        unwrap_full_update = '../../../../%stools/update-packaging/unwrap_full_update.pl' % mozillaDir
        make_incremental_update = '../../../%stools/update-packaging/make_incremental_update.sh' % mozillaDir
        prevMarDir = '../../../../../'
    env['MAR'] = mar
    env['MBSDIFF'] = mbsdiff

    log.info("Download mar tools")
    if stageServer:
        candidates_dir = makeCandidatesDir(productName, version, buildNumber,
                                           protocol="http", server=stageServer)
        if not path.isfile(msys2windows(mar)):
            marUrl = "%(c_dir)s/mar-tools/%(platform)s/%(mar)s" % \
                dict(c_dir=candidates_dir, platform=platform,
                     mar=path.basename(mar))
            run_cmd(['mkdir', '-p', path.dirname(mar)])
            log.info("Downloading %s to %s", marUrl, mar)
            urlretrieve(marUrl, msys2windows(mar))
            if not sys.platform.startswith('win'):
                run_cmd(['chmod', '755', mar])
        if not path.isfile(msys2windows(mbsdiff)):
            mbsdiffUrl = "%(c_dir)s/mar-tools/%(platform)s/%(mbsdiff)s" % \
                dict(c_dir=candidates_dir, platform=platform,
                     mbsdiff=path.basename(mbsdiff))
            run_cmd(['mkdir', '-p', path.dirname(mbsdiff)])
            log.info("Downloading %s to %s", mbsdiffUrl, mbsdiff)
            urlretrieve(mbsdiffUrl, msys2windows(mbsdiff))
            if not sys.platform.startswith('win'):
                run_cmd(['chmod', '755', mbsdiff])
    else:
        log.warning('stageServer not set. mar tools will *not* be downloaded.')

    compareLocales(compareLocalesRepo, locale, l10nRepoDir, localeSrcDir,
                   l10nIni, revision=revision, merge=merge)
    run_cmd(make + ["installers-%s" % locale], cwd=localeSrcDir, env=env)

    run_cmd(['rm', '-rf', current])
    run_cmd(['mkdir', current])
    run_cmd(['perl', unwrap_full_update, current_mar],
            cwd=path.join(nativeDistDir, 'current'), env=env)
    for oldVersion in partialUpdates:
        prevMar = partialUpdates[oldVersion]['mar']
        if prevMar:
            partial_mar_name = '%s-%s-%s.partial.mar' % (productName,
                                                         oldVersion, version)
            partial_mar = '%s/%s' % (updateAbsDir, partial_mar_name)
            UPLOAD_EXTRA_FILES.append('%s/%s' % (updateDir, partial_mar_name))
            run_cmd(['rm', '-rf', previous])
            run_cmd(['mkdir', previous])
            run_cmd(
                ['perl', unwrap_full_update, '%s/%s' % (prevMarDir, prevMar)],
                cwd=path.join(nativeDistDir, 'previous'), env=env)
            run_cmd(['bash', make_incremental_update, partial_mar, previous,
                     current], cwd=nativeDistDir, env=env)
            if os.environ.get('MOZ_SIGN_CMD'):
                run_cmd(['bash', '-c',
                         '%s -f mar -f gpg "%s"' % (os.environ['MOZ_SIGN_CMD'],
                                                    partial_mar)], env=env)
                UPLOAD_EXTRA_FILES.append(
                    '%s/%s.asc' % (updateDir, partial_mar_name))
        else:
            log.warning(
                "Skipping partial MAR creation for %s %s" % (oldVersion, locale))

    env['UPLOAD_EXTRA_FILES'] = ' '.join(UPLOAD_EXTRA_FILES)
    retry(run_cmd, args=(make + ["upload", "AB_CD=%s" % locale], ),
          kwargs={'cwd': localeSrcDir, 'env': env})
                          productName=productName, version=version,
                          buildNumber=buildNumber)
        pushToMirrors(stageServer=stageServer, stageUsername=stageUsername,
                      stageSshKey=stageSshKey, productName=productName,
                      version=version, extra_excludes=options.extra_excludes,
                      buildNumber=buildNumber, overwrite=options.overwrite)
        if createIndexFiles:
            deleteIndexFiles(stageServer=stageServer,
                             stageUsername=stageUsername,
                             stageSshKey=stageSshKey,
                             cleanup_dir=makeCandidatesDir(productName, version,
                                                           buildNumber))

    if 'postrelease' in args:
        if createIndexFiles:
            deleteIndexFiles(stageServer=stageServer,
                             stageUsername=stageUsername,
                             stageSshKey=stageSshKey,
                             cleanup_dir=makeReleasesDir(productName, version))
        if ftpSymlinkName:
            updateSymlink(stageServer=stageServer, stageUsername=stageUsername,
                          stageSshKey=stageSshKey, productName=productName,
                          version=version, target=ftpSymlinkName)
        if syncPartnerBundles:
def generate_data(self, appVersion, productName, version, buildNumber,
                  updateChannels, stagingServer, bouncerServer,
                  enUSPlatforms, schemaVersion, openURL=None, **updateKwargs):
    assert schemaVersion in (2, 3), 'Unhandled schema version %s' % schemaVersion
    self.name = get_release_blob_name(productName, version, buildNumber)
    data = {
        'name': self.name,
        'detailsUrl': getProductDetails(productName.lower(), appVersion),
        'platforms': {},
        'fileUrls': {},
        'ftpFilenames': {},
        'bouncerProducts': {},
    }
    data['appVersion'] = appVersion
    data['platformVersion'] = appVersion
    data['displayVersion'] = getPrettyVersion(version)

    actions = []
    if openURL:
        actions.append("showURL")
        data["openURL"] = openURL
    if actions:
        data["actions"] = " ".join(actions)

    # XXX: This is a hack for bug 1045583. We should remove it, and always
    # use "candidates" for nightlyDir after the switch to Balrog is complete.
    if productName.lower() == "mobile":
        nightlyDir = "candidates"
    else:
        nightlyDir = "nightly"

    for channel in updateChannels:
        if channel in ('betatest', 'esrtest') or "localtest" in channel:
            dir_ = makeCandidatesDir(productName.lower(), version,
                                     buildNumber, server=stagingServer,
                                     protocol='http', nightlyDir=nightlyDir)
            data['fileUrls'][channel] = '%supdate/%%OS_FTP%%/%%LOCALE%%/%%FILENAME%%' % dir_
        else:
            url = 'http://%s/?product=%%PRODUCT%%&os=%%OS_BOUNCER%%&lang=%%LOCALE%%' % bouncerServer
            data['fileUrls'][channel] = url

        # XXX: quick hack for bug 1021026. We should be using Bouncer for this
        # after we implement better solution talked about in comments 2 through 4
        if channel == 'release':
            dir_ = makeCandidatesDir(productName.lower(), version,
                                     buildNumber,
                                     server='download.cdn.mozilla.net',
                                     protocol='http', nightlyDir=nightlyDir)
            url = '%supdate/%%OS_FTP%%/%%LOCALE%%/%%FILENAME%%' % dir_
            data['fileUrls']['beta'] = url
            data['fileUrls']['beta-cdntest'] = url

    data.update(self._get_update_data(productName, version, **updateKwargs))

    for platform in enUSPlatforms:
        updatePlatforms = buildbot2updatePlatforms(platform)
        bouncerPlatform = buildbot2bouncer(platform)
        ftpPlatform = buildbot2ftp(platform)
        data['platforms'][updatePlatforms[0]] = {
            'OS_BOUNCER': bouncerPlatform,
            'OS_FTP': ftpPlatform
        }
        for aliasedPlatform in updatePlatforms[1:]:
            data['platforms'][aliasedPlatform] = {
                'alias': updatePlatforms[0]
            }

    return data
    options, args = parser.parse_args()

    mercurial(options.buildbotConfigs, "buildbot-configs")
    update("buildbot-configs", revision=options.releaseTag)

    branchConfig, releaseConfig = validate(options, args)

    productName = releaseConfig['productName']
    version = releaseConfig['version']
    buildNumber = releaseConfig['buildNumber']
    stageServer = branchConfig['stage_server']
    stageUsername = branchConfig['stage_username']
    stageSshKey = path.join(os.path.expanduser("~"), ".ssh",
                            branchConfig["stage_ssh_key"])

    candidatesDir = makeCandidatesDir(productName, version, buildNumber)
    rsyncFilesByPattern(server=stageServer, userName=stageUsername,
                        sshKey=stageSshKey, source_dir=candidatesDir,
                        target_dir='temp/', pattern='*.checksums')
    types = {'sha1': 'SHA1SUMS', 'md5': 'MD5SUMS', 'sha512': 'SHA512SUMS'}
    generateChecksums('temp', types)
    files = types.values()
    signFiles(files)
    upload_files = files + ['%s.asc' % x for x in files] + \
        [path.join(path.dirname(__file__), 'KEY')]
    rsyncFiles(files=upload_files, server=stageServer, userName=stageUsername,
def test_ftp_root(self):
    expected = "pub/bbb/candidates/1.0-candidates/build5/"
    got = makeCandidatesDir('bbb', '1.0', 5, ftp_root="pub/")
    self.assertEquals(expected, got)
    staging_server = FTP_SERVER_TEMPLATE % release_config['stagingServer']
    aus_server_url = release_config['ausServerUrl']
    build_number = release_config['buildNumber']
    previous_releases_staging_server = FTP_SERVER_TEMPLATE % \
        release_config.get('previousReleasesStagingServer',
                           release_config['stagingServer'])

    # Current version data
    pc = PatcherConfig(open(options.config).read())
    app_name = pc['appName']
    to_version = pc['current-update']['to']
    to_ = makeReleaseRepackUrls(
        product_name, app_name, to_version, options.platform,
        locale='%locale%', signed=True, exclude_secondary=True
    ).values()[0]
    candidates_dir = makeCandidatesDir(
        product_name, to_version, build_number, ftp_root='/')
    to_path = "%s%s" % (candidates_dir, to_)

    partials = pc['current-update']['partials'].keys()
    # Exclude current version from update verify
    completes = pc.getFromVersions()

    uvc = UpdateVerifyConfig(product=app_name, platform=update_platform,
                             channel=options.channel,
                             aus_server=aus_server_url, to=to_path)

    to_locales = pc['release'][to_version]['locales']
    # remove exceptions for to build, e.g. "ja" for mac
    for locale, platforms in pc['release'][to_version]['exceptions'].iteritems():
        if ftp_platform not in platforms and locale in to_locales:
            log.info("Removing %s locale from %s platform for %s" % (
                locale, ftp_platform, to_version))
def _getFileUrls(self, productName, version, buildNumber, updateChannels,
                 ftpServer, bouncerServer, partialUpdates,
                 requiresMirrors=True):
    data = {"fileUrls": {}}

    # "*" is for the default set of fileUrls, which generally points at
    # bouncer. It's helpful to have this to reduce duplication between
    # the live channel and the cdntest channel (which eliminates the
    # possibility that those two channels serve different contents).
    uniqueChannels = ["*"]
    for c in updateChannels:
        # localtest channels are different than the default because they
        # point directly at FTP rather than Bouncer.
        if "localtest" in c:
            uniqueChannels.append(c)
        # beta and beta-cdntest are special, but only if requiresMirrors is
        # set to False. This is typically used when generating beta channel
        # updates as part of RC builds, which get shipped prior to the
        # release being pushed to mirrors. This is a bit of a hack.
        if not requiresMirrors and c in ("beta", "beta-cdntest"):
            uniqueChannels.append(c)

    for channel in uniqueChannels:
        data["fileUrls"][channel] = {
            "completes": {}
        }
        if "localtest" in channel:
            dir_ = makeCandidatesDir(productName.lower(), version,
                                     buildNumber, server=ftpServer,
                                     protocol='http')
            filename = "%s-%s.complete.mar" % (productName.lower(), version)
            filename = _FTP2 % (dir_, filename)
            data["fileUrls"][channel]["completes"]["*"] = filename
        else:
            # See comment above about these channels for explanation.
            if not requiresMirrors and channel in ("beta", "beta-cdntest"):
                bouncerProduct = "%s-%sbuild%s-complete" % (
                    productName.lower(), version, buildNumber)
            else:
                if productName.lower() == "fennec":
                    bouncerProduct = "%s-%s" % (productName.lower(), version)
                else:
                    bouncerProduct = "%s-%s-complete" % (
                        productName.lower(), version)
            url = _URL2 % (bouncerServer, bouncerProduct)
            data["fileUrls"][channel]["completes"]["*"] = url

    if not partialUpdates:
        return data

    for channel in uniqueChannels:
        data["fileUrls"][channel]["partials"] = {}
        for previousVersion, previousInfo in partialUpdates.iteritems():
            from_ = get_release_blob_name(productName, previousVersion,
                                          previousInfo["buildNumber"],
                                          self.dummy)
            if "localtest" in channel:
                dir_ = makeCandidatesDir(productName.lower(), version,
                                         buildNumber, server=ftpServer,
                                         protocol='http')
                filename = "%s-%s-%s.partial.mar" % (
                    productName.lower(), previousVersion, version)
                data["fileUrls"][channel]["partials"][from_] = _FTP2 % (
                    dir_, filename)
            else:
                lname = productName.lower()
                # See comment above about these channels for explanation.
                if (not requiresMirrors and
                        channel in ("beta", "beta-cdntest")):
                    bouncerProduct = "%s-%sbuild%s-partial-%sbuild%s" % (
                        lname, version, buildNumber, previousVersion,
                        previousInfo["buildNumber"])
                else:
                    bouncerProduct = "%s-%s-partial-%s" % (
                        productName.lower(), version, previousVersion)
                url = _URL2 % (bouncerServer, bouncerProduct)
                data["fileUrls"][channel]["partials"][from_] = url

    return data
def repackLocale(locale, l10nRepoDir, l10nBaseRepo, revision, localeSrcDir,
                 l10nIni, compareLocalesRepo, env, absObjdir, merge=True,
                 productName=None, platform=None, version=None,
                 partialUpdates=None, buildNumber=None, stageServer=None):
    repo = "/".join([l10nBaseRepo, locale])
    localeDir = path.join(l10nRepoDir, locale)
    retry(mercurial, args=(repo, localeDir))
    update(localeDir, revision=revision)

    mozillaDir = ''
    if 'thunderbird' in productName:
        mozillaDir = 'mozilla/'

    # It's a bad assumption to make, but the source dir is currently always
    # one level above the objdir.
    absSourceRepoPath = path.split(absObjdir)[0]
    use_pymake = env.get("USE_PYMAKE", False)
    make = getMakeCommand(use_pymake, absSourceRepoPath)

    env["AB_CD"] = locale
    env["LOCALE_MERGEDIR"] = path.abspath(path.join(localeSrcDir, "merged"))
    if sys.platform.startswith('win'):
        if use_pymake:
            env["LOCALE_MERGEDIR"] = msys2windows(env["LOCALE_MERGEDIR"])
        else:
            env["LOCALE_MERGEDIR"] = windows2msys(env["LOCALE_MERGEDIR"])
    if sys.platform.startswith('darwin'):
        env["MOZ_PKG_PLATFORM"] = "mac"
    UPLOAD_EXTRA_FILES = []
    nativeDistDir = path.normpath(
        path.abspath(path.join(localeSrcDir, '../../%sdist' % mozillaDir)))
    posixDistDir = windows2msys(nativeDistDir)
    mar = '%s/host/bin/mar' % posixDistDir
    mbsdiff = '%s/host/bin/mbsdiff' % posixDistDir
    if platform.startswith('win'):
        mar += ".exe"
        mbsdiff += ".exe"
    current = '%s/current' % posixDistDir
    previous = '%s/previous' % posixDistDir
    updateDir = 'update/%s/%s' % (buildbot2ftp(platform), locale)
    updateAbsDir = '%s/%s' % (posixDistDir, updateDir)
    current_mar = '%s/%s-%s.complete.mar' % (updateAbsDir, productName, version)
    unwrap_full_update = '../../../tools/update-packaging/unwrap_full_update.pl'
    make_incremental_update = '../../tools/update-packaging/make_incremental_update.sh'
    prevMarDir = '../../../../'
    if mozillaDir:
        unwrap_full_update = '../../../../%stools/update-packaging/unwrap_full_update.pl' % mozillaDir
        make_incremental_update = '../../../%stools/update-packaging/make_incremental_update.sh' % mozillaDir
        prevMarDir = '../../../../../'
    env['MAR'] = mar
    env['MBSDIFF'] = mbsdiff

    log.info("Download mar tools")
    if stageServer:
        candidates_dir = makeCandidatesDir(productName, version, buildNumber,
                                           protocol="http", server=stageServer)
        if not path.isfile(msys2windows(mar)):
            marUrl = "%(c_dir)s/mar-tools/%(platform)s/%(mar)s" % \
                dict(c_dir=candidates_dir, platform=platform,
                     mar=path.basename(mar))
            run_cmd(['mkdir', '-p', path.dirname(mar)])
            log.info("Downloading %s to %s", marUrl, mar)
            urlretrieve(marUrl, msys2windows(mar))
            if not sys.platform.startswith('win'):
                run_cmd(['chmod', '755', mar])
        if not path.isfile(msys2windows(mbsdiff)):
            mbsdiffUrl = "%(c_dir)s/mar-tools/%(platform)s/%(mbsdiff)s" % \
                dict(c_dir=candidates_dir, platform=platform,
                     mbsdiff=path.basename(mbsdiff))
            run_cmd(['mkdir', '-p', path.dirname(mbsdiff)])
            log.info("Downloading %s to %s", mbsdiffUrl, mbsdiff)
            urlretrieve(mbsdiffUrl, msys2windows(mbsdiff))
            if not sys.platform.startswith('win'):
                run_cmd(['chmod', '755', mbsdiff])
    else:
        log.warning('stageServer not set. mar tools will *not* be downloaded.')

    compareLocales(compareLocalesRepo, locale, l10nRepoDir, localeSrcDir,
                   l10nIni, revision=revision, merge=merge)
    run_cmd(make + ["installers-%s" % locale], cwd=localeSrcDir, env=env)

    # Our Windows-native rm from bug 727551 requires Windows-style paths
    run_cmd(['rm', '-rf', msys2windows(current)])
    run_cmd(['mkdir', current])
    run_cmd(['perl', unwrap_full_update, current_mar],
            cwd=path.join(nativeDistDir, 'current'), env=env)
    for oldVersion in partialUpdates:
        prevMar = partialUpdates[oldVersion]['mar']
        if prevMar:
            partial_mar_name = '%s-%s-%s.partial.mar' % (productName,
                                                         oldVersion, version)
            partial_mar = '%s/%s' % (updateAbsDir, partial_mar_name)
            UPLOAD_EXTRA_FILES.append('%s/%s' % (updateDir, partial_mar_name))
            # Our Windows-native rm from bug 727551 requires Windows-style paths
            run_cmd(['rm', '-rf', msys2windows(previous)])
            run_cmd(['mkdir', previous])
            run_cmd(
                ['perl', unwrap_full_update, '%s/%s' % (prevMarDir, prevMar)],
                cwd=path.join(nativeDistDir, 'previous'), env=env)
            run_cmd(['bash', make_incremental_update, partial_mar, previous,
                     current], cwd=nativeDistDir, env=env)
            if os.environ.get('MOZ_SIGN_CMD'):
                run_cmd(['bash', '-c',
                         '%s -f mar -f gpg "%s"' % (os.environ['MOZ_SIGN_CMD'],
                                                    partial_mar)], env=env)
                UPLOAD_EXTRA_FILES.append('%s/%s.asc' % (updateDir,
                                                         partial_mar_name))
        else:
            log.warning("Skipping partial MAR creation for %s %s" %
                        (oldVersion, locale))

    env['UPLOAD_EXTRA_FILES'] = ' '.join(UPLOAD_EXTRA_FILES)
    retry(run_cmd, args=(make + ["upload", "AB_CD=%s" % locale], ),
          kwargs={'cwd': localeSrcDir, 'env': env})

    # return the location of the checksums file, because consumers may want
    # some information about the files that were generated.
    # Some versions of make that we use (at least pymake) imply --print-directory
    # We need to turn it off to avoid getting extra output that mess up our
    # parsing of the checksum file path.
    curdir = os.getcwd()
    try:
        os.chdir(localeSrcDir)
        relative_checksums = get_output(
            make + ["--no-print-directory", "echo-variable-CHECKSUM_FILE",
                    "AB_CD=%s" % locale], env=env).strip("\"'\n")
        return path.normpath(path.join(localeSrcDir, relative_checksums))
    finally:
        os.chdir(curdir)
        release_config['stagingServer'])

    # Current version data
    pc = PatcherConfig(open(options.config).read())
    partials = pc['current-update']['partials'].keys()
    app_name = pc['appName']
    to_version = pc['current-update']['to']
    to_ = makeReleaseRepackUrls(product_name, app_name, to_version,
                                options.platform, locale='%locale%',
                                signed=True,
                                exclude_secondary=True).values()[0]
    candidates_dir = makeCandidatesDir(product_name, to_version, build_number,
                                       ftp_root='/')
    to_path = "%s%s" % (candidates_dir, to_)

    uvc = UpdateVerifyConfig(product=app_name, channel=options.channel,
                             aus_server=aus_server_url, to=to_path)

    # getUpdatePaths yields all of the update paths, but we need to know
    # everything about a fromVersion before we can add it to the update
    # verify config, so we need to collect everything it yields before
    # acting on it.
    updatePaths = {}
    for fromVersion, platform, locale, _, _ in pc.getUpdatePaths():
        # Skip paths from platforms we don't care about.
def _getFileUrls(self, productName, version, buildNumber, updateChannels,
                 ftpServer, bouncerServer, partialUpdates,
                 requiresMirrors=True):
    data = {"fileUrls": {}}

    file_prefix = productName.lower()
    if file_prefix == "devedition":
        file_prefix = "firefox"

    # "*" is for the default set of fileUrls, which generally points at
    # bouncer. It's helpful to have this to reduce duplication between
    # the live channel and the cdntest channel (which eliminates the
    # possibility that those two channels serve different contents).
    uniqueChannels = ["*"]
    for c in updateChannels:
        # localtest channels are different than the default because they
        # point directly at FTP rather than Bouncer.
        if "localtest" in c:
            uniqueChannels.append(c)
        # beta and beta-cdntest are special, but only if requiresMirrors is
        # set to False. This is typically used when generating beta channel
        # updates as part of RC builds, which get shipped prior to the
        # release being pushed to mirrors. This is a bit of a hack.
        if not requiresMirrors and c in ("beta", "beta-cdntest", "beta-dev",
                                         "beta-dev-cdntest"):
            uniqueChannels.append(c)

    for channel in uniqueChannels:
        data["fileUrls"][channel] = {"completes": {}}
        if "localtest" in channel:
            dir_ = makeCandidatesDir(productName.lower(), version,
                                     buildNumber, server=ftpServer,
                                     protocol='http')
            filename = "%s-%s.complete.mar" % (file_prefix, version)
            data["fileUrls"][channel]["completes"]["*"] = \
                "%supdate/%%OS_FTP%%/%%LOCALE%%/%s" % (dir_, filename)
        else:
            # See comment above about these channels for explanation.
            if not requiresMirrors and channel in ("beta", "beta-cdntest",
                                                   "beta-dev",
                                                   "beta-dev-cdntest"):
                bouncerProduct = "%s-%sbuild%s-complete" % (
                    productName.lower(), version, buildNumber)
            else:
                if productName.lower() == "fennec":
                    bouncerProduct = "%s-%s" % (productName.lower(), version)
                else:
                    bouncerProduct = "%s-%s-complete" % (
                        productName.lower(), version)
            url = 'http://%s/?product=%s&os=%%OS_BOUNCER%%&lang=%%LOCALE%%' % (
                bouncerServer, bouncerProduct)
            data["fileUrls"][channel]["completes"]["*"] = url

    if not partialUpdates:
        return data

    for channel in uniqueChannels:
        data["fileUrls"][channel]["partials"] = {}
        for previousVersion, previousInfo in partialUpdates.iteritems():
            from_ = get_release_blob_name(productName, previousVersion,
                                          previousInfo["buildNumber"],
                                          self.dummy)
            if "localtest" in channel:
                dir_ = makeCandidatesDir(productName.lower(), version,
                                         buildNumber, server=ftpServer,
                                         protocol='http')
                filename = "%s-%s-%s.partial.mar" % (
                    file_prefix, previousVersion, version)
                data["fileUrls"][channel]["partials"][from_] = \
                    "%supdate/%%OS_FTP%%/%%LOCALE%%/%s" % (dir_, filename)
            else:
                # See comment above about these channels for explanation.
                if not requiresMirrors and channel in ("beta", "beta-cdntest",
                                                       "beta-dev",
                                                       "beta-dev-cdntest"):
                    bouncerProduct = "%s-%sbuild%s-partial-%sbuild%s" % (
                        productName.lower(), version, buildNumber,
                        previousVersion, previousInfo["buildNumber"])
                else:
                    bouncerProduct = "%s-%s-partial-%s" % (
                        productName.lower(), version, previousVersion)
                url = 'http://%s/?product=%s&os=%%OS_BOUNCER%%&lang=%%LOCALE%%' % (
                    bouncerServer, bouncerProduct)
                data["fileUrls"][channel]["partials"][from_] = url

    return data
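# A small standalone sketch of the Bouncer product-name scheme encoded in the
# variants above, using a hypothetical product and versions. RC-style updates
# (requiresMirrors=False on the beta channels) pin both build numbers, while
# normal updates use plain version-to-version names.
bp_product, bp_version, bp_build = "foo", "99.0", 2
bp_prev_version, bp_prev_build = "98.0", 1

# Normal (mirrors required) complete/partial products:
bp_complete = "%s-%s-complete" % (bp_product, bp_version)
bp_partial = "%s-%s-partial-%s" % (bp_product, bp_version, bp_prev_version)
assert bp_complete == "foo-99.0-complete"
assert bp_partial == "foo-99.0-partial-98.0"

# RC-style beta/beta-cdntest products when requiresMirrors is False:
bp_rc_complete = "%s-%sbuild%s-complete" % (bp_product, bp_version, bp_build)
bp_rc_partial = "%s-%sbuild%s-partial-%sbuild%s" % (
    bp_product, bp_version, bp_build, bp_prev_version, bp_prev_build)
assert bp_rc_complete == "foo-99.0build2-complete"
assert bp_rc_partial == "foo-99.0build2-partial-98.0build1"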
def test_fennec(self):
    expected = "/pub/mobile/candidates/15.1-candidates/build3/"
    got = makeCandidatesDir('fennec', '15.1', 3)
    self.assertEquals(expected, got)
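# Going by the expectations in the tests above, the candidates directory
# follows a fixed template. The helper below is only an inference from those
# test strings, not the real makeCandidatesDir: it ignores nightlyDir and the
# product aliasing that the fennec test shows (fennec maps to a "mobile"
# directory), and exists purely to illustrate the path shape.
def guess_candidates_dir(product, version, build_number, protocol=None,
                         server=None, ftp_root='/pub/'):
    # Hypothetical reimplementation inferred from the test expectations only.
    directory = "%s%s/candidates/%s-candidates/build%s/" % (
        ftp_root, product, version, build_number)
    if protocol and server:
        return "%s://%s%s" % (protocol, server, directory)
    return directory

assert guess_candidates_dir('bbb', '1.0', 2) == \
    "/pub/bbb/candidates/1.0-candidates/build2/"
assert guess_candidates_dir('bbb', '1.0', 5, protocol="http", server="foo.bar") == \
    "http://foo.bar/pub/bbb/candidates/1.0-candidates/build5/"
assert guess_candidates_dir('bbb', '1.0', 5, ftp_root="pub/") == \
    "pub/bbb/candidates/1.0-candidates/build5/"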