def run_benchmark(options, use_refbuild, benchmark_results):
  """Launch Chrome against START_URL and print benchmark scores.

  Runs either the current build output or the checked-in reference build,
  blocks until the results server (elsewhere in this script) signals that
  scores arrived, prints them, then kills the browser and removes the
  throwaway profile.

  Args:
    options: parsed command-line options; build_dir and target are used.
    use_refbuild: if True, run the reference build checked into the source
        tree instead of the freshly built browser.
    benchmark_results: dict holding a 'ready' threading.Event and a
        'results' JSON string, populated by the result collector.

  Returns:
    0 (always; failures surface as exceptions).
  """
  result = 0
  build_dir = os.path.abspath(options.build_dir)
  if not use_refbuild:
    # Normal build output lives in a platform-specific sibling directory of
    # build_dir (Windows uses build_dir itself).
    if chromium_utils.IsMac():
      build_dir = os.path.join(os.path.dirname(build_dir), 'xcodebuild')
    elif chromium_utils.IsLinux():
      build_dir = os.path.join(os.path.dirname(build_dir), 'sconsbuild')
    build_dir = os.path.join(build_dir, options.target)
  else:
    # The reference build is checked into the tree, one subdir per platform.
    build_dir = os.path.join(os.path.dirname(build_dir), 'chrome', 'tools',
                             'test', 'reference_build')
    if chromium_utils.IsMac():
      build_dir = os.path.join(build_dir, 'chrome_mac')
    elif chromium_utils.IsLinux():
      build_dir = os.path.join(build_dir, 'chrome_linux')
    else:
      build_dir = os.path.join(build_dir, 'chrome_win')
  if chromium_utils.IsWindows():
    chrome_exe_name = 'chrome.exe'
  elif chromium_utils.IsLinux():
    chrome_exe_name = 'chrome'
  else:
    chrome_exe_name = 'Chromium'
  chrome_exe_path = os.path.join(build_dir, chrome_exe_name)
  if not os.path.exists(chrome_exe_path):
    raise chromium_utils.PathNotFound('Unable to find %s' % chrome_exe_path)
  # Fresh profile per run so runs don't contaminate each other.
  temp_dir = tempfile.mkdtemp()
  command = [
      chrome_exe_path,
      '--user-data-dir=%s' % temp_dir,
      '--no-first-run',
      '--no-default-browser-check',
      START_URL
  ]
  print "Executing: "
  print command
  browser_process = subprocess.Popen(command)
  # Block until the benchmark page reports back through the results server.
  benchmark_results['ready'].wait()
  if benchmark_results['ready'].isSet():
    results = json.loads(benchmark_results['results'])[0]
    print_result(True, 'Total', results['score'], use_refbuild)
    for child in results['children']:
      print_result(False, child['name'], child['score'], use_refbuild)
    # Reset the event so a subsequent run (e.g. the ref build) can reuse it.
    benchmark_results['ready'].clear()
  # Kill the browser: whole process tree on Windows, SIGTERM elsewhere.
  if chromium_utils.IsWindows():
    subprocess.call('taskkill /f /pid %i /t' % browser_process.pid)
  else:
    os.system('kill -15 %i' % browser_process.pid)
  browser_process.wait()
  shutil.rmtree(temp_dir)
  return result
def BuildArch(target_arch=None):
  """Determine the architecture of the build being processed.

  Args:
    target_arch: optional explicit architecture; only 'x64' is recognized,
        any other non-None value raises.

  Returns:
    '32bit' or '64bit'.

  Raises:
    StagingError: if target_arch is unrecognized, or the architecture cannot
        be determined on Linux.
    NotImplementedError: on unsupported platforms.
  """
  if target_arch == 'x64':
    # Just use the architecture specified by the build if it's 64 bit.
    return '64bit'
  elif target_arch:
    # BUG FIX: the value was previously passed as a second positional
    # argument to StagingError instead of being %-interpolated, so the
    # message would have read literally 'Unknown target_arch "%s"'.
    raise StagingError('Unknown target_arch "%s"' % target_arch)
  if chromium_utils.IsWindows() or chromium_utils.IsMac():
    # Architecture is not relevant for Mac (combines multiple archs in one
    # release) and Win (32-bit only), so just call it 32bit.
    # TODO(mmoss): This might change for Win if we add 64-bit builds.
    return '32bit'
  elif chromium_utils.IsLinux():
    # This assumes we either build natively or build (and run staging) in a
    # chroot, where the architecture of the python executable is the same as
    # the build target.
    # TODO(mmoss): This appears to be true for the current builders. If that
    # changes, we might have to modify the bots to pass in the build
    # architecture when running this script.
    # 'unknown' here is the fallback value platform.architecture() returns
    # when it cannot determine the bit width, letting us detect failure.
    arch = platform.architecture(bits='unknown')[0]
    if arch == 'unknown':
      raise StagingError('Could not determine build architecture')
    return arch
  else:
    raise NotImplementedError('Platform "%s" is not currently supported.' %
                              sys.platform)
def RemoveChromeTemporaryFiles(): """A large hammer to nuke what could be leaked files from unittests or files left from a unittest that crashed, was killed, etc.""" # NOTE: print out what is cleaned up so the bots don't timeout if # there is a lot to cleanup and also se we see the leaks in the # build logs. # At some point a leading dot got added, support with and without it. kLogRegex = '^\.?(com\.google\.Chrome|org\.chromium)\.' if chromium_utils.IsWindows(): kLogRegex = r'^(base_dir|scoped_dir|nps|chrome_test|SafeBrowseringTest)' LogAndRemoveFiles(tempfile.gettempdir(), kLogRegex) # Dump and temporary files. LogAndRemoveFiles(tempfile.gettempdir(), r'^.+\.(dmp|tmp)$') LogAndRemoveFiles(tempfile.gettempdir(), r'^_CL_.*$') RemoveChromeDesktopFiles() RemoveJumpListFiles() elif chromium_utils.IsLinux(): kLogRegexHeapcheck = '\.(sym|heap)$' LogAndRemoveFiles(tempfile.gettempdir(), kLogRegex) LogAndRemoveFiles(tempfile.gettempdir(), kLogRegexHeapcheck) LogAndRemoveFiles('/dev/shm', kLogRegex) elif chromium_utils.IsMac(): nstempdir_path = '/usr/local/libexec/nstempdir' if os.path.exists(nstempdir_path): ns_temp_dir = chromium_utils.GetCommandOutput([nstempdir_path]).strip() if ns_temp_dir: LogAndRemoveFiles(ns_temp_dir, kLogRegex) for i in ('Chromium', 'Google Chrome'): # Remove dumps. crash_path = '%s/Library/Application Support/%s/Crash Reports' % ( os.environ['HOME'], i) LogAndRemoveFiles(crash_path, r'^.+\.dmp$') else: raise NotImplementedError( 'Platform "%s" is not currently supported.' % sys.platform)
def FileRegexBlacklist(options):
  """Return a regex matching build-output files that should not be archived."""
  if chromium_utils.IsWindows():
    suffixes = [
        'res', 'lib', 'exp', 'ilk', '7z', r'([pP]recompile\.h\.pch.*)'
    ]
  elif chromium_utils.IsMac():
    # The static libs are just built as intermediate targets, and we don't
    # need to pull the dSYMs over to the testers most of the time (except for
    # the memory tools).
    suffixes = ['a'] if options.package_dsym_files else ['a', 'dSYM']
  elif chromium_utils.IsLinux():
    # object files, archives, and gcc (make build) dependency info.
    suffixes = ['o', 'a', 'd']
  else:
    # Sentinel regex that can never match anything: no filtering.
    return '$NO_FILTER^'
  # No need for the .ninja files generated by "gn gen".
  suffixes.append('ninja')
  by_extension = r'.+\.({})'.format('|'.join(suffixes))
  # Primary toolchain is excluded by "exclusions" rather than regex.
  secondary_toolchain = r'\w+{}(obj|gen)'.format(re.escape(os.path.sep))
  return '^({}|{})$'.format(by_extension, secondary_toolchain)
def determine_goma_jobs(): # We would like to speed up build on Windows a bit, since it is slowest. number_of_processors = 0 try: number_of_processors = multiprocessing.cpu_count() except NotImplementedError: print 'cpu_count() is not implemented, using default value 50.' return 50 assert number_of_processors > 0 # When goma is used, 10 * number_of_processors is basically good in # various situations according to our measurement. Build speed won't # be improved if -j is larger than that. # # Since Mac had process number limitation before, we had to set # the upper limit to 50. Now that the process number limitation is 2000, # so we would be able to use 10 * number_of_processors. # For the safety, we'd like to set the upper limit to 200. # # Note that currently most try-bot build slaves have 8 processors. if chromium_utils.IsMac() or chromium_utils.IsWindows(): return min(10 * number_of_processors, 200) # For Linux, we also would like to use 10 * cpu. However, not sure # backend resource is enough, so let me set Linux and Linux x64 builder # only for now. # Also increasing cpus for v8/blink trybots. hostname = goma_utils.GetShortHostname() if hostname in ( ['build14-m1', 'build48-m1'] + ['build%d-m4' % x for x in xrange(45, 48)]): return min(10 * number_of_processors, 200) return 50
def main(): usage = 'usage: %prog [--nuke]' parser = OptionParser(usage) parser.add_option('-n', '--nuke', action='store_true', dest='nuke', default=False, help='Nuke whole repository (not just build output)') options, unused_args = parser.parse_args() if options.nuke: chromium_utils.RemoveDirectory('trunk') else: # Remove platform specific build output directories. if chromium_utils.IsWindows(): chromium_utils.RemoveDirectory('trunk\\build\\Debug') chromium_utils.RemoveDirectory('trunk\\build\\Release') elif chromium_utils.IsMac(): chromium_utils.RemoveDirectory('trunk/out') chromium_utils.RemoveDirectory('trunk/xcodebuild') elif chromium_utils.IsLinux(): chromium_utils.RemoveDirectory('trunk/out') else: print 'Unknown platform: ' + sys.platform return 1 return 0
def determine_goma_jobs(): # We would like to speed up build on Windows a bit, since it is slowest. number_of_processors = 0 try: number_of_processors = multiprocessing.cpu_count() except NotImplementedError: print 'cpu_count() is not implemented, using default value 50.' return 50 assert number_of_processors > 0 # When goma is used, 10 * number_of_processors is basically good in # various situations according to our measurement. Build speed won't # be improved if -j is larger than that. # # Since Mac had process number limitation before, we had to set # the upper limit to 50. Now that the process number limitation is 2000, # so we would be able to use 10 * number_of_processors. # For the safety, we'd like to set the upper limit to 200. # # For linux, let me keep the current value 50. It's fast enough # compared to the other platforms. # # Note that currently most try-bot build slaves have 8 processors. if chromium_utils.IsMac() or chromium_utils.IsWindows(): return min(10 * number_of_processors, 200) return 50
def FileRegexBlacklist(options):
  """Return a regex matching files that should be excluded from the archive."""
  if chromium_utils.IsWindows():
    # Remove all .ilk/.7z and maybe PDB files
    # TODO(phajdan.jr): Remove package_pdb_files when nobody uses it.
    if options.factory_properties.get('package_pdb_files', True):
      return r'^.+\.(rc|res|lib|exp|ilk|7z|([pP]recompile\.h\.pch.*))$'
    return r'^.+\.(rc|res|lib|exp|ilk|pdb|7z|([pP]recompile\.h\.pch.*))$'
  if chromium_utils.IsMac():
    # The static libs are just built as intermediate targets, and we don't
    # need to pull the dSYMs over to the testers most of the time (except for
    # the memory tools).
    if options.factory_properties.get('package_dsym_files', False):
      return r'^.+\.(a)$'
    return r'^.+\.(a|dSYM)$'
  if chromium_utils.IsLinux():
    # object files, archives, and gcc (make build) dependency info.
    return r'^.+\.(o|a|d)$'
  # Sentinel regex that can never match anything: no filtering.
  return '$NO_FILTER^'
def archive(options, args):
  """Zip the build output and upload it to Google Storage.

  Args:
    options: parsed command-line options (build_dir, target,
        factory_properties, ...).
    args: unused.

  Returns:
    0 on success (the GSUtilCopyFile status); raises StagingError on any
    packaging or upload failure.
  """
  build_dir, _ = chromium_utils.ConvertBuildDirToLegacy(
      options.build_dir, use_out=chromium_utils.IsLinux())
  build_dir = os.path.join(build_dir, options.target)
  src_dir = os.path.abspath(os.path.dirname(options.build_dir))
  staging_dir = slave_utils.GetStagingDir(src_dir)
  build_revision = slave_utils.SubversionRevision(src_dir)
  chromium_utils.MakeParentDirectoriesWorldReadable(staging_dir)

  print 'Staging in %s' % build_dir

  # Build the list of files to archive.
  zip_file_list = [f for f in os.listdir(build_dir)
                   if ShouldPackageFile(f, options.target)]

  subdir = None

  # TODO(nsylvain): We need to move linux to a subdir as well, but aarya is
  # not ready with the server-side change.
  if chromium_utils.IsMac():
    subdir = '%s-%s' % (chromium_utils.PlatformName(),
                        options.target.lower())

  prefix = options.factory_properties.get('cf_archive_name', 'cf_archive')
  zip_file_name = '%s-%s-%s-%d' % (prefix, chromium_utils.PlatformName(),
                                   options.target.lower(), build_revision)

  (zip_dir, zip_file) = chromium_utils.MakeZip(staging_dir,
                                               zip_file_name,
                                               zip_file_list,
                                               build_dir,
                                               raise_error=True)
  # The intermediate staging copy is no longer needed once the zip exists.
  chromium_utils.RemoveDirectory(zip_dir)
  if not os.path.exists(zip_file):
    raise StagingError('Failed to make zip package %s' % zip_file)
  chromium_utils.MakeWorldReadable(zip_file)

  # Report the size of the zip file to help catch when it gets too big.
  zip_size = os.stat(zip_file)[stat.ST_SIZE]
  print 'Zip file is %ld bytes' % zip_size

  gs_bucket = options.factory_properties.get('gs_bucket', None)
  gs_acl = options.factory_properties.get('gs_acl', None)
  status = slave_utils.GSUtilCopyFile(zip_file, gs_bucket, subdir=subdir,
                                      gs_acl=gs_acl)
  if status:
    raise StagingError('Failed to upload %s to %s. Error %d' %
                       (zip_file, gs_bucket, status))
  else:
    # Delete the file, it is not needed anymore.
    os.remove(zip_file)

  return status
def SubversionExe():
  """Return the name of the svn executable for the current platform."""
  # TODO(pamg): move this into platform_utils to support Mac and Linux.
  if chromium_utils.IsWindows():
    # Find it in the user's path.
    return 'svn.bat'
  if chromium_utils.IsLinux() or chromium_utils.IsMac():
    # Find it in the user's path.
    return 'svn'
  raise NotImplementedError(
      'Platform "%s" is not currently supported.' % sys.platform)
def CreateCoverageFileAndUpload(options): """Create coverage file with bbcov2lcov binary and upload to www dir.""" # Assert log files exist log_files = glob.glob(os.path.join(options.dynamorio_log_dir, '*.log')) if not log_files: print 'No coverage log files found.' return 1 if (options.browser_shard_index and options.test_to_upload in options.sharded_tests): coverage_info = os.path.join( options.build_dir, 'coverage_%s.info' % options.browser_shard_index) else: coverage_info = os.path.join(options.build_dir, COVERAGE_INFO) coverage_info = os.path.normpath(coverage_info) if os.path.isfile(coverage_info): os.remove(coverage_info) bbcov2lcov_binary = GetExecutableName( os.path.join(options.dynamorio_dir, 'tools', 'bin32', 'bbcov2lcov')) cmd = [ bbcov2lcov_binary, '--dir', options.dynamorio_log_dir, '--output', coverage_info ] RunCmd(cmd) # Delete log files. log_files = glob.glob(os.path.join(options.dynamorio_log_dir, '*.log')) for log_file in log_files: os.remove(log_file) # Assert coverage.info file exist if not os.path.isfile(coverage_info): print 'Failed to create coverage.info file.' return 1 # Upload coverage file. cov_dir = options.test_to_upload.replace('_', '') + COVERAGE_DIR_POSTFIX dest = os.path.join(options.www_dir, options.platform, options.build_id, cov_dir) dest = os.path.normpath(dest) if chromium_utils.IsWindows(): print('chromium_utils.CopyFileToDir(%s, %s)' % (coverage_info, dest)) chromium_utils.MaybeMakeDirectory(dest) chromium_utils.CopyFileToDir(coverage_info, dest) elif chromium_utils.IsLinux() or chromium_utils.IsMac(): print 'SshCopyFiles(%s, %s, %s)' % (coverage_info, options.host, dest) chromium_utils.SshMakeDirectory(options.host, dest) chromium_utils.MakeWorldReadable(coverage_info) chromium_utils.SshCopyFiles(coverage_info, options.host, dest) os.unlink(coverage_info) else: raise NotImplementedError('Platform "%s" is not currently supported.' % sys.platform) return 0
def _UploadBuild(self, www_dir, revisions_path, archive_files, gs_base,
                 gs_acl):
  """Upload the archives and the revisions file to the archive destination.

  On Windows the files are copied locally; on Linux/Mac they are pushed to
  the archive host over ssh. Intended actions are always printed; the actual
  copies are skipped when self.options.dry_run is set.

  Args:
    www_dir: destination directory (local path or path on the archive host).
    revisions_path: path to the REVISIONS file to publish alongside.
    archive_files: list of local archive file paths to upload.
    gs_base: Google Storage base path passed through to the My* helpers.
    gs_acl: Google Storage ACL passed through to the My* helpers.
  """
  if chromium_utils.IsWindows():
    print 'os.makedirs(%s)' % www_dir
    for archive in archive_files:
      print 'chromium_utils.CopyFileToDir(%s, %s)' % (archive, www_dir)
    print 'chromium_utils.CopyFileToDir(%s, %s)' % (revisions_path, www_dir)
    if not self.options.dry_run:
      self.MyMaybeMakeDirectory(www_dir, gs_base)
      for archive in archive_files:
        self.MyCopyFileToDir(archive, www_dir, gs_base, gs_acl=gs_acl)
      self.MyCopyFileToDir(revisions_path, www_dir, gs_base, gs_acl=gs_acl)
  elif chromium_utils.IsLinux() or chromium_utils.IsMac():
    for archive in archive_files:
      print 'SshCopyFiles(%s, %s, %s)' % (
          archive, self.options.archive_host, www_dir)
    print 'SshCopyFiles(%s, %s, %s)' % (
        revisions_path, self.options.archive_host, www_dir)
    if not self.options.dry_run:
      print 'SshMakeDirectory(%s, %s)' % (self.options.archive_host,
                                          www_dir)
      self.MySshMakeDirectory(self.options.archive_host, www_dir, gs_base)
      for archive in archive_files:
        self.MyMakeWorldReadable(archive, gs_base)
        self.MySshCopyFiles(archive, self.options.archive_host, www_dir,
                            gs_base, gs_acl=gs_acl)
        # NOTE(review): the local archive is deleted right after upload --
        # presumably to save slave disk space; confirm before reordering.
        os.unlink(archive)
      # Files are created umask 077 by default, so make it world-readable
      # before pushing to web server.
      self.MyMakeWorldReadable(revisions_path, gs_base)
      self.MySshCopyFiles(revisions_path, self.options.archive_host,
                          www_dir, gs_base, gs_acl=gs_acl)
  else:
    raise NotImplementedError(
        'Platform "%s" is not currently supported.' % sys.platform)
def prepareToolDir(self):
  """Create the platform-specific Zip tool directory under src_dir."""
  # Build up a directory for Zip file testing
  if chromium_utils.IsWindows():
    platform_subdir = 'win'
  elif chromium_utils.IsLinux():
    platform_subdir = 'linux'
  elif chromium_utils.IsMac():
    platform_subdir = 'mac'
  else:
    raise PlatformError('Platform "%s" is not currently supported.' %
                        sys.platform)
  self.tool_dir = os.path.join(self.src_dir,
                               'chrome/tools/build/' + platform_subdir)
  os.makedirs(self.tool_dir)
def FileExclusions():
  """Return names (mostly directories) the testers don't need archived."""
  # Skip files that the testers don't care about. Mostly directories.
  if chromium_utils.IsWindows():
    # Remove obj or lib dir entries
    return ['obj', 'lib', 'cfinstaller_archive', 'installer_archive']
  if chromium_utils.IsMac():
    mac_exclusions = [
        # We don't need the arm bits v8 builds.
        'd8_arm', 'v8_shell_arm',
        # pdfsqueeze is a build helper, no need to copy it to testers.
        'pdfsqueeze',
        # The inspector copies its resources into a resources folder in the
        # build output, but we only need the copy that ends up within the
        # Chrome bundle.
        'resources',
        # We copy the framework into the app bundle, we don't need the second
        # copy outside the app.
        # TODO(mark): Since r28431, the copy in the build directory is
        # actually used by tests. Putting two copies in the .zip isn't great,
        # so maybe we can find another workaround.
        # 'Chromium Framework.framework',
        # 'Google Chrome Framework.framework',
        # We copy the Helper into the app bundle, we don't need the second
        # copy outside the app.
        'Chromium Helper.app', 'Google Chrome Helper.app',
        '.deps', 'obj', 'obj.host', 'obj.target',
    ]
    return mac_exclusions
  if chromium_utils.IsLinux():
    linux_exclusions = [
        # intermediate build directories (full of .o, .d, etc.).
        'appcache', 'glue', 'googleurl', 'lib', 'lib.host',
        'obj', 'obj.host', 'obj.target', 'src', '.deps',
        # scons build cruft
        '.sconsign.dblite',
        # build helper, not needed on testers
        'mksnapshot',
    ]
    return linux_exclusions
  return []
def main(): try: if chromium_utils.IsWindows(): return main_win() elif chromium_utils.IsMac(): return main_mac() elif chromium_utils.IsLinux(): return main_linux() else: print 'Unknown platform: ' + sys.platform return 1 except FullDriveException, e: print >> sys.stderr, 'Not enough free space on %s: %d bytes left' % ( e.args[0], e.args[1]) send_alert(e.args[0], e.args[1])
def FileRegexBlacklist(options):
  """Return a regex matching files that should be excluded from the archive."""
  if chromium_utils.IsWindows():
    return r'^.+\.(rc|res|lib|exp|ilk|7z|([pP]recompile\.h\.pch.*))$'
  if chromium_utils.IsMac():
    # The static libs are just built as intermediate targets, and we don't
    # need to pull the dSYMs over to the testers most of the time (except for
    # the memory tools).
    return (r'^.+\.(a)$' if options.package_dsym_files
            else r'^.+\.(a|dSYM)$')
  if chromium_utils.IsLinux():
    # object files, archives, and gcc (make build) dependency info.
    return r'^.+\.(o|a|d)$'
  # Sentinel regex that can never match anything: no filtering.
  return '$NO_FILTER^'
def MaybeMakeDirectoryOnArchiveHost(dest_dir): """A wrapper method to create a directory on the archive host. It calls MaybeMakeDirectory on Windows and SshMakeDirectory on Linux/Mac. Args: dest_dir: destination directory on the host. """ host = config.Archive.archive_host if chromium_utils.IsWindows(): chromium_utils.MaybeMakeDirectory(dest_dir) print 'saving results to %s' % dest_dir elif chromium_utils.IsLinux() or chromium_utils.IsMac(): chromium_utils.SshMakeDirectory(host, dest_dir) print 'saving results to "%s" on "%s"' % (dest_dir, host) else: raise NotImplementedError( 'Platform "%s" is not currently supported.' % sys.platform)
def ShouldPackageFile(filename, target):
  """Returns true if the file should be a part of the resulting archive."""
  if chromium_utils.IsMac():
    unwanted_pattern = r'^.+\.(a|dSYM)$'
  elif chromium_utils.IsLinux():
    unwanted_pattern = r'^.+\.(o|a|d)$'
  else:
    raise NotImplementedError('%s is not supported.' % sys.platform)
  # Intermediate build artifacts never go to the testers.
  if re.match(unwanted_pattern, filename):
    return False
  # Skip files that we don't care about. Mostly directories.
  return filename not in zip_build.FileExclusions()
def GetRealBuildDirectory(build_dir, target, factory_properties):
  """Return the build directory."""
  if chromium_utils.IsWindows():
    parts = [build_dir, target]
  elif chromium_utils.IsLinux():
    parts = [os.path.dirname(build_dir), 'out', target]
  elif chromium_utils.IsMac():
    # Mac output location depends on the generator: make/ninja write to
    # 'out', xcode writes to 'xcodebuild'.
    generator = factory_properties.get('gclient_env',
                                       {}).get('GYP_GENERATORS')
    if generator in ('ninja', 'make'):
      parts = [os.path.dirname(build_dir), 'out', target]
    else:
      parts = [os.path.dirname(build_dir), 'xcodebuild', target]
  else:
    raise NotImplementedError('%s is not supported.' % sys.platform)
  return os.path.abspath(os.path.join(*parts))
def main(): if os.environ.get('SWARMING_HEADLESS'): # On Swarming, this script is run from a temporary directory. Eh. print('Skipping temp cleanup when run from Swarming.') return 0 try: if chromium_utils.IsWindows(): return main_win() elif chromium_utils.IsMac(): return main_mac() elif chromium_utils.IsLinux(): return main_linux() else: print 'Unknown platform: ' + sys.platform return 1 except FullDriveException, e: print >> sys.stderr, 'Not enough free space on %s: %d bytes left' % ( e.args[0], e.args[1]) send_alert(e.args[0], e.args[1])
def Cleanup(b_dir=None):
  """Performs the cleanup operation for the current platform.

  Raises:
    UnknownPlatform: If the current platform is unknown.
    FullDriveException: If one of the target drives was too full to operate.
  """
  if os.environ.get('SWARMING_HEADLESS'):
    # On Swarming, this script is run from a temporary directory. Eh.
    print('Skipping temp cleanup when run from Swarming.')
    return
  if chromium_utils.IsWindows():
    cleaner = _CleanupWindows
  elif chromium_utils.IsMac():
    cleaner = _CleanupMac
  elif chromium_utils.IsLinux():
    cleaner = _CleanupLinux
  else:
    raise UnknownPlatform('Unknown platform: %s' % (sys.platform,))
  cleaner(b_dir=b_dir)
def main(options, args):
  """Prepare staging: resolve paths and make the staging dir fetchable.

  Args:
    options: parsed command-line options (src_dir, build_dir, ...).
    args: unused here.
  """
  # Create some variables
  src_dir = os.path.abspath(options.src_dir)
  # NOTE(review): build_dir and build_version are computed but unused in
  # this visible portion -- presumably consumed further down; verify.
  build_dir = os.path.dirname(options.build_dir)
  staging_dir = slave_utils.GetStagingDir(src_dir)
  build_revision = slave_utils.SubversionRevision(src_dir)
  build_version = str(build_revision)

  if chromium_utils.IsMac() or chromium_utils.IsLinux():
    # Files are created umask 077 by default, we need to make sure the
    # staging dir can be fetch from, do this by recursively chmoding back up
    # to the root before pushing to web server.
    a_path = staging_dir
    while a_path != '/':
      current_permissions = os.stat(a_path)[0]
      # Stop as soon as read+execute are already set for user/group/other.
      if current_permissions & 0555 == 0555:
        break
      print 'Fixing permissions (%o) for \'%s\'' % (current_permissions,
                                                    a_path)
      os.chmod(a_path, current_permissions | 0555)
      a_path = os.path.dirname(a_path)
def _ArchiveFullLayoutTestResults(staging_dir, dest_dir, diff_file_list,
                                  options):
  """Zip layout-test results + diffs, push them to the archive host, unzip.

  Args:
    staging_dir: local directory where the zip is assembled.
    dest_dir: destination directory on the archive host.
    diff_file_list: the actual/diff output files to include in the zip.
    options: command-line options; options.results_dir is the zip root.
  """
  # Copy the actual and diff files to the web server.
  # Don't clobber the staging_dir in the MakeZip call so that it keeps the
  # files from the previous MakeZip call on diff_file_list.
  print "archiving results + diffs"
  full_zip_file = chromium_utils.MakeZip(staging_dir,
                                         'layout-test-results',
                                         diff_file_list, options.results_dir,
                                         remove_archive_directory=False)[1]
  slave_utils.CopyFileToArchiveHost(full_zip_file, dest_dir)

  # Extract the files on the web server.
  extract_dir = os.path.join(dest_dir, 'results')
  print 'extracting zip file to %s' % extract_dir
  if chromium_utils.IsWindows():
    # The archive host is local on Windows; extract in place.
    chromium_utils.ExtractZip(full_zip_file, extract_dir)
  elif chromium_utils.IsLinux() or chromium_utils.IsMac():
    # On Linux/Mac the zip now lives on the remote host; extract over ssh.
    remote_zip_file = os.path.join(dest_dir, os.path.basename(full_zip_file))
    chromium_utils.SshExtractZip(config.Archive.archive_host,
                                 remote_zip_file, extract_dir)
def CopyFileToArchiveHost(src, dest_dir):
  """A wrapper method to copy files to the archive host.

  It calls CopyFileToDir on Windows and SshCopyFiles on Linux/Mac.
  TODO: we will eventually want to change the code to upload the data to
  appengine.

  Args:
    src: full path to the src file.
    dest_dir: destination directory on the host.
  """
  host = config.Archive.archive_host
  if not os.path.exists(src):
    raise chromium_utils.ExternalError('Source path "%s" does not exist' %
                                       src)
  # Files may be created umask 077; open them up before publishing.
  chromium_utils.MakeWorldReadable(src)
  if chromium_utils.IsWindows():
    chromium_utils.CopyFileToDir(src, dest_dir)
    return
  if chromium_utils.IsLinux() or chromium_utils.IsMac():
    chromium_utils.SshCopyFiles(src, host, dest_dir)
    return
  raise NotImplementedError(
      'Platform "%s" is not currently supported.' % sys.platform)
def ShouldPackageFile(filename, target):
  # Disable 'unused argument' warning for 'target' | pylint: disable=W0613
  """Returns true if the file should be a part of the resulting archive."""
  if chromium_utils.IsMac():
    unwanted_pattern = r'^.+\.(a|dSYM)$'
  elif chromium_utils.IsLinux():
    unwanted_pattern = r'^.+\.(o|a|d)$'
  elif chromium_utils.IsWindows():
    unwanted_pattern = r'^.+\.(obj|lib|pch|exp)$'
  else:
    raise NotImplementedError('%s is not supported.' % sys.platform)
  # Intermediate build artifacts never go to the testers.
  if re.match(unwanted_pattern, filename):
    return False
  # Skip files that we don't care about. Mostly directories.
  return filename not in chromium_utils.FileExclusions()
def setUp(self):
  """Build the temp tree, platform build dir, and FILES/TESTS fixtures."""
  self.temp_dir = tempfile.mkdtemp()
  archive_utils_unittest.BuildTestFilesTree(self.temp_dir)

  # Make some directories to make the stager happy.
  self.target = 'Test'
  if chromium_utils.IsWindows():
    self.build_dir = os.path.join(self.temp_dir, 'build')
  elif chromium_utils.IsLinux():
    self.build_dir = os.path.join(self.temp_dir, 'out')
  elif chromium_utils.IsMac():
    self.build_dir = os.path.join(self.temp_dir, 'xcodebuild')
  else:
    raise PlatformError('Platform "%s" is not currently supported.' %
                        sys.platform)
  os.makedirs(os.path.join(self.build_dir, self.target))
  self.src_dir = os.path.join(self.temp_dir, 'build', 'src')
  os.makedirs(self.src_dir)
  self.archive_dir = os.path.join(self.temp_dir, 'archive')
  os.makedirs(self.archive_dir)

  # Make a directory to hold an extra files and tests specifier:
  self.extra_files_dir = os.path.join(self.temp_dir, 'build', 'src', 'extra')
  os.makedirs(self.extra_files_dir)

  # Create the FILES file and seed with contents:
  self.extra_files = os.path.join(self.extra_files_dir, 'FILES')
  extra_file = open(self.extra_files, 'w')
  for f in ZIP_TEST_FILES:
    extra_file.write(f + '\n')
  extra_file.close()

  # Create the TESTS file and seed with contents:
  self.extra_tests = os.path.join(self.extra_files_dir, 'TESTS')
  extra_tests = open(self.extra_tests, 'w')
  for t in EXTRA_TEST_FILES:
    extra_tests.write(t + '\n')
  extra_tests.close()

  # The stager object will be initialized in initializeStager method.
  self.stager = None
def determine_goma_jobs(): # We would like to speed up build on Windows a bit, since it is slowest. number_of_processors = 0 try: number_of_processors = multiprocessing.cpu_count() except NotImplementedError: print 'cpu_count() is not implemented, using default value' # When goma is used, 10 * number_of_processors is almost suitable # for -j value. Actually -j value was originally 100 for Linux and # Windows. But when goma was overloaded, it was reduced to 50. # Actually, goma server could not cope with burst request correctly # that time. Currently the situation got better a bit. The goma server # is now able to treat such requests well to a certain extent. # However, for safety, let's limit incrementing -j value only for # Windows now, since it's slowest. # Note that currently most try-bot build slaves have 8 processors. if chromium_utils.IsMac(): # On mac, due to the process number limit, we're using 50. return 50 elif chromium_utils.IsWindows() and number_of_processors > 0: return min(10 * number_of_processors, 200) else: return 50
def __init__(self, options, build_revision):
  """Sets a number of file and directory paths for convenient use."""
  self.options = options
  self._src_dir = os.path.abspath(options.src_dir)
  self._chrome_dir = os.path.join(self._src_dir, 'chrome')

  build_dir = build_directory.GetBuildOutputDirectory()
  self._build_dir = os.path.join(build_dir, options.target)
  # Pick the platform-specific tools/build subdirectory (holds FILES.cfg
  # and the test-file lists consumed below).
  if chromium_utils.IsWindows():
    self._tool_dir = os.path.join(self._chrome_dir, 'tools', 'build', 'win')
  elif chromium_utils.IsLinux():
    # On Linux, we might have built for chromeos. Archive the same.
    if (options.factory_properties.get('chromeos', None) or
        slave_utils.GypFlagIsOn(options, 'chromeos')):
      self._tool_dir = os.path.join(self._chrome_dir, 'tools', 'build',
                                    'chromeos')
    # Or, we might have built for Android.
    elif options.factory_properties.get('target_os') == 'android':
      self._tool_dir = os.path.join(self._chrome_dir, 'tools', 'build',
                                    'android')
    else:
      self._tool_dir = os.path.join(self._chrome_dir, 'tools', 'build',
                                    'linux')
  elif chromium_utils.IsMac():
    self._tool_dir = os.path.join(self._chrome_dir, 'tools', 'build', 'mac')
  else:
    raise NotImplementedError(
        'Platform "%s" is not currently supported.' % sys.platform)

  self._staging_dir = (options.staging_dir or
                       slave_utils.GetStagingDir(self._src_dir))
  if not os.path.exists(self._staging_dir):
    os.makedirs(self._staging_dir)

  self._symbol_dir_base = options.dirs['symbol_dir_base']
  self._www_dir_base = options.dirs['www_dir_base']

  if options.build_name:
    self._build_name = options.build_name
  else:
    self._build_name = slave_utils.SlaveBuildName(self._src_dir)

  # Scope both destinations to this builder's name.
  self._symbol_dir_base = os.path.join(self._symbol_dir_base,
                                       self._build_name)
  self._www_dir_base = os.path.join(self._www_dir_base, self._build_name)

  self._version_file = os.path.join(self._chrome_dir, 'VERSION')

  self._chromium_revision = chromium_utils.GetBuildSortKey(options)[1]
  self._v8_revision = chromium_utils.GetBuildSortKey(options,
                                                     project='v8')[1]
  self._v8_revision_git = chromium_utils.GetGitCommit(options, project='v8')

  self.last_change_file = os.path.join(self._staging_dir, 'LAST_CHANGE')
  # The REVISIONS file will record the revisions information of the main
  # components Chromium/WebKit/V8.
  self.revisions_path = os.path.join(self._staging_dir, 'REVISIONS')
  self._build_revision = build_revision
  self._build_path_component = str(self._build_revision)

  # Will be initialized in GetLastBuildRevision.
  self.last_chromium_revision = None
  self.last_v8_revision = None

  self._files_file = os.path.join(self._tool_dir,
                                  archive_utils.FILES_FILENAME)
  self._test_files = self.BuildOldFilesList(TEST_FILE_NAME)

  self._dual_upload = options.factory_properties.get('dual_upload', False)
  self._archive_files = None
def ArchiveBuild(self):
  """Zips build files and uploads them, their symbols, and a change log.

  Stages the files named in FILES.cfg, builds the main zip plus any custom
  archives, uploads everything (plus Linux packages, test files, and the
  LAST_CHANGE/LATEST revision markers) to the archive host and/or Google
  Storage, and reports missing files at the end.

  Returns:
    0 on success (including the "no changes since last build" early exit).

  Raises:
    archive_utils.StagingError: if no build revision was provided.
    NotImplementedError: if the platform is not Windows/Linux/Mac.
  """
  result = 0
  if self._build_revision is None:
    raise archive_utils.StagingError('No build revision was provided')

  print 'Staging in %s' % self._staging_dir

  # Parse FILES.cfg for this mode/arch to learn what must be archived.
  fparser = archive_utils.FilesCfgParser(self._files_file, self.options.mode,
                                         self.options.arch)
  files_list = fparser.ParseLegacyList()
  self._archive_files = archive_utils.ExpandWildcards(self._build_dir,
                                                      files_list)
  archives_list = fparser.ParseArchiveLists()
  # Check files and revision numbers.
  all_files_list = self._archive_files + [item['filename'] for sublist in
                                          archives_list.values() for item in
                                          sublist]
  all_files_list.append(self._version_file)
  not_found = archive_utils.VerifyFiles(all_files_list, self._build_dir,
                                        self.options.ignore)
  # Separate files that FILES.cfg marks optional; iterate over a copy since
  # the loop removes entries from not_found itself.
  not_found_optional = []
  for bad_fn in not_found[:]:
    if fparser.IsOptional(bad_fn):
      not_found_optional.append(bad_fn)
      not_found.remove(bad_fn)
      # Remove it from all file lists so we don't try to process it.
      if bad_fn in self._archive_files:
        self._archive_files.remove(bad_fn)
      for archive_list in archives_list.values():
        # In-place slice assignment so archives_list's lists are updated.
        archive_list[:] = [x for x in archive_list
                           if bad_fn != x['filename']]
  # TODO(mmoss): Now that we can declare files optional in FILES.cfg, should
  # we only allow not_found_optional, and fail on any leftover not_found
  # files?

  print 'last change: %s' % self._build_revision
  previous_revision = self.GetLastBuildRevision()
  # TODO(agable): This conditional only works for svn because git can't easily
  # compare revisions.
  if (slave_utils.GitOrSubversion(self._src_dir) == 'svn' and
      self._build_revision <= previous_revision):
    # If there have been no changes, report it but don't raise an exception.
    # Someone might have pushed the "force build" button.
    print 'No changes since last build (r%s <= r%s)' % (self._build_revision,
                                                        previous_revision)
    return 0

  print 'build name: %s' % self._build_name

  # Build the main chrome-<platform>.zip archive in the staging dir.
  archive_name = 'chrome-%s.zip' % self.TargetPlatformName()
  archive_file = self.CreateArchiveFile(archive_name,
                                        self._archive_files)[1]

  # Handle any custom archives.
  # TODO(mmoss): Largely copied from stage_build.py. Maybe refactor more of
  # this into archive_utils.py.
  archive_files = [archive_file]
  for archive_name in archives_list:
    # The list might be empty if it was all 'not_found' optional files.
    if not archives_list[archive_name]:
      continue
    if fparser.IsDirectArchive(archives_list[archive_name]):
      # 'direct_archive' entries are staged as a single file, not zipped.
      fileobj = archives_list[archive_name][0]
      # Copy the file to the path specified in archive_name, which might be
      # different than the dirname or basename in 'filename' (allowed by
      # 'direct_archive').
      stage_subdir = os.path.dirname(archive_name)
      stage_fn = os.path.basename(archive_name)
      chromium_utils.MaybeMakeDirectory(os.path.join(self._staging_dir,
                                                     stage_subdir))
      print 'chromium_utils.CopyFileToDir(%s, %s, dest_fn=%s)' % (
          os.path.join(self._build_dir, fileobj['filename']),
          os.path.join(self._staging_dir, stage_subdir), stage_fn)
      if not self.options.dry_run:
        chromium_utils.CopyFileToDir(
            os.path.join(self._build_dir, fileobj['filename']),
            os.path.join(self._staging_dir, stage_subdir),
            dest_fn=stage_fn)
      archive_files.append(os.path.join(self._staging_dir, archive_name))
    else:
      custom_archive = self.CreateArchiveFile(
          archive_name,
          [f['filename'] for f in archives_list[archive_name]])[1]
      print 'Adding %s to be archived.' % (custom_archive)
      archive_files.append(custom_archive)

  # Generate a revisions file which contains the Chromium/WebKit/V8's
  # revision information.
  self.GenerateRevisionFile()

  www_dir = os.path.join(self._www_dir_base, self._build_path_component)
  gs_bucket = self.options.factory_properties.get('gs_bucket', None)
  gs_acl = self.options.factory_properties.get('gs_acl', None)
  gs_base = None
  if gs_bucket:
    gs_base = '/'.join([gs_bucket, self._build_name,
                        self._build_path_component])
  self._UploadBuild(www_dir, self.revisions_path, archive_files, gs_base,
                    gs_acl)

  # Archive Linux packages (if any -- only created for Chrome builds).
  if chromium_utils.IsLinux():
    linux_packages = (glob.glob(os.path.join(
        self._build_dir, '*-r%s_*.deb' % self._chromium_revision)))
    linux_packages.extend(glob.glob(os.path.join(
        self._build_dir, '*-%s.*.rpm' % self._chromium_revision)))
    # First loop only logs what would be copied; actual copy happens below,
    # gated on dry_run.
    for package_file in linux_packages:
      print 'SshCopyFiles(%s, %s, %s)' % (package_file,
                                          self.options.archive_host, www_dir)
    if not self.options.dry_run:
      print 'SshMakeDirectory(%s, %s)' % (self.options.archive_host,
                                          www_dir)
      self.MySshMakeDirectory(self.options.archive_host, www_dir, gs_base)

      for package_file in linux_packages:
        self.MyMakeWorldReadable(package_file, gs_base)
        self.MySshCopyFiles(package_file, self.options.archive_host,
                            www_dir, gs_base, gs_acl=gs_acl)
        # Cleanup archived packages, otherwise they keep accumlating since
        # they have different filenames with each build.
        os.unlink(package_file)

  self.UploadTests(www_dir, gs_base, gs_acl)

  if not self.options.dry_run:
    # Save the current build revision locally so we can compute a changelog
    # next time
    self.SaveBuildRevisionToSpecifiedFile(self.last_change_file)

    # Record the latest revision in the developer archive directory.
    latest_file_path = os.path.join(self._www_dir_base, 'LATEST')
    if chromium_utils.IsWindows():
      print 'Saving revision to %s' % latest_file_path
      if gs_base:
        self.CopyFileToGS(self.last_change_file, gs_base, '..',
                          mimetype='text/plain', gs_acl=gs_acl)
      if not gs_base or self._dual_upload:
        self.SaveBuildRevisionToSpecifiedFile(latest_file_path)
    elif chromium_utils.IsLinux() or chromium_utils.IsMac():
      # Files are created umask 077 by default, so make it world-readable
      # before pushing to web server.
      self.MyMakeWorldReadable(self.last_change_file, gs_base)
      print 'Saving revision to %s:%s' % (self.options.archive_host,
                                          latest_file_path)
      self.MySshCopyFiles(self.last_change_file, self.options.archive_host,
                          latest_file_path, gs_base, '..',
                          mimetype='text/plain', gs_acl=gs_acl)
    else:
      raise NotImplementedError(
          'Platform "%s" is not currently supported.' % sys.platform)

  # Report missing files last so they are visible at the end of the log.
  if len(not_found_optional):
    sys.stderr.write('\n\nINFO: Optional File(s) not found: %s\n' %
                     ', '.join(not_found_optional))
  if len(not_found):
    sys.stderr.write('\n\nWARNING: File(s) not found: %s\n' %
                     ', '.join(not_found))
  return result
def __init__(self, options, build_revision):
  """Sets a number of file and directory paths for convenient use.

  This variant also tracks WebKit and V8 checkouts and supports explicit
  default revisions from the command line, falling back to querying the
  working copies via slave_utils.GetHashOrRevision.
  """
  self.options = options
  self._src_dir = os.path.abspath(options.src_dir)
  self._chrome_dir = os.path.join(self._src_dir, 'chrome')
  # TODO: This code should not be grabbing so deeply into WebKit.
  # Worse, this code ends up looking at top-of-tree WebKit
  # instead of the revision in DEPS.
  self._webkit_dir = os.path.join(self._src_dir, 'third_party', 'WebKit',
                                  'Source')
  self._v8_dir = os.path.join(self._src_dir, 'v8')

  build_dir = build_directory.GetBuildOutputDirectory()
  self._build_dir = os.path.join(build_dir, options.target)
  # Pick the platform-specific tools directory (holds FILES.cfg).
  if chromium_utils.IsWindows():
    self._tool_dir = os.path.join(self._chrome_dir, 'tools', 'build', 'win')
  elif chromium_utils.IsLinux():
    # On Linux, we might have built for chromeos. Archive the same.
    if (options.factory_properties.get('chromeos', None) or
        slave_utils.GypFlagIsOn(options, 'chromeos')):
      self._tool_dir = os.path.join(self._chrome_dir, 'tools', 'build',
                                    'chromeos')
    # Or, we might have built for Android.
    elif options.factory_properties.get('target_os') == 'android':
      self._tool_dir = os.path.join(self._chrome_dir, 'tools', 'build',
                                    'android')
    else:
      self._tool_dir = os.path.join(self._chrome_dir, 'tools', 'build',
                                    'linux')
  elif chromium_utils.IsMac():
    self._tool_dir = os.path.join(self._chrome_dir, 'tools', 'build', 'mac')
  else:
    raise NotImplementedError(
        'Platform "%s" is not currently supported.' % sys.platform)

  self._staging_dir = slave_utils.GetStagingDir(self._src_dir)
  self._symbol_dir_base = options.dirs['symbol_dir_base']
  self._www_dir_base = options.dirs['www_dir_base']
  if options.build_name:
    self._build_name = options.build_name
  else:
    self._build_name = slave_utils.SlaveBuildName(self._src_dir)
  # Upload roots are per build name.
  self._symbol_dir_base = os.path.join(self._symbol_dir_base,
                                       self._build_name)
  self._www_dir_base = os.path.join(self._www_dir_base, self._build_name)

  self._version_file = os.path.join(self._chrome_dir, 'VERSION')

  # Explicit command-line revisions override what the checkouts report.
  if options.default_chromium_revision:
    self._chromium_revision = options.default_chromium_revision
  else:
    self._chromium_revision = slave_utils.GetHashOrRevision(
        os.path.dirname(self._chrome_dir))  # src/ instead of src/chrome
  if options.default_webkit_revision:
    self._webkit_revision = options.default_webkit_revision
  else:
    self._webkit_revision = slave_utils.GetHashOrRevision(
        os.path.dirname(
            self._webkit_dir))  # WebKit/ instead of WebKit/Source
  if options.default_v8_revision:
    self._v8_revision = options.default_v8_revision
  else:
    self._v8_revision = slave_utils.GetHashOrRevision(self._v8_dir)

  self.last_change_file = os.path.join(self._staging_dir, 'LAST_CHANGE')
  # The REVISIONS file will record the revisions information of the main
  # components Chromium/WebKit/V8.
  self.revisions_path = os.path.join(self._staging_dir, 'REVISIONS')
  self._build_revision = build_revision
  # Will be initialized in GetLastBuildRevision.
  self.last_chromium_revision = None
  self.last_webkit_revision = None
  self.last_v8_revision = None

  self._files_file = os.path.join(self._tool_dir,
                                  archive_utils.FILES_FILENAME)
  self._test_files = self.BuildOldFilesList(TEST_FILE_NAME)

  self._dual_upload = options.factory_properties.get('dual_upload', False)
  self._archive_files = None