def RunPythonCommandInBuildDir(build_dir, target, command_line_args,
                               server_dir=None):
  """Run a python command line from the build directory.

  On win32, runs cygwin's setup_mount.bat first; on other platforms,
  prepends the checkout's tools/python to PYTHONPATH. On Linux the command
  is run under a virtual X server.
  """
  if sys.platform == 'win32':
    interpreter = 'python.exe'
    mount_script = chromium_utils.FindUpward(build_dir, 'third_party',
                                             'cygwin', 'setup_mount.bat')
    chromium_utils.RunCommand([mount_script])
  else:
    interpreter = 'python'
    tools_python = chromium_utils.FindUpward(build_dir, 'tools', 'python')
    os.environ['PYTHONPATH'] = (
        tools_python + ":" + os.environ.get('PYTHONPATH', ''))

  if chromium_utils.IsLinux():
    slave_name = SlaveBuildName(build_dir)
    xvfb.StartVirtualX(slave_name,
                       os.path.join(build_dir, '..', 'out', target),
                       server_dir=server_dir)

  # The list of tests is given as arguments.
  result = chromium_utils.RunCommand([interpreter] + list(command_line_args))

  if chromium_utils.IsLinux():
    xvfb.StopVirtualX(slave_name)
  return result
def playback_benchmark(options, args):
  """Using the target build configuration, run the playback test."""
  src_root = os.path.dirname(options.build_dir)  # That's src dir.
  replay_data_dir = os.path.join(src_root, 'data', 'webapp_benchmarks',
                                 'gmailjs')

  # Shared state between this thread and the benchmark server thread.
  shared = {'ready': threading.Event()}

  def on_results(results):
    shared['results'] = results
    shared['ready'].set()

  replay_server = playback_benchmark_replay.ReplayBenchmark(
      on_results, replay_data_dir, SERVER_PORT)
  server_thread = threading.Thread(target=replay_server.RunForever)
  server_thread.setDaemon(True)
  server_thread.start()

  if chromium_utils.IsLinux():
    xvfb.StartVirtualX(options.target, '')

  # Run once against the fresh build, once against the reference build.
  result = run_benchmark(options, False, shared)
  result |= run_benchmark(options, True, shared)

  if chromium_utils.IsLinux():
    xvfb.StopVirtualX(options.target)
  return result
def run_benchmark(options, use_refbuild, benchmark_results):
  """Launch the browser against the benchmark server and print its scores.

  Args:
    options: parsed options; build_dir and target locate the browser binary.
    use_refbuild: if True, run the checked-in reference build instead of the
        freshly built browser.
    benchmark_results: dict shared with the benchmark server thread; 'ready'
        is a threading.Event set once 'results' (a JSON string) is stored.

  Returns:
    0 (result is never changed below).

  Raises:
    chromium_utils.PathNotFound: if the browser executable is missing.
  """
  result = 0
  build_dir = os.path.abspath(options.build_dir)
  if not use_refbuild:
    # Fresh build: output directory name depends on the build tool/platform.
    if chromium_utils.IsMac():
      build_dir = os.path.join(os.path.dirname(build_dir), 'xcodebuild')
    elif chromium_utils.IsLinux():
      build_dir = os.path.join(os.path.dirname(build_dir), 'sconsbuild')
    build_dir = os.path.join(build_dir, options.target)
  else:
    # Reference build checked into the source tree.
    build_dir = os.path.join(os.path.dirname(build_dir), 'chrome', 'tools',
                             'test', 'reference_build')
    if chromium_utils.IsMac():
      build_dir = os.path.join(build_dir, 'chrome_mac')
    elif chromium_utils.IsLinux():
      build_dir = os.path.join(build_dir, 'chrome_linux')
    else:
      build_dir = os.path.join(build_dir, 'chrome_win')
  if chromium_utils.IsWindows():
    chrome_exe_name = 'chrome.exe'
  elif chromium_utils.IsLinux():
    chrome_exe_name = 'chrome'
  else:
    chrome_exe_name = 'Chromium'
  chrome_exe_path = os.path.join(build_dir, chrome_exe_name)
  if not os.path.exists(chrome_exe_path):
    raise chromium_utils.PathNotFound('Unable to find %s' % chrome_exe_path)

  # Fresh profile directory so successive runs don't contaminate each other.
  temp_dir = tempfile.mkdtemp()
  command = [
      chrome_exe_path,
      '--user-data-dir=%s' % temp_dir,
      '--no-first-run',
      '--no-default-browser-check',
      START_URL
  ]
  print "Executing: "
  print command
  browser_process = subprocess.Popen(command)

  # Block until the server thread posts results.
  benchmark_results['ready'].wait()
  # NOTE(review): wait() above has no timeout, so isSet() is always true
  # here; the guard is redundant but harmless.
  if benchmark_results['ready'].isSet():
    results = json.loads(benchmark_results['results'])[0]
    print_result(True, 'Total', results['score'], use_refbuild)
    for child in results['children']:
      print_result(False, child['name'], child['score'], use_refbuild)
    benchmark_results['ready'].clear()

  # Kill the browser: whole process tree on Windows, SIGTERM elsewhere.
  if chromium_utils.IsWindows():
    subprocess.call('taskkill /f /pid %i /t' % browser_process.pid)
  else:
    os.system('kill -15 %i' % browser_process.pid)
  browser_process.wait()
  shutil.rmtree(temp_dir)
  return result
def get_target_build_dir(args, options):
  """Keep this function in sync with src/build/landmines.py"""
  if options.build_tool == 'xcode':
    suffix = '-iphoneos' if 'iphoneos' in args else ''
    relpath = os.path.join('xcodebuild', options.target + suffix)
  elif options.build_tool in ['make', 'ninja']:
    if chromium_utils.IsLinux() and options.cros_board:
      # When building ChromeOS's Simple Chrome workflow, the output directory
      # has a CROS board name suffix.
      outdir = 'out_%s' % (options.cros_board,)
    elif options.out_dir:
      outdir = options.out_dir
    else:
      outdir = 'out'
    relpath = os.path.join(outdir, options.target)
  elif options.build_tool == 'make-android':
    relpath = os.path.join('out')
  elif options.build_tool in ['vs', 'ib']:
    relpath = os.path.join('build', options.target)
  else:
    raise NotImplementedError()
  return os.path.abspath(os.path.join(options.src_dir, relpath))
def main(): usage = 'usage: %prog [--nuke]' parser = OptionParser(usage) parser.add_option('-n', '--nuke', action='store_true', dest='nuke', default=False, help='Nuke whole repository (not just build output)') options, unused_args = parser.parse_args() if options.nuke: chromium_utils.RemoveDirectory('trunk') else: # Remove platform specific build output directories. if chromium_utils.IsWindows(): chromium_utils.RemoveDirectory('trunk\\build\\Debug') chromium_utils.RemoveDirectory('trunk\\build\\Release') elif chromium_utils.IsMac(): chromium_utils.RemoveDirectory('trunk/out') chromium_utils.RemoveDirectory('trunk/xcodebuild') elif chromium_utils.IsLinux(): chromium_utils.RemoveDirectory('trunk/out') else: print 'Unknown platform: ' + sys.platform return 1 return 0
def RemoveChromeTemporaryFiles():
  """A large hammer to nuke what could be leaked files from unittests or files
  left from a unittest that crashed, was killed, etc."""
  # NOTE: print out what is cleaned up so the bots don't timeout if
  # there is a lot to cleanup and also se we see the leaks in the
  # build logs.
  tmp_dir = tempfile.gettempdir()
  # At some point a leading dot got added, support with and without it.
  kLogRegex = '^\.?(com\.google\.Chrome|org\.chromium)\.'
  if chromium_utils.IsWindows():
    kLogRegex = r'^(base_dir|scoped_dir|nps|chrome_test|SafeBrowseringTest)'
    LogAndRemoveFiles(tmp_dir, kLogRegex)
    # Dump and temporary files.
    LogAndRemoveFiles(tmp_dir, r'^.+\.(dmp|tmp)$')
    LogAndRemoveFiles(tmp_dir, r'^_CL_.*$')
    RemoveChromeDesktopFiles()
    RemoveJumpListFiles()
  elif chromium_utils.IsLinux():
    kLogRegexHeapcheck = '\.(sym|heap)$'
    LogAndRemoveFiles(tmp_dir, kLogRegex)
    LogAndRemoveFiles(tmp_dir, kLogRegexHeapcheck)
    LogAndRemoveFiles('/dev/shm', kLogRegex)
  elif chromium_utils.IsMac():
    nstempdir_path = '/usr/local/libexec/nstempdir'
    if os.path.exists(nstempdir_path):
      ns_temp_dir = chromium_utils.GetCommandOutput([nstempdir_path]).strip()
      if ns_temp_dir:
        LogAndRemoveFiles(ns_temp_dir, kLogRegex)
    for i in ('Chromium', 'Google Chrome'):
      # Remove dumps.
      crash_path = '%s/Library/Application Support/%s/Crash Reports' % (
          os.environ['HOME'], i)
      LogAndRemoveFiles(crash_path, r'^.+\.dmp$')
  else:
    raise NotImplementedError(
        'Platform "%s" is not currently supported.' % sys.platform)
def FileRegexBlacklist(options):
  """Return a regex matching filenames to exclude on the current platform."""
  if chromium_utils.IsWindows():
    # Remove all .ilk/.7z and maybe PDB files
    # TODO(phajdan.jr): Remove package_pdb_files when nobody uses it.
    if options.factory_properties.get('package_pdb_files', True):
      return r'^.+\.(rc|res|lib|exp|ilk|7z|([pP]recompile\.h\.pch.*))$'
    return r'^.+\.(rc|res|lib|exp|ilk|pdb|7z|([pP]recompile\.h\.pch.*))$'
  if chromium_utils.IsMac():
    # The static libs are just built as intermediate targets, and we don't
    # need to pull the dSYMs over to the testers most of the time (except for
    # the memory tools).
    if options.factory_properties.get('package_dsym_files', False):
      return r'^.+\.(a)$'
    return r'^.+\.(a|dSYM)$'
  if chromium_utils.IsLinux():
    # object files, archives, and gcc (make build) dependency info.
    return r'^.+\.(o|a|d)$'
  # A pattern that matches nothing.
  return '$NO_FILTER^'
def FileRegexBlacklist(options):
  """Return a regex matching filenames/paths to exclude when packaging."""
  if chromium_utils.IsWindows():
    ext_list = ['res', 'lib', 'exp', 'ilk', '7z',
                r'([pP]recompile\.h\.pch.*)']
  elif chromium_utils.IsMac():
    # The static libs are just built as intermediate targets, and we don't
    # need to pull the dSYMs over to the testers most of the time (except for
    # the memory tools).
    ext_list = ['a'] if options.package_dsym_files else ['a', 'dSYM']
  elif chromium_utils.IsLinux():
    # object files, archives, and gcc (make build) dependency info.
    ext_list = ['o', 'a', 'd']
  else:
    return '$NO_FILTER^'

  # No need for the .ninja files generated by "gn gen".
  ext_list.append('ninja')
  by_extension = r'.+\.({})'.format('|'.join(ext_list))
  # Primary toolchain is excluded by "exclusions" rather than regex.
  by_toolchain_dir = r'\w+{}(obj|gen)'.format(re.escape(os.path.sep))
  return '^({}|{})$'.format(by_extension, by_toolchain_dir)
def BuildArch(target_arch=None):
  """Determine the architecture of the build being processed.

  Args:
    target_arch: explicit architecture specified by the build ('x64'), or
        None to detect from the current platform.

  Returns:
    '32bit' or '64bit'.

  Raises:
    StagingError: for an unrecognized target_arch, or when the Linux build
        architecture cannot be determined.
    NotImplementedError: on unsupported platforms.
  """
  if target_arch == 'x64':
    # Just use the architecture specified by the build if it's 64 bit.
    return '64bit'
  elif target_arch:
    # BUG FIX: the format was previously never applied -- the call passed
    # ('Unknown target_arch "%s"', target_arch) as two arguments, so the
    # raised message never contained the offending value.
    raise StagingError('Unknown target_arch "%s"' % target_arch)

  if chromium_utils.IsWindows() or chromium_utils.IsMac():
    # Architecture is not relevant for Mac (combines multiple archs in one
    # release) and Win (32-bit only), so just call it 32bit.
    # TODO(mmoss): This might change for Win if we add 64-bit builds.
    return '32bit'
  elif chromium_utils.IsLinux():
    # This assumes we either build natively or build (and run staging) in a
    # chroot, where the architecture of the python executable is the same as
    # the build target.
    # TODO(mmoss): This appears to be true for the current builders. If that
    # changes, we might have to modify the bots to pass in the build
    # architecture when running this script.
    # 'unknown' is the fallback value returned when detection fails.
    arch = platform.architecture(bits='unknown')[0]
    if arch == 'unknown':
      raise StagingError('Could not determine build architecture')
    return arch
  else:
    raise NotImplementedError('Platform "%s" is not currently supported.' %
                              sys.platform)
def archive(options, args):
  """Zip the build output and upload the archive to Google Storage.

  Builds a zip of the files in the build directory that pass
  ShouldPackageFile(), names it from 'cf_archive_name' (or 'cf_archive'),
  platform, target and SVN revision, uploads it to the 'gs_bucket' factory
  property, then deletes the local zip on success.

  Returns:
    The GSUtilCopyFile status (0 on success).

  Raises:
    StagingError: if the zip cannot be created or the upload fails.
  """
  build_dir, _ = chromium_utils.ConvertBuildDirToLegacy(
      options.build_dir, use_out=chromium_utils.IsLinux())
  build_dir = os.path.join(build_dir, options.target)
  src_dir = os.path.abspath(os.path.dirname(options.build_dir))
  staging_dir = slave_utils.GetStagingDir(src_dir)
  build_revision = slave_utils.SubversionRevision(src_dir)
  chromium_utils.MakeParentDirectoriesWorldReadable(staging_dir)

  print 'Staging in %s' % build_dir

  # Build the list of files to archive.
  zip_file_list = [f for f in os.listdir(build_dir)
                   if ShouldPackageFile(f, options.target)]

  subdir = None

  # TODO(nsylvain): We need to move linux to a subdir as well, but aarya is
  # not ready with the server-side change.
  if chromium_utils.IsMac():
    subdir = '%s-%s' % (chromium_utils.PlatformName(), options.target.lower())

  prefix = options.factory_properties.get('cf_archive_name', 'cf_archive')
  zip_file_name = '%s-%s-%s-%d' % (prefix, chromium_utils.PlatformName(),
                                   options.target.lower(), build_revision)

  (zip_dir, zip_file) = chromium_utils.MakeZip(staging_dir, zip_file_name,
                                               zip_file_list, build_dir,
                                               raise_error=True)
  # The intermediate archive directory is no longer needed once zipped.
  chromium_utils.RemoveDirectory(zip_dir)
  if not os.path.exists(zip_file):
    raise StagingError('Failed to make zip package %s' % zip_file)
  chromium_utils.MakeWorldReadable(zip_file)

  # Report the size of the zip file to help catch when it gets too big.
  zip_size = os.stat(zip_file)[stat.ST_SIZE]
  print 'Zip file is %ld bytes' % zip_size

  gs_bucket = options.factory_properties.get('gs_bucket', None)
  gs_acl = options.factory_properties.get('gs_acl', None)
  status = slave_utils.GSUtilCopyFile(zip_file, gs_bucket, subdir=subdir,
                                      gs_acl=gs_acl)
  if status:
    raise StagingError('Failed to upload %s to %s. Error %d' % (zip_file,
                                                                gs_bucket,
                                                                status))
  else:
    # Delete the file, it is not needed anymore.
    os.remove(zip_file)

  return status
def SubversionExe():
  """Return the name of the svn executable for the current platform."""
  # TODO(pamg): move this into platform_utils to support Mac and Linux.
  if chromium_utils.IsWindows():
    return 'svn.bat'  # Find it in the user's path.
  if chromium_utils.IsLinux() or chromium_utils.IsMac():
    return 'svn'  # Find it in the user's path.
  raise NotImplementedError(
      'Platform "%s" is not currently supported.' % sys.platform)
def CreateCoverageFileAndUpload(options): """Create coverage file with bbcov2lcov binary and upload to www dir.""" # Assert log files exist log_files = glob.glob(os.path.join(options.dynamorio_log_dir, '*.log')) if not log_files: print 'No coverage log files found.' return 1 if (options.browser_shard_index and options.test_to_upload in options.sharded_tests): coverage_info = os.path.join( options.build_dir, 'coverage_%s.info' % options.browser_shard_index) else: coverage_info = os.path.join(options.build_dir, COVERAGE_INFO) coverage_info = os.path.normpath(coverage_info) if os.path.isfile(coverage_info): os.remove(coverage_info) bbcov2lcov_binary = GetExecutableName( os.path.join(options.dynamorio_dir, 'tools', 'bin32', 'bbcov2lcov')) cmd = [ bbcov2lcov_binary, '--dir', options.dynamorio_log_dir, '--output', coverage_info ] RunCmd(cmd) # Delete log files. log_files = glob.glob(os.path.join(options.dynamorio_log_dir, '*.log')) for log_file in log_files: os.remove(log_file) # Assert coverage.info file exist if not os.path.isfile(coverage_info): print 'Failed to create coverage.info file.' return 1 # Upload coverage file. cov_dir = options.test_to_upload.replace('_', '') + COVERAGE_DIR_POSTFIX dest = os.path.join(options.www_dir, options.platform, options.build_id, cov_dir) dest = os.path.normpath(dest) if chromium_utils.IsWindows(): print('chromium_utils.CopyFileToDir(%s, %s)' % (coverage_info, dest)) chromium_utils.MaybeMakeDirectory(dest) chromium_utils.CopyFileToDir(coverage_info, dest) elif chromium_utils.IsLinux() or chromium_utils.IsMac(): print 'SshCopyFiles(%s, %s, %s)' % (coverage_info, options.host, dest) chromium_utils.SshMakeDirectory(options.host, dest) chromium_utils.MakeWorldReadable(coverage_info) chromium_utils.SshCopyFiles(coverage_info, options.host, dest) os.unlink(coverage_info) else: raise NotImplementedError('Platform "%s" is not currently supported.' % sys.platform) return 0
def get_target_build_dir(options):
  """Keep this function in sync with src/build/landmines.py"""
  if chromium_utils.IsLinux() and options.cros_board:
    # When building ChromeOS's Simple Chrome workflow, the output directory
    # has a CROS board name suffix.
    out_name = 'out_%s' % (options.cros_board,)
  else:
    out_name = options.out_dir or 'out'
  return os.path.abspath(
      os.path.join(options.src_dir, out_name, options.target))
def _UploadBuild(self, www_dir, revisions_path, archive_files, gs_base,
                 gs_acl):
  """Push the archive files and the revisions file to the archive location.

  On Windows the files are copied locally; on Linux/Mac they are copied to
  the archive host over ssh (and the local archives are deleted after the
  copy). The intended operations are always printed; they are only executed
  when not in dry-run mode.

  Raises:
    NotImplementedError: on unsupported platforms.
  """
  if chromium_utils.IsWindows():
    print 'os.makedirs(%s)' % www_dir
    for archive in archive_files:
      print 'chromium_utils.CopyFileToDir(%s, %s)' % (archive, www_dir)
    print 'chromium_utils.CopyFileToDir(%s, %s)' % (revisions_path, www_dir)
    if not self.options.dry_run:
      self.MyMaybeMakeDirectory(www_dir, gs_base)
      for archive in archive_files:
        self.MyCopyFileToDir(archive, www_dir, gs_base, gs_acl=gs_acl)
      self.MyCopyFileToDir(revisions_path, www_dir, gs_base, gs_acl=gs_acl)
  elif chromium_utils.IsLinux() or chromium_utils.IsMac():
    for archive in archive_files:
      print 'SshCopyFiles(%s, %s, %s)' % (
          archive, self.options.archive_host, www_dir)
    print 'SshCopyFiles(%s, %s, %s)' % (
        revisions_path, self.options.archive_host, www_dir)
    if not self.options.dry_run:
      print 'SshMakeDirectory(%s, %s)' % (self.options.archive_host,
                                          www_dir)
      self.MySshMakeDirectory(self.options.archive_host, www_dir, gs_base)
      for archive in archive_files:
        self.MyMakeWorldReadable(archive, gs_base)
        self.MySshCopyFiles(archive, self.options.archive_host, www_dir,
                            gs_base, gs_acl=gs_acl)
        # Local copy is no longer needed once pushed.
        os.unlink(archive)
      # Files are created umask 077 by default, so make it world-readable
      # before pushing to web server.
      self.MyMakeWorldReadable(revisions_path, gs_base)
      self.MySshCopyFiles(revisions_path, self.options.archive_host,
                          www_dir, gs_base, gs_acl=gs_acl)
  else:
    raise NotImplementedError(
        'Platform "%s" is not currently supported.' % sys.platform)
def prepareToolDir(self):
  # Build up a directory for Zip file testing
  if chromium_utils.IsWindows():
    rel_tool_dir = 'chrome/tools/build/win'
  elif chromium_utils.IsLinux():
    rel_tool_dir = 'chrome/tools/build/linux'
  elif chromium_utils.IsMac():
    rel_tool_dir = 'chrome/tools/build/mac'
  else:
    raise PlatformError('Platform "%s" is not currently supported.' %
                        sys.platform)
  self.tool_dir = os.path.join(self.src_dir, rel_tool_dir)
  os.makedirs(self.tool_dir)
def FileExclusions():
  """Return the per-platform list of build-output entries testers don't need."""
  # Skip files that the testers don't care about. Mostly directories.
  if chromium_utils.IsWindows():
    # Remove obj or lib dir entries
    return ['obj', 'lib', 'cfinstaller_archive', 'installer_archive']
  if chromium_utils.IsMac():
    return [
        # We don't need the arm bits v8 builds.
        'd8_arm', 'v8_shell_arm',
        # pdfsqueeze is a build helper, no need to copy it to testers.
        'pdfsqueeze',
        # The inspector copies its resources into a resources folder in the
        # build output, but we only need the copy that ends up within the
        # Chrome bundle.
        'resources',
        # We copy the framework into the app bundle, we don't need the second
        # copy outside the app.
        # TODO(mark): Since r28431, the copy in the build directory is
        # actually used by tests. Putting two copies in the .zip isn't great,
        # so maybe we can find another workaround.
        # 'Chromium Framework.framework',
        # 'Google Chrome Framework.framework',
        # We copy the Helper into the app bundle, we don't need the second
        # copy outside the app.
        'Chromium Helper.app',
        'Google Chrome Helper.app',
        '.deps', 'obj', 'obj.host', 'obj.target',
    ]
  if chromium_utils.IsLinux():
    return [
        # intermediate build directories (full of .o, .d, etc.).
        'appcache', 'glue', 'googleurl', 'lib', 'lib.host', 'obj',
        'obj.host', 'obj.target', 'src', '.deps',
        # scons build cruft
        '.sconsign.dblite',
        # build helper, not needed on testers
        'mksnapshot',
    ]
  return []
def common_make_settings( command, options, env, crosstool=None, compiler=None): """ Sets desirable environment variables and command-line options that are used in the Make build. """ assert compiler in (None, 'clang', 'goma', 'goma-clang') maybe_set_official_build_envvars(options, env) # Don't stop at the first error. command.append('-k') # Set jobs parallelization based on number of cores. jobs = os.sysconf('SC_NPROCESSORS_ONLN') # Test if we can use ccache. ccache = '' if chromium_utils.IsLinux(): if os.path.exists('/usr/bin/ccache'): # The default CCACHE_DIR is $HOME/.ccache which, on some of our # bots, is over NFS. This is intentional. Talk to thestig or # mmoss if you have questions. ccache = 'ccache ' # Setup crosstool environment variables. if crosstool: env['AR'] = crosstool + '-ar' env['AS'] = crosstool + '-as' env['CC'] = ccache + crosstool + '-gcc' env['CXX'] = ccache + crosstool + '-g++' env['LD'] = crosstool + '-ld' env['RANLIB'] = crosstool + '-ranlib' command.append('-j%d' % jobs) # Don't use build-in rules. command.append('-r') return if compiler in ('goma', 'goma-clang'): print 'using', compiler goma_jobs = 50 if jobs < goma_jobs: jobs = goma_jobs command.append('-j%d' % jobs) return if compiler == 'clang': command.append('-r') command.append('-j%d' % jobs)
def main(): try: if chromium_utils.IsWindows(): return main_win() elif chromium_utils.IsMac(): return main_mac() elif chromium_utils.IsLinux(): return main_linux() else: print 'Unknown platform: ' + sys.platform return 1 except FullDriveException, e: print >> sys.stderr, 'Not enough free space on %s: %d bytes left' % ( e.args[0], e.args[1]) send_alert(e.args[0], e.args[1])
def FileRegexBlacklist(options):
  """Return a regex matching filenames to exclude on the current platform."""
  pattern = '$NO_FILTER^'  # Matches nothing.
  if chromium_utils.IsWindows():
    pattern = r'^.+\.(rc|res|lib|exp|ilk|7z|([pP]recompile\.h\.pch.*))$'
  elif chromium_utils.IsMac():
    # The static libs are just built as intermediate targets, and we don't
    # need to pull the dSYMs over to the testers most of the time (except for
    # the memory tools).
    if options.package_dsym_files:
      pattern = r'^.+\.(a)$'
    else:
      pattern = r'^.+\.(a|dSYM)$'
  elif chromium_utils.IsLinux():
    # object files, archives, and gcc (make build) dependency info.
    pattern = r'^.+\.(o|a|d)$'
  return pattern
def MaybeMakeDirectoryOnArchiveHost(dest_dir): """A wrapper method to create a directory on the archive host. It calls MaybeMakeDirectory on Windows and SshMakeDirectory on Linux/Mac. Args: dest_dir: destination directory on the host. """ host = config.Archive.archive_host if chromium_utils.IsWindows(): chromium_utils.MaybeMakeDirectory(dest_dir) print 'saving results to %s' % dest_dir elif chromium_utils.IsLinux() or chromium_utils.IsMac(): chromium_utils.SshMakeDirectory(host, dest_dir) print 'saving results to "%s" on "%s"' % (dest_dir, host) else: raise NotImplementedError( 'Platform "%s" is not currently supported.' % sys.platform)
def ShouldPackageFile(filename, target):
  """Returns true if the file should be a part of the resulting archive."""
  if chromium_utils.IsMac():
    blacklist_re = re.compile('^.+\.(a|dSYM)$')
  elif chromium_utils.IsLinux():
    blacklist_re = re.compile('^.+\.(o|a|d)$')
  else:
    raise NotImplementedError('%s is not supported.' % sys.platform)
  if blacklist_re.match(filename):
    return False
  # Skip files that we don't care about. Mostly directories.
  return filename not in zip_build.FileExclusions()
def GetRealBuildDirectory(build_dir, target, factory_properties):
  """Return the build directory."""
  parent_dir = os.path.dirname(build_dir)
  if chromium_utils.IsWindows():
    parts = [build_dir, target]
  elif chromium_utils.IsLinux():
    parts = [parent_dir, 'out', target]
  elif chromium_utils.IsMac():
    generators = factory_properties.get('gclient_env',
                                        {}).get('GYP_GENERATORS')
    if generators in ('ninja', 'make'):
      parts = [parent_dir, 'out', target]
    else:
      parts = [parent_dir, 'xcodebuild', target]
  else:
    raise NotImplementedError('%s is not supported.' % sys.platform)
  return os.path.abspath(os.path.join(*parts))
def main(): if os.environ.get('SWARMING_HEADLESS'): # On Swarming, this script is run from a temporary directory. Eh. print('Skipping temp cleanup when run from Swarming.') return 0 try: if chromium_utils.IsWindows(): return main_win() elif chromium_utils.IsMac(): return main_mac() elif chromium_utils.IsLinux(): return main_linux() else: print 'Unknown platform: ' + sys.platform return 1 except FullDriveException, e: print >> sys.stderr, 'Not enough free space on %s: %d bytes left' % ( e.args[0], e.args[1]) send_alert(e.args[0], e.args[1])
def Cleanup(b_dir=None):
  """Performs the cleanup operation for the current platform.

  Raises:
    UnknownPlatform: If the current platform is unknown.
    FullDriveException: If one of the target drives was too full to operate.
  """
  if os.environ.get('SWARMING_HEADLESS'):
    # On Swarming, this script is run from a temporary directory. Eh.
    print('Skipping temp cleanup when run from Swarming.')
    return
  if chromium_utils.IsWindows():
    platform_cleanup = _CleanupWindows
  elif chromium_utils.IsMac():
    platform_cleanup = _CleanupMac
  elif chromium_utils.IsLinux():
    platform_cleanup = _CleanupLinux
  else:
    raise UnknownPlatform('Unknown platform: %s' % (sys.platform,))
  platform_cleanup(b_dir=b_dir)
def main(options, args): # Create some variables src_dir = os.path.abspath(options.src_dir) build_dir = os.path.dirname(options.build_dir) staging_dir = slave_utils.GetStagingDir(src_dir) build_revision = slave_utils.SubversionRevision(src_dir) build_version = str(build_revision) if chromium_utils.IsMac() or chromium_utils.IsLinux(): # Files are created umask 077 by default, we need to make sure the staging # dir can be fetch from, do this by recursively chmoding back up to the root # before pushing to web server. a_path = staging_dir while a_path != '/': current_permissions = os.stat(a_path)[0] if current_permissions & 0555 == 0555: break print 'Fixing permissions (%o) for \'%s\'' % (current_permissions, a_path) os.chmod(a_path, current_permissions | 0555) a_path = os.path.dirname(a_path)
def _ArchiveFullLayoutTestResults(staging_dir, dest_dir, diff_file_list, options): # Copy the actual and diff files to the web server. # Don't clobber the staging_dir in the MakeZip call so that it keeps the # files from the previous MakeZip call on diff_file_list. print "archiving results + diffs" full_zip_file = chromium_utils.MakeZip(staging_dir, 'layout-test-results', diff_file_list, options.results_dir, remove_archive_directory=False)[1] slave_utils.CopyFileToArchiveHost(full_zip_file, dest_dir) # Extract the files on the web server. extract_dir = os.path.join(dest_dir, 'results') print 'extracting zip file to %s' % extract_dir if chromium_utils.IsWindows(): chromium_utils.ExtractZip(full_zip_file, extract_dir) elif chromium_utils.IsLinux() or chromium_utils.IsMac(): remote_zip_file = os.path.join(dest_dir, os.path.basename(full_zip_file)) chromium_utils.SshExtractZip(config.Archive.archive_host, remote_zip_file, extract_dir)
def ShouldPackageFile(filename, target):
  # Disable 'unused argument' warning for 'target' | pylint: disable=W0613
  """Returns true if the file should be a part of the resulting archive."""
  if chromium_utils.IsMac():
    blacklist_pattern = '^.+\.(a|dSYM)$'
  elif chromium_utils.IsLinux():
    blacklist_pattern = '^.+\.(o|a|d)$'
  elif chromium_utils.IsWindows():
    blacklist_pattern = '^.+\.(obj|lib|pch|exp)$'
  else:
    raise NotImplementedError('%s is not supported.' % sys.platform)
  if re.match(blacklist_pattern, filename):
    return False
  # Skip files that we don't care about. Mostly directories.
  return filename not in chromium_utils.FileExclusions()
def CopyFileToArchiveHost(src, dest_dir):
  """A wrapper method to copy files to the archive host.

  It calls CopyFileToDir on Windows and SshCopyFiles on Linux/Mac.
  TODO: we will eventually want to change the code to upload the
  data to appengine.

  Args:
    src: full path to the src file.
    dest_dir: destination directory on the host.
  """
  host = config.Archive.archive_host
  if not os.path.exists(src):
    raise chromium_utils.ExternalError('Source path "%s" does not exist' % src)
  chromium_utils.MakeWorldReadable(src)
  if chromium_utils.IsWindows():
    chromium_utils.CopyFileToDir(src, dest_dir)
    return
  if chromium_utils.IsLinux() or chromium_utils.IsMac():
    chromium_utils.SshCopyFiles(src, host, dest_dir)
    return
  raise NotImplementedError(
      'Platform "%s" is not currently supported.' % sys.platform)
def setUp(self):
  """Build a temp tree with build/src/archive dirs and FILES/TESTS specs."""
  self.temp_dir = tempfile.mkdtemp()
  archive_utils_unittest.BuildTestFilesTree(self.temp_dir)

  # Make some directories to make the stager happy.
  self.target = 'Test'
  if chromium_utils.IsWindows():
    self.build_dir = os.path.join(self.temp_dir, 'build')
  elif chromium_utils.IsLinux():
    self.build_dir = os.path.join(self.temp_dir, 'out')
  elif chromium_utils.IsMac():
    self.build_dir = os.path.join(self.temp_dir, 'xcodebuild')
  else:
    raise PlatformError('Platform "%s" is not currently supported.' %
                        sys.platform)
  os.makedirs(os.path.join(self.build_dir, self.target))
  self.src_dir = os.path.join(self.temp_dir, 'build', 'src')
  os.makedirs(self.src_dir)
  self.archive_dir = os.path.join(self.temp_dir, 'archive')
  os.makedirs(self.archive_dir)

  # Make a directory to hold an extra files and tests specifier:
  self.extra_files_dir = os.path.join(self.temp_dir, 'build', 'src', 'extra')
  os.makedirs(self.extra_files_dir)

  # Create the FILES file and seed with contents:
  self.extra_files = os.path.join(self.extra_files_dir, 'FILES')
  extra_file = open(self.extra_files, 'w')
  for f in ZIP_TEST_FILES:
    extra_file.write(f + '\n')
  extra_file.close()

  # Create the TESTS file and seed with contents:
  self.extra_tests = os.path.join(self.extra_files_dir, 'TESTS')
  extra_tests = open(self.extra_tests, 'w')
  for t in EXTRA_TEST_FILES:
    extra_tests.write(t + '\n')
  extra_tests.close()

  # The stager object will be initialized in initializeStager method.
  self.stager = None
def real_main():
  """Parse compile options and run the ninja build.

  Returns:
    The exit status of main_ninja().
  """
  option_parser = optparse.OptionParser()
  option_parser.add_option('--clobber', action='store_true', default=False,
                           help='delete the output directory before '
                                'compiling')
  option_parser.add_option('--target', default='Release',
                           help='build target (Debug or Release)')
  option_parser.add_option('--src-dir', default=None,
                           help='path to the root of the source tree')
  option_parser.add_option('--mode', default='dev',
                           help='build mode (dev or official) controlling '
                                'environment variables set during build')
  # TODO(thakis): Remove this, https://crbug.com/622768
  option_parser.add_option('--build-tool', default=None, help='ignored')
  option_parser.add_option('--build-args', action='append', default=[],
                           help='arguments to pass to the build tool')
  option_parser.add_option('--build-data-dir', action='store',
                           help='specify a build data directory.')
  option_parser.add_option('--compiler', default=None,
                           help='specify alternative compiler (e.g. clang)')
  if chromium_utils.IsLinux():
    # ChromeOS-specific options are only offered on Linux hosts.
    option_parser.add_option('--cros-board', action='store',
                             help='If building for the ChromeOS Simple '
                                  'Chrome workflow, the name of the '
                                  'ChromeOS board.')
    option_parser.add_option('--out-dir', action='store',
                             help='Specify a custom output directory.')
  option_parser.add_option('--goma-dir',
                           default=os.path.join(BUILD_DIR, 'goma'),
                           help='specify goma directory')
  option_parser.add_option('--goma-cache-dir',
                           default=DEFAULT_GOMA_CACHE_DIR,
                           help='specify goma cache directory')
  option_parser.add_option('--goma-deps-cache-dir',
                           help='specify goma deps cache directory')
  option_parser.add_option('--goma-hermetic', default='error',
                           help='Set goma hermetic mode')
  option_parser.add_option('--goma-enable-remote-link', default=None,
                           help='Enable goma remote link.')
  option_parser.add_option('--goma-enable-compiler-info-cache',
                           action='store_true',
                           help='Enable goma CompilerInfo cache')
  option_parser.add_option('--goma-store-local-run-output', default=None,
                           help='Store local run output to goma servers.')
  option_parser.add_option('--goma-fail-fast', action='store_true')
  option_parser.add_option('--goma-disable-local-fallback',
                           action='store_true')
  option_parser.add_option('--goma-jsonstatus',
                           help='Specify a file to dump goma_ctl jsonstatus.')
  option_parser.add_option('--goma-service-account-json-file',
                           help='Specify a file containing goma service '
                                'account credentials')
  option_parser.add_option('--gsutil-py-path',
                           help='Specify path to gsutil.py script.')
  option_parser.add_option('--ninja-path', default='ninja',
                           help='Specify path to the ninja tool.')
  # NOTE(review): the help text below is missing a word ("immediately
  # [after] the first"); left as-is since help strings are runtime output.
  option_parser.add_option('--ninja-ensure-up-to-date', action='store_true',
                           help='Checks the output of the ninja builder to '
                                'confirm that a second compile immediately '
                                'the first is a no-op.')

  options, args = option_parser.parse_args()

  if not options.src_dir:
    options.src_dir = 'src'
  options.src_dir = os.path.abspath(options.src_dir)

  options.target_output_dir = get_target_build_dir(args, options)

  # Only ninja is supported now; --build-tool is accepted but ignored.
  assert options.build_tool in (None, 'ninja')
  return main_ninja(options, args)