def main(): option_parser = optparse.OptionParser() option_parser.add_option('', '--run-id', default=None, help='unique id for this run') option_parser.add_option('', '--generated-dir', default=None, help='path to the generated images directory') option_parser.add_option('', '--gpu-reference-dir', default=None, help=('path to the directory holding the reference' 'images generated by the gpu')) # --sw-reference-dir is ignored. We keep it here so we don't have to # modify master side script. option_parser.add_option('', '--sw-reference-dir', default=None, help=('path to the directory holding the reference' 'images generated by the software renderer')) options = option_parser.parse_args()[0] if (options.run_id is None or options.generated_dir is None or options.gpu_reference_dir is None): print 'All command options are required. Use --help.' return 1 if Archive(options.run_id, options.generated_dir, options.gpu_reference_dir): retcode = 0 else: retcode = 2 chromium_utils.RemoveDirectory(options.generated_dir) return retcode
def MakeUnversionedArchive(build_dir, staging_dir, zip_file_list, zip_file_name, strip_files=None): """Creates an unversioned full build archive. Returns the path of the created archive.""" # Prevents having zip_file_list to contain duplicates zip_file_list = list(set(zip_file_list)) (zip_dir, zip_file) = chromium_utils.MakeZip(staging_dir, zip_file_name, zip_file_list, build_dir, raise_error=True, strip_files=strip_files) chromium_utils.RemoveDirectory(zip_dir) if not os.path.exists(zip_file): raise StagingError('Failed to make zip package %s' % zip_file) chromium_utils.MakeWorldReadable(zip_file) # Report the size of the zip file to help catch when it gets too big and # can cause bot failures from timeouts during downloads to testers. zip_size = os.stat(zip_file)[stat.ST_SIZE] print 'Zip file is %ld bytes' % zip_size return zip_file
def archive(options, args):
  """Zips the build output and uploads it to Google Storage.

  Returns the GSUtilCopyFile status (0 on success); raises StagingError if
  zipping or uploading fails.
  """
  build_dir, _ = chromium_utils.ConvertBuildDirToLegacy(
      options.build_dir, use_out=chromium_utils.IsLinux())
  build_dir = os.path.join(build_dir, options.target)
  src_dir = os.path.abspath(os.path.dirname(options.build_dir))
  staging_dir = slave_utils.GetStagingDir(src_dir)
  build_revision = slave_utils.SubversionRevision(src_dir)
  chromium_utils.MakeParentDirectoriesWorldReadable(staging_dir)

  print 'Staging in %s' % build_dir

  # Build the list of files to archive.
  zip_file_list = [f for f in os.listdir(build_dir)
                   if ShouldPackageFile(f, options.target)]

  subdir = None

  # TODO(nsylvain): We need to move linux to a subdir as well, but aarya is not
  # ready with the server-side change.
  if chromium_utils.IsMac():
    subdir = '%s-%s' % (chromium_utils.PlatformName(),
                        options.target.lower())

  # Archive name embeds platform, target and SVN revision for traceability.
  prefix = options.factory_properties.get('cf_archive_name', 'cf_archive')
  zip_file_name = '%s-%s-%s-%d' % (prefix, chromium_utils.PlatformName(),
                                   options.target.lower(), build_revision)

  (zip_dir, zip_file) = chromium_utils.MakeZip(staging_dir,
                                               zip_file_name,
                                               zip_file_list,
                                               build_dir,
                                               raise_error=True)
  chromium_utils.RemoveDirectory(zip_dir)
  if not os.path.exists(zip_file):
    raise StagingError('Failed to make zip package %s' % zip_file)
  chromium_utils.MakeWorldReadable(zip_file)

  # Report the size of the zip file to help catch when it gets too big.
  zip_size = os.stat(zip_file)[stat.ST_SIZE]
  print 'Zip file is %ld bytes' % zip_size

  gs_bucket = options.factory_properties.get('gs_bucket', None)
  gs_acl = options.factory_properties.get('gs_acl', None)
  status = slave_utils.GSUtilCopyFile(zip_file, gs_bucket, subdir=subdir,
                                      gs_acl=gs_acl)
  if status:
    raise StagingError('Failed to upload %s to %s. Error %d' %
                       (zip_file, gs_bucket, status))
  else:
    # Delete the file, it is not needed anymore.
    os.remove(zip_file)

  return status
def cleanup(self, wanted):
  """Wraps remote_setBuilderList and prunes stale slave directories.

  First delegates to the original handler, then deletes build.dead
  directories and any directory under basedir not in the wanted set.
  Returns whatever the original handler returned.
  """
  retval = self.old_remote_setBuilderList(wanted)
  # Directories that must survive, plus one dir per wanted builder
  # (r[1] presumably the builddir element of each builder tuple — confirm).
  wanted_dirs = ['info', 'cert', '.svn'] + [r[1] for r in wanted]
  for d in os.listdir(self.basedir):
    # Delete build.dead directories.
    possible_build_dead = os.path.join(self.basedir, d, 'build.dead')
    if os.path.isdir(possible_build_dead):
      # NOTE(review): import is deliberately deferred to call time,
      # presumably to avoid import-order issues at slave startup — confirm.
      from common import chromium_utils
      log.msg("Deleting unwanted directory %s" % possible_build_dead)
      chromium_utils.RemoveDirectory(possible_build_dead)

    # Delete old slave directories.
    if d not in wanted_dirs and os.path.isdir(
        os.path.join(self.basedir, d)):
      log.msg("Deleting unwanted directory %s" % d)
      from common import chromium_utils
      chromium_utils.RemoveDirectory(os.path.join(self.basedir, d))
  return retval
def tearDownClass(cls):
  """Best-effort teardown: each step is isolated so one failure does not
  prevent the remaining servers from being stopped."""
  try:
    # Remove only the cache entries created by this test's git server.
    for d in os.listdir(CACHE_DIR):
      try:
        if d.lower().startswith('localhost:%d-botupdatetest' %
                                cls.git_server.port):
          chromium_utils.RemoveDirectory(os.path.join(CACHE_DIR, d))
      except Exception:
        # Deliberate swallow: cache cleanup is best-effort.
        pass
  except Exception:
    # CACHE_DIR may not exist at all; ignore.
    pass
  cls.git_server.stop()
  cls.svn_server.stop()
  try:
    cls.rietveld.stop_server()
  except Exception:
    # Rietveld may not have been started; ignore.
    pass
  chromium_utils.RemoveDirectory(cls.server_root)
def clobber():
  """Removes the platform-specific build output directories.

  Always returns 0, even when the platform is not recognized.
  """
  # BUG FIX (message): was "Clobbereing".
  print('Clobbering platform: %s' % sys.platform)
  # BUG FIX: these checks used `in ('win32')` — a parenthesized *string*,
  # making `in` a substring test (so e.g. 'win' would match). Use real
  # tuples; the linux tuple also accepts 'linux', the value reported by
  # modern interpreters alongside the legacy 'linux2'.
  if sys.platform in ('win32',):
    release_dir = os.path.abspath('Release_ia32')
    print('Removing directory %s' % release_dir)
    chromium_utils.RemoveDirectory(release_dir)
    debug_dir = os.path.abspath('Debug_ia32')
    print('Removing directory %s' % debug_dir)
    chromium_utils.RemoveDirectory(debug_dir)
  elif sys.platform in ('linux2', 'linux'):
    out_dir = os.path.abspath('out')
    print('Removing directory %s' % out_dir)
    chromium_utils.RemoveDirectory(out_dir)
  elif sys.platform.startswith('darwin'):
    xcode_dir = os.path.abspath('xcodebuild')
    print('Removing directory %s' % xcode_dir)
    chromium_utils.RemoveDirectory(xcode_dir)
  else:
    print("Platform not recognized")
  return 0
def main():
  """Deletes build output, or with --nuke the entire checkout."""
  opt_parser = OptionParser('usage: %prog [--nuke]')
  opt_parser.add_option('-n', '--nuke',
                        action='store_true', dest='nuke', default=False,
                        help='Nuke whole repository (not just build output)')
  opts = opt_parser.parse_args()[0]

  if opts.nuke:
    # Wipe the whole checkout.
    chromium_utils.RemoveDirectory('trunk')
    return 0

  # Remove platform specific build output directories.
  if chromium_utils.IsWindows():
    for doomed in ('trunk\\build\\Debug', 'trunk\\build\\Release'):
      chromium_utils.RemoveDirectory(doomed)
  elif chromium_utils.IsMac():
    for doomed in ('trunk/out', 'trunk/xcodebuild'):
      chromium_utils.RemoveDirectory(doomed)
  elif chromium_utils.IsLinux():
    chromium_utils.RemoveDirectory('trunk/out')
  else:
    print('Unknown platform: ' + sys.platform)
    return 1
  return 0
def cleanup(self, wanted):
  """Wraps remote_setBuilderList and prunes stale slave directories.

  Logs the wanted/actual directory sets, then deletes build.dead dirs and
  any directory not in the wanted set. Returns the original handler's
  return value.
  """
  retval = self.old_remote_setBuilderList(wanted)
  # Directories that must survive, plus one per wanted builder
  # (r[1] presumably the builddir element of each builder tuple — confirm).
  wanted_dirs = sorted(['info', 'cert', '.svn'] + [r[1] for r in wanted])
  Log('Wanted directories: %s' % wanted_dirs)
  actual_dirs = sorted(i for i in os.listdir(self.basedir)
                       if os.path.isdir(os.path.join(self.basedir, i)))
  Log('Actual directories: %s' % actual_dirs)
  for d in actual_dirs:
    # Delete build.dead directories.
    possible_build_dead = os.path.join(self.basedir, d, 'build.dead')
    if os.path.isdir(possible_build_dead):
      Log('Deleting unwanted directory %s' % possible_build_dead)
      # is_testing presumably a module-level dry-run flag — confirm.
      if not is_testing:
        chromium_utils.RemoveDirectory(possible_build_dead)

    # Delete old slave directories.
    if d not in wanted_dirs:
      Log('Deleting unwanted directory %s' % d)
      if not is_testing:
        chromium_utils.RemoveDirectory(os.path.join(self.basedir, d))
  return retval
def cleanup_directory(directory_to_clean):
  """Best-effort removal of a directory tree.

  Some files may be held open and resist deletion, so failures are
  reported to stdout instead of raised.

  Args:
    directory_to_clean: directory to clean, the directory itself is not
        deleted.
  """
  try:
    chromium_utils.RemoveDirectory(directory_to_clean)
  except OSError as removal_error:
    print('Exception removing %s: %s' % (directory_to_clean, removal_error))
def close(self):
  """Removes every tracked temp directory, unless leaking is enabled."""
  if self._leak:
    LOGGER.warning('Leaking temporary paths: %s', self._tempdirs)
  else:
    # Tear down in reverse creation order so nested dirs go first.
    for tempdir in reversed(self._tempdirs):
      try:
        if os.path.isdir(tempdir):
          LOGGER.debug('Cleaning up temporary directory [%s].', tempdir)
          chromium_utils.RemoveDirectory(tempdir)
      except BaseException:
        LOGGER.exception('Failed to clean up temporary directory [%s].',
                         tempdir)
  # Forget the paths either way.
  del self._tempdirs[:]
def main(argv):
  # NOTE(review): argv is accepted but ignored — arguments are read straight
  # from sys.argv. Presumably argv[1]=staging dir, argv[2]=zip name,
  # argv[3]=JSON file-list path, argv[4]=root dir, mirroring MakeZip's
  # (staging_dir, zip_file_name, zip_file_list, build_dir) — confirm.
  with open(sys.argv[3], 'r') as f:
    zip_file_list = json.load(f)
  (zip_dir, zip_file) = chromium_utils.MakeZip(sys.argv[1],
                                               sys.argv[2],
                                               zip_file_list,
                                               sys.argv[4],
                                               raise_error=True)
  # The intermediate staging copy is not needed once the zip exists.
  chromium_utils.RemoveDirectory(zip_dir)
  if not os.path.exists(zip_file):
    raise Exception('Failed to make zip package %s' % zip_file)

  # Report the size of the zip file to help catch when it gets too big.
  zip_size = os.stat(zip_file)[stat.ST_SIZE]
  print 'Zip file is %ld bytes' % zip_size
def main():
  """Runs Dartium annotated steps, then clobbers full Windows builders."""
  bot_name = os.getenv('BUILDBOT_BUILDERNAME', default='')
  annotated_steps = 'src/dartium_tools/buildbot_annotated_steps.py'
  chromium_utils.RunCommand([sys.executable, annotated_steps])

  # BIG HACK
  # Normal ninja clobbering does not work due to symlinks/python on windows
  # Full clobbering before building does not work since it will destroy
  # the ninja build files
  # So we basically clobber at the end here
  if chromium_utils.IsWindows() and 'full' in bot_name:
    chromium_utils.RemoveDirectory('src/out')
  return 0
def cleanup(self, base=None):
  """Explicitly remove ALL temporary directories under "<base>/<prefix>".

  This can be used e.g. to reduce chances of running out of disk space
  when temporary directories are leaked. "base" defaults to the system
  temp directory.
  """
  root = os.path.join(base or tempfile.gettempdir(), self._prefix)
  try:
    if os.path.isdir(root):
      LOGGER.info('Cleaning up temporary directory [%s].', root)
      chromium_utils.RemoveDirectory(root)
  except BaseException:
    LOGGER.exception('Failed to clean up temporary directory [%s].', root)
def PreProcess(options):
  """Setup some dynamorio config before running tests.

  Recreates an empty log directory, writes the bbcov drrun config pointing
  at it, and excludes python.exe from instrumentation.

  Returns 0.
  """
  # Start each run from an empty log directory.
  dynamorio_log_dir = DynamorioLogDir()
  chromium_utils.RemoveDirectory(dynamorio_log_dir)
  chromium_utils.MaybeMakeDirectory(dynamorio_log_dir)

  drrun_config = ('DR_OP=-nop_initial_bblock -disable_traces '
                  '-fast_client_decode -no_enable_reset\n'
                  'CLIENT_REL=tools/lib32/release/bbcov.dll\n'
                  'TOOL_OP=-logdir ' + dynamorio_log_dir + '\n')
  # BUG FIX: the config file handle was never closed; use a context manager
  # so the write is flushed and the handle released before drconfig runs.
  config_path = os.path.join(options.dynamorio_dir, 'tools', 'bbcov.drrun32')
  with open(config_path, 'w+') as fhandler:
    fhandler.write(drrun_config)

  # Exclude python's execution
  chromium_utils.RunCommand(os.path.join(options.dynamorio_dir, 'bin32',
                                         'drconfig -reg python.exe -norun'))
  return 0
def main_scons(options, args):
  """Interprets options, clobbers object files, and calls scons.

  Returns the scons process exit status from chromium_utils.RunCommand.
  """
  options.build_dir = os.path.abspath(options.build_dir)
  if options.clobber:
    build_output_dir = os.path.join(os.path.dirname(options.build_dir),
                                    'sconsbuild', options.target)
    print('Removing %s' % build_output_dir)
    chromium_utils.RemoveDirectory(build_output_dir)

  os.chdir(options.build_dir)

  if sys.platform == 'win32':
    command = ['hammer.bat']
  else:
    command = ['hammer']

  env = EchoDict(os.environ)
  if sys.platform == 'linux2':
    common_make_settings(command, options, env)
  else:
    # Keep going after errors on non-linux platforms.
    command.extend(['-k'])

  command.extend([
      # Force scons to always check for dependency changes.
      '--implicit-deps-changed',
      '--mode=' + options.target,
  ])

  # Here's what you can uncomment if you need to see more info
  # about what the build is doing on a slave:
  #
  # VERBOSE=1 (a setting in our local SCons config) replaces
  # the "Compiling ..." and "Linking ..." lines with the
  # actual executed command line(s)
  #
  # --debug=explain (a SCons option) will tell you why SCons
  # is deciding to rebuild thing (the target doesn't exist,
  # which .h file(s) changed, etc.)
  #
  #command.extend(['--debug=explain', 'VERBOSE=1'])
  command.extend(options.build_args + args)
  env.print_overrides()
  return chromium_utils.RunCommand(command, env=env)
def archive(options, args):
  """Zips the ASan build output and uploads it to Google Storage.

  Returns the GSUtilCopyFile status (0 on success); raises StagingError if
  zipping or uploading fails.
  """
  src_dir = os.path.abspath(os.path.dirname(options.build_dir))
  build_dir = os.path.join(src_dir, 'out', options.target)
  staging_dir = slave_utils.GetStagingDir(src_dir)
  build_revision = slave_utils.SubversionRevision(src_dir)
  chromium_utils.MakeParentDirectoriesWorldReadable(staging_dir)

  print 'Staging in %s' % build_dir

  # Build the list of files to archive.
  zip_file_list = [f for f in os.listdir(build_dir)
                   if ShouldPackageFile(f, options.target)]

  # Archive name embeds platform, target and SVN revision for traceability.
  prefix = options.factory_properties.get('asan_archive_name', 'asan')
  zip_file_name = '%s-%s-%s-%d' % (prefix, chromium_utils.PlatformName(),
                                   options.target.lower(), build_revision)

  (zip_dir, zip_file) = chromium_utils.MakeZip(staging_dir,
                                               zip_file_name,
                                               zip_file_list,
                                               build_dir,
                                               raise_error=True)
  chromium_utils.RemoveDirectory(zip_dir)
  if not os.path.exists(zip_file):
    raise StagingError('Failed to make zip package %s' % zip_file)
  chromium_utils.MakeWorldReadable(zip_file)

  # Report the size of the zip file to help catch when it gets too big.
  zip_size = os.stat(zip_file)[stat.ST_SIZE]
  print 'Zip file is %ld bytes' % zip_size

  gs_bucket = options.factory_properties.get('gs_bucket', None)
  gs_acl = options.factory_properties.get('gs_acl', None)
  status = slave_utils.GSUtilCopyFile(zip_file, gs_bucket, gs_acl=gs_acl)
  if status:
    raise StagingError('Failed to upload %s to %s. Error %d' %
                       (zip_file, gs_bucket, status))
  else:
    # Delete the file, it is not needed anymore.
    os.remove(zip_file)

  return status
def LogAndRemoveFiles(temp_dir, regex_pattern):
  """Removes files in |temp_dir| that match |regex_pattern|.

  This function prints out the name of each directory or filename before
  it deletes the file from disk. Errors during removal are reported to
  stderr but not raised.

  CONSISTENCY FIX: uses `except OSError as e` (like cleanup_directory in
  this file) instead of the long-deprecated `except OSError, e`, and
  writes to sys.stderr directly instead of `print >>`, so this helper
  also parses under Python 3. Behavior is unchanged.
  """
  regex = re.compile(regex_pattern)
  if not os.path.isdir(temp_dir):
    return
  for dir_item in os.listdir(temp_dir):
    if regex.search(dir_item):
      full_path = os.path.join(temp_dir, dir_item)
      print('Removing leaked temp item: %s' % full_path)
      try:
        if os.path.islink(full_path) or os.path.isfile(full_path):
          os.remove(full_path)
        elif os.path.isdir(full_path):
          chromium_utils.RemoveDirectory(full_path)
        else:
          print('Temp item wasn\'t a file or directory?')
      except OSError as e:
        # Same output as the old `print >> sys.stderr, e`.
        sys.stderr.write('%s\n' % e)
def main_win(options, args):
  """Interprets options, clobbers object files, and calls the build tool.

  Picks MSVS from the .sln header if not specified, prefers IncrediBuild
  over devenv.com when available, and retries once with a clobber when a
  known-flaky toolset error is seen in the output.
  """
  # Prefer the version specified in the .sln. When devenv.com is used at the
  # command line to start a build, it doesn't accept sln file from a different
  # version.
  if not options.msvs_version:
    sln = open(os.path.join(options.build_dir, options.solution), 'rU')
    header = sln.readline().strip()
    sln.close()
    if header.endswith('11.00'):
      options.msvs_version = '10'
    elif header.endswith('10.00'):
      options.msvs_version = '9'
    elif header.endswith('9.00'):
      options.msvs_version = '8'
    else:
      print >> sys.stderr, "Unknown sln header:\n" + header
      return 1

  REG_ROOT = 'SOFTWARE\\Microsoft\\VisualStudio\\'
  devenv = ReadHKLMValue(REG_ROOT + options.msvs_version + '.0', 'InstallDir')
  if devenv:
    devenv = os.path.join(devenv, 'devenv.com')
  else:
    print >> sys.stderr, ("MSVS %s was requested but is not installed." %
                          options.msvs_version)
    return 1

  ib = ReadHKLMValue('SOFTWARE\\Xoreax\\IncrediBuild\\Builder', 'Folder')
  if ib:
    ib = os.path.join(ib, 'BuildConsole.exe')

  # Prefer IncrediBuild when installed (unless explicitly disabled).
  if ib and os.path.exists(ib) and not options.no_ib:
    tool = ib
    if options.arch == 'x64':
      tool_options = ['/Cfg=%s|x64' % options.target]
    else:
      tool_options = ['/Cfg=%s|Win32' % options.target]
    if options.project:
      tool_options.extend(['/Prj=%s' % options.project])
  else:
    tool = devenv
    if options.arch == 'x64':
      tool_options = ['/Build', '%s|x64' % options.target]
    else:
      tool_options = ['/Build', options.target]
    if options.project:
      tool_options.extend(['/Project', options.project])

  options.build_dir = os.path.abspath(options.build_dir)
  build_output_dir = os.path.join(options.build_dir, options.target)

  def clobber():
    print('Removing %s' % build_output_dir)
    chromium_utils.RemoveDirectory(build_output_dir)

  if options.clobber:
    clobber()
  else:
    # Remove the log file so it doesn't grow without limit,
    chromium_utils.RemoveFile(build_output_dir, 'debug.log')
    # Remove the chrome.dll version resource so it picks up the new svn
    # revision, unless user explicitly asked not to remove it. See
    # Bug 1064677 for more details.
    if not options.keep_version_file:
      chromium_utils.RemoveFile(build_output_dir, 'obj', 'chrome_dll',
                                'chrome_dll_version.rc')

  env = EchoDict(os.environ)
  if options.compiler == 'goma':
    env['CC'] = 'gomacc.exe cl'
    env['CXX'] = 'gomacc.exe cl'
    env['PATH'] = ';'.join([options.goma_dir, env['PATH']])

  if options.mode == 'google_chrome' or options.mode == 'official':
    env['CHROMIUM_BUILD'] = '_google_chrome'

  if options.mode == 'official':
    # Official builds are always Google Chrome.
    env['OFFICIAL_BUILD'] = '1'
    env['CHROME_BUILD_TYPE'] = '_official'

  if not options.solution:
    options.solution = 'all.sln'

  result = -1
  solution = os.path.join(options.build_dir, options.solution)
  command = [tool, solution] + tool_options + args
  errors = []
  # Examples of known-flaky toolset failures that warrant a clobber retry:
  # midl : command line error MIDL1003 : error returned by the C
  # preprocessor (-1073741431)
  #
  # Error executing C:\PROGRA~2\MICROS~1\Common7\Tools\Bin\Midl.Exe (tool
  # returned code: 1282)
  #
  # ---
  #
  # cl : Command line error D8027 : cannot execute 'C:\Program Files
  # (x86)\Microsoft Visual Studio 8\VC\bin\c2.dll'
  #
  # ---
  #
  # Warning: Could not delete file "c:\b\slave\win\build\src\build\Debug\
  # chrome.dll" : Access is denied
  # --------------------Build System Warning--------------------------------
  # -------
  # Could not delete file:
  #     Could not delete file "c:\b\slave\win\build\src\build\Debug\
  # chrome.dll" : Access is denied
  #     (Automatically running xgHandle on first 10 files that could not be
  # deleted)
  #     Searching for '\Device\HarddiskVolume1\b\slave\win\build\src\build\
  # Debug\chrome.dll':
  #     No handles found.
  #     (xgHandle utility returned code: 0x00000000)
  #
  # ---
  #
  # webkit.lib(WebGeolocationError.obj) : fatal error LNK1318: Unexpected PDB
  # error; OK (0) ''
  #
  # Error executing link.exe (tool returned code: 1318)
  #
  # ---
  #
  # browser.lib(background_application_list_model.obj) : fatal error LNK1000:
  # Internal error during IMAGE::Pass2
  # (along with a register dump)
  #
  # ---
  #
  # ...\browser\history\download_create_info.cc : fatal error C1033: cannot
  # open program database '...\src\build\debug\obj\browser\browser\
  # vc80_ib_2.idb'
  known_toolset_bugs = [
      '\\c2.dll',
      'Midl.Exe (tool returned code: 1282)',
      'LINK : fatal error LNK1102: out of memory',
      'fatal error LNK1318: Unexpected PDB error',
      'fatal error LNK1000: Internal error during IMAGE::Pass2',
      'fatal error C1033',
  ]

  def scan(line):
    # Collect output lines that match a known toolset bug signature.
    for known_line in known_toolset_bugs:
      if known_line in line:
        errors.append(line)
        break

  env.print_overrides()
  result = chromium_utils.RunCommand(command, parser_func=scan, env=env,
                                     universal_newlines=True)
  if errors:
    print('\n\nRetrying a clobber build because of:')
    print('\n'.join((' ' + l for l in errors)))
    print('Removing %s' % build_output_dir)
    # Windows may hold file locks; retry the delete up to 3 times.
    for _ in range(3):
      try:
        chromium_utils.RemoveDirectory(build_output_dir)
        break
      except OSError, e:
        print(e)
        print('\nSleeping 15 seconds. Lovely windows file locks.')
        time.sleep(15)
    else:
      print('Failed to delete a file 3 times in a row, aborting.')
      return 1
    result = chromium_utils.RunCommand(command, env=env)
  # NOTE(review): `result` is not visibly returned here — presumably a
  # trailing `return result` exists beyond this excerpt; confirm.
def real_main(options):
  """Download a build and extract.

  Extract to build\\BuildDir\\full-build-win32 and rename it to
  build\\BuildDir\\Target. Tries up to 3 times; returns 0 on success,
  slave_utils.ERROR_EXIT_CODE otherwise.
  """
  target_build_output_dir = os.path.join(options.build_dir, options.target)
  platform = chromium_utils.PlatformName()

  revision = options.revision
  if not revision:
    revision = GetLatestRevision(options.build_url, platform)
    if not revision:
      print 'Failed to get revision number.'
      return slave_utils.ERROR_EXIT_CODE

  archive_url = GetBuildUrl(options.build_url, platform, revision)
  archive_name = 'dynamorio.' + os.path.basename(archive_url).split('.', 1)[1]

  temp_dir = tempfile.mkdtemp()
  try:
    # We try to download and extract 3 times.
    for tries in range(1, 4):
      print 'Try %d: Fetching build from %s' % (tries, archive_url)

      failure = False
      try:
        print '%s/%s' % (archive_url, archive_name)
        urllib.urlretrieve(archive_url, archive_name)
        print '\nDownload complete'
      except IOError:
        print '\nFailed to download build'
        failure = True
        # Automatic builds bail out immediately on a missing archive.
        if options.halt_on_missing_build:
          return slave_utils.ERROR_EXIT_CODE
      if failure:
        continue

      print 'Extracting build %s to %s...' % (archive_name, options.build_dir)
      try:
        chromium_utils.RemoveDirectory(target_build_output_dir)
        chromium_utils.ExtractZip(archive_name, temp_dir)

        # Look for the top level directory from extracted build.
        entries = os.listdir(temp_dir)
        output_dir = temp_dir
        if (len(entries) == 1 and
            os.path.isdir(os.path.join(output_dir, entries[0]))):
          output_dir = os.path.join(output_dir, entries[0])

        print 'Moving build from %s to %s' % (output_dir,
                                              target_build_output_dir)
        shutil.move(output_dir, target_build_output_dir)
      except (OSError, IOError, chromium_utils.ExternalError):
        print 'Failed to extract the build.'
        # Print out the traceback in a nice format
        traceback.print_exc()
        # Try again...
        time.sleep(3)
        continue
      return 0
  finally:
    # Always discard the scratch extraction directory.
    chromium_utils.RemoveDirectory(temp_dir)

  # If we get here, that means that it failed 3 times. We return a failure.
  return slave_utils.ERROR_EXIT_CODE
def layout_test(options, args):
  """Parse options and call run-webkit-tests, using Python from the tree."""
  build_dir = os.path.abspath(options.build_dir)

  dumprendertree_exe = 'DumpRenderTree.exe'
  if options.driver_name:
    dumprendertree_exe = '%s.exe' % options.driver_name

  # Disable the page heap in case it got left enabled by some previous
  # process.
  try:
    slave_utils.SetPageHeap(build_dir, dumprendertree_exe, False)
  except chromium_utils.PathNotFound:
    # If we don't have gflags.exe, report it but don't worry about it.
    print 'Warning: Couldn\'t disable page heap, if it was already enabled.'

  blink_scripts_dir = chromium_utils.FindUpward(build_dir, 'third_party',
                                                'WebKit', 'Tools', 'Scripts')
  run_blink_tests = os.path.join(blink_scripts_dir, 'run-webkit-tests')

  slave_name = slave_utils.SlaveBuildName(build_dir)

  command = [run_blink_tests,
             '--no-show-results',
             '--no-new-test-results',
             '--full-results-html',    # For the dashboards.
             '--clobber-old-results',  # Clobber test results before each run.
             '--exit-after-n-failures', '5000',
             '--exit-after-n-crashes-or-timeouts', '100',
            ]

  # TODO(dpranke): we can switch to always using --debug-rwt-logging
  # after all the bots have WebKit r124789 or later.
  # Probe `--help` output to learn whether this checkout supports it.
  capture_obj = slave_utils.RunCommandCaptureFilter()
  slave_utils.RunPythonCommandInBuildDir(build_dir, options.target,
                                         [run_blink_tests, '--help'],
                                         filter_obj=capture_obj)
  if '--debug-rwt-logging' in ''.join(capture_obj.lines):
    command.append('--debug-rwt-logging')
  else:
    command.append('--verbose')

  if options.results_directory:
    # Prior to the fix in https://bugs.webkit.org/show_bug.cgi?id=58272,
    # run_blink_tests expects the results directory to be relative to
    # the configuration directory (e.g., src/webkit/Release). The
    # parameter is given to us relative to build_dir, which is where we
    # will run the command from.
    #
    # When 58272 is landed, run_blink_tests will support absolute file
    # paths as well as paths relative to CWD for non-Chromium ports and
    # paths relative to the configuration dir for Chromium ports. As
    # a transitional fix, we convert to an absolute dir, but once the
    # hack in 58272 is removed, we can use results_dir as-is.
    if not os.path.isabs(options.results_directory):
      if options.results_directory.startswith('../../'):
        options.results_directory = options.results_directory[6:]
      options.results_directory = os.path.abspath(
          os.path.join(os.getcwd(), options.results_directory))
    chromium_utils.RemoveDirectory(options.results_directory)
    command.extend(['--results-directory', options.results_directory])

  if options.target:
    command.extend(['--target', options.target])
  if options.platform:
    command.extend(['--platform', options.platform])
  if options.skipped:
    command.extend(['--skipped', options.skipped])
  if options.no_pixel_tests:
    command.append('--no-pixel-tests')
  if options.batch_size:
    command.extend(['--batch-size', options.batch_size])
  if options.run_part:
    command.extend(['--run-part', options.run_part])
  if options.builder_name:
    command.extend(['--builder-name', options.builder_name])
  if options.build_number:
    command.extend(['--build-number', options.build_number])
  command.extend(['--master-name', slave_utils.GetActiveMaster() or ''])
  command.extend(['--build-name', slave_name])
  if options.step_name:
    command.extend(['--step-name', options.step_name])
  # On Windows, look for the target in an exact location.
  if sys.platform == 'win32':
    command.extend(['--build-directory', build_dir])
  if options.test_results_server:
    command.extend(['--test-results-server', options.test_results_server])

  if options.enable_pageheap:
    # Page heap slows everything down; give tests a longer timeout.
    command.append('--time-out-ms=120000')

  if options.time_out_ms:
    command.extend(['--time-out-ms', options.time_out_ms])

  for filename in options.additional_expectations:
    command.append('--additional-expectations=%s' % filename)

  if options.driver_name:
    command.append('--driver-name=%s' % options.driver_name)

  for additional_drt_flag in options.additional_drt_flag:
    command.append('--additional-drt-flag=%s' % additional_drt_flag)

  for test_list in options.test_list:
    command += ['--test-list', test_list]

  if options.enable_leak_detection:
    command.append('--enable-leak-detection')

  # The list of tests is given as arguments.
  if options.options:
    command.extend(options.options.split(' '))
  command.extend(args)

  # Nuke anything that appears to be stale chrome items in the temporary
  # directory from previous test runs (i.e.- from crashes or unittest leaks).
  slave_utils.RemoveChromeTemporaryFiles()

  try:
    if options.enable_pageheap:
      slave_utils.SetPageHeap(build_dir, dumprendertree_exe, True)
    # Run the the tests
    return slave_utils.RunPythonCommandInBuildDir(build_dir, options.target,
                                                  command)
  finally:
    if options.enable_pageheap:
      slave_utils.SetPageHeap(build_dir, dumprendertree_exe, False)

    if options.json_test_results:
      results_dir = options.results_directory
      results_json = os.path.join(results_dir, "failing_results.json")

      # If the json results file was not produced, then we produce no output
      # file too and rely on a recipe to handle this as invalid result.
      if os.path.isfile(results_json):
        with open(results_json, 'rb') as f:
          data = f.read()

        # data is in the form of:
        #   ADD_RESULTS(<json object>);
        # but use a regex match to also support a raw json object.
        m = re.match(
            r'[^({]*'     # From the beginning, take any except '(' or '{'
            r'(?:'
            r'\((.*)\);'  # Expect '(<json>);'
            r'|'          # or
            r'({.*})'     # '<json object>'
            r')$', data)
        assert m is not None
        data = m.group(1) or m.group(2)

        # Validate before writing the output file.
        json_data = json.loads(data)
        assert isinstance(json_data, dict)

        with open(options.json_test_results, 'wb') as f:
          f.write(data)
def archive(options, args):
  """Zips the build output and uploads it with commit-position metadata.

  Returns the GSUtilCopyFile status (0 on success); raises StagingError if
  zipping or uploading fails.
  """
  # Disable 'unused argument' warning for 'args' | pylint: disable=W0613
  build_dir = build_directory.GetBuildOutputDirectory()
  src_dir = os.path.abspath(os.path.dirname(build_dir))
  build_dir = os.path.join(build_dir, options.target)

  revision_dir = options.factory_properties.get('revision_dir')
  primary_project = chromium_utils.GetPrimaryProject(options)

  build_sortkey_branch, build_sortkey_value = GetBuildSortKey(
      options, primary_project)
  build_git_commit = GetGitCommit(options, primary_project)

  staging_dir = slave_utils.GetStagingDir(src_dir)
  chromium_utils.MakeParentDirectoriesWorldReadable(staging_dir)

  print 'Staging in %s' % build_dir

  # Build the list of files to archive.
  zip_file_list = [f for f in os.listdir(build_dir)
                   if ShouldPackageFile(f, options.target)]

  subdir_suffix = options.factory_properties.get('cf_archive_subdir_suffix',
                                                 '')
  pieces = [chromium_utils.PlatformName(), options.target.lower()]
  if subdir_suffix:
    pieces.append(subdir_suffix)
  subdir = '-'.join(pieces)

  # Components like v8 get a <name>-v8-component-<revision> infix.
  component = ''
  if revision_dir:
    component = '-%s-component' % revision_dir

  prefix = options.factory_properties.get('cf_archive_name', 'cf_archive')
  sortkey_path = chromium_utils.GetSortableUploadPathForSortKey(
      build_sortkey_branch, build_sortkey_value)
  zip_file_name = '%s-%s-%s%s-%s' % (prefix, chromium_utils.PlatformName(),
                                     options.target.lower(), component,
                                     sortkey_path)

  (zip_dir, zip_file) = chromium_utils.MakeZip(staging_dir,
                                               zip_file_name,
                                               zip_file_list,
                                               build_dir,
                                               raise_error=True)
  chromium_utils.RemoveDirectory(zip_dir)
  if not os.path.exists(zip_file):
    raise StagingError('Failed to make zip package %s' % zip_file)
  chromium_utils.MakeWorldReadable(zip_file)

  # Report the size of the zip file to help catch when it gets too big.
  zip_size = os.stat(zip_file)[stat.ST_SIZE]
  print 'Zip file is %ld bytes' % zip_size

  gs_bucket = options.factory_properties.get('gs_bucket', None)
  gs_acl = options.factory_properties.get('gs_acl', None)
  # Attach commit-position / git-commit metadata to the uploaded object.
  gs_metadata = {
      GS_COMMIT_POSITION_NUMBER_KEY: build_sortkey_value,
  }
  if build_sortkey_branch:
    gs_metadata[GS_COMMIT_POSITION_KEY] = chromium_utils.BuildCommitPosition(
        build_sortkey_branch, build_sortkey_value)
  if build_git_commit:
    gs_metadata[GS_GIT_COMMIT_KEY] = build_git_commit
  status = slave_utils.GSUtilCopyFile(zip_file, gs_bucket, subdir=subdir,
                                      gs_acl=gs_acl, metadata=gs_metadata)
  if status:
    raise StagingError('Failed to upload %s to %s. Error %d' %
                       (zip_file, gs_bucket, status))
  else:
    # Delete the file, it is not needed anymore.
    os.remove(zip_file)

  return status
def real_main(options):
  """ Download a build, extract it to build\\BuildDir\\full-build-win32
  and rename it to build\\BuildDir\\Target

  Tries up to 3 times. Returns 0 on success, WARNING_EXIT_CODE when only
  the generic (latest) archive could be fetched, ERROR_EXIT_CODE on
  failure.
  """
  abs_build_dir = os.path.abspath(
      build_directory.GetBuildOutputDirectory(options.src_dir))
  target_build_output_dir = os.path.join(abs_build_dir, options.target)
  # Generic name for the archive.
  archive_name = 'full-build-%s.zip' % chromium_utils.PlatformName()
  # Just take the zip off the name for the output directory name.
  output_dir = os.path.join(abs_build_dir, archive_name.replace('.zip', ''))
  src_dir = os.path.dirname(abs_build_dir)
  if not options.build_revision and not options.build_archive_url:
    (build_revision, webkit_revision) = slave_utils.GetBuildRevisions(
        src_dir, options.webkit_dir, options.revision_dir)
  else:
    build_revision = options.build_revision
    webkit_revision = options.webkit_revision
  url, archive_name = GetBuildUrl(options, build_revision, webkit_revision)
  if archive_name is None:
    archive_name = 'build.zip'
    base_url = None
  else:
    base_url = '/'.join(url.split('/')[:-1] + [archive_name])

  if url.startswith('gs://'):
    handler = GSHandler(url=url, archive_name=archive_name)
  else:
    handler = WebHandler(url=url, archive_name=archive_name)

  # We try to download and extract 3 times.
  for tries in range(1, 4):
    print 'Try %d: Fetching build from %s...' % (tries, url)

    failure = False

    # If the url is valid, we download the file.
    if not failure:
      if not handler.download():
        # Automatic builds bail out immediately on a missing archive.
        if options.halt_on_missing_build:
          return slave_utils.ERROR_EXIT_CODE
        failure = True

    # If the versioned url failed, we try to get the latest build.
    if failure:
      if url.startswith('gs://') or not base_url:
        continue
      else:
        print 'Fetching latest build at %s' % base_url
        base_handler = handler.__class__(base_url, handler.archive_name)
        if not base_handler.download():
          continue

    print 'Extracting build %s to %s...' % (archive_name, abs_build_dir)
    try:
      chromium_utils.RemoveDirectory(target_build_output_dir)
      chromium_utils.ExtractZip(archive_name, abs_build_dir)
      # For Chrome builds, the build will be stored in chrome-win32.
      if 'full-build-win32' in output_dir:
        chrome_dir = output_dir.replace('full-build-win32', 'chrome-win32')
        if os.path.exists(chrome_dir):
          output_dir = chrome_dir

      print 'Moving build from %s to %s' % (output_dir,
                                            target_build_output_dir)
      shutil.move(output_dir, target_build_output_dir)
    except (OSError, IOError, chromium_utils.ExternalError):
      print 'Failed to extract the build.'
      # Print out the traceback in a nice format
      traceback.print_exc()
      # Try again...
      continue

    # If we got the latest build, then figure out its revision number.
    if failure:
      print "Trying to determine the latest build's revision number..."
      try:
        build_revision_file_name = os.path.join(
            target_build_output_dir,
            chromium_utils.FULL_BUILD_REVISION_FILENAME)
        build_revision_file = open(build_revision_file_name, 'r')
        print 'Latest build is revision: %s' % build_revision_file.read()
        build_revision_file.close()
      except IOError:
        print "Could not determine the latest build's revision number"

    if failure:
      # We successfully extracted the archive, but it was the generic one.
      return slave_utils.WARNING_EXIT_CODE
    return 0

  # If we get here, that means that it failed 3 times. We return a failure.
  return slave_utils.ERROR_EXIT_CODE
def tearDown(self):
  """Remove the per-test working directory created during setUp."""
  scratch = self.workdir
  chromium_utils.RemoveDirectory(scratch)
build_revision_file = open(build_revision_path, 'w') build_revision_file.write('%d' % build_revision) build_revision_file.close() if chromium_utils.IsMac() or chromium_utils.IsLinux(): os.chmod(build_revision_path, 0644) zip_file_list.append(build_revision_file_name) except IOError: print 'Writing to revision file %s failed ' % build_revision_path zip_file_name = 'full-build-%s' % chromium_utils.PlatformName() (zip_dir, zip_file) = chromium_utils.MakeZip(staging_dir, zip_file_name, zip_file_list, build_dir, raise_error=True) chromium_utils.RemoveDirectory(zip_dir) if not os.path.exists(zip_file): raise StagingError('Failed to make zip package %s' % zip_file) if chromium_utils.IsMac() or chromium_utils.IsLinux(): os.chmod(zip_file, 0644) # Report the size of the zip file to help catch when it gets too big and # can cause bot failures from timeouts during downloads to testers. zip_size = os.stat(zip_file)[stat.ST_SIZE] print 'Zip file is %ld bytes' % zip_size # Create a versioned copy of the file. versioned_file = zip_file.replace('.zip', '_%d.zip' % build_revision) if os.path.exists(versioned_file): # This file already exists. Maybe we are doing a clobber build at the same # revision. We can move this file away.
def clobber(): print 'Removing %s' % options.target_output_dir chromium_utils.RemoveDirectory(options.target_output_dir)
def real_main(options):
  """Download a build, extract it to build\\BuildDir\\full-build-win32
  and rename it to build\\BuildDir\\Target.

  Returns:
    0 on success with the versioned build;
    slave_utils.WARNING_EXIT_CODE when only the generic "latest" archive
        could be fetched;
    slave_utils.ERROR_EXIT_CODE after three failed attempts, or immediately
        for auto-triggered builds when the 'halt_on_missing_build' factory
        property is set and the versioned zip is missing.
  """
  abs_build_dir = os.path.abspath(
      build_directory.GetBuildOutputDirectory(options.src_dir))
  target_build_output_dir = os.path.join(abs_build_dir, options.target)

  # Generic name for the archive.
  archive_name = 'full-build-%s.zip' % chromium_utils.PlatformName()

  # Just take the zip off the name for the output directory name.
  output_dir = os.path.join(abs_build_dir, archive_name.replace('.zip', ''))

  src_dir = os.path.dirname(abs_build_dir)
  # Derive revisions from the checkout unless one was supplied explicitly.
  if not options.build_revision:
    (build_revision, webkit_revision) = slave_utils.GetBuildRevisions(
        src_dir, options.webkit_dir, options.revision_dir)
  else:
    build_revision = options.build_revision
    webkit_revision = options.webkit_revision
  # base_url is the unversioned "latest" archive; url is the versioned one.
  base_url, url = GetBuildUrl(options, build_revision, webkit_revision)
  archive_name = os.path.basename(base_url)

  if url.startswith('gs://'):
    handler = GSHandler(url=url, archive_name=archive_name)
  else:
    handler = WebHandler(url=url, archive_name=archive_name)

  # We try to download and extract 3 times.
  for tries in range(1, 4):
    print 'Try %d: Fetching build from %s...' % (tries, url)

    failure = False

    # Check if the url exists.
    if not handler.is_present():
      print '%s is not found' % url
      failure = True

      # When 'halt_on_missing_build' is present in factory_properties and if
      # 'revision' is set in build properties, we assume the build is
      # triggered automatically and so we halt on a missing build zip. The
      # other case is if the build is forced, in which case we keep trying
      # later by looking for the latest build that's available.
      if (options.factory_properties.get('halt_on_missing_build', False) and
          'revision' in options.build_properties and
          options.build_properties['revision'] != ''):
        return slave_utils.ERROR_EXIT_CODE

    # If the url is valid, we download the file.
    if not failure:
      if not handler.download():
        failure = True

    # If the versioned url failed, we try to get the latest build.
    if failure:
      # No generic fallback for GS URLs; just retry the loop.
      if url.startswith('gs://'):
        continue
      else:
        print 'Fetching latest build at %s' % base_url
        # Repoint the same handler at the unversioned archive and retry.
        handler.url = base_url
        if not handler.download():
          continue

    print 'Extracting build %s to %s...' % (archive_name, abs_build_dir)
    try:
      # Clear any previous extraction before unzipping the fresh archive.
      chromium_utils.RemoveDirectory(target_build_output_dir)
      chromium_utils.ExtractZip(archive_name, abs_build_dir)
      # For Chrome builds, the build will be stored in chrome-win32.
      if 'full-build-win32' in output_dir:
        chrome_dir = output_dir.replace('full-build-win32', 'chrome-win32')
        if os.path.exists(chrome_dir):
          output_dir = chrome_dir

      print 'Moving build from %s to %s' % (output_dir,
                                            target_build_output_dir)
      shutil.move(output_dir, target_build_output_dir)
    except (OSError, IOError, chromium_utils.ExternalError):
      print 'Failed to extract the build.'
      # Print out the traceback in a nice format
      traceback.print_exc()
      # Try again...
      continue

    # If we got the latest build, then figure out its revision number.
    if failure:
      print "Trying to determine the latest build's revision number..."
      try:
        build_revision_file_name = os.path.join(
            target_build_output_dir,
            chromium_utils.FULL_BUILD_REVISION_FILENAME)
        build_revision_file = open(build_revision_file_name, 'r')
        print 'Latest build is revision: %s' % build_revision_file.read()
        build_revision_file.close()
      except IOError:
        print "Could not determine the latest build's revision number"

    if failure:
      # We successfully extracted the archive, but it was the generic one.
      return slave_utils.WARNING_EXIT_CODE
    return 0

  # If we get here, that means that it failed 3 times. We return a failure.
  return slave_utils.ERROR_EXIT_CODE
def MakeClean(self):
  """Delete the 'out' build directory when one exists; no-op otherwise."""
  out_dir = 'out'
  if not os.path.isdir(out_dir):
    return
  chromium_utils.RemoveDirectory(out_dir)
def main_xcode(options, args):
  """Interprets options, clobbers object files, and calls xcodebuild.

  Builds the xcodebuild command line, optionally clobbers the xcodebuild
  output directory, sets up an output filter (Xcode 3.2 only), manages the
  Goma compiler proxy lifecycle when a goma compiler is selected, and runs
  the build from inside options.build_dir.

  Args:
    options: parsed command-line options; reads target, solution,
        xcode_target, clobber, build_dir, compiler and goma_dir.
    args: extra arguments appended verbatim to the xcodebuild command.

  Returns:
    The exit status from chromium_utils.RunCommand for the build.
  """
  # If the project isn't in args, add all.xcodeproj to simplify configuration.
  command = ['xcodebuild', '-configuration', options.target]

  # TODO(mmoss) Support the old 'args' usage until we're confident the master
  # is switched to passing '--solution' everywhere.
  if not '-project' in args:
    # TODO(mmoss) Temporary hack to ignore the Windows --solution flag that is
    # passed to all builders. This can be taken out once the master scripts
    # are updated to only pass platform-appropriate --solution values.
    if (not options.solution or
        os.path.splitext(options.solution)[1] != '.xcodeproj'):
      options.solution = 'all.xcodeproj'
    command.extend(['-project', options.solution])

  if options.xcode_target:
    command.extend(['-target', options.xcode_target])

  # Note: this clobbers all targets, not just Debug or Release.
  if options.clobber:
    build_output_dir = os.path.join(os.path.dirname(options.build_dir),
                                    'xcodebuild')
    print('Removing %s' % build_output_dir)
    chromium_utils.RemoveDirectory(build_output_dir)

  env = EchoDict(os.environ)
  common_xcode_settings(command, options, env, options.compiler)

  # Add on any remaining args
  command.extend(args)

  # Set up the filter before changing directories so the raw build log can
  # be recorded.
  # Support a local file blocking filters (for debugging). Also check the
  # Xcode version to make sure it is 3.2, as that is what the filter is coded
  # to.
  xcodebuild_filter = None
  no_filter_path = os.path.join(os.getcwd(), 'no_xcodebuild_filter')
  xcode_info = chromium_utils.GetCommandOutput(['xcodebuild', '-version'])
  if os.path.exists(no_filter_path):
    print 'NOTE: "%s" exists, output is unfiltered' % no_filter_path
  elif not xcode_info.startswith('Xcode 3.2.'):
    print 'NOTE: Not using Xcode 3.2, output is unfiltered'
  else:
    # Tee the raw output to a full log file and filter what goes to stdout.
    full_log_path = os.path.join(os.getcwd(), 'full_xcodebuild_log.txt')
    full_log = open(full_log_path, 'w')
    now = datetime.datetime.now()
    full_log.write('Build started ' + now.isoformat() + '\n\n\n')
    print 'NOTE: xcodebuild output filtered, full log at: "%s"' % full_log_path
    xcodebuild_filter = XcodebuildFilter(full_log)

  os.chdir(options.build_dir)

  # If using the Goma compiler, first call goma_ctl with ensure_start
  # (or restart in clobber mode) to ensure the proxy is available.
  goma_ctl_cmd = [os.path.join(options.goma_dir, 'goma_ctl.sh')]
  if options.compiler in ('goma', 'goma-clang', 'gomaclang'):
    goma_key = os.path.join(options.goma_dir, 'goma.key')
    env['GOMA_COMPILER_PROXY_DAEMON_MODE'] = 'true'
    if os.path.exists(goma_key):
      env['GOMA_API_KEY_FILE'] = goma_key
    if options.clobber:
      chromium_utils.RunCommand(goma_ctl_cmd + ['restart'], env=env)
    else:
      chromium_utils.RunCommand(goma_ctl_cmd + ['ensure_start'], env=env)

  # Run the build.
  env.print_overrides()
  result = chromium_utils.RunCommand(command, env=env,
                                     filter_obj=xcodebuild_filter)

  if options.compiler in ('goma', 'goma-clang', 'gomaclang'):
    # Always stop the proxy for now to allow in-place update.
    chromium_utils.RunCommand(goma_ctl_cmd + ['stop'], env=env)

  return result
def clobber(target):
  """Announce and delete the 'out/<target>' directory under working_dir."""
  doomed = os.path.join(working_dir, 'out', target)
  print('Removing %s' % doomed)
  chromium_utils.RemoveDirectory(doomed)
def clobber():
  """Announce and delete the module-level build output directory."""
  message = 'Removing %s' % build_output_dir
  print(message)
  chromium_utils.RemoveDirectory(build_output_dir)