def main(): """Note: this is solely to run the current master's code and can totally differ from the underlying script flags. To update these flags: - Update the following code to support both the previous flag and the new flag. - Change scripts/master/factory/swarm_commands.py to pass the new flag. - Restart all the masters using swarming. - Remove the old flag from this code. """ client = swarming_utils.find_client(os.getcwd()) if not client: print >> sys.stderr, 'Failed to find swarm(ing)_client' return 1 parser = optparse.OptionParser(description=sys.modules[__name__].__doc__) parser.add_option('--swarming') parser.add_option('--isolate-server') parser.add_option('--task', nargs=4, action='append', default=[], dest='tasks') chromium_utils.AddPropertiesOptions(parser) options, args = parser.parse_args() if args: parser.error('Unsupported args: %s' % args) # Loads the other flags implicitly. task_prefix, slave_os, priority = process_build_properties(options) return trigger(client, options.swarming, options.isolate_server, priority, options.tasks, task_prefix, slave_os)
def main():
  """Parse command-line flags and create/upload the coverage file."""
  parser = optparse.OptionParser()
  # Required options:
  flag_specs = [
      ('--build-dir', 'path to main build directory (the parent of '
                      'the Release or Debug directory)'),
      ('--build-id', 'The build number of the tested build.'),
      ('--target', 'Target directory.'),
      ('--platform', 'Coverage subdir.'),
      ('--dynamorio-dir', 'Path to dynamorio binary.'),
      ('--dynamorio-log-dir', 'Path to dynamorio coverage log files.'),
      ('--test-to-upload', 'Test name.'),
  ]
  for flag, description in flag_specs:
    parser.add_option(flag, help=description)
  chromium_utils.AddPropertiesOptions(parser)
  options, _ = parser.parse_args()

  # Copy the interesting factory properties onto the options object, then
  # drop the raw property dicts so only plain attributes remain.
  props = options.factory_properties
  options.browser_shard_index = props.get('browser_shard_index')
  options.sharded_tests = props.get('sharded_tests')
  options.host = props.get('host')
  options.www_dir = props.get('www-dir')
  del options.factory_properties
  del options.build_properties
  return CreateCoverageFileAndUpload(options)
def main():
  """Set up the options for extracting a build and delegate to real_main()."""
  option_parser = optparse.OptionParser()
  option_parser.add_option('', '--target',
                           help='build target to archive (Debug or Release)')
  option_parser.add_option(
      '', '--build-dir',
      help='path to main build directory (the parent of '
           'the Release or Debug directory)')
  option_parser.add_option('', '--build-url',
                           help='url where to find the build to extract')
  # TODO(cmp): Remove --halt-on-missing-build when the buildbots are upgraded
  # to not use this argument.
  option_parser.add_option('--halt-on-missing-build', action='store_true',
                           default=False,
                           help='whether to halt on a missing build')
  option_parser.add_option('', '--webkit-dir',
                           help='webkit directory path, '
                                'relative to --build-dir')
  option_parser.add_option('', '--build-output-dir',
                           help='Output path relative to --build-dir.')
  chromium_utils.AddPropertiesOptions(option_parser)
  options, args = option_parser.parse_args()
  return real_main(options, args)
def main(): """Note: this is solely to run the current master's code and can totally differ from the underlying script flags. To update these flags: - Update the following code to support both the previous flag and the new flag. - Change scripts/master/factory/swarm_commands.py to pass the new flag. - Restart all the masters using swarming. - Remove the old flag from this code. """ client = swarming_utils.find_client(os.getcwd()) if not client: print >> sys.stderr, 'Failed to find swarm(ing)_client' return 1 parser = optparse.OptionParser() parser.add_option('-u', '--swarming', help='Swarm server') parser.add_option('-s', '--shards', type='int', default=-1, help='Number of shards') chromium_utils.AddPropertiesOptions(parser) (options, args) = parser.parse_args() options.swarming = options.swarming.rstrip('/') if not args: parser.error('Must specify one test name.') elif len(args) > 1: parser.error('Must specify only one test name.') print('Found %s' % client) sys.stdout.flush() return determine_version_and_run_handler(client, options, args[0])
def main():
  """Parse flags for archiving layout test results and run archive_layout()."""
  option_parser = optparse.OptionParser()
  option_parser.add_option('', '--build-dir', help='ignored')
  option_parser.add_option('', '--results-dir',
                           help='path to layout test results, relative to '
                                'the build_dir')
  option_parser.add_option('', '--builder-name', default=None,
                           help='The name of the builder running this script.')
  option_parser.add_option('', '--build-number', default=None,
                           help=('The build number of the builder running'
                                 'this script.'))
  option_parser.add_option('', '--gs-bucket', default=None,
                           help=('The google storage bucket to upload to. '
                                 'If provided, this script will upload to gs '
                                 'instead of the master.'))
  option_parser.add_option('', '--gs-acl', default=None,
                           help=('The ACL of the google storage files.'))
  chromium_utils.AddPropertiesOptions(option_parser)
  options, args = option_parser.parse_args()
  # --build-dir is ignored; the output directory is always computed.
  options.build_dir = build_directory.GetBuildOutputDirectory()
  # To continue supporting buildbot, initialize these from the
  # factory_properties if they were not supplied on the command line.
  if not options.gs_bucket:
    options.gs_bucket = options.factory_properties.get('gs_bucket')
  if not options.gs_acl:
    options.gs_acl = options.factory_properties.get('gs_acl')
  return archive_layout(options, args)
def main():
  """Parse command-line flags and archive the layout test results."""
  parser = optparse.OptionParser()
  parser.add_option(
      '', '--build-dir', default='webkit',
      help=('path to main build directory (the parent of '
            'the Release or Debug directory)'))
  parser.add_option(
      '', '--results-dir',
      help=('path to layout test results, relative to '
            'the build_dir'))
  parser.add_option(
      '', '--builder-name', default=None,
      help='The name of the builder running this script.')
  parser.add_option(
      '', '--build-number', default=None,
      help=('The build number of the builder running'
            'this script.'))
  chromium_utils.AddPropertiesOptions(parser)
  opts, positional = parser.parse_args()
  return archive_layout(opts, positional)
def main():
  """Parse flags, resolve the build directory/URL, and extract the build."""
  option_parser = optparse.OptionParser()
  option_parser.add_option('--target',
                           help='build target to archive (Debug or Release)')
  option_parser.add_option('--build-dir', help='ignored')
  option_parser.add_option('--build-url',
                           help='url where to find the build to extract')
  option_parser.add_option('--revision', help='Revision number to download.')
  option_parser.add_option('--halt-on-missing-build',
                           help='Halt on missing build.')
  chromium_utils.AddPropertiesOptions(option_parser)
  options, args = option_parser.parse_args()
  if args:
    print 'Unknown options: %s' % args
    return 1
  # --build-dir is ignored; the real output directory is always computed.
  options.build_dir = build_directory.GetBuildOutputDirectory()
  options.build_dir = os.path.abspath(options.build_dir)
  # Fall back to the factory property when --build-url was not passed.
  options.build_url = (options.build_url or
                       options.factory_properties.get('build_url'))
  # Drop the raw property dicts; real_main only sees plain attributes.
  del options.factory_properties
  del options.build_properties
  return real_main(options)
def main(argv):
  """Parse *argv* and archive the requested build target."""
  parser = optparse.OptionParser()
  parser.add_option('--target', default='Release',
                    help='build target to archive (Debug or Release)')
  parser.add_option('--build-dir', help='ignored')
  chromium_utils.AddPropertiesOptions(parser)
  parsed_options, leftover_args = parser.parse_args(argv)
  return archive(parsed_options, leftover_args)
def main():
  """Run the NaCl SDK build command that matches the current builder."""
  parser = optparse.OptionParser()
  chromium_utils.AddPropertiesOptions(parser)
  opts, extra_args = parser.parse_args()
  # Pick the command for this builder, falling back to the default entry.
  builder = opts.build_properties.get('buildername', '')
  command = SDK_BUILDER_MAP.get(builder) or SDK_BUILDER_MAP.get('DEFAULT')
  tools_dir = chromium_utils.FindUpward(
      os.getcwd(), 'src', 'native_client_sdk', 'src', 'build_tools')
  os.chdir(tools_dir)
  return chromium_utils.RunCommand(command + extra_args)
def main():
  """Parse flags for extracting a build, fill in defaults from the factory
  properties, and delegate to real_main()."""
  option_parser = optparse.OptionParser()
  option_parser.add_option('--target',
                           help='build target to archive (Debug or Release)')
  option_parser.add_option('--src-dir', default='src',
                           help='path to the top-level sources directory')
  option_parser.add_option('--build-dir', help='ignored')
  option_parser.add_option(
      '--build-url',
      help='Base url where to find the build to extract')
  option_parser.add_option(
      '--build-archive-url',
      help='Exact url where to find the build to extract')
  # TODO(cmp): Remove --halt-on-missing-build when the buildbots are upgraded
  # to not use this argument.
  option_parser.add_option('--halt-on-missing-build', action='store_true',
                           help='whether to halt on a missing build')
  option_parser.add_option('--build_revision',
                           help='Revision of the build that is being '
                                'archived. Overrides the revision found on '
                                'the local disk')
  option_parser.add_option('--webkit_revision',
                           help='Webkit revision of the build that is being '
                                'archived. Overrides the revision found on '
                                'the local disk')
  option_parser.add_option('--webkit-dir',
                           help='WebKit directory path, '
                                'relative to the src/ dir.')
  option_parser.add_option(
      '--revision-dir',
      help=('Directory path that shall be used to decide '
            'the revision number for the archive, '
            'relative to the src/ dir.'))
  option_parser.add_option('--build-output-dir', help='ignored')
  chromium_utils.AddPropertiesOptions(option_parser)
  options, args = option_parser.parse_args()
  if args:
    print 'Unknown options: %s' % args
    return 1
  # Command-line flags win; otherwise fall back to the factory properties.
  if not options.target:
    options.target = options.factory_properties.get('target', 'Release')
  if not options.webkit_dir:
    options.webkit_dir = options.factory_properties.get('webkit_dir')
  if not options.revision_dir:
    options.revision_dir = options.factory_properties.get('revision_dir')
  options.src_dir = (options.factory_properties.get('extract_build_src_dir')
                     or options.src_dir)
  return real_main(options)
def main():
  """Run the webdriver Java tests for the chromium.fyi master."""
  option_parser = optparse.OptionParser()
  chromium_utils.AddPropertiesOptions(option_parser)
  opts, _ = option_parser.parse_args()
  master = opts.build_properties.get('mastername')
  # Only chromium.fyi is supported; anything else is a configuration error.
  if master != 'chromium.fyi':
    raise RuntimeError('Unrecognized master: ' + master)
  command = [
      sys.executable,
      'src/chrome/test/webdriver/test/run_webdriver_java_tests.py',
  ]
  return chromium_utils.RunCommand(command)
def main():
  """Run the chromedriver buildbot steps through runtest.py."""
  option_parser = optparse.OptionParser()
  chromium_utils.AddPropertiesOptions(option_parser)
  opts, _ = option_parser.parse_args()
  revision = opts.build_properties.get('got_revision')
  command = [
      sys.executable,
      '../../../scripts/slave/runtest.py',
      '--build-dir', 'src/build',
      '--run-python-script',
      'src/chrome/test/chromedriver/run_buildbot_steps.py',
      '--revision', revision,
  ]
  return chromium_utils.RunCommand(command)
def main(args): """Note: this is solely to run the current master's code and can totally differ from the underlying script flags. To update these flags: - Update the following code to support both the previous flag and the new flag. - Change scripts/master/factory/swarm_commands.py to pass the new flag. - Restart all the masters using swarming. - Remove the old flag from this code. """ client = swarming_utils.find_client(os.getcwd()) if not client: print >> sys.stderr, 'Failed to find swarm(ing)_client' return 1 version = swarming_utils.get_version(client) if version < (0, 3): print >> sys.stderr, ( '%s is version %s which is too old. Please run the test locally' % (client, '.'.join(version))) return 1 parser = optparse.OptionParser(description=sys.modules[__name__].__doc__) parser.add_option('--verbose', action='store_true') parser.add_option('--swarming') parser.add_option('--isolate-server') chromium_utils.AddPropertiesOptions(parser) options, args = parser.parse_args(args) if args: parser.error('Unsupported args: %s' % args) if not options.swarming or not options.isolate_server: parser.error('Require both --swarming and --isolate-server') logging.basicConfig( level=logging.DEBUG if options.verbose else logging.ERROR) # Loads the other flags implicitly. slave_os, priority, steps, builder, build_number = process_build_properties( options) logging.info('To run: %s, %s, %s', slave_os, priority, steps) if not steps: print('Nothing to trigger') annotator.AdvancedAnnotationStep(sys.stdout, False).step_warnings() return 0 print('Selected tests:') print('\n'.join(' %s' % s for s in sorted(steps))) selected_os = swarming_utils.OS_MAPPING[slave_os] print('Selected OS: %s' % selected_os) return drive_many(client, version, options.swarming, options.isolate_server, priority, {'os': selected_os}, steps, builder, build_number)
def main():
  """Parse flags, determine the platform, and process coverage results."""
  # Map sys.platform to the coverage platform name; None if unsupported.
  if sys.platform in ('win32', 'cygwin'):
    default_platform = 'win'
  elif sys.platform.startswith('darwin'):
    default_platform = 'mac'
  elif sys.platform == 'linux2':
    default_platform = 'linux'
  else:
    default_platform = None
  platforms = ['linux', 'mac', 'win']
  option_parser = optparse.OptionParser()
  option_parser.add_option('', '--target', default='Debug',
                           help='build target (Debug, Release) '
                                '[default: %default]')
  option_parser.add_option('', '--build-dir', default='chrome', metavar='DIR',
                           help='directory in which build was run '
                                '[default: %default]')
  option_parser.add_option(
      '', '--platform', default=default_platform,
      help='specify platform (%s) [default: %%default]' % ', '.join(platforms))
  option_parser.add_option('--build-id',
                           help='The build number of the tested build.')
  option_parser.add_option('--upload-dir',
                           help='Path coverage file was uploaded to.')
  chromium_utils.AddPropertiesOptions(option_parser)
  options, args = option_parser.parse_args()
  # Copy sharding-related factory properties onto options, then drop the raw
  # property dicts so only plain attributes remain.
  fp = options.factory_properties
  options.tests = fp.get('tests')
  options.sharded_tests = fp.get('sharded_tests')
  options.browser_total_shards = fp.get('browser_total_shards')
  del options.factory_properties
  del options.build_properties
  if options.platform not in platforms:
    sys.stderr.write('Unsupported sys.platform %s.\n' % repr(sys.platform))
    msg = 'Use the --platform= option to specify a supported platform:\n'
    sys.stderr.write(msg + ' ' + ' '.join(platforms) + '\n')
    return 2
  return ProcessCoverage(options, args)
def main(argv):
  """Parse *argv* and archive the build."""
  parser = optparse.OptionParser()
  parser.add_option('', '--target', default='Release',
                    help='build target to archive (Debug or Release)')
  parser.add_option(
      '', '--build-dir',
      help=('path to main build directory (the parent of '
            'the Release or Debug directory)'))
  chromium_utils.AddPropertiesOptions(parser)
  parsed, remaining = parser.parse_args(argv)
  return archive(parsed, remaining)
def main():
  """Run the NaCl SDK builder command with a scrubbed gsutil environment."""
  parser = optparse.OptionParser()
  chromium_utils.AddPropertiesOptions(parser)
  opts, extra_args = parser.parse_args()
  # Pick the command for this builder, falling back to the default entry.
  builder = opts.build_properties.get('buildername', '')
  command = SDK_BUILDER_MAP.get(builder) or SDK_BUILDER_MAP.get('DEFAULT')
  tools_dir = chromium_utils.FindUpward(
      os.getcwd(), 'src', 'native_client_sdk', 'src', 'build_tools')
  os.chdir(tools_dir)
  # Remove BOTO_CONFIG from the environment -- we want to use the NaCl .boto
  # file that has access to gs://nativeclient-mirror.
  for credential_var in ('AWS_CREDENTIAL_FILE', 'BOTO_CONFIG'):
    os.environ.pop(credential_var, None)
  return chromium_utils.RunCommand(command + extra_args)
def main(argv): option_parser = optparse.OptionParser() option_parser.add_option('', '--target', default='Release', help='build target to archive (Debug or Release)') option_parser.add_option('', '--src-dir', default='src', help='path to the top-level sources directory') option_parser.add_option( '', '--build-dir', default='chrome', help=('path to main build directory (the parent of ' 'the Release or Debug directory)')) option_parser.add_option('', '--exclude-files', default='', help=('Comma separated list of files that should ' 'always be excluded from the zip.')) option_parser.add_option('', '--include-files', default='', help=('Comma separated list of files that should ' 'always be included in the zip.')) option_parser.add_option( '', '--webkit-dir', default=None, help='webkit directory path, relative to --src-dir') chromium_utils.AddPropertiesOptions(option_parser) options, args = option_parser.parse_args(argv) # When option_parser is passed argv as a list, it can return the caller as # first unknown arg. So throw a warning if we have two or more unknown # arguments. if args[1:]: print "Warning -- unknown arguments" % args[1:] return Archive(options)
def main(): option_parser = optparse.OptionParser() # Required options: option_parser.add_option('--post-process', action='store_true', help='Prepare dynamorio before running tests.') option_parser.add_option('--pre-process', action='store_true', help='Process coverage after running tests.') option_parser.add_option('--build-dir', help='ignored') option_parser.add_option('--build-id', help='The build number of the tested build.') option_parser.add_option('--target', help='Target directory.') option_parser.add_option('--platform', help='Coverage subdir.') option_parser.add_option('--dynamorio-dir', help='Path to dynamorio binary.') option_parser.add_option('--test-to-upload', help='Test name.') chromium_utils.AddPropertiesOptions(option_parser) options, _ = option_parser.parse_args() options.build_dir = build_directory.GetBuildOutputDirectory() fp = options.factory_properties options.browser_shard_index = fp.get('browser_shard_index') options.sharded_tests = fp.get('sharded_tests') options.host = fp.get('host') options.www_dir = fp.get('www-dir') del options.factory_properties del options.build_properties if options.pre_process: return PreProcess(options) elif options.post_process: return CreateCoverageFileAndUpload(options) else: print 'No valid options provided.' return 1
def main(argv):
  """Parse command-line flags for the cf archive step and run archive()."""
  option_parser = optparse.OptionParser()
  option_parser.add_option('--target', default='Release',
                           help='build target to archive (Debug or Release)')
  option_parser.add_option('--build-dir', help='ignored')
  option_parser.add_option('--cf_archive_name',
                           help='prefix of the archive zip file')
  option_parser.add_option('--cf_archive_subdir_suffix',
                           help='suffix of the archive directory')
  option_parser.add_option('--gs_acl', help='ACLs to be used on upload')
  option_parser.add_option('--gs_bucket',
                           help='the google storage bucket name')
  option_parser.add_option('--revision_dir',
                           help=('component builds: if set, use directory '
                                 'revision instead of chromium revision and '
                                 'add "-component" to the archive name'))
  chromium_utils.AddPropertiesOptions(option_parser)
  options, args = option_parser.parse_args(argv)
  return archive(options, args)
def main():
  """Parse flags, resolve the build output directory, and extract the build."""
  option_parser = optparse.OptionParser()
  option_parser.add_option('--target',
                           help='build target to archive (Debug or Release)')
  option_parser.add_option(
      '--build-dir',
      help='path to main build directory (the parent of '
           'the Release or Debug directory)')
  option_parser.add_option('--build-url',
                           help='url where to find the build to extract')
  option_parser.add_option('--build-output-dir',
                           help='Output path relative to --build-dir.')
  option_parser.add_option('--revision', help='Revision number to download.')
  chromium_utils.AddPropertiesOptions(option_parser)
  options, args = option_parser.parse_args()
  if args:
    print 'Unknown options: %s' % args
    return 1
  # Resolve the output directory: an explicit --build-output-dir wins;
  # otherwise fall back to the legacy conversion of --build-dir, which may
  # signal a soft failure via the WARNING exit code.
  if options.build_output_dir:
    options.build_output_dir = os.path.join(options.build_dir,
                                            options.build_output_dir)
  else:
    options.build_output_dir, bad = chromium_utils.ConvertBuildDirToLegacy(
        options.build_dir)
    if bad:
      return slave_utils.WARNING_EXIT_CODE
  options.build_output_dir = os.path.abspath(options.build_output_dir)
  # Fall back to the factory property when --build-url was not passed.
  options.build_url = (options.build_url or
                       options.factory_properties.get('build_url'))
  # Drop the raw property dicts; real_main only sees plain attributes.
  del options.factory_properties
  del options.build_properties
  return real_main(options)
def main():
  """Configure logging, parse command-line flags, and run dom_perf()."""
  # Initialize logging.
  logging.basicConfig(
      level=logging.INFO,
      format=('%(asctime)s %(filename)s:%(lineno)-3d'
              ' %(levelname)s %(message)s'),
      datefmt='%y%m%d %H:%M:%S')
  parser = optparse.OptionParser(usage=USAGE)
  parser.add_option('', '--target', default='Release',
                    help='build target (Debug or Release)')
  parser.add_option(
      '', '--build-dir', default='chrome',
      help=('path to main build directory (the parent of '
            'the Release or Debug directory)'))
  chromium_utils.AddPropertiesOptions(parser)
  opts, positional = parser.parse_args()
  return dom_perf(opts, positional)
def main(argv): option_parser = optparse.OptionParser() option_parser.add_option('--target', help='build target to archive (Debug or Release)') option_parser.add_option('--src-dir', default='src', help='path to the top-level sources directory') option_parser.add_option('--build-dir', help='ignored') option_parser.add_option('--exclude-files', default='', help='Comma separated list of files that should ' 'always be excluded from the zip.') option_parser.add_option('--include-files', default='', help='Comma separated list of files that should ' 'always be included in the zip.') option_parser.add_option('--master-name', help='Name of the buildbot master.') option_parser.add_option('--slave-name', help='Name of the buildbot slave.') option_parser.add_option('--build-number', type=int, help='Buildbot build number.') option_parser.add_option('--parent-build-number', type=int, help='Buildbot parent build number.') option_parser.add_option( '--webkit-dir', help='webkit directory path, relative to --src-dir') option_parser.add_option( '--revision-dir', help='Directory path that shall be used to decide ' 'the revision number for the archive, ' 'relative to --src-dir') option_parser.add_option('--build_revision', help='The revision the archive should be at. ' 'Overrides the revision found on disk.') option_parser.add_option('--webkit_revision', help='The revision of webkit the build is at. 
' 'Overrides the revision found on disk.') option_parser.add_option( '--exclude-unmatched', action='store_true', help='Exclude all files not matched by a whitelist') option_parser.add_option('--build-url', default='', help=('Optional URL to which to upload build ' '(overrides build_url factory property)')) option_parser.add_option('--cros-board', help=('If building for Chrom[e|ium]OS via the ' 'simple chrome workflow, the name of the ' 'target CROS board.')) option_parser.add_option('--package-dsym-files', action='store_true', default=False, help='Add also dSYM files.') option_parser.add_option('--append-deps-patch-sha', action='store_true') option_parser.add_option('--gs-acl') option_parser.add_option('--json-urls', help=('Path to json file containing uploaded ' 'archive urls. If this is omitted then ' 'the urls will be emitted as buildbot ' 'annotations.')) chromium_utils.AddPropertiesOptions(option_parser) options, args = option_parser.parse_args(argv) if not options.master_name: options.master_name = options.build_properties.get('mastername', '') if not options.slave_name: options.slave_name = options.build_properties.get('slavename') if not options.build_number: options.build_number = options.build_properties.get('buildnumber') if not options.parent_build_number: options.parent_build_number = options.build_properties.get( 'parent_buildumber') if not options.target: options.target = options.factory_properties.get('target', 'Release') if not options.build_url: options.build_url = options.factory_properties.get('build_url', '') if not options.append_deps_patch_sha: options.append_deps_patch_sha = options.factory_properties.get( 'append_deps_patch_sha') if not options.gs_acl: options.gs_acl = options.factory_properties.get('gs_acl') # When option_parser is passed argv as a list, it can return the caller as # first unknown arg. So throw a warning if we have two or more unknown # arguments. 
if args[1:]: print 'Warning -- unknown arguments' % args[1:] urls = Archive(options) if options.json_urls: # we need to dump json with open(options.json_urls, 'w') as json_file: json.dump(urls, json_file) else: # we need to print buildbot annotations if 'storage_url' in urls: print '@@@STEP_LINK@download@%s@@@' % urls['storage_url'] if 'zip_url' in urls: print '@@@SET_BUILD_PROPERTY@build_archive_url@"%s"@@@' % urls[ 'zip_url'] return 0
def main():
  """Parse flags and archive the build via the appropriate Stager."""
  option_parser = optparse.OptionParser()
  option_parser.add_option(
      '--mode', default='dev',
      help='switch indicating how to archive build (dev is only valid value)')
  option_parser.add_option('--target', default='Release',
                           help='build target to archive (Debug or Release)')
  option_parser.add_option(
      '--arch', default=archive_utils.BuildArch(),
      help='specify that target architecure of the build')
  option_parser.add_option('--src-dir', default='src',
                           help='path to the top-level sources directory')
  option_parser.add_option('--build-dir', help='ignored')
  option_parser.add_option('--extra-archive-paths', default='',
                           help='comma-separated lists of paths containing '
                                'files named FILES, SYMBOLS and TESTS. These '
                                'files contain lists of extra files to be '
                                'that will be archived. The paths are relative '
                                'to the directory given in --src-dir.')
  option_parser.add_option('--build-number', type='int',
                           help='The build number of the builder running '
                                'this script. we use it as the name of build '
                                'archive directory')
  option_parser.add_option('--dry-run', action='store_true',
                           help='Avoid making changes, for testing')
  option_parser.add_option('--ignore', default=[], action='append',
                           help='Files to ignore')
  option_parser.add_option('--archive_host',
                           default=archive_utils.Config.archive_host)
  option_parser.add_option('--build-name', default=None,
                           help="Name to use for build directory instead of "
                                "the slave build name")
  option_parser.add_option('--staging-dir',
                           help='Directory to use for staging the archives. '
                                'Default behavior is to automatically detect '
                                'slave\'s build directory.')
  chromium_utils.AddPropertiesOptions(option_parser)
  options, args = option_parser.parse_args()
  if args:
    raise archive_utils.StagingError('Unknown arguments: %s' % args)
  if not options.ignore:
    # Independent of any other configuration, these exes and any symbol files
    # derived from them (i.e., any filename starting with these strings) will
    # not be archived or uploaded, typically because they're not built for the
    # current distributon.
    options.ignore = archive_utils.Config.exes_to_skip_entirely
  if options.mode == 'official':
    option_parser.error('Official mode is not supported here')
  elif options.mode == 'dev':
    options.dirs = {
        # Built files are stored here, in a subdir. named for the build
        # version.
        'www_dir_base': archive_utils.Config.www_dir_base + 'snapshots',
        # Symbols are stored here, in a subdirectory named for the build
        # version.
        'symbol_dir_base': archive_utils.Config.www_dir_base + 'snapshots',
    }
  else:
    option_parser.error('Invalid options mode %s' % options.mode)
  # Choose the stager: by explicit build number when given, otherwise by the
  # Chromium revision found on disk.
  if options.build_number is not None:
    s = StagerByBuildNumber(options)
  else:
    s = StagerByChromiumRevision(options)
  return s.ArchiveBuild()
def main(): """Entry point for runtest.py. This function: (1) Sets up the command-line options. (2) Sets environment variables based on those options. (3) Delegates to the platform-specific main functions. Returns: Exit code for this script. """ option_parser = optparse.OptionParser(usage=USAGE) # Since the trailing program to run may have has command-line args of its # own, we need to stop parsing when we reach the first positional argument. option_parser.disable_interspersed_args() option_parser.add_option('--target', default='Release', help='build target (Debug or Release)') option_parser.add_option('--pass-target', action='store_true', default=False, help='pass --target to the spawned test script') option_parser.add_option('--build-dir', help='ignored') option_parser.add_option( '--pass-build-dir', action='store_true', default=False, help='pass --build-dir to the spawned test script') option_parser.add_option('--test-platform', help='Platform to test on, e.g. ios-simulator') option_parser.add_option('--total-shards', dest='total_shards', default=None, type='int', help='Number of shards to split this test into.') option_parser.add_option('--shard-index', dest='shard_index', default=None, type='int', help='Shard to run. 
Must be between 1 and ' 'total-shards.') option_parser.add_option('--run-shell-script', action='store_true', default=False, help='treat first argument as the shell script' 'to run.') option_parser.add_option('--run-python-script', action='store_true', default=False, help='treat first argument as a python script' 'to run.') option_parser.add_option('--generate-json-file', action='store_true', default=False, help='output JSON results file if specified.') option_parser.add_option('--xvfb', action='store_true', dest='xvfb', default=True, help='Start virtual X server on Linux.') option_parser.add_option('--no-xvfb', action='store_false', dest='xvfb', help='Do not start virtual X server on Linux.') option_parser.add_option('-o', '--results-directory', default='', help='output results directory for JSON file.') option_parser.add_option('--chartjson-file', default='', help='File to dump chartjson results.') option_parser.add_option('--log-processor-output-file', default='', help='File to dump gtest log processor results.') option_parser.add_option( '--builder-name', default=None, help='The name of the builder running this script.') option_parser.add_option('--slave-name', default=None, help='The name of the slave running this script.') option_parser.add_option( '--master-class-name', default=None, help='The class name of the buildbot master running ' 'this script: examples include "Chromium", ' '"ChromiumWebkit", and "ChromiumGPU". The ' 'flakiness dashboard uses this value to ' 'categorize results. See buildershandler.py ' 'in the flakiness dashboard code ' '(use codesearch) for the known values. 
' 'Defaults to fetching it from ' 'slaves.cfg/builders.pyl.') option_parser.add_option('--build-number', default=None, help=('The build number of the builder running' 'this script.')) option_parser.add_option( '--step-name', default=None, help=('The name of the step running this script.')) option_parser.add_option('--test-type', default='', help='The test name that identifies the test, ' 'e.g. \'unit-tests\'') option_parser.add_option('--test-results-server', default='', help='The test results server to upload the ' 'results.') option_parser.add_option('--annotate', default='', help='Annotate output when run as a buildstep. ' 'Specify which type of test to parse, available' ' types listed with --annotate=list.') option_parser.add_option('--parse-input', default='', help='When combined with --annotate, reads test ' 'from a file instead of executing a test ' 'binary. Use - for stdin.') option_parser.add_option('--parse-result', default=0, help='Sets the return value of the simulated ' 'executable under test. Only has meaning when ' '--parse-input is used.') option_parser.add_option('--results-url', default='', help='The URI of the perf dashboard to upload ' 'results to.') option_parser.add_option( '--perf-dashboard-id', default='', help='The ID on the perf dashboard to add results ' 'to.') option_parser.add_option('--perf-id', default='', help='The perf builder id') option_parser.add_option( '--perf-config', default='', help='Perf configuration dictionary (as a string). ' 'This allows to specify custom revisions to be ' 'the main revision at the Perf dashboard. ' 'Example: --perf-config="{\'a_default_rev\': ' '\'r_webrtc_rev\'}"') option_parser.add_option('--supplemental-columns-file', default='supplemental_columns', help='A file containing a JSON blob with a dict ' 'that will be uploaded to the results ' 'dashboard as supplemental columns.') option_parser.add_option( '--revision', help='The revision number which will be is used as ' 'primary key by the dashboard. 
If omitted it ' 'is automatically extracted from the checkout.') option_parser.add_option('--webkit-revision', help='See --revision.') option_parser.add_option('--enable-asan', action='store_true', default=False, help='Enable fast memory error detection ' '(AddressSanitizer).') option_parser.add_option( '--enable-lsan', action='store_true', default=False, help='Enable memory leak detection (LeakSanitizer).') option_parser.add_option( '--enable-msan', action='store_true', default=False, help='Enable uninitialized memory reads detection ' '(MemorySanitizer).') option_parser.add_option('--enable-tsan', action='store_true', default=False, help='Enable data race detection ' '(ThreadSanitizer).') option_parser.add_option( '--strip-path-prefix', default='build/src/out/Release/../../', help='Source paths in stack traces will be stripped ' 'of prefixes ending with this substring. This ' 'option is used by sanitizer tools.') option_parser.add_option('--no-spawn-dbus', action='store_true', default=False, help='Disable GLib DBus bug workaround: ' 'manually spawning dbus-launch') option_parser.add_option( '--test-launcher-summary-output', help='Path to test results file with all the info ' 'from the test launcher') option_parser.add_option( '--flakiness-dashboard-server', help='The flakiness dashboard server to which the ' 'results should be uploaded.') option_parser.add_option('--verbose', action='store_true', default=False, help='Prints more information.') chromium_utils.AddPropertiesOptions(option_parser) options, args = option_parser.parse_args() # Initialize logging. 
log_level = logging.INFO if options.verbose: log_level = logging.DEBUG logging.basicConfig(level=log_level, format='%(asctime)s %(filename)s:%(lineno)-3d' ' %(levelname)s %(message)s', datefmt='%y%m%d %H:%M:%S') logging.basicConfig(level=logging.DEBUG) logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout)) options.test_type = options.test_type or options.factory_properties.get( 'step_name', '') if options.run_shell_script and options.run_python_script: sys.stderr.write( 'Use either --run-shell-script OR --run-python-script, ' 'not both.') return 1 print '[Running on builder: "%s"]' % options.builder_name did_launch_dbus = False if not options.no_spawn_dbus: did_launch_dbus = _LaunchDBus() try: options.build_dir = build_directory.GetBuildOutputDirectory() if options.pass_target and options.target: args.extend(['--target', options.target]) if options.pass_build_dir: args.extend(['--build-dir', options.build_dir]) # We will use this to accumulate overrides for the command under test, # That we may not need or want for other support commands. extra_env = {} # This option is used by sanitizer code. There is no corresponding command # line flag. options.use_symbolization_script = False # Set up extra environment and args for sanitizer tools. _ConfigureSanitizerTools(options, args, extra_env) # Set the number of shards environment variables. # NOTE: Chromium's test launcher will ignore these in favor of the command # line flags passed in _BuildTestBinaryCommand. if options.total_shards and options.shard_index: extra_env['GTEST_TOTAL_SHARDS'] = str(options.total_shards) extra_env['GTEST_SHARD_INDEX'] = str(options.shard_index - 1) return _Main(options, args, extra_env) finally: if did_launch_dbus: # It looks like the command line argument --exit-with-session # isn't working to clean up the spawned dbus-daemon. Kill it # manually. _ShutdownDBus()
def main(argv): option_parser = optparse.OptionParser() option_parser.add_option('--target', help='build target to archive (Debug or Release)') option_parser.add_option('--src-dir', default='src', help='path to the top-level sources directory') option_parser.add_option('--build-dir', help='ignored') option_parser.add_option('--exclude-files', default='', help='Comma separated list of files that should ' 'always be excluded from the zip.') option_parser.add_option('--include-files', default='', help='Comma separated list of files that should ' 'always be included in the zip.') option_parser.add_option('--webkit-dir', help='webkit directory path, relative to --src-dir') option_parser.add_option('--revision-dir', help='Directory path that shall be used to decide ' 'the revision number for the archive, ' 'relative to --src-dir') option_parser.add_option('--build_revision', help='The revision the archive should be at. ' 'Overrides the revision found on disk.') option_parser.add_option('--webkit_revision', help='The revision of webkit the build is at. ' 'Overrides the revision found on disk.') option_parser.add_option('--path-filter', help='Filter to use to transform build zip ' '(avail: %r).' 
% list(PATH_FILTERS.keys())) option_parser.add_option('--exclude-unmatched', action='store_true', help='Exclude all files not matched by a whitelist') option_parser.add_option('--build-url', default='', help=('Optional URL to which to upload build ' '(overrides build_url factory property)')) option_parser.add_option('--cros-board', help=('If building for Chrom[e|ium]OS via the ' 'simple chrome workflow, the name of the ' 'target CROS board.')) chromium_utils.AddPropertiesOptions(option_parser) options, args = option_parser.parse_args(argv) if not options.target: options.target = options.factory_properties.get('target', 'Release') if not options.webkit_dir: options.webkit_dir = options.factory_properties.get('webkit_dir') if not options.revision_dir: options.revision_dir = options.factory_properties.get('revision_dir') options.src_dir = (options.factory_properties.get('zip_build_src_dir') or options.src_dir) # When option_parser is passed argv as a list, it can return the caller as # first unknown arg. So throw a warning if we have two or more unknown # arguments. if args[1:]: print 'Warning -- unknown arguments' % args[1:] if (options.path_filter is None and options.factory_properties.get('syzyasan') and chromium_utils.IsWindows()): options.path_filter = 'syzyasan_win' if options.path_filter: options.path_filter = PATH_FILTERS[options.path_filter]( build_directory.GetBuildOutputDirectory(cros_board=options.cros_board), options.target) return Archive(options)
def main(): option_parser = optparse.OptionParser() chromium_utils.AddPropertiesOptions(option_parser) options, _ = option_parser.parse_args() if not os.path.exists('src'): raise Exception('ERROR: no src directory to package, exiting') try: revision_upload_path = chromium_utils.GetSortableUploadPathForSortKey( *chromium_utils.GetBuildSortKey(options)) except chromium_utils.NoIdentifiedRevision: revision_upload_path = 'NONE' completed_filename = '%s-%s.%s' % (options.factory_properties.get( 'package_filename', FILENAME), revision_upload_path, EXT) partial_filename = '%s.partial' % completed_filename chromium_utils.RunCommand(['rm', '-f', partial_filename]) if os.path.exists(partial_filename): raise Exception('ERROR: %s cannot be removed, exiting' % partial_filename) print '%s: Index generation...' % time.strftime('%X') indexing_successful = GenerateIndex() print '%s: Creating tar file...' % time.strftime('%X') packaging_successful = True find_command = [ 'find', 'src/', 'tools/', '/usr/include/', '-type', 'f', # The only files under src/out we want to package up # are index files.... '(', '-regex', '^src/out/.*\.index$', '-o', '(', # ... and generated sources... '-regex', '^src/out/.*/gen/.*', '-a', '(', '-name', '*.h', '-o', '-name', '*.cc', '-o', '-name', '*.cpp', '-o', '-name', '*.js', ')', '-a', # ... but none of the NaCL stuff. '!', '-regex', '^src/out/[^/]*/gen/lib[^/]*/.*', '-a', '!', '-regex', '^src/out/[^/]*/gen/sdk/.*', '-a', '!', '-regex', '^src/out/[^/]*/gen/tc_.*', ')', '-o', '!', '-regex', '^src/out/.*', ')', '-a', # Exclude all .svn directories, the native client toolchain # and the llvm build directory, and perf/data files. 
'!', '-regex', r'.*/\.svn/.*', '-a', '!', '-regex', r'.*/\.git/.*', '-a', '!', '-regex', '^src/native_client/toolchain/.*', '-a', '!', '-regex', '^src/native_client/.*/testdata/.*', '-a', '!', '-regex', '^src/third_party/llvm-build/.*', '-a', '!', '-regex', '^src/.*/\.cvsignore', '-a', '!', '-regex', '^src/chrome/tools/test/reference_build/.*', '-a', '!', '-regex', '^tools/perf/data/.*' ] try: if chromium_utils.RunCommand( find_command, pipes=[['tar', '-T-', '-cjvf', partial_filename] ]) != 0: raise Exception('ERROR: failed to create %s, exiting' % partial_filename) print '%s: Cleaning up google storage...' % time.strftime('%X') DeleteIfExists(completed_filename) DeleteIfExists(partial_filename) print '%s: Uploading...' % time.strftime('%X') status = slave_utils.GSUtilCopyFile(partial_filename, GSBASE, gs_acl=GSACL) if status != 0: raise Exception('ERROR: GSUtilCopyFile error %d. "%s" -> "%s"' % (status, partial_filename, GSBASE)) print '%s: Finalizing google storage...' % time.strftime('%X') status = slave_utils.GSUtilMoveFile( '%s/%s' % (GSBASE, partial_filename), '%s/%s' % (GSBASE, completed_filename), gs_acl=GSACL) if status != 0: raise Exception('ERROR: GSUtilMoveFile error %d. "%s" -> "%s"' % (status, '%s/%s' % (GSBASE, partial_filename), '%s/%s' % (GSBASE, completed_filename))) (status, output) = slave_utils.GSUtilListBucket(GSBASE, ['-l']) if status != 0: raise Exception('ERROR: failed to get list of GSBASE, exiting' % GSBASE) regex = re.compile('\s*\d+\s+([-:\w]+)\s+%s/%s\n' % (GSBASE, completed_filename)) match_data = regex.search(output) modified_time = None if match_data: modified_time = match_data.group(1) if not modified_time: raise Exception('ERROR: could not get modified_time, exiting') print 'Last modified time: %s' % modified_time print '%s: Deleting old archives on google storage...' 
% time.strftime( '%X') regex = re.compile('\s*\d+\s+([-:\w]+)\s+(%s/.*%s.*)\n' % (GSBASE, EXT)) last_week = int(time.time()) - 7 * 24 * 60 * 60 for match_data in regex.finditer(output): timestamp = int( time.strftime( '%s', time.strptime(match_data.group(1), '%Y-%m-%dT%H:%M:%S'))) if timestamp < last_week: print 'Deleting %s...' % match_data.group(2) status = slave_utils.GSUtilDeleteFile(match_data.group(2)) if status != 0: raise Exception('ERROR: GSUtilDeleteFile error %d. "%s"' % (status, match_data.group(2))) except Exception, e: print str(e) packaging_successful = False
def main(): option_parser = optparse.OptionParser() option_parser.add_option('--target', help='build target to archive (Debug or Release)') option_parser.add_option('--src-dir', default='src', help='path to the top-level sources directory') option_parser.add_option('--build-dir', help='ignored') option_parser.add_option('--master-name', help='Name of the buildbot master.') option_parser.add_option('--build-number', type=int, help='Buildbot build number.') option_parser.add_option('--parent-build-dir', help='Path to build directory on parent buildbot ' 'builder.') option_parser.add_option('--parent-builder-name', help='Name of parent buildbot builder.') option_parser.add_option('--parent-slave-name', help='Name of parent buildbot slave.') option_parser.add_option('--parent-build-number', type=int, help='Buildbot parent build number.') option_parser.add_option('--build-url', help='Base url where to find the build to extract') option_parser.add_option('--build-archive-url', help='Exact url where to find the build to extract') # TODO(cmp): Remove --halt-on-missing-build when the buildbots are upgraded # to not use this argument. option_parser.add_option('--halt-on-missing-build', action='store_true', help='whether to halt on a missing build') option_parser.add_option('--build_revision', help='Revision of the build that is being ' 'archived. Overrides the revision found on ' 'the local disk') option_parser.add_option('--webkit_revision', help='Webkit revision of the build that is being ' 'archived. 
Overrides the revision found on ' 'the local disk') option_parser.add_option('--webkit-dir', help='WebKit directory path, ' 'relative to the src/ dir.') option_parser.add_option('--revision-dir', help=('Directory path that shall be used to decide ' 'the revision number for the archive, ' 'relative to the src/ dir.')) option_parser.add_option('--build-output-dir', help='ignored') option_parser.add_option('--gsutil-py-path', help='Specify path to gsutil.py script.') chromium_utils.AddPropertiesOptions(option_parser) options, args = option_parser.parse_args() if args: print 'Unknown options: %s' % args return 1 if not options.master_name: options.master_name = options.build_properties.get('mastername', '') if not options.build_number: options.build_number = options.build_properties.get('buildnumber') if not options.parent_build_dir: options.parent_build_dir = options.build_properties.get('parent_builddir') if not options.parent_builder_name: options.parent_builder_name = options.build_properties.get('parentname') if not options.parent_slave_name: options.parent_slave_name = options.build_properties.get('parentslavename') if not options.parent_build_number: options.parent_build_number = options.build_properties.get( 'parent_buildnumber') if not options.build_url: options.build_url = options.factory_properties.get('build_url') if not options.halt_on_missing_build: options.halt_on_missing_build = options.factory_properties.get( 'halt_on_missing_build') if not options.target: options.target = options.factory_properties.get('target', 'Release') if not options.webkit_dir: options.webkit_dir = options.factory_properties.get('webkit_dir') if not options.revision_dir: options.revision_dir = options.factory_properties.get('revision_dir') options.src_dir = (options.factory_properties.get('extract_build_src_dir') or options.src_dir) return real_main(options)
def main(): import platform xvfb_path = os.path.join(os.path.dirname(sys.argv[0]), '..', '..', 'third_party', 'xvfb', platform.architecture()[0]) # Initialize logging. log_level = logging.INFO logging.basicConfig(level=log_level, format='%(asctime)s %(filename)s:%(lineno)-3d' ' %(levelname)s %(message)s', datefmt='%y%m%d %H:%M:%S') option_parser = optparse.OptionParser(usage=USAGE) # Since the trailing program to run may have has command-line args of its # own, we need to stop parsing when we reach the first positional argument. option_parser.disable_interspersed_args() option_parser.add_option('', '--target', default='Release', help='build target (Debug or Release)') option_parser.add_option( '', '--build-dir', default='chrome', help='path to main build directory (the parent of ' 'the Release or Debug directory)') option_parser.add_option('', '--enable-pageheap', action='store_true', default=False, help='enable pageheap checking for chrome.exe') # --with-httpd assumes a chromium checkout with src/tools/python. option_parser.add_option('', '--with-httpd', dest='document_root', default=None, metavar='DOC_ROOT', help='Start a local httpd server using the given ' 'document root, relative to the current dir') option_parser.add_option('', '--total-shards', dest='total_shards', default=None, type="int", help='Number of shards to split this test into.') option_parser.add_option('', '--shard-index', dest='shard_index', default=None, type="int", help='Shard to run. 
Must be between 1 and ' 'total-shards.') option_parser.add_option('', '--run-shell-script', action='store_true', default=False, help='treat first argument as the shell script' 'to run.') option_parser.add_option('', '--run-python-script', action='store_true', default=False, help='treat first argument as a python script' 'to run.') option_parser.add_option('', '--generate-json-file', action='store_true', default=False, help='output JSON results file if specified.') option_parser.add_option('', '--parallel', action='store_true', help='Shard and run tests in parallel for speed ' 'with sharding_supervisor.') option_parser.add_option('', '--llvmpipe', action='store_const', const=xvfb_path, dest='llvmpipe_dir', help='Use software gpu pipe directory.') option_parser.add_option('', '--no-llvmpipe', action='store_const', const=None, dest='llvmpipe_dir', help='Do not use software gpu pipe directory.') option_parser.add_option('', '--llvmpipe-dir', default=None, dest='llvmpipe_dir', help='Path to software gpu library directory.') option_parser.add_option( '', '--special-xvfb-dir', default=xvfb_path, help='Path to virtual X server directory on Linux.') option_parser.add_option('', '--special-xvfb', action='store_true', default='auto', help='use non-default virtual X server on Linux.') option_parser.add_option('', '--no-special-xvfb', action='store_false', dest='special_xvfb', help='Use default virtual X server on Linux.') option_parser.add_option('', '--auto-special-xvfb', action='store_const', const='auto', dest='special_xvfb', help='Guess as to virtual X server on Linux.') option_parser.add_option('', '--xvfb', action='store_true', dest='xvfb', default=True, help='Start virtual X server on Linux.') option_parser.add_option('', '--no-xvfb', action='store_false', dest='xvfb', help='Do not start virtual X server on Linux.') option_parser.add_option('', '--sharding-args', dest='sharding_args', default=None, help='Options to pass to sharding_supervisor.') 
option_parser.add_option('-o', '--results-directory', default='', help='output results directory for JSON file.') option_parser.add_option( "", "--builder-name", default=None, help="The name of the builder running this script.") option_parser.add_option("", "--build-number", default=None, help=("The build number of the builder running" "this script.")) option_parser.add_option("", "--test-type", default='', help="The test name that identifies the test, " "e.g. 'unit-tests'") option_parser.add_option("", "--test-results-server", default='', help="The test results server to upload the " "results.") chromium_utils.AddPropertiesOptions(option_parser) options, args = option_parser.parse_args() if options.run_shell_script and options.run_python_script: sys.stderr.write( 'Use either --run-shell-script OR --run-python-script, ' 'not both.') return 1 # Print out builder name for log_parser print '[Running on builder: "%s"]' % options.builder_name if options.factory_properties.get('asan', False): # Instruct GTK to use malloc while running ASAN tests. os.environ['G_SLICE'] = 'always-malloc' # Disable ASLR on Mac when running ASAN tests. os.environ['DYLD_NO_PIE'] = '1' # Set the number of shards environement variables. 
if options.total_shards and options.shard_index: os.environ['GTEST_TOTAL_SHARDS'] = str(options.total_shards) os.environ['GTEST_SHARD_INDEX'] = str(options.shard_index - 1) if options.results_directory: options.test_output_xml = os.path.normpath( os.path.join(options.results_directory, '%s.xml' % options.test_type)) args.append('--gtest_output=xml:' + options.test_output_xml) temp_files = get_temp_count() if sys.platform.startswith('darwin'): result = main_mac(options, args) elif sys.platform == 'win32': result = main_win(options, args) elif sys.platform == 'linux2': result = main_linux(options, args) else: sys.stderr.write('Unknown sys.platform value %s\n' % repr(sys.platform)) return 1 new_temp_files = get_temp_count() if temp_files > new_temp_files: print >> sys.stderr, ( 'Confused: %d files were deleted from %s during the test run') % ( (temp_files - new_temp_files), tempfile.gettempdir()) elif temp_files < new_temp_files: print >> sys.stderr, ( '%d new files were left in %s: Fix the tests to clean up themselves.' ) % ((new_temp_files - temp_files), tempfile.gettempdir()) # TODO(maruel): Make it an error soon. Not yet since I want to iron out all # the remaining cases before. #result = 1 return result