def CMDcollect(parser, args):
  """Retrieves results of a Swarming task.

  The result can be in multiple parts if the execution was sharded. It can
  potentially have retries.
  """
  add_collect_options(parser)
  add_sharding_options(parser)
  (options, args) = parser.parse_args(args)
  if not args:
    parser.error('Must specify one task name.')
  elif len(args) > 1:
    parser.error('Must specify only one task name.')

  auth.ensure_logged_in(options.swarming)
  try:
    return collect(
        options.swarming,
        args[0],
        options.shards,
        options.timeout,
        options.decorate,
        options.print_status_updates,
        options.task_summary_json,
        options.task_output_dir)
  except Failure:
    on_error.report(None)
    return 1


def CMDquery(parser, args):
  """Returns information about the bots connected to the Swarming server."""
  add_filter_options(parser)
  parser.filter_group.add_option(
      '--dead-only', action='store_true',
      help='Only print dead bots, useful to reap them and reimage broken bots')
  parser.filter_group.add_option(
      '-k', '--keep-dead', action='store_true',
      help='Do not filter out dead bots')
  parser.filter_group.add_option(
      '-b', '--bare', action='store_true',
      help='Do not print out dimensions')
  options, args = parser.parse_args(args)

  if options.keep_dead and options.dead_only:
    parser.error('Use only one of --keep-dead and --dead-only')

  auth.ensure_logged_in(options.swarming)
  service = net.get_http_service(options.swarming)
  data = service.json_request('GET', '/swarming/api/v1/bots')
  if data is None:
    print >> sys.stderr, 'Failed to access %s' % options.swarming
    return 1
  for machine in natsort.natsorted(data['machines'], key=lambda x: x['id']):
    if options.dead_only:
      if not machine['is_dead']:
        continue
    elif not options.keep_dead and machine['is_dead']:
      continue

    # If the user requested to filter on dimensions, ensure the bot has all the
    # dimensions requested.
    dimensions = machine['dimensions']
    for key, value in options.dimensions:
      if key not in dimensions:
        break
      # A bot can have multiple values for a key, for example,
      # {'os': ['Windows', 'Windows-6.1']}, so that --dimension os=Windows will
      # be accepted.
      if isinstance(dimensions[key], list):
        if value not in dimensions[key]:
          break
      else:
        if value != dimensions[key]:
          break
    else:
      print machine['id']
      if not options.bare:
        print ' %s' % dimensions
  return 0


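# A minimal standalone sketch of the dimension matching done in CMDquery
# above: a bot matches only if every requested (key, value) pair is present,
# and for list-valued dimensions it is enough that the requested value is one
# of the bot's values. bot_matches() is a hypothetical helper written for
# illustration; it is not part of this module.
def bot_matches(bot_dimensions, requested):
  for key, value in requested:
    if key not in bot_dimensions:
      return False
    if isinstance(bot_dimensions[key], list):
      if value not in bot_dimensions[key]:
        return False
    elif value != bot_dimensions[key]:
      return False
  return True


assert bot_matches({'os': ['Windows', 'Windows-6.1']}, [('os', 'Windows')])
assert not bot_matches({'os': 'Linux'}, [('os', 'Windows')])

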
def CMDrun(parser, args):
  """Triggers a task and waits for the results.

  Basically, does everything to run a command remotely.
  """
  add_trigger_options(parser)
  add_collect_options(parser)
  add_sharding_options(parser)
  args, isolated_cmd_args = extract_isolated_command_extra_args(args)
  options, args = parser.parse_args(args)
  process_trigger_options(parser, options, args)

  auth.ensure_logged_in(options.swarming)
  if file_path.is_url(options.isolate_server):
    auth.ensure_logged_in(options.isolate_server)
  try:
    tasks, task_name = trigger(
        swarming=options.swarming,
        isolate_server=options.isolate_server or options.indir,
        namespace=options.namespace,
        file_hash_or_isolated=args[0],
        task_name=options.task_name,
        extra_args=isolated_cmd_args,
        shards=options.shards,
        dimensions=options.dimensions,
        env=dict(options.env),
        deadline=options.deadline,
        verbose=options.verbose,
        profile=options.profile,
        priority=options.priority)
  except Failure as e:
    on_error.report(
        'Failed to trigger %s(%s): %s' %
        (options.task_name, args[0], e.args[0]))
    return 1
  if not tasks:
    on_error.report('Failed to trigger the task.')
    return 1
  if task_name != options.task_name:
    print('Triggered task: %s' % task_name)
  try:
    # TODO(maruel): Use task_ids, it's much more efficient!
    return collect(
        options.swarming,
        task_name,
        options.shards,
        options.timeout,
        options.decorate,
        options.print_status_updates,
        options.task_summary_json,
        options.task_output_dir)
  except Failure:
    on_error.report(None)
    return 1


def CMDreproduce(parser, args):
  """Runs a task locally that was triggered on the server.

  This runs locally the same commands that were run on the bot. The data
  downloaded will be in a subdirectory named 'work' of the current working
  directory.
  """
  options, args = parser.parse_args(args)
  if len(args) != 1:
    parser.error('Must specify exactly one task id.')

  url = options.swarming + '/swarming/api/v1/client/task/%s/request' % args[0]
  request = net.url_read_json(url)
  if not request:
    print >> sys.stderr, 'Failed to retrieve request data for the task'
    return 1

  if not os.path.isdir('work'):
    os.mkdir('work')

  swarming_host = urlparse.urlparse(options.swarming).netloc
  properties = request['properties']
  for data_url, _ in properties['data']:
    assert data_url.startswith('https://'), data_url
    data_host = urlparse.urlparse(data_url).netloc
    if data_host != swarming_host:
      auth.ensure_logged_in('https://' + data_host)

    content = net.url_read(data_url)
    if content is None:
      print >> sys.stderr, 'Failed to download %s' % data_url
      return 1

    with zipfile.ZipFile(StringIO.StringIO(content)) as zip_file:
      zip_file.extractall('work')

  env = None
  if properties['env']:
    env = os.environ.copy()
    logging.info('env: %r', properties['env'])
    env.update(
        (k.encode('utf-8'), v.encode('utf-8'))
        for k, v in properties['env'].iteritems())

  exit_code = 0
  for cmd in properties['commands']:
    try:
      c = subprocess.call(cmd, env=env, cwd='work')
    except OSError as e:
      print >> sys.stderr, 'Failed to run: %s' % ' '.join(cmd)
      print >> sys.stderr, str(e)
      c = 1
    if not exit_code:
      exit_code = c
  return exit_code


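# A small self-contained sketch of the exit code handling in CMDreproduce
# above: every command is run to completion, but the exit code reported back
# is the first non-zero one seen. first_failure() is a hypothetical helper for
# illustration only.
def first_failure(exit_codes):
  result = 0
  for code in exit_codes:
    if not result:
      result = code
  return result


assert first_failure([0, 0, 0]) == 0
assert first_failure([0, 3, 1]) == 3

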
def CMDtrigger(parser, args):
  """Triggers a Swarming task.

  Accepts either the hash (sha1) of a .isolated file already uploaded or the
  path to an .isolated file to archive, packages it if needed and sends a
  Swarming manifest file to the Swarming server.

  If an .isolated file is specified instead of a hash, it is first archived.

  Passes all extra arguments provided after '--' as additional command line
  arguments for an isolated command specified in *.isolate file.
  """
  add_trigger_options(parser)
  add_sharding_options(parser)
  args, isolated_cmd_args = extract_isolated_command_extra_args(args)
  parser.add_option(
      '--dump-json',
      metavar='FILE',
      help='Dump details about the triggered task(s) to this file as json')
  options, args = parser.parse_args(args)
  process_trigger_options(parser, options, args)

  auth.ensure_logged_in(options.swarming)
  if file_path.is_url(options.isolate_server):
    auth.ensure_logged_in(options.isolate_server)
  try:
    tasks, task_name = trigger(
        swarming=options.swarming,
        isolate_server=options.isolate_server or options.indir,
        namespace=options.namespace,
        file_hash_or_isolated=args[0],
        task_name=options.task_name,
        extra_args=isolated_cmd_args,
        shards=options.shards,
        dimensions=options.dimensions,
        env=dict(options.env),
        deadline=options.deadline,
        verbose=options.verbose,
        profile=options.profile,
        priority=options.priority)
    if tasks:
      if task_name != options.task_name:
        print('Triggered task: %s' % task_name)
      if options.dump_json:
        data = {
          'base_task_name': task_name,
          'tasks': tasks,
        }
        tools.write_json(options.dump_json, data, True)
    return int(not tasks)
  except Failure:
    on_error.report(None)
    return 1


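# A hedged sketch of consuming the file written by --dump-json above. Only the
# 'base_task_name' and 'tasks' keys are taken from the code; 'triggered.json'
# is a hypothetical file name and the shape of each entry in 'tasks' is
# whatever trigger() returned, which is not assumed here.
import json

with open('triggered.json') as f:
  triggered = json.load(f)
print triggered['base_task_name']
for entry in triggered['tasks']:
  print entry

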
def _process_swarming(self, options):
  """Processes the --swarming option and aborts if not specified.

  Returns the identity as determined by the server.
  """
  if not options.swarming:
    self.error('--swarming is required.')
  try:
    options.swarming = net.fix_url(options.swarming)
  except ValueError as e:
    self.error('--swarming %s' % e)
  on_error.report_on_exception_exit(options.swarming)
  try:
    user = auth.ensure_logged_in(options.swarming)
  except ValueError as e:
    self.error(str(e))
  return user


def main(args):
  tools.disable_buffering()
  parser = tools.OptionParserWithLogging(
      usage='%prog <options>',
      version=__version__,
      log_file=RUN_ISOLATED_LOG_FILE)

  data_group = optparse.OptionGroup(parser, 'Data source')
  data_group.add_option(
      '-s', '--isolated',
      metavar='FILE',
      help='File/url describing what to map or run')
  data_group.add_option(
      '-H', '--hash',
      help='Hash of the .isolated to grab from the hash table')
  isolateserver.add_isolate_server_options(data_group, True)
  parser.add_option_group(data_group)

  cache_group = optparse.OptionGroup(parser, 'Cache management')
  cache_group.add_option(
      '--cache', default='cache', metavar='DIR',
      help='Cache directory, default=%default')
  cache_group.add_option(
      '--max-cache-size',
      type='int',
      metavar='NNN',
      default=20*1024*1024*1024,
      help='Trim if the cache gets larger than this value, default=%default')
  cache_group.add_option(
      '--min-free-space',
      type='int',
      metavar='NNN',
      default=2*1024*1024*1024,
      help='Trim if disk free space becomes lower than this value, '
           'default=%default')
  cache_group.add_option(
      '--max-items',
      type='int',
      metavar='NNN',
      default=100000,
      help='Trim if more than this number of items are in the cache '
           'default=%default')
  parser.add_option_group(cache_group)

  auth.add_auth_options(parser)
  options, args = parser.parse_args(args)
  auth.process_auth_options(parser, options)
  isolateserver.process_isolate_server_options(data_group, options)

  if bool(options.isolated) == bool(options.hash):
    logging.debug('One and only one of --isolated or --hash is required.')
    parser.error('One and only one of --isolated or --hash is required.')

  options.cache = os.path.abspath(options.cache)
  policies = CachePolicies(
      options.max_cache_size, options.min_free_space, options.max_items)

  # |options.cache| path may not exist until DiskCache() instance is created.
  cache = DiskCache(
      options.cache, policies, isolateserver.get_hash_algo(options.namespace))

  remote = options.isolate_server or options.indir
  if file_path.is_url(remote):
    auth.ensure_logged_in(remote)
  with isolateserver.get_storage(remote, options.namespace) as storage:
    # Hashing schemes used by |storage| and |cache| MUST match.
    assert storage.hash_algo == cache.hash_algo
    return run_tha_test(
        options.isolated or options.hash, storage, cache, args)


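# A tiny illustration of the --isolated / --hash validation in main() above:
# bool(a) == bool(b) rejects both "neither given" and "both given", so exactly
# one of the two ways to name the .isolated must be used. _exactly_one() is a
# hypothetical helper for illustration only.
def _exactly_one(isolated, isolated_hash):
  return bool(isolated) != bool(isolated_hash)


assert not _exactly_one(None, None)
assert _exactly_one('foo.isolated', None)
assert _exactly_one(None, 'deadbeef')
assert not _exactly_one('foo.isolated', 'deadbeef')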