def archive(isolate_server, namespace, isolated, algo, verbose):
  """Archives a .isolated and all its dependencies on the CAS.

  Shells out to isolate.py so the archival logic lives in a single place.

  Args:
    isolate_server: isolate server URL, or a local directory when not a URL.
    namespace: namespace to store the content under.
    isolated: path to the .isolated file to archive.
    algo: hashing constructor (e.g. hashlib.sha1) used to hash the file.
    verbose: int count of '--verbose' flags to forward to isolate.py.

  Returns:
    Hash of the .isolated file on success, None on failure.
  """
  logging.info('archive(%s, %s, %s)', isolate_server, namespace, isolated)
  if file_path.is_url(isolate_server):
    command = 'archive'
    flag = '--isolate-server'
  else:
    command = 'hashtable'
    flag = '--outdir'
  print('Archiving: %s' % isolated)
  cmd = [
    sys.executable,
    os.path.join(ROOT_DIR, 'isolate.py'),
    command,
    flag, isolate_server,
    '--namespace', namespace,
    '--isolated', isolated,
  ]
  cmd.extend(['--verbose'] * verbose)
  logging.info(' '.join(cmd))
  # BUG FIX: the original called subprocess.call(cmd, verbose), which passes
  # |verbose| as the positional 'bufsize' argument of subprocess.call — it was
  # never a verbosity control. Also removed the dead try/finally: |tempdir|
  # was always None, so shutil.rmtree(tempdir) could never run.
  if subprocess.call(cmd):
    return None
  return isolateserver.hash_file(isolated, algo)
def chromium_setup(manifest):
  """Prepares the commands to run. Highly chromium specific."""
  # The zip is added uncompressed here; the whole package sent to the
  # Swarming server is compressed in a single pass later.
  run_test_name = "run_isolated.zip"
  zip_contents = run_isolated.get_as_zip_package().zip_into_buffer(
      compress=False)
  manifest.bundle.add_buffer(run_test_name, zip_contents)

  cleanup_script_name = "swarm_cleanup.py"
  manifest.bundle.add_file(
      os.path.join(TOOLS_PATH, cleanup_script_name), cleanup_script_name)

  run_cmd = ["python", run_test_name]
  run_cmd += ["--hash", manifest.isolated_hash]
  run_cmd += ["--namespace", manifest.namespace]
  if file_path.is_url(manifest.isolate_server):
    run_cmd += ["--isolate-server", manifest.isolate_server]
  else:
    run_cmd += ["--indir", manifest.isolate_server]
  if manifest.verbose or manifest.profile:
    # Have it print the profiling section.
    run_cmd.append("--verbose")
  # Forward extra args to run_isolated.py; it hands them to the command.
  if manifest.extra_args:
    run_cmd.append("--")
    run_cmd += manifest.extra_args
  manifest.add_task("Run Test", run_cmd)

  # Clean up.
  manifest.add_task("Clean Up", ["python", cleanup_script_name])
def archive(isolate_server, namespace, isolated, algo, verbose):
  """Archives a .isolated and all its dependencies on the CAS.

  Shells out to isolate.py so the archival logic lives in a single place.

  Args:
    isolate_server: isolate server URL, or a local directory when not a URL.
    namespace: namespace to store the content under.
    isolated: path to the .isolated file to archive.
    algo: hashing constructor (e.g. hashlib.sha1) used to hash the file.
    verbose: int count of "--verbose" flags to forward to isolate.py.

  Returns:
    Hash of the .isolated file on success, None on failure.
  """
  logging.info("archive(%s, %s, %s)", isolate_server, namespace, isolated)
  if file_path.is_url(isolate_server):
    command = "archive"
    flag = "--isolate-server"
  else:
    command = "hashtable"
    flag = "--outdir"
  print("Archiving: %s" % isolated)
  cmd = [
    sys.executable,
    os.path.join(ROOT_DIR, "isolate.py"),
    command,
    flag, isolate_server,
    "--namespace", namespace,
    "--isolated", isolated,
  ]
  cmd.extend(["--verbose"] * verbose)
  logging.info(" ".join(cmd))
  # BUG FIX: the original called subprocess.call(cmd, verbose), which passes
  # |verbose| as the positional 'bufsize' argument of subprocess.call — it was
  # never a verbosity control. Also removed the dead try/finally: |tempdir|
  # was always None, so shutil.rmtree(tempdir) could never run.
  if subprocess.call(cmd):
    return None
  return isolateserver.hash_file(isolated, algo)
def extract_output_files_location(task_log):
  """Task log -> location of task output files to fetch.

  TODO(vadimsh,maruel): Use side-channel to get this information.
  See 'run_tha_test' in run_isolated.py for where the data is generated.

  Returns:
    Tuple (isolate server URL, namespace, isolated hash) on success.
    None if information is missing or can not be parsed.
  """
  match = re.search(
      r"\[run_isolated_out_hack\](.*)\[/run_isolated_out_hack\]",
      task_log,
      re.DOTALL)
  if match is None:
    return None

  def to_ascii(val):
    # Only plain strings are acceptable; anything else is malformed data.
    if not isinstance(val, basestring):
      raise ValueError()
    return val.encode("ascii")

  try:
    data = json.loads(match.group(1))
    if not isinstance(data, dict):
      raise ValueError()
    # Pull the three fields in (server, namespace, hash) order.
    location = tuple(
        to_ascii(data[key]) for key in ("storage", "namespace", "hash"))
    if not file_path.is_url(location[0]):
      raise ValueError()
    return location
  except (KeyError, ValueError):
    logging.warning(
        "Unexpected value of run_isolated_out_hack: %s", match.group(1))
    return None
def extract_output_files_location(task_log):
  """Task log -> location of task output files to fetch.

  TODO(vadimsh,maruel): Use side-channel to get this information.
  See 'run_tha_test' in run_isolated.py for where the data is generated.

  Returns:
    Tuple (isolate server URL, namespace, isolated hash) on success.
    None if information is missing or can not be parsed.
  """
  found = re.search(
      r'\[run_isolated_out_hack\](.*)\[/run_isolated_out_hack\]',
      task_log,
      re.DOTALL)
  if found is None:
    return None
  payload = found.group(1)

  def to_ascii(val):
    # Only plain strings are acceptable; anything else is malformed data.
    if not isinstance(val, basestring):
      raise ValueError()
    return val.encode('ascii')

  try:
    parsed = json.loads(payload)
    if not isinstance(parsed, dict):
      raise ValueError()
    isolated_hash = to_ascii(parsed['hash'])
    namespace = to_ascii(parsed['namespace'])
    isolate_server = to_ascii(parsed['storage'])
    if file_path.is_url(isolate_server):
      return (isolate_server, namespace, isolated_hash)
    raise ValueError()
  except (KeyError, ValueError):
    logging.warning('Unexpected value of run_isolated_out_hack: %s', payload)
    return None
def chromium_setup(manifest):
  """Prepares the commands to run. Highly chromium specific."""
  # The zip is added uncompressed here; the whole package sent to the
  # Swarming server is compressed in a single pass later.
  run_test_name = 'run_isolated.zip'
  zip_contents = run_isolated.get_as_zip_package().zip_into_buffer(
      compress=False)
  manifest.bundle.add_buffer(run_test_name, zip_contents)

  cleanup_script_name = 'swarm_cleanup.py'
  manifest.bundle.add_file(
      os.path.join(TOOLS_PATH, cleanup_script_name), cleanup_script_name)

  run_cmd = ['python', run_test_name]
  run_cmd += ['--hash', manifest.isolated_hash]
  run_cmd += ['--namespace', manifest.namespace]
  if file_path.is_url(manifest.isolate_server):
    run_cmd += ['--isolate-server', manifest.isolate_server]
  else:
    run_cmd += ['--indir', manifest.isolate_server]
  if manifest.verbose or manifest.profile:
    # Have it print the profiling section.
    run_cmd.append('--verbose')
  manifest.add_task('Run Test', run_cmd)

  # Clean up.
  manifest.add_task('Clean Up', ['python', cleanup_script_name])
def process_outdir_options(parser, options, cwd):
  """Validates --outdir and rewrites it as a normalized absolute path."""
  if not options.outdir:
    parser.error("--outdir is required.")
  if file_path.is_url(options.outdir):
    parser.error("Can't use an URL for --outdir.")
  # Normalize path separators first, then anchor the path at |cwd|.
  outdir = unicode(options.outdir).replace("/", os.path.sep)
  # outdir doesn't need native path case since tracing is never done from
  # there.
  outdir = os.path.join(cwd, outdir)
  options.outdir = os.path.abspath(os.path.normpath(outdir))
def process_outdir_options(parser, options, cwd):
  """Validates --outdir and rewrites it as a normalized absolute path."""
  if not options.outdir:
    parser.error('--outdir is required.')
  if file_path.is_url(options.outdir):
    parser.error('Can\'t use an URL for --outdir.')
  # Normalize path separators first, then anchor the path at |cwd|.
  outdir = unicode(options.outdir).replace('/', os.path.sep)
  # outdir doesn't need native path case since tracing is never done from
  # there.
  outdir = os.path.join(cwd, outdir)
  options.outdir = os.path.abspath(os.path.normpath(outdir))
def CMDrun(parser, args):
  """Triggers a task and wait for the results.

  Basically, does everything to run a command remotely.
  """
  add_trigger_options(parser)
  add_collect_options(parser)
  add_sharding_options(parser)
  args, isolated_cmd_args = extract_isolated_command_extra_args(args)
  options, args = parser.parse_args(args)
  process_trigger_options(parser, options, args)

  auth.ensure_logged_in(options.swarming)
  if file_path.is_url(options.isolate_server):
    auth.ensure_logged_in(options.isolate_server)

  # Build the trigger call once so the argument set is easy to scan.
  trigger_kwargs = {
    'swarming': options.swarming,
    'isolate_server': options.isolate_server or options.indir,
    'namespace': options.namespace,
    'file_hash_or_isolated': args[0],
    'task_name': options.task_name,
    'extra_args': isolated_cmd_args,
    'shards': options.shards,
    'dimensions': options.dimensions,
    'env': dict(options.env),
    'deadline': options.deadline,
    'verbose': options.verbose,
    'profile': options.profile,
    'priority': options.priority,
  }
  try:
    tasks, task_name = trigger(**trigger_kwargs)
  except Failure as e:
    on_error.report(
        'Failed to trigger %s(%s): %s' %
        (options.task_name, args[0], e.args[0]))
    return 1

  if not tasks:
    on_error.report('Failed to trigger the task.')
    return 1
  if task_name != options.task_name:
    print('Triggered task: %s' % task_name)

  try:
    # TODO(maruel): Use task_ids, it's much more efficient!
    return collect(
        options.swarming,
        task_name,
        options.shards,
        options.timeout,
        options.decorate,
        options.print_status_updates,
        options.task_summary_json,
        options.task_output_dir)
  except Failure:
    on_error.report(None)
    return 1
def CMDtrigger(parser, args):
  """Triggers a Swarming task.

  Accepts either the hash (sha1) of a .isolated file already uploaded or the
  path to an .isolated file to archive, packages it if needed and sends a
  Swarming manifest file to the Swarming server.

  If an .isolated file is specified instead of an hash, it is first archived.

  Passes all extra arguments provided after '--' as additional command line
  arguments for an isolated command specified in *.isolate file.
  """
  add_trigger_options(parser)
  add_sharding_options(parser)
  args, isolated_cmd_args = extract_isolated_command_extra_args(args)
  parser.add_option(
      '--dump-json', metavar='FILE',
      help='Dump details about the triggered task(s) to this file as json')
  options, args = parser.parse_args(args)
  process_trigger_options(parser, options, args)

  auth.ensure_logged_in(options.swarming)
  if file_path.is_url(options.isolate_server):
    auth.ensure_logged_in(options.isolate_server)

  try:
    tasks, task_name = trigger(
        swarming=options.swarming,
        isolate_server=options.isolate_server or options.indir,
        namespace=options.namespace,
        file_hash_or_isolated=args[0],
        task_name=options.task_name,
        extra_args=isolated_cmd_args,
        shards=options.shards,
        dimensions=options.dimensions,
        env=dict(options.env),
        deadline=options.deadline,
        verbose=options.verbose,
        profile=options.profile,
        priority=options.priority)
    if tasks:
      if task_name != options.task_name:
        print('Triggered task: %s' % task_name)
      if options.dump_json:
        # Persist the trigger details for downstream tooling.
        tools.write_json(
            options.dump_json,
            {'base_task_name': task_name, 'tasks': tasks},
            True)
    return int(not tasks)
  except Failure:
    on_error.report(None)
    return 1
def CMDtrigger(parser, args):
  """Triggers a Swarming task.

  Accepts either the hash (sha1) of a .isolated file already uploaded or the
  path to an .isolated file to archive, packages it if needed and sends a
  Swarming manifest file to the Swarming server.

  If an .isolated file is specified instead of an hash, it is first archived.

  Passes all extra arguments provided after '--' as additional command line
  arguments for an isolated command specified in *.isolate file.
  """
  add_trigger_options(parser)
  add_sharding_options(parser)
  args, isolated_cmd_args = extract_isolated_command_extra_args(args)
  parser.add_option(
      '--dump-json', metavar='FILE',
      help='Dump details about the triggered task(s) to this file as json')
  options, args = parser.parse_args(args)
  process_trigger_options(parser, options, args)

  auth.ensure_logged_in(options.swarming)
  if file_path.is_url(options.isolate_server):
    auth.ensure_logged_in(options.isolate_server)

  # Build the trigger call once so the argument set is easy to scan.
  trigger_kwargs = {
    'swarming': options.swarming,
    'isolate_server': options.isolate_server or options.indir,
    'namespace': options.namespace,
    'file_hash_or_isolated': args[0],
    'task_name': options.task_name,
    'extra_args': isolated_cmd_args,
    'shards': options.shards,
    'dimensions': options.dimensions,
    'env': dict(options.env),
    'deadline': options.deadline,
    'verbose': options.verbose,
    'profile': options.profile,
    'priority': options.priority,
  }
  try:
    tasks, task_name = trigger(**trigger_kwargs)
    if tasks:
      if task_name != options.task_name:
        print('Triggered task: %s' % task_name)
      if options.dump_json:
        # Persist the trigger details for downstream tooling.
        tools.write_json(
            options.dump_json,
            {'base_task_name': task_name, 'tasks': tasks},
            True)
    return int(not tasks)
  except Failure:
    on_error.report(None)
    return 1
def setup_run_isolated(manifest, bundle):
  """Sets up the manifest to run an isolated task via run_isolated.py.

  Modifies |bundle| (by adding files) and |manifest| (by adding commands) in
  place.

  Args:
    manifest: Manifest with swarm task definition.
    bundle: ZipPackage with files that would be transfered to swarm bot.
        If None, only |manifest| is modified (useful in tests).
  """
  # The zip is added uncompressed here; the whole package sent to the
  # Swarming server is compressed in a single pass later.
  run_test_name = 'run_isolated.zip'
  if bundle and run_test_name not in bundle.files:
    zip_contents = run_isolated.get_as_zip_package().zip_into_buffer(
        compress=False)
    bundle.add_buffer(run_test_name, zip_contents)

  cleanup_script_name = 'swarm_cleanup.py'
  if bundle and cleanup_script_name not in bundle.files:
    bundle.add_file(
        os.path.join(TOOLS_PATH, cleanup_script_name), cleanup_script_name)

  run_cmd = ['python', run_test_name]
  run_cmd += ['--hash', manifest.isolated_hash]
  run_cmd += ['--namespace', manifest.namespace]
  if file_path.is_url(manifest.isolate_server):
    run_cmd += ['--isolate-server', manifest.isolate_server]
  else:
    run_cmd += ['--indir', manifest.isolate_server]
  if manifest.verbose or manifest.profile:
    # Have it print the profiling section.
    run_cmd.append('--verbose')
  # Forward extra args to run_isolated.py; it hands them to the command.
  if manifest.extra_args:
    run_cmd.append('--')
    run_cmd += manifest.extra_args
  manifest.add_task('Run Test', run_cmd)

  # Clean up.
  manifest.add_task('Clean Up', ['python', cleanup_script_name])
def __init__(self, base_url, namespace):
  """Storage backed by a remote isolate server.

  Args:
    base_url: server URL; any trailing slashes are stripped.
    namespace: namespace to read/write content under.
  """
  super(IsolateServer, self).__init__()
  assert file_path.is_url(base_url), base_url
  self._base_url = base_url.rstrip('/')
  self._namespace = namespace
  # Namespaces ending in '-gzip' or '-flate' hold deflate-compressed blobs.
  compression = 'flate' if namespace.endswith(('-gzip', '-flate')) else ''
  self._namespace_dict = {
    'compression': compression,
    'digest_hash': 'sha-1',
    'namespace': namespace,
  }
  self._lock = threading.Lock()
  self._server_caps = None
  self._memory_use = 0
def __init__(self, base_url, namespace):
  """Storage backed by a remote isolate server.

  Args:
    base_url: server URL; any trailing slashes are stripped.
    namespace: namespace to read/write content under; also selects the hash
        algorithm via isolated_format.get_hash_algo.
  """
  super(IsolateServer, self).__init__()
  assert file_path.is_url(base_url), base_url
  self._base_url = base_url.rstrip('/')
  self._namespace = namespace
  # The namespace encodes both the hashing scheme and the compression:
  # '-gzip'/'-flate' suffixes mean deflate-compressed blobs.
  algo = isolated_format.get_hash_algo(namespace)
  compression = 'flate' if namespace.endswith(('-gzip', '-flate')) else ''
  self._namespace_dict = {
    'compression': compression,
    'digest_hash': isolated_format.SUPPORTED_ALGOS_REVERSE[algo],
    'namespace': namespace,
  }
  self._lock = threading.Lock()
  self._server_caps = None
  self._memory_use = 0
def CMDrun(parser, args):
  """Triggers a task and wait for the results.

  Basically, does everything to run a command remotely.
  """
  add_trigger_options(parser)
  add_collect_options(parser)
  add_sharding_options(parser)
  args, isolated_cmd_args = extract_isolated_command_extra_args(args)
  options, args = parser.parse_args(args)
  process_trigger_options(parser, options, args)

  auth.ensure_logged_in(options.swarming)
  if file_path.is_url(options.isolate_server):
    auth.ensure_logged_in(options.isolate_server)

  try:
    tasks, task_name = trigger(
        swarming=options.swarming,
        isolate_server=options.isolate_server or options.indir,
        namespace=options.namespace,
        file_hash_or_isolated=args[0],
        task_name=options.task_name,
        extra_args=isolated_cmd_args,
        shards=options.shards,
        dimensions=options.dimensions,
        env=dict(options.env),
        deadline=options.deadline,
        verbose=options.verbose,
        profile=options.profile,
        priority=options.priority)
  except Failure as e:
    on_error.report(
        'Failed to trigger %s(%s): %s' %
        (options.task_name, args[0], e.args[0]))
    return 1

  if not tasks:
    on_error.report('Failed to trigger the task.')
    return 1
  if task_name != options.task_name:
    print('Triggered task: %s' % task_name)

  try:
    # TODO(maruel): Use task_ids, it's much more efficient!
    return collect(
        options.swarming,
        task_name,
        options.shards,
        options.timeout,
        options.decorate,
        options.print_status_updates,
        options.task_summary_json,
        options.task_output_dir)
  except Failure:
    on_error.report(None)
    return 1
def setup_run_isolated(manifest, bundle):
  """Sets up the manifest to run an isolated task via run_isolated.py.

  Modifies |bundle| (by adding files) and |manifest| (by adding commands) in
  place.

  Args:
    manifest: Manifest with swarm task definition.
    bundle: ZipPackage with files that would be transfered to swarm bot.
        If None, only |manifest| is modified (useful in tests).
  """
  # The zip is added uncompressed here; the whole package sent to the
  # Swarming server is compressed in a single pass later.
  run_test_name = 'run_isolated.zip'
  if bundle and run_test_name not in bundle.files:
    zip_contents = run_isolated.get_as_zip_package().zip_into_buffer(
        compress=False)
    bundle.add_buffer(run_test_name, zip_contents)

  cleanup_script_name = 'swarm_cleanup.py'
  if bundle and cleanup_script_name not in bundle.files:
    bundle.add_file(
        os.path.join(TOOLS_PATH, cleanup_script_name), cleanup_script_name)

  run_cmd = ['python', run_test_name]
  run_cmd += ['--hash', manifest.isolated_hash]
  run_cmd += ['--namespace', manifest.namespace]
  if file_path.is_url(manifest.isolate_server):
    run_cmd += ['--isolate-server', manifest.isolate_server]
  else:
    run_cmd += ['--indir', manifest.isolate_server]
  if manifest.verbose or manifest.profile:
    # Have it print the profiling section.
    run_cmd.append('--verbose')
  # Forward extra args to run_isolated.py; it hands them to the command.
  if manifest.extra_args:
    run_cmd.append('--')
    run_cmd += manifest.extra_args
  manifest.add_task('Run Test', run_cmd)

  # Clean up.
  manifest.add_task('Clean Up', ['python', cleanup_script_name])
def extract_output_files_location(task_log):
  """Task log -> location of task output files to fetch.

  TODO(vadimsh,maruel): Use side-channel to get this information.
  See 'run_tha_test' in run_isolated.py for where the data is generated.

  Returns:
    Dict with keys 'hash', 'namespace', 'server' and 'view_url' (a browse URL
    on the isolate server) on success. None if information is missing or can
    not be parsed.
  """
  # DOC FIX: the docstring claimed a tuple was returned, but this variant
  # builds and returns a dict (see the return statement below).
  if not task_log:
    return None
  match = re.search(
      r'\[run_isolated_out_hack\](.*)\[/run_isolated_out_hack\]',
      task_log, re.DOTALL)
  if not match:
    return None

  def to_ascii(val):
    # Only plain strings are acceptable; anything else is malformed data.
    if not isinstance(val, basestring):
      raise ValueError()
    return val.encode('ascii')

  try:
    data = json.loads(match.group(1))
    if not isinstance(data, dict):
      raise ValueError()
    isolated_hash = to_ascii(data['hash'])
    namespace = to_ascii(data['namespace'])
    isolate_server = to_ascii(data['storage'])
    if not file_path.is_url(isolate_server):
      raise ValueError()
    query = urllib.urlencode(
        [('namespace', namespace), ('hash', isolated_hash)])
    return {
      'hash': isolated_hash,
      'namespace': namespace,
      'server': isolate_server,
      'view_url': '%s/browse?%s' % (isolate_server, query),
    }
  except (KeyError, ValueError):
    logging.warning(
        'Unexpected value of run_isolated_out_hack: %s', match.group(1))
    return None
def chromium_setup(manifest):
  """Prepares the commands to run. Highly chromium specific."""
  # The zip is added uncompressed here; the whole package sent to the
  # Swarming server is compressed in a single pass later.
  run_test_name = 'run_isolated.zip'
  zip_contents = run_isolated.get_as_zip_package().zip_into_buffer(
      compress=False)
  manifest.bundle.add_buffer(run_test_name, zip_contents)

  cleanup_script_name = 'swarm_cleanup.py'
  manifest.bundle.add_file(
      os.path.join(TOOLS_PATH, cleanup_script_name), cleanup_script_name)

  run_cmd = ['python', run_test_name]
  run_cmd += ['--hash', manifest.isolated_hash]
  run_cmd += ['--namespace', manifest.namespace]
  if file_path.is_url(manifest.isolate_server):
    run_cmd += ['--isolate-server', manifest.isolate_server]
  else:
    run_cmd += ['--indir', manifest.isolate_server]
  if manifest.verbose or manifest.profile:
    # Have it print the profiling section.
    run_cmd.append('--verbose')
  # Forward extra args to run_isolated.py; it hands them to the command.
  if manifest.extra_args:
    run_cmd.append('--')
    run_cmd += manifest.extra_args
  manifest.add_task('Run Test', run_cmd)

  # Clean up.
  manifest.add_task('Clean Up', ['python', cleanup_script_name])
def __init__(self, url, namespace):
  """
  Args:
    url: URL of isolate service to use shared cloud based storage.
    namespace: isolate namespace to operate in, also defines hashing and
        compression scheme used, e.g. namespace names that end with '-gzip'
        store compressed data.
  """
  assert file_path.is_url(url) or not url, url
  self._url = url.rstrip('/')
  self._namespace = namespace
  # Default scheme is sha-1; a 'sha256-'/'sha512-' namespace prefix
  # selects a stronger hash.
  self._hash_algo = hashlib.sha1
  self._hash_algo_name = 'sha-1'
  for prefix, algo, name in (
      ('sha256-', hashlib.sha256, 'sha-256'),
      ('sha512-', hashlib.sha512, 'sha-512')):
    if self.namespace.startswith(prefix):
      self._hash_algo = algo
      self._hash_algo_name = name
  self._is_with_compression = self.namespace.endswith(('-gzip', '-deflate'))
def main(args):
  """Entry point: parses options, sets up the cache and runs the task."""
  tools.disable_buffering()
  parser = tools.OptionParserWithLogging(
      usage='%prog <options>',
      version=__version__,
      log_file=RUN_ISOLATED_LOG_FILE)

  data_group = optparse.OptionGroup(parser, 'Data source')
  data_group.add_option(
      '-s', '--isolated', metavar='FILE',
      help='File/url describing what to map or run')
  data_group.add_option(
      '-H', '--hash',
      help='Hash of the .isolated to grab from the hash table')
  isolateserver.add_isolate_server_options(data_group, True)
  parser.add_option_group(data_group)

  cache_group = optparse.OptionGroup(parser, 'Cache management')
  cache_group.add_option(
      '--cache', default='cache', metavar='DIR',
      help='Cache directory, default=%default')
  cache_group.add_option(
      '--max-cache-size', type='int', metavar='NNN',
      default=20 * 1024 * 1024 * 1024,
      help='Trim if the cache gets larger than this value, default=%default')
  cache_group.add_option(
      '--min-free-space', type='int', metavar='NNN',
      default=2 * 1024 * 1024 * 1024,
      help='Trim if disk free space becomes lower than this value, '
           'default=%default')
  cache_group.add_option(
      '--max-items', type='int', metavar='NNN', default=100000,
      help='Trim if more than this number of items are in the cache '
           'default=%default')
  parser.add_option_group(cache_group)

  auth.add_auth_options(parser)
  options, args = parser.parse_args(args)
  auth.process_auth_options(parser, options)
  isolateserver.process_isolate_server_options(data_group, options)

  if bool(options.isolated) == bool(options.hash):
    logging.debug('One and only one of --isolated or --hash is required.')
    parser.error('One and only one of --isolated or --hash is required.')

  options.cache = os.path.abspath(options.cache)
  policies = CachePolicies(
      options.max_cache_size, options.min_free_space, options.max_items)
  # |options.cache| path may not exist until DiskCache() instance is created.
  cache = DiskCache(
      options.cache, policies,
      isolateserver.get_hash_algo(options.namespace))

  remote = options.isolate_server or options.indir
  if file_path.is_url(remote):
    auth.ensure_logged_in(remote)
  with isolateserver.get_storage(remote, options.namespace) as storage:
    # Hashing schemes used by |storage| and |cache| MUST match.
    assert storage.hash_algo == cache.hash_algo
    return run_tha_test(options.isolated or options.hash, storage, cache, args)
def main(args):
  """Entry point: parses options, sets up the cache and runs the task."""
  tools.disable_buffering()
  parser = tools.OptionParserWithLogging(
      usage='%prog <options>',
      version=__version__,
      log_file=RUN_ISOLATED_LOG_FILE)

  data_group = optparse.OptionGroup(parser, 'Data source')
  data_group.add_option(
      '-s', '--isolated', metavar='FILE',
      help='File/url describing what to map or run')
  data_group.add_option(
      '-H', '--hash',
      help='Hash of the .isolated to grab from the hash table')
  isolateserver.add_isolate_server_options(data_group, True)
  parser.add_option_group(data_group)

  cache_group = optparse.OptionGroup(parser, 'Cache management')
  cache_group.add_option(
      '--cache', default='cache', metavar='DIR',
      help='Cache directory, default=%default')
  cache_group.add_option(
      '--max-cache-size', type='int', metavar='NNN',
      default=20*1024*1024*1024,
      help='Trim if the cache gets larger than this value, default=%default')
  cache_group.add_option(
      '--min-free-space', type='int', metavar='NNN',
      default=2*1024*1024*1024,
      help='Trim if disk free space becomes lower than this value, '
           'default=%default')
  cache_group.add_option(
      '--max-items', type='int', metavar='NNN', default=100000,
      help='Trim if more than this number of items are in the cache '
           'default=%default')
  parser.add_option_group(cache_group)

  auth.add_auth_options(parser)
  options, args = parser.parse_args(args)
  auth.process_auth_options(parser, options)
  isolateserver.process_isolate_server_options(data_group, options)

  if bool(options.isolated) == bool(options.hash):
    logging.debug('One and only one of --isolated or --hash is required.')
    parser.error('One and only one of --isolated or --hash is required.')

  options.cache = os.path.abspath(options.cache)
  policies = CachePolicies(
      options.max_cache_size, options.min_free_space, options.max_items)
  # |options.cache| path may not exist until DiskCache() instance is created.
  cache = DiskCache(
      options.cache, policies,
      isolateserver.get_hash_algo(options.namespace))

  remote = options.isolate_server or options.indir
  if file_path.is_url(remote):
    auth.ensure_logged_in(remote)
  with isolateserver.get_storage(remote, options.namespace) as storage:
    # Hashing schemes used by |storage| and |cache| MUST match.
    assert storage.hash_algo == cache.hash_algo
    return run_tha_test(options.isolated or options.hash, storage, cache, args)