def CMDarchive(parser, args):
  """Creates a .isolated file and uploads the tree to an isolate server.

  All the files listed in the .isolated file are put in the isolate server
  cache via isolateserver.py.
  """
  # Register the option groups this command understands on the shared parser.
  add_isolate_options(parser)
  add_subdir_option(parser)
  isolateserver.add_isolate_server_options(parser)
  auth.add_auth_options(parser)
  options, args = parser.parse_args(args)
  if args:
    parser.error('Unsupported argument: %s' % args)
  # Post-process/validate the parsed options before doing any work.
  process_isolate_options(parser, options)
  auth.process_auth_options(parser, options)
  isolateserver.process_isolate_server_options(parser, options, True, True)
  server_ref = isolate_storage.ServerRef(
      options.isolate_server, options.namespace)
  # Archive a single tree, rooted at the current working directory.
  result = isolate_and_archive([(options, unicode(os.getcwd()))], server_ref)
  if result is None:
    # isolate_and_archive returns None when the upload itself failed.
    return EXIT_CODE_UPLOAD_ERROR
  assert len(result) == 1, result
  # A None hash for the single entry means the .isolate file had an error.
  if result.values()[0] is None:
    return EXIT_CODE_ISOLATE_ERROR
  return 0
def parse_args(self, *args, **kwargs):
  """Parses the command line, then applies auth and swarming processing.

  When the parsed options expose an empty --user value, it is backfilled
  with the value returned by _process_swarming().
  """
  base_parser = logging_utils.OptionParserWithLogging
  options, leftover = base_parser.parse_args(self, *args, **kwargs)
  auth.process_auth_options(self, options)
  derived_user = self._process_swarming(options)
  # Only backfill when the attribute exists and was left unset.
  if hasattr(options, "user") and not options.user:
    options.user = derived_user
  return options, leftover
def parse_args(self, *args, **kwargs):
  """Parses the command line, requiring and normalizing --swarming."""
  parsed_options, leftover = tools.OptionParserWithLogging.parse_args(
      self, *args, **kwargs)
  # Normalize the server URL by stripping any trailing slash.
  parsed_options.swarming = parsed_options.swarming.rstrip("/")
  if not parsed_options.swarming:
    self.error("--swarming is required.")
  auth.process_auth_options(self, parsed_options)
  return parsed_options, leftover
def main(args):
  """Maps or runs an .isolated tree fetched through an isolate server.

  Exactly one of --isolated or --hash must be supplied.  Returns
  run_tha_test()'s result, or 1 on any exception.
  """
  tools.disable_buffering()
  parser = tools.OptionParserWithLogging(
      usage="%prog <options>",
      version=__version__,
      log_file=RUN_ISOLATED_LOG_FILE)
  # What to fetch and from where.
  data_group = optparse.OptionGroup(parser, "Data source")
  data_group.add_option(
      "-s", "--isolated",
      metavar="FILE",
      help="File/url describing what to map or run")
  data_group.add_option(
      "-H", "--hash",
      help="Hash of the .isolated to grab from the hash table")
  isolateserver.add_isolate_server_options(data_group, True)
  parser.add_option_group(data_group)
  # Local disk cache sizing / trimming policy.
  cache_group = optparse.OptionGroup(parser, "Cache management")
  cache_group.add_option(
      "--cache",
      default="cache",
      metavar="DIR",
      help="Cache directory, default=%default")
  cache_group.add_option(
      "--max-cache-size",
      type="int",
      metavar="NNN",
      default=20 * 1024 * 1024 * 1024,
      help="Trim if the cache gets larger than this value, default=%default",
  )
  cache_group.add_option(
      "--min-free-space",
      type="int",
      metavar="NNN",
      default=2 * 1024 * 1024 * 1024,
      help="Trim if disk free space becomes lower than this value, "
      "default=%default",
  )
  cache_group.add_option(
      "--max-items",
      type="int",
      metavar="NNN",
      default=100000,
      help="Trim if more than this number of items are in the cache "
      "default=%default",
  )
  parser.add_option_group(cache_group)
  auth.add_auth_options(parser)
  options, args = parser.parse_args(args)
  auth.process_auth_options(parser, options)
  # NOTE(review): the option *group* (not the parser) is passed here;
  # presumably process_isolate_server_options only reads option values --
  # confirm it never calls .error() on its first argument.
  isolateserver.process_isolate_server_options(data_group, options)
  if bool(options.isolated) == bool(options.hash):
    logging.debug("One and only one of --isolated or --hash is required.")
    parser.error("One and only one of --isolated or --hash is required.")
  options.cache = os.path.abspath(options.cache)
  policies = CachePolicies(
      options.max_cache_size, options.min_free_space, options.max_items)
  try:
    # |options.cache| path may not exist until DiskCache() instance is created.
    cache = DiskCache(
        options.cache, policies,
        isolateserver.get_hash_algo(options.namespace))
    remote = options.isolate_server or options.indir
    with isolateserver.get_storage(remote, options.namespace) as storage:
      # Hashing schemes used by |storage| and |cache| MUST match.
      assert storage.hash_algo == cache.hash_algo
      return run_tha_test(
          options.isolated or options.hash, storage, cache, args)
  except Exception as e:
    # Make sure any exception is logged.
    tools.report_error(e)
    logging.exception(e)
    return 1
def CMDarchive(parser, args):
  """Creates a .isolated file and uploads the tree to an isolate server.

  All the files listed in the .isolated file are put in the isolate server
  cache via isolateserver.py.
  """
  # Register the option groups this command understands on the shared parser.
  add_isolate_options(parser)
  add_subdir_option(parser)
  isolateserver.add_isolate_server_options(parser)
  auth.add_auth_options(parser)
  options, args = parser.parse_args(args)
  if args:
    parser.error('Unsupported argument: %s' % args)
  # Post-process/validate the parsed options before doing any work.
  process_isolate_options(parser, options)
  auth.process_auth_options(parser, options)
  isolateserver.process_isolate_server_options(parser, options, True)
  # Archive a single tree, rooted at the current working directory.
  result = isolate_and_archive(
      [(options, unicode(os.getcwd()))],
      options.isolate_server,
      options.namespace)
  if result is None:
    # isolate_and_archive returns None when the upload itself failed.
    return EXIT_CODE_UPLOAD_ERROR
  assert len(result) == 1, result
  # A None hash for the single entry means the .isolate file had an error.
  if result.values()[0] is None:
    return EXIT_CODE_ISOLATE_ERROR
  return 0
def parse_args(self, *args, **kwargs):
  """Parses arguments; --swarming is mandatory and gets normalized."""
  result = tools.OptionParserWithLogging.parse_args(self, *args, **kwargs)
  opts, extra = result
  # Strip a trailing slash off the server URL.
  opts.swarming = opts.swarming.rstrip('/')
  if not opts.swarming:
    self.error('--swarming is required.')
  auth.process_auth_options(self, opts)
  return opts, extra
def main(args):
  """Downloads an .isolated tree by hash and runs it.

  Returns run_tha_test()'s result as the process exit code.
  """
  parser = logging_utils.OptionParserWithLogging(
      usage='%prog <options>',
      version=__version__,
      log_file=RUN_ISOLATED_LOG_FILE)
  parser.add_option(
      '--json',
      help=
      'dump output metadata to json file. When used, run_isolated returns '
      'non-zero only on internal failure')
  parser.add_option(
      '--hard-timeout', type='float', help='Enforce hard timeout in execution')
  parser.add_option(
      '--grace-period', type='float',
      help='Grace period between SIGTERM and SIGKILL')
  data_group = optparse.OptionGroup(parser, 'Data source')
  data_group.add_option(
      '-s', '--isolated',
      help='Hash of the .isolated to grab from the isolate server')
  isolateserver.add_isolate_server_options(data_group)
  parser.add_option_group(data_group)
  isolateserver.add_cache_options(parser)
  parser.set_defaults(cache='cache')
  debug_group = optparse.OptionGroup(parser, 'Debugging')
  debug_group.add_option(
      '--leak-temp-dir',
      action='store_true',
      help='Deliberately leak isolate\'s temp dir for later examination '
      '[default: %default]')
  debug_group.add_option(
      '--root-dir', help='Use a directory instead of a random one')
  parser.add_option_group(debug_group)
  auth.add_auth_options(parser)
  options, args = parser.parse_args(args)
  if not options.isolated:
    parser.error('--isolated is required.')
  auth.process_auth_options(parser, options)
  isolateserver.process_isolate_server_options(parser, options, True)
  cache = isolateserver.process_cache_options(options)
  # Normalize user-supplied paths to absolute unicode paths.
  if options.root_dir:
    options.root_dir = unicode(os.path.abspath(options.root_dir))
  if options.json:
    options.json = unicode(os.path.abspath(options.json))
  with isolateserver.get_storage(
      options.isolate_server, options.namespace) as storage:
    # Hashing schemes used by |storage| and |cache| MUST match.
    assert storage.hash_algo == cache.hash_algo
    return run_tha_test(
        options.isolated, storage, cache, options.leak_temp_dir, options.json,
        options.root_dir, options.hard_timeout, options.grace_period, args)
def parse_args(self, *args, **kwargs):
  """Parses options; validates swarming/auth/isolate-server settings.

  Also converts the accumulated dimension pairs into a plain dict.
  """
  parent_parse = logging_utils.OptionParserWithLogging.parse_args
  options, positional = parent_parse(self, *args, **kwargs)
  # Strip a trailing slash off the mandatory server URL.
  options.swarming = options.swarming.rstrip('/')
  if not options.swarming:
    self.error('--swarming is required.')
  auth.process_auth_options(self, options)
  isolateserver.process_isolate_server_options(self, options, True)
  options.dimensions = dict(options.dimensions)
  return options, positional
def parse_args(self, *args, **kwargs):
  """Parses options; requires --swarming and post-processes option values."""
  options, args = tools.OptionParserWithLogging.parse_args(
      self, *args, **kwargs)
  # Normalize the mandatory server URL by stripping a trailing slash.
  options.swarming = options.swarming.rstrip('/')
  if not options.swarming:
    self.error('--swarming is required.')
  auth.process_auth_options(self, options)
  # NOTE(review): the False flag's meaning is defined in isolateserver;
  # presumably it relaxes a requirement on the isolate server -- confirm.
  isolateserver.process_isolate_server_options(self, options, False)
  # Collected (key, value) pairs become a plain dict.
  options.dimensions = dict(options.dimensions)
  return options, args
def main(args):
  """Downloads an .isolated tree by hash and runs it.

  Returns run_tha_test()'s result as the process exit code.
  """
  tools.disable_buffering()
  parser = logging_utils.OptionParserWithLogging(
      usage='%prog <options>',
      version=__version__,
      log_file=RUN_ISOLATED_LOG_FILE)
  parser.add_option(
      '--json',
      help='dump output metadata to json file. When used, run_isolated returns '
      'non-zero only on internal failure')
  parser.add_option(
      '--hard-timeout', type='int', help='Enforce hard timeout in execution')
  parser.add_option(
      '--grace-period', type='int',
      help='Grace period between SIGTERM and SIGKILL')
  data_group = optparse.OptionGroup(parser, 'Data source')
  data_group.add_option(
      '-s', '--isolated',
      help='Hash of the .isolated to grab from the isolate server')
  isolateserver.add_isolate_server_options(data_group)
  parser.add_option_group(data_group)
  isolateserver.add_cache_options(parser)
  parser.set_defaults(cache='cache')
  debug_group = optparse.OptionGroup(parser, 'Debugging')
  debug_group.add_option(
      '--leak-temp-dir',
      action='store_true',
      help='Deliberately leak isolate\'s temp dir for later examination '
      '[default: %default]')
  debug_group.add_option(
      '--root-dir', help='Use a directory instead of a random one')
  parser.add_option_group(debug_group)
  auth.add_auth_options(parser)
  options, args = parser.parse_args(args)
  if not options.isolated:
    parser.error('--isolated is required.')
  auth.process_auth_options(parser, options)
  isolateserver.process_isolate_server_options(parser, options, True)
  cache = isolateserver.process_cache_options(options)
  # Normalize user-supplied paths to absolute unicode paths.
  if options.root_dir:
    options.root_dir = unicode(os.path.abspath(options.root_dir))
  if options.json:
    options.json = unicode(os.path.abspath(options.json))
  with isolateserver.get_storage(
      options.isolate_server, options.namespace) as storage:
    # Hashing schemes used by |storage| and |cache| MUST match.
    assert storage.hash_algo == cache.hash_algo
    return run_tha_test(
        options.isolated, storage, cache, options.leak_temp_dir, options.json,
        options.root_dir, options.hard_timeout, options.grace_period, args)
def main(args):
  """Downloads an .isolated tree by hash and runs it.

  Returns run_tha_test()'s result as the process exit code.
  """
  tools.disable_buffering()
  parser = tools.OptionParserWithLogging(
      usage='%prog <options>',
      version=__version__,
      log_file=RUN_ISOLATED_LOG_FILE)
  data_group = optparse.OptionGroup(parser, 'Data source')
  data_group.add_option(
      '-s', '--isolated',
      help='Hash of the .isolated to grab from the isolate server')
  # Hidden alias for -s (same dest, suppressed from --help); presumably kept
  # for backward compatibility with older callers.
  data_group.add_option(
      '-H', dest='isolated', help=optparse.SUPPRESS_HELP)
  isolateserver.add_isolate_server_options(data_group)
  parser.add_option_group(data_group)
  isolateserver.add_cache_options(parser)
  parser.set_defaults(cache='cache')
  debug_group = optparse.OptionGroup(parser, 'Debugging')
  debug_group.add_option(
      '--leak-temp-dir',
      action='store_true',
      help='Deliberately leak isolate\'s temp dir for later examination '
      '[default: %default]')
  parser.add_option_group(debug_group)
  auth.add_auth_options(parser)
  options, args = parser.parse_args(args)
  if not options.isolated:
    parser.error('--isolated is required.')
  auth.process_auth_options(parser, options)
  isolateserver.process_isolate_server_options(parser, options, True)
  cache = isolateserver.process_cache_options(options)
  with isolateserver.get_storage(
      options.isolate_server, options.namespace) as storage:
    # Hashing schemes used by |storage| and |cache| MUST match.
    assert storage.hash_algo == cache.hash_algo
    return run_tha_test(
        options.isolated, storage, cache, options.leak_temp_dir, args)
def main(args):
  """Downloads an .isolated tree by hash and runs it.

  Returns run_tha_test()'s result as the process exit code.
  """
  tools.disable_buffering()
  parser = logging_utils.OptionParserWithLogging(
      usage='%prog <options>',
      version=__version__,
      log_file=RUN_ISOLATED_LOG_FILE)
  data_group = optparse.OptionGroup(parser, 'Data source')
  data_group.add_option(
      '-s', '--isolated',
      help='Hash of the .isolated to grab from the isolate server')
  # Hidden alias for -s (same dest, suppressed from --help); presumably kept
  # for backward compatibility with older callers.
  data_group.add_option(
      '-H', dest='isolated', help=optparse.SUPPRESS_HELP)
  isolateserver.add_isolate_server_options(data_group)
  parser.add_option_group(data_group)
  isolateserver.add_cache_options(parser)
  parser.set_defaults(cache='cache')
  debug_group = optparse.OptionGroup(parser, 'Debugging')
  debug_group.add_option(
      '--leak-temp-dir',
      action='store_true',
      help='Deliberately leak isolate\'s temp dir for later examination '
      '[default: %default]')
  parser.add_option_group(debug_group)
  auth.add_auth_options(parser)
  options, args = parser.parse_args(args)
  if not options.isolated:
    parser.error('--isolated is required.')
  auth.process_auth_options(parser, options)
  isolateserver.process_isolate_server_options(parser, options, True)
  cache = isolateserver.process_cache_options(options)
  with isolateserver.get_storage(
      options.isolate_server, options.namespace) as storage:
    # Hashing schemes used by |storage| and |cache| MUST match.
    assert storage.hash_algo == cache.hash_algo
    return run_tha_test(
        options.isolated, storage, cache, options.leak_temp_dir, args)
def main(args):
  """Downloads an .isolated tree by hash and runs it.

  Returns run_tha_test()'s result as the process exit code.
  """
  tools.disable_buffering()
  parser = logging_utils.OptionParserWithLogging(
      usage="%prog <options>", version=__version__, log_file=RUN_ISOLATED_LOG_FILE
  )
  parser.add_option(
      "--json",
      help="dump output metadata to json file. When used, run_isolated returns "
      "non-zero only on internal failure",
  )
  data_group = optparse.OptionGroup(parser, "Data source")
  data_group.add_option(
      "-s", "--isolated",
      help="Hash of the .isolated to grab from the isolate server")
  isolateserver.add_isolate_server_options(data_group)
  parser.add_option_group(data_group)
  isolateserver.add_cache_options(parser)
  parser.set_defaults(cache="cache")
  debug_group = optparse.OptionGroup(parser, "Debugging")
  debug_group.add_option(
      "--leak-temp-dir",
      action="store_true",
      help="Deliberately leak isolate's temp dir for later examination "
      "[default: %default]",
  )
  debug_group.add_option(
      "--root-dir", help="Use a directory instead of a random one")
  parser.add_option_group(debug_group)
  auth.add_auth_options(parser)
  options, args = parser.parse_args(args)
  if not options.isolated:
    parser.error("--isolated is required.")
  auth.process_auth_options(parser, options)
  isolateserver.process_isolate_server_options(parser, options, True)
  cache = isolateserver.process_cache_options(options)
  with isolateserver.get_storage(
      options.isolate_server, options.namespace) as storage:
    # Hashing schemes used by |storage| and |cache| MUST match.
    assert storage.hash_algo == cache.hash_algo
    return run_tha_test(
        options.isolated, storage, cache, options.leak_temp_dir, options.json,
        options.root_dir, args
    )
def CMDbatcharchive(parser, args):
  """Archives multiple isolated trees at once.

  Using single command instead of multiple sequential invocations allows to cut
  redundant work when isolated trees share common files (e.g. file hashes are
  checked only once, their presence on the server is checked only once, and so
  on).

  Takes a list of paths to *.isolated.gen.json files that describe what trees
  to isolate. Format of files is:
  {
    "version": 1,
    "dir": <absolute path to a directory all other paths are relative to>,
    "args": [list of command line arguments for single 'archive' command]
  }
  """
  isolateserver.add_isolate_server_options(parser)
  isolateserver.add_archive_options(parser)
  auth.add_auth_options(parser)
  parser.add_option(
      "--dump-json", metavar="FILE",
      help="Write isolated hashes of archived trees to this file as JSON"
  )
  options, args = parser.parse_args(args)
  auth.process_auth_options(parser, options)
  isolateserver.process_isolate_server_options(parser, options, True)

  # Validate all incoming options, prepare what needs to be archived as a list
  # of tuples (archival options, working directory).
  work_units = []
  for gen_json_path in args:
    # Validate JSON format of a *.isolated.gen.json file.
    try:
      data = tools.read_json(gen_json_path)
    except IOError as e:
      # Report a clean error instead of crashing with a traceback when the
      # file is missing or unreadable (matches the newer variant of this
      # command).
      parser.error("Failed to open %s: %s" % (gen_json_path, e))
    if data.get("version") != ISOLATED_GEN_JSON_VERSION:
      parser.error("Invalid version in %s" % gen_json_path)
    cwd = data.get("dir")
    if not isinstance(cwd, unicode) or not os.path.isdir(cwd):
      parser.error("Invalid dir in %s" % gen_json_path)
    args = data.get("args")
    if not isinstance(args, list) or not all(isinstance(x, unicode) for x in args):
      parser.error("Invalid args in %s" % gen_json_path)
    # Convert command line (embedded in JSON) to Options object.
    work_units.append((parse_archive_command_line(args, cwd), cwd))

  # Perform the archival, all at once.
  isolated_hashes = isolate_and_archive(
      work_units, options.isolate_server, options.namespace)
  # TODO(vadimsh): isolate_and_archive returns None on upload failure, there's
  # no way currently to figure out what *.isolated file from a batch were
  # successfully uploaded, so consider them all failed (and emit empty dict
  # as JSON result).
  if options.dump_json:
    tools.write_json(options.dump_json, isolated_hashes or {}, False)
  if isolated_hashes is None:
    return EXIT_CODE_UPLOAD_ERROR
  # isolated_hashes[x] is None if 'x.isolate' contains a error.
  if not all(isolated_hashes.itervalues()):
    return EXIT_CODE_ISOLATE_ERROR
  return 0
def main(args):
  """Entry point: cleans caches or runs an isolated task per the options.

  Returns the process exit code (0 on success, 1 on cipd/named-cache errors,
  otherwise run_tha_test()'s result).
  """
  (parser, options, args) = parse_args(args)

  isolate_cache = isolateserver.process_cache_options(options, trim=False)
  named_cache_manager = named_cache.process_named_cache_options(
      parser, options)
  if options.clean:
    # --clean is exclusive with any option implying an actual task run.
    if options.isolated:
      parser.error('Can\'t use --isolated with --clean.')
    if options.isolate_server:
      parser.error('Can\'t use --isolate-server with --clean.')
    if options.json:
      parser.error('Can\'t use --json with --clean.')
    if options.named_caches:
      # Bug fix: the message previously read 'Can\t ...' which emitted a
      # literal tab character instead of the apostrophe.
      parser.error('Can\'t use --named-cache with --clean.')
    clean_caches(options, isolate_cache, named_cache_manager)
    return 0

  if not options.no_clean:
    clean_caches(options, isolate_cache, named_cache_manager)

  if not options.isolated and not args:
    parser.error('--isolated or command to run is required.')

  auth.process_auth_options(parser, options)

  isolateserver.process_isolate_server_options(parser, options, True, False)
  if not options.isolate_server:
    if options.isolated:
      parser.error('--isolated requires --isolate-server')
    if ISOLATED_OUTDIR_PARAMETER in args:
      parser.error(
          '%s in args requires --isolate-server' % ISOLATED_OUTDIR_PARAMETER)

  # Normalize user-supplied paths to absolute unicode paths.
  if options.root_dir:
    options.root_dir = unicode(os.path.abspath(options.root_dir))
  if options.json:
    options.json = unicode(os.path.abspath(options.json))

  cipd.validate_cipd_options(parser, options)
  install_packages_fn = noop_install_packages
  if options.cipd_enabled:
    install_packages_fn = lambda run_dir: install_client_and_packages(
        run_dir, cipd.parse_package_args(options.cipd_packages),
        options.cipd_server, options.cipd_client_package,
        options.cipd_client_version, cache_dir=options.cipd_cache)

  @contextlib.contextmanager
  def init_named_caches(run_dir):
    # WARNING: this function depends on "options" variable defined in the outer
    # function.
    with named_cache_manager.open():
      named_cache_manager.create_symlinks(run_dir, options.named_caches)
    try:
      yield
    finally:
      if not options.leak_temp_dir:
        named_cache_manager.delete_symlinks(run_dir, options.named_caches)

  try:
    if options.isolate_server:
      storage = isolateserver.get_storage(
          options.isolate_server, options.namespace)
      with storage:
        # Hashing schemes used by |storage| and |isolate_cache| MUST match.
        assert storage.hash_algo == isolate_cache.hash_algo
        return run_tha_test(
            args, options.isolated, storage, isolate_cache, options.output,
            init_named_caches, options.leak_temp_dir, options.json,
            options.root_dir, options.hard_timeout, options.grace_period,
            options.bot_file, install_packages_fn, options.use_symlinks)
    return run_tha_test(
        args, options.isolated, None, isolate_cache, options.output,
        init_named_caches, options.leak_temp_dir, options.json,
        options.root_dir, options.hard_timeout, options.grace_period,
        options.bot_file, install_packages_fn, options.use_symlinks)
  except (cipd.Error, named_cache.Error) as ex:
    print >> sys.stderr, ex.message
    return 1
def main(args):
  """Entry point: cleans caches or runs an isolated task per the options.

  Returns the process exit code (0 on success, 1 on cipd/named-cache errors,
  otherwise run_tha_test()'s result).
  """
  # Warning: when --argsfile is used, the strings are unicode instances, when
  # parsed normally, the strings are str instances.
  (parser, options, args) = parse_args(args)

  if not file_path.enable_symlink():
    logging.error('Symlink support is not enabled')

  isolate_cache = isolateserver.process_cache_options(options, trim=False)
  named_cache_manager = named_cache.process_named_cache_options(parser, options)
  if options.clean:
    # --clean is exclusive with any option implying an actual task run.
    if options.isolated:
      parser.error('Can\'t use --isolated with --clean.')
    if options.isolate_server:
      parser.error('Can\'t use --isolate-server with --clean.')
    if options.json:
      parser.error('Can\'t use --json with --clean.')
    if options.named_caches:
      # Bug fix: the message previously read 'Can\t ...' which emitted a
      # literal tab character instead of the apostrophe.
      parser.error('Can\'t use --named-cache with --clean.')
    clean_caches(options, isolate_cache, named_cache_manager)
    return 0

  if not options.no_clean:
    clean_caches(options, isolate_cache, named_cache_manager)

  if not options.isolated and not args:
    parser.error('--isolated or command to run is required.')

  auth.process_auth_options(parser, options)

  isolateserver.process_isolate_server_options(
      parser, options, True, False)
  if not options.isolate_server:
    if options.isolated:
      parser.error('--isolated requires --isolate-server')
    if ISOLATED_OUTDIR_PARAMETER in args:
      parser.error(
          '%s in args requires --isolate-server' % ISOLATED_OUTDIR_PARAMETER)

  # Normalize user-supplied paths to absolute unicode paths.
  if options.root_dir:
    options.root_dir = unicode(os.path.abspath(options.root_dir))
  if options.json:
    options.json = unicode(os.path.abspath(options.json))

  # --env entries must be 'key=value'; an empty value deletes the variable.
  if any('=' not in i for i in options.env):
    parser.error(
        '--env required key=value form. value can be skipped to delete '
        'the variable')
  options.env = dict(i.split('=', 1) for i in options.env)

  # --env-prefix paths must be relative and must stay inside the working dir.
  prefixes = {}
  cwd = os.path.realpath(os.getcwd())
  for item in options.env_prefix:
    if '=' not in item:
      parser.error(
          '--env-prefix %r is malformed, must be in the form `VAR=./path`'
          % item)
    key, opath = item.split('=', 1)
    if os.path.isabs(opath):
      parser.error('--env-prefix %r path is bad, must be relative.' % opath)
    opath = os.path.normpath(opath)
    if not os.path.realpath(os.path.join(cwd, opath)).startswith(cwd):
      parser.error(
          '--env-prefix %r path is bad, must be relative and not contain `..`.'
          % opath)
    prefixes.setdefault(key, []).append(opath)
  options.env_prefix = prefixes

  cipd.validate_cipd_options(parser, options)
  install_packages_fn = noop_install_packages
  if options.cipd_enabled:
    install_packages_fn = lambda run_dir: install_client_and_packages(
        run_dir, cipd.parse_package_args(options.cipd_packages),
        options.cipd_server, options.cipd_client_package,
        options.cipd_client_version, cache_dir=options.cipd_cache)

  @contextlib.contextmanager
  def install_named_caches(run_dir):
    # WARNING: this function depends on "options" variable defined in the outer
    # function.
    caches = [
      (os.path.join(run_dir, unicode(relpath)), name)
      for name, relpath in options.named_caches
    ]
    with named_cache_manager.open():
      for path, name in caches:
        named_cache_manager.install(path, name)
    try:
      yield
    finally:
      # Uninstall each named cache, returning it to the cache pool. If an
      # uninstall fails for a given cache, it will remain in the task's
      # temporary space, get cleaned up by the Swarming bot, and be lost.
      #
      # If the Swarming bot cannot clean up the cache, it will handle it like
      # any other bot file that could not be removed.
      with named_cache_manager.open():
        for path, name in caches:
          try:
            named_cache_manager.uninstall(path, name)
          except named_cache.Error:
            logging.exception('Error while removing named cache %r at %r. '
                              'The cache will be lost.', path, name)

  extra_args = []
  command = []

  # --raw-cmd means args is the actual command to run; otherwise args are
  # extra arguments appended to the isolated command.
  if options.raw_cmd:
    command = args
    if options.relative_cwd:
      a = os.path.normpath(os.path.abspath(options.relative_cwd))
      if not a.startswith(os.getcwd()):
        parser.error(
            '--relative-cwd must not try to escape the working directory')
  else:
    if options.relative_cwd:
      parser.error('--relative-cwd requires --raw-cmd')
    extra_args = args

  data = TaskData(
      command=command,
      relative_cwd=options.relative_cwd,
      extra_args=extra_args,
      isolated_hash=options.isolated,
      storage=None,
      isolate_cache=isolate_cache,
      outputs=options.output,
      install_named_caches=install_named_caches,
      leak_temp_dir=options.leak_temp_dir,
      root_dir=_to_unicode(options.root_dir),
      hard_timeout=options.hard_timeout,
      grace_period=options.grace_period,
      bot_file=options.bot_file,
      switch_to_account=options.switch_to_account,
      install_packages_fn=install_packages_fn,
      use_symlinks=options.use_symlinks,
      env=options.env,
      env_prefix=options.env_prefix)
  try:
    if options.isolate_server:
      storage = isolateserver.get_storage(
          options.isolate_server, options.namespace)
      with storage:
        data = data._replace(storage=storage)
        # Hashing schemes used by |storage| and |isolate_cache| MUST match.
        assert storage.hash_algo == isolate_cache.hash_algo
        return run_tha_test(data, options.json)
    return run_tha_test(data, options.json)
  except (cipd.Error, named_cache.Error) as ex:
    print >> sys.stderr, ex.message
    return 1
def main(args):
  """Entry point: cleans the cache or runs an isolated task per the options.

  Returns the process exit code (0 on success, 1 on cipd errors, otherwise
  run_tha_test()'s result).
  """
  parser = create_option_parser()
  options, args = parser.parse_args(args)

  cache = isolateserver.process_cache_options(options)
  if options.clean:
    # --clean is exclusive with any option implying an actual task run.
    if options.isolated:
      parser.error('Can\'t use --isolated with --clean.')
    if options.isolate_server:
      parser.error('Can\'t use --isolate-server with --clean.')
    if options.json:
      parser.error('Can\'t use --json with --clean.')
    cache.cleanup()
    return 0
  if not options.no_clean:
    cache.cleanup()

  if not options.isolated and not args:
    parser.error('--isolated or command to run is required.')

  auth.process_auth_options(parser, options)

  isolateserver.process_isolate_server_options(parser, options, True, False)
  if not options.isolate_server:
    if options.isolated:
      parser.error('--isolated requires --isolate-server')
    if ISOLATED_OUTDIR_PARAMETER in args:
      parser.error(
          '%s in args requires --isolate-server' % ISOLATED_OUTDIR_PARAMETER)

  # Normalize user-supplied paths to absolute unicode paths.
  if options.root_dir:
    options.root_dir = unicode(os.path.abspath(options.root_dir))
  if options.json:
    options.json = unicode(os.path.abspath(options.json))

  cipd.validate_cipd_options(parser, options)
  install_packages_fn = lambda run_dir: install_packages(
      run_dir, cipd.parse_package_args(options.cipd_packages),
      options.cipd_server, options.cipd_client_package,
      options.cipd_client_version, cache_dir=options.cipd_cache)

  try:
    # With --isolated the command comes from the isolated file; otherwise the
    # positional args are the command itself.
    command = [] if options.isolated else args
    if options.isolate_server:
      storage = isolateserver.get_storage(
          options.isolate_server, options.namespace)
      with storage:
        # Hashing schemes used by |storage| and |cache| MUST match.
        assert storage.hash_algo == cache.hash_algo
        return run_tha_test(
            command, options.isolated, storage, cache, options.leak_temp_dir,
            options.json, options.root_dir, options.hard_timeout,
            options.grace_period, options.bot_file, args, install_packages_fn,
            options.use_symlinks)
    return run_tha_test(
        command, options.isolated, None, cache, options.leak_temp_dir,
        options.json, options.root_dir, options.hard_timeout,
        options.grace_period, options.bot_file, args, install_packages_fn,
        options.use_symlinks)
  except cipd.Error as ex:
    print >> sys.stderr, ex.message
    return 1
def CMDbatcharchive(parser, args):
  """Archives multiple isolated trees at once.

  Using single command instead of multiple sequential invocations allows to cut
  redundant work when isolated trees share common files (e.g. file hashes are
  checked only once, their presence on the server is checked only once, and so
  on).

  Takes a list of paths to *.isolated.gen.json files that describe what trees
  to isolate. Format of files is:
  {
    "version": 1,
    "dir": <absolute path to a directory all other paths are relative to>,
    "args": [list of command line arguments for single 'archive' command]
  }
  """
  isolateserver.add_isolate_server_options(parser)
  isolateserver.add_archive_options(parser)
  auth.add_auth_options(parser)
  parser.add_option(
      '--dump-json',
      metavar='FILE',
      help='Write isolated hashes of archived trees to this file as JSON')
  options, args = parser.parse_args(args)
  auth.process_auth_options(parser, options)
  isolateserver.process_isolate_server_options(parser, options, True, True)

  # Validate all incoming options, prepare what needs to be archived as a list
  # of tuples (archival options, working directory).
  work_units = []
  for gen_json_path in args:
    # Validate JSON format of a *.isolated.gen.json file.
    try:
      data = tools.read_json(gen_json_path)
    except IOError as e:
      # Report a clean parser error instead of a raw traceback.
      parser.error('Failed to open %s: %s' % (gen_json_path, e))
    if data.get('version') != ISOLATED_GEN_JSON_VERSION:
      parser.error('Invalid version in %s' % gen_json_path)
    cwd = data.get('dir')
    if not isinstance(cwd, unicode) or not fs.isdir(cwd):
      parser.error('Invalid dir in %s' % gen_json_path)
    args = data.get('args')
    if (not isinstance(args, list) or
        not all(isinstance(x, unicode) for x in args)):
      parser.error('Invalid args in %s' % gen_json_path)
    # Convert command line (embedded in JSON) to Options object.
    work_units.append((parse_archive_command_line(args, cwd), cwd))

  # Perform the archival, all at once.
  isolated_hashes = isolate_and_archive(
      work_units, options.isolate_server, options.namespace)
  # TODO(vadimsh): isolate_and_archive returns None on upload failure, there's
  # no way currently to figure out what *.isolated file from a batch were
  # successfully uploaded, so consider them all failed (and emit empty dict
  # as JSON result).
  if options.dump_json:
    tools.write_json(options.dump_json, isolated_hashes or {}, False)
  if isolated_hashes is None:
    return EXIT_CODE_UPLOAD_ERROR
  # isolated_hashes[x] is None if 'x.isolate' contains a error.
  if not all(isolated_hashes.itervalues()):
    return EXIT_CODE_ISOLATE_ERROR
  return 0
def main(args):
  """Entry point: cleans caches or runs an isolated task per the options.

  Returns the process exit code (0 on success, 1 on cipd/named-cache errors,
  otherwise run_tha_test()'s result).
  """
  (parser, options, args) = parse_args(args)

  isolate_cache = isolateserver.process_cache_options(options, trim=False)
  named_cache_manager = named_cache.process_named_cache_options(parser, options)
  if options.clean:
    # --clean is exclusive with any option implying an actual task run.
    if options.isolated:
      parser.error('Can\'t use --isolated with --clean.')
    if options.isolate_server:
      parser.error('Can\'t use --isolate-server with --clean.')
    if options.json:
      parser.error('Can\'t use --json with --clean.')
    if options.named_caches:
      # Bug fix: the message previously read 'Can\t ...' which emitted a
      # literal tab character instead of the apostrophe.
      parser.error('Can\'t use --named-cache with --clean.')
    clean_caches(options, isolate_cache, named_cache_manager)
    return 0

  if not options.no_clean:
    clean_caches(options, isolate_cache, named_cache_manager)

  if not options.isolated and not args:
    parser.error('--isolated or command to run is required.')

  auth.process_auth_options(parser, options)

  isolateserver.process_isolate_server_options(
      parser, options, True, False)
  if not options.isolate_server:
    if options.isolated:
      parser.error('--isolated requires --isolate-server')
    if ISOLATED_OUTDIR_PARAMETER in args:
      parser.error(
          '%s in args requires --isolate-server' % ISOLATED_OUTDIR_PARAMETER)

  # Normalize user-supplied paths to absolute unicode paths.
  if options.root_dir:
    options.root_dir = unicode(os.path.abspath(options.root_dir))
  if options.json:
    options.json = unicode(os.path.abspath(options.json))

  cipd.validate_cipd_options(parser, options)
  install_packages_fn = noop_install_packages
  if options.cipd_enabled:
    install_packages_fn = lambda run_dir: install_client_and_packages(
        run_dir, cipd.parse_package_args(options.cipd_packages),
        options.cipd_server, options.cipd_client_package,
        options.cipd_client_version, cache_dir=options.cipd_cache)

  @contextlib.contextmanager
  def install_named_caches(run_dir):
    # WARNING: this function depends on "options" variable defined in the outer
    # function.
    caches = [
      (os.path.join(run_dir, unicode(relpath)), name)
      for name, relpath in options.named_caches
    ]
    with named_cache_manager.open():
      for path, name in caches:
        named_cache_manager.install(path, name)
    try:
      yield
    finally:
      # Uninstall each named cache, returning it to the cache pool. If an
      # uninstall fails for a given cache, it will remain in the task's
      # temporary space, get cleaned up by the Swarming bot, and be lost.
      #
      # If the Swarming bot cannot clean up the cache, it will handle it like
      # any other bot file that could not be removed.
      with named_cache_manager.open():
        for path, name in caches:
          try:
            named_cache_manager.uninstall(path, name)
          except named_cache.Error:
            logging.exception('Error while removing named cache %r at %r. '
                              'The cache will be lost.', path, name)

  try:
    if options.isolate_server:
      storage = isolateserver.get_storage(
          options.isolate_server, options.namespace)
      with storage:
        # Hashing schemes used by |storage| and |isolate_cache| MUST match.
        assert storage.hash_algo == isolate_cache.hash_algo
        return run_tha_test(
            args, options.isolated, storage, isolate_cache, options.output,
            install_named_caches, options.leak_temp_dir, options.json,
            options.root_dir, options.hard_timeout, options.grace_period,
            options.bot_file, options.switch_to_account, install_packages_fn,
            options.use_symlinks)
    return run_tha_test(
        args, options.isolated, None, isolate_cache, options.output,
        install_named_caches, options.leak_temp_dir, options.json,
        options.root_dir, options.hard_timeout, options.grace_period,
        options.bot_file, options.switch_to_account, install_packages_fn,
        options.use_symlinks)
  except (cipd.Error, named_cache.Error) as ex:
    print >> sys.stderr, ex.message
    return 1
def main(args):
  """Parses the command line, sets up the disk cache and runs the test.

  Args:
    args: command line arguments (list of str), excluding the program name.

  Returns:
    run_tha_test()'s exit code on success, 1 if any exception escapes.
  """
  tools.disable_buffering()
  parser = tools.OptionParserWithLogging(
      usage='%prog <options>',
      version=__version__,
      log_file=RUN_ISOLATED_LOG_FILE)

  # Options describing where the .isolated tree comes from.
  source_group = optparse.OptionGroup(parser, 'Data source')
  source_group.add_option(
      '-s', '--isolated',
      metavar='FILE',
      help='File/url describing what to map or run')
  source_group.add_option(
      '-H', '--hash',
      help='Hash of the .isolated to grab from the hash table')
  isolateserver.add_isolate_server_options(source_group, True)
  parser.add_option_group(source_group)

  # Options bounding the size of the local download cache.
  caching_group = optparse.OptionGroup(parser, 'Cache management')
  caching_group.add_option(
      '--cache',
      default='cache',
      metavar='DIR',
      help='Cache directory, default=%default')
  caching_group.add_option(
      '--max-cache-size',
      type='int',
      metavar='NNN',
      default=20*1024*1024*1024,
      help='Trim if the cache gets larger than this value, default=%default')
  caching_group.add_option(
      '--min-free-space',
      type='int',
      metavar='NNN',
      default=2*1024*1024*1024,
      help='Trim if disk free space becomes lower than this value, '
           'default=%default')
  caching_group.add_option(
      '--max-items',
      type='int',
      metavar='NNN',
      default=100000,
      help='Trim if more than this number of items are in the cache '
           'default=%default')
  parser.add_option_group(caching_group)

  auth.add_auth_options(parser)

  options, args = parser.parse_args(args)
  auth.process_auth_options(parser, options)
  isolateserver.process_isolate_server_options(source_group, options)

  # Exactly one of the two ways to name the tree must be given.
  if bool(options.isolated) == bool(options.hash):
    logging.debug('One and only one of --isolated or --hash is required.')
    parser.error('One and only one of --isolated or --hash is required.')

  options.cache = os.path.abspath(options.cache)
  policies = CachePolicies(
      options.max_cache_size, options.min_free_space, options.max_items)
  algo = isolateserver.get_hash_algo(options.namespace)

  try:
    # |options.cache| may not exist until DiskCache() instance is created.
    cache = DiskCache(options.cache, policies, algo)
    remote = options.isolate_server or options.indir
    with isolateserver.get_storage(remote, options.namespace) as storage:
      return run_tha_test(
          options.isolated or options.hash, storage, cache, algo, args)
  except Exception as exc:
    # Make sure any exception is logged before exiting.
    tools.report_error(exc)
    logging.exception(exc)
    return 1
def main(args):
  """Command line entry point: builds the option parser, then maps and runs.

  Args:
    args: command line arguments (list of str), excluding the program name.

  Returns:
    run_tha_test()'s exit code on success, 1 if any exception escapes.
  """
  tools.disable_buffering()
  parser = tools.OptionParserWithLogging(
      usage='%prog <options>',
      version=__version__,
      log_file=RUN_ISOLATED_LOG_FILE)

  # Where the .isolated tree is fetched from.
  src_group = optparse.OptionGroup(parser, 'Data source')
  src_group.add_option(
      '-s', '--isolated',
      metavar='FILE',
      help='File/url describing what to map or run')
  src_group.add_option(
      '-H', '--hash',
      help='Hash of the .isolated to grab from the hash table')
  isolateserver.add_isolate_server_options(src_group, True)
  parser.add_option_group(src_group)

  # Local cache sizing / trimming knobs.
  trim_group = optparse.OptionGroup(parser, 'Cache management')
  trim_group.add_option(
      '--cache',
      default='cache',
      metavar='DIR',
      help='Cache directory, default=%default')
  trim_group.add_option(
      '--max-cache-size',
      type='int',
      metavar='NNN',
      default=20*1024*1024*1024,
      help='Trim if the cache gets larger than this value, default=%default')
  trim_group.add_option(
      '--min-free-space',
      type='int',
      metavar='NNN',
      default=2*1024*1024*1024,
      help='Trim if disk free space becomes lower than this value, '
           'default=%default')
  trim_group.add_option(
      '--max-items',
      type='int',
      metavar='NNN',
      default=100000,
      help='Trim if more than this number of items are in the cache '
           'default=%default')
  parser.add_option_group(trim_group)

  auth.add_auth_options(parser)

  options, args = parser.parse_args(args)
  auth.process_auth_options(parser, options)
  isolateserver.process_isolate_server_options(src_group, options)

  # The tree must be named by exactly one of --isolated / --hash.
  if bool(options.isolated) == bool(options.hash):
    logging.debug('One and only one of --isolated or --hash is required.')
    parser.error('One and only one of --isolated or --hash is required.')

  options.cache = os.path.abspath(options.cache)
  policies = CachePolicies(
      options.max_cache_size, options.min_free_space, options.max_items)

  try:
    # |options.cache| path may not exist until DiskCache() instance is created.
    cache = DiskCache(
        options.cache, policies, isolateserver.get_hash_algo(options.namespace))
    remote = options.isolate_server or options.indir
    with isolateserver.get_storage(remote, options.namespace) as storage:
      # Hashing schemes used by |storage| and |cache| MUST match.
      assert storage.hash_algo == cache.hash_algo
      return run_tha_test(
          options.isolated or options.hash, storage, cache, args)
  except Exception as exc:
    # Make sure any exception is logged before exiting.
    tools.report_error(exc)
    logging.exception(exc)
    return 1