def __init__(self, require_result=True, *args, **kwargs):
  """Registers the options shared by all isolate-related commands.

  Arguments:
  - require_result: when True, callers will insist that --result is set.
    Stored on the parser for later validation.
  """
  OptionParserWithLogging.__init__(self, *args, **kwargs)
  # Seed --variable with the values every invocation needs: the OS flavor
  # and the platform's executable suffix.
  defaults = [('OS', isolate_common.get_flavor())]
  if sys.platform in ('win32', 'cygwin'):
    defaults.append(('EXECUTABLE_SUFFIX', '.exe'))
  else:
    defaults.append(('EXECUTABLE_SUFFIX', ''))
  common = optparse.OptionGroup(self, "Common options")
  common.add_option(
      '-r', '--result',
      metavar='FILE',
      help='.result file to store the json manifest')
  common.add_option(
      '-i', '--isolate',
      metavar='FILE',
      help='.isolate file to load the dependency data from')
  common.add_option(
      '-V', '--variable',
      nargs=2,
      action='append',
      default=defaults,
      dest='variables',
      metavar='FOO BAR',
      help='Variables to process in the .isolate file, default: %default. '
           'Variables are persistent accross calls, they are saved inside '
           '<results>.state')
  common.add_option(
      '-o', '--outdir',
      metavar='DIR',
      help='Directory used to recreate the tree or store the hash table. '
           'If the environment variable ISOLATE_HASH_TABLE_DIR exists, it '
           'will be used. Otherwise, for run and remap, uses a /tmp '
           'subdirectory. For the other modes, defaults to the directory '
           'containing --result')
  self.add_option_group(common)
  self.require_result = require_result
def process_input(filepath, prevdict, level, read_only):
  """Processes an input file, a dependency, and return meta data about it.

  Arguments:
  - filepath: File to act on.
  - prevdict: the previous dictionary. It is used to retrieve the cached sha-1
    to skip recalculating the hash.
  - level: determines the amount of information retrieved.
  - read_only: If True, the file mode is manipulated. In practice, only save
    one of 4 modes: 0755 (rwx), 0644 (rw), 0555 (rx), 0444 (r). On windows,
    mode is not set since all files are 'executable' by default.
  """
  assert level in (NO_INFO, STATS_ONLY, WITH_HASH)
  metadata = {}
  if level >= STATS_ONLY:
    # lstat() so a symlink's own metadata is reported, not its target's.
    filestats = os.lstat(filepath)
    is_link = stat.S_ISLNK(filestats.st_mode)
    if isolate_common.get_flavor() != 'win':
      # Ignore file mode on Windows since it's not really useful there.
      mode = stat.S_IMODE(filestats.st_mode)
      # Remove write access for group and all access to 'others'.
      mode &= ~(stat.S_IWGRP | stat.S_IRWXO)
      if read_only:
        mode &= ~stat.S_IWUSR
      # Keep the group executable bit in sync with the owner's.
      if mode & stat.S_IXUSR:
        mode |= stat.S_IXGRP
      else:
        mode &= ~stat.S_IXGRP
      metadata['mode'] = mode
    if not is_link:
      metadata['size'] = filestats.st_size
    # Used to skip recalculating the hash. Use the most recent update time.
    metadata['timestamp'] = int(round(filestats.st_mtime))
    # If the timestamp wasn't updated, carry on the sha-1.
    if prevdict.get('timestamp') == metadata['timestamp']:
      if 'sha-1' in prevdict:
        # Reuse the previous hash.
        metadata['sha-1'] = prevdict['sha-1']
      if 'link' in prevdict:
        # Reuse the previous link destination.
        metadata['link'] = prevdict['link']
  if (level >= WITH_HASH and
      not metadata.get('sha-1') and
      not metadata.get('link')):
    if is_link:
      # A symlink, store the link destination instead.
      metadata['link'] = os.readlink(filepath)
    else:
      with open(filepath, 'rb') as f:
        metadata['sha-1'] = hashlib.sha1(f.read()).hexdigest()
  return metadata
def process_input(filepath, prevdict, level, read_only):
  """Processes an input file, a dependency, and return meta data about it.

  Arguments:
  - filepath: File to act on.
  - prevdict: the previous dictionary. It is used to retrieve the cached sha-1
    to skip recalculating the hash.
  - level: determines the amount of information retrieved.
  - read_only: If True, the file mode is manipulated. In practice, only save
    one of 4 modes: 0755 (rwx), 0644 (rw), 0555 (rx), 0444 (r). On windows,
    mode is not set since all files are 'executable' by default.
  """
  assert level in (NO_INFO, STATS_ONLY, WITH_HASH)
  result = {}
  if level >= STATS_ONLY:
    filestats = os.stat(filepath)
    if isolate_common.get_flavor() != 'win':
      # File mode is skipped on Windows; it is not meaningful there.
      mode = stat.S_IMODE(filestats.st_mode)
      # Remove write access for group and all access to 'others'.
      mode &= ~(stat.S_IWGRP | stat.S_IRWXO)
      if read_only:
        mode &= ~stat.S_IWUSR
      # Mirror the owner's executable bit onto the group.
      if mode & stat.S_IXUSR:
        mode |= stat.S_IXGRP
      else:
        mode &= ~stat.S_IXGRP
      result['mode'] = mode
    result['size'] = filestats.st_size
    # Used to skip recalculating the hash. Use the most recent update time.
    result['timestamp'] = int(round(filestats.st_mtime))
    # If the timestamp wasn't updated, carry on the sha-1.
    if (prevdict.get('timestamp') == result['timestamp'] and
        'sha-1' in prevdict):
      # Reuse the previous hash.
      result['sha-1'] = prevdict['sha-1']
  if level >= WITH_HASH and not result.get('sha-1'):
    with open(filepath, 'rb') as f:
      result['sha-1'] = hashlib.sha1(f.read()).hexdigest()
  return result
def load_isolate(content, error):
  """Loads the .isolate file and returns the information unprocessed.

  Returns the command, dependencies and read_only flag. The dependencies are
  fixed to use os.path.sep.

  Arguments:
  - content: raw text of the .isolate file.
  - error: callback invoked with a message when no configuration matches;
    expected to not return (e.g. OptionParser.error).
  """
  # Load the .isolate file, process its conditions, retrieve the command and
  # dependencies.
  isolate_configs = merge_isolate.load_gyp(
      merge_isolate.eval_content(content), None, merge_isolate.DEFAULT_OSES)
  target_os = isolate_common.get_flavor()
  # Fall back on the OS-agnostic configuration when there is no OS-specific
  # one.
  config = (
      isolate_configs.per_os.get(target_os) or
      isolate_configs.per_os.get(None))
  if not config:
    error('Failed to load configuration for \'%s\'' % target_os)
  # Merge tracked and untracked dependencies, isolate.py doesn't care about the
  # trackability of the dependencies, only the build tool does.
  dependencies = [
      f.replace('/', os.path.sep) for f in config.tracked + config.untracked
  ]
  return config.command, dependencies, config.read_only
def load_isolate(content):
  """Loads the .isolate file and returns the information unprocessed.

  Returns the command, dependencies and read_only flag. The dependencies are
  fixed to use os.path.sep.

  Raises ExecutionError when no configuration matches the current OS flavor.
  """
  # Load the .isolate file, process its conditions, retrieve the command and
  # dependencies.
  parsed = merge_isolate.eval_content(content)
  configs = merge_isolate.load_gyp(parsed, None, merge_isolate.DEFAULT_OSES)
  flavor = isolate_common.get_flavor()
  # Prefer the flavor-specific config, falling back on the generic one.
  config = configs.per_os.get(flavor) or configs.per_os.get(None)
  if not config:
    raise ExecutionError('Failed to load configuration for \'%s\'' % flavor)
  # Merge tracked and untracked dependencies, isolate.py doesn't care about the
  # trackability of the dependencies, only the build tool does.
  dependencies = [
      dep.replace('/', os.path.sep)
      for dep in config.tracked + config.untracked
  ]
  return config.command, dependencies, config.read_only
def main():
  """CLI frontend to validate arguments."""
  default_variables = [('OS', isolate_common.get_flavor())]
  if sys.platform in ('win32', 'cygwin'):
    default_variables.append(('EXECUTABLE_SUFFIX', '.exe'))
  else:
    default_variables.append(('EXECUTABLE_SUFFIX', ''))
  parser = optparse.OptionParser(
      usage='%prog <options> [gtest]',
      description=sys.modules['__main__'].__doc__)
  parser.format_description = lambda *_: parser.description
  parser.add_option(
      '-c', '--cwd',
      default='',
      help='Signal to start the process from this relative directory. When '
           'specified, outputs the inputs files in a way compatible for '
           'gyp processing. Should be set to the relative path containing the '
           'gyp file, e.g. \'chrome\' or \'net\'')
  parser.add_option(
      '-V', '--variable',
      nargs=2, action='append', default=default_variables,
      dest='variables', metavar='FOO BAR',
      help='Variables to process in the .isolate file, default: %default')
  parser.add_option(
      '--root-dir', default=ROOT_DIR,
      help='Root directory to base everything off. Default: %default')
  parser.add_option(
      '-o', '--out',
      help='output file, defaults to <executable>.test_cases')
  parser.add_option(
      '-j', '--jobs', type='int',
      help='number of parallel jobs')
  # Fixed: help text was copy-pasted from --jobs ('number of parallel jobs').
  parser.add_option(
      '-t', '--timeout', default=120, type='int',
      help='timeout for each test case, in seconds. Default: %default')
  parser.add_option(
      '-v', '--verbose', action='count', default=0,
      help='Use multiple times to increase verbosity')
  group = optparse.OptionGroup(parser, 'Which test cases to run')
  group.add_option(
      '-w', '--whitelist', default=[], action='append',
      help='filter to apply to test cases to run, wildcard-style, defaults to '
           'all test')
  group.add_option(
      '-b', '--blacklist', default=[], action='append',
      help='filter to apply to test cases to skip, wildcard-style, defaults to '
           'no test')
  group.add_option(
      '-i', '--index', type='int',
      help='Shard index to run')
  group.add_option(
      '-s', '--shards', type='int',
      help='Total number of shards to calculate from the --index to run')
  group.add_option(
      '-T', '--test-case-file',
      help='File containing the exact list of test cases to run')
  parser.add_option_group(group)
  options, args = parser.parse_args()
  levels = [logging.ERROR, logging.WARNING, logging.INFO, logging.DEBUG]
  logging.basicConfig(
      level=levels[min(len(levels) - 1, options.verbose)],
      format='%(levelname)5s %(module)15s(%(lineno)3d): %(message)s')
  if len(args) != 1:
    parser.error(
        'Please provide the executable line to run, if you need fancy things '
        'like xvfb, start this script from *inside* xvfb, it\'ll be much faster'
        '.')
  # --index and --shards only make sense together.
  if bool(options.shards) != bool(options.index is not None):
    parser.error('Use both --index X --shards Y or none of them')
  options.root_dir = os.path.abspath(options.root_dir)
  if not os.path.isdir(options.root_dir):
    parser.error('--root-dir "%s" must exist' % options.root_dir)
  if not os.path.isdir(os.path.join(options.root_dir, options.cwd)):
    parser.error(
        '--cwd "%s" must be an existing directory relative to %s' %
        (options.cwd, options.root_dir))
  executable = args[0]
  if not os.path.isabs(executable):
    executable = os.path.abspath(os.path.join(options.root_dir, executable))
  if not os.path.isfile(executable):
    parser.error('"%s" doesn\'t exist.' % executable)
  if not options.out:
    options.out = '%s.test_cases' % executable
  # First, grab the test cases.
  if options.test_case_file:
    with open(options.test_case_file, 'r') as f:
      test_cases = filter(None, f.read().splitlines())
  else:
    test_cases = get_test_cases(
        executable,
        options.whitelist,
        options.blacklist,
        options.index,
        options.shards)
  # Then run them.
  return trace_test_cases(
      executable,
      options.root_dir,
      options.cwd,
      dict(options.variables),
      test_cases,
      options.jobs,
      # TODO(maruel): options.timeout,
      options.out)
def main():
  """CLI frontend to validate arguments."""
  default_variables = [('OS', isolate_common.get_flavor())]
  if sys.platform in ('win32', 'cygwin'):
    default_variables.append(('EXECUTABLE_SUFFIX', '.exe'))
  else:
    default_variables.append(('EXECUTABLE_SUFFIX', ''))
  parser = optparse.OptionParser(
      usage='%prog <options> [gtest]',
      description=sys.modules['__main__'].__doc__)
  parser.format_description = lambda *_: parser.description
  parser.add_option(
      '-c', '--cwd',
      default='chrome',
      help='Signal to start the process from this relative directory. When '
           'specified, outputs the inputs files in a way compatible for '
           'gyp processing. Should be set to the relative path containing the '
           'gyp file, e.g. \'chrome\' or \'net\'')
  parser.add_option(
      '-V', '--variable',
      nargs=2, action='append', default=default_variables,
      dest='variables', metavar='FOO BAR',
      help='Variables to process in the .isolate file, default: %default')
  parser.add_option(
      '--root-dir', default=ROOT_DIR,
      help='Root directory to base everything off. Default: %default')
  parser.add_option(
      '-o', '--out',
      help='output file, defaults to <executable>.test_cases')
  parser.add_option(
      '-w', '--whitelist', default=[], action='append',
      help='filter to apply to test cases to run, wildcard-style, defaults to '
           'all test')
  parser.add_option(
      '-b', '--blacklist', default=[], action='append',
      help='filter to apply to test cases to skip, wildcard-style, defaults to '
           'no test')
  parser.add_option(
      '-j', '--jobs', type='int',
      help='number of parallel jobs')
  parser.add_option(
      '-i', '--index', type='int',
      help='Shard index to run')
  parser.add_option(
      '-s', '--shards', type='int',
      help='Total number of shards to calculate from the --index to run')
  # Fixed: help text was copy-pasted from --jobs ('number of parallel jobs').
  parser.add_option(
      '-t', '--timeout', default=120, type='int',
      help='timeout for each test case, in seconds. Default: %default')
  parser.add_option(
      '-v', '--verbose', action='count', default=0,
      help='Use multiple times to increase verbosity')
  options, args = parser.parse_args()
  levels = [logging.ERROR, logging.WARNING, logging.INFO, logging.DEBUG]
  logging.basicConfig(
      level=levels[min(len(levels) - 1, options.verbose)],
      format='%(levelname)5s %(module)15s(%(lineno)3d): %(message)s')
  if len(args) != 1:
    parser.error(
        'Please provide the executable line to run, if you need fancy things '
        'like xvfb, start this script from *inside* xvfb, it\'ll be much faster'
        '.')
  # --index and --shards only make sense together.
  if bool(options.shards) != bool(options.index is not None):
    parser.error('Use both --index X --shards Y or none of them')
  executable = args[0]
  if not os.path.isabs(executable):
    executable = os.path.abspath(os.path.join(options.root_dir, executable))
  if not options.out:
    options.out = '%s.test_cases' % executable
  return trace_test_cases(
      executable,
      options.root_dir,
      options.cwd,
      dict(options.variables),
      options.whitelist,
      options.blacklist,
      options.jobs,
      # TODO(maruel): options.timeout,
      options.index,
      options.shards,
      options.out)
def main():
    """CLI frontend to validate arguments."""
    default_variables = [("OS", isolate_common.get_flavor())]
    if sys.platform in ("win32", "cygwin"):
        default_variables.append(("EXECUTABLE_SUFFIX", ".exe"))
    else:
        default_variables.append(("EXECUTABLE_SUFFIX", ""))
    parser = optparse.OptionParser(
        usage="%prog <options> [gtest]",
        description=sys.modules["__main__"].__doc__,
    )
    parser.format_description = lambda *_: parser.description
    parser.add_option(
        "-c",
        "--cwd",
        default="chrome",
        help="Signal to start the process from this relative directory. When "
        "specified, outputs the inputs files in a way compatible for "
        "gyp processing. Should be set to the relative path containing the "
        "gyp file, e.g. 'chrome' or 'net'",
    )
    parser.add_option(
        "-V",
        "--variable",
        nargs=2,
        action="append",
        default=default_variables,
        dest="variables",
        metavar="FOO BAR",
        help="Variables to process in the .isolate file, default: %default",
    )
    parser.add_option(
        "--root-dir",
        default=ROOT_DIR,
        help="Root directory to base everything off. Default: %default",
    )
    parser.add_option(
        "-o", "--out", help="output file, defaults to <executable>.test_cases"
    )
    parser.add_option("-j", "--jobs", type="int", help="number of parallel jobs")
    # Fixed: help text was copy-pasted from --jobs ("number of parallel jobs").
    parser.add_option(
        "-t",
        "--timeout",
        default=120,
        type="int",
        help="timeout for each test case, in seconds. Default: %default",
    )
    parser.add_option(
        "-v",
        "--verbose",
        action="count",
        default=0,
        help="Use multiple times to increase verbosity",
    )
    group = optparse.OptionGroup(parser, "Which test cases to run")
    group.add_option(
        "-w",
        "--whitelist",
        default=[],
        action="append",
        help="filter to apply to test cases to run, wildcard-style, defaults to "
        "all test",
    )
    group.add_option(
        "-b",
        "--blacklist",
        default=[],
        action="append",
        help="filter to apply to test cases to skip, wildcard-style, defaults to "
        "no test",
    )
    group.add_option("-i", "--index", type="int", help="Shard index to run")
    group.add_option(
        "-s",
        "--shards",
        type="int",
        help="Total number of shards to calculate from the --index to run",
    )
    group.add_option(
        "-T",
        "--test-case-file",
        help="File containing the exact list of test cases to run",
    )
    parser.add_option_group(group)
    options, args = parser.parse_args()
    levels = [logging.ERROR, logging.WARNING, logging.INFO, logging.DEBUG]
    logging.basicConfig(
        level=levels[min(len(levels) - 1, options.verbose)],
        format="%(levelname)5s %(module)15s(%(lineno)3d): %(message)s",
    )
    if len(args) != 1:
        parser.error(
            "Please provide the executable line to run, if you need fancy things "
            "like xvfb, start this script from *inside* xvfb, it'll be much faster"
            "."
        )
    # --index and --shards only make sense together.
    if bool(options.shards) != bool(options.index is not None):
        parser.error("Use both --index X --shards Y or none of them")
    executable = args[0]
    if not os.path.isabs(executable):
        executable = os.path.abspath(os.path.join(options.root_dir, executable))
    if not options.out:
        options.out = "%s.test_cases" % executable
    # First, grab the test cases.
    if options.test_case_file:
        with open(options.test_case_file, "r") as f:
            test_cases = filter(None, f.read().splitlines())
    else:
        test_cases = get_test_cases(
            executable,
            options.whitelist,
            options.blacklist,
            options.index,
            options.shards,
        )
    # Then run them.
    return trace_test_cases(
        executable,
        options.root_dir,
        options.cwd,
        dict(options.variables),
        test_cases,
        options.jobs,
        # TODO(maruel): options.timeout,
        options.out,
    )
def main():
  """Handles CLI and normalizes the input arguments to pass them to isolate().
  """
  default_variables = [('OS', isolate_common.get_flavor())]
  if sys.platform in ('win32', 'cygwin'):
    default_variables.append(('EXECUTABLE_SUFFIX', '.exe'))
  else:
    default_variables.append(('EXECUTABLE_SUFFIX', ''))
  modes = sorted(VALID_MODES.keys() + ['noop'])
  parser = optparse.OptionParser(
      usage='%prog [options] [.isolate file]',
      description=sys.modules[__name__].__doc__)
  parser.format_description = lambda *_: parser.description
  parser.add_option(
      '-v', '--verbose',
      action='count',
      default=int(os.environ.get('ISOLATE_DEBUG', 0)),
      help='Use multiple times')
  parser.add_option(
      '-m', '--mode',
      choices=modes,
      help='Determines the action to be taken: %s' % ', '.join(modes))
  parser.add_option(
      '-r', '--result',
      metavar='FILE',
      help='Result file to store the json manifest')
  parser.add_option(
      '-V', '--variable',
      nargs=2,
      action='append',
      default=default_variables,
      dest='variables',
      metavar='FOO BAR',
      help='Variables to process in the .isolate file, default: %default')
  parser.add_option(
      '-o', '--outdir',
      metavar='DIR',
      help='Directory used to recreate the tree or store the hash table. '
           'If the environment variable ISOLATE_HASH_TABLE_DIR exists, it will '
           'be used. Otherwise, for run and remap, uses a /tmp subdirectory. '
           'For the other modes, defaults to the directory containing --result')
  options, args = parser.parse_args()
  log_levels = [logging.ERROR, logging.WARNING, logging.INFO, logging.DEBUG]
  logging.basicConfig(
      level=log_levels[min(len(log_levels) - 1, options.verbose)],
      format='%(levelname)5s %(module)15s(%(lineno)3d): %(message)s')
  if not options.mode:
    parser.error('--mode is required')
  if not options.result:
    parser.error('--result is required.')
  if len(args) > 1:
    logging.debug('%s' % sys.argv)
    parser.error('Use only one argument which should be a .isolate file')
  # Make sure the paths make sense. On Windows, / and \ are often mixed
  # together in a path.
  result_file = os.path.abspath(options.result.replace('/', os.path.sep))
  if options.mode == 'noop':
    # This undocumented mode is to help transition since some builders do not
    # have all the test data files checked out. Touch result_file and exit
    # silently.
    open(result_file, 'a').close()
    return 0
  # input_file may be None.
  input_file = None
  if args:
    input_file = os.path.abspath(args[0].replace('/', os.path.sep))
  # out_dir may be None.
  out_dir = None
  if options.outdir:
    out_dir = os.path.abspath(options.outdir.replace('/', os.path.sep))
  # Fix variables.
  variables = dict(options.variables)
  # After basic validation, pass this to isolate().
  return isolate(
      result_file, input_file, options.mode, variables, out_dir, parser.error)