def test_load_from_non_utf8_file_raises_ConfigError(tmpdir):
    config_path = tmpdir / 'config.conf'
    config = {'key': 'value'}
    with config_path.open(mode='wb') as handle:
        handle.write(serialize_config(config).encode('utf-16'))
    with pytest.raises(ConfigError):
        load_config(str(config_path))
def test_load_invalid_stdin_raises_ConfigError(mocker):
    temp_stdin = io.StringIO()
    temp_stdin.write('{invalid')
    temp_stdin.seek(0)
    mocker.patch('sys.stdin', temp_stdin)
    with pytest.raises(ConfigError):
        load_config()
def init(config_file, session_file):
    """Initialize a mutation testing session from a configuration.

    This primarily creates a session - a database of "work to be done" -
    which describes all of the mutations and test runs that need to be
    executed for a full mutation testing run. The configuration specifies
    the top-level module to mutate, the tests to run, and how to run them.

    This command doesn't actually run any tests. Instead, it scans the
    modules-under-test and simply generates the work order which can be
    executed with other commands.
    """
    cfg = load_config(config_file)

    modules = cosmic_ray.modules.find_modules(Path(cfg['module-path']))
    modules = cosmic_ray.modules.filter_paths(
        modules, cfg.get('exclude-modules', ()))

    if log.isEnabledFor(logging.INFO):
        log.info('Modules discovered:')
        per_dir = defaultdict(list)
        for m in modules:
            per_dir[m.parent].append(m.name)
        for directory, files in per_dir.items():
            log.info(' - %s: %s', directory, ', '.join(sorted(files)))

    with use_db(session_file) as database:
        cosmic_ray.commands.init(modules, database, cfg)

    sys.exit(ExitCode.OK)
def handle_worker(args):
    """usage: {program} worker [options] <module> <operator> <occurrence> [<config-file>]

    Run a worker process which performs a single mutation and test run.
    Each worker does a minimal, isolated chunk of work: it mutates the
    <occurrence>-th instance of <operator> in <module>, runs the test
    suite defined in the configuration, prints the results, and exits.

    Normally you won't run this directly. Rather, it will be launched
    by an execution engine. However, it can be useful to run this on
    its own for testing and debugging purposes.

    options:
      --keep-stdout  Do not squelch stdout
    """
    config = load_config(args.get('<config-file>'))

    if config.get('local-imports', True):
        sys.path.insert(0, '')

    operator = cosmic_ray.plugins.get_operator(args['<operator>'])
    test_runner = cosmic_ray.plugins.get_test_runner(
        config['test-runner']['name'],
        config['test-runner']['args'])

    with open(os.devnull, 'w') as devnull:
        with redirect_stdout(sys.stdout if args['--keep-stdout'] else devnull):
            work_item = cosmic_ray.worker.worker(
                args['<module>'],
                operator,
                int(args['<occurrence>']),
                test_runner)

    sys.stdout.write(json.dumps(work_item))

    return os.EX_OK
def handle_init(args):
    """usage: cosmic-ray init <config-file> <session-file>

    Initialize a mutation testing session from a configuration. This
    primarily creates a session - a database of "work to be done" -
    which describes all of the mutations and test runs that need to be
    executed for a full mutation testing run. The configuration
    specifies the top-level module to mutate, the tests to run, and how
    to run them.

    This command doesn't actually run any tests. Instead, it scans the
    modules-under-test and simply generates the work order which can be
    executed with other commands.

    The `session-file` is the filename for the database in which the
    work order will be stored.
    """
    config_file = args['<config-file>']
    config = load_config(config_file)

    modules = set(cosmic_ray.modules.find_modules(Path(config['module-path'])))

    log.info('Modules discovered: %s', [m for m in modules])

    db_name = get_db_name(args['<session-file>'])

    with use_db(db_name) as database:
        cosmic_ray.commands.init(modules, database, config)

    return ExitCode.OK
def test_load_from_valid_config_file(tmpdir):
    config_path = tmpdir / 'config.conf'
    config = ConfigDict()
    config['key'] = 'value'
    with config_path.open(mode='wt', encoding='utf-8') as handle:
        handle.write(serialize_config(config))
    assert load_config(str(config_path))['key'] == 'value'
def handle_baseline(args):
    """usage: cosmic-ray baseline <config-file>

    Run an un-mutated baseline of the specified configuration. This is
    largely like running a "worker" process, with the difference that a
    baseline run doesn't mutate the code.
    """
    sys.path.insert(0, '')

    config = load_config(args['<config-file>'])

    test_runner = cosmic_ray.plugins.get_test_runner(
        config['test-runner']['name'],
        config['test-runner']['args'])

    work_item = test_runner()

    # note: test_runner() results are meant to represent status codes when
    # executed against mutants. SURVIVED means that the test suite executed
    # without any error, hence CR thinks the mutant survived. However, when
    # running the baseline execution we don't have mutations and really want
    # the test suite to report PASS, hence the comparison below!
    if work_item.test_outcome != TestOutcome.SURVIVED:
        # baseline failed, print whatever was returned
        # from the test runner and exit
        log.error('baseline failed')
        print(''.join(work_item.data))
        return 2

    return os.EX_OK
def filter(self, work_db: WorkDB, args: Namespace):
    """Mark as skipped all work items with filtered operators."""
    config = load_config(args.config)
    exclude_operators = config.sub("filters", "operators-filter").get(
        "exclude-operators", ())
    self._skip_filtered(work_db, exclude_operators)
def handle_counts(args):
    """usage: {program} counts <config-file>

    Count the number of tests that would be run for a given testing
    configuration. This is mostly useful for estimating run times and
    keeping track of testing statistics.
    """
    config = load_config(args['<config-file>'])

    sys.path.insert(0, '')

    modules = cosmic_ray.modules.find_modules(
        cosmic_ray.modules.fixup_module_name(config['module']),
        config.get('exclude-modules', []))

    operators = cosmic_ray.plugins.operator_names()

    counts = cosmic_ray.counting.count_mutants(modules, operators)

    print('[Counts]')
    pprint.pprint(counts)
    print('\n[Total test runs]\n',
          sum(itertools.chain(*(d.values() for d in counts.values()))))

    return os.EX_OK
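# A minimal sketch (not taken from cosmic_ray itself) of the nested shape that
# the total-run summation in handle_counts above assumes: a mapping of
# {module: {operator-name: mutation-count}}. The module and operator names
# below are hypothetical placeholders.
import itertools

_example_counts = {
    'mypackage.core': {'number_replacer': 12, 'break_continue_replacement': 3},
    'mypackage.util': {'number_replacer': 5},
}
_example_total = sum(itertools.chain(*(d.values() for d in _example_counts.values())))
assert _example_total == 20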
def handle_worker(args):
    """usage: {program} worker [options] <module-path> <operator> <occurrence> [<config-file>]

    Run a worker process which performs a single mutation and test run.
    Each worker does a minimal, isolated chunk of work: it mutates the
    <occurrence>-th instance of <operator> in <module-path>, runs the
    test suite defined in the configuration, prints the results, and
    exits.

    Normally you won't run this directly. Rather, it will be launched
    by an execution engine. However, it can be useful to run this on
    its own for testing and debugging purposes.

    options:
      --keep-stdout  Do not squelch stdout
    """
    config = load_config(args.get('<config-file>'))

    with open(os.devnull, 'w') as devnull:
        with redirect_stdout(sys.stdout if args['--keep-stdout'] else devnull):
            work_item = cosmic_ray.worker.worker(
                Path(args['<module-path>']),
                config.python_version,
                args['<operator>'],
                int(args['<occurrence>']),
                config.test_command,
                None)

    sys.stdout.write(json.dumps(work_item, cls=WorkItemJsonEncoder))

    return ExitCode.OK
def test_load_valid_stdin(mocker):
    temp_stdin = io.StringIO()
    temp_stdin.write('{key: value}')
    temp_stdin.seek(0)
    mocker.patch('sys.stdin', temp_stdin)
    assert load_config() == {'key': 'value'}
def handle_worker(args):
    """usage: {program} worker [options] <module-path> <operator> <occurrence> [<config-file>]

    Run a worker process which performs a single mutation and test run.
    Each worker does a minimal, isolated chunk of work: it mutates the
    <occurrence>-th instance of <operator> in <module-path>, runs the
    test suite defined in the configuration, prints the results, and
    exits.

    Normally you won't run this directly. Rather, it will be launched
    by an execution engine. However, it can be useful to run this on
    its own for testing and debugging purposes.

    options:
      --keep-stdout  Do not squelch stdout
    """
    config = load_config(args.get('<config-file>'))

    with open(os.devnull, 'w') as devnull:
        with redirect_stdout(sys.stdout if args['--keep-stdout'] else devnull):
            work_item = cosmic_ray.worker.worker(
                Path(args['<module-path>']),
                config.python_version,
                args['<operator>'],
                int(args['<occurrence>']),
                config.test_command,
                None)

    sys.stdout.write(json.dumps(work_item, cls=WorkItemJsonEncoder))

    return ExitCode.OK
def generate_badge(config_file, badge_file, session_file):
    """Generate badge file."""
    with use_db(session_file, WorkDB.Mode.open) as db:
        config = load_config(config_file)

        percent = 100 - survival_rate(db)
        config = config["badge"]

        badge = Badge(
            label=config["label"],
            value=percent,
            value_format=config["format"],
            thresholds=config["thresholds"],
        )

        log.info("Generating badge: " + config["format"], percent)  # pylint: disable=logging-not-lazy

        try:
            os.unlink(badge_file)
        except OSError:
            pass

        badge.write_badge(badge_file)
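# A minimal sketch of the "badge" configuration section that generate_badge
# above reads. Only the key names (label, format, thresholds) come from the
# code; the concrete values are hypothetical placeholders, assuming the
# thresholds map numeric cut-offs to colours in the style expected by Badge.
_example_badge_config = {
    "badge": {
        "label": "mutation",
        "format": "%.2f %%",
        "thresholds": {50: "red", 70: "orange", 90: "green"},
    }
}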
def test_load_valid_stdin(mocker):
    temp_stdin = io.StringIO()
    temp_stdin.name = 'stringio'
    temp_stdin.write('{key: value}')
    temp_stdin.seek(0)
    mocker.patch('sys.stdin', temp_stdin)
    assert load_config()['key'] == 'value'
def filter(self, work_db: WorkDB, args: Namespace):
    """Mark as skipped all work items that are not new."""
    config = ConfigDict()
    if args.config is not None:
        config = load_config(args.config)
    branch = config.sub("git", "git-filter").get("branch", "master")
    self._skip_filtered(work_db, branch)
def handle_init(args):
    """usage: cosmic-ray init <config-file> <session-file>

    Initialize a mutation testing session from a configuration. This
    primarily creates a session - a database of "work to be done" -
    which describes all of the mutations and test runs that need to be
    executed for a full mutation testing run. The configuration
    specifies the top-level module to mutate, the tests to run, and how
    to run them.

    This command doesn't actually run any tests. Instead, it scans the
    modules-under-test and simply generates the work order which can be
    executed with other commands.

    The `session-file` is the filename for the database in which the
    work order will be stored.
    """
    # This lets us import modules from the current directory. Should
    # probably be optional, and needs to also be applied to workers!
    sys.path.insert(0, '')

    config_file = args['<config-file>']
    config = load_config(config_file)

    if 'timeout' in config:
        timeout = config['timeout']
    elif 'baseline' in config:
        baseline_mult = config['baseline']
        command = 'cosmic-ray baseline {}'.format(args['<config-file>'])

        # We run the baseline in a subprocess to more closely emulate the
        # runtime of a worker subprocess.
        with Timer() as timer:
            subprocess.check_call(command.split())

        timeout = baseline_mult * timer.elapsed.total_seconds()
    else:
        raise ConfigValueError(
            "Config must specify either baseline or timeout")

    log.info('timeout = %f seconds', timeout)

    modules = set(
        cosmic_ray.modules.find_modules(
            cosmic_ray.modules.fixup_module_name(config['module']),
            config.get('exclude-modules', default=None)))

    log.info('Modules discovered: %s', [m.__name__ for m in modules])

    db_name = get_db_name(args['<session-file>'])

    with use_db(db_name) as database:
        cosmic_ray.commands.init(modules, database, config, timeout)

    return ExitCode.OK
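# A minimal sketch of a configuration that the timeout/baseline handling in
# handle_init above could consume, written as the YAML-style text used by some
# of the load_config tests in this file (other tests go through
# serialize_config, so the on-disk format may differ between versions). The
# key names (module, baseline/timeout, exclude-modules, test-runner) appear in
# the handlers; the concrete values are hypothetical placeholders.
_EXAMPLE_INIT_CONFIG = """
module: mypackage            # top-level module to mutate
baseline: 10                 # timeout = 10 x un-mutated test-suite run time
exclude-modules: []          # optional module names to skip
test-runner:
    name: unittest
    args: mypackage/tests
"""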
def test_load_valid_stdin(mocker):
    temp_stdin = io.StringIO()
    temp_stdin.name = 'stringio'
    config = ConfigDict()
    config['key'] = 'value'
    temp_stdin.write(serialize_config(config))
    temp_stdin.seek(0)
    mocker.patch('sys.stdin', temp_stdin)
    assert load_config()['key'] == 'value'
def handle_exec(config_file, session_file):
    """Perform the remaining work to be done in the specified session.

    This requires that the rest of your mutation testing infrastructure
    (e.g. worker processes) is already running.
    """
    cfg = load_config(config_file)
    with use_db(session_file, mode=WorkDB.Mode.open) as work_db:
        cosmic_ray.commands.execute(work_db, cfg)
    sys.exit(ExitCode.OK)
def filter(self, work_db: WorkDB, args: Namespace):
    """Mark as skipped all work items that are not new."""
    if args.config is None:
        config = work_db.get_config()
    else:
        config = load_config(args.config)

    branch = config.sub('git', 'git-filter').get('branch', 'master')

    self._skip_filtered(work_db, branch)
def filter(self, work_db: WorkDB, args: Namespace):
    """Mark as skipped all work items with filtered operators."""
    if args.config is None:
        config = work_db.get_config()
    else:
        config = load_config(args.config)

    exclude_operators = config.sub('filters', 'operators-filter').get(
        'exclude-operators', ())

    self._skip_filtered(work_db, exclude_operators)
def baseline(config_file, session_file):
    """Run a baseline execution that runs the test suite over unmutated code.

    If ``--session-file`` is provided, the session used for baselining is
    stored in that file. Otherwise, the session is stored in a temporary
    file which is deleted after the baselining.

    Exits with 0 if the job has exited normally, otherwise 1.
    """
    cfg = load_config(config_file)

    @contextmanager
    def path_or_temp(path):
        if path is None:
            with tempfile.TemporaryDirectory() as tmpdir:
                yield Path(tmpdir) / "session.sqlite"
        else:
            yield path

    with path_or_temp(session_file) as session_path:
        with use_db(session_path, mode=WorkDB.Mode.create) as db:
            db.clear()
            db.add_work_item(WorkItem(
                mutations=[],
                job_id="baseline",
            ))

            # Run the single-entry session.
            cosmic_ray.commands.execute(db, cfg)

            result = next(db.results)[1]
            if result.test_outcome == TestOutcome.KILLED:
                message = [
                    "Baseline failed. Execution with no mutation gives the following errors:"
                ]
                for line in result.output.split("\n"):
                    message.append("  >>> {}".format(line))
                log.error("\n".join(message))
                sys.exit(1)
            else:
                log.info(
                    "Baseline passed. Execution with no mutation works fine.")
                sys.exit(ExitCode.OK)
def generate_badge():
    """cr-badge

    Usage: cr-badge [--config <config_file>] <badge_file> <session-file>

    Generate badge file.

    options:
        --config <config_file>  Configuration file to use instead of session configuration
    """
    arguments = docopt.docopt(generate_badge.__doc__, version='cr-format 0.1')
    config_file = arguments['--config']
    badge_filename = arguments['<badge_file>']

    with use_db(arguments['<session-file>'], WorkDB.Mode.open) as db:
        assert isinstance(db, WorkDB)

        if config_file:
            config = load_config(config_file)
        else:
            config = db.get_config()

        percent = 100 - survival_rate(db)
        config = config['badge']

        badge = Badge(
            label=config['label'],
            value=percent,
            value_format=config['format'],
            thresholds=config['thresholds'],
        )

        log.info("Generating badge: " + config['format'], percent)  # pylint: disable=logging-not-lazy

        try:
            os.unlink(badge_filename)
        except OSError:
            pass

        badge.write_badge(badge_filename)
def worker(module_path, operator, occurrence, config_file, keep_stdout):
    """Run a worker process which performs a single mutation and test run.

    Each worker does a minimal, isolated chunk of work: it mutates the
    <occurrence>-th instance of <operator> in <module-path>, runs the test
    suite defined in the configuration, prints the results, and exits.

    Normally you won't run this directly. Rather, it will be launched by an
    execution engine. However, it can be useful to run this on its own for
    testing and debugging purposes.
    """
    cfg = load_config(config_file)

    with open(os.devnull, 'w') as devnull:
        with redirect_stdout(sys.stdout if keep_stdout else devnull):
            work_item = cosmic_ray.mutating.mutate_and_test(
                Path(module_path),
                cfg.python_version,
                operator,
                occurrence,
                cfg.test_command,
                None)

    sys.stdout.write(json.dumps(work_item, cls=WorkItemJsonEncoder))

    sys.exit(ExitCode.OK)
def handle_init(args):
    """usage: cosmic-ray init <config-file> <session-file>

    Initialize a mutation testing session from a configuration. This
    primarily creates a session - a database of "work to be done" -
    which describes all of the mutations and test runs that need to be
    executed for a full mutation testing run. The configuration
    specifies the top-level module to mutate, the tests to run, and how
    to run them.

    This command doesn't actually run any tests. Instead, it scans the
    modules-under-test and simply generates the work order which can be
    executed with other commands.

    The `session-file` is the filename for the database in which the
    work order will be stored.
    """
    config_file = args['<config-file>']
    config = load_config(config_file)

    modules = cosmic_ray.modules.find_modules(Path(config['module-path']))
    modules = cosmic_ray.modules.filter_paths(
        modules, config.get('exclude-modules', ()))

    if log.isEnabledFor(logging.INFO):
        log.info('Modules discovered:')
        per_dir = defaultdict(list)
        for m in modules:
            per_dir[m.parent].append(m.name)
        for directory, files in per_dir.items():
            log.info(' - %s: %s', directory, ', '.join(sorted(files)))

    db_name = args['<session-file>']

    with use_db(db_name) as database:
        cosmic_ray.commands.init(modules, database, config)

    return ExitCode.OK
def test_load_from_valid_config_file(tmpdir):
    config_path = tmpdir / 'config.yml'
    with config_path.open(mode='wt', encoding='utf-8') as handle:
        handle.write('{key: value}')
    assert load_config(str(config_path)) == {'key': 'value'}
def test_load_from_non_utf8_file_raises_ConfigError(tmpdir):
    config_path = tmpdir / 'config.yml'
    with config_path.open(mode='wb') as handle:
        handle.write('{key: value}'.encode('utf-16'))
    with pytest.raises(ConfigError):
        load_config(str(config_path))
def test_load_from_invalid_config_file_raises_ConfigError(tmpdir):
    config_path = tmpdir / 'config.yml'
    with config_path.open(mode='wt', encoding='utf-8') as handle:
        handle.write('{asdf')
    with pytest.raises(ConfigError):
        load_config(str(config_path))
def test_load_non_existent_file_raises_ConfigError():
    with pytest.raises(ConfigError):
        load_config('/foo/bar/this/does/no-exist/I/hope')