def test_set_multiple_results_works(work_db):
    """A second result for the same job must replace the first, not accumulate."""
    work_db.add_work_item(
        WorkItem.single(
            "job_id",
            ResolvedMutationSpec("path", "operator", 0, (0, 0), (0, 1))))

    # Store two results for the same job id; only the last should survive.
    for output in ("first result", "second result"):
        work_db.set_result(
            "job_id",
            WorkResult(
                output=output,
                test_outcome=TOutcome.KILLED,
                worker_outcome=WorkerOutcome.NORMAL,
                diff="diff",
            ),
        )

    matching = [res for job_id, res in work_db.results if job_id == "job_id"]
    assert len(matching) == 1
    assert matching[0].output == "second result"
def test_results(work_db):
    """Results stored in the DB come back exactly as they were written."""
    for idx in range(10):
        work_db.add_work_item(
            WorkItem.single(
                f"job_id_{idx}",
                ResolvedMutationSpec(
                    f"path_{idx}",
                    f"operator_{idx}",
                    idx,
                    (idx, idx),
                    (idx, idx + 1),
                ),
            ))

    expected = []
    for idx in range(10):
        expected.append((
            f"job_id_{idx}",
            WorkResult(
                output=f"data_{idx}",
                test_outcome=TOutcome.KILLED,
                worker_outcome=WorkerOutcome.NORMAL,
                diff=f"diff_{idx}",
            ),
        ))

    for job_id, work_result in expected:
        work_db.set_result(job_id, work_result)

    assert list(work_db.results) == expected
def test_adding_result_clears_pending(work_db):
    """Each stored result removes exactly its work item from the pending set."""
    items = []
    for idx in range(10):
        item = WorkItem.single(
            f"job_id_{idx}",
            ResolvedMutationSpec(
                f"path_{idx}",
                f"operator_{idx}",
                idx,
                (idx, idx),
                (idx, idx + 1),
            ),
        )
        items.append(item)
        work_db.add_work_item(item)

    for idx, item in enumerate(items):
        # Pending ordering isn't guaranteed, so compare sorted by repr.
        pending = sorted(work_db.pending_work_items, key=repr)
        assert pending == sorted(items[idx:], key=repr)
        work_db.set_result(
            f"job_id_{idx}",
            WorkResult(
                output=f"data_{idx}",
                test_outcome=TOutcome.KILLED,
                worker_outcome=WorkerOutcome.NORMAL,
                diff=f"diff_{idx}",
            ),
        )
def _skip_filtered(self, work_db, exclude_operators):
    """Mark as SKIPPED every pending item with a mutation whose operator matches a filter.

    Args:
        work_db: The WorkDB whose pending work items should be filtered.
        exclude_operators: An iterable of regular-expression strings; any
            mutation whose operator name matches one of them is skipped.
    """
    if not exclude_operators:
        return

    # Combine the user-supplied patterns into one alternation, each in a
    # non-capturing group "(?:...)". The original code used the typo
    # "(:?...)", which also (accidentally) matched an optional leading colon.
    re_exclude_operators = re.compile(
        "|".join("(?:%s)" % e for e in exclude_operators))

    for item in work_db.pending_work_items:
        for mutation in item.mutations:
            if re_exclude_operators.match(mutation.operator_name):
                log.info(
                    "operator skipping %s %s %s %s %s %s",
                    item.job_id,
                    mutation.operator_name,
                    mutation.occurrence,
                    mutation.module_path,
                    mutation.start_pos,
                    mutation.end_pos,
                )
                work_db.set_result(
                    item.job_id,
                    WorkResult(
                        output="Filtered operator",
                        worker_outcome=WorkerOutcome.SKIPPED,
                    ),
                )
                # One matching mutation is enough to skip the whole item.
                break
def test_adding_result_completes_work_item(work_db):
    """Storing a result moves the corresponding item into the completed set."""
    items = [
        WorkItem.single(
            f"job_id_{idx}",
            ResolvedMutationSpec(
                f"path_{idx}",
                f"operator_{idx}",
                idx,
                (idx, idx),
                (idx, idx + 1),
            ),
        )
        for idx in range(10)
    ]
    for item in items:
        work_db.add_work_item(item)

    for idx, item in enumerate(items):
        # Every item we already gave a result for must now be completed.
        completed = [pair[0] for pair in work_db.completed_work_items]
        assert completed == items[:idx]
        work_db.set_result(
            f"job_id_{idx}",
            WorkResult(
                output=f"data_{idx}",
                test_outcome=TOutcome.KILLED,
                worker_outcome=WorkerOutcome.NORMAL,
                diff=f"diff_{idx}",
            ),
        )
def intercept(work_db: WorkDB, config: ConfigDict):
    """Mark as skipped all work items with a filtered operator.

    Args:
        work_db: The WorkDB whose pending work items should be filtered.
        config: The configuration; the 'exclude-operators' entry holds the
            regular expressions describing operator names to skip.
    """
    exclude_operators = config.get('exclude-operators')
    # No filters configured means nothing to do. The original code passed
    # None straight to str.join and crashed with a TypeError here.
    if not exclude_operators:
        return

    # "(?:...)" is a non-capturing group; the original "(:?...)" typo also
    # matched an optional leading colon before each operator name.
    re_exclude_operators = re.compile(
        '|'.join('(?:%s)' % e for e in exclude_operators))

    for item in work_db.pending_work_items:
        if re_exclude_operators.match(item.operator_name):
            log.info(
                "operator skipping %s %s %s %s %s %s",
                item.job_id,
                item.operator_name,
                item.occurrence,
                item.module_path,
                item.start_pos,
                item.end_pos,
            )
            work_db.set_result(
                item.job_id,
                WorkResult(
                    output="Filtered operator",
                    worker_outcome=WorkerOutcome.SKIPPED,
                ),
            )
async def send_request(url, work_item: WorkItem, test_command, timeout):
    """Sends a mutate-and-test request to a worker.

    Args:
        url: The URL of the worker.
        work_item: The `WorkItem` representing the work to be done.
        test_command: The command that the worker should use to run the tests.
        timeout: The maximum number of seconds to spend running the test.

    Returns:
        A `WorkResult`.
    """
    # Serialize each mutation into the JSON shape the worker expects.
    mutations = [
        {
            "module_path": str(mutation.module_path),
            "operator": mutation.operator_name,
            "occurrence": mutation.occurrence,
        }
        for mutation in work_item.mutations
    ]
    payload = {
        "mutations": mutations,
        "test_command": test_command,
        "timeout": timeout,
    }

    log.info("Sending HTTP request to %s", url)
    async with aiohttp.request("POST", url, json=payload) as resp:
        result = await resp.json()

    # TODO: Account for possibility that `result` is the wrong shape.
    return WorkResult(
        worker_outcome=result["worker_outcome"],
        output=result["output"],
        test_outcome=result["test_outcome"],
        diff=result["diff"],
    )
def _skip_filtered(self, work_db, branch):
    """Mark as SKIPPED every pending item not overlapping new git changes.

    Args:
        work_db: The WorkDB whose pending work items should be filtered.
        branch: The git branch against which changed lines are computed.
    """
    # Presumably maps module paths to sets of changed line numbers relative
    # to `branch` — TODO confirm against _git_news.
    git_news = self._git_news(branch)

    # NOTE: the original code had three no-op attribute-access statements
    # here (item.module_path, item.start_pos, item.end_pos); removed.
    for item in work_db.pending_work_items:
        changed_lines = git_news.get(item.module_path)
        # Lines the work item spans (start/end positions look 1-based rows).
        item_lines = set(range(item.start_pos[0], item.end_pos[0] + 1))
        if changed_lines is None or not (changed_lines & item_lines):
            log.info(
                "git skipping %s %s %s %s %s %s",
                item.job_id,
                item.operator_name,
                item.occurrence,
                item.module_path,
                item.start_pos,
                item.end_pos,
            )
            work_db.set_result(
                item.job_id,
                WorkResult(
                    output="Filtered git",
                    worker_outcome=WorkerOutcome.SKIPPED,
                ),
            )
def test_set_result_throws_KeyError_if_no_matching_work_item(work_db):
    """set_result must raise KeyError for a job id with no work item."""
    result = WorkResult(
        output='data',
        test_outcome=TestOutcome.KILLED,
        worker_outcome=WorkerOutcome.NORMAL,
        diff='diff')
    with pytest.raises(KeyError):
        work_db.set_result('job_id', result)
def test_set_result_throws_KeyError_if_no_matching_work_item(work_db):
    """set_result must raise KeyError for a job id with no work item."""
    result = WorkResult(
        output="data",
        test_outcome=TOutcome.KILLED,
        worker_outcome=WorkerOutcome.NORMAL,
        diff="diff",
    )
    with pytest.raises(KeyError):
        work_db.set_result("job_id", result)
def test_clear_work_items_removes_results(work_db):
    """clear() must drop stored results along with the work items."""
    for idx in range(10):
        job_id = 'job_id_{}'.format(idx)
        work_db.add_work_item(
            WorkItem('path', 'operator', 0, (0, 0), (0, 1), job_id))
        work_db.set_result(job_id, WorkResult(WorkerOutcome.NORMAL))

    work_db.clear()

    assert work_db.num_results == 0
def test_no_test_return_value(path_utils, data_dir, python_version):
    """A module with no matching mutation site reports WorkerOutcome.NO_TEST."""
    with path_utils.excursion(data_dir):
        result = mutate_and_test(
            Path("a/b.py"),
            python_version,
            'core/ReplaceTrueWithFalse',
            100,
            'python -m unittest tests',
            1000)
        expected = WorkResult(
            output=None,
            test_outcome=None,
            diff=None,
            worker_outcome=WorkerOutcome.NO_TEST)
        assert result == expected
def test_clear_work_items_removes_results(work_db):
    """clear() must drop stored results along with the work items."""
    for idx in range(10):
        job_id = f"job_id_{idx}"
        work_db.add_work_item(
            WorkItem.single(
                job_id,
                ResolvedMutationSpec("path", "operator", 0, (0, 0), (0, 1))))
        work_db.set_result(job_id, WorkResult(WorkerOutcome.NORMAL))

    work_db.clear()

    assert work_db.num_results == 0
def test_jobs_with_results_are_not_pending(work_db):
    """A job with a stored result no longer appears among pending items."""
    work_db.add_work_item(
        WorkItem('path', 'operator', 0, (0, 0), (0, 1), 'job_id'))

    result = WorkResult(
        output='data',
        test_outcome=TestOutcome.KILLED,
        worker_outcome=WorkerOutcome.NORMAL,
        diff='diff')
    work_db.set_result('job_id', result)

    assert not list(work_db.pending_work_items)
def test_set_multiple_results_works(work_db):
    """A second result for the same job must replace the first, not accumulate."""
    work_db.add_work_item(
        WorkItem('path', 'operator', 0, (0, 0), (0, 1), 'job_id'))

    # Store two results for the same job id; only the last should survive.
    for output in ('first result', 'second result'):
        work_db.set_result(
            'job_id',
            WorkResult(
                output=output,
                test_outcome=TestOutcome.KILLED,
                worker_outcome=WorkerOutcome.NORMAL,
                diff='diff'))

    matching = [res for job_id, res in work_db.results if job_id == 'job_id']
    assert len(matching) == 1
    assert matching[0].output == 'second result'
def filter(self, work_db, _args):
    """Look for WorkItems in `work_db` that should not be mutated due to spor metadata.

    For each WorkItem, find anchors for the item's file/line/columns. If an
    anchor exists with metadata containing `{mutate: False}` then the WorkItem
    is marked as SKIPPED.

    Args:
        work_db: A WorkDB to filter.
        _args: Unused (kept for the filter interface).
    """

    @lru_cache()
    def file_contents(file_path):
        "A simple cache of file contents."
        with file_path.open(mode="rt") as handle:
            return handle.readlines()

    for item in work_db.pending_work_items:
        try:
            repo = open_repository(item.module_path)
        except ValueError:
            # No spor repository covers this module; nothing to filter on.
            log.info("No spor repository for %s", item.module_path)
            continue

        for _, anchor in repo.items():
            # Only anchors belonging to this item's file are relevant.
            if anchor.file_path != item.module_path.absolute():
                continue

            metadata = anchor.metadata

            lines = file_contents(item.module_path)
            # Skip only when the anchor's context overlaps the item AND the
            # anchor metadata explicitly disables mutation (default: allow).
            if _item_in_context(
                    lines, item,
                    anchor.context) and not metadata.get("mutate", True):
                log.info(
                    "spor skipping %s %s %s %s %s %s",
                    item.job_id,
                    item.operator_name,
                    item.occurrence,
                    item.module_path,
                    item.start_pos,
                    item.end_pos,
                )

                work_db.set_result(
                    item.job_id,
                    WorkResult(
                        output="Filtered by spor",
                        test_outcome=None,
                        diff=None,
                        worker_outcome=WorkerOutcome.SKIPPED,
                    ),
                )
def test_jobs_with_results_are_not_pending(work_db):
    """A job with a stored result no longer appears among pending items."""
    work_db.add_work_item(
        WorkItem.single(
            "job_id",
            ResolvedMutationSpec("path", "operator", 0, (0, 0), (0, 1))))

    result = WorkResult(
        output="data",
        test_outcome=TOutcome.KILLED,
        worker_outcome=WorkerOutcome.NORMAL,
        diff="diff",
    )
    work_db.set_result("job_id", result)

    assert not list(work_db.pending_work_items)
async def handle_completed_task(task):
    """Deliver a finished fetcher task's result and recycle its worker URL."""
    # TODO: If one of the URLs we've got is bad (i.e. no worker is running on it), that will result in an
    # exception from one of the tasks. We should notice this, log it, and remove the offending URL from the
    # pool.
    # Recover which worker URL and job this fetcher task belongs to
    # (fetchers/urls/on_task_complete come from the enclosing scope).
    url, completed_job_id = fetchers[task]
    try:
        result = await task
    except Exception as exc:
        # TODO: Do something with the exception
        log.exception("Error fetching result")
        # Report the failure as an ABNORMAL outcome instead of propagating.
        result = WorkResult(worker_outcome=WorkerOutcome.ABNORMAL, output=str(exc))
    finally:
        # The worker is free again: forget the fetcher and return its URL
        # to the pool, even if awaiting the task raised.
        del fetchers[task]
        urls.append(url)

    on_task_complete(completed_job_id, result)
def test_no_test_return_value(path_utils, data_dir):
    """A module with no matching mutation site reports WorkerOutcome.NO_TEST."""
    loop = asyncio.get_event_loop()
    with path_utils.excursion(data_dir):
        mutations = [
            MutationSpec(Path("a/b.py"), "core/ReplaceTrueWithFalse", 100)
        ]
        result = loop.run_until_complete(
            mutate_and_test(
                mutations,
                "python -m unittest tests",
                1000,
            ))

    expected = WorkResult(
        output=None,
        test_outcome=None,
        diff=None,
        worker_outcome=WorkerOutcome.NO_TEST)
    assert result == expected
def test_results(work_db):
    """Results stored in the DB come back exactly as they were written."""
    for idx in range(10):
        work_db.add_work_item(
            WorkItem('path_{}'.format(idx),
                     'operator_{}'.format(idx),
                     idx,
                     (idx, idx),
                     (idx, idx + 1),
                     'job_id_{}'.format(idx)))

    expected = []
    for idx in range(10):
        expected.append(('job_id_{}'.format(idx),
                         WorkResult(output='data_{}'.format(idx),
                                    test_outcome=TestOutcome.KILLED,
                                    worker_outcome=WorkerOutcome.NORMAL,
                                    diff='diff_{}'.format(idx))))

    for job_id, work_result in expected:
        work_db.set_result(job_id, work_result)

    assert list(work_db.results) == expected
def test_adding_result_completes_work_item(work_db):
    """Storing a result moves the corresponding item into the completed set."""
    items = []
    for idx in range(10):
        item = WorkItem('path_{}'.format(idx),
                        'operator_{}'.format(idx),
                        idx,
                        (idx, idx),
                        (idx, idx + 1),
                        'job_id_{}'.format(idx))
        items.append(item)
        work_db.add_work_item(item)

    for idx, item in enumerate(items):
        # Every item we already gave a result for must now be completed.
        completed = [pair[0] for pair in work_db.completed_work_items]
        assert completed == items[:idx]
        work_db.set_result(
            'job_id_{}'.format(idx),
            WorkResult(output='data_{}'.format(idx),
                       test_outcome=TestOutcome.KILLED,
                       worker_outcome=WorkerOutcome.NORMAL,
                       diff='diff_{}'.format(idx)))
def test_adding_result_clears_pending(work_db):
    """Each stored result removes exactly its work item from the pending set."""
    items = []
    for idx in range(10):
        item = WorkItem('path_{}'.format(idx),
                        'operator_{}'.format(idx),
                        idx,
                        (idx, idx),
                        (idx, idx + 1),
                        'job_id_{}'.format(idx))
        items.append(item)
        work_db.add_work_item(item)

    for idx, item in enumerate(items):
        assert list(work_db.pending_work_items) == items[idx:]
        work_db.set_result(
            'job_id_{}'.format(idx),
            WorkResult(output='data_{}'.format(idx),
                       test_outcome=TestOutcome.KILLED,
                       worker_outcome=WorkerOutcome.NORMAL,
                       diff='diff_{}'.format(idx)))
def filter(self, work_db, _args):
    """Mark lines with "# pragma: no mutate" as SKIPPED

    For all work_item in db, if the LAST line of the working zone is marked
    with "# pragma: no mutate", this work_item will be skipped.

    Args:
        work_db: A WorkDB to filter.
        _args: Unused (kept for the filter interface).
    """

    @lru_cache()
    def file_contents(file_path):
        "A simple cache of file contents."
        with file_path.open(mode="rt") as handle:
            return handle.readlines()

    re_is_mutate = re.compile(r".*#.*pragma:.*no mutate.*")

    for item in work_db.work_items:
        for mutation in item.mutations:
            # NOTE: a stray debug `print(mutation.module_path)` that leaked
            # into this production filter was removed here.
            lines = file_contents(mutation.module_path)
            try:
                # item.{start,end}_pos[0] seems to be 1-based.
                line_number = mutation.end_pos[0] - 1
                if mutation.end_pos[1] == 0:
                    # The working zone ends at begin of line,
                    # consider the previous line.
                    line_number -= 1
                line = lines[line_number]
                if re_is_mutate.match(line):
                    work_db.set_result(
                        item.job_id,
                        WorkResult(output=None,
                                   test_outcome=None,
                                   diff=None,
                                   worker_outcome=WorkerOutcome.SKIPPED),
                    )
            except Exception as ex:
                # Re-raise with enough context to locate the offending zone.
                raise Exception(
                    "module_path: %s, start_pos: %s, end_pos: %s, len(lines): %s"
                    % (mutation.module_path, mutation.start_pos,
                       mutation.end_pos, len(lines))) from ex
def _skip_filtered(self, work_db, exclude_operators):
    """Mark as SKIPPED every pending item whose operator matches a filter.

    Args:
        work_db: The WorkDB whose pending work items should be filtered.
        exclude_operators: An iterable of regular-expression strings; any
            item whose operator name matches one of them is skipped.
    """
    # Guard against an empty/missing filter list: joining it would yield the
    # empty pattern "", which matches EVERY operator and would skip all work.
    if not exclude_operators:
        return

    # "(?:...)" is a non-capturing group; the original "(:?...)" typo also
    # matched an optional leading colon before each operator name.
    re_exclude_operators = re.compile(
        '|'.join('(?:%s)' % e for e in exclude_operators))

    for item in work_db.pending_work_items:
        if re_exclude_operators.match(item.operator_name):
            log.info(
                "operator skipping %s %s %s %s %s %s",
                item.job_id,
                item.operator_name,
                item.occurrence,
                item.module_path,
                item.start_pos,
                item.end_pos,
            )
            work_db.set_result(
                item.job_id,
                WorkResult(
                    output="Filtered operator",
                    worker_outcome=WorkerOutcome.SKIPPED,
                ),
            )
def worker(module_path, python_version, operator_name, occurrence,
           test_command, timeout):
    """Mutate the OCCURRENCE-th site for OPERATOR_NAME in MODULE_PATH, run the
    tests, and report the results.

    This is fundamentally the single-mutation-and-test-run process
    implementation. A worker can finish in three high-level ways. First, it
    could fail exceptionally; every uncaught exception is intercepted and
    returned in a non-exceptional structure. Second, the mutation machinery
    may determine that there is no OCCURRENCE-th instance of OPERATOR_NAME in
    the module under test, in which case there is no test result to report
    and a special no-test value is returned. Finally, and hopefully normally,
    the worker runs the test and reports the result — killed, survived, or
    incompetent — in a structured way.

    Args:
        module_path: The path to the module to mutate.
        python_version: The version of Python to use when interpreting the
            code in `module_path`. A string of the form "MAJOR.MINOR",
            e.g. "3.6" for Python 3.6.x.
        operator_name: The name of the operator plugin to use.
        occurrence: The occurrence of the operator to apply.
        test_command: The command to execute to run the tests.
        timeout: The maximum amount of time (seconds) to let the tests run.

    Returns:
        A WorkResult.

    Raises:
        This will generally not raise any exceptions. Rather, exceptions will
        be reported using the 'exception' result-type in the return value.
    """
    try:
        operator_class = cosmic_ray.plugins.get_operator(operator_name)
        operator = operator_class(python_version)

        with cosmic_ray.mutating.use_mutation(
                module_path, operator, occurrence) as (original_code,
                                                       mutated_code):
            if mutated_code is None:
                # No OCCURRENCE-th mutation site exists for this operator.
                return WorkResult(worker_outcome=WorkerOutcome.NO_TEST)

            test_outcome, output = run_tests(test_command, timeout)
            diff = _make_diff(original_code, mutated_code, module_path)
            return WorkResult(
                output=output,
                diff='\n'.join(diff),
                test_outcome=test_outcome,
                worker_outcome=WorkerOutcome.NORMAL)
    except Exception:  # noqa # pylint: disable=broad-except
        # Report uncaught exceptions as an EXCEPTION/INCOMPETENT result
        # instead of letting them propagate out of the worker.
        return WorkResult(
            output=traceback.format_exc(),
            test_outcome=TestOutcome.INCOMPETENT,
            worker_outcome=WorkerOutcome.EXCEPTION)