Example #1
    def process_single_message(self, msg):
        logger.info(f'process_single_message - {msg.kind}')
        if msg.kind == 'handshake':
            # A subprocess introduced itself; reply with its task definition.
            found_task = self.find_task_with_id(msg)
            if found_task is None:
                raise Exception('no task found for subprocess')

            logger.debug(f'sending task definition, {found_task.id}')
            msg_to_reply = ScheduledTaskDefinitionMessage(
                task=found_task,
                coverage_exclusions=config.coverage_exclusions)
            bytes_msg = pickle.dumps(msg_to_reply)
            self.transport.write(bytes_msg)
        elif msg.kind == 'test_run_results':
            # Subprocess finished running tests; hand the results over.
            results = msg.results
            self.timeline.mark_event(
                'TCP: Got test run results from subprocess')
            self.results_did_become_available(results)
        elif msg.kind == 'timings':
            self.timeline.mark_event('TCP: Got timings from subprocess')
            execution_history.save(msg.timeline)
        elif msg.kind == 'close':
            self.timeline.mark_event('TCP: Received close message')
            logger.debug('Close the client socket')
            self.transport.close()
            self.completion_future.set_result(self.results)
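
The handshake branch above pickles a ScheduledTaskDefinitionMessage and writes it straight to the transport. A minimal sketch of the subprocess side of that exchange, assuming a plain blocking socket and single-recv framing (both are assumptions; the real client and message classes are not shown here):

import pickle
import socket


def request_task_definition(host, port, handshake_msg):
    # Hypothetical client: send a pickled handshake, then read back the
    # pickled ScheduledTaskDefinitionMessage the server writes above.
    with socket.create_connection((host, port)) as sock:
        sock.sendall(pickle.dumps(handshake_msg))
        # Assumes the whole reply arrives in one recv; a real protocol
        # would length-prefix each pickle so partial reads can be rejoined.
        raw = sock.recv(65536)
    reply = pickle.loads(raw)
    return reply.task, reply.coverage_exclusions

Note that pickle over a socket is only safe between trusted processes: pickle.loads will execute arbitrary code from a malicious peer.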
Example #2
from pprint import pprint


def test_serialization():
    sut = Timeline('complex')
    # Expected shape: the root interval holds 'nested_1' (children 'a', 'b',
    # 'c'), then top-level intervals '1' and '2' ('2' nests '2.1' and '2.2',
    # which nests '2.2.1'), a standalone 'aaa' event, and a final '3'.
    sut.start()
    sut.begin_nested_interval('nested_1')

    sut.begin_nested_interval('a')
    sut.end_nested_interval()

    sut.begin_nested_interval('b')
    sut.end_nested_interval()

    sut.begin_nested_interval('c')
    sut.end_nested_interval()

    sut.end_nested_interval()

    sut.begin_nested_interval('1')
    sut.end_nested_interval()

    sut.begin_nested_interval('2')

    sut.begin_nested_interval('2.1')
    sut.end_nested_interval()

    sut.begin_nested_interval('2.2')

    sut.begin_nested_interval('2.2.1')
    sut.end_nested_interval()

    sut.end_nested_interval()

    sut.end_nested_interval()
    sut.mark_event('aaa')
    sut.begin_nested_interval('3')
    sut.end_nested_interval()

    sut.stop()

    execution_history.save(sut)
    pprint(execution_history.to_json())
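
The test drives Timeline entirely through paired begin_nested_interval/end_nested_interval calls. A stack-based sketch of how such an API can build the nesting, using only the method names seen above (the real Timeline implementation is not shown and may differ):

import time


class Interval:
    def __init__(self, name):
        self.name = name
        self.start = time.time()
        self.end = None
        self.children = []


class MiniTimeline:
    # Hypothetical stand-in for Timeline: begin/end push and pop a stack,
    # so intervals nest exactly as deeply as the unmatched begin calls.
    def __init__(self, name):
        self.root = Interval(name)
        self.stack = [self.root]

    def begin_nested_interval(self, name):
        child = Interval(name)
        self.stack[-1].children.append(child)
        self.stack.append(child)

    def end_nested_interval(self):
        self.stack.pop().end = time.time()

Each unmatched begin pushes one level deeper, which is why the pairing in the test determines the exact tree that execution_history.save serializes.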
Example #3
    async def run(self):
        """
            Here we run multiple tests at once using one or multiple processes
        """
        self.timeline.mark_event('run')
        watchdog_pipeline.add_task(TestExecutionBeginTask(len(self.tests)))
        socket_notification_task = asyncio.ensure_future(
            engine.tests_will_run(self.tests))

        converted_tests = self.get_converted_test_list()
        runner = self.create_test_runner()

        # Run the tests, but allow the whole run to be cancelled mid-flight.
        runner_task = asyncio.ensure_future(runner.run(tests=converted_tests))
        run_results_compound = await self.wait_with_cancellation(runner_task)
        if run_results_compound.is_failed():
            failure_reason = self.user_friendly_error_message(
                run_results_compound.status)

            for test in converted_tests:
                candidate_fqn = test['fqn']
                cov_run = CoverageRun(
                    candidate_fqn,
                    -1,
                    None,
                    execution_result=ExecutionResult.create_failed_with_reason(
                        failure_reason))
                run_results_compound.results[candidate_fqn] = cov_run

        run_results = run_results_compound.results

        self.timeline.mark_event('before tests_did_run')

        # Asynchronously send messages over the websocket;
        # the line below communicates test statuses as a side effect.
        async_tasks_post = [engine.tests_did_run(run_results)]

        self.post_process_combined_coverage(run_results)

        self.timeline.mark_event('Sending: test_run_completed event')
        # TODO: only `filename` is used in the connector; why send everything else?
        cov_to_send = dict(all_runs=self.convert_result_to_json(run_results))
        async_tasks_post.append(
            shared.pipe.push(
                event_type='test_run_completed',
                coverage=cov_to_send,
                timings=dict(start=self.timestamp, end=clock.now()),
            ))

        self.timeline.mark_event('Started combined coverage serialization')
        serialized = serialize_combined_coverage(combined_coverage)
        self.timeline.mark_event('Completed combined coverage serialization')

        self.timeline.mark_event('Sending: combined coverage over WS')
        async_tasks_post.append(
            shared.pipe.push(
                event_type='combined_coverage_updated',
                combined_coverage=serialized,
                # TODO: why do I need dependencies to be exposed? It is internal state.
                # dependencies=self.build_dependencies(),
                aggregated_results=engine.all_tests.legacy_aggregated_statuses(),
                timings=dict(start=self.timestamp, end=clock.now()),
            ))

        self.timeline.mark_event(
            'Waiting until post-processing tasks are completed')
        await asyncio.gather(*async_tasks_post)
        watchdog_pipeline.add_task(TestExecutionEndTask())

        self.timeline.mark_event('Send: done, stopping timeline')

        self.timeline.stop()
        execution_history.save(self.timeline)
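
The runner task above is awaited through self.wait_with_cancellation, whose body is not shown. A plausible sketch of such a helper, assuming cancellation arrives as an asyncio.Event (both the free-function signature and the event are assumptions):

import asyncio


async def wait_with_cancellation(task, cancel_event):
    # Hypothetical helper: race the runner task against a cancellation
    # signal and cancel whichever side loses the race.
    cancel_waiter = asyncio.ensure_future(cancel_event.wait())
    done, pending = await asyncio.wait(
        {task, cancel_waiter},
        return_when=asyncio.FIRST_COMPLETED)
    for leftover in pending:
        leftover.cancel()
    if task in done:
        return task.result()
    raise asyncio.CancelledError('test run was cancelled')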
Example #4
    async def run(self):
        self.timeline.mark_event('run')

        await engine.tests_will_run(self.tests)
        converted_tests = [
            dict(fqn=test.discovered_test.fqn,
                 filename=test.discovered_test.filename,
                 name=test.discovered_test.name,
                 module=test.discovered_test.module,
                 state='converted')
            for test in self.tests
        ]

        runner = MultiprocessTestRunner(timeout=30, timeline=self.timeline)
        self.timeline.mark_event('before running tests')
        await runner.run(tests=converted_tests)
        self.results = runner.results
        # runner = TestRunner(runner_engine=runner_engine)
        # with ModuleCleanup() as cleanup:
        #     results = runner.run(self.tests)
        if self.results is None:
            logger.error('!!! None in results')
        else:
            logger.debug('results are not none')

        self.timeline.mark_event('before tests_did_run')
        if not self.results:
            self.results = dict()

        await engine.tests_did_run(self.results)

        self.timeline.mark_event(
            'Postprocessing: combined coverage, line hits, dependency tree')
        combined_coverage.add_multiple_results(self.results)
        self.timeline.mark_event('Postprocessing: completed')

        results_as_json = {fqn: result.as_json()
                           for fqn, result in self.results.items()}

        self.timeline.mark_event('Sending: test_run_completed event')

        await shared.pipe.push(
            event_type='test_run_completed',
            coverage=dict(all_runs=results_as_json),
            # data=serialize_test_set_state(self.tests),
            timings=dict(start=self.timestamp, end=shared.timestamp()),
        )

        self.timeline.mark_event('Started combined coverage serialization')
        serialized = serialize_combined_coverage(combined_coverage)
        self.timeline.mark_event('Completed combined coverage serialization')

        self.timeline.mark_event('Send: combined coverage over WS')
        await shared.pipe.push(
            event_type='combined_coverage_updated',
            combined_coverage=serialized,
            dependencies={
                entry_point: list(filenames)
                for entry_point, filenames in
                combined_coverage.dependencies.items()
            },
            aggregated_results=engine.all_tests.legacy_aggregated_statuses(),
            timings=dict(start=self.timestamp, end=shared.timestamp()),
        )

        self.timeline.mark_event('Send: done, stopping timeline')

        self.timeline.stop()
        execution_history.save(self.timeline)

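This version of run() converts each dependency set to a list before pushing it over the websocket. That conversion is load-bearing: Python sets are not JSON-serializable, as a quick standalone check (with made-up file names) demonstrates:

import json

# Made-up data in the same shape as combined_coverage.dependencies.
deps = {'tests/test_app.py': {'app/models.py', 'app/views.py'}}

try:
    json.dumps(deps)
except TypeError as error:
    print(error)  # Object of type set is not JSON serializable

# Converting each set to a list (sorted here, for stable output) fixes it.
serializable = {entry_point: sorted(filenames)
                for entry_point, filenames in deps.items()}
print(json.dumps(serializable))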