def test_pytest_parallel_threads(mock_client_init):
    """Verify "pytest_parallel" plugin run tests in two threads.

    :param mock_client_init: Pytest fixture
    """
    mock_client = mock_client_init.return_value
    mock_client.start_test_item.side_effect = item_id_gen

    result = utils.run_pytest_tests(tests=['examples/hierarchy'],
                                    args=['--tests-per-worker', '2'])
    assert int(result) == 0, 'Exit code should be 0 (no errors)'

    mock_client = mock_client_init.return_value

    expect(mock_client.start_launch.call_count == 1,
           '"start_launch" method was not called')
    expect(mock_client.finish_launch.call_count == 1,
           '"finish_launch" method was not called')
    assert_expectations()

    finish_args = mock_client.finish_launch.call_args_list
    expect(finish_args[0][1]['status'] in ('PASSED', None), 'Launch failed')
    launch_end_time = finish_args[0][1]['end_time']
    expect(launch_end_time is not None and int(launch_end_time) > 0,
           'Launch end time is empty')
    assert_expectations()
Beispiel #2
0
def test_issue_report(mock_client_init):
    """Verify agent reports issue ids and defect type.

    :param mock_client_init: Pytest fixture
    """
    mock_client = mock_client_init.return_value
    mock_client.start_test_item.side_effect = utils.item_id_gen
    mock_client.get_project_settings.side_effect = utils.project_settings

    variables = dict()
    variables['rp_issue_system_url'] = ISSUE_URL_PATTERN
    variables.update(utils.DEFAULT_VARIABLES.items())
    result = utils.run_pytest_tests(tests=['examples/test_issue_id.py'],
                                    variables=variables)
    assert int(result) == 1, 'Exit code should be 1 (test failed)'

    call_args = mock_client.finish_test_item.call_args_list
    finish_test_step = call_args[0][1]
    issue = finish_test_step['issue']

    assert isinstance(issue, Issue)
    expect(issue.issue_type == 'pb001')
    expect(issue.comment is not None)
    assert_expectations()
    comments = issue.comment.split('\n')
    assert len(comments) == 1
    comment = comments[0]
    assert comment == "* {}: [{}]({})" \
        .format(test_issue_id.REASON, test_issue_id.ID,
                ISSUE_URL_PATTERN.replace(ISSUE_PLACEHOLDER, test_issue_id.ID))
Beispiel #3
0
def test_external_issue(mock_client_init):
    """Verify skipped test with issue decorator handling.

    :param mock_client_init: mocked Report Portal client Pytest fixture
    """
    mock_client = mock_client_init.return_value
    mock_client.start_test_item.side_effect = utils.item_id_gen
    mock_client.get_project_settings.side_effect = utils.project_settings

    variables = dict()
    variables['rp_bts_project'] = BTS_PROJECT
    variables['rp_bts_url'] = BTS_URL
    variables['rp_issue_system_url'] = ISSUE_URL_PATTERN
    variables.update(utils.DEFAULT_VARIABLES.items())

    result = utils.run_pytest_tests(tests=['examples/test_issue_id.py'],
                                    variables=variables)

    assert int(result) == 1, 'Exit code should be 1 (test failed)'
    call_args = mock_client.finish_test_item.call_args_list
    finish_test_step = call_args[0][1]
    actual_issue = finish_test_step.get('issue', None)
    assert isinstance(actual_issue, Issue)
    expect(actual_issue.issue_type == 'pb001')
    expect(actual_issue.comment is not None)
    external_issues = actual_issue._external_issues
    expect(len(external_issues) == 1)
    assert_expectations()
    external_issue = external_issues[0]
    expect(external_issue['btsUrl'] == BTS_URL)
    expect(external_issue['btsProject'] == BTS_PROJECT)
    expect(external_issue['ticketId'] == test_issue_id.ID)
    expect(external_issue['url'] == ISSUE_URL_PATTERN.replace(
        ISSUE_PLACEHOLDER, test_issue_id.ID))
    assert_expectations()
Beispiel #4
0
def test_issue_id_attribute(mock_client_init, issue_id_mark):
    """Verify agent reports issue attribute if configured.

    :param mock_client_init: Pytest fixture
    :param issue_id_mark:    Attribute report configuration
    """
    mock_client = mock_client_init.return_value
    mock_client.start_test_item.side_effect = utils.item_id_gen
    mock_client.get_project_settings.side_effect = utils.project_settings

    variables = dict()
    variables['rp_issue_id_marks'] = issue_id_mark
    variables.update(utils.DEFAULT_VARIABLES.items())
    result = utils.run_pytest_tests(tests=['examples/test_issue_id.py'],
                                    variables=variables)
    assert int(result) == 1, 'Exit code should be 1 (test failed)'

    call_args = mock_client.start_test_item.call_args_list
    finish_test_step = call_args[-1][1]
    attributes = finish_test_step['attributes']

    if issue_id_mark:
        assert len(attributes) == 1
        issue_attribute = attributes[0]
        expect(issue_attribute['key'] == 'issue')
        expect(issue_attribute['value'] == test_issue_id.ID)
        assert_expectations()
    else:
        assert len(attributes) == 0
Beispiel #5
0
def test_rp_parent_item_id_and_rp_launch_id(mock_client_init):
    """Verify RP handles both conf props 'rp_parent_item_id' & 'rp_launch_id'.

    :param mock_client_init: Pytest fixture
    """
    parent_id = "parent_id"
    variables = dict()
    variables['rp_parent_item_id'] = parent_id
    variables['rp_launch_id'] = "test_launch_id"
    variables.update(utils.DEFAULT_VARIABLES.items())
    result = utils.run_pytest_tests(tests=['examples/test_simple.py'],
                                    variables=variables)

    assert int(result) == 0, 'Exit code should be 0 (no errors)'

    mock_client = mock_client_init.return_value
    expect(mock_client.start_launch.call_count == 0,
           '"start_launch" method was called')
    expect(mock_client.finish_launch.call_count == 0,
           '"finish_launch" method was called')

    start_call_args = mock_client.start_test_item.call_args_list
    finish_call_args = mock_client.finish_test_item.call_args_list

    expect(len(start_call_args) == len(finish_call_args))
    expect(start_call_args[0][1]["parent_item_id"] == parent_id)
    assert_expectations()
Beispiel #6
0
def test_skipped_custom_issue(mock_client_init):
    """Verify skipped test with issue decorator handling.

    :param mock_client_init: mocked Report Portal client Pytest fixture
    """
    mock_client = mock_client_init.return_value
    mock_client.start_test_item.side_effect = utils.item_id_gen
    mock_client.get_project_settings.side_effect = utils.project_settings

    variables = dict()
    variables['rp_is_skipped_an_issue'] = True
    variables['rp_issue_system_url'] = ISSUE_URL_PATTERN
    variables.update(utils.DEFAULT_VARIABLES.items())

    result = utils.run_pytest_tests(tests=['examples/skip/test_skip_issue.py'],
                                    variables=variables)

    assert int(result) == 0, 'Exit code should be 0 (no failures)'
    call_args = mock_client.finish_test_item.call_args_list
    finish_test_step = call_args[0][1]
    actual_issue = finish_test_step.get('issue', None)
    assert isinstance(actual_issue, Issue)
    expect(actual_issue.issue_type == 'pb001')
    expect(actual_issue.comment is not None)
    assert_expectations()
Beispiel #7
0
def test_rp_launch_id(mock_client_init):
    """Verify that RP plugin does not start/stop launch if 'rp_launch_id' set.

    :param mock_client_init: Pytest fixture
    """
    variables = dict()
    variables['rp_launch_id'] = TEST_LAUNCH_ID
    variables.update(utils.DEFAULT_VARIABLES.items())
    result = utils.run_pytest_tests(tests=['examples/test_simple.py'],
                                    variables=variables)

    assert int(result) == 0, 'Exit code should be 0 (no errors)'

    expect(
        mock_client_init.call_args_list[0][1]['launch_id'] == TEST_LAUNCH_ID)

    mock_client = mock_client_init.return_value
    expect(mock_client.start_launch.call_count == 0,
           '"start_launch" method was called')
    expect(mock_client.finish_launch.call_count == 0,
           '"finish_launch" method was called')

    start_call_args = mock_client.start_test_item.call_args_list
    finish_call_args = mock_client.finish_test_item.call_args_list

    expect(len(start_call_args) == len(finish_call_args))
    assert_expectations()
def test_rp_hierarchy_parameters(mock_client_init, test, variables,
                                 expected_items):
    """Verify suite hierarchy with `rp_hierarchy_dirs=True`.

    :param mock_client_init: Pytest fixture
    """
    mock_client = mock_client_init.return_value
    mock_client.start_test_item.side_effect = utils.item_id_gen

    result = utils.run_pytest_tests(tests=test, variables=variables)
    assert int(result) == 0, 'Exit code should be 0 (no errors)'

    verify_start_item_parameters(mock_client, expected_items)
Beispiel #9
0
def test_rp_log_batch_payload_size(mock_client_init):
    log_size = 123456
    variables = {'rp_log_batch_payload_size': log_size}
    variables.update(utils.DEFAULT_VARIABLES.items())

    result = utils.run_pytest_tests(['examples/test_rp_logging.py'],
                                    variables=variables)
    assert int(result) == 0, 'Exit code should be 0 (no errors)'

    expect(mock_client_init.call_count == 1)

    constructor_args = mock_client_init.call_args_list[0][1]
    expect(constructor_args['log_batch_payload_size'] == log_size)
    assert_expectations()
Beispiel #10
0
def test_parameters(mock_client_init, test, expected_params):
    """Verify different tests have correct parameters.

    :param mock_client_init: Pytest fixture
    :param test:             a test to run
    :param expected_params:  an expected parameter dictionary
    """
    variables = utils.DEFAULT_VARIABLES
    result = utils.run_pytest_tests(tests=[test], variables=variables)
    assert int(result) == 0, 'Exit code should be 0 (no errors)'

    mock_client = mock_client_init.return_value
    assert mock_client.start_test_item.call_count > 0, \
        '"start_test_item" called incorrect number of times'

    call_args = mock_client.start_test_item.call_args_list
    step_call_args = call_args[-1][1]
    assert step_call_args['parameters'] == expected_params
Beispiel #11
0
def test_passed_no_issue_report(mock_client_init):
    """Verify agent do not report issue if test passed.

    :param mock_client_init: Pytest fixture
    """
    mock_client = mock_client_init.return_value
    mock_client.start_test_item.side_effect = utils.item_id_gen
    mock_client.get_project_settings.side_effect = utils.project_settings

    variables = dict()
    variables['rp_issue_system_url'] = ISSUE_URL_PATTERN
    variables.update(utils.DEFAULT_VARIABLES.items())
    result = utils.run_pytest_tests(tests=['examples/test_issue_id_pass.py'],
                                    variables=variables)
    assert int(result) == 0, 'Exit code should be 0 (no failures)'

    call_args = mock_client.finish_test_item.call_args_list
    finish_test_step = call_args[0][1]
    assert 'issue' not in finish_test_step or finish_test_step['issue'] is None
Beispiel #12
0
def test_simple_tests(mock_client_init, test, expected_run_status,
                      expected_item_status):
    """Verify a simple test creates correct structure and finishes all items.

    Report 'None' for suites and launch due to possible parallel execution.
    Leave status calculation on Server.
    :param mock_client_init:     mocked Report Portal client Pytest fixture
    :param test:                 a test to run as use case
    :param expected_run_status:  expected pytest run status
    :param expected_item_status: expected result test item status
    """
    mock_client = mock_client_init.return_value
    mock_client.start_test_item.side_effect = utils.item_id_gen

    result = utils.run_pytest_tests(tests=[test])
    assert int(result) == expected_run_status, 'Exit code should be ' + str(
        expected_run_status)

    start_call_args = mock_client.start_test_item.call_args_list
    finish_call_args = mock_client.finish_test_item.call_args_list
    assert len(start_call_args) == len(finish_call_args), \
        'Number of started items should be equal to finished items'

    for i in range(len(start_call_args)):
        start_test_step = start_call_args[-1 - i][1]
        finish_test_step = finish_call_args[i][1]

        expect(finish_test_step['item_id'].startswith(start_test_step['name']))
        if i == 0:
            actual_status = finish_test_step['status']
            expect(
                actual_status == expected_item_status,
                'Invalid item status, actual "{}", expected: "{}"'.format(
                    actual_status, expected_item_status))

    finish_launch_call_args = mock_client.finish_launch.call_args_list
    expect(len(finish_launch_call_args) == 1)
    expect('end_time' in finish_launch_call_args[0][1])
    expect(finish_launch_call_args[0][1]['end_time'] is not None)
    expect('status' not in finish_launch_call_args[0][1])
    assert_expectations()
Beispiel #13
0
def test_skipped_not_issue(mock_client_init, flag_value, expected_issue):
    """Verify 'rp_is_skipped_an_issue' option handling.

    :param mock_client_init: mocked Report Portal client Pytest fixture
    :param flag_value:       option value to set during the test
    :param expected_issue:   result issue value to verify
    """
    mock_client = mock_client_init.return_value
    mock_client.start_test_item.side_effect = utils.item_id_gen

    variables = dict()
    if flag_value is not None:
        variables['rp_is_skipped_an_issue'] = flag_value
    variables.update(utils.DEFAULT_VARIABLES.items())

    result = utils.run_pytest_tests(
        tests=['examples/skip/test_simple_skip.py'], variables=variables)

    assert int(result) == 0, 'Exit code should be 0 (no failures)'
    call_args = mock_client.finish_test_item.call_args_list
    finish_test_step = call_args[0][1]
    actual_issue = finish_test_step.get('issue', None)
    assert actual_issue == expected_issue
def test_launch_mode(mock_client_init, mode, expected_mode):
    """Verify different launch modes are passed to `start_launch` method.

    :param mock_client_init: Pytest fixture
    :param mode:             a variable to be passed to pytest
    :param expected_mode:    a value which should be passed to
    ReportPortalService
    """
    variables = dict()
    if mode is not None:
        variables['rp_mode'] = mode
    variables.update(utils.DEFAULT_VARIABLES.items())
    result = utils.run_pytest_tests(tests=['examples/test_simple.py'],
                                    variables=variables)
    assert int(result) == 0, 'Exit code should be 0 (no errors)'

    mock_client = mock_client_init.return_value
    assert mock_client.start_launch.call_count == 1, \
        '"start_launch" method was not called'

    call_args = mock_client.start_launch.call_args_list
    start_launch_kwargs = call_args[0][1]
    assert start_launch_kwargs['mode'] == expected_mode
Beispiel #15
0
def test_empty_run(mock_client_init):
    """Verify that RP plugin does not fail if there is not tests in run.

    :param mock_client_init: Pytest fixture
    """
    result = utils.run_pytest_tests(tests=['examples/epmty/'])

    assert int(result) == 4, 'Exit code should be 4 (no tests)'

    mock_client = mock_client_init.return_value
    expect(mock_client.start_launch.call_count == 1,
           '"start_launch" method was not called')
    expect(mock_client.finish_launch.call_count == 1,
           '"finish_launch" method was not called')
    assert_expectations()

    finish_args = mock_client.finish_launch.call_args_list
    expect('status' not in finish_args[0][1],
           'Launch status should not be defined')
    launch_end_time = finish_args[0][1]['end_time']
    expect(launch_end_time is not None and int(launch_end_time) > 0,
           'Launch end time is empty')
    assert_expectations()