Exemplo n.º 1
0
def test_repeat_upload_artifacts_changed_artifacts(temp_dir, prepare_artifacts_with_ssh_structure):
    """
    repeat usage upload_artifacts with changed artifacts should copy only changed artifacts
    """

    ssh = LocalPool(config['ssh'])
    ssh.connect()

    ssh_home_dir = join(config["ssh"]["home"], '127.0.0.1')
    art_dir = join(ssh_home_dir, 'artifacts')

    init_remote_hosts(ssh, config)
    upload_artifacts(ssh, config, prepare_artifacts_result)

    # remember the creation time of every uploaded artifact
    previous_art_creation_times = {
        artifact: c_time(join(art_dir, artifact)) for artifact in listdir(art_dir)
    }

    # modify exactly one source artifact, then upload again
    changed_name = config['_test_artifacts']["additional"]["file"]["name"]
    test_file_path = join(config["artifacts_dir"], changed_name)
    with open(test_file_path, "w") as f:
        f.write("Things changed")
    upload_artifacts(ssh, config, prepare_artifacts_result)

    # only the modified artifact may have a new creation time
    for artifact_name in listdir(art_dir):
        actual_art_creation_time = c_time(join(art_dir, artifact_name))
        if artifact_name == changed_name:
            assert actual_art_creation_time != previous_art_creation_times[artifact_name]
        else:
            assert actual_art_creation_time == previous_art_creation_times[artifact_name]
Exemplo n.º 2
0
def test_upload_artifacts(temp_dir, prepare_artifacts_with_ssh_structure):
    """
    upload_artifacts should copy artifacts and unzip archives
    """
    ssh = LocalPool(config['ssh'])
    ssh.connect()

    ssh_home_dir = join(config["ssh"]["home"], '127.0.0.1')

    init_remote_hosts(ssh, config)
    upload_artifacts(ssh, config, prepare_artifacts_result)

    # remote home must contain exactly the artifacts dir and the suite dir
    assert sorted(listdir(ssh_home_dir)) == sorted(["artifacts", 'test-0'])

    # every configured artifact file must land in the 'artifacts' dir
    arts = config['_test_artifacts']
    expected_artifacts_dir_files = [
        arts["source"]["arch"]["repack_name"],
        arts["additional"]["file"]["name"],
        arts["additional"]["arch"]["name"],
    ]
    actual_artifacts_files_list = listdir(join(ssh_home_dir, 'artifacts'))
    assert sorted(expected_artifacts_dir_files) == sorted(actual_artifacts_files_list)

    # archives flagged with remote_unzip must be unpacked into the suite dir
    expected_tests_files = sorted(
        name for name, conf in config["artifacts"].items() if conf.get("remote_unzip", False)
    )
    assert expected_tests_files == sorted(listdir(join(ssh_home_dir, 'test-0')))
Exemplo n.º 3
0
def test_init_remote_hosts_clean_all(temp_dir, prepare_remote_structure):
    """
    init_remote_hosts should remove all var folders with --clean=all option
    """
    ssh = LocalPool(config['ssh'])
    ssh.connect()

    init_remote_hosts(ssh, config)

    home_dir = join(config["ssh"]["home"], '127.0.0.1')
    assert sorted(listdir(home_dir)) == sorted(["artifacts", 'test-0'])

    # remember when the artifacts dir was created, before the cleanup
    artifacts_dir_creation_time = c_time(join(home_dir, 'artifacts'))

    # point the suite at a fresh var dir and request a full cleanup
    config["remote"]["suite_var_dir"] = join(config["ssh"]["home"], 'test-1')
    config['clean'] = 'all'
    init_remote_hosts(ssh, config)

    # old suite dir is gone; only the new one and 'artifacts' remain
    assert sorted(listdir(home_dir)) == sorted(["artifacts", 'test-1'])

    # the artifacts dir was recreated, so its creation time changed
    assert artifacts_dir_creation_time != c_time(join(home_dir, 'artifacts'))
Exemplo n.º 4
0
def test_init_remote_hosts_clean_none(temp_dir, prepare_remote_structure):
    """
    init_remote_hosts should add new tests folders without --clean option
    """
    ssh = LocalPool(config['ssh'])
    ssh.connect()

    init_remote_hosts(ssh, config)

    home_dir = join(config["ssh"]["home"], '127.0.0.1')
    assert sorted(listdir(home_dir)) == sorted(["artifacts", 'test-0'])

    # remember creation times so we can prove nothing gets recreated
    artifacts_dir_stat = c_time(join(home_dir, 'artifacts'))
    test_dir_stat = c_time(join(home_dir, 'test-0'))

    # point the suite at a new var dir and re-init WITHOUT any cleanup
    config["remote"]["suite_var_dir"] = join(config["ssh"]["home"], 'test-1')
    init_remote_hosts(ssh, config)

    # the new dir is added while the old ones are kept
    assert sorted(listdir(home_dir)) == sorted(["artifacts", 'test-0', 'test-1'])

    # pre-existing folders must be left untouched
    assert c_time(join(home_dir, 'artifacts')) == artifacts_dir_stat
    assert test_dir_stat == c_time(join(home_dir, 'test-0'))
Exemplo n.º 5
0
def repeat_upload_changed_artifacts(temp_dir, prepare_artifacts_with_ssh_structure):
    """
    upload artifact
    changing source artifact
    trying to run without --clean
    artifact should be uploaded again
    """
    global config

    ssh = LocalPool(config['ssh'])
    ssh.connect()

    ssh_home_dir = join(config["ssh"]["home"], '127.0.0.1')
    art_dir = join(ssh_home_dir, 'artifacts')

    init_remote_hosts(ssh, config)
    upload_artifacts(ssh, config, prepare_artifacts_result)

    # change the artifact set: create a brand new file and add it to the
    # repack instructions of the source archive
    new_file = join(config["artifacts_dir"], "new_file.txt")
    with open(new_file, "w") as f:
        f.write("Things changed")
    config["artifacts"]["source_zip"]["repack"].append("copy {} self:/".format(new_file))

    # re-prepare and re-upload; the changed archive must be repacked again
    new_prepare_artifacts_result, config = prepare(config)
    upload_artifacts(ssh, config, new_prepare_artifacts_result)

    # uploaded archive now carries the new file next to the originals
    expected_files = ['additional_artifact.txt', 'new_file.txt', 'source_artifact.txt',
                      'tiden_repack_original.checksum.sha256']
    repack_zip_path = join(art_dir, basename(config["artifacts"]["source_zip"]["path"]))
    with ZipFile(repack_zip_path, 'r') as repack_zip:
        actual_files_names = [item.filename for item in repack_zip.filelist]
    assert sorted(actual_files_names) == sorted(expected_files)
Exemplo n.º 6
0
def test_runner_handle_exceptions_in_module_teardown(with_dec_classpath,
                                                     local_config, tmpdir,
                                                     mock_pm):
    """
    Check that if we got exception in the module teardown tests will not be failed.
    :return:
    """
    var_dir = _ensure_var_dir(tmpdir)
    xunit_file = _ensure_xunit_file_empty(var_dir)
    suite_var_dir = str(var_dir.mkdir('suite-mock'))
    config_path = str(var_dir.join('config.yaml'))

    config = deepcopy(local_config)
    config.update({
        'artifacts': {},
        'suite_var_dir': suite_var_dir,
        'suite_dir': join(dirname(__file__), 'res', 'decorators', 'suites'),
        'remote': {
            'suite_var_dir': suite_var_dir,
        },
        'config_path': config_path,
    })

    ssh_pool = LocalPool(local_config['ssh'])
    ssh_pool.connect()

    # run the same module twice: once raising generic exceptions in teardown,
    # once raising tiden-specific ones; both runs must keep the tests green
    teardown_variants = [
        'mock_test_module_with_generic_exceptions_in_teardown',
        'mock_test_module_with_tiden_exceptions_in_teardown'
    ]
    for class_name in teardown_variants:
        modules = {
            'mock3.mock_test_module_with_exceptions_in_teardown': {
                'path': '%s/mock3/mock_test_module_with_exceptions_in_teardown.py' % config['suite_dir'],
                'module_short_name': class_name,
            }
        }

        runner = TidenRunner(config,
                             modules=modules,
                             ssh_pool=ssh_pool,
                             plugin_manager=mock_pm,
                             xunit_path=xunit_file)
        runner.process_tests()
        res = runner.get_tests_results()
        _tests = res.get_tests()
        print(_tests)
        assert len(_tests) == 2
        assert res.get_tests_num('pass') == 2
Exemplo n.º 7
0
def test_java_app(with_java_app_classpath, local_config, tmpdir, mock_pm):
    """
    Run the mock 'java app' suite end-to-end through TidenRunner and
    produce xunit and TestRail reports.

    Fix: removed the dead store ``res = Result(xunit_path=xunit_file)``
    (and its import) — ``res`` was unconditionally overwritten by
    ``tr.get_tests_results()`` before first use.
    """
    from tiden.localpool import LocalPool
    from tiden.tidenfabric import TidenFabric
    from copy import deepcopy
    from datetime import datetime

    var_dir = str(tmpdir.mkdir('var'))
    xunit_file = str(tmpdir.join('var').join('xunit.xml'))
    tmpdir.join('var').join('xunit.xml').write('', ensure=True)
    report_path = 'report.yaml'

    config = deepcopy(local_config)

    # base suite settings: what to run and where the suite sources live
    config.update({
        'suite_name': 'mock',
        'test_name': '*',
        'suite_dir': join(dirname(__file__), 'res', 'java_app', 'suites'),
        'dir_prefix': f'mock-{datetime.now().strftime("%y%m%d-%H%M%S")}',
    })
    # per-run directories, local and remote (depend on dir_prefix above)
    config.update({
        'suite_var_dir': str(tmpdir.join('var').mkdir(config['dir_prefix'])),
        'remote': {
            'artifacts': join(config['environment']['home'], 'artifacts'),
            'suite_var_dir': join(config['environment']['home'], config['dir_prefix']),
        },
        'config_path': str(tmpdir.join('var').join('config.yaml')),
    })
    # single 'mockapp' artifact the suite expects
    config.update({
        'artifacts': {
            'mockapp': {
                'type': 'mockapp',
                'path': join(var_dir, 'artifacts', 'mockapp'),
                'remote_path': join(config['remote']['artifacts'], 'mockapp'),
            }
        },
    })

    ssh_pool = LocalPool(local_config['ssh'])
    modules = {
        'mock.mock_test_app': {
            'path': '%s/mock/mock_test_app.py' % config['suite_dir'],
            'module_short_name': 'mock_test_app',
        },
    }
    from tiden.tidenrunner import TidenRunner
    ssh_pool.connect()
    TidenFabric().setSshPool(ssh_pool)
    TidenFabric().setConfig(config)
    tr = TidenRunner(config, modules=modules, ssh_pool=ssh_pool, plugin_manager=mock_pm, xunit_path=xunit_file)
    tr.process_tests()
    res = tr.get_tests_results()
    res.flush_xunit()
    res.create_testrail_report(config, report_file=str(report_path))
Exemplo n.º 8
0
def test_runner_handle_exception_in_module_setup(with_dec_classpath,
                                                 local_config, tmpdir,
                                                 mock_pm):
    """
    Check that if we got exception in the module setup no one test executed.
    :return:
    """
    import pytest

    var_dir = _ensure_var_dir(tmpdir)
    xunit_file = _ensure_xunit_file_empty(var_dir)
    suite_var_dir = str(var_dir.mkdir('suite-mock'))
    config_path = str(var_dir.join('config.yaml'))

    config = deepcopy(local_config)
    config.update({
        'artifacts': {},
        'suite_var_dir': suite_var_dir,
        'suite_dir': join(dirname(__file__), 'res', 'decorators', 'suites'),
        'remote': {
            'suite_var_dir': suite_var_dir,
        },
        'config_path': config_path,
    })

    ssh_pool = LocalPool(local_config['ssh'])
    ssh_pool.connect()

    modules = {
        'mock3.mock_test_module_with_exceptions_in_setup': {
            'path': '%s/mock3/mock_test_module_with_exceptions_in_setup.py' % config['suite_dir'],
            'module_short_name': 'mock_test_module_with_exceptions_in_setup',
        }
    }

    runner = TidenRunner(config,
                         modules=modules,
                         ssh_pool=ssh_pool,
                         plugin_manager=mock_pm,
                         xunit_path=xunit_file)
    # a failing module setup aborts the whole run
    with pytest.raises(SystemExit):
        runner.process_tests()

    # ... and not a single test must have been executed
    res = runner.get_tests_results()
    _tests = res.get_tests()
    print(_tests)
    assert len(_tests) == 0
    for status in res.statuses:
        assert res.get_tests_num(status) == 0
Exemplo n.º 9
0
def test_local_pool_exec_on_host_rm_one_file(local_config):
    """
    exec_on_host should run shell commands on the given host: after removing
    a file created under the host's home, it must be gone from the local FS.
    """
    pool = LocalPool(local_config['ssh'])
    home_path = local_config['environment']['home']
    host = local_config['ssh']['hosts'][0]
    host_home_path = os.path.join(home_path, host)
    file_path = os.path.join(host_home_path, 'test')
    os.makedirs(host_home_path, exist_ok=True)
    # create an empty file; the 'with' closes it — the original called
    # f.close() inside the context manager, which was redundant
    with open(file_path, 'w'):
        pass

    pool.exec_on_host(host,
                      ["rm -rf %s/test" % local_config['environment']['home']])
    assert not os.path.exists(file_path)
Exemplo n.º 10
0
def test_priority_decorator_run_tests(with_dec_classpath, local_config, tmpdir,
                                      mock_pm):
    """
    Run the mock2 suite module carrying test-priority decorators through
    TidenRunner.
    """
    from tiden.result import Result
    from tiden.localpool import LocalPool
    from copy import deepcopy

    var_dir = str(tmpdir.mkdir('var'))
    suite_var_dir = str(tmpdir.join('var').mkdir('suite-mock2'))
    remote_suite_var_dir = str(
        tmpdir.join('var').mkdir('remote').mkdir('suite-mock2'))
    xunit_file = str(tmpdir.join('var').join('xunit.xml'))
    tmpdir.join('var').join('xunit.xml').write('', ensure=True)

    res = Result(xunit_path=xunit_file)
    config = deepcopy(local_config)
    config.update({
        'suite_var_dir': suite_var_dir,
        'suite_name': 'mock2',
        'test_name': '*',
        'config_path': '%s/config.yaml' % suite_var_dir,
        'suite_dir': join(dirname(__file__), 'res', 'decorators', 'suites'),
        'remote': {
            'suite_var_dir': remote_suite_var_dir,
        }
    })
    ssh_pool = LocalPool(local_config['ssh'])
    ssh_pool.connect()
    modules = {
        'mock2.mock_test_module_with_test_priorities': {
            'path': join(config['suite_dir'], 'mock2',
                         'mock_test_module_with_test_priorities.py'),
            'module_short_name': 'mock_test_module_with_test_priorities',
        },
    }
    from tiden.tidenrunner import TidenRunner
    runner = TidenRunner(config,
                         modules=modules,
                         ssh_pool=ssh_pool,
                         plugin_manager=mock_pm,
                         xunit_path=xunit_file)
    runner.process_tests()
Exemplo n.º 11
0
def init_ssh_pool(config):
    """
    Build and connect an SSH connection pool for all hosts declared in
    ``config['environment']``, according to ``config['connection_mode']``.

    Mutates ``config['ssh']`` in place (hosts, threads_num, env_vars) and
    persists the updated config to ``config['config_path']`` before
    constructing the pool. Registers the pool in TidenFabric and connects
    it before returning.

    :param config: tiden configuration dictionary
    :return: the connected pool instance (process exits on construction errors)
    """
    log_print("*** Create SSH Pool ***", color='blue')
    # Collect unique hosts
    hosts = []
    # top-level '*_hosts' entries ('apps_use_global_hosts' is a flag, not a host list)
    for name, data in config['environment'].items():
        if name.endswith(
                '_hosts'
        ) and not name == 'apps_use_global_hosts' and data is not None:
            hosts.extend(data)
    # nested '*_hosts' entries inside per-app sub-dictionaries
    for name, data in config['environment'].items():
        if isinstance(data, dict):
            for inner_name, inner_data in data.items():
                if inner_name.endswith('_hosts'):
                    hosts.extend(inner_data)

    # de-duplicate before storing back into the ssh section
    hosts = set(hosts)
    config['ssh']['hosts'] = list(hosts)
    # Calculate threads number
    # start from sqrt(host count), but never drop below the CPU count
    config['ssh']['threads_num'] = floor(sqrt(len(hosts)))
    if config['ssh']['threads_num'] < cpu_count():
        config['ssh']['threads_num'] = cpu_count()

    if config['environment'].get('env_vars'):
        config['ssh']['env_vars'] = config['environment']['env_vars']
    # persist the resolved ssh settings back to the config file
    write_yaml_file(config['config_path'], config)

    # Make SSH connection pool
    ssh_pool = None
    if 'ansible' == config['connection_mode']:
        try:
            from tiden.ansiblepool import AnsiblePool
            ssh_pool = AnsiblePool(config['ssh'])
        except ImportError as e:
            log_put('ERROR: unable to import AnsiblePool: %s' % e)
            exit(1)
    elif 'paramiko' == config['connection_mode']:
        ssh_pool = SshPool(config['ssh'])
    elif 'local' == config['connection_mode']:
        # local mode runs every node on one machine, so nodes must be told
        # apart by host binding and unique ports
        config['ignite']['bind_to_host'] = True
        config['ignite']['unique_node_ports'] = True
        try:
            from tiden.localpool import LocalPool
            ssh_pool = LocalPool(config['ssh'])
        except ImportError as e:
            log_put('ERROR: unable to import LocalPool: %s' % e)
            exit(1)
        except NotImplementedError as e:
            log_put('ERROR: %s' % e)
            exit(1)
    else:
        log_put("ERROR: Unknown 'connection_mode' %s" %
                config['connection_mode'])
        exit(1)

    if ssh_pool:
        TidenFabric().setSshPool(ssh_pool)
        ssh_pool.connect()
    return ssh_pool
Exemplo n.º 12
0
def test_repeat_upload_artifacts_same_artifacts(temp_dir, prepare_artifacts_with_ssh_structure):
    """
    repeat usage upload_artifacts should not touch old artifacts
    """
    ssh = LocalPool(config['ssh'])
    ssh.connect()

    ssh_home_dir = join(config["ssh"]["home"], '127.0.0.1')
    art_dir = join(ssh_home_dir, 'artifacts')

    init_remote_hosts(ssh, config)
    upload_artifacts(ssh, config, prepare_artifacts_result)

    def creation_times():
        # sorted creation times of everything currently in the artifacts dir
        return sorted(c_time(join(art_dir, artifact)) for artifact in listdir(art_dir))

    previous_art_creation_times = creation_times()

    # don't unzip here twice, because we don't change test directory
    upload_artifacts(ssh, config, [])

    # nothing changed, so no artifact should have been re-copied
    assert previous_art_creation_times == creation_times()
Exemplo n.º 13
0
def test_repeat_upload_tar_artifacts(temp_dir, simple_structure):
    """
    upload and repack tar artifacts

    Fix: the original asserted ``not_found != []``, which PASSED precisely
    when artifacts were missing (and its failure message only makes sense
    the other way round). Every artifact must be present remotely.
    """
    global config

    ssh = LocalPool(config['ssh'])
    ssh.connect()

    init_remote_hosts(ssh, config)
    upload_artifacts(ssh, config, prepare_artifacts_result)

    # every configured artifact must exist under the remote suite var dir
    not_found = [art for art in config["artifacts"].keys() if not exists(join(config["remote"]["suite_var_dir"], art))]
    assert not_found == [], "Can't find artifacts: {}".format(', '.join(not_found))

    # a repacked artifact additionally carries the checksum file and the
    # extra inner directory; a plain one only keeps the source file
    for_repack = ['source_artifact.txt',
                  'tiden_repack_original.checksum.sha256',
                  'second_inner_dir']

    for name, art in config["artifacts"].items():
        assert join(config["remote"]["suite_var_dir"], name) == art["remote_path"]

        actual_dirs_list = listdir(art["remote_path"].replace("remote", "remote/127.0.0.1"))
        expected_dirs_list = for_repack if 'repack' in name else [for_repack[0]]
        assert sorted(actual_dirs_list) == sorted(expected_dirs_list)
Exemplo n.º 14
0
def test_class_decorator_process_tests(with_dec_classpath, local_config,
                                       tmpdir, mock_pm):
    """
    Run the mock suite modules (plain, with test configuration, and with a
    configuration subset) through TidenRunner and produce reports.
    """
    from tiden.result import Result
    from tiden.localpool import LocalPool
    from tiden.util import cfg
    from copy import deepcopy

    var_dir = str(tmpdir.mkdir('var'))
    suite_var_dir = str(tmpdir.join('var').mkdir('suite-mock'))
    xunit_file = str(tmpdir.join('var').join('xunit.xml'))
    tmpdir.join('var').join('xunit.xml').write('', ensure=True)
    config_path = tmpdir.join('var').join('config.yaml')
    report_path = 'report.yaml'

    config = deepcopy(local_config)
    config.update({
        'artifacts': {},
        'suite_var_dir': suite_var_dir,
        'suite_name': 'mock',
        'test_name': '*',
        'suite_dir': join(dirname(__file__), 'res', 'decorators', 'suites'),
        'remote': {
            'suite_var_dir': '',
        },
        'config_path': str(config_path),
    })
    cfg(config, 'pitr_enabled', 'True')
    cfg(config, 'load_factor', '0.1')

    ssh_pool = LocalPool(local_config['ssh'])
    res = Result(xunit_path=xunit_file)

    suite_dir = config['suite_dir']
    modules = {
        'mock.mock_test_module': {
            'path': '%s/mock/mock_test_module.py' % suite_dir,
            'module_short_name': 'mock_test_module',
        },
        'mock.mock_test_module_with_test_configuration': {
            'path': '%s/mock/mock_test_module_with_test_configuration.py' % suite_dir,
            'module_short_name': 'mock_test_module_with_test_configuration',
        },
        'mock.mock_test_module_with_test_configuration_subset': {
            'path': '%s/mock/mock_test_module_with_test_configuration_subset.py' % suite_dir,
            'module_short_name': 'mock_test_module_with_test_configuration_subset',
        },
    }

    from tiden.tidenrunner import TidenRunner
    ssh_pool.connect()
    runner = TidenRunner(config,
                         modules=modules,
                         ssh_pool=ssh_pool,
                         plugin_manager=mock_pm,
                         xunit_path=xunit_file)
    runner.process_tests()
    res = runner.get_tests_results()
    res.flush_xunit()
    res.create_testrail_report(config, report_file=str(report_path))
Exemplo n.º 15
0
    def _init(self):
        """
        Initialize share-storage state from ``environment.share_storage``:
        read share host/root/home, then build and connect a single-host SSH
        pool to the share host using the configured connection mode.

        Returns early (with a warning) when share storage is not fully
        configured; ``self.ssh`` is not assigned in that case.
        """
        share_config = self.config['environment'].get('share_storage', {})
        self.share_host = share_config.get('host')

        # optional override for how long to wait when removing remote folders
        if 'folder_remove_timeout' in share_config:
            self.folder_remove_timeout = int(
                share_config['folder_remove_timeout'])

        self.share_root = share_config.get('root')
        self.share_home = share_config.get('home')

        if not self.share_host or not self.share_home or not self.share_root:
            print_red(
                'WARNING: NasManager environment.share_storage configuration missing!'
            )
            return

        # single-threaded pool aimed at the one share host only;
        # credentials are reused from the main environment config
        config = {
            'ssh': {
                'threads_num':
                1,
                'hosts': [self.share_host],
                'default_timeout':
                SshPool.default_timeout,
                'username':
                self.config['environment'].get('username'),
                'private_key_path':
                self.config['environment'].get('private_key_path'),
                'home':
                self.share_root,
            },
        }

        if self.config['environment'].get('env_vars'):
            config['ssh']['env_vars'] = self.config['environment']['env_vars']

        # Make SSH connection pool
        # NOTE(review): unlike init_ssh_pool, an unknown connection_mode is
        # silently ignored here (no else branch) — confirm this is intended
        connection_mode = self.config['connection_mode']
        if 'ansible' == connection_mode:
            try:
                from tiden.ansiblepool import AnsiblePool

                self.ssh = AnsiblePool(config['ssh'])
            except ImportError as e:
                log_put('Error: unable to import AnsiblePool: %s' % e)
                exit(1)
        elif 'paramiko' == connection_mode:
            self.ssh = SshPool(config['ssh'])
        elif 'local' == connection_mode:
            try:
                from tiden.localpool import LocalPool
                self.ssh = LocalPool(config['ssh'])
            except ImportError as e:
                log_put('Error: unable to import LocalPool: %s' % e)
                exit(1)
            except NotImplementedError as e:
                log_put('Error: %s' % e)
                exit(1)

        if self.ssh:
            self.ssh.connect()
Exemplo n.º 16
0
def test_runner_skipped_configurations(with_dec_classpath, local_config, tmpdir, mock_pm):
    """
    Test configurations correctly passed to TestRail report for skipped tests
    :return:
    """
    var_dir = _ensure_var_dir(tmpdir)
    xunit_file = _ensure_xunit_file_empty(var_dir)
    testrail_report_file = _ensure_tr_report_file_empty(var_dir)

    suite_var_dir = str(var_dir.mkdir('suite-mock'))
    config_path = str(var_dir.join('config.yaml'))
    source = 'mock_test_module_with_test_configuration'
    suite = 'mock'
    module_name = 'suites.%s.%s.MockTestModuleWithTestConfiguration' % (suite, source)
    test_prefix = module_name + '.'

    config = deepcopy(local_config)

    # configuration options chosen so that exactly one test gets skipped
    # (zookeeper disabled) while the other passes
    config.update({
        'artifacts': {},
        # 'attrib': 'test_runner',
        # 'attr_match': 'any',
        'suite_var_dir': suite_var_dir,
        'suite_dir': join(dirname(__file__), 'res', 'decorators', 'suites'),
        'remote': {
            'suite_var_dir': suite_var_dir,
        },
        'config_path': config_path,
        'zookeeper_enabled': False,
        'pitr_enabled': False,
        'compaction_enabled': True,
    })

    ssh_pool = LocalPool(local_config['ssh'])
    test_module_source_file_name = '%s/%s/%s.py' % (config['suite_dir'], suite, source)

    modules = {
        '%s.%s' % (suite, source): {
            'path': test_module_source_file_name,
            'module_short_name': source,
        }
    }
    # the configuration suffix that the runner appends to every test name
    test_configuration = '(pitr_enabled=false, compaction_enabled=true, zookeeper_enabled=false)'
    expected_configuration_options = ['pitr_enabled', 'compaction_enabled', 'zookeeper_enabled']
    expected_result = {
        'test_main':
            {'status': 'pass', 'type': None, 'message': None},
        'test_zookeeper_only':
            {'status': 'skipped', 'type': 'skipped cause of config.zookeeper_enabled is False', 'message': None},
    }

    expected_statuses_count = {'pass': 1,
                               'fail': 0,
                               'error': 0,
                               'skip': 1,
                               'total': len(expected_result)}

    from tiden.tidenfabric import TidenFabric
    TidenFabric().reset().setConfig(config)
    tr = TidenRunner(config, modules=modules, ssh_pool=ssh_pool, plugin_manager=mock_pm, xunit_path=xunit_file)
    tr.process_tests()
    res = tr.get_tests_results()
    res.create_testrail_report(config, report_file=basename(testrail_report_file))
    _tests = res.get_tests()
    print(_tests)

    # validate raw test results
    assert len(_tests) == len(expected_result)

    for test_to_check in expected_result.keys():
        status, error_type, message, test_name = res.get_test_details('{}{}{}'.format(test_prefix, test_to_check, test_configuration))
        assert expected_result[test_to_check].get('status') == status
        assert expected_result[test_to_check].get('type') == error_type
        if expected_result[test_to_check].get('message') is None:
            assert message is None
        else:
            assert expected_result[test_to_check].get('message') == message \
                   or expected_result[test_to_check].get('message') in message

    for status, count in expected_statuses_count.items():
        assert res.get_tests_num(status) == count

    # validate generated TestRail .yaml report
    tr_report = read_yaml_file(testrail_report_file)
    assert type({}) == type(tr_report)
    assert len(_tests) == len(tr_report)
    for test_run, test in tr_report.items():
        assert 'suite_run_id' in test
        assert 'test_run_id' in test
        assert test_run == test['test_run_id']
        assert 'module' in test
        assert test['module'] == module_name
        assert 'test_configuration_options' in test
        assert expected_configuration_options == test['test_configuration_options']
        assert 'function' in test
        assert test['function'] in expected_result.keys()
        expected_test_result = expected_result[test['function']]
        expected_status = res.util_status_to_testrail_status(expected_test_result['status'])
        assert 'last_status' in test
        assert expected_status == test['last_status']

        # a test message will be either in 'message' or 'type' if 'message' is None
        assert 'asserts' in test
        assert type([]) == type(test['asserts'])

        # currently Tiden generates only one assert per test
        assert len(test['asserts']) == 1
        assert type({}) == type(test['asserts'][0])
        assert 'status' in test['asserts'][0]
        assert expected_status == test['asserts'][0]['status']

        expected_assert_message = expected_test_result['message'] if expected_test_result['message'] is not None else \
        expected_test_result['type']
        if expected_assert_message is not None:
            assert res.util_filter_escape_seqs(expected_assert_message) in test['asserts'][0]['message']

    # check all test run id's are unique
    test_run_ids = [test['test_run_id'] for test in tr_report.values()]
    assert len(test_run_ids) == len(set(test_run_ids))

    # check all suite run id is the same
    suite_run_ids = set([test['suite_run_id'] for test in tr_report.values()])
    assert 1 == len(suite_run_ids)
Exemplo n.º 17
0
def test_runner_repeated_test_continue_on_fail(with_dec_classpath, local_config, tmpdir, mock_pm):
    """
    This test is for testing test option repeated_test_continue_on_fail. It should have higher priority than decorator.
    If it passed through test options (like this: -to=repeated_test_continue_on_fail=True) then test with repeated test
    decorators will be executed even if some iteration will be failed.
    :return:
    """
    var_dir = _ensure_var_dir(tmpdir)
    xunit_file = _ensure_xunit_file_empty(var_dir)
    suite_var_dir = str(var_dir.mkdir('suite-mock'))
    config_path = str(var_dir.join('config.yaml'))

    test_prefix = 'suites.mock3.mock_test_module_with_decorators.MockTestModuleWithDecorators.'
    iterations = 5
    config = deepcopy(local_config)

    # repeated_test + repeated_test_continue_on_fail injected via config —
    # this must override whatever the decorators say
    config.update({
        'artifacts': {},
        'suite_var_dir': suite_var_dir,
        'suite_dir': join(dirname(__file__), 'res', 'decorators', 'suites'),
        'remote': {
            'suite_var_dir': suite_var_dir,
        },
        'config_path': str(config_path),
        'repeated_test': iterations,
        'repeated_test_continue_on_fail': True
    })

    ssh_pool = LocalPool(local_config['ssh'])
    modules = {
        'mock3.mock_test_module_with_decorators': {
            'path': '%s/mock3/mock_test_module_with_decorators.py' % config['suite_dir'],
            'module_short_name': 'mock_test_module_with_decorators',
        }
    }
    # per-test expectations: final status plus the per-iteration remote
    # directories the runner is expected to create (5 iterations each)
    expected_result = {
        'test_not_repeated_test': {
            'status': 'pass',
            'type': None,
            'message': None,
            'remote_dirs': [
                'test_not_repeated_test_1',
                'test_not_repeated_test_2',
                'test_not_repeated_test_3',
                'test_not_repeated_test_4',
                'test_not_repeated_test_5',
            ],
        },
        'test_repeated_test': {
            'status': 'pass',
            'type': None,
            'message': None,
            'remote_dirs': [
                'test_repeated_test_1',
                'test_repeated_test_2',
                'test_repeated_test_3',
                'test_repeated_test_4',
                'test_repeated_test_5',
            ],
        },
        'test_with_repeated_test_and_full_test_names': {
            'status': 'pass',
            'type': None,
            'message': None,
            'remote_dirs': [
                'test_with_repeated_test_and_full_test_names_first',
                'test_with_repeated_test_and_full_test_names_second',
                'test_with_repeated_test_and_full_test_names_3',
                'test_with_repeated_test_and_full_test_names_4',
                'test_with_repeated_test_and_full_test_names_5',
            ],
        },
        'test_with_repeated_test_and_not_full_test_names': {
            'status': 'pass',
            'type': None,
            'message': None,
            'remote_dirs': [
                'test_with_repeated_test_and_not_full_test_names_example',
                'test_with_repeated_test_and_not_full_test_names_2',
                'test_with_repeated_test_and_not_full_test_names_3',
                'test_with_repeated_test_and_not_full_test_names_4',
                'test_with_repeated_test_and_not_full_test_names_5',
            ],
        },
        # this one fails on iteration 3, but thanks to continue_on_fail all
        # 5 iteration directories must still exist
        'test_with_repeated_test_and_fail_on_iteration_3': {
            'status': 'fail',
            'type': 'TidenException',
            'message': 'TidenException(\'Exception on iteration 3\')',
            'test_name': 'test_with_repeated_test_and_fail_on_iteration_3',
            'remote_dirs': ['test_with_repeated_test_and_fail_on_iteration_3_first',
                            'test_with_repeated_test_and_fail_on_iteration_3_second',
                            'test_with_repeated_test_and_fail_on_iteration_3_3',
                            'test_with_repeated_test_and_fail_on_iteration_3_4',
                            'test_with_repeated_test_and_fail_on_iteration_3_5'
                            ]
        },
    }

    expected_statuses_count = {'pass': len(expected_result) - 1,
                               'fail': 1,
                               'error': 0,
                               'skip': 0,
                               'total': len(expected_result)}

    tr = TidenRunner(config, modules=modules, ssh_pool=ssh_pool, plugin_manager=mock_pm, xunit_path=xunit_file)
    tr.process_tests()
    res = tr.get_tests_results()
    _tests = res.get_tests()
    print(_tests)

    # to test tests execution we check:
    # 1. correct result in the Results.
    # 2. to test correct test execution we check correct directory creation + correct logs files generation.
    assert len(_tests) == len(expected_result)

    for test_to_check in expected_result.keys():
        status, error_type, message, test_name = res.get_test_details('{}{}'.format(test_prefix, test_to_check))
        assert expected_result[test_to_check].get('status') == status
        assert expected_result[test_to_check].get('type') == error_type
        assert expected_result[test_to_check].get('message') == message \
               or expected_result[test_to_check].get('message') in message
        assert test_name is not None
        assert test_name in expected_result[test_to_check].get('test_name', test_to_check)

        # Also check directory and log file exist
        iteration = 0
        for remote_directory in expected_result[test_to_check].get('remote_dirs'):
            iteration += 1
            log_file = '{}/{}/{}/{}_iteration_{}.log'.format(config['rt']['remote']['test_module_dir'],
                                                             config['rt']['test_class'], remote_directory,
                                                             test_name, iteration)
            assert exists(log_file)

    for status, count in expected_statuses_count.items():
        assert res.get_tests_num(status) == count
0
def test_runner_collect(with_dec_classpath, local_config, tmpdir, mock_pm):
    """
    collect_tests should discover every test over the configuration matrix
    (12 tests for this mock module), while process_tests with one concrete
    configuration pinned in the config should execute only the 2 tests that
    match it.
    """
    var_dir = _ensure_var_dir(tmpdir)
    xunit_file_collect = _ensure_xunit_file_empty(var_dir, '-collect')
    xunit_file_process = _ensure_xunit_file_empty(var_dir, '-process')
    testrail_report_file_collect = _ensure_tr_report_file_empty(var_dir, '-collect')
    testrail_report_file_process = _ensure_tr_report_file_empty(var_dir, '-process')

    suite_var_dir = str(var_dir.mkdir('suite-mock'))
    config_path = str(var_dir.join('config.yaml'))

    source = 'mock_test_module_with_test_configuration'
    suite = 'mock'

    config = deepcopy(local_config)

    # Pin one concrete test configuration for the 'process' phase; only the
    # tests matching (pitr=off, compaction=on, zookeeper=off) should run.
    config.update({
        'artifacts': {},
        'suite_var_dir': suite_var_dir,
        'suite_dir': join(dirname(__file__), 'res', 'decorators', 'suites'),
        'remote': {
            'suite_var_dir': suite_var_dir,
        },
        'config_path': config_path,
        'zookeeper_enabled': False,
        'pitr_enabled': False,
        'compaction_enabled': True,
    })

    ssh_pool = LocalPool(local_config['ssh'])
    test_module_source_file_name = '%s/%s/%s.py' % (config['suite_dir'], suite, source)

    modules = {
        '%s.%s' % (suite, source): {
            'path': test_module_source_file_name,
            'module_short_name': source,
        }
    }

    from tiden.tidenfabric import TidenFabric
    TidenFabric().reset().setConfig(config)

    # Phase 1: collection only - all configuration variants must be reported.
    tr = TidenRunner(config, modules=modules, ssh_pool=ssh_pool, plugin_manager=mock_pm, xunit_path=xunit_file_collect)
    tr.collect_tests()
    res = tr.get_tests_results()
    res.update_xunit()
    res.create_testrail_report(config, report_file=basename(testrail_report_file_collect))
    _tests = res.get_tests()
    assert 12 == len(_tests)
    print(_tests)

    # Phase 2: execution - only the tests of the pinned configuration run.
    TidenFabric().reset().setConfig(config)
    tr = TidenRunner(config, modules=modules, ssh_pool=ssh_pool, plugin_manager=mock_pm, xunit_path=xunit_file_process)
    tr.process_tests()
    res = tr.get_tests_results()
    res.create_testrail_report(config, report_file=basename(testrail_report_file_process))
    _tests = res.get_tests()
    assert 2 == len(_tests)
    print(_tests)
# Exemplo n.º 19
# 0
def test_runner_repeated_decorator(with_dec_classpath, local_config, tmpdir, mock_pm):
    """
    This test is for repeated_test decorator. Checks that reporting and execution correspond to repeated_test decorator
    logic:
    1. Test executes as many times as mentioned in decorator or if it fails execution stops.
    2. If test passed during all its iterations it marks as pass and shows as one test in results.
    3. Test uses its unique remote directory (this is the decorator logic).
    4. If test fails in some iteration it shows as one failed test in test results and its name changed to one that
    contains iteration marker (ex. test_one -> test_one_iteration_5).
    :return:
    """
    var_dir = _ensure_var_dir(tmpdir)
    xunit_file = _ensure_xunit_file_empty(var_dir)
    suite_var_dir = str(var_dir.mkdir('suite-mock'))
    config_path = str(var_dir.join('config.yaml'))

    module_name = 'suites.mock3.mock_test_module_with_decorators.MockTestModuleWithDecorators'
    test_prefix = module_name + '.'

    config = deepcopy(local_config)

    config.update({
        'artifacts': {},
        'suite_var_dir': suite_var_dir,
        'suite_dir': join(dirname(__file__), 'res', 'decorators', 'suites'),
        'remote': {
            'suite_var_dir': suite_var_dir,
        },
        'config_path': config_path,
    })

    ssh_pool = LocalPool(local_config['ssh'])
    modules = {
        'mock3.mock_test_module_with_decorators': {
            'path': '%s/mock3/mock_test_module_with_decorators.py' % config['suite_dir'],
            'module_short_name': 'mock_test_module_with_decorators',
        }
    }
    # Per-test expectations: status/type/message as reported, plus the remote
    # directories the decorator must create (one per executed iteration).
    expected_result = {
        'test_not_repeated_test': {
            'status': 'pass',
            'type': None,
            'message': None,
            'remote_dirs': ['test_not_repeated_test'],
        },
        'test_repeated_test': {
            'status': 'pass',
            'type': None,
            'message': None,
            'remote_dirs': ['test_repeated_test_1',
                            'test_repeated_test_2'],
        },
        'test_with_repeated_test_and_full_test_names': {
            'status': 'pass',
            'type': None,
            'message': None,
            'remote_dirs': ['test_with_repeated_test_and_full_test_names_first',
                            'test_with_repeated_test_and_full_test_names_second'],
        },
        'test_with_repeated_test_and_not_full_test_names': {
            'status': 'pass',
            'type': None,
            'message': None,
            'remote_dirs': ['test_with_repeated_test_and_not_full_test_names_example',
                            'test_with_repeated_test_and_not_full_test_names_2'],
        },
        'test_with_repeated_test_and_fail_on_iteration_3_iteration_3': {
            'status': 'fail',
            'type': 'TidenException',
            'message': 'TidenException(\'Exception on iteration 3\')',
            'test_name': 'test_with_repeated_test_and_fail_on_iteration_3',
            'remote_dirs': ['test_with_repeated_test_and_fail_on_iteration_3_first',
                            'test_with_repeated_test_and_fail_on_iteration_3_second',
                            'test_with_repeated_test_and_fail_on_iteration_3_3'],
        },
    }

    expected_statuses_count = {'pass': len(expected_result) - 1,
                               'fail': 1,
                               'error': 0,
                               'skip': 0,
                               'total': len(expected_result)}

    tr = TidenRunner(config, modules=modules, ssh_pool=ssh_pool, plugin_manager=mock_pm, xunit_path=xunit_file)
    tr.process_tests()
    res = tr.get_tests_results()
    _tests = res.get_tests()
    print(_tests)

    # to test tests execution we check:
    # 1. correct result in the Results.
    # 2. to test correct test execution we check correct directory creation + correct logs files generation.
    assert len(_tests) == len(expected_result)

    for test_to_check in expected_result.keys():
        expected = expected_result[test_to_check]
        status, error_type, message, test_name = res.get_test_details('{}{}'.format(test_prefix, test_to_check))
        assert expected.get('status') == status
        assert expected.get('type') == error_type
        # None-safe message check (mirrors test_runner_basic): the former
        # `expected in message` would raise TypeError for a None expectation
        # combined with a non-None actual message, instead of failing cleanly.
        if expected.get('message') is None:
            assert message is None
        else:
            assert expected.get('message') == message \
                   or expected.get('message') in message
        assert test_name is not None
        assert test_name in expected.get('test_name', test_to_check)

        # Also check directory and log file exist for every iteration.
        for iteration, remote_directory in enumerate(expected.get('remote_dirs'), start=1):
            log_file = '{}/{}/{}/{}_iteration_{}.log'.format(config['rt']['remote']['test_module_dir'],
                                                             config['rt']['test_class'], remote_directory, test_name,
                                                             iteration)
            assert exists(log_file)

    for status, count in expected_statuses_count.items():
        assert res.get_tests_num(status) == count
# Exemplo n.º 20
# 0
def test_runner_basic(with_dec_classpath, local_config, tmpdir, mock_pm):
    """
    Just test that after TidenRunner execution we've got correct test results and correct exceptions in the
    failed tests, and that the generated TestRail .yaml report is consistent.
    :return:
    """
    var_dir = _ensure_var_dir(tmpdir)
    xunit_file = _ensure_xunit_file_empty(var_dir)
    testrail_report_file = _ensure_tr_report_file_empty(var_dir)

    suite_var_dir = str(var_dir.mkdir('suite-mock'))
    config_path = str(var_dir.join('config.yaml'))
    suite = 'mock3'
    module_short_name = 'mock_test_module_with_exceptions'
    module_class_name = 'MockTestModuleWithExceptions'
    module_name = 'suites.%s.%s.%s' % (suite, module_short_name, module_class_name)
    test_prefix = module_name + '.'

    config = deepcopy(local_config)

    config.update({
        'artifacts': {},
        'attrib': 'test_runner',
        'attr_match': 'any',
        'suite_var_dir': suite_var_dir,
        'suite_dir': join(dirname(__file__), 'res', 'decorators', 'suites'),
        'remote': {
            'suite_var_dir': suite_var_dir,
        },
        'config_path': config_path,
    })

    ssh_pool = LocalPool(local_config['ssh'])
    test_module_source_file_name = '%s/%s/%s.py' % (config['suite_dir'], suite, module_short_name)

    modules = {
        '%s.%s' % (suite, module_short_name): {
            'path': test_module_source_file_name,
            'module_short_name': module_short_name,
        }
    }
    expected_result = {
        'test_should_pass':
            {'status': 'pass', 'type': None, 'message': None},
        'test_passed_with_result_message':
            {'status': 'pass', 'type': None, 'message': 'WOO-HOO'},
        'test_should_fail':
            {'status': 'fail', 'type': 'TidenException', 'message': 'TidenException(\'Fake exception in test\')'},
        'test_should_be_skipped':
            {'status': 'skipped',
             'type': 'skipped cause of expression evaluates to False at %s:45' % test_module_source_file_name,
             'message': None},
        'test_should_be_not_started':
            {'status': 'skipped_no_start',
             'type': 'skipped cause of attrib mismatch',
             'message': None},
        'test_with_exception_in_setup':
            {'status': 'fail', 'type': 'TidenException', 'message': 'TidenException(\'Exception in test setup\')'},
        'test_pass_with_exception_in_teardown':
            {'status': 'pass', 'type': None, 'message': None},
        'test_fail_with_exception_in_teardown':
            {'status': 'fail', 'type': 'TidenException', 'message': 'TidenException(\'Fake exception in test\')'},
        'test_should_fail_with_error':
            {'status': 'error', 'type': 'OSError', 'message': 'IOError(\'Fake IO exception in test\')'},
    }

    expected_statuses_count = {'pass': 3,
                               'fail': 3,
                               'error': 1,
                               'skip': 2,
                               'total': len(expected_result)}

    tr = TidenRunner(config, modules=modules, ssh_pool=ssh_pool, plugin_manager=mock_pm, xunit_path=xunit_file)
    tr.process_tests()
    res = tr.get_tests_results()
    _tests = res.get_tests()
    print(_tests)

    # validate raw test results
    assert len(_tests) == len(expected_result)

    for test_to_check in expected_result.keys():
        status, error_type, message, test_name = res.get_test_details('{}{}'.format(test_prefix, test_to_check))
        assert expected_result[test_to_check].get('status') == status
        assert expected_result[test_to_check].get('type') == error_type
        if expected_result[test_to_check].get('message') is None:
            assert message is None
        else:
            assert expected_result[test_to_check].get('message') == message \
                   or expected_result[test_to_check].get('message') in message

    for status, count in expected_statuses_count.items():
        assert res.get_tests_num(status) == count

    # validate generated TestRail .yaml report
    res.create_testrail_report(config, report_file=basename(testrail_report_file))
    tr_report = read_yaml_file(testrail_report_file)
    # use isinstance for type checks (PEP 8) instead of comparing type objects
    assert isinstance(tr_report, dict)
    assert len(_tests) == len(tr_report)
    for test_run, test in tr_report.items():
        assert 'suite_run_id' in test
        assert 'test_run_id' in test
        assert test_run == test['test_run_id']
        assert 'module' in test
        assert test['module'] == module_name
        assert 'test_configuration_options' in test
        assert [] == test['test_configuration_options']
        assert 'function' in test
        assert test['function'] in expected_result.keys()
        expected_test_result = expected_result[test['function']]
        expected_status = res.util_status_to_testrail_status(expected_test_result['status'])
        assert 'last_status' in test
        assert expected_status == test['last_status']

        # a test message will be either in 'message' or 'type' if 'message' is None
        assert 'asserts' in test
        assert isinstance(test['asserts'], list)

        # currently Tiden generates only one assert per test
        assert len(test['asserts']) == 1
        assert isinstance(test['asserts'][0], dict)
        assert 'status' in test['asserts'][0]
        assert expected_status == test['asserts'][0]['status']

        expected_assert_message = (expected_test_result['message']
                                   if expected_test_result['message'] is not None
                                   else expected_test_result['type'])
        if expected_assert_message is not None:
            assert res.util_filter_escape_seqs(expected_assert_message) in test['asserts'][0]['message']

    # check all test run id's are unique
    test_run_ids = [test['test_run_id'] for test in tr_report.values()]
    assert len(test_run_ids) == len(set(test_run_ids))

    # check all suite run id is the same
    suite_run_ids = set([test['suite_run_id'] for test in tr_report.values()])
    assert 1 == len(suite_run_ids)
# Exemplo n.º 21
# 0
class NasManager:

    # private SshPool to work with NAS
    ssh = None

    # NAS local root available for Tiden
    share_root = None

    # NAS IP address
    share_host = None

    # NAS mount point at non-NAS hosts
    share_mount_home = None

    folder_remove_timeout = 60

    def __init__(self, config):
        """
        Keep a reference to the Tiden config and immediately try to set up
        the NAS SSH connection pool (see :meth:`_init`).

        :param config: full Tiden configuration dict; the relevant part is
            ``environment.share_storage``
        """
        self.config = config
        self._init()

    def _init(self):
        """
        Read the ``environment.share_storage`` section of the config and, if
        it is complete (host, root, home), open an SSH connection pool to the
        NAS host using the configured ``connection_mode``.

        On missing share configuration prints a warning and returns, leaving
        ``self.ssh`` as None (i.e. :meth:`is_configured` stays False).
        """
        share_config = self.config['environment'].get('share_storage', {})
        self.share_host = share_config.get('host')

        # optional override for the default 60s folder removal timeout
        if 'folder_remove_timeout' in share_config:
            self.folder_remove_timeout = int(
                share_config['folder_remove_timeout'])

        self.share_root = share_config.get('root')
        # NOTE(review): the class declares `share_mount_home`, but this method
        # assigns `share_home` (also read by get_share_mount_point) - confirm
        # which attribute name is intended.
        self.share_home = share_config.get('home')

        if not self.share_host or not self.share_home or not self.share_root:
            print_red(
                'WARNING: NasManager environment.share_storage configuration missing!'
            )
            return

        # Dedicated single-threaded SSH pool config targeting only the NAS
        # host, with the share root as the remote home directory.
        config = {
            'ssh': {
                'threads_num':
                1,
                'hosts': [self.share_host],
                'default_timeout':
                SshPool.default_timeout,
                'username':
                self.config['environment'].get('username'),
                'private_key_path':
                self.config['environment'].get('private_key_path'),
                'home':
                self.share_root,
            },
        }

        if self.config['environment'].get('env_vars'):
            config['ssh']['env_vars'] = self.config['environment']['env_vars']

        # Make SSH connection pool. Backend classes are imported lazily so an
        # optional backend is only required when actually selected; a failed
        # import aborts the run.
        connection_mode = self.config['connection_mode']
        if 'ansible' == connection_mode:
            try:
                from tiden.ansiblepool import AnsiblePool

                self.ssh = AnsiblePool(config['ssh'])
            except ImportError as e:
                log_put('Error: unable to import AnsiblePool: %s' % e)
                exit(1)
        elif 'paramiko' == connection_mode:
            self.ssh = SshPool(config['ssh'])
        elif 'local' == connection_mode:
            try:
                from tiden.localpool import LocalPool
                self.ssh = LocalPool(config['ssh'])
            except ImportError as e:
                log_put('Error: unable to import LocalPool: %s' % e)
                exit(1)
            except NotImplementedError as e:
                log_put('Error: %s' % e)
                exit(1)

        # connect only when a pool was actually created
        if self.ssh:
            self.ssh.connect()

    def get_share_mount_point(self):
        """Return the share path as mounted on the non-NAS hosts."""
        return self.share_home

    def get_share_root(self):
        """Return the share root path local to the NAS host."""
        return self.share_root

    def is_configured(self):
        """Return True when the NAS SSH pool was successfully created."""
        return self.ssh is not None

    def remove_shared_folder(self, folder):
        """
        Remove a folder from the NAS share.

        :param folder: folder path as seen through the share mount point
        :return: True when the remote command reported completion
        :raises NasManagerException: when the NAS is not configured
        """
        if not self.is_configured():
            raise NasManagerException('Shared folder not configured')

        folder = folder.strip()
        # refuse to wipe the share root itself
        if folder in ('', '/'):
            print_red('WARNING: attempt to remove NAS root!')
            return False

        # translate the mount-point path into the NAS-local path
        share_path = folder.replace(self.share_home, self.share_root)
        command = ('if [ -d "{ss}" ]; then '
                   '  rm -fr {ss}; '
                   'fi && echo "Done"').format(ss=share_path)
        print_blue('Going to remove shared storage: %s.' % share_path)
        result = self.ssh.exec([command], timeout=self.folder_remove_timeout)
        log_print(result)
        return 'Done' in result[self.share_host][0]

    def create_shared_folder(self, folder, cleanup=True):
        """
        Create a folder on the NAS share and return its absolute path as
        seen through the share mount point.

        :param folder: folder name relative to the share root
        :param cleanup: set to False if an existing directory should be kept
            instead of being wiped and recreated
        :return: mount-point based path of the folder
        :raises NasManagerException: when the NAS is not configured
        """
        if not self.is_configured():
            raise NasManagerException(
                'NasManager (shared folder) is not configured')

        share_path = self.get_share_root() + '/' + folder
        log_print(
            'Going to create shared storage: {}'.format(share_path),
            color='debug')

        # Behaviour when the directory already exists: either wipe and
        # recreate it (cleanup=True) or just announce that it is reused.
        if cleanup:
            on_existing = ('  echo "Cleaning up {ss}"; '
                           '  rm -fr {ss}; '
                           '  echo "Recreating {ss}"; '
                           '  mkdir -p {ss} 2>&1; ')
        else:
            on_existing = '  echo "Recreating {ss}"; '

        command = ('if [ -d "{ss}" ]; then '
                   + on_existing +
                   'else '
                   '  echo "Creating {ss}";'
                   '  mkdir -p {ss} 2>&1; '
                   'fi; '
                   'echo "Result code: $?";').format(ss=share_path)
        result = self.ssh.exec([command])
        log_print(result, color='debug')

        return self.share_home + '/' + folder

    def touch_file(self, file):
        """
        Create (or update the mtime of) a file on the NAS share.

        :param file: file name relative to the share root
        :return: mount-point based path of the file
        :raises NasManagerException: when the NAS is not configured
        """
        if not self.is_configured():
            raise NasManagerException(
                'NasManager (shared folder) is not configured')

        shared_file = self.get_share_root() + '/' + file
        log_print('Going to touch shared file: %s.' % shared_file)
        self.ssh.exec(['touch {sf}; '
                       'echo "Result code: $?";'.format(sf=shared_file)])
        return self.share_home + '/' + file

    def delete_file(self, file):
        if not self.is_configured():
            raise NasManagerException(
                'NasManager (shared folder) is not configured')

        shared_file = self.get_share_root() + '/' + file
        print_blue('Going to delete shared file: %s.' % shared_file)
        command = 'rm -f {sf}; ' \
                  'echo "Result code: $?";'.format(sf=shared_file)
        result = self.ssh.exec([command])
        log_print(result)