Beispiel #1
0
def repeat_upload_changed_artifacts(temp_dir, prepare_artifacts_with_ssh_structure):
    """
    A changed source artifact must be re-uploaded on a second run even
    without the --clean option: upload, modify the artifact via repack,
    upload again, and verify the repacked archive now holds the new file.
    """
    global config

    ssh = LocalPool(config['ssh'])
    ssh.connect()

    host_home = join(config["ssh"]["home"], '127.0.0.1')
    remote_art_dir = join(host_home, 'artifacts')

    init_remote_hosts(ssh, config)
    upload_artifacts(ssh, config, prepare_artifacts_result)

    # Change the source artifact by injecting an extra file through a repack rule.
    injected_file = join(config["artifacts_dir"], "new_file.txt")
    with open(injected_file, "w") as fh:
        fh.write("Things changed")

    config["artifacts"]["source_zip"]["repack"].append("copy {} self:/".format(injected_file))
    new_prepare_artifacts_result, config = prepare(config)
    upload_artifacts(ssh, config, new_prepare_artifacts_result)

    # The freshly uploaded archive must contain the injected file too.
    expected_files = ['additional_artifact.txt', 'new_file.txt', 'source_artifact.txt',
                      'tiden_repack_original.checksum.sha256']
    repacked_zip_path = join(remote_art_dir, basename(config["artifacts"]["source_zip"]["path"]))
    with ZipFile(repacked_zip_path, 'r') as repack_zip:
        actual_files_names = [member.filename for member in repack_zip.filelist]
    assert sorted(actual_files_names) == sorted(expected_files)
Beispiel #2
0
def test_repeat_upload_artifacts_changed_artifacts(temp_dir, prepare_artifacts_with_ssh_structure):
    """
    repeat usage upload_artifacts with changed artifacts should copy only changed artifacts
    """

    ssh = LocalPool(config['ssh'])
    ssh.connect()

    host_home = join(config["ssh"]["home"], '127.0.0.1')
    remote_art_dir = join(host_home, 'artifacts')

    init_remote_hosts(ssh, config)
    upload_artifacts(ssh, config, prepare_artifacts_result)

    # Snapshot creation times of every uploaded artifact before the change.
    before = {name: c_time(join(remote_art_dir, name)) for name in listdir(remote_art_dir)}

    # Overwrite exactly one artifact locally.
    changed_name = config['_test_artifacts']["additional"]["file"]["name"]
    with open(join(config["artifacts_dir"], changed_name), "w") as fh:
        fh.write("Things changed")

    upload_artifacts(ssh, config, prepare_artifacts_result)

    # Only the modified artifact may have a new creation time.
    for name in listdir(remote_art_dir):
        current_time = c_time(join(remote_art_dir, name))
        if name == changed_name:
            assert current_time != before[name]
        else:
            assert current_time == before[name]
Beispiel #3
0
def test_upload_artifacts(temp_dir, prepare_artifacts_with_ssh_structure):
    """
    upload_artifacts should copy artifacts and unzip archives
    """
    ssh = LocalPool(config['ssh'])
    ssh.connect()

    host_home = join(config["ssh"]["home"], '127.0.0.1')

    init_remote_hosts(ssh, config)
    upload_artifacts(ssh, config, prepare_artifacts_result)

    # Remote home holds exactly the artifacts dir and the test var dir.
    assert sorted(listdir(host_home)) == sorted(["artifacts", 'test-0'])

    # Artifacts dir: repacked source archive plus both additional artifacts.
    expected_artifact_names = [
        config['_test_artifacts']["source"]["arch"]["repack_name"],
        config['_test_artifacts']["additional"]["file"]["name"],
        config['_test_artifacts']["additional"]["arch"]["name"],
    ]
    assert sorted(listdir(join(host_home, 'artifacts'))) == sorted(expected_artifact_names)

    # Every artifact flagged remote_unzip must be unpacked into the test dir.
    unzipped_names = [name for name, art in config["artifacts"].items() if art.get("remote_unzip", False)]
    assert sorted(listdir(join(host_home, 'test-0'))) == sorted(unzipped_names)
Beispiel #4
0
def test_init_remote_hosts_clean_all(temp_dir, prepare_remote_structure):
    """
    init_remote_hosts should remove all var folders with --clean=all option
    """
    ssh = LocalPool(config['ssh'])
    ssh.connect()

    init_remote_hosts(ssh, config)

    host_home = join(config["ssh"]["home"], '127.0.0.1')
    assert sorted(listdir(host_home)) == sorted(["artifacts", 'test-0'])
    # Remember when the artifacts dir was created so recreation is detectable.
    first_artifacts_ctime = c_time(join(host_home, 'artifacts'))

    # Point the suite at a new var dir and request a full clean.
    config["remote"]["suite_var_dir"] = join(config["ssh"]["home"], 'test-1')
    config['clean'] = 'all'
    init_remote_hosts(ssh, config)

    # The old test-0 var dir is gone; only the fresh one remains.
    assert sorted(listdir(host_home)) == sorted(["artifacts", 'test-1'])

    # --clean=all recreates the artifacts directory as well.
    assert first_artifacts_ctime != c_time(join(host_home, 'artifacts'))
Beispiel #5
0
def test_init_remote_hosts_clean_none(temp_dir, prepare_remote_structure):
    """
    init_remote_hosts should add new tests folders without --clean option
    """
    ssh = LocalPool(config['ssh'])
    ssh.connect()

    init_remote_hosts(ssh, config)

    host_home = join(config["ssh"]["home"], '127.0.0.1')
    assert sorted(listdir(host_home)) == sorted(["artifacts", 'test-0'])
    # Snapshot creation times so we can prove nothing was recreated.
    artifacts_ctime = c_time(join(host_home, 'artifacts'))
    test0_ctime = c_time(join(host_home, 'test-0'))

    # Switch the suite to a new var dir without any clean option.
    config["remote"]["suite_var_dir"] = join(config["ssh"]["home"], 'test-1')
    init_remote_hosts(ssh, config)

    # The new var dir is added alongside the existing ones.
    assert sorted(listdir(host_home)) == sorted(["artifacts", 'test-0', 'test-1'])

    # Existing directories were left untouched.
    assert artifacts_ctime == c_time(join(host_home, 'artifacts'))
    assert test0_ctime == c_time(join(host_home, 'test-0'))
Beispiel #6
0
def test_repeat_upload_artifacts_same_artifacts(temp_dir, prepare_artifacts_with_ssh_structure):
    """
    repeat usage upload_artifacts should not touch old artifacts
    """
    ssh = LocalPool(config['ssh'])
    ssh.connect()

    host_home = join(config["ssh"]["home"], '127.0.0.1')
    remote_art_dir = join(host_home, 'artifacts')

    init_remote_hosts(ssh, config)
    upload_artifacts(ssh, config, prepare_artifacts_result)
    creation_times_before = [c_time(join(remote_art_dir, name)) for name in listdir(remote_art_dir)]

    # don't unzip here twice, because we don't change test directory
    upload_artifacts(ssh, config, [])
    creation_times_after = [c_time(join(remote_art_dir, name)) for name in listdir(remote_art_dir)]

    # No artifact was re-copied, so creation times are unchanged.
    assert sorted(creation_times_before) == sorted(creation_times_after)
Beispiel #7
0
def test_repeat_upload_tar_artifacts(temp_dir, simple_structure):
    """
    upload and repack tar artifacts

    Uploads every configured artifact, verifies each one exists in the
    remote suite var dir, and checks that repacked artifacts contain the
    extra repack files while plain ones carry only the source file.
    """
    global config

    ssh = LocalPool(config['ssh'])
    ssh.connect()

    init_remote_hosts(ssh, config)
    upload_artifacts(ssh, config, prepare_artifacts_result)

    not_found = [art for art in config["artifacts"].keys() if not exists(join(config["remote"]["suite_var_dir"], art))]
    # BUG FIX: the original asserted `not_found != []`, i.e. that some
    # artifacts were MISSING, which contradicts the failure message below.
    assert not_found == [], "Can't find artifacts: {}".format(', '.join(not_found))

    # Contents expected inside a repacked artifact; a non-repacked artifact
    # contains only the plain source file (the first entry).
    for_repack = ['source_artifact.txt',
                  'tiden_repack_original.checksum.sha256',
                  'second_inner_dir']

    for name, art in config["artifacts"].items():
        # Remote path recorded in the config must match the suite var dir layout.
        assert join(config["remote"]["suite_var_dir"], name) == art["remote_path"]

        # LocalPool mirrors the remote layout under remote/127.0.0.1 — TODO confirm
        # against the LocalPool fixture.
        actual_dirs_list = listdir(art["remote_path"].replace("remote", "remote/127.0.0.1"))
        expected_dirs_list = for_repack if 'repack' in name else [for_repack[0]]
        assert sorted(actual_dirs_list) == sorted(expected_dirs_list)
Beispiel #8
0
def main():
    """
    Run Tiden tests

    Orchestrates a full run: parse CLI args and build the config, prepare
    artifacts, optionally create an SSH pool and activate plugins, collect
    or execute tests via TidenRunner, then flush/print/report results.
    Exits with code 1 when no test modules match, or with the plugin-vetoed
    exit code when 'before_tests_run' fails.
    """
    log_print("*** Initialization ***", color='blue')
    log_print('(c) 2017-{} GridGain Systems. All Rights Reserved'.format(
        max(datetime.now().year, 2019)))
    log_print(version)
    # Set only on plugin veto below; falsy values fall through to a normal exit.
    exit_code = None

    # parse arguments,
    # load configuration,
    # initialize working directories
    config = TidenFabric().setConfig(setup_test_environment(
        process_args())).obj
    log_print('The configuration stored in %s' % config['config_path'])

    logger = _get_default_logger(config)
    # Make the current working directory importable so suite modules resolve.
    sys.path.insert(0, abspath(getcwd()))

    pm = PluginManager(config)

    # prepare artifacts, artifact information is updated into config
    # this must be done before tests collections,
    # because some tests are applicable for specific artifacts only
    log_print('*** Prepare artifacts ***', color='blue')
    pm.do('before_prepare_artifacts', config)
    remote_unzip_files, config = prepare(config)

    # NOTE(review): `collect_only` is not defined in this function — presumably
    # a module-level flag set during argument processing; verify upstream.
    if collect_only:
        # we don't run any test, so no ssh pool nor plugin manager required
        ssh_pool = None
        pm = None
    else:
        # otherwise, create ssh pool,
        # and prepare plugins to use it
        ssh_pool = init_ssh_pool(config)
        if pm.plugins:
            log_print('*** Plugins ***', color='blue')
            for name, plugin in pm.plugins.items():
                log_print("%s, version %s" %
                          (name, plugin['TIDEN_PLUGIN_VERSION']))
            pm.set(ssh=ssh_pool)

    # initialize tests runner
    log_print('*** Runner ***', color='blue')
    tr = TidenRunner(config,
                     collect_only=collect_only,
                     ssh_pool=ssh_pool,
                     plugin_manager=pm)
    if len(tr.modules.keys()) == 0:
        log_print("Error: no test modules found")
        exit(1)
    log_print(
        "%s module(s) matched %s.%s" %
        (len(tr.modules.keys()), config['suite_name'], config['test_name']))

    if collect_only:
        tr.collect_tests()
    else:
        # Full run: set up remote hosts, push artifacts, then execute tests
        # with the plugin hooks wrapped around each phase.
        pm.do('before_hosts_setup')
        init_remote_hosts(ssh_pool, config)

        pm.do('after_hosts_setup')
        upload_artifacts(ssh_pool, config, remote_unzip_files)

        if pm.do_check('before_tests_run'):
            tr.process_tests()
        else:
            # A plugin vetoed the run; remember the failure code for exit.
            exit_code = -1
        pm.do('after_tests_run')

    # Always emit results, even when the run was vetoed.
    result = tr.get_tests_results()
    result.flush_xunit()
    result.print_summary()
    result.create_testrail_report(config,
                                  report_file=config.get('testrail_report'))

    print_blue("Execution time %d:%02d:%02d " %
               hms(int(time()) - result.get_started()))

    if exit_code:
        exit(exit_code)