def repeat_upload_changed_artifacts(temp_dir, prepare_artifacts_with_ssh_structure):
    """
    Re-upload a changed artifact.

    After the source artifact is modified and prepare() is re-run without
    --clean, the artifact must be uploaded to the remote hosts again and the
    repacked zip must contain the newly added file.
    """
    global config

    pool = LocalPool(config['ssh'])
    pool.connect()
    remote_home = join(config["ssh"]["home"], '127.0.0.1')
    remote_art_dir = join(remote_home, 'artifacts')

    # first round: hosts initialized, original artifacts uploaded
    init_remote_hosts(pool, config)
    upload_artifacts(pool, config, prepare_artifacts_result)

    # mutate the source artifact so its content (and checksum) changes
    added_file = join(config["artifacts_dir"], "new_file.txt")
    with open(added_file, "w") as handle:
        handle.write("Things changed")
    config["artifacts"]["source_zip"]["repack"].append("copy {} self:/".format(added_file))

    # second round: re-prepare and re-upload without --clean
    repeated_result, config = prepare(config)
    upload_artifacts(pool, config, repeated_result)

    expected_files = ['additional_artifact.txt',
                      'new_file.txt',
                      'source_artifact.txt',
                      'tiden_repack_original.checksum.sha256']
    repacked_path = join(remote_art_dir, basename(config["artifacts"]["source_zip"]["path"]))
    with ZipFile(repacked_path, 'r') as repack_zip:
        found_names = sorted(entry.filename for entry in repack_zip.filelist)
    assert found_names == sorted(expected_files)
def prepare_test_artifacts():
    """
    Fixture: build the basic local artifact structure (including an entry
    taken from another zip) and run artifact preparation once, publishing
    the results via module-level globals.
    """
    global prepare_artifacts_result
    global config

    # local directory layout + basic artifacts with an extra zip entry
    config = make_basic_artifacts(make_local_structure({}),
                                  add_function=add_from_another_zip)
    # execute artifacts preparation
    prepare_artifacts_result, config = prepare(config)
def simple_structure():
    """
    Fixture: local structure with tar artifacts prepared, plus a localhost
    SSH configuration merged into config["ssh"]. Results are published via
    module-level globals.
    """
    global config
    global prepare_artifacts_result

    config = make_tar_files(make_local_structure({"clean": None}))
    prepare_artifacts_result, config = prepare(config)

    # single-host, single-thread local SSH setup
    config["ssh"].update({
        'hosts': ["127.0.0.1"],
        'username': '',
        'private_key_path': '',
        'threads_num': 1,
    })
def prepare_artifacts_with_ssh_structure():
    """
    Fixture: basic artifacts (with an entry from another zip) prepared,
    plus a localhost SSH configuration merged into config["ssh"]. Results
    are published via module-level globals.
    """
    global config
    global prepare_artifacts_result

    config = make_basic_artifacts(make_local_structure({"clean": None}),
                                  add_function=add_from_another_zip)
    prepare_artifacts_result, config = prepare(config)

    # single-host, single-thread local SSH setup
    config["ssh"].update({
        'hosts': ["127.0.0.1"],
        'username': '',
        'private_key_path': '',
        'threads_num': 1,
    })
def test_repeat_prepare_existed_artifacts(temp_dir, prepare_test_artifacts):
    """
    Artifact preparation should not delete already existed artifacts.

    Records the creation time of every file under the artifacts directory,
    re-runs prepare(), and asserts no file's creation time changed.
    """
    global config

    # snapshot creation times before the repeated preparation
    files_stat = {}
    for root, dirs, files in walk(config["artifacts_dir"]):
        for file in files:
            files_stat[file] = c_time(join(root, file))

    command, config = prepare(config)

    # every file must be untouched by the second prepare() run
    # (fixed message: failure means the file WAS changed, so say it must not be)
    for root, dirs, files in walk(config["artifacts_dir"]):
        for file in files:
            assert files_stat[file] == c_time(join(root, file)), \
                "File '{}' should not be changed".format(file)
def test_prepare_test_artifacts_repeat_remake_files(temp_dir, prepare_test_artifacts):
    """
    Remake artifacts if source files were changed.

    Rebuilds the basic artifacts with new content, re-runs prepare(), and
    asserts every file's creation time changed (i.e. each was remade).
    """
    global config

    new_config = make_basic_artifacts(config,
                                      text="New files with new text",
                                      add_function=add_from_another_zip)

    # snapshot creation times before the repeated preparation
    files_stat = {}
    for root, dirs, files in walk(config["artifacts_dir"]):
        for file in files:
            files_stat[file] = c_time(join(root, file))

    # wait so the file creation date can actually differ
    sleep(1)

    command, config = prepare(new_config)

    # every file must be remade by the second prepare() run
    # (fixed message: failure means the file was NOT changed, so say it should be)
    for root, dirs, files in walk(config["artifacts_dir"]):
        for file in files:
            assert files_stat[file] != c_time(join(root, file)), \
                "File '{}' should be changed".format(file)
def main():
    """
    Run Tiden tests.

    End-to-end driver: parses CLI args, loads configuration, prepares
    artifacts, optionally sets up the SSH pool and plugins, then either
    collects tests (dry run) or executes them on the remote hosts and
    reports results.
    """
    log_print("*** Initialization ***", color='blue')
    log_print('(c) 2017-{} GridGain Systems. All Rights Reserved'.format(
        max(datetime.now().year, 2019)))
    log_print(version)
    # exit code stays None on success; set to -1 when a plugin vetoes the run
    exit_code = None
    # parse arguments,
    # load configuration,
    # initialize working directories
    config = TidenFabric().setConfig(setup_test_environment(
        process_args())).obj
    log_print('The configuration stored in %s' % config['config_path'])
    logger = _get_default_logger(config)
    sys.path.insert(0, abspath(getcwd()))
    pm = PluginManager(config)
    # prepare artifacts, artifact information is updated into config
    # this must be done before tests collections,
    # because some tests are applicable for specific artifacts only
    log_print('*** Prepare artifacts ***', color='blue')
    pm.do('before_prepare_artifacts', config)
    remote_unzip_files, config = prepare(config)
    # NOTE(review): `collect_only` is read here but never assigned in this
    # function — presumably a module-level flag or derived from config
    # elsewhere in the file; confirm it is in scope before running.
    if collect_only:
        # we don't run any test, so no ssh pool nor plugin manager required
        ssh_pool = None
        pm = None
    else:
        # otherwise, create ssh pool,
        # and prepare plugins to use it
        ssh_pool = init_ssh_pool(config)
        if pm.plugins:
            log_print('*** Plugins ***', color='blue')
            for name, plugin in pm.plugins.items():
                log_print("%s, version %s" % (name, plugin['TIDEN_PLUGIN_VERSION']))
        pm.set(ssh=ssh_pool)
    # initialize tests runner
    log_print('*** Runner ***', color='blue')
    tr = TidenRunner(config, collect_only=collect_only, ssh_pool=ssh_pool, plugin_manager=pm)
    if len(tr.modules.keys()) == 0:
        log_print("Error: no test modules found")
        exit(1)
    log_print(
        "%s module(s) matched %s.%s" % (len(tr.modules.keys()), config['suite_name'], config['test_name']))
    if collect_only:
        # dry run: only enumerate the tests, no host setup or execution
        tr.collect_tests()
    else:
        pm.do('before_hosts_setup')
        init_remote_hosts(ssh_pool, config)
        pm.do('after_hosts_setup')
        upload_artifacts(ssh_pool, config, remote_unzip_files)
        # a plugin may veto the run; record the failure but still fire
        # 'after_tests_run' so plugins can clean up
        if pm.do_check('before_tests_run'):
            tr.process_tests()
        else:
            exit_code = -1
        pm.do('after_tests_run')
        result = tr.get_tests_results()
        result.flush_xunit()
        result.print_summary()
        # NOTE(review): the original formatting was collapsed; the reporting
        # lines below are placed inside this else-branch because `result`
        # only exists on the execution path — confirm against upstream.
        result.create_testrail_report(config, report_file=config.get('testrail_report'))
        print_blue("Execution time %d:%02d:%02d " % hms(int(time()) - result.get_started()))
    if exit_code:
        exit(exit_code)