def assert_directory_contents_match_expected(self, dir_path, expected_dir_contents):
    """
    Assert that the specified directory has the expected contents.

    :param dir_path: The path of the directory whose artifacts to check
    :type dir_path: string
    :param expected_dir_contents: A list of FSItems corresponding to the expected directory contents
    :type expected_dir_contents: list[FSItem]
    """
    if expected_dir_contents is None:
        return  # None means "no expectation" -- nothing to verify

    # abspath also strips any trailing slash, so basename yields the directory name.
    normalized_path = os.path.abspath(dir_path)
    expected_dir = Directory(os.path.basename(normalized_path), expected_dir_contents)
    expected_dir.assert_matches_path(normalized_path, allow_extra_items=False)
def assert_directory_contents_match_expected(self, dir_path, expected_dir_contents):
    """
    Assert that the specified directory has the expected contents.

    A value of None for expected_dir_contents skips the check entirely.

    :param dir_path: The path of the directory whose artifacts to check
    :type dir_path: string
    :param expected_dir_contents: A list of FSItems corresponding to the expected directory contents
    :type expected_dir_contents: list[FSItem]
    """
    # NOTE(review): this is an exact duplicate of an identical definition elsewhere
    # in this file; consider removing one copy.
    if expected_dir_contents is not None:
        abs_dir_path = os.path.abspath(dir_path)  # normalizes and removes any trailing slash
        dir_name = os.path.basename(abs_dir_path)
        Directory(dir_name, expected_dir_contents).assert_matches_path(
            abs_dir_path, allow_extra_items=False)
def assert_build_artifact_contents_match_expected(self, build_id, expected_build_artifact_contents):
    """
    Assert that artifact files for this build have the expected contents.

    :param build_id: The id of the build whose artifacts to check
    :type build_id: int
    :param expected_build_artifact_contents: A list of lists of mappings from artifact filename to
        artifact contents string; the outer list corresponds to subjob ids, the inner list
        corresponds to atom ids, and the dict should be a mapping of filenames to expected file
        contents for the corresponding atom. See the configs in functional_test_job_configs.py
        for examples.
    :type expected_build_artifact_contents: list[FSItem]
    """
    # The master writes each build's artifacts under results/master/<build_id>.
    artifacts_dir = os.path.join(
        self.test_app_base_dir.name, 'results', 'master', str(build_id))
    Directory(str(build_id), expected_build_artifact_contents).assert_matches_path(
        artifacts_dir, allow_extra_items=False)
def test_git_type_demo_project_config(self):
    """
    Start a one-master/one-slave cluster, run the public ClusterRunner demo project via a
    git-type build request, and verify the build succeeds with the expected subjob/atom
    counts and artifact contents.
    """
    master = self.cluster.start_master()
    self.cluster.start_slave(num_executors_per_slave=10)
    build_resp = master.post_new_build({
        'type': 'git',
        'url': 'https://github.com/boxengservices/ClusterRunnerDemo.git',
        'job_name': 'Simple',
    })
    build_id = build_resp['build_id']
    # Fix: the return value of block_until_build_finished was previously ignored, so a
    # timed-out build would fall through to the assertions below and fail confusingly.
    # Assert it explicitly so a timeout produces a clear failure message.
    # (Extra time here allows for cloning the repo.)
    self.assertTrue(
        master.block_until_build_finished(build_id, timeout=20),
        'The build should finish building within the timeout.')

    # Each atom of the demo project just echoes one of the numbers 1 through 10.
    expected_artifact_contents = [
        Directory('artifact_{}_0'.format(i), [
            File('clusterrunner_command'),
            File('clusterrunner_console_output', contents='{}\n\n'.format(i + 1)),
            File('clusterrunner_exit_code', contents='0\n'),
            File('clusterrunner_time'),
        ]) for i in range(10)
    ]
    expected_artifact_contents.append(File('results.tar.gz'))

    self.assert_build_has_successful_status(build_id=build_id)
    self.assert_build_status_contains_expected_data(build_id=build_id,
                                                    expected_data={
                                                        'num_atoms': 10,
                                                        'num_subjobs': 10
                                                    })
    self.assert_build_artifact_contents_match_expected(
        build_id=build_id, expected_build_artifact_contents=expected_artifact_contents)
def test_git_type_demo_project_config(self):
    """
    Start a one-master/one-slave cluster, run the public ClusterRunner demo project via a
    git-type build request, and verify the build succeeds with the expected subjob/atom
    counts and artifact contents.
    """
    master = self.cluster.start_master()
    self.cluster.start_slave(num_executors_per_slave=10)
    build_resp = master.post_new_build({
        'type': 'git',
        'url': 'https://github.com/boxengservices/ClusterRunnerDemo.git',
        'job_name': 'Simple',
    })
    build_id = build_resp['build_id']
    # Generous timeout to allow for cloning the repo; fail fast and clearly on timeout.
    build_finished = master.block_until_build_finished(build_id, timeout=30)
    self.assertTrue(build_finished, 'The build should finish building within the timeout.')

    # Each atom of the demo project just echoes one of the numbers 1 through 10.
    expected_artifacts = []
    for atom_num in range(10):
        expected_artifacts.append(Directory('artifact_{}_0'.format(atom_num), [
            File('clusterrunner_command'),
            File('clusterrunner_console_output', contents='{}\n\n'.format(atom_num + 1)),
            File('clusterrunner_exit_code', contents='0\n'),
            File('clusterrunner_time'),
        ]))
    # The master also packages all artifacts into archive files at the build root.
    expected_artifacts.extend([
        File(BuildArtifact.ARTIFACT_TARFILE_NAME),
        File(BuildArtifact.ARTIFACT_ZIPFILE_NAME),
    ])

    self.assert_build_has_successful_status(build_id=build_id)
    self.assert_build_status_contains_expected_data(
        build_id=build_id,
        expected_data={
            'num_atoms': 10,
            'num_subjobs': 10
        })
    self.assert_build_artifact_contents_match_expected(
        build_id=build_id, expected_build_artifact_contents=expected_artifacts)
- TOKEN: seq 0 4 | xargs -I {} echo "This is atom {}" """, 'nt': """ BasicJob: commands: - echo !TOKEN!> !ARTIFACT_DIR!\\result.txt atomizers: - TOKEN: FOR /l %n in (0,1,4) DO @echo This is atom %n """, }, expected_to_fail=False, expected_num_subjobs=5, expected_num_atoms=5, expected_artifact_contents=[ Directory('artifact_0_0', DEFAULT_ATOM_FILES + [File('result.txt', contents='This is atom 0\n')]), Directory('artifact_1_0', DEFAULT_ATOM_FILES + [File('result.txt', contents='This is atom 1\n')]), Directory('artifact_2_0', DEFAULT_ATOM_FILES + [File('result.txt', contents='This is atom 2\n')]), Directory('artifact_3_0', DEFAULT_ATOM_FILES + [File('result.txt', contents='This is atom 3\n')]), Directory('artifact_4_0', DEFAULT_ATOM_FILES + [File('result.txt', contents='This is atom 4\n')]), ], ) # This is a very basic job, but one of the atoms will fail with non-zero exit code. BASIC_FAILING_JOB = FunctionalTestJobConfig( config={ 'posix': """ BasicFailingJob: commands: - if [ "$TOKEN" = "This is atom 3" ]; then exit 1; fi