def determine_new_patch_filename(self):
    patch_dir = FileUtils.join_path(self.patch_basedir, self.patch_branch)
    FileUtils.ensure_dir_created(patch_dir)
    found_patches = FileUtils.find_files(
        patch_dir, regex=self.patch_branch + PATCH_FILE_REGEX, single_level=True)
    new_patch_filename, new_patch_num = PatchUtils.get_next_filename(patch_dir, found_patches)

    # Double-check the generated filename vs. putting it all together manually
    new_patch_filename_sanity = FileUtils.join_path(
        self.patch_basedir, self.patch_branch,
        f"{self.patch_branch}.{str(new_patch_num)}{PATCH_EXTENSION}")

    # If this is the first patch, use the manually concatenated name;
    # otherwise, use the generated filename.
    if new_patch_num == FIRST_PATCH_NUMBER:
        new_patch_filename = new_patch_filename_sanity
    if new_patch_filename != new_patch_filename_sanity:
        raise ValueError(
            "File paths do not match. "
            f"Calculated: {new_patch_filename}, Concatenated: {new_patch_filename_sanity}")
    self.new_patch_filename = new_patch_filename
def test_YARN_10496(self):
    project_out_root = ProjectUtils.get_test_output_basedir(
        PROJECT_NAME, allow_python_commons_as_project=True)
    result_basedir = FileUtils.join_path(project_out_root, "jira-data")
    FileUtils.ensure_dir_created(result_basedir)
    jira_id = "YARN-10496"
    jira_html_file = FileUtils.join_path(result_basedir, "jira.html")
    jira_list_file = FileUtils.join_path(result_basedir, "jira-list.txt")

    jira_html = JiraUtils.download_jira_html(
        "https://issues.apache.org/jira/browse/", jira_id, jira_html_file)
    jira_ids_and_titles = JiraUtils.parse_subjiras_and_jira_titles_from_umbrella_html(
        jira_html, jira_list_file, filter_ids=[jira_id])

    expected_jira_ids = [
        'YARN-10169', 'YARN-10504', 'YARN-10505', 'YARN-10506', 'YARN-10512',
        'YARN-10513', 'YARN-10521', 'YARN-10522', 'YARN-10524', 'YARN-10525',
        'YARN-10531', 'YARN-10532', 'YARN-10535', 'YARN-10564', 'YARN-10565',
        'YARN-10571', 'YARN-10573', 'YARN-10574', 'YARN-10576', 'YARN-10577',
        'YARN-10578', 'YARN-10579', 'YARN-10581', 'YARN-10582', 'YARN-10583',
        'YARN-10584', 'YARN-10587', 'YARN-10590', 'YARN-10592', 'YARN-10596',
        'YARN-10598', 'YARN-10599', 'YARN-10600', 'YARN-10604', 'YARN-10605',
        'YARN-10609', 'YARN-10614', 'YARN-10615', 'YARN-10620', 'YARN-10622',
        'YARN-10624'
    ]
    all_list_items_found = all(id1 in jira_ids_and_titles.keys() for id1 in expected_jira_ids)
    self.assertTrue(all_list_items_found)

    expected_mappings = {
        'YARN-10624': 'Support max queues limit configuration in new auto created queue, consistent with old auto created.'
    }
    self.assertEqual(expected_mappings['YARN-10624'], jira_ids_and_titles['YARN-10624'])
    self.assertTrue(isinstance(jira_ids_and_titles['YARN-10624'], str))
def print_and_save_summary(self, rendered_summary):
    LOG.info(rendered_summary.printable_summary_str)

    filename = FileUtils.join_path(self.config.output_dir, SUMMARY_FILE_TXT)
    LOG.info(f"Saving summary to text file: {filename}")
    FileUtils.save_to_file(filename, rendered_summary.writable_summary_str)

    filename = FileUtils.join_path(self.config.output_dir, SUMMARY_FILE_HTML)
    LOG.info(f"Saving summary to html file: {filename}")
    FileUtils.save_to_file(filename, rendered_summary.html_summary)
def test_with_not_existing_patch(self):
    args = Object()
    args.patch_file = FileUtils.join_path("tmp", "blablabla")
    review_branch_creator = ReviewBranchCreator(
        args, self.repo_wrapper, BASE_BRANCH, REMOTE_BASE_BRANCH)
    self.assertRaises(ValueError, review_branch_creator.run)
def get_output_basedir(cls, basedir_name: str, ensure_created=True,
                       allow_python_commons_as_project=False,
                       project_root_determination_strategy=None):
    if not basedir_name:
        raise ValueError("Basedir name should be specified!")

    project_name = cls.verify_caller_filename_valid(
        allow_python_commons_as_project=allow_python_commons_as_project,
        project_root_determination_strategy=project_root_determination_strategy)
    proj_basedir = FileUtils.join_path(PROJECTS_BASEDIR, basedir_name)
    if project_name in cls.PROJECT_BASEDIR_DICT:
        old_basedir = cls.PROJECT_BASEDIR_DICT[project_name]
        if old_basedir != proj_basedir:
            raise ValueError(
                "Project is already registered with a different output basedir. Details: \n"
                f"Old basedir name: {old_basedir.split(os.sep)[-1]}\n"
                f"Project basedir's old full path: {old_basedir}\n"
                f"New basedir name would be: {basedir_name}\n"
                f"Project basedir's new full path would be: {proj_basedir}\n")
    cls.PROJECT_BASEDIR_DICT[project_name] = proj_basedir

    if ensure_created:
        FileUtils.ensure_dir_created(proj_basedir)
    return proj_basedir
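# Hedged usage sketch (not project API): the guard above boils down to an
# idempotent "register once, reject conflicting re-registration" pattern.
# ProjectRegistry below is a hypothetical, stdlib-only illustration of that
# contract, not the real ProjectUtils implementation.
import os

class ProjectRegistry:
    _basedirs = {}

    @classmethod
    def register(cls, project_name: str, basedir: str) -> str:
        existing = cls._basedirs.get(project_name)
        if existing is not None and existing != basedir:
            # Re-registering the same project with a different basedir is an error
            raise ValueError(
                f"Project '{project_name}' is already registered with basedir "
                f"'{existing}'; refusing to switch to '{basedir}'")
        cls._basedirs[project_name] = basedir
        os.makedirs(basedir, exist_ok=True)  # mirrors ensure_dir_created
        return basedir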
def test_run_in_a_non_git_repo_working_dir(self):
    working_dir = FileUtils.join_path("/tmp", "dummydir")
    FileUtils.ensure_dir_created(working_dir)
    format_patch_saver = FormatPatchSaver(
        self.setup_args(), working_dir, self.current_datetime)
    self.assertRaises(ValueError, format_patch_saver.run)
def get_test_output_basedir(cls, basedir_name: str,
                            allow_python_commons_as_project=False,
                            project_root_determination_strategy=None):
    """
    :param basedir_name: Name of the project's output basedir.
    :param allow_python_commons_as_project: This is useful and a must for test
        executions of ProjectUtils (e.g. JiraUtilsTests) as stackframes calling
        pythoncommons are only the methods of the unittest framework.
    :return: The test output basedir of the project.
    """
    cls.test_execution = True
    project_name = cls.verify_caller_filename_valid(
        allow_python_commons_as_project=allow_python_commons_as_project,
        project_root_determination_strategy=project_root_determination_strategy)
    if project_name not in cls.PROJECT_BASEDIR_DICT:
        # Creating project dir for the first time
        proj_basedir = cls.get_output_basedir(
            basedir_name,
            allow_python_commons_as_project=allow_python_commons_as_project,
            project_root_determination_strategy=project_root_determination_strategy)
    else:
        proj_basedir = cls.PROJECT_BASEDIR_DICT[project_name]

    return FileUtils.join_path(proj_basedir, TEST_OUTPUT_DIR_NAME)
def _get_session_dir_under_child_dir(cls, child_dir_name, test: bool = False):
    child_dir_type: str = "child dir" if not test else "test child dir"
    dir_dict = cls.CHILD_DIR_DICT if not test else cls.CHILD_DIR_TEST_DICT
    if not child_dir_name:
        raise ValueError(f"Project {child_dir_type} name should be specified!")

    project_name = cls._validate_project_for_child_dir_creation()
    if project_name in dir_dict and child_dir_name in dir_dict[project_name]:
        stored_dir = dir_dict[project_name][child_dir_name]
        LOG.debug(f"Found already stored {child_dir_type} for project '{project_name}': {stored_dir}")

        session_dir = FileUtils.join_path(
            stored_dir, f"session-{DateUtils.now_formatted('%Y%m%d_%H%M%S')}")
        FileUtils.ensure_dir_created(session_dir)
        return session_dir
    else:
        raise ValueError(
            f"Cannot find stored {child_dir_type} for project. "
            f"Project: {project_name}, "
            f"Child dir: {child_dir_name}, "
            f"All stored {child_dir_type}s: {dir_dict}")
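# Hedged sketch (stdlib only) of the session-directory naming used above:
# a "session-<YYYYmmdd_HHMMSS>" subdirectory created under the stored child
# dir. make_session_dir is a hypothetical helper, not part of python-commons.
import os
from datetime import datetime

def make_session_dir(parent_dir: str) -> str:
    # Same timestamp format as DateUtils.now_formatted('%Y%m%d_%H%M%S') above
    session_dir = os.path.join(
        parent_dir, f"session-{datetime.now().strftime('%Y%m%d_%H%M%S')}")
    os.makedirs(session_dir, exist_ok=True)
    return session_dir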
def get_next_filename(patch_dir, list_of_prev_patches):
    list_of_prev_patches = sorted(list_of_prev_patches, reverse=True)
    LOG.info("Found patches: %s", list_of_prev_patches)
    if len(list_of_prev_patches) == 0:
        # No previous patches: the caller (determine_new_patch_filename)
        # substitutes the properly concatenated first filename in this case.
        return FileUtils.join_path(patch_dir, FIRST_PATCH_NUMBER), FIRST_PATCH_NUMBER
    else:
        latest_patch = list_of_prev_patches[0]
        last_patch_num = PatchUtils.extract_patch_number_from_filename_as_str(latest_patch)
        next_patch_filename = PatchUtils.get_next_patch_filename(latest_patch)
        return (
            FileUtils.join_path(patch_dir, next_patch_filename),
            StringUtils.increase_numerical_str(last_patch_num),
        )
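# Hedged sketch of the same "next patch number" logic with stdlib only.
# Assumes patch files are named <branch>.<number>.patch; the regex below is an
# illustrative assumption, not the project's actual PATCH_FILE_REGEX, and the
# first patch is simply numbered 1 here.
import os
import re

def next_patch_filename_sketch(patch_dir: str, branch: str) -> str:
    pattern = re.compile(re.escape(branch) + r"\.(\d+)\.patch$")
    numbers = [
        int(m.group(1))
        for f in os.listdir(patch_dir)
        if (m := pattern.match(f))
    ]
    next_num = max(numbers, default=0) + 1  # empty dir -> first patch
    return os.path.join(patch_dir, f"{branch}.{next_num}.patch")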
def test_fetch_with_upstream_umbrella_cached_mode(self):
    self.utils.checkout_trunk()
    umbrella_fetcher = UpstreamJiraUmbrellaFetcher(
        self.setup_args(force_mode=False),
        self.repo_wrapper,
        self.repo_wrapper,
        self.utils.jira_umbrella_data_dir,
        self.base_branch,
    )
    # Run first, to make sure results are pickled for this umbrella
    umbrella_fetcher.run()

    # Run again, this time using the cache
    umbrella_fetcher.run()

    output_dir = FileUtils.join_path(self.utils.jira_umbrella_data_dir, UPSTREAM_JIRA_ID)
    original_mod_dates = FileUtils.get_mod_dates_of_files(output_dir, *ALL_OUTPUT_FILES)
    self._verify_files_and_mod_dates(output_dir)

    # Since we are using non-force mode (cached mode), we expect the files to be untouched
    new_mod_dates = FileUtils.get_mod_dates_of_files(output_dir, *ALL_OUTPUT_FILES)
    self.assertDictEqual(original_mod_dates, new_mod_dates)
def run(self):
    LOG.info(f"Starting to send latest command data in email.\nConfig: {str(self.config)}")
    zip_extract_dest = FileUtils.join_path(os.sep, "tmp", "extracted_zip")
    ZipFileUtils.extract_zip_file(self.config.email.attachment_file, zip_extract_dest)

    # Pick the file from the zip that will be the email's body
    email_body_file = FileUtils.join_path(os.sep, zip_extract_dest, self.config.email_body_file)
    FileUtils.ensure_file_exists(email_body_file)
    email_body_contents: str = FileUtils.read_file(email_body_file)
    body_mimetype: EmailMimeType = self._determine_body_mimetype_by_attachment(email_body_file)

    email_service = EmailService(self.config.email.email_conf)
    try:
        email_service.send_mail(
            self.config.email.sender,
            self.config.email.subject,
            email_body_contents,
            self.config.email.recipients,
            self.config.email.attachment_file,
            body_mimetype=body_mimetype,
            override_attachment_filename=self.config.email.attachment_filename,
        )
    except SMTPAuthenticationError as smtpe:
        ignore_smtp_auth_env: str = OsUtils.get_env_value(EnvVar.IGNORE_SMTP_AUTH_ERROR.value, "")
        LOG.info(f"Recognized env var '{EnvVar.IGNORE_SMTP_AUTH_ERROR.value}': {ignore_smtp_auth_env}")
        if not ignore_smtp_auth_env:
            raise smtpe
        else:
            # Swallow exception
            LOG.exception(
                f"SMTP auth error occurred but env var "
                f"'{EnvVar.IGNORE_SMTP_AUTH_ERROR.value}' was set",
                exc_info=True,
            )
    LOG.info("Finished sending email to recipients")
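# Hedged sketch of the env-var-gated error swallowing used above: re-raise by
# default, but log-and-continue when the ignore flag is set. The env var name
# below is an illustrative assumption (the real value comes from
# EnvVar.IGNORE_SMTP_AUTH_ERROR), and send_fn is a hypothetical callable.
import logging
import os
from smtplib import SMTPAuthenticationError

def send_with_optional_auth_error_ignore(send_fn, ignore_env_var: str = "IGNORE_SMTP_AUTH_ERROR"):
    try:
        send_fn()
    except SMTPAuthenticationError:
        if not os.environ.get(ignore_env_var, ""):
            raise  # default behavior: propagate the auth failure
        logging.exception("SMTP auth error occurred but env var '%s' was set; ignoring", ignore_env_var)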
def get_default_log_file(cls, project_name: str, postfix: str = None):
    if postfix:
        postfix += "-"
    else:
        postfix = ""

    filename = f"{project_name}-{postfix}{DateUtils.get_current_datetime()}"
    log_dir = cls.get_logs_dir()
    return FileUtils.join_path(log_dir, filename)
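# Hedged sketch of the log-file naming scheme above:
# "<project>-[<postfix>-]<datetime>". default_log_file_sketch is hypothetical,
# and the timestamp format is an assumption; the exact format produced by
# DateUtils.get_current_datetime is not shown here.
import os
from datetime import datetime

def default_log_file_sketch(log_dir: str, project_name: str, postfix: str = None) -> str:
    postfix = f"{postfix}-" if postfix else ""
    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")  # assumed format
    return os.path.join(log_dir, f"{project_name}-{postfix}{timestamp}")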
def create_image_from_dir(cls, dockerfile_dir_path, tag=None, build_args=None):
    dockerfile_path = FileUtils.join_path(dockerfile_dir_path, "Dockerfile")
    cls._build_image_internal(dockerfile_dir_path, dockerfile_path, tag=tag, build_args=build_args)
def setUp(self):
    self.current_datetime = DateUtils.get_current_datetime()
    self.patches_basedir = FileUtils.join_path(
        self.saved_patches_dir, DEST_DIR_PREFIX, self.current_datetime)
    self.assertIsNotNone(self.patches_basedir)
    self.assertNotEqual(self.patches_basedir, "~")
    self.assertNotEqual(self.patches_basedir, "/")
    self.assertTrue(self.saved_patches_dir in self.patches_basedir)
    FileUtils.remove_files(self.patches_basedir, FORMAT_PATCH_FILE_PREFIX)
def _add_file_to_zip(zip, dirpath, filename, src_dir):
    file_full_path = os.path.join(dirpath, filename)
    dir_path_from_src_dir = dirpath.replace(src_dir, '')
    if dir_path_from_src_dir.startswith(os.sep):
        dir_path_from_src_dir = dir_path_from_src_dir[1:]

    path_in_zip = FileUtils.join_path(dir_path_from_src_dir, filename)
    LOG.debug(f"Writing to zip: File full path: {file_full_path}, path in zip file: {path_in_zip}")
    zip.write(file_full_path, path_in_zip)
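# Hedged sketch (stdlib zipfile): computing the in-archive path by making
# dirpath relative to src_dir, as the string manipulation above does.
# os.path.relpath is a more robust alternative to str.replace, which could
# also match src_dir in the middle of the path.
import os
import zipfile

def add_file_to_zip_sketch(zf: zipfile.ZipFile, dirpath: str, filename: str, src_dir: str) -> None:
    file_full_path = os.path.join(dirpath, filename)
    # relpath yields "." when dirpath == src_dir; zipfile normalizes "./x" to "x"
    path_in_zip = os.path.join(os.path.relpath(dirpath, src_dir), filename)
    zf.write(file_full_path, path_in_zip)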
def test_with_bad_patch_content(self):
    patch_file = FileUtils.join_path(self.dummy_patches_dir, PATCH_FILENAME)
    FileUtils.save_to_file(patch_file, "dummycontents")
    args = Object()
    args.patch_file = patch_file

    review_branch_creator = ReviewBranchCreator(
        args, self.repo_wrapper, BASE_BRANCH, REMOTE_BASE_BRANCH)
    self.assertRaises(ValueError, review_branch_creator.run)
def test_with_oddly_named_patch(self):
    patch_file = FileUtils.join_path(self.dummy_patches_dir, "testpatch1.patch")
    FileUtils.create_files(patch_file)
    args = Object()
    args.patch_file = patch_file

    review_branch_creator = ReviewBranchCreator(
        args, self.repo_wrapper, BASE_BRANCH, REMOTE_BASE_BRANCH)
    self.assertRaises(ValueError, review_branch_creator.run)
def _exec_script_only_on_master(compare_script, feature_br_name, master_br_name,
                                output_dir, working_dir):
    args1 = f"{feature_br_name} {master_br_name}"
    output_file1 = FileUtils.join_path(output_dir, f"only-on-{master_br_name}")
    cli_cmd, cli_output = CommandRunner.execute_script(
        compare_script, args=args1, working_dir=working_dir,
        output_file=output_file1, use_tee=True)
    return cli_cmd, cli_output
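# Hedged sketch (stdlib subprocess): running a script with arguments from a
# working dir and teeing its output to a file, approximating what
# CommandRunner.execute_script(..., use_tee=True) is used for above.
# run_script_tee_sketch is hypothetical, not the real CommandRunner API.
import subprocess

def run_script_tee_sketch(script: str, args: str, working_dir: str, output_file: str):
    cmd = f"{script} {args}"
    proc = subprocess.run(cmd, shell=True, cwd=working_dir, capture_output=True, text=True)
    with open(output_file, "w") as f:
        f.write(proc.stdout)
    print(proc.stdout, end="")  # "tee" behavior: echo to stdout as well
    return cmd, proc.stdout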
def save_to_test_file(cls, dir_name: str, filename: str, file_contents: str):
    if not dir_name:
        raise ValueError("Dir name should be specified!")
    if not filename:
        raise ValueError("Filename should be specified!")

    project_name = cls._validate_project_for_child_dir_creation()
    cls.validate_test_child_dir(dir_name, project_name)
    dir_path = cls.CHILD_DIR_TEST_DICT[project_name][dir_name]
    FileUtils.save_to_file(FileUtils.join_path(dir_path, filename), file_contents)
def get_test_output_child_dir(cls, dir_name: str, ensure_created=True, special_parent_dir=None):
    if not dir_name:
        raise ValueError("Dir name should be specified!")
    project_name = cls._validate_project_for_child_dir_creation()

    if project_name in cls.CHILD_DIR_TEST_DICT and dir_name in cls.CHILD_DIR_TEST_DICT[project_name]:
        stored_dir = cls.CHILD_DIR_TEST_DICT[project_name][dir_name]
        LOG.debug(f"Found already stored child test dir for project '{project_name}': {stored_dir}")
        FileUtils.ensure_dir_created(stored_dir)
        return stored_dir

    if special_parent_dir:
        if not FileUtils.does_path_exist(special_parent_dir):
            raise ValueError(f"Specified parent dir does not exist: {special_parent_dir}")
        LOG.debug(f"Parent dir of new child directory will be: {special_parent_dir}")
        parent_dir = special_parent_dir
        new_child_dir = FileUtils.join_path(parent_dir, dir_name)
    else:
        # Default parent dir: basedir of project
        # New child dir: <basedir>/test/<new child dir name>
        parent_dir = cls.PROJECT_BASEDIR_DICT[project_name]
        new_child_dir = FileUtils.join_path(parent_dir, TEST_OUTPUT_DIR_NAME, dir_name)

    if project_name not in cls.CHILD_DIR_TEST_DICT:
        cls.CHILD_DIR_TEST_DICT[project_name] = {}
    cls.CHILD_DIR_TEST_DICT[project_name][dir_name] = new_child_dir

    if ensure_created:
        FileUtils.ensure_dir_created(new_child_dir)
    return new_child_dir
def _generate_dummy_text_files_in_container_dir(self, dir_path: str, number_of_files: int):
    self.exec_cmd_in_container("mkdir -p " + dir_path)
    for i in range(number_of_files):
        path = os.path.normpath(dir_path)
        path_segments = path.split(os.sep)
        path_segments = list(filter(None, path_segments))
        file_name = "_".join(path_segments) + "_" + str(i + 1)
        file_path = FileUtils.join_path(dir_path, file_name)
        cmd = f"echo dummy_{str(i + 1)} > {file_path}"
        # A simple redirect did not work: self._exec_cmd_in_container(cmd)
        # See: https://github.com/docker/docker-py/issues/1637
        # Use this as a workaround
        self.exec_cmd_in_container(['sh', '-c', cmd])
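# Hedged sketch of the file-name derivation above: flatten a directory path
# into an underscore-joined name, e.g. "/tmp/dummy/dir1" with index 2 becomes
# "tmp_dummy_dir1_2". dummy_file_name_sketch is a hypothetical helper.
import os

def dummy_file_name_sketch(dir_path: str, index: int) -> str:
    # Drop empty segments (leading separator, trailing slash) before joining
    segments = [s for s in os.path.normpath(dir_path).split(os.sep) if s]
    return "_".join(segments) + "_" + str(index)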
def add_some_file_changes(self, commit=False, commit_message_prefix=None):
    FileUtils.save_to_file(
        FileUtils.join_path(self.sandbox_repo_path, DUMMYFILE_1), DUMMYFILE_1)
    FileUtils.save_to_file(
        FileUtils.join_path(self.sandbox_repo_path, DUMMYFILE_2), DUMMYFILE_2)
    yarn_config_java = FileUtils.join_path(self.sandbox_repo_path, YARNCONFIGURATION_PATH)
    FileUtils.append_to_file(yarn_config_java, "dummy_changes_to_conf_1\n")
    FileUtils.append_to_file(yarn_config_java, "dummy_changes_to_conf_2\n")

    if commit:
        commit_msg = "test_commit"
        if commit_message_prefix:
            commit_msg = commit_message_prefix + commit_msg
        self.repo_wrapper.commit(
            commit_msg,
            author=Actor("A test author", "*****@*****.**"),
            committer=Actor("A test committer", "*****@*****.**"),
            add_files_to_index=[DUMMYFILE_1, DUMMYFILE_2, yarn_config_java],
        )
def add_file_changes_and_save_to_patch(self, patch_file):
    self.add_some_file_changes()
    yarn_config_java = FileUtils.join_path(self.sandbox_repo_path, YARNCONFIGURATION_PATH)
    self.repo_wrapper.add_to_index([DUMMYFILE_1, DUMMYFILE_2, yarn_config_java])

    diff = self.repo_wrapper.diff(HEAD, cached=True)
    PatchUtils.save_diff_to_patch_file(diff, patch_file)
    self.reset_changes()

    # Verify file contents
    self.assert_file_contains(patch_file, "+dummyfile1")
    self.assert_file_contains(patch_file, "+dummyfile2")
    self.assert_file_contains(patch_file, "+dummy_changes_to_conf_1")
    self.assert_file_contains(patch_file, "+dummy_changes_to_conf_2")
def test_with_normal_patch(self):
    patch_file = FileUtils.join_path(self.dummy_patches_dir, PATCH_FILENAME)
    self.utils.add_file_changes_and_save_to_patch(patch_file)
    args = Object()
    args.patch_file = patch_file

    review_branch_creator = ReviewBranchCreator(
        args, self.repo_wrapper, BASE_BRANCH, REMOTE_BASE_BRANCH)
    review_branch_creator.run()

    self.assertTrue(REVIEW_BRANCH in self.repo.heads,
                    f"Review branch does not exist: {REVIEW_BRANCH}")
    self.utils.verify_commit_message_of_branch(
        REVIEW_BRANCH, COMMIT_MSG_TEMPLATE.format(file=patch_file))
def _setup_dirs(cls):
    found_dirs = FileUtils.find_files(
        cls.repo_root_dir,
        find_type=FindResultType.DIRS,
        regex=TEST_DIR_NAME,
        parent_dir=SOME_PARENT_DIR,
        single_level=False,
        full_path_result=True,
    )
    if len(found_dirs) != 1:
        raise ValueError(
            f"Expected to find 1 dir with name {TEST_DIR_NAME} "
            f"and parent dir '{SOME_PARENT_DIR}'. "
            f"Actual results: {found_dirs}")
    cls.repo_root_dir = found_dirs[0]
    cls.some_other_dir = FileUtils.join_path(cls.repo_root_dir, "some-other-dir")
def setUpClass(cls):
    # The test expects the MAIL_ACC_PASSWORD env var to be set
    if CdswEnvVar.MAIL_ACC_PASSWORD.value not in os.environ:
        raise ValueError(f"Please set '{CdswEnvVar.MAIL_ACC_PASSWORD.value}' env var and re-run the test!")
    cls._setup_logging()
    cls.repo_root_dir = FileUtils.find_repo_root_dir(__file__, REPO_ROOT_DIRNAME)
    found_cdsw_dirs = FileUtils.find_files(
        cls.repo_root_dir,
        find_type=FindResultType.DIRS,
        regex=CDSW_DIRNAME,
        parent_dir="yarndevtools",
        single_level=False,
        full_path_result=True,
    )
    if len(found_cdsw_dirs) != 1:
        raise ValueError(
            f"Expected to find 1 dir with name {CDSW_DIRNAME} "
            f"and parent dir 'yarndevtools'. "
            f"Actual results: {found_cdsw_dirs}")
    cls.repo_cdsw_root_dir = found_cdsw_dirs[0]
    cls.yarn_dev_tools_results_dir = FileUtils.join_path(cls.repo_cdsw_root_dir, "yarndevtools-results")
    cls.branchdiff_cdsw_runner_script = YarnCdswBranchDiffTests.find_cdsw_runner_script(
        os.path.join(cls.repo_cdsw_root_dir, BRANCH_DIFF_REPORTER_DIR_NAME))
    cls.docker_test_setup = DockerTestSetup(
        DOCKER_IMAGE, create_image=CREATE_IMAGE,
        dockerfile_location=cls.repo_cdsw_root_dir, logger=CMD_LOG)

    exec_mode_env: str = OsUtils.get_env_value(
        CdswEnvVar.TEST_EXECUTION_MODE.value, TestExecMode.CLOUDERA.value)
    cls.exec_mode: TestExecMode = TestExecMode[exec_mode_env.upper()]

    # !! WARNING: User-specific settings !!
    if cls.exec_mode == TestExecMode.CLOUDERA:
        # We need both upstream / downstream repos for Cloudera-mode
        os.environ[CdswEnvVar.CLOUDERA_HADOOP_ROOT.value] = "/Users/snemeth/development/cloudera/hadoop/"
        os.environ[CdswEnvVar.HADOOP_DEV_DIR.value] = "/Users/snemeth/development/apache/hadoop"
    elif cls.exec_mode == TestExecMode.UPSTREAM:
        os.environ[CdswEnvVar.HADOOP_DEV_DIR.value] = "/Users/snemeth/development/apache/hadoop"
        os.environ[BranchComparatorEnvVar.REPO_TYPE.value] = RepoType.UPSTREAM.value
        os.environ[BranchComparatorEnvVar.FEATURE_BRANCH.value] = "origin/branch-3.3"
        os.environ[BranchComparatorEnvVar.MASTER_BRANCH.value] = "origin/trunk"
def _execute_compare_script(config, branches, working_dir) -> Dict[BranchType, Tuple[str, str]]:
    compare_script = config.legacy_compare_script_path
    master_br_name = branches.get_branch(BranchType.MASTER).shortname
    feature_br_name = branches.get_branch(BranchType.FEATURE).shortname
    output_dir = FileUtils.join_path(config.output_dir, "git_compare_script_output")
    FileUtils.ensure_dir_created(output_dir)

    results: Dict[BranchType, Tuple[str, str]] = {
        BranchType.MASTER: LegacyScriptRunner._exec_script_only_on_master(
            compare_script, feature_br_name, master_br_name, output_dir, working_dir),
        BranchType.FEATURE: LegacyScriptRunner._exec_script_only_on_feature(
            compare_script, feature_br_name, master_br_name, output_dir, working_dir),
    }
    return results
def _check_input_files(self, input_files: List[str], project_basedir: str):
    LOG.info(f"Checking provided input files. Command: {self.cmd_type}, Files: {input_files}")
    resolved_files = [FileUtils.join_path(project_basedir, f) for f in input_files]
    not_found_files = []

    # Sanity check
    for f in resolved_files:
        exists = FileUtils.does_file_exist(f)
        if not exists:
            not_found_files.append(f)
    if len(not_found_files) > 0:
        raise ValueError(f"The following files could not be found: {not_found_files}")

    LOG.info(f"Listing resolved input files. Command: {self.cmd_type}, Files: {resolved_files}")
    return resolved_files
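# Hedged sketch (pathlib): the same resolve-then-validate pass, collecting all
# missing files before failing so the error reports every problem at once.
# check_input_files_sketch is hypothetical, not the project's method.
from pathlib import Path
from typing import List

def check_input_files_sketch(input_files: List[str], project_basedir: str) -> List[Path]:
    resolved = [Path(project_basedir, f) for f in input_files]
    missing = [p for p in resolved if not p.is_file()]
    if missing:
        raise ValueError(f"The following files could not be found: {missing}")
    return resolved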
def test_executing_script_from_uncommon_directory(self):
    # Can't use /tmp as it's platform-dependent:
    # on macOS it's mounted as /private/tmp, but some Linux systems don't have /private/tmp.
    # Let's use Python's built-in temp dir creation methods instead.
    tmp_dir: tempfile.TemporaryDirectory = tempfile.TemporaryDirectory()
    script_dir = FileUtils.ensure_dir_created(FileUtils.join_path(tmp_dir.name, "python"))
    sys.path.append(script_dir)
    script_abs_path = script_dir + os.sep + "hello_world.py"
    contents = (
        "from pythoncommons.project_utils import ProjectUtils,ProjectRootDeterminationStrategy\n"
        "print(\"hello world\")\n"
        "basedir = ProjectUtils.get_output_basedir('test', "
        "project_root_determination_strategy=ProjectRootDeterminationStrategy.SYS_PATH)\n"
        "logfilename = ProjectUtils.get_default_log_file('test')\n"
    )
    FileUtils.save_to_file(script_abs_path, contents)
    os.system(f'python3 {script_abs_path}')
    proc = subprocess.run(["python3", script_abs_path], capture_output=True)

    # print(f"stdout: {proc.stdout}")
    # print(f"stderr: {str(proc.stderr)}")
    print(f"exit code: {proc.returncode}")
    self.assertEqual(0, proc.returncode)
def test_with_normal_patch_from_yarn_dev_tools(self):
    self.cleanup_and_checkout_branch()
    self.utils.add_some_file_changes(commit=False)
    self.utils.set_env_vars(self.utils.sandbox_repo_path, self.utils.sandbox_repo_path)

    yarn_dev_tools = YarnDevTools()
    yarn_dev_tools.upstream_repo = self.repo_wrapper
    args = Object()
    patch_file = FileUtils.join_path(self.dummy_patches_dir, PATCH_FILENAME)
    self.utils.add_file_changes_and_save_to_patch(patch_file)
    args.patch_file = patch_file
    yarn_dev_tools.create_review_branch(args)

    self.assertTrue(REVIEW_BRANCH in self.repo.heads,
                    f"Review branch does not exist: {REVIEW_BRANCH}")
    self.utils.verify_commit_message_of_branch(
        REVIEW_BRANCH, COMMIT_MSG_TEMPLATE.format(file=patch_file))