def test_are_modules_installed_for_verify_false_res(tmp_path):
    r = str(tmp_path / "README.md")
    with open(r, 'w') as f:
        f.write('Test readme')
    readme_validator = ReadMeValidator(r)
    # modules will be missing in tmp_path
    assert not readme_validator.are_modules_installed_for_verify(tmp_path)
def test_is_image_path_valid():
    """
    Given
        - A README file with 2 invalid image paths in it.
    When
        - Run validate on README file
    Then
        - Ensure:
            - Validation fails
            - Both invalid image paths were caught correctly
            - Valid image path was not caught
            - Alternative paths were suggested
    """
    captured_output = io.StringIO()
    sys.stdout = captured_output  # redirect stdout.
    images_paths = [
        "https://github.com/demisto/content/blob/123/Packs/AutoFocus/doc_files/AutoFocusPolling.png",
        "https://github.com/demisto/content/blob/123/Packs/FeedOffice365/doc_files/test.png",
        "https://github.com/demisto/content/raw/123/Packs/valid/doc_files/test.png"
    ]
    alternative_images_paths = [
        "https://github.com/demisto/content/raw/123/Packs/AutoFocus/doc_files/AutoFocusPolling.png",
        "https://github.com/demisto/content/raw/123/Packs/FeedOffice365/doc_files/test.png"
    ]
    readme_validator = ReadMeValidator(INVALID_MD)
    result = readme_validator.is_image_path_valid()
    sys.stdout = sys.__stdout__  # reset stdout.
    assert not result
    assert images_paths[0] in captured_output.getvalue()
    assert alternative_images_paths[0] in captured_output.getvalue()
    assert images_paths[1] in captured_output.getvalue()
    assert alternative_images_paths[1] in captured_output.getvalue()
    assert images_paths[2] not in captured_output.getvalue()
def validate_pack_readme_images(self):
    readme_file_path = os.path.join(self.pack_path, self.readme_file)
    readme_validator = ReadMeValidator(readme_file_path, ignored_errors=self.ignored_errors,
                                       specific_validations=self.specific_validations)
    errors = readme_validator.check_readme_relative_image_paths(is_pack_readme=True)
    errors += readme_validator.check_readme_absolute_image_paths(is_pack_readme=True)
    if errors:
        self._errors.extend(errors)
        return False
    return True
def test_is_file_valid(mocker, current, answer):
    readme_validator = ReadMeValidator(current)
    valid = readme_validator.are_modules_installed_for_verify(readme_validator.content_path)
    if not valid:
        pytest.skip('skipping mdx test. ' + MDX_SKIP_NPM_MESSAGE)
        return
    mocker.patch.dict(os.environ, {'DEMISTO_README_VALIDATION': 'yes', 'DEMISTO_MDX_CMD_VERIFY': 'yes'})
    assert readme_validator.is_valid_file() is answer
    assert not ReadMeValidator._MDX_SERVER_PROCESS
def test_verify_readme_image_paths(mocker):
    """
    Given
        - A README file (not pack README) with valid/invalid relative image paths
          and valid/invalid absolute image paths in it.
    When
        - Run validate on README file
    Then
        - Ensure:
            - Validation fails
            - Image paths were caught correctly
            - Valid paths are not caught
    """
    captured_output = io.StringIO()
    sys.stdout = captured_output  # redirect stdout.
    readme_validator = ReadMeValidator(IMAGES_MD)
    mocker.patch.object(GitUtil, 'get_current_working_branch', return_value='branch_name')
    with requests_mock.Mocker() as m:
        # Mock get requests
        m.get('https://github.com/demisto/test1.png',
              status_code=404, text="Test1", reason='just because')
        m.get('https://github.com/demisto/content/raw/test2.png',
              status_code=404, text="Test2")
        m.get('https://github.com/demisto/test3.png',
              status_code=200, text="Test3")
        is_valid = readme_validator.verify_readme_image_paths()
    sys.stdout = sys.__stdout__  # reset stdout.
    captured_output = captured_output.getvalue()

    assert not is_valid
    assert 'The following image relative path is not valid, please recheck it:\n' \
           '![Identity with High Risk Score](../../default.png)' in captured_output
    assert 'The following image relative path is not valid, please recheck it:\n' \
           '![Identity with High Risk Score](default.png)' not in captured_output
    assert 'Branch name was found in the URL, please change it to the commit hash:\n' \
           '![branch in url]' in captured_output
    assert 'Branch name was found in the URL, please change it to the commit hash:\n' \
           '![commit hash in url]' not in captured_output
    assert "\n".join(("[RM108] - Error in readme image: got HTTP response code 404, reason = just because",
                      "The following image link seems to be broken, please repair it:",
                      "![Identity with High Risk Score](https://github.com/demisto/test1.png)")) in captured_output
    assert "\n".join(("[RM108] - Error in readme image: got HTTP response code 404 ",
                      "The following image link seems to be broken, please repair it:",
                      "(https://github.com/demisto/content/raw/test2.png)")) in captured_output
    assert 'please repair it:\n' \
           '![Identity with High Risk Score](https://github.com/demisto/test3.png)' \
           not in captured_output
def test_context_only_runs_once_when_error_exist(mocker, integration, errors_found, errors_ignore, expected):
    """
    Given
        - README that contains changes and YML file
    When
        - Run validate on README file and YML
    Then
        - Ensure validation only runs once, either for YML or for README
    """
    readme_validator = ReadMeValidator(FAKE_INTEGRATION_README)
    mocker.patch.object(ReadMeValidator, '_get_error_lists', return_value=(errors_found, errors_ignore))
    result = readme_validator.is_context_different_in_yml()
    assert result == expected
def test_valid_sections(integration, file_input):
    """
    Given
        - Valid sections in different forms from SECTIONS
    When
        - Run validate on README file
    Then
        - Ensure no empty sections from the SECTIONS list
    """
    integration.readme.write(file_input)
    readme_path = integration.readme.path
    readme_validator = ReadMeValidator(readme_path)
    result = readme_validator.verify_no_empty_sections()
    assert result
def test_is_file_valid(mocker, current, answer):
    readme_validator = ReadMeValidator(current)
    valid = readme_validator.are_modules_installed_for_verify(readme_validator.content_path)
    if not valid:
        pytest.skip('skipping mdx test. ' + MDX_SKIP_NPM_MESSAGE)
        return
    with requests_mock.Mocker() as m:
        # Mock get requests
        m.get('https://github.com/demisto/content/blob/123/Packs/AutoFocus/doc_files/AutoFocusPolling.png',
              status_code=200, text="Test1")
        m.get('https://github.com/demisto/content/blob/123/Packs/FeedOffice365/doc_files/test.png',
              status_code=200, text="Test2")
        m.get('https://github.com/demisto/content/raw/123/Packs/valid/doc_files/test.png',
              status_code=200, text="Test3")
        mocker.patch.dict(os.environ, {'DEMISTO_README_VALIDATION': 'yes', 'DEMISTO_MDX_CMD_VERIFY': 'yes'})
        assert readme_validator.is_valid_file() is answer
        assert not ReadMeValidator._MDX_SERVER_PROCESS
def test_invalid_short_file(capsys):
    """
    Given
        - Non-empty README with less than 30 chars.
    When
        - Running validate on README file
    Then
        - Ensure verify on README fails
    """
    readme_validator = ReadMeValidator(INVALID3_MD)
    result = readme_validator.verify_readme_is_not_too_short()
    stdout, _ = capsys.readouterr()
    short_readme_error = 'Your Pack README is too small (29 chars). Please move its content to the pack ' \
                         'description or add more useful information to the Pack README. ' \
                         'Pack README files are expected to include a few sentences about the pack and/or images.'
    assert not result
    assert short_readme_error in stdout
def test_verify_no_default_sections_left(integration, capsys, file_input, section):
    """
    Given
        - A README that contains default sections that need to be changed
    When
        - Run validate on README file
    Then
        - Ensure no default sections in the README file
    """
    integration.readme.write(file_input)
    readme_path = integration.readme.path
    readme_validator = ReadMeValidator(readme_path)
    result = readme_validator.verify_no_default_sections_left()
    stdout, _ = capsys.readouterr()
    section_error = f'Replace "{section}" with a suitable info.'
    assert not result
    assert section_error in stdout
def test_context_difference_created_is_valid(mocker, difference_found, expected):
    """
    Given
        - README that contains changes and YML file
    When
        - Run validate on README file and YML
    Then
        - Ensure the difference context is correct
    """
    mocker.patch('demisto_sdk.commands.common.hook_validations.readme.compare_context_path_in_yml_and_readme',
                 return_value=difference_found)
    readme_validator = ReadMeValidator(FAKE_INTEGRATION_README)
    handle_error_mock = mocker.patch.object(ReadMeValidator, 'handle_error')
    valid = readme_validator.is_context_different_in_yml()
    assert valid == expected
    if not valid:
        handle_error_mock.assert_called()
    else:
        handle_error_mock.assert_not_called()
def test_readme_ignore(integration, readme_fake_path, readme_text):
    """
    Check that packs in the ignore list are ignored.

    Given
        - README path of an ignored pack
    When
        - Run validate on README of the ignored pack
    Then
        - Ensure validation ignored the pack
    """
    integration.readme.write(readme_text)
    readme_path = integration.readme.path
    readme_validator = ReadMeValidator(readme_path)
    # change the pack path to readme_fake_path
    from pathlib import Path
    readme_validator.file_path = Path(readme_fake_path)
    readme_validator.pack_path = readme_validator.file_path.parent
    result = readme_validator.verify_no_default_sections_left()
    assert result
def test_unvalid_verify_no_empty_sections(integration, capsys, file_input, missing_section):
    """
    Given
        - Empty sections in different forms
    When
        - Run validate on README file
    Then
        - Ensure no empty sections from the SECTIONS list
    """
    integration.readme.write(file_input)
    readme_path = integration.readme.path
    readme_validator = ReadMeValidator(readme_path)
    result = readme_validator.verify_no_empty_sections()
    stdout, _ = capsys.readouterr()
    section_error = f'{missing_section} is empty, please elaborate or delete the section.'
    assert not result
    assert section_error in stdout
def test_combined_unvalid_verify_no_empty_sections(integration, capsys, file_input):
    """
    Given
        - A couple of empty sections
    When
        - Run validate on README file
    Then
        - Ensure no empty sections from the SECTIONS list
    """
    integration.readme.write(file_input)
    readme_path = integration.readme.path
    readme_validator = ReadMeValidator(readme_path)
    result = readme_validator.verify_no_empty_sections()
    stdout, _ = capsys.readouterr()
    error = 'Failed verifying README.md Error Message is: Troubleshooting is empty, please elaborate or delete the' \
            ' section.\nAdditional Information is empty, please elaborate or delete the section.'
    assert not result
    assert error in stdout
def test_is_file_valid_mdx_server(mocker, current, answer):
    ReadMeValidator.add_node_env_vars()
    with ReadMeValidator.start_mdx_server():
        readme_validator = ReadMeValidator(current)
        valid = readme_validator.are_modules_installed_for_verify(readme_validator.content_path)
        if not valid:
            pytest.skip('skipping mdx server test. ' + MDX_SKIP_NPM_MESSAGE)
            return
        mocker.patch.dict(os.environ, {'DEMISTO_README_VALIDATION': 'yes'})
        assert readme_validator.is_valid_file() is answer
        assert ReadMeValidator._MDX_SERVER_PROCESS is not None
def test_demisto_not_in_readme(repo):
    """
    Given
        - An integration README without the word 'Demisto'.
    When
        - Running verify_demisto_in_readme_content.
    Then
        - Ensure that the validation passes.
    """
    pack = repo.create_pack('PackName')
    integration = pack.create_integration('IntName')
    readme_path = glob.glob(os.path.join(os.path.dirname(integration.yml.path), '*README.md'))[0]
    with open(readme_path, 'w') as f:
        f.write('This checks if we have the word XSOAR in the README.')
    readme_validator = ReadMeValidator(integration.readme.path)
    assert readme_validator.verify_demisto_in_readme_content()
def test_demisto_in_repo_readme(mocker, repo):
    """
    Given
        - A repo README that contains the word 'Demisto'.
    When
        - Running verify_demisto_in_readme_content.
    Then
        - Ensure that the validation does not fail.
    """
    from pathlib import Path
    readme_path = Path(repo.path) / 'README.md'
    mocker.patch.object(ReadMeValidator, '__init__', return_value=None)
    with open(readme_path, 'w') as f:
        f.write('This checks if we have the word Demisto in the README.')
    with ChangeCWD(repo.path):
        readme_validator = ReadMeValidator()
        init_readmeValidator(readme_validator, repo, readme_path)
        assert readme_validator.verify_demisto_in_readme_content()
def test_verify_template_not_in_readme(repo):
    """
    Given
        - An integration README that contains the generic sentence '%%FILL HERE%%'.
    When
        - Running verify_template_not_in_readme.
    Then
        - Ensure that the validation fails.
    """
    pack = repo.create_pack('PackName')
    integration = pack.create_integration('IntName')
    readme_path = glob.glob(os.path.join(os.path.dirname(integration.yml.path), '*README.md'))[0]
    with open(readme_path, 'w') as f:
        f.write('This checks if we have the sentence %%FILL HERE%% in the README.')
    with ChangeCWD(repo.path):
        readme_validator = ReadMeValidator(integration.readme.path)
        assert not readme_validator.verify_template_not_in_readme()
def test_is_file_valid(current, answer):
    readme_validator = ReadMeValidator(current)
    valid = readme_validator.are_modules_installed_for_verify()
    env_var = os.environ.get('DEMISTO_README_VALIDATION')
    if valid and env_var:
        assert readme_validator.is_valid_file() is answer
def run_all_validations_on_file(self, file_path: str, file_type: str = None) -> None:
    """Runs all validations on file specified in 'file_path'

    Args:
        file_path: A relative content path to a file to be validated
        file_type: The output of 'find_type' method
    """
    file_extension = os.path.splitext(file_path)[-1]
    # We validate only yml, json and .md files
    if file_extension not in ['.yml', '.json', '.md']:
        return
    # Ignoring changelog and description files since these are checked on the integration validation
    if 'changelog' in file_path.lower() or 'description' in file_path.lower():
        return
    # unified files should not be validated
    if file_path.endswith('_unified.yml'):
        return

    print(f'Validating {file_path}')
    if 'README' in file_path:
        readme_validator = ReadMeValidator(file_path)
        if not readme_validator.is_valid_file():
            self._is_valid = False
        return

    structure_validator = StructureValidator(file_path, predefined_scheme=file_type)
    if not structure_validator.is_valid_file():
        self._is_valid = False

    elif re.match(TEST_PLAYBOOK_REGEX, file_path, re.IGNORECASE):
        playbook_validator = PlaybookValidator(structure_validator)
        if not playbook_validator.is_valid_playbook():
            self._is_valid = False

    elif re.match(PLAYBOOK_REGEX, file_path, re.IGNORECASE) or file_type == 'playbook':
        playbook_validator = PlaybookValidator(structure_validator)
        if not playbook_validator.is_valid_playbook(validate_rn=False):
            self._is_valid = False

    elif checked_type(file_path, INTEGRATION_REGXES) or file_type == 'integration':
        integration_validator = IntegrationValidator(structure_validator)
        if not integration_validator.is_valid_file(validate_rn=False):
            self._is_valid = False

    elif checked_type(file_path, YML_ALL_SCRIPTS_REGEXES) or file_type == 'script':
        # Set file path to the yml file
        structure_validator.file_path = file_path
        script_validator = ScriptValidator(structure_validator)
        if not script_validator.is_valid_file(validate_rn=False):
            self._is_valid = False

    elif checked_type(file_path, YML_BETA_INTEGRATIONS_REGEXES) or file_type == 'betaintegration':
        integration_validator = IntegrationValidator(structure_validator)
        if not integration_validator.is_valid_beta_integration():
            self._is_valid = False

    # incident fields and indicator fields are using the same scheme.
    elif checked_type(file_path, JSON_INDICATOR_AND_INCIDENT_FIELDS) or \
            file_type in ('incidentfield', 'indicatorfield'):
        incident_field_validator = IncidentFieldValidator(structure_validator)
        if not incident_field_validator.is_valid_file(validate_rn=False):
            self._is_valid = False

    elif checked_type(file_path, JSON_ALL_INDICATOR_TYPES_REGEXES) or file_type == 'reputation':
        reputation_validator = ReputationValidator(structure_validator)
        if not reputation_validator.is_valid_file(validate_rn=False):
            self._is_valid = False

    elif checked_type(file_path, JSON_ALL_LAYOUT_REGEXES) or file_type == 'layout':
        layout_validator = LayoutValidator(structure_validator)
        if not layout_validator.is_valid_layout(validate_rn=False):
            self._is_valid = False

    elif checked_type(file_path, JSON_ALL_DASHBOARDS_REGEXES) or file_type == 'dashboard':
        dashboard_validator = DashboardValidator(structure_validator)
        if not dashboard_validator.is_valid_dashboard(validate_rn=False):
            self._is_valid = False

    elif checked_type(file_path, JSON_ALL_INCIDENT_TYPES_REGEXES) or file_type == 'incidenttype':
        incident_type_validator = IncidentTypeValidator(structure_validator)
        if not incident_type_validator.is_valid_incident_type(validate_rn=False):
            self._is_valid = False

    elif checked_type(file_path, CHECKED_TYPES_REGEXES):
        print(f'Could not find validations for file {file_path}')

    else:
        print_error('The file type of {} is not supported in validate command'.format(file_path))
        print_error('validate command supports: Integrations, Scripts, Playbooks, dashboards, incident types, '
                    'reputations, Incident fields, Indicator fields, Images, Release notes, '
                    'Layouts and Descriptions')
        self._is_valid = False
def validate_modified_files(self, modified_files):  # noqa: C901
    """Validate the modified files from your branch.

    In case we encounter an invalid file we set the self._is_valid param to False.

    Args:
        modified_files (set): A set of the modified files in the current branch.
    """
    for file_path in modified_files:
        old_file_path = None
        if isinstance(file_path, tuple):
            old_file_path, file_path = file_path

        print('Validating {}'.format(file_path))
        if not checked_type(file_path):
            print_warning('- Skipping validation of non-content entity file.')
            continue

        if re.match(TEST_PLAYBOOK_REGEX, file_path, re.IGNORECASE):
            continue

        elif 'README' in file_path:
            readme_validator = ReadMeValidator(file_path)
            if not readme_validator.is_valid_file():
                self._is_valid = False
            continue

        structure_validator = StructureValidator(file_path, old_file_path=old_file_path)
        if not structure_validator.is_valid_file():
            self._is_valid = False

        if self.validate_id_set:
            if not self.id_set_validator.is_file_valid_in_set(file_path):
                self._is_valid = False

        elif checked_type(file_path, YML_INTEGRATION_REGEXES):
            image_validator = ImageValidator(file_path)
            if not image_validator.is_valid():
                self._is_valid = False

            description_validator = DescriptionValidator(file_path)
            if not description_validator.is_valid():
                self._is_valid = False

            integration_validator = IntegrationValidator(structure_validator)
            if self.is_backward_check and not integration_validator.is_backward_compatible():
                self._is_valid = False
            if not integration_validator.is_valid_file():
                self._is_valid = False

        elif checked_type(file_path, YML_BETA_INTEGRATIONS_REGEXES):
            image_validator = ImageValidator(file_path)
            if not image_validator.is_valid():
                self._is_valid = False

            description_validator = DescriptionValidator(file_path)
            if not description_validator.is_valid_beta_description():
                self._is_valid = False

            integration_validator = IntegrationValidator(structure_validator)
            if not integration_validator.is_valid_beta_integration():
                self._is_valid = False

        elif checked_type(file_path, [SCRIPT_REGEX]):
            script_validator = ScriptValidator(structure_validator)
            if self.is_backward_check and not script_validator.is_backward_compatible():
                self._is_valid = False
            if not script_validator.is_valid_file():
                self._is_valid = False

        elif checked_type(file_path, PLAYBOOKS_REGEXES_LIST):
            playbook_validator = PlaybookValidator(structure_validator)
            if not playbook_validator.is_valid_playbook(is_new_playbook=False):
                self._is_valid = False

        elif checked_type(file_path, PACKAGE_SCRIPTS_REGEXES):
            unifier = Unifier(os.path.dirname(file_path))
            yml_path, _ = unifier.get_script_package_data()
            # Set file path to the yml file
            structure_validator.file_path = yml_path
            script_validator = ScriptValidator(structure_validator)
            if self.is_backward_check and not script_validator.is_backward_compatible():
                self._is_valid = False
            if not script_validator.is_valid_file():
                self._is_valid = False

        elif re.match(IMAGE_REGEX, file_path, re.IGNORECASE):
            image_validator = ImageValidator(file_path)
            if not image_validator.is_valid():
                self._is_valid = False

        # incident fields and indicator fields are using the same scheme.
        elif checked_type(file_path, JSON_INDICATOR_AND_INCIDENT_FIELDS):
            incident_field_validator = IncidentFieldValidator(structure_validator)
            if not incident_field_validator.is_valid_file(validate_rn=True):
                self._is_valid = False
            if self.is_backward_check and not incident_field_validator.is_backward_compatible():
                self._is_valid = False

        elif checked_type(file_path, [REPUTATION_REGEX]):
            reputation_validator = ReputationValidator(structure_validator)
            if not reputation_validator.is_valid_file(validate_rn=True):
                self._is_valid = False

        elif checked_type(file_path, JSON_ALL_LAYOUT_REGEXES):
            layout_validator = LayoutValidator(structure_validator)
            if not layout_validator.is_valid_layout(validate_rn=True):
                self._is_valid = False

        elif checked_type(file_path, JSON_ALL_DASHBOARDS_REGEXES):
            dashboard_validator = DashboardValidator(structure_validator)
            if not dashboard_validator.is_valid_dashboard(validate_rn=True):
                self._is_valid = False

        elif checked_type(file_path, JSON_ALL_INCIDENT_TYPES_REGEXES):
            incident_type_validator = IncidentTypeValidator(structure_validator)
            if not incident_type_validator.is_valid_incident_type(validate_rn=True):
                self._is_valid = False
            if self.is_backward_check and not incident_type_validator.is_backward_compatible():
                self._is_valid = False

        elif 'CHANGELOG' in file_path:
            self.is_valid_release_notes(file_path)

        elif checked_type(file_path, CHECKED_TYPES_REGEXES):
            pass

        else:
            print_error("The file type of {} is not supported in validate command".format(file_path))
            print_error("'validate' command supports: Integrations, Scripts, Playbooks, "
                        "Incident fields, Indicator fields, Images, Release notes, Layouts and Descriptions")
            self._is_valid = False
def validate_added_files(self, added_files, file_type: str = None):  # noqa: C901
    """Validate the added files from your branch.

    In case we encounter an invalid file we set the self._is_valid param to False.

    Args:
        added_files (set): A set of the modified files in the current branch.
        file_type (str): Used only with -p flag (the type of the file).
    """
    for file_path in added_files:
        print('Validating {}'.format(file_path))

        if re.match(TEST_PLAYBOOK_REGEX, file_path, re.IGNORECASE) and not file_type:
            continue

        elif 'README' in file_path:
            readme_validator = ReadMeValidator(file_path)
            if not readme_validator.is_valid_file():
                self._is_valid = False
            continue

        structure_validator = StructureValidator(file_path, is_new_file=True, predefined_scheme=file_type)
        if not structure_validator.is_valid_file():
            self._is_valid = False

        if self.validate_id_set:
            if not self.id_set_validator.is_file_valid_in_set(file_path):
                self._is_valid = False
            if self.id_set_validator.is_file_has_used_id(file_path):
                self._is_valid = False

        elif re.match(PLAYBOOK_REGEX, file_path, re.IGNORECASE) or file_type == 'playbook':
            playbook_validator = PlaybookValidator(structure_validator)
            if not playbook_validator.is_valid_playbook():
                self._is_valid = False

        elif checked_type(file_path, YML_INTEGRATION_REGEXES) or file_type == 'integration':
            image_validator = ImageValidator(file_path)
            # if file_type (non git path) the image is not in a separate path
            image_validator.file_path = file_path if file_type else image_validator.file_path
            if not image_validator.is_valid():
                self._is_valid = False

            description_validator = DescriptionValidator(file_path)
            if not description_validator.is_valid():
                self._is_valid = False

            integration_validator = IntegrationValidator(structure_validator)
            if not integration_validator.is_valid_file(validate_rn=not file_type):
                self._is_valid = False

        elif checked_type(file_path, PACKAGE_SCRIPTS_REGEXES) or file_type == 'script':
            unifier = Unifier(os.path.dirname(file_path))
            yml_path, _ = unifier.get_script_package_data()
            # Set file path to the yml file
            structure_validator.file_path = yml_path
            script_validator = ScriptValidator(structure_validator)
            if not script_validator.is_valid_file(validate_rn=not file_type):
                self._is_valid = False

        elif re.match(BETA_INTEGRATION_REGEX, file_path, re.IGNORECASE) or \
                re.match(BETA_INTEGRATION_YML_REGEX, file_path, re.IGNORECASE):
            description_validator = DescriptionValidator(file_path)
            if not description_validator.is_valid_beta_description():
                self._is_valid = False

            integration_validator = IntegrationValidator(structure_validator)
            if not integration_validator.is_valid_beta_integration():
                self._is_valid = False

        elif re.match(IMAGE_REGEX, file_path, re.IGNORECASE):
            image_validator = ImageValidator(file_path)
            if not image_validator.is_valid():
                self._is_valid = False

        # incident fields and indicator fields are using the same scheme.
        elif checked_type(file_path, JSON_INDICATOR_AND_INCIDENT_FIELDS) or \
                file_type in ('incidentfield', 'indicatorfield'):
            incident_field_validator = IncidentFieldValidator(structure_validator)
            if not incident_field_validator.is_valid_file(validate_rn=not file_type):
                self._is_valid = False

        elif checked_type(file_path, [REPUTATION_REGEX]) or file_type == 'reputation':
            reputation_validator = ReputationValidator(structure_validator)
            if not reputation_validator.is_valid_file(validate_rn=not file_type):
                self._is_valid = False

        elif checked_type(file_path, JSON_ALL_LAYOUT_REGEXES) or file_type == 'layout':
            layout_validator = LayoutValidator(structure_validator)
            if not layout_validator.is_valid_layout(validate_rn=not file_type):
                self._is_valid = False

        elif checked_type(file_path, JSON_ALL_DASHBOARDS_REGEXES) or file_type == 'dashboard':
            dashboard_validator = DashboardValidator(structure_validator)
            if not dashboard_validator.is_valid_dashboard(validate_rn=not file_type):
                self._is_valid = False

        elif checked_type(file_path, JSON_ALL_INCIDENT_TYPES_REGEXES):
            incident_type_validator = IncidentTypeValidator(structure_validator)
            if not incident_type_validator.is_valid_incident_type(validate_rn=not file_type):
                self._is_valid = False

        elif 'CHANGELOG' in file_path:
            self.is_valid_release_notes(file_path)

        elif checked_type(file_path, CHECKED_TYPES_REGEXES):
            pass

        else:
            print_error("The file type of {} is not supported in validate command".format(file_path))
            print_error("validate command supports: Integrations, Scripts, Playbooks, "
                        "Incident fields, Indicator fields, Images, Release notes, Layouts and Descriptions")
            self._is_valid = False
def validate_readme(self, file_path, pack_error_ignore_list):
    readme_validator = ReadMeValidator(file_path, ignored_errors=pack_error_ignore_list,
                                       print_as_warnings=self.print_ignored_errors)
    return readme_validator.is_valid_file()