def test_check_deprecated_where_ignored_list_does_not_exist(repo):
    """
    Given
    - A deprecated integration yml.
    - No pre-existing ignored errors list for the integration.

    When
    - Running check_deprecated method.

    Then
    - Ensure the predefined deprecated ignored errors map the file path to the default error list.
    - Ensure the pack-ignore ignored errors stay empty.
    - Ensure the predefined-by-support ignored errors stay empty.
    """
    deprecated_integration = repo.create_pack('pack').create_integration('integration')
    deprecated_integration.yml.write_dict({'deprecated': True})
    yml_path = deprecated_integration.yml.path

    with ChangeCWD(repo.path):
        validator = BaseValidator(ignored_errors={})
        validator.check_deprecated(yml_path)
        assert validator.predefined_deprecated_ignored_errors == {
            yml_path: DEPRECATED_IGNORE_ERRORS_DEFAULT_LIST
        }
        assert not validator.ignored_errors
        assert not validator.predefined_by_support_ignored_errors
def test_check_deprecated_playbook(repo):
    """
    Given
    - A deprecated playbook yml (loaded from the Valid_Deprecated_Playbook test file).
      NOTE(review): the original docstring said "non-deprecated", which contradicts
      both the fixture file name and the expected deprecated ignore list below.

    When
    - Running check_deprecated method.

    Then
    - Ensure the predefined deprecated ignored errors list includes the deprecated
      default error list only.
    """
    pack = repo.create_pack('pack')
    # NOTE(review): the playbook is created via create_integration with a
    # 'playbook-' prefixed name — presumably intentional; confirm against the
    # TestSuite API if a create_playbook helper exists.
    playbook = pack.create_integration('playbook-somePlaybook')
    test_file_path = join(git_path(), 'demisto_sdk', 'tests', 'test_files')
    valid_deprecated_playbook_file_path = join(test_file_path, 'Packs', 'CortexXDR',
                                               'Playbooks', 'Valid_Deprecated_Playbook.yml')
    playbook.yml.write_dict(get_yaml(valid_deprecated_playbook_file_path))
    files_path = playbook.yml.path
    with ChangeCWD(repo.path):
        base_validator = BaseValidator(ignored_errors={})
        base_validator.check_deprecated(files_path)
        assert base_validator.predefined_deprecated_ignored_errors == {
            files_path: DEPRECATED_IGNORE_ERRORS_DEFAULT_LIST
        }
def test_check_support_status_partner_file(repo, mocker):
    """
    Given
    - A partner supported integration yml.

    When
    - Running check_support_status method.

    Then
    - Ensure the resulting ignored errors list includes the partner ignore-list.
    """
    pack = repo.create_pack('pack')
    integration = pack.create_integration('integration')
    metadata = {PACK_METADATA_SUPPORT: "partner"}
    mocker.patch.object(BaseValidator, 'get_metadata_file_content', return_value=metadata)
    pack.pack_metadata.write_json(metadata)

    with ChangeCWD(repo.path):
        validator = BaseValidator(ignored_errors={})
        validator.update_checked_flags_by_support_level(integration.yml.rel_path)
        assert validator.ignored_errors['integration.yml'] == PRESET_ERROR_TO_IGNORE['partner']
def test_handle_error_on_unignorable_error_codes(mocker, ignored_errors, error_code):
    """
    Given
    - An error code which is not allowed to be ignored.
    - Error codes / prefix error codes as the ignored errors from the pack-ignore file.

    When
    - Running handle_error method.

    Then
    - Ensure the correct error message is returned.
    - Ensure the correct error message is printed out.
    - Ensure the un-ignorable error is in FOUND_FILES_AND_ERRORS.
    - Ensure the un-ignorable error is not in FOUND_FILES_AND_IGNORED_ERRORS.
    """
    import click

    validator = BaseValidator(ignored_errors=ignored_errors)
    expected = f'[ERROR]: file_name: [{error_code}] can not be ignored in .pack-ignore\n'
    secho_mock = mocker.patch.object(click, 'secho')

    actual = validator.handle_error(error_message='',
                                    error_code=error_code,
                                    file_path='file_name',
                                    suggested_fix='fix')

    assert actual == expected
    assert secho_mock.called
    assert secho_mock.call_args.args[0] == expected
    entry = f'file_name - [{error_code}]'
    assert entry in FOUND_FILES_AND_ERRORS
    assert entry not in FOUND_FILES_AND_IGNORED_ERRORS
def test_check_support_status_certified_partner_file(repo, mocker):
    """
    Given
    - A certified partner supported integration yml.

    When
    - Running check_support_status method.

    Then
    - Ensure the resulting ignored errors list does NOT include the integration
      file name (no errors are auto-ignored for certified partner content).
      NOTE(review): the original docstring claimed the list "includes the
      community ignore-list", which contradicts the assertion below.
    """
    pack = repo.create_pack('pack')
    integration = pack.create_integration('integration')
    meta_json = {
        PACK_METADATA_SUPPORT: "partner",
        PACK_METADATA_CERTIFICATION: "certified"
    }
    mocker.patch.object(BaseValidator, 'get_metadata_file_content', return_value=meta_json)
    pack.pack_metadata.write_json(meta_json)
    with ChangeCWD(repo.path):
        base_validator = BaseValidator(ignored_errors={})
        # NOTE(review): uses the older integration.yml_path attribute, unlike the
        # sibling tests that use integration.yml.rel_path — confirm both still exist.
        base_validator.update_checked_flags_by_support_level(integration.yml_path)
        assert 'integration.yml' not in base_validator.ignored_errors
def test_check_support_status_community_file(repo, mocker):
    """
    Given
    - A community supported (not certified) integration yml.

    When
    - Running check_support_status method.

    Then
    - Ensure the resulting ignored errors list includes the community ignore-list
      for the integration file name.
      NOTE(review): the original docstring claimed the list "does not include the
      integration file name", which contradicts the assertion below.
    """
    pack = repo.create_pack('pack')
    integration = pack.create_integration('integration')
    meta_json = {
        PACK_METADATA_SUPPORT: "community",
        PACK_METADATA_CERTIFICATION: "not certified"
    }
    mocker.patch.object(BaseValidator, 'get_metadata_file_content', return_value=meta_json)
    pack.pack_metadata.write_json(meta_json)
    with ChangeCWD(repo.path):
        base_validator = BaseValidator(ignored_errors={})
        base_validator.update_checked_flags_by_support_level(integration.yml_path)
        assert base_validator.ignored_errors['integration.yml'] == PRESET_ERROR_TO_IGNORE['community']
def test_handle_error():
    """
    Given
    - An ignore errors list associated with a file.
    - An error, message, code and file paths.

    When
    - Running handle_error method.

    Then
    - Ensure the resulting error messages are correctly formatted.
    - Ensure ignored error codes return None.
    """
    validator = BaseValidator(ignored_errors={"file_name": ["BA101"]})

    # Non-ignored errors are formatted as 'path: [code] - message\n'.
    assert validator.handle_error("Error-message", "SC102", "PATH") == 'PATH: [SC102] - Error-message\n'
    assert validator.handle_error("another-error-message", "IN101", "path/to/file_name") == \
        'path/to/file_name: [IN101] - another-error-message\n'

    # BA101 is ignored for file_name, so no error message is produced.
    assert validator.handle_error("ignore-file-specific", "BA101", "path/to/file_name") is None
def test_check_support_status_community_file(repo, mocker):
    """
    Given
    - A community supported integration yml.

    When
    - Running check_support_status method.

    Then
    - Ensure the 'predefined by support ignored errors' list includes the community ignore-list.
    """
    pack = repo.create_pack('pack')
    integration = pack.create_integration('integration')
    metadata = {PACK_METADATA_SUPPORT: "community"}
    mocker.patch.object(BaseValidator, 'get_metadata_file_content', return_value=metadata)
    pack.pack_metadata.write_json(metadata)

    with ChangeCWD(repo.path):
        validator = BaseValidator(ignored_errors={})
        rel_path = integration.yml.rel_path
        validator.update_checked_flags_by_support_level(rel_path)
        assert validator.predefined_by_support_ignored_errors[rel_path] == PRESET_ERROR_TO_IGNORE['community']
def test_yml_has_test_key(file_path, schema, expected):
    # type: (str, str, bool) -> None
    """
    Given
    - A yml file test playbook list and the yml file type.

    When
    - Checking if the file has a test playbook.

    Then
    - Ensure 'yml_has_test_key' answers accordingly.
    """
    structure = StructureValidator(file_path, predefined_scheme=schema)
    validator = BaseValidator(structure)
    test_playbooks = structure.current_file.get('tests')
    assert validator.yml_has_test_key(test_playbooks, schema) == expected
def test_json_output_with_unified_yml_image_error(self, repo):
    """
    Given
    - A ui applicable image error that occurred in a unified yml.
    - An existing and an empty json_outputs file.

    When
    - Running json_output method.

    Then
    - Ensure the json outputs file is created and holds the json error in the `outputs` field.
    - Ensure the entityType is 'image'.
    """
    pack = repo.create_pack('PackName')
    integration = pack.create_integration('MyInt')
    integration.create_default_integration()
    json_path = os.path.join(repo.path, 'valid_json.json')
    # Create the (empty) outputs file; close the handle instead of leaking it.
    open(json_path, "x").close()
    base = BaseValidator(json_file_path=json_path)
    ui_applicable_error_message, ui_applicable_error_code = Errors.image_too_large()
    expected_json_1 = [{
        'filePath': integration.yml.path,
        'fileType': 'yml',
        'entityType': 'image',
        'errorType': 'Settings',
        'name': 'Sample',
        'severity': 'error',
        'errorCode': ui_applicable_error_code,
        'message': ui_applicable_error_message,
        'ui': True,
        'relatedField': 'image',
        # Was "'validate': 'linter'" — key/value swapped relative to every
        # sibling test, which expects a "'linter': 'validate'" entry.
        'linter': 'validate'
    }]

    with ChangeCWD(repo.path):
        # create new file
        base.json_output(integration.yml.path, ui_applicable_error_code,
                         ui_applicable_error_message, False)
        with open(base.json_file_path, 'r') as f:
            json_output = json.load(f)
        # list.sort() returns None, so the original
        # `json_output.sort() == expected_json_1.sort()` compared None == None
        # and could never fail. Compare the lists directly instead.
        assert json_output == expected_json_1
def test_json_output_with_json_file(self, repo):
    """
    Given
    - A ui applicable error.
    - An existing and an empty json_outputs file.

    When
    - Running json_output method.

    Then
    - Ensure the json outputs file is created and holds the json error in the `outputs` field.
    - Ensure it's not failing because the file is empty.
    """
    pack = repo.create_pack('PackName')
    integration = pack.create_integration('MyInt')
    integration.create_default_integration()
    json_path = os.path.join(repo.path, 'valid_json.json')
    # Create the (empty) outputs file; close the handle instead of leaking it.
    open(json_path, "x").close()
    base = BaseValidator(json_file_path=json_path)
    ui_applicable_error_message, ui_applicable_error_code = Errors.wrong_display_name(
        'param1', 'param2')
    expected_json_1 = [{
        'filePath': integration.yml.path,
        'fileType': 'yml',
        'entityType': 'integration',
        'errorType': 'Settings',
        'name': 'Sample',
        'severity': 'error',
        'errorCode': ui_applicable_error_code,
        'message': ui_applicable_error_message,
        'ui': True,
        'relatedField': '<parameter-name>.display',
        'linter': 'validate'
    }]

    with ChangeCWD(repo.path):
        # create new file
        base.json_output(integration.yml.path, ui_applicable_error_code,
                         ui_applicable_error_message, False)
        with open(base.json_file_path, 'r') as f:
            json_output = json.load(f)
        # list.sort() returns None, so the original
        # `json_output.sort() == expected_json_1.sort()` compared None == None
        # and could never fail. Compare the lists directly instead.
        assert json_output == expected_json_1
def test_check_deprecated_playbook(repo):
    """
    Given
    - A playbook yml marked as hidden (treated like deprecated content).
      NOTE(review): the original docstring said "non-deprecated"; the assertion
      below expects the deprecated default ignore list, so 'hidden' presumably
      triggers the same handling — confirm in check_deprecated.

    When
    - Running check_deprecated method.

    Then
    - Ensure the resulting ignored errors list includes the deprecated default
      error list only.
    """
    pack = repo.create_pack('pack')
    # NOTE(review): playbook created via create_integration with a 'playbook-'
    # prefixed name — confirm against the TestSuite API.
    playbook = pack.create_integration('playbook-somePlaybook')
    playbook.yml.write_dict({'hidden': True})
    files_path = playbook.yml.path
    with ChangeCWD(repo.path):
        base_validator = BaseValidator(ignored_errors={})
        base_validator.check_deprecated(files_path)
        assert base_validator.ignored_errors['playbook-somePlaybook.yml'] == DEPRECATED_IGNORE_ERRORS_DEFAULT_LIST
def test_check_deprecated_non_deprecated_integration_no_ignored_errors(repo):
    """
    Given
    - A non-deprecated integration yml.
    - No pre-existing ignored errors list for the integration.

    When
    - Running check_deprecated method.

    Then
    - Ensure there is no resulting ignored errors list.
    """
    integration = repo.create_pack('pack').create_integration('integration')
    integration.yml.write_dict({'deprecated': False})

    with ChangeCWD(repo.path):
        validator = BaseValidator(ignored_errors={})
        validator.check_deprecated(integration.yml.path)
        assert 'integration' not in validator.ignored_errors
def test_check_deprecated_non_deprecated_integration_with_ignored_errors(repo):
    """
    Given
    - A non-deprecated integration yml.
    - A pre-existing ignored errors list for the integration.

    When
    - Running check_deprecated method.

    Then
    - Ensure the resulting ignored errors list is the pre-existing one.
    """
    pack = repo.create_pack('pack')
    integration = pack.create_integration('integration')
    integration.write_yml({'deprecated': False})
    preexisting_ignores = {'integration.yml': ["BA101"]}

    with ChangeCWD(repo.path):
        validator = BaseValidator(ignored_errors=preexisting_ignores)
        validator.check_deprecated(integration.yml_path)
        assert validator.ignored_errors['integration.yml'] == ['BA101']
def test_check_deprecated_where_ignored_list_does_not_exist(repo):
    """
    Given
    - A deprecated integration yml.
    - No pre-existing ignored errors list for the integration.

    When
    - Running check_deprecated method.

    Then
    - Ensure the resulting ignored errors list contains only the deprecated default error list.
    """
    integration = repo.create_pack('pack').create_integration('integration')
    integration.yml.write_dict({'deprecated': True})

    with ChangeCWD(repo.path):
        validator = BaseValidator(ignored_errors={})
        validator.check_deprecated(integration.yml.path)
        assert validator.ignored_errors['integration.yml'] == DEPRECATED_IGNORE_ERRORS_DEFAULT_LIST
def test_check_deprecated_where_ignored_list_exists(repo):
    """
    Given
    - A deprecated integration yml.
    - A pre-existing ignored errors list for the integration.

    When
    - Running check_deprecated method.

    Then
    - Ensure the resulting ignored errors list contains the pre-existing errors
      followed by the deprecated default error list.
    """
    pack = repo.create_pack('pack')
    integration = pack.create_integration('integration')
    integration.write_yml({'deprecated': True})
    preexisting_ignores = {'integration.yml': ['BA101']}

    with ChangeCWD(repo.path):
        validator = BaseValidator(ignored_errors=preexisting_ignores)
        validator.check_deprecated(integration.yml_path)
        expected = ["BA101"] + DEPRECATED_IGNORE_ERRORS_DEFAULT_LIST
        assert validator.ignored_errors['integration.yml'] == expected
def test_handle_error():
    """
    Given
    - An ignore errors list associated with a file.
    - An error, message, code and file paths.

    When
    - Running handle_error method.

    Then
    - Ensure the resulting error messages are correctly formatted.
    - Ensure ignored error codes return None.
    - Ensure non ignored errors are in FOUND_FILES_AND_ERRORS list.
    - Ensure ignored errors are not in FOUND_FILES_AND_ERRORS and are in
      FOUND_FILES_AND_IGNORED_ERRORS.
    """
    base_validator = BaseValidator(ignored_errors={"file_name": ["BA101"]},
                                   print_as_warnings=True)

    # Pre-mark these files as already checked (flag checks are covered separately).
    # BUG FIX: set.union() returns a NEW set and does not mutate checked_files,
    # so the original call was a no-op; update() mutates in place as intended.
    base_validator.checked_files.update({'PATH', "file_name"})

    formatted_error = base_validator.handle_error("Error-message", "SC102", "PATH")
    assert formatted_error == 'PATH: [SC102] - Error-message\n'
    assert 'PATH - [SC102]' in FOUND_FILES_AND_ERRORS

    formatted_error = base_validator.handle_error("another-error-message", "IN101",
                                                  "path/to/file_name")
    assert formatted_error == 'path/to/file_name: [IN101] - another-error-message\n'
    assert 'path/to/file_name - [IN101]' in FOUND_FILES_AND_ERRORS

    formatted_error = base_validator.handle_error("ignore-file-specific", "BA101",
                                                  "path/to/file_name")
    assert formatted_error is None
    assert 'path/to/file_name - [BA101]' not in FOUND_FILES_AND_ERRORS
    assert 'path/to/file_name - [BA101]' in FOUND_FILES_AND_IGNORED_ERRORS
def test_check_support_status_xsoar_file(repo, mocker):
    """
    Given
    - An xsoar supported integration yml.

    When
    - Running check_support_status method.

    Then
    - Ensure the resulting ignored errors list does not include the integration file name.
    """
    pack = repo.create_pack('pack')
    integration = pack.create_integration('integration')
    metadata = {PACK_METADATA_SUPPORT: "xsoar"}
    mocker.patch.object(BaseValidator, 'get_metadata_file_content', return_value=metadata)
    pack.pack_metadata.write_json(metadata)

    with ChangeCWD(repo.path):
        validator = BaseValidator(ignored_errors={})
        validator.update_checked_flags_by_support_level(integration.yml.rel_path)
        assert 'integration.yml' not in validator.ignored_errors
def test_handle_error_file_with_path(pack):
    """
    Given
    - An ignore errors list associated with a file_path.
    - An error, message, code and file paths.

    When
    - Running handle_error method.

    Then
    - Ensure the right file is ignored when its full path is mentioned in .pack-ignore.
    - Ensure the resulting error messages are correctly formatted.
    - Ensure ignored error codes which can be ignored return None.
    - Ensure non ignored errors are in FOUND_FILES_AND_ERRORS list.
    - Ensure ignored errors are not in FOUND_FILES_AND_ERRORS and are in
      FOUND_FILES_AND_IGNORED_ERRORS.
    """
    integration = pack.create_integration("TestIntegration")
    # Repo-relative (Packs/...) paths, as they would appear in .pack-ignore.
    rel_path_integration_readme = integration.readme.path[integration.readme.path.find("Packs"):]
    rel_path_pack_readme = pack.readme.path[pack.readme.path.find("Packs"):]
    pack_ignore_text = f"""[file:{rel_path_integration_readme}]
ignore=ST109

[file:{rel_path_pack_readme}]
ignore=BA101"""
    pack.pack_ignore.write_text(pack_ignore_text)
    # Ignore BA101 only for the pack README and PA113 only for the integration
    # README — the assertions below check the mapping is per-path, not global.
    base_validator = BaseValidator(ignored_errors={
        rel_path_pack_readme: ["BA101"],
        rel_path_integration_readme: ["PA113"]
    }, print_as_warnings=True)

    # BA101 is not ignored for the integration README -> formatted error.
    formatted_error = base_validator.handle_error("Error-message", "BA101",
                                                  integration.readme.path)
    assert formatted_error == f'[ERROR]: {integration.readme.path}: [BA101] - Error-message\n'
    assert f'{integration.readme.path} - [BA101]' in FOUND_FILES_AND_ERRORS

    # PA113 is ignored for the integration README -> suppressed.
    formatted_error = base_validator.handle_error("Error-message", "PA113",
                                                  integration.readme.path)
    assert formatted_error is None
    assert f'{integration.readme.path} - [PA113]' not in FOUND_FILES_AND_ERRORS
    assert f'{integration.readme.path} - [PA113]' in FOUND_FILES_AND_IGNORED_ERRORS

    # PA113 is NOT ignored for the pack README -> formatted error.
    formatted_error = base_validator.handle_error("Error-message", "PA113",
                                                  pack.readme.path)
    assert formatted_error == f'[ERROR]: {pack.readme.path}: [PA113] - Error-message\n'
    assert f'{pack.readme.path} - [PA113]' in FOUND_FILES_AND_ERRORS

    # BA101 is ignored for the pack README -> suppressed.
    formatted_error = base_validator.handle_error("Error-message", "BA101",
                                                  pack.readme.path)
    assert formatted_error is None
    assert f'{pack.readme.path} - [BA101]' not in FOUND_FILES_AND_ERRORS
    assert f'{pack.readme.path} - [BA101]' in FOUND_FILES_AND_IGNORED_ERRORS
def __init__(self, is_backward_check=True, prev_ver=None, use_git=False,
             only_committed_files=False, print_ignored_files=False,
             skip_conf_json=True, validate_id_set=False, file_path=None,
             validate_all=False, is_external_repo=False,
             skip_pack_rn_validation=False, print_ignored_errors=False,
             silence_init_prints=False, no_docker_checks=False):
    """Initialize the validation run configuration from the given CLI-style flags."""
    # General configuration
    self.skip_docker_checks = False
    self.no_configuration_prints = silence_init_prints
    self.skip_conf_json = skip_conf_json
    self.is_backward_check = is_backward_check
    self.validate_in_id_set = validate_id_set
    # only_committed_files doubles as the "running on CI" flag.
    self.is_circle = only_committed_files
    self.validate_all = validate_all
    self.use_git = use_git
    self.skip_pack_rn_validation = skip_pack_rn_validation
    # Default comparison target when no previous version was supplied.
    self.prev_ver = prev_ver if prev_ver else 'origin/master'
    self.print_ignored_files = print_ignored_files
    self.print_ignored_errors = print_ignored_errors
    # Three-dot git diff (merge-base comparison).
    self.compare_type = '...'

    # Class constants
    # Bind a shared error handler so ignored errors can be printed as warnings.
    self.handle_error = BaseValidator(print_as_warnings=print_ignored_errors).handle_error
    self.file_path = file_path
    self.branch_name = ''
    self.changes_in_schema = False
    self.check_only_schema = False
    self.always_valid = False
    self.ignored_files = set()
    self.new_packs = set()
    self.skipped_file_types = (FileType.CHANGELOG, FileType.DESCRIPTION,
                               FileType.TEST_PLAYBOOK)

    if is_external_repo:
        if not self.no_configuration_prints:
            click.echo('Running in a private repository')
        # Private repos have no conf.json to validate against.
        self.skip_conf_json = True

    if validate_all:
        # No need to check docker images on build branch hence we do not check on -a mode
        self.skip_docker_checks = True
        self.skip_pack_rn_validation = True

    if self.validate_in_id_set:
        self.id_set_validator = IDSetValidator(is_circle=self.is_circle,
                                               configuration=Configuration())

    if no_docker_checks:
        self.skip_docker_checks = True
def validate_no_missing_release_notes(self, modified_files, added_files):
    """Validate that there are no missing RN for changed files.

    Args:
        modified_files (set): a set of modified files.
        added_files (set): a set of files that were added.

    Returns:
        bool. True if no missing RN found, False otherwise
    """
    click.secho("\n================= Checking for missing release notes =================\n",
                fg="bright_cyan")

    # Existing packs whose changed files are not RN / README / test files must
    # ship a new release note.
    skip_types = {FileType.RELEASE_NOTES, FileType.README,
                  FileType.TEST_PLAYBOOK, FileType.TEST_SCRIPT}
    packs_needing_rn = get_pack_names_from_files(modified_files, skip_file_types=skip_types)
    packs_with_rn = self.get_packs_with_added_release_notes(added_files)
    packs_missing_rn = packs_needing_rn.difference(packs_with_rn)

    if not packs_missing_rn:
        click.secho("No missing release notes found.\n", fg="bright_green")
        return True

    results = set()
    for pack_name in packs_missing_rn:
        # The NonSupported pack never requires release notes.
        if 'NonSupported' in pack_name:
            continue
        ignored_errors_list = self.get_error_ignore_list(pack_name)
        error_message, error_code = Errors.missing_release_notes_for_pack(pack_name)
        handled = BaseValidator(
            ignored_errors=ignored_errors_list,
            print_as_warnings=self.print_ignored_errors).handle_error(
                error_message, error_code, file_path=os.path.join(PACKS_DIR, pack_name))
        # handle_error returns a message when the error counts -> pack invalid.
        results.add(not handled)
    return all(results)
from demisto_sdk.commands.common.constants import (PACK_METADATA_CERTIFICATION, PACK_METADATA_SUPPORT) from demisto_sdk.commands.common.errors import (FOUND_FILES_AND_ERRORS, FOUND_FILES_AND_IGNORED_ERRORS, PRESET_ERROR_TO_CHECK, PRESET_ERROR_TO_IGNORE) from demisto_sdk.commands.common.hook_validations.base_validator import \ BaseValidator from TestSuite.test_tools import ChangeCWD DEPRECATED_IGNORE_ERRORS_DEFAULT_LIST = BaseValidator.create_reverse_ignored_errors_list( PRESET_ERROR_TO_CHECK['deprecated']) def test_handle_error(): """ Given - An ignore errors list associated with a file. - An error, message, code and file paths. When - Running handle_error method. Then - Ensure the resulting error messages are correctly formatted. - Ensure ignored error codes return None. - Ensure non ignored errors are in FOUND_FILES_AND_ERRORS list. - Ensure ignored error are not in FOUND_FILES_AND_ERRORS and in FOUND_FILES_AND_IGNORED_ERRORS """ base_validator = BaseValidator(ignored_errors={"file_name": ["BA101"]}, print_as_warnings=True)
def test_json_output(repo):
    """
    Given
    - Scenario 1:
      - A ui applicable error.
      - No pre existing json_outputs file.
    - Scenario 2:
      - A non ui applicable warning.
      - A pre existing json_outputs file.

    When
    - Running json_output method.

    Then
    - Scenario 1:
      - Ensure the json outputs file is created and it holds the json error in the `outputs` field.
    - Scenario 2:
      - Ensure the json outputs file is modified and holds the json warning in the `outputs` field.
    """
    pack = repo.create_pack('PackName')
    integration = pack.create_integration('MyInt')
    integration.create_default_integration()
    json_path = os.path.join(repo.path, 'valid_json.json')
    base = BaseValidator(json_file_path=json_path)
    ui_applicable_error_message, ui_applicable_error_code = Errors.wrong_display_name(
        'param1', 'param2')
    non_ui_applicable_error_message, non_ui_applicable_error_code = Errors.wrong_subtype()
    # Expected file content after the first (error) entry is written.
    expected_json_1 = {
        integration.yml.path: {
            "file-type": "yml",
            "entity-type": "integration",
            "display-name": "Sample",
            "outputs": [{
                "severity": "error",
                "code": ui_applicable_error_code,
                "message": ui_applicable_error_message,
                "ui": True,
                'related-field': '<parameter-name>.display'
            }]
        }
    }
    # Expected file content after the warning is appended to the same file entry.
    expected_json_2 = {
        integration.yml.path: {
            "file-type": "yml",
            "entity-type": "integration",
            "display-name": "Sample",
            "outputs": [{
                "severity": "error",
                "code": ui_applicable_error_code,
                "message": ui_applicable_error_message,
                "ui": True,
                'related-field': '<parameter-name>.display'
            }, {
                "severity": "warning",
                "code": non_ui_applicable_error_code,
                "message": non_ui_applicable_error_message,
                "ui": False,
                'related-field': 'subtype'
            }]
        }
    }
    with ChangeCWD(repo.path):
        # create new file
        base.json_output(integration.yml.path, ui_applicable_error_code,
                         ui_applicable_error_message, False)
        with open(base.json_file_path, 'r') as f:
            json_output = json.load(f)
        assert json_output == expected_json_1

        # update existing file
        base.json_output(integration.yml.path, non_ui_applicable_error_code,
                         non_ui_applicable_error_message, True)
        with open(base.json_file_path, 'r') as f:
            json_output = json.load(f)
        assert json_output == expected_json_2
def get_modified_and_added_files(compare_type, prev_ver, ignored_errors=None,
                                 no_configuration_prints=False, staged=False,
                                 print_ignored_files=False, is_circle=False,
                                 branch_name=None):
    """Get the modified and added files from a specific branch

    Args:
        is_circle (bool): Whether the code runs on circle build.
        print_ignored_files (bool): Whether to print ignored files.
        staged (bool): Whether to return only staged files
        no_configuration_prints (bool): Whether to print additional config prints
        ignored_errors (dict): A dict of ignored errors per file. Defaults to an
            empty dict (the previous `dict()` default was a shared mutable
            default argument).
        branch_name (str): the branch name
        compare_type (str): whether to run diff with two dots (..) or three (...)
        prev_ver (str): Against which branch to run the comparision - master/last release

    Returns:
        tuple. 3 sets representing modified files, added files and files of old format
        who have changed.
    """
    # BUG FIX: `ignored_errors=dict()` was a mutable default argument shared
    # across calls; default lazily instead.
    if ignored_errors is None:
        ignored_errors = {}
    if not branch_name:
        branch_name = get_current_working_branch()
    base_validator = BaseValidator(ignored_errors=ignored_errors)
    if not no_configuration_prints:
        if staged:
            click.echo("Collecting staged files only")
        else:
            click.echo("Collecting all committed files")

    prev_ver = add_origin(branch_name, prev_ver)
    # all committed changes of the current branch vs the prev_ver
    all_committed_files_string = run_command(
        f'git diff --name-status {prev_ver}{compare_type}refs/heads/{branch_name}')

    modified_files, added_files, _, old_format_files, changed_meta_files, ignored_files, new_packs = \
        filter_changed_files(all_committed_files_string, prev_ver,
                             print_ignored_files=print_ignored_files)

    if not is_circle:
        remote_configured = has_remote_configured()
        is_origin_demisto = is_origin_content_repo()
        if remote_configured and not is_origin_demisto:
            if not no_configuration_prints:
                click.echo(
                    "Collecting all local changed files from fork against the content master")

            # only changes against prev_ver (without local changes)
            all_changed_files_string = run_command(
                'git diff --name-status upstream/master...HEAD')
            modified_files_from_tag, added_files_from_tag, _, _, changed_meta_files_from_tag, \
                ignored_files_from_tag, new_packs_from_tag = \
                filter_changed_files(all_changed_files_string,
                                     print_ignored_files=print_ignored_files)

            # all local non-committed changes and changes against prev_ver
            outer_changes_files_string = run_command(
                'git diff --name-status --no-merges upstream/master...HEAD')
            nc_modified_files, nc_added_files, nc_deleted_files, nc_old_format_files, \
                nc_changed_meta_files, nc_ignored_files, nc_new_packs = \
                filter_changed_files(outer_changes_files_string,
                                     print_ignored_files=print_ignored_files)
        else:
            if (not is_origin_demisto and not remote_configured) and not no_configuration_prints:
                # Warn (non-fatal) that validation may behave unexpectedly
                # without a configured upstream remote.
                error_message, error_code = Errors.changes_may_fail_validation()
                base_validator.handle_error(error_message, error_code,
                                            file_path="General-Error",
                                            warning=True, drop_line=True)

            if not no_configuration_prints and not staged:
                click.echo("Collecting all local changed files against the content master")

            # only changes against prev_ver (without local changes)
            all_changed_files_string = run_command(
                'git diff --name-status {}'.format(prev_ver))
            modified_files_from_tag, added_files_from_tag, _, _, changed_meta_files_from_tag, \
                ignored_files_from_tag, new_packs_from_tag = \
                filter_changed_files(all_changed_files_string,
                                     print_ignored_files=print_ignored_files)

            # all local non-committed changes and changes against prev_ver
            outer_changes_files_string = run_command(
                'git diff --name-status --no-merges HEAD')
            nc_modified_files, nc_added_files, nc_deleted_files, nc_old_format_files, \
                nc_changed_meta_files, nc_ignored_files, nc_new_packs = \
                filter_changed_files(outer_changes_files_string,
                                     print_ignored_files=print_ignored_files)

        old_format_files = old_format_files.union(nc_old_format_files)
        # A file counts only if it changed both against the tag AND locally.
        modified_files = modified_files.union(
            modified_files_from_tag.intersection(nc_modified_files))
        added_files = added_files.union(
            added_files_from_tag.intersection(nc_added_files))
        changed_meta_files = changed_meta_files.union(
            changed_meta_files_from_tag.intersection(nc_changed_meta_files))
        ignored_files = ignored_files.union(
            ignored_files_from_tag.intersection(nc_ignored_files))
        new_packs = new_packs.union(
            new_packs_from_tag.intersection(nc_new_packs))

        # Locally deleted files are no longer modified/added/changed.
        modified_files = modified_files - set(nc_deleted_files)
        added_files = added_files - set(nc_deleted_files)
        changed_meta_files = changed_meta_files - set(nc_deleted_files)

    if staged:
        modified_files, added_files, old_format_files, changed_meta_files = \
            filter_staged_only(modified_files, added_files, old_format_files,
                               changed_meta_files)

    modified_packs = get_packs(modified_files).union(
        get_packs(old_format_files)).union(get_packs(added_files))

    return modified_files, added_files, old_format_files, changed_meta_files, \
        modified_packs, ignored_files, new_packs
def test_json_output(self, repo):
    """
    Given
    - Scenario 1:
      - A ui applicable error.
      - No pre existing json_outputs file.
    - Scenario 2:
      - A non ui applicable warning.
      - A pre existing json_outputs file.

    When
    - Running json_output method.

    Then
    - Scenario 1:
      - Ensure the json outputs file is created and it holds the json error in the `outputs` field.
    - Scenario 2:
      - Ensure the json outputs file is modified and holds the json warning in the `outputs` field.
    """
    pack = repo.create_pack('PackName')
    integration = pack.create_integration('MyInt')
    integration.create_default_integration()
    json_path = os.path.join(repo.path, 'valid_json.json')
    base = BaseValidator(json_file_path=json_path)
    ui_applicable_error_message, ui_applicable_error_code = Errors.wrong_display_name(
        'param1', 'param2')
    non_ui_applicable_error_message, non_ui_applicable_error_code = Errors.wrong_subtype()
    expected_json_1 = [{
        'filePath': integration.yml.path,
        'fileType': 'yml',
        'entityType': 'integration',
        'errorType': 'Settings',
        'name': 'Sample',
        'severity': 'error',
        'errorCode': ui_applicable_error_code,
        'message': ui_applicable_error_message,
        'ui': True,
        'relatedField': '<parameter-name>.display',
        # This entry was missing 'linter' although the identical entry in
        # expected_json_2 (and the sibling test) includes it.
        'linter': 'validate'
    }]
    expected_json_2 = [{
        'filePath': integration.yml.path,
        'fileType': 'yml',
        'entityType': 'integration',
        'errorType': 'Settings',
        'name': 'Sample',
        'severity': 'error',
        'errorCode': ui_applicable_error_code,
        'message': ui_applicable_error_message,
        'ui': True,
        'relatedField': '<parameter-name>.display',
        'linter': 'validate'
    }, {
        'filePath': integration.yml.path,
        'fileType': 'yml',
        'entityType': 'integration',
        'errorType': 'Settings',
        'name': 'Sample',
        'severity': 'warning',
        'errorCode': non_ui_applicable_error_code,
        'message': non_ui_applicable_error_message,
        'ui': False,
        'relatedField': 'subtype',
        'linter': 'validate'
    }]

    with ChangeCWD(repo.path):
        # create new file
        base.json_output(integration.yml.path, ui_applicable_error_code,
                         ui_applicable_error_message, False)
        with open(base.json_file_path) as f:
            json_output = json.load(f)
        # list.sort() returns None, so the original
        # `json_output.sort() == expected_json_1.sort()` compared None == None
        # and could never fail. Compare the lists directly instead.
        assert json_output == expected_json_1

        # update existing file
        base.json_output(integration.yml.path, non_ui_applicable_error_code,
                         non_ui_applicable_error_message, True)
        with open(base.json_file_path) as f:
            json_output = json.load(f)
        assert json_output == expected_json_2