Example #1
    def test_added_files_type_using_function(self, repo, mocker):
        """
            Given:
                - A list of errors that should be checked
            When:
                - Running create_ignored_errors_list from validate manager
            Then:
                - verify that the ignored error list that comes out is correct
        """

        mocker.patch.object(BaseValidator, 'check_file_flags', return_value='')
        saved_stdout = sys.stdout
        pack = repo.create_pack('pack')
        pack.create_test_script()
        with ChangeCWD(pack.repo_path):
            os.mkdir('Packs/pack/TestPlaybooks/')
            os.system('mv Packs/pack/Scripts/sample_script/sample_script.yml Packs/pack/TestPlaybooks/')
            x = FilesValidator()
            try:
                out = StringIO()
                sys.stdout = out

                x.validate_added_files({'Packs/pack/TestPlaybooks/sample_script.yml'})
                assert 'Missing id in root' not in out.getvalue()
            except Exception:
                assert False
            finally:
                sys.stdout = saved_stdout
Example #2
def get_new_and_modified_integration_files(git_sha1):
    """Return 2 lists - list of new integrations and list of modified integrations since the commit of the git_sha1.

    Args:
        git_sha1 (str): The git sha of the commit against which we will run the 'git diff' command.

    Returns:
        (tuple): Returns a tuple of two lists, the file paths of the new integrations and modified integrations.
    """
    # get changed yaml files (filter only added and modified files)
    tag = get_last_release_version()
    file_validator = FilesValidator()
    change_log = run_command('git diff --name-status {}'.format(git_sha1))
    modified_files, added_files, removed_files, old_format_files = file_validator.get_modified_files(change_log, tag)
    all_integration_regexes = list(YML_INTEGRATION_REGEXES)  # copy so the module-level constant is not mutated
    all_integration_regexes.extend([INTEGRATION_REGEX, PACKS_INTEGRATION_REGEX, BETA_INTEGRATION_REGEX])

    new_integration_files = [
        file_path for file_path in added_files if checked_type(file_path, all_integration_regexes)
    ]

    modified_integration_files = [
        file_path for file_path in modified_files if
        isinstance(file_path, str) and checked_type(file_path, all_integration_regexes)
    ]

    return new_integration_files, modified_integration_files
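A hypothetical call site for the function above; the sha value below is only a placeholder:

    # Compare the working tree against a specific commit and split the integration files.
    new_integrations, modified_integrations = get_new_and_modified_integration_files('1a2b3c4d')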
Example #3
    def test_added_files_type_using_function(self, repo):
        """
            Given:
                - A file path to a new script, that is not located in a "regular" scripts path
            When:
                - verifying added files are valid
            Then:
                - verify that the validation detects the correct file type and passes successfully
        """
        saved_stdout = sys.stdout

        pack = repo.create_pack('pack')
        pack.create_test_script()
        with ChangeCWD(pack.repo_path):
            os.mkdir('Packs/pack/TestPlaybooks/')
            os.system('mv Packs/pack/Scripts/sample_script/sample_script.yml Packs/pack/TestPlaybooks/')
            x = FilesValidator()
            try:
                out = StringIO()
                sys.stdout = out

                x.validate_added_files({'Packs/pack/TestPlaybooks/sample_script.yml'})
                assert 'Missing id in root' not in out.getvalue()
            except Exception:
                assert False
            finally:
                sys.stdout = saved_stdout
Example #4
 def test_script_valid_rn(self, mocker, file_path, file_type):
     mocker.patch.object(ScriptValidator,
                         'is_valid_name',
                         return_value=True)
     self.mock_unifier()
     file_validator = FilesValidator(validate_conf_json=False)
     file_validator.validate_added_files(file_path, file_type)
     assert file_validator._is_valid
Example #5
def test_content_release_identifier_exists():
    """
    When running the validate command, it should get a git sha1 from the content repo.
    This test ensures that if someone changes the .circleci/config.yml scheme, it will fail.
    """
    fv = FilesValidator()
    fv.branch_name = 'master'
    sha1 = fv.get_content_release_identifier()
    assert sha1, 'GIT_SHA1 path in config.yml has been changed. Fix the demisto-sdk or revert changes in content repo.'
Example #6
    def test_validate_against_previous_version(self, prev_var, get_modified_and_added_files, release_iden, answer,
                                               is_valid, mocker):
        file_validator = FilesValidator(skip_conf_json=True, prev_ver=prev_var)
        file_validator._is_valid = is_valid
        mocker.patch.object(FilesValidator, 'get_modified_and_added_files', return_value=get_modified_and_added_files)
        mocker.patch.object(FilesValidator, 'get_content_release_identifier', return_value=release_iden)
        mocker.patch.object(FilesValidator, 'validate_modified_files', return_value=None)

        assert file_validator.validate_against_previous_version() is None
        assert file_validator._is_valid is answer
Example #7
    def test_get_error_ignore_list(self, mocker):
        files_path = os.path.normpath(
            os.path.join(__file__, f'{git_path()}/demisto_sdk/tests', 'test_files'))
        test_file = os.path.join(files_path, 'fake_pack/.pack-ignore')

        mocker.patch.object(FilesValidator, 'get_pack_ignore_file_path', return_value=test_file)

        file_validator = FilesValidator()
        ignore_errors_list = file_validator.get_error_ignore_list("fake")
        assert ignore_errors_list['file_name'] == ['BA101', 'IF107']
        assert 'SC100' not in ignore_errors_list['file_name']
Example #8
 def test_create_ignored_errors_list__file_validator(self):
     """
         Given:
             - A list of errors that should be checked
         When:
             - Running create_ignored_errors_list from file validator
         Then:
             - verify that the ignored error list that comes out is correct
     """
     file_validator = FilesValidator()
     errors_to_check = ["IN", "SC", "CJ", "DA", "DB", "DO", "ID", "DS", "IM", "IF", "IT", "RN", "RM", "PA", "PB",
                        "WD", "RP", "BA100", "BC100", "ST", "CL", "MP"]
     ignored_list = file_validator.create_ignored_errors_list(errors_to_check)
     assert ignored_list == ["BA101", "BA102", "BA103", "BA104", "BC101", "BC102", "BC103", "BC104"]
Example #9
 def test_script_valid_rn(self, mocker, file_path, file_type):
     """
         Given:
             - A valid script path
         When:
             - checking validity of added files
         Then:
             - return a True validation response
     """
     mocker.patch.object(ScriptValidator, 'is_valid_name', return_value=True)
     self.mock_unifier()
     file_validator = FilesValidator(skip_conf_json=True)
     file_validator.validate_added_files(file_path, file_type)
     assert file_validator._is_valid
Example #10
 def test_verify_no_dup_rn(self, added_files: set, expected: bool):
     """
         Given:
             - A list of added files
         When:
             - verifying there are no other new release notes.
         Then:
             - return a validation response
         Case 1: Release notes in different packs.
         Case 2: Release notes where one is in the same pack
     """
     file_validator = FilesValidator(skip_conf_json=True)
     file_validator.verify_no_dup_rn(added_files)
     assert file_validator._is_valid is expected
Example #11
    def test_run_all_validations_on_file(self, _, file_path, file_type):
        """
        Given
        - A file in packs or beta integration

        When
        - running run_all_validations_on_file on that file

        Then
        -  The file will be validated
        """
        file_validator = FilesValidator(skip_conf_json=True)
        file_validator.run_all_validations_on_file(file_path, file_type)
        assert file_validator._is_valid
Example #12
def test_is_py_or_yml_invalid():
    """
        Given:
            - A file path which contains a python script in a legacy yml schema
        When:
            - verifying the yml is valid
        Then:
            - return a False validation response
    """
    files_path = os.path.normpath(
        os.path.join(__file__, f'{git_path()}/demisto_sdk/tests', 'test_files'))
    test_file = os.path.join(files_path, 'UnifiedIntegrations/Integrations/integration-Symantec_Messaging_Gateway.yml')
    file_validator = FilesValidator()
    res = file_validator._is_py_script_or_integration(test_file)
    assert res is False
Example #13
def validate(config, **kwargs):
    sys.path.append(config.configuration.env_dir)

    file_path = kwargs['path']

    if file_path and not os.path.isfile(file_path):
        print_error(F'File {file_path} was not found')
        return 1
    else:
        validator = FilesValidator(configuration=config.configuration,
                                   is_backward_check=not kwargs['no_backward_comp'],
                                   is_circle=kwargs['post_commit'], prev_ver=kwargs['prev_ver'],
                                   validate_conf_json=kwargs['conf_json'], use_git=kwargs['use_git'],
                                   file_path=kwargs.get('path'))
        return validator.run()
Example #14
    def test_run_all_validations_on_file(self, _, file_path, file_type):
        """
        Given
        - A file in packs or beta integration

        When
        - running run_all_validations_on_file on that file

        Then
        -  If the file is not json, yml or md - it will be skipped (will be considered as valid)
        -  If the file is a CHANGELOG or DESCRIPTION it will be skipped (will be considered as valid)
        -  In any other case the file will be validated
        """
        file_validator = FilesValidator(validate_conf_json=False)
        file_validator.run_all_validations_on_file(file_path, file_type)
        assert file_validator._is_valid
Example #15
 def test_is_valid_rn(self, mocker, file_path, file_type):
     mocker.patch.object(OldReleaseNotesValidator, 'get_master_diff', return_value=None)
     mocker.patch.object(StructureValidator, 'is_valid_file', return_value=True)
     mocker.patch.object(IntegrationValidator, 'is_valid_subtype', return_value=True)
     mocker.patch.object(IntegrationValidator, 'is_valid_feed', return_value=True)
     mocker.patch.object(IntegrationValidator, 'is_valid_description', return_value=True)
     mocker.patch.object(IntegrationValidator, 'is_valid_version', return_value=True)
     mocker.patch.object(ImageValidator, 'is_valid', return_value=True)
     mocker.patch.object(DashboardValidator, 'is_id_equals_name', return_value=True)
     mocker.patch.object(ReputationValidator, 'is_id_equals_details', return_value=True)
     mocker.patch.object(IntegrationValidator, 'is_valid_beta', return_value=True)
     mocker.patch.object(IntegrationValidator, 'are_tests_configured', return_value=True)
     mocker.patch.object(PlaybookValidator, 'are_tests_configured', return_value=True)
     file_validator = FilesValidator(skip_conf_json=True)
     file_validator.validate_added_files(file_path, file_type)
     assert file_validator._is_valid
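Examples #4, #9 and #15 receive file_path and file_type as test parameters; in pytest these usually come from a @pytest.mark.parametrize decorator that the excerpts omit. A minimal sketch of how such a parametrization could look, with purely hypothetical values:

    import pytest

    class TestValidators:
        # Hypothetical parametrization; the real values are defined in the test module
        # and are not shown in the excerpts above.
        @pytest.mark.parametrize('file_path, file_type', [
            ('Packs/HelloWorld/Scripts/HelloWorldScript/HelloWorldScript.yml', 'script'),
        ])
        def test_is_valid_rn(self, mocker, file_path, file_type):
            ...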
Example #16
 def test_is_file_structure(self, source, target):
     # type: (str, str) -> None
     try:
         copyfile(source, target)
         assert FilesValidator(skip_conf_json=True).is_valid_structure()
     finally:
         os.remove(target)
Example #17
def test_is_py_or_yml():
    """
        Given:
            - A file path which contains a python script
        When:
            - verifying the yml is valid
        Then:
            - return a False validation response
    """
    files_path = os.path.normpath(
        os.path.join(__file__, f'{git_path()}/demisto_sdk/tests', 'test_files'))
    test_file = os.path.join(files_path, 'CortexXDR',
                             'Integrations/PaloAltoNetworks_XDR/PaloAltoNetworks_XDR.yml')
    file_validator = FilesValidator()
    res = file_validator._is_py_script_or_integration(test_file)
    assert res is False
Example #18
def update_pack_releasenotes(**kwargs):
    _pack = kwargs.get('pack')
    update_type = kwargs.get('update_type')
    pre_release = kwargs.get('pre_release')
    is_all = kwargs.get('all')
    modified, added, old, _packs = FilesValidator(
        use_git=True).get_modified_and_added_files()
    packs_existing_rn = set()
    for pf in added:
        if 'ReleaseNotes' in pf:
            pack_with_existing_rn = get_pack_name(pf)
            packs_existing_rn.add(pack_with_existing_rn)
    if len(packs_existing_rn):
        existing_rns = ''.join(f"{p}, " for p in packs_existing_rn)
        print_warning(
            f"Found existing release notes for the following packs: {existing_rns.rstrip(', ')}"
        )
    if len(_packs) > 1:
        pack_list = ''.join(f"{p}, " for p in _packs)
        if not is_all:
            if _pack:
                pass
            else:
                print_error(
                    f"Detected changes in the following packs: {pack_list.rstrip(', ')}\n"
                    f"To update release notes in a specific pack, please use the -p parameter "
                    f"along with the pack name.")
                sys.exit(0)
    if len(modified) < 1:
        print_warning('No changes were detected.')
        sys.exit(0)
    if is_all and not _pack:
        packs = list(_packs - packs_existing_rn)
        packs_list = ''.join(f"{p}, " for p in packs)
        print_warning(
            f"Adding release notes to the following packs: {packs_list.rstrip(', ')}"
        )
        for pack in packs:
            update_pack_rn = UpdateRN(pack=pack,
                                      update_type=update_type,
                                      pack_files=modified,
                                      pre_release=pre_release)
            update_pack_rn.execute_update()
    elif is_all and _pack:
        print_error(
            "Please remove the --all flag when specifying only one pack.")
        sys.exit(0)
    else:
        if _pack:
            if _pack in packs_existing_rn:
                print_error(
                    f"New release notes file already found for {_pack}. "
                    f"Please update manually or delete {pack_name_to_path(_pack)}"
                )
            else:
                update_pack_rn = UpdateRN(pack=_pack,
                                          update_type=update_type,
                                          pack_files=modified,
                                          pre_release=pre_release)
                update_pack_rn.execute_update()
Example #19
    def test_get_error_ignore_list__file_validator(self, mocker):
        """
            Given:
                - A file path to pack ignore
            When:
                - running get_error_ignore_list from file validator
            Then:
                - verify that the created ignored_errors list is correct
        """
        files_path = os.path.normpath(
            os.path.join(__file__, f'{git_path()}/demisto_sdk/tests', 'test_files'))
        test_file = os.path.join(files_path, 'fake_pack/.pack-ignore')

        mocker.patch.object(FilesValidator, 'get_pack_ignore_file_path', return_value=test_file)

        file_validator = FilesValidator()
        ignore_errors_list = file_validator.get_error_ignore_list("fake")
        assert ignore_errors_list['file_name'] == ['BA101', 'IF107']
        assert 'SC100' not in ignore_errors_list['file_name']
Example #20
def main():
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('version', help='Release version')
    arg_parser.add_argument('git_sha1',
                            help='commit sha1 to compare changes with')
    arg_parser.add_argument('server_version', help='Server version')
    arg_parser.add_argument('-d',
                            '--date',
                            # '%' is doubled so argparse's help formatter does not treat '%Y' as a format specifier
                            help='release date in the format %%Y-%%m-%%d',
                            required=False)
    args = arg_parser.parse_args()

    date = args.date if args.date else datetime.now().strftime('%Y-%m-%d')

    # get changed yaml/json files (filter only relevant changed files)
    files_validator = FilesValidator()
    change_log = run_command('git diff --name-status {}'.format(args.git_sha1))
    modified_files, added_files, _, _ = files_validator.get_modified_files(
        change_log)

    for file_path in get_changed_content_entities(modified_files, added_files):
        if not should_clear(file_path, args.server_version):
            continue
        rn_path = get_release_notes_file_path(file_path)
        if os.path.isfile(rn_path):
            # if file exist, mark the current notes as release relevant
            with open(rn_path, 'r+') as rn_file:
                text = rn_file.read()
                rn_file.seek(0)
                text = text.replace(
                    UNRELEASE_HEADER,
                    CHANGE_LOG_FORMAT.format(version=args.version, date=date))
                rn_file.write(text)
        else:
            # if file doesn't exist, create it with new header
            with open(rn_path, 'w') as rn_file:
                text = CHANGE_LOG_FORMAT.format(
                    version=args.version,
                    date=date) + get_new_header(file_path)
                rn_file.write(text)
            run_command('git add {}'.format(rn_path))
Example #21
def validate(config, **kwargs):
    sys.path.append(config.configuration.env_dir)

    file_path = kwargs['path'] or kwargs['input']
    if file_path and not os.path.isfile(file_path) and not os.path.isdir(file_path):
        print_error(f'File {file_path} was not found')
        return 1
    else:
        is_private_repo = tools.is_private_repository()

        validator = FilesValidator(configuration=config.configuration,
                                   is_backward_check=not kwargs['no_backward_comp'],
                                   only_committed_files=kwargs['post_commit'], prev_ver=kwargs['prev_ver'],
                                   skip_conf_json=kwargs['no_conf_json'], use_git=kwargs['use_git'],
                                   file_path=file_path,
                                   validate_all=kwargs.get('validate_all'),
                                   validate_id_set=kwargs['id_set'],
                                   skip_pack_rn_validation=kwargs['skip_pack_release_notes'],
                                   print_ignored_errors=kwargs['print_ignored_errors'],
                                   is_private_repo=is_private_repo, )
        return validator.run()
Example #22
    def test_unified_files_ignored(self):
        """
            Given
            - A unified yml file

            When
            - Validating it

            Then
            -  validator should ignore those files
        """
        file_validator = FilesValidator()
        file_validator.validate_modified_files(
            {INVALID_IGNORED_UNIFIED_INTEGRATION})
        assert file_validator._is_valid
        file_validator.validate_added_files(
            {INVALID_IGNORED_UNIFIED_INTEGRATION})
        assert file_validator._is_valid
        file_validator.run_all_validations_on_file(
            INVALID_IGNORED_UNIFIED_INTEGRATION)
        assert file_validator._is_valid
Example #23
def main():
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('version', help='Release version')
    arg_parser.add_argument('git_sha1',
                            help='commit sha1 to compare changes with')
    arg_parser.add_argument('asset_id', help='Asset ID')
    arg_parser.add_argument('server_version', help='Server version')
    arg_parser.add_argument('--github-token', help='Github token')
    args = arg_parser.parse_args()

    tag = get_last_release_version()
    print('Last release version: {}'.format(tag))

    # get changed yaml/json files (filter only relevant changed files)
    file_validator = FilesValidator()
    try:
        change_log = run_command('git diff --name-status {}'.format(
            args.git_sha1),
                                 exit_on_error=False)
    except RuntimeError:
        print_error(
            'Unable to get the SHA1 of the commit in which the version was released. This can happen if your '
            'branch is not updated with origin master. Merge from origin master and try again.\n'
            'If you\'re not on a fork, run "git merge origin/master".\n'
            'If you are on a fork, first set https://github.com/demisto/content to be '
            'your upstream by running "git remote add upstream https://github.com/demisto/content". After '
            'setting the upstream, run "git fetch upstream", and then run "git merge upstream/master". Doing '
            'these steps will merge your branch with content master as a base.'
        )
        sys.exit(1)
    else:
        modified_files, added_files, removed_files, _ = file_validator.get_modified_files(
            change_log)
        modified_files, added_files, removed_files = filter_packagify_changes(
            modified_files, added_files, removed_files, tag=tag)

        for file_path in added_files:
            create_file_release_notes('A', file_path)

        for file_path in modified_files:
            create_file_release_notes('M', file_path)

        for file_path in removed_files:
            # content entities are only yml/json files. ignore all the rest.
            if file_path.endswith('.yml') or file_path.endswith('.json'):
                handle_deleted_file(file_path, tag)

        # join all release notes
        res = []
        beta_res = []
        missing_release_notes = False
        for key in RELEASE_NOTES_ORDER:
            value = RELEASE_NOTE_GENERATOR[key]
            ans, beta_ans = value.generate_release_notes(args.server_version)
            if ans is None or value.is_missing_release_notes:
                missing_release_notes = True
            if ans:
                res.append(ans)
            if beta_ans:
                beta_res.append(beta_ans)

        release_notes = "\n---\n".join(res)
        beta_release_notes = "\n---\n".join(beta_res)
        create_content_descriptor(args.version,
                                  args.asset_id,
                                  release_notes,
                                  args.github_token,
                                  beta_rn=beta_release_notes)

        if missing_release_notes:
            print_error(
                "Error: some release notes are missing. See previous errors.")
            sys.exit(1)
Example #24
def update_pack_releasenotes(**kwargs):
    _pack = kwargs.get('pack')
    update_type = kwargs.get('update_type')
    pre_release = kwargs.get('pre_release')
    is_all = kwargs.get('all')
    specific_version = kwargs.get('version')
    print("Starting to update release notes.")
    modified, added, old, _packs = FilesValidator(
        use_git=True, silence_init_prints=True).get_modified_and_added_files()
    packs_existing_rn = set()
    for pf in added:
        if 'ReleaseNotes' in pf:
            pack_with_existing_rn = get_pack_name(pf)
            packs_existing_rn.add(pack_with_existing_rn)
    if len(packs_existing_rn):
        existing_rns = ''.join(f"{p}, " for p in packs_existing_rn)
        print_warning(
            f"Found existing release notes for the following packs: {existing_rns.rstrip(', ')}"
        )
    if len(_packs) > 1:
        pack_list = ''.join(f"{p}, " for p in _packs)
        if not is_all:
            if _pack:
                pass
            else:
                print_error(
                    f"Detected changes in the following packs: {pack_list.rstrip(', ')}\n"
                    f"To update release notes in a specific pack, please use the -p parameter "
                    f"along with the pack name.")
                sys.exit(0)
    if (len(modified) < 1) and (len(added) < 1):
        print_warning(
            'No changes were detected. If changes were made, please commit the changes '
            'and rerun the command')
        sys.exit(0)
    if is_all and not _pack:
        packs = list(_packs - packs_existing_rn)
        packs_list = ''.join(f"{p}, " for p in packs)
        print_warning(
            f"Adding release notes to the following packs: {packs_list.rstrip(', ')}"
        )
        for pack in packs:
            update_pack_rn = UpdateRN(pack=pack,
                                      update_type=update_type,
                                      pack_files=modified,
                                      pre_release=pre_release,
                                      added_files=added,
                                      specific_version=specific_version)
            update_pack_rn.execute_update()
    elif is_all and _pack:
        print_error(
            "Please remove the --all flag when specifying only one pack.")
        sys.exit(0)
    else:
        if _pack:
            if _pack in packs_existing_rn and update_type is not None:
                print_error(
                    f"New release notes file already found for {_pack}. "
                    f"Please update manually or run `demisto-sdk update-release-notes "
                    f"-p {_pack}` without specifying the update_type.")
            else:
                update_pack_rn = UpdateRN(pack=_pack,
                                          update_type=update_type,
                                          pack_files=modified,
                                          pre_release=pre_release,
                                          added_files=added,
                                          specific_version=specific_version)
                update_pack_rn.execute_update()
Example #25
 def test_create_ignored_errors_list(self, mocker):
     file_validator = FilesValidator()
     errors_to_check = ["IN", "SC", "CJ", "DA", "DB", "DO", "ID", "DS", "IM", "IF", "IT", "RN", "RM", "PA", "PB",
                        "WD", "RP", "BA100", "BC100", "ST", "CL", "MP"]
     ignored_list = file_validator.create_ignored_errors_list(errors_to_check)
     assert ignored_list == ["BA101", "BA102", "BC101", "BC102", "BC103", "BC104"]
Example #26
 def test_files_validator_validate_pack_unique_files(self):
     files_validator = FilesValidator(skip_conf_json=True)
     files_validator.validate_pack_unique_files({VALID_PACK})
     assert files_validator._is_valid
Example #27
 def test_pack_validation(self):
     file_validator = FilesValidator(skip_conf_json=True)
     file_validator.file_path = VALID_PACK
     file_validator.is_valid_structure()
     assert file_validator._is_valid is False
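Taken together, the examples follow a single pattern: construct a FilesValidator with the desired flags, then call one of its entry points (run, is_valid_structure, validate_added_files, and so on). A minimal sketch of that pattern, using only arguments and methods that appear in the examples above; the import path is an assumption and may differ between demisto-sdk versions:

    import sys

    # Assumed import path (as in older demisto-sdk releases); adjust if the module has moved.
    from demisto_sdk.commands.validate.file_validator import FilesValidator

    validator = FilesValidator(skip_conf_json=True, use_git=True)
    # run() drives the full validation flow; the CLI wrappers in Examples #13 and #21 return its result.
    sys.exit(validator.run())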