Example no. 1
def test_generators_detection(attribute: str, content_type: tuple, items: int):
    pack = Content(TEST_CONTENT_REPO)
    generator_as_list = list(pack.__getattribute__(attribute))
    # Check that all objects are detected
    assert len(generator_as_list) == items
    # Check that every object was detected with the correct type
    for item in generator_as_list:
        assert isinstance(item, content_type)
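
The signature suggests this test is driven by pytest parametrization. A minimal sketch of the decorator that would sit above it; the attribute names, expected types, and item counts below are hypothetical, not the real test data:

import pytest

@pytest.mark.parametrize('attribute, content_type, items', [
    ('test_playbooks', (object,), 3),   # hypothetical values
    ('documentations', (object,), 2),   # hypothetical values
])
def test_generators_detection(attribute: str, content_type: tuple, items: int):
    ...  # body as shown above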
Example no. 2
def prepare_single_content_item_for_validation(filename: str, data: bytes, tmp_directory: str) -> str:
    content = Content(tmp_directory)
    pack_name = 'TmpPack'
    pack_dir = content.path / 'Packs' / pack_name
    # create pack_metadata.json file in TmpPack
    contrib_converter = ContributionConverter(name=pack_name, base_dir=tmp_directory, pack_dir_name=pack_name)
    contrib_converter.create_metadata_file({'description': 'Temporary Pack', 'author': 'xsoar'})
    prefix = '-'.join(filename.split('-')[:-1])
    containing_dir = pack_dir / ENTITY_TYPE_TO_DIR.get(prefix, 'Integrations')
    containing_dir.mkdir(exist_ok=True)
    data_as_string = data.decode()
    loaded_data = yaml.load(data_as_string)
    buff = io.StringIO()
    yaml.dump(loaded_data, buff)
    data_as_string = buff.getvalue()
    # write yaml integration file to file system
    file_path = containing_dir / filename
    file_path.write_text(data_as_string)
    file_type = find_type(str(file_path))
    file_type = file_type.value if file_type else file_type
    extractor = Extractor(
        input=str(file_path), file_type=file_type, output=containing_dir, no_logging=True, no_pipenv=True)
    # validate the resulting package files, ergo set path_to_validate to the package directory that results
    # from extracting the unified yaml to a package format
    extractor.extract_to_package_format()
    return extractor.get_output_path()
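
A hedged usage sketch of the function above; the filename and YAML bytes are made-up inputs. The function writes the item into a temporary TmpPack and returns the extracted package directory to validate:

import tempfile

with tempfile.TemporaryDirectory() as tmp_dir:
    yml_bytes = b'commonfields:\n  id: MyIntegration\n'  # made-up unified YAML
    package_path = prepare_single_content_item_for_validation(
        'integration-MyIntegration.yml', yml_bytes, tmp_dir)
    # package_path now points at the package directory produced by the Extractor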
Example no. 3
def mock_git(mocker):
    from demisto_sdk.commands.common.content import Content

    # Mock git working directory
    mocker.patch.object(Content, 'git')
    Content.git().working_tree_dir = TEST_CONTENT_REPO
    yield
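
As excerpted, mock_git is missing the @pytest.fixture decorator it presumably carries in conftest.py. A sketch of a hypothetical test consuming it by name:

def test_uses_mocked_git(mock_git):
    # Content.git().working_tree_dir now reports TEST_CONTENT_REPO
    pack = Content(TEST_CONTENT_REPO)
    assert pack.path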
Example no. 4
    def __init__(self, artifacts_path: str, content_version: str, suffix: str, zip: bool, packs: bool,
                 cpus: int):
        """ Content artifacts configuration

        Args:
            artifacts_path: existing destination directory for creating artifacts.
            content_version: release content version.
            packs: create only content_packs artifacts if True.
            suffix: suffix to add to every file we create.
            zip: True to zip all content artifacts into 3 different zip files with the same structure, else False.
            cpus: available CPUs on the machine.
        """
        self.suffix = suffix
        self.content_version = content_version
        self.zip_artifacts = zip
        self.only_content_packs = packs
        self.artifacts_path = Path(artifacts_path)
        self.content_new_path = self.artifacts_path / 'content_new'
        self.content_test_path = self.artifacts_path / 'content_test'
        self.content_packs_path = self.artifacts_path / 'content_packs'
        self.content_all_path = self.artifacts_path / 'all_content'
        self.cpus = cpus
        self.execution_start = time.time()
        self.content = Content.from_cwd()
        self.exit_code = EX_SUCCESS
Example no. 5
def mock_single_pack_git(mocker):
    """Mock git Repo object"""
    from demisto_sdk.commands.common.content import Content

    # Mock git working directory
    mocker.patch.object(Content, 'git')
    Content.git(
    ).working_tree_dir = TEST_DATA / 'content_repo_with_alternative_fields'
    yield
Example no. 6
    def __init__(self,
                 artifacts_path: str,
                 zip: bool,
                 packs: bool,
                 content_version: str,
                 suffix: str,
                 cpus: int,
                 id_set_path: str = '',
                 pack_names: str = 'all',
                 signature_key: str = '',
                 sign_directory: Path = None,
                 remove_test_playbooks: bool = True):
        """ Content artifacts configuration

        Args:
            artifacts_path: existing destination directory for creating artifacts.
            zip: True to zip all content artifacts into 3 different zip files with the same structure, else False.
            packs: create only content_packs artifacts if True.
            content_version: release content version.
            suffix: suffix to add to every file we create.
            cpus: available CPUs on the machine.
            id_set_path: the full path of id_set.json.
            pack_names: Packs to create artifacts for.
            signature_key: Base64 encoded signature key used for signing packs.
            sign_directory: Path to the signDirectory executable file.
            remove_test_playbooks: whether to remove test playbooks from content packs.
        """
        # options arguments
        self.artifacts_path = Path(artifacts_path)
        self.zip_artifacts = zip
        self.only_content_packs = packs
        self.content_version = content_version
        self.suffix = suffix
        self.cpus = cpus
        self.id_set_path = id_set_path
        self.pack_names = arg_to_list(pack_names)
        self.signature_key = signature_key
        self.signDirectory = sign_directory
        self.remove_test_playbooks = remove_test_playbooks

        # run related arguments
        self.content_new_path = self.artifacts_path / 'content_new'
        self.content_test_path = self.artifacts_path / 'content_test'
        self.content_packs_path = self.artifacts_path / 'content_packs'
        self.content_all_path = self.artifacts_path / 'all_content'
        self.content_uploadable_zips_path = self.artifacts_path / 'uploadable_packs'

        # inits
        self.content = Content.from_cwd()
        self.execution_start = time.time()
        self.exit_code = EX_SUCCESS
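
Assuming this __init__ belongs to the ArtifactsManager class seen in Example no. 10 (a later revision with signing options), a hedged instantiation sketch. Note that Content.from_cwd() means it must run from inside a content repo; all argument values here are illustrative:

manager = ArtifactsManager(
    artifacts_path='/tmp/artifacts',
    zip=True,
    packs=False,
    content_version='6.0.0',
    suffix='',
    cpus=4,
    id_set_path='',                   # or a path to id_set.json
    pack_names='CommonScripts,Base',  # arg_to_list turns this into a list
    signature_key='',                 # base64 signing key; empty skips signing
    sign_directory=None,
    remove_test_playbooks=True,
)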
Example no. 7
    def __init__(self,
                 file_paths: Optional[List] = None,
                 known_words_file_paths: Optional[List] = None,
                 no_camel_case: bool = False,
                 no_failure: bool = False,
                 expand_dictionary: bool = False,
                 templates: bool = False,
                 use_git: bool = False,
                 prev_ver: str = None,
                 release_notes_only: bool = False,
                 load_known_words_from_pack: bool = False):
        if templates:
            ReleaseNotesChecker(template_examples=True)
            sys.exit(0)

        # if nothing was entered, default to using git
        elif not file_paths and not use_git:
            use_git = True

        self.file_paths = file_paths if file_paths else []
        self.git_util = None

        if use_git:
            self.git_util = GitUtil(repo=Content.git())
            self.prev_ver = self.git_util.handle_prev_ver()[1]
        else:
            self.prev_ver = prev_ver if prev_ver else 'demisto/master'

        if release_notes_only:
            self.SUPPORTED_FILE_TYPES = [FileType.RELEASE_NOTES]
            # when running doc-review --release-notes there is no need to consider invalid yml/json schema files
            self.ignore_invalid_schema_file = True
        else:
            self.ignore_invalid_schema_file = False

        self.known_words_file_paths = known_words_file_paths if known_words_file_paths else []
        self.load_known_words_from_pack = load_known_words_from_pack
        self.known_pack_words_file_path = ''

        self.current_pack = None
        self.files: list = []
        self.spellchecker = SpellChecker()
        self.unknown_words = {}  # type:Dict
        self.no_camel_case = no_camel_case
        self.found_misspelled = False
        self.no_failure = no_failure
        self.expand_dictionary = expand_dictionary
        self.files_with_misspells = set()  # type:Set
        self.files_without_misspells = set()  # type:Set
        self.malformed_rn_files = set()  # type:Set
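
This constructor appears to come from demisto-sdk's doc-review command; assuming the class is named DocReviewer (the name is not shown in the excerpt), a usage sketch with a made-up file path:

reviewer = DocReviewer(
    file_paths=['Packs/MyPack/ReleaseNotes/1_0_1.md'],  # hypothetical path
    release_notes_only=True,  # narrows SUPPORTED_FILE_TYPES to release notes
)
# With file_paths empty and use_git False, the constructor flips use_git to
# True and takes prev_ver from GitUtil.handle_prev_ver().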
Example no. 8
def prepare_single_content_item_for_validation(
        filename: str, data: bytes, tmp_directory: str) -> Tuple[str, Dict]:
    content = Content(tmp_directory)
    pack_name = 'TmpPack'
    pack_dir = content.path / 'Packs' / pack_name
    # create pack_metadata.json file in TmpPack
    contrib_converter = ContributionConverter(name=pack_name,
                                              base_dir=tmp_directory,
                                              pack_dir_name=pack_name)
    contrib_converter.create_metadata_file({
        'description': 'Temporary Pack',
        'author': 'xsoar'
    })
    prefix = '-'.join(filename.split('-')[:-1])
    containing_dir = pack_dir / ENTITY_TYPE_TO_DIR.get(prefix, 'Integrations')
    containing_dir.mkdir(exist_ok=True)
    is_json = filename.casefold().endswith('.json')
    data_as_string = data.decode()
    loaded_data = json.loads(data_as_string) if is_json else yaml.load(
        data_as_string)
    if is_json:
        data_as_string = json.dumps(loaded_data)
    else:
        buff = io.StringIO()
        yaml.dump(loaded_data, buff)
        data_as_string = buff.getvalue()
    # write content item file to file system
    file_path = containing_dir / filename
    file_path.write_text(data_as_string)
    file_type = find_type(str(file_path))
    file_type = file_type.value if file_type else file_type
    if is_json or file_type in (FileType.PLAYBOOK.value,
                                FileType.TEST_PLAYBOOK.value):
        return str(file_path), {}
    extractor = Extractor(input=str(file_path),
                          file_type=file_type,
                          output=containing_dir,
                          no_logging=True,
                          no_pipenv=True,
                          no_basic_fmt=True)
    # validate the resulting package files, ergo set path_to_validate to the package directory that results
    # from extracting the unified yaml to a package format
    extractor.extract_to_package_format()
    code_fp_to_row_offset = {
        get_extracted_code_filepath(extractor):
        extractor.lines_inserted_at_code_start
    }
    return extractor.get_output_path(), code_fp_to_row_offset
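
A sketch of consuming the return value: the row-offset mapping lets a caller translate a line number reported against the extracted code file back to the original unified YAML. Inputs and the reported row are made up:

import tempfile

with tempfile.TemporaryDirectory() as tmp_dir:
    yml_bytes = b'commonfields:\n  id: MyIntegration\n'  # made-up unified YAML
    path_to_validate, code_fp_to_row_offset = prepare_single_content_item_for_validation(
        'integration-MyIntegration.yml', yml_bytes, tmp_dir)
    for code_fp, row_offset in code_fp_to_row_offset.items():
        reported_row = 25                    # hypothetical row from a linter
        original_row = reported_row - row_offset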
Example no. 9
    def is_release_branch():
        # type: () -> bool
        """Check if we are working on a release branch.

        Returns:
            (bool): is release branch
        """
        git_util = GitUtil(repo=Content.git())
        main_branch = git_util.handle_prev_ver()[1]
        if not main_branch.startswith('origin'):
            main_branch = 'origin/' + main_branch

        diff_string_config_yml = run_command(f"git diff {main_branch} .circleci/config.yml")
        if re.search(r'[+-][ ]+CONTENT_VERSION: ".*', diff_string_config_yml):
            return True
        return False
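
What the regex above is meant to catch, demonstrated on a made-up diff fragment of .circleci/config.yml:

import re

sample_diff = '-  CONTENT_VERSION: "20.9.0"\n+  CONTENT_VERSION: "20.10.0"'
assert re.search(r'[+-][ ]+CONTENT_VERSION: ".*', sample_diff)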
Example no. 10
def test_create_private_content_artifacts(private_repo):
    from demisto_sdk.commands.common.content import Content
    from demisto_sdk.commands.create_artifacts.content_artifacts_creator import (
        ArtifactsManager)

    with temp_dir() as temp:
        config = ArtifactsManager(artifacts_path=temp,
                                  content_version='6.0.0',
                                  zip=False,
                                  suffix='',
                                  cpus=1,
                                  packs=False)
        config.content = Content(private_repo)
        exit_code = config.create_content_artifacts()

        assert same_folders(temp, ARTIFACTS_EXPECTED_RESULTS / 'private')
        assert exit_code == 0
Example no. 11
    def __init__(self,
                 pack_path: str,
                 update_type: Union[str, None],
                 modified_files_in_pack: set,
                 added_files: set,
                 specific_version: str = None,
                 pre_release: bool = False,
                 pack: str = None,
                 pack_metadata_only: bool = False,
                 text: str = '',
                 existing_rn_version_path: str = '',
                 is_force: bool = False,
                 is_bc: bool = False):
        self.pack = pack if pack else get_pack_name(pack_path)
        self.update_type = update_type
        self.pack_path = pack_path
        # renamed files will appear in the modified list as a tuple: (old path, new path)
        modified_files_in_pack = {
            file_[1] if isinstance(file_, tuple) else file_
            for file_ in modified_files_in_pack
        }
        self.modified_files_in_pack = set()
        for file_path in modified_files_in_pack:
            self.modified_files_in_pack.add(
                self.change_image_or_desc_file_path(file_path))

        self.added_files = added_files
        self.pre_release = pre_release
        self.specific_version = specific_version
        self.existing_rn_changed = False
        self.text = text
        self.existing_rn_version_path = existing_rn_version_path
        self.should_delete_existing_rn = False
        self.pack_metadata_only = pack_metadata_only
        self.is_force = is_force
        git_util = GitUtil(repo=Content.git())
        self.main_branch = git_util.handle_prev_ver()[1]
        self.metadata_path = os.path.join(self.pack_path, 'pack_metadata.json')
        self.master_version = self.get_master_version()
        self.rn_path = ''
        self.is_bc = is_bc
        self.bc_path = ''
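
The renamed-files normalization above, in isolation: tuples of (old path, new path) keep only their new path. Sample paths are made up:

modified = {'Packs/P/README.md',
            ('Packs/P/Integrations/Old.yml', 'Packs/P/Integrations/New.yml')}
normalized = {f[1] if isinstance(f, tuple) else f for f in modified}
# {'Packs/P/README.md', 'Packs/P/Integrations/New.yml'}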
Example no. 12
    def get_all_diff_text_files(self, branch_name, is_circle):
        """
        Get all new/modified text files that need to be searched for secrets
        :param branch_name: current branch being worked on
        :param is_circle: whether the validation is being run from CircleCI
        :return: list: list of text files
        """
        if is_circle:
            prev_ver = self.prev_ver
            if not prev_ver:
                self.git_util = GitUtil(repo=Content.git())
                prev_ver = self.git_util.handle_prev_ver()[1]
            if not prev_ver.startswith('origin'):
                prev_ver = 'origin/' + prev_ver
            print(f"Running secrets validation against {prev_ver}")

            changed_files_string = run_command(f"git diff --name-status {prev_ver}...{branch_name}")
        else:
            print("Running secrets validation on all changes")
            changed_files_string = run_command("git diff --name-status --no-merges HEAD")
        return list(self.get_diff_text_files(changed_files_string))
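
get_diff_text_files is not shown here; for context, a minimal sketch of parsing `git diff --name-status` output, whose lines carry a status letter and a tab-separated path (sample lines are made up):

changed_files_string = 'M\tPacks/P/README.md\nA\tPacks/P/Integrations/New.yml'
for line in changed_files_string.splitlines():
    status, file_path = line.split('\t', 1)
    # status is e.g. 'M' (modified) or 'A' (added); rename lines look like
    # 'R100\told_path\tnew_path' and carry two paths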
Example no. 13
def test_detect_all_docs():
    expected = ['doc-CommonServer.json', 'doc-howto.json']
    pack = Content(TEST_CONTENT_REPO)
    documentations = pack.documentations
    for doc in documentations:
        assert doc.path.name in expected
Example no. 14
def test_detection(attribute: str, content_type: type):
    pack = Content(TEST_CONTENT_REPO)
    assert isinstance(pack.__getattribute__(attribute), content_type)
Example no. 15
class PackUniqueFilesValidator(BaseValidator):
    """PackUniqueFilesValidator is designed to validate the correctness of content pack's files structure.
    Existence and validity of this files is essential."""

    git_util = GitUtil(repo=Content.git())
    main_branch = git_util.handle_prev_ver()[1]
    if not main_branch.startswith('origin'):
        main_branch = 'origin/' + main_branch

    def __init__(self, pack, pack_path=None, validate_dependencies=False, ignored_errors=None, print_as_warnings=False,
                 should_version_raise=False, id_set_path=None, suppress_print=False, private_repo=False,
                 skip_id_set_creation=False, prev_ver=main_branch, json_file_path=None, support=None,
                 specific_validations=None):
        """Inits the content pack validator with pack's name, pack's path, and unique files to content packs such as:
        secrets whitelist file, pack-ignore file, pack-meta file and readme file
        :param pack: content package name, which is the directory name of the pack
        """
        super().__init__(ignored_errors=ignored_errors, print_as_warnings=print_as_warnings,
                         suppress_print=suppress_print, json_file_path=json_file_path, specific_validations=specific_validations)
        self.pack = pack
        self.pack_path = pack_name_to_path(self.pack) if not pack_path else pack_path
        self.secrets_file = PACKS_WHITELIST_FILE_NAME
        self.pack_ignore_file = PACKS_PACK_IGNORE_FILE_NAME
        self.pack_meta_file = PACKS_PACK_META_FILE_NAME
        self.readme_file = PACKS_README_FILE_NAME
        self.validate_dependencies = validate_dependencies
        self._errors = []
        self.should_version_raise = should_version_raise
        self.id_set_path = id_set_path
        self.private_repo = private_repo
        self.skip_id_set_creation = skip_id_set_creation
        self.prev_ver = prev_ver
        self.support = support
        self.metadata_content: Dict = dict()
    # error handling

    def _add_error(self, error: Tuple[str, str], file_path: str, warning=False):
        """Adds error entry to a list under pack's name
        Returns True if added and false otherwise"""
        error_message, error_code = error

        if self.pack_path not in file_path:
            file_path = os.path.join(self.pack_path, file_path)

        formatted_error = self.handle_error(error_message, error_code, file_path=file_path, should_print=False,
                                            warning=warning)
        if formatted_error:
            self._errors.append(formatted_error)
            return True

        return False

    def get_errors(self, raw=False) -> str:
        """Get the dict version or string version for print"""
        errors = ''
        if raw:
            errors = '\n  '.join(self._errors)
        elif self._errors:
            errors = ' - Issues with unique files in pack: {}\n  {}'.format(self.pack, '\n  '.join(self._errors))

        return errors

    # file utils
    def _get_pack_file_path(self, file_name=''):
        """Returns the full file path to pack's file"""
        return os.path.join(self.pack_path, file_name)

    def _get_pack_latest_rn_version(self):
        """
        Extract all the release notes from the pack and return the highest release-note version in the pack.

        Return:
            (str): The latest version of RN.
        """
        list_of_files = glob.glob(self.pack_path + '/ReleaseNotes/*')
        list_of_release_notes = [os.path.basename(file) for file in list_of_files]
        list_of_versions = [rn[:rn.rindex('.')].replace('_', '.') for rn in list_of_release_notes]
        if list_of_versions:
            list_of_versions.sort(key=LooseVersion)
            return list_of_versions[-1]
        else:
            return ''
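        # Example with made-up filenames: ReleaseNotes/1_2_0.md and
        # ReleaseNotes/1_10_0.md become '1.2.0' and '1.10.0'; LooseVersion
        # sorting correctly ranks '1.10.0' as the latest, where a plain
        # string sort would wrongly rank '1.2.0' last.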

    @error_codes('PA128,PA100')
    def _is_pack_file_exists(self, file_name: str, is_required: bool = False):
        """
        Check if a file with given name exists in pack root.
        is_required is True means that absence of the file should block other tests from running
            (see BlockingValidationFailureException).
        """
        if not os.path.isfile(self._get_pack_file_path(file_name)):
            error_function = Errors.required_pack_file_does_not_exist if is_required else Errors.pack_file_does_not_exist
            if self._add_error(error_function(file_name), file_name):
                if is_required:
                    raise BlockingValidationFailureException()
                return False
        return True

    def _read_file_content(self, file_name):
        """Open & Read a file object's content throw exception if can't"""
        try:
            with io.open(self._get_pack_file_path(file_name), mode="r", encoding="utf-8") as file:
                return file.read()
        except IOError:
            if not self._add_error(Errors.cant_open_pack_file(file_name), file_name):
                return "No-Text-Required"
        except ValueError:
            if not self._add_error(Errors.cant_read_pack_file(file_name), file_name):
                return "No-Text-Required"

        return False

    def _read_metadata_content(self) -> Dict:
        """
        Reads metadata content. Avoids the duplication of file opening in case metadata was already opened once.
        Returns:
            (Dict): Metadata JSON pack file content.
        """
        if not self.metadata_content:
            pack_meta_file_content = self._read_file_content(self.pack_meta_file)
            self.metadata_content = json.loads(pack_meta_file_content)
        return self.metadata_content

    def _parse_file_into_list(self, file_name, delimiter='\n'):
        """Parse file's content to list, throw exception if can't"""
        file_content = self._read_file_content(file_name)
        try:
            if file_content:
                return file_content.split(delimiter)
        except ValueError:
            if not self._add_error(Errors.cant_parse_pack_file_to_list(file_name), file_name):
                return True

        return False

    @staticmethod
    def check_timestamp_format(timestamp):
        """Check that the timestamp is in ISO format"""
        try:
            datetime.strptime(timestamp, ISO_TIMESTAMP_FORMAT)
            return True
        except ValueError:
            return False
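
    # e.g. check_timestamp_format('2020-04-14T00:00:00Z') is True under the
    # usual ISO_TIMESTAMP_FORMAT of '%Y-%m-%dT%H:%M:%SZ' (assumed value).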

    # secrets validation
    def validate_secrets_file(self):
        """Validate everything related to .secrets-ignore file"""
        if self._is_pack_file_exists(self.secrets_file) and all([self._is_secrets_file_structure_valid()]):
            return True

        return False

    def _check_if_file_is_empty(self, file_name: str) -> bool:
        """
        Check if file exists and contains info other than space characters.
        Returns false if the file does not exists or not empty
        """
        if self._is_pack_file_exists(file_name):
            content = self._read_file_content(file_name)
            if not content or content.isspace():
                return True

        return False

    def validate_pack_readme_images(self):
        readme_file_path = os.path.join(self.pack_path, self.readme_file)
        readme_validator = ReadMeValidator(readme_file_path, ignored_errors=self.ignored_errors, specific_validations=self.specific_validations)
        errors = readme_validator.check_readme_relative_image_paths(is_pack_readme=True)
        errors += readme_validator.check_readme_absolute_image_paths(is_pack_readme=True)
        if errors:
            self._errors.extend(errors)
            return False
        return True

    @error_codes('IM109')
    def validate_author_image_exists(self):
        if self.metadata_content.get(PACK_METADATA_SUPPORT) == 'partner':
            author_image_path = os.path.join(self.pack_path, 'Author_image.png')
            if not os.path.exists(author_image_path):
                if self._add_error(Errors.author_image_is_missing(author_image_path), file_path=author_image_path):
                    return False

        return True

    @error_codes('RM104')
    def validate_pack_readme_file_is_not_empty(self):
        """
        Validates that README.md file is not empty for partner packs and packs with playbooks
        """
        playbooks_path = os.path.join(self.pack_path, "Playbooks")
        contains_playbooks = os.path.exists(playbooks_path) and len(os.listdir(playbooks_path)) != 0
        if (self.support == 'partner' or contains_playbooks) and self._check_if_file_is_empty(self.readme_file):
            if self._add_error(Errors.empty_readme_error(), self.readme_file):
                return False

        return True

    @error_codes('RM105')
    def validate_pack_readme_and_pack_description(self):
        """
        Validates that README.md file is not the same as the pack description.
        Returns False if the pack readme is identical to the pack description.
        """
        metadata = self._read_metadata_content()
        metadata_description = metadata.get(PACK_METADATA_DESC, '').lower().strip()
        if self._is_pack_file_exists(self.readme_file) and not self._check_if_file_is_empty(self.readme_file):
            pack_readme = self._read_file_content(self.readme_file)
            readme_content = pack_readme.lower().strip()
            if metadata_description == readme_content:
                if self._add_error(Errors.readme_equal_description_error(), self.readme_file):
                    return False

        return True

    def _is_secrets_file_structure_valid(self):
        """Check if .secrets-ignore structure is parse-able"""
        if self._parse_file_into_list(self.secrets_file):
            return True

        return False

    # pack ignore validation
    def validate_pack_ignore_file(self):
        """Validate everything related to .pack-ignore file"""
        if self._is_pack_file_exists(self.pack_ignore_file) and all([self._is_pack_ignore_file_structure_valid()]):
            return True

        return False

    @error_codes('PA104')
    def _is_pack_ignore_file_structure_valid(self):
        """Check if .pack-ignore structure is parse-able"""
        try:
            if self._parse_file_into_list(self.pack_ignore_file):
                return True
        except re.error:
            if not self._add_error(Errors.pack_file_bad_format(self.pack_ignore_file), self.pack_ignore_file):
                return True

        return False

    # pack metadata validation
    def validate_pack_meta_file(self):
        """Validate everything related to pack_metadata.json file"""
        if self._is_pack_file_exists(self.pack_meta_file, is_required=True) and all([
            self._is_pack_meta_file_structure_valid(),
            self._is_valid_contributor_pack_support_details(),
            self._is_approved_usecases(),
            self._is_right_version(),
            self._is_approved_tags(),
            self._is_price_changed(),
            self._is_valid_support_type(),
            self.is_right_usage_of_usecase_tag(),
        ]):
            if self.should_version_raise:
                return self.validate_version_bump()
            else:
                return True

        return False

    @error_codes('PA114')
    def validate_version_bump(self):
        metadata_file_path = self._get_pack_file_path(self.pack_meta_file)
        old_meta_file_content = get_remote_file(metadata_file_path, tag=self.prev_ver)
        current_meta_file_content = get_json(metadata_file_path)
        old_version = old_meta_file_content.get('currentVersion', '0.0.0')
        current_version = current_meta_file_content.get('currentVersion', '0.0.0')
        if LooseVersion(old_version) < LooseVersion(current_version):
            return True
        elif self._add_error(Errors.pack_metadata_version_should_be_raised(self.pack, old_version), metadata_file_path):
            return False
        return True

    @error_codes('PA108,PA125')
    def validate_pack_name(self, metadata_file_content: Dict) -> bool:
        # check validity of pack metadata mandatory fields
        pack_name: str = metadata_file_content.get(PACK_METADATA_NAME, '')
        if not pack_name or 'fill mandatory field' in pack_name:
            if self._add_error(Errors.pack_metadata_name_not_valid(), self.pack_meta_file):
                return False
        if len(pack_name) < 3:
            if self._add_error(Errors.pack_name_is_not_in_xsoar_standards("short"), self.pack_meta_file):
                return False
        if pack_name[0].islower():
            if self._add_error(Errors.pack_name_is_not_in_xsoar_standards("capital"), self.pack_meta_file):
                return False
        if re.findall(INCORRECT_PACK_NAME_PATTERN, pack_name):
            if self._add_error(Errors.pack_name_is_not_in_xsoar_standards("wrong_word"), self.pack_meta_file):
                return False
        if not self.name_does_not_contain_excluded_word(pack_name):
            if self._add_error(
                    Errors.pack_name_is_not_in_xsoar_standards('excluded_word', EXCLUDED_DISPLAY_NAME_WORDS),
                    self.pack_meta_file):
                return False
        return True

    def name_does_not_contain_excluded_word(self, pack_name: str) -> bool:
        """
        Checks whether the given pack name contains an excluded word.
        Args:
            pack_name (str): Name of the pack.
        Returns:
            (bool) False if the pack name contains an excluded word, True otherwise.
        """
        lowercase_name = pack_name.lower()
        return not any(excluded_word in lowercase_name for excluded_word in EXCLUDED_DISPLAY_NAME_WORDS)

    def _is_empty_dir(self, dir_path: Path) -> bool:
        return dir_path.stat().st_size == 0

    def _is_integration_pack(self):
        integration_dir: Path = Path(self.pack_path) / INTEGRATIONS_DIR
        return integration_dir.exists() and not self._is_empty_dir(dir_path=integration_dir)

    @error_codes('PA105,PA106,PA107,PA109,PA110,PA115,PA111,PA129,PA118,PA112')
    def _is_pack_meta_file_structure_valid(self):
        """Check if pack_metadata.json structure is json parse-able and valid"""
        try:
            metadata = self._read_metadata_content()
            if not metadata:
                if self._add_error(Errors.pack_metadata_empty(), self.pack_meta_file):
                    raise BlockingValidationFailureException()

            if not isinstance(metadata, dict):
                if self._add_error(Errors.pack_metadata_should_be_dict(self.pack_meta_file), self.pack_meta_file):
                    raise BlockingValidationFailureException()

            missing_fields = [field for field in PACK_METADATA_FIELDS if field not in metadata.keys()]
            if missing_fields:
                if self._add_error(Errors.missing_field_iin_pack_metadata(self.pack_meta_file, missing_fields),
                                   self.pack_meta_file):
                    raise BlockingValidationFailureException()

            elif not self.validate_pack_name(metadata):
                raise BlockingValidationFailureException()

            description_name = metadata.get(PACK_METADATA_DESC, '').lower()
            if not description_name or 'fill mandatory field' in description_name:
                if self._add_error(Errors.pack_metadata_field_invalid(), self.pack_meta_file):
                    raise BlockingValidationFailureException()

            if not self.is_pack_metadata_desc_too_long(description_name):
                return False

            # check the non-mandatory dependencies field
            dependencies_field = metadata.get(PACK_METADATA_DEPENDENCIES, {})
            if not isinstance(dependencies_field, dict):
                if self._add_error(Errors.dependencies_field_should_be_dict(self.pack_meta_file), self.pack_meta_file):
                    return False

            # check created field in iso format
            created_field = metadata.get(PACK_METADATA_CREATED, '')
            if created_field:
                if not self.check_timestamp_format(created_field):
                    suggested_value = parser.parse(created_field).isoformat() + "Z"
                    if self._add_error(
                            Errors.pack_timestamp_field_not_in_iso_format(PACK_METADATA_CREATED,
                                                                          created_field, suggested_value),
                            self.pack_meta_file):
                        return False

            # check metadata list fields and validate that no empty values are contained in these fields
            for list_field in (PACK_METADATA_KEYWORDS, PACK_METADATA_TAGS, PACK_METADATA_CATEGORIES,
                               PACK_METADATA_USE_CASES):
                field = metadata[list_field]
                if field and len(field) == 1:
                    value = field[0]
                    if not value:
                        if self._add_error(Errors.empty_field_in_pack_metadata(self.pack_meta_file, list_field),
                                           self.pack_meta_file):
                            return False

            # check that metadata categories isn't an empty list, but only if the pack contains an integration.
            if self._is_integration_pack():
                if not metadata[PACK_METADATA_CATEGORIES]:
                    if self._add_error(Errors.pack_metadata_missing_categories(self.pack_meta_file),
                                       self.pack_meta_file):
                        return False

            # if the field 'certification' exists, check that its value is set to 'certified' or 'verified'
            certification = metadata.get(PACK_METADATA_CERTIFICATION)
            if certification and certification not in ALLOWED_CERTIFICATION_VALUES:
                if self._add_error(Errors.pack_metadata_certification_is_invalid(self.pack_meta_file),
                                   self.pack_meta_file):
                    return False

            # check format of metadata version
            version = metadata.get(PACK_METADATA_CURR_VERSION, '0.0.0')
            if not self._is_version_format_valid(version):
                return False

        except (ValueError, TypeError):
            if self._add_error(Errors.pack_metadata_isnt_json(self.pack_meta_file), self.pack_meta_file):
                raise BlockingValidationFailureException()

        return True

    @error_codes('PA126')
    def is_pack_metadata_desc_too_long(self, description_name):
        if len(description_name) > MAXIMUM_DESCRIPTION_FIELD_LENGTH:
            if self._add_error(Errors.pack_metadata_long_description(), self.pack_meta_file, warning=True):
                return False
        return True

    @error_codes('PA113')
    def validate_support_details_exist(self, pack_meta_file_content):
        """Validate either email or url exist in contributed pack details."""
        if not pack_meta_file_content[PACK_METADATA_URL] and not pack_meta_file_content[PACK_METADATA_EMAIL]:
            if self._add_error(Errors.pack_metadata_missing_url_and_email(), self.pack_meta_file):
                return False

        return True

    @error_codes('PA127')
    def validate_metadata_url(self, pack_meta_file_content):
        """Validate the url in the pack metadata doesn't lead to a github repository."""
        metadata_url = pack_meta_file_content[PACK_METADATA_URL]
        metadata_url = metadata_url.lower().strip()
        if len(re.findall("github.com", metadata_url)) > 0:
            # GitHub URLs that lead to a /issues page are also acceptable as a support URL.
            if not metadata_url.endswith('/issues'):
                self._add_error(Errors.metadata_url_invalid(), self.pack_meta_file)
                return False

        return True

    @error_codes('PA112')
    def _is_valid_contributor_pack_support_details(self):
        """Check email and url in contributed pack metadata details."""
        try:
            pack_meta_file_content = self._read_metadata_content()
            if pack_meta_file_content[PACK_METADATA_SUPPORT] in SUPPORTED_CONTRIBUTORS_LIST:
                return all([self.validate_support_details_exist(pack_meta_file_content),
                            self.validate_metadata_url(pack_meta_file_content)])

        except (ValueError, TypeError):
            if self._add_error(Errors.pack_metadata_isnt_json(self.pack_meta_file), self.pack_meta_file):
                return False

        return True

    @error_codes('PA117,PA112')
    def _is_valid_support_type(self) -> bool:
        """Checks whether the support type is valid in the pack metadata.

        Returns:
            bool: True if the support type is valid, otherwise False

        """
        try:
            pack_meta_file_content = self._read_metadata_content()
            if pack_meta_file_content[PACK_METADATA_SUPPORT] not in SUPPORT_TYPES:
                self._add_error(Errors.pack_metadata_invalid_support_type(), self.pack_meta_file)
                return False
            self.support = pack_meta_file_content[PACK_METADATA_SUPPORT]
        except (ValueError, TypeError):
            if self._add_error(Errors.pack_metadata_isnt_json(self.pack_meta_file), self.pack_meta_file):
                return False

        return True

    @error_codes('PA119')
    def _is_approved_usecases(self) -> bool:
        """Checks whether the usecases in the pack metadata are approved

        Return:
             bool: True if the usecases are approved, otherwise False
        """
        if tools.is_external_repository():
            return True

        non_approved_usecases = set()
        try:
            pack_meta_file_content = self._read_metadata_content()
            current_usecases = tools.get_current_usecases()
            non_approved_usecases = set(pack_meta_file_content[PACK_METADATA_USE_CASES]) - set(current_usecases)
            if non_approved_usecases:
                if self._add_error(
                        Errors.pack_metadata_non_approved_usecases(non_approved_usecases), self.pack_meta_file):
                    return False
        except (ValueError, TypeError):
            if self._add_error(Errors.pack_metadata_non_approved_usecases(non_approved_usecases), self.pack_meta_file):
                return False
        return True

    @error_codes('PA130')
    def _is_version_format_valid(self, version: str) -> bool:
        """
        Checks if the metadata version is in the correct format.
        Args:
            version (str): The version whose format should be checked.

        Returns:
            bool: True if the version is in the correct format, otherwise false.
        """
        match_obj = re.match(VERSION_REGEX, version)
        if not match_obj:
            self._add_error(Errors.wrong_version_format(), self.pack_meta_file)
            return False
        return True

    @error_codes('PA120')
    def _is_approved_tags(self) -> bool:
        """Checks whether the tags in the pack metadata are approved

        Return:
             bool: True if the tags are approved, otherwise False
        """
        if tools.is_external_repository():
            return True

        non_approved_tags = set()
        try:
            pack_meta_file_content = self._read_metadata_content()
            current_tags = tools.get_current_tags()
            non_approved_tags = set(pack_meta_file_content[PACK_METADATA_TAGS]) - set(current_tags)
            if non_approved_tags:
                if self._add_error(Errors.pack_metadata_non_approved_tags(non_approved_tags), self.pack_meta_file):
                    return False
        except (ValueError, TypeError):
            if self._add_error(Errors.pack_metadata_non_approved_tags(non_approved_tags), self.pack_meta_file):
                return False
        return True

    @error_codes('RN106,PA131')
    def _is_right_version(self):
        """Checks whether the currentVersion field in the pack metadata match the version of the latest release note.

        Return:
             bool: True if the versions match, otherwise False
        """
        metadata_file_path = self._get_pack_file_path(self.pack_meta_file)
        current_version = self.metadata_content.get('currentVersion', '0.0.0')
        rn_version = self._get_pack_latest_rn_version()
        if not rn_version and current_version == '1.0.0':
            return True
        if not rn_version:
            self._add_error(Errors.missing_release_notes_for_pack(self.pack), self.pack)
            return False
        if parse(rn_version) != parse(current_version):
            self._add_error(Errors.pack_metadata_version_diff_from_rn(self.pack, rn_version, current_version), metadata_file_path)
            return False
        return True

    def _contains_use_case(self):
        """
        Return:
            True if the Pack contains at least one PB, Incident Type or Layout, otherwise False
        """
        playbooks_path = os.path.join(self.pack_path, "Playbooks")
        incidents_path = os.path.join(self.pack_path, "IncidentTypes")
        layouts_path = os.path.join(self.pack_path, "Layouts")

        answers = [
            os.path.exists(playbooks_path) and len(os.listdir(playbooks_path)) != 0,
            os.path.exists(incidents_path) and len(os.listdir(incidents_path)) != 0,
            os.path.exists(layouts_path) and len(os.listdir(layouts_path)) != 0,
        ]
        return any(answers)

    @error_codes('PA123')
    def is_right_usage_of_usecase_tag(self):
        """Checks whether Use Case tag in pack_metadata is used properly

        Return:
             bool: True if the Pack contains at least one PB, Incident Type or Layout, otherwise False
        """
        try:
            pack_meta_file_content = self._read_metadata_content()

            if "Use Case" in pack_meta_file_content['tags']:
                if not self._contains_use_case():
                    if self._add_error(Errors.is_wrong_usage_of_usecase_tag(), self.pack_meta_file):
                        return False
        except (ValueError, TypeError):
            if self._add_error(Errors.is_wrong_usage_of_usecase_tag(), self.pack_meta_file):
                return False
        return True

    def get_master_private_repo_meta_file(self, metadata_file_path: str):
        current_repo = Repo(Path.cwd(), search_parent_directories=True)

        # if running on master branch in private repo - do not run the test
        if current_repo.active_branch.name == 'master':  # compare by name; a Head object never equals a str
            if not self.suppress_print:
                click.secho("Running on master branch - skipping price change validation", fg="yellow")
            return None
        try:
            old_meta_file_content = current_repo.git.show(f'{self.main_branch}:{metadata_file_path}')

        except GitCommandError as e:
            if not self.suppress_print:
                click.secho(f"Got an error while trying to connect to git - {str(e)}\n"
                            f"Skipping price change validation")
            return None

        # if there was no past version
        if not old_meta_file_content:
            if not self.suppress_print:
                click.secho("Unable to find previous pack_metadata.json file - skipping price change validation",
                            fg="yellow")
            return None

        return json.loads(old_meta_file_content)

    @error_codes('PA121')
    def _is_price_changed(self) -> bool:
        # only check on private repo
        if not self.private_repo:
            return True

        metadata_file_path = self._get_pack_file_path(self.pack_meta_file)
        old_meta_file_content = self.get_master_private_repo_meta_file(metadata_file_path)

        # if there was no past version or running on master branch
        if not old_meta_file_content:
            return True

        current_meta_file_content = get_json(metadata_file_path)
        current_price = current_meta_file_content.get('price')
        old_price = old_meta_file_content.get('price')

        # if a price was added, removed or changed compared to the master version - return an error
        if (old_price and not current_price) or (current_price and not old_price) or (old_price != current_price):
            if self._add_error(Errors.pack_metadata_price_change(old_price, current_price), self.pack_meta_file):
                return False

        return True

    def are_valid_files(self, id_set_validations) -> str:
        """Main Execution Method"""
        try:
            self.validate_secrets_file()
            self.validate_pack_ignore_file()
            # metadata file is not validated for API_MODULES_PACK
            if API_MODULES_PACK not in self.pack:
                self.validate_pack_meta_file()

            self.validate_pack_readme_file_is_not_empty()
            self.validate_pack_readme_and_pack_description()
            self.validate_pack_readme_images()
            self.validate_author_image_exists()

            # We only check pack dependencies for -g flag
            if self.validate_dependencies:
                self.validate_pack_dependencies()

            # Check if unique files are valid against the rest of the files, using the ID set.
            if id_set_validations:
                is_valid, error = id_set_validations.is_unique_file_valid_in_set(self.pack_path, self.ignored_errors)
                if not is_valid:
                    self._add_error(error, self.pack_path)
        except BlockingValidationFailureException:
            # note that raising this should happen after adding the error to self._errors,
            # so no special handling is required on this `except` block
            pass

        return self.get_errors()

    # pack dependencies validation
    def validate_pack_dependencies(self):
        try:
            click.secho(f'\nRunning pack dependencies validation on {self.pack}\n',
                        fg="bright_cyan")
            core_pack_list = get_core_pack_list()

            first_level_dependencies = PackDependencies.find_dependencies(
                self.pack, id_set_path=self.id_set_path, silent_mode=True, exclude_ignored_dependencies=False,
                update_pack_metadata=False, skip_id_set_creation=self.skip_id_set_creation, use_pack_metadata=True
            )

            if not first_level_dependencies:
                if not self.suppress_print:
                    click.secho("No first level dependencies found", fg="yellow")
                return True

            for core_pack in core_pack_list:
                first_level_dependencies.pop(core_pack, None)
            if not first_level_dependencies:
                if not self.suppress_print:
                    click.secho("Found first level dependencies only on core packs", fg="yellow")
                return True

            dependency_result = json.dumps(first_level_dependencies, indent=4)
            click.echo(click.style(f"Found dependencies result for {self.pack} pack:", bold=True))
            click.echo(click.style(dependency_result, bold=True))

            if self.pack in core_pack_list:
                if not self.validate_core_pack_dependencies(first_level_dependencies):
                    return False

            non_supported_pack = first_level_dependencies.get('NonSupported', {})
            deprecated_pack = first_level_dependencies.get('DeprecatedContent', {})

            if not self.is_invalid_package_dependencies(non_supported_pack, deprecated_pack):
                return False

            return True

        except ValueError as e:
            if "Couldn't find any items for pack" in str(e):
                error_message, error_code = Errors.invalid_id_set()
                if self._add_error((error_message, error_code), file_path=self.pack_path):
                    return False
                return True
            else:
                raise

    @error_codes('PA116')
    def is_invalid_package_dependencies(self, non_supported_pack, deprecated_pack):
        if (non_supported_pack.get('mandatory')) or (deprecated_pack.get('mandatory')):
            error_message, error_code = Errors.invalid_package_dependencies(self.pack)
            if self._add_error((error_message, error_code), file_path=self.pack_path):
                return False
        return True

    @error_codes('PA124')
    def validate_core_pack_dependencies(self, dependencies_packs):
        found_dependencies = []
        for dependency_pack in dependencies_packs:
            if dependencies_packs.get(dependency_pack, {}).get('mandatory'):
                found_dependencies.append(dependency_pack)

        if found_dependencies:
            error_message, error_code = Errors.invalid_core_pack_dependencies(self.pack, str(found_dependencies))
            if self._add_error((error_message, error_code), file_path=self.pack_path):
                return False
        return True
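
A minimal usage sketch of the validator, assuming it is run from inside a content repo (the class body calls Content.git() at class-definition time); the pack name is illustrative:

validator = PackUniqueFilesValidator('MyPack')  # pack directory name under Packs/
errors = validator.are_valid_files(id_set_validations=None)
if errors:
    print(errors)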