Example #1
    def upload(self):
        """Upload the integration specified in self.infile to the remote Demisto instance.
        """
        try:
            if self.unify:  # Create a temporary unified yml file
                try:
                    unifier = Unifier(self.path, outdir=self.path)
                    self.path = unifier.merge_script_package_to_yml()[0][0]
                except IndexError:
                    print_color(
                        'Error: Path input is not a valid package directory.',
                        LOG_COLORS.RED)
                    return 1

            # Upload the file to Demisto
            result = self.client.integration_upload(file=self.path)

            # Print results
            print_v(f'Result:\n{result.to_str()}', self.log_verbose)
            print_color(f'Uploaded \'{result.name}\' successfully',
                        LOG_COLORS.GREEN)

        finally:
            if self.unify and os.path.exists(
                    self.path):  # Remove the temporary file
                os.remove(self.path)

        return 0
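The pattern above (create a temporary unified yml, upload it, always remove it in a finally block) can be distilled into a small sketch; merge_package and upload_file below are hypothetical stand-ins for Unifier.merge_script_package_to_yml and client.integration_upload:

import os

def upload_package(path, merge_package, upload_file):
    tmp_path = merge_package(path)    # temporary unified yml
    try:
        return upload_file(tmp_path)
    finally:
        if os.path.exists(tmp_path):  # always remove the temporary file
            os.remove(tmp_path)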
Example #2
    def check_api_module_imports(self, py_num):
        """
        Checks if the integration imports an API module and if so pastes the module in the package.
        :param py_num: The Python version as a number (e.g. 2.7 or 3.7); API modules are Python 3 only
        """
        if py_num > 3:  # a Python 3 version number, e.g. 3.7
            unifier = Unifier(self.project_dir)
            code_file_path = unifier.get_code_file('.py')

            module_name = ''
            try:
                # Look for an import to an API module in the code. If there is such import, we need to copy the correct
                # module file to the package directory.
                with io.open(code_file_path, mode='r',
                             encoding='utf-8') as script_file:
                    _, module_name = unifier.check_api_module_imports(
                        script_file.read())
                if module_name:
                    module_path = os.path.join(self.configuration.env_dir,
                                               'Packs', 'ApiModules',
                                               'Scripts', module_name,
                                               module_name + '.py')
                    print_v('Copying ' + module_path)
                    if not os.path.exists(module_path):
                        raise ValueError(
                            'API Module {} not found, you might be outside of the content repository'
                            ' or this API module does not exist'.format(
                                module_name))
                    shutil.copy(os.path.join(module_path), self.project_dir)
            except Exception as e:
                print_v('Unable to retrieve the module file {}: {}'.format(
                    module_name, str(e)))
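The detection itself can be sketched with a small regex; this is an assumption about how Unifier.check_api_module_imports works, consistent with test_check_api_module_imports in Example #19 below:

import re

API_MODULE_IMPORT_RE = re.compile(r'from (\w+ApiModule) import \*')

def find_api_module(script_code):
    """Return (import_line, module_name) for the first API-module import, if any."""
    match = API_MODULE_IMPORT_RE.search(script_code)
    if not match:
        return None, None
    return match.group(0), match.group(1)

# find_api_module('from MicrosoftApiModule import *  # noqa: E402')
# -> ('from MicrosoftApiModule import *', 'MicrosoftApiModule')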
Example #3
    def copy_packs_content_to_packs_bundle(self, packs):
        '''
        Copy content in packs to the bundle that gets zipped to 'content_packs.zip'. Preserves directory structure
        except that packages inside the "Integrations" or "Scripts" directory inside a pack are flattened. Adds file
        prefixes according to how server expects to ingest the files, e.g. 'integration-' is prepended to integration
        yml filenames and 'script-' is prepended to script yml filenames, and so on.
        '''
        for pack in packs:
            pack_name = os.path.basename(pack)
            if pack_name in self.packs_to_skip:
                continue
            pack_dst = os.path.join(self.packs_bundle, pack_name)
            os.mkdir(pack_dst)
            pack_dirs = get_child_directories(pack)
            pack_files = get_child_files(pack)
            # copy first level pack files over
            for file_path in pack_files:
                shutil.copy(
                    file_path,
                    os.path.join(pack_dst, os.path.basename(file_path)))
            # handle content directories in the pack
            for content_dir in pack_dirs:
                dir_name = os.path.basename(content_dir)
                dest_dir = os.path.join(pack_dst, dir_name)
                os.mkdir(dest_dir)
                if dir_name in DIR_TO_PREFIX:
                    packages_dirs = get_child_directories(content_dir)
                    for package_dir in packages_dirs:
                        ymls, _ = get_yml_paths_in_dir(package_dir,
                                                       error_msg='')
                        if not ymls or (len(ymls) == 1
                                        and ymls[0].endswith('_unified.yml')):
                            msg = 'Skipping package: {} -'.format(package_dir)
                            if not ymls:
                                print_warning(
                                    '{} No yml files found in the package directory'
                                    .format(msg))
                            else:
                                print_warning(
                                    '{} Only unified yml found in the package directory'
                                    .format(msg))
                            continue
                        package_dir_name = os.path.basename(package_dir)
                        unifier = Unifier(package_dir, dir_name, dest_dir)
                        unifier.merge_script_package_to_yml()

                        # also copy CHANGELOG markdown files over (should only be one per package)
                        package_files = get_child_files(package_dir)
                        changelog_files = [
                            file_path for file_path in package_files
                            if 'CHANGELOG.md' in file_path
                        ]
                        for md_file_path in changelog_files:
                            md_out_name = '{}-{}_CHANGELOG.md'.format(
                                DIR_TO_PREFIX.get(dir_name), package_dir_name)
                            shutil.copyfile(
                                md_file_path,
                                os.path.join(dest_dir, md_out_name))
                else:
                    self.copy_dir_files(content_dir, dest_dir)
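DIR_TO_PREFIX drives both the file-prefix logic and the CHANGELOG renaming above. A plausible shape for it (an assumption; the real constant is imported from the project's constants module):

DIR_TO_PREFIX = {
    'Integrations': 'integration',
    'Beta_Integrations': 'integration',
    'Scripts': 'script',
}

# so the CHANGELOG of the package Integrations/VulnDB would be copied as
# 'integration-VulnDB_CHANGELOG.md'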
Example #4
    def is_file_has_used_id(self, file_path):
        """Check if the ID of the given file already exist in the system.

        Args:
            file_path (string): Path to the file.

        Returns:
            bool. Whether the ID of the given file already exists in the system or not.
        """
        is_used = False
        is_json_file = False
        if self.is_circle:
            if re.match(TEST_PLAYBOOK_REGEX, file_path, re.IGNORECASE):
                obj_type = self.TEST_PLAYBOOK_SECTION
                obj_id = collect_ids(file_path)
                obj_data = get_playbook_data(file_path)

            elif re.match(SCRIPT_REGEX, file_path, re.IGNORECASE) or \
                    re.match(TEST_SCRIPT_REGEX, file_path, re.IGNORECASE):
                obj_type = self.SCRIPTS_SECTION
                obj_id = get_script_or_integration_id(file_path)
                obj_data = get_script_data(file_path)

            elif re.match(INTEGRATION_REGEX, file_path, re.IGNORECASE) or \
                    re.match(INTEGRATION_YML_REGEX, file_path, re.IGNORECASE):

                obj_type = self.INTEGRATION_SECTION
                obj_id = get_script_or_integration_id(file_path)
                obj_data = get_integration_data(file_path)

            elif re.match(PLAYBOOK_REGEX, file_path, re.IGNORECASE):
                obj_type = self.PLAYBOOK_SECTION
                obj_id = collect_ids(file_path)
                obj_data = get_playbook_data(file_path)

            elif re.match(SCRIPT_YML_REGEX, file_path, re.IGNORECASE) or \
                    re.match(SCRIPT_PY_REGEX, file_path, re.IGNORECASE) or \
                    re.match(SCRIPT_JS_REGEX, file_path, re.IGNORECASE):

                unifier = Unifier(os.path.dirname(os.path.dirname(file_path)))
                yml_path, code = unifier.get_script_package_data()

                obj_data = get_script_data(yml_path, script_code=code)

                obj_type = self.SCRIPTS_SECTION
                obj_id = get_script_or_integration_id(yml_path)

            else:  # In case of a json file
                is_json_file = True

            if not is_json_file:
                is_used = self.is_id_duplicated(obj_id, obj_data, obj_type)

        return is_used
Example #5
def test_clean_python_code():
    from demisto_sdk.yaml_tools.unifier import Unifier
    unifier = Unifier("path")
    script_code = "import demistomock as demistofrom CommonServerPython import *" \
                  "from CommonServerUserPython import *from __future__ import print_function"
    # Test remove_print_future is False
    script_code = unifier.clean_python_code(script_code,
                                            remove_print_future=False)
    assert script_code == "from __future__ import print_function"
    # Test remove_print_future is True
    script_code = unifier.clean_python_code(script_code)
    assert script_code == ""
Example #6
def test_get_code_file():
    from demisto_sdk.yaml_tools.unifier import Unifier
    # Test integration case
    unifier = Unifier("tests/test_files/VulnDB/")
    assert unifier.get_code_file(".py") == "tests/test_files/VulnDB/VulnDB.py"
    unifier = Unifier("tests/test_files")
    with pytest.raises(Exception):
        unifier.get_code_file(".py")
    # Test script case
    unifier = Unifier("tests/test_files/CalculateGeoDistance/")
    assert unifier.get_code_file(
        ".py"
    ) == "tests/test_files/CalculateGeoDistance/CalculateGeoDistance.py"
Example #7
def test_insert_description_to_yml():
    from demisto_sdk.yaml_tools.unifier import Unifier
    with patch.object(Unifier, "__init__", lambda a, b, c, d, e: None):
        unifier = Unifier('', None, None, None)
        unifier.package_path = "tests/test_files/VulnDB/"
        unifier.dir_name = "Integrations"
        with open("tests/test_files/VulnDB/VulnDB_description.md",
                  "rb") as desc_file:
            desc_data = desc_file.read().decode("utf-8")
            desc_data = '|\n  ' + desc_data.replace('\n', '\n  ')
        yml_text, found_data_path = unifier.insert_description_to_yml({}, "")
        assert found_data_path == "tests/test_files/VulnDB/VulnDB_description.md"
        assert desc_data in yml_text
Example #8
def process_script(file_path):
    res = []
    if os.path.isfile(file_path):
        if checked_type(file_path, (SCRIPT_REGEX, PACKS_SCRIPT_YML_REGEX)):
            print("adding {0} to id_set".format(file_path))
            res.append(get_script_data(file_path))
    else:
        # package script
        unifier = Unifier(file_path)
        yml_path, code = unifier.get_script_package_data()
        print("adding {0} to id_set".format(file_path))
        res.append(get_script_data(yml_path, script_code=code))

    return res
Example #9
    def copy_content_yml(self, path, out_path, yml_info):
        '''Copy content ymls (except for playbooks) to the out_path (presumably a bundle)'''
        parent_dir_name = os.path.basename(os.path.dirname(path))
        if parent_dir_name in DIR_TO_PREFIX and not os.path.basename(
                path).startswith('playbook-'):
            script_obj = yml_info
            if parent_dir_name != SCRIPTS_DIR:
                script_obj = yml_info['script']
            with io.open(path, mode='r', encoding='utf-8') as file_:
                yml_text = file_.read()
            unifier = Unifier(os.path.dirname(path), parent_dir_name, out_path)
            out_map = unifier.write_yaml_with_docker(yml_text, yml_info,
                                                     script_obj)
            if len(out_map.keys()) > 1:
                print(" - yaml generated multiple files: {}".format(
                    out_map.keys()))
            return
        # not a script or integration file. Simply copy
        shutil.copyfile(path, out_path)
Example #10
    def is_file_valid_in_set(self, file_path):
        """Check if the file is represented correctly in the id_set

        Args:
            file_path (string): Path to the file.

        Returns:
            bool. Whether the file is represented correctly in the id_set or not.
        """
        is_valid = True
        if self.is_circle:  # No need to check on local env because the id_set will contain this info after the commit
            if re.match(PLAYBOOK_REGEX, file_path, re.IGNORECASE):
                playbook_data = get_playbook_data(file_path)
                is_valid = self.is_valid_in_id_set(file_path, playbook_data, self.playbook_set)

            elif re.match(TEST_PLAYBOOK_REGEX, file_path, re.IGNORECASE):
                playbook_data = get_playbook_data(file_path)
                is_valid = self.is_valid_in_id_set(file_path, playbook_data, self.test_playbook_set)

            elif re.match(TEST_SCRIPT_REGEX, file_path, re.IGNORECASE) or \
                    re.match(SCRIPT_REGEX, file_path, re.IGNORECASE):

                script_data = get_script_data(file_path)
                is_valid = self.is_valid_in_id_set(file_path, script_data, self.script_set)

            elif re.match(INTEGRATION_REGEX, file_path, re.IGNORECASE) or \
                    re.match(INTEGRATION_YML_REGEX, file_path, re.IGNORECASE):

                integration_data = get_integration_data(file_path)
                is_valid = self.is_valid_in_id_set(file_path, integration_data, self.integration_set)

            elif re.match(SCRIPT_YML_REGEX, file_path, re.IGNORECASE) or \
                    re.match(SCRIPT_PY_REGEX, file_path, re.IGNORECASE) or \
                    re.match(SCRIPT_JS_REGEX, file_path, re.IGNORECASE):

                unifier = Unifier(os.path.dirname(file_path))
                yml_path, code = unifier.get_script_package_data()
                script_data = get_script_data(yml_path, script_code=code)
                is_valid = self.is_valid_in_id_set(yml_path, script_data, self.script_set)

        return is_valid
Example #11
def test_insert_script_to_yml(package_path, dir_name, file_path):
    from demisto_sdk.yaml_tools.unifier import Unifier
    with patch.object(Unifier, "__init__", lambda a, b, c, d, e: None):
        unifier = Unifier("", None, None, None)
        unifier.package_path = package_path
        unifier.dir_name = dir_name
        with open(file_path + ".yml", mode="r", encoding="utf-8") as yml_file:
            test_yml_text = yml_file.read()
        with open(file_path + ".yml", "r") as yml:
            test_yml_data = yaml.safe_load(yml)

        yml_text, script_path = unifier.insert_script_to_yml(
            ".py", test_yml_text, test_yml_data)

        with open(file_path + ".py", mode="r",
                  encoding="utf-8") as script_file:
            script_code = script_file.read()
        clean_code = unifier.clean_python_code(script_code)
        lines = ['|-']
        lines.extend(u'    {}'.format(line) for line in clean_code.split('\n'))
        script_code = u'\n'.join(lines)
        test_yml_text = test_yml_text.replace("script: ''",
                                              "script: " + script_code)
        test_yml_text = test_yml_text.replace("script: '-'",
                                              "script: " + script_code)

        assert yml_text == test_yml_text
        assert script_path == file_path + ".py"
Example #12
def test_get_script_package_data():
    from demisto_sdk.yaml_tools.unifier import Unifier
    unifier = Unifier("tests/")
    with pytest.raises(Exception):
        unifier.get_script_package_data()
    unifier = Unifier("tests/test_files/CalculateGeoDistance")
    with open("tests/test_files/CalculateGeoDistance/CalculateGeoDistance.py",
              "r") as code_file:
        code = code_file.read()
    yml_path, code_data = unifier.get_script_package_data()
    assert yml_path == "tests/test_files/CalculateGeoDistance/CalculateGeoDistance.yml"
    assert code_data == code
Example #13
def test_insert_module_code(mocker, import_name):
    from demisto_sdk.yaml_tools.unifier import Unifier
    mocker.patch.object(Unifier,
                        '_get_api_module_code',
                        return_value=DUMMY_MODULE)
    module_name = 'MicrosoftApiModule'
    new_code = DUMMY_SCRIPT.replace(
        import_name,
        '\n### GENERATED CODE ###\n# This code was inserted in place of an API'
        ' module.{}\n'.format(DUMMY_MODULE))

    code = Unifier.insert_module_code(DUMMY_SCRIPT, import_name, module_name)

    assert code == new_code
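From the expected string built above, the method's effect can be sketched as a plain string replacement (an assumption: the real method takes module_name and fetches the module source via _get_api_module_code, which this test mocks out):

def insert_module_code(script_code, import_name, module_code):
    replacement = ('\n### GENERATED CODE ###\n'
                   '# This code was inserted in place of an API'
                   ' module.{}\n'.format(module_code))
    return script_code.replace(import_name, replacement)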
Example #14
def test_get_data():
    from demisto_sdk.yaml_tools.unifier import Unifier
    with patch.object(Unifier, "__init__", lambda a, b, c, d, e: None):
        unifier = Unifier('', None, None, None)
        unifier.package_path = "tests/test_files/VulnDB/"
        unifier.dir_name = "Integrations"
        with open("tests/test_files/VulnDB/VulnDB_image.png",
                  "rb") as image_file:
            image = image_file.read()
        data, found_data_path = unifier.get_data("*png")
        assert data == image
        assert found_data_path == "tests/test_files/VulnDB/VulnDB_image.png"
        unifier.dir_name = "Scripts"
        data, found_data_path = unifier.get_data("*png")
        assert data is None
        assert found_data_path is None
Example #15
def test_insert_script_to_yml_exceptions(package_path, dir_name, file_path):
    from demisto_sdk.yaml_tools.unifier import Unifier
    with patch.object(Unifier, "__init__", lambda a, b, c, d, e: None):
        unifier = Unifier("", None, None, None)
        unifier.package_path = package_path
        unifier.dir_name = dir_name
        with open(file_path + ".yml", "r") as yml:
            test_yml_data = yaml.safe_load(yml)
        if dir_name == "Scripts":
            test_yml_data['script'] = 'blah'
        elif dir_name == "Integrations":
            test_yml_data['script']['script'] = 'blah'

        with pytest.raises(ValueError):
            unifier.insert_script_to_yml(".py", "", test_yml_data)
Example #16
    def create_unifieds_and_copy(self,
                                 package_dir,
                                 dest_dir='',
                                 skip_dest_dir=''):
        '''
        For directories that contain packages, i.e. one subdirectory per integration/script
        (e.g. "Integrations", "Beta_Integrations", "Scripts"): creates a unified yml for each
        package and writes it to dest_dir.

        Arguments:
            package_dir: (str)
                Path to directory in which there are package subdirectories. e.g. "Integrations",
                "Beta_Integrations", "Scripts"
            dest_dir: (str)
                Path to destination directory to which the unified yml for a package should be written
            skip_dest_dir: (str)
                Path to the directory to which the unified yml for a package should be written in the
                case the package is part of the skipped list
        '''
        dest_dir = dest_dir if dest_dir else self.content_bundle
        skip_dest_dir = skip_dest_dir if skip_dest_dir else self.test_bundle

        scanned_packages = glob.glob(os.path.join(package_dir, '*/'))
        package_dir_name = os.path.basename(package_dir)
        for package in scanned_packages:
            ymls, _ = get_yml_paths_in_dir(package, error_msg='')
            if not ymls or (len(ymls) == 1
                            and ymls[0].endswith('_unified.yml')):
                msg = 'Skipping package: {} -'.format(package)
                if not ymls:
                    print_warning(
                        '{} No yml files found in the package directory'.
                        format(msg))
                else:
                    print_warning(
                        '{} Only unified yml found in the package directory'.
                        format(msg))
                continue
            unification_tool = Unifier(package, package_dir_name, dest_dir)
            if any(package_to_skip in package
                   for package_to_skip in self.packages_to_skip):
                # there are some packages that we don't want to include in the content zip
                # for example HelloWorld integration
                unification_tool = Unifier(package, package_dir_name,
                                           skip_dest_dir)
                print('skipping {}'.format(package))
            unification_tool.merge_script_package_to_yml()
Example #17
def test_insert_image_to_yml():
    from demisto_sdk.yaml_tools.unifier import Unifier
    with patch.object(Unifier, "__init__", lambda a, b, c, d, e: None):
        unifier = Unifier('', None, None, None)
        unifier.package_path = "tests/test_files/VulnDB/"
        unifier.dir_name = "Integrations"
        unifier.image_prefix = "data:image/png;base64,"
        with open("tests/test_files/VulnDB/VulnDB_image.png",
                  "rb") as image_file:
            image_data = image_file.read()
            image_data = unifier.image_prefix + base64.b64encode(
                image_data).decode('utf-8')
        with open("tests/test_files/VulnDB/VulnDB.yml",
                  mode="r",
                  encoding="utf-8") as yml_file:
            yml_text_test = yml_file.read()
        with open("tests/test_files/VulnDB/VulnDB.yml", "r") as yml:
            yml_data = yaml.safe_load(yml)
        yml_text, found_img_path = unifier.insert_image_to_yml(
            yml_data, yml_text_test)
        yml_text_test = 'image: ' + image_data + '\n' + yml_text_test
        assert found_img_path == "tests/test_files/VulnDB/VulnDB_image.png"
        assert yml_text == yml_text_test
Example #18
def unify(**kwargs):
    unifier = Unifier(**kwargs)
    return unifier.merge_script_package_to_yml()
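A call could look like the line below; the indir keyword is an assumption, while outdir appears as a Unifier keyword in Example #1 above:

unify(indir='Integrations/VulnDB', outdir='Integrations')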
Example #19
def test_check_api_module_imports():
    from demisto_sdk.yaml_tools.unifier import Unifier
    module_import, module_name = Unifier.check_api_module_imports(DUMMY_SCRIPT)

    assert module_import == 'from MicrosoftApiModule import *  # noqa: E402'
    assert module_name == 'MicrosoftApiModule'
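DUMMY_SCRIPT is defined elsewhere in the test module; a minimal fixture consistent with these assertions would be (hypothetical):

DUMMY_SCRIPT = (
    "def main():\n"
    "    pass\n"
    "\n"
    "from MicrosoftApiModule import *  # noqa: E402\n"
    "\n"
    "if __name__ in ('__main__', 'builtins'):\n"
    "    main()\n"
)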
Example #20
    def validate_added_files(self, added_files):  # noqa: C901
        """Validate the added files from your branch.

        In case we encounter an invalid file we set the self._is_valid param to False.

        Args:
            added_files (set): A set of the modified files in the current branch.
        """
        for file_path in added_files:
            print('Validating {}'.format(file_path))

            if re.match(TEST_PLAYBOOK_REGEX, file_path, re.IGNORECASE):
                continue

            structure_validator = StructureValidator(file_path)
            if not structure_validator.is_valid_file():
                self._is_valid = False

            if self.validate_id_set:
                if not self.id_set_validator.is_file_valid_in_set(file_path):
                    self._is_valid = False

                if self.id_set_validator.is_file_has_used_id(file_path):
                    self._is_valid = False

            # file-type dispatch, independent of the id-set checks above
            if re.match(PLAYBOOK_REGEX, file_path, re.IGNORECASE):
                playbook_validator = PlaybookValidator(structure_validator)
                if not playbook_validator.is_valid_playbook():
                    self._is_valid = False

            elif checked_type(file_path, YML_INTEGRATION_REGEXES):
                image_validator = ImageValidator(file_path)
                if not image_validator.is_valid():
                    self._is_valid = False

                description_validator = DescriptionValidator(file_path)
                if not description_validator.is_valid():
                    self._is_valid = False

                integration_validator = IntegrationValidator(
                    structure_validator)
                if not integration_validator.is_valid_file(validate_rn=False):
                    self._is_valid = False

            elif checked_type(file_path, PACKAGE_SCRIPTS_REGEXES):
                unifier = Unifier(os.path.dirname(file_path))
                yml_path, _ = unifier.get_script_package_data()
                # Set file path to the yml file
                structure_validator.file_path = yml_path
                script_validator = ScriptValidator(structure_validator)

                if not script_validator.is_valid_file(validate_rn=False):
                    self._is_valid = False

            elif re.match(BETA_INTEGRATION_REGEX, file_path, re.IGNORECASE) or \
                    re.match(BETA_INTEGRATION_YML_REGEX, file_path, re.IGNORECASE):
                description_validator = DescriptionValidator(file_path)
                if not description_validator.is_valid_beta_description():
                    self._is_valid = False

                integration_validator = IntegrationValidator(
                    structure_validator)
                if not integration_validator.is_valid_beta_integration():
                    self._is_valid = False

            elif re.match(IMAGE_REGEX, file_path, re.IGNORECASE):
                image_validator = ImageValidator(file_path)
                if not image_validator.is_valid():
                    self._is_valid = False

            # incident fields and indicator fields are using the same scheme.
            elif checked_type(file_path, JSON_INDICATOR_AND_INCIDENT_FIELDS):
                incident_field_validator = IncidentFieldValidator(
                    structure_validator)
                if not incident_field_validator.is_valid_file():
                    self._is_valid = False

            elif checked_type(file_path, JSON_ALL_LAYOUT_REGEXES):
                layout_validator = LayoutValidator(structure_validator)
                if not layout_validator.is_valid_layout():
                    self._is_valid = False

            elif 'CHANGELOG' in file_path:
                self.is_valid_release_notes(file_path)

            elif checked_type(file_path, [REPUTATION_REGEX]):
                print_color(
                    F'Skipping validation for file {file_path} since no validation is currently defined.',
                    LOG_COLORS.YELLOW)

            elif checked_type(file_path, CHECKED_TYPES_REGEXES):
                pass

            else:
                print_error(
                    "The file type of {} is not supported in validate command".
                    format(file_path))
                print_error(
                    "validate command supports: Integrations, Scripts, Playbooks, "
                    "Incident fields, Indicator fields, Images, Release notes, Layouts and Descriptions"
                )
                self._is_valid = False
Example #21
    def validate_modified_files(self, modified_files):  # noqa: C901
        """Validate the modified files from your branch.

        In case we encounter an invalid file we set the self._is_valid param to False.

        Args:
            modified_files (set): A set of the modified files in the current branch.
        """
        for file_path in modified_files:
            old_file_path = None
            if isinstance(file_path, tuple):
                old_file_path, file_path = file_path

            print('Validating {}'.format(file_path))
            if not checked_type(file_path):
                print_warning(
                    '- Skipping validation of non-content entity file.')
                continue

            if re.match(TEST_PLAYBOOK_REGEX, file_path, re.IGNORECASE):
                continue

            structure_validator = StructureValidator(file_path, old_file_path)
            if not structure_validator.is_valid_file():
                self._is_valid = False

            if self.validate_id_set:
                if not self.id_set_validator.is_file_valid_in_set(file_path):
                    self._is_valid = False

            # file-type dispatch, independent of the id-set check above
            if checked_type(file_path, YML_INTEGRATION_REGEXES):
                image_validator = ImageValidator(file_path)
                if not image_validator.is_valid():
                    self._is_valid = False

                description_validator = DescriptionValidator(file_path)
                if not description_validator.is_valid():
                    self._is_valid = False

                integration_validator = IntegrationValidator(
                    structure_validator)
                if self.is_backward_check and \
                        not integration_validator.is_backward_compatible():
                    self._is_valid = False

                if not integration_validator.is_valid_file():
                    self._is_valid = False

            elif checked_type(file_path, YML_BETA_INTEGRATIONS_REGEXES):
                image_validator = ImageValidator(file_path)
                if not image_validator.is_valid():
                    self._is_valid = False

                description_validator = DescriptionValidator(file_path)
                if not description_validator.is_valid_beta_description():
                    self._is_valid = False

                integration_validator = IntegrationValidator(
                    structure_validator)
                if not integration_validator.is_valid_beta_integration():
                    self._is_valid = False

            elif checked_type(file_path, [SCRIPT_REGEX]):
                script_validator = ScriptValidator(structure_validator)
                if self.is_backward_check and \
                        not script_validator.is_backward_compatible():
                    self._is_valid = False
                if not script_validator.is_valid_file():
                    self._is_valid = False

            elif checked_type(file_path, PLAYBOOKS_REGEXES_LIST):
                playbook_validator = PlaybookValidator(structure_validator)
                if not playbook_validator.is_valid_playbook(
                        is_new_playbook=False):
                    self._is_valid = False

            elif checked_type(file_path, PACKAGE_SCRIPTS_REGEXES):
                unifier = Unifier(os.path.dirname(file_path))
                yml_path, _ = unifier.get_script_package_data()
                # Set file path to the yml file
                structure_validator.file_path = yml_path
                script_validator = ScriptValidator(structure_validator)
                if self.is_backward_check and \
                        not script_validator.is_backward_compatible():
                    self._is_valid = False

                if not script_validator.is_valid_file():
                    self._is_valid = False

            elif re.match(IMAGE_REGEX, file_path, re.IGNORECASE):
                image_validator = ImageValidator(file_path)
                if not image_validator.is_valid():
                    self._is_valid = False

            # incident fields and indicator fields are using the same scheme.
            elif checked_type(file_path, JSON_INDICATOR_AND_INCIDENT_FIELDS):
                incident_field_validator = IncidentFieldValidator(
                    structure_validator)
                if not incident_field_validator.is_valid_file():
                    self._is_valid = False
                if self.is_backward_check and \
                        not incident_field_validator.is_backward_compatible():
                    self._is_valid = False

            elif checked_type(file_path, JSON_ALL_LAYOUT_REGEXES):
                layout_validator = LayoutValidator(structure_validator)
                if not layout_validator.is_valid_layout():
                    self._is_valid = False

            elif 'CHANGELOG' in file_path:
                self.is_valid_release_notes(file_path)

            else:
                print_error(
                    "The file type of {} is not supported in validate command".
                    format(file_path))
                print_error(
                    "'validate' command supports: Integrations, Scripts, Playbooks, "
                    "Incident fields, Indicator fields, Images, Release notes, Layouts and Descriptions"
                )
                self._is_valid = False
Example #22
    def _get_lint_files(self):
        unifier = Unifier(self.project_dir)
        code_file = unifier.get_code_file('.py')
        return os.path.abspath(code_file)
Example #23
def update_id_set():
    branches = run_command("git branch")
    branch_name_reg = re.search(r"\* (.*)", branches)
    branch_name = branch_name_reg.group(1)

    print("Getting added files")
    files_string = run_command("git diff --name-status HEAD")
    second_files_string = run_command(
        "git diff --name-status origin/master...{}".format(branch_name))
    added_files, modified_files, added_scripts, modified_scripts = \
        get_changed_files(files_string + '\n' + second_files_string)

    if added_files or modified_files or added_scripts or modified_scripts:
        print("Updating id_set.json")

        with open('./Tests/id_set.json', 'r') as id_set_file:
            try:
                ids_dict = json.load(id_set_file,
                                     object_pairs_hook=OrderedDict)
            except ValueError as ex:
                if "Expecting property name" in str(ex):
                    # if we got this error it means we have corrupted id_set.json
                    # usually it will happen if we merged from master and we had a conflict in id_set.json
                    # so we checkout the id_set.json to be exact as in master and then run update_id_set
                    run_command("git checkout origin/master Tests/id_set.json")
                    with open('./Tests/id_set.json',
                              'r') as id_set_file_from_master:
                        ids_dict = json.load(id_set_file_from_master,
                                             object_pairs_hook=OrderedDict)
                else:
                    raise

        test_playbook_set = ids_dict['TestPlaybooks']
        integration_set = ids_dict['integrations']
        playbook_set = ids_dict['playbooks']
        script_set = ids_dict['scripts']

    if added_files:
        for file_path in added_files:
            if re.match(INTEGRATION_REGEX, file_path, re.IGNORECASE) or \
                    re.match(INTEGRATION_YML_REGEX, file_path, re.IGNORECASE):
                add_new_object_to_id_set(
                    get_script_or_integration_id(file_path),
                    get_integration_data(file_path), integration_set)
                print("Adding {0} to id_set".format(
                    get_script_or_integration_id(file_path)))
            if re.match(SCRIPT_REGEX, file_path, re.IGNORECASE):
                add_new_object_to_id_set(
                    get_script_or_integration_id(file_path),
                    get_script_data(file_path), script_set)
                print("Adding {0} to id_set".format(
                    get_script_or_integration_id(file_path)))
            if re.match(PLAYBOOK_REGEX, file_path, re.IGNORECASE):
                add_new_object_to_id_set(collect_ids(file_path),
                                         get_playbook_data(file_path),
                                         playbook_set)
                print("Adding {0} to id_set".format(collect_ids(file_path)))
            if re.match(TEST_PLAYBOOK_REGEX, file_path, re.IGNORECASE):
                add_new_object_to_id_set(collect_ids(file_path),
                                         get_playbook_data(file_path),
                                         test_playbook_set)
                print("Adding {0} to id_set".format(collect_ids(file_path)))
            if re.match(TEST_SCRIPT_REGEX, file_path, re.IGNORECASE):
                add_new_object_to_id_set(
                    get_script_or_integration_id(file_path),
                    get_script_data(file_path), script_set)
                print("Adding {0} to id_set".format(collect_ids(file_path)))

    if modified_files:
        for file_path in modified_files:
            if re.match(INTEGRATION_REGEX, file_path, re.IGNORECASE) or \
                    re.match(INTEGRATION_YML_REGEX, file_path, re.IGNORECASE):
                id = get_script_or_integration_id(file_path)
                integration_data = get_integration_data(file_path)
                update_object_in_id_set(id, integration_data, file_path,
                                        integration_set)
                print("updated {0} in id_set".format(id))
            if re.match(SCRIPT_REGEX, file_path, re.IGNORECASE) or re.match(
                    TEST_SCRIPT_REGEX, file_path, re.IGNORECASE):
                id = get_script_or_integration_id(file_path)
                script_data = get_script_data(file_path)
                update_object_in_id_set(id, script_data, file_path, script_set)
                print("updated {0} in id_set".format(id))
            if re.match(PLAYBOOK_REGEX, file_path, re.IGNORECASE):
                id = collect_ids(file_path)
                playbook_data = get_playbook_data(file_path)
                update_object_in_id_set(id, playbook_data, file_path,
                                        playbook_set)
                print("updated {0} in id_set".format(id))
            if re.match(TEST_PLAYBOOK_REGEX, file_path, re.IGNORECASE):
                id = collect_ids(file_path)
                playbook_data = get_playbook_data(file_path)
                update_object_in_id_set(id, playbook_data, file_path,
                                        test_playbook_set)
                print("updated {0} in id_set".format(id))

    if added_scripts:
        for added_script_package in added_scripts:
            unifier = Unifier(added_script_package)
            yml_path, code = unifier.get_script_package_data()
            add_new_object_to_id_set(
                get_script_or_integration_id(yml_path),
                get_script_data(yml_path, script_code=code), script_set)
            print("Adding {0} to id_set".format(
                get_script_or_integration_id(yml_path)))

    if modified_scripts:
        for modified_script_package in modified_scripts:
            unifier = Unifier(modified_script_package)
            yml_path, code = unifier.get_script_package_data()
            update_object_in_id_set(
                get_script_or_integration_id(yml_path),
                get_script_data(yml_path, script_code=code), yml_path,
                script_set)
            print("Adding {0} to id_set".format(
                get_script_or_integration_id(yml_path)))

    if added_files or modified_files or added_scripts or modified_scripts:
        new_ids_dict = OrderedDict()
        # we sort each time the whole set in case someone manually changed something
        # it shouldn't take too much time
        new_ids_dict['scripts'] = sort(script_set)
        new_ids_dict['playbooks'] = sort(playbook_set)
        new_ids_dict['integrations'] = sort(integration_set)
        new_ids_dict['TestPlaybooks'] = sort(test_playbook_set)

        with open('./Tests/id_set.json', 'w') as id_set_file:
            json.dump(new_ids_dict, id_set_file, indent=4)

    print("Finished updating id_set.json")