def test_get_integration_data(self):
    """Test for getting all the integration data.

    Parses the integration YAML in both layouts (non-unified package and
    unified single file) and checks each result against its expected
    constant: top-level key sets must match, and the 'Dummy Integration'
    entry must be equal field by field.
    """
    non_unified_file_path = TESTS_DIR + '/test_files/DummyPack/Integrations/DummyIntegration/DummyIntegration.yml'
    unified_file_path = TESTS_DIR + '/test_files/DummyPack/Integrations/integration-DummyIntegration.yml'

    cases = [
        (get_integration_data(non_unified_file_path), TestIntegrations.INTEGRATION_DATA),
        (get_integration_data(unified_file_path), TestIntegrations.UNIFIED_INTEGRATION_DATA),
    ]
    for returned, expected in cases:
        # Key sets must agree before the per-entry comparison is meaningful.
        assert IsEqualFunctions.is_lists_equal(list(returned.keys()), list(expected.keys()))
        assert IsEqualFunctions.is_dicts_equal(
            returned.get('Dummy Integration'), expected.get('Dummy Integration'))
def test_get_integration_data(self):
    """Test for getting all the integration data.

    Runs get_integration_data over both integration layouts and compares
    each parsed result to its expected constant: the non-unified package
    layout against INTEGRATION_DATA and the unified single-file layout
    against UNIFIED_INTEGRATION_DATA.
    """
    integrations_dir = TESTS_DIR + '/test_files/DummyPack/Integrations'
    cases = (
        (integrations_dir + '/DummyIntegration/DummyIntegration.yml', INTEGRATION_DATA),
        (integrations_dir + '/integration-DummyIntegration.yml', UNIFIED_INTEGRATION_DATA),
    )
    for yml_path, expected in cases:
        self.assertDictEqual(get_integration_data(yml_path), expected)
def test_get_integration_data(self):
    """Test for getting all the integration data.

    Parses the dummy integration YAML and checks the extracted data dict
    equals the expected INTEGRATION_DATA constant.
    """
    yml_path = TESTS_DIR + '/test_files/DummyPack/Integrations/DummyIntegration.yml'
    self.assertDictEqual(get_integration_data(yml_path), INTEGRATION_DATA)
def test_get_integration_data(self):
    """Test for getting all the integration data.

    Parses the Cortex XDR integration YAML and compares it to the expected
    INTEGRATION_DATA constant with the environment-dependent 'file_path'
    patched in.

    Fix: the original wrote the resolved path straight into the
    module-level INTEGRATION_DATA constant, mutating shared state that
    other tests may read. The expected dict is now built from a copy, so
    the constant is left untouched.
    """
    file_path = f'{git_path()}/demisto_sdk/tests/test_files/CortexXDR/Integrations/PaloAltoNetworks_XDR/' \
                f'PaloAltoNetworks_XDR.yml'
    file_path = os.path.normpath(file_path)
    data = get_integration_data(file_path)
    # Copy the top level and the inner entry we patch, so the shared
    # constant (and its nested dict) is never modified in place.
    expected = dict(INTEGRATION_DATA)
    expected["Cortex XDR - IR"] = {**expected["Cortex XDR - IR"], "file_path": file_path}
    self.assertDictEqual(data, expected)
def is_file_has_used_id(self, file_path):
    """Check if the ID of the given file already exist in the system.

    Args:
        file_path (string): Path to the file.

    Returns:
        bool. Whether the ID of the given file already exist in the system or not.
    """
    if not self.is_circle:
        return False

    def matches(pattern):
        # All branches use the same case-insensitive match on file_path.
        return re.match(pattern, file_path, re.IGNORECASE)

    if matches(constants.TEST_PLAYBOOK_REGEX):
        obj_type = self.TEST_PLAYBOOK_SECTION
        obj_id = collect_ids(file_path)
        obj_data = get_playbook_data(file_path)
    elif matches(constants.PACKS_SCRIPT_NON_SPLIT_YML_REGEX) or matches(constants.TEST_SCRIPT_REGEX):
        obj_type = self.SCRIPTS_SECTION
        obj_id = get_script_or_integration_id(file_path)
        obj_data = get_script_data(file_path)
    elif matches(constants.PACKS_INTEGRATION_YML_REGEX) or matches(constants.PACKS_INTEGRATION_NON_SPLIT_YML_REGEX):
        obj_type = self.INTEGRATION_SECTION
        obj_id = get_script_or_integration_id(file_path)
        obj_data = get_integration_data(file_path)
    elif matches(constants.PLAYBOOK_REGEX):
        obj_type = self.PLAYBOOK_SECTION
        obj_id = collect_ids(file_path)
        obj_data = get_playbook_data(file_path)
    elif matches(constants.PACKS_SCRIPT_YML_REGEX) or matches(constants.PACKS_SCRIPT_PY_REGEX):
        # Packaged scripts are unified first to obtain the yml path and code.
        unifier = Unifier(os.path.dirname(os.path.dirname(file_path)))
        yml_path, code = unifier.get_script_or_integration_package_data()
        obj_data = get_script_data(yml_path, script_code=code)
        obj_type = self.SCRIPTS_SECTION
        obj_id = get_script_or_integration_id(yml_path)
    else:
        # A json file - no ID duplication check applies.
        return False

    return self.is_id_duplicated(obj_id, obj_data, obj_type)
def is_file_valid_in_set(self, file_path):
    """Check if the file is represented correctly in the id_set

    Args:
        file_path (string): Path to the file.

    Returns:
        bool. Whether the file is represented correctly in the id_set or not.
    """
    if not self.is_circle:
        # No need to check on local env because the id_set will contain this info after the commit
        return True

    def matches(pattern):
        # All branches use the same case-insensitive match on file_path.
        return re.match(pattern, file_path, re.IGNORECASE)

    if matches(constants.PLAYBOOK_REGEX):
        return self.is_valid_in_id_set(file_path, get_playbook_data(file_path), self.playbook_set)
    if matches(constants.TEST_PLAYBOOK_REGEX):
        return self.is_valid_in_id_set(file_path, get_playbook_data(file_path), self.test_playbook_set)
    if matches(constants.TEST_SCRIPT_REGEX) or matches(constants.PACKS_SCRIPT_NON_SPLIT_YML_REGEX):
        return self.is_valid_in_id_set(file_path, get_script_data(file_path), self.script_set)
    if matches(constants.PACKS_INTEGRATION_YML_REGEX) or matches(constants.PACKS_INTEGRATION_NON_SPLIT_YML_REGEX):
        return self.is_valid_in_id_set(file_path, get_integration_data(file_path), self.integration_set)
    if matches(constants.PACKS_SCRIPT_YML_REGEX) or matches(constants.PACKS_SCRIPT_PY_REGEX):
        # Packaged scripts are unified first to obtain the yml path and code.
        unifier = Unifier(os.path.dirname(file_path))
        yml_path, code = unifier.get_script_or_integration_package_data()
        return self.is_valid_in_id_set(yml_path, get_script_data(yml_path, script_code=code), self.script_set)
    # File types without an id_set representation are considered valid.
    return True
def is_file_valid_in_set(self, file_path, file_type, ignored_errors=None):
    """Check if the file is valid in the id_set

    Args:
        file_path (string): Path to the file.
        file_type (string): The file type.
        ignored_errors (list): a list of ignored errors for the specific file

    Returns:
        bool. Whether the file is valid in the id_set or not.
    """
    # Recorded unconditionally, even when the circle check short-circuits.
    self.ignored_errors = ignored_errors

    if not self.is_circle:
        # No need to check on local env because the id_set will contain this info after the commit
        return True

    click.echo(f"id set validations for: {file_path}")

    if re.match(constants.PACKS_SCRIPT_YML_REGEX, file_path, re.IGNORECASE):
        # Packaged scripts are unified first to obtain the yml path and code.
        unifier = Unifier(os.path.dirname(file_path))
        yml_path, code = unifier.get_script_or_integration_package_data()
        return self._is_non_real_command_found(get_script_data(yml_path, script_code=code))
    if file_type == constants.FileType.INCIDENT_TYPE:
        return self._is_incident_type_default_playbook_found(OrderedDict(get_incident_type_data(file_path)))
    if file_type == constants.FileType.INTEGRATION:
        return self._is_integration_classifier_and_mapper_found(get_integration_data(file_path))
    if file_type == constants.FileType.CLASSIFIER:
        return self._is_classifier_incident_types_found(get_classifier_data(file_path))
    if file_type == constants.FileType.MAPPER:
        return self._is_mapper_incident_types_found(get_mapper_data(file_path))
    if file_type == constants.FileType.PLAYBOOK:
        return self._are_playbook_entities_versions_valid(get_playbook_data(file_path), file_path)
    # Unrecognized file types have no id_set validation.
    return True