def _unify(self, dest_dir: Path) -> List[Path]:
    """Produce unified file(s) for this content object under ``dest_dir``.

    Args:
        dest_dir: Destination directory the unified file(s) are written to.

    Returns:
        List[Path]: Paths of the newly created files.

    Raises:
        ContentDumpError: If the unify step produced no files.

    TODO:
        1. Add Exception raising in unify module.
        2. Verbosity to quiet mode option in unify module.
    """
    # Scripts and integrations live under different content directories.
    if self._content_type == FileType.SCRIPT:
        content_dir = SCRIPTS_DIR
    else:
        content_dir = INTEGRATIONS_DIR
    # Run the unify step on the package directory.
    merger = Unifier(input=str(self.path.parent), dir_name=content_dir, output=dest_dir, force=True)
    produced: List[str] = merger.merge_script_package_to_yml()
    # The unify module does not raise on failure, so treat "no output" as an error here.
    if not produced:
        raise exc.ContentDumpError(self, self.path, "Unable to unify object")
    return list(map(Path, produced))
def copy_packs_content_to_packs_bundle(self, packs):
    """
    Copy content in packs to the bundle that gets zipped to 'content_packs.zip'. Preserves directory structure
    except that packages inside the "Integrations" or "Scripts" directory inside a pack are flattened. Adds file
    prefixes according to how server expects to ingest the files, e.g. 'integration-' is prepended to integration
    yml filenames and 'script-' is prepended to script yml filenames and so on and so forth.

    Args:
        packs (list): Paths of pack directories to copy into the packs bundle.
    """
    for pack in packs:
        pack_name = os.path.basename(pack)
        # Packs in the skip list are excluded from the bundle entirely.
        if pack_name in self.packs_to_skip:
            continue
        pack_dst = os.path.join(self.packs_bundle, pack_name)
        os.mkdir(pack_dst)
        pack_dirs = get_child_directories(pack)
        pack_files = get_child_files(pack)
        # copy first level pack files over
        for file_path in pack_files:
            shutil.copy(file_path, os.path.join(pack_dst, os.path.basename(file_path)))
        # handle content directories in the pack
        for content_dir in pack_dirs:
            dir_name = os.path.basename(content_dir)
            dest_dir = os.path.join(pack_dst, dir_name)
            os.mkdir(dest_dir)
            if dir_name in DIR_TO_PREFIX:
                # Package-format content (Integrations/Scripts): unify each package into a
                # single yml in dest_dir instead of copying the directory tree.
                packages_dirs = get_child_directories(content_dir)
                for package_dir in packages_dirs:
                    ymls, _ = get_yml_paths_in_dir(package_dir, error_msg='')
                    # Skip packages with no yml, or with only an already-unified yml.
                    if not ymls or (len(ymls) == 1 and ymls[0].endswith('_unified.yml')):
                        msg = 'Skipping package: {} -'.format(package_dir)
                        if not ymls:
                            print_warning('{} No yml files found in the package directory'.format(msg))
                        else:
                            print_warning('{} Only unified yml found in the package directory'.format(msg))
                        continue
                    package_dir_name = os.path.basename(package_dir)
                    unifier = Unifier(package_dir, dir_name, dest_dir)
                    unifier.merge_script_package_to_yml()
                    # also copy CHANGELOG markdown files over (should only be one per package)
                    package_files = get_child_files(package_dir)
                    changelog_files = [
                        file_path for file_path in package_files if 'CHANGELOG.md' in file_path
                    ]
                    for md_file_path in changelog_files:
                        md_out_name = '{}-{}_CHANGELOG.md'.format(DIR_TO_PREFIX.get(dir_name), package_dir_name)
                        shutil.copyfile(md_file_path, os.path.join(dest_dir, md_out_name))
            else:
                # Non-package content directories are copied verbatim.
                self.copy_dir_files(content_dir, dest_dir)
def test_unify_script__docker45(self):
    """Sanity check that merge_script_package_to_yml emits both the main and the 4.5 unified ymls."""
    create_test_package(
        test_dir=self.test_dir_path,
        package_name=self.package_name,
        base_yml='demisto_sdk/tests/test_files/Unifier/SampleScriptPackage/SampleScriptPackageDocker45.yml',
        script_code=TEST_VALID_CODE,
    )
    produced = Unifier(input=self.export_dir_path, output=self.test_dir_path).merge_script_package_to_yml()
    # A docker45 package must yield exactly two output files: main and 4.5 variant.
    assert len(produced) == 2
    main_path, legacy_path = produced
    assert main_path == self.expected_yml_path
    assert legacy_path == self.expected_yml_path.replace('.yml', '_45.yml')
    assert get_yaml('demisto_sdk/tests/test_files/Unifier/SampleScriptPackage/'
                    'script-SampleScriptPackageSanityDocker45.yml') == get_yaml(main_path)
    assert get_yaml('demisto_sdk/tests/test_files/Unifier/SampleScriptPackage/'
                    'script-SampleScriptPackageSanityDocker45_45.yml') == get_yaml(legacy_path)
def check_api_module_imports(self, py_num):
    """Copy an imported API module into the package, if the integration imports one.

    Scans the integration's code for an ApiModules import and, when one is found,
    copies the module file from the content repository into the package directory.

    :param py_num: The python version - api modules are in python 3
    """
    if py_num > 3:
        unifier = Unifier(self.project_dir)
        code_file_path = unifier.get_code_file('.py')
        # BUGFIX: pre-initialize so the except handler below cannot hit a NameError
        # when opening/reading the code file fails before an import is detected.
        module_name = ''
        try:
            # Look for an import to an API module in the code. If there is such import, we need to copy the correct
            # module file to the package directory.
            with io.open(code_file_path, mode='r', encoding='utf-8') as script_file:
                _, module_name = unifier.check_api_module_imports(script_file.read())
            if module_name:
                module_path = os.path.join(self.configuration.env_dir, 'Packs', 'ApiModules', 'Scripts',
                                           module_name, module_name + '.py')
                print_v('Copying ' + os.path.join(self.configuration.env_dir, 'Scripts', module_path))
                if not os.path.exists(module_path):
                    raise ValueError(
                        'API Module {} not found, you might be outside of the content repository'
                        ' or this API module does not exist'.format(module_name))
                shutil.copy(os.path.join(module_path), self.project_dir)
        except Exception as e:
            # Best-effort: report and continue rather than failing the whole run.
            print_v('Unable to retrieve the module file {}: {}'.format(module_name, str(e)))
def create_default_script(self):
    """Build the default sample script assets into this script object.

    Reads the bundled sample code/yml/image/changelog/description assets, feeds
    them to build(), and optionally produces a unified yml before removing the
    temporary package directory.
    """
    default_script_dir = 'assets/default_script'
    # Use context managers so file handles are released even on exception
    # (consistent with create_default_integration).
    with open(suite_join_path(default_script_dir, 'sample_script.py')) as code_file:
        code = str(code_file.read())
    with open(suite_join_path(default_script_dir, 'sample_script.yml')) as yml_file:
        yml_data = yaml.load(yml_file, Loader=yaml.FullLoader)
    with open(suite_join_path(default_script_dir, 'sample_script_image.png'), 'rb') as image_file:
        image = image_file.read()
    with open(suite_join_path(default_script_dir, 'CHANGELOG.md')) as changelog_file:
        changelog = str(changelog_file.read())
    with open(suite_join_path(default_script_dir, 'sample_script_description.md')) as description_file:
        description = str(description_file.read())
    self.build(code=code,
               yml=yml_data,
               image=image,
               changelog=changelog,
               description=description)
    if self.create_unified:
        # Produce a unified yml next to the package, then drop the package dir.
        unifier = Unifier(input=self.path, output=os.path.dirname(self._tmpdir_integration_path))
        unifier.merge_script_package_to_yml()
        shutil.rmtree(self._tmpdir_integration_path)
def test_get_integration_doc_link_negative(tmp_path):
    """
    Given:
    - Case A: integration which does not have README in the integration dir
    - Case B: integration with empty README in the integration dir

    When:
    - Getting integration doc link

    Then:
    - Verify an empty string is returned
    """
    unifier = Unifier(str(tmp_path))
    # Case A: no README file exists at all.
    no_readme_link = unifier.get_integration_doc_link(
        {'commonfields': {'id': 'Integration With No README'}})
    assert no_readme_link == ''
    # Case B: a README exists but is empty.
    (tmp_path / 'README.md').write_text('')
    empty_readme_link = unifier.get_integration_doc_link(
        {'commonfields': {'id': 'Integration With Empty README'}})
    assert empty_readme_link == ''
def test_get_integration_doc_link_positive(tmp_path):
    """
    Given:
    - Cortex XDR - IOC integration with README

    When:
    - Getting integration doc link

    Then:
    - Verify the expected integration doc markdown link is returned
    - Verify the integration doc URL exists and reachable
    """
    (tmp_path / 'README.md').write_text('README')
    doc_link = Unifier(str(tmp_path)).get_integration_doc_link(
        {'commonfields': {'id': 'Cortex XDR - IOC'}})
    assert doc_link == \
        '[View Integration Documentation](https://xsoar.pan.dev/docs/reference/integrations/cortex-xdr---ioc)'
    # Extract the URL from the markdown link and make sure it is reachable.
    link = re.findall(r'\(([^)]+)\)', doc_link)[0]
    try:
        response = requests.get(link, verify=False, timeout=10)
        response.raise_for_status()
    except requests.HTTPError as ex:
        raise Exception(f'Failed reaching to integration doc link {link} - {ex}')
def test_unify_integration__detailed_description_with_special_char(self):
    """Ensure a detailed description with non-ASCII characters survives unification intact."""
    # NOTE(review): the exact whitespace of this literal was reconstructed - confirm
    # against the expected-yml fixture.
    description = '''
        some test with special chars
        שלום
        hello
        你好
        '''
    create_test_package(
        test_dir=self.test_dir_path,
        package_name=self.package_name,
        base_yml='demisto_sdk/tests/test_files/Unifier/SampleIntegPackage/SampleIntegPackage.yml',
        script_code=TEST_VALID_CODE,
        image_file='demisto_sdk/tests/test_files/Unifier/SampleIntegPackage/SampleIntegPackage_image.png',
        detailed_description=description,
    )
    exported = Unifier(self.export_dir_path, output=self.test_dir_path).merge_script_package_to_yml()[0]
    assert exported == self.expected_yml_path
    actual_yml = get_yaml(exported)
    assert get_yaml('demisto_sdk/tests/test_files/Unifier/SampleIntegPackage/'
                    'integration-SampleIntegPackageDescSpecialChars.yml') == actual_yml
    assert actual_yml['detaileddescription'] == description
def test_unify_integration(self):
    """Sanity test of merge_script_package_to_yml for an integration package."""
    create_test_package(
        test_dir=self.test_dir_path,
        package_name=self.package_name,
        base_yml='demisto_sdk/tests/test_files/Unifier/SampleIntegPackage/SampleIntegPackage.yml',
        script_code=TEST_VALID_CODE,
        detailed_description=TEST_VALID_DETAILED_DESCRIPTION,
        image_file='demisto_sdk/tests/test_files/Unifier/SampleIntegPackage/SampleIntegPackage_image.png',
    )
    exported = Unifier(input=self.export_dir_path, output=self.test_dir_path).merge_script_package_to_yml()[0]
    assert exported == self.expected_yml_path
    # The script code (including comments) must be embedded verbatim.
    marker = '# this is a comment text inside a file 033dab25fd9655480dbec3a4c579a0e6'
    with open(exported) as unified_file:
        assert marker in unified_file.read()
    assert get_yaml('demisto_sdk/tests/test_files/Unifier/SampleIntegPackage/'
                    'integration-SampleIntegPackageSanity.yml') == get_yaml(exported)
def create_default_integration(self):
    """Populate this integration from the bundled default sample assets, optionally producing a unified yml."""
    asset_dir = 'assets/default_integration'

    def read_text(name):
        # Local helper: read a text asset as str.
        with open(suite_join_path(asset_dir, name)) as fp:
            return str(fp.read())

    with open(suite_join_path(asset_dir, 'sample.yml')) as yml_file:
        yml = yaml.safe_load(yml_file)
    with open(suite_join_path(asset_dir, 'sample_image.png'), 'rb') as image_file:
        image = image_file.read()
    self.build(code=read_text('sample.py'),
               yml=yml,
               image=image,
               changelog=read_text('CHANGELOG.md'),
               description=read_text('sample_description.md'))
    if self.create_unified:
        # Produce the unified yml next to the package, then drop the package dir.
        Unifier(input=self.path,
                output=os.path.dirname(self._tmpdir_integration_path)).merge_script_package_to_yml()
        shutil.rmtree(self._tmpdir_integration_path)
def test_unify_default_output_integration_for_relative_current_dir_input(self, mocker):
    """
    Given
    - Input path of '.'.
    - UploadTest integration.

    When
    - Running Unify on it.

    Then
    - Ensure Unify command works with default output given relative path to current directory.
    """
    from demisto_sdk.commands.unify.unifier import Unifier
    # Make '.' resolve to the UploadTest package regardless of the real cwd.
    mocker.patch(
        'demisto_sdk.commands.unify.unifier.os.path.abspath',
        return_value=TESTS_DIR + '/test_files/Packs/DummyPack/Integrations/UploadTest')
    produced = Unifier('.').merge_script_package_to_yml()
    expected_yml_path = TESTS_DIR + '/test_files/Packs/DummyPack/Integrations/UploadTest/integration-UploadTest.yml'
    assert produced[0] == expected_yml_path
    # Clean up the generated file so the repo checkout stays pristine.
    os.remove(expected_yml_path)
def test_unify_script(self):
    """Sanity test of merge_script_package_to_yml for a script package."""
    create_test_package(
        test_dir=self.test_dir_path,
        package_name=self.package_name,
        base_yml='demisto_sdk/tests/test_files/Unifier/SampleScriptPackage/SampleScriptPackage.yml',
        script_code=TEST_VALID_CODE,
    )
    exported = Unifier(input=self.export_dir_path, output=self.test_dir_path).merge_script_package_to_yml()[0]
    assert exported == self.expected_yml_path
    assert get_yaml('demisto_sdk/tests/test_files/Unifier/SampleScriptPackage/'
                    'script-SampleScriptPackageSanity.yml') == get_yaml(exported)
def get_integration_api_modules(file_path, data_dictionary, is_unified_integration):
    """Return the name of the API module imported by the integration's script code (if any)."""
    unifier = Unifier(os.path.dirname(file_path))
    # Unified integrations carry their code inline in the yml; package
    # integrations keep it in a separate file next to the yml.
    if is_unified_integration:
        script_code = data_dictionary.get('script', {}).get('script', '')
    else:
        script_code = unifier.get_script_or_integration_package_data()[1]
    _, module_name = unifier.check_api_module_imports(script_code)
    return module_name
def test_get_code_file_case_insensative(tmp_path):
    """get_code_file must skip *.tests.ps1 files even when they sort before the code file."""
    # Create an integration dir with some files
    integration_dir = tmp_path / "TestDummyInt"
    os.makedirs(integration_dir)
    # touch() creates the empty files without leaking the file handles that
    # the previous open(..., 'a') calls never closed.
    (integration_dir / "Dummy.ps1").touch()
    # a test file which is named such a way that it comes up first
    (integration_dir / "ADummy.tests.ps1").touch()
    unifier = Unifier(str(integration_dir))
    assert unifier.get_code_file(".ps1") == str(integration_dir / "Dummy.ps1")
def test_clean_python_code():
    """clean_python_code strips demistomock/CommonServer imports; the __future__ import is optional."""
    unifier = Unifier("test_files/VulnDB")
    script_code = "import demistomock as demisto\nfrom CommonServerPython import * # test comment being removed\n" \
                  "from CommonServerUserPython import *\nfrom __future__ import print_function"
    # remove_print_future=False keeps the __future__ import in place.
    kept_future = unifier.clean_python_code(script_code, remove_print_future=False)
    assert kept_future == "\n\n\nfrom __future__ import print_function"
    # Default behaviour removes the __future__ import as well.
    assert unifier.clean_python_code(kept_future).strip() == ""
def is_file_has_used_id(self, file_path):
    """Check if the ID of the given file already exist in the system.

    Args:
        file_path (string): Path to the file.

    Returns:
        bool. Whether the ID of the given file already exist in the system or not.
    """
    is_used = False
    is_json_file = False
    # Only enforced on CI runs; locally the id_set may not reflect the commit yet.
    if self.is_circle:
        # Dispatch on the file path shape to extract the object's id/data/section.
        if re.match(constants.TEST_PLAYBOOK_REGEX, file_path, re.IGNORECASE):
            obj_type = self.TEST_PLAYBOOK_SECTION
            obj_id = collect_ids(file_path)
            obj_data = get_playbook_data(file_path)
        elif re.match(constants.PACKS_SCRIPT_NON_SPLIT_YML_REGEX, file_path, re.IGNORECASE) or \
                re.match(constants.TEST_SCRIPT_REGEX, file_path, re.IGNORECASE):
            obj_type = self.SCRIPTS_SECTION
            obj_id = get_script_or_integration_id(file_path)
            obj_data = get_script_data(file_path)
        elif re.match(constants.PACKS_INTEGRATION_YML_REGEX, file_path, re.IGNORECASE) or \
                re.match(constants.PACKS_INTEGRATION_NON_SPLIT_YML_REGEX, file_path, re.IGNORECASE):
            obj_type = self.INTEGRATION_SECTION
            obj_id = get_script_or_integration_id(file_path)
            obj_data = get_integration_data(file_path)
        elif re.match(constants.PLAYBOOK_REGEX, file_path, re.IGNORECASE):
            obj_type = self.PLAYBOOK_SECTION
            obj_id = collect_ids(file_path)
            obj_data = get_playbook_data(file_path)
        elif re.match(constants.PACKS_SCRIPT_YML_REGEX, file_path, re.IGNORECASE) or \
                re.match(constants.PACKS_SCRIPT_PY_REGEX, file_path, re.IGNORECASE):
            # Package-format script: the package root is two levels above the matched file.
            unifier = Unifier(os.path.dirname(os.path.dirname(file_path)))
            yml_path, code = unifier.get_script_or_integration_package_data()
            obj_data = get_script_data(yml_path, script_code=code)
            obj_type = self.SCRIPTS_SECTION
            obj_id = get_script_or_integration_id(yml_path)
        else:
            # In case of a json file
            is_json_file = True
        if not is_json_file:
            is_used = self.is_id_duplicated(obj_id, obj_data, obj_type)
    return is_used
def integration_uploader(self, path: str):
    """Upload an integration (unified yml or package directory) to Cortex XSOAR.

    A package directory is first unified into temporary yml file(s); those are
    removed again in the finally block. Failures are recorded in
    self.failed_uploaded_files and self.status_code is set to 1.

    Args:
        path (str): Path to the integration yml file or package directory.
    """
    is_dir = False
    file_name = os.path.basename(path)
    docker45_path = ''
    try:
        if os.path.isdir(path):
            # Create a temporary unified yml file
            try:
                is_dir = True
                unifier = Unifier(input=path, output=path)
                unified_paths = unifier.merge_script_package_to_yml()
                path = unified_paths[0]
                # A second output file (if produced) is the server-4.5 variant.
                docker45_path = unified_paths[1] if len(unified_paths) > 1 else ''
                file_name = os.path.basename(path)
            except IndexError:
                # merge_script_package_to_yml returned no files - not a valid package.
                print_error(
                    f'Error uploading integration from pack. /'
                    f'Check that the given integration path contains a valid integration: {path}.')
                self.status_code = 1
                self.failed_uploaded_files.append((file_name, 'Integration'))
                return
            except Exception as err:
                print_error(str('Upload integration failed\n'))
                print_error(str(err))
                self.failed_uploaded_files.append((file_name, 'Integration'))
                self.status_code = 1
                return

        # Upload the file to Cortex XSOAR
        result = self.client.integration_upload(file=path)

        # Print results
        print_v(f'Result:\n{result.to_str()}', self.log_verbose)
        print_color(f'Uploaded integration - \'{file_name}\': successfully', LOG_COLORS.GREEN)
        self.successfully_uploaded_files.append((file_name, 'Integration'))
    except Exception as err:
        self._parse_error_response(err, 'integration', file_name)
        self.failed_uploaded_files.append((file_name, 'Integration'))
        self.status_code = 1
    finally:
        # Remove the temporary file
        if is_dir:
            self._remove_temp_file(path)
            if docker45_path:
                self._remove_temp_file(docker45_path)
def test_get_code_file():
    """get_code_file resolves the code file for integrations and scripts, and raises when none exists."""
    base = f"{git_path()}/demisto_sdk/tests/test_files"
    # Test integration case
    assert Unifier(f"{base}/VulnDB/").get_code_file(".py") == f"{base}/VulnDB/VulnDB.py"
    # A package with no code file must raise.
    with pytest.raises(Exception):
        Unifier(f"{base}/Unifier/SampleNoPyFile").get_code_file(".py")
    # Test script case
    assert Unifier(f"{base}/CalculateGeoDistance/").get_code_file(".py") == \
        f"{base}/CalculateGeoDistance/CalculateGeoDistance.py"
def test_clean_python_code():
    """clean_python_code removes server-import statements from a single-line code blob."""
    from demisto_sdk.commands.unify.unifier import Unifier
    unifier = Unifier("test_files/VulnDB")
    # NOTE(review): this fixture has no newlines between statements - presumably
    # intentional for exercising the regexes; confirm against clean_python_code.
    script_code = "import demistomock as demistofrom CommonServerPython import *" \
                  "from CommonServerUserPython import *from __future__ import print_function"
    # remove_print_future=False keeps the __future__ import.
    kept_future = unifier.clean_python_code(script_code, remove_print_future=False)
    assert kept_future == "from __future__ import print_function"
    # Default behaviour removes it as well.
    assert unifier.clean_python_code(kept_future) == ""
def create_unifieds_and_copy(self, package_dir, dest_dir='', skip_dest_dir=''):
    """
    For directories that have packages, aka subdirectories for each integration/script
    e.g. "Integrations", "Scripts". Creates a unified yml and writes it to the dest_dir

    Arguments:
        package_dir: (str)
            Path to directory in which there are package subdirectories. e.g. "Integrations", "Scripts"
        dest_dir: (str)
            Path to destination directory to which the unified yml for a package should be written
        skip_dest_dir: (str)
            Path to the directory to which the unified yml for a package should be written in the
            case the package is part of the skipped list
    """
    dest_dir = dest_dir if dest_dir else self.content_bundle
    skip_dest_dir = skip_dest_dir if skip_dest_dir else self.test_bundle
    scanned_packages = glob.glob(os.path.join(package_dir, '*/'))
    package_dir_name = os.path.basename(package_dir)
    for package in scanned_packages:
        ymls, _ = get_yml_paths_in_dir(package, error_msg='')
        # Skip packages with no yml, or with only an already-unified yml.
        if not ymls or (len(ymls) == 1 and ymls[0].endswith('_unified.yml')):
            msg = 'Skipping package: {} -'.format(package)
            if not ymls:
                print_warning(f'{msg} No yml files found in the package directory')
            else:
                print_warning(f'{msg} Only unified yml found in the package directory')
            continue
        unification_tool = Unifier(package, package_dir_name, dest_dir)
        if any(package_to_skip in package for package_to_skip in self.packages_to_skip):
            # there are some packages that we don't want to include in the content zip
            # for example HelloWorld integration
            unification_tool = Unifier(package, package_dir_name, skip_dest_dir)
            print('skipping {}'.format(package))
        # Only unify packages whose fromversion does not exceed 6.0.0.
        if parse_version(unification_tool.yml_data.get('fromversion', '0.0.0')) <= parse_version('6.0.0'):
            unified_yml_paths = unification_tool.merge_script_package_to_yml(
                file_name_suffix=self.file_name_suffix)
            for unified_yml_path in unified_yml_paths:
                self.add_from_version_to_yml(unified_yml_path)
def process_script(file_path):
    """Collect id_set data for a script, whether it is a unified yml or a package directory."""
    res = []
    if not os.path.isfile(file_path):
        return res
    if checked_type(file_path, (SCRIPT_REGEX, PACKS_SCRIPT_YML_REGEX)):
        # Unified / non-split script yml.
        print("adding {0} to id_set".format(file_path))
        res.append(get_script_data(file_path))
    else:
        # package script
        yml_path, code = Unifier(file_path).get_script_package_data()
        print("adding {0} to id_set".format(file_path))
        res.append(get_script_data(yml_path, script_code=code))
    return res
def process_script(file_path: str, print_logs: bool) -> list:
    """Collect id_set data for a script, whether it is a unified yml or a package directory.

    Args:
        file_path: Path to the script yml file or package directory.
        print_logs: Whether to print progress messages.

    Returns:
        list: Collected script data entries (empty if file_path is not a file).
    """
    res = []
    if not os.path.isfile(file_path):
        return res
    if checked_type(file_path, (PACKS_SCRIPT_YML_REGEX, PACKS_SCRIPT_NON_SPLIT_YML_REGEX)):
        # Unified / non-split script yml.
        if print_logs:
            print("adding {} to id_set".format(file_path))
        res.append(get_script_data(file_path))
    else:
        # package script
        yml_path, code = Unifier(file_path).get_script_or_integration_package_data()
        if print_logs:
            print("adding {} to id_set".format(file_path))
        res.append(get_script_data(yml_path, script_code=code))
    return res
def createPyDocumentation(path, origin, language):
    """Build documentation entries for the public functions defined in a python script.

    Compiles and executes the (cleaned) script, then parses the docstring of every
    public callable into a documentation record.

    Args:
        path: Path to the python source file.
        origin: Passed through to reformatPythonOutput.
        language: Passed through to reformatPythonOutput.

    Returns:
        The reformatted output tuple, or (None, True) if any docstring is missing.
    """
    isErrorPy = False
    with open(path, 'r') as file:
        # Strip server-specific imports so the script can execute standalone.
        pyScript = Unifier.clean_python_code(file.read(), remove_print_future=False)
    code = compile(pyScript, '<string>', 'exec')
    ns = {'demisto': demistomock}
    exec(code, ns)  # guardrails-disable-line
    x = []
    for a in ns:
        if a != 'demisto' and callable(ns.get(a)) and a not in pyPrivateFuncs:
            docstring = inspect.getdoc(ns.get(a))
            if not docstring:
                print("docstring for function {} is empty".format(a))
                isErrorPy = True
            else:
                y = parser.parse_docstring(docstring)
                y["name"] = a
                # getfullargspec replaces the deprecated getargspec (removed in
                # Python 3.11); index 0 is the positional-argument name list.
                y["argList"] = inspect.getfullargspec(ns.get(a))[0] if pyIrregularFuncs.get(a, None) is None \
                    else pyIrregularFuncs[a]["argList"]
                x.append(y)
    if isErrorPy:
        return None, isErrorPy
    return reformatPythonOutput(x, origin, language)
def mock_unifier():
    """Return a Unifier whose __init__ is a no-op and whose get_script_package_data is stubbed."""
    def fake_package_data(*args, **kwargs):
        # Stub: pretend the package always resolves to the valid script with empty code.
        return VALID_SCRIPT_PATH, ''

    with patch.object(Unifier, '__init__', lambda a, b: None):
        Unifier.get_script_package_data = fake_package_data
        return Unifier('')
def is_file_valid_in_set(self, file_path):
    """Check if the file is represented correctly in the id_set

    Args:
        file_path (string): Path to the file.

    Returns:
        bool. Whether the file is represented correctly in the id_set or not.
    """
    is_valid = True
    # No need to check on local env because the id_set will contain this info after the commit
    if self.is_circle:
        # Dispatch on the file path shape to pick the matching id_set section.
        if re.match(constants.PLAYBOOK_REGEX, file_path, re.IGNORECASE):
            playbook_data = get_playbook_data(file_path)
            is_valid = self.is_valid_in_id_set(file_path, playbook_data, self.playbook_set)
        elif re.match(constants.TEST_PLAYBOOK_REGEX, file_path, re.IGNORECASE):
            playbook_data = get_playbook_data(file_path)
            is_valid = self.is_valid_in_id_set(file_path, playbook_data, self.test_playbook_set)
        elif re.match(constants.TEST_SCRIPT_REGEX, file_path, re.IGNORECASE) or \
                re.match(constants.PACKS_SCRIPT_NON_SPLIT_YML_REGEX, file_path, re.IGNORECASE):
            script_data = get_script_data(file_path)
            is_valid = self.is_valid_in_id_set(file_path, script_data, self.script_set)
        elif re.match(constants.PACKS_INTEGRATION_YML_REGEX, file_path, re.IGNORECASE) or \
                re.match(constants.PACKS_INTEGRATION_NON_SPLIT_YML_REGEX, file_path, re.IGNORECASE):
            integration_data = get_integration_data(file_path)
            is_valid = self.is_valid_in_id_set(file_path, integration_data, self.integration_set)
        elif re.match(constants.PACKS_SCRIPT_YML_REGEX, file_path, re.IGNORECASE) or \
                re.match(constants.PACKS_SCRIPT_PY_REGEX, file_path, re.IGNORECASE):
            # Package-format script: pull the yml and code from the package directory.
            unifier = Unifier(os.path.dirname(file_path))
            yml_path, code = unifier.get_script_or_integration_package_data()
            script_data = get_script_data(yml_path, script_code=code)
            is_valid = self.is_valid_in_id_set(yml_path, script_data, self.script_set)
    return is_valid
def copy_content_yml(path, out_path, yml_info):
    """
    Copy content ymls (except for playbooks) to the out_path (presumably a bundle)
    """
    parent_dir_name = os.path.basename(os.path.dirname(path))
    is_unifiable = parent_dir_name in DIR_TO_PREFIX and not os.path.basename(path).startswith('playbook-')
    if is_unifiable:
        # Integrations keep their script data nested under 'script'; scripts are flat.
        script_obj = yml_info if parent_dir_name == SCRIPTS_DIR else yml_info['script']
        unifier = Unifier(os.path.dirname(path), parent_dir_name, out_path)
        out_map = unifier.write_yaml_with_docker(yml_info, yml_info, script_obj)
        if len(out_map.keys()) > 1:
            print(" - yaml generated multiple files: {}".format(out_map.keys()))
        return
    # not a script or integration file. Simply copy
    shutil.copyfile(path, out_path)
def test_insert_script_to_yml(package_path, dir_name, file_path):
    """insert_script_to_yml embeds the cleaned script code into the yml structure."""
    with patch.object(Unifier, "__init__", lambda a, b, c, d, e: None):
        unifier = Unifier("", None, None, None)
    unifier.package_path = package_path
    unifier.dir_name = dir_name
    unifier.is_script_package = dir_name == 'Scripts'

    with open(file_path + ".yml", "r") as yml:
        original_yml = yaml.safe_load(yml)
    expected_unified = copy.deepcopy(original_yml)

    yml_unified, script_path = unifier.insert_script_to_yml(".py", expected_unified, original_yml)

    with open(file_path + ".py", mode="r", encoding="utf-8") as script_file:
        cleaned_code = unifier.clean_python_code(script_file.read())
    # Scripts keep code at the top level; integrations nest it under 'script'.
    if isinstance(expected_unified.get('script', {}), str):
        expected_unified['script'] = cleaned_code
    else:
        expected_unified['script']['script'] = cleaned_code

    assert yml_unified == expected_unified
    assert script_path == file_path + ".py"
def test_insert_image_to_yml():
    """insert_image_to_yml embeds the package image as a base64 data URI into the yml."""
    with patch.object(Unifier, "__init__", lambda a, b, c, d, e: None):
        unifier = Unifier('', None, None, None)
    unifier.package_path = f"{git_path()}/demisto_sdk/tests/test_files/VulnDB/"
    unifier.dir_name = "Integrations"
    unifier.is_script_package = False
    unifier.image_prefix = "data:image/png;base64,"

    with open(f"{git_path()}/demisto_sdk/tests/test_files/VulnDB/VulnDB_image.png", "rb") as image_file:
        encoded_image = unifier.image_prefix + base64.b64encode(image_file.read()).decode('utf-8')
    with open(f"{git_path()}/demisto_sdk/tests/test_files/VulnDB/VulnDB.yml",
              mode="r", encoding="utf-8") as yml_file:
        yml_unified_test = yaml.load(yml_file, Loader=yamlordereddictloader.SafeLoader)
    with open(f"{git_path()}/demisto_sdk/tests/test_files/VulnDB/VulnDB.yml", "r") as yml:
        yml_data = yaml.safe_load(yml)

    yml_unified, found_img_path = unifier.insert_image_to_yml(yml_data, yml_unified_test)
    yml_unified_test['image'] = encoded_image

    assert found_img_path == f"{git_path()}/demisto_sdk/tests/test_files/VulnDB/VulnDB_image.png"
    assert yml_unified == yml_unified_test
def is_file_valid_in_set(self, file_path, file_type, ignored_errors=None):
    """Check if the file is valid in the id_set

    Args:
        file_path (string): Path to the file.
        file_type (string): The file type.
        ignored_errors (list): a list of ignored errors for the specific file

    Returns:
        bool. Whether the file is valid in the id_set or not.
    """
    self.ignored_errors = ignored_errors
    is_valid = True
    # No need to check on local env because the id_set will contain this info after the commit
    if self.is_circle:
        click.echo(f"id set validations for: {file_path}")
        if re.match(constants.PACKS_SCRIPT_YML_REGEX, file_path, re.IGNORECASE):
            # Package-format script: pull the yml and code from the package directory.
            unifier = Unifier(os.path.dirname(file_path))
            yml_path, code = unifier.get_script_or_integration_package_data()
            script_data = get_script_data(yml_path, script_code=code)
            is_valid = self._is_non_real_command_found(script_data)
        elif file_type == constants.FileType.INCIDENT_TYPE:
            incident_type_data = OrderedDict(get_incident_type_data(file_path))
            is_valid = self._is_incident_type_default_playbook_found(incident_type_data)
        elif file_type == constants.FileType.INTEGRATION:
            integration_data = get_integration_data(file_path)
            is_valid = self._is_integration_classifier_and_mapper_found(integration_data)
        elif file_type == constants.FileType.CLASSIFIER:
            classifier_data = get_classifier_data(file_path)
            is_valid = self._is_classifier_incident_types_found(classifier_data)
        elif file_type == constants.FileType.MAPPER:
            mapper_data = get_mapper_data(file_path)
            is_valid = self._is_mapper_incident_types_found(mapper_data)
        elif file_type == constants.FileType.PLAYBOOK:
            playbook_data = get_playbook_data(file_path)
            is_valid = self._are_playbook_entities_versions_valid(playbook_data, file_path)
    return is_valid
def create_py_documentation(path, origin, language): is_error_py = False with open(path, 'r') as file: py_script = Unifier.clean_python_code(file.read(), remove_print_future=False) code = compile(py_script, '<string>', 'exec') ns = {'demisto': demistomock} exec(code, ns) # guardrails-disable-line