def content_item_to_package_format(
        self,
        content_item_dir: str,
        del_unified: bool = True,
        source_mapping: Optional[Dict] = None,  # noqa: F841
        code_fp_to_row_offset: Dict = {}) -> None:
    child_files = get_child_files(content_item_dir)
    for child_file in child_files:
        cf_name_lower = os.path.basename(child_file).lower()
        if cf_name_lower.startswith((SCRIPT, AUTOMATION, INTEGRATION)) and cf_name_lower.endswith('yml'):
            content_item_file_path = child_file
            file_type = find_type(content_item_file_path)
            file_type = file_type.value if file_type else file_type
            try:
                extractor = Extractor(
                    input=content_item_file_path,
                    file_type=file_type,
                    output=content_item_dir,
                    no_logging=True,
                    no_pipenv=True,
                    no_basic_fmt=True)
                extractor.extract_to_package_format()
                code_fp = get_extracted_code_filepath(extractor)
                code_fp_to_row_offset[code_fp] = extractor.lines_inserted_at_code_start
            except Exception as e:
                err_msg = f'Error occurred while trying to split the unified YAML "{content_item_file_path}" ' \
                          f'into its component parts.\nError: "{e}"'
                self.contrib_conversion_errs.append(err_msg)
            if del_unified:
                os.remove(content_item_file_path)

def merge_and_extract_new_file(self, custom_content_object: dict) -> None:
    """
    Merges new files of type integration/script (not existing in the output pack)
    :param custom_content_object: The custom content object to merge into the pack
    :return: None
    """
    file_entity: str = custom_content_object['entity']
    file_path: str = custom_content_object['path']
    file_type: str = custom_content_object['type']
    file_name: str = custom_content_object['name']
    file_code_language: str = custom_content_object.get('code_lang', '')
    if not self.verify_code_lang(file_code_language, file_type, file_name):
        return
    dir_output_path: str = os.path.join(self.output_pack_path, file_entity)
    # dir name should be the same as file name without separators mentioned in constants.py
    dir_name: str = self.create_dir_name(file_name)
    dir_output_path = os.path.join(dir_output_path, dir_name)
    extractor = Extractor(
        input=file_path,
        output=dir_output_path,
        file_type=file_type,
        base_name=dir_name,
        no_auto_create_dir=True,
        no_logging=not self.log_verbose,
        no_pipenv=True)
    extractor.extract_to_package_format()
    for file_path in get_child_files(dir_output_path):
        self.format_file(file_path, retrieve_file_ending(file_path))
    self.num_added_files += 1
    self.log_finished_file('Added', file_name, file_entity[:-1])

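# Usage sketch (not from the source): the dict keys mirror the lookups in
# merge_and_extract_new_file above; the values and the `downloader` object that owns the
# method are illustrative assumptions only.
custom_content_object = {
    'entity': 'Integrations',
    'path': '/tmp/custom_content/integration-MyIntegration.yml',  # hypothetical path
    'type': 'integration',
    'name': 'MyIntegration',
    'code_lang': 'python',
}
downloader.merge_and_extract_new_file(custom_content_object)  # assumes `downloader` is the instance defining the method
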
def prepare_single_content_item_for_validation(filename: str, data: bytes, tmp_directory: str) -> str:
    content = Content(tmp_directory)
    pack_name = 'TmpPack'
    pack_dir = content.path / 'Packs' / pack_name
    # create pack_metadata.json file in TmpPack
    contrib_converter = ContributionConverter(name=pack_name, base_dir=tmp_directory, pack_dir_name=pack_name)
    contrib_converter.create_metadata_file({'description': 'Temporary Pack', 'author': 'xsoar'})
    prefix = '-'.join(filename.split('-')[:-1])
    containing_dir = pack_dir / ENTITY_TYPE_TO_DIR.get(prefix, 'Integrations')
    containing_dir.mkdir(exist_ok=True)
    data_as_string = data.decode()
    loaded_data = yaml.load(data_as_string)
    buff = io.StringIO()
    yaml.dump(loaded_data, buff)
    data_as_string = buff.getvalue()
    # write yaml integration file to file system
    file_path = containing_dir / filename
    file_path.write_text(data_as_string)
    file_type = find_type(str(file_path))
    file_type = file_type.value if file_type else file_type
    extractor = Extractor(
        input=str(file_path), file_type=file_type, output=containing_dir, no_logging=True, no_pipenv=True)
    # validate the resulting package files, ergo set path_to_validate to the package directory that results
    # from extracting the unified yaml to a package format
    extractor.extract_to_package_format()
    return extractor.get_output_path()

def test_extract_to_package_format_py(pack, mocker, tmp_path):
    mocker.patch.object(Extractor, 'extract_image', return_value='12312321')
    mocker.patch('demisto_sdk.commands.split_yml.extractor.get_python_version', return_value='2.7')
    mocker.patch('demisto_sdk.commands.split_yml.extractor.get_pipenv_dir',
                 return_value=os.path.join(git_path(), 'demisto_sdk/tests/test_files/default_python2'))
    integration = pack.create_integration('Sample')
    integration.create_default_integration()
    out = tmp_path / 'TestIntegration'
    non_sorted_imports = 'from CommonServerPython import *\nimport datetime\nimport json'
    integration.yml.update({
        'image': '',
        'script': {
            'type': 'python',
            'script': non_sorted_imports
        }
    })
    extractor = Extractor(input=integration.yml.path, output=str(out), file_type='integration')
    extractor.extract_to_package_format()
    with open(out / 'TestIntegration.py', encoding='utf-8') as f:
        file_data = f.read()
    # check imports are sorted
    assert non_sorted_imports not in file_data

def content_item_to_package_format(self, content_item_dir: str, del_unified: bool = True):
    """
    Iterate over the YAML files in a directory and create packages (a containing directory and
    component files) from the YAMLs of integrations and scripts

    Args:
        content_item_dir (str): Path to the directory containing the content item YAML file(s)
        del_unified (bool): Whether to delete the unified yaml the package was extracted from
    """
    child_files = get_child_files(content_item_dir)
    content_item_file_path = ''
    for child_file in child_files:
        cf_name_lower = os.path.basename(child_file).lower()
        if cf_name_lower.startswith((SCRIPT, AUTOMATION, INTEGRATION)) and cf_name_lower.endswith('yml'):
            content_item_file_path = child_file
            file_type = find_type(content_item_file_path)
            file_type = file_type.value if file_type else file_type
            try:
                extractor = Extractor(input=content_item_file_path, file_type=file_type, output=content_item_dir)
                extractor.extract_to_package_format()
            except Exception as e:
                err_msg = f'Error occurred while trying to split the unified YAML "{content_item_file_path}" ' \
                          f'into its component parts.\nError: "{e}"'
                self.contrib_conversion_errs.append(err_msg)
            if del_unified:
                os.remove(content_item_file_path)

def merge_and_extract_existing_file(self, custom_content_object: dict) -> None:
    """
    "Smart" merges old files of type integration/script (existing in the output pack)
    :param custom_content_object: The custom content object to merge into the pack
    :return: None
    """
    file_path: str = custom_content_object['path']
    file_name: str = custom_content_object['name']
    file_type: str = custom_content_object['type']
    file_entity: str = custom_content_object['entity']
    file_code_language: str = custom_content_object.get('code_lang', '')
    if not self.verify_code_lang(file_code_language, file_type, file_name):
        return
    base_name: str = self.create_dir_name(file_name)
    temp_dir = mkdtemp()
    extractor = Extractor(
        input=file_path,
        output=temp_dir,
        file_type=file_type,
        base_name=base_name,
        no_logging=not self.log_verbose,
        no_pipenv=True,
        no_readme=True,
        no_auto_create_dir=True)
    extractor.extract_to_package_format()
    extracted_file_paths: list = get_child_files(temp_dir)
    corresponding_pack_object: dict = self.get_corresponding_pack_content_object(custom_content_object)
    for ex_file_path in extracted_file_paths:
        ex_file_ending: str = retrieve_file_ending(ex_file_path)
        ex_file_detail: str = self.get_extracted_file_detail(ex_file_ending)
        # Get the file name to search for in the pack object (integration/script contains several files of the
        # same type. For example: integration's py code and integration's unit tests code)
        searched_basename: str = self.get_searched_basename(file_name, ex_file_ending, ex_file_detail)
        corresponding_pack_file_object: dict = self.get_corresponding_pack_file_object(
            searched_basename, corresponding_pack_object)
        if not corresponding_pack_file_object:
            corresponding_pack_file_path: str = os.path.join(
                self.output_pack_path, file_entity, self.create_dir_name(file_name), searched_basename)
        else:
            corresponding_pack_file_path = corresponding_pack_file_object['path']
        # We use "smart" merge only for yml files (py, png & md files to be moved regularly)
        if ex_file_ending == 'yml':
            # adding the deleted fields (by Demisto) of the old yml/json file to the custom content file.
            self.update_data(ex_file_path, corresponding_pack_file_path, ex_file_ending)
        try:
            shutil.move(src=ex_file_path, dst=corresponding_pack_file_path)
        except shutil.Error as e:
            print_color(e, LOG_COLORS.RED)
            raise
        self.format_file(corresponding_pack_file_path, ex_file_ending)
    try:
        shutil.rmtree(temp_dir, ignore_errors=True)
    except shutil.Error as e:
        print_color(e, LOG_COLORS.RED)
        raise
    self.num_merged_files += 1
    self.log_finished_file('Merged', file_name, file_entity[:-1])

def prepare_single_content_item_for_validation(filename: str, data: bytes, tmp_directory: str) -> Tuple[str, Dict]:
    content = Content(tmp_directory)
    pack_name = 'TmpPack'
    pack_dir = content.path / 'Packs' / pack_name
    # create pack_metadata.json file in TmpPack
    contrib_converter = ContributionConverter(name=pack_name, base_dir=tmp_directory, pack_dir_name=pack_name)
    contrib_converter.create_metadata_file({'description': 'Temporary Pack', 'author': 'xsoar'})
    prefix = '-'.join(filename.split('-')[:-1])
    containing_dir = pack_dir / ENTITY_TYPE_TO_DIR.get(prefix, 'Integrations')
    containing_dir.mkdir(exist_ok=True)
    is_json = filename.casefold().endswith('.json')
    data_as_string = data.decode()
    loaded_data = json.loads(data_as_string) if is_json else yaml.load(data_as_string)
    if is_json:
        data_as_string = json.dumps(loaded_data)
    else:
        buff = io.StringIO()
        yaml.dump(loaded_data, buff)
        data_as_string = buff.getvalue()
    # write content item file to file system
    file_path = containing_dir / filename
    file_path.write_text(data_as_string)
    file_type = find_type(str(file_path))
    file_type = file_type.value if file_type else file_type
    if is_json or file_type in (FileType.PLAYBOOK.value, FileType.TEST_PLAYBOOK.value):
        return str(file_path), {}
    extractor = Extractor(
        input=str(file_path),
        file_type=file_type,
        output=containing_dir,
        no_logging=True,
        no_pipenv=True,
        no_basic_fmt=True)
    # validate the resulting package files, ergo set path_to_validate to the package directory that results
    # from extracting the unified yaml to a package format
    extractor.extract_to_package_format()
    code_fp_to_row_offset = {get_extracted_code_filepath(extractor): extractor.lines_inserted_at_code_start}
    return extractor.get_output_path(), code_fp_to_row_offset

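# Hedged sketch (an assumption, not part of demisto-sdk): one way the code_fp_to_row_offset
# mapping returned above could be used - shifting a row number reported against an extracted
# code file back by the number of lines the Extractor inserted at the start of that file.
from typing import Dict


def shift_reported_row(code_fp_to_row_offset: Dict[str, int], code_fp: str, row: int) -> int:
    """Hypothetical helper: map a row in an extracted code file back to the pre-extraction code."""
    return row - code_fp_to_row_offset.get(code_fp, 0)
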
def test_extract_to_package_format_pwsh(tmpdir):
    out = tmpdir.join('Integrations')
    extractor = Extractor(
        input=f'{git_path()}/demisto_sdk/tests/test_files/integration-powershell_ssh_remote.yml',
        output=str(out),
        file_type='integration')
    assert extractor.extract_to_package_format() == 0
    # check code
    with open(out.join('PowerShellRemotingOverSSH').join('PowerShellRemotingOverSSH.ps1'), 'r',
              encoding='utf-8') as f:
        file_data = f.read()
    assert '. $PSScriptRoot\\CommonServerPowerShell.ps1\n' in file_data
    assert file_data[-1] == '\n'
    # check description
    with open(out.join('PowerShellRemotingOverSSH').join('PowerShellRemotingOverSSH_description.md'), 'r') as f:
        file_data = f.read()
    assert 'Username and password are both associated with the user in the target machine' in file_data
    # check readme
    with open(out.join('PowerShellRemotingOverSSH').join('README.md'), 'r') as f:
        file_data = f.read()
    assert 'This is a sample test README' in file_data
    with open(out.join('PowerShellRemotingOverSSH').join('PowerShellRemotingOverSSH.yml'), 'r') as f:
        yaml_obj = yaml.safe_load(f)
    assert yaml_obj['fromversion'] == '5.5.0'
    assert not yaml_obj['script']['script']

def extract(config, **kwargs):
    file_type = find_type(kwargs.get('input'))
    if file_type not in ["integration", "script"]:
        print_error('File is not an Integration or Script.')
        return 1
    extractor = Extractor(configuration=config.configuration, file_type=file_type, **kwargs)
    return extractor.extract_to_package_format()

def test_extract_to_package_format_py(tmpdir, mocker):
    mocker.patch('demisto_sdk.commands.split_yml.extractor.get_python_version', return_value='2.7')
    mocker.patch('demisto_sdk.commands.split_yml.extractor.get_pipenv_dir',
                 return_value=os.path.join(git_path(), 'demisto_sdk/tests/test_files/default_python2'))
    out = tmpdir.join('Integrations')
    extractor = Extractor(
        input=f'{git_path()}/demisto_sdk/tests/test_files/integration-Zoom.yml',
        output=str(out),
        file_type='integration')
    extractor.extract_to_package_format()
    with open(out.join('Zoom').join('Zoom.py'), 'r', encoding='utf-8') as f:
        file_data = f.read()
    # check imports are sorted
    assert 'import datetime\nimport json\nimport shutil\nfrom zipfile import ZipFile\n\nimport requests\n\n' \
           'import demistomock as demisto\nimport jwt\nfrom CommonServerPython import *\n' in file_data

def extract(config, **kwargs):
    file_type = find_type(kwargs.get('input'), ignore_sub_categories=True)
    if file_type not in [FileType.INTEGRATION, FileType.SCRIPT]:
        print_error('File is not an Integration or Script.')
        return 1
    extractor = Extractor(configuration=config.configuration, file_type=file_type.value, **kwargs)
    return extractor.extract_to_package_format()

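# Hedged usage sketch for the command wrapper above. `_Config` is a stand-in for whatever
# object the CLI normally supplies (only its `.configuration` attribute is read here), and the
# input/output paths are illustrative assumptions.
class _Config:
    configuration = None  # assumption: a placeholder configuration for this sketch


rc = extract(
    _Config(),
    input='Packs/MyPack/Integrations/integration-MyIntegration.yml',  # hypothetical path
    output='Packs/MyPack/Integrations/MyIntegration',
)
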
def extract(config, **kwargs):
    extractor = Extractor(configuration=config.configuration, **kwargs)
    return extractor.extract_to_package_format()

def content_item_to_package_format(self, content_item_dir: str, del_unified: bool = True,
                                   source_mapping: Union[Dict[str, Dict[str, str]]] = None):
    """
    Iterate over the YAML files in a directory and create packages (a containing directory and
    component files) from the YAMLs of integrations and scripts

    Args:
        content_item_dir (str): Path to the directory containing the content item YAML file(s)
        del_unified (bool): Whether to delete the unified yaml the package was extracted from
        source_mapping (Union[Dict], optional): Can be used when updating an existing pack and
            the package directory of a content item is not what would ordinarily be set by the
            `demisto-sdk` `split-yml` command. Sample value would be,
            `{'integration-AbuseIPDB.yml': {'containing_dir_name': 'AbuseDB', 'base_name': 'AbuseDB'}}`
            - the split-yml command would create a containing directory of `AbuseIPDB` for the file
            `integration-AbuseIPDB.yml` and we need the containing directory of the package to
            match what already exists in the repo.
    """
    child_files = get_child_files(content_item_dir)
    content_item_file_path = ''
    for child_file in child_files:
        cf_name_lower = os.path.basename(child_file).lower()
        if cf_name_lower.startswith((SCRIPT, AUTOMATION, INTEGRATION)) and cf_name_lower.endswith('yml'):
            content_item_file_path = child_file
            file_type = find_type(content_item_file_path)
            file_type = file_type.value if file_type else file_type
            try:
                child_file_name = os.path.basename(child_file)
                if source_mapping and child_file_name in source_mapping.keys():
                    child_file_mapping = source_mapping.get(child_file_name, {})
                    base_name = child_file_mapping.get('base_name', '')
                    containing_dir_name = child_file_mapping.get('containing_dir_name', '')
                    # for legacy unified yamls in the repo, their containing directory will be that of their
                    # entity type directly instead of the typical package format. For those cases, we need the
                    # extractor to auto create the containing directory. An example would be -
                    # 'content/Packs/AbuseDB/Scripts/script-AbuseIPDBPopulateIndicators.yml'
                    autocreate_dir = containing_dir_name == ENTITY_TYPE_TO_DIR.get(file_type, '')
                    output_dir = os.path.join(self.pack_dir_path, ENTITY_TYPE_TO_DIR.get(file_type, ''))
                    if not autocreate_dir:
                        output_dir = os.path.join(output_dir, containing_dir_name)
                    os.makedirs(output_dir, exist_ok=True)
                    extractor = Extractor(
                        input=content_item_file_path,
                        file_type=file_type,
                        output=output_dir,
                        no_readme=True,
                        base_name=base_name,
                        no_auto_create_dir=(not autocreate_dir),
                        no_pipenv=self.no_pipenv)
                else:
                    extractor = Extractor(
                        input=content_item_file_path,
                        file_type=file_type,
                        output=content_item_dir,
                        no_pipenv=self.no_pipenv)
                extractor.extract_to_package_format()
            except Exception as e:
                err_msg = f'Error occurred while trying to split the unified YAML "{content_item_file_path}" ' \
                          f'into its component parts.\nError: "{e}"'
                self.contrib_conversion_errs.append(err_msg)
            finally:
                output_path = extractor.get_output_path()
                if self.create_new:
                    # Moving the unified file to its package.
                    shutil.move(content_item_file_path, output_path)
                if del_unified:
                    if os.path.exists(content_item_file_path):
                        os.remove(content_item_file_path)
                    moved_unified_dst = os.path.join(output_path, child_file_name)
                    if os.path.exists(moved_unified_dst):
                        os.remove(moved_unified_dst)

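# Illustrative call (assumption): the mapping value is the sample given in the docstring above;
# `converter` is assumed to be the ContributionConverter-style instance these methods belong to,
# and the directory path is made up for the example.
source_mapping = {
    'integration-AbuseIPDB.yml': {'containing_dir_name': 'AbuseDB', 'base_name': 'AbuseDB'},
}
converter.content_item_to_package_format(
    content_item_dir='/tmp/contribution/Integrations',  # hypothetical directory
    del_unified=True,
    source_mapping=source_mapping,
)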