Example #1
def content_item_to_package_format(
        self,
        content_item_dir: str,
        del_unified: bool = True,
        source_mapping: Optional[Dict] = None,  # noqa: F841
        code_fp_to_row_offset: Dict = {}) -> None:
    child_files = get_child_files(content_item_dir)
    for child_file in child_files:
        cf_name_lower = os.path.basename(child_file).lower()
        if cf_name_lower.startswith(
            (SCRIPT, AUTOMATION,
             INTEGRATION)) and cf_name_lower.endswith('yml'):
            content_item_file_path = child_file
            file_type = find_type(content_item_file_path)
            file_type = file_type.value if file_type else file_type
            try:
                extractor = YmlSplitter(input=content_item_file_path,
                                        file_type=file_type,
                                        output=content_item_dir,
                                        no_logging=True,
                                        no_pipenv=True,
                                        no_basic_fmt=True)
                extractor.extract_to_package_format()
                code_fp = get_extracted_code_filepath(extractor)
                code_fp_to_row_offset[
                    code_fp] = extractor.lines_inserted_at_code_start
            except Exception as e:
                err_msg = f'Error occurred while trying to split the unified YAML "{content_item_file_path}" ' \
                          f'into its component parts.\nError: "{e}"'
                self.contrib_conversion_errs.append(err_msg)
            if del_unified:
                os.remove(content_item_file_path)
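A hedged sketch of a possible call site for the method above (the contrib_converter instance and the pack directory are hypothetical, not taken from the source). It shows how the code_fp_to_row_offset accumulator could be read back after the conversion: each entry records how many lines the splitter inserted at the top of the extracted code file.

code_fp_to_row_offset: dict = {}
contrib_converter.content_item_to_package_format(
    content_item_dir='Packs/TmpPack/Integrations',  # hypothetical directory holding unified YAMLs
    del_unified=False,
    code_fp_to_row_offset=code_fp_to_row_offset,
)
for code_fp, offset in code_fp_to_row_offset.items():
    # 'offset' is extractor.lines_inserted_at_code_start for that file, i.e. the number of lines
    # prepended to the extracted code (useful for mapping line numbers back to the unified YAML)
    print(f'{code_fp}: {offset} line(s) inserted at the start of the extracted code')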
Example #2
    def merge_and_extract_new_file(self, custom_content_object: dict) -> None:
        """
        Merges new files of type integration/script (not existing in the output pack)
        :param custom_content_object: The custom content object to merge into the pack
        :return: None
        """
        file_entity: str = custom_content_object['entity']
        file_path: str = custom_content_object['path']
        file_type: str = custom_content_object['type']
        file_name: str = custom_content_object['name']

        file_code_language: str = custom_content_object.get('code_lang', '')
        if not self.verify_code_lang(file_code_language, file_type, file_name):
            return

        dir_output_path: str = os.path.join(self.output_pack_path, file_entity)
        # the dir name should be the same as the file name, with the separators listed in constants.py removed
        dir_name: str = self.create_dir_name(file_name)
        dir_output_path = os.path.join(dir_output_path, dir_name)

        extractor = YmlSplitter(input=file_path,
                                output=dir_output_path,
                                file_type=file_type,
                                base_name=dir_name,
                                no_auto_create_dir=True,
                                no_logging=not self.log_verbose,
                                no_pipenv=True)
        extractor.extract_to_package_format()

        for file_path in get_child_files(dir_output_path):
            self.format_file(file_path, retrieve_file_ending(file_path))
        self.num_added_files += 1
        self.log_finished_file('Added', file_name, file_entity[:-1])
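For context, a hedged sketch of the dictionary this method expects; the keys mirror exactly what the code above reads, while the values and the downloader instance are illustrative, not taken from the source.

custom_content_object = {
    'entity': 'Integrations',                              # destination entity directory in the pack
    'path': '/tmp/custom_content/integration-Sample.yml',  # unified YAML downloaded from the server
    'type': 'integration',
    'name': 'Sample',
    'code_lang': 'python',
}
downloader.merge_and_extract_new_file(custom_content_object)  # hypothetical Downloader-like instance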
Example #3
def test_get_output_path_empty_output():
    input_path = Path(f'{git_path()}/demisto_sdk/tests/test_files/integration-Zoom.yml')
    extractor = YmlSplitter(input=str(input_path),
                            file_type='integration'
                            )
    res = extractor.get_output_path()
    assert res == input_path.parent
Example #4
def test_get_output_path():
    out = f'{git_path()}/demisto_sdk/tests/Integrations'
    extractor = YmlSplitter(input=f'{git_path()}/demisto_sdk/tests/test_files/integration-Zoom.yml',
                            file_type='integration',
                            output=out)
    res = extractor.get_output_path()
    assert res == Path(out + "/Zoom")
Example #5
def test_get_output_path_relative(repo):
    pack = repo.create_pack()
    integration = pack.create_integration()

    with ChangeCWD(repo.path):
        extractor = YmlSplitter(input=integration.yml.rel_path, file_type='integration')

    output_path = extractor.get_output_path()
    assert output_path.is_absolute()
    assert output_path.relative_to(pack.path) == Path(integration.path).relative_to(pack.path)
Example #6
def test_extract_code_pwsh(tmpdir):
    extractor = YmlSplitter(input=f'{git_path()}/demisto_sdk/tests/test_files/integration-powershell_ssh_remote.yml',
                            output=str(tmpdir.join('temp_code')), file_type='integration')

    extractor.extract_code(extractor.output)
    # note that the output path was passed without an extension; the extractor should add .ps1
    with open(extractor.output.with_suffix('.ps1'), 'r', encoding='utf-8') as temp_code:
        file_data = temp_code.read()
        assert '. $PSScriptRoot\\CommonServerPowerShell.ps1\n' in file_data
        assert file_data[-1] == '\n'
Example #7
def prepare_single_content_item_for_validation(
        filename: str, data: bytes, tmp_directory: str) -> Tuple[str, Dict]:
    content = Content(tmp_directory)
    pack_name = 'TmpPack'
    pack_dir = content.path / 'Packs' / pack_name
    # create pack_metadata.json file in TmpPack
    contrib_converter = ContributionConverter(name=pack_name,
                                              base_dir=tmp_directory,
                                              pack_dir_name=pack_name)
    contrib_converter.create_metadata_file({
        'description': 'Temporary Pack',
        'author': 'xsoar'
    })
    prefix = '-'.join(filename.split('-')[:-1])
    containing_dir = pack_dir / ENTITY_TYPE_TO_DIR.get(prefix, 'Integrations')
    containing_dir.mkdir(exist_ok=True)
    is_json = filename.casefold().endswith('.json')
    data_as_string = data.decode()
    loaded_data = json.loads(data_as_string) if is_json else yaml.load(
        data_as_string)
    if is_json:
        data_as_string = json.dumps(loaded_data)
    else:
        buff = io.StringIO()
        yaml.dump(loaded_data, buff)
        data_as_string = buff.getvalue()
    # write content item file to file system
    file_path = containing_dir / filename
    file_path.write_text(data_as_string)
    file_type = find_type(str(file_path))
    file_type = file_type.value if file_type else file_type
    if is_json or file_type in (FileType.PLAYBOOK.value,
                                FileType.TEST_PLAYBOOK.value):
        return str(file_path), {}
    extractor = YmlSplitter(input=str(file_path),
                            file_type=file_type,
                            output=containing_dir,
                            no_logging=True,
                            no_pipenv=True,
                            no_basic_fmt=True)
    # validate the resulting package files, ergo set path_to_validate to the package directory that results
    # from extracting the unified yaml to a package format
    extractor.extract_to_package_format()
    code_fp_to_row_offset = {
        get_extracted_code_filepath(extractor):
        extractor.lines_inserted_at_code_start
    }
    return extractor.get_output_path(), code_fp_to_row_offset
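A hedged usage sketch (the file name and temporary directory are illustrative): the filename prefix, e.g. 'integration-', selects the destination directory via ENTITY_TYPE_TO_DIR, and the returned offsets map lines of the extracted code files back to the uploaded unified YAML.

import tempfile
from pathlib import Path

with tempfile.TemporaryDirectory() as tmp_dir:
    data = Path('integration-Sample.yml').read_bytes()  # hypothetical unified integration YAML
    path_to_validate, code_fp_to_row_offset = prepare_single_content_item_for_validation(
        filename='integration-Sample.yml',
        data=data,
        tmp_directory=tmp_dir,
    )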
Example #8
def test_extract_code_modules_old_format(tmpdir):
    """
    Given:
        - A unified YML in which the ApiModule code was auto-generated, but the generated-code comments are not up to date
    When:
        - Running YmlSplitter on this file
    Then:
        - Make sure that the imported code is still there and that the code runs.
    """
    extractor = YmlSplitter(input=f'{git_path()}/demisto_sdk/tests/test_files/integration-EDL_old_generated.yml',
                            output=str(tmpdir.join('temp_code.py')), file_type='integration')

    extractor.extract_code(extractor.output)
    with open(extractor.output, 'rb') as temp_code:
        file_data = temp_code.read().decode('utf-8')
        assert '### GENERATED CODE ###' in file_data
        assert 'def nginx_log_process(nginx_process: subprocess.Popen):' in file_data
Example #9
def test_extract_code__with_apimodule(tmpdir):
    """
    Given:
        - A unified YML in which the ApiModule code was auto-generated
    When:
        - Running YmlSplitter on this file
    Then:
        - Ensure the generated code is removed and the import line remains
    """
    extractor = YmlSplitter(input=f'{git_path()}/demisto_sdk/tests/test_files/integration-EDL.yml',
                            output=str(tmpdir.join('temp_code.py')), file_type='integration')

    extractor.extract_code(extractor.output)
    with open(extractor.output, 'rb') as temp_code:
        file_data = temp_code.read().decode('utf-8')
        assert '### GENERATED CODE ###' not in file_data
        assert '### END GENERATED CODE ###' not in file_data
        assert 'from NGINXApiModule import *' in file_data
        assert 'def create_nginx_server_conf(file_path: str, port: int, params: Dict):' not in file_data
Example #10
def get_extracted_code_filepath(extractor: YmlSplitter) -> str:
    output_path = extractor.get_output_path()
    base_name = extractor.base_name if extractor.base_name else os.path.basename(output_path)
    script = extractor.yml_data['script']
    lang_type: str = script['type'] if extractor.file_type == 'integration' else extractor.yml_data['type']
    code_file = f'{output_path}/{base_name}{TYPE_TO_EXTENSION[lang_type]}'
    return code_file
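The helper above depends on the TYPE_TO_EXTENSION constant. A hedged illustration of the assumed mapping and a hypothetical call follows (check demisto_sdk's constants for the authoritative values):

TYPE_TO_EXTENSION = {
    'python': '.py',
    'javascript': '.js',
    'powershell': '.ps1',
}

extractor = YmlSplitter(input='Packs/Sample/Integrations/integration-Sample.yml',  # hypothetical path
                        file_type='integration')
extractor.extract_to_package_format()
code_fp = get_extracted_code_filepath(extractor)  # e.g. '<output_dir>/Sample/Sample.py'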
Example #11
def test_extract_to_package_format_py(pack, mocker, tmp_path):
    mocker.patch.object(YmlSplitter, 'extract_image', return_value='12312321')
    mocker.patch(
        'demisto_sdk.commands.split.ymlsplitter.get_python_version',
        return_value='2.7'
    )
    mocker.patch(
        'demisto_sdk.commands.split.ymlsplitter.get_pipenv_dir',
        return_value=os.path.join(git_path(), 'demisto_sdk/tests/test_files/default_python2')
    )
    mocker.patch(
        'demisto_sdk.commands.split.ymlsplitter.get_pip_requirements',
        return_value="""certifi==2017.11.5
chardet==3.0.4
idna==2.6
olefile==0.44
PyYAML==3.12
requests==2.18.4
urllib3==1.22
"""
    )
    integration = pack.create_integration('Sample')
    integration.create_default_integration()
    out = tmp_path / 'TestIntegration'
    non_sorted_imports = 'from CommonServerPython import *\nimport datetime\nimport json'
    integration.yml.update(
        {
            'image': '',
            'script': {
                'type': 'python',
                'script': non_sorted_imports
            }
        }
    )
    extractor = YmlSplitter(input=integration.yml.path,
                            output=str(out), file_type='integration')
    extractor.extract_to_package_format()
    with open(out / 'TestIntegration.py', encoding='utf-8') as f:
        file_data = f.read()
        # check imports are sorted
        assert non_sorted_imports not in file_data
Example #12
def test_extract_to_package_format_pwsh(tmpdir):
    out = tmpdir.join('Integrations')
    extractor = YmlSplitter(input=f'{git_path()}/demisto_sdk/tests/test_files/integration-powershell_ssh_remote.yml',
                            output=str(out), file_type='integration')
    assert extractor.extract_to_package_format() == 0
    # check code
    with open(out.join('PowerShellRemotingOverSSH').join('PowerShellRemotingOverSSH.ps1'), 'r', encoding='utf-8') as f:
        file_data = f.read()
        assert '. $PSScriptRoot\\CommonServerPowerShell.ps1\n' in file_data
        assert file_data[-1] == '\n'
    # check description
    with open(out.join('PowerShellRemotingOverSSH').join('PowerShellRemotingOverSSH_description.md'), 'r') as f:
        file_data = f.read()
        assert 'Username and password are both associated with the user in the target machine' in file_data
    # check readme
    with open(out.join('PowerShellRemotingOverSSH').join('README.md'), 'r') as f:
        file_data = f.read()
        assert 'This is a sample test README' in file_data
    with open(out.join('PowerShellRemotingOverSSH').join('PowerShellRemotingOverSSH.yml'), 'r') as f:
        yaml_obj = yaml.load(f)
        assert yaml_obj['fromversion'] == '5.5.0'
        assert not yaml_obj['script']['script']
Example #13
def test_extract_image(tmpdir):
    # Test when the input is a script
    extractor = YmlSplitter(input=f'{git_path()}/demisto_sdk/tests/test_files/script-test_script.yml',
                            output='', file_type='script')
    assert extractor.extract_image('output_path') == 0

    # Test opening the file and writing to it
    extractor = YmlSplitter(input=f'{git_path()}/demisto_sdk/tests/test_files/integration-Zoom.yml',
                            output=str(tmpdir.join('temp_image.png')), file_type='integration')

    extractor.extract_image(extractor.output)
    with open(extractor.output, 'rb') as temp_image:
        image_data = temp_image.read()
        image = base64.b64encode(image_data).decode('utf-8')
        assert image == DEFAULT_IMAGE_BASE64
Example #14
def test_extract_long_description(tmpdir):
    # Test when the input is a script
    extractor = YmlSplitter(input=f'{git_path()}/demisto_sdk/tests/test_files/script-test_script.yml',
                            output='', file_type='script', no_demisto_mock=False,
                            no_common_server=False, configuration=Configuration())
    assert extractor.extract_long_description('output_path') == 0

    # Test opening the file and writing to it
    extractor = YmlSplitter(input=f'{git_path()}/demisto_sdk/tests/test_files/integration-Zoom.yml',
                            output=str(tmpdir.join('temp_text.txt')), file_type='integration')

    extractor.extract_long_description(extractor.output)
    with open(extractor.output, 'rb') as temp_description:
        assert temp_description.read().decode('utf-8') == 'detaileddescription'
    os.remove(extractor.output)
Example #15
def test_extract_code(tmpdir):
    extractor = YmlSplitter(input=f'{git_path()}/demisto_sdk/tests/test_files/integration-Zoom.yml',
                            output=str(tmpdir.join('temp_code.py')), file_type='integration')

    extractor.extract_code(extractor.output)
    with open(extractor.output, 'rb') as temp_code:
        file_data = temp_code.read().decode('utf-8')
        assert 'import demistomock as demisto  #' in file_data
        assert 'from CommonServerPython import *  #' in file_data
        assert file_data[-1] == '\n'
        assert 'register_module_line' not in file_data
    os.remove(extractor.output)

    extractor.common_server = False
    extractor.demisto_mock = False
    extractor.extract_code(extractor.output)
    with open(extractor.output, 'rb') as temp_code:
        file_data = temp_code.read().decode('utf-8')
        assert 'import demistomock as demisto  #' not in file_data
        assert 'from CommonServerPython import *  #' not in file_data
        assert 'register_module_line' not in file_data
        assert file_data[-1] == '\n'
Example #16
    def content_item_to_package_format(
            self,
            content_item_dir: str,
            del_unified: bool = True,
            source_mapping: Optional[Dict[str, Dict[str, str]]] = None):
        """
        Iterate over the YAML files in a directory and create packages (a containing directory and
        component files) from the YAMLs of integrations and scripts

        Args:
            content_item_dir (str): Path to the directory containing the content item YAML file(s)
            del_unified (bool): Whether to delete the unified yaml the package was extracted from
            source_mapping (Optional[Dict]): Can be used when updating an existing pack and
                the package directory of a content item is not what would ordinarily be set by the
                `demisto-sdk` `split` command. A sample value would be
                `{'integration-AbuseIPDB.yml': {'containing_dir_name': 'AbuseDB', 'base_name': 'AbuseDB'}}`
                - by default the split command would create a containing directory named `AbuseIPDB` for the
                file `integration-AbuseIPDB.yml`, but the containing directory of the package needs to match
                what already exists in the repo.
        """
        child_files = get_child_files(content_item_dir)
        for child_file in child_files:
            cf_name_lower = os.path.basename(child_file).lower()
            if cf_name_lower.startswith(
                (SCRIPT, AUTOMATION,
                 INTEGRATION)) and cf_name_lower.endswith('yml'):
                content_item_file_path = child_file
                file_type = find_type(content_item_file_path)
                file_type = file_type.value if file_type else file_type
                try:
                    child_file_name = os.path.basename(child_file)
                    if source_mapping and child_file_name in source_mapping:
                        child_file_mapping = source_mapping.get(child_file_name, {})
                        base_name = child_file_mapping.get('base_name', '')
                        containing_dir_name = child_file_mapping.get('containing_dir_name', '')
                        # for legacy unified yamls in the repo, their containing directory will be that of their
                        # entity type directly instead of the typical package format. For those cases, we need the
                        # extractor to auto create the containing directory. An example would be -
                        # 'content/Packs/AbuseDB/Scripts/script-AbuseIPDBPopulateIndicators.yml'
                        autocreate_dir = containing_dir_name == ENTITY_TYPE_TO_DIR.get(
                            file_type, '')
                        output_dir = os.path.join(
                            self.pack_dir_path,
                            ENTITY_TYPE_TO_DIR.get(file_type, ''))
                        if not autocreate_dir:
                            output_dir = os.path.join(output_dir,
                                                      containing_dir_name)
                        os.makedirs(output_dir, exist_ok=True)
                        extractor = YmlSplitter(
                            input=content_item_file_path,
                            file_type=file_type,
                            output=output_dir,
                            no_readme=True,
                            base_name=base_name,
                            no_auto_create_dir=(not autocreate_dir),
                            no_pipenv=self.no_pipenv)

                    else:
                        extractor = YmlSplitter(input=content_item_file_path,
                                                file_type=file_type,
                                                output=content_item_dir,
                                                no_pipenv=self.no_pipenv)
                    extractor.extract_to_package_format()
                except Exception as e:
                    err_msg = f'Error occurred while trying to split the unified YAML "{content_item_file_path}" ' \
                              f'into its component parts.\nError: "{e}"'
                    self.contrib_conversion_errs.append(err_msg)
                finally:
                    output_path = extractor.get_output_path()
                    if self.create_new:
                        # Moving the unified file to its package.
                        shutil.move(content_item_file_path, output_path)
                    if del_unified:
                        if os.path.exists(content_item_file_path):
                            os.remove(content_item_file_path)
                        moved_unified_dst = os.path.join(
                            output_path, child_file_name)
                        if os.path.exists(moved_unified_dst):
                            os.remove(moved_unified_dst)
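A hedged sketch of a source_mapping value matching the docstring's sample above; the contrib_converter instance and the directory are hypothetical, not taken from the source.

source_mapping = {
    'integration-AbuseIPDB.yml': {
        'containing_dir_name': 'AbuseDB',
        'base_name': 'AbuseDB',
    }
}
contrib_converter.content_item_to_package_format(
    content_item_dir='Packs/AbuseDB/Integrations',  # hypothetical directory holding the unified YAML
    del_unified=True,
    source_mapping=source_mapping,
)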
Example #17
    def merge_and_extract_existing_file(self,
                                        custom_content_object: dict) -> None:
        """
        "Smart" merges old files of type integration/script (existing in the output pack)
        :param custom_content_object: The custom content object to merge into the pack
        :return: None
        """
        file_path: str = custom_content_object['path']
        file_name: str = custom_content_object['name']
        file_type: str = custom_content_object['type']
        file_entity: str = custom_content_object['entity']

        file_code_language: str = custom_content_object.get('code_lang', '')
        if not self.verify_code_lang(file_code_language, file_type, file_name):
            return

        base_name: str = self.create_dir_name(file_name)
        temp_dir = mkdtemp()

        extractor = YmlSplitter(input=file_path,
                                output=temp_dir,
                                file_type=file_type,
                                base_name=base_name,
                                no_logging=not self.log_verbose,
                                no_pipenv=True,
                                no_readme=True,
                                no_auto_create_dir=True)
        extractor.extract_to_package_format()

        extracted_file_paths: list = get_child_files(temp_dir)
        corresponding_pack_object: dict = self.get_corresponding_pack_content_object(
            custom_content_object)

        for ex_file_path in extracted_file_paths:
            ex_file_ending: str = retrieve_file_ending(ex_file_path)
            ex_file_detail: str = self.get_extracted_file_detail(
                ex_file_ending)
            # Get the file name to search for in the pack object (integration/script contains several files of the
            # same type. For example: integration's py code and integration's unit tests code)
            searched_basename: str = self.get_searched_basename(
                file_name, ex_file_ending, ex_file_detail)
            corresponding_pack_file_object: dict = self.get_corresponding_pack_file_object(
                searched_basename, corresponding_pack_object)
            if not corresponding_pack_file_object:
                corresponding_pack_file_path: str = os.path.join(
                    self.output_pack_path, file_entity,
                    self.create_dir_name(file_name), searched_basename)
            else:
                corresponding_pack_file_path = corresponding_pack_file_object[
                    'path']
            # We use "smart" merge only for yml files (py, png  & md files to be moved regularly)
            if ex_file_ending == 'yml':
                # add the fields of the old yml/json file that were deleted by Demisto back into the custom content file
                self.update_data(ex_file_path, corresponding_pack_file_path,
                                 ex_file_ending)
            try:
                shutil.move(src=ex_file_path, dst=corresponding_pack_file_path)
            except shutil.Error as e:
                print_color(e, LOG_COLORS.RED)
                raise
            self.format_file(corresponding_pack_file_path, ex_file_ending)

        try:
            shutil.rmtree(temp_dir, ignore_errors=True)
        except shutil.Error as e:
            print_color(e, LOG_COLORS.RED)
            raise

        self.num_merged_files += 1
        self.log_finished_file('Merged', file_name, file_entity[:-1])