Exemplo n.º 1
0
def prepare_single_content_item_for_validation(filename: str, data: bytes, tmp_directory: str) -> str:
    """Stage a unified YAML content item in a temporary pack and split it for validation.

    Writes the uploaded bytes into a freshly created ``TmpPack`` under the
    temporary content directory, round-trips the YAML so its formatting is
    normalized, extracts the unified file to package format, and returns the
    resulting package directory path.

    Args:
        filename (str): Name of the uploaded content item file.
        data (bytes): Raw bytes of the unified YAML content item.
        tmp_directory (str): Temporary directory to stage the content in.

    Returns:
        str: Path of the extracted package directory to validate.
    """
    tmp_content = Content(tmp_directory)
    pack_name = 'TmpPack'
    pack_dir = tmp_content.path / 'Packs' / pack_name
    # create pack_metadata.json file in TmpPack
    converter = ContributionConverter(name=pack_name, base_dir=tmp_directory, pack_dir_name=pack_name)
    converter.create_metadata_file({'description': 'Temporary Pack', 'author': 'xsoar'})
    entity_prefix = '-'.join(filename.split('-')[:-1])
    target_dir = pack_dir / ENTITY_TYPE_TO_DIR.get(entity_prefix, 'Integrations')
    target_dir.mkdir(exist_ok=True)
    # round-trip through the YAML handler to normalize the raw upload
    parsed = yaml.load(data.decode())
    string_buffer = io.StringIO()
    yaml.dump(parsed, string_buffer)
    # write yaml integration file to file system
    item_path = target_dir / filename
    item_path.write_text(string_buffer.getvalue())
    detected_type = find_type(str(item_path))
    detected_type = detected_type.value if detected_type else detected_type
    extractor = Extractor(
        input=str(item_path), file_type=detected_type, output=target_dir, no_logging=True, no_pipenv=True)
    # validate the resulting package files, ergo set path_to_validate to the package directory that results
    # from extracting the unified yaml to a package format
    extractor.extract_to_package_format()
    return extractor.get_output_path()
Exemplo n.º 2
0
def test_get_output_path():
    """Extractor's output path should be '<output dir>/<base name>' derived from the input yml."""
    expected_dir = f'{git_path()}/demisto_sdk/tests/Integrations'
    zoom_extractor = Extractor(input=f'{git_path()}/demisto_sdk/tests/test_files/integration-Zoom.yml',
                               file_type='integration',
                               output=expected_dir)
    assert zoom_extractor.get_output_path() == f'{expected_dir}/Zoom'
Exemplo n.º 3
0
def get_extracted_code_filepath(extractor: Extractor) -> str:
    """Return the path of the code file produced by extracting a unified yml.

    The file lives directly in the extractor's output directory, named after
    the extractor's base name (falling back to the output directory's own
    basename), with the extension matching the content item's language type.

    Args:
        extractor (Extractor): Extractor whose yml data has already been loaded.

    Returns:
        str: Full path of the extracted code file.
    """
    package_dir = extractor.get_output_path()
    stem = extractor.base_name if extractor.base_name else os.path.basename(package_dir)
    script_section = extractor.yml_data['script']
    # integrations nest the language under script.type; scripts keep it at the top level
    if extractor.file_type == 'integration':
        lang_type: str = script_section['type']
    else:
        lang_type = extractor.yml_data['type']
    return f'{package_dir}/{stem}{TYPE_TO_EXTENSION[lang_type]}'
Exemplo n.º 4
0
def prepare_single_content_item_for_validation(
        filename: str, data: bytes, tmp_directory: str) -> Tuple[str, Dict]:
    """Stage a single content item (JSON or YAML) in a temporary pack for validation.

    Writes the uploaded bytes into a freshly created ``TmpPack``, normalizing
    the payload by round-tripping it through the matching parser. JSON items
    and playbooks are returned as-is; unified YAML items are split to package
    format first.

    Args:
        filename (str): Name of the uploaded content item file.
        data (bytes): Raw bytes of the content item.
        tmp_directory (str): Temporary directory to stage the content in.

    Returns:
        Tuple[str, Dict]: Path to validate, and a mapping of extracted code
            file path to the number of lines inserted at the start of the
            code (empty for JSON items and playbooks).
    """
    tmp_content = Content(tmp_directory)
    pack_name = 'TmpPack'
    pack_dir = tmp_content.path / 'Packs' / pack_name
    # create pack_metadata.json file in TmpPack
    converter = ContributionConverter(name=pack_name,
                                      base_dir=tmp_directory,
                                      pack_dir_name=pack_name)
    converter.create_metadata_file({
        'description': 'Temporary Pack',
        'author': 'xsoar'
    })
    entity_prefix = '-'.join(filename.split('-')[:-1])
    target_dir = pack_dir / ENTITY_TYPE_TO_DIR.get(entity_prefix, 'Integrations')
    target_dir.mkdir(exist_ok=True)
    is_json = filename.casefold().endswith('.json')
    raw_text = data.decode()
    # round-trip the payload through the matching parser to normalize formatting
    if is_json:
        normalized = json.dumps(json.loads(raw_text))
    else:
        buffer = io.StringIO()
        yaml.dump(yaml.load(raw_text), buffer)
        normalized = buffer.getvalue()
    # write content item file to file system
    item_path = target_dir / filename
    item_path.write_text(normalized)
    detected_type = find_type(str(item_path))
    detected_type = detected_type.value if detected_type else detected_type
    # JSON items and playbooks are validated as-is — no package extraction applies
    if is_json or detected_type in (FileType.PLAYBOOK.value,
                                    FileType.TEST_PLAYBOOK.value):
        return str(item_path), {}
    extractor = Extractor(input=str(item_path),
                          file_type=detected_type,
                          output=target_dir,
                          no_logging=True,
                          no_pipenv=True,
                          no_basic_fmt=True)
    # validate the resulting package files, ergo set path_to_validate to the package directory that results
    # from extracting the unified yaml to a package format
    extractor.extract_to_package_format()
    row_offsets = {
        get_extracted_code_filepath(extractor): extractor.lines_inserted_at_code_start
    }
    return extractor.get_output_path(), row_offsets
Exemplo n.º 5
0
    def content_item_to_package_format(
            self,
            content_item_dir: str,
            del_unified: bool = True,
            source_mapping: Union[Dict[str, Dict[str, str]], None] = None):
        """
        Iterate over the YAML files in a directory and create packages (a containing directory and
        component files) from the YAMLs of integrations and scripts

        Args:
            content_item_dir (str): Path to the directory containing the content item YAML file(s)
            del_unified (bool): Whether to delete the unified yaml the package was extracted from
            source_mapping (Union[Dict], optional): Can be used when updating an existing pack and
                the package directory of a content item is not what would ordinarily be set by the
                `demisto-sdk` `split-yml` command. Sample value would be,
                `{'integration-AbuseIPDB.yml': {'containing_dir_name': 'AbuseDB', 'base_name': 'AbuseDB'}}`
                - the split-yml command would create a containing directory of `AbuseIPDB` for the file
                `integration-AbuseIPDB.yml` and we need the containing directory of the package to match
                what already exists in the repo.
        """
        child_files = get_child_files(content_item_dir)
        content_item_file_path = ''
        for child_file in child_files:
            cf_name_lower = os.path.basename(child_file).lower()
            if cf_name_lower.startswith(
                (SCRIPT, AUTOMATION,
                 INTEGRATION)) and cf_name_lower.endswith('yml'):
                content_item_file_path = child_file
                file_type = find_type(content_item_file_path)
                file_type = file_type.value if file_type else file_type
                # BUG FIX: previously 'extractor' was referenced unconditionally in the
                # 'finally' block; if the Extractor constructor (or anything before it)
                # raised, that reference caused an UnboundLocalError which masked the
                # original error already recorded by the 'except' clause. Initialize it
                # here so the cleanup can be safely skipped in that case.
                extractor = None
                try:
                    child_file_name = os.path.basename(child_file)
                    if source_mapping and child_file_name in source_mapping.keys():
                        child_file_mapping = source_mapping.get(child_file_name, {})
                        base_name = child_file_mapping.get('base_name', '')
                        containing_dir_name = child_file_mapping.get('containing_dir_name', '')
                        # for legacy unified yamls in the repo, their containing directory will be that of their
                        # entity type directly instead of the typical package format. For those cases, we need the
                        # extractor to auto create the containing directory. An example would be -
                        # 'content/Packs/AbuseDB/Scripts/script-AbuseIPDBPopulateIndicators.yml'
                        autocreate_dir = containing_dir_name == ENTITY_TYPE_TO_DIR.get(file_type, '')
                        output_dir = os.path.join(
                            self.pack_dir_path,
                            ENTITY_TYPE_TO_DIR.get(file_type, ''))
                        if not autocreate_dir:
                            output_dir = os.path.join(output_dir, containing_dir_name)
                        os.makedirs(output_dir, exist_ok=True)
                        extractor = Extractor(
                            input=content_item_file_path,
                            file_type=file_type,
                            output=output_dir,
                            no_readme=True,
                            base_name=base_name,
                            no_auto_create_dir=(not autocreate_dir),
                            no_pipenv=self.no_pipenv)
                    else:
                        extractor = Extractor(input=content_item_file_path,
                                              file_type=file_type,
                                              output=content_item_dir,
                                              no_pipenv=self.no_pipenv)
                    extractor.extract_to_package_format()
                except Exception as e:
                    err_msg = f'Error occurred while trying to split the unified YAML "{content_item_file_path}" ' \
                              f'into its component parts.\nError: "{e}"'
                    self.contrib_conversion_errs.append(err_msg)
                finally:
                    # Only run the cleanup when an Extractor was actually constructed;
                    # otherwise there is no output path and nothing was extracted.
                    if extractor is not None:
                        output_path = extractor.get_output_path()
                        if self.create_new:
                            # Moving the unified file to its package.
                            shutil.move(content_item_file_path, output_path)
                        if del_unified:
                            if os.path.exists(content_item_file_path):
                                os.remove(content_item_file_path)
                            moved_unified_dst = os.path.join(output_path, child_file_name)
                            if os.path.exists(moved_unified_dst):
                                os.remove(moved_unified_dst)