Example #1
def prepare_single_content_item_for_validation(filename: str, data: bytes, tmp_directory: str) -> str:
    content = Content(tmp_directory)
    pack_name = 'TmpPack'
    pack_dir = content.path / 'Packs' / pack_name
    # create pack_metadata.json file in TmpPack
    contrib_converter = ContributionConverter(name=pack_name, base_dir=tmp_directory, pack_dir_name=pack_name)
    contrib_converter.create_metadata_file({'description': 'Temporary Pack', 'author': 'xsoar'})
    prefix = '-'.join(filename.split('-')[:-1])
    containing_dir = pack_dir / ENTITY_TYPE_TO_DIR.get(prefix, 'Integrations')
    containing_dir.mkdir(exist_ok=True)
    data_as_string = data.decode()
    loaded_data = yaml.load(data_as_string)
    buff = io.StringIO()
    yaml.dump(loaded_data, buff)
    data_as_string = buff.getvalue()
    # write yaml integration file to file system
    file_path = containing_dir / filename
    file_path.write_text(data_as_string)
    file_type = find_type(str(file_path))
    file_type = file_type.value if file_type else file_type
    extractor = Extractor(
        input=str(file_path), file_type=file_type, output=containing_dir, no_logging=True, no_pipenv=True)
    # validate the resulting package files, ergo set path_to_validate to the package directory that results
    # from extracting the unified yaml to a package format
    extractor.extract_to_package_format()
    return extractor.get_output_path()
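
For reference, a minimal standalone sketch of how the target directory is derived from the uploaded filename in the function above. The ENTITY_TYPE_TO_DIR dict here is a hypothetical one-entry subset for illustration (only the integration -> Integrations mapping is confirmed by the examples in this file); the real constant comes from demisto-sdk.

# Illustrative sketch: this ENTITY_TYPE_TO_DIR is a hypothetical subset,
# not the full demisto-sdk constant.
ENTITY_TYPE_TO_DIR = {'integration': 'Integrations'}


def derive_containing_dir_name(filename: str) -> str:
    # Everything before the last '-' is treated as the entity-type prefix,
    # e.g. 'integration-MyIntegration.yml' -> 'integration'.
    prefix = '-'.join(filename.split('-')[:-1])
    # Unknown prefixes fall back to 'Integrations', mirroring the function above.
    return ENTITY_TYPE_TO_DIR.get(prefix, 'Integrations')


assert derive_containing_dir_name('integration-MyIntegration.yml') == 'Integrations'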
Example #2
    def convert_contribution_dir_to_pack_contents(
            self, unpacked_contribution_dir: str) -> None:
        """Converts a directory and its contents unpacked from the contribution zip file to the appropriate structure

        Example:
            The pack directory after `unpack_contribution_to_dst_pack_directory` has been executed:

            ExamplePack
            ├── automation
            │   └── automation-ExampleAutomation.yml
            ├── integration
            │   └── integration-ExampleIntegration.yml
            ├── playbook
            │   └── playbook-ExamplePlaybook.yml
            ├── report
            │   └── report-ExampleReport.json
            └── reputation
                └── reputation-ExampleReputation.json

            The updated pack directory structure after `convert_contribution_dir_to_pack_contents` has been
            executed, passing the path of .../ExamplePack/integration as the argument, would look like the following:

            ExamplePack
            ├── automation
            │   └── automation-ExampleAutomation.yml
            ├── Integrations
            │   └── integration-ExampleIntegration.yml
            ├── playbook
            │   └── playbook-ExamplePlaybook.yml
            ├── report
            │   └── report-ExampleReport.json
            └── reputation
                └── reputation-ExampleReputation.json

        Args:
            unpacked_contribution_dir (str): The directory to convert
        """
        basename = os.path.basename(unpacked_contribution_dir)
        if basename in ENTITY_TYPE_TO_DIR:
            dst_name = ENTITY_TYPE_TO_DIR.get(basename, '')
            src_path = os.path.join(self.pack_dir_path, basename)
            dst_path = os.path.join(self.pack_dir_path, dst_name)
            if os.path.exists(dst_path):
                # move src folder files to dst folder
                for _, _, files in os.walk(src_path, topdown=False):
                    for name in files:
                        src_file_path = os.path.join(src_path, name)
                        dst_file_path = os.path.join(dst_path, name)
                        shutil.move(src_file_path, dst_file_path)
                shutil.rmtree(src_path, ignore_errors=True)
            else:
                # replace dst folder with src folder
                shutil.move(src_path, dst_path)
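
The move-or-merge step at the end of this method can be shown in isolation. Below is a simplified standalone re-implementation (not the demisto-sdk code itself) of the same os/shutil pattern, assuming, as the method above does, that the entity directories contain only top-level files.

import os
import shutil


def merge_or_rename(src_path: str, dst_path: str) -> None:
    """Simplified sketch of the move-or-merge pattern used above."""
    if os.path.exists(dst_path):
        # The destination directory already exists: move the files across
        # one by one, then drop the now-redundant source directory.
        for _, _, files in os.walk(src_path, topdown=False):
            for name in files:
                shutil.move(os.path.join(src_path, name), os.path.join(dst_path, name))
        shutil.rmtree(src_path, ignore_errors=True)
    else:
        # The destination does not exist yet: a plain rename is enough.
        shutil.move(src_path, dst_path)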
Example #3
def file_type_to_entity(file_data: dict, file_type: str) -> str:
    """
    Given the file type, returns the file entity
    :param file_data: The file data
    :param file_type: The file type, for example: integration
    :return: The file entity, for example: Integrations
    """
    if file_type and file_type == 'playbook':
        name: str = get_entity_name_by_entity_type(file_data, PLAYBOOKS_DIR)
        if name and 'test' in name.lower():
            return TEST_PLAYBOOKS_DIR
    return ENTITY_TYPE_TO_DIR.get(file_type, '')
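
A hedged usage sketch for the function above. The dicts are placeholders (the exact shape expected by get_entity_name_by_entity_type is not shown here), and the sketch assumes the function and its constants are importable from their demisto-sdk module.

# Hypothetical inputs for illustration only; the real dict shape comes from the
# loaded content item YAML.
integration_yml = {'name': 'ExampleIntegration'}
test_playbook_yml = {'name': 'ExamplePlaybook_Test'}

# Non-playbook types go straight through ENTITY_TYPE_TO_DIR,
# e.g. 'integration' -> 'Integrations'.
file_type_to_entity(integration_yml, 'integration')

# Playbooks whose resolved name contains 'test' are routed to TEST_PLAYBOOKS_DIR
# rather than the regular playbook directory.
file_type_to_entity(test_playbook_yml, 'playbook')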
Example #4
def prepare_single_content_item_for_validation(
        filename: str, data: bytes, tmp_directory: str) -> Tuple[str, Dict]:
    content = Content(tmp_directory)
    pack_name = 'TmpPack'
    pack_dir = content.path / 'Packs' / pack_name
    # create pack_metadata.json file in TmpPack
    contrib_converter = ContributionConverter(name=pack_name,
                                              base_dir=tmp_directory,
                                              pack_dir_name=pack_name)
    contrib_converter.create_metadata_file({
        'description': 'Temporary Pack',
        'author': 'xsoar'
    })
    prefix = '-'.join(filename.split('-')[:-1])
    containing_dir = pack_dir / ENTITY_TYPE_TO_DIR.get(prefix, 'Integrations')
    containing_dir.mkdir(exist_ok=True)
    is_json = filename.casefold().endswith('.json')
    data_as_string = data.decode()
    loaded_data = json.loads(data_as_string) if is_json else yaml.load(
        data_as_string)
    if is_json:
        data_as_string = json.dumps(loaded_data)
    else:
        buff = io.StringIO()
        yaml.dump(loaded_data, buff)
        data_as_string = buff.getvalue()
    # write content item file to file system
    file_path = containing_dir / filename
    file_path.write_text(data_as_string)
    file_type = find_type(str(file_path))
    file_type = file_type.value if file_type else file_type
    if is_json or file_type in (FileType.PLAYBOOK.value,
                                FileType.TEST_PLAYBOOK.value):
        return str(file_path), {}
    extractor = Extractor(input=str(file_path),
                          file_type=file_type,
                          output=containing_dir,
                          no_logging=True,
                          no_pipenv=True,
                          no_basic_fmt=True)
    # validate the resulting package files, ergo set path_to_validate to the package directory that results
    # from extracting the unified yaml to a package format
    extractor.extract_to_package_format()
    code_fp_to_row_offset = {
        get_extracted_code_filepath(extractor):
        extractor.lines_inserted_at_code_start
    }
    return extractor.get_output_path(), code_fp_to_row_offset
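
A hedged call-site sketch for the extended version above; `file_bytes` and `tmp_dir` are placeholders for the uploaded file's raw bytes and a temporary content workspace, and the filename is illustrative.

# Hypothetical call site: `file_bytes` and `tmp_dir` are placeholders.
path_to_validate, code_fp_to_row_offset = prepare_single_content_item_for_validation(
    filename='integration-MyIntegration.yml',  # illustrative name
    data=file_bytes,
    tmp_directory=tmp_dir,
)
# For JSON items and (test) playbooks the function returns the written file path and
# an empty dict; for unified YAML integrations/scripts it returns the extracted package
# directory plus a mapping of the extracted code file to the number of lines inserted
# at the start of the code, which a caller can presumably use to shift reported line
# numbers back to the original file.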
Example #5
    def content_item_to_package_format(
            self,
            content_item_dir: str,
            del_unified: bool = True,
            source_mapping: Optional[Dict[str, Dict[str, str]]] = None):
        """
        Iterate over the YAML files in a directory and create packages (a containing directory and
        component files) from the YAMLs of integrations and scripts

        Args:
            content_item_dir (str): Path to the directory containing the content item YAML file(s)
            del_unified (bool): Whether to delete the unified yaml the package was extracted from
            source_mapping (Dict[str, Dict[str, str]], optional): Can be used when updating an existing pack and
                the package directory of a content item is not what would ordinarily be set by the
                `demisto-sdk` `split` command. Sample value would be,
                `{'integration-AbuseIPDB.yml': {'containing_dir_name': 'AbuseDB', 'base_name': 'AbuseDB'}}`
                - the split command would create a containing directory of `AbuseIPDB` for the file
                `integration-AbuseIPDB.yml` and we need the containing directory of the package to match
                what already exists in the repo.
        """
        child_files = get_child_files(content_item_dir)
        for child_file in child_files:
            cf_name_lower = os.path.basename(child_file).lower()
            if cf_name_lower.startswith(
                (SCRIPT, AUTOMATION,
                 INTEGRATION)) and cf_name_lower.endswith('yml'):
                content_item_file_path = child_file
                file_type = find_type(content_item_file_path)
                file_type = file_type.value if file_type else file_type
                try:
                    child_file_name = os.path.basename(child_file)
                    if source_mapping and child_file_name in source_mapping.keys(
                    ):
                        child_file_mapping = source_mapping.get(
                            child_file_name, {})
                        base_name = child_file_mapping.get('base_name', '')
                        containing_dir_name = child_file_mapping.get(
                            'containing_dir_name', '')
                        # for legacy unified yamls in the repo, their containing directory will be that of their
                        # entity type directly instead of the typical package format. For those cases, we need the
                        # extractor to auto create the containing directory. An example would be -
                        # 'content/Packs/AbuseDB/Scripts/script-AbuseIPDBPopulateIndicators.yml'
                        autocreate_dir = containing_dir_name == ENTITY_TYPE_TO_DIR.get(
                            file_type, '')
                        output_dir = os.path.join(
                            self.pack_dir_path,
                            ENTITY_TYPE_TO_DIR.get(file_type, ''))
                        if not autocreate_dir:
                            output_dir = os.path.join(output_dir,
                                                      containing_dir_name)
                        os.makedirs(output_dir, exist_ok=True)
                        extractor = YmlSplitter(
                            input=content_item_file_path,
                            file_type=file_type,
                            output=output_dir,
                            no_readme=True,
                            base_name=base_name,
                            no_auto_create_dir=(not autocreate_dir),
                            no_pipenv=self.no_pipenv)

                    else:
                        extractor = YmlSplitter(input=content_item_file_path,
                                                file_type=file_type,
                                                output=content_item_dir,
                                                no_pipenv=self.no_pipenv)
                    extractor.extract_to_package_format()
                except Exception as e:
                    err_msg = f'Error occurred while trying to split the unified YAML "{content_item_file_path}" ' \
                              f'into its component parts.\nError: "{e}"'
                    self.contrib_conversion_errs.append(err_msg)
                finally:
                    output_path = extractor.get_output_path()
                    if self.create_new:
                        # Moving the unified file to its package.
                        shutil.move(content_item_file_path, output_path)
                    if del_unified:
                        if os.path.exists(content_item_file_path):
                            os.remove(content_item_file_path)
                        moved_unified_dst = os.path.join(
                            output_path, child_file_name)
                        if os.path.exists(moved_unified_dst):
                            os.remove(moved_unified_dst)
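
An illustrative call, reusing the source_mapping shape given in the docstring above; `converter` stands in for an already-constructed ContributionConverter and the input directory path is hypothetical.

source_mapping = {
    'integration-AbuseIPDB.yml': {
        'containing_dir_name': 'AbuseDB',  # package directory that already exists in the repo
        'base_name': 'AbuseDB',            # base name to use for the split component files
    },
}
converter.content_item_to_package_format(
    '/path/to/unpacked/Integrations',  # hypothetical directory holding unified YAMLs
    del_unified=True,
    source_mapping=source_mapping,
)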
Example #6
 def convert_contribution_to_pack(self):
     """Create a Pack in the content repo from the contents of a contribution zipfile"""
     try:
         packs_dir = os.path.join(get_content_path(), 'Packs')
         metadata_dict = {}
         with zipfile.ZipFile(self.contribution) as zipped_contrib:
             with zipped_contrib.open('metadata.json') as metadata_file:
                 click.echo(
                     f'Pulling relevant information from {metadata_file.name}',
                     color=LOG_COLORS.NATIVE)
                 metadata = json.loads(metadata_file.read())
                 # a name passed on the cmd line should take precedence over one pulled
                 # from contribution metadata
                 pack_name = self.name or self.format_pack_dir_name(
                     metadata.get('name', 'ContributionPack'))
                 # a description passed on the cmd line should take precedence over one pulled
                 # from contribution metadata
                 metadata_dict[
                     'description'] = self.description or metadata.get(
                         'description')
                 metadata_dict['name'] = pack_name
                 metadata_dict['author'] = metadata.get('author', '')
                 metadata_dict['support'] = metadata.get('support', '')
                 metadata_dict['url'] = metadata.get('supportDetails',
                                                     {}).get('url', '')
                 metadata_dict['email'] = metadata.get(
                     'supportDetails', {}).get('email', '')
                 metadata_dict['categories'] = metadata.get(
                     'categories') if metadata.get('categories') else []
                 metadata_dict['tags'] = metadata.get(
                     'tags') if metadata.get('tags') else []
                 metadata_dict['useCases'] = metadata.get(
                     'useCases') if metadata.get('useCases') else []
                 metadata_dict['keywords'] = metadata.get(
                     'keywords') if metadata.get('keywords') else []
         while os.path.exists(os.path.join(packs_dir, pack_name)):
             click.echo(
                 f'Modifying pack name because pack {pack_name} already exists in the content repo',
                 color=LOG_COLORS.NATIVE)
             if len(pack_name) >= 2 and pack_name[-2].lower(
             ) == 'v' and pack_name[-1].isdigit():
                 # increment by one
                 pack_name = pack_name[:-1] + str(int(pack_name[-1]) + 1)
             else:
                 pack_name += 'V2'
             click.echo(f'New pack name is "{pack_name}"',
                        color=LOG_COLORS.NATIVE)
         pack_dir = os.path.join(packs_dir, pack_name)
         os.mkdir(pack_dir)
         shutil.unpack_archive(filename=self.contribution,
                               extract_dir=pack_dir)
         pack_subdirectories = get_child_directories(pack_dir)
         for pack_subdir in pack_subdirectories:
             basename = os.path.basename(pack_subdir)
             if basename in ENTITY_TYPE_TO_DIR:
                 dst_name = ENTITY_TYPE_TO_DIR.get(basename)
                 src_path = os.path.join(pack_dir, basename)
                 dst_path = os.path.join(pack_dir, dst_name)
                 content_item_dir = shutil.move(src_path, dst_path)
                 if basename in {SCRIPT, AUTOMATION, INTEGRATION}:
                     self.content_item_to_package_format(content_item_dir,
                                                         del_unified=True)
         # create pack's base files
         self.full_output_path = pack_dir
         self.create_pack_base_files()
         metadata_dict = Initiator.create_metadata(fill_manually=False,
                                                   data=metadata_dict)
         metadata_path = os.path.join(self.full_output_path,
                                      'pack_metadata.json')
         with open(metadata_path, 'w') as pack_metadata_file:
             json.dump(metadata_dict, pack_metadata_file, indent=4)
         # remove metadata.json file
         os.remove(os.path.join(pack_dir, 'metadata.json'))
         click.echo(
             f'Executing \'format\' on the restructured contribution zip files at "{pack_dir}"'
         )
         format_manager(input=pack_dir)
     except Exception as e:
         click.echo(
             f'Creating a Pack from the contribution zip failed with error: {e}\n {traceback.format_exc()}',
             color=LOG_COLORS.RED)
     finally:
         if self.contrib_conversion_errs:
             click.echo(
                 'The following errors occurred while converting unified content YAMLs to package structure:'
             )
             click.echo(
                 textwrap.indent('\n'.join(self.contrib_conversion_errs),
                                 '\t'))
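
The pack-name collision handling in the middle of this method is self-contained enough to sketch on its own. The helper below is a simplified re-implementation for illustration, using an in-memory set instead of checking the Packs directory on disk.

def deduplicate_pack_name(pack_name: str, existing_packs: set) -> str:
    """Sketch of the naming rule above: append 'V2', then bump the trailing digit."""
    while pack_name in existing_packs:
        if len(pack_name) >= 2 and pack_name[-2].lower() == 'v' and pack_name[-1].isdigit():
            # Already versioned (e.g. 'MyPackV2'): increment the trailing digit.
            pack_name = pack_name[:-1] + str(int(pack_name[-1]) + 1)
        else:
            # First collision: start versioning at V2.
            pack_name += 'V2'
    return pack_name


assert deduplicate_pack_name('MyPack', {'MyPack'}) == 'MyPackV2'
assert deduplicate_pack_name('MyPack', {'MyPack', 'MyPackV2'}) == 'MyPackV3'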
Example #7
    def update_tests(self) -> None:
        """
        If no tests are configured, prompts the user on the CLI to ask whether to add
        'No tests' under the 'tests' key, and formats the file according to the answer
        """
        if not self.data.get('tests', ''):
            # try to get the test playbook files from the TestPlaybooks dir in the pack
            pack_path = os.path.dirname(
                os.path.dirname(os.path.abspath(self.source_file)))
            test_playbook_dir_path = os.path.join(pack_path,
                                                  TEST_PLAYBOOKS_DIR)
            test_playbook_ids = []
            file_entity_type = find_type(self.source_file,
                                         _dict=self.data,
                                         file_type='yml')
            file_id = get_entity_id_by_entity_type(
                self.data, ENTITY_TYPE_TO_DIR.get(file_entity_type.value, ""))
            commands, scripts = get_scripts_and_commands_from_yml_data(
                self.data, file_entity_type)
            commands_names = [command.get('id') for command in commands]
            try:
                # Collecting the test playbooks
                test_playbooks_files = [
                    tpb_file
                    for tpb_file in listdir_fullpath(test_playbook_dir_path)
                    if find_type(tpb_file) == FileType.TEST_PLAYBOOK
                ]
                for tpb_file_path in test_playbooks_files:  # iterate over the test playbooks in the dir
                    test_playbook_data = get_yaml(tpb_file_path)
                    test_playbook_id = get_entity_id_by_entity_type(
                        test_playbook_data, content_entity='')
                    if not scripts and not commands:  # Better safe than sorry
                        test_playbook_ids.append(test_playbook_id)
                    else:
                        added = False
                        tpb_commands, tpb_scripts = get_scripts_and_commands_from_yml_data(
                            test_playbook_data, FileType.TEST_PLAYBOOK)

                        for tpb_command in tpb_commands:
                            tpb_command_name = tpb_command.get('id')
                            tpb_command_source = tpb_command.get('source', '')
                            if tpb_command_source and file_id and file_id != tpb_command_source:
                                continue

                            if not added and tpb_command_name in commands_names:
                                command_source = commands[commands_names.index(
                                    tpb_command_name)].get('source', '')
                                if command_source == tpb_command_source or command_source == '':
                                    test_playbook_ids.append(test_playbook_id)
                                    added = True
                                    break

                        if not added:
                            for tpb_script in tpb_scripts:
                                if tpb_script in scripts:
                                    test_playbook_ids.append(test_playbook_id)
                                    break

                self.data['tests'] = test_playbook_ids
            except FileNotFoundError:
                pass

            if not test_playbook_ids:
                # In case the no-interactive flag was given (assume_yes), or adding tests was not requested,
                # modify the tests without confirmation
                if self.assume_yes or not self.add_tests:
                    should_modify_yml_tests = True
                else:
                    should_modify_yml_tests = click.confirm(
                        f'The file {self.source_file} has no test playbooks '
                        f'configured. Do you want to configure it with "No tests"?'
                    )
                if should_modify_yml_tests:
                    click.echo(
                        f'Formatting {self.output_file} with "No tests"')
                    self.data['tests'] = ['No tests (auto formatted)']
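
When no matching test playbooks are found, the tail of this method reduces to a small decision: write 'No tests (auto formatted)' automatically, or ask for confirmation first. A simplified standalone sketch of that branch (not the demisto-sdk code itself), with a caller-supplied confirm callable standing in for click.confirm:

def resolve_no_tests(data: dict, assume_yes: bool, add_tests: bool, confirm) -> None:
    """Sketch of the final branch above for a YAML data dict with no configured tests."""
    if assume_yes or not add_tests:
        # Non-interactive modes skip the prompt entirely.
        should_modify = True
    else:
        should_modify = confirm('Configure this file with "No tests"?')
    if should_modify:
        data['tests'] = ['No tests (auto formatted)']


yml_data: dict = {}
resolve_no_tests(yml_data, assume_yes=True, add_tests=True, confirm=input)
assert yml_data['tests'] == ['No tests (auto formatted)']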