Example 1
    def copy_packs_content_to_packs_bundle(self, packs):
        """
        Copy the content in packs to the bundle that gets zipped into 'content_packs.zip'. Preserves the directory
        structure, except that packages inside a pack's "Integrations" or "Scripts" directory are flattened. Adds file
        prefixes according to how the server expects to ingest the files, e.g. 'integration-' is prepended to
        integration yml filenames, 'script-' is prepended to script yml filenames, and so on.
        """
        for pack in packs:
            pack_name = os.path.basename(pack)
            if pack_name in self.packs_to_skip:
                continue
            pack_dst = os.path.join(self.packs_bundle, pack_name)
            os.mkdir(pack_dst)
            pack_dirs = get_child_directories(pack)
            pack_files = get_child_files(pack)
            # copy first level pack files over
            for file_path in pack_files:
                shutil.copy(
                    file_path,
                    os.path.join(pack_dst, os.path.basename(file_path)))
            # handle content directories in the pack
            for content_dir in pack_dirs:
                dir_name = os.path.basename(content_dir)
                dest_dir = os.path.join(pack_dst, dir_name)
                os.mkdir(dest_dir)
                if dir_name in DIR_TO_PREFIX:
                    packages_dirs = get_child_directories(content_dir)
                    for package_dir in packages_dirs:
                        ymls, _ = get_yml_paths_in_dir(package_dir,
                                                       error_msg='')
                        if not ymls or (len(ymls) == 1
                                        and ymls[0].endswith('_unified.yml')):
                            msg = 'Skipping package: {} -'.format(package_dir)
                            if not ymls:
                                print_warning(
                                    '{} No yml files found in the package directory'
                                    .format(msg))
                            else:
                                print_warning(
                                    '{} Only unified yml found in the package directory'
                                    .format(msg))
                            continue
                        package_dir_name = os.path.basename(package_dir)
                        unifier = Unifier(package_dir, dir_name, dest_dir)
                        unifier.merge_script_package_to_yml()

                        # also copy CHANGELOG markdown files over (should only be one per package)
                        package_files = get_child_files(package_dir)
                        changelog_files = [
                            file_path for file_path in package_files
                            if 'CHANGELOG.md' in file_path
                        ]
                        for md_file_path in changelog_files:
                            md_out_name = '{}-{}_CHANGELOG.md'.format(
                                DIR_TO_PREFIX.get(dir_name), package_dir_name)
                            shutil.copyfile(
                                md_file_path,
                                os.path.join(dest_dir, md_out_name))
                else:
                    self.copy_dir_files(content_dir, dest_dir)
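
All of the examples in this section rely on the `get_child_files` and `get_child_directories` helpers. Their real implementation is not shown here; a minimal sketch, assuming they simply return the full paths of a directory's immediate files and sub-directories, could look like this:

    import os
    from typing import List

    def get_child_directories(directory: str) -> List[str]:
        # Sketch: full paths of the immediate sub-directories of `directory`.
        if not os.path.isdir(directory):
            return []
        return [os.path.join(directory, name) for name in os.listdir(directory)
                if os.path.isdir(os.path.join(directory, name))]

    def get_child_files(directory: str) -> List[str]:
        # Sketch: full paths of the files directly under `directory`.
        if not os.path.isdir(directory):
            return []
        return [os.path.join(directory, name) for name in os.listdir(directory)
                if os.path.isfile(os.path.join(directory, name))]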
Example 2
 def generate_readmes_for_new_content_pack(self, is_contribution=False):
     """
     Generate the readme files for a new content pack.
     """
     for pack_subdir in get_child_directories(self.pack_dir_path):
         basename = os.path.basename(pack_subdir)
         if basename in {SCRIPTS_DIR, INTEGRATIONS_DIR}:
             directories = get_child_directories(pack_subdir)
             for directory in directories:
                 files = get_child_files(directory)
                 for file in files:
                     file_name = os.path.basename(file)
                     if file_name.startswith('integration-') \
                             or file_name.startswith('script-') \
                             or file_name.startswith('automation-'):
                         unified_file = file
                         self.generate_readme_for_pack_content_item(
                             unified_file, is_contribution)
                         os.remove(unified_file)
         elif basename == 'Playbooks':
             files = get_child_files(pack_subdir)
             for file in files:
                 file_name = os.path.basename(file)
                 if file_name.startswith('playbook') and file_name.endswith(
                         '.yml'):
                     self.generate_readme_for_pack_content_item(file)
Example 3
    def test_merge_and_extract_new_file(self, tmp_path):
        env = Environment(tmp_path)
        parameters = [{
            'custom_content_object':
            env.INTEGRATION_CUSTOM_CONTENT_OBJECT,
            'raw_files': [
                'odp/bn.py', 'odp/bn.yml', 'odp/bn_image.png',
                'odp/bn_description.md', 'odp/README.md'
            ]
        }, {
            'custom_content_object': env.SCRIPT_CUSTOM_CONTENT_OBJECT,
            'raw_files': ['odp/bn.py', 'odp/bn.yml', 'odp/README.md']
        }]
        for index, param in enumerate(parameters):
            temp_dir = env.tmp_path / f'temp_dir_{index}'
            os.mkdir(temp_dir)
            entity = param['custom_content_object']['entity']
            downloader = Downloader(output=str(temp_dir), input='', regex='')
            basename = downloader.create_dir_name(
                param['custom_content_object']['name'])
            output_entity_dir_path = f'{temp_dir}/{entity}'
            os.mkdir(output_entity_dir_path)
            output_dir_path = f'{output_entity_dir_path}/{basename}'
            os.mkdir(output_dir_path)
            files = [
                file.replace('odp', output_dir_path).replace('bn', basename)
                for file in param['raw_files']
            ]

            downloader.merge_and_extract_new_file(
                param['custom_content_object'])
            output_files = get_child_files(output_dir_path)
            assert sorted(output_files) == sorted(files)
Example 4
    def merge_and_extract_new_file(self, custom_content_object: dict) -> None:
        """
        Merges new files of type integration/script (not existing in the output pack)
        :param custom_content_object: The custom content object to merge into the pack
        :return: None
        """
        file_entity: str = custom_content_object['entity']
        file_path: str = custom_content_object['path']
        file_type: str = custom_content_object['type']
        file_name: str = custom_content_object['name']

        file_code_language: str = custom_content_object.get('code_lang', '')
        if not self.verify_code_lang(file_code_language, file_type, file_name):
            return

        dir_output_path: str = os.path.join(self.output_pack_path, file_entity)
        # dir name should be the same as file name without separators mentioned in constants.py
        dir_name: str = self.create_dir_name(file_name)
        dir_output_path = os.path.join(dir_output_path, dir_name)

        extractor = Extractor(input=file_path,
                              output=dir_output_path,
                              file_type=file_type,
                              base_name=dir_name,
                              no_auto_create_dir=True,
                              no_logging=not self.log_verbose,
                              no_pipenv=True)
        extractor.extract_to_package_format()

        for file_path in get_child_files(dir_output_path):
            self.format_file(file_path, retrieve_file_ending(file_path))
        self.num_added_files += 1
        self.log_finished_file('Added', file_name, file_entity[:-1])
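
The shape of `custom_content_object` can be read off the keys this method accesses ('entity', 'path', 'type', 'name', 'code_lang'). A hypothetical value, with illustrative names and paths only:

    custom_content_object = {
        'entity': 'Integrations',                      # content entity directory
        'path': '/tmp/integration-MyIntegration.yml',  # downloaded unified yml
        'type': 'integration',                         # content item type
        'name': 'MyIntegration',                       # used to build the package dir name
        'code_lang': 'python',                         # checked by verify_code_lang first
    }
    downloader.merge_and_extract_new_file(custom_content_object)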
Example 5
    def content_item_to_package_format(self,
                                       content_item_dir: str,
                                       del_unified: bool = True):
        """
        Iterate over the YAML files in a directory and create packages (a containing directory and
        component files) from the YAMLs of integrations and scripts

        Args:
            content_item_dir (str): Path to the directory containing the content item YAML file(s)
            del_unified (bool): Whether to delete the unified yaml the package was extracted from
        """
        child_files = get_child_files(content_item_dir)
        content_item_file_path = ''
        for child_file in child_files:
            cf_name_lower = os.path.basename(child_file).lower()
            if cf_name_lower.startswith(
                (SCRIPT, AUTOMATION,
                 INTEGRATION)) and cf_name_lower.endswith('yml'):
                content_item_file_path = child_file
                file_type = find_type(content_item_file_path)
                file_type = file_type.value if file_type else file_type
                try:
                    extractor = Extractor(input=content_item_file_path,
                                          file_type=file_type,
                                          output=content_item_dir)
                    extractor.extract_to_package_format()
                except Exception as e:
                    err_msg = f'Error occurred while trying to split the unified YAML "{content_item_file_path}" ' \
                              f'into its component parts.\nError: "{e}"'
                    self.contrib_conversion_errs.append(err_msg)
                if del_unified:
                    os.remove(content_item_file_path)
Example 6
    def merge_and_extract_existing_file(self, custom_content_object: dict) -> None:
        """
        "Smart" merges old files of type integration/script (existing in the output pack)
        :param custom_content_object: The custom content object to merge into the pack
        :return: None
        """
        file_path: str = custom_content_object['path']
        file_name: str = custom_content_object['name']
        file_type: str = custom_content_object['type']
        file_entity: str = custom_content_object['entity']

        file_code_language: str = custom_content_object.get('code_lang', '')
        if not self.verify_code_lang(file_code_language, file_type, file_name):
            return

        base_name: str = self.create_dir_name(file_name)
        temp_dir = mkdtemp()

        extractor = Extractor(input=file_path, output=temp_dir, file_type=file_type, base_name=base_name,
                              no_logging=not self.log_verbose, no_pipenv=True, no_readme=True,
                              no_auto_create_dir=True)
        extractor.extract_to_package_format()

        extracted_file_paths: list = get_child_files(temp_dir)
        corresponding_pack_object: dict = self.get_corresponding_pack_content_object(custom_content_object)

        for ex_file_path in extracted_file_paths:
            ex_file_ending: str = retrieve_file_ending(ex_file_path)
            ex_file_detail: str = self.get_extracted_file_detail(ex_file_ending)
            # Get the file name to search for in the pack object (integration/script contains several files of the
            # same type. For example: integration's py code and integration's unit tests code)
            searched_basename: str = self.get_searched_basename(file_name, ex_file_ending, ex_file_detail)
            corresponding_pack_file_object: dict = self.get_corresponding_pack_file_object(searched_basename,
                                                                                           corresponding_pack_object)
            if not corresponding_pack_file_object:
                corresponding_pack_file_path: str = os.path.join(self.output_pack_path, file_entity,
                                                                 self.create_dir_name(file_name), searched_basename)
            else:
                corresponding_pack_file_path = corresponding_pack_file_object['path']
            # "Smart" merge is used only for yml files (py, png & md files are simply moved)
            if ex_file_ending == 'yml':
                # Add the fields (deleted by Demisto) from the old yml/json file to the custom content file.
                self.update_data(ex_file_path, corresponding_pack_file_path, ex_file_ending)
            try:
                shutil.move(src=ex_file_path, dst=corresponding_pack_file_path)
            except shutil.Error as e:
                print_color(e, LOG_COLORS.RED)
                raise
            self.format_file(corresponding_pack_file_path, ex_file_ending)

        try:
            shutil.rmtree(temp_dir, ignore_errors=True)
        except shutil.Error as e:
            print_color(e, LOG_COLORS.RED)
            raise

        self.num_merged_files += 1
        self.log_finished_file('Merged', file_name, file_entity[:-1])
Example 7
    def directory_uploader(self, path: str):
        """Uploads directories by path

        Args:
            path (str): Path for directory to upload.
        """
        if is_path_of_integration_directory(path):
            list_integrations = get_child_directories(path)
            for integration in list_integrations:
                self.integration_uploader(integration)

        elif is_path_of_script_directory(path):
            list_script = get_child_directories(path)
            for script in list_script:
                self.script_uploader(script)

        elif is_path_of_playbook_directory(
                path) or is_path_of_test_playbook_directory(path):
            list_playbooks = get_child_files(path)
            for playbook in list_playbooks:
                if playbook.endswith('.yml'):
                    self.playbook_uploader(playbook)

        elif is_path_of_incident_field_directory(path):
            list_incident_fields = get_child_files(path)
            for incident_field in list_incident_fields:
                if incident_field.endswith('.json'):
                    self.incident_field_uploader(incident_field)

        elif is_path_of_widget_directory(path):
            list_widgets = get_child_files(path)
            for widget in list_widgets:
                if widget.endswith('.json'):
                    self.widget_uploader(widget)

        elif is_path_of_dashboard_directory(path):
            list_dashboards = get_child_files(path)
            for dashboard in list_dashboards:
                if dashboard.endswith('.json'):
                    self.dashboard_uploader(dashboard)

        elif is_path_of_layout_directory(path):
            list_layouts = get_child_files(path)
            for layout in list_layouts:
                if layout.endswith('.json'):
                    self.layout_uploader(layout)

        elif is_path_of_incident_type_directory(path):
            list_incident_types = get_child_files(path)
            for incident_type in list_incident_types:
                if incident_type.endswith('.json'):
                    self.incident_type_uploader(incident_type)

        elif is_path_of_classifier_directory(path):
            list_classifiers = get_child_files(path)
            for classifiers in list_classifiers:
                if classifiers.endswith('.json'):
                    self.classifier_uploader(classifiers)
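
Apart from the Integrations and Scripts branches, every branch of `directory_uploader` repeats the same pattern: list the child files, filter by extension, and pass each match to the matching uploader. A hypothetical helper (not part of the original class) that captures the pattern:

    from typing import Callable

    def upload_files_with_suffix(path: str, suffix: str,
                                 uploader: Callable[[str], None]) -> None:
        # Sketch: upload every child file of `path` whose name ends with `suffix`.
        for file_path in get_child_files(path):
            if file_path.endswith(suffix):
                uploader(file_path)

    # For example, the widget branch above would reduce to:
    # upload_files_with_suffix(path, '.json', self.widget_uploader)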
Example 8
 def get_custom_content_objects(self) -> List[dict]:
     """
     Creates a list of all custom content objects
     :return: The list of all custom content objects
     """
     custom_content_file_paths: list = get_child_files(self.custom_content_temp_dir)
     custom_content_objects: List = list()
     for file_path in custom_content_file_paths:
         custom_content_object: Dict = self.build_custom_content_object(file_path)
         if custom_content_object['type']:
             # If the custom content object's type is empty, the file isn't a supported content entity
             custom_content_objects.append(custom_content_object)
     return custom_content_objects
Example 9
 def build_pack_content(self) -> None:
     """
     Build a data structure, the pack content, that holds basic data for each content entity within the given output pack.
     For example, see the PACK_CONTENT variable in downloader_test.py.
     """
     for content_entity_path in get_child_directories(self.output_pack_path):
         raw_content_entity: str = os.path.basename(os.path.normpath(content_entity_path))
         content_entity: str = raw_content_entity
         if content_entity in (INTEGRATIONS_DIR, SCRIPTS_DIR):
             # If entity is of type integration/script it will have dirs, otherwise files
             entity_instances_paths: list = get_child_directories(content_entity_path)
         else:
             entity_instances_paths = get_child_files(content_entity_path)
         for entity_instance_path in entity_instances_paths:
             content_object: dict = self.build_pack_content_object(content_entity, entity_instance_path)
             if content_object:
                 self.pack_content[content_entity].append(content_object)
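
The resulting `self.pack_content` maps each content entity directory to the list of objects built for it. A hypothetical snapshot (the inner fields depend on `build_pack_content_object` and are only illustrative):

    pack_content = {
        'Integrations': [
            {'name': 'MyIntegration', 'path': 'Packs/MyPack/Integrations/MyIntegration'},
        ],
        'Scripts': [],
        'Playbooks': [
            {'name': 'MyPlaybook', 'path': 'Packs/MyPack/Playbooks/playbook-MyPlaybook.yml'},
        ],
    }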
Example 10
    def test_merge_and_extract_new_file(self, custom_content_object, raw_files):
        env_guard = EnvironmentGuardian()
        temp_dir = mkdtemp()
        entity = custom_content_object['entity']
        downloader = Downloader(output=temp_dir, input='')
        basename = downloader.create_dir_name(custom_content_object['name'])
        output_entity_dir_path = f'{temp_dir}/{entity}'
        os.mkdir(output_entity_dir_path)
        output_dir_path = f'{output_entity_dir_path}/{basename}'
        os.mkdir(output_dir_path)
        files = [file.replace('odp', output_dir_path).replace('bn', basename) for file in raw_files]

        downloader.merge_and_extract_new_file(custom_content_object)
        output_files = get_child_files(output_dir_path)
        test_answer = sorted(output_files) == sorted(files)

        env_guard.restore_environment('test_merge_and_extract_new_file', temp_dir)
        assert test_answer
Example 11
 def get_system_content_objects(self) -> List[dict]:
     """
     Creates a list of all system content objects
     :return: The list of all system content objects
     """
     system_content_file_paths: list = get_child_files(
         self.system_content_temp_dir)
     system_content_objects: List = list()
     for file_path in system_content_file_paths:
         try:
             system_content_object: Dict = self.build_custom_content_object(
                 file_path)
             system_content_objects.append(system_content_object)
         # Do not add the file to system_content_objects if it has an invalid format
         except ValueError as e:
             print_color(f"Error when loading {file_path}, skipping",
                         LOG_COLORS.RED)
             print_color(f"{e}", LOG_COLORS.RED)
     return system_content_objects
Example 12
 def get_custom_content_objects(self) -> List[dict]:
     """
     Creates a list of all custom content objects
     :return: The list of all custom content objects
     """
     custom_content_file_paths: list = get_child_files(
         self.custom_content_temp_dir)
     custom_content_objects: List = list()
     for file_path in custom_content_file_paths:
         try:
             custom_content_object: Dict = self.build_custom_content_object(
                 file_path)
             if custom_content_object['type']:
                 # If the custom content object's type is empty, the file isn't a supported content entity
                 custom_content_objects.append(custom_content_object)
         # Do not add file to custom_content_objects if it has an invalid format
         except ValueError as e:
             print_color(f"Error when loading {file_path}, skipping",
                         LOG_COLORS.RED)
             print_color(f"{e}", LOG_COLORS.RED)
     return custom_content_objects
Example 13
    def directory_uploader(self, path: str):
        """Uploads directories by path

        Args:
            path (str): Path for directory to upload.
        """
        if is_path_of_integration_directory(path):
            # Upload unified integration files
            list_unified_integrations = get_child_files(path)
            for unified_integration in list_unified_integrations:
                file_type = find_type(unified_integration)
                if file_type == FileType.INTEGRATION:
                    self.integration_uploader(unified_integration)
            # Upload split integration packages
            list_integrations = get_child_directories(path)
            for integration in list_integrations:
                self.integration_uploader(integration)

        elif is_path_of_script_directory(path):
            # Upload unified script files
            list_unified_scripts = get_child_files(path)
            for unified_script in list_unified_scripts:
                file_type = find_type(unified_script)
                if file_type in (FileType.SCRIPT, FileType.TEST_SCRIPT):
                    self.script_uploader(unified_script)
            # Upload split script packages
            list_script = get_child_directories(path)
            for script in list_script:
                self.script_uploader(script)

        elif is_path_of_playbook_directory(
                path) or is_path_of_test_playbook_directory(path):
            list_playbooks = get_child_files(path)
            for playbook in list_playbooks:
                if playbook.endswith('.yml'):
                    self.playbook_uploader(playbook)

        elif is_path_of_incident_field_directory(path):
            list_incident_fields = get_child_files(path)
            for incident_field in list_incident_fields:
                if incident_field.endswith('.json'):
                    self.incident_field_uploader(incident_field)

        elif is_path_of_widget_directory(path):
            list_widgets = get_child_files(path)
            for widget in list_widgets:
                if widget.endswith('.json'):
                    self.widget_uploader(widget)

        elif is_path_of_dashboard_directory(path):
            list_dashboards = get_child_files(path)
            for dashboard in list_dashboards:
                if dashboard.endswith('.json'):
                    self.dashboard_uploader(dashboard)

        elif is_path_of_layout_directory(path):
            list_layouts = get_child_files(path)
            for layout in list_layouts:
                if layout.endswith('.json'):
                    self.layout_uploader(layout)

        elif is_path_of_incident_type_directory(path):
            list_incident_types = get_child_files(path)
            for incident_type in list_incident_types:
                if incident_type.endswith('.json'):
                    self.incident_type_uploader(incident_type)

        elif is_path_of_classifier_directory(path):
            list_classifiers = get_child_files(path)
            for classifiers in list_classifiers:
                if classifiers.endswith('.json'):
                    self.classifier_uploader(classifiers)
Example 14
    def content_item_to_package_format(
            self,
            content_item_dir: str,
            del_unified: bool = True,
            source_mapping: Optional[Dict[str, Dict[str, str]]] = None):
        """
        Iterate over the YAML files in a directory and create packages (a containing directory and
        component files) from the YAMLs of integrations and scripts

        Args:
            content_item_dir (str): Path to the directory containing the content item YAML file(s)
            del_unified (bool): Whether to delete the unified yaml the package was extracted from
            source_mapping (Dict[str, Dict[str, str]], optional): Can be used when updating an existing pack and
                the package directory of a content item is not what would ordinarily be set by the
                `demisto-sdk` `split` command. Sample value would be,
                `{'integration-AbuseIPDB.yml': {'containing_dir_name': 'AbuseDB', 'base_name': 'AbuseDB'}}`
                - the split command would create a containing directory of `AbuseIPDB` for the file
                `integration-AbuseIPDB.yml` and we need the containing directory of the package to match
                what already exists in the repo.
        """
        child_files = get_child_files(content_item_dir)
        for child_file in child_files:
            cf_name_lower = os.path.basename(child_file).lower()
            if cf_name_lower.startswith(
                (SCRIPT, AUTOMATION,
                 INTEGRATION)) and cf_name_lower.endswith('yml'):
                content_item_file_path = child_file
                file_type = find_type(content_item_file_path)
                file_type = file_type.value if file_type else file_type
                try:
                    child_file_name = os.path.basename(child_file)
                    if source_mapping and child_file_name in source_mapping:
                        child_file_mapping = source_mapping.get(
                            child_file_name, {})
                        base_name = child_file_mapping.get('base_name', '')
                        containing_dir_name = child_file_mapping.get(
                            'containing_dir_name', '')
                        # for legacy unified yamls in the repo, their containing directory will be that of their
                        # entity type directly instead of the typical package format. For those cases, we need the
                        # extractor to auto create the containing directory. An example would be -
                        # 'content/Packs/AbuseDB/Scripts/script-AbuseIPDBPopulateIndicators.yml'
                        autocreate_dir = containing_dir_name == ENTITY_TYPE_TO_DIR.get(
                            file_type, '')
                        output_dir = os.path.join(
                            self.pack_dir_path,
                            ENTITY_TYPE_TO_DIR.get(file_type, ''))
                        if not autocreate_dir:
                            output_dir = os.path.join(output_dir,
                                                      containing_dir_name)
                        os.makedirs(output_dir, exist_ok=True)
                        extractor = YmlSplitter(
                            input=content_item_file_path,
                            file_type=file_type,
                            output=output_dir,
                            no_readme=True,
                            base_name=base_name,
                            no_auto_create_dir=(not autocreate_dir),
                            no_pipenv=self.no_pipenv)

                    else:
                        extractor = YmlSplitter(input=content_item_file_path,
                                                file_type=file_type,
                                                output=content_item_dir,
                                                no_pipenv=self.no_pipenv)
                    extractor.extract_to_package_format()
                except Exception as e:
                    err_msg = f'Error occurred while trying to split the unified YAML "{content_item_file_path}" ' \
                              f'into its component parts.\nError: "{e}"'
                    self.contrib_conversion_errs.append(err_msg)
                finally:
                    output_path = extractor.get_output_path()
                    if self.create_new:
                        # Moving the unified file to its package.
                        shutil.move(content_item_file_path, output_path)
                    if del_unified:
                        if os.path.exists(content_item_file_path):
                            os.remove(content_item_file_path)
                        moved_unified_dst = os.path.join(
                            output_path, child_file_name)
                        if os.path.exists(moved_unified_dst):
                            os.remove(moved_unified_dst)
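
A hypothetical call that exercises the `source_mapping` branch, reusing the sample mapping from the docstring (`converter` stands in for whatever object defines this method):

    converter.content_item_to_package_format(
        content_item_dir='Packs/AbuseDB/Integrations',   # illustrative path
        del_unified=True,
        source_mapping={
            'integration-AbuseIPDB.yml': {
                'containing_dir_name': 'AbuseDB',
                'base_name': 'AbuseDB',
            },
        },
    )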
Example 15
    def copy_packs_content_to_packs_bundle(self, packs):
        """
        Copy the content in packs to the bundle that gets zipped into 'content_packs.zip'. Preserves the directory
        structure, except that packages inside a pack's "Integrations" or "Scripts" directory are flattened. Adds file
        prefixes according to how the server expects to ingest the files, e.g. 'integration-' is prepended to
        integration yml filenames, 'script-' is prepended to script yml filenames, and so on.
        """
        for pack in packs:
            pack_name = os.path.basename(pack)
            if pack_name in self.packs_to_skip:
                continue
            pack_dst = os.path.join(self.packs_bundle, pack_name)
            os.mkdir(pack_dst)
            pack_dirs = get_child_directories(pack)
            pack_files = get_child_files(pack)
            # copy first level pack files over
            for file_path in pack_files:
                shutil.copy(
                    file_path,
                    os.path.join(pack_dst, os.path.basename(file_path)))

            # handle content directories in the pack
            for content_dir in pack_dirs:
                dir_name = os.path.basename(content_dir)
                dest_dir = os.path.join(pack_dst, dir_name)
                os.mkdir(dest_dir)
                if dir_name in DIR_TO_PREFIX:
                    packages_dirs = get_child_directories(content_dir)

                    if packages_dirs:  # split yml files directories
                        for package_dir in packages_dirs:
                            ymls, _ = get_yml_paths_in_dir(package_dir,
                                                           error_msg='')
                            if not ymls or (len(ymls) == 1 and
                                            ymls[0].endswith('_unified.yml')):
                                msg = f'Skipping package: {package_dir} -'
                                if not ymls:
                                    print_warning(
                                        '{} No yml files found in the package directory'
                                        .format(msg))
                                else:
                                    print_warning(
                                        '{} Only unified yml found in the package directory'
                                        .format(msg))
                                continue
                            unifier = Unifier(package_dir, dir_name, dest_dir)

                            if parse_version(
                                    unifier.yml_data.get(
                                        'toversion',
                                        '99.99.99')) >= parse_version('6.0.0'):
                                new_file_paths = unifier.merge_script_package_to_yml(
                                    file_name_suffix=self.file_name_suffix)
                                for new_file_path in new_file_paths:
                                    self.add_from_version_to_yml(new_file_path)

                    non_split_yml_files = [
                        f for f in os.listdir(content_dir)
                        if os.path.isfile(os.path.join(content_dir, f)) and (
                            fnmatch.fnmatch(f, 'integration-*.yml')
                            or fnmatch.fnmatch(f, 'script-*.yml'))
                    ]

                    if non_split_yml_files:  # old format non split yml files
                        for yml_file in non_split_yml_files:
                            new_file_path = self.add_suffix_to_file_path(
                                os.path.join(dest_dir, yml_file))
                            shutil.copyfile(
                                os.path.join(content_dir, yml_file),
                                new_file_path)
                            self.add_from_version_to_yml(new_file_path)

                else:
                    self.copy_dir_files(content_dir,
                                        dest_dir,
                                        is_legacy_bundle=False)
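
To make the prefixing behaviour described in the docstring concrete, here is a hypothetical before/after for one flattened package (the real DIR_TO_PREFIX mapping lives in the SDK's constants; the entries below only illustrate the naming convention):

    # Assumed shape of the mapping used above (illustrative, not the full constant):
    DIR_TO_PREFIX = {'Integrations': 'integration', 'Scripts': 'script'}

    # Packs/MyPack/Integrations/MyIntegration/MyIntegration.yml
    #   -> <bundle>/MyPack/Integrations/integration-MyIntegration.yml
    # Packs/MyPack/Integrations/MyIntegration/CHANGELOG.md
    #   -> <bundle>/MyPack/Integrations/integration-MyIntegration_CHANGELOG.md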