def generate_readmes_for_new_content_pack(self, is_contribution=False):
    """Generate README files for every content item in a new content pack.

    Walks the pack directory tree: for Integrations/Scripts, the unified YAML
    inside each package directory is used to generate the README and is then
    deleted (it is superseded by the split package files); for Playbooks,
    every ``playbook*.yml`` file is processed in place.

    Args:
        is_contribution (bool): Whether the pack originates from a
            contribution zip. Forwarded to the per-item README generation.
    """
    for pack_subdir in get_child_directories(self.pack_dir_path):
        basename = os.path.basename(pack_subdir)
        if basename in {SCRIPTS_DIR, INTEGRATIONS_DIR}:
            for directory in get_child_directories(pack_subdir):
                for file in get_child_files(directory):
                    file_name = os.path.basename(file)
                    # unified YAMLs carry one of these prefixes
                    if file_name.startswith(('integration-', 'script-', 'automation-')):
                        unified_file = file
                        self.generate_readme_for_pack_content_item(unified_file, is_contribution)
                        # the unified file is no longer needed once split files exist
                        os.remove(unified_file)
        elif basename == 'Playbooks':
            for file in get_child_files(pack_subdir):
                file_name = os.path.basename(file)
                if file_name.startswith('playbook') and file_name.endswith('.yml'):
                    # Bug fix: forward is_contribution here as well so playbook
                    # READMEs are generated consistently with integrations/scripts
                    # (previously the flag was silently dropped on this path).
                    self.generate_readme_for_pack_content_item(file, is_contribution)
def convert_contribution_to_pack(self, files_to_source_mapping: Dict = None):
    """Create or update a pack in the content repo from the contents of a contribution zipfile.

    Args:
        files_to_source_mapping (Dict[str, Dict[str, str]]): Only used when updating a pack.
            Mapping of a file name as inside the contribution zip to a dictionary containing
            the associated source info for that file, specifically the base name (the name
            used in naming the split component files) and the name of the containing directory.
    """
    try:
        # only create pack_metadata.json and base pack files if creating a new pack
        if self.create_new:
            if self.contribution:
                # create pack metadata file from the zip's bundled metadata.json
                with zipfile.ZipFile(self.contribution) as zipped_contrib:
                    with zipped_contrib.open('metadata.json') as metadata_file:
                        click.echo(
                            f'Pulling relevant information from {metadata_file.name}',
                            color=LOG_COLORS.NATIVE)
                        metadata = json.loads(metadata_file.read())
                        self.create_metadata_file(metadata)
            # create base files
            self.create_pack_base_files()
        # unpack the zip contents into the destination pack directory
        self.unpack_contribution_to_dst_pack_directory()
        # convert each unpacked top-level directory into pack-structured contents
        unpacked_contribution_dirs = get_child_directories(self.pack_dir_path)
        for unpacked_contribution_dir in unpacked_contribution_dirs:
            self.convert_contribution_dir_to_pack_contents(unpacked_contribution_dir)
        # extract unified YAMLs to package (split-file) format; when updating an
        # existing pack (not create_new), the unified files are kept in place
        for pack_subdir in get_child_directories(self.pack_dir_path):
            basename = os.path.basename(pack_subdir)
            if basename in {SCRIPTS_DIR, INTEGRATIONS_DIR}:
                self.content_item_to_package_format(
                    pack_subdir,
                    del_unified=(not self.create_new),
                    source_mapping=files_to_source_mapping)
        if self.create_new:
            self.generate_readmes_for_new_content_pack(is_contribution=True)
        # run 'format' over the converted pack
        self.format_converted_pack()
    except Exception as e:
        # best-effort conversion: report the failure instead of propagating
        click.echo(
            f'Creating a Pack from the contribution zip failed with error: {e}\n {traceback.format_exc()}',
            color=LOG_COLORS.RED)
    finally:
        # surface any per-file conversion errors accumulated along the way
        if self.contrib_conversion_errs:
            click.echo(
                'The following errors occurred while converting unified content YAMLs to package structure:'
            )
            click.echo(
                textwrap.indent('\n'.join(self.contrib_conversion_errs), '\t'))
def copy_packs_content_to_packs_bundle(self, packs):
    """
    Copy content in packs to the bundle that gets zipped to 'content_packs.zip'. Preserves directory structure
    except that packages inside the "Integrations" or "Scripts" directory inside a pack are flattened. Adds file
    prefixes according to how server expects to ingest the files, e.g. 'integration-' is prepended to integration
    yml filenames and 'script-' is prepended to script yml filenames and so on and so forth.
    """
    for pack in packs:
        pack_name = os.path.basename(pack)
        if pack_name in self.packs_to_skip:
            continue
        pack_dst = os.path.join(self.packs_bundle, pack_name)
        os.mkdir(pack_dst)
        pack_dirs = get_child_directories(pack)
        pack_files = get_child_files(pack)
        # copy first level pack files over
        for file_path in pack_files:
            shutil.copy(
                file_path,
                os.path.join(pack_dst, os.path.basename(file_path)))
        # handle content directories in the pack
        for content_dir in pack_dirs:
            dir_name = os.path.basename(content_dir)
            dest_dir = os.path.join(pack_dst, dir_name)
            os.mkdir(dest_dir)
            if dir_name in DIR_TO_PREFIX:
                # directory of nested packages (e.g. Integrations/Scripts):
                # unify each package into a prefixed YAML in dest_dir
                packages_dirs = get_child_directories(content_dir)
                for package_dir in packages_dirs:
                    ymls, _ = get_yml_paths_in_dir(package_dir, error_msg='')
                    # skip packages with nothing to unify (no yml, or only a
                    # previously generated *_unified.yml artifact)
                    if not ymls or (len(ymls) == 1 and ymls[0].endswith('_unified.yml')):
                        msg = 'Skipping package: {} -'.format(package_dir)
                        if not ymls:
                            print_warning(
                                '{} No yml files found in the package directory'.format(msg))
                        else:
                            print_warning(
                                '{} Only unified yml found in the package directory'.format(msg))
                        continue
                    package_dir_name = os.path.basename(package_dir)
                    unifier = Unifier(package_dir, dir_name, dest_dir)
                    unifier.merge_script_package_to_yml()
                    # also copy CHANGELOG markdown files over (should only be one per package)
                    package_files = get_child_files(package_dir)
                    changelog_files = [
                        file_path for file_path in package_files
                        if 'CHANGELOG.md' in file_path
                    ]
                    for md_file_path in changelog_files:
                        md_out_name = '{}-{}_CHANGELOG.md'.format(
                            DIR_TO_PREFIX.get(dir_name), package_dir_name)
                        shutil.copyfile(
                            md_file_path,
                            os.path.join(dest_dir, md_out_name))
            else:
                # flat content directory: copy files over unchanged
                self.copy_dir_files(content_dir, dest_dir)
def directory_uploader(self, path: str):
    """Upload the content items found directly under ``path``.

    The entity type is inferred from the directory path; only the first
    matching entity type is handled.

    Args:
        path (str): Path for directory to upload.
    """
    if is_path_of_integration_directory(path):
        # integrations live one-per-subdirectory
        for integration_dir in get_child_directories(path):
            self.integration_uploader(integration_dir)
    elif is_path_of_script_directory(path):
        for script_dir in get_child_directories(path):
            self.script_uploader(script_dir)
    elif is_path_of_playbook_directory(path) or is_path_of_test_playbook_directory(path):
        for playbook_file in get_child_files(path):
            if playbook_file.endswith('.yml'):
                self.playbook_uploader(playbook_file)
    else:
        # every remaining entity type is a flat directory of JSON files;
        # dispatch to the matching uploader
        json_uploaders = (
            (is_path_of_incident_field_directory, self.incident_field_uploader),
            (is_path_of_widget_directory, self.widget_uploader),
            (is_path_of_dashboard_directory, self.dashboard_uploader),
            (is_path_of_layout_directory, self.layout_uploader),
            (is_path_of_incident_type_directory, self.incident_type_uploader),
            (is_path_of_classifier_directory, self.classifier_uploader),
        )
        for matches, upload in json_uploaders:
            if matches(path):
                for json_file in get_child_files(path):
                    if json_file.endswith('.json'):
                        upload(json_file)
                break
def copy_packs_to_content_bundles(self, packs):
    """
    Copy relevant content (yml and json files) from packs to the appropriate bundle: test playbooks go to the
    bundle zipped into 'content_test.zip' and the rest of the content to the bundle zipped into 'content_new.zip'.
    Adds file prefixes where necessary according to how the server expects to ingest the files.
    """
    for pack_path in packs:
        if os.path.basename(pack_path) in self.packs_to_skip:
            continue
        # each pack directory has its own content subdirs:
        # 'Integrations', 'Scripts', 'TestPlaybooks', 'Layouts' etc.
        for content_subdir in get_child_directories(pack_path):
            subdir_name = os.path.basename(content_subdir)
            if subdir_name == RELEASE_NOTES_DIR:
                continue
            if subdir_name == 'TestPlaybooks':
                self.copy_test_files(content_subdir)
                continue
            # one-level deep content is copied straight into the bundle
            self.copy_dir_files(content_subdir, self.content_bundle)
            if subdir_name in DIR_TO_PREFIX:
                # directory with nested packages that need unifying
                self.create_unifieds_and_copy(content_subdir)
def pack_uploader(self):
    """Upload the pack's entity directories one by one, ordered so that
    dependencies are uploaded before their dependents."""
    child_dirs = get_child_directories(self.path)
    for entity_dir in self._sort_directories_based_on_dependencies(child_dirs):
        self.directory_uploader(entity_dir)
def build_pack_content(self) -> None:
    """
    Build the custom-content data structure: basic data for every content entity inside
    the given output pack. For an example of the result, see the PACK_CONTENT variable
    in downloader_test.py.
    """
    for entity_path in get_child_directories(self.output_pack_path):
        entity_name: str = os.path.basename(os.path.normpath(entity_path))
        # Integrations/Scripts hold one sub-directory per item; all other
        # entity types hold flat files
        if entity_name in (INTEGRATIONS_DIR, SCRIPTS_DIR):
            instance_paths: list = get_child_directories(entity_path)
        else:
            instance_paths = get_child_files(entity_path)
        for instance_path in instance_paths:
            entity_object: dict = self.build_pack_content_object(entity_name, instance_path)
            if entity_object:
                self.pack_content[entity_name].append(entity_object)
def pack_uploader(self, path: str) -> int:
    """Upload every recognized entity directory under ``path``.

    Directories are processed in dependency order; the worst (non-zero)
    status code seen is returned, SUCCESS_RETURN_CODE otherwise.
    """
    status_code = SUCCESS_RETURN_CODE
    ordered_dirs = sort_directories_based_on_dependencies(get_child_directories(path))
    for entity_folder in ordered_dirs:
        # skip directories that are not known content entity dirs
        if os.path.basename(entity_folder.rstrip('/')) not in CONTENT_ENTITIES_DIRS:
            continue
        status_code = self.entity_dir_uploader(entity_folder) or status_code
    return status_code
def directory_uploader(self, path: str):
    """Upload the content items found directly under ``path``.

    The entity type is inferred from the directory path; only the first
    matching entity type is handled. Integration and script directories may
    contain both unified YAML files and split (per-subdirectory) packages.

    Args:
        path (str): Path for directory to upload.
    """
    if is_path_of_integration_directory(path):
        # unified integration YAMLs sit directly under the directory
        for candidate_file in get_child_files(path):
            if find_type(candidate_file) == FileType.INTEGRATION:
                self.integration_uploader(candidate_file)
        # split integrations each live in their own sub-directory
        for integration_dir in get_child_directories(path):
            self.integration_uploader(integration_dir)
    elif is_path_of_script_directory(path):
        # unified script YAMLs sit directly under the directory
        for candidate_file in get_child_files(path):
            if find_type(candidate_file) in (FileType.SCRIPT, FileType.TEST_SCRIPT):
                self.script_uploader(candidate_file)
        # split scripts each live in their own sub-directory
        for script_dir in get_child_directories(path):
            self.script_uploader(script_dir)
    elif is_path_of_playbook_directory(path) or is_path_of_test_playbook_directory(path):
        for playbook_file in get_child_files(path):
            if playbook_file.endswith('.yml'):
                self.playbook_uploader(playbook_file)
    else:
        # every remaining entity type is a flat directory of JSON files;
        # dispatch to the matching uploader
        json_uploaders = (
            (is_path_of_incident_field_directory, self.incident_field_uploader),
            (is_path_of_widget_directory, self.widget_uploader),
            (is_path_of_dashboard_directory, self.dashboard_uploader),
            (is_path_of_layout_directory, self.layout_uploader),
            (is_path_of_incident_type_directory, self.incident_type_uploader),
            (is_path_of_classifier_directory, self.classifier_uploader),
        )
        for matches, upload in json_uploaders:
            if matches(path):
                for json_file in get_child_files(path):
                    if json_file.endswith('.json'):
                        upload(json_file)
                break
def create_content(self, only_packs=False):
    """
    Creates the content artifact zip files "content_test.zip", "content_new.zip", and "content_packs.zip".

    Args:
        only_packs (bool): When True, only the packs bundle ('content_packs.zip') is built and the
            legacy content/test bundles, content-descriptor copy and version bump are skipped.
    """
    if not only_packs:
        # update content_version in commonServerPython
        self.update_content_version(self.content_version)
        branch_name = self.update_branch()
        print(
            f'Updated CommonServerPython with branch {branch_name} and content version {self.content_version}'
        )
    print('Starting to create content artifact...')
    try:
        print('creating dir for bundles...')
        for bundle_dir in [
                self.content_bundle, self.test_bundle, self.packs_bundle
        ]:
            os.mkdir(bundle_dir)
        # NOTE(review): test files are copied even when only_packs is True — confirm intended
        self.copy_test_files()
        # handle copying packs content to bundles for zipping to content_new.zip and content_test.zip
        packs = get_child_directories(PACKS_DIR)
        if not only_packs:
            self.copy_packs_to_content_bundles(packs)
        # handle copying packs content to packs_bundle for zipping to `content_packs.zip`
        self.copy_packs_content_to_packs_bundle(packs)
        if not only_packs:
            print('Copying content descriptor to content and test bundles\n')
            for bundle_dir in [self.content_bundle, self.test_bundle]:
                shutil.copyfile(
                    'content-descriptor.json',
                    os.path.join(bundle_dir, 'content-descriptor.json'))
        # docs go to the packs bundle always; to the content bundle only in full mode
        if only_packs:
            ContentCreator.copy_docs_files(
                content_bundle_path=None,
                packs_bundle_path=self.packs_bundle)
        else:
            ContentCreator.copy_docs_files(
                content_bundle_path=self.content_bundle,
                packs_bundle_path=self.packs_bundle)
        print('\nCompressing bundles...')
        if not only_packs:
            shutil.make_archive(self.content_zip, 'zip', self.content_bundle)
            shutil.make_archive(self.test_zip, 'zip', self.test_bundle)
            self.copy_file_to_artifacts("./Tests/id_set.json")
        shutil.make_archive(self.packs_zip, 'zip', self.packs_bundle)
        self.copy_file_to_artifacts('release-notes.md')
        self.copy_file_to_artifacts('beta-release-notes.md')
        self.copy_file_to_artifacts('packs-release-notes.md')
        print_success(
            f'finished creating the content artifacts at "{os.path.abspath(self.artifacts_path)}"'
        )
    finally:
        # always clean up the temporary bundle directories unless asked to keep them
        if not self.preserve_bundles:
            if os.path.exists(self.content_bundle):
                shutil.rmtree(self.content_bundle)
            if os.path.exists(self.test_bundle):
                shutil.rmtree(self.test_bundle)
            if os.path.exists(self.packs_bundle):
                shutil.rmtree(self.packs_bundle)
def copy_packs_content_to_packs_bundle(self, packs):
    """
    Copy content in packs to the bundle that gets zipped to 'content_packs.zip'. Preserves directory structure
    except that packages inside the "Integrations" or "Scripts" directory inside a pack are flattened. Adds file
    prefixes according to how server expects to ingest the files, e.g. 'integration-' is prepended to integration
    yml filenames and 'script-' is prepended to script yml filenames and so on and so forth.
    """
    for pack in packs:
        pack_name = os.path.basename(pack)
        if pack_name in self.packs_to_skip:
            continue
        pack_dst = os.path.join(self.packs_bundle, pack_name)
        os.mkdir(pack_dst)
        pack_dirs = get_child_directories(pack)
        pack_files = get_child_files(pack)
        # copy first level pack files over
        for file_path in pack_files:
            shutil.copy(
                file_path,
                os.path.join(pack_dst, os.path.basename(file_path)))
        # handle content directories in the pack
        for content_dir in pack_dirs:
            dir_name = os.path.basename(content_dir)
            dest_dir = os.path.join(pack_dst, dir_name)
            os.mkdir(dest_dir)
            if dir_name in DIR_TO_PREFIX:
                packages_dirs = get_child_directories(content_dir)
                if packages_dirs:  # split yml files directories
                    for package_dir in packages_dirs:
                        ymls, _ = get_yml_paths_in_dir(package_dir, error_msg='')
                        # skip packages with nothing to unify (no yml, or only a
                        # previously generated *_unified.yml artifact)
                        if not ymls or (len(ymls) == 1 and ymls[0].endswith('_unified.yml')):
                            msg = f'Skipping package: {package_dir} -'
                            if not ymls:
                                print_warning(
                                    '{} No yml files found in the package directory'.format(msg))
                            else:
                                print_warning(
                                    '{} Only unified yml found in the package directory'.format(msg))
                            continue
                        unifier = Unifier(package_dir, dir_name, dest_dir)
                        # only content still available on server 6.0+ is unified and copied
                        if parse_version(
                                unifier.yml_data.get(
                                    'toversion', '99.99.99')) >= parse_version('6.0.0'):
                            new_file_paths = unifier.merge_script_package_to_yml(
                                file_name_suffix=self.file_name_suffix)
                            for new_file_path in new_file_paths:
                                self.add_from_version_to_yml(new_file_path)
                # old format non split yml files sitting directly in the content dir
                non_split_yml_files = [
                    f for f in os.listdir(content_dir)
                    if os.path.isfile(os.path.join(content_dir, f)) and (
                        fnmatch.fnmatch(f, 'integration-*.yml')
                        or fnmatch.fnmatch(f, 'script-*.yml'))
                ]
                if non_split_yml_files:  # old format non split yml files
                    for yml_file in non_split_yml_files:
                        new_file_path = self.add_suffix_to_file_path(
                            os.path.join(dest_dir, yml_file))
                        shutil.copyfile(
                            os.path.join(content_dir, yml_file), new_file_path)
                        self.add_from_version_to_yml(new_file_path)
            else:
                # flat content directory: copy files over unchanged
                self.copy_dir_files(content_dir, dest_dir, is_legacy_bundle=False)
def convert_contribution_to_pack(self):
    """Create a Pack in the content repo from the contents of a contribution zipfile."""
    try:
        packs_dir = os.path.join(get_content_path(), 'Packs')
        metadata_dict = {}
        # read pack metadata bundled inside the contribution zip
        with zipfile.ZipFile(self.contribution) as zipped_contrib:
            with zipped_contrib.open('metadata.json') as metadata_file:
                click.echo(
                    f'Pulling relevant information from {metadata_file.name}',
                    color=LOG_COLORS.NATIVE)
                metadata = json.loads(metadata_file.read())
                # a name passed on the cmd line should take precedence over one pulled
                # from contribution metadata
                pack_name = self.name or self.format_pack_dir_name(
                    metadata.get('name', 'ContributionPack'))
                # a description passed on the cmd line should take precedence over one pulled
                # from contribution metadata
                metadata_dict[
                    'description'] = self.description or metadata.get(
                        'description')
                metadata_dict['name'] = pack_name
                metadata_dict['author'] = metadata.get('author', '')
                metadata_dict['support'] = metadata.get('support', '')
                metadata_dict['url'] = metadata.get('supportDetails',
                                                    {}).get('url', '')
                metadata_dict['email'] = metadata.get(
                    'supportDetails', {}).get('email', '')
                metadata_dict['categories'] = metadata.get(
                    'categories') if metadata.get('categories') else []
                metadata_dict['tags'] = metadata.get(
                    'tags') if metadata.get('tags') else []
                metadata_dict['useCases'] = metadata.get(
                    'useCases') if metadata.get('useCases') else []
                metadata_dict['keywords'] = metadata.get(
                    'keywords') if metadata.get('keywords') else []
        # de-duplicate the pack name against packs already in the repo:
        # bump a trailing V<digit> or append 'V2'
        while os.path.exists(os.path.join(packs_dir, pack_name)):
            click.echo(
                f'Modifying pack name because pack {pack_name} already exists in the content repo',
                color=LOG_COLORS.NATIVE)
            if len(pack_name) >= 2 and pack_name[-2].lower(
            ) == 'v' and pack_name[-1].isdigit():
                # increment by one
                pack_name = pack_name[:-1] + str(int(pack_name[-1]) + 1)
            else:
                pack_name += 'V2'
            click.echo(f'New pack name is "{pack_name}"',
                       color=LOG_COLORS.NATIVE)
        pack_dir = os.path.join(packs_dir, pack_name)
        os.mkdir(pack_dir)
        shutil.unpack_archive(filename=self.contribution, extract_dir=pack_dir)
        # move zip-level entity dirs to their canonical pack directory names and
        # split unified script/integration YAMLs into package format
        pack_subdirectories = get_child_directories(pack_dir)
        for pack_subdir in pack_subdirectories:
            basename = os.path.basename(pack_subdir)
            if basename in ENTITY_TYPE_TO_DIR:
                dst_name = ENTITY_TYPE_TO_DIR.get(basename)
                src_path = os.path.join(pack_dir, basename)
                dst_path = os.path.join(pack_dir, dst_name)
                content_item_dir = shutil.move(src_path, dst_path)
                if basename in {SCRIPT, AUTOMATION, INTEGRATION}:
                    self.content_item_to_package_format(
                        content_item_dir, del_unified=True)
        # create pack's base files
        self.full_output_path = pack_dir
        self.create_pack_base_files()
        metadata_dict = Initiator.create_metadata(fill_manually=False,
                                                  data=metadata_dict)
        metadata_path = os.path.join(self.full_output_path,
                                     'pack_metadata.json')
        with open(metadata_path, 'w') as pack_metadata_file:
            json.dump(metadata_dict, pack_metadata_file, indent=4)
        # remove metadata.json file (superseded by pack_metadata.json)
        os.remove(os.path.join(pack_dir, 'metadata.json'))
        click.echo(
            f'Executing \'format\' on the restructured contribution zip files at "{pack_dir}"'
        )
        format_manager(input=pack_dir)
    except Exception as e:
        # best-effort conversion: report the failure instead of propagating
        click.echo(
            f'Creating a Pack from the contribution zip failed with error: {e}\n {traceback.format_exc()}',
            color=LOG_COLORS.RED)
    finally:
        # surface any per-file conversion errors accumulated along the way
        if self.contrib_conversion_errs:
            click.echo(
                'The following errors occurred while converting unified content YAMLs to package structure:'
            )
            click.echo(
                textwrap.indent('\n'.join(self.contrib_conversion_errs), '\t'))
def create_content(self):
    """
    Creates the content artifact zip files "content_test.zip", "content_new.zip", and "content_packs.zip".

    Bumps the content version in CommonServerPython, builds the three bundle
    directories, compresses them, and copies release notes and the id_set into
    the artifacts directory. Bundle directories are removed afterwards unless
    ``self.preserve_bundles`` is set.
    """
    # update content_version in commonServerPython
    self.update_content_version(self.content_version)
    branch_name = self.update_branch()
    print(f'Updated CommonServerPython with branch {branch_name} and content version {self.content_version}')
    print('Starting to create content artifact...')
    try:
        print('creating dir for bundles...')
        for bundle_dir in [self.content_bundle, self.test_bundle, self.packs_bundle]:
            os.mkdir(bundle_dir)
        self.add_tools_to_bundle(self.content_bundle)

        for package_dir in DIR_TO_PREFIX:
            # handles nested package directories
            self.create_unifieds_and_copy(package_dir)

        for content_dir in self.content_directories:
            print(f'Copying dir {content_dir} to bundles...')
            self.copy_dir_files(content_dir, self.content_bundle)

        self.copy_test_files()

        # handle copying packs content to bundles for zipping to content_new.zip and content_test.zip
        packs = get_child_directories(PACKS_DIR)
        self.copy_packs_content_to_old_bundles(packs)
        # handle copying packs content to packs_bundle for zipping to `content_packs.zip`
        self.copy_packs_content_to_packs_bundle(packs)

        print('Copying content descriptor to content and test bundles')
        for bundle_dir in [self.content_bundle, self.test_bundle]:
            shutil.copyfile('content-descriptor.json', os.path.join(bundle_dir, 'content-descriptor.json'))

        for doc_file in ('./Documentation/doc-CommonServer.json', './Documentation/doc-howto.json'):
            if os.path.exists(doc_file):
                print(f'copying {doc_file} doc to content bundle')
                shutil.copyfile(doc_file,
                                os.path.join(self.content_bundle, os.path.basename(doc_file)))
            else:
                print_warning(f'{doc_file} was not found and '
                              'therefore was not added to the content bundle')

        print('Compressing bundles...')
        shutil.make_archive(self.content_zip, 'zip', self.content_bundle)
        shutil.make_archive(self.test_zip, 'zip', self.test_bundle)
        shutil.make_archive(self.packs_zip, 'zip', self.packs_bundle)
        shutil.copyfile("./Tests/id_set.json", os.path.join(self.artifacts_path, "id_set.json"))

        if os.path.exists('release-notes.md'):
            print('copying release-notes.md to artifacts directory "{}"'.format(self.artifacts_path))
            shutil.copyfile('release-notes.md', os.path.join(self.artifacts_path, 'release-notes.md'))
        else:
            print_warning('release-notes.md was not found in the content directory and therefore not '
                          'copied over to the artifacts directory')

        if os.path.exists('beta-release-notes.md'):
            print('copying beta-release-notes.md to artifacts directory "{}"'.format(self.artifacts_path))
            # Bug fix: the destination used to be 'release-notes.md', which silently
            # overwrote the regular release notes copied just above.
            shutil.copyfile('beta-release-notes.md',
                            os.path.join(self.artifacts_path, 'beta-release-notes.md'))
        else:
            print_warning('beta-release-notes.md was not found in the content directory and therefore not '
                          'copied over to the artifacts directory')

        print(f'finished creating the content artifacts at "{os.path.abspath(self.artifacts_path)}"')
    finally:
        # always clean up the temporary bundle directories unless asked to keep them
        if not self.preserve_bundles:
            if os.path.exists(self.content_bundle):
                shutil.rmtree(self.content_bundle)
            if os.path.exists(self.test_bundle):
                shutil.rmtree(self.test_bundle)
            if os.path.exists(self.packs_bundle):
                shutil.rmtree(self.packs_bundle)