def convert_contribution_to_pack(contrib_converter: ContributionConverter) -> None:
    """Create or update a pack in the content repo from a contribution zipfile.

    Args:
        contrib_converter (ContributionConverter): Contribution converter object
            driving the conversion (holds the zip path and pack directory).
    """
    # pack_metadata.json and the base pack files are generated only when creating a new pack
    if contrib_converter.create_new:
        if contrib_converter.contribution:
            # read metadata.json out of the contribution zip and materialize pack_metadata.json
            with zipfile.ZipFile(contrib_converter.contribution) as contribution_zip:
                with contribution_zip.open('metadata.json') as metadata_fp:
                    pack_metadata = json.loads(metadata_fp.read())
                    contrib_converter.create_metadata_file(pack_metadata)
        # rebind the module-level helper onto this instance so it replaces the class method
        contrib_converter.create_pack_base_files = types.MethodType(
            _create_pack_base_files, contrib_converter)
        contrib_converter.create_pack_base_files()
    # unpack the contribution zip into the destination pack directory
    contrib_converter.unpack_contribution_to_dst_pack_directory()
    # convert every unpacked directory into proper pack contents
    for unpacked_dir in get_child_directories(contrib_converter.pack_dir_path):
        contrib_converter.convert_contribution_dir_to_pack_contents(unpacked_dir)
    # split unified Scripts/Integrations content items into package (directory) format
    for subdirectory in get_child_directories(contrib_converter.pack_dir_path):
        if os.path.basename(subdirectory) in {SCRIPTS_DIR, INTEGRATIONS_DIR}:
            contrib_converter.content_item_to_package_format = types.MethodType(
                content_item_to_package_format, contrib_converter)
            contrib_converter.content_item_to_package_format(
                subdirectory, del_unified=True, source_mapping=None
            )
def prepare_single_content_item_for_validation(filename: str, data: bytes, tmp_directory: str) -> str:
    """Write a single unified yaml content item into a temporary pack and extract it.

    NOTE(review): this definition is shadowed by a later definition of the same
    name in this file and is therefore dead code — confirm and remove.

    Args:
        filename (str): Name of the content item file (e.g. "integration-Foo.yml").
        data (bytes): Raw file contents of the content item.
        tmp_directory (str): Temporary directory in which the pack is staged.

    Returns:
        str: Path of the package directory produced by the extraction.
    """
    content = Content(tmp_directory)
    pack_name = 'TmpPack'
    pack_dir = content.path / 'Packs' / pack_name
    # stage a minimal pack_metadata.json in the temporary pack
    converter = ContributionConverter(name=pack_name, base_dir=tmp_directory, pack_dir_name=pack_name)
    converter.create_metadata_file({'description': 'Temporary Pack', 'author': 'xsoar'})
    # the filename prefix (everything before the last '-') selects the entity directory
    entity_prefix = '-'.join(filename.split('-')[:-1])
    containing_dir = pack_dir / ENTITY_TYPE_TO_DIR.get(entity_prefix, 'Integrations')
    containing_dir.mkdir(exist_ok=True)
    # round-trip through the yaml handler to normalize formatting
    parsed = yaml.load(data.decode())
    yaml_buffer = io.StringIO()
    yaml.dump(parsed, yaml_buffer)
    # write the normalized yaml content item to the file system
    file_path = containing_dir / filename
    file_path.write_text(yaml_buffer.getvalue())
    detected_type = find_type(str(file_path))
    detected_type = detected_type.value if detected_type else detected_type
    extractor = Extractor(
        input=str(file_path), file_type=detected_type, output=containing_dir,
        no_logging=True, no_pipenv=True)
    # validate the resulting package files, ergo set path_to_validate to the package
    # directory that results from extracting the unified yaml to a package format
    extractor.extract_to_package_format()
    return extractor.get_output_path()
def prepare_single_content_item_for_validation(
        filename: str, data: bytes, tmp_directory: str) -> Tuple[str, Dict]:
    """Stage a single content item in a temporary pack and return the path to validate.

    JSON items and (test) playbooks are validated as-is; yaml integrations/scripts are
    extracted to package format first.

    Args:
        filename (str): Name of the content item file (e.g. "integration-Foo.yml").
        data (bytes): Raw file contents of the content item.
        tmp_directory (str): Temporary directory in which the pack is staged.

    Returns:
        Tuple[str, Dict]: The path to validate and a mapping from extracted code file
            path to the number of lines inserted at the start of the code (empty for
            items that are not extracted).
    """
    content = Content(tmp_directory)
    pack_name = 'TmpPack'
    pack_dir = content.path / 'Packs' / pack_name
    # stage a minimal pack_metadata.json in the temporary pack
    converter = ContributionConverter(name=pack_name, base_dir=tmp_directory, pack_dir_name=pack_name)
    converter.create_metadata_file({
        'description': 'Temporary Pack',
        'author': 'xsoar'
    })
    # the filename prefix (everything before the last '-') selects the entity directory
    entity_prefix = '-'.join(filename.split('-')[:-1])
    containing_dir = pack_dir / ENTITY_TYPE_TO_DIR.get(entity_prefix, 'Integrations')
    containing_dir.mkdir(exist_ok=True)
    is_json = filename.casefold().endswith('.json')
    raw_text = data.decode()
    # round-trip through the matching parser to normalize formatting
    if is_json:
        normalized = json.dumps(json.loads(raw_text))
    else:
        yaml_buffer = io.StringIO()
        yaml.dump(yaml.load(raw_text), yaml_buffer)
        normalized = yaml_buffer.getvalue()
    # write the normalized content item to the file system
    file_path = containing_dir / filename
    file_path.write_text(normalized)
    detected_type = find_type(str(file_path))
    detected_type = detected_type.value if detected_type else detected_type
    # JSON items and playbooks are validated directly — no extraction, no row offsets
    if is_json or detected_type in (FileType.PLAYBOOK.value, FileType.TEST_PLAYBOOK.value):
        return str(file_path), {}
    extractor = Extractor(
        input=str(file_path), file_type=detected_type, output=containing_dir,
        no_logging=True, no_pipenv=True, no_basic_fmt=True)
    # validate the resulting package files, ergo set path_to_validate to the package
    # directory that results from extracting the unified yaml to a package format
    extractor.extract_to_package_format()
    code_fp_to_row_offset = {
        get_extracted_code_filepath(extractor): extractor.lines_inserted_at_code_start
    }
    return extractor.get_output_path(), code_fp_to_row_offset