def test_update_index_folder_new_version(self, mocker):
    """
    Scenario: Update the bucket index when a pack is updated (new version)

    Given
    - Pack exists in the index folder
    - Pack has a new version

    When
    - Updating the bucket index

    Then
    - Ensure new metadata files are created for the new version
    - Ensure previous metadata files are not deleted
    - Ensure other files in the index are removed and replaced
    """
    from Tests.Marketplace import upload_packs
    import shutil
    import os

    # Existing versioned metadata files already present in the index.
    mocker.patch('glob.glob', return_value=['Index/HelloWorld/metadata-1.0.1.json',
                                            'Index/HelloWorld/metadata-1.0.0.json',
                                            'Index/HelloWorld/metadata-2.0.0.json'])
    mocker.patch('os.listdir', return_value=['HelloWorld'])
    mocker.patch('os.path.isdir', return_value=True)
    mocker.patch('os.remove')
    mocker.patch('shutil.copy')
    mocker.patch('os.path.exists')

    pack_dirs = scan_dir([('HelloWorld/metadata.json', 'metadata.json'),
                          ('HelloWorld/changelog.json', 'changelog.json'),
                          ('HelloWorld/README.md', 'README.md')])
    index_dirs = scan_dir([('Index/HelloWorld/metadata-1.0.1.json', 'metadata-1.0.1.json'),
                           ('Index/HelloWorld/metadata-1.0.0.json', 'metadata-1.0.0.json'),
                           ('Index/HelloWorld/metadata-2.0.0.json', 'metadata-2.0.0.json'),
                           ('Index/HelloWorld/metadata.json', 'metadata.json'),
                           ('Index/HelloWorld/changelog.json', 'changelog.json'),
                           ('Index/HelloWorld/README.md', 'README.md')])
    # First scandir call enumerates the existing index entry, the second the pack dir.
    mocker.patch('os.scandir', side_effect=[index_dirs, pack_dirs])

    upload_packs.update_index_folder('Index', 'HelloWorld', 'HelloWorld', '2.0.1')

    expected_remove_args = ['Index/HelloWorld/metadata.json',
                            'Index/HelloWorld/changelog.json',
                            'Index/HelloWorld/README.md']
    expected_copy_args = [('HelloWorld/metadata.json', 'Index/HelloWorld'),
                          ('HelloWorld/metadata.json', 'Index/HelloWorld/metadata-2.0.1.json'),
                          ('HelloWorld/changelog.json', 'Index/HelloWorld'),
                          ('HelloWorld/README.md', 'Index/HelloWorld')]

    assert os.remove.call_count == 3
    assert shutil.copy.call_count == 4
    for removed in os.remove.call_args_list:
        assert removed[0][0] in expected_remove_args
    for copied in shutil.copy.call_args_list:
        assert copied[0] in expected_copy_args
def add_private_packs_to_index(index_folder_path: str, private_index_path: str):
    """
    Add the private packs to the index folder.

    Args:
        index_folder_path: The index folder path.
        private_index_path: The path for the index of the private packs.
    """
    for entry in os.scandir(private_index_path):
        # Skip anything that is not a pack directory (e.g. stray files).
        if not os.path.isdir(entry.path):
            continue
        update_index_folder(index_folder_path, entry.name, entry.path)
def test_update_index_folder_new_pack(self, mocker):
    """
    Scenario: Update the bucket index when a new pack is created

    Given
    - Pack does not in the index folder

    When
    - Updating the bucket index

    Then
    - Ensure new metadata files are created for the new pack
    - Ensure other files in the index are copied
    """
    from Tests.Marketplace import upload_packs
    import shutil
    import os

    # The pack has no presence in the index yet: no versioned metadata, no dir listing.
    mocker.patch('glob.glob', return_value=[])
    mocker.patch('os.listdir', return_value=[])
    mocker.patch('os.remove')
    mocker.patch('shutil.copy')
    mocker.patch('os.path.exists')

    pack_dirs = scan_dir([('HelloWorld/metadata.json', 'metadata.json'),
                          ('HelloWorld/changelog.json', 'changelog.json'),
                          ('HelloWorld/README.md', 'README.md')])
    mocker.patch('os.scandir', return_value=pack_dirs)

    upload_packs.update_index_folder('Index', 'HelloWorld', 'HelloWorld', '1.0.0')

    expected_copy_args = [('HelloWorld/metadata.json', 'Index/HelloWorld'),
                          ('HelloWorld/metadata.json', 'Index/HelloWorld/metadata-1.0.0.json'),
                          ('HelloWorld/changelog.json', 'Index/HelloWorld'),
                          ('HelloWorld/README.md', 'Index/HelloWorld')]

    # Nothing pre-existing, so nothing should be removed; all pack files are copied.
    assert os.remove.call_count == 0
    assert shutil.copy.call_count == 4
    for copied in shutil.copy.call_args_list:
        assert copied[0] in expected_copy_args
def create_and_upload_marketplace_pack(upload_config: Any, pack: Any, storage_bucket: Any, index_folder_path: str,
                                       packs_dependencies_mapping: dict, private_bucket_name: str,
                                       private_storage_bucket: bool = None, content_repo: bool = None,
                                       current_commit_hash: str = '', remote_previous_commit_hash: str = '',
                                       packs_statistic_df: Any = None) \
        -> Any:
    """
    The main logic flow for the create and upload process. Acts as a decision tree while consistently
    checking the status of the progress being made.

    Each stage sets a terminal PackStatus name (a string) on `pack.status` and calls `pack.cleanup()`
    before returning early on failure; only a fully successful run ends with PackStatus.SUCCESS.

    :param upload_config: Configuration for the script as handled by the Option Handler.
    :param pack: Pack object.
    :param storage_bucket: Bucket the changes are being uploaded to.
    :param index_folder_path: Path to the index folder.
    :param packs_dependencies_mapping: Used by format_metadata to add dependencies to the metadata file.
    :param private_bucket_name: Name of the private bucket, used to build the pack's bucket URL.
    :param private_storage_bucket: Bucket where the private packs are uploaded.
    :param content_repo: The main content repository. demisto/content
    :param current_commit_hash: Current commit hash for the run. Used in the pack metadata file.
    :param remote_previous_commit_hash: Previous commit hash. Used for comparison.
    :param packs_statistic_df: Dataframe object containing current pack analytics.
    :return: Updated pack.status value.
    """
    build_number = upload_config.ci_build_number
    remove_test_playbooks = upload_config.remove_test_playbooks
    signature_key = upload_config.key_string
    extract_destination_path = upload_config.extract_path
    override_all_packs = upload_config.override_all_packs
    enc_key = upload_config.encryption_key
    packs_artifacts_dir = upload_config.artifacts_path
    private_artifacts_dir = upload_config.private_artifacts
    is_infra_run = upload_config.is_infra_run
    secondary_enc_key = upload_config.secondary_encryption_key
    landing_page_sections = load_json(LANDING_PAGE_SECTIONS_PATH)

    # Infra runs are treated as "not modified"; any other run re-processes the pack.
    pack_was_modified = not is_infra_run

    task_status, user_metadata = pack.load_user_metadata()
    if not task_status:
        pack.status = PackStatus.FAILED_LOADING_USER_METADATA.name
        pack.cleanup()
        return

    task_status, pack_content_items = pack.collect_content_items()
    if not task_status:
        pack.status = PackStatus.FAILED_COLLECT_ITEMS.name
        pack.cleanup()
        return

    task_status, integration_images = pack.upload_integration_images(storage_bucket)
    if not task_status:
        pack.status = PackStatus.FAILED_IMAGES_UPLOAD.name
        pack.cleanup()
        return

    task_status, author_image = pack.upload_author_image(storage_bucket)
    if not task_status:
        pack.status = PackStatus.FAILED_AUTHOR_IMAGE_UPLOAD.name
        pack.cleanup()
        return

    task_status = pack.format_metadata(user_metadata=user_metadata,
                                       pack_content_items=pack_content_items,
                                       integration_images=integration_images,
                                       author_image=author_image,
                                       index_folder_path=index_folder_path,
                                       packs_dependencies_mapping=packs_dependencies_mapping,
                                       build_number=build_number,
                                       commit_hash=current_commit_hash,
                                       packs_statistic_df=packs_statistic_df,
                                       pack_was_modified=pack_was_modified,
                                       landing_page_sections=landing_page_sections)
    if not task_status:
        pack.status = PackStatus.FAILED_METADATA_PARSING.name
        pack.cleanup()
        return

    task_status, not_updated_build = pack.prepare_release_notes(index_folder_path, build_number)
    if not task_status:
        pack.status = PackStatus.FAILED_RELEASE_NOTES.name
        pack.cleanup()
        return

    if not_updated_build:
        pack.status = PackStatus.PACK_IS_NOT_UPDATED_IN_RUNNING_BUILD.name
        pack.cleanup()
        return

    task_status = pack.remove_unwanted_files(remove_test_playbooks)
    if not task_status:
        # BUGFIX: the `.name` suffix was missing here, which assigned the enum member itself
        # instead of its string name as every other failure branch does.
        pack.status = PackStatus.FAILED_REMOVING_PACK_SKIPPED_FOLDERS.name
        pack.cleanup()
        return

    task_status = pack.sign_pack(signature_key)
    if not task_status:
        pack.status = PackStatus.FAILED_SIGNING_PACKS.name
        pack.cleanup()
        return

    task_status, zip_pack_path = pack.zip_pack(extract_destination_path, pack._pack_name, enc_key,
                                               private_artifacts_dir, secondary_enc_key)
    if not task_status:
        pack.status = PackStatus.FAILED_ZIPPING_PACK_ARTIFACTS.name
        pack.cleanup()
        return

    task_status = pack.is_pack_encrypted(zip_pack_path, enc_key)
    if not task_status:
        pack.status = PackStatus.FAILED_DECRYPT_PACK.name
        pack.cleanup()
        return

    # Private packs go to the private bucket when one is supplied.
    bucket_for_uploading = private_storage_bucket if private_storage_bucket else storage_bucket
    (task_status, skipped_pack_uploading, full_pack_path) = \
        pack.upload_to_storage(zip_pack_path, pack.latest_version, bucket_for_uploading,
                               override_all_packs or pack_was_modified,
                               pack_artifacts_path=packs_artifacts_dir,
                               private_content=True)

    if full_pack_path is not None:
        bucket_path = f'https://console.cloud.google.com/storage/browser/{private_bucket_name}/'
        bucket_url = bucket_path + full_pack_path
    else:
        bucket_url = 'Pack was not uploaded.'
    pack.bucket_url = bucket_url

    if not task_status:
        pack.status = PackStatus.FAILED_UPLOADING_PACK.name
        pack.cleanup()
        return

    task_status, exists_in_index = pack.check_if_exists_in_index(index_folder_path)
    if not task_status:
        pack.status = PackStatus.FAILED_SEARCHING_PACK_IN_INDEX.name
        pack.cleanup()
        return

    task_status = pack.prepare_for_index_upload()
    if not task_status:
        pack.status = PackStatus.FAILED_PREPARING_INDEX_FOLDER.name
        pack.cleanup()
        return

    task_status = update_index_folder(index_folder_path=index_folder_path, pack_name=pack.name,
                                      pack_path=pack.path, pack_version=pack.latest_version,
                                      hidden_pack=pack.hidden)
    if not task_status:
        pack.status = PackStatus.FAILED_UPDATING_INDEX_FOLDER.name
        pack.cleanup()
        return

    # in case that pack already exist at cloud storage path and in index, don't show that the pack was changed
    if skipped_pack_uploading and exists_in_index:
        pack.status = PackStatus.PACK_ALREADY_EXISTS.name
        pack.cleanup()
        return

    pack.status = PackStatus.SUCCESS.name