def main():
    """Entry point for the private-build marketplace packs upload flow.

    Reads CLI options, syncs the marketplace index from the public GCS
    bucket, uploads every extracted pack artifact, refreshes the index,
    core-packs config and id_set.json objects in the target bucket, and
    prints a per-pack status summary.
    """
    install_logging('upload_packs_private.log')
    options = option_handler()
    artifacts_path = options.artifacts_path
    extract_dir = options.extract_path
    bucket_name = options.bucket_name
    private_bucket_name = options.private_bucket_name
    service_account = options.service_account
    requested_packs = options.pack_names
    build_number = options.ci_build_number
    id_set_path = options.id_set_path
    packs_dependencies_mapping = load_json(options.pack_dependencies) if options.pack_dependencies else {}
    storage_base_path = options.storage_base_path
    # A non-empty encryption key is what marks this run as a private build.
    is_private_build = options.encryption_key and options.encryption_key != ''
    landing_page_sections = load_json(LANDING_PAGE_SECTIONS_PATH)

    logging.info(f"Packs artifact path is: {artifacts_path}")
    prepare_test_directories(artifacts_path)

    # Google cloud storage client initialized; resolve both buckets up front.
    storage_client = init_storage_client(service_account)
    storage_bucket = storage_client.bucket(bucket_name)
    private_storage_bucket = storage_client.bucket(private_bucket_name)
    default_storage_bucket = storage_bucket if not is_private_build else private_storage_bucket

    # Download and extract index from the public bucket.
    index_folder_path, index_blob, index_generation = download_and_extract_index(storage_bucket, extract_dir)

    # Content repo client — skipped entirely on private builds.
    if is_private_build:
        current_commit_hash, remote_previous_commit_hash = "", ""
        content_repo = None
    else:
        content_repo = get_content_git_client(CONTENT_ROOT_PATH)
        # NOTE(review): is_private_build=True is passed on the *non*-private
        # branch here; preserved as-is, but worth confirming upstream.
        current_commit_hash, remote_previous_commit_hash = get_recent_commits_data(
            content_repo, index_folder_path, is_bucket_upload_flow=False, is_private_build=True)

    if storage_base_path:
        GCPConfig.STORAGE_BASE_PATH = storage_base_path

    # Detect packs to upload: one Pack per artifact directory that actually
    # exists after extraction.
    pack_names = get_packs_names(requested_packs)
    extract_packs_artifacts(artifacts_path, extract_dir)
    packs = []
    for name in pack_names:
        pack_dir = os.path.join(extract_dir, name)
        if os.path.exists(pack_dir):
            packs.append(Pack(name, pack_dir))

    if not is_private_build:
        check_if_index_is_updated(index_folder_path, content_repo, current_commit_hash,
                                  remote_previous_commit_hash, storage_bucket)

    if private_bucket_name:
        # Add private packs to the index.
        private_packs, private_index_path, private_index_blob = update_index_with_priced_packs(
            private_storage_bucket, extract_dir, index_folder_path, pack_names, is_private_build)
    else:
        # Skipping private packs.
        logging.info("Skipping index update of priced packs")
        private_packs = []

    # Pack statistics come from BigQuery and are only fetched on public builds.
    packs_statistic_df = None
    if not is_private_build:
        bq_client = init_bigquery_client(service_account)
        packs_statistic_df = get_packs_statistics_dataframe(bq_client)

    # Clean index and GCS of non-existing or invalid packs.
    clean_non_existing_packs(index_folder_path, private_packs, default_storage_bucket)

    # Iterate over the packs and upload each to the marketplace bucket.
    for pack in packs:
        create_and_upload_marketplace_pack(
            options, pack, storage_bucket, index_folder_path, packs_dependencies_mapping,
            private_bucket_name, private_storage_bucket=private_storage_bucket,
            content_repo=content_repo, current_commit_hash=current_commit_hash,
            remote_previous_commit_hash=remote_previous_commit_hash,
            packs_statistic_df=packs_statistic_df)

    # Upload core packs json to bucket.
    if should_upload_core_packs(bucket_name):
        upload_core_packs_config(default_storage_bucket, build_number, index_folder_path)

    # Re-upload the refreshed index; private builds first strip public packs.
    if is_private_build:
        delete_public_packs_from_index(index_folder_path)
        upload_index_to_storage(index_folder_path, extract_dir, private_index_blob, build_number,
                                private_packs, current_commit_hash, index_generation, is_private_build,
                                landing_page_sections=landing_page_sections)
    else:
        upload_index_to_storage(index_folder_path, extract_dir, index_blob, build_number,
                                private_packs, current_commit_hash, index_generation,
                                landing_page_sections=landing_page_sections)

    # Upload id_set.json to bucket.
    upload_id_set(default_storage_bucket, id_set_path)

    # Split packs by status and print the summary.
    successful_packs, skipped_packs, failed_packs = get_packs_summary(packs)
    print_packs_summary(successful_packs, skipped_packs, failed_packs)
def main():
    """Entry point for the 'prepare content packs for testing' upload flow.

    Reads CLI options, downloads and extracts the marketplace index, runs the
    full per-pack pipeline (metadata, content items, images, release notes,
    signing, zipping, upload, index update), then re-uploads the index,
    core-packs config and id_set.json, stores per-pack results for the CI
    artifacts, and prints a summary.
    """
    install_logging('Prepare_Content_Packs_For_Testing.log')
    option = option_handler()
    packs_artifacts_path = option.artifacts_path
    extract_destination_path = option.extract_path
    storage_bucket_name = option.bucket_name
    service_account = option.service_account
    target_packs = option.pack_names if option.pack_names else ""
    # Fall back to a random build id when CI did not provide one.
    build_number = option.ci_build_number if option.ci_build_number else str(uuid.uuid4())
    override_all_packs = option.override_all_packs
    signature_key = option.key_string
    id_set_path = option.id_set_path
    packs_dependencies_mapping = load_json(option.pack_dependencies) if option.pack_dependencies else {}
    storage_base_path = option.storage_base_path
    remove_test_playbooks = option.remove_test_playbooks
    is_bucket_upload_flow = option.bucket_upload
    private_bucket_name = option.private_bucket_name
    circle_branch = option.circle_branch
    force_upload = option.force_upload

    # google cloud storage client initialized
    storage_client = init_storage_client(service_account)
    storage_bucket = storage_client.bucket(storage_bucket_name)

    if storage_base_path:
        GCPConfig.STORAGE_BASE_PATH = storage_base_path

    # download and extract index from public bucket
    index_folder_path, index_blob, index_generation = download_and_extract_index(storage_bucket,
                                                                                 extract_destination_path)

    # content repo client initialized
    content_repo = get_content_git_client(CONTENT_ROOT_PATH)
    current_commit_hash, previous_commit_hash = get_recent_commits_data(content_repo, index_folder_path,
                                                                        is_bucket_upload_flow, circle_branch)

    # detect packs to upload
    pack_names = get_packs_names(target_packs, previous_commit_hash)
    extract_packs_artifacts(packs_artifacts_path, extract_destination_path)
    packs_list = [Pack(pack_name, os.path.join(extract_destination_path, pack_name)) for pack_name in pack_names
                  if os.path.exists(os.path.join(extract_destination_path, pack_name))]

    # Use the local already bound above (was option.override_all_packs — same value).
    if not override_all_packs:
        check_if_index_is_updated(index_folder_path, content_repo, current_commit_hash, previous_commit_hash,
                                  storage_bucket)

    # google cloud bigquery client initialized
    bq_client = init_bigquery_client(service_account)
    packs_statistic_df = get_packs_statistics_dataframe(bq_client)

    updated_private_packs_ids = []
    if private_bucket_name:
        # Add private packs to the index
        private_storage_bucket = storage_client.bucket(private_bucket_name)
        private_packs, _, _, updated_private_packs_ids = update_index_with_priced_packs(
            private_storage_bucket, extract_destination_path, index_folder_path, pack_names)
    else:
        # skipping private packs
        logging.debug("Skipping index update of priced packs")
        private_packs = []

    # clean index and gcs from non existing or invalid packs
    clean_non_existing_packs(index_folder_path, private_packs, storage_bucket)

    # starting iteration over packs; any failed step records a status and
    # skips the pack (pack.cleanup() removes its working files).
    for pack in packs_list:
        task_status, user_metadata = pack.load_user_metadata()
        if not task_status:
            # BUGFIX: was PackStatus.FAILED_LOADING_USER_METADATA.value — every
            # other failure path stores the enum member *name*; use .name here too
            # so downstream status grouping sees a consistent representation.
            pack.status = PackStatus.FAILED_LOADING_USER_METADATA.name
            pack.cleanup()
            continue

        task_status, pack_content_items = pack.collect_content_items()
        if not task_status:
            pack.status = PackStatus.FAILED_COLLECT_ITEMS.name
            pack.cleanup()
            continue

        task_status, integration_images = pack.upload_integration_images(storage_bucket)
        if not task_status:
            pack.status = PackStatus.FAILED_IMAGES_UPLOAD.name
            pack.cleanup()
            continue

        task_status, author_image = pack.upload_author_image(storage_bucket)
        if not task_status:
            pack.status = PackStatus.FAILED_AUTHOR_IMAGE_UPLOAD.name
            pack.cleanup()
            continue

        task_status, pack_was_modified = pack.detect_modified(content_repo, index_folder_path,
                                                              current_commit_hash, previous_commit_hash)
        if not task_status:
            pack.status = PackStatus.FAILED_DETECTING_MODIFIED_FILES.name
            pack.cleanup()
            continue

        task_status = pack.format_metadata(user_metadata=user_metadata, pack_content_items=pack_content_items,
                                           integration_images=integration_images, author_image=author_image,
                                           index_folder_path=index_folder_path,
                                           packs_dependencies_mapping=packs_dependencies_mapping,
                                           build_number=build_number, commit_hash=current_commit_hash,
                                           packs_statistic_df=packs_statistic_df,
                                           pack_was_modified=pack_was_modified)
        if not task_status:
            pack.status = PackStatus.FAILED_METADATA_PARSING.name
            pack.cleanup()
            continue

        task_status, not_updated_build = pack.prepare_release_notes(index_folder_path, build_number,
                                                                    pack_was_modified)
        if not task_status:
            pack.status = PackStatus.FAILED_RELEASE_NOTES.name
            pack.cleanup()
            continue

        if not_updated_build:
            pack.status = PackStatus.PACK_IS_NOT_UPDATED_IN_RUNNING_BUILD.name
            pack.cleanup()
            continue

        task_status = pack.remove_unwanted_files(remove_test_playbooks)
        if not task_status:
            # BUGFIX: was missing .name — stored the enum member itself instead
            # of its name string like every other status assignment.
            pack.status = PackStatus.FAILED_REMOVING_PACK_SKIPPED_FOLDERS.name
            pack.cleanup()
            continue

        task_status = pack.sign_pack(signature_key)
        if not task_status:
            pack.status = PackStatus.FAILED_SIGNING_PACKS.name
            pack.cleanup()
            continue

        task_status, zip_pack_path = pack.zip_pack()
        if not task_status:
            pack.status = PackStatus.FAILED_ZIPPING_PACK_ARTIFACTS.name
            pack.cleanup()
            continue

        task_status, skipped_pack_uploading, full_pack_path = pack.upload_to_storage(
            zip_pack_path, pack.latest_version, storage_bucket, override_all_packs or pack_was_modified)
        if not task_status:
            pack.status = PackStatus.FAILED_UPLOADING_PACK.name
            pack.cleanup()
            continue

        task_status, exists_in_index = pack.check_if_exists_in_index(index_folder_path)
        if not task_status:
            pack.status = PackStatus.FAILED_SEARCHING_PACK_IN_INDEX.name
            pack.cleanup()
            continue

        task_status = pack.prepare_for_index_upload()
        if not task_status:
            pack.status = PackStatus.FAILED_PREPARING_INDEX_FOLDER.name
            pack.cleanup()
            continue

        task_status = update_index_folder(index_folder_path=index_folder_path, pack_name=pack.name,
                                          pack_path=pack.path, pack_version=pack.latest_version,
                                          hidden_pack=pack.hidden)
        if not task_status:
            pack.status = PackStatus.FAILED_UPDATING_INDEX_FOLDER.name
            pack.cleanup()
            continue

        # in case that pack already exist at cloud storage path and in index, don't show that the pack was changed
        if skipped_pack_uploading and exists_in_index:
            pack.status = PackStatus.PACK_ALREADY_EXISTS.name
            pack.cleanup()
            continue

        pack.status = PackStatus.SUCCESS.name

    # upload core packs json to bucket
    upload_core_packs_config(storage_bucket, build_number, index_folder_path)

    # finished iteration over content packs
    upload_index_to_storage(index_folder_path=index_folder_path,
                            extract_destination_path=extract_destination_path, index_blob=index_blob,
                            build_number=build_number, private_packs=private_packs,
                            current_commit_hash=current_commit_hash, index_generation=index_generation,
                            force_upload=force_upload, previous_commit_hash=previous_commit_hash)

    # upload id_set.json to bucket
    upload_id_set(storage_bucket, id_set_path)

    # get the lists of packs divided by their status
    successful_packs, skipped_packs, failed_packs = get_packs_summary(packs_list)

    # Store successful and failed packs list in CircleCI artifacts - to be used in Upload Packs To Marketplace job
    packs_results_file_path = os.path.join(os.path.dirname(packs_artifacts_path),
                                           BucketUploadFlow.PACKS_RESULTS_FILE)
    store_successful_and_failed_packs_in_ci_artifacts(packs_results_file_path,
                                                      BucketUploadFlow.PREPARE_CONTENT_FOR_TESTING,
                                                      successful_packs, failed_packs,
                                                      updated_private_packs_ids)

    # summary of packs status
    print_packs_summary(successful_packs, skipped_packs, failed_packs, not is_bucket_upload_flow)