def dummy_pack(self):
    """ dummy pack fixture """
    return Pack(pack_name="TestPack", pack_path="dummy_path")
def main():
    install_logging('upload_packs_private.log')
    upload_config = option_handler()
    packs_artifacts_path = upload_config.artifacts_path
    extract_destination_path = upload_config.extract_path
    storage_bucket_name = upload_config.bucket_name
    private_bucket_name = upload_config.private_bucket_name
    service_account = upload_config.service_account
    target_packs = upload_config.pack_names
    build_number = upload_config.ci_build_number
    packs_dependencies_mapping = load_json(upload_config.pack_dependencies) if upload_config.pack_dependencies else {}
    storage_base_path = upload_config.storage_base_path
    is_private_build = upload_config.encryption_key and upload_config.encryption_key != ''
    landing_page_sections = StatisticsHandler.get_landing_page_sections()

    logging.info(f"Packs artifact path is: {packs_artifacts_path}")
    prepare_test_directories(packs_artifacts_path)

    # google cloud storage client initialized
    storage_client = init_storage_client(service_account)
    storage_bucket = storage_client.bucket(storage_bucket_name)
    private_storage_bucket = storage_client.bucket(private_bucket_name)
    default_storage_bucket = private_storage_bucket if is_private_build else storage_bucket

    # download and extract index from public bucket
    index_folder_path, index_blob, index_generation = download_and_extract_index(storage_bucket,
                                                                                 extract_destination_path)

    # content repo client initialized
    if not is_private_build:
        content_repo = get_content_git_client(CONTENT_ROOT_PATH)
        current_commit_hash, remote_previous_commit_hash = get_recent_commits_data(
            content_repo, index_folder_path, is_bucket_upload_flow=False, is_private_build=True)
    else:
        current_commit_hash, remote_previous_commit_hash = "", ""
        content_repo = None

    if storage_base_path:
        GCPConfig.STORAGE_BASE_PATH = storage_base_path

    # detect packs to upload
    pack_names = get_packs_names(target_packs)
    extract_packs_artifacts(packs_artifacts_path, extract_destination_path)
    packs_list = [Pack(pack_name, os.path.join(extract_destination_path, pack_name)) for pack_name in pack_names
                  if os.path.exists(os.path.join(extract_destination_path, pack_name))]

    if not is_private_build:
        check_if_index_is_updated(index_folder_path, content_repo, current_commit_hash, remote_previous_commit_hash,
                                  storage_bucket)

    if private_bucket_name:  # Add private packs to the index
        private_packs, private_index_path, private_index_blob = update_index_with_priced_packs(
            private_storage_bucket, extract_destination_path, index_folder_path, pack_names, is_private_build)
    else:  # skipping private packs
        logging.info("Skipping index update of priced packs")
        private_packs = []

    # clean index and gcs from non existing or invalid packs
    clean_non_existing_packs(index_folder_path, private_packs, default_storage_bucket)

    # starting iteration over packs
    for pack in packs_list:
        create_and_upload_marketplace_pack(
            upload_config, pack, storage_bucket, index_folder_path, packs_dependencies_mapping, private_bucket_name,
            private_storage_bucket=private_storage_bucket, content_repo=content_repo,
            current_commit_hash=current_commit_hash, remote_previous_commit_hash=remote_previous_commit_hash)

    # upload core packs json to bucket
    if should_upload_core_packs(storage_bucket_name):
        create_corepacks_config(default_storage_bucket, build_number, index_folder_path,
                                artifacts_dir=os.path.dirname(packs_artifacts_path))

    # finished iteration over content packs
    if is_private_build:
        delete_public_packs_from_index(index_folder_path)
        upload_index_to_storage(index_folder_path, extract_destination_path, private_index_blob, build_number,
                                private_packs, current_commit_hash, index_generation, is_private_build,
                                landing_page_sections=landing_page_sections)
    else:
        upload_index_to_storage(index_folder_path, extract_destination_path, index_blob, build_number, private_packs,
                                current_commit_hash, index_generation, landing_page_sections=landing_page_sections)

    # get the lists of packs divided by their status
    successful_packs, skipped_packs, failed_packs = get_packs_summary(packs_list)

    # summary of packs status
    print_packs_summary(successful_packs, skipped_packs, failed_packs)
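# A minimal sketch of what option_handler() presumably parses for the private-build upload flow above.
# The flag spellings and defaults are assumptions inferred from the attributes read in main()
# (artifacts_path, extract_path, bucket_name, encryption_key, ...); the repo's real parser may differ.
import argparse


def option_handler_sketch():
    """Illustrative argument parser for the private pack upload script (not the repo's actual parser)."""
    parser = argparse.ArgumentParser(description="Upload content packs to a private marketplace bucket.")
    parser.add_argument('-a', '--artifacts_path', required=True, help="Path to the packs artifacts zip.")
    parser.add_argument('-e', '--extract_path', required=True, help="Directory to extract the artifacts into.")
    parser.add_argument('-b', '--bucket_name', required=True, help="Public GCS bucket name.")
    parser.add_argument('-pb', '--private_bucket_name', default='', help="Private GCS bucket name.")
    parser.add_argument('-s', '--service_account', help="Path to the GCP service account JSON key.")
    parser.add_argument('-p', '--pack_names', default='', help="Comma separated list of target packs.")
    parser.add_argument('-n', '--ci_build_number', default='', help="CI build number.")
    parser.add_argument('-d', '--pack_dependencies', default='', help="Path to the pack dependencies JSON mapping.")
    parser.add_argument('-sb', '--storage_base_path', default='', help="Override for the storage base path.")
    parser.add_argument('-ek', '--encryption_key', default='',
                        help="Encryption key; a non-empty value marks the run as a private build.")
    return parser.parse_args()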
def dummy_pack(self):
    """ dummy pack fixture """
    return Pack(pack_name="Test Pack Name", pack_path="dummy_path", marketplace="xsoar")
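# A minimal sketch of how a dummy_pack fixture like the ones above is typically wired up with pytest.
# The test class, decorator scope, import path and assertion are assumptions for illustration only;
# in the content repo the fixture is a method of a test class and Pack comes from marketplace_services.
import pytest

from Tests.Marketplace.marketplace_services import Pack  # assumed import path


class TestDummyPackSketch:
    @pytest.fixture(scope="function")
    def dummy_pack(self):
        """ dummy pack fixture """
        return Pack(pack_name="TestPack", pack_path="dummy_path")

    def test_dummy_pack_name(self, dummy_pack):
        # pytest injects the fixture's return value as the argument
        assert dummy_pack.name == "TestPack"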
def main():
    option = option_handler()
    packs_artifacts_path = option.artifacts_path
    extract_destination_path = option.extract_path
    storage_bucket_name = option.bucket_name
    private_bucket_name = option.private_bucket_name
    service_account = option.service_account
    target_packs = option.pack_names if option.pack_names else ""
    build_number = option.ci_build_number if option.ci_build_number else str(uuid.uuid4())
    override_all_packs = option.override_all_packs
    signature_key = option.key_string
    id_set_path = option.id_set_path
    packs_dependencies_mapping = load_json(option.pack_dependencies) if option.pack_dependencies else {}
    storage_base_path = option.storage_base_path
    remove_test_playbooks = option.remove_test_playbooks

    # google cloud storage client initialized
    storage_client = init_storage_client(service_account)
    storage_bucket = storage_client.bucket(storage_bucket_name)

    # content repo client initialized
    content_repo = get_content_git_client(CONTENT_ROOT_PATH)
    current_commit_hash, remote_previous_commit_hash = get_recent_commits_data(content_repo)

    if storage_base_path:
        GCPConfig.STORAGE_BASE_PATH = storage_base_path

    # detect packs to upload
    pack_names = get_packs_names(target_packs)
    extract_packs_artifacts(packs_artifacts_path, extract_destination_path)
    packs_list = [Pack(pack_name, os.path.join(extract_destination_path, pack_name)) for pack_name in pack_names
                  if os.path.exists(os.path.join(extract_destination_path, pack_name))]

    # download and extract index from public bucket
    index_folder_path, index_blob, index_generation = download_and_extract_index(storage_bucket,
                                                                                 extract_destination_path)

    if not option.override_all_packs:
        check_if_index_is_updated(index_folder_path, content_repo, current_commit_hash, remote_previous_commit_hash,
                                  storage_bucket)

    # google cloud bigquery client initialized
    bq_client = init_bigquery_client(service_account)
    packs_statistic_df = get_packs_statistics_dataframe(bq_client)

    if private_bucket_name:  # Add private packs to the index
        private_storage_bucket = storage_client.bucket(private_bucket_name)
        private_packs = update_index_with_priced_packs(private_storage_bucket, extract_destination_path,
                                                       index_folder_path)
    else:  # skipping private packs
        print("Skipping index update of priced packs")
        private_packs = []

    # clean index and gcs from non existing or invalid packs
    clean_non_existing_packs(index_folder_path, private_packs, storage_bucket)

    # starting iteration over packs
    for pack in packs_list:
        task_status, user_metadata = pack.load_user_metadata()
        if not task_status:
            pack.status = PackStatus.FAILED_LOADING_USER_METADATA.name
            pack.cleanup()
            continue

        task_status, pack_content_items = pack.collect_content_items()
        if not task_status:
            pack.status = PackStatus.FAILED_COLLECT_ITEMS.name
            pack.cleanup()
            continue

        task_status, integration_images = pack.upload_integration_images(storage_bucket)
        if not task_status:
            pack.status = PackStatus.FAILED_IMAGES_UPLOAD.name
            pack.cleanup()
            continue

        task_status, author_image = pack.upload_author_image(storage_bucket)
        if not task_status:
            pack.status = PackStatus.FAILED_AUTHOR_IMAGE_UPLOAD.name
            pack.cleanup()
            continue

        task_status = pack.format_metadata(user_metadata=user_metadata, pack_content_items=pack_content_items,
                                           integration_images=integration_images, author_image=author_image,
                                           index_folder_path=index_folder_path,
                                           packs_dependencies_mapping=packs_dependencies_mapping,
                                           build_number=build_number, commit_hash=current_commit_hash,
                                           packs_statistic_df=packs_statistic_df)
        if not task_status:
            pack.status = PackStatus.FAILED_METADATA_PARSING.name
            pack.cleanup()
            continue

        task_status, not_updated_build = pack.prepare_release_notes(index_folder_path, build_number)
        if not task_status:
            pack.status = PackStatus.FAILED_RELEASE_NOTES.name
            pack.cleanup()
            continue

        if not_updated_build:
            pack.status = PackStatus.PACK_IS_NOT_UPDATED_IN_RUNNING_BUILD.name
            pack.cleanup()
            continue

        task_status = pack.remove_unwanted_files(remove_test_playbooks)
        if not task_status:
            pack.status = PackStatus.FAILED_REMOVING_PACK_SKIPPED_FOLDERS.name
            pack.cleanup()
            continue

        task_status = pack.sign_pack(signature_key)
        if not task_status:
            pack.status = PackStatus.FAILED_SIGNING_PACKS.name
            pack.cleanup()
            continue

        task_status, zip_pack_path = pack.zip_pack()
        if not task_status:
            pack.status = PackStatus.FAILED_ZIPPING_PACK_ARTIFACTS.name
            pack.cleanup()
            continue

        task_status, pack_was_modified = pack.detect_modified(content_repo, index_folder_path, current_commit_hash,
                                                              remote_previous_commit_hash)
        if not task_status:
            pack.status = PackStatus.FAILED_DETECTING_MODIFIED_FILES.name
            pack.cleanup()
            continue

        task_status, skipped_pack_uploading = pack.upload_to_storage(zip_pack_path, pack.latest_version,
                                                                     storage_bucket,
                                                                     override_all_packs or pack_was_modified)
        if not task_status:
            pack.status = PackStatus.FAILED_UPLOADING_PACK.name
            pack.cleanup()
            continue

        task_status, exists_in_index = pack.check_if_exists_in_index(index_folder_path)
        if not task_status:
            pack.status = PackStatus.FAILED_SEARCHING_PACK_IN_INDEX.name
            pack.cleanup()
            continue

        # in case the pack already exists at the cloud storage path and in the index, skip further steps
        if skipped_pack_uploading and exists_in_index:
            pack.status = PackStatus.PACK_ALREADY_EXISTS.name
            pack.cleanup()
            continue

        task_status = pack.prepare_for_index_upload()
        if not task_status:
            pack.status = PackStatus.FAILED_PREPARING_INDEX_FOLDER.name
            pack.cleanup()
            continue

        task_status = update_index_folder(index_folder_path=index_folder_path, pack_name=pack.name,
                                          pack_path=pack.path, pack_version=pack.latest_version,
                                          hidden_pack=pack.hidden)
        if not task_status:
            pack.status = PackStatus.FAILED_UPDATING_INDEX_FOLDER.name
            pack.cleanup()
            continue

        pack.status = PackStatus.SUCCESS.name

    # upload core packs json to bucket
    upload_core_packs_config(storage_bucket, build_number, index_folder_path)

    # finished iteration over content packs
    upload_index_to_storage(index_folder_path, extract_destination_path, index_blob, build_number, private_packs,
                            current_commit_hash, index_generation)

    # upload id_set.json to bucket
    upload_id_set(storage_bucket, id_set_path)

    # summary of packs status
    print_packs_summary(packs_list)
def main():
    install_logging('Copy_and_Upload_Packs.log', logger=logging)
    options = options_handler()
    packs_artifacts_path = options.artifacts_path
    extract_destination_path = options.extract_path
    production_bucket_name = options.production_bucket_name
    build_bucket_name = options.build_bucket_name
    service_account = options.service_account
    build_number = options.ci_build_number
    circle_branch = options.circle_branch
    production_base_path = options.production_base_path
    target_packs = options.pack_names
    marketplace = options.marketplace

    # Google cloud storage client initialized
    storage_client = init_storage_client(service_account)
    production_bucket = storage_client.bucket(production_bucket_name)
    build_bucket = storage_client.bucket(build_bucket_name)

    # Initialize build and prod base paths
    build_bucket_path = os.path.join(GCPConfig.BUILD_PATH_PREFIX, circle_branch, build_number, marketplace)
    build_bucket_base_path = os.path.join(build_bucket_path, GCPConfig.CONTENT_PACKS_PATH)

    # Relevant when triggering test upload flow
    if production_bucket_name:
        GCPConfig.PRODUCTION_BUCKET = production_bucket_name

    # Download and extract build index from build and prod buckets
    build_index_folder_path, build_index_blob, build_index_generation = \
        download_and_extract_index(build_bucket, extract_destination_path, build_bucket_base_path)

    # Get the successful and failed packs file from the Prepare Content step in the Create Instances job, if it exists
    packs_results_file_path = os.path.join(os.path.dirname(packs_artifacts_path), BucketUploadFlow.PACKS_RESULTS_FILE)
    pc_successful_packs_dict, pc_failed_packs_dict, pc_successful_private_packs_dict, \
        pc_uploaded_images = get_upload_data(packs_results_file_path, BucketUploadFlow.PREPARE_CONTENT_FOR_TESTING)
    logging.debug(f"Successful packs from Prepare Content: {pc_successful_packs_dict}")
    logging.debug(f"Failed packs from Prepare Content: {pc_failed_packs_dict}")
    logging.debug(f"Successful private packs from Prepare Content: {pc_successful_private_packs_dict}")
    logging.debug(f"Images from Prepare Content: {pc_uploaded_images}")

    # Check whether an upload is needed
    check_if_need_to_upload(pc_successful_packs_dict, pc_failed_packs_dict, pc_successful_private_packs_dict,
                            pc_uploaded_images)

    # Detect packs to upload
    pack_names = get_pack_names(target_packs)
    extract_packs_artifacts(packs_artifacts_path, extract_destination_path)
    packs_list = [Pack(pack_name, os.path.join(extract_destination_path, pack_name), marketplace)
                  for pack_name in pack_names
                  if os.path.exists(os.path.join(extract_destination_path, pack_name))]

    # Starting iteration over packs
    for pack in packs_list:
        # Indicates whether a pack has failed to upload on Prepare Content step
        task_status, pack_status = pack.is_failed_to_upload(pc_failed_packs_dict)
        if task_status:
            pack.status = pack_status
            pack.cleanup()
            continue

        task_status = pack.load_user_metadata()
        if not task_status:
            pack.status = PackStatus.FAILED_LOADING_USER_METADATA.name
            pack.cleanup()
            continue

        task_status = pack.copy_integration_images(production_bucket, build_bucket, pc_uploaded_images,
                                                   production_base_path, build_bucket_base_path)
        if not task_status:
            pack.status = PackStatus.FAILED_IMAGES_UPLOAD.name
            pack.cleanup()
            continue

        task_status = pack.copy_author_image(production_bucket, build_bucket, pc_uploaded_images,
                                             production_base_path, build_bucket_base_path)
        if not task_status:
            pack.status = PackStatus.FAILED_AUTHOR_IMAGE_UPLOAD.name
            pack.cleanup()
            continue

        task_status, skipped_pack_uploading = pack.copy_and_upload_to_storage(production_bucket, build_bucket,
                                                                              pc_successful_packs_dict,
                                                                              production_base_path,
                                                                              build_bucket_base_path)
        if skipped_pack_uploading:
            pack.status = PackStatus.PACK_ALREADY_EXISTS.name
            pack.cleanup()
            continue

        if not task_status:
            pack.status = PackStatus.FAILED_UPLOADING_PACK.name
            pack.cleanup()
            continue

        pack.status = PackStatus.SUCCESS.name

    # upload core packs json to bucket
    upload_core_packs_config(production_bucket, build_number, extract_destination_path, build_bucket,
                             production_base_path, build_bucket_base_path)

    # finished iteration over content packs
    copy_index(build_index_folder_path, build_index_blob, build_index_generation, production_bucket, build_bucket,
               production_base_path, build_bucket_base_path)

    # upload id_set.json to bucket
    copy_id_set(production_bucket, build_bucket, production_base_path, build_bucket_base_path)

    # get the lists of packs divided by their status
    successful_packs, skipped_packs, failed_packs = get_packs_summary(packs_list)

    # Store successful and failed packs list in CircleCI artifacts
    store_successful_and_failed_packs_in_ci_artifacts(
        packs_results_file_path, BucketUploadFlow.UPLOAD_PACKS_TO_MARKETPLACE_STORAGE, successful_packs, failed_packs,
        list(pc_successful_private_packs_dict))

    # verify that the packs that succeeded in Prepare Content are the ones that were copied
    verify_copy(successful_packs, pc_successful_packs_dict)

    # summary of packs status
    print_packs_summary(successful_packs, skipped_packs, failed_packs)
def main():
    option = option_handler()
    packs_artifacts_path = option.artifacts_path
    extract_destination_path = option.extract_path
    storage_bucket_name = option.bucket_name
    private_bucket_name = option.private_bucket_name
    service_account = option.service_account
    specific_packs = option.pack_names
    build_number = option.ci_build_number if option.ci_build_number else str(uuid.uuid4())
    override_pack = option.override_pack
    signature_key = option.key_string

    # google cloud storage client initialized
    storage_client = init_storage_client(service_account)
    storage_bucket = storage_client.bucket(storage_bucket_name)
    private_storage_bucket = storage_client.bucket(private_bucket_name)

    index_folder_path, index_blob = download_and_extract_index(storage_bucket, extract_destination_path)

    private_index_path = ''
    if private_bucket_name:
        private_index_path, _ = download_and_extract_index(private_storage_bucket,
                                                           os.path.join(extract_destination_path, 'private'))

    # detect new or modified packs
    modified_packs = get_modified_packs(specific_packs)
    extract_packs_artifacts(packs_artifacts_path, extract_destination_path)
    packs_list = [Pack(pack_name, os.path.join(extract_destination_path, pack_name)) for pack_name in modified_packs
                  if os.path.exists(os.path.join(extract_destination_path, pack_name))]

    # Add private packs to the index
    private_packs = []
    if private_index_path:
        try:
            private_packs = get_private_packs(private_index_path)
            add_private_packs_to_index(index_folder_path, private_index_path)
        except Exception as e:
            print_warning(f'Could not add private packs to the index: {str(e)}')
        finally:
            shutil.rmtree(private_index_path, ignore_errors=True)

    for pack in packs_list:
        task_status, integration_images = pack.upload_integration_images(storage_bucket)
        if not task_status:
            pack.status = PackStatus.FAILED_IMAGES_UPLOAD.name
            pack.cleanup()
            continue

        task_status, author_image = pack.upload_author_image(storage_bucket)
        if not task_status:
            pack.status = PackStatus.FAILED_AUTHOR_IMAGE_UPLOAD.name
            pack.cleanup()
            continue

        task_status, pack_content_items = pack.collect_content_items()
        if not task_status:
            pack.status = PackStatus.FAILED_COLLECT_ITEMS.name
            pack.cleanup()
            continue

        task_status = pack.format_metadata(pack_content_items, integration_images, author_image, index_folder_path)
        if not task_status:
            pack.status = PackStatus.FAILED_METADATA_PARSING.name
            pack.cleanup()
            continue

        # todo finish implementation of release notes
        # pack.parse_release_notes()

        task_status = pack.remove_unwanted_files()
        if not task_status:
            pack.status = PackStatus.FAILED_REMOVING_PACK_SKIPPED_FOLDERS.name
            pack.cleanup()
            continue

        task_status = pack.sign_pack(signature_key)
        if not task_status:
            pack.status = PackStatus.FAILED_SIGNING_PACKS.name
            pack.cleanup()
            continue

        task_status, zip_pack_path = pack.zip_pack()
        if not task_status:
            pack.status = PackStatus.FAILED_ZIPPING_PACK_ARTIFACTS.name
            pack.cleanup()
            continue

        task_status, skipped_pack_uploading = pack.upload_to_storage(zip_pack_path, pack.latest_version,
                                                                     storage_bucket, override_pack)
        if not task_status:
            pack.status = PackStatus.FAILED_UPLOADING_PACK.name
            pack.cleanup()
            continue

        # in case the pack already exists at the cloud storage path, skip further steps
        if skipped_pack_uploading:
            pack.status = PackStatus.PACK_ALREADY_EXISTS.name
            pack.cleanup()
            continue

        task_status = pack.prepare_for_index_upload()
        if not task_status:
            pack.status = PackStatus.FAILED_PREPARING_INDEX_FOLDER.name
            pack.cleanup()
            continue

        task_status = update_index_folder(index_folder_path=index_folder_path, pack_name=pack.name,
                                          pack_path=pack.path)
        if not task_status:
            pack.status = PackStatus.FAILED_UPDATING_INDEX_FOLDER.name
            pack.cleanup()
            continue

        pack.status = PackStatus.SUCCESS.name

    # finished iteration over content packs
    upload_index_to_storage(index_folder_path, extract_destination_path, index_blob, build_number, private_packs)

    # summary of packs status
    print_packs_summary(packs_list)
def main():
    install_logging('Copy_and_Upload_Packs.log')
    options = options_handler()
    packs_artifacts_path = options.artifacts_path
    extract_destination_path = options.extract_path
    production_bucket_name = options.production_bucket_name
    build_bucket_name = options.build_bucket_name
    service_account = options.service_account
    build_number = options.ci_build_number
    circle_branch = options.circle_branch
    production_base_path = options.production_base_path
    target_packs = options.pack_names

    # Google cloud storage client initialized
    storage_client = init_storage_client(service_account)
    production_bucket = storage_client.bucket(production_bucket_name)
    build_bucket = storage_client.bucket(build_bucket_name)

    # Initialize base paths
    build_bucket_path = os.path.join(GCPConfig.BUILD_PATH_PREFIX, circle_branch, build_number)
    GCPConfig.BUILD_BASE_PATH = os.path.join(build_bucket_path, GCPConfig.STORAGE_BASE_PATH)
    if production_base_path:
        GCPConfig.STORAGE_BASE_PATH = production_base_path

    # Download and extract build index from build and prod buckets
    build_index_folder_path, build_index_blob, build_index_generation = \
        download_and_extract_index(build_bucket, extract_destination_path)

    # Get the successful and failed packs file from the Prepare Content step in the Create Instances job, if it exists
    pc_successful_packs_dict, pc_failed_packs_dict = get_successful_and_failed_packs(
        os.path.join(os.path.dirname(packs_artifacts_path), PACKS_RESULTS_FILE))

    # Check whether an upload is needed
    check_if_need_to_upload(pc_successful_packs_dict, pc_failed_packs_dict)

    # Detect packs to upload
    pack_names = get_pack_names(target_packs)
    extract_packs_artifacts(packs_artifacts_path, extract_destination_path)
    packs_list = [Pack(pack_name, os.path.join(extract_destination_path, pack_name)) for pack_name in pack_names
                  if os.path.exists(os.path.join(extract_destination_path, pack_name))]

    # Starting iteration over packs
    for pack in packs_list:
        # Indicates whether a pack has failed to upload on Prepare Content step
        task_status, pack_status = pack.is_failed_to_upload(pc_failed_packs_dict)
        if task_status:
            pack.status = pack_status
            pack.cleanup()
            continue

        task_status, user_metadata = pack.load_user_metadata()
        if not task_status:
            pack.status = PackStatus.FAILED_LOADING_USER_METADATA.value
            pack.cleanup()
            continue

        task_status = pack.copy_integration_images(production_bucket, build_bucket)
        if not task_status:
            pack.status = PackStatus.FAILED_IMAGES_UPLOAD.name
            pack.cleanup()
            continue

        task_status = pack.copy_author_image(production_bucket, build_bucket)
        if not task_status:
            pack.status = PackStatus.FAILED_AUTHOR_IMAGE_UPLOAD.name
            pack.cleanup()
            continue

        # Create a local copy of the pack's index changelog
        task_status = pack.create_local_changelog(build_index_folder_path)
        if not task_status:
            pack.status = PackStatus.FAILED_RELEASE_NOTES.name
            pack.cleanup()
            continue

        task_status, skipped_pack_uploading = pack.copy_and_upload_to_storage(production_bucket, build_bucket,
                                                                              pack.latest_version,
                                                                              pc_successful_packs_dict)
        if skipped_pack_uploading:
            pack.status = PackStatus.PACK_ALREADY_EXISTS.name
            pack.cleanup()
            continue

        if not task_status:
            pack.status = PackStatus.FAILED_UPLOADING_PACK.name
            pack.cleanup()
            continue

        pack.status = PackStatus.SUCCESS.name

    # upload core packs json to bucket
    upload_core_packs_config(production_bucket, build_number, extract_destination_path, build_bucket)

    # finished iteration over content packs
    copy_index(build_index_folder_path, build_index_blob, build_index_generation, production_bucket, build_bucket)

    # upload id_set.json to bucket
    copy_id_set(production_bucket, build_bucket)

    # get the lists of packs divided by their status
    successful_packs, skipped_packs, failed_packs = get_packs_summary(packs_list)

    # verify that the packs that succeeded in Prepare Content are the ones that were copied
    verify_copy(successful_packs, pc_successful_packs_dict)

    # summary of packs status
    print_packs_summary(successful_packs, skipped_packs, failed_packs)
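# A minimal sketch of the pack-status partitioning that get_packs_summary() appears to perform in the
# main() flows above: successful, skipped (already exists / not updated in this build), and failed packs.
# This is an illustrative implementation based on the PackStatus values assigned above, not the repo's
# actual helper; PackStatus is the status enum already imported by these scripts.
def get_packs_summary_sketch(packs_list):
    """Split packs into successful, skipped and failed lists based on their status string."""
    skipped_statuses = {PackStatus.PACK_ALREADY_EXISTS.name, PackStatus.PACK_IS_NOT_UPDATED_IN_RUNNING_BUILD.name}
    successful_packs = [pack for pack in packs_list if pack.status == PackStatus.SUCCESS.name]
    skipped_packs = [pack for pack in packs_list if pack.status in skipped_statuses]
    failed_packs = [pack for pack in packs_list
                    if pack.status != PackStatus.SUCCESS.name and pack.status not in skipped_statuses]
    return successful_packs, skipped_packs, failed_packs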