def test_clean_non_existing_packs_skip_non_production_bucket(self, mocker):
    """
    Scenario: running clean_non_existing_packs function on CI environment on master branch
    but not on production bucket

    Given
    - non production bucket input
    - master branch env variable
    - CI env variable (ensures that script is executed in CircleCI)

    When
    - running clean_non_existing_packs in CircleCI on the master branch with a non production bucket

    Then
    - Ensure that the task is skipped and blobs from the master bucket are not deleted
    """
    from Tests.Marketplace.upload_packs import clean_non_existing_packs

    dummy_storage_bucket = mocker.MagicMock()
    dummy_storage_bucket.name = "dummy_bucket"
    skipped_cleanup = clean_non_existing_packs(index_folder_path="dummy_index_path", private_packs=[],
                                               storage_bucket=dummy_storage_bucket)

    assert skipped_cleanup
def test_clean_non_existing_packs_skip_non_master(self, mocker):
    """
    Scenario: running clean_non_existing_packs function on CI environment but not on master branch

    Given
    - production bucket input
    - dummy_branch branch env variable
    - CI env variable (ensures that script is executed in CircleCI)

    When
    - running clean_non_existing_packs in CircleCI env but not on the master branch

    Then
    - Ensure that the task is skipped and blobs from the master bucket are not deleted
    """
    from Tests.Marketplace.upload_packs import clean_non_existing_packs
    from Tests.Marketplace.marketplace_constants import GCPConfig

    dummy_storage_bucket = mocker.MagicMock()
    dummy_storage_bucket.name = GCPConfig.PRODUCTION_BUCKET
    skipped_cleanup = clean_non_existing_packs(index_folder_path="dummy_index_path", private_packs=[],
                                               storage_bucket=dummy_storage_bucket)

    assert skipped_cleanup
def test_clean_non_existing_packs(self, mocker):
    """
    Scenario: deleting a pack from the index that is not part of the content repo or the paid packs

    Given
    - valid pack from the content repo
    - valid pack from the private bucket
    - invalid pack that may be located in the bucket and in the index

    When
    - pack was deleted from the content repo

    Then
    - Ensure that the invalid pack is deleted from the index
    """
    from Tests.Marketplace.upload_packs import clean_non_existing_packs
    from Tests.Marketplace.marketplace_constants import GCPConfig
    import os
    import shutil

    dummy_storage_bucket = mocker.MagicMock()
    dummy_storage_bucket.name = GCPConfig.PRODUCTION_BUCKET
    index_folder_path = "dummy_index_path"
    public_pack = "public_pack"
    private_pack = "private_pack"
    invalid_pack = "invalid_pack"

    # mock index directory entries: one public pack, one private pack and one invalid pack
    dirs = scan_dir([(os.path.join(index_folder_path, public_pack), public_pack, True),
                     (os.path.join(index_folder_path, private_pack), private_pack, True),
                     (os.path.join(index_folder_path, invalid_pack), invalid_pack, True)])

    mocker.patch("Tests.Marketplace.upload_packs.os.listdir", return_value=[public_pack])
    mocker.patch("Tests.Marketplace.upload_packs.os.scandir", return_value=dirs)
    mocker.patch("Tests.Marketplace.upload_packs.shutil.rmtree")
    mocker.patch("Tests.Marketplace.upload_packs.logging.warning")
    private_packs = [{'id': private_pack, 'price': 120}]

    skipped_cleanup = clean_non_existing_packs(index_folder_path=index_folder_path, private_packs=private_packs,
                                               storage_bucket=dummy_storage_bucket,
                                               storage_base_path=GCPConfig.PRODUCTION_STORAGE_BASE_PATH, id_set={})

    assert not skipped_cleanup
    shutil.rmtree.assert_called_once_with(os.path.join(index_folder_path, invalid_pack))
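
# NOTE: minimal sketch of the scan_dir helper that test_clean_non_existing_packs above relies
# on; the original test module defines its own version, which may differ. It builds
# os.DirEntry-like mocks (path, name, is_dir) from (path, name, is_dir) tuples so that
# os.scandir can be patched to return them.
def scan_dir(entries):
    """Build DirEntry-like mocks from (path, name, is_dir) tuples for patching os.scandir."""
    from unittest.mock import MagicMock

    dir_entries = []
    for path, name, is_dir in entries:
        entry = MagicMock()
        entry.path = path
        entry.name = name
        entry.is_dir.return_value = is_dir
        dir_entries.append(entry)
    return dir_entries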
def main():
    install_logging('upload_packs_private.log')
    upload_config = option_handler()
    packs_artifacts_path = upload_config.artifacts_path
    extract_destination_path = upload_config.extract_path
    storage_bucket_name = upload_config.bucket_name
    private_bucket_name = upload_config.private_bucket_name
    service_account = upload_config.service_account
    target_packs = upload_config.pack_names
    build_number = upload_config.ci_build_number
    id_set_path = upload_config.id_set_path
    packs_dependencies_mapping = load_json(upload_config.pack_dependencies) if upload_config.pack_dependencies else {}
    storage_base_path = upload_config.storage_base_path
    is_private_build = upload_config.encryption_key and upload_config.encryption_key != ''
    landing_page_sections = load_json(LANDING_PAGE_SECTIONS_PATH)

    logging.info(f"Packs artifact path is: {packs_artifacts_path}")

    prepare_test_directories(packs_artifacts_path)

    # google cloud storage client initialized
    storage_client = init_storage_client(service_account)
    storage_bucket = storage_client.bucket(storage_bucket_name)
    private_storage_bucket = storage_client.bucket(private_bucket_name)
    default_storage_bucket = private_storage_bucket if is_private_build else storage_bucket

    # download and extract index from public bucket
    index_folder_path, index_blob, index_generation = download_and_extract_index(storage_bucket,
                                                                                 extract_destination_path)

    # content repo client initialized
    if not is_private_build:
        content_repo = get_content_git_client(CONTENT_ROOT_PATH)
        current_commit_hash, remote_previous_commit_hash = get_recent_commits_data(content_repo, index_folder_path,
                                                                                   is_bucket_upload_flow=False,
                                                                                   is_private_build=True)
    else:
        current_commit_hash, remote_previous_commit_hash = "", ""
        content_repo = None

    if storage_base_path:
        GCPConfig.STORAGE_BASE_PATH = storage_base_path

    # detect packs to upload
    pack_names = get_packs_names(target_packs)
    extract_packs_artifacts(packs_artifacts_path, extract_destination_path)
    packs_list = [Pack(pack_name, os.path.join(extract_destination_path, pack_name)) for pack_name in pack_names
                  if os.path.exists(os.path.join(extract_destination_path, pack_name))]

    if not is_private_build:
        check_if_index_is_updated(index_folder_path, content_repo, current_commit_hash, remote_previous_commit_hash,
                                  storage_bucket)

    if private_bucket_name:
        # Add private packs to the index
        private_packs, private_index_path, private_index_blob = update_index_with_priced_packs(
            private_storage_bucket, extract_destination_path, index_folder_path, pack_names, is_private_build)
    else:
        # skipping private packs
        logging.info("Skipping index update of priced packs")
        private_packs = []

    # google cloud bigquery client initialized
    packs_statistic_df = None
    if not is_private_build:
        bq_client = init_bigquery_client(service_account)
        packs_statistic_df = get_packs_statistics_dataframe(bq_client)

    # clean index and gcs from non existing or invalid packs
    clean_non_existing_packs(index_folder_path, private_packs, default_storage_bucket)

    # starting iteration over packs
    for pack in packs_list:
        create_and_upload_marketplace_pack(upload_config, pack, storage_bucket, index_folder_path,
                                           packs_dependencies_mapping, private_bucket_name,
                                           private_storage_bucket=private_storage_bucket, content_repo=content_repo,
                                           current_commit_hash=current_commit_hash,
                                           remote_previous_commit_hash=remote_previous_commit_hash,
                                           packs_statistic_df=packs_statistic_df)

    # upload core packs json to bucket
    if should_upload_core_packs(storage_bucket_name):
        upload_core_packs_config(default_storage_bucket, build_number, index_folder_path)

    # finished iteration over content packs
    if is_private_build:
        delete_public_packs_from_index(index_folder_path)
        upload_index_to_storage(index_folder_path, extract_destination_path, private_index_blob, build_number,
                                private_packs, current_commit_hash, index_generation, is_private_build,
                                landing_page_sections=landing_page_sections)
    else:
        upload_index_to_storage(index_folder_path, extract_destination_path, index_blob, build_number, private_packs,
                                current_commit_hash, index_generation, landing_page_sections=landing_page_sections)

    # upload id_set.json to bucket
    upload_id_set(default_storage_bucket, id_set_path)

    # get the lists of packs divided by their status
    successful_packs, skipped_packs, failed_packs = get_packs_summary(packs_list)

    # summary of packs status
    print_packs_summary(successful_packs, skipped_packs, failed_packs)
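
# Standard script entry point (assumed here; scripts of this kind are invoked directly by CI,
# and the guard keeps main() from running on import, e.g. when the tests above import helpers).
if __name__ == '__main__':
    main()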