Example #1
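Entry point for the private-packs upload flow: it reads the upload configuration, initializes Google Cloud Storage clients for the public and private buckets, downloads and extracts the marketplace index, and collects the packs extracted from the build artifacts. Each pack is then built and uploaded, priced (private) packs are merged into the index when a private bucket is given, the core packs configuration, the index, and id_set.json are uploaded, and a summary of successful, skipped, and failed packs is printed.
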
def main():
    install_logging('upload_packs_private.log')
    upload_config = option_handler()
    packs_artifacts_path = upload_config.artifacts_path
    extract_destination_path = upload_config.extract_path
    storage_bucket_name = upload_config.bucket_name
    private_bucket_name = upload_config.private_bucket_name
    service_account = upload_config.service_account
    target_packs = upload_config.pack_names
    build_number = upload_config.ci_build_number
    id_set_path = upload_config.id_set_path
    packs_dependencies_mapping = load_json(
        upload_config.pack_dependencies
    ) if upload_config.pack_dependencies else {}
    storage_base_path = upload_config.storage_base_path
    is_private_build = upload_config.encryption_key and upload_config.encryption_key != ''
    landing_page_sections = load_json(LANDING_PAGE_SECTIONS_PATH)

    logging.info(f"Packs artifact path is: {packs_artifacts_path}")

    prepare_test_directories(packs_artifacts_path)

    # google cloud storage client initialized
    storage_client = init_storage_client(service_account)
    storage_bucket = storage_client.bucket(storage_bucket_name)
    private_storage_bucket = storage_client.bucket(private_bucket_name)
    default_storage_bucket = private_storage_bucket if is_private_build else storage_bucket

    # download and extract index from public bucket
    index_folder_path, index_blob, index_generation = download_and_extract_index(
        storage_bucket, extract_destination_path)

    # content repo client initialized
    if not is_private_build:
        content_repo = get_content_git_client(CONTENT_ROOT_PATH)
        current_commit_hash, remote_previous_commit_hash = get_recent_commits_data(
            content_repo,
            index_folder_path,
            is_bucket_upload_flow=False,
            is_private_build=True)
    else:
        current_commit_hash, remote_previous_commit_hash = "", ""
        content_repo = None

    if storage_base_path:
        GCPConfig.STORAGE_BASE_PATH = storage_base_path

    # detect packs to upload
    pack_names = get_packs_names(target_packs)
    extract_packs_artifacts(packs_artifacts_path, extract_destination_path)
    packs_list = [
        Pack(pack_name, os.path.join(extract_destination_path, pack_name))
        for pack_name in pack_names
        if os.path.exists(os.path.join(extract_destination_path, pack_name))
    ]

    if not is_private_build:
        check_if_index_is_updated(index_folder_path, content_repo,
                                  current_commit_hash,
                                  remote_previous_commit_hash, storage_bucket)

    if private_bucket_name:  # Add private packs to the index
        private_packs, private_index_path, private_index_blob = update_index_with_priced_packs(
            private_storage_bucket, extract_destination_path,
            index_folder_path, pack_names, is_private_build)
    else:  # skipping private packs
        logging.info("Skipping index update of priced packs")
        private_packs = []

    # google cloud bigquery client initialized
    packs_statistic_df = None
    if not is_private_build:
        bq_client = init_bigquery_client(service_account)
        packs_statistic_df = get_packs_statistics_dataframe(bq_client)

    # remove non-existing or invalid packs from the index and GCS
    clean_non_existing_packs(index_folder_path, private_packs,
                             default_storage_bucket)
    # starting iteration over packs
    for pack in packs_list:
        create_and_upload_marketplace_pack(
            upload_config,
            pack,
            storage_bucket,
            index_folder_path,
            packs_dependencies_mapping,
            private_bucket_name,
            private_storage_bucket=private_storage_bucket,
            content_repo=content_repo,
            current_commit_hash=current_commit_hash,
            remote_previous_commit_hash=remote_previous_commit_hash,
            packs_statistic_df=packs_statistic_df)
    # upload core packs json to bucket
    if should_upload_core_packs(storage_bucket_name):
        upload_core_packs_config(default_storage_bucket, build_number,
                                 index_folder_path)
    # finished iteration over content packs
    if is_private_build:
        delete_public_packs_from_index(index_folder_path)
        upload_index_to_storage(index_folder_path,
                                extract_destination_path,
                                private_index_blob,
                                build_number,
                                private_packs,
                                current_commit_hash,
                                index_generation,
                                is_private_build,
                                landing_page_sections=landing_page_sections)
    else:
        upload_index_to_storage(index_folder_path,
                                extract_destination_path,
                                index_blob,
                                build_number,
                                private_packs,
                                current_commit_hash,
                                index_generation,
                                landing_page_sections=landing_page_sections)

    # upload id_set.json to bucket
    upload_id_set(default_storage_bucket, id_set_path)

    # get the lists of packs divided by their status
    successful_packs, skipped_packs, failed_packs = get_packs_summary(
        packs_list)

    # summary of packs status
    print_packs_summary(successful_packs, skipped_packs, failed_packs)
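All three examples obtain their bucket handles through init_storage_client(service_account), which is not shown on this page. The snippet below is only a minimal sketch of such a factory, assuming service_account is the path to a service-account JSON key file; the fallback to Application Default Credentials is an assumption, not the project's actual implementation.

from google.cloud import storage


def init_storage_client(service_account=None):
    """Minimal sketch of a GCS client factory (illustrative, not the real helper)."""
    if service_account:
        # Authenticate with an explicit service-account JSON key file.
        return storage.Client.from_service_account_json(service_account)
    # Otherwise rely on Application Default Credentials (gcloud auth, GCE metadata, ...).
    return storage.Client()

With such a client, storage_client.bucket(name) returns a lazy bucket handle without making an API call, which is how the examples address the public, private, build, and production buckets.
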
Example #2
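A copy flow rather than a build flow: packs that were already prepared and uploaded to a build bucket are copied into the production bucket. The function reads the successful and failed pack results produced by the Prepare Content step, skips packs that already failed, copies integration and author images, recreates each pack's changelog from the build index, and copies the pack itself. It then copies the core packs configuration, the index, and id_set.json, and finally verifies that the copied packs match the ones reported as successful.
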
def main():
    install_logging('Copy and Upload Packs.log')
    options = options_handler()
    packs_artifacts_path = options.artifacts_path
    extract_destination_path = options.extract_path
    production_bucket_name = options.production_bucket_name
    build_bucket_name = options.build_bucket_name
    service_account = options.service_account
    build_number = options.ci_build_number
    circle_branch = options.circle_branch
    production_base_path = options.production_base_path
    target_packs = options.pack_names

    # Google cloud storage client initialized
    storage_client = init_storage_client(service_account)
    production_bucket = storage_client.bucket(production_bucket_name)
    build_bucket = storage_client.bucket(build_bucket_name)

    # Initialize base paths
    build_bucket_path = os.path.join(GCPConfig.BUILD_PATH_PREFIX,
                                     circle_branch, build_number)
    GCPConfig.BUILD_BASE_PATH = os.path.join(build_bucket_path,
                                             GCPConfig.STORAGE_BASE_PATH)
    if production_base_path:
        GCPConfig.STORAGE_BASE_PATH = production_base_path

    # Download and extract build index from build and prod buckets
    build_index_folder_path, build_index_blob, build_index_generation = \
        download_and_extract_index(build_bucket, extract_destination_path)

    # Get the successful and failed packs file from the Prepare Content step in the Create Instances job, if it exists
    pc_successful_packs_dict, pc_failed_packs_dict = get_successful_and_failed_packs(
        os.path.join(os.path.dirname(packs_artifacts_path),
                     PACKS_RESULTS_FILE))

    # Check whether there is anything to upload
    check_if_need_to_upload(pc_successful_packs_dict, pc_failed_packs_dict)

    # Detect packs to upload
    pack_names = get_pack_names(target_packs)
    extract_packs_artifacts(packs_artifacts_path, extract_destination_path)
    packs_list = [
        Pack(pack_name, os.path.join(extract_destination_path, pack_name))
        for pack_name in pack_names
        if os.path.exists(os.path.join(extract_destination_path, pack_name))
    ]

    # Starting iteration over packs
    for pack in packs_list:
        # Indicates whether the pack failed to upload in the Prepare Content step
        task_status, pack_status = pack.is_failed_to_upload(
            pc_failed_packs_dict)
        if task_status:
            pack.status = pack_status
            pack.cleanup()
            continue

        task_status, user_metadata = pack.load_user_metadata()
        if not task_status:
            pack.status = PackStatus.FAILED_LOADING_USER_METADATA.name
            pack.cleanup()
            continue

        task_status = pack.copy_integration_images(production_bucket,
                                                   build_bucket)
        if not task_status:
            pack.status = PackStatus.FAILED_IMAGES_UPLOAD.name
            pack.cleanup()
            continue

        task_status = pack.copy_author_image(production_bucket, build_bucket)
        if not task_status:
            pack.status = PackStatus.FAILED_AUTHOR_IMAGE_UPLOAD.name
            pack.cleanup()
            continue

        # Create a local copy of the pack's index changelog
        task_status = pack.create_local_changelog(build_index_folder_path)
        if not task_status:
            pack.status = PackStatus.FAILED_RELEASE_NOTES.name
            pack.cleanup()
            continue

        task_status, skipped_pack_uploading = pack.copy_and_upload_to_storage(
            production_bucket, build_bucket, pack.latest_version,
            pc_successful_packs_dict)
        if skipped_pack_uploading:
            pack.status = PackStatus.PACK_ALREADY_EXISTS.name
            pack.cleanup()
            continue

        if not task_status:
            pack.status = PackStatus.FAILED_UPLOADING_PACK.name
            pack.cleanup()
            continue

        pack.status = PackStatus.SUCCESS.name

    # upload core packs json to bucket
    upload_core_packs_config(production_bucket, build_number,
                             extract_destination_path, build_bucket)

    # finished iteration over content packs
    copy_index(build_index_folder_path, build_index_blob,
               build_index_generation, production_bucket, build_bucket)

    # upload id_set.json to bucket
    copy_id_set(production_bucket, build_bucket)

    # get the lists of packs divided by their status
    successful_packs, skipped_packs, failed_packs = get_packs_summary(
        packs_list)

    # verify that the packs reported as successful by Prepare Content are the ones that were copied
    verify_copy(successful_packs, pc_successful_packs_dict)

    # summary of packs status
    print_packs_summary(successful_packs, skipped_packs, failed_packs)
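Both copy flows finish by splitting packs_list into successful, skipped, and failed packs with get_packs_summary and printing the result. The helper itself is not reproduced here; the sketch below shows one plausible implementation, assuming pack.status holds a PackStatus member name as assigned in the loop above. The placeholder enum members and their values are illustrative only.

from enum import Enum


class PackStatus(Enum):
    # Placeholder members; the real enum in the project defines many more statuses.
    SUCCESS = "Successfully uploaded pack"
    PACK_ALREADY_EXISTS = "Pack already exists in the targeted bucket"
    FAILED_UPLOADING_PACK = "Failed to upload pack"


def get_packs_summary(packs_list):
    """Sketch: partition packs by the status name assigned during the upload loop."""
    successful_packs = [p for p in packs_list if p.status == PackStatus.SUCCESS.name]
    skipped_packs = [p for p in packs_list if p.status == PackStatus.PACK_ALREADY_EXISTS.name]
    failed_packs = [p for p in packs_list
                    if p not in successful_packs and p not in skipped_packs]
    return successful_packs, skipped_packs, failed_packs
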
Example #3
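A later revision of the copy-and-upload flow that is marketplace-aware: the build bucket path includes the target marketplace, upload results (including already uploaded images) are read through get_upload_data, and explicit production and build base paths are threaded through every copy step. After the copy, the successful and failed pack lists are stored back into the CI artifacts file before the copy is verified and the summary is printed.
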
def main():
    install_logging('Copy_and_Upload_Packs.log', logger=logging)
    options = options_handler()
    packs_artifacts_path = options.artifacts_path
    extract_destination_path = options.extract_path
    production_bucket_name = options.production_bucket_name
    build_bucket_name = options.build_bucket_name
    service_account = options.service_account
    build_number = options.ci_build_number
    circle_branch = options.circle_branch
    production_base_path = options.production_base_path
    target_packs = options.pack_names
    marketplace = options.marketplace

    # Google cloud storage client initialized
    storage_client = init_storage_client(service_account)
    production_bucket = storage_client.bucket(production_bucket_name)
    build_bucket = storage_client.bucket(build_bucket_name)

    # Initialize build and prod base paths
    build_bucket_path = os.path.join(GCPConfig.BUILD_PATH_PREFIX,
                                     circle_branch, build_number, marketplace)
    build_bucket_base_path = os.path.join(build_bucket_path,
                                          GCPConfig.CONTENT_PACKS_PATH)

    # Relevant when triggering test upload flow
    if production_bucket_name:
        GCPConfig.PRODUCTION_BUCKET = production_bucket_name

    # Download and extract build index from build and prod buckets
    build_index_folder_path, build_index_blob, build_index_generation = \
        download_and_extract_index(build_bucket, extract_destination_path, build_bucket_base_path)

    # Get the successful and failed packs file from the Prepare Content step in the Create Instances job, if it exists
    packs_results_file_path = os.path.join(
        os.path.dirname(packs_artifacts_path),
        BucketUploadFlow.PACKS_RESULTS_FILE)
    pc_successful_packs_dict, pc_failed_packs_dict, pc_successful_private_packs_dict, \
        pc_uploaded_images = get_upload_data(packs_results_file_path, BucketUploadFlow.PREPARE_CONTENT_FOR_TESTING)

    logging.debug(
        f"Successful packs from Prepare Content: {pc_successful_packs_dict}")
    logging.debug(f"Failed packs from Prepare Content: {pc_failed_packs_dict}")
    logging.debug(
        f"Successful private packs from Prepare Content: {pc_successful_private_packs_dict}"
    )
    logging.debug(f"Images from Prepare Content: {pc_uploaded_images}")

    # Check whether there is anything to upload
    check_if_need_to_upload(pc_successful_packs_dict, pc_failed_packs_dict,
                            pc_successful_private_packs_dict,
                            pc_uploaded_images)

    # Detect packs to upload
    pack_names = get_pack_names(target_packs)
    extract_packs_artifacts(packs_artifacts_path, extract_destination_path)
    packs_list = [
        Pack(pack_name, os.path.join(extract_destination_path, pack_name),
             marketplace) for pack_name in pack_names
        if os.path.exists(os.path.join(extract_destination_path, pack_name))
    ]

    # Starting iteration over packs
    for pack in packs_list:
        # Indicates whether the pack failed to upload in the Prepare Content step
        task_status, pack_status = pack.is_failed_to_upload(
            pc_failed_packs_dict)
        if task_status:
            pack.status = pack_status
            pack.cleanup()
            continue

        task_status = pack.load_user_metadata(marketplace)
        if not task_status:
            pack.status = PackStatus.FAILED_LOADING_USER_METADATA.name
            pack.cleanup()
            continue

        task_status = pack.copy_integration_images(production_bucket,
                                                   build_bucket,
                                                   pc_uploaded_images,
                                                   production_base_path,
                                                   build_bucket_base_path)
        if not task_status:
            pack.status = PackStatus.FAILED_IMAGES_UPLOAD.name
            pack.cleanup()
            continue

        task_status = pack.copy_author_image(production_bucket, build_bucket,
                                             pc_uploaded_images,
                                             production_base_path,
                                             build_bucket_base_path)
        if not task_status:
            pack.status = PackStatus.FAILED_AUTHOR_IMAGE_UPLOAD.name
            pack.cleanup()
            continue

        task_status, skipped_pack_uploading = pack.copy_and_upload_to_storage(
            production_bucket, build_bucket, pc_successful_packs_dict,
            production_base_path, build_bucket_base_path)
        if skipped_pack_uploading:
            pack.status = PackStatus.PACK_ALREADY_EXISTS.name
            pack.cleanup()
            continue

        if not task_status:
            pack.status = PackStatus.FAILED_UPLOADING_PACK.name
            pack.cleanup()
            continue

        pack.status = PackStatus.SUCCESS.name

    # upload core packs json to bucket
    upload_core_packs_config(production_bucket, build_number,
                             extract_destination_path, build_bucket,
                             production_base_path, build_bucket_base_path)

    # finished iteration over content packs
    copy_index(build_index_folder_path, build_index_blob,
               build_index_generation, production_bucket, build_bucket,
               production_base_path, build_bucket_base_path)

    # upload id_set.json to bucket
    copy_id_set(production_bucket, build_bucket, production_base_path,
                build_bucket_base_path)

    # get the lists of packs divided by their status
    successful_packs, skipped_packs, failed_packs = get_packs_summary(
        packs_list)

    # Store successful and failed packs list in CircleCI artifacts
    store_successful_and_failed_packs_in_ci_artifacts(
        packs_results_file_path,
        BucketUploadFlow.UPLOAD_PACKS_TO_MARKETPLACE_STORAGE, successful_packs,
        failed_packs, list(pc_successful_private_packs_dict))

    # verify that the packs reported as successful by Prepare Content are the ones that were copied
    verify_copy(successful_packs, pc_successful_packs_dict)

    # summary of packs status
    print_packs_summary(successful_packs, skipped_packs, failed_packs)
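verify_copy is expected to abort the run when the packs actually copied to production do not match the packs reported as successful by the Prepare Content step. Its real implementation is not shown above; the sketch below only illustrates that contract, and the pack.name attribute, the logging calls, and the sys.exit on mismatch are all assumptions.

import logging
import sys


def verify_copy(successful_packs, pc_successful_packs_dict):
    """Sketch: ensure the copied packs equal the Prepare Content successes."""
    copied_pack_names = {pack.name for pack in successful_packs}
    expected_pack_names = set(pc_successful_packs_dict.keys())
    if copied_pack_names != expected_pack_names:
        logging.critical(f"Mismatch between copied packs {copied_pack_names} "
                         f"and expected packs {expected_pack_names}")
        sys.exit(1)
    logging.info("Copied packs match the packs reported as successful.")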