Example #1
def upload_core_packs_config(production_bucket: Bucket, build_number: str,
                             extract_destination_path: str,
                             build_bucket: Bucket):
    """Uploads corepacks.json file configuration to bucket. Corepacks file includes core packs for server installation.

     Args:
        production_bucket (google.cloud.storage.bucket.Bucket): gcs bucket where core packs config is uploaded.
        build_number (str): CircleCI build number.
        extract_destination_path (str): Full path of folder to extract the corepacks file
        build_bucket (google.cloud.storage.bucket.Bucket): gcs bucket where core packs config is downloaded from.

    """
    # download the corepacks.json stored in the build bucket to temp dir
    build_corepacks_file_path = os.path.join(GCPConfig.BUILD_BASE_PATH,
                                             GCPConfig.CORE_PACK_FILE_NAME)
    build_corepacks_blob = build_bucket.blob(build_corepacks_file_path)

    if not build_corepacks_blob.exists():
        logging.critical(
            f"{GCPConfig.CORE_PACK_FILE_NAME} is missing in {build_bucket.name} bucket, exiting..."
        )
        sys.exit(1)

    temp_corepacks_file_path = os.path.join(extract_destination_path,
                                            GCPConfig.CORE_PACK_FILE_NAME)
    build_corepacks_blob.download_to_filename(temp_corepacks_file_path)
    corepacks_file = load_json(temp_corepacks_file_path)

    # change the storage paths to the prod bucket
    corepacks_list = corepacks_file.get('corePacks', [])
    try:
        corepacks_list = [
            os.path.join(GCPConfig.GCS_PUBLIC_URL, production_bucket.name,
                         GCPConfig.STORAGE_BASE_PATH,
                         LATEST_ZIP_REGEX.findall(corepack_path)[0])
            for corepack_path in corepacks_list
        ]
    except IndexError:
        corepacks_list_str = '\n'.join(corepacks_list)
        logging.exception(
            f"GCS paths in build bucket corepacks.json file are not of format: "
            f"{GCPConfig.GCS_PUBLIC_URL}/<BUCKET_NAME>/.../content/packs/...\n"
            f"List of build bucket corepacks paths:\n{corepacks_list_str}")
        sys.exit(1)

    # construct core pack data with public gcs urls
    core_packs_data = {
        'corePacks': corepacks_list,
        'buildNumber': build_number
    }

    # upload core pack json file to gcs
    prod_corepacks_file_path = os.path.join(GCPConfig.STORAGE_BASE_PATH,
                                            GCPConfig.CORE_PACK_FILE_NAME)
    prod_corepacks_blob = production_bucket.blob(prod_corepacks_file_path)
    prod_corepacks_blob.upload_from_string(
        json.dumps(core_packs_data, indent=4))

    logging.success(
        f"Finished uploading {GCPConfig.CORE_PACK_FILE_NAME} to storage.")
Example #2
def get_successful_and_failed_packs(packs_results_file_path):
    """ Loads the packs_results.json file to get the successful and failed packs dicts

    Args:
        packs_results_file_path: The path to the file

    Returns:
        dict: The successful packs dict
        dict: The failed packs dict

    """
    if os.path.exists(packs_results_file_path):
        packs_results_file = load_json(packs_results_file_path)
        successful_packs_dict = packs_results_file.get('successful_packs', {})
        failed_packs_dict = packs_results_file.get('failed_packs', {})
        return successful_packs_dict, failed_packs_dict
    return {}, {}
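Based on the keys read above, packs_results.json is presumably a JSON object with top-level 'successful_packs' and 'failed_packs' mappings. A hedged usage sketch follows, with a minimal stand-in for the repo's load_json helper (assumed to simply parse a JSON file) so the snippet is self-contained:

import json
import os
import tempfile

def load_json(file_path):
    """Minimal stand-in for the repo's load_json utility (assumed behavior)."""
    with open(file_path, 'r') as f:
        return json.load(f)

# write a sample file with the assumed schema, then read it back
sample = {
    'successful_packs': {'Base': {'status': 'uploaded'}},
    'failed_packs': {'BrokenPack': {'status': 'failed'}},
}
results_path = os.path.join(tempfile.mkdtemp(), 'packs_results.json')
with open(results_path, 'w') as f:
    json.dump(sample, f)

successful, failed = get_successful_and_failed_packs(results_path)
assert 'Base' in successful and 'BrokenPack' in failed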
Example #3
def main():
    install_logging('upload_packs_private.log')
    upload_config = option_handler()
    packs_artifacts_path = upload_config.artifacts_path
    extract_destination_path = upload_config.extract_path
    storage_bucket_name = upload_config.bucket_name
    private_bucket_name = upload_config.private_bucket_name
    service_account = upload_config.service_account
    target_packs = upload_config.pack_names
    build_number = upload_config.ci_build_number
    id_set_path = upload_config.id_set_path
    packs_dependencies_mapping = load_json(upload_config.pack_dependencies) if upload_config.pack_dependencies else {}
    storage_base_path = upload_config.storage_base_path
    is_private_build = upload_config.is_private

    print(f"Packs artifact path is: {packs_artifacts_path}")

    prepare_test_directories(packs_artifacts_path)

    # initialize the google cloud storage client
    storage_client = init_storage_client(service_account)
    storage_bucket = storage_client.bucket(storage_bucket_name)
    private_storage_bucket = storage_client.bucket(private_bucket_name)
    default_storage_bucket = private_storage_bucket if is_private_build else storage_bucket

    # download and extract index from public bucket
    index_folder_path, index_blob, index_generation = download_and_extract_index(storage_bucket,
                                                                                 extract_destination_path)

    # initialize the content repo client
    if not is_private_build:
        content_repo = get_content_git_client(CONTENT_ROOT_PATH)
        current_commit_hash, remote_previous_commit_hash = get_recent_commits_data(content_repo, index_folder_path,
                                                                                   is_bucket_upload_flow=False,
                                                                                   is_private_build=True,
                                                                                   force_previous_commit="")
    else:
        current_commit_hash, remote_previous_commit_hash = "", ""
        content_repo = None

    if storage_base_path:
        GCPConfig.STORAGE_BASE_PATH = storage_base_path

    # detect packs to upload
    pack_names = get_packs_names(target_packs)
    extract_packs_artifacts(packs_artifacts_path, extract_destination_path)
    packs_list = [Pack(pack_name, os.path.join(extract_destination_path, pack_name)) for pack_name in pack_names
                  if os.path.exists(os.path.join(extract_destination_path, pack_name))]

    if not is_private_build:
        check_if_index_is_updated(index_folder_path, content_repo, current_commit_hash, remote_previous_commit_hash,
                                  storage_bucket)

    if private_bucket_name:  # Add private packs to the index
        private_packs, private_index_path, private_index_blob = update_index_with_priced_packs(private_storage_bucket,
                                                                                               extract_destination_path,
                                                                                               index_folder_path,
                                                                                               pack_names,
                                                                                               is_private_build)
    else:  # skipping private packs
        logging.info("Skipping index update of priced packs")
        private_packs = []

    # initialize the google cloud bigquery client
    packs_statistic_df = None
    if not is_private_build:
        bq_client = init_bigquery_client(service_account)
        packs_statistic_df = get_packs_statistics_dataframe(bq_client)

    # clean non-existing or invalid packs from the index and GCS
    clean_non_existing_packs(index_folder_path, private_packs, default_storage_bucket)
    # starting iteration over packs
    for pack in packs_list:
        create_and_upload_marketplace_pack(upload_config, pack, storage_bucket, index_folder_path,
                                           packs_dependencies_mapping,
                                           private_storage_bucket=private_storage_bucket, content_repo=content_repo,
                                           current_commit_hash=current_commit_hash,
                                           remote_previous_commit_hash=remote_previous_commit_hash,
                                           packs_statistic_df=packs_statistic_df)
    # upload the core packs json to the bucket
    if should_upload_core_packs(storage_bucket_name):
        upload_core_packs_config(default_storage_bucket, build_number, index_folder_path)

    # finished iterating over content packs; upload the index to storage
    if is_private_build:
        delete_public_packs_from_index(index_folder_path)
        upload_index_to_storage(index_folder_path, extract_destination_path, private_index_blob, build_number,
                                private_packs, current_commit_hash, index_generation, is_private_build)
    else:
        upload_index_to_storage(index_folder_path, extract_destination_path, index_blob, build_number,
                                private_packs, current_commit_hash, index_generation)

    # upload id_set.json to bucket
    upload_id_set(default_storage_bucket, id_set_path)

    # get the lists of packs divided by their status
    successful_packs, skipped_packs, failed_packs = get_packs_summary(packs_list)

    # summary of packs status
    print_packs_summary(successful_packs, skipped_packs, failed_packs)
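main() leans on a number of repo helpers that are not shown here (option_handler, init_storage_client, init_bigquery_client, and so on). As one illustrative example, here is a plausible init_storage_client, assuming service_account is the path to a service-account JSON key file; when it is empty, the client falls back to Application Default Credentials:

from google.cloud import storage

def init_storage_client(service_account=None):
    """Sketch of a GCS client initializer (assumed helper behavior, not the
    repo's actual implementation)."""
    if service_account:
        # authenticate with an explicit service-account key file
        return storage.Client.from_service_account_json(service_account)
    # fall back to Application Default Credentials (e.g. on CI machines)
    return storage.Client()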