def install_all_content_packs_for_nightly(client: demisto_client, host: str,
                                          service_account: str):
    """ Iterates over the packs currently located in the Packs directory. Wrapper for install_packs.
    Retrieves the latest version of each pack from the production bucket.

    :param client: Demisto-py client to connect to the server.
    :param host: FQDN of the server.
    :param service_account: The full path to the service account json.
    :return: None. Prints the response from the server in the build.
    """
    all_packs = []

    # Initiate the GCS client and get the production bucket
    storage_client = init_storage_client(service_account)
    production_bucket = storage_client.bucket(GCPConfig.PRODUCTION_BUCKET)
    logging.debug(
        f"Installing all content packs for nightly flow in server {host}")

    # Add deprecated packs to IGNORED_FILES list:
    for pack_id in os.listdir(PACKS_FULL_PATH):
        if is_pack_hidden(pack_id):
            logging.debug(f'Skipping installation of hidden pack "{pack_id}"')
            IGNORED_FILES.append(pack_id)

    for pack_id in os.listdir(PACKS_FULL_PATH):
        if pack_id not in IGNORED_FILES:
            pack_version = get_latest_version_from_bucket(
                pack_id, production_bucket)
            if pack_version:
                all_packs.append(
                    get_pack_installation_request_data(pack_id, pack_version))
    install_packs(client, host, all_packs, is_nightly=True)
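Every example on this page calls the same init_storage_client helper before touching a bucket. The helper itself is not shown here; the following is a minimal sketch of what it could look like, assuming it is a thin wrapper around google.cloud.storage.Client (the fallback to Application Default Credentials is an assumption):

from google.cloud import storage


def init_storage_client(service_account: str = None):
    """Initialize a Google Cloud Storage client.

    If a service-account JSON file is provided, authenticate with it explicitly;
    otherwise fall back to Application Default Credentials (assumed behavior).
    """
    if service_account:
        return storage.Client.from_service_account_json(service_account)
    return storage.Client()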
Example #2
def main():
    option = option_handler()
    storage_bucket_name = option.bucket_name
    zip_path = option.zip_path
    artifacts_path = option.artifacts_path
    service_account = option.service_account
    circle_build = option.circle_build
    branch_name = option.branch_name
    gcp_path = option.gcp_path
    remove_test_playbooks = option.remove_test_playbooks

    # google cloud storage client initialized
    storage_client = init_storage_client(service_account)
    storage_bucket = storage_client.bucket(storage_bucket_name)

    if not circle_build or not branch_name:
        # Ignore build properties
        circle_build = ''
        branch_name = ''

    if not gcp_path:
        gcp_path = BUILD_GCP_PATH

    zipped_packs = []
    success = True
    try:
        zipped_packs = download_packs_from_gcp(storage_bucket, gcp_path,
                                               zip_path, circle_build,
                                               branch_name)
    except Exception as e:
        print_error(f'Failed downloading packs: {e}')
        success = False

    if remove_test_playbooks:
        try:
            remove_test_playbooks_if_exist(zip_path, zipped_packs)
        except Exception as e:
            print_error(f'Failed removing test playbooks from packs: {e}')
            success = False

    if zipped_packs and success:
        try:
            zip_packs(zipped_packs, zip_path)
        except Exception as e:
            print_error(f'Failed zipping packs: {e}')
            success = False

        if success:
            print_success('Successfully zipped packs.')
            if artifacts_path:
                # Save in the artifacts
                shutil.copy(os.path.join(zip_path, ARTIFACT_NAME),
                            os.path.join(artifacts_path, ARTIFACT_NAME))
        else:
            print_error('Failed zipping packs.')
            sys.exit(1)
    else:
        print_warning('Did not find any packs to zip.')

    cleanup(zip_path)
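The cleanup call above removes the temporary working directory once the zipped artifact has been copied away. A possible implementation, assuming nothing but the directory itself needs to be deleted:

import os
import shutil


def cleanup(zip_path: str):
    """Remove the temporary directory used for downloading and zipping packs."""
    if os.path.isdir(zip_path):
        shutil.rmtree(zip_path, ignore_errors=True)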
def upload_files_to_google_cloud_storage(
    service_account: str,
    bucket_name: str,
    source_file_name: str,
    minimal_file_name: str,
    destination_blob_dir: str,
    last_updated: str,
):
    """Upload files to the bucket."""

    updated = datetime.strptime(last_updated, TIMESTAMP_FORMAT_SECONDS)
    updated_date = updated.strftime(DATE_FORMAT)

    files_to_upload = [
        (f'{destination_blob_dir}/coverage.json', source_file_name),
        (f'{destination_blob_dir}/history/coverage/{updated_date}.json',
         source_file_name),
        (f'{destination_blob_dir}/coverage-min.json', minimal_file_name),
        (f'{destination_blob_dir}/history/coverage-min/{updated_date}.json',
         minimal_file_name),
    ]

    # google cloud storage client initialized
    storage_client = init_storage_client(service_account)
    bucket = storage_client.bucket(bucket_name)

    for path_in_bucket, local_path in files_to_upload:
        upload_file_to_bucket(bucket_obj=bucket,
                              path_in_bucket=path_in_bucket,
                              local_path=local_path)
        print("File {} uploaded to {}.".format(local_path, path_in_bucket))
Example #4
def get_index_json_data(service_account: str, production_bucket_name: str, extract_path: str, storage_base_path: str) \
        -> (dict, str):
    """Retrieve the index.json file from production bucket.

    Args:
        service_account: Path to gcloud service account
        production_bucket_name: Production bucket name
        extract_path: Full path of folder to extract the index.zip to
        storage_base_path: The base path in the bucket

    Returns:
        (Dict: content of the index.json, Str: path to index.json)
    """
    logging.info('Downloading and extracting index.zip from the cloud')

    storage_client = init_storage_client(service_account)
    production_bucket = storage_client.bucket(production_bucket_name)
    index_folder_path, _, _ = download_and_extract_index(
        production_bucket, extract_path, storage_base_path)

    logging.info("Retrieving the index file")
    index_file_path = os.path.join(index_folder_path,
                                   f"{GCPConfig.INDEX_NAME}.json")
    index_data = load_json(index_file_path)

    return index_data, index_file_path
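get_index_json_data relies on load_json to read the extracted index file. A plausible implementation, assuming the helper should return an empty dict instead of raising when the file is missing or malformed:

import json
import os


def load_json(file_path: str) -> dict:
    """Read a JSON file and return its content, or an empty dict on failure."""
    try:
        if file_path and os.path.exists(file_path):
            with open(file_path) as json_file:
                return json.load(json_file)
    except json.JSONDecodeError:
        pass
    return {}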
def main():
    install_logging('Prepare_Content_Packs_For_Testing.log')
    packs_dir = '/home/runner/work/content-private/content-private/content/artifacts/packs'
    temp_dir = '/home/runner/work/content-private/content-private/content/temp-dir'
    if not os.path.exists(packs_dir):
        os.mkdir(packs_dir)
    if not os.path.exists(temp_dir):
        os.mkdir(temp_dir)
    upload_config = option_handler()
    path_to_artifacts = upload_config.artifacts_path
    extract_destination_path = upload_config.extract_path
    service_account = upload_config.service_account
    pack_name = upload_config.pack_names
    storage_base_path = upload_config.storage_base_path

    storage_client = init_storage_client(service_account)
    private_testing_bucket_client = storage_client.bucket(
        GCPConfig.CI_PRIVATE_BUCKET)

    extract_packs_artifacts(path_to_artifacts, extract_destination_path)
    path_to_pack = os.path.join(extract_destination_path, pack_name)
    premium_pack = Pack(pack_name, path_to_pack)

    upload_premium_pack_to_private_testing_bucket(
        premium_pack, pack_name, private_testing_bucket_client,
        storage_base_path)
def main():
    options = option_handler()
    service_account = options.service_account
    storage_client = init_storage_client(service_account)
    public_bucket_name = options.public_bucket_name
    public_storage_bucket = storage_client.bucket(public_bucket_name)
    private_id_set = download_private_id_set_from_gcp(public_storage_bucket)
    return private_id_set
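download_private_id_set_from_gcp is not included in these examples. A hypothetical sketch of it; the blob path and the local destination are assumptions, not taken from the code above:

import os


def download_private_id_set_from_gcp(public_storage_bucket) -> str:
    """Download the private id_set.json from the bucket and return its local path."""
    blob = public_storage_bucket.blob('content/private_id_set.json')  # assumed blob path
    local_path = os.path.join('/tmp', 'private_id_set.json')  # assumed destination
    blob.download_to_filename(local_path)
    return local_path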
def install_all_content_packs_from_build_bucket(client: demisto_client,
                                                host: str, server_version: str,
                                                bucket_packs_root_path: str,
                                                service_account: str,
                                                extract_destination_path: str):
    """ Iterates over the packs currently located in the Build bucket. Wrapper for install_packs.
    Retrieves the metadata of the latest version of each pack from the index.zip of the build bucket.

    :param client: Demisto-py client to connect to the server.
    :param host: FQDN of the server.
    :param server_version: The version of the server the packs are installed on.
    :param bucket_packs_root_path: The prefix to the root of packs in the bucket
    :param service_account: Google Service Account
    :param extract_destination_path: the full path of extract folder for the index.
    :return: None. Prints the response from the server in the build.
    """
    all_packs = []
    logging.debug(
        f"Installing all content packs in server {host} from packs path {bucket_packs_root_path}"
    )

    storage_client = init_storage_client(service_account)
    build_bucket = storage_client.bucket(GCPConfig.CI_BUILD_BUCKET)
    index_folder_path, _, _ = download_and_extract_index(
        build_bucket, extract_destination_path, bucket_packs_root_path)

    for pack_id in os.listdir(index_folder_path):
        if os.path.isdir(os.path.join(index_folder_path, pack_id)):
            metadata_path = os.path.join(index_folder_path, pack_id,
                                         Pack.METADATA)
            pack_metadata = load_json(metadata_path)
            if 'partnerId' in pack_metadata:  # not installing private packs
                continue
            pack_version = pack_metadata.get(
                Metadata.CURRENT_VERSION, Metadata.SERVER_DEFAULT_MIN_VERSION)
            server_min_version = pack_metadata.get(
                Metadata.SERVER_MIN_VERSION,
                Metadata.SERVER_DEFAULT_MIN_VERSION)
            hidden = pack_metadata.get(Metadata.HIDDEN, False)
            # Install the pack only if the server version meets the pack's minimum required server version and the
            # pack is not hidden (deprecated):
            if ('Master' in server_version or LooseVersion(server_version) >= LooseVersion(server_min_version)) and \
                    not hidden:
                logging.debug(f"Appending pack id {pack_id}")
                all_packs.append(
                    get_pack_installation_request_data(pack_id, pack_version))
            else:
                reason = 'Is hidden' if hidden else f'min server version is {server_min_version}'
                logging.debug(
                    f'Pack: {pack_id} with version: {pack_version} will not be installed on {host}. '
                    f'Pack {reason}.')
    return install_packs(client, host, all_packs)
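Both install wrappers build their request payload with get_pack_installation_request_data. A minimal sketch, assuming the marketplace install API expects one {"id", "version"} object per pack:

def get_pack_installation_request_data(pack_id: str, pack_version: str) -> dict:
    """Build the installation request entry for a single pack."""
    return {
        'id': pack_id,
        'version': pack_version,
    }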
Example #8
def main():
    option = option_handler()
    storage_bucket_name = option.bucket_name
    service_account = option.service_account
    pack_name = option.pack_name
    branch_name = option.branch_name
    pack_version = option.pack_version
    snapshot_path = option.snapshot_path
    git_repo = option.git_repo
    git_org = option.git_org

    # google cloud storage client initialized
    storage_client = init_storage_client(service_account)
    storage_bucket = storage_client.bucket(storage_bucket_name)

    upload_git_snapshot(snapshot_path, pack_name, branch_name, pack_version,
                        storage_bucket, git_repo, git_org)
Example #9
def main():
    install_logging('prepare_public_index_for_private_testing.log')
    upload_config = option_handler()
    service_account = upload_config.service_account
    build_number = upload_config.ci_build_number
    public_bucket_name = upload_config.public_bucket_name
    private_bucket_name = upload_config.private_bucket_name
    storage_base_path = upload_config.storage_base_path
    extract_public_index_path = upload_config.extract_public_index_path
    changed_pack = upload_config.pack_name
    extract_destination_path = upload_config.extract_artifacts_path
    packs_artifacts_path = upload_config.artifacts_path
    dummy_index_dir_path = upload_config.dummy_index_dir_path
    dummy_index_path = os.path.join(dummy_index_dir_path, 'index.zip')
    dummy_index_lock_path = os.path.join(dummy_index_dir_path, 'lock.txt')

    storage_client = init_storage_client(service_account)
    public_storage_bucket = storage_client.bucket(public_bucket_name)
    private_storage_bucket = storage_client.bucket(private_bucket_name)

    dummy_index_blob = public_storage_bucket.blob(dummy_index_path)

    with lock_and_unlock_dummy_index(public_storage_bucket,
                                     dummy_index_lock_path):
        if storage_base_path:
            GCPConfig.STORAGE_BASE_PATH = storage_base_path

        extract_packs_artifacts(packs_artifacts_path, extract_destination_path)
        public_index_folder_path, public_index_blob, _ = download_and_extract_index(
            public_storage_bucket, extract_public_index_path)

        # In order for the packs to be downloaded successfully, their price has to be 0
        change_packs_price_to_zero(public_index_folder_path)

        private_packs, private_index_path, private_index_blob = update_index_with_priced_packs(
            private_storage_bucket, extract_destination_path,
            public_index_folder_path, changed_pack, True)
        private_packs = add_private_packs_from_dummy_index(
            private_packs, dummy_index_blob)
        upload_modified_index(public_index_folder_path,
                              extract_public_index_path, dummy_index_blob,
                              build_number, private_packs)
Example #10
def main():

    upload_config = option_handler()
    service_account = upload_config.service_account
    build_number = upload_config.ci_build_number
    public_bucket_name = upload_config.public_bucket_name
    private_bucket_name = upload_config.private_bucket_name
    storage_base_path = upload_config.storage_base_path
    extract_public_index_path = upload_config.extract_public_index_path
    changed_pack = upload_config.pack_name
    extract_destination_path = upload_config.extract_artifacts_path
    packs_artifacts_path = upload_config.artifacts_path
    dummy_index_path = upload_config.dummy_index_path

    storage_client = init_storage_client(service_account)
    public_storage_bucket = storage_client.bucket(public_bucket_name)
    private_storage_bucket = storage_client.bucket(private_bucket_name)

    dummy_index_blob = public_storage_bucket.blob(dummy_index_path)

    if storage_base_path:
        GCPConfig.STORAGE_BASE_PATH = storage_base_path

    extract_packs_artifacts(packs_artifacts_path, extract_destination_path)
    public_index_folder_path, public_index_blob, _ = download_and_extract_index(public_storage_bucket,
                                                                                extract_public_index_path)

    # In order for the packs to be downloaded successfully, their price has to be 0
    change_packs_price_to_zero(public_index_folder_path)

    private_packs, private_index_path, private_index_blob = update_index_with_priced_packs(private_storage_bucket,
                                                                                           extract_destination_path,
                                                                                           public_index_folder_path,
                                                                                           changed_pack, True)

    upload_modified_index(public_index_folder_path, extract_public_index_path, dummy_index_blob, build_number,
                          private_packs)
Example #11
def main():
    install_logging('Copy and Upload Packs.log')
    options = options_handler()
    packs_artifacts_path = options.artifacts_path
    extract_destination_path = options.extract_path
    production_bucket_name = options.production_bucket_name
    build_bucket_name = options.build_bucket_name
    service_account = options.service_account
    build_number = options.ci_build_number
    circle_branch = options.circle_branch
    production_base_path = options.production_base_path
    target_packs = options.pack_names

    # Google cloud storage client initialized
    storage_client = init_storage_client(service_account)
    production_bucket = storage_client.bucket(production_bucket_name)
    build_bucket = storage_client.bucket(build_bucket_name)

    # Initialize base paths
    build_bucket_path = os.path.join(GCPConfig.BUILD_PATH_PREFIX,
                                     circle_branch, build_number)
    GCPConfig.BUILD_BASE_PATH = os.path.join(build_bucket_path,
                                             GCPConfig.STORAGE_BASE_PATH)
    if production_base_path:
        GCPConfig.STORAGE_BASE_PATH = production_base_path

    # Download and extract build index from build and prod buckets
    build_index_folder_path, build_index_blob, build_index_generation = \
        download_and_extract_index(build_bucket, extract_destination_path)

    # Get the successful and failed packs file from the Prepare Content step in the Create Instances job, if it exists
    pc_successful_packs_dict, pc_failed_packs_dict = get_successful_and_failed_packs(
        os.path.join(os.path.dirname(packs_artifacts_path),
                     PACKS_RESULTS_FILE))

    # Check whether an upload is needed
    check_if_need_to_upload(pc_successful_packs_dict, pc_failed_packs_dict)

    # Detect packs to upload
    pack_names = get_pack_names(target_packs)
    extract_packs_artifacts(packs_artifacts_path, extract_destination_path)
    packs_list = [
        Pack(pack_name, os.path.join(extract_destination_path, pack_name))
        for pack_name in pack_names
        if os.path.exists(os.path.join(extract_destination_path, pack_name))
    ]

    # Starting iteration over packs
    for pack in packs_list:
        # Indicates whether a pack has failed to upload on Prepare Content step
        task_status, pack_status = pack.is_failed_to_upload(
            pc_failed_packs_dict)
        if task_status:
            pack.status = pack_status
            pack.cleanup()
            continue

        task_status, user_metadata = pack.load_user_metadata()
        if not task_status:
            pack.status = PackStatus.FAILED_LOADING_USER_METADATA.value
            pack.cleanup()
            continue

        task_status = pack.copy_integration_images(production_bucket,
                                                   build_bucket)
        if not task_status:
            pack.status = PackStatus.FAILED_IMAGES_UPLOAD.name
            pack.cleanup()
            continue

        task_status = pack.copy_author_image(production_bucket, build_bucket)
        if not task_status:
            pack.status = PackStatus.FAILED_AUTHOR_IMAGE_UPLOAD.name
            pack.cleanup()
            continue

        # Create a local copy of the pack's index changelog
        task_status = pack.create_local_changelog(build_index_folder_path)
        if not task_status:
            pack.status = PackStatus.FAILED_RELEASE_NOTES.name
            pack.cleanup()
            continue

        task_status, skipped_pack_uploading = pack.copy_and_upload_to_storage(
            production_bucket, build_bucket, pack.latest_version,
            pc_successful_packs_dict)
        if skipped_pack_uploading:
            pack.status = PackStatus.PACK_ALREADY_EXISTS.name
            pack.cleanup()
            continue

        if not task_status:
            pack.status = PackStatus.FAILED_UPLOADING_PACK.name
            pack.cleanup()
            continue

        pack.status = PackStatus.SUCCESS.name

    # upload core packs json to bucket
    upload_core_packs_config(production_bucket, build_number,
                             extract_destination_path, build_bucket)

    # finished iteration over content packs
    copy_index(build_index_folder_path, build_index_blob,
               build_index_generation, production_bucket, build_bucket)

    # upload id_set.json to bucket
    copy_id_set(production_bucket, build_bucket)

    # get the lists of packs divided by their status
    successful_packs, skipped_packs, failed_packs = get_packs_summary(
        packs_list)

    # verify that the successful packs from Prepare Content are the ones that were copied
    verify_copy(successful_packs, pc_successful_packs_dict)

    # summary of packs status
    print_packs_summary(successful_packs, skipped_packs, failed_packs)
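Several of the main flows end by splitting the packs into successful, skipped and failed lists via get_packs_summary. A sketch of how that split could look, assuming PACK_ALREADY_EXISTS and PACK_IS_NOT_UPDATED_IN_RUNNING_BUILD are the only "skipped" statuses:

def get_packs_summary(packs_list):
    """Split the packs into successful, skipped and failed lists by their status."""
    # PackStatus is the status enum used by the examples above
    skipped_statuses = {
        PackStatus.PACK_ALREADY_EXISTS.name,
        PackStatus.PACK_IS_NOT_UPDATED_IN_RUNNING_BUILD.name,
    }
    successful_packs = [pack for pack in packs_list if pack.status == PackStatus.SUCCESS.name]
    skipped_packs = [pack for pack in packs_list if pack.status in skipped_statuses]
    failed_packs = [pack for pack in packs_list
                    if pack.status != PackStatus.SUCCESS.name and pack.status not in skipped_statuses]
    return successful_packs, skipped_packs, failed_packs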
Example #12
def main():
    install_logging('upload_packs_private.log')
    upload_config = option_handler()
    packs_artifacts_path = upload_config.artifacts_path
    extract_destination_path = upload_config.extract_path
    storage_bucket_name = upload_config.bucket_name
    private_bucket_name = upload_config.private_bucket_name
    service_account = upload_config.service_account
    target_packs = upload_config.pack_names
    build_number = upload_config.ci_build_number
    id_set_path = upload_config.id_set_path
    packs_dependencies_mapping = load_json(
        upload_config.pack_dependencies
    ) if upload_config.pack_dependencies else {}
    storage_base_path = upload_config.storage_base_path
    is_private_build = upload_config.encryption_key and upload_config.encryption_key != ''
    landing_page_sections = load_json(LANDING_PAGE_SECTIONS_PATH)

    logging.info(f"Packs artifact path is: {packs_artifacts_path}")

    prepare_test_directories(packs_artifacts_path)

    # google cloud storage client initialized
    storage_client = init_storage_client(service_account)
    storage_bucket = storage_client.bucket(storage_bucket_name)
    private_storage_bucket = storage_client.bucket(private_bucket_name)
    default_storage_bucket = private_storage_bucket if is_private_build else storage_bucket

    # download and extract index from public bucket
    index_folder_path, index_blob, index_generation = download_and_extract_index(
        storage_bucket, extract_destination_path)

    # content repo client initialized
    if not is_private_build:
        content_repo = get_content_git_client(CONTENT_ROOT_PATH)
        current_commit_hash, remote_previous_commit_hash = get_recent_commits_data(
            content_repo,
            index_folder_path,
            is_bucket_upload_flow=False,
            is_private_build=True)
    else:
        current_commit_hash, remote_previous_commit_hash = "", ""
        content_repo = None

    if storage_base_path:
        GCPConfig.STORAGE_BASE_PATH = storage_base_path

    # detect packs to upload
    pack_names = get_packs_names(target_packs)
    extract_packs_artifacts(packs_artifacts_path, extract_destination_path)
    packs_list = [
        Pack(pack_name, os.path.join(extract_destination_path, pack_name))
        for pack_name in pack_names
        if os.path.exists(os.path.join(extract_destination_path, pack_name))
    ]

    if not is_private_build:
        check_if_index_is_updated(index_folder_path, content_repo,
                                  current_commit_hash,
                                  remote_previous_commit_hash, storage_bucket)

    if private_bucket_name:  # Add private packs to the index
        private_packs, private_index_path, private_index_blob = update_index_with_priced_packs(
            private_storage_bucket, extract_destination_path,
            index_folder_path, pack_names, is_private_build)
    else:  # skipping private packs
        logging.info("Skipping index update of priced packs")
        private_packs = []

    # google cloud bigquery client initialized
    packs_statistic_df = None
    if not is_private_build:
        bq_client = init_bigquery_client(service_account)
        packs_statistic_df = get_packs_statistics_dataframe(bq_client)

    # clean index and gcs from non existing or invalid packs
    clean_non_existing_packs(index_folder_path, private_packs,
                             default_storage_bucket)
    # starting iteration over packs
    for pack in packs_list:
        create_and_upload_marketplace_pack(
            upload_config,
            pack,
            storage_bucket,
            index_folder_path,
            packs_dependencies_mapping,
            private_bucket_name,
            private_storage_bucket=private_storage_bucket,
            content_repo=content_repo,
            current_commit_hash=current_commit_hash,
            remote_previous_commit_hash=remote_previous_commit_hash,
            packs_statistic_df=packs_statistic_df)
    # upload core packs json to bucket

    if should_upload_core_packs(storage_bucket_name):
        upload_core_packs_config(default_storage_bucket, build_number,
                                 index_folder_path)
    # finished iteration over content packs
    if is_private_build:
        delete_public_packs_from_index(index_folder_path)
        upload_index_to_storage(index_folder_path,
                                extract_destination_path,
                                private_index_blob,
                                build_number,
                                private_packs,
                                current_commit_hash,
                                index_generation,
                                is_private_build,
                                landing_page_sections=landing_page_sections)

    else:
        upload_index_to_storage(index_folder_path,
                                extract_destination_path,
                                index_blob,
                                build_number,
                                private_packs,
                                current_commit_hash,
                                index_generation,
                                landing_page_sections=landing_page_sections)

    # upload id_set.json to bucket
    upload_id_set(default_storage_bucket, id_set_path)

    # get the lists of packs divided by their status
    successful_packs, skipped_packs, failed_packs = get_packs_summary(
        packs_list)

    # summary of packs status
    print_packs_summary(successful_packs, skipped_packs, failed_packs)
Example #13
def main():
    install_logging('Prepare_Content_Packs_For_Testing.log')
    option = option_handler()
    packs_artifacts_path = option.artifacts_path
    extract_destination_path = option.extract_path
    storage_bucket_name = option.bucket_name
    service_account = option.service_account
    target_packs = option.pack_names if option.pack_names else ""
    build_number = option.ci_build_number if option.ci_build_number else str(
        uuid.uuid4())
    override_all_packs = option.override_all_packs
    signature_key = option.key_string
    id_set_path = option.id_set_path
    packs_dependencies_mapping = load_json(
        option.pack_dependencies) if option.pack_dependencies else {}
    storage_base_path = option.storage_base_path
    remove_test_playbooks = option.remove_test_playbooks
    is_bucket_upload_flow = option.bucket_upload
    private_bucket_name = option.private_bucket_name
    circle_branch = option.circle_branch
    force_upload = option.force_upload

    # google cloud storage client initialized
    storage_client = init_storage_client(service_account)
    storage_bucket = storage_client.bucket(storage_bucket_name)

    if storage_base_path:
        GCPConfig.STORAGE_BASE_PATH = storage_base_path

    # download and extract index from public bucket
    index_folder_path, index_blob, index_generation = download_and_extract_index(
        storage_bucket, extract_destination_path)

    # content repo client initialized
    content_repo = get_content_git_client(CONTENT_ROOT_PATH)
    current_commit_hash, previous_commit_hash = get_recent_commits_data(
        content_repo, index_folder_path, is_bucket_upload_flow, circle_branch)

    # detect packs to upload
    pack_names = get_packs_names(target_packs, previous_commit_hash)
    extract_packs_artifacts(packs_artifacts_path, extract_destination_path)
    packs_list = [
        Pack(pack_name, os.path.join(extract_destination_path, pack_name))
        for pack_name in pack_names
        if os.path.exists(os.path.join(extract_destination_path, pack_name))
    ]

    if not option.override_all_packs:
        check_if_index_is_updated(index_folder_path, content_repo,
                                  current_commit_hash, previous_commit_hash,
                                  storage_bucket)

    # google cloud bigquery client initialized
    bq_client = init_bigquery_client(service_account)
    packs_statistic_df = get_packs_statistics_dataframe(bq_client)
    updated_private_packs_ids = []
    if private_bucket_name:  # Add private packs to the index
        private_storage_bucket = storage_client.bucket(private_bucket_name)
        private_packs, _, _, updated_private_packs_ids = update_index_with_priced_packs(
            private_storage_bucket, extract_destination_path,
            index_folder_path, pack_names)
    else:  # skipping private packs
        logging.debug("Skipping index update of priced packs")
        private_packs = []

    # clean index and gcs from non existing or invalid packs
    clean_non_existing_packs(index_folder_path, private_packs, storage_bucket)

    # starting iteration over packs
    for pack in packs_list:
        task_status, user_metadata = pack.load_user_metadata()
        if not task_status:
            pack.status = PackStatus.FAILED_LOADING_USER_METADATA.value
            pack.cleanup()
            continue

        task_status, pack_content_items = pack.collect_content_items()
        if not task_status:
            pack.status = PackStatus.FAILED_COLLECT_ITEMS.name
            pack.cleanup()
            continue

        task_status, integration_images = pack.upload_integration_images(
            storage_bucket)
        if not task_status:
            pack.status = PackStatus.FAILED_IMAGES_UPLOAD.name
            pack.cleanup()
            continue

        task_status, author_image = pack.upload_author_image(storage_bucket)
        if not task_status:
            pack.status = PackStatus.FAILED_AUTHOR_IMAGE_UPLOAD.name
            pack.cleanup()
            continue

        task_status, pack_was_modified = pack.detect_modified(
            content_repo, index_folder_path, current_commit_hash,
            previous_commit_hash)
        if not task_status:
            pack.status = PackStatus.FAILED_DETECTING_MODIFIED_FILES.name
            pack.cleanup()
            continue

        task_status = pack.format_metadata(
            user_metadata=user_metadata,
            pack_content_items=pack_content_items,
            integration_images=integration_images,
            author_image=author_image,
            index_folder_path=index_folder_path,
            packs_dependencies_mapping=packs_dependencies_mapping,
            build_number=build_number,
            commit_hash=current_commit_hash,
            packs_statistic_df=packs_statistic_df,
            pack_was_modified=pack_was_modified)
        if not task_status:
            pack.status = PackStatus.FAILED_METADATA_PARSING.name
            pack.cleanup()
            continue

        task_status, not_updated_build = pack.prepare_release_notes(
            index_folder_path, build_number, pack_was_modified)
        if not task_status:
            pack.status = PackStatus.FAILED_RELEASE_NOTES.name
            pack.cleanup()
            continue

        if not_updated_build:
            pack.status = PackStatus.PACK_IS_NOT_UPDATED_IN_RUNNING_BUILD.name
            pack.cleanup()
            continue

        task_status = pack.remove_unwanted_files(remove_test_playbooks)
        if not task_status:
            pack.status = PackStatus.FAILED_REMOVING_PACK_SKIPPED_FOLDERS.name
            pack.cleanup()
            continue

        task_status = pack.sign_pack(signature_key)
        if not task_status:
            pack.status = PackStatus.FAILED_SIGNING_PACKS.name
            pack.cleanup()
            continue

        task_status, zip_pack_path = pack.zip_pack()
        if not task_status:
            pack.status = PackStatus.FAILED_ZIPPING_PACK_ARTIFACTS.name
            pack.cleanup()
            continue

        (task_status, skipped_pack_uploading, full_pack_path) = \
            pack.upload_to_storage(zip_pack_path, pack.latest_version,
                                   storage_bucket, override_all_packs
                                   or pack_was_modified)

        if not task_status:
            pack.status = PackStatus.FAILED_UPLOADING_PACK.name
            pack.cleanup()
            continue

        task_status, exists_in_index = pack.check_if_exists_in_index(
            index_folder_path)
        if not task_status:
            pack.status = PackStatus.FAILED_SEARCHING_PACK_IN_INDEX.name
            pack.cleanup()
            continue

        task_status = pack.prepare_for_index_upload()
        if not task_status:
            pack.status = PackStatus.FAILED_PREPARING_INDEX_FOLDER.name
            pack.cleanup()
            continue

        task_status = update_index_folder(index_folder_path=index_folder_path,
                                          pack_name=pack.name,
                                          pack_path=pack.path,
                                          pack_version=pack.latest_version,
                                          hidden_pack=pack.hidden)
        if not task_status:
            pack.status = PackStatus.FAILED_UPDATING_INDEX_FOLDER.name
            pack.cleanup()
            continue

        # in case the pack already exists at the cloud storage path and in the index, don't show that the pack was changed
        if skipped_pack_uploading and exists_in_index:
            pack.status = PackStatus.PACK_ALREADY_EXISTS.name
            pack.cleanup()
            continue

        pack.status = PackStatus.SUCCESS.name

    # upload core packs json to bucket
    upload_core_packs_config(storage_bucket, build_number, index_folder_path)

    # finished iteration over content packs
    upload_index_to_storage(index_folder_path=index_folder_path,
                            extract_destination_path=extract_destination_path,
                            index_blob=index_blob,
                            build_number=build_number,
                            private_packs=private_packs,
                            current_commit_hash=current_commit_hash,
                            index_generation=index_generation,
                            force_upload=force_upload,
                            previous_commit_hash=previous_commit_hash)

    # upload id_set.json to bucket
    upload_id_set(storage_bucket, id_set_path)

    # get the lists of packs divided by their status
    successful_packs, skipped_packs, failed_packs = get_packs_summary(
        packs_list)

    # Store successful and failed packs list in CircleCI artifacts - to be used in Upload Packs To Marketplace job
    packs_results_file_path = os.path.join(
        os.path.dirname(packs_artifacts_path),
        BucketUploadFlow.PACKS_RESULTS_FILE)
    store_successful_and_failed_packs_in_ci_artifacts(
        packs_results_file_path, BucketUploadFlow.PREPARE_CONTENT_FOR_TESTING,
        successful_packs, failed_packs, updated_private_packs_ids)

    # summary of packs status
    print_packs_summary(successful_packs, skipped_packs, failed_packs,
                        not is_bucket_upload_flow)
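Each of these scripts starts from option_handler (or options_handler in some of the examples), which is not included on this page. A trimmed argparse-based sketch covering a few of the flags the examples read; the flag names are assumptions inferred from the attribute accesses above:

import argparse


def option_handler():
    """Parse a subset of the command-line options used by the upload scripts above."""
    parser = argparse.ArgumentParser(description='Prepare and upload content packs.')
    parser.add_argument('-a', '--artifacts_path', required=True, help='Path to the packs artifacts')
    parser.add_argument('-e', '--extract_path', required=True, help='Folder to extract the packs into')
    parser.add_argument('-b', '--bucket_name', required=True, help='Target storage bucket name')
    parser.add_argument('-s', '--service_account', required=True, help='Path to the GCP service account JSON')
    parser.add_argument('-n', '--ci_build_number', required=False, help='CI build number')
    parser.add_argument('-p', '--pack_names', required=False, help='Comma-separated list of pack names')
    return parser.parse_args()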
Example #14
def main():
    install_logging('Zip_Content_Packs_From_GCS.log')
    option = option_handler()
    storage_bucket_name = option.bucket_name
    zip_path = option.zip_path
    artifacts_path = option.artifacts_path
    service_account = option.service_account
    circle_build = option.circle_build
    branch_name = option.branch_name
    gcp_path = option.gcp_path
    remove_test_playbooks = option.remove_test_playbooks
    private_build = option.private
    if private_build:
        packs_dir = '/home/runner/work/content-private/content-private/content/artifacts/packs'
        zip_path = '/home/runner/work/content-private/content-private/content/temp-dir'
        if not os.path.exists(packs_dir):
            logging.debug("Packs dir not found. Creating.")
            os.mkdir(packs_dir)
        if not os.path.exists(zip_path):
            logging.debug("Temp dir not found. Creating.")
            os.mkdir(zip_path)
        artifacts_path = '/home/runner/work/content-private/content-private/content/artifacts'

    # google cloud storage client initialized
    storage_client = init_storage_client(service_account)
    storage_bucket = storage_client.bucket(storage_bucket_name)

    if not circle_build or not branch_name:
        # Ignore build properties
        circle_build = ''
        branch_name = ''

    if not gcp_path:
        gcp_path = BUILD_GCP_PATH

    zipped_packs = []
    success = True
    try:
        zipped_packs = download_packs_from_gcp(storage_bucket, gcp_path,
                                               zip_path, circle_build,
                                               branch_name)
    except Exception:
        logging.exception('Failed downloading packs')
        success = False

    if remove_test_playbooks:
        try:
            remove_test_playbooks_if_exist(zip_path, zipped_packs)
        except Exception:
            logging.exception('Failed removing test playbooks from packs')
            success = False

    if zipped_packs and success:
        try:
            zip_packs(zipped_packs, zip_path)
        except Exception:
            logging.exception('Failed zipping packs')
            success = False

        if success:
            logging.info('Successfully zipped packs.')
            if artifacts_path:
                # Save in the artifacts
                shutil.copy(os.path.join(zip_path, ARTIFACT_NAME),
                            os.path.join(artifacts_path, ARTIFACT_NAME))
        else:
            logging.critical('Failed zipping packs.')
            sys.exit(1)
    else:
        logging.warning('Did not find any packs to zip.')

    cleanup(zip_path)
Example #15
def main():
    option = option_handler()
    packs_artifacts_path = option.artifacts_path
    extract_destination_path = option.extract_path
    storage_bucket_name = option.bucket_name
    private_bucket_name = option.private_bucket_name
    service_account = option.service_account
    target_packs = option.pack_names if option.pack_names else ""
    build_number = option.ci_build_number if option.ci_build_number else str(
        uuid.uuid4())
    override_pack = option.override_pack
    signature_key = option.key_string
    id_set_path = option.id_set_path
    packs_dependencies_mapping = load_json(
        option.pack_dependencies) if option.pack_dependencies else {}
    storage_bash_path = option.storage_bash_path
    remove_test_playbooks = option.remove_test_playbooks

    # google cloud storage client initialized
    storage_client = init_storage_client(service_account)
    storage_bucket = storage_client.bucket(storage_bucket_name)

    if storage_bash_path:
        GCPConfig.STORAGE_BASE_PATH = storage_bash_path

    # detect packs to upload
    modified_packs = get_modified_packs(target_packs)
    extract_packs_artifacts(packs_artifacts_path, extract_destination_path)
    packs_list = [
        Pack(pack_name, os.path.join(extract_destination_path, pack_name))
        for pack_name in modified_packs
        if os.path.exists(os.path.join(extract_destination_path, pack_name))
    ]

    # download and extract index from public bucket
    index_folder_path, index_blob = download_and_extract_index(
        storage_bucket, extract_destination_path)

    if private_bucket_name:  # Add private packs to the index
        private_storage_bucket = storage_client.bucket(private_bucket_name)
        private_packs = update_index_with_priced_packs(
            private_storage_bucket, extract_destination_path,
            index_folder_path)
    else:  # skipping private packs
        print("Skipping index update of priced packs")
        private_packs = []

    # clean index and gcs from non existing or invalid packs
    clean_non_existing_packs(index_folder_path, private_packs, storage_bucket)

    # starting iteration over packs
    for pack in packs_list:
        task_status, user_metadata = pack.load_user_metadata()
        if not task_status:
            pack.status = PackStatus.FAILED_LOADING_USER_METADATA.value
            pack.cleanup()
            continue

        task_status, pack_content_items = pack.collect_content_items()
        if not task_status:
            pack.status = PackStatus.FAILED_COLLECT_ITEMS.name
            pack.cleanup()
            continue

        task_status, integration_images = pack.upload_integration_images(
            storage_bucket)
        if not task_status:
            pack.status = PackStatus.FAILED_IMAGES_UPLOAD.name
            pack.cleanup()
            continue

        task_status, author_image = pack.upload_author_image(storage_bucket)
        if not task_status:
            pack.status = PackStatus.FAILED_AUTHOR_IMAGE_UPLOAD.name
            pack.cleanup()
            continue

        task_status = pack.format_metadata(
            user_metadata=user_metadata,
            pack_content_items=pack_content_items,
            integration_images=integration_images,
            author_image=author_image,
            index_folder_path=index_folder_path,
            packs_dependencies_mapping=packs_dependencies_mapping)
        if not task_status:
            pack.status = PackStatus.FAILED_METADATA_PARSING.name
            pack.cleanup()
            continue

        task_status = pack.prepare_release_notes(index_folder_path)
        if not task_status:
            pack.status = PackStatus.FAILED_RELEASE_NOTES.name
            pack.cleanup()
            continue

        task_status = pack.remove_unwanted_files(remove_test_playbooks)
        if not task_status:
            pack.status = PackStatus.FAILED_REMOVING_PACK_SKIPPED_FOLDERS.name
            pack.cleanup()
            continue

        task_status = pack.sign_pack(signature_key)
        if not task_status:
            pack.status = PackStatus.FAILED_SIGNING_PACKS.name
            pack.cleanup()
            continue

        task_status, zip_pack_path = pack.zip_pack()
        if not task_status:
            pack.status = PackStatus.FAILED_ZIPPING_PACK_ARTIFACTS.name
            pack.cleanup()
            continue

        task_status, skipped_pack_uploading = pack.upload_to_storage(
            zip_pack_path, pack.latest_version, storage_bucket, override_pack)
        if not task_status:
            pack.status = PackStatus.FAILED_UPLOADING_PACK.name
            pack.cleanup()
            continue

        # in case the pack already exists at the cloud storage path, skip further steps
        if skipped_pack_uploading:
            pack.status = PackStatus.PACK_ALREADY_EXISTS.name
            pack.cleanup()
            continue

        task_status = pack.prepare_for_index_upload()
        if not task_status:
            pack.status = PackStatus.FAILED_PREPARING_INDEX_FOLDER.name
            pack.cleanup()
            continue

        task_status = update_index_folder(index_folder_path=index_folder_path,
                                          pack_name=pack.name,
                                          pack_path=pack.path,
                                          pack_version=pack.latest_version,
                                          hidden_pack=pack.hidden)
        if not task_status:
            pack.status = PackStatus.FAILED_UPDATING_INDEX_FOLDER.name
            pack.cleanup()
            continue

        pack.status = PackStatus.SUCCESS.name

    # finished iteration over content packs
    upload_index_to_storage(index_folder_path, extract_destination_path,
                            index_blob, build_number, private_packs)

    # upload core packs json to bucket
    upload_core_packs_config(storage_bucket, packs_list, build_number)

    # upload id_set.json to bucket
    upload_id_set(storage_bucket, id_set_path)

    # summary of packs status
    print_packs_summary(packs_list)
Example #16
def main():
    install_logging('Copy_and_Upload_Packs.log', logger=logging)
    options = options_handler()
    packs_artifacts_path = options.artifacts_path
    extract_destination_path = options.extract_path
    production_bucket_name = options.production_bucket_name
    build_bucket_name = options.build_bucket_name
    service_account = options.service_account
    build_number = options.ci_build_number
    circle_branch = options.circle_branch
    production_base_path = options.production_base_path
    target_packs = options.pack_names
    marketplace = options.marketplace

    # Google cloud storage client initialized
    storage_client = init_storage_client(service_account)
    production_bucket = storage_client.bucket(production_bucket_name)
    build_bucket = storage_client.bucket(build_bucket_name)

    # Initialize build and prod base paths
    build_bucket_path = os.path.join(GCPConfig.BUILD_PATH_PREFIX,
                                     circle_branch, build_number, marketplace)
    build_bucket_base_path = os.path.join(build_bucket_path,
                                          GCPConfig.CONTENT_PACKS_PATH)

    # Relevant when triggering test upload flow
    if production_bucket_name:
        GCPConfig.PRODUCTION_BUCKET = production_bucket_name

    # Download and extract build index from build and prod buckets
    build_index_folder_path, build_index_blob, build_index_generation = \
        download_and_extract_index(build_bucket, extract_destination_path, build_bucket_base_path)

    # Get the successful and failed packs file from the Prepare Content step in the Create Instances job, if it exists
    packs_results_file_path = os.path.join(
        os.path.dirname(packs_artifacts_path),
        BucketUploadFlow.PACKS_RESULTS_FILE)
    pc_successful_packs_dict, pc_failed_packs_dict, pc_successful_private_packs_dict, \
        pc_uploaded_images = get_upload_data(packs_results_file_path, BucketUploadFlow.PREPARE_CONTENT_FOR_TESTING)

    logging.debug(
        f"Successful packs from Prepare Content: {pc_successful_packs_dict}")
    logging.debug(f"Failed packs from Prepare Content: {pc_failed_packs_dict}")
    logging.debug(
        f"Successful private packs from Prepare Content: {pc_successful_private_packs_dict}"
    )
    logging.debug(f"Images from Prepare Content: {pc_uploaded_images}")

    # Check whether an upload is needed
    check_if_need_to_upload(pc_successful_packs_dict, pc_failed_packs_dict,
                            pc_successful_private_packs_dict,
                            pc_uploaded_images)

    # Detect packs to upload
    pack_names = get_pack_names(target_packs)
    extract_packs_artifacts(packs_artifacts_path, extract_destination_path)
    packs_list = [
        Pack(pack_name, os.path.join(extract_destination_path, pack_name),
             marketplace) for pack_name in pack_names
        if os.path.exists(os.path.join(extract_destination_path, pack_name))
    ]

    # Starting iteration over packs
    for pack in packs_list:
        # Indicates whether a pack has failed to upload on Prepare Content step
        task_status, pack_status = pack.is_failed_to_upload(
            pc_failed_packs_dict)
        if task_status:
            pack.status = pack_status
            pack.cleanup()
            continue

        task_status = pack.load_user_metadata(marketplace)
        if not task_status:
            pack.status = PackStatus.FAILED_LOADING_USER_METADATA.name
            pack.cleanup()
            continue

        task_status = pack.copy_integration_images(production_bucket,
                                                   build_bucket,
                                                   pc_uploaded_images,
                                                   production_base_path,
                                                   build_bucket_base_path)
        if not task_status:
            pack.status = PackStatus.FAILED_IMAGES_UPLOAD.name
            pack.cleanup()
            continue

        task_status = pack.copy_author_image(production_bucket, build_bucket,
                                             pc_uploaded_images,
                                             production_base_path,
                                             build_bucket_base_path)
        if not task_status:
            pack.status = PackStatus.FAILED_AUTHOR_IMAGE_UPLOAD.name
            pack.cleanup()
            continue

        task_status, skipped_pack_uploading = pack.copy_and_upload_to_storage(
            production_bucket, build_bucket, pc_successful_packs_dict,
            production_base_path, build_bucket_base_path)
        if skipped_pack_uploading:
            pack.status = PackStatus.PACK_ALREADY_EXISTS.name
            pack.cleanup()
            continue

        if not task_status:
            pack.status = PackStatus.FAILED_UPLOADING_PACK.name
            pack.cleanup()
            continue

        pack.status = PackStatus.SUCCESS.name

    # upload core packs json to bucket
    upload_core_packs_config(production_bucket, build_number,
                             extract_destination_path, build_bucket,
                             production_base_path, build_bucket_base_path)

    # finished iteration over content packs
    copy_index(build_index_folder_path, build_index_blob,
               build_index_generation, production_bucket, build_bucket,
               production_base_path, build_bucket_base_path)

    # upload id_set.json to bucket
    copy_id_set(production_bucket, build_bucket, production_base_path,
                build_bucket_base_path)

    # get the lists of packs divided by their status
    successful_packs, skipped_packs, failed_packs = get_packs_summary(
        packs_list)

    # Store successful and failed packs list in CircleCI artifacts
    store_successful_and_failed_packs_in_ci_artifacts(
        packs_results_file_path,
        BucketUploadFlow.UPLOAD_PACKS_TO_MARKETPLACE_STORAGE, successful_packs,
        failed_packs, list(pc_successful_private_packs_dict))

    # verify that the successful packs from Prepare Content are the ones that were copied
    verify_copy(successful_packs, pc_successful_packs_dict)

    # summary of packs status
    print_packs_summary(successful_packs, skipped_packs, failed_packs)
Example #17
def main():
    option = option_handler()
    packs_artifacts_path = option.artifacts_path
    extract_destination_path = option.extract_path
    storage_bucket_name = option.bucket_name
    private_bucket_name = option.private_bucket_name
    service_account = option.service_account
    target_packs = option.pack_names if option.pack_names else ""
    build_number = option.ci_build_number if option.ci_build_number else str(
        uuid.uuid4())
    override_all_packs = option.override_all_packs
    signature_key = option.key_string
    id_set_path = option.id_set_path
    packs_dependencies_mapping = load_json(
        option.pack_dependencies) if option.pack_dependencies else {}
    storage_base_path = option.storage_base_path
    remove_test_playbooks = option.remove_test_playbooks

    # google cloud storage client initialized
    storage_client = init_storage_client(service_account)
    storage_bucket = storage_client.bucket(storage_bucket_name)

    # content repo client initialized
    content_repo = get_content_git_client(CONTENT_ROOT_PATH)
    current_commit_hash, remote_previous_commit_hash = get_recent_commits_data(
        content_repo)

    if storage_base_path:
        GCPConfig.STORAGE_BASE_PATH = storage_base_path

    # detect packs to upload
    pack_names = get_packs_names(target_packs)
    extract_packs_artifacts(packs_artifacts_path, extract_destination_path)
    packs_list = [
        Pack(pack_name, os.path.join(extract_destination_path, pack_name))
        for pack_name in pack_names
        if os.path.exists(os.path.join(extract_destination_path, pack_name))
    ]

    # download and extract index from public bucket
    index_folder_path, index_blob, index_generation = download_and_extract_index(
        storage_bucket, extract_destination_path)

    if not option.override_all_packs:
        check_if_index_is_updated(index_folder_path, content_repo,
                                  current_commit_hash,
                                  remote_previous_commit_hash, storage_bucket)

    # google cloud bigquery client initialized
    bq_client = init_bigquery_client(service_account)
    packs_statistic_df = get_packs_statistics_dataframe(bq_client)

    if private_bucket_name:  # Add private packs to the index
        private_storage_bucket = storage_client.bucket(private_bucket_name)
        private_packs = update_index_with_priced_packs(
            private_storage_bucket, extract_destination_path,
            index_folder_path)
    else:  # skipping private packs
        print("Skipping index update of priced packs")
        private_packs = []

    # clean index and gcs from non existing or invalid packs
    clean_non_existing_packs(index_folder_path, private_packs, storage_bucket)

    # starting iteration over packs
    for pack in packs_list:
        task_status, user_metadata = pack.load_user_metadata()
        if not task_status:
            pack.status = PackStatus.FAILED_LOADING_USER_METADATA.value
            pack.cleanup()
            continue

        task_status, pack_content_items = pack.collect_content_items()
        if not task_status:
            pack.status = PackStatus.FAILED_COLLECT_ITEMS.name
            pack.cleanup()
            continue

        task_status, integration_images = pack.upload_integration_images(
            storage_bucket)
        if not task_status:
            pack.status = PackStatus.FAILED_IMAGES_UPLOAD.name
            pack.cleanup()
            continue

        task_status, author_image = pack.upload_author_image(storage_bucket)
        if not task_status:
            pack.status = PackStatus.FAILED_AUTHOR_IMAGE_UPLOAD.name
            pack.cleanup()
            continue

        task_status = pack.format_metadata(
            user_metadata=user_metadata,
            pack_content_items=pack_content_items,
            integration_images=integration_images,
            author_image=author_image,
            index_folder_path=index_folder_path,
            packs_dependencies_mapping=packs_dependencies_mapping,
            build_number=build_number,
            commit_hash=current_commit_hash,
            packs_statistic_df=packs_statistic_df)
        if not task_status:
            pack.status = PackStatus.FAILED_METADATA_PARSING.name
            pack.cleanup()
            continue

        task_status, not_updated_build = pack.prepare_release_notes(
            index_folder_path, build_number)
        if not task_status:
            pack.status = PackStatus.FAILED_RELEASE_NOTES.name
            pack.cleanup()
            continue

        if not_updated_build:
            pack.status = PackStatus.PACK_IS_NOT_UPDATED_IN_RUNNING_BUILD.name
            pack.cleanup()
            continue

        task_status = pack.remove_unwanted_files(remove_test_playbooks)
        if not task_status:
            pack.status = PackStatus.FAILED_REMOVING_PACK_SKIPPED_FOLDERS.name
            pack.cleanup()
            continue

        task_status = pack.sign_pack(signature_key)
        if not task_status:
            pack.status = PackStatus.FAILED_SIGNING_PACKS.name
            pack.cleanup()
            continue

        task_status, zip_pack_path = pack.zip_pack()
        if not task_status:
            pack.status = PackStatus.FAILED_ZIPPING_PACK_ARTIFACTS.name
            pack.cleanup()
            continue

        task_status, pack_was_modified = pack.detect_modified(
            content_repo, index_folder_path, current_commit_hash,
            remote_previous_commit_hash)
        if not task_status:
            pack.status = PackStatus.FAILED_DETECTING_MODIFIED_FILES.name
            pack.cleanup()
            continue

        (task_status, skipped_pack_uploading, full_pack_path) = \
            pack.upload_to_storage(zip_pack_path, pack.latest_version,
                                   storage_bucket, override_all_packs
                                   or pack_was_modified)
        if full_pack_path is not None:
            branch_name = os.environ['CIRCLE_BRANCH']
            build_num = os.environ['CIRCLE_BUILD_NUM']
            bucket_path = f'https://console.cloud.google.com/storage/browser/' \
                          f'marketplace-ci-build/{branch_name}/{build_num}'
            # build the full console URL to the uploaded pack (str.join would interleave the strings)
            bucket_url = f'{bucket_path}/{full_pack_path}'
        else:
            bucket_url = 'Pack was not uploaded.'
        if not task_status:
            pack.status = PackStatus.FAILED_UPLOADING_PACK.name
            pack.bucket_url = bucket_url
            pack.cleanup()
            continue

        task_status, exists_in_index = pack.check_if_exists_in_index(
            index_folder_path)
        if not task_status:
            pack.status = PackStatus.FAILED_SEARCHING_PACK_IN_INDEX.name
            pack.cleanup()
            continue

        # in case the pack already exists at the cloud storage path and in the index, skip further steps
        if skipped_pack_uploading and exists_in_index:
            pack.status = PackStatus.PACK_ALREADY_EXISTS.name
            pack.cleanup()
            continue

        task_status = pack.prepare_for_index_upload()
        if not task_status:
            pack.status = PackStatus.FAILED_PREPARING_INDEX_FOLDER.name
            pack.cleanup()
            continue

        task_status = update_index_folder(index_folder_path=index_folder_path,
                                          pack_name=pack.name,
                                          pack_path=pack.path,
                                          pack_version=pack.latest_version,
                                          hidden_pack=pack.hidden)
        if not task_status:
            pack.status = PackStatus.FAILED_UPDATING_INDEX_FOLDER.name
            pack.cleanup()
            continue

        pack.status = PackStatus.SUCCESS.name

    # upload core packs json to bucket
    upload_core_packs_config(storage_bucket, build_number, index_folder_path)

    # finished iteration over content packs
    upload_index_to_storage(index_folder_path, extract_destination_path,
                            index_blob, build_number, private_packs,
                            current_commit_hash, index_generation)

    # upload id_set.json to bucket
    upload_id_set(storage_bucket, id_set_path)

    # summary of packs status
    print_packs_summary(packs_list)
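Most flows finish by pushing id_set.json to the bucket with upload_id_set. A minimal sketch, assuming the file is uploaded under the configured storage base path (the destination blob name is an assumption):

import os


def upload_id_set(storage_bucket, id_set_local_path: str = None):
    """Upload id_set.json to the bucket if a local path was provided."""
    if not id_set_local_path:
        return
    blob_path = os.path.join(GCPConfig.STORAGE_BASE_PATH, 'id_set.json')  # assumed destination
    storage_bucket.blob(blob_path).upload_from_filename(id_set_local_path)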