Example #1
def update_index_with_priced_packs(private_storage_bucket: Any, extract_destination_path: str,
                                   index_folder_path: str, pack_names: set, is_private_build: bool) \
        -> Tuple[list, str, Any]:
    """ Updates index with priced packs and returns list of priced packs data.

    Args:
        private_storage_bucket (google.cloud.storage.bucket.Bucket): google storage private bucket.
        extract_destination_path (str): full path to extract directory.
        index_folder_path (str): downloaded index folder directory path.
        pack_names (set): Collection of pack names.
        is_private_build (bool): Indicates if the build is private.

    Returns:
        tuple: (list of priced packs from the private bucket, path to the private index, private index blob).

    """
    private_index_path = ""
    private_packs = []

    try:
        (private_index_path, private_index_blob, _) = \
            download_and_extract_index(private_storage_bucket,
                                       os.path.join(extract_destination_path,
                                                    'private'))
        logging.info("get_private_packs")
        private_packs = get_private_packs(private_index_path, pack_names,
                                          extract_destination_path)
        logging.info("add_private_packs_to_index")
        add_private_packs_to_index(index_folder_path, private_index_path)
        logging.info("Finished updating index with priced packs")
    except Exception:
        logging.exception('Could not add private packs to the index.')
    finally:
        shutil.rmtree(os.path.dirname(private_index_path), ignore_errors=True)

    return private_packs, private_index_path, private_index_blob
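
For context, here is a minimal usage sketch of the function above. The service-account path, bucket name, index path, and pack names are placeholders, and init_storage_client is assumed to be the same helper used in the other examples on this page:

import tempfile

# hypothetical usage; assumes update_index_with_priced_packs and
# init_storage_client are importable from the module shown above
storage_client = init_storage_client("/path/to/service_account.json")
private_bucket = storage_client.bucket("my-private-bucket")  # placeholder bucket name

private_packs, private_index_path, private_index_blob = update_index_with_priced_packs(
    private_storage_bucket=private_bucket,
    extract_destination_path=tempfile.mkdtemp(),
    index_folder_path="/tmp/index",  # placeholder path
    pack_names={"Pack1", "Pack2"},  # placeholder pack names
    is_private_build=False,
)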
Example #2
def get_index_json_data(service_account: str, production_bucket_name: str, extract_path: str, storage_base_path: str) \
        -> Tuple[dict, str]:
    """Retrieve the index.json file from production bucket.

    Args:
        service_account: Path to gcloud service account
        production_bucket_name: Production bucket name
        extract_path: Full path of folder to extract the index.zip to
        storage_base_path: The base path in the bucket

    Returns:
        tuple: (dict) content of index.json, (str) path to index.json
    """
    logging.info('Downloading and extracting index.zip from the cloud')

    storage_client = init_storage_client(service_account)
    production_bucket = storage_client.bucket(production_bucket_name)
    index_folder_path, _, _ = download_and_extract_index(
        production_bucket, extract_path, storage_base_path)

    logging.info("Retrieving the index file")
    index_file_path = os.path.join(index_folder_path,
                                   f"{GCPConfig.INDEX_NAME}.json")
    index_data = load_json(index_file_path)

    return index_data, index_file_path
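
A short usage sketch for the function above; the bucket name and base path are placeholders, and the 'packs' key read from index.json is an assumption about its layout, not a documented schema:

import tempfile

# hypothetical usage; assumes get_index_json_data is importable
index_data, index_file_path = get_index_json_data(
    service_account="/path/to/service_account.json",
    production_bucket_name="my-production-bucket",  # placeholder
    extract_path=tempfile.mkdtemp(),
    storage_base_path="content/packs",  # placeholder
)
# the 'packs' key is an assumption about the index.json structure
print(f"{index_file_path} lists {len(index_data.get('packs', []))} packs")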
Example #3
def main():
    option = option_handler()
    storage_bucket_name = option.bucket_name
    service_account = option.service_account
    build_number = str(uuid.uuid4())
    extract_destination_path = tempfile.mkdtemp()

    # store original base storage path
    original_base_path = GCPConfig.STORAGE_BASE_PATH
    # set new storage base path for content test builds
    GCPConfig.STORAGE_BASE_PATH = os.path.normpath(option.storage_base_path)

    # initialize the Google Cloud Storage client
    storage_client = init_storage_client(service_account)
    storage_bucket = storage_client.bucket(storage_bucket_name)

    # download and extract index from test bucket
    index_folder_path, index_blob, _ = download_and_extract_index(storage_bucket, extract_destination_path)

    print(f"Starting iterating over packs in {GCPConfig.INDEX_NAME} and normalizing packs integration URLs")
    # starting iterating over packs folders inside index

    for pack in os.scandir(index_folder_path):
        normalize_pack_integration_urls(pack=pack, original_base_path=original_base_path)

    # finished iteration over packs inside index
    print(f"Finished iterating over packs in {GCPConfig.INDEX_NAME}")

    upload_index_to_storage(index_folder_path=index_folder_path, extract_destination_path=extract_destination_path,
                            index_blob=index_blob, build_number=build_number, private_packs=[])
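
main() relies on an option_handler() that is not shown on this page. A minimal argparse sketch that supplies the three attributes main() actually reads (bucket_name, service_account, storage_base_path) could look like this; the flag names and help texts are assumptions:

import argparse

def option_handler():
    # hypothetical re-creation of the option parser used by main();
    # only the attributes that main() reads are defined here
    parser = argparse.ArgumentParser(description="Normalize pack integration URLs in the index.")
    parser.add_argument('-b', '--bucket_name', required=True, help="Target storage bucket name.")
    parser.add_argument('-s', '--service_account', required=True,
                        help="Path to a GCP service account JSON file.")
    parser.add_argument('-sb', '--storage_base_path', required=True,
                        help="Base path of the content in the bucket.")
    return parser.parse_args()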
Example #4
def install_all_content_packs_from_build_bucket(client: demisto_client,
                                                host: str, server_version: str,
                                                bucket_packs_root_path: str,
                                                service_account: str,
                                                extract_destination_path: str):
    """ Iterates over the packs currently located in the Build bucket. Wrapper for install_packs.
    Retrieving the metadata of the latest version of each pack from the index.zip of the build bucket.

    :param client: Demisto-py client to connect to the server.
    :param host: FQDN of the server.
    :param server_version: The version of the server the packs are installed on.
    :param bucket_packs_root_path: The prefix to the root of packs in the bucket
    :param service_account: Google Service Account
    :param extract_destination_path: the full path of extract folder for the index.
    :return: None. Prints the response from the server in the build.
    """
    all_packs = []
    logging.debug(
        f"Installing all content packs in server {host} from packs path {bucket_packs_root_path}"
    )

    storage_client = init_storage_client(service_account)
    build_bucket = storage_client.bucket(GCPConfig.CI_BUILD_BUCKET)
    index_folder_path, _, _ = download_and_extract_index(
        build_bucket, extract_destination_path, bucket_packs_root_path)

    for pack_id in os.listdir(index_folder_path):
        if os.path.isdir(os.path.join(index_folder_path, pack_id)):
            metadata_path = os.path.join(index_folder_path, pack_id,
                                         Pack.METADATA)
            pack_metadata = load_json(metadata_path)
            if 'partnerId' in pack_metadata:  # not installing private packs
                continue
            pack_version = pack_metadata.get(
                Metadata.CURRENT_VERSION, Metadata.SERVER_DEFAULT_MIN_VERSION)
            server_min_version = pack_metadata.get(
                Metadata.SERVER_MIN_VERSION,
                Metadata.SERVER_DEFAULT_MIN_VERSION)
            hidden = pack_metadata.get(Metadata.HIDDEN, False)
            # Install the pack only if the server version meets the pack's minimum
            # server version and the pack is not hidden (deprecated):
            if ('Master' in server_version or LooseVersion(server_version) >= LooseVersion(server_min_version)) and \
                    not hidden:
                logging.debug(f"Appending pack id {pack_id}")
                all_packs.append(
                    get_pack_installation_request_data(pack_id, pack_version))
            else:
                reason = 'Is hidden' if hidden else f'min server version is {server_min_version}'
                logging.debug(
                    f'Pack: {pack_id} with version: {pack_version} will not be installed on {host}. '
                    f'Pack {reason}.')
    return install_packs(client, host, all_packs)
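
install_packs receives request payloads built by get_pack_installation_request_data, which is not shown here. Judging by its call site, it plausibly returns the minimal pack descriptor the install endpoint expects; this sketch is an assumption, not the repository's actual helper:

def get_pack_installation_request_data(pack_id: str, pack_version: str) -> dict:
    # hypothetical sketch: the exact payload schema is an assumption
    return {
        'id': pack_id,
        'version': pack_version,
    }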
Example #5
def main():
    install_logging('upload_packs_private.log')
    upload_config = option_handler()
    packs_artifacts_path = upload_config.artifacts_path
    extract_destination_path = upload_config.extract_path
    storage_bucket_name = upload_config.bucket_name
    private_bucket_name = upload_config.private_bucket_name
    service_account = upload_config.service_account
    target_packs = upload_config.pack_names
    build_number = upload_config.ci_build_number
    id_set_path = upload_config.id_set_path
    packs_dependencies_mapping = load_json(
        upload_config.pack_dependencies
    ) if upload_config.pack_dependencies else {}
    storage_base_path = upload_config.storage_base_path
    is_private_build = bool(upload_config.encryption_key)
    landing_page_sections = load_json(LANDING_PAGE_SECTIONS_PATH)

    logging.info(f"Packs artifact path is: {packs_artifacts_path}")

    prepare_test_directories(packs_artifacts_path)

    # initialize the Google Cloud Storage client
    storage_client = init_storage_client(service_account)
    storage_bucket = storage_client.bucket(storage_bucket_name)
    private_storage_bucket = storage_client.bucket(private_bucket_name)
    default_storage_bucket = private_storage_bucket if is_private_build else storage_bucket

    # download and extract index from public bucket
    index_folder_path, index_blob, index_generation = download_and_extract_index(
        storage_bucket, extract_destination_path)

    # initialize the content repo client
    if not is_private_build:
        content_repo = get_content_git_client(CONTENT_ROOT_PATH)
        current_commit_hash, remote_previous_commit_hash = get_recent_commits_data(
            content_repo,
            index_folder_path,
            is_bucket_upload_flow=False,
            is_private_build=True)
    else:
        current_commit_hash, remote_previous_commit_hash = "", ""
        content_repo = None

    if storage_base_path:
        GCPConfig.STORAGE_BASE_PATH = storage_base_path

    # detect packs to upload
    pack_names = get_packs_names(target_packs)
    extract_packs_artifacts(packs_artifacts_path, extract_destination_path)
    packs_list = [
        Pack(pack_name, os.path.join(extract_destination_path, pack_name))
        for pack_name in pack_names
        if os.path.exists(os.path.join(extract_destination_path, pack_name))
    ]

    if not is_private_build:
        check_if_index_is_updated(index_folder_path, content_repo,
                                  current_commit_hash,
                                  remote_previous_commit_hash, storage_bucket)

    if private_bucket_name:  # Add private packs to the index
        private_packs, private_index_path, private_index_blob = update_index_with_priced_packs(
            private_storage_bucket, extract_destination_path,
            index_folder_path, pack_names, is_private_build)
    else:  # skipping private packs
        logging.info("Skipping index update of priced packs")
        private_packs = []
        private_index_path, private_index_blob = "", None

    # initialize the Google Cloud BigQuery client
    packs_statistic_df = None
    if not is_private_build:
        bq_client = init_bigquery_client(service_account)
        packs_statistic_df = get_packs_statistics_dataframe(bq_client)

    # clean the index and GCS of non-existing or invalid packs
    clean_non_existing_packs(index_folder_path, private_packs,
                             default_storage_bucket)
    # starting iteration over packs
    for pack in packs_list:
        create_and_upload_marketplace_pack(
            upload_config,
            pack,
            storage_bucket,
            index_folder_path,
            packs_dependencies_mapping,
            private_bucket_name,
            private_storage_bucket=private_storage_bucket,
            content_repo=content_repo,
            current_commit_hash=current_commit_hash,
            remote_previous_commit_hash=remote_previous_commit_hash,
            packs_statistic_df=packs_statistic_df)
    # upload core packs json to bucket

    if should_upload_core_packs(storage_bucket_name):
        upload_core_packs_config(default_storage_bucket, build_number,
                                 index_folder_path)
    # finished iteration over content packs
    if is_private_build:
        delete_public_packs_from_index(index_folder_path)
        upload_index_to_storage(index_folder_path,
                                extract_destination_path,
                                private_index_blob,
                                build_number,
                                private_packs,
                                current_commit_hash,
                                index_generation,
                                is_private_build,
                                landing_page_sections=landing_page_sections)

    else:
        upload_index_to_storage(index_folder_path,
                                extract_destination_path,
                                index_blob,
                                build_number,
                                private_packs,
                                current_commit_hash,
                                index_generation,
                                landing_page_sections=landing_page_sections)

    # upload id_set.json to bucket
    upload_id_set(default_storage_bucket, id_set_path)

    # get the lists of packs divided by their status
    successful_packs, skipped_packs, failed_packs = get_packs_summary(
        packs_list)

    # summary of packs status
    print_packs_summary(successful_packs, skipped_packs, failed_packs)
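
get_packs_summary is referenced but not defined on this page. A plausible sketch, assuming each Pack object exposes a status attribute and that the status values named below exist (both assumptions):

def get_packs_summary(packs_list):
    # hypothetical sketch; assumes each Pack carries a `status` attribute
    # and that these status values are used by the upload flow
    successful_packs = [pack for pack in packs_list if pack.status == 'SUCCESS']
    skipped_packs = [pack for pack in packs_list if pack.status == 'PACK_ALREADY_EXISTS']
    failed_packs = [pack for pack in packs_list
                    if pack not in successful_packs and pack not in skipped_packs]
    return successful_packs, skipped_packs, failed_packs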