Example #1
def dummy_pack_metadata():
    """ Fixture for dummy pack_metadata.json file that is part of pack folder in content repo.
    """
    dummy_pack_metadata_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "test_data",
                                            "user_pack_metadata.json")
    pack_metadata = load_json(dummy_pack_metadata_path)
    return pack_metadata
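All of these examples pass a file path to a load_json helper and treat the result as a plain dict. The helper itself is not shown on this page; a minimal sketch of what it might look like, assuming it returns an empty dict for a missing or empty file (which matches how the callers fall back to .get() defaults), is:

import json
import os


def load_json(file_path: str) -> dict:
    # Assumed behavior, not the original implementation: parse the file as
    # JSON and fall back to an empty dict when the file is missing or empty.
    if not file_path or not os.path.exists(file_path):
        return {}
    with open(file_path) as json_file:
        content = json_file.read()
        return json.loads(content) if content else {}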
Example #2
def upload_core_packs_config(production_bucket: Bucket, build_number: str,
                             extract_destination_path: str,
                             build_bucket: Bucket):
    """Uploads corepacks.json file configuration to bucket. Corepacks file includes core packs for server installation.

    Args:
        production_bucket (google.cloud.storage.bucket.Bucket): gcs bucket where core packs config is uploaded.
        build_number (str): CircleCI build number.
        extract_destination_path (str): Full path of folder to extract the corepacks file
        build_bucket (google.cloud.storage.bucket.Bucket): gcs bucket where core packs config is downloaded from.

    """
    # download the corepacks.json stored in the build bucket to temp dir
    build_corepacks_file_path = os.path.join(GCPConfig.BUILD_BASE_PATH,
                                             GCPConfig.CORE_PACK_FILE_NAME)
    build_corepacks_blob = build_bucket.blob(build_corepacks_file_path)

    if not build_corepacks_blob.exists():
        logging.critical(
            f"{GCPConfig.CORE_PACK_FILE_NAME} is missing in {build_bucket.name} bucket, exiting..."
        )
        sys.exit(1)

    temp_corepacks_file_path = os.path.join(extract_destination_path,
                                            GCPConfig.CORE_PACK_FILE_NAME)
    build_corepacks_blob.download_to_filename(temp_corepacks_file_path)
    corepacks_file = load_json(temp_corepacks_file_path)

    # change the storage paths to the prod bucket
    corepacks_list = corepacks_file.get('corePacks', [])
    try:
        corepacks_list = [
            os.path.join(GCPConfig.GCS_PUBLIC_URL, production_bucket.name,
                         GCPConfig.STORAGE_BASE_PATH,
                         LATEST_ZIP_REGEX.findall(corepack_path)[0])
            for corepack_path in corepacks_list
        ]
    except IndexError:
        corepacks_list_str = '\n'.join(corepacks_list)
        logging.exception(
            f"GCS paths in build bucket corepacks.json file are not of format: "
            f"{GCPConfig.GCS_PUBLIC_URL}/<BUCKET_NAME>/.../content/packs/...\n"
            f"List of build bucket corepacks paths:\n{corepacks_list_str}")
        sys.exit(1)

    # construct core pack data with public gcs urls
    core_packs_data = {
        'corePacks': corepacks_list,
        'buildNumber': build_number
    }

    # upload core pack json file to gcs
    prod_corepacks_file_path = os.path.join(GCPConfig.STORAGE_BASE_PATH,
                                            GCPConfig.CORE_PACK_FILE_NAME)
    prod_corepacks_blob = production_bucket.blob(prod_corepacks_file_path)
    prod_corepacks_blob.upload_from_string(
        json.dumps(core_packs_data, indent=4))

    logging.success(
        f"Finished uploading {GCPConfig.CORE_PACK_FILE_NAME} to storage.")
Example #3
def get_updated_private_packs(private_packs, index_folder_path):
    """ Checks for updated private packs by compering contentCommitHash between public index json and private pack
    metadata files.

    Args:
        private_packs (list): List of dicts containing pack metadata information.
        index_folder_path (str): The public index folder path.

    Returns:
        updated_private_packs (list): a list of the ids of all private packs that were updated.

    """
    updated_private_packs = []

    public_index_file_path = os.path.join(index_folder_path,
                                          f"{GCPConfig.INDEX_NAME}.json")
    public_index_json = load_json(public_index_file_path)
    private_packs_from_public_index = public_index_json.get("packs", {})

    for pack in private_packs:
        private_pack_id = pack.get('id')
        private_commit_hash_from_metadata = pack.get('contentCommitHash', "")
        private_commit_hash_from_content_repo = ""
        for public_pack in private_packs_from_public_index:
            if public_pack.get('id') == private_pack_id:
                private_commit_hash_from_content_repo = public_pack.get(
                    'contentCommitHash', "")

        private_pack_was_updated = private_commit_hash_from_metadata != private_commit_hash_from_content_repo
        if private_pack_was_updated:
            updated_private_packs.append(private_pack_id)

    return updated_private_packs
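The inner loop above rescans the public index once per private pack. Example #10 further down avoids that by building an id-to-hash lookup first; the same idea applied to this function might look like the sketch below (not the original code, but behaviorally equivalent, including the empty-string default when a pack is missing from the public index):

def get_updated_private_packs_via_lookup(private_packs, private_packs_from_public_index):
    # Build the id -> contentCommitHash map once instead of scanning the
    # public index for every private pack.
    public_hash_by_id = {
        public_pack.get("id"): public_pack.get("contentCommitHash", "")
        for public_pack in private_packs_from_public_index
    }
    return [
        pack.get("id")
        for pack in private_packs
        if pack.get("contentCommitHash", "") != public_hash_by_id.get(pack.get("id"), "")
    ]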
Example #4
def get_index_json_data(service_account: str, production_bucket_name: str, extract_path: str, storage_base_path: str) \
        -> Tuple[dict, str]:
    """Retrieve the index.json file from production bucket.

    Args:
        service_account: Path to gcloud service account
        production_bucket_name: Production bucket name
        extract_path: Full path of folder to extract the index.zip to
        storage_base_path: The base path in the bucket

    Returns:
        (Dict: content of the index.json, Str: path to index.json)
    """
    logging.info('Downloading and extracting index.zip from the cloud')

    storage_client = init_storage_client(service_account)
    production_bucket = storage_client.bucket(production_bucket_name)
    index_folder_path, _, _ = download_and_extract_index(
        production_bucket, extract_path, storage_base_path)

    logging.info("Retrieving the index file")
    index_file_path = os.path.join(index_folder_path,
                                   f"{GCPConfig.INDEX_NAME}.json")
    index_data = load_json(index_file_path)

    return index_data, index_file_path
Example #5
def install_all_content_packs_from_build_bucket(client: demisto_client,
                                                host: str, server_version: str,
                                                bucket_packs_root_path: str,
                                                service_account: str,
                                                extract_destination_path: str):
    """ Iterates over the packs currently located in the Build bucket. Wrapper for install_packs.
    Retrieving the metadata of the latest version of each pack from the index.zip of the build bucket.

    :param client: Demisto-py client to connect to the server.
    :param host: FQDN of the server.
    :param server_version: The version of the server the packs are installed on.
    :param bucket_packs_root_path: The prefix to the root of packs in the bucket
    :param service_account: Google Service Account
    :param extract_destination_path: the full path of extract folder for the index.
    :return: None. Prints the response from the server in the build.
    """
    all_packs = []
    logging.debug(
        f"Installing all content packs in server {host} from packs path {bucket_packs_root_path}"
    )

    storage_client = init_storage_client(service_account)
    build_bucket = storage_client.bucket(GCPConfig.CI_BUILD_BUCKET)
    index_folder_path, _, _ = download_and_extract_index(
        build_bucket, extract_destination_path, bucket_packs_root_path)

    for pack_id in os.listdir(index_folder_path):
        if os.path.isdir(os.path.join(index_folder_path, pack_id)):
            metadata_path = os.path.join(index_folder_path, pack_id,
                                         Pack.METADATA)
            pack_metadata = load_json(metadata_path)
            if 'partnerId' in pack_metadata:  # not installing private packs
                continue
            pack_version = pack_metadata.get(
                Metadata.CURRENT_VERSION, Metadata.SERVER_DEFAULT_MIN_VERSION)
            server_min_version = pack_metadata.get(
                Metadata.SERVER_MIN_VERSION,
                Metadata.SERVER_DEFAULT_MIN_VERSION)
            hidden = pack_metadata.get(Metadata.HIDDEN, False)
            # Install the pack only if the server version meets the pack's minimum server version requirement
            # and the pack is not hidden (deprecated):
            if ('Master' in server_version or LooseVersion(server_version) >= LooseVersion(server_min_version)) and \
                    not hidden:
                logging.debug(f"Appending pack id {pack_id}")
                all_packs.append(
                    get_pack_installation_request_data(pack_id, pack_version))
            else:
                reason = 'Is hidden' if hidden else f'min server version is {server_min_version}'
                logging.debug(
                    f'Pack: {pack_id} with version: {pack_version} will not be installed on {host}. '
                    f'Pack {reason}.')
    return install_packs(client, host, all_packs)
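The version gate above uses LooseVersion rather than plain string comparison; a quick illustration of why that matters:

from distutils.version import LooseVersion

# LooseVersion compares versions component by component, so a two-digit minor
# version sorts correctly, whereas lexicographic string comparison does not.
assert LooseVersion("6.10.0") >= LooseVersion("6.2.0")
assert not ("6.10.0" >= "6.2.0")  # string comparison would wrongly reject the pack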
Example #6
def extract_credentials_from_secret(secret_path: str) -> Tuple[str, str]:
    """Extract Credentials from secret file.

    Args:
        secret_path: The path to the secret file.

    Returns: (username, password) found in the secret file.
    """
    logging.info("Retrieving the credentials for Cortex XSOAR server")
    secret_conf_file = load_json(file_path=secret_path)
    username: str = secret_conf_file.get("username")
    password: str = secret_conf_file.get("userPassword")
    return username, password
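Only two fields are read from the secret file; a plausible shape (field names taken from the .get() calls above, path and values are illustrative) and a usage line:

# Hypothetical secret file content -- extract_credentials_from_secret only
# reads "username" and "userPassword":
# {
#     "username": "admin",
#     "userPassword": "<redacted>"
# }
username, password = extract_credentials_from_secret("./conf_secret.json")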
Example #7
def get_successful_and_failed_packs(
        packs_results_file_path: str) -> Tuple[dict, dict]:
    """ Loads the packs_results.json file to get the successful and failed packs dicts

    Args:
        packs_results_file_path: The path to the file

    Returns:
        dict: The successful packs dict
        dict: The failed packs dict

    """
    if os.path.exists(packs_results_file_path):
        packs_results_file = load_json(packs_results_file_path)
        successful_packs_dict = packs_results_file.get('successful_packs', {})
        failed_packs_dict = packs_results_file.get('failed_packs', {})
        return successful_packs_dict, failed_packs_dict
    return {}, {}
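get_successful_and_failed_packs only looks at the top-level 'successful_packs' and 'failed_packs' keys and returns whatever sits under them unchanged. A usage sketch with a hypothetical packs_results.json payload (the nested fields are illustrative):

import json
import tempfile

example_results = {
    "successful_packs": {"HelloWorld": {"status": "SUCCESS", "aggregated": "1.2.3"}},
    "failed_packs": {"BrokenPack": {"status": "FAILED_METADATA_PARSING"}},
}

with tempfile.NamedTemporaryFile("w", suffix=".json", delete=False) as tmp:
    json.dump(example_results, tmp)

successful, failed = get_successful_and_failed_packs(tmp.name)
assert "HelloWorld" in successful and "BrokenPack" in failed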
Example #8
def check_index_data(index_data: dict) -> bool:
    """Check index.json file inside the index.zip archive in the cloud.

    Validate by running verify_pack on each pack.

    Args:
        index_data: Dictionary of the index.json contents.

    Returns: True if all packs are valid, False otherwise.
    """
    logging.info("Found index data in index file. Checking...")
    logging.debug(f"Index data is:\n {pformat(index_data)}")

    packs_list_exists = log_message_if_statement(
        statement=(len(index_data.get("packs", [])) != 0),
        error_message="Found 0 packs in index file."
        "\nAborting the rest of the check.")
    # If all packs are gone, return False
    if not packs_list_exists:
        return False

    mandatory_pack_ids = load_json(MANDATORY_PREMIUM_PACKS_PATH).get(
        "packs", [])

    packs_are_valid = True
    for pack in index_data["packs"]:
        pack_is_good = verify_pack(pack)
        if not pack_is_good:
            packs_are_valid = False
        if pack["id"] in mandatory_pack_ids:
            mandatory_pack_ids.remove(pack["id"])

    all_mandatory_packs_are_found = log_message_if_statement(
        statement=(mandatory_pack_ids == []),
        error_message=f"index json is missing some mandatory"
        f" pack ids: {pformat(mandatory_pack_ids)}",
        success_message="All premium mandatory pack ids were"
        " found in the index.json file.")
    return all([packs_are_valid, all_mandatory_packs_are_found])
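check_index_data leans on a log_message_if_statement helper that is not shown on this page. Judging from the call sites, it logs the error message when the statement is falsy, logs an optional success message when it is truthy, and returns the statement's boolean value. A minimal sketch under those assumptions:

import logging


def log_message_if_statement(statement: bool, error_message: str,
                             success_message: str = None) -> bool:
    # Assumed behavior: log error_message when the statement is false,
    # success_message (if provided) when it is true, and return the result.
    if not statement:
        logging.error(error_message)
    elif success_message:
        logging.info(success_message)
    return bool(statement)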
Example #9
def handle_private_content(public_index_folder_path, private_bucket_name, extract_destination_path, storage_client,
                           public_pack_names) -> Tuple[bool, list, list]:
    """
    1. Adds private packs to the public index.json.
    2. Checks whether any private packs were added/deleted/updated.

    Args:
        public_index_folder_path: extracted public index folder full path.
        private_bucket_name: Private storage bucket name
        extract_destination_path: full path to extract directory.
        storage_client: initialized google cloud storage client.
        public_pack_names: unique collection of public pack names to upload.

    Returns:
        is_private_content_updated (bool): True if there is at least one private pack that was updated/released,
        False otherwise (i.e., there are no private packs that have been updated/released).
        private_packs (list): priced packs from the private bucket.
        updated_private_packs_ids (list): ids of all private packs that were updated.
    """
    if private_bucket_name:
        private_storage_bucket = storage_client.bucket(private_bucket_name)
        private_index_path, _, _ = download_and_extract_index(
            private_storage_bucket, os.path.join(extract_destination_path, "private")
        )

        public_index_json_file_path = os.path.join(public_index_folder_path, f"{GCPConfig.INDEX_NAME}.json")
        public_index_json = load_json(public_index_json_file_path)

        if public_index_json:
            are_private_packs_updated = is_private_packs_updated(public_index_json, private_index_path)
            private_packs, updated_private_packs_ids = add_private_content_to_index(
                private_index_path, extract_destination_path, public_index_folder_path, public_pack_names
            )
            return are_private_packs_updated, private_packs, updated_private_packs_ids
        else:
            logging.error(f"Public {GCPConfig.INDEX_NAME}.json was found empty.")
            sys.exit(1)
    else:
        return False, [], []
Example #10
def is_private_packs_updated(public_index_json, private_index_path):
    """ Checks whether there were changes in private packs from the last upload.
    The check compares the `content commit hash` field in the public index with the value stored in the private index.
    If there is at least one private pack that has been updated/released, the upload should be performed and not
    skipped.

    Args:
        public_index_json (dict) : The public index.json file.
        private_index_path (str): Path to where the private index.zip is located.

    Returns:
        is_private_packs_updated (bool): True if there is at least one private pack that was updated/released,
         False otherwise (i.e., there are no private packs that have been updated/released).

    """
    logging.debug("Checking if there are updated private packs")

    private_index_file_path = os.path.join(private_index_path, f"{GCPConfig.INDEX_NAME}.json")
    private_index_json = load_json(private_index_file_path)
    private_packs_from_private_index = private_index_json.get("packs")
    private_packs_from_public_index = public_index_json.get("packs")

    if len(private_packs_from_private_index) != len(private_packs_from_public_index):
        # private pack was added or deleted
        logging.debug("There is at least one private pack that was added/deleted, upload should not be skipped.")
        return True

    id_to_commit_hash_from_public_index = {private_pack.get("id"): private_pack.get("contentCommitHash", "") for
                                           private_pack in private_packs_from_public_index}

    for private_pack in private_packs_from_private_index:
        pack_id = private_pack.get("id")
        content_commit_hash = private_pack.get("contentCommitHash", "")
        if id_to_commit_hash_from_public_index.get(pack_id) != content_commit_hash:
            logging.debug("There is at least one private pack that was updated, upload should not be skipped.")
            return True

    logging.debug("No private packs were changed")
    return False
Example #11
def main():
    install_logging('upload_packs_private.log')
    upload_config = option_handler()
    packs_artifacts_path = upload_config.artifacts_path
    extract_destination_path = upload_config.extract_path
    storage_bucket_name = upload_config.bucket_name
    private_bucket_name = upload_config.private_bucket_name
    service_account = upload_config.service_account
    target_packs = upload_config.pack_names
    build_number = upload_config.ci_build_number
    id_set_path = upload_config.id_set_path
    packs_dependencies_mapping = load_json(
        upload_config.pack_dependencies
    ) if upload_config.pack_dependencies else {}
    storage_base_path = upload_config.storage_base_path
    is_private_build = upload_config.encryption_key and upload_config.encryption_key != ''
    landing_page_sections = load_json(LANDING_PAGE_SECTIONS_PATH)

    logging.info(f"Packs artifact path is: {packs_artifacts_path}")

    prepare_test_directories(packs_artifacts_path)

    # google cloud storage client initialized
    storage_client = init_storage_client(service_account)
    storage_bucket = storage_client.bucket(storage_bucket_name)
    private_storage_bucket = storage_client.bucket(private_bucket_name)
    default_storage_bucket = private_storage_bucket if is_private_build else storage_bucket

    # download and extract index from public bucket
    index_folder_path, index_blob, index_generation = download_and_extract_index(
        storage_bucket, extract_destination_path)

    # content repo client initialized
    if not is_private_build:
        content_repo = get_content_git_client(CONTENT_ROOT_PATH)
        current_commit_hash, remote_previous_commit_hash = get_recent_commits_data(
            content_repo,
            index_folder_path,
            is_bucket_upload_flow=False,
            is_private_build=True)
    else:
        current_commit_hash, remote_previous_commit_hash = "", ""
        content_repo = None

    if storage_base_path:
        GCPConfig.STORAGE_BASE_PATH = storage_base_path

    # detect packs to upload
    pack_names = get_packs_names(target_packs)
    extract_packs_artifacts(packs_artifacts_path, extract_destination_path)
    packs_list = [
        Pack(pack_name, os.path.join(extract_destination_path, pack_name))
        for pack_name in pack_names
        if os.path.exists(os.path.join(extract_destination_path, pack_name))
    ]

    if not is_private_build:
        check_if_index_is_updated(index_folder_path, content_repo,
                                  current_commit_hash,
                                  remote_previous_commit_hash, storage_bucket)

    if private_bucket_name:  # Add private packs to the index
        private_packs, private_index_path, private_index_blob = update_index_with_priced_packs(
            private_storage_bucket, extract_destination_path,
            index_folder_path, pack_names, is_private_build)
    else:  # skipping private packs
        logging.info("Skipping index update of priced packs")
        private_packs = []

    # google cloud bigquery client initialized
    packs_statistic_df = None
    if not is_private_build:
        bq_client = init_bigquery_client(service_account)
        packs_statistic_df = get_packs_statistics_dataframe(bq_client)

    # clean non-existing or invalid packs from the index and GCS
    clean_non_existing_packs(index_folder_path, private_packs,
                             default_storage_bucket)
    # starting iteration over packs
    for pack in packs_list:
        create_and_upload_marketplace_pack(
            upload_config,
            pack,
            storage_bucket,
            index_folder_path,
            packs_dependencies_mapping,
            private_bucket_name,
            private_storage_bucket=private_storage_bucket,
            content_repo=content_repo,
            current_commit_hash=current_commit_hash,
            remote_previous_commit_hash=remote_previous_commit_hash,
            packs_statistic_df=packs_statistic_df)
    # upload core packs json to bucket

    if should_upload_core_packs(storage_bucket_name):
        upload_core_packs_config(default_storage_bucket, build_number,
                                 index_folder_path)
    # finished iteration over content packs
    if is_private_build:
        delete_public_packs_from_index(index_folder_path)
        upload_index_to_storage(index_folder_path,
                                extract_destination_path,
                                private_index_blob,
                                build_number,
                                private_packs,
                                current_commit_hash,
                                index_generation,
                                is_private_build,
                                landing_page_sections=landing_page_sections)

    else:
        upload_index_to_storage(index_folder_path,
                                extract_destination_path,
                                index_blob,
                                build_number,
                                private_packs,
                                current_commit_hash,
                                index_generation,
                                landing_page_sections=landing_page_sections)

    # upload id_set.json to bucket
    upload_id_set(default_storage_bucket, id_set_path)

    # get the lists of packs divided by their status
    successful_packs, skipped_packs, failed_packs = get_packs_summary(
        packs_list)

    # summary of packs status
    print_packs_summary(successful_packs, skipped_packs, failed_packs)
Example #12
def create_and_upload_marketplace_pack(upload_config: Any, pack: Any, storage_bucket: Any, index_folder_path: str,
                                       packs_dependencies_mapping: dict, private_bucket_name: str,
                                       private_storage_bucket: Any = None,
                                       content_repo: Any = None, current_commit_hash: str = '',
                                       remote_previous_commit_hash: str = '', packs_statistic_df: Any = None) \
        -> Any:
    """
    The main logic flow for the create and upload process. Acts as a decision tree, checking the task status
    after each step.

    :param upload_config: Configuration for the script as handled by the Option Handler.
    :param pack: Pack object.
    :param storage_bucket: Bucket the changes are being uploaded to.
    :param index_folder_path: Path to the index folder.
    :param packs_dependencies_mapping: Used by format_metadata to add dependencies to the metadata file.
    :param private_bucket_name: Name of the private bucket, used to build the pack's bucket URL.
    :param private_storage_bucket: Bucket where the private packs are uploaded.
    :param content_repo: The main content repository. demisto/content
    :param current_commit_hash: Current commit hash for the run. Used in the pack metadata file.
    :param remote_previous_commit_hash: Previous commit hash. Used for comparison.
    :param packs_statistic_df: Dataframe object containing current pack analytics.
    :return: Updated pack.status value.
    """
    build_number = upload_config.ci_build_number
    remove_test_playbooks = upload_config.remove_test_playbooks
    signature_key = upload_config.key_string
    extract_destination_path = upload_config.extract_path
    override_all_packs = upload_config.override_all_packs
    enc_key = upload_config.encryption_key
    packs_artifacts_dir = upload_config.artifacts_path
    private_artifacts_dir = upload_config.private_artifacts
    is_infra_run = upload_config.is_infra_run
    secondary_enc_key = upload_config.secondary_encryption_key
    landing_page_sections = load_json(LANDING_PAGE_SECTIONS_PATH)

    pack_was_modified = not is_infra_run

    task_status, user_metadata = pack.load_user_metadata()
    if not task_status:
        pack.status = PackStatus.FAILED_LOADING_USER_METADATA.name
        pack.cleanup()
        return

    task_status, pack_content_items = pack.collect_content_items()
    if not task_status:
        pack.status = PackStatus.FAILED_COLLECT_ITEMS.name
        pack.cleanup()
        return

    task_status, integration_images = pack.upload_integration_images(
        storage_bucket)
    if not task_status:
        pack.status = PackStatus.FAILED_IMAGES_UPLOAD.name
        pack.cleanup()
        return

    task_status, author_image = pack.upload_author_image(storage_bucket)
    if not task_status:
        pack.status = PackStatus.FAILED_AUTHOR_IMAGE_UPLOAD.name
        pack.cleanup()
        return

    task_status = pack.format_metadata(
        user_metadata=user_metadata,
        pack_content_items=pack_content_items,
        integration_images=integration_images,
        author_image=author_image,
        index_folder_path=index_folder_path,
        packs_dependencies_mapping=packs_dependencies_mapping,
        build_number=build_number,
        commit_hash=current_commit_hash,
        packs_statistic_df=packs_statistic_df,
        pack_was_modified=pack_was_modified,
        landing_page_sections=landing_page_sections)

    if not task_status:
        pack.status = PackStatus.FAILED_METADATA_PARSING.name
        pack.cleanup()
        return

    task_status, not_updated_build = pack.prepare_release_notes(
        index_folder_path, build_number)
    if not task_status:
        pack.status = PackStatus.FAILED_RELEASE_NOTES.name
        pack.cleanup()
        return

    if not_updated_build:
        pack.status = PackStatus.PACK_IS_NOT_UPDATED_IN_RUNNING_BUILD.name
        pack.cleanup()
        return

    task_status = pack.remove_unwanted_files(remove_test_playbooks)
    if not task_status:
        pack.status = PackStatus.FAILED_REMOVING_PACK_SKIPPED_FOLDERS.name
        pack.cleanup()
        return

    task_status = pack.sign_pack(signature_key)
    if not task_status:
        pack.status = PackStatus.FAILED_SIGNING_PACKS.name
        pack.cleanup()
        return

    task_status, zip_pack_path = pack.zip_pack(extract_destination_path,
                                               pack._pack_name, enc_key,
                                               private_artifacts_dir,
                                               secondary_enc_key)

    if not task_status:
        pack.status = PackStatus.FAILED_ZIPPING_PACK_ARTIFACTS.name
        pack.cleanup()
        return

    task_status = pack.is_pack_encrypted(zip_pack_path, enc_key)
    if not task_status:
        pack.status = PackStatus.FAILED_DECRYPT_PACK.name
        pack.cleanup()
        return

    bucket_for_uploading = private_storage_bucket if private_storage_bucket else storage_bucket
    (task_status, skipped_pack_uploading, full_pack_path) = \
        pack.upload_to_storage(zip_pack_path, pack.latest_version,
                               bucket_for_uploading, override_all_packs
                               or pack_was_modified, pack_artifacts_path=packs_artifacts_dir,
                               private_content=True)
    if full_pack_path is not None:
        bucket_path = f'https://console.cloud.google.com/storage/browser/{private_bucket_name}/'
        bucket_url = bucket_path + full_pack_path
    else:
        bucket_url = 'Pack was not uploaded.'
    pack.bucket_url = bucket_url

    if not task_status:
        pack.status = PackStatus.FAILED_UPLOADING_PACK.name
        pack.cleanup()
        return

    task_status, exists_in_index = pack.check_if_exists_in_index(
        index_folder_path)
    if not task_status:
        pack.status = PackStatus.FAILED_SEARCHING_PACK_IN_INDEX.name
        pack.cleanup()
        return

    task_status = pack.prepare_for_index_upload()
    if not task_status:
        pack.status = PackStatus.FAILED_PREPARING_INDEX_FOLDER.name
        pack.cleanup()
        return

    task_status = update_index_folder(index_folder_path=index_folder_path,
                                      pack_name=pack.name,
                                      pack_path=pack.path,
                                      pack_version=pack.latest_version,
                                      hidden_pack=pack.hidden)
    if not task_status:
        pack.status = PackStatus.FAILED_UPDATING_INDEX_FOLDER.name
        pack.cleanup()
        return

    # If the pack already exists in the cloud storage path and in the index, don't mark it as changed
    if skipped_pack_uploading and exists_in_index:
        pack.status = PackStatus.PACK_ALREADY_EXISTS.name
        pack.cleanup()
        return

    pack.status = PackStatus.SUCCESS.name
Example #13
def get_landing_page_sections() -> Dict:
    """ Returns the landing page sections file content """
    return mp_services.load_json(LANDING_PAGE_SECTIONS_PATH)
Example #14
def main():
    install_logging('Prepare_Content_Packs_For_Testing.log')
    option = option_handler()
    packs_artifacts_path = option.artifacts_path
    extract_destination_path = option.extract_path
    storage_bucket_name = option.bucket_name
    service_account = option.service_account
    target_packs = option.pack_names if option.pack_names else ""
    build_number = option.ci_build_number if option.ci_build_number else str(
        uuid.uuid4())
    override_all_packs = option.override_all_packs
    signature_key = option.key_string
    id_set_path = option.id_set_path
    packs_dependencies_mapping = load_json(
        option.pack_dependencies) if option.pack_dependencies else {}
    storage_base_path = option.storage_base_path
    remove_test_playbooks = option.remove_test_playbooks
    is_bucket_upload_flow = option.bucket_upload
    private_bucket_name = option.private_bucket_name
    circle_branch = option.circle_branch
    force_upload = option.force_upload

    # google cloud storage client initialized
    storage_client = init_storage_client(service_account)
    storage_bucket = storage_client.bucket(storage_bucket_name)

    if storage_base_path:
        GCPConfig.STORAGE_BASE_PATH = storage_base_path

    # download and extract index from public bucket
    index_folder_path, index_blob, index_generation = download_and_extract_index(
        storage_bucket, extract_destination_path)

    # content repo client initialized
    content_repo = get_content_git_client(CONTENT_ROOT_PATH)
    current_commit_hash, previous_commit_hash = get_recent_commits_data(
        content_repo, index_folder_path, is_bucket_upload_flow, circle_branch)

    # detect packs to upload
    pack_names = get_packs_names(target_packs, previous_commit_hash)
    extract_packs_artifacts(packs_artifacts_path, extract_destination_path)
    packs_list = [
        Pack(pack_name, os.path.join(extract_destination_path, pack_name))
        for pack_name in pack_names
        if os.path.exists(os.path.join(extract_destination_path, pack_name))
    ]

    if not option.override_all_packs:
        check_if_index_is_updated(index_folder_path, content_repo,
                                  current_commit_hash, previous_commit_hash,
                                  storage_bucket)

    # google cloud bigquery client initialized
    bq_client = init_bigquery_client(service_account)
    packs_statistic_df = get_packs_statistics_dataframe(bq_client)
    updated_private_packs_ids = []
    if private_bucket_name:  # Add private packs to the index
        private_storage_bucket = storage_client.bucket(private_bucket_name)
        private_packs, _, _, updated_private_packs_ids = update_index_with_priced_packs(
            private_storage_bucket, extract_destination_path,
            index_folder_path, pack_names)
    else:  # skipping private packs
        logging.debug("Skipping index update of priced packs")
        private_packs = []

    # clean non-existing or invalid packs from the index and GCS
    clean_non_existing_packs(index_folder_path, private_packs, storage_bucket)

    # starting iteration over packs
    for pack in packs_list:
        task_status, user_metadata = pack.load_user_metadata()
        if not task_status:
            pack.status = PackStatus.FAILED_LOADING_USER_METADATA.name
            pack.cleanup()
            continue

        task_status, pack_content_items = pack.collect_content_items()
        if not task_status:
            pack.status = PackStatus.FAILED_COLLECT_ITEMS.name
            pack.cleanup()
            continue

        task_status, integration_images = pack.upload_integration_images(
            storage_bucket)
        if not task_status:
            pack.status = PackStatus.FAILED_IMAGES_UPLOAD.name
            pack.cleanup()
            continue

        task_status, author_image = pack.upload_author_image(storage_bucket)
        if not task_status:
            pack.status = PackStatus.FAILED_AUTHOR_IMAGE_UPLOAD.name
            pack.cleanup()
            continue

        task_status, pack_was_modified = pack.detect_modified(
            content_repo, index_folder_path, current_commit_hash,
            previous_commit_hash)
        if not task_status:
            pack.status = PackStatus.FAILED_DETECTING_MODIFIED_FILES.name
            pack.cleanup()
            continue

        task_status = pack.format_metadata(
            user_metadata=user_metadata,
            pack_content_items=pack_content_items,
            integration_images=integration_images,
            author_image=author_image,
            index_folder_path=index_folder_path,
            packs_dependencies_mapping=packs_dependencies_mapping,
            build_number=build_number,
            commit_hash=current_commit_hash,
            packs_statistic_df=packs_statistic_df,
            pack_was_modified=pack_was_modified)
        if not task_status:
            pack.status = PackStatus.FAILED_METADATA_PARSING.name
            pack.cleanup()
            continue

        task_status, not_updated_build = pack.prepare_release_notes(
            index_folder_path, build_number, pack_was_modified)
        if not task_status:
            pack.status = PackStatus.FAILED_RELEASE_NOTES.name
            pack.cleanup()
            continue

        if not_updated_build:
            pack.status = PackStatus.PACK_IS_NOT_UPDATED_IN_RUNNING_BUILD.name
            pack.cleanup()
            continue

        task_status = pack.remove_unwanted_files(remove_test_playbooks)
        if not task_status:
            pack.status = PackStatus.FAILED_REMOVING_PACK_SKIPPED_FOLDERS.name
            pack.cleanup()
            continue

        task_status = pack.sign_pack(signature_key)
        if not task_status:
            pack.status = PackStatus.FAILED_SIGNING_PACKS.name
            pack.cleanup()
            continue

        task_status, zip_pack_path = pack.zip_pack()
        if not task_status:
            pack.status = PackStatus.FAILED_ZIPPING_PACK_ARTIFACTS.name
            pack.cleanup()
            continue

        (task_status, skipped_pack_uploading, full_pack_path) = \
            pack.upload_to_storage(zip_pack_path, pack.latest_version,
                                   storage_bucket, override_all_packs
                                   or pack_was_modified)

        if not task_status:
            pack.status = PackStatus.FAILED_UPLOADING_PACK.name
            pack.cleanup()
            continue

        task_status, exists_in_index = pack.check_if_exists_in_index(
            index_folder_path)
        if not task_status:
            pack.status = PackStatus.FAILED_SEARCHING_PACK_IN_INDEX.name
            pack.cleanup()
            continue

        task_status = pack.prepare_for_index_upload()
        if not task_status:
            pack.status = PackStatus.FAILED_PREPARING_INDEX_FOLDER.name
            pack.cleanup()
            continue

        task_status = update_index_folder(index_folder_path=index_folder_path,
                                          pack_name=pack.name,
                                          pack_path=pack.path,
                                          pack_version=pack.latest_version,
                                          hidden_pack=pack.hidden)
        if not task_status:
            pack.status = PackStatus.FAILED_UPDATING_INDEX_FOLDER.name
            pack.cleanup()
            continue

        # If the pack already exists in the cloud storage path and in the index, don't mark it as changed
        if skipped_pack_uploading and exists_in_index:
            pack.status = PackStatus.PACK_ALREADY_EXISTS.name
            pack.cleanup()
            continue

        pack.status = PackStatus.SUCCESS.name

    # upload core packs json to bucket
    upload_core_packs_config(storage_bucket, build_number, index_folder_path)

    # finished iteration over content packs
    upload_index_to_storage(index_folder_path=index_folder_path,
                            extract_destination_path=extract_destination_path,
                            index_blob=index_blob,
                            build_number=build_number,
                            private_packs=private_packs,
                            current_commit_hash=current_commit_hash,
                            index_generation=index_generation,
                            force_upload=force_upload,
                            previous_commit_hash=previous_commit_hash)

    # upload id_set.json to bucket
    upload_id_set(storage_bucket, id_set_path)

    # get the lists of packs divided by their status
    successful_packs, skipped_packs, failed_packs = get_packs_summary(
        packs_list)

    # Store successful and failed packs list in CircleCI artifacts - to be used in Upload Packs To Marketplace job
    packs_results_file_path = os.path.join(
        os.path.dirname(packs_artifacts_path),
        BucketUploadFlow.PACKS_RESULTS_FILE)
    store_successful_and_failed_packs_in_ci_artifacts(
        packs_results_file_path, BucketUploadFlow.PREPARE_CONTENT_FOR_TESTING,
        successful_packs, failed_packs, updated_private_packs_ids)

    # summary of packs status
    print_packs_summary(successful_packs, skipped_packs, failed_packs,
                        not is_bucket_upload_flow)
Example #15
def upload_index_to_storage(index_folder_path: str, extract_destination_path: str, index_blob: Any,
                            build_number: str, private_packs: list, current_commit_hash: str,
                            index_generation: int, is_private: bool = False, force_upload: bool = False,
                            previous_commit_hash: str = None, landing_page_sections: dict = None):
    """
    Upload updated index zip to cloud storage.

    :param index_folder_path: index folder full path.
    :param extract_destination_path: extract folder full path.
    :param index_blob: google cloud storage object that represents index.zip blob.
    :param build_number: circleCI build number, used as an index revision.
    :param private_packs: List of private packs and their price.
    :param current_commit_hash: last commit hash of head.
    :param index_generation: downloaded index generation.
    :param is_private: Indicates if upload is private.
    :param force_upload: Indicates if force upload or not.
    :param previous_commit_hash: The previous commit hash to diff with.
    :param landing_page_sections: landingPage sections.
    :return: None.

    """
    if force_upload:
        # On a force upload we don't update the commit in the index.json file,
        # so that all changed packs can still be identified in the next upload
        commit = previous_commit_hash
        logging.info('Force upload flow - Index commit hash should not be changed')
    else:
        # Otherwise, update the index with the current commit hash (the commit of the upload)
        commit = current_commit_hash
        logging.info('Updating production index commit hash to master last commit hash')

    if not landing_page_sections:
        landing_page_sections = load_json(LANDING_PAGE_SECTIONS_PATH)

    logging.debug(f'commit hash is: {commit}')
    with open(os.path.join(index_folder_path, f"{GCPConfig.INDEX_NAME}.json"), "w+") as index_file:
        index = {
            'revision': build_number,
            'modified': datetime.utcnow().strftime(Metadata.DATE_FORMAT),
            'packs': private_packs,
            'commit': commit,
            'landingPage': {'sections': landing_page_sections.get('sections', [])}
        }
        json.dump(index, index_file, indent=4)

    index_zip_name = os.path.basename(index_folder_path)
    index_zip_path = shutil.make_archive(base_name=index_folder_path, format="zip",
                                         root_dir=extract_destination_path, base_dir=index_zip_name)
    try:
        index_blob.reload()
        current_index_generation = index_blob.generation
        index_blob.cache_control = "no-cache,max-age=0"  # disabling caching for index blob

        if is_private or current_index_generation == index_generation:
            index_blob.upload_from_filename(index_zip_path)
            logging.success(f"Finished uploading {GCPConfig.INDEX_NAME}.zip to storage.")
        else:
            logging.critical(f"Failed in uploading {GCPConfig.INDEX_NAME}, mismatch in index file generation")
            logging.critical(f"Downloaded index generation: {index_generation}")
            logging.critical(f"Current index generation: {current_index_generation}")
            sys.exit(0)
    except Exception:
        logging.exception(f"Failed in uploading {GCPConfig.INDEX_NAME}.")
        sys.exit(1)
    finally:
        shutil.rmtree(index_folder_path)
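The try block above is a manual compare-and-swap: reload the blob, compare generations, and only then upload. There is still a small window between reload() and the upload; recent google-cloud-storage releases can close it by passing if_generation_match, which makes GCS reject the write server-side when the generation has changed. A hedged sketch of that variant:

from google.api_core.exceptions import PreconditionFailed

try:
    # Ask GCS to reject the upload unless the blob still has the generation we
    # downloaded earlier; a mismatch raises PreconditionFailed (HTTP 412).
    index_blob.upload_from_filename(index_zip_path,
                                    if_generation_match=index_generation)
except PreconditionFailed:
    logging.critical(f"Failed uploading {GCPConfig.INDEX_NAME}.zip, "
                     f"index generation changed during the upload.")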