Code Example #1
def bucket_upload_results():
    steps_fields = []
    logging.info(f'retrieving upload data from "{PACK_RESULTS_PATH}"')
    successful_packs, failed_packs, successful_private_packs, _ = get_upload_data(
        PACK_RESULTS_PATH, BucketUploadFlow.UPLOAD_PACKS_TO_MARKETPLACE_STORAGE
    )
    if successful_packs:
        steps_fields += [{
            "title": "Successful Packs:",
            "value": "\n".join(sorted([pack_name for pack_name in {*successful_packs}], key=lambda s: s.lower())),
            "short": False
        }]
    if failed_packs:
        steps_fields += [{
            "title": "Failed Packs:",
            "value": "\n".join(sorted([pack_name for pack_name in {*failed_packs}], key=lambda s: s.lower())),
            "short": False
        }]
    if successful_private_packs:
        steps_fields += [{
            "title": "Successful Private Packs:",
            "value": "\n".join(sorted([pack_name for pack_name in {*successful_private_packs}],
                                      key=lambda s: s.lower())),
            "short": False
        }]
    return steps_fields
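
The fields built above are shaped for Slack's attachment format. A minimal sketch of how the result might be wrapped into an attachment payload; the wrapper name, title string, and color rule are illustrative assumptions, not part of the original code:

def build_upload_attachment(title='Upload Packs To Marketplace'):
    # Hypothetical wrapper around bucket_upload_results(); the title is an assumed default.
    fields = bucket_upload_results()
    # Mark the attachment red if any 'Failed ...' field is present, green otherwise.
    failed = any(field['title'].startswith('Failed') for field in fields)
    return [{
        'fallback': title,
        'color': 'danger' if failed else 'good',
        'title': title,
        'fields': fields
    }]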
Code Example #2
def bucket_upload_results(bucket_artifact_folder):
    steps_fields = []
    pack_results_path = os.path.join(bucket_artifact_folder,
                                     BucketUploadFlow.PACKS_RESULTS_FILE)
    marketplace_name = os.path.basename(bucket_artifact_folder).upper()

    logging.info(f'retrieving upload data from "{pack_results_path}"')
    successful_packs, failed_packs, successful_private_packs, _ = get_upload_data(
        pack_results_path,
        BucketUploadFlow.UPLOAD_PACKS_TO_MARKETPLACE_STORAGE)
    if successful_packs:
        steps_fields += [{
            'title': f'Successful {marketplace_name} Packs:',
            'value': '\n'.join(sorted(set(successful_packs), key=lambda s: s.lower())),
            'short': False
        }]

    if failed_packs:
        steps_fields += [{
            'title': f'Failed {marketplace_name} Packs:',
            'value': '\n'.join(sorted(set(failed_packs), key=lambda s: s.lower())),
            'short': False
        }]

    if successful_private_packs:
        # No need to indicate the marketplace name, as private packs are only uploaded to the xsoar marketplace.
        steps_fields += [{
            'title': 'Successful Private Packs:',
            'value': '\n'.join(sorted(set(successful_private_packs), key=lambda s: s.lower())),
            'short': False
        }]

    return steps_fields
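
A hedged usage note: the marketplace name comes from the basename of the folder argument, so the field titles vary per marketplace. The path below is a hypothetical example, not taken from the original:

# Hypothetical call; '/builds/artifacts/marketplacev2' is an assumed path.
# os.path.basename() yields 'marketplacev2', so the titles read
# 'Successful MARKETPLACEV2 Packs:' and 'Failed MARKETPLACEV2 Packs:'.
fields = bucket_upload_results('/builds/artifacts/marketplacev2')
for field in fields:
    print(field['title'])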
Code Example #3
File: slack_notifier.py  Project: pxjohnny/content
def get_attachments_for_bucket_upload_flow(build_url,
                                           job_name,
                                           packs_results_file_path=None):
    steps_fields = get_entities_fields(entity_title="Failed Steps")
    color = 'good' if not steps_fields else 'danger'
    title = f'{BucketUploadFlow.BUCKET_UPLOAD_BUILD_TITLE} - Success' if not steps_fields \
        else f'{BucketUploadFlow.BUCKET_UPLOAD_BUILD_TITLE} - Failure'

    if job_name and color == 'danger':
        steps_fields = [{
            "title": f'Job Failed: {job_name}',
            "value": '',
            "short": False
        }] + steps_fields

    if job_name and job_name == BucketUploadFlow.UPLOAD_JOB_NAME:
        successful_packs, failed_packs, successful_private_packs, _ = get_upload_data(
            packs_results_file_path,
            BucketUploadFlow.UPLOAD_PACKS_TO_MARKETPLACE_STORAGE)
        if successful_packs:
            steps_fields += [{
                "title": "Successful Packs:",
                "value": "\n".join(sorted(set(successful_packs), key=lambda s: s.lower())),
                "short": False
            }]
        if failed_packs:
            steps_fields += [{
                "title": "Failed Packs:",
                "value": "\n".join(sorted(set(failed_packs), key=lambda s: s.lower())),
                "short": False
            }]
        if successful_private_packs:
            steps_fields += [{
                "title": "Successful Private Packs:",
                "value": "\n".join(sorted(set(successful_private_packs), key=lambda s: s.lower())),
                "short": False
            }]

    if job_name and job_name != 'Upload Packs To Marketplace' and color == 'good':
        logging.info(
            'In the bucket upload flow we do not notify on jobs other than Upload Packs. Exiting...'
        )
        sys.exit(0)

    container_build_url = build_url + '#queue-placeholder/containers/0'
    content_team_attachment = [{
        'fallback': title,
        'color': color,
        'title': title,
        'title_link': container_build_url,
        'fields': steps_fields
    }]
    return content_team_attachment
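
The returned list is ready to pass to Slack's chat.postMessage API. A minimal posting sketch, assuming the slack_sdk client and a SLACK_TOKEN environment variable (both assumptions; the original file may use a different Slack client):

import os
from slack_sdk import WebClient  # assumed client library

def post_upload_notification(build_url, job_name, results_path):
    attachments = get_attachments_for_bucket_upload_flow(build_url, job_name, results_path)
    client = WebClient(token=os.environ['SLACK_TOKEN'])  # assumed token source
    # chat.postMessage accepts the attachment list built above as-is;
    # the channel name here is a placeholder.
    client.chat_postMessage(channel='#content-team', text=attachments[0]['fallback'],
                            attachments=attachments)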
Code Example #4
def main():
    install_logging('Copy_and_Upload_Packs.log', logger=logging)
    options = options_handler()
    packs_artifacts_path = options.artifacts_path
    extract_destination_path = options.extract_path
    production_bucket_name = options.production_bucket_name
    build_bucket_name = options.build_bucket_name
    service_account = options.service_account
    build_number = options.ci_build_number
    circle_branch = options.circle_branch
    production_base_path = options.production_base_path
    target_packs = options.pack_names
    marketplace = options.marketplace

    # Initialize the Google Cloud Storage client
    storage_client = init_storage_client(service_account)
    production_bucket = storage_client.bucket(production_bucket_name)
    build_bucket = storage_client.bucket(build_bucket_name)

    # Initialize build and prod base paths
    build_bucket_path = os.path.join(GCPConfig.BUILD_PATH_PREFIX,
                                     circle_branch, build_number, marketplace)
    build_bucket_base_path = os.path.join(build_bucket_path,
                                          GCPConfig.CONTENT_PACKS_PATH)

    # Relevant when triggering test upload flow
    if production_bucket_name:
        GCPConfig.PRODUCTION_BUCKET = production_bucket_name

    # Download and extract build index from build and prod buckets
    build_index_folder_path, build_index_blob, build_index_generation = \
        download_and_extract_index(build_bucket, extract_destination_path, build_bucket_base_path)

    # Get the successful and failed packs file from the Prepare Content step in the Create Instances job, if it exists
    packs_results_file_path = os.path.join(
        os.path.dirname(packs_artifacts_path),
        BucketUploadFlow.PACKS_RESULTS_FILE)
    pc_successful_packs_dict, pc_failed_packs_dict, pc_successful_private_packs_dict, \
        pc_uploaded_images = get_upload_data(packs_results_file_path, BucketUploadFlow.PREPARE_CONTENT_FOR_TESTING)

    logging.debug(
        f"Successful packs from Prepare Content: {pc_successful_packs_dict}")
    logging.debug(f"Failed packs from Prepare Content: {pc_failed_packs_dict}")
    logging.debug(
        f"Successful private packs from Prepare Content: {pc_successful_private_packs_dict}"
    )
    logging.debug(f"Images from Prepare Content: {pc_uploaded_images}")

    # Check whether an upload is needed
    check_if_need_to_upload(pc_successful_packs_dict, pc_failed_packs_dict,
                            pc_successful_private_packs_dict,
                            pc_uploaded_images)

    # Detect packs to upload
    pack_names = get_pack_names(target_packs)
    extract_packs_artifacts(packs_artifacts_path, extract_destination_path)
    packs_list = [
        Pack(pack_name, os.path.join(extract_destination_path, pack_name),
             marketplace) for pack_name in pack_names
        if os.path.exists(os.path.join(extract_destination_path, pack_name))
    ]

    # Starting iteration over packs
    for pack in packs_list:
        # Indicates whether the pack failed to upload in the Prepare Content step
        task_status, pack_status = pack.is_failed_to_upload(
            pc_failed_packs_dict)
        if task_status:
            pack.status = pack_status
            pack.cleanup()
            continue

        task_status = pack.load_user_metadata(marketplace)
        if not task_status:
            pack.status = PackStatus.FAILED_LOADING_USER_METADATA.name
            pack.cleanup()
            continue

        task_status = pack.copy_integration_images(production_bucket,
                                                   build_bucket,
                                                   pc_uploaded_images,
                                                   production_base_path,
                                                   build_bucket_base_path)
        if not task_status:
            pack.status = PackStatus.FAILED_IMAGES_UPLOAD.name
            pack.cleanup()
            continue

        task_status = pack.copy_author_image(production_bucket, build_bucket,
                                             pc_uploaded_images,
                                             production_base_path,
                                             build_bucket_base_path)
        if not task_status:
            pack.status = PackStatus.FAILED_AUTHOR_IMAGE_UPLOAD.name
            pack.cleanup()
            continue

        task_status, skipped_pack_uploading = pack.copy_and_upload_to_storage(
            production_bucket, build_bucket, pc_successful_packs_dict,
            production_base_path, build_bucket_base_path)
        if skipped_pack_uploading:
            pack.status = PackStatus.PACK_ALREADY_EXISTS.name
            pack.cleanup()
            continue

        if not task_status:
            pack.status = PackStatus.FAILED_UPLOADING_PACK.name
            pack.cleanup()
            continue

        pack.status = PackStatus.SUCCESS.name

    # upload core packs json to bucket
    upload_core_packs_config(production_bucket, build_number,
                             extract_destination_path, build_bucket,
                             production_base_path, build_bucket_base_path)

    # finished iteration over content packs
    copy_index(build_index_folder_path, build_index_blob,
               build_index_generation, production_bucket, build_bucket,
               production_base_path, build_bucket_base_path)

    # upload id_set.json to bucket
    copy_id_set(production_bucket, build_bucket, production_base_path,
                build_bucket_base_path)

    # get the lists of packs divided by their status
    successful_packs, skipped_packs, failed_packs = get_packs_summary(
        packs_list)

    # Store successful and failed packs list in CircleCI artifacts
    store_successful_and_failed_packs_in_ci_artifacts(
        packs_results_file_path,
        BucketUploadFlow.UPLOAD_PACKS_TO_MARKETPLACE_STORAGE, successful_packs,
        failed_packs, list(pc_successful_private_packs_dict))

    # verify that the packs that succeeded in Prepare Content are the ones that were copied
    verify_copy(successful_packs, pc_successful_packs_dict)

    # summary of packs status
    print_packs_summary(successful_packs, skipped_packs, failed_packs)
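
get_packs_summary is not shown in these excerpts. A minimal sketch of the partitioning it implies, assuming each Pack carries the status attribute set in the loop above:

def get_packs_summary(packs_list):
    # Sketch only: split packs by the PackStatus names assigned in main().
    successful = [p for p in packs_list if p.status == PackStatus.SUCCESS.name]
    skipped = [p for p in packs_list if p.status == PackStatus.PACK_ALREADY_EXISTS.name]
    failed = [p for p in packs_list if p.status not in
              (PackStatus.SUCCESS.name, PackStatus.PACK_ALREADY_EXISTS.name)]
    return successful, skipped, failed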
Code Example #5
    else:
        steps_fields = []
    color = 'good' if not steps_fields else 'danger'
    title = f'{BucketUploadFlow.BUCKET_UPLOAD_BUILD_TITLE} - Success' if not steps_fields \
        else f'{BucketUploadFlow.BUCKET_UPLOAD_BUILD_TITLE} - Failure'

    if job_name and color == 'danger':
        steps_fields = [{
            "title": f'Job Failed: {job_name}',
            "value": '',
            "short": False
        }] + steps_fields

    if job_name and job_name in BucketUploadFlow.UPLOAD_JOB_NAMES:
        successful_packs, failed_packs, successful_private_packs, _ = get_upload_data(
            packs_results_file_path, BucketUploadFlow.UPLOAD_PACKS_TO_MARKETPLACE_STORAGE
        )
        if successful_packs:
            steps_fields += [{
                "title": "Successful Packs:",
                "value": "\n".join(sorted([pack_name for pack_name in {*successful_packs}], key=lambda s: s.lower())),
                "short": False
            }]
        if failed_packs:
            steps_fields += [{
                "title": "Failed Packs:",
                "value": "\n".join(sorted([pack_name for pack_name in {*failed_packs}], key=lambda s: s.lower())),
                "short": False
            }]
        if successful_private_packs:
            steps_fields += [{
                "title": "Successful Private Packs:",
                "value": "\n".join(sorted([pack_name for pack_name in {*successful_private_packs}], key=lambda s: s.lower())),
                "short": False
            }]