def get_attachments_for_bucket_upload_flow(build_url, job_name, packs_results_file_path=None):
    """Build the Slack attachment payload for a bucket-upload-flow build notification.

    Args:
        build_url (str): URL of the CI build; '#queue-placeholder/containers/0' is
            appended to link directly to the container view.
        job_name (str): Name of the CI job that triggered the notification.
        packs_results_file_path (str, optional): Path to the packs results file from
            which successful/failed packs of the upload step are read.

    Returns:
        list[dict]: A single-element list holding the Slack attachment dict.

    Note:
        Exits the process (``sys.exit(0)``) when the job succeeded but is not the
        upload job — no notification is sent for such jobs.
    """
    steps_fields = get_entities_fields(entity_title="Failed Steps")
    # Any failed step turns the attachment red and the title into a failure title.
    color = 'good' if not steps_fields else 'danger'
    title = f'{BucketUploadFlow.BUCKET_UPLOAD_BUILD_TITLE} - Success' if not steps_fields \
        else f'{BucketUploadFlow.BUCKET_UPLOAD_BUILD_TITLE} - Failure'

    if job_name and color == 'danger':
        # Prepend the failing job name so it shows first in the attachment.
        steps_fields = [{
            "title": f'Job Failed: {job_name}',
            "value": '',
            "short": False
        }] + steps_fields

    if job_name and job_name == BucketUploadFlow.UPLOAD_JOB_NAME:
        successful_packs, failed_packs, _ = get_successful_and_failed_packs(
            packs_results_file_path, BucketUploadFlow.UPLOAD_PACKS_TO_MARKETPLACE_STORAGE)
        if successful_packs:
            steps_fields += [{
                "title": "Successful Packs:",
                # Dedupe pack names; sorted() keeps the message deterministic
                # (joining a raw set produced an arbitrary order per run).
                "value": "\n".join(sorted({*successful_packs})),
                "short": False
            }]
        if failed_packs:
            steps_fields += [{
                "title": "Failed Packs:",
                "value": "\n".join(
                    f"{pack_name}: {PackStatus[pack_data.get(BucketUploadFlow.STATUS)].value}"
                    for pack_name, pack_data in failed_packs.items()
                ),
                "short": False
            }]

    # Fix: compare against the shared constant instead of a hard-coded duplicate of
    # BucketUploadFlow.UPLOAD_JOB_NAME, so this guard cannot drift from the check above.
    if job_name and job_name != BucketUploadFlow.UPLOAD_JOB_NAME and color == 'good':
        logging.info(
            'On bucket upload flow we are not notifying on jobs that are not Upload Packs. exiting...'
        )
        sys.exit(0)

    container_build_url = build_url + '#queue-placeholder/containers/0'
    content_team_attachment = [{
        'fallback': title,
        'color': color,
        'title': title,
        'title_link': container_build_url,
        'fields': steps_fields
    }]
    return content_team_attachment
def main():
    """Copy content packs from the build GCS bucket to the production bucket.

    Orchestrates the upload step of the bucket-upload flow:
    reads CLI options, initializes GCS clients, downloads the build index,
    loads per-pack results from the Prepare Content step, copies each pack's
    images/changelog/zip to production, then copies the index, core-packs
    config and id_set, and finally stores and prints a packs summary.

    Side effects: mutates ``GCPConfig`` module-level paths, performs GCS I/O,
    writes a CI artifacts results file, and may terminate the process via the
    helpers it calls (e.g. ``check_if_need_to_upload``).
    """
    install_logging('Copy_and_Upload_Packs.log')
    options = options_handler()
    packs_artifacts_path = options.artifacts_path
    extract_destination_path = options.extract_path
    production_bucket_name = options.production_bucket_name
    build_bucket_name = options.build_bucket_name
    service_account = options.service_account
    build_number = options.ci_build_number
    circle_branch = options.circle_branch
    production_base_path = options.production_base_path
    target_packs = options.pack_names

    # Initialize the Google Cloud Storage client and both bucket handles.
    storage_client = init_storage_client(service_account)
    production_bucket = storage_client.bucket(production_bucket_name)
    build_bucket = storage_client.bucket(build_bucket_name)

    # Initialize build and prod base paths.
    # NOTE(review): BUILD_BASE_PATH is derived from STORAGE_BASE_PATH *before* the
    # production_base_path override below — presumably intentional (the build bucket
    # layout uses the default base path) — confirm against the flow's other scripts.
    build_bucket_path = os.path.join(GCPConfig.BUILD_PATH_PREFIX, circle_branch, build_number)
    GCPConfig.BUILD_BASE_PATH = os.path.join(build_bucket_path, GCPConfig.STORAGE_BASE_PATH)
    if production_base_path:
        GCPConfig.STORAGE_BASE_PATH = production_base_path

    # Download and extract the build index from the build bucket.
    build_index_folder_path, build_index_blob, build_index_generation = \
        download_and_extract_index(build_bucket, extract_destination_path)

    # Load the successful/failed packs recorded by the Prepare Content step
    # (Create Instances job), if a results file exists next to the artifacts.
    packs_results_file_path = os.path.join(
        os.path.dirname(packs_artifacts_path), BucketUploadFlow.PACKS_RESULTS_FILE)
    pc_successful_packs_dict, pc_failed_packs_dict, pc_successful_private_packs_dict = get_successful_and_failed_packs(
        packs_results_file_path, BucketUploadFlow.PREPARE_CONTENT_FOR_TESTING)
    logging.debug(
        f"Successful packs from Prepare Content: {pc_successful_packs_dict}")
    logging.debug(f"Failed packs from Prepare Content: {pc_failed_packs_dict}")
    logging.debug(
        f"Successful private packs from Prepare Content: {pc_successful_private_packs_dict}"
    )

    # Decide whether an upload is needed at all (helper may exit the process).
    check_if_need_to_upload(pc_successful_packs_dict, pc_failed_packs_dict)

    # Detect packs to upload: only pack names whose extracted directory exists.
    pack_names = get_pack_names(target_packs)
    extract_packs_artifacts(packs_artifacts_path, extract_destination_path)
    packs_list = [
        Pack(pack_name, os.path.join(extract_destination_path, pack_name))
        for pack_name in pack_names
        if os.path.exists(os.path.join(extract_destination_path, pack_name))
    ]

    # Process each pack; on any failed step, record the status, clean up and
    # move on to the next pack (failures are aggregated, not fatal).
    for pack in packs_list:
        # Indicates whether the pack already failed during the Prepare Content step.
        task_status, pack_status = pack.is_failed_to_upload(
            pc_failed_packs_dict)
        if task_status:
            pack.status = pack_status
            pack.cleanup()
            continue

        task_status, user_metadata = pack.load_user_metadata()
        if not task_status:
            pack.status = PackStatus.FAILED_LOADING_USER_METADATA.name
            pack.cleanup()
            continue

        task_status = pack.copy_integration_images(production_bucket,
                                                   build_bucket)
        if not task_status:
            pack.status = PackStatus.FAILED_IMAGES_UPLOAD.name
            pack.cleanup()
            continue

        task_status = pack.copy_author_image(production_bucket, build_bucket)
        if not task_status:
            pack.status = PackStatus.FAILED_AUTHOR_IMAGE_UPLOAD.name
            pack.cleanup()
            continue

        # Create a local copy of the pack's index changelog.
        task_status = pack.create_local_changelog(build_index_folder_path)
        if not task_status:
            pack.status = PackStatus.FAILED_RELEASE_NOTES.name
            pack.cleanup()
            continue

        # Copy the pack zip itself; a skip means the version already exists in prod.
        task_status, skipped_pack_uploading = pack.copy_and_upload_to_storage(
            production_bucket, build_bucket, pack.latest_version,
            pc_successful_packs_dict)
        if skipped_pack_uploading:
            pack.status = PackStatus.PACK_ALREADY_EXISTS.name
            pack.cleanup()
            continue

        if not task_status:
            pack.status = PackStatus.FAILED_UPLOADING_PACK.name
            pack.cleanup()
            continue

        pack.status = PackStatus.SUCCESS.name

    # Upload the core packs config JSON to the production bucket.
    upload_core_packs_config(production_bucket, build_number,
                             extract_destination_path, build_bucket)

    # Finished iterating over content packs — copy the index to production.
    copy_index(build_index_folder_path, build_index_blob,
               build_index_generation, production_bucket, build_bucket)

    # Copy id_set.json to the production bucket.
    copy_id_set(production_bucket, build_bucket)

    # Partition packs by their final status.
    successful_packs, skipped_packs, failed_packs = get_packs_summary(
        packs_list)

    # Store the successful and failed packs lists in CI artifacts.
    store_successful_and_failed_packs_in_ci_artifacts(
        packs_results_file_path,
        BucketUploadFlow.UPLOAD_PACKS_TO_MARKETPLACE_STORAGE, successful_packs,
        failed_packs, list(pc_successful_private_packs_dict))

    # Verify that the packs reported successful by Prepare Content are exactly
    # the ones that were copied to production.
    verify_copy(successful_packs, pc_successful_packs_dict)

    # Print a summary of pack statuses.
    print_packs_summary(successful_packs, skipped_packs, failed_packs)