def run(self) -> beam.PCollection[job_run_result.JobRunResult]:
    """Returns a PCollection of 'SUCCESS' or 'FAILURE' results from
    deleting ExplorationOpportunitySummaryModel.

    Returns:
        PCollection. A PCollection of 'SUCCESS' or 'FAILURE' results from
        deleting ExplorationOpportunitySummaryModel.
    """
    # All non-deleted opportunity models; this PCollection is consumed
    # twice (once for deletion, once for the job-result count).
    exp_opportunity_summary_model = (
        self.pipeline
        | 'Get all non-deleted opportunity models' >> ndb_io.GetModels(
            opportunity_models.ExplorationOpportunitySummaryModel.get_all(
                include_deleted=False))
    )

    # NOTE: the key-extraction step is explicitly labeled. A bare
    # (unlabeled) transform receives an auto-generated label, which can
    # collide with another unlabeled transform in the same pipeline and
    # raise a RuntimeError; every other stage in this job is labeled.
    unused_delete_result = (
        exp_opportunity_summary_model
        | 'Extract model keys' >> beam.Map(lambda model: model.key)
        | 'Delete all models' >> ndb_io.DeleteModels()
    )

    return (
        exp_opportunity_summary_model
        | 'Create job run result' >> (
            job_result_transforms.CountObjectsToJobRunResult())
    )
def run(self) -> beam.PCollection[job_run_result.JobRunResult]:
    """Deletes all ExplorationOpportunitySummaryModel instances.

    Returns:
        PCollection. A PCollection containing a single
        'SUCCESS <count>' job run result when at least one model was
        deleted; an empty PCollection when there were no models to
        delete. (No 'FAILURE' result is ever produced by this job.)
    """
    exp_opportunity_summary_model = (
        self.pipeline
        | 'Get all non-deleted opportunity models' >> ndb_io.GetModels(
            opportunity_models.ExplorationOpportunitySummaryModel.get_all(
                include_deleted=False)))

    # NOTE: the key-extraction step is explicitly labeled. A bare
    # (unlabeled) transform receives an auto-generated label, which can
    # collide with another unlabeled transform in the same pipeline and
    # raise a RuntimeError; every other stage in this job is labeled.
    unused_delete_result = (
        exp_opportunity_summary_model
        | 'Extract model keys' >> beam.Map(lambda model: model.key)
        | 'Delete all models' >> ndb_io.DeleteModels())

    # Emit one result carrying the total count, but only when the count
    # is non-zero, so a no-op run produces no output.
    return (
        exp_opportunity_summary_model
        | 'Count all new models' >> beam.combiners.Count.Globally()
        | 'Only create result for new models when > 0' >> (
            beam.Filter(lambda n: n > 0))
        | 'Create result for new models' >> beam.Map(
            lambda n: job_run_result.JobRunResult(
                stdout='SUCCESS %s' % n)))
def test_delete_from_datastore(self) -> None:
    """Checks that DeleteModels removes every model whose key it gets."""
    # Seed the datastore with three base models.
    models = [
        self.create_model(base_models.BaseModel, id=model_id)
        for model_id in ('a', 'b', 'c')
    ]
    self.put_multi(models)

    # Sanity check: all three models are present before deletion.
    self.assertItemsEqual(self.get_base_models(), models)  # type: ignore[no-untyped-call]

    keys = [model.key for model in models]
    deletion_pipeline = (
        self.pipeline
        | beam.Create(keys)
        | ndb_io.DeleteModels()
    )
    self.assert_pcoll_empty(deletion_pipeline)

    # After the pipeline runs, the datastore should hold no models.
    self.assertItemsEqual(self.get_base_models(), [])  # type: ignore[no-untyped-call]
def run(self) -> beam.PCollection[job_run_result.JobRunResult]:
    """Deletes email-related models belonging to deleted users.

    Returns:
        PCollection. Job run results reporting, per model kind, how many
        models were selected for deletion.
    """
    # IDs of all deleted users, materialized as a side input so each
    # filter below can test membership against the full collection.
    deleted_user_id_pcoll = (
        self.pipeline
        | 'Get all deleted user models' >> ndb_io.GetModels(
            user_models.DeletedUserModel.get_all())
        | 'Extract user IDs' >> beam.Map(lambda model: model.id))
    deleted_user_ids = beam.pvalue.AsIter(deleted_user_id_pcoll)

    # Sent emails are deleted when either the sender or the recipient
    # is a deleted user.
    sent_to_delete = (
        self.pipeline
        | 'Get all sent email models' >> ndb_io.GetModels(
            email_models.SentEmailModel.get_all())
        | 'Filter sent email models that belong to deleted users' >> (
            beam.Filter(
                lambda model, del_ids: (
                    model.sender_id in del_ids
                    or model.recipient_id in del_ids),
                del_ids=deleted_user_ids)))
    sent_result = (
        sent_to_delete
        | 'Count sent email models to be deleted' >> (
            job_result_transforms.CountObjectsToJobRunResult(
                'SENT EMAILS')))

    # Bulk emails are keyed on the sender only.
    bulk_to_delete = (
        self.pipeline
        | 'Get all bulk email models' >> ndb_io.GetModels(
            email_models.BulkEmailModel.get_all())
        | 'Filter bulk email models that belong to deleted users' >> (
            beam.Filter(
                lambda model, del_ids: model.sender_id in del_ids,
                del_ids=deleted_user_ids)))
    bulk_result = (
        bulk_to_delete
        | 'Count bulk email models to be deleted' >> (
            job_result_transforms.CountObjectsToJobRunResult(
                'BULK EMAILS')))

    # Unsent feedback email models use the user ID as their own ID.
    feedback_to_delete = (
        self.pipeline
        | 'Get all unsent feedback models' >> ndb_io.GetModels(
            feedback_models.UnsentFeedbackEmailModel.get_all())
        | 'Filter unsent feedback models that belong to deleted users' >> (
            beam.Filter(
                lambda model, del_ids: model.id in del_ids,
                del_ids=deleted_user_ids)))
    feedback_result = (
        feedback_to_delete
        | 'Count unsent feedback email models to be deleted' >> (
            job_result_transforms.CountObjectsToJobRunResult(
                'FEEDBACK EMAILS')))

    # User bulk email models also use the user ID as their own ID.
    user_bulk_to_delete = (
        self.pipeline
        | 'Get all user bulk email models' >> ndb_io.GetModels(
            user_models.UserBulkEmailsModel.get_all())
        | 'Filter user bulk email models that belong to deleted users' >> (
            beam.Filter(
                lambda model, del_ids: model.id in del_ids,
                del_ids=deleted_user_ids)))
    user_bulk_result = (
        user_bulk_to_delete
        | 'Count user bulk email models to be deleted' >> (
            job_result_transforms.CountObjectsToJobRunResult(
                'USER BULK EMAILS')))

    # Flatten every selected model into one stream and delete it.
    unused_models_deletion = (
        (
            sent_to_delete,
            bulk_to_delete,
            feedback_to_delete,
            user_bulk_to_delete,
        )
        | 'Merge models' >> beam.Flatten()
        | 'Extract keys' >> beam.Map(lambda model: model.key)
        | 'Delete models' >> ndb_io.DeleteModels())

    return (
        (
            sent_result,
            bulk_result,
            feedback_result,
            user_bulk_result,
        )
        | 'Merge results' >> beam.Flatten())