def process(
    self, emails: List[str], batch_index_dict: int, test_run: bool
) -> Iterable[result.Result[str]]:
    """Add 500 users at a time, who have subscribed for newsletters, to
    the MailChimp DB.

    Args:
        emails: list(str). List of emails of users subscribed to
            newsletters.
        batch_index_dict: int. Current batch index.
        test_run: bool. Whether to use mailchimp API or not. To be set
            to TRUE only when run from a non-production server for
            testing.

    Raises:
        Exception. Exception thrown by the api is raised (errors of type
            MailChimpError are caught and converted to Err results).

    Yields:
        Result. Ok with a status message (or, in a test run, a sample of
        the batch emails) when the request succeeds, Err with the
        corresponding error message otherwise.
    """
    # Sort so that each batch index selects a deterministic slice.
    sorted_emails = sorted(emails)
    selected_emails = sorted_emails[
        batch_index_dict * 500:(batch_index_dict + 1) * 500]

    if test_run:
        # There is a max limit of 1500 bytes for job output. Hence, only
        # returning first and last 5 emails in batch for testing.
        yield result.Ok(
            ','.join(selected_emails[:5] + selected_emails[-5:]))
        return

    mailchimp_data = []
    client = _get_mailchimp_class()
    for email in selected_emails:
        mailchimp_data.append({
            'email_address': email,
            'status': 'subscribed'
        })

    try:
        response = client.lists.update_members(
            feconf.MAILCHIMP_AUDIENCE_ID, {
                'members': mailchimp_data,
                'update_existing': False
            })
    except mailchimpclient.MailChimpError as error:
        # MailChimp errors stringify to a dict-like repr; parse it to
        # extract the human-readable 'detail' message.
        error_message = ast.literal_eval(str(error))
        yield result.Err(error_message['detail'])
        return

    response_emails_count = (
        len(response['new_members']) + len(response['updated_members']))
    source_emails_count = len(selected_emails)
    if response_emails_count == source_emails_count:
        yield result.Ok('Request successful')
    else:
        # Report exactly which addresses the API rejected.
        failed_emails = []
        for user in response['errors']:
            failed_emails.append(user['email_address'])
        yield result.Err('User update failed for: %s' % failed_emails)
def test_ok_results_with_prefix_correctly_outputs(self) -> None:
    """Checks that prefixed Ok results are aggregated into a single
    'PREFIX SUCCESS: <count>' stdout job run result.
    """
    transformed = (
        self.pipeline
        | beam.Create([result.Ok('ok'), result.Ok('ok')])
        | job_result_transforms.ResultsToJobRunResults('PREFIX')
    )

    self.assert_pcoll_equal(
        transformed,
        [job_run_result.JobRunResult.as_stdout('PREFIX SUCCESS: 2')]
    )
def _create_skill_opportunity_model(
    skill: skill_models.SkillModel,
    question_skill_links: List[question_models.QuestionSkillLinkModel]
) -> result.Result[opportunity_models.SkillOpportunityModel, Exception]:
    """Transforms a skill object and a list of QuestionSkillLink objects
    into a skill opportunity model.

    Args:
        skill: skill_models.SkillModel. The skill to create the
            opportunity for.
        question_skill_links: list(question_models.QuestionSkillLinkModel).
            The list of QuestionSkillLinkModel for the given skill.

    Returns:
        Result[opportunity_models.SkillOpportunityModel, Exception].
        Result object that contains SkillOpportunityModel when the
        operation is successful and Exception when an exception occurs.
    """
    try:
        # Build and validate the domain object before touching datastore.
        opportunity = opportunity_domain.SkillOpportunity(
            skill_id=skill.id,
            skill_description=skill.description,
            question_count=(
                GenerateSkillOpportunityModelJob
                ._count_unique_question_ids(question_skill_links)))
        opportunity.validate()
        # Model construction needs an active NDB context.
        with datastore_services.get_ndb_context():
            opportunity_model = opportunity_models.SkillOpportunityModel(
                id=opportunity.id,
                skill_description=opportunity.skill_description,
                question_count=opportunity.question_count)
            opportunity_model.update_timestamps()
        return result.Ok(opportunity_model)
    except Exception as e:
        return result.Err(e)
def _migrate_story(
    story_id: str,
    story_model: story_models.StoryModel,
    # This must have a default value of None. Otherwise, Beam won't
    # execute this code.
    topic_id_to_topic: Optional[Dict[str, topic_domain.Topic]] = None
) -> result.Result[Tuple[str, story_domain.Story], Tuple[str, Exception]]:
    """Migrates story and transform story model into story object.

    Args:
        story_id: str. The id of the story.
        story_model: StoryModel. The story model to migrate.
        topic_id_to_topic: dict(str, Topic). The mapping from topic ID
            to topic.

    Returns:
        Result((str, Story), (str, Exception)). Result containing tuple
        that consists of story ID and either story object or Exception.
        Story object is returned when the migration was successful and
        Exception is returned otherwise.
    """
    try:
        story = story_fetchers.get_story_from_model(story_model)
        story.validate()
        # The default exists only for Beam; by the time this runs the
        # mapping must have been provided.
        assert topic_id_to_topic is not None
        linked_topic = topic_id_to_topic[story.corresponding_topic_id]
        story_services.validate_prerequisite_skills_in_story_contents(
            linked_topic.get_all_skill_ids(), story.story_contents)
    except Exception as e:
        logging.exception(e)
        return result.Err((story_id, e))

    return result.Ok((story_id, story))
def try_request(
    logger: Logger, request: Callable, *args, **kwargs
) -> result.Result[r.Response, str]:
    """Invokes *request* with a 10-second timeout and wraps the outcome.

    Args:
        logger: Logger. Logger used to record timeout/status failures.
        request: Callable. The requests-style callable to invoke.
        *args: Positional arguments forwarded to the callable.
        **kwargs: Keyword arguments forwarded to the callable.

    Returns:
        Result[Response, str]. Ok with the response when the call
        returns HTTP 200, Err with the stringified error otherwise
        (timeout or non-200 status).
    """
    try:
        response: r.Response = request(*args, **kwargs, timeout=10)
        # A non-200 status is treated as a failure and routed through
        # the same error path as a timeout.
        if response.status_code != 200:
            raise ValueError(str([args, kwargs, response.text]))
    except (r.exceptions.Timeout, ValueError) as error:
        logger.exception(error)
        return result.Err(str(error))
    return result.Ok(response)
def _delete_skill_from_cache(
    skill: skill_domain.Skill
) -> result.Result[str, Exception]:
    """Deletes skill from cache.

    Args:
        skill: Skill. The skill which should be deleted from cache.

    Returns:
        Result(str, Exception). The id of the skill when the deletion
        was successful or Exception when the deletion failed.
    """
    try:
        caching_services.delete_multi(
            caching_services.CACHE_NAMESPACE_SKILL, None, [skill.id])
    except Exception as e:
        return result.Err(e)
    return result.Ok(skill.id)
def _delete_story_from_cache(
    story: story_domain.Story
) -> result.Result[str, Exception]:
    """Deletes story from cache.

    Args:
        story: Story. The story which should be deleted from cache.

    Returns:
        Result(str, Exception). The id of the story when the deletion
        was successful or Exception when the deletion failed.
    """
    try:
        caching_services.delete_multi(
            caching_services.CACHE_NAMESPACE_STORY, None, [story.id])
    except Exception as e:
        return result.Err(e)
    return result.Ok(story.id)
def process(
    self, exp_summary: List[exp_domain.ExplorationSummary]
) -> Iterable[result.Result[None, Exception]]:
    """Index exploration summaries and catch any errors.

    Args:
        exp_summary: list(ExplorationSummary). List of Exp Summary
            domain objects to be indexed.

    Yields:
        Result. One Ok() per summary on success, or a single Err with
        the search exception on failure.
    """
    try:
        search_services.index_exploration_summaries(exp_summary)
    except platform_search_services.SearchException as e:
        yield result.Err(e)
        return
    # Emit one success marker per indexed summary.
    for _ in exp_summary:
        yield result.Ok()
def process(
    self, exp_summary_models: List[datastore_services.Model]
) -> Iterable[result.Result[None, Exception]]:
    """Index exploration summaries and catch any errors.

    Args:
        exp_summary_models: list(Model). Models to index.

    Yields:
        Result. One Ok() per model on success, or a single Err with
        the search exception on failure.
    """
    try:
        search_services.index_exploration_summaries(  # type: ignore[no-untyped-call]
            cast(List[exp_models.ExpSummaryModel], exp_summary_models))
    except platform_search_services.SearchException as e:
        yield result.Err(e)
        return
    # Emit one success marker per indexed model.
    for _ in exp_summary_models:
        yield result.Ok()
def _migrate_skill(
    skill_id: str, skill_model: skill_models.SkillModel
) -> result.Result[Tuple[str, skill_domain.Skill], Tuple[str, Exception]]:
    """Migrates skill and transform skill model into skill object.

    Args:
        skill_id: str. The id of the skill.
        skill_model: SkillModel. The skill model to migrate.

    Returns:
        Result((str, Skill), (str, Exception)). Result containing tuple
        that consists of skill ID and either skill object or Exception.
        Skill object is returned when the migration was successful and
        Exception is returned otherwise.
    """
    try:
        migrated_skill = skill_fetchers.get_skill_from_model(skill_model)
        migrated_skill.validate()
    except Exception as e:
        logging.exception(e)
        return result.Err((skill_id, e))

    return result.Ok((skill_id, migrated_skill))
def _generate_stats(
    suggestions: Iterable[suggestion_registry.SuggestionTranslateContent],
    opportunity: Optional[opportunity_domain.ExplorationOpportunitySummary]
) -> Iterable[
    Tuple[str, result.Result[Dict[str, Union[bool, int, str]], str]]
]:
    """Generates translation contribution stats for each suggestion.

    Args:
        suggestions: iter(SuggestionTranslateContent). Suggestions for
            which the stats should be generated.
        opportunity: ExplorationOpportunitySummary|None. Opportunity for
            which were the suggestions generated. Used to extract topic
            ID. When None, the topic ID is left empty.

    Yields:
        tuple(str, Result(dict(str, *), str)). Tuple of stats model key
        and either Ok with the suggestion stats dict, or Err with an
        error message. The stats dictionary has four fields:
            suggestion_status: str. What is the status of the suggestion.
            edited_by_reviewer: bool. Whether the suggestion was edited
                by the reviewer.
            content_word_count: int. The word count of the content of
                the suggestion.
            last_updated_date: str. When was the suggestion last updated.
    """
    # When opportunity is not available we leave the topic ID empty.
    topic_id = ''
    if opportunity is not None:
        topic_id = opportunity.topic_id

    for suggestion in suggestions:
        key = (
            suggestion_models.TranslationContributionStatsModel.generate_id(
                suggestion.language_code, suggestion.author_id, topic_id))
        try:
            change = suggestion.change
            # In the new translation command the content in set format is
            # a list, content in unicode and html format is a string.
            # This code normalizes the content to the list type so that
            # we can easily count words.
            if (
                change.cmd == exp_domain.CMD_ADD_WRITTEN_TRANSLATION
                and state_domain.WrittenTranslation.is_data_format_list(
                    change.data_format)
            ):
                content_items = change.content_html
            else:
                content_items = [change.content_html]

            content_word_count = 0
            for item in content_items:
                # Count the number of words in the original content,
                # ignoring any HTML tags and attributes.
                content_plain_text = html_cleaner.strip_html_tags(
                    item)  # type: ignore[no-untyped-call,attr-defined]
                content_word_count += len(content_plain_text.split())

            translation_contribution_stats_dict = {
                'suggestion_status': suggestion.status,
                'edited_by_reviewer': suggestion.edited_by_reviewer,
                'content_word_count': content_word_count,
                'last_updated_date': (
                    suggestion.last_updated.date().isoformat())
            }
            yield (key, result.Ok(translation_contribution_stats_dict))
        except Exception as e:
            # Per-suggestion failures must not abort the whole batch.
            yield (key, result.Err('%s: %s' % (suggestion.suggestion_id, e)))
def _generate_opportunities_related_to_topic(
    topic: topic_domain.Topic,
    stories_dict: Dict[str, story_domain.Story],
    exps_dict: Dict[str, exp_domain.Exploration]
) -> result.Result[
    List[opportunity_models.ExplorationOpportunitySummaryModel], Exception
]:
    """Generate opportunities related to a topic.

    Args:
        topic: Topic. Topic for which to generate the opportunities.
        stories_dict: dict(str, Story). All stories in the datastore,
            keyed by their ID.
        exps_dict: dict(str, Exploration). All explorations in the
            datastore, keyed by their ID.

    Returns:
        Result[list(ExplorationOpportunitySummaryModel), Exception].
        Ok with the generated opportunity models when the operation is
        successful, or Err with the raised exception when a story or
        exploration referenced by the topic is missing or any other
        step fails.
    """
    try:
        story_ids = topic.get_canonical_story_ids()  # type: ignore[no-untyped-call]
        existing_story_ids = (
            set(stories_dict.keys()).intersection(story_ids))
        exp_ids: List[str] = list(itertools.chain.from_iterable(
            stories_dict[story_id].story_contents.get_all_linked_exp_ids()
            for story_id in existing_story_ids))
        existing_exp_ids = set(exps_dict.keys()).intersection(exp_ids)

        # Any referenced story/exploration absent from the datastore
        # makes the regeneration invalid for this topic.
        missing_story_ids = set(story_ids).difference(existing_story_ids)
        missing_exp_ids = set(exp_ids).difference(existing_exp_ids)
        if len(missing_exp_ids) > 0 or len(missing_story_ids) > 0:
            raise Exception(
                'Failed to regenerate opportunities for topic id: %s, '
                'missing_exp_with_ids: %s, missing_story_with_ids: %s' % (
                    topic.id, list(missing_exp_ids),
                    list(missing_story_ids)))

        exploration_opportunity_summary_list = []
        stories = [
            stories_dict[story_id] for story_id in existing_story_ids
        ]
        for story in stories:
            for exp_id in story.story_contents.get_all_linked_exp_ids():
                exploration_opportunity_summary_list.append(
                    opportunity_services.create_exp_opportunity_summary(  # type: ignore[no-untyped-call]
                        topic, story, exps_dict[exp_id]))

        exploration_opportunity_summary_model_list = []
        # Model construction needs an active NDB context.
        with datastore_services.get_ndb_context():
            for opportunity in exploration_opportunity_summary_list:
                model = (
                    opportunity_models.ExplorationOpportunitySummaryModel(
                        id=opportunity.id,
                        topic_id=opportunity.topic_id,
                        topic_name=opportunity.topic_name,
                        story_id=opportunity.story_id,
                        story_title=opportunity.story_title,
                        chapter_title=opportunity.chapter_title,
                        content_count=opportunity.content_count,
                        incomplete_translation_language_codes=(
                            opportunity
                            .incomplete_translation_language_codes
                        ),
                        translation_counts=opportunity.translation_counts,
                        language_codes_needing_voice_artists=(
                            opportunity
                            .language_codes_needing_voice_artists
                        ),
                        language_codes_with_assigned_voice_artists=(
                            opportunity
                            .language_codes_with_assigned_voice_artists
                        )
                    )
                )
                model.update_timestamps()
                exploration_opportunity_summary_model_list.append(model)
        return result.Ok(exploration_opportunity_summary_model_list)
    except Exception as e:
        return result.Err(e)