def get_best_jobs_in_area(
        database: mongo.NoPiiMongoDatabase, area_id: str) -> job_pb2.BestJobsInArea:
    """Get the best jobs in an area."""

    return proto.create_from_mongo(
        database.best_jobs_in_area.find_one({'_id': area_id}), job_pb2.BestJobsInArea)
def _get_users_counts(database):
    all_counts = next(
        database.user_count.find({}).sort('aggregatedAt', pymongo.DESCENDING).limit(1),
        None)
    return proto.create_from_mongo(all_counts, stats_pb2.UsersCount, always_create=False)
def test_timeout(self, mock_db: pymongo.database.Database) -> None:
    """Simple timeout."""

    mock_db.cvs_and_cover_letters.insert_one({
        'anonymizedUrl': 'https://dl.airtable.com/4KDBTy2WSGa1JvGbPYsA_CV%20de%20Pascal.png',
        'kind': 'DOCUMENT_RESUME',
        'name': 'Pascal',
        'reviews': [
            {
                'sentAt': '2018-05-02T08:39:38Z',
                'reviewerId': 'aca69757aff44770db7d7e49',
                'status': 'REVIEW_SENT',
            },
        ],
        'numPendingReviews': 1,
        'ownerEmail': '*****@*****.**',
    })
    timeout_reviews.main([])
    document = typing.cast(
        review_pb2.DocumentToReview,
        proto.create_from_mongo(
            mock_db.cvs_and_cover_letters.find_one(), review_pb2.DocumentToReview))
    self.assertFalse(document.num_pending_reviews)
    self.assertEqual(review_pb2.REVIEW_TIME_OUT, document.reviews[0].status)
    self.assertEqual('aca69757aff44770db7d7e49', document.reviews[0].reviewer_id)
    self.assertEqual('*****@*****.**', document.owner_email)
def get_user_data(user_id: str) -> user_pb2.User:
    """Load user data from the DB."""

    user_dict = flask.current_app.config['USER_DATABASE'].user.find_one(
        {'_id': safe_object_id(user_id)})
    user_proto = proto.create_from_mongo(
        user_dict, user_pb2.User, 'user_id', always_create=False)
    if not user_proto or user_proto.HasField('deleted_at'):
        # Switch to raising an error if you move this function to a lib.
        flask.abort(404, f'Utilisateur "{user_id}" inconnu.')

    _populate_feature_flags(user_proto)

    # TODO(cyrille): Remove this once we've generated observations for old users.
    for project in user_proto.projects:
        if not project.diagnostic.sub_diagnostics:
            continue
        scoring_project = scoring.ScoringProject(
            project, user_proto, flask.current_app.config['DATABASE'])
        for sub_diagnostic in project.diagnostic.sub_diagnostics:
            if not sub_diagnostic.observations:
                sub_diagnostic.observations.extend(
                    diagnostic.compute_sub_diagnostic_observations(
                        scoring_project, sub_diagnostic.topic))

    # TODO(pascal): Remove the fields completely after this has been live for a
    # week.
    user_proto.profile.ClearField('city')
    user_proto.profile.ClearField('latest_job')
    user_proto.profile.ClearField('situation')

    return user_proto
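# `safe_object_id` is used above but not defined in this snippet. A minimal
# sketch of what it presumably does, assuming it wraps bson's ObjectId and turns
# a malformed ID into a 404 instead of a server error:
import flask
from bson import errors, objectid


def safe_object_id(_id: str) -> objectid.ObjectId:
    """Parse a MongoDB ObjectId, aborting with a 404 on a malformed value."""

    try:
        return objectid.ObjectId(_id)
    except errors.InvalidId:
        # The ID is not in the 24-hex-digit format MongoDB expects.
        flask.abort(404, f'L\'identifiant "{_id}" n\'est pas valide.')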
def redirect_eterritoire(city_id):
    """Redirect to the e-Territoire page for a city."""

    link = proto.create_from_mongo(
        _DB.eterritoire_links.find_one({'_id': city_id}), association_pb2.SimpleLink)
    return flask.redirect('http://www.eterritoire.fr{}'.format(link.path))
def send_auth_token(self, user_dict: Dict[str, Any]) -> None:
    """Send an email to the user with an auth token so that they can log in."""

    user_profile = typing.cast(
        user_pb2.UserProfile,
        proto.create_from_mongo(user_dict.get('profile'), user_pb2.UserProfile))

    user_id = str(user_dict['_id'])
    auth_token = create_token(user_id, is_using_timestamp=True)
    # TODO(pascal): Factorize with campaign.create_logged_url.
    auth_link = parse.urljoin(flask.request.url, '/?' + parse.urlencode({
        'userId': user_id,
        'authToken': auth_token,
    }))
    template_vars = {
        'authLink': auth_link,
        'firstName': user_profile.name,
    }
    result = mail.send_template('1140080', user_profile, template_vars)
    if result.status_code != 200:
        logging.error('Failed to send an email with MailJet:\n %s', result.text)
        flask.abort(result.status_code)
def test_main(self) -> None:
    """Test main."""

    count_users.main()
    result = self._db.user_count.find_one({'_id': ''})
    self.assertTrue(result)
    result_proto = typing.cast(
        stats_pb2.UsersCount, proto.create_from_mongo(result, stats_pb2.UsersCount))
    self.assertEqual(2, result_proto.departement_counts['69'])
    self.assertEqual(1, result_proto.departement_counts['64'])
    self.assertEqual(2, result_proto.job_group_counts['A1235'])
    self.assertEqual(1, result_proto.job_group_counts['B4567'])
    self.assertEqual(2, result_proto.weekly_application_counts['LESS_THAN_2'])
    self.assertEqual(2, result_proto.weekly_application_counts['SOME'])
    self.assertEqual(2, result_proto.medium_search_interview_counts['3'])
    self.assertEqual(1, result_proto.long_search_interview_counts['1'])
    self.assertEqual(
        stats_pb2.SHORT_SEARCH_LENGTH, result_proto.passion_level_counts[0].search_length)
    self.assertEqual(
        stats_pb2.MEDIUM_SEARCH_LENGTH, result_proto.passion_level_counts[1].search_length)
    self.assertEqual(2, len(result_proto.passion_level_counts[1].level_counts))
    self.assertEqual(
        [
            stats_pb2.PassionLevelCount(passionate_level=project_pb2.LIFE_GOAL_JOB, count=1),
            stats_pb2.PassionLevelCount(passionate_level=project_pb2.ALIMENTARY_JOB, count=2),
        ],
        sorted(result_proto.passion_level_counts[1].level_counts, key=lambda a: a.count))
def _update_email_sent_status(
        email_sent_dict: dict[str, Any], yesterday: str,
        campaign_ids: Optional[list[str]] = None) -> dict[str, Any]:
    email_sent = proto.create_from_mongo(email_sent_dict, email_pb2.EmailSent)
    if campaign_ids and email_sent.campaign_id not in campaign_ids:
        # Email is not from a campaign we wish to update, skipping.
        return email_sent_dict

    if email_sent.status != email_pb2.EMAIL_SENT_UNKNOWN and email_sent.last_status_checked_at:
        sent_at = email_sent.sent_at.ToJsonString()
        if sent_at < yesterday:
            last_status_checked_at = email_sent.last_status_checked_at.ToJsonString()
            if email_sent.last_status_checked_after_days > 14 or \
                    last_status_checked_at > yesterday:
                return email_sent_dict

    message = _find_message(email_sent)
    if message:
        email_sent.mailjet_message_id = message.get('ID', email_sent.mailjet_message_id)
        status = message.get('Status')
        if status:
            email_sent.status = email_pb2.EmailSentStatus.Value(f'EMAIL_SENT_{status.upper()}')
        else:
            logging.warning('No status for message "%s"', email_sent.mailjet_message_id)
    else:
        logging.warning('Could not find a message in MailJet.')

    common_proto.set_date_now(email_sent.last_status_checked_at)
    email_sent.last_status_checked_after_days = (
        now.get() - email_sent.sent_at.ToDatetime()).days
    return json_format.MessageToDict(email_sent)
def save_user(user_data: user_pb2.User) -> user_pb2.User:
    """Save a user in the database."""

    unused_, users_database, unused_ = mongo.get_connections_from_env()
    users_database = users_database.with_prefix('jobflix_')
    collection = users_database.user
    if user_data.profile.email:
        if db_user := collection.find_one(
                {'hashedEmail': (hashed_email := auth.hash_user_email(user_data.profile.email))},
                {'_id': 1, 'projects': 1}):
            user_data.user_id = str(db_user['_id'])
            new_projects = list(user_data.projects[:])
            user_data.ClearField('projects')
            user_data.projects.extend(
                proto.create_from_mongo(p, project_pb2.Project, always_create=True)
                for p in db_user.get('projects', []))
            old_project_ids = {p.project_id for p in user_data.projects}
            user_data.projects.extend(
                p for p in new_projects if _make_project_id(p) not in old_project_ids)
        elif user_data.user_id:
            collection.update_one({'_id': objectid.ObjectId(user_data.user_id)}, {'$set': {
                'profile.email': user_data.profile.email,
                'hashedEmail': hashed_email,
            }})
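# `_make_project_id` is referenced above but not shown. A hypothetical sketch,
# assuming it falls back to deriving a stable ID from the project's content when
# the project has never been saved (the derivation rule below is an assumption):
def _make_project_id(project: project_pb2.Project) -> str:
    """Give a best-effort identifier for a project, even one that was never stored."""

    if project.project_id:
        return project.project_id
    # Assumed rule: a project is the same if it targets the same job in the same city.
    return f'{project.target_job.job_group.rome_id}:{project.city.city_id}'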
def _get_seasonal_departements(self, project):
    """Compute the departements that offer seasonal jobs."""

    # TODO(guillaume): Cache this to increase speed.
    top_departements = proto.create_from_mongo(
        project.database.seasonal_jobbing.find_one({'_id': project.now.month}),
        seasonal_jobbing_pb2.MonthlySeasonalJobbingStats)

    for departement in top_departements.departement_stats:
        # TODO(guillaume): If we don't use deeper job groups by October 1st 2017, trim the db.
        del departement.job_groups[6:]

        try:
            departement.departement_in_name = geo.get_in_a_departement_text(
                project.database, departement.departement_id)
        except KeyError:
            logging.exception(
                'Prefix or name not found for departement: %s', departement.departement_id)
            continue

    # Iterate backwards so that deleting an element does not shift the indices
    # of the elements still to be checked.
    for i in range(len(top_departements.departement_stats) - 1, -1, -1):
        if not top_departements.departement_stats[i].departement_in_name:
            del top_departements.departement_stats[i]

    return top_departements
def _timeout_old_reviews(
        document_dict: Dict[str, Any], timeout_date: datetime.datetime) -> None:
    document_id = document_dict.pop('_id')
    document = typing.cast(
        review_pb2.DocumentToReview,
        proto.create_from_mongo(document_dict, review_pb2.DocumentToReview))
    timeout_review_indices = [
        review_index for review_index, review in enumerate(document.reviews)
        if review.status == review_pb2.REVIEW_SENT and
        review.sent_at.ToDatetime() < timeout_date
    ]
    if not timeout_review_indices:
        logging.warning('Mismatch between mongo and python filters')
        return
    _USER_DB.cvs_and_cover_letters.update_one(
        {'_id': document_id},
        {
            '$inc': {'numPendingReviews': -len(timeout_review_indices)},
            '$set': {
                f'reviews.{review_index}.status': 'REVIEW_TIME_OUT'
                for review_index in timeout_review_indices
            },
        },
    )
def _send_focus_emails(
        action: 'campaign.NoGhostAction', dry_run_email: str,
        restricted_campaigns: Optional[Iterable[mailjet_templates.Id]] = None) -> None:
    database, users_database, eval_database = mongo.get_connections_from_env()

    instant = now.get()
    email_errors = 0
    counts = {
        campaign_id: 0
        for campaign_id in sorted(get_possible_campaigns(database, restricted_campaigns))
    }
    potential_users = users_database.user.find({
        'profile.email': {
            '$regex': re.compile(r'[^ ]+@[^ ]+\.[^ ]+'),
            '$not': re.compile(r'@example.com$'),
        },
        'projects': {'$elemMatch': {
            'isIncomplete': {'$ne': True},
        }},
        'profile.coachingEmailFrequency': {'$in': [
            email_pb2.EmailFrequency.Name(setting) for setting in _EMAIL_PERIOD_DAYS]},
        # Note that "not >" is not equivalent to "<=" in the case the field
        # is not defined: in that case we do want to select the user.
        'sendCoachingEmailAfter': {'$not': {'$gt': proto.datetime_to_json_string(instant)}},
    })
    restricted_campaigns_set: Optional[Set[mailjet_templates.Id]]
    if restricted_campaigns:
        restricted_campaigns_set = set(restricted_campaigns)
    else:
        restricted_campaigns_set = None
    for user_dict in potential_users:
        user_id = user_dict.pop('_id')
        user = proto.create_from_mongo(user_dict, user_pb2.User)
        user.user_id = str(user_id)
        try:
            campaign_id = send_focus_email_to_user(
                action, user, dry_run_email=dry_run_email,
                database=database, users_database=users_database, eval_database=eval_database,
                instant=instant, restricted_campaigns=restricted_campaigns_set)
        except requests.exceptions.HTTPError as error:
            if action == 'dry-run':
                raise
            logging.warning('Error while sending an email: %s', error)
            email_errors += 1
            continue
        if campaign_id:
            counts[campaign_id] += 1
            if action == 'dry-run':
                break
            continue

    report_message = 'Focus emails sent today:\n' + '\n'.join([
        f' • *{campaign_id}*: {count} email{"s" if count > 1 else ""}'
        for campaign_id, count in counts.items()
    ])
    if action == 'send':
        report.notify_slack(report_message)
    logging.info(report_message)
def get_expanded_card_data(self, project):
    """Retrieve data for the expanded card."""

    # TODO(cyrille): Cache coordinates in ScoringProject.
    return proto.create_from_mongo(
        project.database.cities.find_one({'_id': project.details.mobility.city.city_id}),
        geo_pb2.FrenchCity)
def test_timeout_only_some_reviews(self, mock_db: pymongo.database.Database) -> None:
    """Timeout some reviews but not all."""

    mock_db.cvs_and_cover_letters.insert_one({
        'anonymizedUrl': 'https://dl.airtable.com/4KDBTy2WSGa1JvGbPYsA_CV%20de%20Pascal.png',
        'kind': 'DOCUMENT_RESUME',
        'name': 'Pascal',
        'reviews': [
            # Review already done.
            {
                'sentAt': '2018-05-02T08:39:38Z',
                'reviewerId': 'aca69757aff44770db7d7e49',
                'status': 'REVIEW_DONE',
            },
            # Review to time out.
            {
                'sentAt': '2018-05-02T08:39:38Z',
                'reviewerId': 'aca69757aff44770db7d7e49',
                'status': 'REVIEW_SENT',
            },
            # Review too recent to time out.
            {
                'sentAt': '2018-05-10T08:39:38Z',
                'reviewerId': 'aca69757aff44770db7d7e49',
                'status': 'REVIEW_SENT',
            },
            # Review to time out.
            {
                'sentAt': '2018-05-02T08:39:38Z',
                'reviewerId': 'aca69757aff44770db7d7e49',
                'status': 'REVIEW_SENT',
            },
        ],
        'numPendingReviews': 3,
        'ownerEmail': '*****@*****.**',
    })
    timeout_reviews.main([])
    document = typing.cast(
        review_pb2.DocumentToReview,
        proto.create_from_mongo(
            mock_db.cvs_and_cover_letters.find_one(), review_pb2.DocumentToReview))
    self.assertEqual(1, document.num_pending_reviews)
    self.assertEqual(
        [
            review_pb2.REVIEW_DONE,
            review_pb2.REVIEW_TIME_OUT,
            review_pb2.REVIEW_SENT,
            review_pb2.REVIEW_TIME_OUT,
        ],
        [r.status for r in document.reviews])
    self.assertEqual('aca69757aff44770db7d7e49', document.reviews[0].reviewer_id)
    self.assertEqual('*****@*****.**', document.owner_email)
def _compute_assessment_report(example_count: int, since: str, until: str) -> str:
    """Count the use cases that are assessed, and report which ones are not, and why."""

    cursor = _DB.use_case.find(
        {'poolName': {'$gte': str(since), '$lt': str(until), '$regex': r'\d{4}-\d{2}-\d{2}'}})
    unassessed_count = 0
    num_cases_missing_a_field: Dict[str, int] = collections.defaultdict(int)
    project_count = 0
    examples: List[Tuple[str, List[str]]] = []
    for use_case_json in cursor:
        use_case = proto.create_from_mongo(
            use_case_json, use_case_pb2.UseCase, 'use_case_id', always_create=False)
        if not use_case:
            logging.debug('Unable to parse use case from mongo\n%s', use_case_json)
            continue
        if not use_case.user_data.projects:
            logging.debug("Use case '%s' does not have any project", use_case.title)
            continue
        project_count += 1
        project_assessed = True
        missing = list(_list_missing_properties_for_assessed_use_case(
            use_case.user_data, use_case.title))
        for missing_field in missing:
            num_cases_missing_a_field[missing_field] += 1
            if project_assessed and not missing_field.startswith(_SUBDIAGNOSTIC_PREFIX):
                project_assessed = False
        if project_assessed:
            continue
        unassessed_count += 1
        example = (_get_use_case_url(use_case), missing)
        _reservoir_sample(examples, example_count, example, unassessed_count)

    report_text = f'{project_count} use case{"" if project_count == 1 else "s"} tested\n' \
        f'{project_count - unassessed_count} use ' \
        f'case{"" if project_count - unassessed_count == 1 else "s"} successfully assessed\n'
    if project_count:
        rate = (project_count - unassessed_count) / project_count * 100
        report_text += f'Success rate: {rate:4.1f}%\n'
    total_failure_count = unassessed_count
    if total_failure_count:
        for field, count in num_cases_missing_a_field.items():
            percent = 100 * count / total_failure_count
            report_text += \
                f'{count} use case{"" if count == 1 else "s"} missed {field} ({percent:3.1f}%)\n'

    example_count = min(len(examples), example_count)
    if not example_count:
        return report_text
    maybe_s = '' if example_count == 1 else 's'
    report_text += f'Example{maybe_s} of {example_count} failed use case{maybe_s}:\n'
    grouped_examples = itertools.groupby(
        sorted(examples, key=lambda s: s[1]), lambda s: s[1])
    for key, values in grouped_examples:
        keys = '", "'.join(key)
        values_string = '\n\t'.join(map(lambda e: e[0], list(values)))
        report_text += f'Missing field{"" if len(key) == 1 else "s"} "{keys}":\n\t{values_string}\n'
    return report_text
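# `_reservoir_sample` is not defined in this snippet. Given the call site above,
# it is most likely the classic reservoir-sampling step (Algorithm R); a minimal
# sketch that keeps a uniform random sample of at most `size` items out of the
# `num_seen` items offered so far:
import random
from typing import List, TypeVar

_T = TypeVar('_T')


def _reservoir_sample(reservoir: List[_T], size: int, new_item: _T, num_seen: int) -> None:
    """Maybe add an item to the sample, keeping it uniform over all items seen."""

    if len(reservoir) < size:
        reservoir.append(new_item)
        return
    # Keep the new item with probability size / num_seen, evicting a random one.
    index = random.randrange(num_seen)
    if index < size:
        reservoir[index] = new_item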
def job_group_info(self):
    """Get the information about the project's job group."""

    if self._job_group_info is not None:
        return self._job_group_info

    self._job_group_info = proto.create_from_mongo(
        self._db.job_group_info.find_one({'_id': self._rome_id()}), job_pb2.JobGroup)
    return self._job_group_info
def get_users_counts(database: pymongo.database.Database) -> Optional[stats_pb2.UsersCount]:
    """Get the count of users in departements and in job groups."""

    all_counts = next(
        database.user_count.find({}).sort('aggregatedAt', pymongo.DESCENDING).limit(1),
        None)
    return proto.create_from_mongo(all_counts, stats_pb2.UsersCount, always_create=False)
def make_diagnostic_main_challenge_distribution(
        request: use_case_pb2.UseCasesDistributionRequest) -> use_case_pb2.UseCaseDistribution:
    """See how use cases are distributed in the different diagnostic main challenges."""

    database, unused_, eval_db = mongo.get_connections_from_env()
    use_case_iterator = (
        proto.create_from_mongo(use_case_json, use_case_pb2.UseCase, 'use_case_id')
        for use_case_json in eval_db.use_case.find(
            {'_id': _AUTOMATIC_EVAL_USE_CASE_ID_REGEX}
        ).sort([('_id', -1)]).limit(request.max_use_cases or _MAX_SEARCHED_USE_CASES))
    return _make_diagnostic_main_challenge_distribution(
        use_case_iterator, database, request.categories)
def test_update(self) -> None:
    """Ensure updating overrides previous values."""

    count_users.main()

    # No more users in database.
    self._user_db.user.drop()

    count_users.main()

    result = self._db.user_count.find_one({'_id': ''})
    self.assertTrue(result)
    result_proto = proto.create_from_mongo(result, stats_pb2.UsersCount)
    self.assertFalse(result_proto.departement_counts)
    self.assertFalse(result_proto.job_group_counts)
def export_user_to_elasticsearch(
        es_client: elasticsearch.Elasticsearch, index: str, registered_from: str,
        force_recreate: bool, dry_run: bool = True) -> None:
    """Synchronize users to Elasticsearch for analytics purposes."""

    user_db = mongo.get_connections_from_env().user_db

    if not dry_run:
        has_previous_index = es_client.indices.exists(index=index)
        if force_recreate and has_previous_index:
            logging.info('Removing old bobusers index ...')
            es_client.indices.delete(index=index)
        if force_recreate or not has_previous_index:
            logging.info('Creating bobusers index ...')
            es_client.indices.create(index=index)

    nb_users = 0
    nb_docs = 0
    cursor = user_db.user.find({
        'registeredAt': {'$gt': registered_from},
        'featuresEnabled.excludeFromAnalytics': {'$ne': True},
    })
    for row in cursor:
        nb_users += 1
        user = proto.create_from_mongo(row, user_pb2.User, 'user_id')
        data = user_to_analytics_data(user)
        logging.debug(data)
        if not dry_run:
            # TODO(cyrille): Consider using the noop feature to avoid
            # re-computing inactive users endlessly.
            es_client.update(index=index, doc_type='_doc', id=user.user_id, body={
                'doc': data,
                'doc_as_upsert': True,
            })
            nb_docs += 1
            if nb_docs % 1000 == 0:
                logging.info('%i users processed', nb_docs)

    if not dry_run:
        es_client.indices.flush(index=index)
def local_diagnosis(self):
    """Get local stats for the project's job group and département."""

    if self._local_diagnosis is not None:
        return self._local_diagnosis

    local_id = '{}:{}'.format(
        self.details.mobility.city.departement_id,
        self.details.target_job.job_group.rome_id)
    self._local_diagnosis = proto.create_from_mongo(
        self._db.local_diagnosis.find_one({'_id': local_id}), job_pb2.LocalJobStats)
    return self._local_diagnosis
def get_dashboard_export(dashboard_export_id):
    """Retrieve an export of the user's current dashboard."""

    dashboard_export_json = _DB.dashboard_exports.find_one(
        {'_id': _safe_object_id(dashboard_export_id)})
    dashboard_export = proto.create_from_mongo(
        dashboard_export_json, export_pb2.DashboardExport, always_create=False)
    if not dashboard_export:
        flask.abort(404, 'Export "{}" introuvable.'.format(dashboard_export_id))
    dashboard_export.dashboard_export_id = dashboard_export_id
    return dashboard_export
def list_nearby_cities(self, project):
    """Compute and store all interesting cities that are not too close and not too far.

    Those cities will be used by the Commute advice.
    """

    job_group = project.details.target_job.job_group.rome_id
    all_cities = proto.create_from_mongo(
        project.database.hiring_cities.find_one({'_id': job_group}), commute_pb2.HiringCities)
    interesting_cities_for_rome = all_cities.hiring_cities
    if not interesting_cities_for_rome:
        return []

    mongo_city = project.database.cities.find_one(
        {'_id': project.details.mobility.city.city_id})
    if not mongo_city:
        return []
    target_city = proto.create_from_mongo(mongo_city, geo_pb2.FrenchCity)

    commuting_cities = list(_get_commuting_cities(interesting_cities_for_rome, target_city))

    obvious_cities = [
        city for city in commuting_cities if city.distance_km < _MIN_CITY_DISTANCE]
    interesting_cities = [
        city for city in commuting_cities if city.distance_km >= _MIN_CITY_DISTANCE]

    # If there is only one city nearby and no obvious city, the nearby city
    # becomes obvious, so we do not recommend it.
    if len(interesting_cities) == 1 and not obvious_cities:
        return []

    return interesting_cities
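# `_get_commuting_cities` is not shown here. A simplified sketch under stated
# assumptions: each hiring city carries coordinates, distance_km is the
# great-circle (haversine) distance to the target city, and `_MAX_CITY_DISTANCE`
# bounds the commuting range. The constant, the proto field names, and the
# `commute_pb2.CommutingCity` output message are all assumptions:
import math

_MAX_CITY_DISTANCE = 35  # km, assumed.


def _haversine_km(lat1, lng1, lat2, lng2):
    """Great-circle distance between two (latitude, longitude) points, in km."""

    phi1, phi2 = math.radians(lat1), math.radians(lat2)
    half_d_phi = math.radians(lat2 - lat1) / 2
    half_d_lambda = math.radians(lng2 - lng1) / 2
    a = math.sin(half_d_phi) ** 2 + \
        math.cos(phi1) * math.cos(phi2) * math.sin(half_d_lambda) ** 2
    return 2 * 6371 * math.asin(math.sqrt(a))


def _get_commuting_cities(hiring_cities, target_city):
    """Yield cities hiring in the job group that are within commuting range."""

    for hiring_city in hiring_cities:
        distance = _haversine_km(
            target_city.latitude, target_city.longitude,
            hiring_city.city.latitude, hiring_city.city.longitude)
        if distance <= _MAX_CITY_DISTANCE:
            yield commute_pb2.CommutingCity(name=hiring_city.city.name, distance_km=distance)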
def get_usage_stats():
    """Get stats of the app usage."""

    now_utc = now.get().astimezone(datetime.timezone.utc)
    start_of_second = now_utc.replace(microsecond=0, tzinfo=None)
    last_week = start_of_second - datetime.timedelta(days=7)
    yesterday = start_of_second - datetime.timedelta(days=1)

    # Compute daily scores count.
    daily_scores_count = collections.defaultdict(int)
    last_day_users = _USER_DB.user.find(
        {
            'registeredAt': {
                '$gt': _datetime_to_json_string(yesterday),
                '$lte': _datetime_to_json_string(start_of_second),
            },
            'featuresEnabled.excludeFromAnalytics': {'$ne': True},
        },
        {
            'projects': 1,
            'registeredAt': 1,
        },
    )
    for user_dict in last_day_users:
        user_proto = proto.create_from_mongo(user_dict, user_pb2.User)
        for project in user_proto.projects:
            if project.feedback.score:
                daily_scores_count[project.feedback.score] += 1

    # Compute weekly user count.
    weekly_new_user_count = _USER_DB.user.find({
        'registeredAt': {
            '$gt': _datetime_to_json_string(last_week),
            '$lte': _datetime_to_json_string(start_of_second),
        },
        'featuresEnabled.excludeFromAnalytics': {'$ne': True},
    }).count()

    return stats_pb2.UsersCount(
        total_user_count=_USER_DB.user.count(),
        weekly_new_user_count=weekly_new_user_count,
        daily_scores_count=daily_scores_count,
    )
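# `_datetime_to_json_string` is referenced above but not shown. A minimal sketch,
# assuming it matches the proto JSON timestamp format used in the stored
# documents (e.g. "2018-05-02T08:39:38Z"):
import datetime


def _datetime_to_json_string(instant: datetime.datetime) -> str:
    """Format a naive UTC datetime the way timestamps are stored in MongoDB."""

    return instant.isoformat() + 'Z'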
def clean_users(
        database: mongo.UsersDatabase, dry_run: bool = True,
        max_users: int = 0) -> Tuple[int, int, int]:
    """Clean inactive users and guests who registered before a given date."""

    users = get_users(database)
    num_users_cleaned = 0
    num_users_updated = 0
    num_errors = 0
    for user in users:
        user_proto = proto.create_from_mongo(user, user_pb2.User, 'user_id')
        if max_users and (num_users_cleaned + num_users_updated + num_errors) >= max_users:
            return num_users_cleaned, num_users_updated, num_errors
        if not user_proto:
            num_errors += 1
            continue
        try:
            user_id = objectid.ObjectId(user_proto.user_id)
        except bson.errors.InvalidId:
            logging.exception(
                'Tried to modify a user with an invalid ID "%s"', user_proto.user_id)
            num_errors += 1
            continue
        deletion_date = compute_deletion_date(user_proto)
        if deletion_date >= datetime.datetime.today():
            set_deletion_check_date(user_proto, deletion_date, database, dry_run)
            num_users_updated += 1
            continue
        if dry_run:
            logging.info('Cleaning user "%s"', str(user_id))
            num_users_cleaned += 1
        elif auth.delete_user(user_proto, database):
            num_users_cleaned += 1
        else:
            num_errors += 1
    return num_users_cleaned, num_users_updated, num_errors
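# `compute_deletion_date` is not shown in this snippet. A heavily hedged sketch:
# the retention period and the fields consulted are assumptions; only the shape
# (user proto in, deletion datetime out) is taken from the call site above:
_RETENTION = datetime.timedelta(days=2 * 365)  # Assumed retention period.


def compute_deletion_date(user_proto: user_pb2.User) -> datetime.datetime:
    """Compute the date at which an inactive user should be deleted (assumed policy)."""

    # Assumed rule: count from the last time the user actively used the app,
    # falling back to their registration date.
    if user_proto.HasField('requested_by_user_at_date'):
        last_seen = user_proto.requested_by_user_at_date.ToDatetime()
    else:
        last_seen = user_proto.registered_at.ToDatetime()
    return last_seen + _RETENTION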
def _update_user_client_metric(
        user_collection: pymongo.collection.Collection, user: dict[str, Any],
        dry_run: bool) -> None:
    user_id = user['_id']
    client_metrics = proto.create_from_mongo(
        user.get('clientMetrics'), user_pb2.ClientSideMetrics)
    assert client_metrics
    updated_metrics = user_pb2.ClientSideMetrics()

    if client_metrics.amplitude_id:
        amplitude_id = client_metrics.amplitude_id
    else:
        try:
            amplitude_id = _get_amplitude_id(user_id)
        except KeyError:
            logging.info('Could not find user "%s" on Amplitude.', user_id)
            amplitude_id = _AMPLITUDE_ID_NOT_FOUND
        updated_metrics.amplitude_id = amplitude_id

    if amplitude_id != _AMPLITUDE_ID_NOT_FOUND:
        if not client_metrics.first_session_duration_seconds:
            events = _get_amplitude_events(amplitude_id)
            first_session_duration = compute_first_session_duration(events)
            updated_metrics.first_session_duration_seconds = round(
                first_session_duration.total_seconds())
        else:
            events = []

        if not client_metrics.is_first_session_mobile:
            if not events:
                events = _get_amplitude_events(amplitude_id, limit=5)
            updated_metrics.is_first_session_mobile = _compute_is_mobile(events[:5])

    dict_update = json_format.MessageToDict(updated_metrics)
    if not dict_update:
        logging.info('Nothing to update for user "%s"', user_id)
        return
    if dry_run:
        logging.info('Update user "%s":\n%s', user_id, dict_update)
    else:
        user_collection.update_one(user, {
            '$set': {f'clientMetrics.{k}': v for k, v in dict_update.items()},
        })
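# `compute_first_session_duration` is not defined here. A minimal sketch, assuming
# the Amplitude events arrive sorted by time as dicts exposing a session ID and an
# epoch-milliseconds timestamp (both field names are assumptions):
import datetime
from typing import Any


def compute_first_session_duration(events: list[dict[str, Any]]) -> datetime.timedelta:
    """Time elapsed between the first and last events of the user's first session."""

    if not events:
        return datetime.timedelta()
    first_session_id = events[0].get('session_id')
    session_times = [
        event['time'] for event in events
        if event.get('session_id') == first_session_id]
    return datetime.timedelta(milliseconds=max(session_times) - min(session_times))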
def make_diagnostic_category_distribution(
        request: use_case_pb2.UseCasesDistributionRequest) -> use_case_pb2.UseCaseDistribution:
    """See how use cases are distributed in the different diagnostic categories."""

    use_case_iterator = (
        typing.cast(
            use_case_pb2.UseCase,
            proto.create_from_mongo(use_case_json, use_case_pb2.UseCase, 'use_case_id'))
        for use_case_json in flask.current_app.config['EVAL_DATABASE'].use_case.find(
            {'_id': _AUTOMATIC_EVAL_USE_CASE_ID_REGEX}
        ).sort([('_id', -1)]).limit(request.max_use_cases or _MAX_SEARCHED_USE_CASES))
    return _make_diagnostic_category_distribution(
        use_case_iterator, flask.current_app.config['DATABASE'], request.categories)
def export_user_to_elasticsearch(
        es_client: elasticsearch.Elasticsearch, index: str, registered_from: str,
        dry_run: bool = True) -> None:
    """Synchronize users to Elasticsearch for analytics purposes."""

    if not dry_run:
        if es_client.indices.exists(index=index):
            logging.info('Removing old bobusers index ...')
            es_client.indices.delete(index=index)
        logging.info('Creating bobusers index ...')
        es_client.indices.create(index=index)

    nb_users = 0
    nb_docs = 0
    cursor = _USER_DB.user.find({
        'registeredAt': {'$gt': registered_from},
        'featuresEnabled.excludeFromAnalytics': {'$ne': True},
    })
    for row in cursor:
        nb_users += 1
        user = typing.cast(
            user_pb2.User, proto.create_from_mongo(row, user_pb2.User, 'user_id'))
        data = _user_to_analytics_data(user)
        logging.debug(data)
        if not dry_run:
            es_client.create(
                index=index, doc_type='_doc', id=user.user_id, body=json.dumps(data))
            nb_docs += 1
            if nb_docs % 1000 == 0:
                logging.info('%i users processed', nb_docs)

    if not dry_run:
        es_client.indices.flush(index=index)
def _send_focus_emails(action: 'campaign.Action', dry_run_email: str) -> None:
    database, users_database, unused_eval_database = mongo.get_connections_from_env()

    instant = now.get()
    email_errors = 0
    counts = {campaign_id: 0 for campaign_id in _FOCUS_CAMPAIGNS}
    potential_users = users_database.user.find({
        'profile.email': re.compile('.+@.+'),
        'projects': {'$elemMatch': {
            'isIncomplete': {'$ne': True},
        }},
        'profile.coachingEmailFrequency': {'$in': [
            user_pb2.EmailFrequency.Name(setting) for setting in _EMAIL_PERIOD_DAYS]},
        # Note that "not >" is not equivalent to "<=" in the case the field
        # is not defined: in that case we do want to select the user.
        'sendCoachingEmailAfter': {'$not': {'$gt': proto.datetime_to_json_string(instant)}},
    })
    for user_dict in potential_users:
        user_id = user_dict.pop('_id')
        user = typing.cast(user_pb2.User, proto.create_from_mongo(user_dict, user_pb2.User))
        user.user_id = str(user_id)
        try:
            campaign_id = send_focus_email_to_user(
                action, user, dry_run_email=dry_run_email,
                database=database, users_database=users_database, instant=instant)
        except requests.exceptions.HTTPError as error:
            if action == 'dry-run':
                raise
            logging.warning('Error while sending an email: %s', error)
            email_errors += 1
            continue
        if campaign_id:
            counts[campaign_id] += 1
            continue

    report_message = 'Focus emails sent:\n' + '\n'.join([
        f' • *{campaign_id}*: {count} email{"s" if count > 1 else ""}'
        for campaign_id, count in counts.items()
    ])
    if action == 'send':
        report.notify_slack(report_message)
    logging.info(report_message)
def send_reset_password_token(self, email):
    """Send an email to the user with a reset token so that they can reset their password."""

    user_dict = self._user_db.user.find_one({'profile.email': email})
    if not user_dict:
        flask.abort(
            403, "Nous n'avons pas d'utilisateur avec cet email : {}".format(email))
    user_auth_dict = self._user_db.user_auth.find_one({'_id': user_dict['_id']})
    if not user_auth_dict or not user_auth_dict.get('hashedPassword'):
        flask.abort(
            403, 'Utilisez Facebook ou Google pour vous connecter, comme la première fois.')

    hashed_old_password = user_auth_dict.get('hashedPassword')
    auth_token = _timestamped_hash(
        int(time.time()), email + str(user_dict['_id']) + hashed_old_password)

    user_profile = proto.create_from_mongo(user_dict.get('profile'), user_pb2.UserProfile)

    reset_link = parse.urljoin(flask.request.url, '/?' + parse.urlencode({
        'email': email,
        'resetToken': auth_token,
    }))
    template_vars = {
        'resetLink': reset_link,
        'firstName': user_profile.name,
    }
    result = mail.send_template(
        '71254', user_profile, template_vars, monitoring_category='reset_password')
    if result.status_code != 200:
        logging.error('Failed to send an email with MailJet:\n %s', result.text)
        flask.abort(result.status_code)
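# `_timestamped_hash` is not shown in this snippet. A plausible sketch: embed the
# timestamp in the token and bind it to the hashed value, so the token can later
# be verified and expired. The hash construction below is an assumption; a real
# implementation should use a keyed hash (e.g. HMAC with a server-side secret):
import hashlib


def _timestamped_hash(timestamp: int, value: str) -> str:
    """Create a temporary token that includes, and is bound to, a timestamp."""

    payload = f'{timestamp}.{value}'
    return f'{timestamp}.{hashlib.sha256(payload.encode("utf-8")).hexdigest()}'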