def _update_returning_user(
        user_data: user_pb2.User,
        force_update: bool = False,
        has_set_email: bool = False) -> timestamp_pb2.Timestamp:
    """Refresh a returning user's last-connection bookkeeping and save them.

    Args:
        user_data: the user to update; mutated in place.
        force_update: save the user even if they already connected today.
        has_set_email: whether the user just set their email address, in which
            case late activation emails may be sent.

    Returns:
        the timestamp of the user's previous connection, or their registration
        time if they had never connected before.
    """
    if user_data.HasField('requested_by_user_at_date'):
        start_of_day = now.get().replace(hour=0, minute=0, second=0, microsecond=0)
        if user_data.requested_by_user_at_date.ToDatetime() >= start_of_day:
            # Already connected today: keep the stored timestamp as-is.
            if force_update:
                _save_low_level(user_data)
            return user_data.requested_by_user_at_date
        else:
            # Copy the previous connection time before it is overwritten below.
            last_connection = timestamp_pb2.Timestamp()
            last_connection.CopyFrom(user_data.requested_by_user_at_date)
    else:
        # First connection since registration.
        last_connection = user_data.registered_at
    if user_data.profile.email:
        user_data.hashed_email = auth.hash_user_email(user_data.profile.email)
    if has_set_email:
        # Keep only the scheme and host of the request URL (drop path, then
        # the trailing slash).
        base_url = parse.urljoin(flask.request.base_url, '/')[:-1]
        advisor.maybe_send_late_activation_emails(
            user_data, flask.current_app.config['DATABASE'], base_url)
    user_data.requested_by_user_at_date.FromDatetime(now.get())
    # No need to pollute our DB with super precise timestamps.
    user_data.requested_by_user_at_date.nanos = 0
    _save_low_level(user_data)
    return last_connection
def use_app(user_id):
    """Update the user's data to mark that they have just used the app."""
    user_data = _get_user_data(user_id)
    today_start = now.get().replace(hour=0, minute=0, second=0, microsecond=0)
    already_seen_today = user_data.requested_by_user_at_date.ToDatetime() >= today_start
    if already_seen_today:
        # A connection was already recorded today; nothing to persist.
        return user_data
    user_data.requested_by_user_at_date.FromDatetime(now.get())
    # Drop sub-second precision before storing.
    user_data.requested_by_user_at_date.nanos = 0
    return _save_user(user_data, is_new_user=False)
def main(database, last_check_days_ago=7):
    """Check the status of sent emails on MailJet and update our Database.

    Args:
        database: a pymongo access to the Database. Only the user collection
            will be accessed and modified.
        last_check_days_ago: consider checked done more than n days ago as too
            old, and check the status again.
    """
    last_checked_at = _datetime_to_json(
        now.get() - datetime.timedelta(days=last_check_days_ago))
    a_month_ago = _datetime_to_json(now.get() - datetime.timedelta(days=30.5))
    projection = {'profile.email': 1, 'emailsSent': 1}
    has_email = re.compile('.*@.*')
    # Users whose sent emails have never had their status checked.
    never_checked = database.user.find(
        {
            'emailsSent': {'$exists': True},
            'profile.email': has_email,
            'emailsSent.lastStatusCheckedAt': {'$exists': False},
        },
        projection)
    # Users with recent emails whose status check is too old.
    stale_check = database.user.find(
        {
            'emailsSent.sentAt': {'$gt': a_month_ago},
            'profile.email': has_email,
            'emailsSent.lastStatusCheckedAt': {'$lt': last_checked_at},
        },
        projection)
    for user in itertools.chain(never_checked, stale_check):
        address = user.get('profile', {}).get('email')
        refreshed_emails = [
            _update_email_sent_status(email, address, last_checked_at)
            for email in user.get('emailsSent', [])
        ]
        database.user.update_one(
            {'_id': user['_id']}, {'$set': {'emailsSent': refreshed_emails}})
def compute_advices_for_project(
        user: user_pb2.User,
        project: project_pb2.Project,
        database: pymongo_database.Database,
        scoring_timeout_seconds: float = 3) -> project_pb2.Advices:
    """Advise on a user project.

    Args:
        user: the user's data, mainly used for their profile and features_enabled.
        project: the project data. It will not be modified.
        database: access to the MongoDB with market data.
        scoring_timeout_seconds: how long we wait to compute each advice
            scoring model.
    Returns:
        an Advices protobuffer containing a list of recommendations.
    """
    scoring_project = scoring.ScoringProject(project, user, database, now=now.get())
    available_methods = compute_available_methods(
        scoring_project, _advice_modules(database), scoring_timeout_seconds)
    result = project_pb2.Advices()
    result.advices.extend(available_methods)
    return result
def main(string_args=None):
    """Parse command line arguments and trigger the delete_duplicated_users function."""
    parser = argparse.ArgumentParser(
        description='Synchronize MongoDB client metrics fields from Amplitude')
    parser.add_argument(
        '--registered-from', default='2016',
        help='Consider only users who registered after this date.')
    # Default the upper bound to yesterday's date.
    default_to_date = str((now.get() - datetime.timedelta(days=1)).date())
    parser.add_argument(
        '--registered-to', default=default_to_date,
        help='Consider only users who registered before this date.')
    parser.add_argument(
        '--no-dry-run', dest='dry_run', action='store_false',
        help='No dry run really store in DB.')
    parser.add_argument(
        '--backup-collection', required=True,
        help='Name of the collection where to backup users that are deleted from main DB.')
    args = parser.parse_args(string_args)
    logging.basicConfig(level='INFO')
    delete_duplicated_users(
        _DB.user,
        from_date=args.registered_from,
        to_date=args.registered_to,
        dry_run=args.dry_run,
        backup_collection=_DB.get_collection(args.backup_collection))
def __init__(self, days_since_any_email=7, days_since_same_campaign_unread=0,
             days_since_same_campaign=0):
    """Constructor for an EmailPolicy object.

    Args:
        days_since_any_email: number of days to wait before sending any new
            mail to the users.
        days_since_same_campaign_unread: number of days to wait before sending
            again the same campaign email to a user to whom it has already
            been sent and who has not read/open it. ATTENTION: emails status
            have to be updated in mongodb.
        days_since_same_campaign: number of days to wait before sending again
            the same campaign email to a user whom it has already been sent
            whether they have opened it or not.
    """
    reference = now.get()
    self.last_email_datetime = reference - datetime.timedelta(days=days_since_any_email)
    # A value of 0 (or less) disables the corresponding retry policy.
    self.retry_campaign_date_unread = (
        reference - datetime.timedelta(days=days_since_same_campaign_unread)
        if days_since_same_campaign_unread > 0 else None)
    self.retry_campaign_date = (
        reference - datetime.timedelta(days=days_since_same_campaign)
        if days_since_same_campaign > 0 else None)
def instantiate(action, user_proto, project, template, database):
    """Instantiate a newly created action from a template.

    Args:
        action: the action to be populated from the template.
        user_proto: the whole user data.
        project: the whole project data.
        template: the action template to instantiate.
        database: a MongoDB client to get stats and info.
    Returns:
        the populated action for chaining.
    """
    # Build a unique ID from the project, the template, the time and a nonce.
    unique_suffix = f'{round(time.time()):x}-{random.randrange(0x10000):x}'
    action.action_id = f'{project.project_id}-{template.action_template_id}-{unique_suffix}'
    action.action_template_id = template.action_template_id
    action.title = template.title
    action.title_feminine = template.title_feminine
    action.short_description = template.short_description
    action.short_description_feminine = template.short_description_feminine
    scoring_project = scoring.ScoringProject(
        project, user_proto.profile, user_proto.features_enabled, database)
    action.link = scoring_project.populate_template(template.link)
    action.how_to = template.how_to
    action.status = action_pb2.ACTION_UNREAD
    action.created_at.FromDatetime(now.get())
    action.image_url = template.image_url
    return action
def test_recommendations_in_both_categories(self) -> None:
    """Users with a total of two recommended jobs should have a high score."""
    self.database.local_diagnosis.drop()
    less_stressful_groups = [
        {
            'localStats': {'imt': {'yearlyAvgOffersPer10Candidates': 12}},
            'jobGroup': {'romeId': 'A1413', 'name': 'Aide caviste'},
            'mobilityType': job_pb2.CLOSE,
        },
        {
            'localStats': {'imt': {'yearlyAvgOffersPer10Candidates': 6}},
            'jobGroup': {'romeId': 'A1401', 'name': 'Aide arboricole'},
            'mobilityType': job_pb2.EVOLUTION,
        },
    ]
    self.database.local_diagnosis.insert_one({
        '_id': '09:M1601',
        'imt': {'yearlyAvgOffersPer10Candidates': 4},
        'lessStressfulJobGroups': less_stressful_groups,
    })
    self.persona.user_profile.year_of_birth = datetime.date.today().year - 40
    self.persona.project.job_search_has_not_started = False
    self.persona.project.job_search_started_at.FromDatetime(
        now.get() - datetime.timedelta(days=397))
    score = self._score_persona(self.persona)
    self.assertEqual(score, 3, msg=f'Failed for "{self.persona.name}"')
def instantiate(action: action_pb2.Action, user_proto: user_pb2.User,
                project: project_pb2.Project, template: action_pb2.ActionTemplate,
                base: database.Database) -> action_pb2.Action:
    """Instantiate a newly created action from a template.

    Args:
        action: the action to be populated from the template.
        user_proto: the whole user data.
        project: the whole project data.
        template: the action template to instantiate.
        base: a MongoDB client to get stats and info.
    Returns:
        the populated action for chaining.
    """
    # Combine project, template, time and a random nonce into a unique ID.
    action.action_id = '{}-{}-{:x}-{:x}'.format(
        project.project_id, template.action_template_id,
        round(time.time()), random.randrange(0x10000))
    action.action_template_id = template.action_template_id
    action.title = template.title
    action.title_feminine = template.title_feminine
    action.short_description = template.short_description
    action.short_description_feminine = template.short_description_feminine
    scoring_project = scoring.ScoringProject(project, user_proto, base)
    action.link = scoring_project.populate_template(template.link)
    action.how_to = template.how_to
    action.status = action_pb2.ACTION_UNREAD
    action.created_at.FromDatetime(now.get())
    action.image_url = template.image_url
    return action
def main():
    """Aggregate users and populate user_count collection."""
    pipeline = [
        {'$match': {'featuresEnabled.excludeFromAnalytics': {'$ne': True}}},
        {'$unwind': '$projects'},
        {'$project': {
            '_id': 0,
            'dep_id': '$projects.mobility.city.departementId',
            'rome_id': '$projects.targetJob.jobGroup.romeId',
        }},
    ]
    dep_counts = collections.defaultdict(int)
    job_group_counts = collections.defaultdict(int)
    for row in _DB.user.aggregate(pipeline):
        if 'dep_id' in row:
            dep_counts[row['dep_id']] += 1
        if 'rome_id' in row:
            job_group_counts[row['rome_id']] += 1
    _DB.user_count.insert_one({
        'aggregatedAt': str(now.get()),
        'depCounts': dep_counts,
        'jobGroupCounts': job_group_counts,
    })
def update_employment_status(new_status, user_id):
    """Update user's last employment status."""
    user_proto = _get_user_data(user_id)
    # Work on an empty User proto so that only employment_status gets written.
    partial_user = user_pb2.User()
    partial_user.employment_status.extend(user_proto.employment_status[:])
    current_time = now.get()
    one_day_ago = current_time - datetime.timedelta(days=1)
    has_recent_status = bool(
        partial_user.employment_status and
        partial_user.employment_status[-1].created_at.ToDatetime() > one_day_ago)
    if has_recent_status:
        # Merge into the status created within the last day.
        recent_status = partial_user.employment_status[-1]
    else:
        recent_status = partial_user.employment_status.add()
        recent_status.created_at.FromDatetime(current_time)
    new_status.ClearField('created_at')
    recent_status.MergeFrom(new_status)
    _USER_DB.user.update_one(
        {'_id': _safe_object_id(user_id)},
        {'$set': json_format.MessageToDict(partial_user)},
        upsert=False)
    return ''
def get_employment_status(user_id):
    """Save user's first click and redirect them to the full survey."""
    user_proto = _get_user_data(user_id)
    # Create another empty User message to update only employment_status field.
    user_to_update = user_pb2.User()
    user_to_update.employment_status.extend(user_proto.employment_status[:])
    if 'id' in flask.request.args:
        # TODO(pascal): Cleanup once all clients are using the POST endpoint
        # (in 2018).
        # An explicit id selects an existing status entry to update.
        survey_id = int(flask.request.args.get('id'))
        if survey_id >= len(user_to_update.employment_status):
            flask.abort(422, 'Id invalide.')
        employment_status = user_to_update.employment_status[survey_id]
    else:
        # No id given: append a fresh status entry.
        survey_id = len(user_to_update.employment_status)
        employment_status = user_to_update.employment_status.add()
        employment_status.created_at.FromDatetime(now.get())
    try:
        # Fill the status from the query-string parameters, ignoring any
        # parameter that is not a known field.
        json_format.ParseDict(flask.request.args, employment_status, ignore_unknown_fields=True)
    except json_format.ParseError:
        flask.abort(422, 'Paramètres invalides.')
    _USER_DB.user.update_one(
        {'_id': _safe_object_id(user_id)},
        {'$set': json_format.MessageToDict(user_to_update)},
        upsert=False)
    return _maybe_redirect(
        id=survey_id,
        gender=user_pb2.Gender.Name(user_proto.profile.gender),
        can_tutoie=user_proto.profile.can_tutoie)
def delete_user(user_proto: user_pb2.User,
                user_database: pymongo.database.Database) -> bool:
    """Close a user's account.

    We assume the given user_proto is up-to-date, e.g. just being fetched from
    database.
    """
    try:
        user_id = objectid.ObjectId(user_proto.user_id)
    except objectid.InvalidId:
        logging.exception(
            'Tried to delete a user with invalid ID "%s"', user_proto.user_id)
        return False
    user_filter = {'_id': user_id}
    # Remove authentication information first so the account cannot log in.
    user_database.user_auth.delete_one(user_filter)
    try:
        privacy.redact_proto(user_proto)
    except TypeError:
        logging.exception('Cannot delete account %s', str(user_id))
        return False
    user_proto.deleted_at.FromDatetime(now.get())
    user_proto.ClearField('user_id')
    user_database.user.replace_one(user_filter, json_format.MessageToDict(user_proto))
    return True
def main(string_args: Optional[List[str]] = None) -> None:
    """Time out CVs and motivation letters reviews.

    Args:
        string_args: command line arguments; defaults to sys.argv when None.
    """
    parser = argparse.ArgumentParser(
        description='Time out CVs and motivation letters reviews.',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    # Use an int default directly rather than the string '5': the previous
    # version relied on argparse implicitly re-parsing string defaults through
    # `type`, which is easy to break when copy-pasting the pattern.
    parser.add_argument('--days-before-timeout', default=5, type=int)
    args = parser.parse_args(string_args)
    timeout_date = now.get() - datetime.timedelta(days=args.days_before_timeout)
    # Find documents that still have at least one pending (sent) review older
    # than the timeout date.
    documents = _USER_DB.cvs_and_cover_letters.find({
        'reviews': {
            '$elemMatch': {
                'sentAt': {'$lt': proto.datetime_to_json_string(timeout_date)},
                'status': 'REVIEW_SENT',
            }
        },
    })
    for document in documents:
        _timeout_old_reviews(document, timeout_date)
def main(string_args: Optional[List[str]] = None) -> None:
    """Parse command line arguments and trigger the clean_guest_users function."""
    parser = argparse.ArgumentParser(description='Clean guests user from the database.')
    parser.add_argument(
        '--disable-sentry', action='store_true', help='Disable logging to Sentry.')
    # --registered-to and --registered-to-days-ago are mutually exclusive ways
    # of choosing the cutoff date.
    registered_to_group = parser.add_mutually_exclusive_group()
    registered_to_group.add_argument(
        '--registered-to', help='Consider only users who registered before \
this date.')
    registered_to_group.add_argument(
        '--registered-to-days-ago', default=7, type=int,
        help='Consider only users who registered more than N days ago.')
    parser.add_argument(
        '--no-dry-run', dest='dry_run', action='store_false',
        help='No dry run really store in DB.')
    args = parser.parse_args(string_args)
    logging.basicConfig(level='INFO')
    if not args.dry_run and not args.disable_sentry:
        try:
            report.setup_sentry_logging(os.getenv('SENTRY_DSN'))
        except ValueError:
            logging.error(
                'Please set SENTRY_DSN to enable logging to Sentry, or use --disable-sentry option')
            return
    if args.registered_to:
        to_date = args.registered_to
    else:
        # Derive the cutoff date from the relative "days ago" flag.
        to_date = (now.get() - datetime.timedelta(days=args.registered_to_days_ago))\
            .strftime('%Y-%m-%dT%H:%M:%S')
    logging.info(
        'Cleaned %d users and got %d errors',
        *clean_guest_users(_DB, to_date, args.dry_run))
def _send_activation_email(
        user: user_pb2.User, project: project_pb2.Project,
        database: pymongo_database.Database, base_url: str) -> None:
    """Send an email to the user just after we have defined their diagnosis.

    Silently does nothing when the user has no valid email address. Logs a
    warning (but does not raise) when MailJet rejects the send.
    """
    if '@' not in user.profile.email:
        return
    # Set locale.
    # NOTE(review): this mutates process-wide locale state and raises if
    # fr_FR.UTF-8 is not installed — confirm this is intended.
    locale.setlocale(locale.LC_ALL, 'fr_FR.UTF-8')
    scoring_project = scoring.ScoringProject(project, user, database, now=now.get())
    auth_token = parse.quote(
        auth.create_token(user.user_id, is_using_timestamp=True))
    settings_token = parse.quote(
        auth.create_token(user.user_id, role='settings'))
    coaching_email_frequency_name = \
        user_pb2.EmailFrequency.Name(user.profile.coaching_email_frequency)
    # Template variables for the MailJet campaign email.
    data = {
        'changeEmailSettingsUrl':
        f'{base_url}/unsubscribe.html?user={user.user_id}&auth={settings_token}&'
        f'coachingEmailFrequency={coaching_email_frequency_name}&'
        f'hl={parse.quote(user.profile.locale)}',
        'date': now.get().strftime('%d %B %Y'),
        'firstName': user.profile.name,
        'gender': user_pb2.Gender.Name(user.profile.gender),
        'isCoachingEnabled':
        'True' if user.profile.coaching_email_frequency and
        user.profile.coaching_email_frequency != user_pb2.EMAIL_NONE else '',
        'loginUrl': f'{base_url}?userId={user.user_id}&authToken={auth_token}',
        'ofJob': scoring_project.populate_template('%ofJobName', raise_on_missing_var=True),
    }
    # https://app.mailjet.com/template/636862/build
    response = mail.send_template('636862', user.profile, data)
    if response.status_code != 200:
        logging.warning('Error while sending diagnostic email: %s\n%s',
                        response.status_code, response.text)
def _compute_data_bob_has_helped(self, created_at: datetime.datetime,
                                 bob_has_helped: str) -> Dict[str, Any]:
    """Build user data carrying a single 'bob has helped' survey answer."""
    user = user_pb2.User()
    user.registered_at.FromDatetime(now.get())
    answer = user.employment_status.add()
    answer.created_at.FromDatetime(created_at)
    answer.bob_has_helped = bob_has_helped
    return self._compute_user_data(user)
def _ensure_cache(self) -> Dict[str, _Type]:
    """Return the cache dict, rebuilding it first if stale or version-bumped."""
    current_time = now.get()
    is_fresh = bool(
        self._cached_valid_until and self._cached_valid_until >= current_time and
        self._cache_version >= self._global_cache_version)
    if not is_fresh:
        # Rebuild: bump to the global version, extend the validity window and
        # repopulate from scratch.
        self._cache_version = self._global_cache_version
        self._cached_valid_until = current_time + self._cache_duration
        self._cache = collections.OrderedDict()
        self._populate(self._cache)
    return typing.cast(Dict[str, _Type], self._cache)
def test_search_for_long_time(self) -> None:
    """User searching for 11 months should have a medium score."""
    # Use `<=` for the age boundary, like every sibling test, so a persona
    # born exactly 45 years ago is also reset to age 40.
    if self.persona.user_profile.year_of_birth <= datetime.date.today().year - 45:
        self.persona.user_profile.year_of_birth = datetime.date.today().year - 40
    self.persona.project.job_search_has_not_started = False
    self.persona.project.job_search_started_at.FromDatetime(
        now.get() - datetime.timedelta(days=335))
    score = self._score_persona(self.persona)
    self.assertEqual(score, 2, msg=f'Failed for "{self.persona.name}"')
def _ensure_cache(self):
    """Return the cache dict, rebuilding it first if stale or version-bumped."""
    current_time = now.get()
    cache_is_valid = bool(
        self._cached_valid_until and
        self._cached_valid_until >= current_time and
        self._cache_version >= self._global_cache_version)
    if cache_is_valid:
        return self._cache
    # Rebuild: catch up to the global version, extend the validity window and
    # repopulate from scratch.
    self._cache_version = self._global_cache_version
    self._cached_valid_until = current_time + self._cache_duration
    self._cache = collections.OrderedDict()
    self._populate(self._cache)
    return self._cache
def test_search_for_quite_long_time(self) -> None:
    """User searching for 7 months should have a low score."""
    this_year = datetime.date.today().year
    if self.persona.user_profile.year_of_birth <= this_year - 45:
        self.persona.user_profile.year_of_birth = this_year - 40
    self.persona.project.job_search_has_not_started = False
    seven_months_ago = now.get() - datetime.timedelta(days=214)
    self.persona.project.job_search_started_at.FromDatetime(seven_months_ago)
    score = self._score_persona(self.persona)
    self.assertEqual(score, 1, msg=f'Failed for "{self.persona.name}"')
def _update_returning_user(user_data):
    """Record a new connection for a user and return their previous one."""
    if not user_data.HasField('requested_by_user_at_date'):
        # Never connected before: the previous "connection" is registration.
        last_connection = user_data.registered_at
    else:
        start_of_day = now.get().replace(hour=0, minute=0, second=0, microsecond=0)
        if user_data.requested_by_user_at_date.ToDatetime() >= start_of_day:
            # Nothing to update.
            return user_data.requested_by_user_at_date
        last_connection = timestamp_pb2.Timestamp()
        last_connection.CopyFrom(user_data.requested_by_user_at_date)
    user_data.requested_by_user_at_date.FromDatetime(now.get())
    # No need to pollute our DB with super precise timestamps.
    user_data.requested_by_user_at_date.nanos = 0
    _save_low_level(user_data)
    return last_connection
def test_passionate_search_just_started(self) -> None:
    """User passionate about their job and searching for 15 days should have a low score."""
    this_year = datetime.date.today().year
    if self.persona.user_profile.year_of_birth <= this_year - 45:
        self.persona.user_profile.year_of_birth = this_year - 40
    self.persona.project.passionate_level = project_pb2.LIFE_GOAL_JOB
    self.persona.project.job_search_has_not_started = False
    fifteen_days_ago = now.get() - datetime.timedelta(days=15)
    self.persona.project.job_search_started_at.FromDatetime(fifteen_days_ago)
    score = self._score_persona(self.persona)
    self.assertEqual(score, 1, msg=f'Failed for "{self.persona.name}"')
def test_search_reasonable_time(self) -> None:
    """User searching for 4 months should have a 0 score."""
    self.persona.project.diagnostic.ClearField('category_id')
    this_year = datetime.date.today().year
    if self.persona.user_profile.year_of_birth <= this_year - 45:
        self.persona.user_profile.year_of_birth = this_year - 40
    self.persona.project.job_search_has_not_started = False
    four_months_ago = now.get() - datetime.timedelta(days=124)
    self.persona.project.job_search_started_at.FromDatetime(four_months_ago)
    score = self._score_persona(self.persona)
    self.assertEqual(score, 0, msg=f'Failed for "{self.persona.name}"')
def test_search_just_started(self) -> None:
    """User searching for 15 days should have a medium score."""
    this_year = datetime.date.today().year
    if self.persona.user_profile.year_of_birth <= this_year - 45:
        self.persona.user_profile.year_of_birth = this_year - 40
    if self.persona.project.passionate_level >= project_pb2.PASSIONATING_JOB:
        self.persona.project.passionate_level = project_pb2.ALIMENTARY_JOB
    self.persona.project.job_search_has_not_started = False
    fifteen_days_ago = now.get() - datetime.timedelta(days=15)
    self.persona.project.job_search_started_at.FromDatetime(fifteen_days_ago)
    score = self._score_persona(self.persona)
    self.assertEqual(score, 2, msg=f'Failed for "{self.persona.name}"')
def test_search_for_very_long_time(self) -> None:
    """User searching for 13 months should have a high score."""
    this_year = datetime.date.today().year
    if self.persona.user_profile.year_of_birth <= this_year - 45:
        self.persona.user_profile.year_of_birth = this_year - 40
    self.persona.project.job_search_has_not_started = False
    thirteen_months_ago = now.get() - datetime.timedelta(days=397)
    self.persona.project.job_search_started_at.FromDatetime(thirteen_months_ago)
    project = self.persona.scoring_project(self.database, now=self.now)
    score, explanations = self.model.score_and_explain(project)
    self.assertEqual(score, 3, msg=f'Failed for "{self.persona.name}"')
    self.assertEqual(['vous cherchez depuis 13 mois'], explanations)
def _update_email_sent_status(
        email_sent_dict: Dict[str, Any], yesterday: str,
        campaign_ids: Optional[List[str]] = None) -> Dict[str, Any]:
    """Refresh one sent-email record with its latest status from MailJet.

    Args:
        email_sent_dict: the mongo dict of the EmailSent record.
        yesterday: a JSON-string timestamp used as the freshness cutoff.
        campaign_ids: if given, only emails from these campaigns are updated.

    Returns:
        the (possibly updated) mongo dict for the record.
    """
    email_sent = typing.cast(
        user_pb2.EmailSent, proto.create_from_mongo(email_sent_dict, user_pb2.EmailSent))
    if campaign_ids and email_sent.campaign_id not in campaign_ids:
        # Email is not from a campaign we wish to update, skipping.
        return email_sent_dict
    if email_sent.status != user_pb2.EMAIL_SENT_UNKNOWN and email_sent.last_status_checked_at:
        sent_at = email_sent.sent_at.ToJsonString()
        if sent_at < yesterday:
            # Email is older than a day: skip it if its status was checked
            # recently or if we have already checked long after it was sent.
            last_status_checked_at = email_sent.last_status_checked_at.ToJsonString()
            if email_sent.last_status_checked_after_days > 14 or last_status_checked_at > yesterday:
                return email_sent_dict
    message = _find_message(email_sent)
    if message:
        email_sent.mailjet_message_id = message.get('ID', email_sent.mailjet_message_id)
        status = message.get('Status')
        if status:
            email_sent.status = user_pb2.EmailSentStatus.Value(f'EMAIL_SENT_{status.upper()}')
        else:
            logging.warning('No status for message "%s"', email_sent.mailjet_message_id)
    else:
        logging.warning('Could not find a message in MailJet.')
    # Record when we checked, without sub-second precision, and how long after
    # sending the check happened.
    email_sent.last_status_checked_at.FromDatetime(now.get())
    email_sent.last_status_checked_at.nanos = 0
    email_sent.last_status_checked_after_days = (
        now.get() - email_sent.sent_at.ToDatetime()).days
    return json_format.MessageToDict(email_sent)
def test_create_support_ticket(self) -> None:
    """A user is assigned a support ID if requested."""
    user_id, token = self.create_user_with_token()
    response = self.app.post(
        f'/api/support/{user_id}',
        headers={'Authorization': 'Bearer ' + token},
        content_type='application/json')
    ticket = self.json_from_response(response)
    self.assertTrue(ticket.get('ticketId'))
    # The ticket should expire between one and thirty days from now.
    delete_after = ticket.get('deleteAfter')
    lower_bound = proto.datetime_to_json_string(now.get() + datetime.timedelta(days=1))
    upper_bound = proto.datetime_to_json_string(now.get() + datetime.timedelta(days=30))
    self.assertGreater(delete_after, lower_bound)
    self.assertLess(delete_after, upper_bound)
    user_data = self.get_user_info(user_id, token)
    last_saved_ticket = typing.cast(
        Dict[str, str], user_data.get('supportTickets', [])[-1])
    self.assertEqual(ticket, last_saved_ticket)
def main(string_args: Optional[List[str]] = None, out: TextIO = sys.stdout) -> None:
    """Parse command line arguments and trigger _compute_assessment_report function.

    docker-compose run --rm -e MONGO_URL="$PROD_MONGO" frontend-flask \
        python /work/bob_emploi/frontend/server/asynchronous/assess_assessment.py -s 2017-11-01
    """
    # Fixed typos in the user-visible help strings ("whith" -> "with",
    # "whithout" -> "without").
    parser = argparse.ArgumentParser(
        description='Statistics on users with or without assessment.')
    since_group = parser.add_mutually_exclusive_group()
    since_group.add_argument(
        '-d', '--since-days-ago', type=int,
        help='Process use cases registered in the last given days.')
    since_group.add_argument(
        '-s', '--since', default='2018',
        help='Process use cases registered since the given date.')
    parser.add_argument(
        '-u', '--until',
        help='Process use cases registered before (but not including) the given date.')
    parser.add_argument(
        '-e', '--examples', default=1, type=int,
        help='Show the given number of examples of use cases without assessment.')
    parser.add_argument('--verbose', '-v', action='store_true', help='More detailed output.')
    parser.add_argument(
        '--no-dry-run', dest='dry_run', action='store_false',
        help='No dry run really send reports.')
    args = parser.parse_args(string_args)
    if not args.dry_run:
        report.setup_sentry_logging(os.getenv('SENTRY_DSN'))
    logging.basicConfig(level='DEBUG' if args.verbose else 'INFO')
    present = now.get()
    from_date = args.since
    if args.since_days_ago:
        from_date = present - datetime.timedelta(days=args.since_days_ago)
    to_date = present if args.since_days_ago or not args.until else args.until
    report_text = _compute_assessment_report(args.examples, from_date, to_date)
    if args.dry_run:
        out.write(report_text)
        return
    if _SLACK_ASSESSER_URL:
        requests.post(_SLACK_ASSESSER_URL, json={'attachments': [{
            'mrkdwn_in': ['text'],
            'title': f'Assessment coverage from {from_date} to {to_date}',
            'text': report_text,
        }]})
def _log_request(email: str, requester_email: str,
                 database: pymongo.database.Database) -> None:
    """Record in the database that someone tried to access a specific user."""
    request_log = {
        'email': email,
        'registeredAt': proto.datetime_to_json_string(now.get()),
        'requesterEmail': requester_email,
    }
    try:
        # Log that we've tried to access to a specific user.
        database.email_requests.insert_one(request_log)
    except errors.OperationFailure:
        flask.abort(401, "Vous n'avez pas accès en écriture à la base de données.")