def test_update_user(self):
    """update_user() must stamp 'last_sending' on the user's ES document.

    Exercised twice: once for an address already in the index and once
    for an address that is not; both must end up with exactly one
    document carrying the expected timestamp.
    """
    manager = self._setup_simple_config()
    with manager.context() as config:
        job = automatic_emails.AutomaticEmailsCronApp(config, '')
        timestamp = utc_now().isoformat()

        supers = SuperS().es(
            urls=config.elasticsearch.elasticsearch_urls,
            timeout=config.elasticsearch.elasticsearch_timeout,
        )
        search = supers.indexes(
            config.elasticsearch.elasticsearch_emails_index
        ).doctypes('emails')
        connection = supers.get_es()

        job.update_user('*****@*****.**', timestamp, connection)
        connection.refresh()
        rows = list(
            search.filter(_id='*****@*****.**').values_list('last_sending')
        )
        eq_(len(rows), 1)
        eq_(rows[0][0], timestamp)

        # Test with a non-existing user
        job.update_user('*****@*****.**', timestamp, connection)
        connection.refresh()
        rows = list(
            search.filter(_id='*****@*****.**').values_list('last_sending')
        )
        eq_(len(rows), 1)
        eq_(rows[0][0], timestamp)
def test_update_user(self):
    """update_user() must stamp 'last_sending' on the user's ES document.

    Exercised twice: once for an address already in the index and once
    for an address that is not; both must end up with exactly one
    document carrying the expected timestamp.
    """
    manager = self._setup_simple_config()
    with manager.context() as config:
        job = automatic_emails.AutomaticEmailsCronApp(config, '')
        timestamp = utc_now().isoformat()

        supers = SuperS().es(
            urls=config.elasticsearch.elasticsearch_urls,
            timeout=config.elasticsearch.elasticsearch_timeout,
        )
        search = supers.indexes(
            config.elasticsearch.elasticsearch_emails_index
        ).doctypes('emails')
        connection = supers.get_es()

        job.update_user('*****@*****.**', timestamp, connection)
        connection.refresh()
        rows = list(
            search.filter(_id='*****@*****.**').values_list('last_sending')
        )
        self.assertEqual(len(rows), 1)
        self.assertEqual(rows[0][0], timestamp)

        # Test with a non-existing user
        job.update_user('*****@*****.**', timestamp, connection)
        connection.refresh()
        rows = list(
            search.filter(_id='*****@*****.**').values_list('last_sending')
        )
        self.assertEqual(len(rows), 1)
        self.assertEqual(rows[0][0], timestamp)
def test_cron_job(self, exacttarget_mock):
    """Run the cron app end to end against a mocked ExactTarget.

    get_subscriber is made to raise NewsletterException; the job must
    still finish without recording an error, send 4 emails, and stamp
    'last_sending' (today) on every contacted user's ES document.
    """
    manager = self._setup_config_manager()
    et_mock = exacttarget_mock.return_value

    # Make get_subscriber raise an exception
    subscriber_service = et_mock.list.return_value = mock.Mock()
    subscriber_service.get_subscriber = mock.Mock(
        side_effect=exacttarget.NewsletterException()
    )

    with manager.context() as config:
        tab = crontabber.CronTabber(config)
        tab.run_all()

        information = self._load_structure()
        assert information['automatic-emails']
        assert not information['automatic-emails']['last_error']
        assert information['automatic-emails']['last_success']
        self.assertEqual(et_mock.trigger_send.call_count, 4)

        last_email = u'z\[email protected]'

        # Verify the last call to trigger_send
        fields = {
            'EMAIL_ADDRESS_': last_email,
            'EMAIL_FORMAT_': 'H',
            'TOKEN': last_email
        }
        et_mock.trigger_send.assert_called_with('socorro_dev_test', fields)

        # Verify that user's data was updated
        conf = config.crontabber['class-AutomaticEmailsCronApp']
        supers = SuperS().es(
            urls=conf.elasticsearch.elasticsearch_urls,
            timeout=conf.elasticsearch.elasticsearch_timeout,
        )
        search = supers.indexes(
            conf.elasticsearch.elasticsearch_emails_index
        ).doctypes('emails')
        supers.get_es().refresh()

        contacted = (
            '*****@*****.**',
            '"Quidam" <*****@*****.**>',
            '*****@*****.**'
        )
        rows = search.filter(_id__in=contacted).values_list('last_sending')
        self.assertEqual(len(rows), 3)

        today = utc_now()
        for row in rows:
            stamped = string_to_datetime(row[0])
            self.assertEqual(stamped.year, today.year)
            self.assertEqual(stamped.month, today.month)
            self.assertEqual(stamped.day, today.day)
def run(self, run_datetime):
    """Send one automatic email per eligible crash reporter.

    Queries crashes processed in the hour before *run_datetime* for the
    configured products whose reports carry an email address, drops
    addresses that fail validation or were contacted too recently,
    picks an email template per crash, then sends each email and
    records the send so the next run skips the same address.

    :arg run_datetime: timezone-aware datetime marking the end of the
        one-hour processing window.
    """
    logger = self.config.logger
    if self.config.test_mode:
        logger.warning('You are running Automatic Emails cron app '
                       'in test mode')

    cooldown = datetime.timedelta(days=self.config.delay_between_emails)
    params = {
        'start_date': run_datetime - datetime.timedelta(hours=1),
        'end_date': run_datetime,
        'delayed_date': run_datetime - cooldown,
        'products': tuple(self.config.restrict_products)
    }

    # Find the indexes to use to optimize the elasticsearch query.
    index_names = self.generate_list_of_indexes(
        params['start_date'],
        params['end_date'],
        self.config.elasticsearch.elasticsearch_index
    )

    # Create and configure the search object.
    connection = SuperS().es(
        urls=self.config.elasticsearch.elasticsearch_urls,
        timeout=self.config.elasticsearch.elasticsearch_timeout,
    )
    query = connection.indexes(*index_names)
    query = query.doctypes(self.config.elasticsearch.elasticsearch_doctype)
    query = query.order_by('processed_crash.email')

    # Keep crashes processed inside the window, for the configured
    # products, that actually carry an email address.
    must_args = {
        'processed_crash.date_processed__lt': params['end_date'],
        'processed_crash.date_processed__gt': params['start_date'],
        'processed_crash.product': [
            x.lower() for x in params['products']
        ],
    }
    missing_email = {
        'processed_crash.email__missing': None,
    }
    combined = elasticutils.F(**must_args)
    combined &= ~elasticutils.F(**missing_email)
    query = query.filter(combined)

    total = query.count()  # Total number of results.
    query = query[:total]

    # Get the recently sent emails
    emails = self.get_list_of_emails(params, connection)

    validation_rules = TransformRuleSystem()
    validation_rules.load_rules((
        (verify_email, (), {}, sanitize_email, (), {}),
        (verify_email, (), {}, False, (), {}),
        (
            verify_email_last_sending, (), {'emails_list': emails},
            True, (), {}
        ),
    ))

    template_rules = TransformRuleSystem()
    template_rules.load_rules((
        (
            verify_support_classification, ('bitguard',), {},
            set_email_template, ('socorro_bitguard_en',), {}
        ),
        # If no other rule passed, fall back to the default template.
        (
            True, (), {},
            set_email_template, (self.config.email_template,), {}
        ),
    ))

    for hit in query.values_dict(
        'processed_crash.email',
        'processed_crash.classifications.support.classification',
    ):
        if validation_rules.apply_until_predicate_fails(hit) is not None:
            # A validation predicate rejected this crash/address.
            continue

        # All predicates succeeded! Now apply all template rules to
        # find which email template to use.
        template_rules.apply_until_action_succeeds(hit)

        # Bug 965610 - If the email template is empty, do not send
        # an email. Setting the default email template to '' means
        # no generic email will be sent anymore.
        if not hit['email_template']:
            continue

        recipient = hit['processed_crash.email']
        self.send_email(hit)
        self.update_user(recipient, run_datetime, connection.get_es())
        emails[recipient] = run_datetime

    # Make sure the next run will have updated data, to avoid sending an
    # email several times.
    connection.get_es().refresh()
def run(self, run_datetime):
    """Send one automatic email per eligible crash reporter.

    Queries crashes processed in the hour before *run_datetime* for the
    configured products whose reports carry an email address, drops
    addresses that fail validation or were contacted too recently,
    picks an email template per crash, then sends each email and
    records the send so the next run skips the same address.

    :arg run_datetime: timezone-aware datetime marking the end of the
        one-hour processing window.
    """
    logger = self.config.logger
    if self.config.test_mode:
        logger.warning('You are running Automatic Emails cron app '
                       'in test mode')

    delay = datetime.timedelta(days=self.config.delay_between_emails)
    params = {
        'start_date': run_datetime - datetime.timedelta(hours=1),
        'end_date': run_datetime,
        'delayed_date': run_datetime - delay,
        'products': tuple(self.config.restrict_products)
    }

    # Find the indexes to use to optimize the elasticsearch query.
    indexes = self.generate_list_of_indexes(
        params['start_date'],
        params['end_date'],
        self.config.elasticsearch.elasticsearch_index
    )

    # Create and configure the search object.
    connection = SuperS().es(
        urls=self.config.elasticsearch.elasticsearch_urls,
        timeout=self.config.elasticsearch.elasticsearch_timeout,
    )
    search = (connection.indexes(*indexes)
              .doctypes(self.config.elasticsearch.elasticsearch_doctype)
              .order_by('processed_crash.email'))

    # Create filters: crashes inside the window, for the configured
    # products, whose report carries an email address.
    args_and = {
        'processed_crash.date_processed__lt': params['end_date'],
        'processed_crash.date_processed__gt': params['start_date'],
        'processed_crash.product': [x.lower() for x in params['products']],
    }
    args_not = {
        'processed_crash.email__missing': None,
    }
    filters = elasticutils.F(**args_and)
    filters &= ~elasticutils.F(**args_not)
    search = search.filter(filters)

    count = search.count()  # Total number of results.
    search = search[:count]

    # Get the recently sent emails
    emails = self.get_list_of_emails(params, connection)

    validation_rules = TransformRuleSystem()
    validation_rules.load_rules((
        (verify_email, (), {}, sanitize_email, (), {}),
        (verify_email, (), {}, False, (), {}),
        (
            verify_email_last_sending, (), {'emails_list': emails},
            True, (), {}
        ),
    ))

    template_rules = TransformRuleSystem()
    template_rules.load_rules((
        (
            verify_support_classification, ('bitguard',), {},
            set_email_template, ('socorro_bitguard_en',), {}
        ),
        # If no other rule passed, fall back to the default template.
        (
            True, (), {},
            set_email_template, (self.config.email_template,), {}
        ),
    ))

    for hit in search.values_dict(
        'processed_crash.email',
        'processed_crash.classifications.support.classification',
    ):
        res = validation_rules.apply_until_predicate_fails(hit)

        if res is None:
            # All predicates succeeded! Now apply all template rules
            # to find which email template to use.
            template_rules.apply_until_action_succeeds(hit)

            # Bug 965610 - If the email template is empty, do not send
            # an email. Setting the default email template to '' means
            # no generic email will be sent anymore.
            if not hit['email_template']:
                continue

            email = hit['processed_crash.email']
            self.send_email(hit)
            self.update_user(email, run_datetime, connection.get_es())
            emails[email] = run_datetime

    # Make sure the next run will have updated data, to avoid sending an
    # email several times.
    connection.get_es().refresh()
def test_email_cannot_be_sent_twice(self, exacttarget_mock):
    """A failed send is retried later; successes are never re-sent.

    The first run fails for one address; a second run (with the clock
    wound forward an hour) must contact only the addresses that were
    missed, never the ones already emailed.
    """
    manager = self._setup_config_manager(
        restrict_products=['NightlyTrain']
    )
    et_mock = exacttarget_mock.return_value

    # Prepare failures
    attempted_failures = []
    sent = []

    class SomeRandomError(Exception):
        pass

    def trigger_send(template, fields):
        # Fail exactly once, for a single address; record every
        # successful delivery.
        address = fields['EMAIL_ADDRESS_']
        if address == '*****@*****.**' and address not in attempted_failures:
            attempted_failures.append(address)
            raise SomeRandomError('This is an error. ')
        else:
            sent.append(address)

    et_mock.trigger_send = trigger_send

    with manager.context() as config:
        tab = CronTabber(config)
        tab.run_all()

        information = self._load_structure()
        assert information['automatic-emails']
        assert information['automatic-emails']['last_error']
        eq_(
            information['automatic-emails']['last_error']['type'],
            str(SomeRandomError)
        )

        # Verify that user's data was updated, but not all of it
        eq_(sent, ['*****@*****.**', '*****@*****.**'])
        addresses = (
            '*****@*****.**',
            '*****@*****.**',
            '*****@*****.**',
            '*****@*****.**',
            '*****@*****.**'
        )
        conf = config.crontabber['class-AutomaticEmailsCronApp']
        supers = SuperS().es(
            urls=conf.elasticsearch.elasticsearch_urls,
            timeout=conf.elasticsearch.elasticsearch_timeout,
        )
        search = supers.indexes(
            conf.elasticsearch.elasticsearch_emails_index
        ).doctypes('emails')
        supers.get_es().refresh()

        outcome = search.filter(_id__in=addresses).execute()
        eq_(outcome.count, 2)

        today = utc_now()
        for row in outcome.results:
            assert row['_id'] in ('*****@*****.**', '*****@*****.**')
            stamped = string_to_datetime(row['_source']['last_sending'])
            eq_(stamped.year, today.year)
            eq_(stamped.month, today.month)
            eq_(stamped.day, today.day)

        # Run crontabber again and verify that all users are updated,
        # and emails are not sent twice
        state = tab.job_state_database['automatic-emails']
        self._wind_clock(state, hours=1)
        tab.job_state_database['automatic-emails'] = state
        tab.run_all()

        information = self._load_structure()
        assert information['automatic-emails']
        assert not information['automatic-emails']['last_error']
        assert information['automatic-emails']['last_success']

        # Verify that users were not sent an email twice
        eq_(sent, [
            '*****@*****.**',
            '*****@*****.**',
            '*****@*****.**',
            '*****@*****.**',
            '*****@*****.**'
        ])
def test_email_cannot_be_sent_twice(self, exacttarget_mock):
    """A failed send is retried later; successes are never re-sent.

    The first run fails for one address; a second run (with the clock
    wound forward an hour) must contact only the addresses that were
    missed, never the ones already emailed.
    """
    manager = self._setup_config_manager(
        restrict_products=['NightlyTrain']
    )
    et_mock = exacttarget_mock.return_value

    # Prepare failures
    attempted_failures = []
    sent = []

    class SomeRandomError(Exception):
        pass

    def trigger_send(template, fields):
        # Fail exactly once, for a single address; record every
        # successful delivery.
        address = fields['EMAIL_ADDRESS_']
        if address == '*****@*****.**' and address not in attempted_failures:
            attempted_failures.append(address)
            raise SomeRandomError('This is an error. ')
        else:
            sent.append(address)

    et_mock.trigger_send = trigger_send

    with manager.context() as config:
        tab = crontabber.CronTabber(config)
        tab.run_all()

        information = self._load_structure()
        assert information['automatic-emails']
        assert information['automatic-emails']['last_error']
        self.assertEqual(
            information['automatic-emails']['last_error']['type'],
            str(SomeRandomError)
        )

        # Verify that user's data was updated, but not all of it
        self.assertEqual(sent, ['*****@*****.**', '*****@*****.**'])
        addresses = (
            '*****@*****.**',
            '*****@*****.**',
            '*****@*****.**',
            '*****@*****.**',
            '*****@*****.**'
        )
        conf = config.crontabber['class-AutomaticEmailsCronApp']
        supers = SuperS().es(
            urls=conf.elasticsearch.elasticsearch_urls,
            timeout=conf.elasticsearch.elasticsearch_timeout,
        )
        search = supers.indexes(
            conf.elasticsearch.elasticsearch_emails_index
        ).doctypes('emails')
        supers.get_es().refresh()

        outcome = search.filter(_id__in=addresses).execute()
        self.assertEqual(outcome.count, 2)

        today = utc_now()
        for row in outcome.results:
            assert row['_id'] in ('*****@*****.**', '*****@*****.**')
            stamped = string_to_datetime(row['_source']['last_sending'])
            self.assertEqual(stamped.year, today.year)
            self.assertEqual(stamped.month, today.month)
            self.assertEqual(stamped.day, today.day)

        # Run crontabber again and verify that all users are updated,
        # and emails are not sent twice
        state = tab.job_database['automatic-emails']
        self._wind_clock(state, hours=1)
        tab.job_database['automatic-emails'] = state
        tab.run_all()

        information = self._load_structure()
        assert information['automatic-emails']
        assert not information['automatic-emails']['last_error']
        assert information['automatic-emails']['last_success']

        # Verify that users were not sent an email twice
        self.assertEqual(sent, [
            '*****@*****.**',
            '*****@*****.**',
            '*****@*****.**',
            '*****@*****.**',
            '*****@*****.**'
        ])