예제 #1
0
    def test_sending_many_emails(self, exacttarget_mock):
        """Test that we can send emails to a lot of users in the same run.

        Saves 21 crash reports with distinct email addresses, runs the
        automatic-emails cron job, and verifies one trigger_send call per
        recipient on top of the default fixtures.
        """
        # Process date slightly in the past so the reports fall inside the
        # job's scanning window.
        now = utc_now() - datetime.timedelta(minutes=30)

        config_manager = self._setup_storage_config()
        with config_manager.context() as config:
            storage = ElasticSearchCrashStorage(config)

            for i in range(21):
                storage.save_processed({
                    'uuid': 'fake-%s' % i,
                    # Each report needs a unique address or the job would
                    # de-duplicate recipients.  The original format string
                    # had no %-conversion specifier, so '...' % i raised
                    # TypeError before any email could be stored.
                    'email': '*****-%s@*****.**' % i,
                    'product': 'WaterWolf',
                    'version': '20.0',
                    'release_channel': 'Release',
                    'date_processed': now,
                })

            storage.es.refresh()

        config_manager = self._setup_simple_config()
        with config_manager.context() as config:
            job = automatic_emails.AutomaticEmailsCronApp(config, '')
            job.run(utc_now())

            et_mock = exacttarget_mock.return_value
            # Verify that we have the default 4 results + the 21 we added.
            self.assertEqual(et_mock.trigger_send.call_count, 25)
예제 #2
0
    def test_sending_many_emails(self, exacttarget_mock):
        """Test that we can send emails to a lot of users in the same run.

        Saves 21 crash reports with distinct email addresses, runs the
        automatic-emails cron job, and verifies one trigger_send call per
        recipient on top of the default fixtures.
        """
        # Process date slightly in the past so the reports fall inside the
        # job's scanning window.
        now = utc_now() - datetime.timedelta(minutes=30)

        config_manager = self._setup_storage_config()
        with config_manager.context() as config:
            storage = ElasticSearchCrashStorage(config)

            for i in range(21):
                storage.save_processed({
                    'uuid': 'fake-%s' % i,
                    # Each report needs a unique address or the job would
                    # de-duplicate recipients.  The original format string
                    # had no %-conversion specifier, so '...' % i raised
                    # TypeError before any email could be stored.
                    'email': '*****-%s@*****.**' % i,
                    'product': 'WaterWolf',
                    'version': '20.0',
                    'release_channel': 'Release',
                    'date_processed': now,
                })

            storage.es.refresh()

        config_manager = self._setup_simple_config()
        with config_manager.context() as config:
            job = automatic_emails.AutomaticEmailsCronApp(config, '')
            job.run(utc_now())

            et_mock = exacttarget_mock.return_value
            # Verify that we have the default 4 results + the 21 we added.
            eq_(et_mock.trigger_send.call_count, 25)
예제 #3
0
    def test_success(self, pyes_mock):
        """A processed crash is indexed with the expected args and kwargs."""
        fake_logger = mock.Mock()
        fake_es = mock.Mock()
        pyes_mock.ElasticSearch.return_value = fake_es

        required_config = ElasticSearchCrashStorage.get_required_config()
        required_config.add_option('logger', default=fake_logger)

        overrides = {
            'logger': fake_logger,
            'elasticsearch_urls': 'http://elasticsearch_host:9200',
        }
        config_manager = ConfigurationManager(
            [required_config],
            app_name='testapp',
            app_version='1.0',
            app_description='app description',
            values_source_list=[overrides],
        )

        with config_manager.context() as config:
            es_storage = ElasticSearchCrashStorage(config)
            es_storage.save_processed(a_processed_crash)

            # The target index name derives from the crash's processing date.
            fake_es.index.assert_called_with(
                'socorro201214',
                'crash_reports',
                a_processed_crash,
                replication='async',
                id=a_processed_crash['uuid'],
            )
예제 #4
0
    def test_success_after_limited_retry(self):
        """save_processed succeeds on the third attempt after two timeouts."""
        fake_logger = mock.Mock()
        required_config = ElasticSearchCrashStorage.required_config
        required_config.add_option('logger', default=fake_logger)

        config_manager = ConfigurationManager(
            [required_config],
            app_name='testapp',
            app_version='1.0',
            app_description='app description',
            values_source_list=[{
                'logger': fake_logger,
                'submission_url': 'http://elasticsearch_host/%s',
                'timeout': 0,
                'backoff_delays': [0, 0, 0],
                'transaction_executor_class':
                    TransactionExecutorWithLimitedBackoff,
            }],
        )

        with config_manager.context() as config:
            es_storage = ElasticSearchCrashStorage(config)
            patch_target = 'socorro.external.elasticsearch.crashstorage.urllib2'
            m_request = mock.Mock()
            m_urlopen = mock.Mock()
            with mock.patch(patch_target) as mocked_urllib:
                mocked_urllib.Request = m_request
                m_request.return_value = 17
                mocked_urllib.urlopen = m_urlopen

                # The first two urlopen calls time out; the third succeeds.
                pending_failures = [
                    urllib2.socket.timeout,
                    urllib2.socket.timeout,
                ]

                def fake_urlopen(*args, **kwargs):
                    if pending_failures:
                        raise pending_failures.pop(0)
                    return m_urlopen

                m_urlopen.side_effect = fake_urlopen

                es_storage.save_processed(a_processed_crash)

                # The crash uuid was interpolated into the submission URL.
                m_request.assert_called_with(
                    'http://elasticsearch_host/9120408936ce666-ff3b-4c7a-9674-'
                    '367fe2120408',
                    {},
                )
                self.assertEqual(m_urlopen.call_count, 3)
                m_urlopen.assert_called_with(17, timeout=0)
예제 #5
0
    def test_indexing(self, pyes_mock):
        """A weekly index is created once; duplicates are tolerated quietly."""
        fake_logger = mock.Mock()
        fake_es = mock.Mock()
        pyes_mock.exceptions.ElasticHttpNotFoundError = \
            pyelasticsearch.exceptions.ElasticHttpNotFoundError
        pyes_mock.ElasticSearch.return_value = fake_es

        required_config = ElasticSearchCrashStorage.get_required_config()
        required_config.add_option('logger', default=fake_logger)

        config_manager = ConfigurationManager(
            [required_config],
            app_name='testapp',
            app_version='1.0',
            app_description='app description',
            values_source_list=[{
                'logger': fake_logger,
                'elasticsearch_urls': 'http://elasticsearch_host:9200',
            }],
            argv_source=[],
        )

        with config_manager.context() as config:
            es_storage = ElasticSearchCrashStorage(config)
            crash_report = a_processed_crash.copy()
            crash_report['date_processed'] = '2013-01-01 10:56:41.558922'

            def fake_create_index(index, **kwargs):
                assert 'socorro20130' in index
                # Simulate "already exists" for the second week's index only.
                if index == 'socorro201301':
                    raise IndexAlreadyExistsError()

            fake_es.create_index.side_effect = fake_create_index

            def logged_info():
                return [args for args, _ in fake_logger.info.call_args_list]

            # First save: the index is missing, so it is created and logged.
            es_storage.save_processed(crash_report)
            eq_(fake_es.create_index.call_count, 1)
            ok_(
                ('created new elasticsearch index: %s', 'socorro201300')
                in logged_info()
            )

            # Second save, a week later: creation is attempted but the
            # "already exists" error is swallowed and nothing is logged.
            crash_report['date_processed'] = '2013-01-10 10:56:41.558922'
            es_storage.save_processed(crash_report)
            eq_(fake_es.create_index.call_count, 2)
            ok_(
                ('created new elasticsearch index: %s', 'socorro201301')
                not in logged_info()
            )
예제 #6
0
    def test_indexing(self, pyes_mock):
        """Verify index creation happens per week and duplicates are ignored."""
        logger_mock = mock.Mock()
        es_mock = mock.Mock()
        pyes_mock.exceptions.ElasticHttpNotFoundError = \
            pyelasticsearch.exceptions.ElasticHttpNotFoundError
        pyes_mock.ElasticSearch.return_value = es_mock

        required_config = ElasticSearchCrashStorage.get_required_config()
        required_config.add_option('logger', default=logger_mock)

        value_overrides = {
            'logger': logger_mock,
            'elasticsearch_urls': 'http://elasticsearch_host:9200',
        }
        config_manager = ConfigurationManager(
            [required_config],
            app_name='testapp',
            app_version='1.0',
            app_description='app description',
            values_source_list=[value_overrides],
            argv_source=[],
        )

        with config_manager.context() as config:
            es_storage = ElasticSearchCrashStorage(config)
            crash_report = a_processed_crash.copy()
            crash_report['date_processed'] = '2013-01-01 10:56:41.558922'

            def create_index_side_effect(index, **kwargs):
                assert 'socorro20130' in index
                # The second week's index pretends to pre-exist.
                if index == 'socorro201301':
                    raise IndexAlreadyExistsError()

            es_mock.create_index.side_effect = create_index_side_effect

            # Missing index: created and announced via the logger.
            es_storage.save_processed(crash_report)
            eq_(es_mock.create_index.call_count, 1)
            info_calls = [
                args for args, kwargs in logger_mock.info.call_args_list
            ]
            ok_(
                ('created new elasticsearch index: %s', 'socorro201300')
                in info_calls
            )

            # Pre-existing index: creation attempted, error swallowed,
            # no log entry emitted for it.
            crash_report['date_processed'] = '2013-01-10 10:56:41.558922'
            es_storage.save_processed(crash_report)

            eq_(es_mock.create_index.call_count, 2)
            info_calls = [
                args for args, kwargs in logger_mock.info.call_args_list
            ]
            ok_(
                ('created new elasticsearch index: %s', 'socorro201301')
                not in info_calls
            )
예제 #7
0
    def test_success_after_limited_retry(self):
        """Two socket timeouts are retried; the third urlopen succeeds."""
        logger_mock = mock.Mock()
        required_config = ElasticSearchCrashStorage.required_config
        required_config.add_option('logger', default=logger_mock)

        config_manager = ConfigurationManager(
            [required_config],
            app_name='testapp',
            app_version='1.0',
            app_description='app description',
            values_source_list=[{
                'logger': logger_mock,
                'submission_url': 'http://elasticsearch_host/%s',
                'timeout': 0,
                'backoff_delays': [0, 0, 0],
                'transaction_executor_class':
                    TransactionExecutorWithLimitedBackoff,
            }],
        )

        with config_manager.context() as config:
            es_storage = ElasticSearchCrashStorage(config)
            urllib_path = 'socorro.external.elasticsearch.crashstorage.urllib2'
            request_mock = mock.Mock()
            urlopen_mock = mock.Mock()
            with mock.patch(urllib_path) as mocked_urllib:
                mocked_urllib.Request = request_mock
                request_mock.return_value = 17
                mocked_urllib.urlopen = urlopen_mock

                # Queue of failures consumed one per call; empty => success.
                failures = [urllib2.socket.timeout, urllib2.socket.timeout]

                def urlopen_side_effect(*args, **kwargs):
                    if failures:
                        raise failures.pop(0)
                    return urlopen_mock

                urlopen_mock.side_effect = urlopen_side_effect

                es_storage.save_processed(a_processed_crash)

                # Request was built against the crash-specific URL.
                request_mock.assert_called_with(
                    'http://elasticsearch_host/9120408936ce666-ff3b-4c7a-9674-'
                    '367fe2120408',
                    {},
                )
                self.assertEqual(urlopen_mock.call_count, 3)
                urlopen_mock.assert_called_with(17, timeout=0)
예제 #8
0
    def test_success_after_limited_retry(self, pyes_mock):
        """Indexing succeeds on the third try after two ES timeouts."""
        fake_logger = mock.Mock()
        fake_es = mock.Mock()
        pyes_mock.ElasticSearch.return_value = fake_es

        required_config = ElasticSearchCrashStorage.required_config
        required_config.add_option('logger', default=fake_logger)

        config_manager = ConfigurationManager(
            [required_config],
            app_name='testapp',
            app_version='1.0',
            app_description='app description',
            values_source_list=[{
                'logger': fake_logger,
                'elasticsearch_urls': 'http://elasticsearch_host:9200',
                'timeout': 0,
                'backoff_delays': [0, 0, 0],
                'transaction_executor_class':
                    TransactionExecutorWithLimitedBackoff,
            }],
        )

        with config_manager.context() as config:
            es_storage = ElasticSearchCrashStorage(config)

            # The first two index() calls raise Timeout, then it succeeds.
            pending_failures = [
                pyelasticsearch.exceptions.Timeout,
                pyelasticsearch.exceptions.Timeout,
            ]

            def index_side_effect(*args, **kwargs):
                if pending_failures:
                    raise pending_failures.pop(0)
                return fake_es.index

            fake_es.index.side_effect = index_side_effect

            es_storage.save_processed(a_processed_crash)

            # The final, successful call carried the expected payload.
            fake_es.index.assert_called_with(
                'socorro201214',
                'crash_reports',
                a_processed_crash,
                replication='async',
                id=a_processed_crash['uuid'],
            )
예제 #9
0
    def test_success_after_limited_retry(self, pyes_mock):
        """After two pyelasticsearch timeouts the crash is indexed anyway."""
        logger_mock = mock.Mock()
        es_mock = mock.Mock()
        pyes_mock.ElasticSearch.return_value = es_mock

        required_config = ElasticSearchCrashStorage.get_required_config()
        required_config.add_option('logger', default=logger_mock)

        overrides = {
            'logger': logger_mock,
            'elasticsearch_urls': 'http://elasticsearch_host:9200',
            'timeout': 0,
            'backoff_delays': [0, 0, 0],
            'transaction_executor_class':
                TransactionExecutorWithLimitedBackoff,
        }
        config_manager = ConfigurationManager(
            [required_config],
            app_name='testapp',
            app_version='1.0',
            app_description='app description',
            values_source_list=[overrides],
        )

        with config_manager.context() as config:
            es_storage = ElasticSearchCrashStorage(config)

            # Two queued timeouts are raised before index() finally succeeds.
            timeouts_left = [
                pyelasticsearch.exceptions.Timeout,
                pyelasticsearch.exceptions.Timeout,
            ]

            def index_side_effect(*args, **kwargs):
                if timeouts_left:
                    raise timeouts_left.pop(0)
                return es_mock.index

            es_mock.index.side_effect = index_side_effect

            es_storage.save_processed(a_processed_crash)

            es_mock.index.assert_called_with(
                'socorro201214',
                'crash_reports',
                a_processed_crash,
                replication='async',
                id=a_processed_crash['uuid'],
            )
예제 #10
0
    def test_indexing(self, pyes_mock):
        """An index is created only when ES status() reports it missing."""
        fake_logger = mock.Mock()
        fake_es = mock.Mock()
        pyes_mock.exceptions.ElasticHttpNotFoundError = \
            pyelasticsearch.exceptions.ElasticHttpNotFoundError
        pyes_mock.ElasticSearch.return_value = fake_es

        required_config = ElasticSearchCrashStorage.get_required_config()
        required_config.add_option('logger', default=fake_logger)

        config_manager = ConfigurationManager(
            [required_config],
            app_name='testapp',
            app_version='1.0',
            app_description='app description',
            values_source_list=[{
                'logger': fake_logger,
                'elasticsearch_urls': 'http://elasticsearch_host:9200',
            }],
        )

        with config_manager.context() as config:
            es_storage = ElasticSearchCrashStorage(config)
            crash_report = a_processed_crash.copy()
            crash_report['date_processed'] = '2013-01-01 10:56:41.558922'

            def fake_status(index):
                assert 'socorro20130' in index
                # Only the first week's index is reported as missing.
                if index == 'socorro201300':
                    raise pyelasticsearch.exceptions.ElasticHttpNotFoundError()

            fake_es.status = fake_status

            # Missing index: created exactly once.
            es_storage.save_processed(crash_report)
            self.assertEqual(fake_es.create_index.call_count, 1)

            # Second week's index already exists: no further creation.
            crash_report['date_processed'] = '2013-01-10 10:56:41.558922'
            es_storage.save_processed(crash_report)

            self.assertEqual(fake_es.create_index.call_count, 1)
예제 #11
0
    def test_indexing(self, pyes_mock):
        """create_index fires only for an index that status() says is absent."""
        logger_mock = mock.Mock()
        es_mock = mock.Mock()
        pyes_mock.exceptions.ElasticHttpNotFoundError = \
            pyelasticsearch.exceptions.ElasticHttpNotFoundError
        pyes_mock.ElasticSearch.return_value = es_mock

        required_config = ElasticSearchCrashStorage.required_config
        required_config.add_option('logger', default=logger_mock)

        overrides = {
            'logger': logger_mock,
            'elasticsearch_urls': 'http://elasticsearch_host:9200',
        }
        config_manager = ConfigurationManager(
            [required_config],
            app_name='testapp',
            app_version='1.0',
            app_description='app description',
            values_source_list=[overrides],
        )

        with config_manager.context() as config:
            es_storage = ElasticSearchCrashStorage(config)
            crash_report = a_processed_crash.copy()
            crash_report['date_processed'] = '2013-01-01 10:56:41.558922'

            def status_side_effect(index):
                assert 'socorro20130' in index
                # Report only the first weekly index as not found.
                if index == 'socorro201300':
                    raise pyelasticsearch.exceptions.ElasticHttpNotFoundError()

            es_mock.status = status_side_effect

            # The absent index gets created once.
            es_storage.save_processed(crash_report)
            self.assertEqual(es_mock.create_index.call_count, 1)

            # A later date in an existing index triggers no new creation.
            crash_report['date_processed'] = '2013-01-10 10:56:41.558922'
            es_storage.save_processed(crash_report)

            self.assertEqual(es_mock.create_index.call_count, 1)
예제 #12
0
    def test_success(self):
        """A crash submitted over HTTP hits the expected URL and timeout."""
        fake_logger = mock.Mock()
        required_config = ElasticSearchCrashStorage.required_config
        required_config.add_option('logger', default=fake_logger)

        config_manager = ConfigurationManager(
            [required_config],
            app_name='testapp',
            app_version='1.0',
            app_description='app description',
            values_source_list=[{
                'logger': fake_logger,
                'submission_url': 'http://elasticsearch_host/%s',
            }],
        )

        with config_manager.context() as config:
            es_storage = ElasticSearchCrashStorage(config)
            patch_target = 'socorro.external.elasticsearch.crashstorage.urllib2'
            m_request = mock.Mock()
            m_urlopen = mock.Mock()
            with mock.patch(patch_target) as mocked_urllib:
                mocked_urllib.Request = m_request
                m_request.return_value = 17
                mocked_urllib.urlopen = m_urlopen

                es_storage.save_processed(a_processed_crash)

                # The crash uuid is interpolated into the submission URL.
                m_request.assert_called_with(
                    'http://elasticsearch_host/9120408936ce666-ff3b-4c7a-9674-'
                    '367fe2120408',
                    {},
                )
                # The default 2-second timeout is used.
                m_urlopen.assert_called_with(17, timeout=2)
예제 #13
0
    def test_success(self):
        """The storage posts the crash to the uuid-specific submission URL."""
        logger_mock = mock.Mock()
        required_config = ElasticSearchCrashStorage.required_config
        required_config.add_option('logger', default=logger_mock)

        overrides = {
            'logger': logger_mock,
            'submission_url': 'http://elasticsearch_host/%s',
        }
        config_manager = ConfigurationManager(
            [required_config],
            app_name='testapp',
            app_version='1.0',
            app_description='app description',
            values_source_list=[overrides],
        )

        with config_manager.context() as config:
            es_storage = ElasticSearchCrashStorage(config)
            urllib_path = 'socorro.external.elasticsearch.crashstorage.urllib2'
            request_mock = mock.Mock()
            urlopen_mock = mock.Mock()
            with mock.patch(urllib_path) as mocked_urllib:
                mocked_urllib.Request = request_mock
                request_mock.return_value = 17
                mocked_urllib.urlopen = urlopen_mock

                es_storage.save_processed(a_processed_crash)

                # Request targets the URL built from the crash uuid.
                request_mock.assert_called_with(
                    'http://elasticsearch_host/9120408936ce666-ff3b-4c7a-9674-'
                    '367fe2120408',
                    {},
                )
                # urlopen is invoked with the built request and the
                # default 2-second timeout.
                urlopen_mock.assert_called_with(17, timeout=2)
예제 #14
0
    def test_success(self, pyes_mock):
        """save_processed forwards the crash to ES index() as expected."""
        logger_mock = mock.Mock()
        es_mock = mock.Mock()
        pyes_mock.ElasticSearch.return_value = es_mock

        required_config = ElasticSearchCrashStorage.required_config
        required_config.add_option('logger', default=logger_mock)

        overrides = {
            'logger': logger_mock,
            'elasticsearch_urls': 'http://elasticsearch_host:9200',
        }
        config_manager = ConfigurationManager(
            [required_config],
            app_name='testapp',
            app_version='1.0',
            app_description='app description',
            values_source_list=[overrides],
        )

        with config_manager.context() as config:
            es_storage = ElasticSearchCrashStorage(config)
            es_storage.save_processed(a_processed_crash)

            # Index name follows the crash's processing week.
            es_mock.index.assert_called_with(
                'socorro201214',
                'crash_reports',
                a_processed_crash,
                replication='async',
                id=a_processed_crash['uuid'],
            )
예제 #15
0
    def setUp(self):
        """Populate elasticsearch with the crash fixtures the tests rely on.

        Creates a mix of valid, duplicate, out-of-window, and invalid
        crash reports plus a few pre-existing entries in the emails index,
        then refreshes elasticsearch so everything is searchable.
        """
        super(IntegrationTestAutomaticEmails, self).setUp()
        # prep a fake table
        now = utc_now() - datetime.timedelta(minutes=30)
        # Older than the job's scanning window; used for "too old" fixtures.
        last_month = now - datetime.timedelta(days=31)

        config_manager = self._setup_storage_config()
        with config_manager.context() as config:
            storage = ElasticSearchCrashStorage(config)
            # clear the indices cache so the index is created on every test
            storage.indices_cache = set()

            # WaterWolf crashes carrying a support classification
            # (unknown / None / bitguard).
            storage.save_processed({
                'uuid': '1',
                'email': '*****@*****.**',
                'product': 'WaterWolf',
                'version': '20.0',
                'release_channel': 'Release',
                'date_processed': now,
                'classifications': {
                    'support': {
                        'classification': 'unknown'
                    }
                }
            })
            storage.save_processed({
                'uuid': '2',
                'email': '"Quidam" <*****@*****.**>',
                'product': 'WaterWolf',
                'version': '20.0',
                'release_channel': 'Release',
                'date_processed': now,
                'classifications': {
                    'support': {
                        'classification': None
                    }
                }
            })
            storage.save_processed({
                'uuid': '3',
                'email': '*****@*****.**',
                'product': 'WaterWolf',
                'version': '20.0',
                'release_channel': 'Release',
                'date_processed': now,
                'classifications': {
                    'support': {
                        'classification': 'bitguard'
                    }
                }
            })
            # A batch of NightlyTrain crashes without classifications.
            storage.save_processed({
                'uuid': '4',
                'email': '*****@*****.**',
                'product': 'NightlyTrain',
                'version': '1.0',
                'release_channel': 'Nightly',
                'date_processed': now
            })
            storage.save_processed({
                'uuid': '5',
                'email': '*****@*****.**',
                'product': 'NightlyTrain',
                'version': '1.0',
                'release_channel': 'Nightly',
                'date_processed': now
            })
            storage.save_processed({
                'uuid': '6',
                'email': '*****@*****.**',
                'product': 'NightlyTrain',
                'version': '1.0',
                'release_channel': 'Nightly',
                'date_processed': now
            })
            storage.save_processed({
                'uuid': '7',
                'email': '*****@*****.**',
                'product': 'NightlyTrain',
                'version': '1.0',
                'release_channel': 'Nightly',
                'date_processed': now
            })
            storage.save_processed({
                'uuid': '8',
                'email': '*****@*****.**',
                'product': 'NightlyTrain',
                'version': '1.0',
                'release_channel': 'Nightly',
                'date_processed': now
            })
            # A crash for a different product (EarthRaccoon).
            storage.save_processed({
                'uuid': '9',
                'email': '*****@*****.**',
                'product': 'EarthRaccoon',
                'version': '1.0',
                'release_channel': 'Nightly',
                'date_processed': now
            })
            # Non-ASCII address to exercise encoding handling.
            storage.save_processed({
                'uuid': '18',
                'email': 'z\xc3\[email protected]',
                'product': 'WaterWolf',
                'version': '20.0',
                'release_channel': 'Release',
                'date_processed': now
            })

            # Let's insert a duplicate
            storage.save_processed({
                'uuid': '10',
                'email': '*****@*****.**',
                'product': 'WaterWolf',
                'version': '20.0',
                'release_channel': 'Release',
                'date_processed': now
            })

            # And let's insert some invalid crashes
            storage.save_processed({
                'uuid': '11',
                'email': None,
                'product': 'WaterWolf',
                'version': '20.0',
                'release_channel': 'Release',
                'date_processed': now
            })
            # Too old: processed outside the job's scanning window.
            storage.save_processed({
                'uuid': '12',
                'email': '*****@*****.**',
                'product': 'WaterWolf',
                'version': '20.0',
                'release_channel': 'Release',
                'date_processed': last_month
            })
            storage.save_processed({
                'uuid': '13',
                'email': '*****@*****.**',
                'product': 'WaterWolf',
                'version': '20.0',
                'release_channel': 'Release',
                'date_processed': now
            })
            # Product the job is not configured for.
            storage.save_processed({
                'uuid': '14',
                'email': '*****@*****.**',
                'product': 'WindBear',
                'version': '20.0',
                'release_channel': 'Release',
                'date_processed': now
            })

            # Finally some invalid email addresses
            storage.save_processed({
                'uuid': '15',
                'email': '     ',
                'product': 'WaterWolf',
                'version': '20.0',
                'release_channel': 'Release',
                'date_processed': now
            })
            storage.save_processed({
                'uuid': '16',
                'email': 'invalid@email',
                'product': 'WaterWolf',
                'version': '20.0',
                'release_channel': 'Release',
                'date_processed': now
            })
            storage.save_processed({
                'uuid': '17',
                'email': 'i.do.not.work',
                'product': 'WaterWolf',
                'version': '20.0',
                'release_channel': 'Release',
                'date_processed': now
            })

            # Create some email addresses.
            storage.create_emails_index()
            # Recipient last contacted a month ago: eligible again.
            storage.es.index(
                index=config.elasticsearch_emails_index,
                doc_type='emails',
                doc={
                    'email': '*****@*****.**',
                    'last_sending': last_month
                },
                id='*****@*****.**',
            )
            storage.es.index(
                index=config.elasticsearch_emails_index,
                doc_type='emails',
                doc={
                    'email': '"Quidam" <*****@*****.**>',
                    'last_sending': last_month
                },
                id='"Quidam" <*****@*****.**>',
            )
            # Recipient contacted just now: should be skipped by the job.
            storage.es.index(
                index=config.elasticsearch_emails_index,
                doc_type='emails',
                doc={
                    'email': '*****@*****.**',
                    'last_sending': now
                },
                id='*****@*****.**',
            )

            # As indexing is asynchronous, we need to force elasticsearch to
            # make the newly created content searchable before we run the
            # tests.
            storage.es.refresh()
예제 #16
0
    def setUp(self):
        """Index a fixed set of processed crashes and known email addresses
        into elasticsearch so every test starts from the same data."""
        super(IntegrationTestAutomaticEmails, self).setUp()
        # prep a fake table
        now = utc_now() - datetime.timedelta(minutes=30)
        last_month = now - datetime.timedelta(days=31)

        config_manager = self._setup_storage_config()
        with config_manager.context() as config:
            storage = ElasticSearchCrashStorage(config)
            # clear the indices cache so the index is created on every test
            storage.indices_cache = set()

            # Sentinel marking "no classifications key at all" — distinct
            # from an explicit classification of None (see crash '2').
            OMIT = object()

            def add_crash(uuid, email, product='WaterWolf', version='20.0',
                          channel='Release', processed=now,
                          classification=OMIT):
                # Build and index one processed crash from the given fields.
                crash = {
                    'uuid': uuid,
                    'email': email,
                    'product': product,
                    'version': version,
                    'release_channel': channel,
                    'date_processed': processed,
                }
                if classification is not OMIT:
                    crash['classifications'] = {
                        'support': {
                            'classification': classification
                        }
                    }
                storage.save_processed(crash)

            add_crash('1', '*****@*****.**', classification='unknown')
            add_crash('2', '"Quidam" <*****@*****.**>', classification=None)
            add_crash('3', '*****@*****.**', classification='bitguard')
            add_crash('4', '*****@*****.**', product='NightlyTrain',
                      version='1.0', channel='Nightly')
            add_crash('5', '*****@*****.**', product='NightlyTrain',
                      version='1.0', channel='Nightly')
            add_crash('6', '*****@*****.**', product='NightlyTrain',
                      version='1.0', channel='Nightly')
            add_crash('7', '*****@*****.**', product='NightlyTrain',
                      version='1.0', channel='Nightly')
            add_crash('8', '*****@*****.**', product='NightlyTrain',
                      version='1.0', channel='Nightly')
            add_crash('9', '*****@*****.**', product='EarthRaccoon',
                      version='1.0', channel='Nightly')
            add_crash('18', 'z\xc3\[email protected]')

            # Let's insert a duplicate
            add_crash('10', '*****@*****.**')

            # And let's insert some invalid crashes
            add_crash('11', None)
            add_crash('12', '*****@*****.**', processed=last_month)
            add_crash('13', '*****@*****.**')
            add_crash('14', '*****@*****.**', product='WindBear')

            # Finally some invalid email addresses
            add_crash('15', '     ')
            add_crash('16', 'invalid@email')
            add_crash('17', 'i.do.not.work')

            # Create some email addresses.
            storage.create_emails_index()
            known_emails = (
                ('*****@*****.**', last_month),
                ('"Quidam" <*****@*****.**>', last_month),
                ('*****@*****.**', now),
            )
            for address, sending_date in known_emails:
                storage.es.index(
                    index=config.elasticsearch_emails_index,
                    doc_type='emails',
                    doc={
                        'email': address,
                        'last_sending': sending_date
                    },
                    id=address,
                )

            # As indexing is asynchronous, we need to force elasticsearch to
            # make the newly created content searchable before we run the
            # tests.
            storage.es.refresh()
# ----- 예제 #17 (score: 0) -----
    def test_email_after_delay(self, exacttarget_mock):
        """Test that a user will receive an email if he or she sends us a new
        crash report after the delay is passed (but not before). """
        config_manager = self._setup_config_manager(
            delay_between_emails=1, restrict_products=['EarthRaccoon'])
        email = '*****@*****.**'
        list_service_mock = exacttarget_mock.return_value.list.return_value
        list_service_mock.get_subscriber.return_value = {'token': email}
        trigger_send_mock = exacttarget_mock.return_value.trigger_send
        tomorrow = utc_now() + datetime.timedelta(days=1, hours=2)
        twohourslater = utc_now() + datetime.timedelta(hours=2)

        storage_config_manager = self._setup_storage_config()
        with storage_config_manager.context() as storage_config:
            storage = ElasticSearchCrashStorage(storage_config)

        with config_manager.context() as config:
            tab = CronTabber(config)

            def check_job_ran_cleanly():
                # The job must be registered and its latest run a success.
                state = self._load_structure()
                assert state['automatic-emails']
                assert not state['automatic-emails']['last_error']
                assert state['automatic-emails']['last_success']

            def run_crontabber_at(fake_now):
                # Run all jobs while crontabber believes it is `fake_now`.
                with mock.patch('crontabber.app.utc_now') as cronutc_mock:
                    with mock.patch('crontabber.base.utc_now') as baseutc_mock:
                        cronutc_mock.return_value = fake_now
                        baseutc_mock.return_value = fake_now
                        tab.run_all()

            def add_crash_report(uuid, date_processed):
                # Index one new crash for our user and make it searchable.
                storage.save_processed({
                    'uuid': uuid,
                    'email': email,
                    'product': 'EarthRaccoon',
                    'version': '20.0',
                    'release_channel': 'Release',
                    'date_processed': date_processed
                })
                storage.es.refresh()

            # 1. Send an email to the user and update emailing data
            tab.run_all()
            check_job_ran_cleanly()

            exacttarget_mock.return_value.trigger_send.assert_called_with(
                'socorro_dev_test', {
                    'EMAIL_ADDRESS_': email,
                    'EMAIL_FORMAT_': 'H',
                    'TOKEN': email
                })
            eq_(trigger_send_mock.call_count, 1)

            # 2. Test that before 'delay' is passed user doesn't receive
            # another email

            # Insert a new crash report with the same email address
            add_crash_report('50', utc_now() + datetime.timedelta(hours=1))

            # Run crontabber with time pushed by two hours
            run_crontabber_at(twohourslater)
            check_job_ran_cleanly()

            # No new email was sent
            eq_(trigger_send_mock.call_count, 1)

            # 3. Verify that, after 'delay' is passed, a new email is sent
            # to our user

            # Insert a new crash report with the same email address
            add_crash_report('51', utc_now() + datetime.timedelta(days=1))

            # Run crontabber with time pushed by a day
            run_crontabber_at(tomorrow)
            check_job_ran_cleanly()

            # A new email was sent
            eq_(trigger_send_mock.call_count, 2)
# ----- 예제 #18 (score: 0) -----
    def test_email_after_delay(self, exacttarget_mock):
        """Test that a user will receive an email if he or she sends us a new
        crash report after the delay is passed (but not before). """
        config_manager = self._setup_config_manager(
            delay_between_emails=1,
            restrict_products=['EarthRaccoon']
        )
        email = '*****@*****.**'
        list_service_mock = exacttarget_mock.return_value.list.return_value
        list_service_mock.get_subscriber.return_value = {
            'token': email
        }
        trigger_send_mock = exacttarget_mock.return_value.trigger_send
        tomorrow = utc_now() + datetime.timedelta(days=1, hours=2)
        twohourslater = utc_now() + datetime.timedelta(hours=2)

        storage_config_manager = self._setup_storage_config()
        with storage_config_manager.context() as storage_config:
            storage = ElasticSearchCrashStorage(storage_config)

        with config_manager.context() as config:
            tab = crontabber.CronTabber(config)

            def assert_job_succeeded():
                # Latest crontabber run of our job must be a clean success.
                info = self._load_structure()
                assert info['automatic-emails']
                assert not info['automatic-emails']['last_error']
                assert info['automatic-emails']['last_success']

            def run_all_at(moment):
                # Freeze crontabber's clock at `moment` for this run.
                with mock.patch('socorro.cron.crontabber.utc_now') as utc1:
                    with mock.patch('socorro.cron.base.utc_now') as utc2:
                        utc1.return_value = moment
                        utc2.return_value = moment
                        tab.run_all()

            def new_crash(uuid, when):
                # Index another crash for the same user, then make it
                # searchable.
                storage.save_processed({
                    'uuid': uuid,
                    'email': email,
                    'product': 'EarthRaccoon',
                    'version': '20.0',
                    'release_channel': 'Release',
                    'date_processed': when
                })
                storage.es.refresh()

            # 1. Send an email to the user and update emailing data
            tab.run_all()
            assert_job_succeeded()

            exacttarget_mock.return_value.trigger_send.assert_called_with(
                'socorro_dev_test',
                {
                    'EMAIL_ADDRESS_': email,
                    'EMAIL_FORMAT_': 'H',
                    'TOKEN': email
                }
            )
            self.assertEqual(trigger_send_mock.call_count, 1)

            # 2. Test that before 'delay' is passed user doesn't receive
            # another email

            # Insert a new crash report with the same email address
            new_crash('50', utc_now() + datetime.timedelta(hours=1))

            # Run crontabber with time pushed by two hours
            run_all_at(twohourslater)
            assert_job_succeeded()

            # No new email was sent
            self.assertEqual(trigger_send_mock.call_count, 1)

            # 3. Verify that, after 'delay' is passed, a new email is sent
            # to our user

            # Insert a new crash report with the same email address
            new_crash('51', utc_now() + datetime.timedelta(days=1))

            # Run crontabber with time pushed by a day
            run_all_at(tomorrow)
            assert_job_succeeded()

            # A new email was sent
            self.assertEqual(trigger_send_mock.call_count, 2)