Example #1
    def test_query_and_upload_to_s3(self, mock_uploader):
        """
        Assert query_and_upload_to_s3 uploads to S3 for each query.

        We only have test data reliably set for AWS, but this function should
        still execute *all* of the table_export_settings queries, effectively
        providing a syntax check on the SQL even if no results are found.
        """
        today = self.today
        _, last_day_of_month = calendar.monthrange(today.year, today.month)
        curr_month_first_day = date(year=today.year, month=today.month, day=1)
        curr_month_last_day = date(year=today.year, month=today.month, day=last_day_of_month)

        date_range = (curr_month_first_day, curr_month_last_day)
        for table_export_setting in tasks.table_export_settings:
            mock_uploader.reset_mock()
            tasks.query_and_upload_to_s3(
                self.schema,
                self.aws_provider_uuid,
                dictify_table_export_settings(table_export_setting),
                date_range[0],
                date_range[1],
            )
            if table_export_setting.provider == "aws":
                if table_export_setting.iterate_daily:
                    # There are always TWO days of AWS test data.
                    calls = mock_uploader.return_value.upload_file.call_args_list
                    self.assertEqual(len(calls), 2)
                else:
                    # There is always only ONE month of AWS test data.
                    mock_uploader.return_value.upload_file.assert_called_once()
            else:
                # We ONLY have test data currently for AWS.
                mock_uploader.return_value.upload_file.assert_not_called()
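In each of these examples, mock_uploader is supplied by a mock patch applied to the test method. A minimal sketch of the assumed wiring follows; the patch target "masu.celery.tasks.AwsS3Uploader" and the class name are illustrative assumptions, and the project's real base test class is what provides fixtures such as self.schema and self.aws_provider_uuid.

import unittest
from unittest.mock import patch

class QueryAndUploadTest(unittest.TestCase):  # hypothetical class name
    # Assumed patch target; the real module path in the project may differ.
    @patch("masu.celery.tasks.AwsS3Uploader")
    def test_query_and_upload_to_s3(self, mock_uploader):
        ...  # body as shown in Example #1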
Example #2
    def test_query_and_upload_skips_if_no_data(self, mock_uploader):
        """Assert query_and_upload_to_s3 uploads nothing if no data is found."""
        table_export_setting = self.get_table_export_setting_by_name("reporting_awscostentrylineitem")
        tasks.query_and_upload_to_s3(
            self.schema,
            self.aws_provider_uuid,
            dictify_table_export_settings(table_export_setting),
            start_date=self.future_date,
            end_date=self.future_date,
        )
        mock_uploader.return_value.upload_file.assert_not_called()
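dictify_table_export_settings converts a table export setting into a plain dict so it can be passed through the Celery task signature. A rough sketch of such a helper, assuming the settings are namedtuples; the field names here are illustrative, not taken from the project.

from collections import namedtuple

# Illustrative fields only; the real TableExportSetting may define others.
TableExportSetting = namedtuple("TableExportSetting", ["provider", "output_name", "iterate_daily", "sql"])

def dictify_table_export_settings(table_export_setting):
    # _asdict() turns the namedtuple into a plain mapping that Celery can serialize.
    return dict(table_export_setting._asdict())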
Example #3
    def test_query_and_upload_to_s3_multiple_days_multiple_rows(self, mock_uploader):
        """Assert query_and_upload_to_s3 for multiple days uploads multiple files."""
        table_export_setting = self.get_table_export_setting_by_name("reporting_awscostentrylineitem_daily_summary")
        tasks.query_and_upload_to_s3(
            self.schema,
            self.aws_provider_uuid,
            dictify_table_export_settings(table_export_setting),
            start_date=self.yesterday_date,
            end_date=self.today_date,
        )
        # Expect one upload call for yesterday and one for today.
        self.assertEqual(mock_uploader.return_value.upload_file.call_count, 2)
Example #4
    def test_query_and_upload_to_s3_archiving_false(self):
        """Assert query_and_upload_to_s3 not run."""
        today = self.today
        _, last_day_of_month = calendar.monthrange(today.year, today.month)
        curr_month_first_day = date(year=today.year, month=today.month, day=1)
        curr_month_last_day = date(year=today.year, month=today.month, day=last_day_of_month)

        date_range = (curr_month_first_day, curr_month_last_day)
        for table_export_setting in tasks.table_export_settings:
            with self.assertLogs("masu.celery.tasks", "INFO") as captured_logs:
                tasks.query_and_upload_to_s3(
                    self.schema,
                    self.aws_provider_uuid,
                    dictify_table_export_settings(table_export_setting),
                    date_range[0],
                    date_range[1],
                )
                self.assertIn("S3 Archiving is disabled. Not running task.", captured_logs.output[0])
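Example #4 exercises the early-return path taken when S3 archiving is disabled, so this test presumably turns the relevant flag off before calling the task. A hedged sketch using Django's override_settings; the setting name ENABLE_S3_ARCHIVING is an assumption, not a confirmed name from the project.

from django.test import override_settings

# ENABLE_S3_ARCHIVING is an assumed flag name; the project may gate archiving differently.
@override_settings(ENABLE_S3_ARCHIVING=False)
def test_query_and_upload_to_s3_archiving_false(self):
    ...  # body as shown in Example #4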