Example #1
    def _generate_ocp_on_aws_data(self):
        """Test that the OCP on AWS cost summary table is populated."""
        creator = ReportObjectCreator(self.schema, self.column_map)

        bill_ids = []

        today = DateAccessor().today_with_timezone('UTC')
        last_month = today - relativedelta.relativedelta(months=1)
        resource_id = 'i-12345'

        for cost_entry_date in (today, last_month):
            bill = creator.create_cost_entry_bill(provider_id=self.aws_provider.id, bill_date=cost_entry_date)
            bill_ids.append(str(bill.id))
            cost_entry = creator.create_cost_entry(bill, cost_entry_date)
            product = creator.create_cost_entry_product('Compute Instance')
            pricing = creator.create_cost_entry_pricing()
            reservation = creator.create_cost_entry_reservation()
            creator.create_cost_entry_line_item(
                bill,
                cost_entry,
                product,
                pricing,
                reservation,
                resource_id=resource_id
            )

        # Roll the raw AWS line items up into the daily table for the test window.
        with AWSReportDBAccessor(self.schema, self.column_map) as aws_accessor:
            aws_accessor.populate_line_item_daily_table(last_month.date(), today.date(), bill_ids)

        cluster_id = self.ocp_provider_resource_name
        provider_id = self.ocp_provider.id

        for cost_entry_date in (today, last_month):
            period = creator.create_ocp_report_period(cost_entry_date, provider_id=provider_id, cluster_id=cluster_id)
            report = creator.create_ocp_report(period, cost_entry_date)
            creator.create_ocp_usage_line_item(
                period,
                report,
                resource_id=resource_id
            )
        cluster_id = get_cluster_id_from_provider(self.ocp_test_provider_uuid)
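        # Roll the OCP usage up into its daily table for the same date window.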
        with OCPReportDBAccessor(self.schema, self.column_map) as ocp_accessor:
            ocp_accessor.populate_line_item_daily_table(last_month.date(), today.date(), cluster_id)
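
A note on Example #1: the same resource_id is written into both the AWS line item and the OCP usage line item, which is what allows the two data sets to be correlated, and relativedelta(months=1) steps back one calendar month rather than a fixed number of days. A minimal, self-contained sketch of that relativedelta behaviour (the dates below are illustrative only):

from datetime import datetime, timedelta

from dateutil import relativedelta

today = datetime(2019, 3, 31)
# relativedelta subtracts one calendar month and clamps to the last valid day
print(today - relativedelta.relativedelta(months=1))  # 2019-02-28 00:00:00
# a fixed 30-day timedelta does not track month boundaries
print(today - timedelta(days=30))                     # 2019-03-01 00:00:00
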
Example #2
class TestUploadUtilsWithData(MasuTestCase):
    """Test cases for upload utils that need some data."""

    def setUp(self):
        """Set up initial data for tests."""
        super(TestUploadUtilsWithData, self).setUp()

        with ReportingCommonDBAccessor(self.schema) as common_accessor:
            self.column_map = common_accessor.column_map
        self.creator = ReportObjectCreator(self.schema, self.column_map)

        self.today = DateAccessor().today_with_timezone('UTC')
        self.today_date = date(
            year=self.today.year, month=self.today.month, day=self.today.day
        )
        self.create_some_data_for_date(self.today)

        self.yesterday = self.today - timedelta(days=1)
        self.yesterday_date = date(
            year=self.yesterday.year, month=self.yesterday.month, day=self.yesterday.day
        )
        self.create_some_data_for_date(self.yesterday)

        # A date far in the future, guaranteed to have no matching test data.
        self.future = self.today + timedelta(days=900)
        self.future_date = date(
            year=self.future.year, month=self.future.month, day=self.future.day
        )

    def create_some_data_for_date(self, the_datetime):
        """Create some dummy data for the given datetime."""
        product = self.creator.create_cost_entry_product()
        pricing = self.creator.create_cost_entry_pricing()
        reservation = self.creator.create_cost_entry_reservation()

        bill = self.creator.create_cost_entry_bill(
            provider_id=self.aws_provider.id, bill_date=the_datetime
        )
        cost_entry = self.creator.create_cost_entry(bill, entry_datetime=the_datetime)
        self.creator.create_cost_entry_line_item(
            bill, cost_entry, product, pricing, reservation
        )

        # The daily summary lines are aligned with midnight of each day.
        the_date = the_datetime.replace(hour=0, minute=0, second=0, microsecond=0)
        self.creator.create_awscostentrylineitem_daily_summary(
            self.customer.account_id, self.schema, bill, the_date
        )

    def get_table_export_setting_by_name(self, name):
        """Helper to get specific TableExportSetting for testing."""
        return [s for s in table_export_settings if s.output_name == name].pop()

    @patch('masu.util.upload.AwsS3Uploader')
    def test_query_and_upload_to_s3(self, mock_uploader):
        """Assert query_and_upload_to_s3 uploads to S3 with one file."""
        today = self.today
        _, last_day_of_month = calendar.monthrange(today.year, today.month)
        curr_month_first_day = date(year=today.year, month=today.month, day=1)
        curr_month_last_day = date(
            year=today.year, month=today.month, day=last_day_of_month
        )

        date_range = (curr_month_first_day, curr_month_last_day)
        table_export_setting = self.get_table_export_setting_by_name(
            'reporting_awscostentrylineitem'
        )
        query_and_upload_to_s3(self.schema, table_export_setting, date_range)
        mock_uploader.return_value.upload_file.assert_called_once()

    @patch('masu.util.upload.AwsS3Uploader')
    def test_query_and_upload_skips_if_no_data(self, mock_uploader):
        """Assert query_and_upload_to_s3 uploads nothing if no data is found."""
        date_range = (self.future_date, self.future_date)
        table_export_setting = self.get_table_export_setting_by_name(
            'reporting_awscostentrylineitem'
        )
        query_and_upload_to_s3(self.schema, table_export_setting, date_range)
        mock_uploader.return_value.upload_file.assert_not_called()

    @patch('masu.util.upload.AwsS3Uploader')
    def test_query_and_upload_to_s3_multiple_days_multiple_rows(self, mock_uploader):
        """Assert query_and_upload_to_s3 for multiple days uploads multiple files."""
        date_range = (self.yesterday_date, self.today_date)
        table_export_setting = self.get_table_export_setting_by_name(
            'reporting_awscostentrylineitem_daily_summary'
        )
        query_and_upload_to_s3(self.schema, table_export_setting, date_range)
        # expect one upload call for yesterday and one for today
        self.assertEqual(mock_uploader.return_value.upload_file.call_count, 2)
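
A note on the mocking pattern used throughout these tests: @patch('masu.util.upload.AwsS3Uploader') replaces the class with a MagicMock at the point where the code under test looks it up, and every instance that code constructs is the shared mock_uploader.return_value, so uploads can be counted without touching S3. A self-contained sketch of that mechanism (FakeUploader and export are illustrative names, not masu code):

from unittest.mock import patch


class FakeUploader:
    """Stand-in for a real uploader class (hypothetical, for illustration only)."""

    def upload_file(self, path):
        raise RuntimeError('would hit S3')


def export(files):
    uploader = FakeUploader()  # under patch, this returns the shared mock instance
    for path in files:
        uploader.upload_file(path)


with patch('__main__.FakeUploader') as mock_uploader:
    export(['a.csv', 'b.csv'])
    # every instance call is recorded on return_value, exactly as asserted above
    assert mock_uploader.return_value.upload_file.call_count == 2
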
Example #3
class TestUploadTaskWithData(MasuTestCase):
    """Test cases for upload utils that need some data."""

    def setUp(self):
        """Set up initial data for tests."""
        super().setUp()

        self.creator = ReportObjectCreator(self.schema)

        timezone = pytz.timezone("UTC")
        # Arbitrary date as "today" so we don't drift around with `now`.
        self.today = datetime(2019, 11, 5, 0, 0, 0, tzinfo=timezone)

        self.today_date = date(year=self.today.year, month=self.today.month, day=self.today.day)
        self.create_some_data_for_date(self.today)

        self.yesterday = self.today - timedelta(days=1)
        self.yesterday_date = date(year=self.yesterday.year, month=self.yesterday.month, day=self.yesterday.day)
        self.create_some_data_for_date(self.yesterday)

        self.future = self.today + timedelta(days=900)
        self.future_date = date(year=self.future.year, month=self.future.month, day=self.future.day)

    def create_some_data_for_date(self, the_datetime):
        """Create some dummy data for the given datetime."""
        product = self.creator.create_cost_entry_product()
        pricing = self.creator.create_cost_entry_pricing()
        reservation = self.creator.create_cost_entry_reservation()

        bill = self.creator.create_cost_entry_bill(provider_uuid=self.aws_provider_uuid, bill_date=the_datetime)
        cost_entry = self.creator.create_cost_entry(bill, entry_datetime=the_datetime)
        self.creator.create_cost_entry_line_item(bill, cost_entry, product, pricing, reservation)

        # The daily summary lines are aligned with midnight of each day.
        the_date = the_datetime.replace(hour=0, minute=0, second=0, microsecond=0)
        self.creator.create_awscostentrylineitem_daily_summary(self.customer.account_id, self.schema, bill, the_date)

    def get_table_export_setting_by_name(self, name):
        """Get specific TableExportSetting for testing."""
        return [s for s in tasks.table_export_settings if s.output_name == name].pop()

    @override_settings(ENABLE_S3_ARCHIVING=True)
    @patch("masu.celery.tasks.AwsS3Uploader")
    def test_query_and_upload_to_s3(self, mock_uploader):
        """
        Assert query_and_upload_to_s3 uploads to S3 for each query.

        We only have test data reliably set for AWS, but this function should
        still execute *all* of the table_export_settings queries, effectively
        providing a syntax check on the SQL even if no results are found.
        """
        today = self.today
        _, last_day_of_month = calendar.monthrange(today.year, today.month)
        curr_month_first_day = date(year=today.year, month=today.month, day=1)
        curr_month_last_day = date(year=today.year, month=today.month, day=last_day_of_month)

        date_range = (curr_month_first_day, curr_month_last_day)
        for table_export_setting in tasks.table_export_settings:
            mock_uploader.reset_mock()
            tasks.query_and_upload_to_s3(
                self.schema,
                self.aws_provider_uuid,
                dictify_table_export_settings(table_export_setting),
                date_range[0],
                date_range[1],
            )
            if table_export_setting.provider == "aws":
                if table_export_setting.iterate_daily:
                    # There are always TWO days of AWS test data.
                    calls = mock_uploader.return_value.upload_file.call_args_list
                    self.assertEqual(len(calls), 2)
                else:
                    # There is always only ONE month of AWS test data.
                    mock_uploader.return_value.upload_file.assert_called_once()
            else:
                # We ONLY have test data currently for AWS.
                mock_uploader.return_value.upload_file.assert_not_called()

    @override_settings(ENABLE_S3_ARCHIVING=False)
    def test_query_and_upload_to_s3_archiving_false(self):
        """Assert query_and_upload_to_s3 not run."""
        today = self.today
        _, last_day_of_month = calendar.monthrange(today.year, today.month)
        curr_month_first_day = date(year=today.year, month=today.month, day=1)
        curr_month_last_day = date(year=today.year, month=today.month, day=last_day_of_month)

        date_range = (curr_month_first_day, curr_month_last_day)
        for table_export_setting in tasks.table_export_settings:
            with self.assertLogs("masu.celery.tasks", "INFO") as captured_logs:
                tasks.query_and_upload_to_s3(
                    self.schema,
                    self.aws_provider_uuid,
                    dictify_table_export_settings(table_export_setting),
                    date_range[0],
                    date_range[1],
                )
                self.assertIn("S3 Archiving is disabled. Not running task.", captured_logs.output[0])

    @override_settings(ENABLE_S3_ARCHIVING=True)
    @patch("masu.celery.tasks.AwsS3Uploader")
    def test_query_and_upload_skips_if_no_data(self, mock_uploader):
        """Assert query_and_upload_to_s3 uploads nothing if no data is found."""
        table_export_setting = self.get_table_export_setting_by_name("reporting_awscostentrylineitem")
        tasks.query_and_upload_to_s3(
            self.schema,
            self.aws_provider_uuid,
            dictify_table_export_settings(table_export_setting),
            start_date=self.future_date,
            end_date=self.future_date,
        )
        mock_uploader.return_value.upload_file.assert_not_called()

    @override_settings(ENABLE_S3_ARCHIVING=True)
    @patch("masu.celery.tasks.AwsS3Uploader")
    def test_query_and_upload_to_s3_multiple_days_multiple_rows(self, mock_uploader):
        """Assert query_and_upload_to_s3 for multiple days uploads multiple files."""
        table_export_setting = self.get_table_export_setting_by_name("reporting_awscostentrylineitem_daily_summary")
        tasks.query_and_upload_to_s3(
            self.schema,
            self.aws_provider_uuid,
            dictify_table_export_settings(table_export_setting),
            start_date=self.yesterday_date,
            end_date=self.today_date,
        )
        # expect one upload call for yesterday and one for today
        self.assertEqual(mock_uploader.return_value.upload_file.call_count, 2)
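
The main structural difference from the utility-level tests is that tasks.query_and_upload_to_s3 is a Celery task, so the TableExportSetting is converted with dictify_table_export_settings before being passed in, presumably so the argument is serializable. The masu helper itself is not shown here; assuming TableExportSetting is a namedtuple, a round-trip pair could be sketched roughly like this (the field list is an assumption for illustration):

from collections import namedtuple

# Assumed shape of the setting; the real masu definition may carry other fields.
TableExportSetting = namedtuple(
    "TableExportSetting", ["provider", "output_name", "iterate_daily", "sql"]
)


def dictify_table_export_settings(setting):
    # A namedtuple is not directly serializable as a task argument; a plain dict is.
    return dict(setting._asdict())


def undictify_table_export_settings(data):
    return TableExportSetting(**data)
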
Example #4
class TestUploadUtilsWithData(MasuTestCase):
    """Test cases for upload utils that need some data."""
    def setUp(self):
        """Set up initial data for tests."""
        super(TestUploadUtilsWithData, self).setUp()

        with ReportingCommonDBAccessor(self.schema) as common_accessor:
            self.column_map = common_accessor.column_map
        self.creator = ReportObjectCreator(self.schema, self.column_map)

        timezone = pytz.timezone('UTC')
        # Arbitrary date as "today" so we don't drift around with `now`.
        self.today = datetime(2019, 11, 5, 0, 0, 0, tzinfo=timezone)

        self.today_date = date(year=self.today.year,
                               month=self.today.month,
                               day=self.today.day)
        self.create_some_data_for_date(self.today)

        self.yesterday = self.today - timedelta(days=1)
        self.yesterday_date = date(year=self.yesterday.year,
                                   month=self.yesterday.month,
                                   day=self.yesterday.day)
        self.create_some_data_for_date(self.yesterday)

        self.future = self.today + timedelta(days=900)
        self.future_date = date(year=self.future.year,
                                month=self.future.month,
                                day=self.future.day)

    def create_some_data_for_date(self, the_datetime):
        """Create some dummy data for the given datetime."""
        product = self.creator.create_cost_entry_product()
        pricing = self.creator.create_cost_entry_pricing()
        reservation = self.creator.create_cost_entry_reservation()

        bill = self.creator.create_cost_entry_bill(
            provider_uuid=self.aws_provider_uuid, bill_date=the_datetime)
        cost_entry = self.creator.create_cost_entry(
            bill, entry_datetime=the_datetime)
        self.creator.create_cost_entry_line_item(bill, cost_entry, product,
                                                 pricing, reservation)

        # The daily summary lines are aligned with midnight of each day.
        the_date = the_datetime.replace(hour=0,
                                        minute=0,
                                        second=0,
                                        microsecond=0)
        self.creator.create_awscostentrylineitem_daily_summary(
            self.customer.account_id, self.schema, bill, the_date)

    def get_table_export_setting_by_name(self, name):
        """Helper to get specific TableExportSetting for testing."""
        return [s for s in table_export_settings
                if s.output_name == name].pop()

    @patch('masu.util.upload.AwsS3Uploader')
    def test_query_and_upload_to_s3(self, mock_uploader):
        """
        Assert query_and_upload_to_s3 uploads to S3 for each query.

        We only have test data reliably set for AWS, but this function should
        still execute *all* of the table_export_settings queries, effectively
        providing a syntax check on the SQL even if no results are found.
        """
        today = self.today
        _, last_day_of_month = calendar.monthrange(today.year, today.month)
        curr_month_first_day = date(year=today.year, month=today.month, day=1)
        curr_month_last_day = date(year=today.year,
                                   month=today.month,
                                   day=last_day_of_month)

        date_range = (curr_month_first_day, curr_month_last_day)
        for table_export_setting in table_export_settings:
            mock_uploader.reset_mock()
            query_and_upload_to_s3(self.schema, self.aws_provider_uuid,
                                   table_export_setting, date_range)
            if table_export_setting.provider == 'aws':
                if table_export_setting.iterate_daily:
                    # There are always TWO days of AWS test data.
                    calls = mock_uploader.return_value.upload_file.call_args_list
                    self.assertEqual(len(calls), 2)
                else:
                    # There is always only ONE month of AWS test data.
                    mock_uploader.return_value.upload_file.assert_called_once()
            else:
                # We ONLY have test data currently for AWS.
                mock_uploader.return_value.upload_file.assert_not_called()

    @patch('masu.util.upload.AwsS3Uploader')
    def test_query_and_upload_skips_if_no_data(self, mock_uploader):
        """Assert query_and_upload_to_s3 uploads nothing if no data is found."""
        date_range = (self.future_date, self.future_date)
        table_export_setting = self.get_table_export_setting_by_name(
            'reporting_awscostentrylineitem')
        query_and_upload_to_s3(self.schema, self.aws_provider_uuid,
                               table_export_setting, date_range)
        mock_uploader.return_value.upload_file.assert_not_called()

    @patch('masu.util.upload.AwsS3Uploader')
    def test_query_and_upload_to_s3_multiple_days_multiple_rows(
            self, mock_uploader):
        """Assert query_and_upload_to_s3 for multiple days uploads multiple files."""
        date_range = (self.yesterday_date, self.today_date)
        table_export_setting = self.get_table_export_setting_by_name(
            'reporting_awscostentrylineitem_daily_summary')
        query_and_upload_to_s3(self.schema, self.aws_provider_uuid,
                               table_export_setting, date_range)
        # expect one upload call for yesterday and one for today
        self.assertEqual(mock_uploader.return_value.upload_file.call_count, 2)
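
All of these test classes build the current-month window the same way: calendar.monthrange(year, month) returns a (weekday_of_first_day, days_in_month) tuple, and only the second element is needed to find the last calendar day. A standalone sketch of that window construction (month_bounds is an illustrative helper, not part of masu):

import calendar
from datetime import date


def month_bounds(day):
    """Return (first_day, last_day) of the month containing `day`."""
    _, days_in_month = calendar.monthrange(day.year, day.month)
    return date(day.year, day.month, 1), date(day.year, day.month, days_in_month)


print(month_bounds(date(2019, 11, 5)))
# (datetime.date(2019, 11, 1), datetime.date(2019, 11, 30))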