Example #1
0
    def test_single_rhel_run_result(self, mock_schedule_concurrent_calc):
        """Test with a single RHEL instance run within the day."""
        instance = api_helper.generate_instance(
            self.user1account1, image=self.image_rhel
        )
        run_window = (
            util_helper.utc_dt(2019, 5, 1, 1, 0, 0),
            util_helper.utc_dt(2019, 5, 1, 2, 0, 0),
        )
        api_helper.generate_single_run(
            instance,
            run_window,
            image=instance.machine_image,
            calculate_concurrent_usage=False,
        )
        request_date = datetime.date(2019, 5, 1)

        # No usage rows may exist before the calculation has been performed.
        usage_queryset = models.ConcurrentUsage.objects.filter(
            date=request_date, user_id=self.user1.id
        )
        self.assertEqual(usage_queryset.count(), 0)

        # Reading before calculating must signal that results are pending.
        with self.assertRaises(ResultsUnavailable):
            get_max_concurrent_usage(request_date, user_id=self.user1.id)

        calculate_max_concurrent_usage(request_date, self.user1.id)
        concurrent_usage = models.ConcurrentUsage.objects.get(
            date=request_date, user_id=self.user1.id
        )

        # Exactly one usage row now exists for the requested date.
        self.assertEqual(usage_queryset.count(), 1)
        self.assertEqual(concurrent_usage.date, request_date)

        # One maximum-count entry per hour of the day, each seeing one instance.
        self.assertEqual(len(concurrent_usage.maximum_counts), 24)
        for hourly_counts in concurrent_usage.maximum_counts:
            self.assertEqual(hourly_counts["instances_count"], 1)
Example #2
0
    def test_process_instance_event_new_run(self, mock_recalculate_runs,
                                            mock_schedule_concurrent_task):
        """
        Test new run is created if it occurred after all runs and is power on.

        account.util.recalculate_runs should not be run in this case.

        Initial Runs (2,5):
            [ ####          ]

        New power on event at (10,) results in 2 runs (2,5) (10,-):
            [ ####    #-----]

        """
        # Run the concurrency calculation synchronously instead of via a task.
        mock_schedule_concurrent_task.side_effect = calculate_max_concurrent_usage

        instance = api_helper.generate_instance(self.account)
        api_helper.generate_single_run(
            instance,
            (
                util_helper.utc_dt(2018, 1, 2, 0, 0, 0),
                util_helper.utc_dt(2018, 1, 5, 0, 0, 0),
            ),
        )

        power_on_event = api_helper.generate_single_instance_event(
            instance=instance,
            occurred_at=util_helper.utc_dt(2018, 1, 10, 0, 0, 0),
            event_type=InstanceEvent.TYPE.power_on,
            instance_type=None,
        )
        tasks.process_instance_event(power_on_event)

        # The trailing power-on opened a second run.
        self.assertEqual(2, len(list(Run.objects.all())))

        # Since we're adding a new run, recalculate_runs shouldn't be called
        mock_recalculate_runs.assert_not_called()
Example #3
0
    def test_single_not_rhel_run_within_day(self):
        """
        Test with a not-RHEL instance run within the day.

        This instance should have zero effect on max calculations.
        """
        # Deliberately use the plain (non-RHEL) image for this instance.
        plain_instance = api_helper.generate_instance(
            self.user1account1, image=self.image_plain
        )
        run_window = (
            util_helper.utc_dt(2019, 5, 1, 1, 0, 0),
            util_helper.utc_dt(2019, 5, 1, 2, 0, 0),
        )
        api_helper.generate_single_run(
            plain_instance,
            run_window,
            image=plain_instance.machine_image,
        )
        request_date = datetime.date(2019, 5, 1)

        # The non-RHEL run must contribute nothing to the maximum.
        usage = calculate_max_concurrent_usage(request_date, user_id=self.user1.id)
        self.assertMaxConcurrentUsage(usage, request_date, 0)
Example #4
0
    def test_delete_cleans_up_related_objects(self, mock_describe, mock_verify,
                                              mock_notify_sources):
        """
        Verify that deleting an AWS account cleans up related objects.

        Deleting the account should also delete instances, events, runs, and the
        periodic verify task related to it.
        """
        instance = helper.generate_instance(cloud_account=self.account)

        self.account.enable()
        helper.generate_single_run(
            instance=instance,
            runtime=(
                util_helper.utc_dt(2019, 1, 1, 0, 0, 0),
                util_helper.utc_dt(2019, 1, 2, 0, 0, 0),
            ),
        )

        # Every model class the account delete should cascade through.
        related_models = (
            aws_models.AwsCloudAccount,
            models.CloudAccount,
            aws_models.AwsInstanceEvent,
            models.InstanceEvent,
            models.Run,
            aws_models.AwsInstance,
            models.Instance,
            PeriodicTask,
        )

        # First, verify that objects exist *before* deleting the AwsCloudAccount.
        for model_class in related_models:
            self.assertGreater(model_class.objects.count(), 0)

        with patch("api.clouds.aws.util.delete_cloudtrail"
                   ) as mock_delete_cloudtrail:
            mock_delete_cloudtrail.return_value = True
            self.account.delete()

        # All related objects must have been removed by the delete cascade.
        for model_class in related_models:
            self.assertEqual(0, model_class.objects.count())
Example #5
0
    def setUp(self):
        """Set up a bunch of test data."""
        # Pretend the user and account were created well before the run dates.
        account_created_at = util_helper.utc_dt(2019, 4, 1, 1, 0, 0)
        with util_helper.clouditardis(account_created_at):
            self.user = util_helper.generate_test_user()
            self.account = api_helper.generate_cloud_account(user=self.user)

        self.image = api_helper.generate_image(rhel_detected=True)
        self.instance = api_helper.generate_instance(
            self.account, image=self.image
        )

        # One RHEL run spanning May 1-3; concurrency is calculated explicitly
        # below rather than as a side effect of run generation.
        run_window = (
            util_helper.utc_dt(2019, 5, 1, 1, 0, 0),
            util_helper.utc_dt(2019, 5, 3, 1, 0, 0),
        )
        api_helper.generate_single_run(
            self.instance,
            run_window,
            image=self.instance.machine_image,
            calculate_concurrent_usage=False,
        )

        self.request_date = datetime.date(2019, 5, 2)
        calculate_max_concurrent_usage(self.request_date, self.user.id)

        self.factory = APIRequestFactory()
Example #6
0
    def test_process_instance_event_power_off(self, mock_recalculate_runs):
        """
        Test no new run is created if a power off event occurs after all runs.

        account.util.recalculate_runs should not be run in this case.

        Initial Runs (2,5):
            [ ####          ]

        New power off event at (10,) results in 1 runs (2,5):
            [ ####          ]

        """
        instance = api_helper.generate_instance(self.account)
        api_helper.generate_single_run(
            instance,
            (
                util_helper.utc_dt(2018, 1, 2, 0, 0, 0),
                util_helper.utc_dt(2018, 1, 5, 0, 0, 0),
            ),
        )

        power_off_event = api_helper.generate_single_instance_event(
            instance=instance,
            occurred_at=util_helper.utc_dt(2018, 1, 10, 0, 0, 0),
            event_type=InstanceEvent.TYPE.power_off,
            instance_type=None,
        )
        tasks.process_instance_event(power_off_event)

        # A trailing power-off adds no run and triggers no recalculation.
        self.assertEqual(1, len(list(Run.objects.all())))
        mock_recalculate_runs.assert_not_called()
    def test_daily_pagination(self):
        """
        Test proper pagination handling of days from the custom queryset.

        This test asserts that the pagination envelope is correctly populated
        and that the included list is populated with the expected dates with
        calculated concurrency values.

        We ask for 31 days worth of concurrency here, but default pagination
        should limit the response to the first 10 days.

        Three instance runs exist in the first three days of this period: one
        on the first day, two overlapping on the second, and one spilling into
        the third. All other days have no activity and should report zero
        concurrency.
        """
        api_helper.generate_single_run(
            self.instance1,
            (
                util_helper.utc_dt(2019, 3, 15, 1, 0, 0),
                util_helper.utc_dt(2019, 3, 15, 2, 0, 0),
            ),
            image=self.instance1.machine_image,
            instance_type=self.instance_type1,
        )
        api_helper.generate_single_run(
            self.instance1,
            (
                util_helper.utc_dt(2019, 3, 16, 1, 0, 0),
                util_helper.utc_dt(2019, 3, 16, 2, 0, 0),
            ),
            image=self.instance1.machine_image,
            instance_type=self.instance_type1,
        )
        api_helper.generate_single_run(
            self.instance2,
            (
                util_helper.utc_dt(2019, 3, 16, 1, 0, 0),
                util_helper.utc_dt(2019, 3, 17, 2, 0, 0),
            ),
            image=self.instance2.machine_image,
            instance_type=self.instance_type1,
        )

        start_date = datetime.date(2019, 3, 15)
        end_date = datetime.date(2019, 4, 15)

        api_helper.calculate_concurrent(start_date, end_date, self.user1.id)
        data = {
            "start_date": start_date.strftime("%Y-%m-%d"),
            "end_date": end_date.strftime("%Y-%m-%d"),
        }
        client = APIClient()
        client.force_authenticate(user=self.user1)
        response = client.get(self.concurrent_api_url,
                              data=data,
                              format="json")
        body = response.json()

        # 31 days requested; default page size limits the payload to 10.
        # (assertEqual, not the deprecated assertEquals alias.)
        self.assertEqual(body["meta"]["count"], 31)
        self.assertEqual(len(body["data"]), 10)

        # Pagination links must preserve the requested date range.
        link_first = body["links"]["first"]
        self.assertIn("offset=0", link_first)
        self.assertIn("start_date=2019-03-15", link_first)
        self.assertIn("end_date=2019-04-15", link_first)

        link_next = body["links"]["next"]
        self.assertIn("offset=10", link_next)
        self.assertIn("start_date=2019-03-15", link_next)
        self.assertIn("end_date=2019-04-15", link_next)

        self.assertIsNone(body["links"]["previous"])

        link_last = body["links"]["last"]
        self.assertIn("offset=21", link_last)
        self.assertIn("start_date=2019-03-15", link_last)
        self.assertIn("end_date=2019-04-15", link_last)

        # Day 1: one run from instance1.
        first_date = datetime.date(2019, 3, 15)
        first_result = body["data"][0]
        self.assertEqual(first_result["maximum_counts"][0]["instances_count"],
                         1)
        self.assertEqual(first_result["date"], str(first_date))

        # Day 2: instance1 and instance2 overlap, so two concurrent instances.
        second_date = datetime.date(2019, 3, 16)
        second_result = body["data"][1]
        self.assertEqual(second_result["maximum_counts"][0]["instances_count"],
                         2)
        self.assertEqual(second_result["date"], str(second_date))

        # Day 3: only instance2's run continues into this day.
        third_date = datetime.date(2019, 3, 17)
        third_result = body["data"][2]
        self.assertEqual(third_result["maximum_counts"][0]["instances_count"],
                         1)
        self.assertEqual(third_result["date"], str(third_date))

        # assert that every other day exists with zero reported concurrency.
        for offset, result in enumerate(body["data"][3:], start=1):
            this_date = third_date + datetime.timedelta(days=offset)
            self.assertEqual(result["maximum_counts"], [])
            self.assertEqual(result["date"], str(this_date))