Example #1
    def test_save_with_concurrent_usages(self):
        """Test that save deletes the related concurrent_usages."""
        user = util_helper.generate_test_user()
        aws_account_id = util_helper.generate_dummy_aws_account_id()
        account = api_helper.generate_cloud_account(
            aws_account_id=aws_account_id,
            user=user,
        )
        image = api_helper.generate_image(
            owner_aws_account_id=aws_account_id,
            rhel_detected=True,
        )
        instance = api_helper.generate_instance(account, image=image)
        api_helper.generate_single_run(
            instance,
            (
                util_helper.utc_dt(2019, 5, 1, 1, 0, 0),
                util_helper.utc_dt(2019, 5, 1, 2, 0, 0),
            ),
            image=instance.machine_image,
        )
        request_date = datetime.date(2019, 5, 1)
        calculate_max_concurrent_usage(request_date, user_id=user.id)

        self.assertEqual(1, ConcurrentUsage.objects.count())
        image.rhel_detected_by_tag = True
        image.save()
        self.assertEqual(0, ConcurrentUsage.objects.count())
Example #2
    def test_overlapping_rhel_runs_within_day(self):
        """
        Test with two overlapping RHEL instances run within the day.

        Because no account filter is applied, both instances are seen.
        """
        rhel_instance1 = api_helper.generate_instance(
            self.user1account1, image=self.image_rhel1
        )
        rhel_instance2 = api_helper.generate_instance(
            self.user1account1, image=self.image_rhel2
        )
        api_helper.generate_single_run(
            rhel_instance1,
            (
                util_helper.utc_dt(2019, 5, 1, 1, 0, 0),
                util_helper.utc_dt(2019, 5, 1, 2, 0, 0),
            ),
            image=rhel_instance1.machine_image,
        )
        api_helper.generate_single_run(
            rhel_instance2,
            (
                util_helper.utc_dt(2019, 5, 1, 1, 30, 0),
                util_helper.utc_dt(2019, 5, 1, 2, 30, 0),
            ),
            image=rhel_instance2.machine_image,
        )
        expected_date = request_date = datetime.date(2019, 5, 1)

        usage = calculate_max_concurrent_usage(request_date, user_id=self.user1.id)
        self.assertEqual(usage.date, expected_date)
        self.assertEqual(len(usage.maximum_counts), 32)
Example #3
    def test_overlapping_rhel_runs_within_day_with_user_filter(self):
        """
        Test with two overlapping RHEL instances run within the day.

        Because a user filter is applied, only one instance's data is seen.
        """
        rhel_instance1 = api_helper.generate_instance(
            self.user1account1, image=self.image_rhel3
        )
        rhel_instance2 = api_helper.generate_instance(
            self.user2account1, image=self.image_rhel4
        )
        api_helper.generate_single_run(
            rhel_instance1,
            (
                util_helper.utc_dt(2019, 5, 1, 1, 0, 0),
                util_helper.utc_dt(2019, 5, 1, 2, 0, 0),
            ),
            image=rhel_instance1.machine_image,
        )
        # This second instance run should be filtered away.
        api_helper.generate_single_run(
            rhel_instance2,
            (
                util_helper.utc_dt(2019, 5, 1, 1, 30, 0),
                util_helper.utc_dt(2019, 5, 1, 2, 30, 0),
            ),
            image=rhel_instance2.machine_image,
        )
        request_date = datetime.date(2019, 5, 1)
        expected_date = request_date
        expected_instances = 1

        usage = calculate_max_concurrent_usage(request_date, user_id=self.user1.id)
        self.assertMaxConcurrentUsage(usage, expected_date, expected_instances)
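The assertMaxConcurrentUsage helper used above is not shown in these examples. Below is a minimal sketch of what it could look like, assuming it simply mirrors the manual assertions made in examples #6 and #7 (compare the usage date and check each entry's instances_count); the actual helper in the project's test base class may assert additional fields.

    def assertMaxConcurrentUsage(self, usage, expected_date, expected_instances):
        """Assert the usage has the expected date and per-entry instance count.

        Hypothetical sketch: the real helper may check more than this.
        """
        self.assertEqual(usage.date, expected_date)
        for single_day_counts in usage.maximum_counts:
            self.assertEqual(
                single_day_counts["instances_count"], expected_instances
            )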
Example #4
    def test_non_overlapping_rhel_runs_within_day(self):
        """
        Test with two non-overlapping RHEL instances run within the day.

        Two instances of different size run at different times, and this test
        should see the *larger* of the two matching the max values.
        """
        rhel_instance1 = api_helper.generate_instance(
            self.user1account1, image=self.image_rhel4
        )
        rhel_instance2 = api_helper.generate_instance(
            self.user1account2, image=self.image_rhel5
        )
        api_helper.generate_single_run(
            rhel_instance1,
            (
                util_helper.utc_dt(2019, 5, 1, 1, 0, 0),
                util_helper.utc_dt(2019, 5, 1, 2, 0, 0),
            ),
            image=rhel_instance1.machine_image,
        )
        api_helper.generate_single_run(
            rhel_instance2,
            (
                util_helper.utc_dt(2019, 5, 1, 3, 0, 0),
                util_helper.utc_dt(2019, 5, 1, 4, 0, 0),
            ),
            image=rhel_instance2.machine_image,
        )
        request_date = datetime.date(2019, 5, 1)
        expected_date = request_date
        expected_instances = 1

        usage = calculate_max_concurrent_usage(request_date, user_id=self.user1.id)
        self.assertMaxConcurrentUsage(usage, expected_date, expected_instances)
Example #5
    def test_when_user_id_does_not_exist(self):
        """Test when the requested user ID does not exist."""
        request_date = datetime.date(2019, 5, 1)
        user_id = -1  # a negative id should never exist
        expected_date = request_date

        usage = calculate_max_concurrent_usage(request_date, user_id=user_id)
        self.assertMaxConcurrentUsage(usage, expected_date, 0)
Example #6
    def test_calculation_in_progress(self, mock_schedule_concurrent_task):
        """Test exception is raised if calculation is currently running."""
        rhel_instance = api_helper.generate_instance(self.user1account1,
                                                     image=self.image_rhel)
        api_helper.generate_single_run(
            rhel_instance,
            (
                util_helper.utc_dt(2019, 5, 1, 1, 0, 0),
                util_helper.utc_dt(2019, 5, 1, 2, 0, 0),
            ),
            image=rhel_instance.machine_image,
            calculate_concurrent_usage=False,
        )
        request_date = datetime.date(2019, 5, 1)
        expected_date = request_date
        expected_instances = 1

        self.assertEqual(
            models.ConcurrentUsage.objects.filter(
                date=request_date, user_id=self.user1.id).count(),
            0,
        )

        concurrent_task = models.ConcurrentUsageCalculationTask(
            date=request_date, user_id=self.user1.id, task_id="test123")
        concurrent_task.status = models.ConcurrentUsageCalculationTask.RUNNING
        concurrent_task.save()

        with self.assertRaises(ResultsUnavailable):
            get_max_concurrent_usage(request_date, user_id=self.user1.id)

        # now actually calculate concurrent usage
        calculate_max_concurrent_usage(request_date, self.user1.id)
        concurrent_usage = get_max_concurrent_usage(request_date,
                                                    user_id=self.user1.id)

        self.assertEqual(
            models.ConcurrentUsage.objects.filter(
                date=request_date, user_id=self.user1.id).count(),
            1,
        )
        self.assertEqual(concurrent_usage.date, expected_date)
        self.assertEqual(len(concurrent_usage.maximum_counts), 24)
        for single_day_counts in concurrent_usage.maximum_counts:
            self.assertEqual(single_day_counts["instances_count"],
                             expected_instances)
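The test above relies on get_max_concurrent_usage raising ResultsUnavailable while results have not been calculated yet. Below is a minimal sketch of that behavior, inferred only from these tests and assuming the same models and ResultsUnavailable imports used in the surrounding examples; the real function likely also validates the requested date and may schedule a calculation task.

def get_max_concurrent_usage(date, user_id):
    """Return the stored ConcurrentUsage or raise ResultsUnavailable.

    Hypothetical sketch: if the usage has not been calculated yet (for
    example, a ConcurrentUsageCalculationTask is still SCHEDULED or
    RUNNING), the results are not available.
    """
    try:
        return models.ConcurrentUsage.objects.get(date=date, user_id=user_id)
    except models.ConcurrentUsage.DoesNotExist:
        raise ResultsUnavailable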
Example #7
    def test_single_rhel_run_result(self, mock_schedule_concurrent_calc):
        """Test with a single RHEL instance run within the day."""
        rhel_instance = api_helper.generate_instance(self.user1account1,
                                                     image=self.image_rhel)
        api_helper.generate_single_run(
            rhel_instance,
            (
                util_helper.utc_dt(2019, 5, 1, 1, 0, 0),
                util_helper.utc_dt(2019, 5, 1, 2, 0, 0),
            ),
            image=rhel_instance.machine_image,
            calculate_concurrent_usage=False,
        )
        request_date = datetime.date(2019, 5, 1)
        expected_date = request_date
        expected_instances = 1

        self.assertEqual(
            models.ConcurrentUsage.objects.filter(
                date=request_date, user_id=self.user1.id).count(),
            0,
        )

        with self.assertRaises(ResultsUnavailable):
            get_max_concurrent_usage(request_date, user_id=self.user1.id)

        calculate_max_concurrent_usage(request_date, self.user1.id)
        concurrent_usage = models.ConcurrentUsage.objects.get(
            date=request_date, user_id=self.user1.id)

        self.assertEqual(
            models.ConcurrentUsage.objects.filter(
                date=request_date, user_id=self.user1.id).count(),
            1,
        )
        self.assertEqual(concurrent_usage.date, expected_date)
        self.assertEqual(len(concurrent_usage.maximum_counts), 24)
        for single_day_counts in concurrent_usage.maximum_counts:
            self.assertEqual(single_day_counts["instances_count"],
                             expected_instances)
Example #8
    def setUp(self):
        """Set up a bunch of test data."""
        created_at = util_helper.utc_dt(2019, 4, 1, 1, 0, 0)
        with util_helper.clouditardis(created_at):
            self.user = util_helper.generate_test_user()
            self.account = api_helper.generate_cloud_account(user=self.user)
        self.image = api_helper.generate_image(rhel_detected=True)
        self.instance = api_helper.generate_instance(self.account,
                                                     image=self.image)

        api_helper.generate_single_run(
            self.instance,
            (
                util_helper.utc_dt(2019, 5, 1, 1, 0, 0),
                util_helper.utc_dt(2019, 5, 3, 1, 0, 0),
            ),
            image=self.instance.machine_image,
            calculate_concurrent_usage=False,
        )
        self.request_date = datetime.date(2019, 5, 2)
        calculate_max_concurrent_usage(self.request_date, self.user.id)

        self.factory = APIRequestFactory()
Example #9
    def test_single_rhel_run_entirely_after_day(self):
        """Test with a RHEL instance run entirely after the day."""
        rhel_instance = api_helper.generate_instance(
            self.user1account1, image=self.image_rhel1
        )
        api_helper.generate_single_run(
            rhel_instance,
            (
                util_helper.utc_dt(2019, 5, 2, 1, 0, 0),
                util_helper.utc_dt(2019, 5, 2, 2, 0, 0),
            ),
            image=rhel_instance.machine_image,
        )
        request_date = datetime.date(2019, 5, 1)
        expected_date = request_date

        usage = calculate_max_concurrent_usage(request_date, user_id=self.user1.id)
        self.assertMaxConcurrentUsage(usage, expected_date, 0)
Example #10
    def test_single_run_overlapping_day_start(self):
        """Test with a RHEL instance run overlapping the start of the day."""
        rhel_instance = api_helper.generate_instance(
            self.user1account1, image=self.image_rhel1
        )
        api_helper.generate_single_run(
            rhel_instance,
            (
                util_helper.utc_dt(2019, 4, 30, 1, 0, 0),
                util_helper.utc_dt(2019, 5, 1, 2, 0, 0),
            ),
            image=rhel_instance.machine_image,
        )
        request_date = datetime.date(2019, 5, 1)
        expected_date = request_date
        expected_instances = 1

        usage = calculate_max_concurrent_usage(request_date, user_id=self.user1.id)
        self.assertMaxConcurrentUsage(usage, expected_date, expected_instances)
Example #11
    def test_single_rhel_run_within_day(self):
        """Test with a single RHEL instance run within the day."""
        rhel_instance = api_helper.generate_instance(
            self.user1account1, image=self.image_rhel1
        )
        api_helper.generate_single_run(
            rhel_instance,
            (
                util_helper.utc_dt(2019, 5, 1, 1, 0, 0),
                util_helper.utc_dt(2019, 5, 1, 2, 0, 0),
            ),
            image=rhel_instance.machine_image,
        )
        request_date = datetime.date(2019, 5, 1)
        expected_date = request_date
        expected_instances = 1

        usage = calculate_max_concurrent_usage(request_date, user_id=self.user1.id)
        self.assertEqual(len(usage.maximum_counts), 24)
        self.assertMaxConcurrentUsage(usage, expected_date, expected_instances)
Example #12
    def test_single_no_image_run_within_day(self):
        """Test with a single no-image instance run within the day."""
        mystery_instance = api_helper.generate_instance(
            self.user1account1, image=None, no_image=True
        )
        api_helper.generate_single_run(
            mystery_instance,
            (
                util_helper.utc_dt(2019, 5, 1, 1, 0, 0),
                util_helper.utc_dt(2019, 5, 1, 2, 0, 0),
            ),
            image=None,
            no_image=True,
        )
        request_date = datetime.date(2019, 5, 1)
        expected_date = request_date
        expected_instances = 0

        usage = calculate_max_concurrent_usage(request_date, user_id=self.user1.id)
        self.assertEqual(len(usage.maximum_counts), 0)
        self.assertMaxConcurrentUsage(usage, expected_date, expected_instances)
Example #13
    def test_single_not_rhel_run_within_day(self):
        """
        Test with a not-RHEL instance run within the day.

        This instance should have zero effect on max calculations.
        """
        rhel_instance = api_helper.generate_instance(
            self.user1account1, image=self.image_plain
        )
        api_helper.generate_single_run(
            rhel_instance,
            (
                util_helper.utc_dt(2019, 5, 1, 1, 0, 0),
                util_helper.utc_dt(2019, 5, 1, 2, 0, 0),
            ),
            image=rhel_instance.machine_image,
        )
        request_date = datetime.date(2019, 5, 1)
        expected_date = request_date

        usage = calculate_max_concurrent_usage(request_date, user_id=self.user1.id)
        self.assertMaxConcurrentUsage(usage, expected_date, 0)
Example #14
def calculate_concurrent(start_date, end_date, user_id):
    """Calculate the concurrent usage between two dates."""
    delta = end_date - start_date
    for i in range(delta.days + 1):
        day = start_date + timedelta(days=i)
        calculate_max_concurrent_usage(date=day, user_id=user_id)
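A short usage sketch for the helper above, assuming both arguments are datetime.date values; the loop is inclusive of both endpoints, so this call covers May 1 through May 3. The user_id value is a placeholder for an existing user's primary key.

import datetime

# Recalculate concurrent usage for each day from 2019-05-01 through 2019-05-03.
calculate_concurrent(
    start_date=datetime.date(2019, 5, 1),
    end_date=datetime.date(2019, 5, 3),
    user_id=1,  # placeholder: any existing user's primary key
)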
Example #15
def calculate_max_concurrent_usage_task(self, date, user_id):  # noqa: C901
    """
    Schedule a task to calculate maximum concurrent usage of RHEL instances.

    Args:
        self (celery.Task): The bound task. With this we can retry if necessary.
        date (str): the day during which we are measuring usage.
            Celery serializes the date as a string in the format "%Y-%B-%dT%H:%M:%S".
        user_id (int): required filter on user

    Returns:
        ConcurrentUsage for the given date and user ID.

    """
    task_id = self.request.id
    date = date_parser.parse(date).date()

    # Temporary logger.info to help diagnose retry issues.
    logger.info(
        "retries is %(retries)s for id %(id)s user_id %(user_id)s and date %(date)s.",
        {
            "retries": self.request.retries,
            "id": task_id,
            "user_id": user_id,
            "date": date,
        },
    )

    # If the user does not exist, all the related ConcurrentUsage
    # objects should also have been removed, so we can exit early.
    if not User.objects.filter(id=user_id).exists():
        return

    try:
        # Lock the task at a user level. A user can only run one task at a time.
        # Since this both starts a transaction and blocks any others from starting, we
        # can be reasonably confident that there are no other tasks processing for the
        # same user and date at the same time.
        with lock_task_for_user_ids([user_id]):
            try:
                calculation_task = ConcurrentUsageCalculationTask.objects.get(
                    task_id=task_id)
            except ConcurrentUsageCalculationTask.DoesNotExist:
                # It's possible but unlikely that this task record was deleted after
                # its Celery task was delayed. Since the same user still exists, try
                # scheduling a new task.
                logger.warning(
                    "ConcurrentUsageCalculationTask not found for task ID %(task_id)s! "
                    "Scheduling a new task for user_id %(user_id)s and date %(date)s.",
                    {
                        "task_id": task_id,
                        "user_id": user_id,
                        "date": date
                    },
                )
                schedule_concurrent_calculation_task(date, user_id)
                return

            if calculation_task.status != ConcurrentUsageCalculationTask.SCHEDULED:
                # It's possible but unlikely that something else has changed the status
                # of this task. If it's not currently SCHEDULED, log and return early.
                logger.info(
                    "ConcurrentUsageCalculationTask for task ID %(task_id)s for "
                    "user_id %(user_id)s and date %(date)s has status "
                    "%(status)s which is not SCHEDULED.",
                    {
                        "user_id": user_id,
                        "date": date,
                        "task_id": task_id,
                        "status": calculation_task.status,
                    },
                )
                return

            calculate_max_concurrent_usage(date, user_id)

            calculation_task.status = ConcurrentUsageCalculationTask.COMPLETE
            calculation_task.save()
            logger.info(
                "Completed calculate_max_concurrent_usage_task for user_id %(user_id)s "
                "and date %(date)s (task_id %(task_id)s).",
                {
                    "user_id": user_id,
                    "date": date,
                    "task_id": task_id
                },
            )
            return
    except Exception as unknown_exception:
        # It's unclear exactly what other exceptions might arise, but just to be safe,
        # let's log the trace, set the task's status to ERROR, and re-raise it.
        logger.warning(unknown_exception, exc_info=True)
        # Use this objects.filter().update() pattern so that we don't risk raising an
        # IntegrityError in case the object has somehow been deleted.
        ConcurrentUsageCalculationTask.objects.filter(task_id=task_id).update(
            status=ConcurrentUsageCalculationTask.ERROR)
        raise unknown_exception
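The task above serializes work per user with lock_task_for_user_ids, whose implementation is not shown here. As a rough sketch of the idea (an assumption, not the project's actual code), a user-level lock can be taken by holding select_for_update row locks inside a transaction for the duration of the block:

from contextlib import contextmanager

from django.contrib.auth.models import User
from django.db import transaction


@contextmanager
def lock_task_for_user_ids(user_ids):
    """Block other tasks for the same users until this context exits.

    Hypothetical sketch: holding row locks on the users inside a transaction
    means any concurrent task that tries to lock the same users waits here.
    """
    with transaction.atomic():
        # Evaluating the queryset acquires the row locks.
        list(User.objects.select_for_update().filter(id__in=user_ids))
        yield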
Example #16
    def test_new_run_deletes_concurrent_usage(self):
        """
        Test that creating a new run deletes the right ConcurrentUsage.

        When a run is saved that is related to ConcurrentUsage through
        the potentially_related_runs field, ensure those ConcurrentUsages
        are deleted. Creating a new Run should not remove ConcurrentUsages
        with no related runs.

        """
        user = util_helper.generate_test_user()
        aws_account_id = util_helper.generate_dummy_aws_account_id()
        account = api_helper.generate_cloud_account(
            aws_account_id=aws_account_id,
            user=user,
        )
        image = api_helper.generate_image(
            owner_aws_account_id=aws_account_id,
            rhel_detected=True,
        )
        instance = api_helper.generate_instance(account, image=image)
        instance_type = util_helper.get_random_instance_type()

        start_time = util_helper.utc_dt(2019, 5, 1, 1, 0, 0)
        end_time = util_helper.utc_dt(2019, 5, 1, 2, 0, 0)

        request_date = datetime.date(2019, 5, 1)

        # Calculating maximum usage for no runs generates one concurrent usage
        # with empty counts
        calculate_max_concurrent_usage(request_date, user_id=user.id)
        self.assertEqual(1, ConcurrentUsage.objects.all().count())
        self.assertEqual("[]",
                         ConcurrentUsage.objects.all()[0]._maximum_counts)

        # Create a run
        run = Run.objects.create(
            start_time=start_time,
            end_time=end_time,
            instance=instance,
            machineimage=image,
            instance_type=instance_type,
            vcpu=util_helper.SOME_EC2_INSTANCE_TYPES[instance_type]["vcpu"],
            memory=util_helper.SOME_EC2_INSTANCE_TYPES[instance_type]["memory"],
        )
        # Creating a run should not delete the empty concurrent usage
        # since that concurrent usage isn't related to this run
        self.assertEqual(1, ConcurrentUsage.objects.all().count())
        self.assertEqual("[]",
                         ConcurrentUsage.objects.all()[0]._maximum_counts)

        # recalculating the maximum concurrent usage results in a nonempty
        # ConcurrentUsage maximum_counts
        calculate_max_concurrent_usage(request_date, user_id=user.id)
        self.assertNotEqual("[]",
                            ConcurrentUsage.objects.all()[0]._maximum_counts)

        # Re-saving the run should remove the related concurrent usage.
        run.save()
        self.assertEqual(0, ConcurrentUsage.objects.all().count())
Example #17
    def __init__(self):
        """Initialize all the data for the examples."""
        api_helper.generate_instance_type_definitions(cloud_type="aws")
        api_helper.generate_instance_type_definitions(cloud_type="azure")

        self.customer_account_number = "100001"
        self.customer_user = util_helper.get_test_user(
            self.customer_account_number, is_superuser=False)
        self.customer_user.date_joined = util_helper.utc_dt(
            2019, 1, 1, 0, 0, 0)
        self.customer_user.save()

        self.customer_client = api_helper.SandboxedRestClient()
        self.customer_client._force_authenticate(self.customer_user)
        self.internal_client = api_helper.SandboxedRestClient(
            api_root="/internal/api/cloudigrade/v1")
        self.internal_client._force_authenticate(self.customer_user)

        self.customer_arn = util_helper.generate_dummy_arn()

        # Times to use for various account and event activity.
        self.now = get_now()
        self.this_morning = self.now.replace(hour=0,
                                             minute=0,
                                             second=0,
                                             microsecond=0)
        self.yesterday = self.this_morning - timedelta(days=1)
        self.last_month = self.this_morning - timedelta(days=31)
        self.last_week = self.this_morning - timedelta(days=7)
        self.three_days_ago = self.this_morning - timedelta(days=3)
        self.two_days_ago = self.this_morning - timedelta(days=2)
        self.two_weeks_ago = self.this_morning - timedelta(weeks=2)
        self.tomorrow = self.this_morning + timedelta(days=1)
        self.next_week = self.this_morning + timedelta(weeks=1)

        ######################################
        # Generate AWS data for the customer user.
        self.aws_customer_account = api_helper.generate_cloud_account(
            arn=util_helper.generate_dummy_arn(),
            user=self.customer_user,
            name="greatest account ever",
            created_at=self.two_weeks_ago,
        )
        self.azure_customer_account = api_helper.generate_cloud_account(
            user=self.customer_user,
            name="meh account",
            created_at=self.two_weeks_ago,
            cloud_type="azure",
            azure_subscription_id=str(seeded_uuid4()),
            azure_tenant_id=str(seeded_uuid4()),
        )
        self.customer_instances = [
            api_helper.generate_instance(self.aws_customer_account),
            api_helper.generate_instance(self.aws_customer_account),
            api_helper.generate_instance(self.aws_customer_account),
            api_helper.generate_instance(self.azure_customer_account,
                                         cloud_type="azure"),
            api_helper.generate_instance(self.azure_customer_account,
                                         cloud_type="azure"),
            api_helper.generate_instance(self.azure_customer_account,
                                         cloud_type="azure"),
        ]

        # Generate events so we can see customer activity in the responses.
        # These events represent all customer instances starting one week ago,
        # stopping two days ago, and starting again yesterday.
        self.events = []
        for instance in self.customer_instances[:2]:
            self.events.extend(
                api_helper.generate_instance_events(
                    instance,
                    [
                        (self.last_week, self.three_days_ago),
                        (self.yesterday, None),
                    ],
                ))
        for instance in self.customer_instances[3:6]:
            self.events.extend(
                api_helper.generate_instance_events(
                    instance,
                    [
                        (self.last_week, self.three_days_ago),
                        (self.yesterday, None),
                    ],
                    cloud_type="azure",
                ))

        # Build the runs for the created events.
        # Note: this crude and *direct* implementation of Run-saving should be
        # replaced as we continue porting pilot functionality and (eventually)
        # better general-purpose Run-handling functions materialize.
        normalized_runs = normalize_runs(models.InstanceEvent.objects.all())
        for normalized_run in normalized_runs:
            run = models.Run(
                start_time=normalized_run.start_time,
                end_time=normalized_run.end_time,
                machineimage_id=normalized_run.image_id,
                instance_id=normalized_run.instance_id,
                instance_type=normalized_run.instance_type,
                memory=normalized_run.instance_memory,
                vcpu=normalized_run.instance_vcpu,
            )
            run.save()

        # Force all images to have RHEL detected ("7.7")
        self.images = list(
            set(instance.machine_image for instance in self.customer_instances
                if instance.machine_image is not None))
        for image in self.images:
            image.inspection_json = json.dumps({
                "rhel_enabled_repos_found": True,
                "rhel_version": "7.7",
                "syspurpose": {
                    "role": "Red Hat Enterprise Linux Server",
                    "service_level_agreement": "Premium",
                    "usage": "Development/Test",
                },
            })
            image.status = image.INSPECTED
            image.region = "us-east-1"
            image.save()

        # Pre-calculate concurrent usage data for upcoming requests.
        # Calculate each day since "last week" (oldest date we use in example requests).
        the_date = self.last_week.date()
        one_day_delta = timedelta(days=1)
        # while the_date <= self.this_morning.date():
        while the_date <= self.next_week.date():
            task_id = f"calculate-concurrent-usage-{seeded_uuid4()}"
            models.ConcurrentUsageCalculationTask.objects.create(
                user_id=self.customer_user.id,
                date=the_date.isoformat(),
                task_id=task_id,
                status=models.ConcurrentUsageCalculationTask.COMPLETE,
            )
            calculate_max_concurrent_usage(the_date, self.customer_user.id)
            the_date = the_date + one_day_delta
Example #18
def calculate_max_concurrent_usage_task(self, date, user_id):
    """
    Schedule a task to calculate maximum concurrent usage of RHEL instances.

    Args:
        self (celery.Task): The bound task. With this we can retry if necessary.
        date (str): the day during which we are measuring usage.
            Celery serializes the date as a string in the format "%Y-%B-%dT%H:%M:%S".
        user_id (int): required filter on user

    Returns:
        ConcurrentUsage for the given date and user ID.

    """
    # Temporary logger.info to help diagnose retry issues.
    logger.info(
        "retries is %(retries)s for id %(id)s user_id %(user_id)s and date %(date)s.",
        {
            "retries": self.request.retries,
            "id": self.request.id,
            "user_id": user_id,
            "date": date,
        },
    )

    # If the user does not exist, all the related ConcurrentUsage
    # objects should also have been removed, so we can exit early.
    if not User.objects.filter(id=user_id).exists():
        return

    date = date_parser.parse(date).date()

    # If a calculate_max_concurrent_usage is already running for the given
    # user and date, then retry this task later.
    running_tasks = ConcurrentUsageCalculationTask.objects.filter(
        date=date,
        user__id=user_id,
        status=ConcurrentUsageCalculationTask.RUNNING)
    if running_tasks:
        logger.info(
            "calculate_max_concurrent_usage_task for user_id %(user_id)s "
            "and date %(date)s is already running. The current task will "
            "be retried later.",
            {
                "user_id": user_id,
                "date": date
            },
        )
        for task in running_tasks:
            logger.info("already running task %(task)s", {"task": task})
        self.retry()

    logger.info(
        "Running calculate_max_concurrent_usage_task for user_id %(user_id)s "
        "and date %(date)s.",
        {
            "user_id": user_id,
            "date": date
        },
    )

    # Set task to running
    task_id = self.request.id
    try:
        calculation_task = ConcurrentUsageCalculationTask.objects.get(
            task_id=task_id)
    except ConcurrentUsageCalculationTask.DoesNotExist:
        # This probably shouldn't happen, but this error suggests it does:
        # https://sentry.io/organizations/cloudigrade/issues/2299804963/
        # Until we can figure out the root cause of tasks going missing, let's log an
        # error here with details and schedule a new calculation task.
        logger.error(
            'ConcurrentUsageCalculationTask not found for task ID "%(task_id)s"! '
            "Scheduling a new task for user_id %(user_id)s and date %(date)s.",
            {
                "task_id": task_id,
                "user_id": user_id,
                "date": date
            },
        )
        schedule_concurrent_calculation_task(date, user_id)
        return

    calculation_task.status = ConcurrentUsageCalculationTask.RUNNING
    calculation_task.save()

    try:
        # Lock the task at a user level. A user can only run one task at a time.
        # If another user task is already running, then don't start the
        # concurrent usage calculation task
        with lock_task_for_user_ids([user_id]):
            calculate_max_concurrent_usage(date, user_id)
    except Exception:
        calculation_task.status = ConcurrentUsageCalculationTask.ERROR
        calculation_task.save()
        raise

    calculation_task.status = ConcurrentUsageCalculationTask.COMPLETE
    calculation_task.save()
    logger.info(
        "Completed calculate_max_concurrent_usage_task for user_id %(user_id)s "
        "and date %(date)s.",
        {
            "user_id": user_id,
            "date": date
        },
    )
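Both task variants fall back to schedule_concurrent_calculation_task when the task record is missing. Its implementation is not shown here; below is a minimal sketch of what it might do, inferred from the fact that the task looks itself up by task_id and expects a SCHEDULED status. This is an assumption, not the project's actual helper, which may, for example, delay the task or set the status differently.

def schedule_concurrent_calculation_task(date, user_id):
    """Queue a new calculation task and record it as SCHEDULED.

    Hypothetical sketch; the real helper may differ.
    """
    async_result = calculate_max_concurrent_usage_task.apply_async(
        args=[str(date), user_id]
    )
    ConcurrentUsageCalculationTask.objects.create(
        user_id=user_id,
        date=date,
        task_id=async_result.task_id,
        status=ConcurrentUsageCalculationTask.SCHEDULED,
    )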