Example 1
    def test_apply_data_retention(self, now_mock):
        now_mock.return_value = timezone.make_aware(
            timezone.datetime(2016, 12, 25))

        self.assertEqual(DsmrReading.objects.count(), 52)
        self.assertEqual(ElectricityConsumption.objects.count(), 67)
        self.assertEqual(GasConsumption.objects.count(), 33)

        # Default inactive.
        dsmr_datalogger.services.apply_data_retention()

        self.assertEqual(DsmrReading.objects.count(), 52)
        self.assertEqual(ElectricityConsumption.objects.count(), 67)
        self.assertEqual(GasConsumption.objects.count(), 33)

        # Retention active, but point of retention not yet passed.
        RetentionSettings.get_solo()  # Ensure the singleton record exists before the update() below.
        RetentionSettings.objects.update(
            data_retention_in_hours=RetentionSettings.RETENTION_YEAR)

        dsmr_datalogger.services.apply_data_retention()

        self.assertEqual(DsmrReading.objects.count(), 52)
        self.assertEqual(ElectricityConsumption.objects.count(), 67)
        self.assertEqual(GasConsumption.objects.count(), 33)

        # Allow point of retention to pass.
        RetentionSettings.objects.update(
            data_retention_in_hours=RetentionSettings.RETENTION_WEEK)

        # Should affect data now.
        dsmr_datalogger.services.apply_data_retention()

        self.assertEqual(DsmrReading.objects.count(), 2)
        self.assertEqual(ElectricityConsumption.objects.count(), 8)
        self.assertEqual(GasConsumption.objects.count(), 32)

        # Make sure that specific data is kept.
        for x in [5629376, 5629427]:
            self.assertTrue(DsmrReading.objects.filter(pk=x).exists())

        for x in [95, 154, 155, 214, 215, 216, 217, 218]:
            self.assertTrue(
                ElectricityConsumption.objects.filter(pk=x).exists())

        self.assertFalse(GasConsumption.objects.filter(pk=32).exists())

        # No effect calling multiple times.
        dsmr_datalogger.services.apply_data_retention()

        self.assertEqual(DsmrReading.objects.count(), 2)
        self.assertEqual(ElectricityConsumption.objects.count(), 8)
        self.assertEqual(GasConsumption.objects.count(), 32)
Example 2
    def setUp(self):
        self.schedule_process = ScheduledProcess.objects.get(
            module=settings.DSMRREADER_MODULE_RETENTION_DATA_ROTATION)
        self.schedule_process.update(active=True,
                                     planned=timezone.make_aware(
                                         timezone.datetime(2000, 1, 1)))

        RetentionSettings.get_solo()  # Ensure the singleton record exists before the update() below.
        # Legacy tests: retention disabled (None) used to be the default.
        RetentionSettings.objects.update(data_retention_in_hours=None)
        self.assertEqual(DsmrReading.objects.count(), 52)
        self.assertEqual(ElectricityConsumption.objects.count(), 67)
        self.assertEqual(GasConsumption.objects.count(), 33)
Example 3
    def _dump_application_info(self):
        pending_migrations = []

        for line in self._intercept_command_stdout('showmigrations',
                                                   no_color=True).split("\n"):
            if line.startswith(' [ ]'):
                pending_migrations.append(line)

        pending_migrations_count = len(pending_migrations)

        self._print_header('DSMR-reader')
        self._pretty_print(
            'App / Python / Database',
            'v{} / v{} / {}'.format(settings.DSMRREADER_VERSION,
                                    platform.python_version(),
                                    connection.vendor))

        self._pretty_print(
            'Backend sleep / Datalogger sleep / Retention cleanup',
            '{}s / {}s / {}h'.format(
                BackendSettings.get_solo().process_sleep,
                DataloggerSettings.get_solo().process_sleep,
                RetentionSettings.get_solo().data_retention_in_hours or '-'))
        self._pretty_print(
            'Telegram latest version read / Parser settings',
            '"{}" / "{}"'.format(MeterStatistics.get_solo().dsmr_version,
                                 DataloggerSettings.get_solo().dsmr_version))

        if pending_migrations_count > 0:
            self._pretty_print('(!) Database migrations pending',
                               '{} (!)'.format(pending_migrations_count))
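
Note: `_intercept_command_stdout()` above is DSMR-reader's own test/debug helper. A minimal sketch of what such a helper typically looks like in Django, using the standard `call_command()` API (the helper name here is hypothetical, not the project's actual implementation):

from io import StringIO

from django.core.management import call_command


def intercept_command_stdout(command, **kwargs):
    # Run a management command and return whatever it printed to stdout.
    buffer = StringIO()
    call_command(command, stdout=buffer, **kwargs)
    return buffer.getvalue()

Calling `intercept_command_stdout('showmigrations', no_color=True)` would then yield the same migration listing that `_dump_application_info()` parses above.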
Example 4
    def test_retention_timestamp_restrictions(self, now_mock):
        now_mock.return_value = timezone.make_aware(timezone.datetime(2016, 12, 25, hour=12))

        RetentionSettings.get_solo()  # Ensure the singleton record exists before the update() below.
        RetentionSettings.objects.update(data_retention_in_hours=RetentionSettings.RETENTION_WEEK)

        # Retention should do nothing, since it's noon.
        self.assertEqual(DsmrReading.objects.count(), 52)
        dsmr_datalogger.services.apply_data_retention()
        self.assertEqual(DsmrReading.objects.count(), 52)

        now_mock.return_value = timezone.make_aware(timezone.datetime(2016, 12, 25, hour=5))

        # Retention should kick in now.
        self.assertEqual(DsmrReading.objects.count(), 52)
        dsmr_datalogger.services.apply_data_retention()
        self.assertEqual(DsmrReading.objects.count(), 2)
Example 5
    def test_enabled_with_cleanup(self, now_mock):
        now_mock.return_value = timezone.make_aware(
            timezone.datetime(2016, 12, 25))

        # Retention active, but point of retention not yet passed.
        RetentionSettings.objects.update(
            data_retention_in_hours=RetentionSettings.RETENTION_YEAR)
        RetentionSettings.get_solo().save()  # Trigger the save hook, faking an interface action.

        dsmr_datalogger.services.retention.run(self.schedule_process)

        self.assertEqual(DsmrReading.objects.count(), 52)
        self.assertEqual(ElectricityConsumption.objects.count(), 67)
        self.assertEqual(GasConsumption.objects.count(), 33)

        # Allow point of retention to pass.
        RetentionSettings.objects.update(
            data_retention_in_hours=RetentionSettings.RETENTION_WEEK)
        RetentionSettings.get_solo().save()  # Trigger the save hook, faking an interface action.

        # Should affect data now.
        dsmr_datalogger.services.retention.run(self.schedule_process)

        self.assertEqual(DsmrReading.objects.count(), 2)
        self.assertEqual(ElectricityConsumption.objects.count(), 8)
        self.assertEqual(GasConsumption.objects.count(), 32)

        # Make sure that specific data is kept.
        for x in [5629376, 5629427]:
            self.assertTrue(DsmrReading.objects.filter(pk=x).exists())

        for x in [95, 154, 155, 214, 215, 216, 217, 218]:
            self.assertTrue(
                ElectricityConsumption.objects.filter(pk=x).exists())

        self.assertFalse(GasConsumption.objects.filter(pk=32).exists())

        # As long as there was data, it should still be planned.
        self.schedule_process.refresh_from_db()
        self.assertEqual(self.schedule_process.planned, timezone.now())
Example 6
    def get_context_data(self, **kwargs):
        context_data = super(Configuration, self).get_context_data(**kwargs)
        # 20+ queries, we should cache this at some point.
        context_data.update(
            dict(
                api_settings=APISettings.get_solo(),
                backend_settings=BackendSettings.get_solo(),
                backup_settings=BackupSettings.get_solo(),
                consumption_settings=ConsumptionSettings.get_solo(),
                datalogger_settings=DataloggerSettings.get_solo(),
                dropbox_settings=DropboxSettings.get_solo(),
                email_settings=EmailSettings.get_solo(),
                frontend_settings=FrontendSettings.get_solo(),
                mindergas_settings=MinderGasSettings.get_solo(),
                mqtt_broker_settings=MQTTBrokerSettings.get_solo(),
                mqtt_jsondaytotals_settings=JSONDayTotalsMQTTSettings.get_solo(),
                mqtt_splittopicdaytotals_settings=SplitTopicDayTotalsMQTTSettings.get_solo(),
                mqtt_jsoncurrentperiodtotals_settings=JSONCurrentPeriodTotalsMQTTSettings.get_solo(),
                mqtt_splittopiccurrentperiodtotals_settings=SplitTopicCurrentPeriodTotalsMQTTSettings.get_solo(),
                mqtt_jsongasconsumption_settings=JSONGasConsumptionMQTTSettings.get_solo(),
                mqtt_splittopicgasconsumption_settings=SplitTopicGasConsumptionMQTTSettings.get_solo(),
                mqtt_splittopicmeterstatistics_settings=SplitTopicMeterStatisticsMQTTSettings.get_solo(),
                mqtt_jsontelegram_settings=JSONTelegramMQTTSettings.get_solo(),
                mqtt_rawtelegram_settings=RawTelegramMQTTSettings.get_solo(),
                mqtt_splittopictelegram_settings=SplitTopicTelegramMQTTSettings.get_solo(),
                notification_settings=NotificationSetting.get_solo(),
                pvoutput_api_settings=PVOutputAPISettings.get_solo(),
                pvoutput_addstatus_settings=PVOutputAddStatusSettings.get_solo(),
                retention_settings=RetentionSettings.get_solo(),
                weather_settings=WeatherSettings.get_solo(),
                influxdb_settings=InfluxdbIntegrationSettings.get_solo(),
            )
        )
        return context_data
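
The `# 20+ queries` comment hints at caching the singleton lookups. A minimal sketch of one way to do that with Django's low-level cache API (the helper name, cache key and timeout are made up for illustration):

from django.core.cache import cache


def cached_settings_context(build, key='configuration_context', timeout=60):
    # Cache the dict produced by build() to avoid hitting get_solo() 20+ times per request.
    context = cache.get(key)
    if context is None:
        context = build()
        cache.set(key, context, timeout)
    return context

The view above could then wrap its `dict(...)` of singletons in `cached_settings_context(lambda: dict(...))`; invalidating the cache when settings change would still need handling, e.g. via a `post_save` signal.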
Example 7
    def setUp(self):
        self.instance = RetentionSettings.get_solo()  # get_solo() is a classmethod; no throwaway instance needed.
Example 8
def apply_data_retention():
    """
    When data retention is enabled, this discards all data eligible for retention, keeping at least the first and
    last data point of each hour.
    """
    settings = RetentionSettings.get_solo()

    if settings.data_retention_in_hours is None:
        # No retention enabled at all (default behaviour).
        return

    # Only clean up at night: allow from midnight through six a.m.
    if timezone.now().hour > 6:
        return

    # Each run should be capped, for obvious performance reasons.
    MAX_HOURS_CLEANUP = 24

    # These models should be rotated with retention. The dict value is the datetime field used.
    MODELS_TO_CLEANUP = {
        DsmrReading.objects.processed(): 'timestamp',
        ElectricityConsumption.objects.all(): 'read_at',
        GasConsumption.objects.all(): 'read_at',
    }

    retention_date = timezone.now() - timezone.timedelta(
        hours=settings.data_retention_in_hours)

    # We need to force UTC here, to avoid AmbiguousTimeError on DST changes.
    timezone.activate(pytz.UTC)

    for base_queryset, datetime_field in MODELS_TO_CLEANUP.items():
        # Find hours older than the retention date that still hold more than two items.
        hours_to_cleanup = base_queryset.filter(
            **{'{}__lt'.format(datetime_field): retention_date}
        ).annotate(
            item_hour=TruncHour(datetime_field)
        ).values('item_hour').annotate(
            item_count=Count('id')
        ).order_by().filter(
            item_count__gt=2
        ).order_by('item_hour').values_list('item_hour', flat=True)[:MAX_HOURS_CLEANUP]

        hours_to_cleanup = list(hours_to_cleanup)  # Force evaluation.

        if not hours_to_cleanup:
            continue

        for current_hour in hours_to_cleanup:
            # Fetch all data per hour.
            data_set = base_queryset.filter(**{
                '{}__gte'.format(datetime_field): current_hour,
                '{}__lt'.format(datetime_field): current_hour + timezone.timedelta(hours=1),
            })

            # Extract the first/last item, so we can exclude it.
            # NOTE: Want to alter this? Please update "item_count__gt=2" above as well!
            keeper_pks = [
                data_set.order_by(datetime_field)[0].pk,
                data_set.order_by('-{}'.format(datetime_field))[0].pk
            ]

            # Now drop all others.
            print('Retention | Cleaning up: {} ({})'.format(
                current_hour, data_set[0].__class__.__name__))
            data_set.exclude(pk__in=keeper_pks).delete()

    timezone.deactivate()
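
To see the retention algorithm without the ORM machinery: rows are bucketed per hour (`TruncHour`), and every bucket older than the cutoff that holds more than two rows is trimmed down to its first and last row. A Django-free sketch of that idea (illustration only, not DSMR-reader code):

from collections import defaultdict


def rotate_hour_buckets(timestamps, cutoff):
    # Bucket timestamps per hour; buckets before the cutoff keep only their first and last entry.
    buckets = defaultdict(list)
    for stamp in timestamps:
        buckets[stamp.replace(minute=0, second=0, microsecond=0)].append(stamp)

    kept = []
    for hour, bucket in sorted(buckets.items()):
        bucket.sort()
        if hour < cutoff and len(bucket) > 2:
            kept += [bucket[0], bucket[-1]]  # the two "keepers" of this hour
        else:
            kept += bucket
    return kept

Feeding it, say, six readings per hour leaves exactly two per old hour, which is the collapse the tests above assert.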
Example 9
def run(scheduled_process):
    retention_settings = RetentionSettings.get_solo()

    if retention_settings.data_retention_in_hours == RetentionSettings.RETENTION_NONE:
        # Changing the retention settings in the admin will re-activate it.
        return scheduled_process.disable()

    # Keep this many items per hour: the first and the last.
    ITEM_COUNT_PER_HOUR = 2

    # These models should be rotated with retention. The dict value is the datetime field used.
    MODELS_TO_CLEANUP = {
        DsmrReading.objects.processed(): 'timestamp',
        ElectricityConsumption.objects.all(): 'read_at',
        GasConsumption.objects.all(): 'read_at',
    }

    retention_date = timezone.now() - timezone.timedelta(
        hours=retention_settings.data_retention_in_hours)
    data_to_clean_up = False

    # We need to force UTC here, to avoid AmbiguousTimeError on DST changes.
    timezone.activate(pytz.UTC)

    for base_queryset, datetime_field in MODELS_TO_CLEANUP.items():
        # Find hours older than the retention date that still hold more items than we keep.
        hours_to_cleanup = base_queryset.filter(
            **{'{}__lt'.format(datetime_field): retention_date}
        ).annotate(
            item_hour=TruncHour(datetime_field)
        ).values('item_hour').annotate(
            item_count=Count('id')
        ).order_by().filter(
            item_count__gt=ITEM_COUNT_PER_HOUR
        ).order_by('item_hour').values_list(
            'item_hour', flat=True
        )[:settings.DSMRREADER_RETENTION_MAX_CLEANUP_HOURS_PER_RUN]

        hours_to_cleanup = list(hours_to_cleanup)  # Force evaluation.

        if not hours_to_cleanup:
            continue

        data_to_clean_up = True

        for current_hour in hours_to_cleanup:
            # Fetch all data per hour.
            data_set = base_queryset.filter(**{
                '{}__gte'.format(datetime_field): current_hour,
                '{}__lt'.format(datetime_field): current_hour + timezone.timedelta(hours=1),
            })

            # Extract the first/last item, so we can exclude it.
            # NOTE: Want to alter this? Please update ITEM_COUNT_PER_HOUR above as well!
            keeper_pks = [
                data_set.order_by(datetime_field)[0].pk,
                data_set.order_by('-{}'.format(datetime_field))[0].pk
            ]

            # Now drop all others.
            logger.debug('Retention: Cleaning up: %s (%s)', current_hour,
                         data_set[0].__class__.__name__)
            data_set.exclude(pk__in=keeper_pks).delete()

    timezone.deactivate()

    # Delay for a bit, as there is nothing to do.
    if not data_to_clean_up:
        scheduled_process.delay(timezone.timedelta(hours=12))
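
For context, `scheduled_process` follows DSMR-reader's ScheduledProcess pattern: `disable()` deactivates the process until a settings change re-enables it, and `delay()` pushes the next planned run into the future. A hypothetical stand-in showing just the contract `run()` relies on (field and method names are assumptions, not the real model):

import datetime


class ScheduledProcessStub:
    # Illustrative stand-in only: DSMR-reader's real ScheduledProcess is a Django model.
    def __init__(self, planned):
        self.active = True
        self.planned = planned

    def delay(self, delta):
        # Nothing left to clean up: push the next planned run into the future.
        self.planned = datetime.datetime.now(datetime.timezone.utc) + delta

    def disable(self):
        # Retention is off: stop scheduling until a settings change re-enables it.
        self.active = False

When `run()` did find data to clean up, it neither delays nor disables, so the process stays planned and the next backend cycle continues the (capped) cleanup, which is what Example 5's final assertion checks.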