Example #1
    def setUp(self):
        self.assertEqual(DsmrReading.objects.all().count(), 7)
        self.assertTrue(DsmrReading.objects.unprocessed().exists())
        ConsumptionSettings.get_solo()
        MeterStatistics.get_solo()
        MeterStatistics.objects.all().update(dsmr_version='50')

        self.schedule_process = ScheduledProcess.objects.get(module=settings.DSMRREADER_MODULE_GENERATE_CONSUMPTION)
        self.schedule_process.update(active=True, planned=timezone.make_aware(timezone.datetime(2000, 1, 1)))
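
Note: the get_solo() calls in these tests come from django-solo's SingletonModel, which the
DSMR-reader settings models build on; calling it in setUp() mainly ensures the singleton
settings row exists before processing. A minimal sketch of that pattern (the model name and
field below are illustrative, not DSMR-reader's own):

# Sketch only: assumes django-solo is installed and a configured Django project.
from django.db import models
from solo.models import SingletonModel


class ExampleSettings(SingletonModel):
    electricity_grouping_type = models.IntegerField(default=1)


# get_solo() returns the single row, creating it with defaults when it does not exist yet:
# settings = ExampleSettings.get_solo()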
Example #2
    def setUp(self):
        self.support_gas_readings = True
        self.assertEqual(DsmrReading.objects.all().count(), 3)

        if self.support_gas_readings:
            self.assertTrue(DsmrReading.objects.unprocessed().exists())
        else:
            self.assertFalse(DsmrReading.objects.unprocessed().exists())

        ConsumptionSettings.get_solo()
Example #3
    def setUp(self):
        self.support_gas_readings = True
        self.assertEqual(DsmrReading.objects.all().count(), 3)
        MeterStatistics.get_solo()
        MeterStatistics.objects.all().update(dsmr_version='42')

        if self.support_gas_readings:
            self.assertTrue(DsmrReading.objects.unprocessed().exists())
        else:
            self.assertFalse(DsmrReading.objects.unprocessed().exists())

        ConsumptionSettings.get_solo()
Example #4
    def test_processing(self):
        """ Test fixed data parse outcome. """
        # Default is grouping by minute, so make sure to revert that here.
        consumption_settings = ConsumptionSettings.get_solo()
        consumption_settings.compactor_grouping_type = ConsumptionSettings.COMPACTOR_GROUPING_BY_READING
        consumption_settings.save()

        self.assertFalse(
            ElectricityConsumption.objects.filter(
                phase_currently_delivered_l2__isnull=False,
                phase_currently_delivered_l3__isnull=False).exists())

        dsmr_consumption.services.compact_all()

        self.assertTrue(DsmrReading.objects.processed().exists())
        self.assertFalse(DsmrReading.objects.unprocessed().exists())
        self.assertEqual(ElectricityConsumption.objects.count(), 3)

        if self.support_gas_readings:
            self.assertEqual(GasConsumption.objects.count(), 2)
        else:
            self.assertEqual(GasConsumption.objects.count(), 0)

        self.assertTrue(
            ElectricityConsumption.objects.filter(
                phase_currently_delivered_l2__isnull=False,
                phase_currently_delivered_l3__isnull=False).exists())
Example #5
def compact(dsmr_reading):
    """
    Compacts/converts DSMR readings to consumption data. Optionally groups electricity by minute.
    """
    grouping_type = ConsumptionSettings.get_solo().compactor_grouping_type

    # Grouping by minute requires some distinction and history checking.
    reading_start = timezone.datetime.combine(
        dsmr_reading.timestamp.date(),
        time(hour=dsmr_reading.timestamp.hour, minute=dsmr_reading.timestamp.minute),
    ).replace(tzinfo=pytz.UTC)

    if grouping_type == ConsumptionSettings.COMPACTOR_GROUPING_BY_MINUTE:
        system_time_past_minute = timezone.now() >= reading_start + timezone.timedelta(minutes=1)
        reading_past_minute_exists = DsmrReading.objects.filter(
            timestamp__gte=reading_start + timezone.timedelta(minutes=1)
        ).exists()

        # Postpone until the minute has passed on the system time and there are (new) readings beyond this minute.
        if not system_time_past_minute or not reading_past_minute_exists:
            return

    # Create consumption records.
    _compact_electricity(dsmr_reading=dsmr_reading, grouping_type=grouping_type, reading_start=reading_start)
    _compact_gas(dsmr_reading=dsmr_reading, grouping_type=grouping_type, reading_start=reading_start)

    dsmr_reading.processed = True
    dsmr_reading.save(update_fields=['processed'])

    # For backend logging in Supervisor.
    logger.debug(' - Processed reading: %s', dsmr_reading)
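
Note on the minute grouping above: combining the reading's date with only its hour and minute
(and pinning UTC) truncates the timestamp to the start of its minute; compaction is then
postponed until that minute has fully passed and a newer reading exists. A stand-alone
illustration using only the standard library (helper name and timestamps are mine):

from datetime import datetime, timedelta, timezone as dt_timezone


def truncate_to_minute(timestamp):
    # Drop seconds/microseconds so every reading within the same minute shares one start.
    return timestamp.replace(second=0, microsecond=0)


reading_timestamp = datetime(2023, 1, 1, 12, 34, 56, tzinfo=dt_timezone.utc)
reading_start = truncate_to_minute(reading_timestamp)  # 2023-01-01 12:34:00+00:00
minute_has_passed = datetime.now(dt_timezone.utc) >= reading_start + timedelta(minutes=1)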
Example #6
def compact(dsmr_reading):
    """
    Compacts/converts DSMR readings to consumption data. Optionally groups electricity by minute.
    """
    grouping_type = ConsumptionSettings.get_solo().compactor_grouping_type

    # Grouping by minute requires some distinction and history checking.
    reading_start = timezone.datetime.combine(
        dsmr_reading.timestamp.date(),
        time(hour=dsmr_reading.timestamp.hour,
             minute=dsmr_reading.timestamp.minute),
    ).replace(tzinfo=pytz.UTC)

    if grouping_type == ConsumptionSettings.COMPACTOR_GROUPING_BY_MINUTE:
        # Postpone when current minute hasn't passed yet.
        if timezone.now() <= reading_start + timezone.timedelta(minutes=1):
            return

    # Create consumption records.
    _compact_electricity(dsmr_reading=dsmr_reading,
                         grouping_type=grouping_type,
                         reading_start=reading_start)
    _compact_gas(dsmr_reading=dsmr_reading,
                 grouping_type=grouping_type,
                 reading_start=reading_start)

    dsmr_reading.processed = True
    dsmr_reading.save(update_fields=['processed'])

    # For backend logging in Supervisor.
    print(' - Processed reading: {}'.format(dsmr_reading))
Example #7
 def get_context_data(self, **kwargs):
     context_data = super(Configuration, self).get_context_data(**kwargs)
     context_data['api_settings'] = APISettings.get_solo()
     context_data['consumption_settings'] = ConsumptionSettings.get_solo()
     context_data['datalogger_settings'] = DataloggerSettings.get_solo()
     context_data['frontend_settings'] = FrontendSettings.get_solo()
     context_data['weather_settings'] = WeatherSettings.get_solo()
     context_data['backup_settings'] = BackupSettings.get_solo()
     context_data['dropbox_settings'] = DropboxSettings.get_solo()
     context_data['mindergas_settings'] = MinderGasSettings.get_solo()
     return context_data
Example #8
    def test_processing(self):
        """ Test fixed data parse outcome. """
        # Default is grouping by minute, so make sure to revert that here.
        consumption_settings = ConsumptionSettings.get_solo()
        consumption_settings.compactor_grouping_type = ConsumptionSettings.COMPACTOR_GROUPING_BY_READING
        consumption_settings.save()

        dsmr_consumption.services.compact_all()

        self.assertTrue(DsmrReading.objects.processed().exists())
        self.assertFalse(DsmrReading.objects.unprocessed().exists())
        self.assertEqual(ElectricityConsumption.objects.count(), 3)

        if self.support_gas_readings:
            self.assertEqual(GasConsumption.objects.count(), 2)
        else:
            self.assertEqual(GasConsumption.objects.count(), 0)
Example #9
 def get_context_data(self, **kwargs):
     context_data = super(Configuration, self).get_context_data(**kwargs)
     # 20+ queries, we should cache this at some point.
     context_data.update(
         dict(
             api_settings=APISettings.get_solo(),
             backend_settings=BackendSettings.get_solo(),
             backup_settings=BackupSettings.get_solo(),
             consumption_settings=ConsumptionSettings.get_solo(),
             datalogger_settings=DataloggerSettings.get_solo(),
             dropbox_settings=DropboxSettings.get_solo(),
             email_settings=EmailSettings.get_solo(),
             frontend_settings=FrontendSettings.get_solo(),
             mindergas_settings=MinderGasSettings.get_solo(),
             mqtt_broker_settings=MQTTBrokerSettings.get_solo(),
             mqtt_jsondaytotals_settings=JSONDayTotalsMQTTSettings.get_solo(),
             mqtt_splittopicdaytotals_settings=SplitTopicDayTotalsMQTTSettings.get_solo(),
             mqtt_jsoncurrentperiodtotals_settings=JSONCurrentPeriodTotalsMQTTSettings.get_solo(),
             mqtt_splittopiccurrentperiodtotals_settings=SplitTopicCurrentPeriodTotalsMQTTSettings.get_solo(),
             mqtt_jsongasconsumption_settings=JSONGasConsumptionMQTTSettings.get_solo(),
             mqtt_splittopicgasconsumption_settings=SplitTopicGasConsumptionMQTTSettings.get_solo(),
             mqtt_splittopicmeterstatistics_settings=SplitTopicMeterStatisticsMQTTSettings.get_solo(),
             mqtt_jsontelegram_settings=JSONTelegramMQTTSettings.get_solo(),
             mqtt_rawtelegram_settings=RawTelegramMQTTSettings.get_solo(),
             mqtt_splittopictelegram_settings=SplitTopicTelegramMQTTSettings.get_solo(),
             notification_settings=NotificationSetting.get_solo(),
             pvoutput_api_settings=PVOutputAPISettings.get_solo(),
             pvoutput_addstatus_settings=PVOutputAddStatusSettings.get_solo(),
             retention_settings=RetentionSettings.get_solo(),
             weather_settings=WeatherSettings.get_solo(),
             influxdb_settings=InfluxdbIntegrationSettings.get_solo(),
         ))
     return context_data
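
Regarding the "20+ queries" comment above: each get_solo() call issues its own query unless
django-solo's cache is enabled. If caching were wanted, django-solo exposes it through Django
settings, roughly as below (a sketch of django-solo's own settings, not something DSMR-reader
necessarily configures):

# In the project's Django settings module:
SOLO_CACHE = 'default'         # Django cache alias to use for singleton instances
SOLO_CACHE_TIMEOUT = 60 * 5    # cache timeout in seconds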
Example #10
    def test_processing(self):
        """ Test fixed data parse outcome. """
        # Default is grouping by minute, so make sure to revert that here.
        consumption_settings = ConsumptionSettings.get_solo()
        consumption_settings.electricity_grouping_type = ConsumptionSettings.ELECTRICITY_GROUPING_BY_READING
        consumption_settings.save()

        self.assertFalse(
            ElectricityConsumption.objects.filter(
                phase_currently_delivered_l2__isnull=False,
                phase_currently_delivered_l3__isnull=False
            ).exists()
        )

        dsmr_consumption.services.run(self.schedule_process)

        self.assertTrue(DsmrReading.objects.processed().exists())
        self.assertFalse(DsmrReading.objects.unprocessed().exists())
        self.assertEqual(ElectricityConsumption.objects.count(), 3)

        if self.support_gas_readings:
            self.assertEqual(GasConsumption.objects.count(), 2)
            self.assertEqual(
                [x.read_at for x in GasConsumption.objects.all()],
                [
                    # Assume a one-hour backtrack.
                    timezone.make_aware(timezone.datetime(2015, 11, 10, hour=18), timezone.utc),
                    timezone.make_aware(timezone.datetime(2015, 11, 10, hour=19), timezone.utc)
                ]
            )
        else:
            self.assertEqual(GasConsumption.objects.count(), 0)

        self.assertTrue(
            ElectricityConsumption.objects.filter(
                phase_currently_delivered_l2__isnull=False,
                phase_currently_delivered_l3__isnull=False
            ).exists()
        )
Example #11
    def test_duplicate_processing(self):
        """ Duplicate readings should not crash the compactor when not grouping. """
        # Default is grouping by minute, so make sure to revert that here.
        consumption_settings = ConsumptionSettings.get_solo()
        consumption_settings.compactor_grouping_type = ConsumptionSettings.COMPACTOR_GROUPING_BY_READING
        consumption_settings.save()

        # Just duplicate one, as it will cause: IntegrityError UNIQUE constraint failed: ElectricityConsumption.read_at
        duplicate_reading = DsmrReading.objects.all()[0]
        duplicate_reading.pk = None
        duplicate_reading.save()

        dsmr_consumption.services.compact_all()

        self.assertTrue(DsmrReading.objects.processed().exists())
        self.assertFalse(DsmrReading.objects.unprocessed().exists())
        self.assertEqual(ElectricityConsumption.objects.count(), 3)

        if self.support_gas_readings:
            self.assertEqual(GasConsumption.objects.count(), 2)
        else:
            self.assertEqual(GasConsumption.objects.count(), 0)
Example #12
def compact(dsmr_reading: DsmrReading) -> None:
    """ Compacts/converts DSMR readings to consumption data. Optionally groups electricity by minute. """
    consumption_settings = ConsumptionSettings.get_solo()

    # Grouping by minute requires some distinction and history checking.
    reading_start = timezone.datetime.combine(
        dsmr_reading.timestamp.date(),
        time(hour=dsmr_reading.timestamp.hour,
             minute=dsmr_reading.timestamp.minute),
    ).replace(tzinfo=pytz.UTC)

    if consumption_settings.electricity_grouping_type == ConsumptionSettings.ELECTRICITY_GROUPING_BY_MINUTE:
        system_time_past_minute = timezone.now() >= reading_start + timezone.timedelta(minutes=1)
        reading_past_minute_exists = DsmrReading.objects.filter(
            timestamp__gte=reading_start + timezone.timedelta(minutes=1)
        ).exists()

        # Postpone until the minute has passed on the system time and there are (new) readings beyond this minute.
        if not system_time_past_minute or not reading_past_minute_exists:
            logger.debug('Compact: Waiting for newer readings before grouping data...')
            raise CompactorNotReadyError()

    # Create consumption records.
    _compact_electricity(dsmr_reading=dsmr_reading,
                         electricity_grouping_type=consumption_settings.electricity_grouping_type,
                         reading_start=reading_start)
    _compact_gas(dsmr_reading=dsmr_reading,
                 gas_grouping_type=consumption_settings.gas_grouping_type)

    dsmr_reading.processed = True
    dsmr_reading.save(update_fields=['processed'])

    logger.debug('Compact: Processed reading: %s', dsmr_reading)
Example #13
 def setUp(self):
     self.assertEqual(DsmrReading.objects.all().count(), 6)
     self.assertTrue(DsmrReading.objects.unprocessed().exists())
     ConsumptionSettings.get_solo()
     MeterStatistics.get_solo()
     MeterStatistics.objects.all().update(dsmr_version='50')
Example #14
def compact(dsmr_reading):
    """
    Compacts/converts DSMR readings to consumption data. Optionally groups electricity by minute.
    """
    grouping_type = ConsumptionSettings.get_solo().compactor_grouping_type

    # Electricity should be unique, because it's the reading with the lowest interval anyway.
    if grouping_type == ConsumptionSettings.COMPACTOR_GROUPING_BY_READING:
        ElectricityConsumption.objects.get_or_create(
            read_at=dsmr_reading.timestamp,
            delivered_1=dsmr_reading.electricity_delivered_1,
            returned_1=dsmr_reading.electricity_returned_1,
            delivered_2=dsmr_reading.electricity_delivered_2,
            returned_2=dsmr_reading.electricity_returned_2,
            currently_delivered=dsmr_reading.electricity_currently_delivered,
            currently_returned=dsmr_reading.electricity_currently_returned,
            phase_currently_delivered_l1=dsmr_reading.phase_currently_delivered_l1,
            phase_currently_delivered_l2=dsmr_reading.phase_currently_delivered_l2,
            phase_currently_delivered_l3=dsmr_reading.phase_currently_delivered_l3,
        )
    # Grouping by minute requires some distinction and history checking.
    else:
        minute_start = timezone.datetime.combine(
            dsmr_reading.timestamp.date(),
            time(hour=dsmr_reading.timestamp.hour,
                 minute=dsmr_reading.timestamp.minute),
        ).replace(tzinfo=pytz.UTC)
        minute_end = minute_start + timezone.timedelta(minutes=1)

        # Postpone when current minute hasn't passed yet.
        if timezone.now() <= minute_end:
            return

        # We might have six readings per minute, so there is a chance we already parsed it.
        if not ElectricityConsumption.objects.filter(read_at=minute_end).exists():
            grouped_reading = DsmrReading.objects.filter(
                timestamp__gte=minute_start,
                timestamp__lt=minute_end).aggregate(
                    avg_delivered=Avg('electricity_currently_delivered'),
                    avg_returned=Avg('electricity_currently_returned'),
                    max_delivered_1=Max('electricity_delivered_1'),
                    max_delivered_2=Max('electricity_delivered_2'),
                    max_returned_1=Max('electricity_returned_1'),
                    max_returned_2=Max('electricity_returned_2'),
                    avg_phase_delivered_l1=Avg('phase_currently_delivered_l1'),
                    avg_phase_delivered_l2=Avg('phase_currently_delivered_l2'),
                    avg_phase_delivered_l3=Avg('phase_currently_delivered_l3'),
                )

            # This instance is the average/max and combined result.
            ElectricityConsumption.objects.create(
                read_at=minute_end,
                delivered_1=grouped_reading['max_delivered_1'],
                returned_1=grouped_reading['max_returned_1'],
                delivered_2=grouped_reading['max_delivered_2'],
                returned_2=grouped_reading['max_returned_2'],
                currently_delivered=grouped_reading['avg_delivered'],
                currently_returned=grouped_reading['avg_returned'],
                phase_currently_delivered_l1=grouped_reading['avg_phase_delivered_l1'],
                phase_currently_delivered_l2=grouped_reading['avg_phase_delivered_l2'],
                phase_currently_delivered_l3=grouped_reading['avg_phase_delivered_l3'],
            )

    # Gas is optional.
    if dsmr_reading.extra_device_timestamp and dsmr_reading.extra_device_delivered:
        # Gas however is only read (or updated) once every hour, so we should check for any duplicates
        # as they will exist at some point.
        passed_hour_start = dsmr_reading.extra_device_timestamp - timezone.timedelta(hours=1)

        if not GasConsumption.objects.filter(read_at=passed_hour_start).exists():
            # DSMR does not expose current gas rate, so we have to calculate
            # it ourselves, relative to the previous gas consumption, if any.
            try:
                previous_gas_consumption = GasConsumption.objects.get(
                    # Compare to reading before, if any.
                    read_at=passed_hour_start - timezone.timedelta(hours=1))
            except GasConsumption.DoesNotExist:
                gas_diff = 0
            else:
                gas_diff = dsmr_reading.extra_device_delivered - previous_gas_consumption.delivered

            GasConsumption.objects.create(
                # Gas consumption is aligned to start of the hour.
                read_at=passed_hour_start,
                delivered=dsmr_reading.extra_device_delivered,
                currently_delivered=gas_diff)

    dsmr_reading.processed = True
    dsmr_reading.save(update_fields=['processed'])

    # For backend logging in Supervisor.
    print(' - Processed reading: {}.'.format(timezone.localtime(dsmr_reading.timestamp)))
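
The gas branch above derives the hourly rate by subtracting the previous hour's cumulative
counter from the current one, falling back to zero when no earlier record exists. A minimal
stand-alone sketch of that arithmetic (function name and values are illustrative):

from decimal import Decimal


def hourly_gas_rate(current_counter, previous_counter):
    # DSMR only exposes a cumulative gas counter, so the rate is the delta with the previous hour.
    if previous_counter is None:
        return Decimal(0)
    return current_counter - previous_counter


print(hourly_gas_rate(Decimal('845.123'), Decimal('844.321')))  # Decimal('0.802')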
Example #15
 def setUp(self):
     self.instance = ConsumptionSettings().get_solo()
Example #16
def compact(dsmr_reading):
    """
    Compacts/converts DSMR readings to consumption data. Optionally groups electricity by minute.
    """
    grouping_type = ConsumptionSettings.get_solo().compactor_grouping_type

    # Electricity should be unique, because it's the reading with the lowest interval anyway.
    if grouping_type == ConsumptionSettings.COMPACTOR_GROUPING_BY_READING:
        ElectricityConsumption.objects.create(
            read_at=dsmr_reading.timestamp,
            delivered_1=dsmr_reading.electricity_delivered_1,
            returned_1=dsmr_reading.electricity_returned_1,
            delivered_2=dsmr_reading.electricity_delivered_2,
            returned_2=dsmr_reading.electricity_returned_2,
            currently_delivered=dsmr_reading.electricity_currently_delivered,
            currently_returned=dsmr_reading.electricity_currently_returned,
        )
    # Grouping by minute requires some distinction and history checking.
    else:
        minute_start = timezone.datetime.combine(
            dsmr_reading.timestamp.date(),
            time(hour=dsmr_reading.timestamp.hour, minute=dsmr_reading.timestamp.minute),
        ).replace(tzinfo=pytz.UTC)
        minute_end = minute_start + timezone.timedelta(minutes=1)

        # Postpone when current minute hasn't passed yet.
        if timezone.now() <= minute_end:
            return

        # We might have six readings per minute, so there is a chance we already parsed it.
        if not ElectricityConsumption.objects.filter(read_at=minute_end).exists():
            grouped_reading = DsmrReading.objects.filter(
                timestamp__gte=minute_start, timestamp__lt=minute_end
            ).aggregate(
                avg_delivered=Avg('electricity_currently_delivered'),
                avg_returned=Avg('electricity_currently_returned'),
                max_delivered_1=Max('electricity_delivered_1'),
                max_delivered_2=Max('electricity_delivered_2'),
                max_returned_1=Max('electricity_returned_1'),
                max_returned_2=Max('electricity_returned_2')
            )

            # This instance is the average/max and combined result.
            ElectricityConsumption.objects.create(
                read_at=minute_end,
                delivered_1=grouped_reading['max_delivered_1'],
                returned_1=grouped_reading['max_returned_1'],
                delivered_2=grouped_reading['max_delivered_2'],
                returned_2=grouped_reading['max_returned_2'],
                currently_delivered=grouped_reading['avg_delivered'],
                currently_returned=grouped_reading['avg_returned'],
            )

    # Gas is optional.
    if dsmr_reading.extra_device_timestamp and dsmr_reading.extra_device_delivered:
        # Gas however is only read (or updated) once every hour, so we should check for any duplicates
        # as they will exist at some point.
        passed_hour_start = dsmr_reading.extra_device_timestamp - timezone.timedelta(hours=1)

        if not GasConsumption.objects.filter(read_at=passed_hour_start).exists():
            # DSMR does not expose current gas rate, so we have to calculate
            # it ourselves, relative to the previous gas consumption, if any.
            try:
                previous_gas_consumption = GasConsumption.objects.get(
                    # Compare to reading before, if any.
                    read_at=passed_hour_start - timezone.timedelta(hours=1)
                )
            except GasConsumption.DoesNotExist:
                gas_diff = 0
            else:
                gas_diff = dsmr_reading.extra_device_delivered - previous_gas_consumption.delivered

            GasConsumption.objects.create(
                # Gas consumption is aligned to start of the hour.
                read_at=passed_hour_start,
                delivered=dsmr_reading.extra_device_delivered,
                currently_delivered=gas_diff
            )

    dsmr_reading.processed = True
    dsmr_reading.save(update_fields=['processed'])
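
Worth noting about the grouping-by-minute branch above: instantaneous fields (the
"currently_delivered"/"currently_returned" values) are averaged across the minute, while the
cumulative meter counters (delivered_1, delivered_2, and so on) take their maximum. A
plain-Python sketch with made-up values:

readings_in_minute = [
    {'electricity_currently_delivered': 0.25, 'electricity_delivered_1': 1000.25},
    {'electricity_currently_delivered': 0.75, 'electricity_delivered_1': 1000.50},
]

# Instantaneous usage is averaged; the cumulative counter keeps its highest value within the minute.
avg_delivered = sum(r['electricity_currently_delivered'] for r in readings_in_minute) / len(readings_in_minute)
max_delivered_1 = max(r['electricity_delivered_1'] for r in readings_in_minute)
print(avg_delivered, max_delivered_1)  # 0.5 1000.5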