Example #1
    def test_dsmr_version_3(self):
        """ Test connection parameters for DSMR v2/3. """
        datalogger_settings = DataloggerSettings.get_solo()
        datalogger_settings.dsmr_version = DataloggerSettings.DSMR_VERSION_3
        datalogger_settings.save()

        self.assertEqual(DataloggerSettings.get_solo().dsmr_version, DataloggerSettings.DSMR_VERSION_3)

        connection_parameters = dsmr_datalogger.services.get_dsmr_connection_parameters()
        self.assertEqual(connection_parameters['baudrate'], 9600)
        self.assertEqual(connection_parameters['bytesize'], serial.SEVENBITS)
        self.assertEqual(connection_parameters['parity'], serial.PARITY_EVEN)
Example #2
    def test_dsmr_version_4(self):
        """ Test connection parameters for DSMR v4. """
        self.assertEqual(DataloggerSettings.get_solo().dsmr_version, DataloggerSettings.DSMR_VERSION_4)

        connection_parameters = dsmr_datalogger.services.get_dsmr_connection_parameters()
        self.assertEqual(connection_parameters['baudrate'], 115200)
        self.assertEqual(connection_parameters['bytesize'], serial.EIGHTBITS)
        self.assertEqual(connection_parameters['parity'], serial.PARITY_NONE)
Example #3
    def test_tracking_disabled(self):
        """ Test whether datalogger can bij stopped by changing track setting. """
        datalogger_settings = DataloggerSettings.get_solo()
        datalogger_settings.track = False
        datalogger_settings.save()

        # Datalogger should crash with error.
        with self.assertRaisesMessage(CommandError, 'Datalogger tracking is DISABLED!'):
            self._intercept_command_stdout('dsmr_datalogger')
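The _intercept_command_stdout() helper used above comes from the project's test tooling and is not shown on this page. A plausible sketch, assuming it merely runs the management command via Django's call_command and returns the captured output (the project's own helper may differ):

from io import StringIO

from django.core.management import call_command

    def _intercept_command_stdout(self, command, **kwargs):
        """ Hypothetical sketch: run a management command and return whatever it wrote to stdout. """
        stdout = StringIO()
        call_command(command, stdout=stdout, **kwargs)
        stdout.seek(0)
        return stdout.read()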
Example #4
    def get_context_data(self, **kwargs):
        context_data = super(Configuration, self).get_context_data(**kwargs)
        context_data['api_settings'] = APISettings.get_solo()
        context_data['consumption_settings'] = ConsumptionSettings.get_solo()
        context_data['datalogger_settings'] = DataloggerSettings.get_solo()
        context_data['frontend_settings'] = FrontendSettings.get_solo()
        context_data['weather_settings'] = WeatherSettings.get_solo()
        context_data['backup_settings'] = BackupSettings.get_solo()
        context_data['dropbox_settings'] = DropboxSettings.get_solo()
        context_data['mindergas_settings'] = MinderGasSettings.get_solo()
        return context_data
Example #5
    def run(self, **options):
        """ InfiniteManagementCommandMixin listens to handle() and calls run() in a loop. """
        datalogger_settings = DataloggerSettings.get_solo()

        # This should only be disabled when performing huge migrations.
        if not datalogger_settings.track:
            raise CommandError("Datalogger tracking is DISABLED!")

        telegram = dsmr_datalogger.services.read_telegram()

        # Reflect output to STDOUT for logging and convenience.
        self.stdout.write(telegram)

        dsmr_datalogger.services.telegram_to_reading(data=telegram)
Example #6
def get_dsmr_connection_parameters():
    """ Returns the communication settings required for the DSMR version set. """
    DSMR_VERSION_MAPPING = {
        DataloggerSettings.DSMR_VERSION_3: {
            'baudrate': 9600,
            'bytesize': serial.SEVENBITS,
            'parity': serial.PARITY_EVEN,
        },
        DataloggerSettings.DSMR_VERSION_4: {
            'baudrate': 115200,
            'bytesize': serial.EIGHTBITS,
            'parity': serial.PARITY_NONE,
        },
    }

    datalogger_settings = DataloggerSettings.get_solo()
    connection_parameters = DSMR_VERSION_MAPPING[datalogger_settings.dsmr_version]
    connection_parameters['com_port'] = datalogger_settings.com_port
    return connection_parameters
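The dictionary returned above maps almost directly onto pyserial's Serial() constructor. A minimal usage sketch, assuming pyserial is installed; the project's actual read_telegram() service may open and read the port differently:

import serial

def open_dsmr_port():
    """ Illustrative only: open the serial port using the parameters resolved above. """
    connection_parameters = get_dsmr_connection_parameters()
    return serial.Serial(
        port=connection_parameters['com_port'],
        baudrate=connection_parameters['baudrate'],
        bytesize=connection_parameters['bytesize'],
        parity=connection_parameters['parity'],
        stopbits=serial.STOPBITS_ONE,  # Assumption, not part of the mapping above.
        timeout=20,  # Assumption: a generous read timeout.
    )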
Example #7
    def get_context_data(self, **kwargs):
        context_data = super(Statistics, self).get_context_data(**kwargs)
        context_data['capabilities'] = dsmr_backend.services.get_capabilities()

        try:
            context_data['latest_reading'] = DsmrReading.objects.all().order_by('-pk')[0]
        except IndexError:
            pass

        today = timezone.localtime(timezone.now()).date()
        context_data['datalogger_settings'] = DataloggerSettings.get_solo()
        context_data['meter_statistics'] = MeterStatistics.get_solo()

        try:
            context_data['energy_prices'] = EnergySupplierPrice.objects.by_date(today)
        except EnergySupplierPrice.DoesNotExist:
            pass

        # Usage statistics.
        context_data['slumber_consumption_watt'] = dsmr_consumption.services.calculate_slumber_consumption_watt()
        context_data['min_max_consumption_watt'] = dsmr_consumption.services.calculate_min_max_consumption_watt()

        return context_data
Example #8
    def setUp(self):
        datalogger_settings = DataloggerSettings.get_solo()
        datalogger_settings.dsmr_version = DataloggerSettings.DSMR_VERSION_2
        datalogger_settings.save()
Example #9
def telegram_to_reading(data):  # noqa: C901
    """
    Converts a P1 telegram to a DSMR reading, which will be stored in database.
    """
    READING_FIELDS = [x.name for x in DsmrReading._meta.get_fields() if x.name not in ('id', 'processed')]
    STATISTICS_FIELDS = [
        x.name for x in MeterStatistics._meta.get_fields() if x.name not in ('id', 'rejected_telegrams')
    ]

    # We will log the telegrams in base64 for convenience and debugging 'n stuff.
    base64_data = base64.b64encode(data.encode())
    datalogger_settings = DataloggerSettings.get_solo()

    # Only verify the telegram CRC when the DSMR version supports it and verification is enabled.
    connection_parameters = get_dsmr_connection_parameters()

    if connection_parameters['crc'] and datalogger_settings.verify_telegram_crc:
        try:
            # Verify telegram by checking its CRC.
            verify_telegram_checksum(data=data)
        except InvalidTelegramError as error:
            # Hook to keep track of failed readings count.
            MeterStatistics.objects.all().update(rejected_telegrams=F('rejected_telegrams') + 1)
            dsmrreader_logger.warning('Rejected telegram (base64 encoded): {}'.format(base64_data))
            dsmrreader_logger.exception(error)
            raise

    # Defaults all fields to NULL.
    parsed_reading = {k: None for k in READING_FIELDS + STATISTICS_FIELDS}
    field_splitter = re.compile(r'([^(]+)\((.+)\)')
    lines_read = data.split("\r\n")

    for index, current_line in enumerate(lines_read):
        result = field_splitter.search(current_line)

        if not result:
            continue

        code = result.group(1)

        # M-bus (0-n:24.1) cannot identify the type of device, see issue #92.
        if code in ('0-2:24.2.1', '0-3:24.2.1', '0-4:24.2.1'):
            code = '0-1:24.2.1'

        # DSMR 2.x emits gas readings in a different format.
        if code == '0-1:24.3.0':
            parsed_reading = _convert_legacy_dsmr_gas_line(
                parsed_reading, current_line, lines_read[index + 1]
            )
            continue

        try:
            field = DSMR_MAPPING[code]
        except KeyError:
            continue

        value = result.group(2)

        # Drop units, as the database does not care for them.
        value = value.replace('*kWh', '').replace('*kW', '').replace('*m3', '')

        # Extra device parameters are placed on a single line, meh.
        if code == "0-1:24.2.1":
            timestamp_value, gas_usage = value.split(")(")
            parsed_reading[field[0]] = reading_timestamp_to_datetime(string=timestamp_value)
            parsed_reading[field[1]] = gas_usage
        else:
            if field == "timestamp":
                value = reading_timestamp_to_datetime(string=value)

            parsed_reading[field] = value

    # Hack for DSMR 2.x legacy, which lacks timestamp info.
    if parsed_reading['timestamp'] is None:
        parsed_reading['timestamp'] = timezone.now()

    # For some reason, there are telegrams generated with a timestamp in the far future. We should disallow that.
    discard_timestamp = timezone.now() + timezone.timedelta(hours=24)

    if parsed_reading['timestamp'] > discard_timestamp or (
            parsed_reading['extra_device_timestamp'] is not None and
            parsed_reading['extra_device_timestamp'] > discard_timestamp):
        error_message = 'Discarded telegram with future timestamp: {}'.format(data)
        django_logger.error(error_message)
        raise InvalidTelegramError(error_message)

    # Optional tracking of phases, but since we already mapped this above, just remove it again... :]
    if not datalogger_settings.track_phases:
        parsed_reading.update({
            'phase_currently_delivered_l1': None,
            'phase_currently_delivered_l2': None,
            'phase_currently_delivered_l3': None,
            'phase_currently_returned_l1': None,
            'phase_currently_returned_l2': None,
            'phase_currently_returned_l3': None,
        })

    # Now split the parsed data into reading & statistics fields.
    reading_kwargs = {k: parsed_reading[k] for k in READING_FIELDS}
    statistics_kwargs = {k: parsed_reading[k] for k in STATISTICS_FIELDS}

    # Reading will be processed later.
    new_reading = DsmrReading.objects.create(**reading_kwargs)

    # There should already be one in the database, created when migrating.
    statistics_kwargs['latest_telegram'] = data
    MeterStatistics.objects.all().update(**statistics_kwargs)

    # Broadcast this telegram as signal.
    dsmr_datalogger.signals.raw_telegram.send_robust(sender=None, data=data)

    if settings.DSMRREADER_LOG_TELEGRAMS:
        dsmrreader_logger.info('Received telegram (base64 encoded): {}'.format(base64_data))

    return new_reading
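The verify_telegram_checksum() call above is defined elsewhere in the service. For context, a minimal sketch of DSMR 4+ checksum verification, assuming the standard CRC16/ARC (polynomial 0xA001, initial value 0) computed over the telegram from '/' up to and including '!', and assuming InvalidTelegramError is importable as used above; the project's own implementation may differ:

import re

def verify_telegram_checksum_sketch(data):
    """ Sketch only: compare the telegram's trailing hex CRC against a CRC16/ARC of its contents. """
    match = re.search(r'^(/[\s\S]+!)([A-F0-9]{4})', data)

    if not match:
        raise InvalidTelegramError('Telegram has no CRC')

    content, given_checksum = match.group(1), match.group(2)
    crc = 0

    for byte in content.encode():
        crc ^= byte

        for _ in range(8):
            crc = (crc >> 1) ^ 0xA001 if crc & 1 else crc >> 1

    if '{:04X}'.format(crc) != given_checksum:
        raise InvalidTelegramError('Telegram CRC mismatch')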
Example #10
    def get(self, request):
        data = {}
        data['capabilities'] = dsmr_backend.services.get_capabilities()

        form = DashboardGraphForm(request.GET)

        if not form.is_valid():
            return HttpResponseBadRequest(form.errors)

        # Optimize queries for large datasets by restricting the data to the last week in the first place.
        base_timestamp = timezone.now() - timezone.timedelta(days=7)

        electricity = ElectricityConsumption.objects.filter(read_at__gt=base_timestamp).order_by('-read_at')
        gas = GasConsumption.objects.filter(read_at__gt=base_timestamp).order_by('-read_at')
        temperature = TemperatureReading.objects.filter(read_at__gt=base_timestamp).order_by('-read_at')

        # Apply any offset requested by the user.
        electricity_offset = form.cleaned_data.get('electricity_offset')
        electricity = electricity[electricity_offset:electricity_offset + self.MAX_ITEMS]

        gas_offset = form.cleaned_data.get('gas_offset')
        gas = gas[gas_offset:gas_offset + self.MAX_ITEMS]

        temperature = temperature[:self.MAX_ITEMS]

        # Reverse all sets again.
        electricity = electricity[::-1]
        gas = gas[::-1]
        temperature = temperature[::-1]

        # By default we only display the time; scrolling enables a more verbose x-axis.
        graph_x_format_electricity = 'DSMR_GRAPH_SHORT_TIME_FORMAT'
        graph_x_format_gas = 'DSMR_GRAPH_SHORT_TIME_FORMAT'

        if electricity_offset > 0:
            graph_x_format_electricity = 'DSMR_GRAPH_LONG_TIME_FORMAT'

        if gas_offset > 0:
            graph_x_format_gas = 'DSMR_GRAPH_LONG_TIME_FORMAT'

        data['electricity_x'] = [
            formats.date_format(
                timezone.localtime(x.read_at), graph_x_format_electricity
            )
            for x in electricity
        ]
        data['electricity_y'] = [float(x.currently_delivered * 1000) for x in electricity]
        data['electricity_returned_y'] = [float(x.currently_returned * 1000) for x in electricity]

        data['gas_x'] = [
            formats.date_format(
                timezone.localtime(x.read_at), graph_x_format_gas
            ) for x in gas
        ]
        data['gas_y'] = [float(x.currently_delivered) for x in gas]

        # Some users have multiple phases installed.
        if DataloggerSettings.get_solo().track_phases and data['capabilities']['multi_phases']:
            data['phases_l1_y'] = self._parse_phases_data(electricity, 'phase_currently_delivered_l1')
            data['phases_l2_y'] = self._parse_phases_data(electricity, 'phase_currently_delivered_l2')
            data['phases_l3_y'] = self._parse_phases_data(electricity, 'phase_currently_delivered_l3')

        if WeatherSettings.get_solo().track:
            data['temperature_x'] = [
                formats.date_format(
                    timezone.localtime(x.read_at), 'DSMR_GRAPH_SHORT_TIME_FORMAT'
                )
                for x in temperature
            ]
            data['temperature_y'] = [float(x.degrees_celcius) for x in temperature]

        return HttpResponse(json.dumps(data), content_type='application/json')
Example #11
    def setUp(self):
        DataloggerSettings.get_solo()
        DataloggerSettings.objects.all().update(dsmr_version=DataloggerSettings.DSMR_BELGIUM_FLUVIUS)
Example #12
    def setUp(self):
        self.instance = DataloggerSettings().get_solo()
Example #13
    def setUp(self):
        DataloggerSettings.get_solo()
        DataloggerSettings.objects.all().update(
            dsmr_version=DataloggerSettings.DSMR_LUXEMBOURG_SMARTY)
Example #14
    def test_track_meter_statistics(self):
        datalogger_settings = DataloggerSettings.get_solo()
        datalogger_settings.track_meter_statistics = False
        datalogger_settings.save()

        fake_telegram = ''.join([
            "/XMX5LGBBFFB123456789\r\n",
            "\r\n",
            "1-3:0.2.8(40)\r\n",
            "0-0:1.0.0(151110192959W)\r\n",
            "0-0:96.1.1(xxxxxxxxxxxxx)\r\n",
            "1-0:1.8.1(000510.747*kWh)\r\n",
            "1-0:2.8.1(000000.123*kWh)\r\n",
            "1-0:1.8.2(000500.013*kWh)\r\n",
            "1-0:2.8.2(000123.456*kWh)\r\n",
            "0-0:96.14.0(0001)\r\n",
            "1-0:1.7.0(00.192*kW)\r\n",
            "1-0:2.7.0(00.123*kW)\r\n",
            "0-0:17.0.0(999.9*kW)\r\n",
            "0-0:96.3.10(1)\r\n",
            "0-0:96.7.21(00003)\r\n",
            "0-0:96.7.9(00000)\r\n",
            "1-0:99.97.0(0)(0-0:96.7.19)\r\n",
            "1-0:32.32.0(00002)\r\n",
            "1-0:52.32.0(00002)\r\n",
            "1-0:72.32.0(00000)\r\n",
            "1-0:32.36.0(00000)\r\n",
            "1-0:52.36.0(00000)\r\n",
            "1-0:72.36.0(00000)\r\n",
            "0-0:96.13.1()\r\n",
            "0-0:96.13.0()\r\n",
            "1-0:31.7.0(000*A)\r\n",
            "1-0:51.7.0(000*A)\r\n",
            "1-0:71.7.0(001*A)\r\n",
            "1-0:21.7.0(00.000*kW)\r\n",
            "1-0:41.7.0(00.000*kW)\r\n",
            "1-0:61.7.0(00.192*kW)\r\n",
            "1-0:22.7.0(00.000*kW)\r\n",
            "1-0:42.7.0(00.000*kW)\r\n",
            "1-0:62.7.0(00.000*kW)\r\n",
            "0-1:24.1.0(003)\r\n",
            "0-1:96.1.0(xxxxxxxxxxxxx)\r\n",
            "0-1:24.2.1(151110190000W)(00845.206*m3)\r\n",
            "0-1:24.4.0(1)\r\n",
            "!D19A\n",
        ])

        self.assertIsNone(MeterStatistics.get_solo().electricity_tariff)  # Empty model in DB.
        dsmr_datalogger.services.telegram_to_reading(data=fake_telegram)
        self.assertIsNone(MeterStatistics.get_solo().electricity_tariff)  # Unaffected

        # Try again, but now with tracking settings enabled.
        datalogger_settings = DataloggerSettings.get_solo()
        datalogger_settings.track_meter_statistics = True
        datalogger_settings.save()

        self.assertIsNone(MeterStatistics.get_solo().electricity_tariff)  # Empty model in DB.
        dsmr_datalogger.services.telegram_to_reading(data=fake_telegram)

        # Should be populated now.
        meter_statistics = MeterStatistics.get_solo()
        self.assertIsNotNone(meter_statistics.electricity_tariff)
        self.assertEqual(meter_statistics.electricity_tariff, 1)
        self.assertEqual(meter_statistics.power_failure_count, 3)
        self.assertEqual(meter_statistics.voltage_sag_count_l1, 2)
        self.assertEqual(meter_statistics.voltage_sag_count_l2, 2)
Example #15
def telegram_to_reading(data):
    """
    Converts a P1 telegram to a DSMR reading, which will be stored in database.
    """

    def _get_reading_fields():
        reading_fields = DsmrReading._meta.get_all_field_names()
        reading_fields.remove('id')
        reading_fields.remove('processed')
        return reading_fields

    def _get_statistics_fields():
        reading_fields = MeterStatistics._meta.get_all_field_names()
        reading_fields.remove('id')
        return reading_fields

    def _convert_legacy_dsmr_gas_line(parsed_reading, current_line, next_line):
        """ Legacy support for DSMR 2.x gas. """
        legacy_gas_line = current_line

        if next_line.startswith('('):
            legacy_gas_line = current_line + next_line

        legacy_gas_result = re.search(
            r'[^(]+\((\d+)\)\(\d+\)\(\d+\)\(\d+\)\([0-9-.:]+\)\(m3\)\(([0-9.]+)\)',
            legacy_gas_line
        )
        gas_timestamp = legacy_gas_result.group(1)

        if timezone.now().dst() != timezone.timedelta(0):
            gas_timestamp += 'S'
        else:
            gas_timestamp += 'W'

        parsed_reading['extra_device_timestamp'] = reading_timestamp_to_datetime(
            string=gas_timestamp
        )
        parsed_reading['extra_device_delivered'] = legacy_gas_result.group(2)
        return parsed_reading

    # Defaults all fields to NULL.
    parsed_reading = {k: None for k in _get_reading_fields() + _get_statistics_fields()}
    field_splitter = re.compile(r'([^(]+)\((.+)\)')
    lines_read = data.split("\r\n")

    for index, current_line in enumerate(lines_read):
        result = field_splitter.search(current_line)

        if not result:
            continue

        code = result.group(1)

        # M-bus (0-n:24.1) cannot identify the type of device, see issue #92.
        if code in ('0-2:24.2.1', '0-3:24.2.1', '0-4:24.2.1'):
            code = '0-1:24.2.1'

        # DSMR 2.x emits gas readings in a different format.
        if code == '0-1:24.3.0':
            parsed_reading = _convert_legacy_dsmr_gas_line(
                parsed_reading, current_line, lines_read[index + 1]
            )
            continue

        try:
            field = DSMR_MAPPING[code]
        except KeyError:
            continue

        value = result.group(2)

        # Drop units, as the database does not care for them.
        value = value.replace('*kWh', '').replace('*kW', '').replace('*m3', '')

        # Extra device parameters are placed on a single line, meh.
        if code == "0-1:24.2.1":
            timestamp_value, gas_usage = value.split(")(")
            parsed_reading[field[0]] = reading_timestamp_to_datetime(string=timestamp_value)
            parsed_reading[field[1]] = gas_usage
        else:
            if field == "timestamp":
                value = reading_timestamp_to_datetime(string=value)

            parsed_reading[field] = value

    # Hack for DSMR 2.x legacy, which lacks timestamp info.
    if parsed_reading['timestamp'] is None:
        parsed_reading['timestamp'] = timezone.now()

    # Now split the parsed data into reading & statistics fields.
    reading_kwargs = {k: parsed_reading[k] for k in _get_reading_fields()}
    new_reading = DsmrReading.objects.create(**reading_kwargs)

    # Optional feature.
    if DataloggerSettings.get_solo().track_meter_statistics:
        statistics_kwargs = {k: parsed_reading[k] for k in _get_statistics_fields()}
        # There should already be one in the database, created when migrating.
        MeterStatistics.objects.all().update(**statistics_kwargs)

    return new_reading
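For reference, the DSMR 2.x gas record handled by _convert_legacy_dsmr_gas_line() above spans two telegram lines; the cubic-meter value sits on the follow-up line, which is why the helper concatenates current_line and next_line. An illustrative shape only (values borrowed from the fake telegram in the test above, not real meter output):

current_line = "0-1:24.3.0(151110190000)(00)(60)(1)(0-1:24.2.1)(m3)"
next_line = "(00845.206)"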
Example #16
def _map_telegram_to_model(parsed_telegram, data):
    """ Maps parsed telegram to the fields. """
    READING_FIELDS = [
        x.name for x in DsmrReading._meta.get_fields()
        if x.name not in ('id', 'processed')
    ]
    STATISTICS_FIELDS = [
        x.name for x in MeterStatistics._meta.get_fields()
        if x.name not in ('id', 'rejected_telegrams', 'latest_telegram')
    ]

    model_fields = {k: None for k in READING_FIELDS + STATISTICS_FIELDS}
    mapping = _get_dsmrreader_mapping(
        DataloggerSettings.get_solo().dsmr_version)

    for obis_ref, obis_data in parsed_telegram.items():
        try:
            # Skip any fields we're not storing in our system.
            target_field = mapping[obis_ref]
        except KeyError:
            continue

        if isinstance(target_field, dict):
            model_fields[target_field['value']] = obis_data.value
            model_fields[target_field['datetime']] = obis_data.datetime
        else:
            model_fields[target_field] = obis_data.value

    # Defaults for telegrams with missing data.
    model_fields['timestamp'] = model_fields['timestamp'] or timezone.now()
    model_fields['electricity_delivered_2'] = model_fields['electricity_delivered_2'] or 0
    model_fields['electricity_returned_2'] = model_fields['electricity_returned_2'] or 0

    # For some reason, there are telegrams generated with a timestamp in the far future. We should disallow that.
    discard_timestamp = timezone.now() + timezone.timedelta(hours=24)

    if model_fields['timestamp'] > discard_timestamp or (
            model_fields['extra_device_timestamp'] is not None
            and model_fields['extra_device_timestamp'] > discard_timestamp):
        error_message = 'Discarded telegram with future timestamp(s): {} / {}'.format(
            model_fields['timestamp'], model_fields['extra_device_timestamp'])
        django_logger.error(error_message)
        raise InvalidTelegramError(error_message)

    # Now split the parsed data into reading & statistics fields.
    reading_kwargs = {k: model_fields[k] for k in READING_FIELDS}
    statistics_kwargs = {k: model_fields[k] for k in STATISTICS_FIELDS}

    # Reading will be processed later.
    new_instance = DsmrReading.objects.create(**reading_kwargs)

    # There should already be one in the database, created when migrating.
    statistics_kwargs['latest_telegram'] = data
    MeterStatistics.objects.all().update(**statistics_kwargs)

    # Broadcast this telegram as signal.
    dsmr_datalogger.signals.raw_telegram.send_robust(sender=None, data=data)

    return new_instance
Example #17
    def initialize(self):
        self.sleep_time = DataloggerSettings.get_solo().process_sleep
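The process_sleep value loaded above determines the polling interval. A simplified sketch of how a mixin like InfiniteManagementCommandMixin could use it, assuming it simply calls run() in a loop and sleeps in between (the project's actual mixin likely adds signal handling and error recovery):

import time

class InfiniteManagementCommandSketch:
    """ Sketch only: call run() forever, pausing sleep_time seconds between iterations. """
    sleep_time = 1

    def handle(self, **options):
        self.initialize()

        while True:
            self.run(**options)
            time.sleep(float(self.sleep_time))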