Code example #1
def verify_telegram_checksum(data):
    """
    Verifies a telegram by checking its CRC. Raises an exception on failure. The DSMR docs state:
    CRC is a CRC16 value calculated over the preceding characters in the data message (from "/" to "!" using the polynomial: x16+x15+x2+1).
    """
    matches = re.search(r'^(/[^!]+!)([A-Z0-9]{4})', data)

    try:
        content, crc = matches.groups()
    except AttributeError:
        # AttributeError: 'NoneType' object has no attribute 'groups'. This happens when the telegram has no CRC support.
        content = crc = None

    if not content or not crc:
        raise InvalidTelegramError('Content or CRC data not found')

    telegram = content.encode(
        'ascii'
    )  # TypeError: Unicode-objects must be encoded before calculating a CRC

    # DSMR docs: "The CRC value is represented as 4 hexadecimal characters (MSB first)". So just flip it back to int.
    telegram_checksum = int('0x{}'.format(crc),
                            0)  # For example: DD84 -> 0xDD84 -> 56708

    crc16_function = crcmod.predefined.mkPredefinedCrcFun('crc16')
    calculated_checksum = crc16_function(telegram)  # For example: 56708

    if telegram_checksum != calculated_checksum:
        raise InvalidTelegramError(
            'CRC mismatch: {} (telegram) != {} (calculated)'.format(
                telegram_checksum, calculated_checksum))
Code example #2
def telegram_to_reading(data: str) -> DsmrReading:
    """ Converts a P1 telegram to a DSMR reading, which will be stored in database. """
    params = get_dsmr_connection_parameters()
    parser = TelegramParser(params['specifications'])
    logger.debug("Received telegram:\n%s", data)

    try:
        parsed_telegram = parser.parse(data)
    except (InvalidChecksumError, ParseError) as error:
        # Hook to keep track of failed readings count.
        MeterStatistics.objects.all().update(
            rejected_telegrams=F('rejected_telegrams') + 1)
        logger.warning('Rejected telegram: %s', error)
        raise InvalidTelegramError(error) from error

    return _map_telegram_to_model(parsed_telegram=parsed_telegram, data=data)
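A minimal caller sketch (assuming the imports from this module are in place; store_telegram and raw_telegram are hypothetical names) showing how a rejected telegram might be handled:

def store_telegram(raw_telegram):
    """ raw_telegram is a hypothetical P1 telegram string read from the meter. """
    try:
        return telegram_to_reading(data=raw_telegram)
    except InvalidTelegramError:
        # The rejection was already counted and logged inside telegram_to_reading.
        return None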
Code example #3
def telegram_to_reading(data):
    """ Converts a P1 telegram to a DSMR reading, which will be stored in database. """
    params = get_dsmr_connection_parameters()
    parser = TelegramParser(params['specifications'])

    # We will log the telegrams in base64 for convenience and debugging.
    base64_data = base64.b64encode(data.encode())

    if settings.DSMRREADER_LOG_TELEGRAMS:
        dsmrreader_logger.info('Received telegram (base64 encoded): %s',
                               base64_data)

    try:
        parsed_telegram = parser.parse(data)
    except (InvalidChecksumError, ParseError) as error:
        # Hook to keep track of failed readings count.
        MeterStatistics.objects.all().update(
            rejected_telegrams=F('rejected_telegrams') + 1)
        dsmrreader_logger.warning(
            'Rejected telegram (%s) (base64 encoded): %s', error, base64_data)
        dsmrreader_logger.exception(error)
        raise InvalidTelegramError(error)

    return _map_telegram_to_model(parsed_telegram=parsed_telegram, data=data)
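A small standard-library sketch of the base64 round-trip used for the logging above (the telegram text is a placeholder):

import base64

data = '/EXAMPLE!1234\r\n'  # hypothetical telegram text
base64_data = base64.b64encode(data.encode())

# When debugging, decode the logged value to recover the original telegram.
original = base64.b64decode(base64_data).decode()
assert original == data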
Code example #4
def telegram_to_reading(data):  # noqa: C901
    """
    Converts a P1 telegram to a DSMR reading, which will be stored in the database.
    """
    READING_FIELDS = [
        x.name for x in DsmrReading._meta.get_fields()
        if x.name not in ('id', 'processed')
    ]
    STATISTICS_FIELDS = [
        x.name for x in MeterStatistics._meta.get_fields()
        if x.name not in ('id', 'rejected_telegrams')
    ]

    # We will log the telegrams in base64 for convenience and debugging.
    base64_data = base64.b64encode(data.encode())
    datalogger_settings = DataloggerSettings.get_solo()

    # Skip the CRC check when support for it is lacking, or when it has been disabled.
    connection_parameters = get_dsmr_connection_parameters()

    if connection_parameters['crc'] and datalogger_settings.verify_telegram_crc:
        try:
            # Verify the telegram by checking its CRC.
            verify_telegram_checksum(data=data)
        except InvalidTelegramError as error:
            # Hook to keep track of failed readings count.
            MeterStatistics.objects.all().update(
                rejected_telegrams=F('rejected_telegrams') + 1)
            dsmrreader_logger.warning(
                'Rejected telegram (base64 encoded): {}'.format(base64_data))
            dsmrreader_logger.exception(error)
            raise

    # Defaults all fields to NULL.
    parsed_reading = {k: None for k in READING_FIELDS + STATISTICS_FIELDS}
    field_splitter = re.compile(r'([^(]+)\((.+)\)')
    lines_read = data.split("\r\n")

    for index, current_line in enumerate(lines_read):
        result = field_splitter.search(current_line)

        if not result:
            continue

        code = result.group(1)

        # M-bus (0-n:24.1) cannot identify the type of device, see issue #92.
        if code in ('0-2:24.2.1', '0-3:24.2.1', '0-4:24.2.1'):
            code = '0-1:24.2.1'

        # DSMR 2.x emits gas readings in a different format.
        if code == '0-1:24.3.0':
            parsed_reading = _convert_legacy_dsmr_gas_line(
                parsed_reading, current_line, lines_read[index + 1])
            continue

        try:
            field = DSMR_MAPPING[code]
        except KeyError:
            continue

        value = result.group(2)

        # Drop units, as the database does not care for them.
        value = value.replace('*kWh', '').replace('*kW', '').replace('*m3', '')

        # Extra device parameters are placed on a single line.
        if code == "0-1:24.2.1":
            timestamp_value, gas_usage = value.split(")(")
            parsed_reading[field[0]] = reading_timestamp_to_datetime(
                string=timestamp_value)
            parsed_reading[field[1]] = gas_usage
        else:
            if field == "timestamp":
                value = reading_timestamp_to_datetime(string=value)

            parsed_reading[field] = value

    # Hack for legacy DSMR 2.x, which lacks timestamp info.
    if parsed_reading['timestamp'] is None:
        parsed_reading['timestamp'] = timezone.now()

    # Some meters generate telegrams with a timestamp in the far future. Disallow those.
    discard_timestamp = timezone.now() + timezone.timedelta(hours=24)

    if parsed_reading['timestamp'] > discard_timestamp or (
            parsed_reading['extra_device_timestamp'] is not None
            and parsed_reading['extra_device_timestamp'] > discard_timestamp):
        error_message = 'Discarded telegram with future timestamp: {}'.format(
            data)
        django_logger.error(error_message)
        raise InvalidTelegramError(error_message)

    # Phase tracking is optional; since the phase fields were already mapped above, clear them again when disabled.
    if not datalogger_settings.track_phases:
        parsed_reading.update({
            'phase_currently_delivered_l1': None,
            'phase_currently_delivered_l2': None,
            'phase_currently_delivered_l3': None,
        })

    # Split the parsed data into reading fields and statistics fields.
    reading_kwargs = {k: parsed_reading[k] for k in READING_FIELDS}
    statistics_kwargs = {k: parsed_reading[k] for k in STATISTICS_FIELDS}

    # Reading will be processed later.
    new_reading = DsmrReading.objects.create(**reading_kwargs)

    # There should already be one in the database, created when migrating.
    MeterStatistics.objects.all().update(**statistics_kwargs)

    dsmrreader_logger.info(
        'Received telegram (base64 encoded): {}'.format(base64_data))
    return new_reading
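A short sketch of how the field_splitter regex above splits a single telegram line into an OBIS code and its value (the line itself is a made-up example):

import re

field_splitter = re.compile(r'([^(]+)\((.+)\)')

current_line = '1-0:1.8.1(000123.456*kWh)'  # hypothetical electricity reading line
result = field_splitter.search(current_line)

code = result.group(1)   # '1-0:1.8.1'
value = result.group(2)  # '000123.456*kWh'

# Drop units, as the database does not care for them.
value = value.replace('*kWh', '').replace('*kW', '').replace('*m3', '')  # '000123.456'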
Code example #5
def _map_telegram_to_model(parsed_telegram: Dict, data: str):
    """ Maps parsed telegram to the fields. """
    READING_FIELDS = [
        x.name for x in DsmrReading._meta.get_fields()
        if x.name not in ('id', 'processed')
    ]
    STATISTICS_FIELDS = [
        x.name for x in MeterStatistics._meta.get_fields()
        if x.name not in ('id', 'rejected_telegrams', 'latest_telegram')
    ]

    datalogger_settings = DataloggerSettings.get_solo()
    model_fields = {k: None for k in READING_FIELDS + STATISTICS_FIELDS}
    mapping = _get_dsmrreader_mapping(datalogger_settings.dsmr_version)

    for obis_ref, obis_data in parsed_telegram.items():
        try:
            # Skip any fields we're not storing in our system.
            target_field = mapping[obis_ref]
        except KeyError:
            continue

        if isinstance(target_field, dict):
            model_fields[target_field['value']] = obis_data.value
            model_fields[target_field['datetime']] = obis_data.datetime
        else:
            model_fields[target_field] = obis_data.value

    # Defaults for telegrams with missing data.
    model_fields['timestamp'] = model_fields['timestamp'] or timezone.now()
    model_fields['electricity_delivered_2'] = model_fields[
        'electricity_delivered_2'] or 0  # type:ignore[assignment]
    model_fields['electricity_returned_2'] = model_fields[
        'electricity_returned_2'] or 0  # type:ignore[assignment]

    # Ignore invalid dates on the device bus and reset the delivered value as well. This MUST be checked before the override below.
    if model_fields['extra_device_timestamp'] is None:
        model_fields['extra_device_delivered'] = None

    # This optional setting fixes some rare situations where the smart meter's internal clock is incorrect.
    if datalogger_settings.override_telegram_timestamp:
        now = timezone.now()

        logger.debug(
            "WARNING: Overriding telegram timestamps due to configuration")
        model_fields['timestamp'] = now

        if model_fields['extra_device_timestamp'] is not None:
            # WARNING: A None version (v2, v3, Fluvius) defaults to v4 behaviour.
            is_v5 = model_fields['dsmr_version'] is not None and model_fields[
                'dsmr_version'].startswith('5')

            model_fields[
                'extra_device_timestamp'] = calculate_fake_gas_reading_timestamp(
                    now=now, is_dsmr_v5=is_v5)

    # Fix for rare smart meters with a timestamp in the far future. We should disallow that.
    discard_after = timezone.now() + timezone.timedelta(hours=24)

    if model_fields['timestamp'] > discard_after or (
            model_fields['extra_device_timestamp'] is not None
            and model_fields['extra_device_timestamp'] > discard_after):
        error_message = 'Discarded telegram with future timestamp(s): {} / {}'.format(
            model_fields['timestamp'], model_fields['extra_device_timestamp'])
        logger.error(error_message)
        raise InvalidTelegramError(error_message)

    # Split the parsed data into reading fields and statistics fields.
    reading_kwargs = {k: model_fields[k] for k in READING_FIELDS}
    statistics_kwargs = {k: model_fields[k] for k in STATISTICS_FIELDS}

    # Reading will be processed later.
    new_instance = DsmrReading.objects.create(**reading_kwargs)

    # There should already be one in the database, created when migrating.
    statistics_kwargs['latest_telegram'] = data  # type:ignore[assignment]
    MeterStatistics.get_solo().update(
        **statistics_kwargs)  # Update() is required for signal!

    # Creation should be completed, can now be broadcasted for post processing.
    dsmr_datalogger.signals.raw_telegram.send_robust(None, data=data)
    dsmr_datalogger.signals.dsmr_reading_created.send_robust(
        None, instance=new_instance)

    return new_instance
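A self-contained sketch of the two target_field shapes handled in the loop above (the mapping entries and OBIS data below are illustrative assumptions, not the project's actual mapping):

from collections import namedtuple

ObisData = namedtuple('ObisData', ['value', 'datetime'])

# A plain string maps a single value; a dict maps a value/datetime pair.
mapping = {
    '1-0:1.8.1': 'electricity_delivered_1',
    '0-1:24.2.1': {'value': 'extra_device_delivered', 'datetime': 'extra_device_timestamp'},
}

parsed_telegram = {
    '1-0:1.8.1': ObisData(value='000123.456', datetime=None),
    '0-1:24.2.1': ObisData(value='00456.789', datetime='2023-01-01T12:00:00+00:00'),
}

model_fields = {}

for obis_ref, obis_data in parsed_telegram.items():
    target_field = mapping[obis_ref]

    if isinstance(target_field, dict):
        model_fields[target_field['value']] = obis_data.value
        model_fields[target_field['datetime']] = obis_data.datetime
    else:
        model_fields[target_field] = obis_data.value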
Code example #6
def _map_telegram_to_model(parsed_telegram, data):
    """ Maps parsed telegram to the fields. """
    READING_FIELDS = [
        x.name for x in DsmrReading._meta.get_fields()
        if x.name not in ('id', 'processed')
    ]
    STATISTICS_FIELDS = [
        x.name for x in MeterStatistics._meta.get_fields()
        if x.name not in ('id', 'rejected_telegrams', 'latest_telegram')
    ]

    model_fields = {k: None for k in READING_FIELDS + STATISTICS_FIELDS}
    mapping = _get_dsmrreader_mapping(
        DataloggerSettings.get_solo().dsmr_version)

    for obis_ref, obis_data in parsed_telegram.items():
        try:
            # Skip any fields we're not storing in our system.
            target_field = mapping[obis_ref]
        except KeyError:
            continue

        if isinstance(target_field, dict):
            model_fields[target_field['value']] = obis_data.value
            model_fields[target_field['datetime']] = obis_data.datetime
        else:
            model_fields[target_field] = obis_data.value

    # Defaults for telegrams with missing data.
    model_fields['timestamp'] = model_fields['timestamp'] or timezone.now()
    model_fields['electricity_delivered_2'] = model_fields[
        'electricity_delivered_2'] or 0
    model_fields[
        'electricity_returned_2'] = model_fields['electricity_returned_2'] or 0

    # Hack for invalid dates on the device bus. Reset the delivered value as well.
    if model_fields['extra_device_timestamp'] is None:
        model_fields['extra_device_delivered'] = None

    # Some meters generate telegrams with a timestamp in the far future. Disallow those.
    discard_timestamp = timezone.now() + timezone.timedelta(hours=24)

    if model_fields['timestamp'] > discard_timestamp or (
            model_fields['extra_device_timestamp'] is not None
            and model_fields['extra_device_timestamp'] > discard_timestamp):
        error_message = 'Discarded telegram with future timestamp(s): {} / {}'.format(
            model_fields['timestamp'], model_fields['extra_device_timestamp'])
        logger.error(error_message)
        raise InvalidTelegramError(error_message)

    # Split the parsed data into reading fields and statistics fields.
    reading_kwargs = {k: model_fields[k] for k in READING_FIELDS}
    statistics_kwargs = {k: model_fields[k] for k in STATISTICS_FIELDS}

    # Reading will be processed later.
    new_instance = DsmrReading.objects.create(**reading_kwargs)

    # There should already be one in the database, created when migrating.
    statistics_kwargs['latest_telegram'] = data
    MeterStatistics.get_solo().update(
        **statistics_kwargs)  # Update() is required for signal!

    # Creation should be completed, can now be broadcasted for post processing.
    dsmr_datalogger.signals.raw_telegram.send_robust(None, data=data)
    dsmr_datalogger.signals.dsmr_reading_created.send_robust(
        None, instance=new_instance)

    return new_instance
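A tiny sketch of the final split of model_fields into reading and statistics kwargs (the field names below are an illustrative subset, not the full model definitions):

READING_FIELDS = ['timestamp', 'electricity_delivered_1']
STATISTICS_FIELDS = ['dsmr_version', 'power_failure_count']

model_fields = {
    'timestamp': '2023-01-01T12:00:00+00:00',
    'electricity_delivered_1': '000123.456',
    'dsmr_version': '50',
    'power_failure_count': '3',
}

# One dict feeds DsmrReading.objects.create(), the other updates MeterStatistics.
reading_kwargs = {k: model_fields[k] for k in READING_FIELDS}
statistics_kwargs = {k: model_fields[k] for k in STATISTICS_FIELDS}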