Example #1
0
def parse_mcf88(hex_str, port=None):
    """
    Parse an MCF88 payload such as
    "0462651527da078e4d8e01a4691527dd078f488e01676d1527e9078d1a8e015d"
    into a list of datalines with temperature, humidity and pressure.
    :param hex_str: MCF88 hex payload
    :param port: LoRaWAN port
    :return: list of datalines, or None when the payload type is not '04'
    """
    if not hex_str.startswith('04'):
        return None
    # Strip the type byte; the body holds three 10-byte (20 hex char) records
    body = hex_str[2:62]
    chunk_len = 20
    records = [body[pos:pos + chunk_len] for pos in range(0, len(body), chunk_len)]
    datalines = []
    for record in records:
        raw = bytes.fromhex(record)
        ts = get_timestamp(raw)  # datetime in UTC (timezone aware)
        measurement = {
            'temp': struct.unpack('<h', bytes.fromhex(record[8:12]))[0] / 100,  # °C
            'humi': int(record[12:14], 16) / 2,
            'pres': struct.unpack('<I', bytes.fromhex(record[14:20] + '00'))[0] / 100  # hPa
        }
        datalines.append(create_dataline(ts, measurement))
    return datalines
def parse_ruuvistation_data(data):
    """
    Parse a Ruuvi Station JSON upload into gateway data and per-tag datalines.
    :param data: JSON object
    :return: dict of parsed Ruuvitag values
    """
    # Container for all tags
    # TODO: add gateway data here too
    parsed_data = {
        'ruuvitags': [],
    }
    gw_fields = {
        'batteryLevel': data['batteryLevel'],
        'latitude': data['location']['latitude'],
        'longitude': data['location']['longitude'],
        'accuracy': data['location']['accuracy'],
    }
    gw_dataline = create_dataline(parse(data['time']), gw_fields)
    parsed_data['gateway'] = {
        'devid': data['deviceId'],
        'datalines': [gw_dataline]
    }
    wanted_fields = [
        'accelX', 'accelY', 'accelZ', 'humidity', 'measurementSequenceNumber',
        'movementCounter', 'pressure', 'rssi', 'temperature', 'voltage'
    ]
    for tag in data['tags']:
        devid = tag.pop('id').upper()
        timestamp = parse(tag.pop('updateAt'))
        # Keep only the known numeric fields, coerced to float
        tag_fields = {name: float(tag[name]) for name in wanted_fields}
        parsed_data['ruuvitags'].append({
            'devid': devid,
            'datalines': [create_dataline(timestamp, tag_fields)]
        })
    return parsed_data
Example #3
0
def parse_sentilo_data(data):
    """
    Extract two data objects from source data:
    - 1 minute average
    - 60 values for every seconds
    :param data: JSON object
    :return: dict of parsed dBA values, one for 1 min average and 60 for every second.
    """
    # Create object for both 1 min and 1 sec data
    parsed_data = {
        'LAeq': {'datalines': []},
        'LAeq1s': {'datalines': []},
    }
    for item in data['sensors']:
        devid = item['sensor'][0:-2]
        parsed_data['LAeq']['devid'] = devid
        parsed_data['LAeq1s']['devid'] = devid
        obs = item['observations'][0]  # hoisted: first observation used throughout
        ts_str = obs.get('timestamp')
        if ts_str is not None:
            # Timestamps arrive as day-first strings
            timestamp = parse(ts_str, dayfirst=True)
        else:
            # Fall back to current server time when the payload has no timestamp.
            # NOTE(review): utcnow() is naive while parse() above yields aware
            # datetimes — confirm downstream tolerates the mix.
            timestamp = datetime.datetime.utcnow()
            # Use the module logger instead of print() for consistency with
            # the rest of the file's error reporting.
            logger.warning(timestamp.strftime("%Y-%m-%dT%H:%M:%S.%fZ data without timestamp!"))
        if item['sensor'].endswith('N'):  # 1 min average
            fields = {'dBA': float(obs['value'])}
            dataline = create_dataline(timestamp, fields)
            parsed_data['LAeq']['datalines'].append(dataline)
        if item['sensor'].endswith('S'):  # 60 x 1 s values
            secvals = obs['value'].split(';')
            # Newest value first, so each step back is one second earlier
            secvals.reverse()
            for offset, val in enumerate(secvals):
                fields = {'dBA': float(val.split(',')[0])}
                new_ts = timestamp - datetime.timedelta(seconds=offset)
                dataline = create_dataline(new_ts, fields)
                parsed_data['LAeq1s']['datalines'].append(dataline)
    return parsed_data
def parse_ruuvicounter_data(data, port, serialised_request):
    """
    Parse a Ruuvi counter LoRaWAN message into gateway and per-tag datalines.
    :param data: JSON object
    :return: dict of parsed Ruuvitag values
    """
    # Gateway and tag data come from the decoded LoRaWAN uplink
    lora = json.loads(serialised_request['request.body'].decode("utf-8"))
    counter = parse_ruuvicounter(data, port)
    timestamp = parse(lora['DevEUI_uplink']['Time']).astimezone(pytz.UTC)
    gateway_line = create_dataline(timestamp, counter['gateway'])
    parsed_data = {
        'gateway': {
            'devid': serialised_request['devid'],
            'datalines': [gateway_line]
        },
        'ruuvicounter': {'datalines': []},
    }
    for tag in counter['tags']:
        mac = tag.pop('mac')
        # Record the tag's MAC as an extra tag rather than a data field
        line = create_dataline(timestamp,
                               tag,
                               extra={'extratags': {
                                   'mac': mac
                               }})
        parsed_data['ruuvicounter']['datalines'].append(line)
    return parsed_data
Example #5
0
def parse_thingpark_request(serialised_request, data):
    """
    Parse one Thingpark DevEUI_uplink request, decode its payload and
    forward the resulting datalines to the exchange.
    :param serialised_request: serialised Django request dict
    :param data: JSON body of the request
    :return: True (the request is always considered handled)
    """
    d = data['DevEUI_uplink']
    devid = d['DevEUI']
    port = d['FPort']
    datalogger, created = get_datalogger(devid=devid, update_activity=False)
    timestamp = parse(d['Time']).astimezone(pytz.UTC)
    payload_hex = d['payload_hex']
    rssi = d['LrrRSSI']
    # TODO: This may fail, so prepare to handle exception properly
    # Test it by configuring wrong decoder for some Datalogger
    try:
        payload = decode_payload(datalogger, payload_hex, port, serialised_request=serialised_request)
    except ValueError as err:
        decoder = get_datalogger_decoder(datalogger)  # hoisted: used twice below
        err_msg = f'Failed to parse "{payload_hex}" using "{decoder}" for "{devid}": {err}'
        logger.warning(err_msg)
        serialised_request['parse_fail'] = {
            'error_message': str(err),
            'decoder': decoder
        }
        save_parse_fail_datalogger_message(devid, data_pack(serialised_request))
        return True
    # Use the module logger (not the root logger) for consistency
    logger.debug(payload)

    if not payload:
        return True
    # Some sensors may already return a dict of dicts of datalines,
    # keyed by measurement name.
    if isinstance(payload, dict) and isinstance(next(iter(payload.values())), dict):
        for measurement, parsed in payload.items():
            datalines = parsed['datalines']
            if datalines:
                datalines[-1]['data']['rssi'] = float(rssi)  # Add rssi value to the latest dataline
                send_to_exchange(devid, datalogger, datalines, override_measurement=measurement)
    else:  # Some sensors may already return a list of datalines
        if isinstance(payload, list):
            datalines = payload  # Use payload as datalines (which already have timestamps)
        else:
            dataline = create_dataline(timestamp, payload)  # Create dataline from LoRaWAN timestamp and payload
            datalines = [dataline]
        datalines[-1]['data']['rssi'] = float(rssi)  # Add rssi value to the latest dataline
        send_to_exchange(devid, datalogger, datalines)
    return True
Example #6
0
 def handle_request(self, request):
     """
     Parse an incoming JSON request, resolve its Datalogger from the key
     and store the parsed dataline into InfluxDB.
     """
     serialised_request = serialize_django_request(request)
     ok, body = decode_json_body(serialised_request['request.body'])
     if ok is False:
         return HttpResponse(f'JSON ERROR: {body}',
                             status=400,
                             content_type='text/plain')
     key = body.get('key', '')
     key_parts = key.split('/')
     if len(key_parts) != 3:
         return HttpResponse(
             f'Key error: key "{key}" is not correctly formed',
             status=400,
             content_type='text/plain')
     # Device id is the first two segments of the key
     devid = '/'.join(key_parts[:2])
     datalogger, created = get_datalogger(devid=devid,
                                          update_activity=False,
                                          create=False)
     if datalogger is None:
         return HttpResponse(f'Datalogger "{devid}" does not exist',
                             status=400,
                             content_type='text/plain')
     data = body['data']
     # Flatten the nested location object into the data fields
     data.update(data.pop('location'))
     # Source timestamp is epoch milliseconds
     timestamp = epoch2datetime(data.pop('timestamp') / 1000)
     datalines = [create_dataline(timestamp, data)]
     parsed_data = create_parsed_data_message(devid, datalines)
     config = datalogger_get_config(datalogger, parsed_data)
     db_name = config.get('influxdb_database')
     measurement_name = config.get('influxdb_measurement')
     if db_name is None or measurement_name is None:
         return HttpResponse(
             f'InfluxDB database and measurement are not defined for Datalogger "{devid}"',
             status=400,
             content_type='text/plain')
     save_parsed_data2influxdb(db_name, measurement_name, parsed_data)
     return HttpResponse('OK', content_type='text/plain')
Example #7
0
def parse_everynet_request(serialised_request, body):
    """
    Parse an Everynet uplink request, decode its payload and publish the
    parsed data message to RabbitMQ.
    :param serialised_request: serialised Django request dict (currently unused)
    :param body: JSON body of the Everynet request
    :return: True
    """
    # FIXME: currently this parses only payload_hex from PAXCOUNTER. MUST check BKS too and others
    # TODO: create utility function, which extracts all interesting fields (needed here) out from request data
    devid = body['meta'].get('device', 'unknown')
    payload_base64 = body['params']['payload']
    payload_hex = binascii.hexlify(base64.b64decode(payload_base64))
    port = body['params']['port']
    rssi = body['params']['radio']['hardware']['rssi']
    # BUGFIX: utcfromtimestamp() returns a *naive* datetime, and calling
    # astimezone() on a naive value interprets it as local time — shifting
    # the timestamp on any host not running in UTC. rx_time is a Unix epoch,
    # so attach UTC directly.
    timestamp = datetime.datetime.fromtimestamp(body['params']['rx_time'], tz=pytz.UTC)
    # TODO: this may fail if database is offline
    datalogger, created = get_datalogger(devid=devid, update_activity=False)
    payload = decode_payload(datalogger, payload_hex, port)
    payload['rssi'] = rssi

    # RabbitMQ part
    key = create_routing_key('everynet', devid)
    dataline = create_dataline(timestamp, payload)
    datalines = [dataline]
    message = create_parsed_data_message(devid, datalines=datalines)
    packed_message = data_pack(message)
    send_message(settings.PARSED_DATA_EXCHANGE, key, packed_message)
    return True