def save(jsonPacket):
    """Parse a JSON-encoded packet and persist it as a Packet row.

    Args:
        jsonPacket: JSON string whose top-level keys map directly onto the
            Packet model's columns.

    The 'date' field is parsed with dateutil; every other field is copied
    verbatim (missing keys become None).
    """
    data = json.loads(jsonPacket)
    # Columns copied straight from the JSON document into the model.
    fields = (
        'topic', 'data_collector_id', 'organization_id', 'gateway', 'tmst',
        'chan', 'rfch', 'freq', 'stat', 'modu', 'datr', 'codr', 'lsnr',
        'rssi', 'size', 'data', 'm_type', 'major', 'mic', 'join_eui',
        'dev_eui', 'dev_nonce', 'dev_addr', 'adr', 'ack', 'adr_ack_req',
        'f_pending', 'class_b', 'f_count', 'f_opts', 'f_port', 'error',
        'latitude', 'longitude', 'altitude', 'app_name', 'dev_name',
    )
    kwargs = {name: data.get(name) for name in fields}
    # Guard the date parse: dp.parse(None) raises TypeError, so a packet
    # without a 'date' key previously crashed before anything was saved.
    raw_date = data.get('date')
    kwargs['date'] = dp.parse(raw_date) if raw_date is not None else None
    new_packet = Packet(**kwargs)
    new_packet.save_to_db()
def processData():
    """Process pending packets in batches with the enabled analysis modules.

    Relies on module-level state: ``options`` (CLI arguments), the
    ``analyze``, ``bruteforce`` and ``parsedata`` flags, and
    ``PACKETS_BATCH``. Per-module progress is tracked in the RowProcessed
    table so a later run resumes where the previous one stopped. Runs until
    ``options.to_id`` is reached, or forever (polling) when no limit is set.
    """
    # Collect, per enabled module, the id of the last packet it processed.
    starting_rows = list()
    if analyze:
        analyzer_row = RowProcessed.find_one_by_analyzer("packet_analyzer")
        starting_rows.append(analyzer_row.last_row)
    if bruteforce:
        bruteforcer_row = RowProcessed.find_one_by_analyzer("bruteforcer")
        starting_rows.append(bruteforcer_row.last_row)
    if not starting_rows:
        # No module selected: nothing to do. Previously this crashed with an
        # IndexError on starting_rows[0].
        logging.warning("No analysis module enabled. Nothing to process")
        return
    # Lowest last-processed id across modules, then jump to the next one.
    first_pending_id = min(starting_rows) + 1
    # If the user provided the start id, do some checks
    start_packet_id = None
    if options.from_id is not None:
        start_packet_id = options.from_id
        if start_packet_id > first_pending_id:
            print(
                "Warning! You are jumping over packets that weren't processed. Last packets ID processed: "
            )
            if bruteforce:
                print("Bruteforcer: %d." % (bruteforcer_row.last_row))
            if analyze:
                print("Analyzer: %d." % (analyzer_row.last_row))
        elif start_packet_id < first_pending_id:
            print(
                "Warning! You will process twice some packets and duplicate information in DB. Last packets ID processed: "
            )
            if bruteforce:
                print("Bruteforcer: %d." % (bruteforcer_row.last_row))
            if analyze:
                print("Analyzer: %d." % (analyzer_row.last_row))
    else:
        start_packet_id = first_pending_id

    # Start processing in batches
    keep_iterating = True
    while keep_iterating:
        session_packets = None
        # Select the quantity of packets to process according to
        # PACKETS_BATCH and the limit that the user may have provided.
        if options.to_id is None:
            # NOTE(review): requires a full extra batch (2 * PACKETS_BATCH)
            # to be present before processing -- presumably to avoid racing
            # the collector on the newest rows; confirm intent.
            if (start_packet_id + 2 * PACKETS_BATCH) <= Packet.rows_quantity():
                session_packets = Packet.find_all_from(start_packet_id,
                                                       PACKETS_BATCH)
                start_packet_id += PACKETS_BATCH
            else:
                logging.debug("No more packets to process. Sleeping a while")
                time.sleep(20)
                continue
        else:
            if (start_packet_id + PACKETS_BATCH) <= options.to_id:
                if (start_packet_id + PACKETS_BATCH) <= Packet.rows_quantity():
                    session_packets = Packet.find_all_from(
                        start_packet_id, PACKETS_BATCH)
                    start_packet_id += PACKETS_BATCH
                else:
                    logging.debug(
                        "No more packets to process. Sleeping a while")
                    time.sleep(20)
                    continue
            else:
                # Last (possibly partial) batch, up to and including to_id.
                session_packets = Packet.find_all_from(
                    start_packet_id, options.to_id - start_packet_id + 1)
                # NOTE(review): this adds (to_id % PACKETS_BATCH) rather than
                # the number of packets fetched; harmless since the loop
                # terminates here, but looks unintentional -- confirm.
                start_packet_id += (options.to_id % PACKETS_BATCH)
                keep_iterating = False
        if session_packets is not None:
            for packet in session_packets:
                logging.debug("Using packet: %d" % (packet.id))
                # Skip packets from /{dev_eui}/up topic?
                try:
                    # If the starting packet wasn't given, only hand the
                    # packet to a module when it is newer than that module's
                    # last processed row (the parser doesn't modify the DB).
                    if options.from_id is None:
                        if bruteforce and bruteforcer_row.last_row < packet.id:
                            LafBruteforcer.bruteForce(packet)
                            bruteforcer_row.last_row = packet.id
                        if analyze and analyzer_row.last_row < packet.id:
                            LafPacketAnalysis.processPacket(packet)
                            analyzer_row.last_row = packet.id
                    # If the starting packet was given by the user, process
                    # unconditionally but never move the watermark backwards.
                    else:
                        if bruteforce:
                            LafBruteforcer.bruteForce(packet)
                            if bruteforcer_row.last_row < packet.id:
                                bruteforcer_row.last_row = packet.id
                        if analyze:
                            LafPacketAnalysis.processPacket(packet)
                            if analyzer_row.last_row < packet.id:
                                analyzer_row.last_row = packet.id
                except Exception as e:
                    logging.error(
                        "Error processing packet {0}. Exception: {1}".format(
                            packet.id, e))
                    rollback()
                if parsedata:
                    LafPrinter.printPacket(packet)
        # Commit objects in DB before starting with the next batch
        try:
            commit()
        except Exception as exc:
            logging.error(
                "Error trying to commit after batch processing finish: {0}"
                .format(exc))
def find_all(from_id, size):
    """Return a batch of packets beginning at packet id *from_id*.

    Thin convenience wrapper around ``Packet.find_all_from``; *size* caps
    how many rows come back.
    """
    batch = Packet.find_all_from(from_id, size)
    return batch
def save(jsonPacket):
    """Persist a collector report: an optional packet plus its raw messages.

    Args:
        jsonPacket: JSON string with an optional 'packet' object (mapped onto
            the Packet model) and a 'messages' list (each entry saved as a
            CollectorMessage, linked to the packet when one was present).

    Raises:
        Exception: when the report carries no messages at all.
    """
    # Parse the JSON into a dict
    data = json.loads(jsonPacket)

    # If a packet was received, persist it
    new_packet = None
    packet_dict = data.get('packet')
    if packet_dict:
        # Columns copied straight from the 'packet' object into the model.
        fields = (
            'topic', 'data_collector_id', 'organization_id', 'gateway',
            'tmst', 'chan', 'rfch', 'freq', 'stat', 'modu', 'datr', 'codr',
            'lsnr', 'rssi', 'size', 'data', 'm_type', 'major', 'mic',
            'join_eui', 'dev_eui', 'dev_nonce', 'dev_addr', 'adr', 'ack',
            'adr_ack_req', 'f_pending', 'class_b', 'f_count', 'f_opts',
            'f_port', 'error', 'latitude', 'longitude', 'altitude',
            'app_name', 'dev_name',
        )
        kwargs = {name: packet_dict.get(name) for name in fields}
        # Guard the date parse: dp.parse(None) raises TypeError when the
        # 'date' key is missing.
        raw_date = packet_dict.get('date')
        kwargs['date'] = dp.parse(raw_date) if raw_date is not None else None
        new_packet = Packet(**kwargs)
        new_packet.save_to_db()

    # Save the message/s. A missing 'messages' key previously crashed with a
    # TypeError while iterating None; treat it as an empty list so the
    # intended "no messages" exception is raised instead.
    messages = data.get('messages') or []
    if len(messages) == 0:
        raise Exception(
            "No messages received for packet {0}".format(jsonPacket))
    for message in messages:
        collector_message = CollectorMessage(
            data_collector_id=message.get('data_collector_id'),
            message=message.get('message'),
            topic=message.get('topic'))
        # In case a packet was instantiated, relate it with the message
        if new_packet:
            collector_message.packet_id = new_packet.id
        collector_message.save()
def processPacket(packet):
    """Analyze one LoRaWAN packet and update the tracking tables.

    Dispatches on ``packet.m_type``:
      * JoinRequest  -- track the Device, detect repeated DevNonce (LAF-001).
      * JoinAccept   -- credit the join to the matching Device, either by
                        dev_eui or by the previous JoinRequest seen on the
                        same data collector.
      * *Data Up/Down -- track the DeviceSession, detect counter resets
                        (LAF-006), repeated counters / possible replay
                        (LAF-007) and a DevAddr switching between devices
                        (LAF-002).

    Uses the module-level ``last_uplink_mic`` cache to skip retransmissions
    (same counter, same MIC). Objects are mutated in place; the caller is
    responsible for committing the session.
    """
    # Resolve (or create) the Gateway the packet came through, if reported.
    gw_obj = None
    if packet.gateway is not None:
        gw_obj = Gateway.find_one_by_gw_hex_id_and_organization_id(
            packet.gateway, packet.organization_id)
        if gw_obj is None:
            try:
                gw_obj = Gateway(gw_hex_id=packet.gateway,
                                 location_latitude=packet.latitude,
                                 location_longitude=packet.longitude,
                                 data_collector_id=packet.data_collector_id,
                                 organization_id=packet.organization_id)
                gw_obj.save()
            except Exception as exc:
                logging.error("Error trying to save Gateway: {0}".format(exc))
        gw_obj = updateLocation(gw_obj, packet)

    if packet.m_type == "JoinRequest":
        # Find the device by (dev_eui, join_eui); fall back to a device that
        # has no join_eui recorded yet.
        device_obj = Device.find_one_by_dev_eui_and_join_eui_and_datacollector_id(
            packet.dev_eui, packet.join_eui, packet.data_collector_id)
        if device_obj is None:
            device_obj = Device.find_one_by_dev_eui_and_join_eui_and_datacollector_id(
                packet.dev_eui, None, packet.data_collector_id)
            if device_obj is None:
                try:
                    device_obj = Device(
                        dev_eui=packet.dev_eui,
                        join_eui=packet.join_eui,
                        organization_id=packet.organization_id,
                    )
                    device_obj.save()
                except Exception as exc:
                    # Fixed copy/paste: this message used to say "Gateway"
                    # although a Device is being saved here.
                    logging.error(
                        "Error trying to save Device from a JoinRequest: {0}".
                        format(exc))
            else:
                # Add the JoinEUI
                device_obj.join_eui = packet.join_eui
        # Associate Device with a Gateway
        if gw_obj is not None:
            try:
                gateway_device_obj = GatewayToDevice.find_one_by_gateway_id_and_device_id(
                    gw_obj.id, device_obj.id)
                if gateway_device_obj is None:
                    gateway_device_obj = GatewayToDevice(
                        gateway_id=gw_obj.id, device_id=device_obj.id)
                    gateway_device_obj.save()
            except Exception as exc:
                logging.error(
                    "Error trying to save GatewayToDevice: {0}".format(exc))
        else:
            # If we don't receive the gateway in the packet,
            # get the gateway associated to the device if any.
            # If we have more than 1 gateway associated to the device, this
            # method returns None
            gw_obj = Gateway.find_only_one_gateway_by_device_id(device_obj.id)
        # Associate Device with the DataCollector if not previously existing
        device_data_collector_obj = DataCollectorToDevice.find_one_by_data_collector_id_and_device_id(
            packet.data_collector_id, device_obj.id)
        if device_data_collector_obj is None:
            try:
                device_data_collector_obj = DataCollectorToDevice(
                    data_collector_id=packet.data_collector_id,
                    device_id=device_obj.id)
                device_data_collector_obj.save()
            except Exception as exc:
                logging.error(
                    "Error trying to save DataCollectorToDevice from a JoinRequest: {0}"
                    .format(exc))
        # Check if DevNonce is repeated and save it
        prev_packet_id = DevNonce.saveIfNotExists(packet.dev_nonce,
                                                  device_obj.id, packet.id)
        if prev_packet_id and (device_obj.has_joined
                               or device_obj.join_inferred):
            # Repeated DevNonce after a join: possible join replay (LAF-001).
            device_obj.repeated_dev_nonce = True
            parameters = {}
            parameters["dev_eui"] = device_obj.dev_eui
            parameters["dev_nonce"] = packet.dev_nonce
            parameters["prev_packet_id"] = prev_packet_id
            parameters['packet_date'] = packet.date.strftime(
                '%Y-%m-%d %H:%M:%S')
            if gw_obj:
                parameters["gateway"] = gw_obj.gw_hex_id
            else:
                # TODO(review): "Unkwown" is misspelled; kept byte-identical
                # in case alert consumers match on the exact string.
                parameters["gateway"] = "Unkwown"
            try:
                alert = Alert(type="LAF-001",
                              created_at=datetime.datetime.now(),
                              packet_id=packet.id,
                              device_id=device_obj.id,
                              parameters=json.dumps(parameters),
                              data_collector_id=packet.data_collector_id)
                alert.save()
                ReportAlert.print_alert(alert)
            except Exception as exc:
                logging.error(
                    "Error trying to save Alert LAF-001: {0}".format(exc))
        elif not (prev_packet_id):
            # Fresh DevNonce: the device is (re)starting its join handshake.
            device_obj.has_joined = False
            device_obj.join_inferred = False
        device_obj.join_request_counter += 1
        device_obj.is_otaa = True
        # Save the first time it was seen
        if device_obj.first_up_timestamp is None:
            device_obj.first_up_timestamp = packet.date
        # Save the last time it was seen
        device_obj.last_up_timestamp = packet.date
        device_obj.last_packet_id = packet.id

    elif packet.m_type == "JoinAccept":
        # If the packet has a devEUI, increment the JoinAccept counter in
        # the Device
        if packet.dev_eui is not None:
            devices = Device.find(packet.dev_eui, packet.data_collector_id)
            # Device with dev_eui exists and it's unique
            if len(devices) == 1:
                devices[0].join_accept_counter += 1
                devices[0].has_joined = True
            # Device with dev_eui doesn't exist. We must create the object
            # (although we haven't got the join_eui)
            elif len(devices) == 0:
                try:
                    device_obj = Device(dev_eui=packet.dev_eui,
                                        organization_id=packet.organization_id,
                                        has_joined=True,
                                        join_accept_counter=1)
                    device_obj.save()
                except Exception as exc:
                    logging.error(
                        "Error trying to save Device from a JoinAccept: {0}".
                        format(exc))
                try:
                    # Associate Device with the DataCollector
                    device_data_collector_obj = DataCollectorToDevice(
                        data_collector_id=packet.data_collector_id,
                        device_id=device_obj.id)
                    device_data_collector_obj.save()
                except Exception as exc:
                    logging.error(
                        "Error trying to save DataCollectorToDevice from a JoinAccept: {0}"
                        .format(exc))
            # We have more than one device. If the last packet received in
            # that datacollector is a JoinReq, get the Device
            elif len(devices) > 1:
                last_packet = Packet.find_previous_by_data_collector_and_dev_eui(
                    packet.date, packet.data_collector_id, packet.dev_eui)
                if last_packet is not None and last_packet.m_type == "JoinRequest":
                    device_obj = Device.find_one_by_dev_eui_and_join_eui_and_datacollector_id(
                        last_packet.dev_eui, last_packet.join_eui,
                        packet.data_collector_id)
                    if device_obj is not None:
                        device_obj.join_accept_counter += 1
                        device_obj.has_joined = True
                else:
                    logging.warning(
                        "Warning! Received a JoinAccept for a dev_eui shared by at least two devices"
                    )
        # If we don't know the deveui, check if the last packet received in
        # that datacollector is a JoinReq
        else:
            last_packet = Packet.find_previous_by_data_collector_and_dev_eui(
                packet.date, packet.data_collector_id, None)
            if last_packet is not None and last_packet.m_type == "JoinRequest":
                device_obj = Device.find_one_by_dev_eui_and_join_eui_and_datacollector_id(
                    last_packet.dev_eui, last_packet.join_eui,
                    packet.data_collector_id)
                if device_obj is not None:
                    device_obj.join_accept_counter += 1
                    device_obj.join_inferred = True

    # Case DataPacket
    elif packet.m_type == "UnconfirmedDataUp" or packet.m_type == "UnconfirmedDataDown" or packet.m_type == "ConfirmedDataUp" or packet.m_type == "ConfirmedDataDown":
        dev_ses_obj = DeviceSession.find_one_by_dev_addr_and_datacollector_id(
            packet.dev_addr, packet.data_collector_id)
        if dev_ses_obj is None:
            try:
                dev_ses_obj = DeviceSession(
                    dev_addr=packet.dev_addr,
                    organization_id=packet.organization_id,
                    is_confirmed=(packet.m_type == "ConfirmedDataUp"
                                  or packet.m_type == "ConfirmedDataDown"))
                dev_ses_obj.save()
            except Exception as exc:
                logging.error(
                    "Error trying to save DeviceSession: {0}".format(exc))
        # In case we received the dev_eui, check if we have the Device in
        # the database
        device_obj = None
        if packet.dev_eui is not None:
            devices = Device.find(packet.dev_eui, packet.data_collector_id)
            # Device with dev_eui exists and it's unique
            if len(devices) == 1:
                device_obj = devices[0]
            # Device with dev_eui doesn't exist. We must create the object
            # (although we haven't got the join_eui)
            elif len(devices) == 0:
                try:
                    device_obj = Device(dev_eui=packet.dev_eui,
                                        organization_id=packet.organization_id)
                    device_obj.save()
                except Exception as exc:
                    logging.error(
                        "Error trying to save Device from a Data packet: {0}".
                        format(exc))
                # Associate Device with the DataCollector
                try:
                    device_data_collector_obj = DataCollectorToDevice(
                        data_collector_id=packet.data_collector_id,
                        device_id=device_obj.id)
                    device_data_collector_obj.save()
                except Exception as exc:
                    logging.error(
                        "Error trying to save DataCollectorToDevice from a Data packet: {0}"
                        .format(exc))
            # We have more than one device. We can't do anything.
            elif len(devices) > 1:
                logging.warning(
                    "Warning! Received a DataPacket for a dev_eui shared by at least two devices"
                )
        # Associate DeviceSession with a Gateway
        if gw_obj is not None:
            gateway_device_session_obj = GatewayToDeviceSession.find_one_by_gateway_id_and_device_session_id(
                gw_obj.id, dev_ses_obj.id)
            if gateway_device_session_obj is None:
                try:
                    gateway_device_session_obj = GatewayToDeviceSession(
                        gateway_id=gw_obj.id,
                        device_session_id=dev_ses_obj.id)
                    gateway_device_session_obj.save()
                except Exception as exc:
                    logging.error(
                        "Error trying to save GatewayToDeviceSession: {0}".
                        format(exc))
        else:
            # If we don't receive the gateway in the packet,
            # get the gateway associated to the device_session if any.
            # If we have more than 1 gateway associated to the
            # device_session, this method returns None
            gw_obj = Gateway.find_only_one_gateway_by_device_session_id(
                dev_ses_obj.id)
        # Associate DeviceSession with the DataCollector
        device_session_data_collector_obj = DataCollectorToDeviceSession.find_one_by_data_collector_id_and_device_session_id(
            packet.data_collector_id, dev_ses_obj.id)
        if device_session_data_collector_obj is None:
            try:
                device_session_data_collector_obj = DataCollectorToDeviceSession(
                    data_collector_id=packet.data_collector_id,
                    device_session_id=dev_ses_obj.id)
                device_session_data_collector_obj.save()
            except Exception as exc:
                logging.error(
                    "Error trying to save DataCollectorToDeviceSession: {0}".
                    format(exc))
        is_uplink_packet = (packet.m_type == "UnconfirmedDataUp"
                            or packet.m_type == "ConfirmedDataUp")
        if is_uplink_packet:
            # Check counter
            if packet.f_count == 0:
                # Make sure we have processed at least one packet for this
                # device in this run before firing the alarm
                if dev_ses_obj.id in last_uplink_mic:
                    # Skip if received the same counter as previous packet
                    # and mics are equal (a retransmission)
                    if not (packet.f_count == dev_ses_obj.getCounter(
                            is_uplink_packet) and
                            last_uplink_mic[dev_ses_obj.id] == packet.mic):
                        if device_obj is not None and device_obj.has_joined:
                            # The counter = 0 is valid, then change the
                            # has_joined flag
                            device_obj.has_joined = False
                        elif device_obj is not None and device_obj.join_inferred:
                            # The counter = 0 is valid, then change the
                            # join_inferred flag
                            device_obj.join_inferred = False
                        else:
                            # Counter reset without a preceding join: LAF-006
                            parameters = {}
                            parameters["dev_addr"] = dev_ses_obj.dev_addr
                            parameters["counter"] = dev_ses_obj.getCounter(
                                is_uplink_packet)
                            parameters["new_counter"] = packet.f_count
                            parameters[
                                "prev_packet_id"] = dev_ses_obj.last_packet_id
                            parameters['packet_date'] = packet.date.strftime(
                                '%Y-%m-%d %H:%M:%S')
                            if device_obj:
                                parameters['dev_eui'] = device_obj.dev_eui
                            else:
                                parameters['dev_eui'] = 'Unkwown'
                            if gw_obj:
                                parameters["gateway"] = gw_obj.gw_hex_id
                            else:
                                parameters["gateway"] = "Unkwown"
                            try:
                                alert = Alert(
                                    type="LAF-006",
                                    created_at=datetime.datetime.now(),
                                    packet_id=packet.id,
                                    device_session_id=dev_ses_obj.id,
                                    parameters=json.dumps(parameters),
                                    data_collector_id=packet.data_collector_id)
                                alert.save()
                                ReportAlert.print_alert(alert)
                            except Exception as exc:
                                logging.error(
                                    "Error trying to save Alert LAF-006: {0}".
                                    format(exc))
                        if device_obj is not None:
                            if not device_obj.is_otaa:
                                # Session restarts without OTAA join: likely
                                # an ABP device.
                                dev_ses_obj.may_be_abp = True
                            else:
                                logging.warning(
                                    "Warning! The device is marked as OTAA but reset counter without having joined. Packet id %d"
                                    % (packet.id))
                        dev_ses_obj.reset_counter += 1
            elif packet.f_count <= dev_ses_obj.getCounter(is_uplink_packet):
                # Non-increasing counter: possible replay (LAF-007).
                # Make sure we have processed at least one packet for this
                # device in this run before firing the alarm
                if dev_ses_obj.id in last_uplink_mic:
                    # Skip if received the same counter as previous packet
                    # and mics are equal (a retransmission)
                    if not (packet.f_count == dev_ses_obj.getCounter(
                            is_uplink_packet) and
                            last_uplink_mic[dev_ses_obj.id] == packet.mic):
                        parameters = {}
                        parameters["dev_addr"] = dev_ses_obj.dev_addr
                        parameters["counter"] = dev_ses_obj.getCounter(
                            is_uplink_packet)
                        parameters["new_counter"] = packet.f_count
                        parameters[
                            "prev_packet_id"] = dev_ses_obj.last_packet_id
                        parameters['packet_date'] = packet.date.strftime(
                            '%Y-%m-%d %H:%M:%S')
                        if device_obj:
                            parameters['dev_eui'] = device_obj.dev_eui
                        else:
                            parameters['dev_eui'] = 'Unkwown'
                        if gw_obj:
                            parameters["gateway"] = gw_obj.gw_hex_id
                        else:
                            parameters["gateway"] = "Unkwown"
                        try:
                            alert = Alert(
                                type="LAF-007",
                                created_at=datetime.datetime.now(),
                                packet_id=packet.id,
                                device_session_id=dev_ses_obj.id,
                                parameters=json.dumps(parameters),
                                data_collector_id=packet.data_collector_id)
                            alert.save()
                            ReportAlert.print_alert(alert)
                        except Exception as exc:
                            logging.error(
                                "Error trying to save Alert LAF-007: {0}".
                                format(exc))
        # Update the counter
        dev_ses_obj.setCounter(packet.f_count, is_uplink_packet)
        # Update total packet count
        dev_ses_obj.incrementPacketCounter(is_uplink_packet)
        # Keep track of the window time the DevAddr was on
        dev_ses_obj.updateUptime(packet.date, is_uplink_packet)
        if is_uplink_packet:
            # Save uplink MIC
            last_uplink_mic[dev_ses_obj.id] = packet.mic
        if device_obj is not None:
            # Check if this DeviceSession hadn't previously a Device
            if dev_ses_obj.device_id is not None and device_obj.id != dev_ses_obj.device_id:
                # DevAddr now claimed by a different device: LAF-002.
                conflict_device_obj = Device.find_one(dev_ses_obj.device_id)
                parameters = {}
                # Fixed: these two assignments previously had trailing
                # commas, which stored 1-element tuples (serialized as JSON
                # lists) instead of the plain dev_eui strings.
                parameters["dev_eui"] = conflict_device_obj.dev_eui
                parameters["new_dev_eui"] = device_obj.dev_eui
                parameters["dev_addr"] = dev_ses_obj.dev_addr
                parameters["prev_packet_id"] = dev_ses_obj.last_packet_id
                parameters['packet_date'] = packet.date.strftime(
                    '%Y-%m-%d %H:%M:%S')
                if gw_obj:
                    parameters["gateway"] = gw_obj.gw_hex_id
                else:
                    parameters["gateway"] = "Unkwown"
                try:
                    alert = Alert(type="LAF-002",
                                  created_at=datetime.datetime.now(),
                                  packet_id=packet.id,
                                  device_id=device_obj.id,
                                  device_session_id=dev_ses_obj.id,
                                  parameters=json.dumps(parameters),
                                  data_collector_id=packet.data_collector_id)
                    alert.save()
                    ReportAlert.print_alert(alert)
                except Exception as exc:
                    logging.error(
                        "Error trying to save Alert LAF-002: {0}".format(exc))
                # device_obj.has_joined = False
            # Associate Device with DeviceSession
            dev_ses_obj.device_id = device_obj.id
        # Set the last packet id received for this device
        dev_ses_obj.setLastPacketId(is_uplink_packet, packet.id)