collector_obj.save() else: print('Datacollector IP and port must be provided if not provided a collector ID.') exit(0) collector = LoraServerIOCollector( data_collector_id=collector_obj.id, organization_id=collector_obj.organization_id, host=collector_obj.ip, port=int(collector_obj.port), ssl=None, user=None, password=None, topics=topics, last_seen=collector_obj.last_seen, connected=collector_obj.connected, verified=collector_obj.verified) collector.connect() while (True): time.sleep(5) try: commit() self.log.debug('Commit done!') except Exception as exc: self.log.error('Error at commit:' + str(exc)) self.log.info('Rolling back the session') rollback()
def processData():
    """Process stored packets in batches with the enabled analysis modules.

    Reads the last-processed row ids for each enabled module (analyzer /
    bruteforcer), picks a starting packet id (user-provided ``options.from_id``
    or the lowest pending id), then loops fetching batches of ``PACKETS_BATCH``
    packets, feeding each packet to the enabled modules and committing after
    every batch. Runs forever (polling every 20s) unless ``options.to_id``
    bounds the range.

    Relies on module-level state: ``options``, ``analyze``, ``bruteforce``,
    ``parsedata``, ``PACKETS_BATCH``, the ORM helpers ``commit``/``rollback``,
    and the ``RowProcessed``/``Packet``/``Laf*`` classes.
    """
    # Save the packet ids that have to be processed by the selected modules
    starting_rows = list()
    if analyze:
        analyzer_row = RowProcessed.find_one_by_analyzer("packet_analyzer")
        starting_rows.append(analyzer_row.last_row)
    if bruteforce:
        bruteforcer_row = RowProcessed.find_one_by_analyzer("bruteforcer")
        starting_rows.append(bruteforcer_row.last_row)

    # Get the lowest packet ID to be processed, then jump to the next one.
    # When no stateful module is enabled (parse-only run) there are no
    # checkpoint rows, so start from the first packet instead of raising
    # IndexError on an empty list (bug in the previous version).
    if starting_rows:
        first_pending_id = min(starting_rows) + 1
    else:
        first_pending_id = 1

    # If the user provided the start id, do some checks
    start_packet_id = None
    if options.from_id is not None:
        start_packet_id = options.from_id
        if start_packet_id > first_pending_id:
            print(
                "Warning! You are jumping over packets that weren't processed. Last packets ID processed: "
            )
            if bruteforce:
                print("Bruteforcer: %d." % (bruteforcer_row.last_row))
            if analyze:
                print("Analyzer: %d." % (analyzer_row.last_row))
        elif start_packet_id < first_pending_id:
            print(
                "Warning! You will process twice some packets and duplicate information in DB. Last packets ID processed: "
            )
            if bruteforce:
                print("Bruteforcer: %d." % (bruteforcer_row.last_row))
            if analyze:
                print("Analyzer: %d." % (analyzer_row.last_row))
    else:
        start_packet_id = first_pending_id

    # Start processing in batches
    keep_iterating = True
    while keep_iterating:
        session_packets = None

        # Select the quantity of packets to process according to
        # PACKETS_BATCH and the limit that the user may have provided.
        if options.to_id is None:
            # Unbounded run: only fetch when a full batch is safely behind
            # the table's tail; otherwise poll again after a pause.
            if (start_packet_id + 2 * PACKETS_BATCH) <= Packet.rows_quantity():
                session_packets = Packet.find_all_from(start_packet_id, PACKETS_BATCH)
                start_packet_id += PACKETS_BATCH
            else:
                logging.debug("No more packets to process. Sleeping a while")
                time.sleep(20)
                continue
        else:
            if (start_packet_id + PACKETS_BATCH) <= options.to_id:
                if (start_packet_id + PACKETS_BATCH) <= Packet.rows_quantity():
                    session_packets = Packet.find_all_from(
                        start_packet_id, PACKETS_BATCH)
                    start_packet_id += PACKETS_BATCH
                else:
                    logging.debug(
                        "No more packets to process. Sleeping a while")
                    time.sleep(20)
                    continue
            else:
                # Final (partial) batch up to and including options.to_id.
                session_packets = Packet.find_all_from(
                    start_packet_id, options.to_id - start_packet_id + 1)
                # Advance past the last processed id (the previous
                # `+= options.to_id % PACKETS_BATCH` was incorrect; the
                # value is only bookkeeping since the loop ends here).
                start_packet_id = options.to_id + 1
                keep_iterating = False

        if session_packets is not None:
            for packet in session_packets:
                logging.debug("Using packet: %d" % (packet.id))
                # Skip packets from /{dev_eui}/up topic?
                try:
                    # If the starting packet wasn't given, check if the packet
                    # wasn't processed by each analyzer (except for the parser,
                    # which doesn't modify the DB)
                    if options.from_id is None:
                        if bruteforce and bruteforcer_row.last_row < packet.id:
                            LafBruteforcer.bruteForce(packet)
                            bruteforcer_row.last_row = packet.id
                        if analyze and analyzer_row.last_row < packet.id:
                            LafPacketAnalysis.processPacket(packet)
                            analyzer_row.last_row = packet.id
                    # If the starting packet was given by the user,
                    # don't do any check
                    else:
                        if bruteforce:
                            LafBruteforcer.bruteForce(packet)
                            if bruteforcer_row.last_row < packet.id:
                                bruteforcer_row.last_row = packet.id
                        if analyze:
                            LafPacketAnalysis.processPacket(packet)
                            if analyzer_row.last_row < packet.id:
                                analyzer_row.last_row = packet.id
                except Exception as e:
                    # A failing module must not abort the whole run: log,
                    # roll back the session and move on to the next packet.
                    logging.error(
                        "Error processing packet {0}. Exception: {1}".format(
                            packet.id, e))
                    rollback()
                if parsedata:
                    LafPrinter.printPacket(packet)

        # Commit objects in DB before starting with the next batch
        try:
            commit()
        except Exception as exc:
            logging.error(
                "Error trying to commit after batch processing finish: {0}"
                .format(exc))