Example 1
    def _delTrack(self, trackId, count=False):
        """! Deletes a the given trackId from the 'BeeTracker' and checks whether this track
        corresponds to a bee that entered or left the hive.
        @param trackId    The trackId of the track to delete
        @param count      Whether to count the bee or not
        """
        track = self.tracks[trackId]
        if count:
            _dh = getStatistics()

            # Y-Position of first detection
            f_y = track.first_position[1]

            # Y-Position of last detection
        l_y = track.trace[-1][1]

            # Half of pane height
            pH = int(self._frame_height / 2)

            # Moved in
            if f_y > pH and l_y <= pH:
                _dh.addBeeIn()

            # Moved out
            if f_y < pH and l_y >= pH:
                _dh.addBeeOut()

        del self.tracks[trackId]
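
The in/out decision above only compares the first and the last Y position of a track against half of the frame height. A minimal standalone sketch of the same check, assuming the Y axis grows downwards and the pane is split at half height (classify_crossing is a hypothetical helper name, not part of the tracker):

def classify_crossing(first_y, last_y, frame_height):
    """Return "in", "out" or None for a track's first and last Y position."""
    half = int(frame_height / 2)
    if first_y > half and last_y <= half:
        return "in"    # started below the middle, ended above it
    if first_y < half and last_y >= half:
        return "out"   # started above the middle, ended below it
    return None        # the track never crossed the middle

# Example: a track starting at y=400 and ending at y=100 in a 540 px high frame
# crosses upwards and would be counted as a bee moving in.
assert classify_crossing(400, 100, 540) == "in"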
Example 2
    def addTag(self, tag):
        """! Add a tag the to track. Tag could be on of "wasps", "varroa",
             "cooling", "pollen"
        @param tag      The tag to add
        """
        if tag not in self.__tagCnts:
            self.__tagCnts[tag] = 0
        self.__tagCnts[tag] += 1

        # Bees cooling the hive stay at the same position for a long time
        # and will pass the classification network multiple times.
        # To harden the detection, wait for at least 5 detections
        if tag == "cooling" and self.__tagCnts["cooling"] < 5:
            return

        # Report to statistics
        if tag not in self.reported_tags:
            _dh = getStatistics()
            _dh.addClassificationResultByTag(self.trackId, tag)

        # Add the tag
        self.tags.add(tag)
        self.reported_tags.add(tag)
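
The "cooling" special case is a simple debounce: the tag is counted on every call but only acted on once at least five detections have accumulated, and every tag is reported to the statistics at most once. A minimal sketch of that pattern in isolation, assuming a plain counter dict and a report callback (add_tag and report_tag are hypothetical stand-ins, not the track's API):

tag_counts = {}
reported = set()

def add_tag(tag, report_tag, min_cooling_hits=5):
    """Count the tag; report it once, requiring five hits for "cooling"."""
    tag_counts[tag] = tag_counts.get(tag, 0) + 1
    if tag == "cooling" and tag_counts["cooling"] < min_cooling_hits:
        return              # not enough evidence yet
    if tag not in reported:
        report_tag(tag)     # each tag is reported only once
        reported.add(tag)

# "cooling" is reported on the fifth detection, "pollen" immediately.
for _ in range(5):
    add_tag("cooling", print)
add_tag("pollen", print)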
Example 3
    def run(self: Thread) -> None:
        """! The main thread that runs the 'ImageConsumer'
        """
        _process_time = 0
        _process_cnt = 0
        writer = None

        # Create a Bee Tracker
        tracker = BeeTracker(50, 20)

        # Create statistics object
        statistics = getStatistics()

        if self._imageQueue is None:
            raise ValueError("No image queue provided!")

        while not self.stopped:

            _start_t = time.time()

            # When the neural network is enabled, read results from the classification queue
            # and forward them to the corresponding track and statistics
            if get_config("NN_ENABLE"):
                if _process_cnt % 100 == 0:
                    logger.debug("Process time(q): %0.3fms" %
                                 ((time.time() - _start_t) * 1000.0))

                # Populate classification results
                while not self._classifierResultQueue.empty():
                    if _process_cnt % 100 == 0:
                        logger.debug("Process time(nn): %0.3fms" %
                                     ((time.time() - _start_t) * 1000.0))

                    # Transfer results to the track
                    trackId, result, image = self._classifierResultQueue.get()
                    track = tracker.getTrackById(trackId)
                    if track is not None:
                        track.imageClassificationComplete(result)
                    else:
                        statistics.addClassificationResult(trackId, result)

            # Process every incoming image
            if not self._imageQueue.empty():
                _process_cnt += 1

                if _process_cnt % 100 == 0:
                    logger.debug("Process time(get): %0.3fms" %
                                 ((time.time() - _start_t) * 1000.0))

                # Get frame set
                fs = self._imageQueue.get()
                if get_config("NN_EXTRACT_RESOLUTION") == "EXT_RES_150x300":
                    img_1080, img_540, img_180 = fs
                elif get_config("NN_EXTRACT_RESOLUTION") == "EXT_RES_75x150":
                    img_540, img_180 = fs

                if _process_cnt % 100 == 0:
                    logger.debug("Process time(detec): %0.3fms" %
                                 ((time.time() - _start_t) * 1000.0))

                # Detect bees on smallest frame
                detected_bees, detected_bee_groups = detect_bees(img_180, 3)

                if _process_cnt % 100 == 0:
                    logger.debug("Process time(track): %0.3fms" %
                                 ((time.time() - _start_t) * 1000.0))

                # Update tracker with detected bees
                if get_config("ENABLE_TRACKING"):
                    tracker.update(detected_bees, detected_bee_groups)

                # Extract detected bee images from the video for use in the neural network
                # Scale is 2 because detection was made on img_540 but cutting is on img_1080
                if get_config("ENABLE_IMAGE_EXTRACTION"):
                    data = tracker.getLastBeePositions(
                        get_config("EXTRACT_FAME_STEP"))
                    if len(data) and self._extractQueue is not None:
                        if get_config(
                                "NN_EXTRACT_RESOLUTION") == "EXT_RES_150x300":
                            self._extractQueue.put((data, img_1080, 2))
                        elif get_config(
                                "NN_EXTRACT_RESOLUTION") == "EXT_RES_75x150":
                            self._extractQueue.put((data, img_540, 1))
                        else:
                            raise ValueError(
                                "Unknown setting for NN_EXTRACT_RESOLUTION, expected EXT_RES_150x300 or EXT_RES_75x150"
                            )

                if _process_cnt % 100 == 0:
                    logger.debug("Process time(print): %0.3fms" %
                                 ((time.time() - _start_t) * 1000.0))

                # Draw preview if wanted
                if not get_args().noPreview:

                    draw_on = img_540.copy()
                    if get_config("DRAW_DETECTED_ELLIPSES"):
                        for item in detected_bees:
                            cv2.ellipse(draw_on, item, (0, 0, 255), 2)
                    if get_config("DRAW_DETECTED_GROUPS"):
                        for item in detected_bee_groups:
                            cv2.ellipse(draw_on, item, (255, 0, 0), 2)

                    if get_config("DRAW_TRACKING_RESULTS"):
                        tracker.drawTracks(draw_on)

                    skipKey = 1 if get_config("FRAME_AUTO_PROCESS") else 0

                    cv2.imshow("frame", draw_on)
                    if cv2.waitKey(skipKey) & 0xFF == ord('q'):
                        break

                    # Save as Video
                    if get_config("SAVE_AS_VIDEO"):
                        if writer is None:
                            h, w, c = draw_on.shape
                            writer = cv2.VideoWriter(get_config("SAVE_AS_VIDEO_PATH"), \
                                    cv2.VideoWriter_fourcc(*'MJPG'), 18, (w, h))
                        writer.write(draw_on)

                # Log the average process time every 100 frames
                _process_time += time.time() - _start_t
                if _process_cnt % 100 == 0:
                    # _process_time accumulates seconds over 100 frames; * 10.0 converts to ms per frame
                    logger.debug("Process time: %0.3fms" %
                                 (_process_time * 10.0))
                    _process_time = 0

                # Limit FPS by delaying manually
                _end_t = time.time() - _start_t
                limit_time = 1 / get_config("LIMIT_FPS_TO")
                if _end_t < limit_time:
                    time.sleep(limit_time - _end_t)

                # Update statistics
                _dh = getStatistics()
                _dh.frameProcessed()

            else:
                time.sleep(0.1)

        self._done = True
        logger.info("Image Consumer stopped")
Example 4
    def run(self: Thread) -> None:
        """! Start the LoRaWAN thread.
        Sends the monitoring results every five minutes
        """

        # Ensure the transceiver is initialized
        try:
            self.initialize()
        except Exception as e:
            logger.error("Initialization failed: " + str(e))
            return

        fail_cnt = 0

        # Send every five minutes
        while not self.stopped:

            # Get current statistics
            _dh = getStatistics()
            (_wespenCount, _varroaCount, _pollenCount, _coolingCount, _beesIn,
             _beesOut, _frames) = _dh.readStatistics()

            # Reset statistics
            _dh.resetStatistics()

            # Prepare data
            data = tuple([
                _varroaCount, _pollenCount, _coolingCount, _wespenCount,
                _beesIn, _beesOut
            ])
            data_bin = struct.pack("hhhhhh", *data)

            # Convert the monitoring results into a transferable hex string
            data_bin_str = ""
            for item in data_bin:
                data_bin_str += "%02X" % (item, )
            logger.debug("Binary data: " + str(data_bin))
            logger.debug("String data: " + data_bin_str)

            # Send the LoRaWAN Telegram
            ret = self._sendCmd("mac tx uncnf 1 %s" % (data_bin_str, ))
            if ret == "ok":
                ret = self._read()
                if ret == "mac_tx_ok":
                    logger.info("Sending successful with: %s" % (ret, ))
                else:
                    logger.error("Sending failed with: %s" % (ret, ))

            elif ret in [
                    "not_joined", "silent", "frame_counter_err_rejoin_needed",
                    "mac_paused"
            ]:
                fail_cnt += 1
                logger.error("Sending failed with: %s" % (ret, ))
                self.initialize()
            else:
                fail_cnt += 1
                logger.error("Sending failed with: %s" % (ret, ))

            # Wait for five minutes, before sending the next results
            _start_t = time.time()
            while not self.stopped and (_start_t + (60 * 5) > time.time()):
                time.sleep(0.01)

        # Close the serial connection
        if self._ser is not None:
            self._ser.close()

        # Thread stopped
        self._done = True
        logger.info("LoRaWAN stopped")
Example 5
    def drawTracks(self, frame):
        """! Draw the current tracker status on the given frame.
        Draw tracks, names, ids, groups, ... depending on configuration
        @param  frame   The frame to draw on
        @return The resulting frame
        """

        # Draw tracks and detections
        for j in range(len(self.tracks)):

            # Only draw tracks that have more than one waypoint
            if len(self.tracks[j].trace) > 1:

                # Select a track color
                t_c = self.track_colors[self.tracks[j].trackId %
                                        len(self.track_colors)]

                # Draw marker that shows tracks underneath groups
                if get_config("DRAW_GROUP_MARKER") and self.tracks[j].in_group:
                    x = int(self.tracks[j].trace[-1][0])
                    y = int(self.tracks[j].trace[-1][1])
                    tl = (x - 30, y - 30)
                    br = (x + 30, y + 30)
                    cv2.rectangle(frame, tl, br, (0, 0, 0), 10)

                # Draw rectangle over last position
                if get_config("DRAW_RECTANGLE_OVER_LAST_POSTION"):
                    x = int(self.tracks[j].trace[-1][0])
                    y = int(self.tracks[j].trace[-1][1])
                    tl = (x - 10, y - 10)
                    br = (x + 10, y + 10)
                    cv2.rectangle(frame, tl, br, t_c, 1)

                # Draw trace
                if get_config("DRAW_TRACK_TRACE"):
                    for k in range(len(self.tracks[j].trace)):
                        x = int(self.tracks[j].trace[k][0])
                        y = int(self.tracks[j].trace[k][1])

                        if k > 0:
                            x2 = int(self.tracks[j].trace[k - 1][0])
                            y2 = int(self.tracks[j].trace[k - 1][1])
                            cv2.line(frame, (x, y), (x2, y2), t_c, 4)
                            cv2.line(frame, (x, y), (x2, y2), (0, 0, 0), 1)

                # Draw prediction
                if get_config("DRAW_TRACK_PREDICTION"):
                    x = int(self.tracks[j].last_predict[0])
                    y = int(self.tracks[j].last_predict[3])
                    cv2.circle(frame, (x, y), self.dist_threshold, (0, 0, 255),
                               1)

                # Draw velocity, acceleration
                if get_config("DRAW_ACCELERATION") or get_config(
                        "DRAW_VELOCITY"):
                    l_p = self.tracks[j].last_predict

                    l_px = int(l_p[0])
                    v_px = int(l_p[1]) * 10 + l_px
                    a_px = int(l_p[2]) * 10 + l_px
                    l_py = int(l_p[3])
                    v_py = int(l_p[4]) * 10 + l_py
                    a_py = int(l_p[5]) * 10 + l_py

                    if get_config("DRAW_VELOCITY"):
                        cv2.line(frame, (l_px, l_py), (v_px, v_py),
                                 (255, 255, 255), 4)
                        cv2.line(frame, (l_px, l_py), (v_px, v_py), t_c, 2)

                    if get_config("DRAW_ACCELERATION"):
                        cv2.line(frame, (l_px, l_py), (a_px, a_py),
                                 (255, 255, 255), 8)
                        cv2.line(frame, (l_px, l_py), (a_px, a_py), t_c, 6)

                # Last known position: anchor for the tag markers and the track id label
                x = int(self.tracks[j].trace[-1][0])
                y = int(self.tracks[j].trace[-1][1])
                if "varroa" in self.tracks[j].tags:
                    cv2.circle(frame, (x - 10, y - 50), 9, (0, 0, 255), -1)
                    cv2.circle(frame, (x - 10, y - 50), 10, (0, 0, 0), 2)
                if "pollen" in self.tracks[j].tags:
                    cv2.circle(frame, (x - 30, y - 50), 9, (255, 0, 0), -1)
                    cv2.circle(frame, (x - 30, y - 50), 10, (0, 0, 0), 2)
                if "cooling" in self.tracks[j].tags:
                    cv2.circle(frame, (x + 10, y - 50), 9, (0, 255, 0), -1)
                    cv2.circle(frame, (x + 10, y - 50), 10, (0, 0, 0), 2)
                if "wasps" in self.tracks[j].tags:
                    cv2.circle(frame, (x + 30, y - 50), 9, (0, 0, 0), -1)
                    cv2.circle(frame, (x + 30, y - 50), 10, (0, 0, 0), 2)

                # Add Track Id
                if get_config("DRAW_TRACK_ID"):
                    cv2.putText(frame, str(self.tracks[j].trackId) + " " + \
                            self.tracks[j]._name, (x,y-30),
                            cv2.FONT_HERSHEY_DUPLEX, 1, (255,255,255))
        # Draw count of bees
        if get_config("DRAW_IN_OUT_STATS"):
            _dh = getStatistics()
            bees_in, bees_out = _dh.getBeeCountOverall()
            cv2.putText(frame, "In: %i, Out: %i" % (bees_in, bees_out),
                        (50, 50), cv2.FONT_HERSHEY_SIMPLEX, 2, (0, 0, 0), 5)

        return frame
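
Track colors stay stable across frames because each track indexes a fixed palette with its id modulo the palette length. A minimal sketch of that selection, with an illustrative palette (the actual track_colors values live in the BeeTracker):

track_colors = [(255, 0, 0), (0, 255, 0), (0, 0, 255), (255, 255, 0)]

def color_for_track(track_id):
    """Map a track id to a palette entry so a given track always draws in the same color."""
    return track_colors[track_id % len(track_colors)]

# Track 5 with a four-color palette reuses the second entry.
assert color_for_track(5) == (0, 255, 0)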