Example #1
	def __init__(self):
		""" Initializes the class. """

		self.Helpers = Helpers("Camera")

		# Initiates the iotJumpWay connection class
		self.iot = iot()
		self.iot.connect()

		# Starts the TassAI module
		self.TassAI = TassAI()
		# Loads the required models
		self.TassAI.load_models()
		# Loads known images
		self.TassAI.load_known()

		self.Helpers.logger.info("Camera API Class initialization complete.")
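The constructor above only wires the pieces together: the Helpers logger, the iotJumpWay connection, and the TassAI models and known faces. A minimal usage sketch follows, assuming the snippet belongs to a Camera class and that a run() loop like the ones in the later examples is the entry point (both names are assumptions, not shown in this example):

if __name__ == "__main__":
	# Runs the __init__ above: connects to iotJumpWay and loads the TassAI models
	camera = Camera()
	# Assumed entry point; Examples #2-#4 show typical run() implementations
	camera.run()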
Example #2
    def run(self):
        """ Runs the module. """

        fps = ""
        framecount = 0
        time1 = 0
        time2 = 0
        mesg = ""

        self.font = cv2.FONT_HERSHEY_SIMPLEX
        self.color = (0, 0, 0)

        # Starts the TassAI module
        self.TassAI = TassAI()
        # Connects to the camera
        self.TassAI.connect()
        # Loads the required models
        self.TassAI.load_models()
        # Loads known images
        self.TassAI.load_known()
        self.publishes = [None] * (len(self.TassAI.faces_database) + 1)

        # Connects to the socket server
        soc = self.Sockets.connect(self.Helpers.confs["Socket"]["host"],
                                   self.Helpers.confs["Socket"]["port"])

        while True:
            try:
                t1 = time.perf_counter()

                # Reads the current frame
                frame = self.TassAI.camera.get(0.65)

                width = frame.shape[1]
                # Processes the frame
                detections = self.TassAI.process(frame)

                # Writes header to frame
                cv2.putText(frame, "TassAI Camera", (10, 30), self.font, 0.5,
                            self.color, 1, cv2.LINE_AA)

                # Writes date to frame
                cv2.putText(frame, str(datetime.now()), (10, 50), self.font,
                            0.3, self.color, 1, cv2.LINE_AA)

                if len(detections):
                    for roi, landmarks, identity in zip(*detections):
                        frame, label = self.TassAI.draw_detection_roi(
                            frame, roi, identity)
                        #frame = self.TassAI.draw_detection_keypoints(frame, roi, landmarks)

                        if label == "Unknown":
                            label = 0
                            mesg = "TassAI identified intruder"
                        else:
                            mesg = "TassAI identified User #" + str(label)

                        # If the last iotJumpWay publish for this user is in the past
                        if (self.publishes[int(label)] is None
                                or (self.publishes[int(label)] +
                                    (1 * 120)) < time.time()):
                            # Update publish time for user
                            self.publishes[int(label)] = time.time()

                            # Send iotJumpWay notification
                            self.iot.channelPub(
                                "Sensors", {
                                    "Type": "TassAI",
                                    "Sensor": "USB Camera",
                                    "Value": label,
                                    "Message": mesg
                                })

                            # Send iotJumpWay notification
                            self.iot.channelPub(
                                "Cameras", {
                                    "Type": "TassAI",
                                    "Sensor": "USB Camera",
                                    "Value": label,
                                    "Message": mesg
                                })

                cv2.putText(frame, fps, (width - 120, 26),
                            cv2.FONT_HERSHEY_SIMPLEX, 0.3, self.color, 1,
                            cv2.LINE_AA)

                # Streams the modified frame to the socket server
                encoded, buffer = cv2.imencode('.jpg', frame)
                soc.send(base64.b64encode(buffer))

                # FPS calculation
                framecount += 1
                if framecount >= 15:
                    fps = "Stream: {:.1f} FPS".format(time1 / 15)
                    framecount = 0
                    time1 = 0
                    time2 = 0
                t2 = time.perf_counter()
                elapsedTime = t2 - t1
                time1 += 1 / elapsedTime
                time2 += elapsedTime

            except KeyboardInterrupt:
                self.TassAI.lcv.release()
                break
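The publish block in the loop above rate-limits iotJumpWay notifications so that each identity (slot 0 is used for intruders) is reported at most once every 120 seconds. The same pattern as a standalone sketch; the helper name is illustrative, and only the timing logic mirrors the snippet:

import time

PUBLISH_INTERVAL = 120  # seconds between notifications per identity

def should_publish(publishes, label):
    # Returns True, and records the time, if this identity has not been
    # published within the last PUBLISH_INTERVAL seconds
    last = publishes[int(label)]
    if last is None or last + PUBLISH_INTERVAL < time.time():
        publishes[int(label)] = time.time()
        return True
    return False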
Example #3
    def run(self):
        """ Runs the module. """

        self.identified = 0

        # Starts the TassAI module
        self.TassAI = TassAI()
        self.TassAI.cv()
        self.TassAI.ncs()

        # Connects to the socket server
        soc = self.Sockets.connect(self.Helpers.confs["Socket"]["host"],
                                   self.Helpers.confs["Socket"]["port"])

        fps = ""
        framecount = 0
        count = 0
        time1 = 0
        time2 = 0

        self.publishes = [None] * (len(self.TassAI.NCS1.encoded) + 1)

        with pyrs.Service() as serv:
            with serv.Device() as dev:

                dev.apply_ivcam_preset(0)

                while True:
                    t1 = time.perf_counter()

                    dev.wait_for_frames()
                    frame = dev.color
                    frame = cv2.cvtColor(frame, cv2.COLOR_RGB2BGR)

                    # Processes the frame
                    raw, frame = self.TassAI.NCS1.prepareImg(frame)
                    width = frame.shape[1]

                    # Gets faces and coordinates
                    faces, coords = self.TassAI.NCS1.faces(frame)

                    # Writes header to frame
                    cv2.putText(frame, "TassAI", (10, 30), self.TassAI.font,
                                0.7, self.TassAI.color, 2, cv2.LINE_AA)

                    # Writes date to frame
                    cv2.putText(frame, str(datetime.now()), (10, 50),
                                self.TassAI.font, 0.5, self.TassAI.color, 2,
                                cv2.LINE_AA)

                    if len(coords):
                        i = 0
                        mesg = ""
                        # Loops through coordinates
                        for (i, face) in enumerate(coords):

                            # Gets facial landmarks coordinates
                            coordsi = face_utils.shape_to_np(face)
                            # Looks for matches/intruders
                            known, distance = self.TassAI.NCS1.match(
                                raw, faces[i])

                            if known:
                                mesg = "TassAI identified User #" + str(known)
                            else:
                                mesg = "TassAI identified intruder"

                            # If the last iotJumpWay publish for this user is in the past
                            if (self.publishes[int(known)] is None
                                    or (self.publishes[int(known)] +
                                        (1 * 20)) < time.time()):
                                # Update publish time for user
                                self.publishes[int(known)] = time.time()

                                # Send iotJumpWay notification
                                self.iot.channelPub(
                                    "Sensors", {
                                        "Type": "TassAI",
                                        "Sensor": "F200 Camera",
                                        "Value": known,
                                        "Message": mesg
                                    })

                                # Send iotJumpWay notification
                                self.iot.channelPub(
                                    "Cameras", {
                                        "Type": "TassAI",
                                        "Sensor": "F200 Camera",
                                        "Value": known,
                                        "Message": mesg
                                    })

                            (x, y, w,
                             h) = self.TassAI.NCS1.bounding_box(faces[i])
                            cv2.rectangle(frame, (x, y), (x + w, y + h),
                                          (0, 255, 0), 1)

                            cx = int(round(x + (w / 2)))
                            cy = int(round(y + (h / 2)))

                            cv2.putText(frame, "User ID#" + str(known),
                                        (x, y - 5), cv2.FONT_HERSHEY_PLAIN, 1,
                                        (0, 255, 0), 1)

                            distance = dev.depth[cy][cx] / 1000.0
                            if (distance != 0.0):
                                cv2.putText(frame,
                                            str(distance) + "cm",
                                            (x + (w - 20), y - 5),
                                            cv2.FONT_HERSHEY_PLAIN, 1,
                                            (0, 255, 0), 1)

                            # Draws facial landmarks
                            for (x, y) in coordsi:
                                cv2.circle(frame, (x, y), 2, (0, 255, 0), -1)
                            # Advances the face index
                            i += 1

                    cv2.putText(frame, fps, (width - 170, 30),
                                cv2.FONT_HERSHEY_SIMPLEX, 0.5,
                                self.TassAI.color, 1, cv2.LINE_AA)

                    d = dev.depth * dev.depth_scale * 1000
                    d = cv2.applyColorMap(d.astype(np.uint8),
                                          cv2.COLORMAP_RAINBOW)

                    cd = np.concatenate((frame, d), axis=1)

                    # Streams the modified frame to the socket server
                    encoded, buffer = cv2.imencode('.jpg', cd)
                    soc.send(base64.b64encode(buffer))

                    # FPS calculation
                    framecount += 1
                    if framecount >= 15:
                        fps = "Stream: {:.1f} FPS".format(time1 / 15)
                        framecount = 0
                        time1 = 0
                        time2 = 0
                    t2 = time.perf_counter()
                    elapsedTime = t2 - t1
                    time1 += 1 / elapsedTime
                    time2 += elapsedTime
                    time.sleep(0.05)
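Every run() loop computes the stream FPS the same way: each frame's instantaneous rate (1 / elapsed time) is accumulated in time1 and averaged over 15 frames. A standalone sketch of that calculation, with a sleep standing in for the real capture and processing work:

import time

fps = ""
framecount = 0
time1 = 0

for _ in range(60):
    t1 = time.perf_counter()
    time.sleep(0.05)  # placeholder for frame capture and processing
    framecount += 1
    if framecount >= 15:
        fps = "Stream: {:.1f} FPS".format(time1 / 15)
        print(fps)
        framecount = 0
        time1 = 0
    t2 = time.perf_counter()
    time1 += 1 / (t2 - t1)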
Example #4
    def run(self):
        """ Runs the module. """

        self.identified = 0

        # Starts the TassAI module
        self.TassAI = TassAI()
        self.TassAI.cv()
        self.TassAI.connect()
        self.TassAI.ncs()

        # Connects to the socket server
        soc = self.Sockets.connect(self.Helpers.confs["Socket"]["host"],
                                   self.Helpers.confs["Socket"]["port"])

        fps = ""
        framecount = 0
        count = 0
        time1 = 0
        time2 = 0

        self.publishes = [None] * (len(self.TassAI.NCS1.encoded) + 1)

        while True:
            time.sleep(0.05)
            try:
                t1 = time.perf_counter()
                # Reads the current frame
                frame = self.TassAI.camera.get(0.65)

                # Processes the frame
                raw, frame = self.TassAI.NCS1.prepareImg(frame)
                width = frame.shape[1]

                # Gets faces and coordinates
                faces, coords = self.TassAI.NCS1.faces(frame)

                # Writes header to frame
                cv2.putText(frame, "TassAI", (10, 30), self.TassAI.font, 0.7,
                            self.TassAI.color, 2, cv2.LINE_AA)

                # Writes date to frame
                cv2.putText(frame, str(datetime.now()), (10, 50),
                            self.TassAI.font, 0.5, self.TassAI.color, 2,
                            cv2.LINE_AA)

                if len(coords):
                    i = 0
                    mesg = ""
                    # Loops through coordinates
                    for (i, face) in enumerate(coords):
                        # Gets facial landmarks coordinates
                        coordsi = face_utils.shape_to_np(face)
                        # Looks for matches/intruders
                        known, distance = self.TassAI.NCS1.match(raw, faces[i])

                        if known:
                            mesg = "TassAI identified User #" + str(known)
                        else:
                            mesg = "TassAI identified intruder"

                        # If the last iotJumpWay publish for this user is in the past
                        if (self.publishes[int(known)] is None
                                or (self.publishes[int(known)] +
                                    (1 * 20)) < time.time()):
                            # Update publish time for user
                            self.publishes[int(known)] = time.time()

                            # Send iotJumpWay notification
                            self.iot.channelPub(
                                "Sensors", {
                                    "Type": "TassAI",
                                    "Sensor": "USB Camera",
                                    "Value": known,
                                    "Message": mesg
                                })

                            # Send iotJumpWay notification
                            self.iot.channelPub(
                                "Cameras", {
                                    "Type": "TassAI",
                                    "Sensor": "USB Camera",
                                    "Value": known,
                                    "Message": mesg
                                })

                        # Draws facial landmarks
                        for (x, y) in coordsi:
                            cv2.circle(frame, (x, y), 2, (0, 255, 0), -1)
                        # Advances the face index
                        i += 1

                cv2.putText(frame, fps, (width - 170, 30),
                            cv2.FONT_HERSHEY_SIMPLEX, 0.5, self.TassAI.color,
                            1, cv2.LINE_AA)

                # Streams the modified frame to the socket server
                encoded, buffer = cv2.imencode('.jpg', frame)
                soc.send(base64.b64encode(buffer))

                # FPS calculation
                framecount += 1
                if framecount >= 15:
                    fps = "Stream: {:.1f} FPS".format(time1 / 15)
                    framecount = 0
                    time1 = 0
                    time2 = 0
                t2 = time.perf_counter()
                elapsedTime = t2 - t1
                time1 += 1 / elapsedTime
                time2 += elapsedTime
                time.sleep(0.05)

            except KeyboardInterrupt:
                self.TassAI.camera.release()
                break
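In all four examples the processed frame is streamed by JPEG-encoding it with cv2.imencode and sending the base64-encoded buffer through the socket returned by self.Sockets.connect. A sketch of the matching decode step on the receiving side; only the cv2/base64 calls mirror the snippets, and how the payload arrives is up to the socket layer:

import base64

import cv2
import numpy as np

def decode_frame(payload):
    # Reverses base64.b64encode(cv2.imencode('.jpg', frame)[1])
    jpg = base64.b64decode(payload)
    arr = np.frombuffer(jpg, dtype=np.uint8)
    return cv2.imdecode(arr, cv2.IMREAD_COLOR)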