Example #1
    def validate_speed(cls, trackable_object, time_stamp, frame):
        # Initialize log file.
        if not cls.log_file:
            cls.initialize_log_file()

        # check if the object has not been logged
        if not trackable_object.logged:
            # check if the object's speed has been estimated and it
            # is higher than the speed limit
            if trackable_object.estimated and trackable_object.speedMPH > MAX_THRESHOLD_SPEED:
                # set the current year, month, day, and time
                year = time_stamp.strftime("%Y")
                month = time_stamp.strftime("%m")
                day = time_stamp.strftime("%d")
                time = time_stamp.strftime("%H:%M:%S")

                if SEND_EMAIL:
                    # initialize the image id, and the temporary file
                    imageID = time_stamp.strftime("%H%M%S%f")
                    tempFile = TempFile()

                    # write the date and speed on the image
                    cv2.putText(
                        frame,
                        datetime.now().strftime("%A %d %B %Y %I:%M:%S%p"),
                        (10, frame.shape[0] - 10), cv2.FONT_HERSHEY_SIMPLEX,
                        0.75, (0, 255, 0), 1)
                    # write the speed: first get the size of the text
                    size, base = cv2.getTextSize(
                        "%.0f mph" % trackable_object.speedMPH,
                        cv2.FONT_HERSHEY_SIMPLEX, 2, 3)
                    # then center it horizontally on the image
                    cntr_x = int((frame.shape[1] - size[0]) / 2)
                    cv2.putText(frame, "%.0f mph" % trackable_object.speedMPH,
                                (cntr_x, int(frame.shape[0] * 0.2)),
                                cv2.FONT_HERSHEY_SIMPLEX, 2.00, (0, 255, 0), 3)
                    cv2.imwrite(tempFile.path, frame)

                    # create a thread to send the image via email
                    # and start it
                    t = Thread(target=EmailSender.send_email,
                               args=(
                                   tempFile,
                                   imageID,
                               ))
                    t.start()

                    # log the event in the log file
                    info = "{},{},{},{},{},{}\n".format(
                        year, month, day, time, trackable_object.speedMPH,
                        imageID)
                else:
                    # log the event in the log file
                    info = "{},{},{},{},{}\n".format(year, month, day, time,
                                                     trackable_object.speedMPH)
                cls.log_file.write(info)

                # mark the object as logged
                trackable_object.logged = True
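
The centering trick above (measure the rendered text with cv2.getTextSize, then offset the x-coordinate by half of the leftover width) is reusable for any overlay. A minimal sketch of that pattern as a standalone helper; the function name and defaults are illustrative, not part of the original code:

import cv2

def draw_centered_text(frame, text, y, scale=2.0, color=(0, 255, 0), thickness=3):
    # measure the rendered text so it can be centered horizontally
    (text_w, text_h), baseline = cv2.getTextSize(
        text, cv2.FONT_HERSHEY_SIMPLEX, scale, thickness)
    x = int((frame.shape[1] - text_w) / 2)
    cv2.putText(frame, text, (x, y), cv2.FONT_HERSHEY_SIMPLEX,
                scale, color, thickness)

Called as draw_centered_text(frame, "%.0f mph" % trackable_object.speedMPH, int(frame.shape[0] * 0.2)), it reproduces the speed overlay written to the temporary file above.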
Example #2
    def send(self, image):
        # create a temporary path for the image and write it to file
        tempImage = TempFile()
        cv2.imwrite(tempImage.path, image)

        # start a thread to upload the file and send it
        t = Thread(target=self._send, args=(tempImage, ))
        t.start()
Example #3
def send(image, ref_no):
    # create a temporary path for the image and write it to file
    tempImage = TempFile()
    cv2.imwrite(tempImage.path, image)
    way = tempImage.path
    now = datetime.datetime.now()
    date = now.strftime("%d-%m-%Y")
    reference_no = ref_no

    # start a thread to upload the file and send it
    t = Thread(target=add_new_customer,
               args=(
                   tempImage,
                   way,
                   date,
                   reference_no,
                   image,
               ))
    t.daemon = True
    t.start()
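
Example #3 marks the upload thread as a daemon, so a stuck upload cannot keep the process alive when the main thread exits; the trade-off is that in-flight work may simply be dropped at shutdown. A tiny, self-contained illustration of that behavior (the sleep stands in for a hung network call):

from threading import Thread
import time

def slow_upload():
    # stand-in for a network call that never returns
    time.sleep(60)

t = Thread(target=slow_upload)
t.daemon = True  # daemon threads do not block interpreter exit
t.start()
print("main thread finished; the daemon upload is abandoned at exit")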
Example #4
    def validate_speed(cls, trackable_object, time_stamp, frame):
        # Initialize log file.
        if not cls.log_file:
            cls.initialize_log_file()

        # check if the object has not been logged
        if not trackable_object.logged:
            # check if the object's speed has been estimated and it
            # is higher than the speed limit
            if trackable_object.estimated and trackable_object.speedMPH > MAX_THRESHOLD_SPEED:
                # set the current year, month, day, and time
                year = time_stamp.strftime("%Y")
                month = time_stamp.strftime("%m")
                day = time_stamp.strftime("%d")
                time = time_stamp.strftime("%H:%M:%S")

                if SEND_EMAIL:
                    # initialize the image id, and the temporary file
                    imageID = time_stamp.strftime("%H%M%S%f")
                    tempFile = TempFile()
                    cv2.imwrite(tempFile.path, frame)

                    # create a thread to send the image via email
                    # and start it
                    t = Thread(target=EmailSender.send_email,
                               args=(tempFile, imageID))
                    t.start()

                    # log the event in the log file
                    info = "{},{},{},{},{},{}\n".format(
                        year, month, day, time, trackable_object.speedMPH,
                        imageID)
                else:
                    # log the event in the log file
                    info = "{},{},{},{},{}\n".format(year, month, day, time,
                                                     trackable_object.speedMPH)
                cls.log_file.write(info)

                # mark the object as logged
                trackable_object.logged = True
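
Note the Thread construction in this variant: the callable has to be passed uncalled (target=EmailSender.send_email), otherwise the email is sent synchronously on the calling thread and Thread receives the function's return value as its target. A minimal, self-contained sketch of the difference, using a stand-in function and dummy values since EmailSender itself is not shown in these examples:

from threading import Thread

def send_email(temp_path, image_id):
    # stand-in for EmailSender.send_email
    print("sending {} ({})".format(temp_path, image_id))

temp_path, image_id = "/tmp/capture.jpg", "120000123456"

# correct: pass the function object; it runs later on the worker thread
Thread(target=send_email, args=(temp_path, image_id)).start()

# wrong: send_email(...) runs immediately on the caller's thread and
# Thread is handed its return value (None) as the target
# Thread(target=send_email(temp_path, image_id)).start()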
Example #5
             cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 255, 0), 2)
 cv2.circle(frame, (centroid[0], centroid[1]), 4, (0, 255, 0), -1)
 # check if the object has not been logged
 if not to.logged:
     # check if the object's speed has been estimated and it is higher than the speed limit
     if to.estimated and to.speedMPH > conf["speed_limit"]:
         # set the current year, month, day, and time
         year = ts.strftime("%Y")
         month = ts.strftime("%m")
         day = ts.strftime("%d")
         time = ts.strftime("%H:%M:%S")
         # check if dropbox is to be used to store the vehicle image
         if conf["use_dropbox"]:
             # initialize the image id, and the temporary file
             imageID = ts.strftime("%H%M%S%f")
             tempFile = TempFile()
             cv2.imwrite(tempFile.path, frame)
             # create a thread to upload the file to dropbox and start it
             t = Thread(target=upload_file,
                        args=(
                            tempFile,
                            client,
                            imageID,
                        ))
             t.start()
             # log the event in the log file
             info = "{},{},{},{},{},{}\n".format(
                 year, month, day, time, to.speedMPH, imageID)
             logFile.write(info)
         # otherwise, we are not uploading vehicle images to dropbox
         else:
Example #6
        writer = None
        tempVideo.cleanup()
        tempVideo = None

    if not objectDetected and objectDetectedPrev and not sentNotify:

        startTime = datetime.now()

        fourcc = cv2.VideoWriter_fourcc('a', 'v', 'c', '1')
        sentNotify = True

        f = open("out.log", "w", encoding="utf-8")
        print("open")
        with redirect_stdout(f):

            tempVideo = TempFile(basePath="./videos", ext=".mp4")

            writer = cv2.VideoWriter(tempVideo.path, fourcc, 20.0, (W, H),
                                     True)

    if (datetime.now() - detectTimer).seconds > 5 and sentNotify:

        sentNotify = False

        endTime = datetime.now()
        totalSec = (endTime - startTime).seconds
        dateDetected = date.today().strftime("%A, %B %d %Y")

        msg = "Object not detected on {} at {} for {} " \
                "seconds.".format(dateDetected,
                startTime.strftime("%I:%M%p"), totalSec)
Example #7
    # calculate the average of all pixels where a higher mean
    # indicates that there is more light coming into the refrigerator
    mean = np.mean(gray)

    # determine if Luna is currently in frame
    lunaFound = mean > 50

    # if Luna is in frame and previously she was not, it means
    # she has just been spotted
    if lunaFound and not lunaPrevFound:
        # record the start time
        startTime = datetime.now()

        # create a temporary video file and initialize the video
        # writer object
        tempVideo = TempFile(ext=".mp4", basePath="./temp_videos")

        fourcc = cv2.VideoWriter_fourcc(*"mp4v")
        writer = cv2.VideoWriter(tempVideo.path, fourcc, 60, (W, H), True)

    # if Luna was in frame but has suddenly left the frame
    elif lunaPrevFound and not lunaFound:
        # calculate the time difference between the current time and
        # the start time
        timeDiff = (datetime.now() - startTime).seconds

        # if a notification has already been sent, then just reset
        # notifSent to False for the next iteration
        if notifSent:
            notifSent = False
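
Example #7 opens the writer when Luna appears, but the fragment ends before the clip is finalized. A rough sketch of the full open/write/release/cleanup lifecycle around a temporary .mp4, assuming the same imutils.io.TempFile helper used throughout (the frame source here is a dummy array):

import cv2
import numpy as np
from imutils.io import TempFile

W, H = 640, 480
tempVideo = TempFile(ext=".mp4")
fourcc = cv2.VideoWriter_fourcc(*"mp4v")
writer = cv2.VideoWriter(tempVideo.path, fourcc, 30, (W, H), True)

# write frames while the event of interest is happening
# (a second of blank frames as a stand-in)
for _ in range(30):
    writer.write(np.zeros((H, W, 3), dtype="uint8"))

# finalize the file before handing it off (e.g. to a notifier thread),
# then remove it once it is no longer needed
writer.release()
tempVideo.cleanup()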
Example #8
    mean = np.mean(gray)
    print("Mean pixel val: ", mean)

    # determine if the refrigerator is currently open
    fridgeOpen = mean > conf["thresh"]
    print("fridgeOpen: ", fridgeOpen)

    # if the fridge is open and previously it was closed, it means
    # the fridge has been just opened
    if fridgeOpen and not fridgePrevOpen:
        # record the start time
        startTime = datetime.now()

        # create a temporary video file and initialize the video
        # writer object
        tempVideo = TempFile(ext=".mp4")
        writer = cv2.VideoWriter(tempVideo.path, fourcc, 30, (W, H), True)

    # if the fridge is open then there are 2 possibilities,
    # 1) it's left open for more than the *threshold* seconds.
    # 2) it's closed in less than or equal to the *threshold* seconds.
    elif fridgePrevOpen:
        # calculate the time difference between the current time and
        # the start time

        timeDiff = (datetime.now() - startTime).seconds

        # if the fridge is open and the time difference is greater
        # than threshold, then send a notification
        if fridgeOpen and timeDiff > conf["open_threshold_seconds"]:
            # if a notification has not been sent yet, then send a
Example #9
    def programLoop(self):
        while True:
            # grab the next frame from the stream, store the current
            # timestamp, and store the new date
            self.ret, self.frame = self.vs.read()
            self.ts = datetime.now()
            newDate = self.ts.strftime("%m-%d-%y")

            # if the frame is None, the stream has ended, so return
            if self.frame is None:
                return

            # if the log file has not been created or opened
            self.createLogFileIfNotExist()

            # resize the frame
            self.frame = imutils.resize(self.frame,
                                        width=self.conf["frame_width"])
            self.rgb = cv2.cvtColor(self.frame, cv2.COLOR_BGR2RGB)

            # if the frame dimensions are empty, set them
            if ((self.W is None) or (self.H is None)):
                (self.H, self.W) = self.frame.shape[:2]
                self.meterPerPixel = self.conf["distance"] / self.W

            # initialize our list of bounding box rectangles returned by
            # either (1) our object detector or (2) the correlation trackers
            self.rects = []

            self.runComputationallyTaskingAlgoIfBasicAlgoFails()

            objects = self.ct.update(self.rects)

            # loop over the tracked objects
            for (objectID, centroid) in objects.items():
                # check to see if a trackable object exists for the current
                # object ID
                to = self.trackableObjects.get(objectID, None)

                # if there is no existing trackable object, create one
                if to is None:
                    to = TrackableObject(objectID, centroid)

                # otherwise, if there is a trackable object and its speed has
                # not yet been estimated then estimate it
                elif not to.estimated:
                    # check if the direction of the object has been set, if
                    # not, calculate it, and set it
                    if to.direction is None:
                        y = [c[0] for c in to.centroids]
                        direction = centroid[0] - np.mean(y)
                        to.direction = direction

                    # if the direction is positive (indicating the object
                    # is moving from left to right)
                    if to.direction > 0:
                        # check to see if timestamp has been noted for
                        # point A
                        if to.timestamp["A"] == 0:
                            # if the centroid's x-coordinate is greater than
                            # the corresponding point then set the timestamp
                            # as current timestamp and set the position as the
                            # centroid's x-coordinate
                            if centroid[0] > self.conf[
                                    "speed_estimation_zone"]["A"]:
                                to.timestamp["A"] = self.ts
                                to.position["A"] = centroid[0]

                        # check to see if timestamp has been noted for
                        # point B
                        elif to.timestamp["B"] == 0:
                            # if the centroid's x-coordinate is greater than
                            # the corresponding point then set the timestamp
                            # as current timestamp and set the position as the
                            # centroid's x-coordinate
                            if centroid[0] > self.conf[
                                    "speed_estimation_zone"]["B"]:
                                to.timestamp["B"] = self.ts
                                to.position["B"] = centroid[0]

                        # check to see if timestamp has been noted for
                        # point C
                        elif to.timestamp["C"] == 0:
                            # if the centroid's x-coordinate is greater than
                            # the corresponding point then set the timestamp
                            # as current timestamp and set the position as the
                            # centroid's x-coordinate
                            if centroid[0] > self.conf[
                                    "speed_estimation_zone"]["C"]:
                                to.timestamp["C"] = self.ts
                                to.position["C"] = centroid[0]

                        # check to see if timestamp has been noted for
                        # point D
                        elif to.timestamp["D"] == 0:
                            # if the centroid's x-coordinate is greater than
                            # the corresponding point then set the timestamp
                            # as current timestamp, set the position as the
                            # centroid's x-coordinate, and set the last point
                            # flag as True
                            if centroid[0] > self.conf[
                                    "speed_estimation_zone"]["D"]:
                                to.timestamp["D"] = self.ts
                                to.position["D"] = centroid[0]
                                to.lastPoint = True

                    # if the direction is negative (indicating the object
                    # is moving from right to left)
                    elif to.direction < 0:
                        # check to see if timestamp has been noted for
                        # point D
                        if to.timestamp["D"] == 0:
                            # if the centroid's x-coordinate is lesser than
                            # the corresponding point then set the timestamp
                            # as current timestamp and set the position as the
                            # centroid's x-coordinate
                            if centroid[0] < self.conf[
                                    "speed_estimation_zone"]["D"]:
                                to.timestamp["D"] = self.ts
                                to.position["D"] = centroid[0]

                        # check to see if timestamp has been noted for
                        # point C
                        elif to.timestamp["C"] == 0:
                            # if the centroid's x-coordinate is lesser than
                            # the corresponding point then set the timestamp
                            # as current timestamp and set the position as the
                            # centroid's x-coordinate
                            if centroid[0] < self.conf[
                                    "speed_estimation_zone"]["C"]:
                                to.timestamp["C"] = self.ts
                                to.position["C"] = centroid[0]

                        # check to see if timestamp has been noted for
                        # point B
                        elif to.timestamp["B"] == 0:
                            # if the centroid's x-coordinate is lesser than
                            # the corresponding point then set the timestamp
                            # as current timestamp and set the position as the
                            # centroid's x-coordinate
                            if centroid[0] < self.conf[
                                    "speed_estimation_zone"]["B"]:
                                to.timestamp["B"] = self.ts
                                to.position["B"] = centroid[0]

                        # check to see if timestamp has been noted for
                        # point A
                        elif to.timestamp["A"] == 0:
                            # if the centroid's x-coordinate is lesser than
                            # the corresponding point then set the timestamp
                            # as current timestamp, set the position as the
                            # centroid's x-coordinate, and set the last point
                            # flag as True
                            if centroid[0] < self.conf[
                                    "speed_estimation_zone"]["A"]:
                                to.timestamp["A"] = self.ts
                                to.position["A"] = centroid[0]
                                to.lastPoint = True

                    # check to see if the vehicle is past the last point and
                    # the vehicle's speed has not yet been estimated, if yes,
                    # then calculate the vehicle speed and log it if it's
                    # over the limit
                    if to.lastPoint and not to.estimated:
                        # initialize the list of estimated speeds
                        estimatedSpeeds = []

                        # loop over all the pairs of points and estimate the
                        # vehicle speed
                        for (i, j) in self.points:
                            # calculate the distance in pixels
                            d = to.position[j] - to.position[i]
                            distanceInPixels = abs(d)

                            # check if the distance in pixels is zero, if so,
                            # skip this iteration
                            if distanceInPixels == 0:
                                continue

                            # calculate the time in hours
                            t = to.timestamp[j] - to.timestamp[i]
                            timeInSeconds = abs(t.total_seconds())
                            timeInHours = timeInSeconds / (60 * 60)

                            # calculate distance in kilometers and append the
                            # calculated speed to the list
                            distanceInMeters = distanceInPixels * self.meterPerPixel
                            distanceInKM = distanceInMeters / 1000
                            estimatedSpeeds.append(distanceInKM / timeInHours)

                        # calculate the average speed
                        to.calculate_speed(estimatedSpeeds)

                        # set the object as estimated
                        to.estimated = True
                        print("[INFO] Speed of the vehicle that just passed" \
                            " is: {:.2f} MPH".format(to.speedMPH))

                # store the trackable object in our dictionary
                self.trackableObjects[objectID] = to

                # draw both the ID of the object and the centroid of the
                # object on the output frame
                text = "ID {}".format(objectID)
                cv2.putText(self.frame, text,
                            (centroid[0] - 10, centroid[1] - 10),
                            cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 255, 0), 2)
                cv2.circle(self.frame, (centroid[0], centroid[1]), 4,
                           (0, 255, 0), -1)

                # check if the object has not been logged
                if not to.logged:
                    # check if the object's speed has been estimated and it
                    # is higher than the speed limit
                    if to.estimated and to.speedMPH > self.conf["speed_limit"]:
                        # set the current year, month, day, and time
                        year = self.ts.strftime("%Y")
                        month = self.ts.strftime("%m")
                        day = self.ts.strftime("%d")
                        time = self.ts.strftime("%H:%M:%S")

                        # check if Cloudinary is to be used to store the
                        # vehicle image
                        if self.conf["use_cloudinary"]:
                            # initialize the image id, and the temporary file
                            imageID = self.ts.strftime("%H%M%S%f")
                            tempFile = TempFile()
                            cv2.imwrite(tempFile.path, self.frame)

                            # create a thread to upload the file to Cloudinary
                            # and start it
                            t = Thread(target=self.upload_file,
                                       args=(
                                           tempFile,
                                           imageID,
                                       ))
                            t.start()
                            # t = Thread(target=app(self.keys, tempFile.path).main())
                            # t.start()
                            # app(keys, tempFile.path).main()

                            # log the event in the log file
                            info = "{},{},{},{},{},{}\n".format(
                                year, month, day, time, to.speedMPH, imageID)
                            self.logFile.write(info)

                        # otherwise, we are not uploading vehicle images to
                        # Cloudinary
                        else:
                            # log the event in the log file
                            info = "{},{},{},{},{}\n".format(
                                year, month, day, time, to.speedMPH)
                            self.logFile.write(info)

                        # mark the object as logged
                        to.logged = True

            # if the *display* flag is set, then display the current frame
            # to the screen and record if a user presses a key
            if self.conf["display"]:
                cv2.imshow("frame", self.frame)
                key = cv2.waitKey(1) & 0xFF

                # if the `q` key is pressed, break from the loop
                if key == ord("q"):
                    break

            # increment the total number of frames processed thus far and
            # then update the FPS counter
            self.totalFrames += 1
            self.fps.update()

            # stop the timer and display FPS information
            self.fps.stop()
            print("[INFO] elapsed time: {:.2f}".format(self.fps.elapsed()))
            print("[INFO] approx. FPS: {:.2f}".format(self.fps.fps()))
Example #10
# author:    Adrian Rosebrock
# website:   http://www.pyimagesearch.com

# USAGE
# BE SURE TO INSTALL 'imutils' PRIOR TO EXECUTING THIS COMMAND
# python temp_file.py --image ../demo_images/bridge.jpg

# import the necessary packages
from __future__ import print_function
from imutils.io import TempFile
import argparse
import cv2

# construct the argument parser and parse the arguments
ap = argparse.ArgumentParser()
ap.add_argument("-i", "--image", required=True, help="path to input image")
args = ap.parse_args()

# load the input image
image = cv2.imread(args.image)

# create a temporary path for the image, then write the image
# to file
t = TempFile()
cv2.imwrite(t.path, image)
print("[INFO] path: {}".format(t.path))

# delete the file
t.cleanup()
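
Every example above uses imutils.io.TempFile only through its path attribute and its cleanup() method. If imutils is not installed, a rough stand-in with the same surface is easy to write; this is an approximation, not the library's actual implementation:

import os
import uuid

class SimpleTempFile:
    # minimal stand-in for imutils.io.TempFile: build a random path under
    # basePath, let the caller write to it, and delete it on cleanup()
    def __init__(self, basePath="./", ext=".jpg"):
        os.makedirs(basePath, exist_ok=True)
        self.path = os.path.join(basePath, "{}{}".format(uuid.uuid4().hex, ext))

    def cleanup(self):
        if os.path.exists(self.path):
            os.remove(self.path)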
Example #11
 #print( 'Motion Detected' + str(motion_detected))
 #if motionDetector.changeObserved(frame):
 #ourString = 'Motion Detected'
 #cv2.putText(displayFrame, ourString, (50,50), cv2.FONT_HERSHEY_SIMPLEX, 1, (240,170,0), 2)
 if armed:
     personDetected, person_count, frame = objDetector.getDetectedObjects(frame, boxes)
 else:
     personDetected = False
     
 # we ensure at least one detection exists
 if personDetected:# or motion_detected:
     print(armed, send_notification)
     smsSent =  copy.copy(send_notification)
     
     if armed:
         tempImg = TempFile(ext=".jpg")
         cv2.imwrite(tempImg.path, frame)  
         cv2.putText(frame, str(personDetected), (10,460), cv2.FONT_HERSHEY_SIMPLEX,2, (0,255,0), 2)
         timer.startTimer()
         print("Person Triggered Alarm")
         try:
             sms_body = messenger.getSMSBody(person_count)
             print(sms_body)
             tn.send(sms_body, tempImg, smsSent)   
             url = "https://icstoragett.s3.us-east-2.amazonaws.com" + tempImg.path[2:]   
             db.updateCustomerAlertDB(smsSent, sms_body, url)
             send_notification = False          
         except Exception:
             print("sending failed")
         
Example #12
# author:    Adrian Rosebrock
# website:   http://www.pyimagesearch.com

# USAGE
# BE SURE TO INSTALL 'imutils' PRIOR TO EXECUTING THIS COMMAND
# python temp_file.py --image ../demo_images/bridge.jpg 

# import the necessary packages
from __future__ import print_function
from imutils.io import TempFile
import argparse
import cv2

# construct the argument parser and parse the arguments
ap = argparse.ArgumentParser()
ap.add_argument("-i", "--image", required=True, help="path to input image")
args = ap.parse_args()

# load the input image
image = cv2.imread(args.image)

# create a temporary path for the image, then write the image
# to file
t = TempFile()
cv2.imwrite(t.path, image)
print("[INFO] path: {}".format(t.path))

# delete the file
t.cleanup()
Example #13
def send_alert(msg: str, img):
    temp_file = TempFile(ext=".jpg")
    cv2.imwrite(temp_file.path, img)
    tn.send(msg, temp_file)
Example #14
    def tracker_speed(self):
        # construct the argument parser and parse the arguments
        #ap = argparse.ArgumentParser()
        #ap.add_argument("-c", "--conf", required=True,
        #	help="Path to the input configuration file")
        #ap.add_argument("-i", "--input", required=True,
        #	help="Path to the input video file")
        #args = vars(ap.parse_args())

        # load the configuration file
        #conf = Conf(args["conf"])
        conteudo = open(conf_path).read()

        conf = json.loads(conteudo)

        # initialize the list of class labels MobileNet SSD was trained to
        # detect
        CLASSES = ["background", "aeroplane", "bicycle", "bird", "boat",
            "bottle", "bus", "car", "cat", "chair", "cow", "diningtable",
            "dog", "horse", "motorbike", "person", "pottedplant", "sheep",
            "sofa", "train", "tvmonitor"]

        # check to see if Dropbox should be used
        if conf["use_dropbox"]:
            # connect to dropbox and start the session authorization process
            client = dropbox.Dropbox(conf["dropbox_access_token"])
            print("[SUCCESS] dropbox account linked")

        # load our serialized model from disk
        print("[INFO] loading model...")
        net = cv2.dnn.readNetFromCaffe(conf["prototxt_path"],
            conf["model_path"])
        #net.setPreferableTarget(cv2.dnn.DNN_TARGET_MYRIAD)

        # initialize the video stream and allow the camera sensor to warmup
        print("[INFO] warming up camera...")
        #vs = VideoStream(src=0).start()
        vs = cv2.VideoCapture(data_path)
        tm.sleep(2.0)

        # initialize the frame dimensions (we'll set them as soon as we read
        # the first frame from the video)
        H = None
        W = None

        # instantiate our centroid tracker, then initialize a list to store
        # each of our dlib correlation trackers, followed by a dictionary to
        # map each unique object ID to a TrackableObject
        ct = CentroidTracker(maxDisappeared=conf["max_disappear"],
            maxDistance=conf["max_distance"])
        trackers = []
        trackableObjects = {}

        # keep the count of total number of frames
        totalFrames = 0

        # initialize the log file
        logFile = None

        # initialize the list of various points used to calculate the avg of
        # the vehicle speed
        points = [("A", "B"), ("B", "C"), ("C", "D")]

        # start the frames per second throughput estimator
        fps = FPS().start()

        # loop over the frames of the stream
        while True:
            # grab the next frame from the stream, store the current
            # timestamp, and store the new date
            ret, frame = vs.read()
            ts = datetime.now()
            newDate = ts.strftime("%m-%d-%y")

            # check if the frame is None, if so, break out of the loop
            if frame is None:
                break

            # if the log file has not been created or opened
            if logFile is None:
                # build the log file path and create/open the log file
                logPath = os.path.join(conf["output_path"], conf["csv_name"])
                logFile = open(logPath, mode="a")

                # set the file pointer to end of the file
                pos = logFile.seek(0, os.SEEK_END)

                # if we are using dropbox and this is an empty log file then
                # write the column headings
                if conf["use_dropbox"] and pos == 0:
                    logFile.write("Year,Month,Day,Time,Speed (in KMPH),ImageID\n")

                # otherwise, we are not using dropbox and this is an empty log
                # file then write the column headings
                elif pos == 0:
                    logFile.write("Year,Month,Day,Time,Speed (in KMPH)\n")

            # resize the frame
            frame = imutils.resize(frame, width=conf["frame_width"])
            rgb = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)

            # if the frame dimensions are empty, set them
            if W is None or H is None:
                (H, W) = frame.shape[:2]
                meterPerPixel = conf["distance"] / W

            # initialize our list of bounding box rectangles returned by
            # either (1) our object detector or (2) the correlation trackers
            rects = []

            # check to see if we should run a more computationally expensive
            # object detection method to aid our tracker
            if totalFrames % conf["track_object"] == 0:
                # initialize our new set of object trackers
                trackers = []

                # convert the frame to a blob and pass the blob through the
                # network and obtain the detections
                blob = cv2.dnn.blobFromImage(frame, size=(300, 300),
                    ddepth=cv2.CV_8U)
                net.setInput(blob, scalefactor=1.0/127.5, mean=[127.5,
                    127.5, 127.5])
                detections = net.forward()

                # loop over the detections
                for i in np.arange(0, detections.shape[2]):
                    # extract the confidence (i.e., probability) associated
                    # with the prediction
                    confidence = detections[0, 0, i, 2]

                    # filter out weak detections by ensuring the `confidence`
                    # is greater than the minimum confidence
                    if confidence > conf["confidence"]:
                        # extract the index of the class label from the
                        # detections list
                        idx = int(detections[0, 0, i, 1])

                        # if the class label is not a car, ignore it
                        if CLASSES[idx] != "car":
                            continue

                        # if the class label is not a motorbike, ignore it
                        #if CLASSES[idx] != "motorbike":
                        #	continue

                        # compute the (x, y)-coordinates of the bounding box
                        # for the object
                        box = detections[0, 0, i, 3:7] * np.array([W, H, W, H])
                        (startX, startY, endX, endY) = box.astype("int")

                        # construct a dlib rectangle object from the bounding
                        # box coordinates and then start the dlib correlation
                        # tracker
                        tracker = dlib.correlation_tracker()
                        rect = dlib.rectangle(startX, startY, endX, endY)
                        tracker.start_track(rgb, rect)

                        # add the tracker to our list of trackers so we can
                        # utilize it during skip frames
                        trackers.append(tracker)

            # otherwise, we should utilize our object *trackers* rather than
            # object *detectors* to obtain a higher frame processing
            # throughput
            else:
                # loop over the trackers
                for tracker in trackers:
                    # update the tracker and grab the updated position
                    tracker.update(rgb)
                    pos = tracker.get_position()

                    # unpack the position object
                    startX = int(pos.left())
                    startY = int(pos.top())
                    endX = int(pos.right())
                    endY = int(pos.bottom())

                    # add the bounding box coordinates to the rectangles list
                    rects.append((startX, startY, endX, endY))

            # use the centroid tracker to associate the (1) old object
            # centroids with (2) the newly computed object centroids
            objects = ct.update(rects)

            # loop over the tracked objects
            for (objectID, centroid) in objects.items():
                # check to see if a trackable object exists for the current
                # object ID
                to = trackableObjects.get(objectID, None)

                # if there is no existing trackable object, create one
                if to is None:
                    to = TrackableObject(objectID, centroid)

                # otherwise, if there is a trackable object and its speed has
                # not yet been estimated then estimate it
                elif not to.estimated:
                    # check if the direction of the object has been set, if
                    # not, calculate it, and set it
                    if to.direction is None:
                        y = [c[0] for c in to.centroids]
                        direction = centroid[0] - np.mean(y)
                        to.direction = direction

                    # if the direction is positive (indicating the object
                    # is moving from left to right)
                    if to.direction > 0:
                        # check to see if timestamp has been noted for
                        # point A
                        if to.timestamp["A"] == 0 :
                            # if the centroid's x-coordinate is greater than
                            # the corresponding point then set the timestamp
                            # as current timestamp and set the position as the
                            # centroid's x-coordinate
                            if centroid[0] > conf["speed_estimation_zone"]["A"]:
                                to.timestamp["A"] = ts
                                to.position["A"] = centroid[0]

                        # check to see if timestamp has been noted for
                        # point B
                        elif to.timestamp["B"] == 0:
                            # if the centroid's x-coordinate is greater than
                            # the corresponding point then set the timestamp
                            # as current timestamp and set the position as the
                            # centroid's x-coordinate
                            if centroid[0] > conf["speed_estimation_zone"]["B"]:
                                to.timestamp["B"] = ts
                                to.position["B"] = centroid[0]

                        # check to see if timestamp has been noted for
                        # point C
                        elif to.timestamp["C"] == 0:
                            # if the centroid's x-coordinate is greater than
                            # the corresponding point then set the timestamp
                            # as current timestamp and set the position as the
                            # centroid's x-coordinate
                            if centroid[0] > conf["speed_estimation_zone"]["C"]:
                                to.timestamp["C"] = ts
                                to.position["C"] = centroid[0]

                        # check to see if timestamp has been noted for
                        # point D
                        elif to.timestamp["D"] == 0:
                            # if the centroid's x-coordinate is greater than
                            # the corresponding point then set the timestamp
                            # as current timestamp, set the position as the
                            # centroid's x-coordinate, and set the last point
                            # flag as True
                            if centroid[0] > conf["speed_estimation_zone"]["D"]:
                                to.timestamp["D"] = ts
                                to.position["D"] = centroid[0]
                                to.lastPoint = True

                    # if the direction is negative (indicating the object
                    # is moving from right to left)
                    elif to.direction < 0:
                        # check to see if timestamp has been noted for
                        # point D
                        if to.timestamp["D"] == 0 :
                            # if the centroid's x-coordinate is lesser than
                            # the corresponding point then set the timestamp
                            # as current timestamp and set the position as the
                            # centroid's x-coordinate
                            if centroid[0] < conf["speed_estimation_zone"]["D"]:
                                to.timestamp["D"] = ts
                                to.position["D"] = centroid[0]

                        # check to see if timestamp has been noted for
                        # point C
                        elif to.timestamp["C"] == 0:
                            # if the centroid's x-coordinate is lesser than
                            # the corresponding point then set the timestamp
                            # as current timestamp and set the position as the
                            # centroid's x-coordinate
                            if centroid[0] < conf["speed_estimation_zone"]["C"]:
                                to.timestamp["C"] = ts
                                to.position["C"] = centroid[0]

                        # check to see if timestamp has been noted for
                        # point B
                        elif to.timestamp["B"] == 0:
                            # if the centroid's x-coordinate is lesser than
                            # the corresponding point then set the timestamp
                            # as current timestamp and set the position as the
                            # centroid's x-coordinate
                            if centroid[0] < conf["speed_estimation_zone"]["B"]:
                                to.timestamp["B"] = ts
                                to.position["B"] = centroid[0]

                        # check to see if timestamp has been noted for
                        # point A
                        elif to.timestamp["A"] == 0:
                            # if the centroid's x-coordinate is lesser than
                            # the corresponding point then set the timestamp
                            # as current timestamp, set the position as the
                            # centroid's x-coordinate, and set the last point
                            # flag as True
                            if centroid[0] < conf["speed_estimation_zone"]["A"]:
                                to.timestamp["A"] = ts
                                to.position["A"] = centroid[0]
                                to.lastPoint = True

                    # check to see if the vehicle is past the last point and
                    # the vehicle's speed has not yet been estimated, if yes,
                    # then calculate the vehicle speed and log it if it's
                    # over the limit
                    if to.lastPoint and not to.estimated:
                        # initialize the list of estimated speeds
                        estimatedSpeeds = []

                        # loop over all the pairs of points and estimate the
                        # vehicle speed
                        for (i, j) in points:
                            # calculate the distance in pixels
                            d = to.position[j] - to.position[i]
                            distanceInPixels = abs(d)

                            # check if the distance in pixels is zero, if so,
                            # skip this iteration
                            if distanceInPixels == 0:
                                continue

                            # calculate the time in hours
                            t = to.timestamp[j] - to.timestamp[i]
                            timeInSeconds = abs(t.total_seconds())
                            timeInHours = timeInSeconds / (60 * 60)

                            # calculate distance in kilometers and append the
                            # calculated speed to the list
                            distanceInMeters = distanceInPixels * meterPerPixel
                            distanceInKM = distanceInMeters / 1000
                            estimatedSpeeds.append(distanceInKM / timeInHours)

                        # calculate the average speed
                        to.calculate_speed(estimatedSpeeds)

                        # set the object as estimated
                        to.estimated = True
                        print("[INFO] Speed of the vehicle that just passed"\
                            " is: {:.2f} KMPH".format(to.speedKMPH))
                        #str = ("{:.2f}".format(to.speedKMPH))
                        self.set_velocidade(int(to.speedKMPH))

                # store the trackable object in our dictionary
                trackableObjects[objectID] = to

                # draw both the ID of the object and the centroid of the
                # object on the output frame
                text = "ID {}".format(objectID)
                cv2.putText(frame, text, (centroid[0] - 10, centroid[1] - 10)
                    , cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 255, 0), 2)
                cv2.circle(frame, (centroid[0], centroid[1]), 4,
                    (0, 255, 0), -1)

                # check if the object has not been logged
                if not to.logged:
                    # check if the object's speed has been estimated and it
                    # is higher than the speed limit
                    if to.estimated and to.speedKMPH > conf["speed_limit"]:
                        # set the current year, month, day, and time
                        year = ts.strftime("%Y")
                        month = ts.strftime("%m")
                        day = ts.strftime("%d")
                        time = ts.strftime("%H:%M:%S")

                        # check if dropbox is to be used to store the vehicle
                        # image
                        if conf["use_dropbox"]:
                            # initialize the image id, and the temporary file
                            imageID = ts.strftime("%H%M%S%f")
                            tempFile = TempFile()
                            cv2.imwrite(tempFile.path, frame)

                            # create a thread to upload the file to dropbox
                            # and start it
                            t = Thread(target=upload_file, args=(tempFile,
                                client, imageID,))
                            t.start()

                            # log the event in the log file
                            info = "{},{},{},{},{},{}\n".format(year, month,
                                day, time, to.speedKMPH, imageID)
                            logFile.write(info)

                        # otherwise, we are not uploading vehicle images to
                        # dropbox
                        else:
                            # log the event in the log file
                            info = "{},{},{},{},{}\n".format(year, month,
                                day, time, to.speedKMPH)
                            logFile.write(info)

                        # mark the object as logged
                        to.logged = True

            # if the *display* flag is set, then display the current frame
            # to the screen and record if a user presses a key
            if conf["display"]:
                cv2.imshow("frame", frame)
                key = cv2.waitKey(1) & 0xFF

                # if the `q` key is pressed, break from the loop
                if key == ord("q"):
                    break

            # increment the total number of frames processed thus far and
            # then update the FPS counter
            totalFrames += 1
            fps.update()

        # stop the timer and display FPS information
        fps.stop()
        print("[INFO] elapsed time: {:.2f}".format(fps.elapsed()))
        print("[INFO] approx. FPS: {:.2f}".format(fps.fps()))

        # check if the log file object exists, if it does, then close it
        if logFile is not None:
            logFile.close()

        # close any open windows
        cv2.destroyAllWindows()

        # clean up
        print("[INFO] cleaning up...")
        vs.release()
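
Examples #5 and #14 pass tempFile, the Dropbox client, and imageID to an upload_file helper that is never shown. A minimal sketch of what such a helper could look like with the official dropbox SDK (client is the dropbox.Dropbox instance created above; the remote path format is an assumption):

def upload_file(tempFile, client, imageID):
    # read the saved frame and push it to Dropbox under the image ID
    # (the remote naming scheme here is illustrative)
    remotePath = "/{}.jpg".format(imageID)
    with open(tempFile.path, "rb") as f:
        client.files_upload(f.read(), remotePath)

    # the local temporary file is no longer needed
    tempFile.cleanup()
    print("[INFO] uploaded {}".format(remotePath))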