Ejemplo n.º 1
0
    def setUp(self):
        """Configure global Properties and build the frames/regions shared by tests.

        Loads two image pairs: DSC_5244/5245 (pronounced crowd motion of
        about 10 pixels, almost no camera motion) and DSC_5817/5818
        (pronounced camera motion, crowd nearly stationary), wraps them in
        TrackedFrame objects, and sets up tracking regions for each pair.
        """
        # BUG FIX: the original read `Properties.mode == 'top'` -- a no-op
        # comparison.  '=' was clearly intended: every other Properties
        # line in this method is an assignment, and the mode is read below.
        Properties.mode = 'top'
        Properties.egomotion_num_corners = 100  # Number of corners to track for egomotion correction
        Properties.flow_num_corners = 100  # Number of corners to track for flow computation

        # Min and max normalized velocities to count in our average.  These values
        # may be dataset dependent.
        if Properties.mode == 'velo':
            Properties.min_velocity = 0
            Properties.max_velocity = 100
        else:
            Properties.min_velocity_in_pixels = 1
            Properties.max_velocity = 0.1

        # Is the crowd moving in the horizontal(0) or vertical(1) direction?
        Properties.dimension_of_motion = 1
        Properties.sample_rect_dims = (0.1, 0.1)

        # pronounced crowd motion, about 10 pixels, almost no camera motion
        self.im5244 = cv2.imread('test_images/DSC_5244.JPG',
                                 cv2.CV_LOAD_IMAGE_GRAYSCALE)
        self.im5245 = cv2.imread('test_images/DSC_5245.JPG',
                                 cv2.CV_LOAD_IMAGE_GRAYSCALE)
        # pronounced camera motion, crowd nearly stationary
        self.im5817 = cv2.imread('test_images/DSC_5817.JPG',
                                 cv2.CV_LOAD_IMAGE_GRAYSCALE)
        self.im5818 = cv2.imread('test_images/DSC_5818.JPG',
                                 cv2.CV_LOAD_IMAGE_GRAYSCALE)

        self.frame5244 = crowdmisc.TrackedFrame(self.im5244)
        self.frame5245 = crowdmisc.TrackedFrame(self.im5245)
        self.frame5817 = crowdmisc.TrackedFrame(self.im5817)
        self.frame5818 = crowdmisc.TrackedFrame(self.im5818)

        # Regions for the camera-motion pair; corner coordinates are given
        # at zoom factor 1 (full resolution).
        self.trackingRegions5817 = crowdmisc.constructTrackingRegions(
            self.im5817)
        newPoints = [(0, 700), (0, 0), (175, 0), (175, 700), (1550, 600),
                     (900, 0), (2136, 0), (2136, 700)]
        crowdmisc.updateEgomotionTrackingRegion(newPoints, 1,
                                                self.trackingRegions5817)
        newPoints = [(230, 882), (217, 441), (500, 429), (698, 889)]
        crowdmisc.updateFlowTrackingRegion(newPoints, 1,
                                           self.trackingRegions5817)

        # Regions for the crowd-motion pair; corners are display coordinates
        # at zoom factor 0.5.
        self.trackingRegions5244 = crowdmisc.constructTrackingRegions(
            self.im5244)
        displayFlowCorners = [(277, 439), (276, 195), (421, 188), (554, 440)]
        displayStableCorners =\
            [(15, 259), (16, 3), (241, 5), (243, 299),\
             (1068, 302), (680, 64), (694, 7), (1075, 8),
             (72, 618), (19, 292), (174, 315), (163, 479)]
        crowdmisc.updateEgomotionTrackingRegion(displayStableCorners, 0.5,
                                                self.trackingRegions5244)
        crowdmisc.updateFlowTrackingRegion(displayFlowCorners, 0.5,
                                           self.trackingRegions5244)
Ejemplo n.º 2
0
def test_find_egomotion():
    """Visual sanity check of egomotion estimation between two frames.

    Loads the camera-motion pair, builds a stable-region mask, estimates
    the egomotion homography, and plots the mask, the warped previous
    frame, and the current frame with the tracked points drawn on it.
    Intended for eyeball inspection, not automated assertion.
    """
    prev_image = cv.LoadImageM('Jun12/DSC_5817.JPG',
                               cv.CV_LOAD_IMAGE_GRAYSCALE)
    curr_image = cv.LoadImageM('Jun12/DSC_5818.JPG',
                               cv.CV_LOAD_IMAGE_GRAYSCALE)
    trackingRegions = crowdmisc.constructTrackingRegions(
        image_size=(prev_image.rows, prev_image.cols))
    for newPoint in [(0, 700), (0, 0), (175, 0), (175, 700), (1550, 600),
                     (900, 0), (2136, 0), (2136, 700)]:
        # BUG FIX: updateEgomotionTrackingRegion takes a *list* of points --
        # every other call site in this project passes one (e.g. [(3, 3)]) --
        # but a bare tuple was passed here.
        crowdmisc.updateEgomotionTrackingRegion([newPoint], 1, trackingRegions)

    plt.imshow(trackingRegions['stableRegionMask'])
    plt.title('Stable mask')

    egomotion_matrix, prev_pts, curr_pts = \
        crowd.find_egomotion(prev_image, curr_image, trackingRegions, None, None)
    # Warp the previous frame into the current frame's coordinates so the
    # two can be compared visually.
    warped_image = cv.CreateMat(prev_image.rows, prev_image.cols,
                                prev_image.type)
    cv.Zero(warped_image)
    cv.WarpPerspective(prev_image, warped_image, egomotion_matrix)
    plt.figure()
    plt.imshow(warped_image, 'gray')
    plt.title('Warped image')

    # Replicate the grayscale frame into 3 channels so colored points show.
    curr_image = np.tile(np.array(curr_image)[:, :, np.newaxis], (1, 1, 3))
    draw.drawPoints(curr_image, cvutils.array2point_list(curr_pts))
    plt.figure()
    plt.imshow(curr_image)
    plt.title('Real current image')
Ejemplo n.º 3
0
    def test_updateEgomotionTrackingRegion(self):
        """Stable corners are scaled by 1/zoom; each completed quad of four
        corners paints a new rectangle into 'stableRegionMask'."""
        base = np.zeros((20, 10), dtype=np.uint8)
        regions = crowdmisc.constructTrackingRegions(base)

        # Two display points at zoom 0.5 become doubled image coordinates.
        crowdmisc.updateEgomotionTrackingRegion([(2, 1), (2, 3)], 0.5, regions)
        self.assertEqual(regions['stableCorners'], [(4, 2), (4, 6)])
        # Finish the first quad one point at a time.
        crowdmisc.updateEgomotionTrackingRegion([(3, 3)], 0.5, regions)
        crowdmisc.updateEgomotionTrackingRegion([(3, 1)], 0.5, regions)
        self.assertEqual(regions['stableCorners'],
                         [(4, 2), (4, 6), (6, 6), (6, 2)])

        expected = np.zeros((20, 10), np.uint8)
        expected[2:7, 4:7] = 255
        npt.assert_equal(regions['stableRegionMask'], expected)

        # A second quad extends both the corner list and the mask.
        crowdmisc.updateEgomotionTrackingRegion([(5, 1)], 0.5, regions)
        crowdmisc.updateEgomotionTrackingRegion([(5, 3)], 0.5, regions)
        self.assertEqual(regions['stableCorners'],
                         [(4, 2), (4, 6), (6, 6), (6, 2), (10, 2), (10, 6)])
        crowdmisc.updateEgomotionTrackingRegion([(6, 3)], 0.5, regions)
        crowdmisc.updateEgomotionTrackingRegion([(6, 1)], 0.5, regions)
        self.assertEqual(regions['stableCorners'],
                         [(4, 2), (4, 6), (6, 6), (6, 2), (10, 2), (10, 6),
                          (12, 6), (12, 2)])

        expected[2:7, 10:13] = 255
        npt.assert_equal(regions['stableRegionMask'], expected)
Ejemplo n.º 4
0
 def test_warpTrackingRegion(self):
     """warpTrackingRegions maps both regions through the given matrix and
     leaves the input trackingRegions untouched."""
     base = np.zeros((10, 10), dtype=np.uint8)
     regions = crowdmisc.constructTrackingRegions(base)
     quad = [(2, 3), (2, 1), (3, 1), (3, 3)]
     crowdmisc.updateEgomotionTrackingRegion(list(quad), 0.5, regions)
     crowdmisc.updateFlowTrackingRegion(list(quad), 0.5, regions)

     # Pure translation: +2 in x, -1 in y.
     translation = np.eye(3)
     translation[0, 2] = 2
     translation[1, 2] = -1

     warped = crowdmisc.warpTrackingRegions(regions, base, translation)
     self.assertListEqual(warped['flowCorners'], [(6, 5), (6, 1), (8, 1), (8, 5)])
     self.assertListEqual(warped['stableCorners'],
                          [(6, 5), (6, 1), (8, 1), (8, 5)])
     self.assertAlmostEqual(warped['configImageZoom'], 0.5)
     self.assertListEqual(warped['displayFlowCorners'],
                          [(3, 3), (3, 1), (4, 1), (4, 3)])
     self.assertListEqual(warped['displayStableCorners'],
                          [(3, 3), (3, 1), (4, 1), (4, 3)])

     # Check that trackingRegions are left unchanged: rebuild an identical
     # reference from scratch and compare every key.
     reference = crowdmisc.constructTrackingRegions(base)
     crowdmisc.updateEgomotionTrackingRegion(list(quad), 0.5, reference)
     crowdmisc.updateFlowTrackingRegion(list(quad), 0.5, reference)
     for key in regions.keys():
         npt.assert_array_equal(np.array(reference[key]),
                                np.array(regions[key]))

     expected_mask = np.zeros((10, 10), dtype=np.uint8)
     expected_mask[1:6, 6:9] = 255
     npt.assert_equal(warped['flowMask'], expected_mask)
     npt.assert_equal(warped['stableRegionMask'], expected_mask)

     # flowWarpMatrix should equal shift * scaling of the unwarped one.
     scaling = np.eye(3)
     scaling[(0, 1), (0, 1)] = [0.5, 0.25]
     shift = np.eye(3)
     shift[(0, 1), (2, 2)] = [-3, -0.25]
     npt.assert_almost_equal(warped['flowWarpMatrix'],
                             np.dot(shift, scaling))
     self.assertAlmostEqual(warped['minVelocity'], 0.25)
Ejemplo n.º 5
0
    def test_updateFlowTrackingRegion(self):
        """Flow corners are scaled by 1/zoom; a fifth point restarts the
        corner lists from scratch."""
        base = np.zeros((10, 10), dtype=np.uint8)
        regions = crowdmisc.constructTrackingRegions(base)
        regions['flowMask'] = base

        # Build up a quad across several calls; zoom 0.5 doubles coordinates.
        crowdmisc.updateFlowTrackingRegion([(2, 1)], 0.5, regions)
        crowdmisc.updateFlowTrackingRegion([(2, 3)], 0.5, regions)
        self.assertEqual(regions['flowCorners'], [(4, 2), (4, 6)])
        crowdmisc.updateFlowTrackingRegion([(3, 3), (3, 1)], 0.5, regions)
        self.assertEqual(regions['flowCorners'],
                         [(4, 2), (4, 6), (6, 6), (6, 2)])

        expected = np.zeros((10, 10), np.uint8)
        expected[2:7, 4:7] = 255
        npt.assert_equal(regions['flowMask'], expected)

        # One more point after a completed quad resets both corner lists.
        crowdmisc.updateFlowTrackingRegion([(2, 2)], 0.5, regions)
        self.assertEqual(regions['flowCorners'], [(4, 4)])
        self.assertEqual(regions['displayFlowCorners'], [(2, 2)])
Ejemplo n.º 6
0
    def test_compute_pedestrian_flow(self):
        """End-to-end pedestrian flow: count/velocity on the crowd-motion
        pair, then tracking-region propagation on the camera-motion pair."""
        base_image = cv2.imread('test_images/DSC_5156.JPG',
                                cv2.CV_LOAD_IMAGE_GRAYSCALE)
        flow_corners = [(267, 449), (266, 205), (411, 198), (544, 450)]
        stable_corners = [
            (5, 269), (6, 3), (231, 5), (233, 309),
            (1058, 312), (670, 74), (684, 7), (1065, 8),
            (62, 628), (9, 302), (164, 325), (153, 489)]
        base_regions = crowdmisc.constructTrackingRegions(base_image)
        crowdmisc.updateEgomotionTrackingRegion(stable_corners, 0.5,
                                                base_regions)
        crowdmisc.updateFlowTrackingRegion(flow_corners, 0.5, base_regions)

        # Test density computation
        result = crowd.compute_pedestrian_flow(self.frame5244,
                                               self.frame5245,
                                               base_regions,
                                               10, 100)
        (count, _prev_feat, _curr_feat, velocity, _inlier_idx,
         curr_regions, _prev_ego, _curr_ego) = result

        self.assertAlmostEqual(count, 100.0930081143904)
        self.assertAlmostEqual(velocity, 0.0093008114395)

        # Test tracking regions transformation.
        result = crowd.compute_pedestrian_flow(self.frame5817,
                                               self.frame5818,
                                               base_regions,
                                               10, 100)
        (count, _prev_feat, _curr_feat, velocity, _inlier_idx,
         curr_regions, _prev_ego, _curr_ego) = result
        # I eyeballed the following results to make sure they are plausible.
        self.assertListEqual(curr_regions['flowCorners'],
                             [(232, 886), (229, 390), (525, 377), (795, 886)])
        self.assertListEqual(curr_regions['stableCorners'],
                             [(-313, 517), (-312, -29), (157, -18), (162, 601),
                              (1809, 611), (1046, 133), (1073, -1), (1820, 12),
                              (-192, 1254), (-305, 585), (19, 633), (-3, 968)])
Ejemplo n.º 7
0
def main(sourceDir_, recycleDir_, statFileName_, waitKey=cv.WaitKey):
    """Interactive crowd-monitoring main loop.

    Pulls frames from sourceDir_ via crowdio.ImageSequence, lets the
    operator outline flow/egomotion tracking regions with the mouse and
    drive the program with single-key commands, computes pedestrian flow
    between consecutive frames, draws four status windows, and logs the
    running totals.

    Parameters:
        sourceDir_    -- directory new frames are read from
        recycleDir_   -- directory processed frames are moved to
        statFileName_ -- file name used by the state log (kept in a global)
        waitKey       -- key-polling callable, injectable for testing
                         (defaults to cv.WaitKey)

    Returns:
        list of (meanVelocity, frameTime) tuples collected until 'q' quits.
    """
    global sourceDir, recycleDir, statFileName
    sourceDir = sourceDir_
    recycleDir = recycleDir_
    statFileName = statFileName_
    # newPoint is fed by the on_mouse callback registered below;
    # previousFrameTime is shared with other module-level code.
    global newPoint, previousFrameTime

    # Algorithm parameters
    sampleRectDims = crowdmisc.Properties.sample_rect_dims
    egomotion_correction = crowdmisc.Properties.egomotion_correction

    # Variables
    processMode = "pause"  # one of "pause", "egomotion", "auto", "delay"

    trackingRegions = None

    delayBetweenFrames = 500  # ms. For 'delay' mode
    gotoNextFrame = 0  # allow go to next frame in 'pause' mode

    crowdDensity = 0.
    numPeopleInSample = 0
    currPeopleCount = 0
    velocities = []  # accumulated (meanVelocity, frameTime); returned on quit

    peoplePerStreetRegStr = ""  # digits typed by the operator for density input

    currentFileName = ""

    rgbFrame = None

    #TODO:  Remove all references to currentFrame and previusFrame in favor
    # of the versions that memoize things about the image.
    currentFrame = None
    previousFrame = None
    currentTrackedFrame = None
    previousTrackedFrame = None

    firstFrameTime = 0
    currentFrameTime = 0
    previousFrameTime = 0
    frameQueueLenght = 0

    # When True, the density sample region is redrawn on the next iteration.
    recomputeDensityFlag = True

    # Set up windows
    configWindow = 'flowCorners'
    configWindowImage = None

    densityWindow = 'Density'
    densityWindowImage = np.zeros(densityWindowSize + (3, ), dtype=np.uint8)

    stateWindow = 'State'
    stateWindowImage = np.zeros(stateWindowSize + (3, ), dtype=np.uint8)

    statisticWindow = 'Statistic'
    statisticWindowImage = np.zeros(statisticWindowSize + tuple([3]),
                                    dtype=np.uint8)
    cv.NamedWindow(configWindow)
    cv.SetMouseCallback(configWindow, on_mouse)

    cv.NamedWindow(densityWindow, 0)
    cv.NamedWindow(statisticWindow)
    cv.NamedWindow(stateWindow)

    #############################
    # Load saved state.  COMMENT TO RESET EVERY TIME
    #############################
    firstFrameTime, previousFrameTime, currPeopleCount = restoreState()

    # Take one first and second frames
    imageSequence = crowdio.ImageSequence(sourceDir, recycleDir)
    frame, previousFrameTime, frameQueueLenght, currentFileName = \
        imageSequence.getNextFrame(previousFrameTime)
    gotoNextFrame = 1
    if firstFrameTime <= 0.0:
        firstFrameTime = previousFrameTime

    trackingRegions = crowdmisc.constructTrackingRegions(frame)
    currTrackingRegions = crowdmisc.constructTrackingRegions(frame)
    # NOTE(review): configImageZoom is not defined in this function;
    # presumably a module-level constant -- verify.
    configWindowSize = (int(frame.shape[1] * configImageZoom),
                        int(frame.shape[0] * configImageZoom))

    # Main Loop
    delayTime = 1
    while True:
        # Determine how long to pause to check for key input, depending on
        # the state of the program.  If we check too often, we lose performance
        # but if we don't check often enough, the program becomes unresponsive.
        if processMode == 'delay':
            delayTime = delayBetweenFrames
        elif processMode in ('pause', 'egomotion') and gotoNextFrame == 0:
            delayTime = 250  # check for events four times a second
        elif processMode == 'pause' and gotoNextFrame > 0:
            delayTime = 1
        else:
            delayTime = 100

        key = waitKey(delayTime)
        if key >= 0:
            key = key & 0xff  # keep only the low byte of the key code

            # Reset motion and egomotion tracking regions
            if key == ord('r'):
                trackingRegions = crowdmisc.constructTrackingRegions(
                    currentFrame)

            # To edit number of people
            if key >= ord('0') and key <= ord('9'):
                peoplePerStreetRegStr = peoplePerStreetRegStr + chr(key)
            # 8 is ASCII backspace: drop the last typed digit
            if key == 8 and peoplePerStreetRegStr != "":
                peoplePerStreetRegStr = peoplePerStreetRegStr[:-1]

            # 'Enter' pressed - save new number of people
            if key in (10, 13) and peoplePerStreetRegStr != "":
                print "Density input successful"
                crowdDensity = crowd.calculate_density(
                    int(peoplePerStreetRegStr), sampleRectDims)
                numPeopleInSample = int(peoplePerStreetRegStr)
                peoplePerStreetRegStr = ""
                recomputeDensityFlag = True

            # Select mode
            if key == ord('p'):
                processMode = "pause"
            if key == ord('e') and egomotion_correction == True:
                processMode = "egomotion"
            # 'auto'/'delay' require a configured flow region first
            if key == ord(
                    'a') and not trackingRegions['flowWarpMatrix'] is None:
                processMode = "auto"
            if key == ord(
                    'd') and not trackingRegions['flowWarpMatrix'] is None:
                processMode = "delay"

            # In 'delay' mode the arrow keys adjust the inter-frame delay.
            if processMode == "delay":
                if key == 82:  # UP
                    delayBetweenFrames += 1000
                elif key == 84:  # DOWN
                    delayBetweenFrames -= 1000
                elif key == 81:  # LEFT
                    delayBetweenFrames -= 100
                elif key == 83:  # RIGHT
                    delayBetweenFrames += 100

                if delayBetweenFrames < 1:
                    delayBetweenFrames = 1

            # In paused modes the arrow keys step forward by 1 or 10 frames.
            if processMode in ("pause", "egomotion"):
                if key == 82:  # UP
                    gotoNextFrame = 10
                if key == 83:  # RIGHT
                    gotoNextFrame = 1

            if processMode == 'delay':
                delayTime = delayBetweenFrames

        # Quit from main loop
        if key == ord('q'):
            cv2.destroyAllWindows()
            return velocities
            # NOTE(review): unreachable -- the return above exits first.
            break

        # Check new corner point (set asynchronously by on_mouse)
        if not newPoint is None:
            if processMode == 'egomotion':
                crowdmisc.updateEgomotionTrackingRegion([newPoint],
                                                        configImageZoom,
                                                        trackingRegions)
            else:
                crowdmisc.updateFlowTrackingRegion([newPoint], configImageZoom,
                                                   trackingRegions)
            newPoint = None
            currTrackingRegions = trackingRegions

        ##################################
        # Get next frame
        if not processMode in ("pause", "egomotion") or gotoNextFrame > 0:
            frame, tm, frameQueueLenght, currentFileName = \
                imageSequence.getNextFrame(previousFrameTime)
            if not frame is None:
                currentFrameTime = tm
            gotoNextFrame -= 1

        if not frame is None:
            if firstFrameTime <= 0.0:
                firstFrameTime = currentFrameTime

            if rgbFrame is None:
                rgbFrame = np.zeros(frame.shape + tuple([3]), dtype=np.uint8)

            if currentFrame is None:
                currentFrame = np.zeros_like(frame)
            if previousFrame is None:
                previousFrame = np.zeros_like(frame)

            # Promote grayscale frames to 3 channels so colored overlays show.
            if frame.squeeze().ndim == 2:
                rgbFrame = cv2.cvtColor(frame, cv.CV_GRAY2BGR)
            else:
                rgbFrame = frame.copy()

            currentFrame = frame.copy()
            currentTrackedFrame = crowdmisc.TrackedFrame(frame)

            ##################################
            # Draw corner configuration

            configWindowImage = cv2.resize(rgbFrame, configWindowSize)

        #end if (not frame is None)

        # Overlay the configured corners: flow in the default color,
        # stable (egomotion) corners in blue.
        draw.drawPoints(configWindowImage,
                        currTrackingRegions['displayFlowCorners'])
        draw.drawPoints(configWindowImage,
                        currTrackingRegions['displayStableCorners'],
                        color=cv.RGB(0, 0, 255))

        ##################################
        # Prepare images for windows
        #cv.SetZero(densityWindowImage)
        stateWindowImage.fill(0)
        # cv.SetZero(statisticWindowImage)

        ###################################
        # calculate result
        if not trackingRegions['flowWarpMatrix'] is None:
            # draw part of street
            if recomputeDensityFlag:
                densityWindowImage, sampleRegionBounds = draw.draw_sample_region( \
                    currentFrame, currTrackingRegions['flowWarpMatrix'], sampleRectDims)

                recomputeDensityFlag = False

                #print "redisplaying density image..."
            # Only analyze once a genuinely newer frame has arrived.
            if currentFrameTime > previousFrameTime:
                # analyzing
                currPeopleCount, prevFeatures, currFeatures, \
                meanVelocity, velocityInlierIdx, \
                currTrackingRegions,\
                prevEgomotionMatrix, currEgomotionMatrix = \
                    crowd.compute_pedestrian_flow(
                        previousTrackedFrame, currentTrackedFrame,
                        trackingRegions,
                        crowdDensity, currPeopleCount)
                velocities.append((meanVelocity, currentFrameTime))
                # draw statistic to
                statImage = draw.drawResult(
                    currentTrackedFrame.getImage(), prevFeatures, currFeatures,
                    meanVelocity, velocityInlierIdx, crowdDensity,
                    numPeopleInSample, currPeopleCount, currentFrameTime,
                    sampleRegionBounds, trackingRegions, currTrackingRegions,
                    prevEgomotionMatrix, currEgomotionMatrix)
                saveStatImage(statImage, currentFrameTime)
                sampleRegionBounds = None

        ##################################
        ## Draw text
        if not configWindowImage is None:
            draw.drawText(configWindowImage,
                          "Press 'r' to reset street corners",
                          cv.RGB(255, 255, 0))

        densityStr = "Number of People: %d" % (crowdDensity)
        draw.drawText(densityWindowImage, densityStr, cv.RGB(0, 255, 0))
        draw.drawText(densityWindowImage,
                      "Input number: " + peoplePerStreetRegStr,
                      cv.RGB(0, 255, 0), 1)

        modeStr = "Current mode: '" + processMode + "'"
        if trackingRegions['flowWarpMatrix'] is None:
            modeStr = modeStr + " (configuring)"
        if processMode == 'delay':
            secBetweenFrames = delayBetweenFrames / 1000.0
            modeStr = (modeStr + " ( %.03f sec )") % secBetweenFrames
        draw.drawText(stateWindowImage, modeStr, cv.RGB(255, 255, 0), 0)
        timeString = "Statistic begin: " + time.strftime(
            "%Y.%m.%d %H:%M:%S", time.localtime(firstFrameTime))
        timeString += " (%.2f min)" % (
            (currentFrameTime - firstFrameTime) / 60.0)
        draw.drawText(stateWindowImage, timeString, cv.RGB(0, 155, 0), 1)
        draw.drawText(
            stateWindowImage, "Time of the frame: " + time.strftime(
                "%Y.%m.%d %H:%M:%S", time.localtime(currentFrameTime)),
            cv.RGB(0, 155, 0), 2)
        draw.drawText(stateWindowImage,
                      "Total number of people: %d" % currPeopleCount,
                      cv.RGB(0, 255, 0), 3)
        draw.drawText(stateWindowImage,
                      "Frame queue length: %d" % frameQueueLenght,
                      cv.RGB(0, 255, 0), 4)
        if frame is None:
            draw.drawText(stateWindowImage, "No any frame", cv.RGB(255, 0, 0),
                          5)

        # Key-binding help lines.
        draw.drawText(stateWindowImage, "'a' - 'auto' mode",
                      cv.RGB(0, 155, 155), 7)
        draw.drawText(stateWindowImage,
                      "'p' - 'pause' mode ('right' - next, 'up' - 10 next)",
                      cv.RGB(0, 155, 155), 8)
        draw.drawText(stateWindowImage,
                      "'d' - 'delay' mode (arrows change delay time)",
                      cv.RGB(0, 155, 155), 9)
        draw.drawText(stateWindowImage, "'q' - Quit", cv.RGB(0, 155, 155), 10)

        ##################################
        # Update windows
        cv2.imshow(configWindow, configWindowImage)
        cv2.imshow(densityWindow, densityWindowImage)
        cv2.imshow(stateWindow, stateWindowImage)
        cv2.imshow(statisticWindow, statisticWindowImage)

        # Save current file: rotate current frame into previous and log.
        if not processMode in (
                "pause", "egomotion") or currentFrameTime != previousFrameTime:
            frm = previousFrame
            previousFrame = currentFrame
            previousTrackedFrame = currentTrackedFrame
            currentFrame = frm
            previousFrameTime = currentFrameTime
            imageSequence.moveProcessedFrames(currentFrameTime)
            logState(currentFileName, currentFrameTime, crowdDensity,
                     currPeopleCount)