Example no. 1
def kalman_predict_out_line(track, line, out_direction):
    """Roll the track's Kalman state forward until the predicted box lies on the
    `out_direction` side of `line`; return the number of prediction steps taken
    (0 if it is already there)."""
    # print(track.track_id)
    # print(line)
    # print(out_direction)
    # print(track.tlbr)
    if box_line_relative(track.tlbr, line) == out_direction:
        return 0
    predict_num_out = 0
    prev_mean, prev_cov = track.mean, track.covariance
    kal_man = KalmanFilter()
    predict_thres = 0  # pure prediction from the first step
    max_long_predict = (5 if out_direction == 'up'
                        else 2 if track.infer_type() in ['person', 'motorcycle', 'bicycle']
                        else 8)
    while box_line_relative(mean_to_tlbr(prev_mean), line) != out_direction:
        predict_num_out += 1
        cur_mean = prev_mean  # state at time t
        mean, cov = kal_man.predict(prev_mean, prev_cov)
        if predict_num_out > predict_thres:
            new_mean, new_cov = mean, cov
        else:
            new_mean, new_cov = kal_man.update(prev_mean, prev_cov, mean[:4])
        prev_mean, prev_cov = new_mean, new_cov  # state at time t+1
        # Stop if the cap is reached or the state has stopped changing.
        if predict_num_out >= max_long_predict or np.sum(np.abs(cur_mean - mean)) == 0:
            break
        # print(mean_to_tlbr(mean))

    return predict_num_out
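A minimal usage sketch, not part of the original example: it assumes a tracker-style `track` object exposing `mean`, `covariance`, `tlbr` and `infer_type()`, a counting line given as two (x, y) endpoints (the format `box_line_relative` appears to expect), and the helpers `box_line_relative`, `mean_to_tlbr` and `KalmanFilter` imported from the same tracking module; `current_frame_id` and `exit_line` are illustrative.

exit_line = ((0, 400), (1280, 400))  # assumed two-endpoint line format
# Estimate how many Kalman prediction steps until the track exits downward,
# then project the frame at which it is expected to cross the line.
steps_ahead = kalman_predict_out_line(track, exit_line, 'bottom')
projected_exit_frame = current_frame_id + steps_ahead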
Example no. 2
def kalman_predict_out_line(track, line, out_direction, predict_long=None):
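    """Like the previous example, but for upward exits the first 15 predicted
    boxes are fed back through a Kalman update, and `predict_long` optionally
    caps the number of forward prediction steps."""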
    # print(track.track_id)
    # print(line)
    # print(out_direction)
    # print(track.tlbr)
    if box_line_relative(track.tlbr, line) == out_direction:
        return 0
    predict_num_out = 0
    prev_mean, prev_cov = track.mean, track.covariance
    kal_man = KalmanFilter()
    predict_thres = 15 if out_direction == 'up' else 0
    if predict_long is not None:
        max_long_predict = predict_long
    else:
        max_long_predict = 50 if out_direction == 'up' else 4
    while box_line_relative(mean_to_tlbr(prev_mean), line) != out_direction:
        predict_num_out += 1
        cur_mean = prev_mean  #of t
        mean, cov = kal_man.predict(prev_mean, prev_cov)
        if predict_num_out > predict_thres:
            new_mean, new_cov = mean, cov
        else:
            new_mean, new_cov = kal_man.update(prev_mean, prev_cov, mean[:4])
        prev_mean, prev_cov = new_mean, new_cov  #of t+1
        if predict_num_out >= max_long_predict or np.sum(
                np.abs(cur_mean - mean)) == 0:
            break
        # print(mean_to_tlbr(mean))

    return predict_num_out
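Reusing `track` and `exit_line` from the previous sketch, a short illustrative call showing the optional cap (the 30-step limit is arbitrary):

# Cap forward prediction at 30 steps regardless of exit direction.
steps_ahead = kalman_predict_out_line(track, exit_line, 'up', predict_long=30)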
Example no. 3
def check_track_line_reasonable(track, line, line_id, mov_id):
    """Return False when the track crosses `line` but `mov_id` is not one of
    the movements allowed for that line; otherwise return True."""
    accept_mov_id = []  # default: no movement is accepted for other line ids
    if line_id == '2':
        accept_mov_id = ['2', '3']
    elif line_id == '1':
        accept_mov_id = ['1', '4']
    if box_line_relative(track.track_trajectory[-2],
                         line) == 'cross' and mov_id not in accept_mov_id:
        return False
    return True
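A hedged sketch of how this check could gate a count, mirroring the commented-out calls in the tracker example below; `line1`, `line2`, `track` and `movement_id` follow the names used there and are otherwise illustrative.

# Discard a tracklet whose claimed movement contradicts the line it crossed.
is_reasonable = (check_track_line_reasonable(track, line1, '1', movement_id)
                 and check_track_line_reasonable(track, line2, '2', movement_id))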
Example no. 4
def box_cross_line(self, bbox_list, line):
    """Return True if any box in `bbox_list` crosses `line`."""
    for bbox in bbox_list:
        if box_line_relative(bbox, line) == 'cross':
            return True

    return False
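A small assumed usage from inside another method of the same class; `track.track_trajectory` (a list of tlbr boxes) and `self.line3` follow the attribute names used in the tracker example below.

# True if either of the track's last two boxes is still crossing the stop line.
still_on_line = self.box_cross_line(track.track_trajectory[-2:], self.line3)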
Example no. 5
    def update(self, im_blob, img0):
        self.frame_id += 1
        activated_starcks = []
        refind_stracks = []
        lost_stracks = []
        removed_stracks = []

        width = img0.shape[1]
        height = img0.shape[0]
        init_polygon = self.polygon2 if self.two_polygon_system and self.frame_id >= self.warmup_frame else self.polygon
        two_wheel_polygon = init_polygon
        four_wheel_polygon = self.polygon
        virtual_polygon = self.virtual_polygon
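        # Side length (px) above which a detection is flagged as a huge vehicle in Step 1.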
        huge_box_thres = 140
        ''' Step 1: Network forward, get detections & embeddings'''
        with torch.no_grad():
            ori_imgs, framed_imgs, framed_metas = preprocess(
                [img0], max_size=self.input_size)
            device = torch.device('cuda:0')
            x = torch.stack(
                [torch.from_numpy(fi).to(device) for fi in framed_imgs], 0)
            x = x.to(torch.float32).permute(0, 3, 1, 2)
            features, regression, classification, anchors = self.detetection_model(
                x)
            regressBoxes = BBoxTransform()
            clipBoxes = ClipBoxes()
            out = postprocess(x, anchors, regression, classification,
                              regressBoxes, clipBoxes, self.opt.det_thres,
                              self.opt.nms_thres)
            out = invert_affine(framed_metas, out)
            bbox = []
            score = []
            types = []
            huge_vehicles = []
            for j in range(len(out[0]['rois'])):
                obj = self.obj_list[out[0]['class_ids'][j]]
                if obj in self.obj_interest:
                    x1, y1, x2, y2 = out[0]['rois'][j].astype(int)
                    # position/score filters (stricter for four-wheel classes)
                    if (y1 + y2) / 2 > 0.36 * height and float(
                            out[0]['scores'][j]) <= 0.25:
                        continue
                    elif (y1 + y2) / 2 > 0.4 * height and float(
                            out[0]['scores']
                        [j]) <= 0.35 and obj not in self.person_or_motorcycle:
                        continue
                    if obj not in self.person_or_motorcycle and float(
                            out[0]['scores'][j]) >= 0.3:
                        bbox.append([x1, y1, x2, y2])
                        score.append(float(out[0]['scores'][j]))
                        types.append(obj)
                        if (y1 + y2) / 2 < 0.3 * height and (
                            (y2 - y1) >= 110 or (x2 - x1) >= 110):
                            huge_vehicles.append(True)
                            continue
                        if (y2 - y1) <= huge_box_thres and (
                                x2 - x1) <= huge_box_thres:
                            huge_vehicles.append(False)
                        else:
                            huge_vehicles.append(True)
                    elif obj in self.person_or_motorcycle:  #['bicycle',  'motorcycle']
                        bbox.append([x1, y1, x2, y2])
                        score.append(float(out[0]['scores'][j]))
                        types.append(obj)
                        huge_vehicles.append(False)

        # vis
        # print(len(bbox))
        # print(img0.shape)
        # print(self.polygon)
        # for i in range(len(bbox)):
        #     bb = bbox[i]
        #     cv2.rectangle(img0, (bb[0], bb[1]),
        #                   (bb[2], bb[3]),
        #                   (0, 255, 0), 2)
        # cv2.polylines(img0,[np.asarray(self.polygon)],True,(0,255,255))
        # cv2.imshow('dets', img0)
        # cv2.waitKey(0)

        if len(bbox) > 0:
            '''Detections'''
            detections = [
                STrack(STrack.tlbr_to_tlwh(tlbr),
                       sco,
                       clas,
                       30,
                       huge_vehicle=hv)
                for (tlbr, sco, clas,
                     hv) in zip(bbox, score, types, huge_vehicles)
            ]

        else:
            detections = []

        detections_plot = detections.copy()
        ''' Add newly detected tracklets to tracked_stracks'''
        unconfirmed = []
        tracked_stracks = []  # type: list[STrack]
        for track in self.tracked_stracks:
            if not track.is_activated:
                unconfirmed.append(track)
            else:
                tracked_stracks.append(track)
        ''' Step 2: First association, with gating distance'''
        strack_pool, lost_map_tracks = joint_stracks(tracked_stracks,
                                                     self.lost_stracks)
        # Predict the current location with KF
        #for strack in strack_pool:
        #strack.predict()
        STrack.multi_predict(strack_pool)
        #dists = matching.embedding_distance(strack_pool, detections)
        detections = heuristic_occlusion_detection(detections)
        match_thres = 150
        dists = np.zeros(shape=(len(strack_pool), len(detections)))
        dists = matching.gate_cost_matrix2(self.kalman_filter,
                                           dists,
                                           strack_pool,
                                           detections,
                                           type_diff=True)
        #dists = matching.fuse_motion(self.opt,self.kalman_filter, dists, strack_pool, detections,lost_map=lost_map_tracks,occlusion_map=occlusion_map,thres=match_thres)
        matches, u_track, u_detection = matching.linear_assignment(
            dists, thresh=match_thres)

        for itracked, idet in matches:
            track = strack_pool[itracked]
            det = detections[idet]
            if track.state == TrackState.Tracked:
                track.update(detections[idet], self.frame_id)
                activated_starcks.append(track)
            else:
                track.re_activate(det, self.frame_id, new_id=False)
                refind_stracks.append(track)
        ''' Step 3: Second association, with IOU'''
        detections = [detections[i] for i in u_detection]
        r_tracked_stracks = [
            strack_pool[i] for i in u_track
            if strack_pool[i].state == TrackState.Tracked
        ]
        dists = matching.iou_distance(r_tracked_stracks, detections)
        matches, u_track, u_detection = matching.linear_assignment(dists,
                                                                   thresh=0.5)

        for itracked, idet in matches:
            track = r_tracked_stracks[itracked]
            det = detections[idet]
            if track.state == TrackState.Tracked:
                track.update(det, self.frame_id)
                activated_starcks.append(track)
            else:
                track.re_activate(det, self.frame_id, new_id=False)
                refind_stracks.append(track)
        '''Mark remaining unmatched tracks as lost'''
        for it in u_track:
            track = r_tracked_stracks[it]
            if not track.state == TrackState.Lost:
                track.mark_lost()
                lost_stracks.append(track)
        '''Deal with unconfirmed tracks, usually tracks with only one beginning frame'''
        detections = [detections[i] for i in u_detection]
        dists = matching.iou_distance(unconfirmed, detections)
        matches, u_unconfirmed, u_detection = matching.linear_assignment(
            dists, thresh=0.6)
        for itracked, idet in matches:
            unconfirmed[itracked].update(detections[idet], self.frame_id)
            activated_starcks.append(unconfirmed[itracked])
        for it in u_unconfirmed:
            track = unconfirmed[it]
            track.mark_removed()
            removed_stracks.append(track)
        """ Step 4: Init new stracks"""
        for inew in u_detection:
            track = detections[inew]
            track_init_polygon = init_polygon if not track.huge_vehicle else virtual_polygon
            if track.score < self.det_thresh or track.occlusion_status or check_bbox_outside_polygon(
                    track_init_polygon, track.tlbr):
                continue
            # track_types=self.person_or_motorcycle[0] if tlbrs_to_mean_area(track.track_trajectory) <=1500 else track.infer_type()
            if self.frame_id >= 1 and not check_bbox_inside_polygon(
                    track_init_polygon, track.tlbr
            ):  #and track_types in self.person_or_motorcycle #person, motorcycle
                continue
            track.activate(self.kalman_filter, self.frame_id)
            activated_starcks.append(track)
        """ Step 5: Update state and getting out of interest tracklet if have"""
        out_of_polygon_tracklet = []
        refind_stracks_copy = []
        activated_starcks_copy = []
        for idx, current_tracked_tracks in enumerate(
            [refind_stracks, activated_starcks]):  #

            for track in current_tracked_tracks:
                if tlbrs_to_mean_area(track.track_trajectory) <= 1000:
                    track_type = self.person_or_motorcycle[0]  #person
                else:
                    track_type = track.infer_type()
                if track_type in self.person_or_motorcycle:
                    out_polygon = two_wheel_polygon
                    p_type = 'two_wheel'
                else:
                    out_polygon = four_wheel_polygon  #if not track.huge_vehicle else virtual_polygon
                    p_type = 'four_wheel'
                if check_bbox_outside_polygon(out_polygon, track.tlbr):
                    track.mark_removed()
                    removed_stracks.append(track)
                    if (
                        (len(track.track_frames) >= 4 and self.frame_id <= 5)
                            or (len(track.track_frames) >= 5
                                and self.frame_id >= self.warmup_frame + 5)
                    ) and idx == 1:  # require 4-5 tracked frames before counting a track

                        track_center = [[(x[0] + x[2]) / 2, (x[1] + x[3]) / 2]
                                        for x in track.track_trajectory]

                        #movement_id=counting_moi(self.paths,[(track_center[0],track_center[-1])])[0]

                        # if track_type in self.person_or_motorcycle:
                        #     movement_id=counting_moi(self.paths,[(track_center[0],track_center[min(5,len(track_center)-1)])])[0]
                        # else:
                        #     movement_id=counting_moi(self.paths,[(track_center[0],track_center[min(15,(len(track_center)+1)//2)])])[0]
                        if track_center[-1][1] <= 220:
                            if track_center[-1][0] >= 170:
                                movement_id = '2'
                            else:
                                continue
                        elif ((track_center[-1][0] >= 950
                               and track_type in self.person_or_motorcycle) or
                              (track.track_trajectory[0][2] >=
                               800)) and track_center[-1][1] >= 400:
                            movement_id = '3'
                        elif track_center[-1][1] >= 400:
                            movement_id = '1'
                        else:
                            continue
                        line_interest = self.line1 if str(
                            movement_id) == '1' else self.line2 if str(
                                movement_id) == '2' else None
                        out_direction = 'bottom' if str(
                            movement_id) == '1' else 'up' if str(
                                movement_id) == '2' else None
                        frame_id = self.frame_id + kalman_predict_out_line(
                            track, line_interest, out_direction
                        ) if line_interest is not None else self.frame_id
                        # if (not check_track_line_reasonable(track,self.line1,'1',movement_id) or
                        # not check_track_line_reasonable(track,self.line2,'2',movement_id)):
                        #     continue
                        if box_line_relative(
                                track.track_trajectory[-2],
                                self.line3) == 'cross' or box_line_relative(
                                    track.track_trajectory[-1],
                                    self.line3) == 'cross':
                            continue
                        out_of_polygon_tracklet.append(
                            (frame_id, track.track_id, track_type,
                             movement_id))
                else:
                    refind_stracks_copy.append(
                        track) if idx == 0 else activated_starcks_copy.append(
                            track)
        refind_stracks = refind_stracks_copy
        activated_starcks = activated_starcks_copy

        lost_stracks_copy = []
        for track in lost_stracks:
            if tlbrs_to_mean_area(track.track_trajectory) <= 1000:
                track_type = self.person_or_motorcycle[0]  #person
            else:
                track_type = track.infer_type()
            if track_type in self.person_or_motorcycle:
                out_polygon = two_wheel_polygon
                p_type = 'two_wheel'
            else:
                out_polygon = four_wheel_polygon
                p_type = 'four_wheel'

            if check_bbox_intersect_or_outside_polygon(out_polygon,
                                                       track.tlbr):
                track.mark_removed()
                removed_stracks.append(track)
                if ((len(track.track_frames) >= 4 and self.frame_id <= 5)
                        or (len(track.track_frames) >= 6
                            and self.frame_id >= self.warmup_frame + 5)):
                    track_center = [[(x[0] + x[2]) / 2, (x[1] + x[3]) / 2]
                                    for x in track.track_trajectory]

                    #movement_id=counting_moi(self.paths,[(track_center[0],track_center[-1])])[0]
                    # if track_type in self.person_or_motorcycle:
                    #     movement_id=counting_moi(self.paths,[(track_center[0],track_center[min(5,len(track_center)-1)])])[0]
                    # else:
                    #     movement_id=counting_moi(self.paths,[(track_center[0],track_center[min(15,(len(track_center)+1)//2)])])[0]
                    if track_center[-1][1] <= 220:
                        if track_center[-1][0] >= 170:
                            movement_id = '2'
                        else:
                            continue
                    elif ((track_center[-1][0] >= 950
                           and track_type in self.person_or_motorcycle) or
                          (track.track_trajectory[0][2] >=
                           800)) and track_center[-1][1] >= 400:
                        movement_id = '3'
                    elif track_center[-1][1] >= 400:
                        movement_id = '1'
                    else:
                        continue

                    line_interest = self.line1 if str(
                        movement_id) == '1' else self.line2 if str(
                            movement_id) == '2' else None
                    out_direction = 'bottom' if str(
                        movement_id) == '1' else 'up' if str(
                            movement_id) == '2' else None
                    frame_id = self.frame_id + kalman_predict_out_line(
                        track, line_interest, out_direction
                    ) if line_interest is not None else self.frame_id
                    # if (not check_track_line_reasonable(track,self.line1,'1',movement_id) or
                    #     not check_track_line_reasonable(track,self.line2,'2',movement_id)):
                    #     continue
                    if box_line_relative(
                            track.track_trajectory[-2],
                            self.line3) == 'cross' or box_line_relative(
                                track.track_trajectory[-1],
                                self.line3) == 'cross':
                        continue
                    out_of_polygon_tracklet.append(
                        (frame_id, track.track_id, track_type, movement_id))
            else:
                lost_stracks_copy.append(track)

        lost_stracks = lost_stracks_copy

        for track in self.lost_stracks:
            if self.frame_id - track.end_frame > self.max_time_lost:
                track.mark_removed()
                removed_stracks.append(track)
            # Remove tracklets that have moved off screen
            elif track.tlwh[0] + track.tlwh[2] // 2 > width or track.tlwh[
                    1] + track.tlwh[3] // 2 > height or min(
                        track.tlwh[0] + track.tlwh[2] // 2,
                        track.tlwh[1] + track.tlwh[3] // 2) < 0:
                track.num_out_frame += 1
                if track.num_out_frame > STrack.out_of_frame_patience:
                    track.mark_removed()
                    removed_stracks.append(track)

        # print('Ramained match {} s'.format(t4-t3))
        # print(out_of_polygon_tracklet)
        self.tracked_stracks = [
            t for t in self.tracked_stracks if t.state == TrackState.Tracked
        ]
        self.tracked_stracks, _ = joint_stracks(self.tracked_stracks,
                                                activated_starcks)
        self.tracked_stracks, _ = joint_stracks(self.tracked_stracks,
                                                refind_stracks)
        self.lost_stracks = sub_stracks(self.lost_stracks,
                                        self.tracked_stracks)
        self.lost_stracks.extend(lost_stracks)
        self.lost_stracks = sub_stracks(self.lost_stracks,
                                        self.removed_stracks)
        self.removed_stracks.extend(removed_stracks)
        self.tracked_stracks, self.lost_stracks = remove_duplicate_stracks(
            self.tracked_stracks, self.lost_stracks)
        #self.merge_track()
        output_stracks = [
            track for track in self.tracked_stracks if track.is_activated
        ]

        # logger.debug('===========Frame {}=========='.format(self.frame_id))
        # logger.debug('Activated: {}'.format([track.track_id for track in activated_starcks]))
        # logger.debug('Refind: {}'.format([track.track_id for track in refind_stracks]))
        # logger.debug('Lost: {}'.format([track.track_id for track in lost_stracks]))
        # logger.debug('Removed: {}'.format([track.track_id for track in removed_stracks]))

        return output_stracks, detections_plot, out_of_polygon_tracklet
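A hedged driver sketch, not from the source: `PolygonTracker`, `opt` and the video path are illustrative stand-ins for however this `update()` is actually wired up; the tuple layout of `out_of_polygon_tracklet` is taken from the example itself, and `im_blob` is never referenced inside this variant of `update()`, so `None` is passed for it.

import cv2

tracker = PolygonTracker(opt)          # hypothetical wrapper exposing the update() above
cap = cv2.VideoCapture('cam_01.mp4')   # illustrative input video
counts = []
while True:
    ok, frame = cap.read()
    if not ok:
        break
    # im_blob is unused by this update() variant, so None is passed.
    online_tracks, dets_plot, finished = tracker.update(None, frame)
    # Each finished entry is (frame_id, track_id, vehicle_type, movement_id).
    counts.extend(finished)
cap.release()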