# NOTE(review): this chunk's lines were collapsed into one in the paste; formatting
# restored here. Names used below but not imported in this chunk (Path, np,
# NearestNeighborDistanceMetric) are presumably imported earlier in the file
# (pathlib.Path, numpy, Tracking.deep_sort.nn_matching) — confirm.
from Tracking.deep_sort.detection import Detection
from Tracking import generate_dets as gdet
from Tracking.deep_sort.tracker import Tracker
from keras.models import load_model
from .action_enum import Actions

# Define basic parameters (comment translated from Korean).
file_path = Path.cwd()
clip_length = 15            # presumably frames per action clip — verify against caller
max_cosine_distance = 0.3   # gating threshold for deep-sort appearance matching
nn_budget = None            # None = unbounded appearance-feature gallery per track
nms_max_overlap = 1.0       # NMS overlap threshold; 1.0 effectively disables suppression

# Initialize deep_sort (comment translated from Korean).
model_filename = str(file_path / 'Tracking/graph_model/mars-small128.pb')
# Appearance-feature encoder built from the frozen MARS re-id graph.
encoder = gdet.create_box_encoder(model_filename, batch_size=1)
# Cosine-distance metric between tracked targets and new detections.
metric = NearestNeighborDistanceMetric("cosine", max_cosine_distance, nn_budget)
tracker = Tracker(metric)

# track_box color (comment translated from Korean) — green in BGR order.
trk_clr = (0, 255, 0)


def load_action_premodel(model):
    """Load and return a pretrained Keras action-recognition model from *model* path."""
    return load_model(model)


def framewise_recognize(pose, pretrained_model):
    """Per-frame action recognition on tracked poses.

    NOTE(review): this definition appears truncated at the chunk boundary —
    only its opening statements are visible here; the rest of the body is
    outside this view.
    """
    # pose is indexable: [frame, joints, bboxes, xcenter, ..., joints_norm_per_frame]
    # — presumed layout; verify against the producer of *pose*.
    frame, joints, bboxes, xcenter = pose[0], pose[1], pose[2], pose[3]
    joints_norm_per_frame = np.array(pose[-1])
# Use Deep-sort(Simple Online and Realtime Tracking) # To track multi-person for multi-person actions recognition # 定义基本参数 file_path = Path.cwd() clip_length = 15 max_cosine_distance = 0.3 nn_budget = None nms_max_overlap = 1.0 fall_num = 0 # 初始化deep_sort model_filename = str(file_path / 'Tracking/graph_model/mars-small128.pb') #对检测到的object path 编码 encoder = gdet.create_box_encoder(model_filename, batch_size=1) #encoder的索引 计算得到特征 对特征进行编码 #度量kalman预测的目标和下一帧的检测目标进行距离计算 使用余弦距离能够缓解遮挡 ID switch比较频繁的问题 metric = NearestNeighborDistanceMetric("cosine", max_cosine_distance, nn_budget) # tracker = Tracker(metric) #根据度量结果追踪 # track_box颜色 trk_clr = (0, 255, 0) # class ActionRecognizer(object): # @staticmethod # def load_action_premodel(model): # return load_model(model) # # @staticmethod # def framewise_recognize(pose, pretrained_model):