def callback(self, ros_data):
    thoi_gian = time.time()
    #### direct conversion to CV2 ####
    np_app = np.fromstring(ros_data.data, np.uint8)
    self.frame = cv2.imdecode(np_app, cv2.IMREAD_COLOR)
    #rospy.logwarn("FLAGG " + str(self.den_diem_re))
    try:
        # Publish to server
        if self.den_diem_re == False:
            #self.flag_car = True
            pose = Float32()
            if self.da_den_bung_binh == False:
                pose.data = 50
            else:
                pose.data = 40
            self.speed.publish(pose)
            #image = self.frame[40:,:]
            #cv2.imshow("Test",image)
            #self.traffic_detection_sign()
            # i_d = detector.detect_object(mat)
            # i_d.visualize()
            with self.session.as_default():
                with self.graph.as_default():
                    self.frame_normal = self.frame[40:, :].copy()
                    resized = cv2.resize(self.frame_normal, (84, 84))
                    gray = cv2.cvtColor(resized, cv2.COLOR_RGB2GRAY) / 255.0
                    unet_model = self.model.predict(np.array([[gray]]))
                    image = cv2.resize(self.frame.copy(), (320, 240))
                    image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
                    image = np.reshape(image, (1, 240, 320, 3))
                    y_pred_decoded = self.get_ypred_decoded(image)
                    gray *= 255.
                    unet_model *= 255
                    # print(image.shape,y[0][0].shape)
                    #cv2.imshow("Deep learning",y[0][0])
                    self.y_true = cv2.resize(unet_model[0][0], (320, 240))
                    self.src_pts = np.float32([[0, 85], [320, 85], [320, 240], [0, 240]])
                    self.dst_pts = np.float32([[0, 0], [320, 0], [200, 240], [120, 240]])
                    # src_pts = np.float32([[0,0],[320,0],[215,240],[105,240]])
                    # dst_pts = np.float32([[0,0],[240,0],[240,320],[0,320]])
                    self.transform_matrix = perspective_transform(self.src_pts, self.dst_pts)  #From utils.py
                    warped_image = birdView(self.y_true, self.transform_matrix['M'])
                    #cv2.imshow("Bird",warped_image)
                    contours, _ = cv2.findContours(warped_image.astype(np.uint8), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
                    contour_sizes = [(cv2.contourArea(contour), contour) for contour in contours]
                    biggest_contour = max(contour_sizes, key=lambda x: x[0])[1]
                    contours_goc, _ = cv2.findContours(self.y_true.astype(np.uint8), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
                    contour_sizes_goc = [(cv2.contourArea(contour), contour) for contour in contours_goc]
                    biggest_contour_goc = np.array(max(contour_sizes_goc, key=lambda x: x[0])[1])
            if self.flag_car == False:
                '''
                Run in normal mode: no car present.
                '''
                '''
                Mask out the roundabout (bung binh).
                '''
                if not len(biggest_contour) == 0:
                    #cv2.drawContours(mask_after, [biggest_contour], -1, (255), -1) #Approx
                    ############ TEST #######################
                    #extLeft = tuple(biggest_contour[biggest_contour[:, :, 0].argmin()][0])
                    #extRight = tuple(biggest_contour[biggest_contour[:, :, 0].argmax()][0])
                    #extTop = tuple(biggest_contour[biggest_contour[:, :, 1].argmin()][0])
                    #extBot = tuple(biggest_contour[biggest_contour[:, :, 1].argmax()][0])
                    #cv2.circle(mask_after, extLeft, 8, (0, 0, 255), -1)
                    #cv2.circle(mask_after, extRight, 8, (0, 255, 0), -1)
                    #cv2.circle(mask_after, extTop, 8, (255, 0, 0), -1)
                    #cv2.circle(mask_after, extBot, 8, (255, 255, 0), -1)
                    #########################################
                    M = cv2.moments(biggest_contour)
                    # calculate x,y coordinate of center
                    if not M["m00"] == 0:
                        self.cX_center = int(M["m10"] / M["m00"])
                        if self.da_den_bung_binh == True:
                            print("Da den bung binh")
                            self.cX_center = self.cX_center - 10
                        self.cY_center = int(M["m01"] / M["m00"])
                        '''
                        Draw the lane centre.
                        '''
                        mask_after = np.zeros(self.frame.shape, np.uint8)
                        cv2.drawContours(mask_after, [biggest_contour], -1, (255), -1)  #Approx
                        # TEST
                        # for i in range(210,320):
                        #     for j in range(0,240):
                        #         if np.sum(mask_after[j][i]) == 255:
                        #             mask_after[j][i][0] = 0
                        #####
                        cv2.circle(mask_after, (self.cX_center, self.cY_center), 5, (102, 42, 255), -1)
                        cv2.putText(mask_after, "centroid", (self.cX_center - 25, self.cY_center - 25),
                                    cv2.FONT_HERSHEY_SIMPLEX, 0.5, (102, 42, 255), 2)
                        slope = (self.cX_center - 160) / float(self.cY_center - 240)
                        angle_radian = float(math.atan(slope))
                        angle_degree = float(angle_radian * 180.0 / math.pi)
                        #self.last_angle = angle_degree
                        #rospy.logwarn("GOC NORMAL: " + str(angle_degree))
                        angle = Float32()
                        angle.data = -angle_degree
                        #while time.time() - start <= 0.02:
                        #connect.publish('g2_never_die/set_angle',angle)
                        self.angle_car.publish(angle)
                        '''
                        Show the image.
                        '''
                        cv2.imshow("Normal", mask_after)  # window with the deep-learning lane contour
                ########### ADD THE TURN HANDLING HERE ######## (case where only the road is seen; if both the road and a car are visible, move it outside)
                '''
                Count the pixel density to detect the intersection (nga tu).
                '''
                mask = np.zeros(self.frame.shape, np.uint8)
                cv2.drawContours(mask, [biggest_contour], -1, 255, -1)
                diem_re = 0
                '''
                Intersection counting: commented out (original lines 409 to 423).
                '''
                ####################################################
                ############ UNCOMMENT TO COUNT THE INTERSECTION ###
                # bung_binh_ngang = 0
                # bung_binh_doc = 0
                #
                # for i in range(0,320):
                #     if np.sum(mask[0][i]) == 255:
                #         bung_binh_ngang += 1
                # for i in range(0,240):
                #     if np.sum(mask[i][160]) == 255:
                #         bung_binh_doc += 1
                #
                # if bung_binh_doc == 240 and bung_binh_ngang == 320:
                #     self.check_nga_tu = True
                #     rospy.logwarn("DA DEN NGA TU")
                #
                #####################################################
                '''
                Use contours instead of deep learning:
                # lane_detection = laneDetection(self.frame)
                # warp_img, gray_bird = lane_detection.processing_traffic_sign()
                # contours, _ = cv2.findContours(warp_img.astype(np.uint8), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
                # contour_sizes = [(cv2.contourArea(contour), contour) for contour in contours]
                # biggest_contour = max(contour_sizes, key=lambda x: x[0])[1]
                # mask = np.zeros(self.frame.shape, np.uint8)
                # cv2.drawContours(mask, [biggest_contour], -1, 255, -1)
                # cv2.imshow("MASK",mask)
                '''
                try:
                    if self.flag_sign == -1:
                        self.steering_bool = True
                        for i in range(0, 160):
                            if np.sum(mask[50][i]) == 255:
                                diem_re += 1
                    elif self.flag_sign == 1:
                        self.steering_bool = True
                        for i in range(160, 320):
                            if np.sum(mask[50][i]) == 255:
                                diem_re += 1
                    if diem_re == 135:
                        self.nga_tu = 1
                        speed_car = Float32()
                        speed_car.data = 0
                        self.speed.publish(speed_car)
                    if self.nga_tu == 1:
                        if diem_re < 110:
                            self.den_diem_re = True
                except:
                    pass
            '''
            Car counting / car handling starts here.
            '''
            #####################################################
            #if self.count_for_car == 2 or self.count_for_car == 3:
            #    self.drive_for_my_way = True
            #
            # Pin the car position at a certain checkpoint
            ######################################################
            ######################################################
            self.old_count = self.count
            with self.session.as_default():
                with self.graph.as_default():
                    if y_pred_decoded != []:
                        #print(y_pred_decoded)
                        centroid_x = int(y_pred_decoded[0][2]) + ((int(y_pred_decoded[0][4]) - int(y_pred_decoded[0][2])) / 2)
                        centroid_y = int(y_pred_decoded[0][3]) + ((int(y_pred_decoded[0][5]) - int(y_pred_decoded[0][3])) / 2)
                        '''
                        Draw the bounding box.
                        '''
                        #self.frame = cv2.rectangle(self.frame, (int(y_pred_decoded[0][2]),int(y_pred_decoded[0][3])), (int(y_pred_decoded[0][4]),int(y_pred_decoded[0][5])), (0,0,255), 2)
                        #cv2.circle(self.frame, (centroid_x, centroid_y), 5, (102, 42, 255), -1)
                        if y_pred_decoded[0][0] == 1 and y_pred_decoded[0][1] > 0.9:
                            ## CAR HERE ###
                            #self.count_for_car += 1
                            self.flag_car = True
                            #cv2.putText(self.frame, "Car", (centroid_x - 25, centroid_y - 25), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (102, 42, 255), 2)
                            points_to_be_transformed = np.array([[[centroid_x, centroid_y]]], dtype=np.float32)  # point to project into the bird's-eye view
                            bird_view = cv2.perspectiveTransform(points_to_be_transformed, self.transform_matrix['M'])
                            x_bird = bird_view[0][0][0]
                            y_bird = bird_view[0][0][1]
                            '''
                            Remove the coordinates below the car centroid to locate the car.
                            '''
                            i = 0
                            for contour in biggest_contour_goc:
                                for (x, y) in contour:
                                    if y > centroid_y - 10:
                                        biggest_contour_goc = np.delete(biggest_contour_goc, i, axis=0)
                                        i = i - 1
                                i = i + 1
                            M = cv2.moments(biggest_contour_goc)
                            # calculate x,y coordinate of center
                            if not M["m00"] == 0:
                                cX_center_goc = int(M["m10"] / M["m00"])
                                cY_center_goc = int(M["m01"] / M["m00"])
                            '''
                            Show the lane centre and the car centre to compare their positions.
                            '''
                            # mask_position = np.zeros(self.frame.shape, np.uint8)
                            # cv2.drawContours(mask_position, [biggest_contour], -1, (255), -1) #Approx
                            # cv2.circle(mask_position, (cX_center_goc, cY_center_goc), 5, (102, 42, 255), -1)
                            # cv2.circle(mask_position, (centroid_x, centroid_y), 5, (0, 255, 0), -1)
                            # #cv2.imwrite("/catkin_ws/src/goodgame_fptu_dl/src/dat.jpg",mask_position)
                            # cv2.imshow("Goc",mask_position)
                            if cX_center_goc > centroid_x:  # and left[0] < centroid_x < right[0]:
                                self.vi_tri_vat_can = -1
                                rospy.logwarn("Car is on the left lane!")
                            elif cX_center_goc < centroid_x:  # and left[0] < centroid_x < right[0]:
                                rospy.logwarn("Car is on the right lane!")
                                self.vi_tri_vat_can = 1
                            '''
                            After locating the car, find the lane centre and then shift it
                            left or right depending on the obstacle position.
                            '''
                            M_before = cv2.moments(biggest_contour)
                            cX_center_before = int(M_before["m10"] / M_before["m00"])
                            '''
                            A hard-coded position for a particular checkpoint could be added here.
                            '''
                            ######################################################
                            #if self.drive_for_my_way == True:
                            #    self.vi_tri_vat_can = 1
                            #    self.drive_for_my_way = False
                            ######################################################
                            if self.vi_tri_vat_can == -1:
                                cX_center_before = cX_center_before + 25
                            elif self.vi_tri_vat_can == 1:
                                cX_center_before = cX_center_before - 25
                            cY_center_before = int(M_before["m01"] / M_before["m00"])
                            slope = (cX_center_before - 160) / float(cY_center_before - 240)
                            angle_radian = float(math.atan(slope))
                            angle_degree_goc = float(angle_radian * 180.0 / math.pi)
                            angle = Float32()
                            angle.data = -angle_degree_goc
                            self.angle_car.publish(angle)
                            self.count += 1
                        elif y_pred_decoded[0][0] == 2 and y_pred_decoded[0][1] > 0.98:
                            self.flag_sign = -1
                            #cv2.putText(self.frame, "Left", (centroid_x - 25, centroid_y - 25), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (102, 42, 255), 2)
                        elif y_pred_decoded[0][0] == 3 and y_pred_decoded[0][1] > 0.98:
                            self.flag_sign = 1
                            #cv2.putText(self.frame, "Right", (centroid_x - 25, centroid_y - 25), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (102, 42, 255), 2)
                        elif y_pred_decoded[0][0] == 4:
                            cv2.putText(self.frame, "Bien bao la", (centroid_x - 25, centroid_y - 25), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (102, 42, 255), 2)
                        elif y_pred_decoded[0][0] == 5 and y_pred_decoded[0][1] > 0.8:
                            self.da_den_bung_binh = True
                            #print(y_pred_decoded[0][1])
                            cv2.putText(self.frame, "Bung binh", (centroid_x - 25, centroid_y - 25), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (102, 42, 255), 2)
            '''
            Show the frame with the detection boxes.
            '''
            #cv2.imshow("frame2", self.frame)
            '''
            If the box cannot be detected while the obstacle is still there,
            keep treating the following frames as if it had been detected.
            '''
            if self.old_count != 0 and self.old_count == self.count:
                with self.session.as_default():
                    with self.graph.as_default():
                        M_before = cv2.moments(biggest_contour)
                        cX_center_before = int(M_before["m10"] / M_before["m00"])
                        if self.vi_tri_vat_can == -1:
                            cX_center_before = cX_center_before + 25
                        elif self.vi_tri_vat_can == 1:
                            cX_center_before = cX_center_before - 25
                        cY_center_before = int(M_before["m01"] / M_before["m00"])
                        slope = (cX_center_before - 160) / float(cY_center_before - 240)
                        angle_radian = float(math.atan(slope))
                        angle_degree_goc = float(angle_radian * 180.0 / math.pi)
                        angle = Float32()
                        angle.data = -angle_degree_goc
                        self.angle_car.publish(angle)
                        self.pre_count += 1
            if self.pre_count == 1:
                self.flag_car_for_real = True
                rospy.loginfo("Stop!")
                self.pre_count = 0
                self.old_count = 0
                self.count = 0
                self.flag_car = False
                self.vi_tri_vat_can = 0
        else:
            '''
            The turning point of the intersection has been reached:
            balance the lane using the Hough line transform.
            '''
            lane_detection = laneDetection(self.frame)
            if self.flag_sign == 1:
                rospy.loginfo("Right traffic sign detection!")
                if self.da_den_bung_binh == True:
                    slope = (self.cX_center + 20 - 160) / float(self.cY_center - 240)
                    angle_radian = float(math.atan(slope))
                    angle_degree = float(angle_radian * 180.0 / math.pi)
                    #self.last_angle = angle_degree
                    #rospy.logwarn("GOC NORMAL: " + str(angle_degree))
                    angle = Float32()
                    angle.data = -angle_degree
                    #while time.time() - start <= 0.02:
                    #connect.publish('g2_never_die/set_angle',angle)
                    self.angle_car.publish(angle)
                else:
                    angle = Float32()
                    speed_car = Float32()
                    angle.data = 90
                    speed_car.data = 0
                    self.speed.publish(speed_car)
                    self.angle_car.publish(angle)
            if self.flag_sign == -1:
                rospy.loginfo("Left traffic sign detection!")
                if self.da_den_bung_binh == True:
                    slope = (self.cX_center - 20 - 160) / float(self.cY_center - 240)
                    angle_radian = float(math.atan(slope))
                    angle_degree = float(angle_radian * 180.0 / math.pi)
                    #self.last_angle = angle_degree
                    #rospy.logwarn("GOC NORMAL: " + str(angle_degree))
                    angle = Float32()
                    angle.data = -angle_degree
                    #while time.time() - start <= 0.02:
                    #connect.publish('g2_never_die/set_angle',angle)
                    self.angle_car.publish(angle)
                else:
                    angle = Float32()
                    speed_car = Float32()
                    angle.data = -90
                    speed_car.data = 0
                    self.speed.publish(speed_car)
                    self.angle_car.publish(angle)
            if self.stop_here:
                self.count_for_traffic += 1
                if self.count_for_traffic == 5:  # let the car continue for 10 more frames
                    self.da_den_bung_binh = False
                    self.nga_tu = 0
                    self.steering_bool = False
                    self.count_for_traffic = 0
                    self.den_diem_re = False
                    self.flag_sign = 0
                    self.stop_here = False
                    self.check_point += 1
            try:
                line_segments, lines, components = lane_detection.processImage()
                corr_img, gray_ex, cleaned, masked = lane_detection.processBirdView()
                transform_matrix, warped_image = lane_detection.perspective()
                steering = steering_angle(corr_img, lines, transform_matrix)
                lane_lines = steering.average_slop_intercept()
                frame = steering.display()
                angle_degree, check_lane, lane_width = steering.compute_angle_and_speed()
                if abs(angle_degree) > 30:
                    self.flag_for_re = True
                if self.flag_for_re == True:
                    if -10 < angle_degree < 10:
                        self.stop_here = True
                        self.flag_for_re = False
                        self.count_for_sign += 1
            except:
                pass
    except Exception as e:
        rospy.logwarn(str(e))
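# --- Illustrative sketch (not called by the code above or below) -------------
# Both callback versions repeatedly turn a lane centroid (cX, cY) into a
# steering command via slope = (cX - 160) / float(cY - 240), atan, and a sign
# flip before publishing. A minimal, self-contained version of that computation
# is sketched here; the helper name and default frame size are assumptions for
# illustration only.
import math

def centroid_to_steering_degrees(cx, cy, frame_width=320, frame_height=240):
    """Angle (degrees) of the line from the bottom-centre pixel to the centroid,
    negated the same way the callbacks negate it before publishing."""
    slope = (cx - frame_width / 2.0) / float(cy - frame_height)
    return -math.degrees(math.atan(slope))

# Example: a centroid at (140, 180) gives atan(1/3) ~ 18.4 degrees, published as -18.4.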
def callback(self, ros_data):
    with self.session.as_default():
        with self.graph.as_default():
            #### direct conversion to CV2 ####
            np_app = np.fromstring(ros_data.data, np.uint8)
            self.frame = cv2.imdecode(np_app, cv2.IMREAD_COLOR)
            cv2.imshow("hello", self.frame)
            #rospy.logwarn("FLAGG " + str(self.den_diem_re))
            try:
                # Publish to server
                if self.den_diem_re == False:
                    #self.flag_car = True
                    pose = Float32()
                    pose.data = 50
                    self.speed.publish(pose)
                    print("hello")
                    self.old_count = self.count
                    self.traffic_detection_sign()
                    if self.flag_car == False:
                        self.frame_normal = self.frame[40:, :].copy()
                        resized = cv2.resize(self.frame_normal, (84, 84))
                        gray = cv2.cvtColor(resized, cv2.COLOR_RGB2GRAY) / 255.0
                        # i_d = detector.detect_object(mat)
                        # i_d.visualize()
                        #with self.session.as_default():
                        #    with self.graph.as_default():
                        y = self.model.predict(np.array([[gray]]))
                        gray *= 255.
                        y *= 255
                        # print(image.shape,y[0][0].shape)
                        #cv2.imshow("Deep learning",y[0][0])
                        self.y_true = cv2.resize(y[0][0], (320, 240))
                        self.src_pts = np.float32([[0, 85], [320, 85], [320, 240], [0, 240]])
                        self.dst_pts = np.float32([[0, 0], [320, 0], [200, 240], [120, 240]])
                        # src_pts = np.float32([[0,0],[320,0],[215,240],[105,240]])
                        # dst_pts = np.float32([[0,0],[240,0],[240,320],[0,320]])
                        self.transform_matrix = perspective_transform(self.src_pts, self.dst_pts)  #From utils.py
                        warped_image = birdView(self.y_true, self.transform_matrix['M'])
                        #cv2.imshow("Bird",warped_image)
                        _, contours, _ = cv2.findContours(warped_image.astype(np.uint8), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)
                        contour_sizes = [(cv2.contourArea(contour), contour) for contour in contours]
                        biggest_contour = max(contour_sizes, key=lambda x: x[0])[1]
                        mask_after = np.zeros(self.frame.shape, np.uint8)
                        #cv2.drawContours(mask, [biggest_contour], -1, 255, -1)
                        #cv2.imshow("demo",mask)
                        if not len(biggest_contour) == 0:
                            cv2.drawContours(mask_after, [biggest_contour], -1, (255), -1)  #Approx
                            ############ TEST #######################
                            extLeft = tuple(biggest_contour[biggest_contour[:, :, 0].argmin()][0])
                            extRight = tuple(biggest_contour[biggest_contour[:, :, 0].argmax()][0])
                            #extTop = tuple(biggest_contour[biggest_contour[:, :, 1].argmin()][0])
                            #extBot = tuple(biggest_contour[biggest_contour[:, :, 1].argmax()][0])
                            cv2.circle(mask_after, extLeft, 8, (0, 0, 255), -1)
                            cv2.circle(mask_after, extRight, 8, (0, 255, 0), -1)
                            #cv2.circle(mask_after, extTop, 8, (255, 0, 0), -1)
                            #cv2.circle(mask_after, extBot, 8, (255, 255, 0), -1)
                            #########################################
                            M = cv2.moments(biggest_contour)
                            # calculate x,y coordinate of center
                            if not M["m00"] == 0:
                                cX_center = int(M["m10"] / M["m00"])
                                cY_center = int(M["m01"] / M["m00"])
                                cv2.circle(mask_after, (cX_center, cY_center), 5, (102, 42, 255), -1)
                                cv2.putText(mask_after, "centroid", (cX_center - 25, cY_center - 25),
                                            cv2.FONT_HERSHEY_SIMPLEX, 0.5, (102, 42, 255), 2)
                                slope = (cX_center - 160) / float(cY_center - 240)
                                angle_radian = float(math.atan(slope))
                                angle_degree = float(angle_radian * 180.0 / math.pi)
                                #self.last_angle = angle_degree
                                #rospy.logwarn("GOC NORMAL: " + str(angle_degree))
                                angle = Float32()
                                angle.data = -angle_degree
                                #while time.time() - start <= 0.02:
                                #connect.publish('g2_never_die/set_angle',angle)
                                self.angle_car.publish(angle)
                                #cv2.imshow("Normal",mask_after)
                    # with self.session.as_default():
                    #     with self.graph.as_default():
                    image = cv2.resize(self.frame, (320, 240))
                    image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
                    image = np.reshape(image, (1, 240, 320, 3))
                    y_pred_decoded = self.get_ypred_decoded(image)
                    if y_pred_decoded != []:
                        #self.frame = cv2.rectangle(self.frame, (int(y_pred_decoded[0][2]),int(y_pred_decoded[0][3])), (int(y_pred_decoded[0][4]),int(y_pred_decoded[0][5])), (0,0,255), 2)
                        centroid_x = int(y_pred_decoded[0][2]) + ((int(y_pred_decoded[0][4]) - int(y_pred_decoded[0][2])) / 2)
                        centroid_y = int(y_pred_decoded[0][3]) + ((int(y_pred_decoded[0][5]) - int(y_pred_decoded[0][3])) / 2)
                        #cv2.circle(self.frame, (centroid_x, centroid_y), 5, (102, 42, 255), -1)
                        if y_pred_decoded[0][0] == 1 and y_pred_decoded[0][1] > 0.9:
                            ## CAR HERE ###
                            self.count_for_car += 1
                            # car_coors = np.array([centroid_x,centroid_y], dtype=np.float32)
                            # self.pub.publish(car_coors)  # send the car coordinates
                            self.flag_car = True
                            self.count += 1
                            #cv2.putText(self.frame, "Car", (centroid_x - 25, centroid_y - 25), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (102, 42, 255), 2)
                            points_to_be_transformed = np.array([[[centroid_x, centroid_y]]], dtype=np.float32)  # point to project into the bird's-eye view
                            bird_view = cv2.perspectiveTransform(points_to_be_transformed, self.transform_matrix['M'])
                            x_bird = bird_view[0][0][0]
                            y_bird = bird_view[0][0][1]
                            self.frame_for_lane = self.frame[40:, :].copy()
                            resized = cv2.resize(self.frame_for_lane, (84, 84))
                            gray = cv2.cvtColor(resized, cv2.COLOR_RGB2GRAY) / 255.0
                            # i_d = detector.detect_object(mat)
                            # i_d.visualize()
                            # with self.session.as_default():
                            #     with self.graph.as_default():
                            y = self.model.predict(np.array([[gray]]))
                            gray *= 255.
                            y *= 255
                            # print(image.shape,y[0][0].shape)
                            self.y_true = cv2.resize(y[0][0], (320, 240))
                            _, contours_goc, _ = cv2.findContours(self.y_true.astype(np.uint8), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)
                            contour_sizes = [(cv2.contourArea(contour), contour) for contour in contours_goc]
                            biggest_contour = max(contour_sizes, key=lambda x: x[0])[1]
                            left = tuple(biggest_contour[biggest_contour[:, :, 0].argmin()][0])
                            right = tuple(biggest_contour[biggest_contour[:, :, 0].argmax()][0])
                            i = 0
                            for contour in biggest_contour:
                                for (x, y) in contour:
                                    if y > centroid_y - 10:
                                        biggest_contour = np.delete(biggest_contour, i, axis=0)
                                        i = i - 1
                                i = i + 1
                            M = cv2.moments(biggest_contour)
                            # calculate x,y coordinate of center
                            if not M["m00"] == 0:
                                cX_center_goc = int(M["m10"] / M["m00"])
                                cY_center_goc = int(M["m01"] / M["m00"])
                            #mask_position = np.zeros(self.frame.shape, np.uint8)
                            #cv2.drawContours(mask_position, [biggest_contour], -1, (255), -1) #Approx
                            #cv2.circle(mask_position, (cX_center_goc, cY_center_goc), 5, (102, 42, 255), -1)
                            #cv2.circle(mask_position, (centroid_x, centroid_y), 5, (0, 255, 0), -1)
                            #cv2.imshow("Goc",mask_test)
                            #print("VI TRI: ", left[0],centroid_x,right[0])
                            if cX_center_goc > centroid_x and left[0] < centroid_x < right[0]:
                                self.vi_tri_vat_can = -1
                                rospy.logwarn("Car is on the left lane!")
                            elif cX_center_goc < centroid_x and left[0] < centroid_x < right[0]:
                                rospy.logwarn("Car is on the right lane!")
                                self.vi_tri_vat_can = 1
                            self.src_pts = np.float32([[0, 85], [320, 85], [320, 240], [0, 240]])
                            self.dst_pts = np.float32([[0, 0], [320, 0], [200, 240], [120, 240]])
                            # src_pts = np.float32([[0,0],[320,0],[215,240],[105,240]])
                            # dst_pts = np.float32([[0,0],[240,0],[240,320],[0,320]])
                            self.transform_matrix = perspective_transform(self.src_pts, self.dst_pts)  #From utils.py
                            warped_image = birdView(self.y_true, self.transform_matrix['M'])
                            #cv2.imshow("Bird",warped_image)
                            _, contours, _ = cv2.findContours(warped_image.astype(np.uint8), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)
                            contour_sizes = [(cv2.contourArea(contour), contour) for contour in contours]
                            biggest_contour = max(contour_sizes, key=lambda x: x[0])[1]
                            #print(left,right)
                            #mask_after = np.zeros(self.frame.shape, np.uint8)
                            # mask_zero = np.zeros(self.frame.shape, np.uint8)
                            #mask_test = np.zeros(self.frame.shape, np.uint8)
                            #cv2.drawContours(mask_after, [biggest_contour], -1, (255), -1) #Approx
                            #cv2.imshow("AFTER",mask_after)
                            M_before = cv2.moments(biggest_contour)
                            cX_center_before = int(M_before["m10"] / M_before["m00"])
                            if self.vi_tri_vat_can == -1:
                                #print("Here")
                                cX_center_before = cX_center_before + 12.5
                            elif self.vi_tri_vat_can == 1:
                                #print("Here2")
                                cX_center_before = cX_center_before - 12.5
                            cY_center_before = int(M_before["m01"] / M_before["m00"])
                            slope = (cX_center_before - 160) / float(cY_center_before - 240)
                            angle_radian = float(math.atan(slope))
                            angle_degree_goc = float(angle_radian * 180.0 / math.pi)
                            rospy.logwarn("MAXIMUM: " + str(angle_degree_goc))
                            angle = Float32()
                            angle.data = -angle_degree_goc
                            self.angle_car.publish(angle)
                        elif y_pred_decoded[0][0] == 2 and y_pred_decoded[0][1] > 0.98:
                            self.flag_sign = -1
                            cv2.putText(self.frame, "Left", (centroid_x - 25, centroid_y - 25), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (102, 42, 255), 2)
                        elif y_pred_decoded[0][0] == 3 and y_pred_decoded[0][1] > 0.98:
                            self.flag_sign = 1
                            cv2.putText(self.frame, "Right", (centroid_x - 25, centroid_y - 25), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (102, 42, 255), 2)
                    #cv2.imshow("frame2", self.frame)
                    gray_ex = cv2.cvtColor(self.frame, cv2.COLOR_BGR2GRAY)
                    if self.old_count != 0 and self.old_count == self.count:
                        # with self.session.as_default():
                        #     with self.graph.as_default():
                        self.transform_matrix = perspective_transform(self.src_pts, self.dst_pts)  #From utils.py
                        warped_image = birdView(self.y_true, self.transform_matrix['M'])
                        #cv2.imshow("Bird",warped_image)
                        _, contours, _ = cv2.findContours(warped_image.astype(np.uint8), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)
                        contour_sizes = [(cv2.contourArea(contour), contour) for contour in contours]
                        biggest_contour = max(contour_sizes, key=lambda x: x[0])[1]
                        #left = tuple(biggest_contour[biggest_contour[:, :, 0].argmin()][0])
                        #right = tuple(biggest_contour[biggest_contour[:, :, 0].argmax()][0])
                        #print(left,right)
                        #mask_after = np.zeros(self.frame.shape, np.uint8)
                        # mask_zero = np.zeros(self.frame.shape, np.uint8)
                        #mask_test = np.zeros(self.frame.shape, np.uint8)
                        #cv2.drawContours(mask_after, [biggest_contour], -1, (255), -1) #Approx
                        #cv2.imshow("AFTER",mask_after)
                        M_before = cv2.moments(biggest_contour)
                        #extLeft = tuple(biggest_contour[biggest_contour[:, :, 0].argmin()][0])
                        #extRight = tuple(biggest_contour[biggest_contour[:, :, 0].argmax()][0])
                        cX_center_before = int(M_before["m10"] / M_before["m00"])
                        if self.vi_tri_vat_can == -1:
                            cX_center_before = cX_center_before + 12.5
                        elif self.vi_tri_vat_can == 1:
                            cX_center_before = cX_center_before - 12.5
                        cY_center_before = int(M_before["m01"] / M_before["m00"])
                        slope = (cX_center_before - 160) / float(cY_center_before - 240)
                        angle_radian = float(math.atan(slope))
                        angle_degree_goc = float(angle_radian * 180.0 / math.pi)
                        #rospy.logwarn("MAXIMUM: " + str(angle_degree_goc))
                        angle = Float32()
                        angle.data = -angle_degree_goc
                        self.angle_car.publish(angle)
                        self.pre_count += 1
                    print(self.count_for_car)
                    if self.pre_count == 1:
                        self.count_for_car = 0
                        rospy.loginfo("Stop!")
                        self.pre_count = 0
                        self.old_count = 0
                        self.count = 0
                        self.flag_car = False
                        self.vi_tri_vat_can = 0
                else:
                    #cv2.imshow("ELSE",self.frame)
                    lane_detection = laneDetection(self.frame)
                    if self.flag_sign == 1:
                        rospy.loginfo("Right traffic sign detection!")
                        angle = Float32()
                        speed_car = Float32()
                        angle.data = 90
                        speed_car.data = 0
                        self.speed.publish(speed_car)
                        self.angle_car.publish(angle)
                    if self.flag_sign == -1:
                        rospy.loginfo("Left traffic sign detection!")
                        angle = Float32()
                        speed_car = Float32()
                        angle.data = -90
                        speed_car.data = 0
                        self.speed.publish(speed_car)
                        self.angle_car.publish(angle)
                    if self.stop_here:
                        self.count_for_traffic += 1
                        if self.count_for_traffic == 2:
                            self.nga_tu = 0
                            self.steering_bool = False
                            self.count_for_traffic = 0
                            self.den_diem_re = False
                            self.flag_sign = 0
                            self.stop_here = False
                            self.check_point += 1
                    try:
                        line_segments, lines, components = lane_detection.processImage()
                        corr_img, gray_ex, cleaned, masked = lane_detection.processBirdView()
                        transform_matrix, warped_image = lane_detection.perspective()
                        steering = steering_angle(corr_img, lines, transform_matrix)
                        lane_lines = steering.average_slop_intercept()
                        frame = steering.display()
                        angle_degree, check_lane, lane_width = steering.compute_angle_and_speed()
                        #rospy.logwarn("GOC " + str(angle_degree))
                        if abs(angle_degree) > 20:
                            self.flag_for_re = True
                        if self.flag_for_re == True:
                            if -10 < angle_degree < 10:
                                self.stop_here = True
                                self.flag_for_re = False
                        # if not check_lane is None and check_lane != 0 and abs(angle_degree) > 35:
                        #     self.stop_here = True
                        # if check_lane == 0 and abs(angle_degree) > 30:
                        #     self.stop_here = True
                    except:
                        pass
            except Exception as e:
                rospy.logwarn(str(e))
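# --- Illustrative sketch (not called by the code above or below) -------------
# When a car is detected, both callbacks shift the lane-centroid x away from
# the obstacle before computing the steering angle (+/-25 px in the first
# version, +/-12.5 px in the second). A hedged, stand-alone version of that
# offset step; `shift_centroid_for_obstacle` is an assumed name, not part of
# the original code.
def shift_centroid_for_obstacle(cx_center, vi_tri_vat_can, offset=25):
    """Shift the target x-coordinate away from the obstacle.

    vi_tri_vat_can uses the convention from the callbacks above:
    -1 -> car on the left lane (move the target right),
     1 -> car on the right lane (move the target left),
     anything else -> no change.
    """
    if vi_tri_vat_can == -1:
        return cx_center + offset
    if vi_tri_vat_can == 1:
        return cx_center - offset
    return cx_center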
def traffic_detection_sign(self):
    lane_detection = laneDetection(self.frame)
    warp_img, gray_bird = lane_detection.processing_traffic_sign()
    contours, _ = cv2.findContours(warp_img.astype(np.uint8), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
    contour_sizes = [(cv2.contourArea(contour), contour) for contour in contours]
    biggest_contour = max(contour_sizes, key=lambda x: x[0])[1]
    mask = np.zeros(self.frame.shape, np.uint8)
    cv2.drawContours(mask, [biggest_contour], -1, 255, -1)
    #cv2.imshow("Sign",mask)
    diem_re = 0
    try:
        #print("GLAFFFFFFFFFFFFFFF",flag)
        if self.flag_sign == -1:
            self.steering_bool = True
            for i in range(0, 160):
                if np.sum(mask[140][i]) == 255:
                    diem_re += 1
        elif self.flag_sign == 1:
            self.steering_bool = True
            for i in range(160, 320):
                if np.sum(mask[140][i]) == 255:
                    diem_re += 1
        #print("Diem re: ", diem_re,self.flag_sign)
        if diem_re == 90:
            self.nga_tu = 1
        if self.nga_tu == 1:
            if diem_re < 90:
                if self.flag_sign == -1:
                    rospy.loginfo("Left traffic sign detection!")
                    time_test = time.time()
                    #while steering_bool:
                    while time.time() - time_test <= 0.5:  #steering_bool:
                        # lane = connect.subscribe('g2_never_die/lane',Int8())
                        # print("LANEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEE: ",lane.data)
                        # raw = connect.subscribe('g2_never_die/camera/rgb/compressed', CompressedImage())
                        # frame = callback_image(raw)
                        # lane_choose = rospy.get_param('~lane_detection')
                        # fp_abc = open(lane_choose)
                        # shared_abc = pickle.load(fp_abc)
                        # lane = shared_abc["Lane"]
                        #print("ERROR: ",lane)
                        #flag = 1
                        #rospy.loginfo("This time to port to left!")
                        angle = Float32()
                        speed_car = Float32()
                        angle.data = -90
                        speed_car.data = 0
                        self.speed.publish(speed_car)
                        self.angle_car.publish(angle)
                        #connect.publish('g2_never_die/set_speed',speed)
                        #connect.publish('g2_never_die/set_angle',angle)
                        # if self.balance_road != 0:
                        #     self.count_for_traffic += 1
                        #     if self.count_for_traffic == 30:
                        #         self.steering_bool = False
                        #         self.count_for_traffic = 0
                    #flag = 0  # Rewrite flag
                    self.flag_sign = 0
                    # shared = {"Object":0}
                    # traffic_detection = rospy.get_param('~traffic_detection')
                    # fp = open(traffic_detection,"w")
                    # pickle.dump(shared, fp)
                if self.flag_sign == 1:
                    rospy.loginfo("Right traffic sign detection!")
                    time_test = time.time()
                    #while time.time() - time_test <= 0.3:
                    while self.steering_bool:
                        #print("HEREEEE")
                        #self.subscriber1.unregister()
                        #flag = 1
                        steering_bool = True
                        #rospy.loginfo("This time to port to right!")
                        #angle = Float32()
                        # raw = connect.subscribe('g2_never_die/camera/rgb/compressed', CompressedImage())
                        # frame = callback_image(raw)
                        # self.subscriber1 = rospy.Subscriber("g2_never_die/camera/rgb/compressed",
                        #                                     CompressedImage,
                        #                                     self.callback,
                        #                                     queue_size=1)
                        # self.balance_image = rospy.Subscriber("g2_never_die/camera/rgb/compressed",
                        #                                       CompressedImage,
                        #                                       self.callback,
                        #                                       queue_size=1)
                        # self.balance = rospy.Subscriber("g2_never_die/balance",
                        #                                 Int8,
                        #                                 self.balance_lane,
                        #                                 queue_size=1)
                        # lane_detection = rospy.get_param('~lane_detection')
                        # fp_abc = open(lane_detection)
                        # shared_abc = pickle.load(fp_abc)
                        # lane = shared_abc["Lane"]
                        # self.balance_road = lane
                        mes = rospy.wait_for_message("g2_never_die/balance", Int8, timeout=50)
                        print(mes)
                        self.balance_road = mes.data
                        angle = Float32()
                        speed_car = Float32()
                        angle.data = 90
                        speed_car.data = 0
                        self.speed.publish(speed_car)
                        self.angle_car.publish(angle)
                        print("BALANCE: ", self.balance_road)
                        if self.balance_road != 0:
                            rospy.logerr("Hereeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee")
                            self.count_for_traffic += 1
                            if self.count_for_traffic == 15:
                                self.steering_bool = False
                                self.count_for_traffic = 0
                    #rospy.spin()
                    # flag = 0  # Rewrite flag
                    # shared = {"Object":0}
                    # traffic_detection = rospy.get_param('~traffic_detection')
                    # fp = open(traffic_detection,"w")
                    # pickle.dump(shared, fp)
                    #self.balance_road = 0
                    self.flag_sign = 0
                    # self.subscriber1 = rospy.Subscriber("g2_never_die/camera/rgb/compressed",
                    #                                     CompressedImage,
                    #                                     self.callback,
                    #                                     queue_size=1)
                    self.nga_tu = 0
    except Exception as e:
        print(str(e))
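# --- Illustrative sketch (not called by the code above or below) -------------
# traffic_detection_sign() and the first callback both detect the turn by
# counting mask pixels along one fixed row (row 140 here, row 50 in the
# callback) on the half of the image indicated by the last detected sign.
# A minimal, self-contained sketch of that row-density count; the helper name
# is an assumption, and the thresholds (90/135) stay in the calling code.
import numpy as np

def count_row_density(mask, row, flag_sign):
    """Count pixels set to 255 along `row`, scanning the left half of the image
    for flag_sign == -1 and the right half for flag_sign == 1."""
    cols = range(0, 160) if flag_sign == -1 else range(160, 320)
    return sum(1 for i in cols if np.sum(mask[row][i]) == 255)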
try:
    rospy.loginfo("Connect successfully for balance lane! Goodluck Goodgame!")
    value = Int8()
    while not rospy.is_shutdown():
        # subscribe
        raw = connect.subscribe('g2_never_die/camera/rgb/compressed', CompressedImage())
        frame = callback_image(raw)
        lane_detection = laneDetection(frame)
        try:
            line_segments, lines, components = lane_detection.processImage()
            corr_img, gray_ex, cleaned, masked = lane_detection.processBirdView()
            transform_matrix, warped_image = lane_detection.perspective()
            steering = steering_angle(corr_img, lines, transform_matrix)
            lane_lines = steering.average_slop_intercept()
            angle_degree, check_lane, lane_width = steering.compute_angle_and_speed(