def all_init(args):
    ''' Initializing the state and the configuration. '''
    C = Config(lite=False, config_path=args.config_path)
    S = State(mouse_track=args.mouse_track, exec_action=args.exec_action)
    start_key_listener(S)
    landmark_list = landmarkList_pb2.LandmarkList()
    return C, S, landmark_list
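
# Usage sketch for all_init (hypothetical example: Config, State, start_key_listener
# and landmarkList_pb2 are assumed to come from the gestop package; the attribute
# names below are taken from the function body, not from the real CLI parser, and
# the config path is a placeholder).
from types import SimpleNamespace

def _example_all_init():
    args = SimpleNamespace(
        config_path='config.json',   # hypothetical path
        mouse_track=True,
        exec_action=False,
    )
    C, S, landmark_list = all_init(args)
    return C, S, landmark_list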
# Third-party imports used by this representer; Configurable and State are
# assumed to come from the project's own configuration module.
import cv2
import numpy as np
import pyclipper
from shapely.geometry import Polygon


class SegDetectorRepresenter(Configurable):
    thresh = State(default=0.3)
    box_thresh = State(default=0.7)
    max_candidates = State(default=100)
    dest = State(default='binary')

    def __init__(self, cmd=None, **kwargs):
        cmd = cmd or {}  # avoid a mutable default argument
        self.load_all(**kwargs)
        self.min_size = 3
        self.scale_ratio = 0.4
        if 'debug' in cmd:
            self.debug = cmd['debug']
        if 'thresh' in cmd:
            self.thresh = cmd['thresh']
        if 'box_thresh' in cmd:
            self.box_thresh = cmd['box_thresh']
        if 'dest' in cmd:
            self.dest = cmd['dest']

    def represent(self, batch, _pred, is_output_polygon=False):
        '''
        batch: a dict produced by dataloaders.
            image: tensor of shape (N, C, H, W).
            polygons: tensor of shape (N, K, 4, 2), the polygons of objective regions.
            ignore_tags: tensor of shape (N, K), indicates whether a region is ignorable or not.
            shape: the original shape of images.
            filename: the original filenames of images.
        pred:
            binary: text region segmentation map, with shape (N, 1, H, W)
            thresh: [if exists] threshold prediction with shape (N, 1, H, W)
            thresh_binary: [if exists] binarized with threshold, (N, 1, H, W)
        '''
        images = batch['image']
        if isinstance(_pred, dict):
            pred = _pred[self.dest]
        else:
            pred = _pred
        segmentation = self.binarize(pred)
        boxes_batch = []
        scores_batch = []
        for batch_index in range(images.size(0)):
            height, width = batch['shape'][batch_index]
            if is_output_polygon:
                boxes, scores = self.polygons_from_bitmap(
                    pred[batch_index], segmentation[batch_index], width, height)
            else:
                boxes, scores = self.boxes_from_bitmap(
                    pred[batch_index], segmentation[batch_index], width, height)
            boxes_batch.append(boxes)
            scores_batch.append(scores)
        return boxes_batch, scores_batch

    def binarize(self, pred):
        return pred > self.thresh

    def polygons_from_bitmap(self, pred, _bitmap, dest_width, dest_height):
        '''
        _bitmap: single map with shape (1, H, W),
            whose values are binarized as {0, 1}
        '''
        assert _bitmap.size(0) == 1
        bitmap = _bitmap.cpu().numpy()[0]  # The first channel
        pred = pred.cpu().detach().numpy()[0]
        height, width = bitmap.shape
        boxes = []
        scores = []

        # OpenCV >= 4 returns (contours, hierarchy)
        contours, _ = cv2.findContours((bitmap * 255).astype(np.uint8),
                                       cv2.RETR_LIST, cv2.CHAIN_APPROX_SIMPLE)

        for contour in contours[:self.max_candidates]:
            epsilon = 0.01 * cv2.arcLength(contour, True)
            approx = cv2.approxPolyDP(contour, epsilon, True)
            points = approx.reshape((-1, 2))
            if points.shape[0] < 4:
                continue
            # _, sside = self.get_mini_boxes(contour)
            # if sside < self.min_size:
            #     continue
            score = self.box_score_fast(pred, points.reshape(-1, 2))
            if self.box_thresh > score:
                continue

            if points.shape[0] > 2:
                box = self.unclip(points, unclip_ratio=2.0)
                if len(box) > 1:
                    continue
            else:
                continue
            box = box.reshape(-1, 2)
            _, sside = self.get_mini_boxes(box.reshape((-1, 1, 2)))
            if sside < self.min_size + 2:
                continue

            if not isinstance(dest_width, int):
                dest_width = dest_width.item()
                dest_height = dest_height.item()

            box[:, 0] = np.clip(
                np.round(box[:, 0] / width * dest_width), 0, dest_width)
            box[:, 1] = np.clip(
                np.round(box[:, 1] / height * dest_height), 0, dest_height)
            boxes.append(box.tolist())
            scores.append(score)
        return boxes, scores

    def boxes_from_bitmap(self, pred, _bitmap, dest_width, dest_height):
        '''
        _bitmap: single map with shape (1, H, W),
            whose values are binarized as {0, 1}
        '''
        assert _bitmap.size(0) == 1
        bitmap = _bitmap.cpu().numpy()[0]  # The first channel
        pred = pred.cpu().detach().numpy()[0]
        height, width = bitmap.shape
        contours, _ = cv2.findContours((bitmap * 255).astype(np.uint8),
                                       cv2.RETR_LIST, cv2.CHAIN_APPROX_SIMPLE)
        num_contours = min(len(contours), self.max_candidates)
        boxes = np.zeros((num_contours, 4, 2), dtype=np.int16)
        scores = np.zeros((num_contours, ), dtype=np.float32)

        for index in range(num_contours):
            contour = contours[index]
            points, sside = self.get_mini_boxes(contour)
            if sside < self.min_size:
                continue
            points = np.array(points)
            score = self.box_score_fast(pred, points.reshape(-1, 2))
            if self.box_thresh > score:
                continue

            box = self.unclip(points).reshape(-1, 1, 2)
            box, sside = self.get_mini_boxes(box)
            if sside < self.min_size + 2:
                continue
            box = np.array(box)
            if not isinstance(dest_width, int):
                dest_width = dest_width.item()
                dest_height = dest_height.item()

            box[:, 0] = np.clip(
                np.round(box[:, 0] / width * dest_width), 0, dest_width)
            box[:, 1] = np.clip(
                np.round(box[:, 1] / height * dest_height), 0, dest_height)
            boxes[index, :, :] = box.astype(np.int16)
            scores[index] = score
        return boxes, scores

    def unclip(self, box, unclip_ratio=1.5):
        poly = Polygon(box)
        distance = poly.area * unclip_ratio / poly.length
        offset = pyclipper.PyclipperOffset()
        offset.AddPath(box, pyclipper.JT_ROUND, pyclipper.ET_CLOSEDPOLYGON)
        expanded = np.array(offset.Execute(distance))
        return expanded

    def get_mini_boxes(self, contour):
        bounding_box = cv2.minAreaRect(contour)
        points = sorted(list(cv2.boxPoints(bounding_box)), key=lambda x: x[0])

        index_1, index_2, index_3, index_4 = 0, 1, 2, 3
        if points[1][1] > points[0][1]:
            index_1 = 0
            index_4 = 1
        else:
            index_1 = 1
            index_4 = 0
        if points[3][1] > points[2][1]:
            index_2 = 2
            index_3 = 3
        else:
            index_2 = 3
            index_3 = 2

        box = [points[index_1], points[index_2],
               points[index_3], points[index_4]]
        return box, min(bounding_box[1])

    def box_score_fast(self, bitmap, _box):
        h, w = bitmap.shape[:2]
        box = _box.copy()
        # np.int was removed in recent NumPy releases; use a concrete dtype
        xmin = np.clip(np.floor(box[:, 0].min()).astype(np.int32), 0, w - 1)
        xmax = np.clip(np.ceil(box[:, 0].max()).astype(np.int32), 0, w - 1)
        ymin = np.clip(np.floor(box[:, 1].min()).astype(np.int32), 0, h - 1)
        ymax = np.clip(np.ceil(box[:, 1].max()).astype(np.int32), 0, h - 1)

        mask = np.zeros((ymax - ymin + 1, xmax - xmin + 1), dtype=np.uint8)
        box[:, 0] = box[:, 0] - xmin
        box[:, 1] = box[:, 1] - ymin
        cv2.fillPoly(mask, box.reshape(1, -1, 2).astype(np.int32), 1)
        return cv2.mean(bitmap[ymin:ymax + 1, xmin:xmax + 1], mask)[0]
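
# Usage sketch for SegDetectorRepresenter (assumptions: torch is installed, the
# Configurable/State base classes resolve the declared defaults in load_all(), and
# the cmd-style overrides are accepted as shown in __init__; the tensors below are
# random stand-ins rather than real model output).
import torch

def _example_represent():
    representer = SegDetectorRepresenter(cmd={'thresh': 0.3, 'box_thresh': 0.7})
    batch = {
        'image': torch.zeros(1, 3, 640, 640),   # N, C, H, W
        'shape': [(720, 1280)],                 # original (height, width) per image
    }
    pred = torch.rand(1, 1, 640, 640)           # fake 'binary' segmentation map
    boxes_batch, scores_batch = representer.represent(batch, pred)
    return boxes_batch, scores_batch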
# Standard-library / third-party imports used below; IMU, Controller, State,
# JoystickInterface, config, solver and hardware_interface are assumed to be
# provided by the surrounding robot control package.
import math
import time

import numpy as np


def main(use_imu=False):
    """Main program
    """
    # Create imu handle
    if use_imu:
        imu = IMU(port="/dev/ttyACM0")
        imu.flush_buffer()

    # Create controller and user input handles
    # (config, solver and hardware_interface are expected to exist at module level)
    controller = Controller(
        config,
        solver.inverse_kinematics_body,
    )
    state = State()
    print("Creating joystick listener...")
    joystick_interface = JoystickInterface(config)
    print("Done.")

    last_loop = time.time()

    print("Summary of gait parameters:")
    print("overlap time: ", config.overlap_time)
    print("swing time: ", config.swing_time)
    print("z clearance: ", config.z_clearance)
    print("x shift: ", config.x_shift)

    # Wait until the activate button has been pressed
    while True:
        print("Waiting for L1 to activate robot.")
        while True:
            command = joystick_interface.get_command(state, True)
            # joystick_interface.set_color(config.ps4_deactivated_color)
            if command.activate_event == 1:
                break
            time.sleep(0.1)
        print("Robot activated.")
        # joystick_interface.set_color(config.ps4_color)

        while True:
            now = time.time()
            if now - last_loop < config.dt:
                time.sleep(config.dt - (now - last_loop))
            last_loop = time.time()

            # Parse the udp joystick commands and then update the robot controller's parameters
            command = joystick_interface.get_command(state)
            if command.activate_event == 1:
                print("Deactivating Robot")
                break

            # Read imu data. Orientation will be None if no data was available
            quat_orientation = (imu.read_orientation()
                                if use_imu else np.array([1, 0, 0, 0]))
            state.quat_orientation = quat_orientation

            # Step the controller forward by dt
            controller.run(state, command)

            # Update the pwm widths going to the servos
            angles = []
            for leg in state.joint_angles:
                for i in leg:
                    angles.append(int(i / math.pi * 180))
            print(hardware_interface.set_actuator_postions(state.joint_angles))
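
# Entry-point sketch (assumption): the objects main() references as globals are
# created at module level before it runs. The commented constructor names are
# illustrative only and may differ from the actual project.
# config = Configuration()
# hardware_interface = HardwareInterface()
# solver = ...  # kinematics module providing inverse_kinematics_body
if __name__ == "__main__":
    main()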
parser.add_argument(
    '--config', help="Config File Directory", default="", metavar="FILE")
# path = "/var/log/auth.log"
# path1 = "/home/harm/test.log"
# path2 = "/home/harm/test1.log"

args = parser.parse_args()

config_path = ''
if args.config:
    config_path = args.config
config_file = os.path.join(config_path, config_file_name)
state_file = os.path.join(config_path, state_file_name)
output_file = os.path.join(config_path, output_file_name)

config = Config()
state = State()
output = Outputs()

config.parse_config(config_file)
state.parse_state(state_file)
output.parse_outputs(output_file)

observer = LogObserver(state_file)

for fl in config.get_files():
    pos = state.pos(fl)
    inode, dev = state.id(fl)
    filters = config.get_filter(fl)
    name = config.get_name(fl)
    retention = config.get_retention(fl)
    out = output.get_output(config.get_output(fl))
# Standard-library imports used below; State, start_key_listener and
# landmarkList_pb2 are assumed to be provided by the gestop package.
import logging
import os
import socket
import threading


def main():
    ''' Main '''
    # using None because arguments are irrelevant
    S = State(None, None)

    # setup socket
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    HOST = '127.0.0.1'
    PORT = 5556
    sock.bind((HOST, PORT))
    sock.listen(1)

    landmark_list = landmarkList_pb2.LandmarkList()

    print("Waiting for keypoint generator..")
    # Establish connection
    conn, addr = sock.accept()

    gesture = input(
        "Enter the name of the gesture for which you are capturing data "
        "(a simple description of the gesture you will perform):\n")
    logging.info(
        "Hold and release the Ctrl key to record one gesture. Hit the Esc key to stop recording."
    )

    path = "gestop/data/dynamic_gestures/" + gesture
    if not os.path.exists(path):
        os.mkdir(path)

    count = 1
    start_key_listener(S)
    keypoint_buffer = []

    while True:
        data = conn.recv(4096)

        # Start recording data
        if S.ctrl_flag:
            landmark_list.ParseFromString(data)
            landmarks = []
            for lmark in landmark_list.landmark:
                landmarks.extend([str(lmark.x), str(lmark.y), str(lmark.z)])
            keypoint_buffer.append(landmarks)

        # if there is data recorded
        if len(keypoint_buffer) != 0 and not S.ctrl_flag:
            fname = path + "/" + gesture + str(count) + ".txt"
            lmark_str = ''
            for i in keypoint_buffer:
                # verifying data quality
                if '0.0' in i:
                    lmark_str = ''
                    break
                lmark_str += ' '.join(i) + '\n'
            if lmark_str != '':
                with open(fname, 'w') as f:
                    f.write(lmark_str)
                logging.info(
                    "Gesture has been successfully recorded in {0}. Sequence len: {1}"
                    .format(fname, len(keypoint_buffer)))
                count += 1
            else:
                logging.info("Data was not recorded properly, not written to file.")

            # Empty the buffer
            keypoint_buffer = []

        if threading.active_count() == 1:
            break

    conn.close()
    sock.close()
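
# Counterpart sketch: the "keypoint generator" this recorder waits for is assumed
# to connect to 127.0.0.1:5556 and send one serialized LandmarkList per frame,
# mirroring the single conn.recv() per message above. The frame source is a stub;
# landmarkList_pb2 is the same generated protobuf module used by the recorder.
def send_landmarks(frames, host='127.0.0.1', port=5556):
    ''' frames: iterable of [(x, y, z), ...] hand keypoints per frame. '''
    with socket.create_connection((host, port)) as client:
        for frame in frames:
            msg = landmarkList_pb2.LandmarkList()
            for x, y, z in frame:
                lmark = msg.landmark.add()
                lmark.x, lmark.y, lmark.z = x, y, z
            client.sendall(msg.SerializeToString())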