def updateState(self, msg, alive):
    """Refresh the GUI widgets from a node state/debug message.

    Args:
        msg: debug/state message carrying ``state``, ``capture_progress``,
            ``variance_map``, ``led_all_unfiltered`` and ``cell_size``.
        alive: whether the publishing node's heartbeat is still being received.
    """
    # Update the cached variance map image only when the message carries data.
    if(len(msg.variance_map.data)):
        self.variance_map = toQImage(numpy_from_ros_compressed(msg.variance_map))
    # Node heartbeat lost: disable interaction and bail out early.
    if not alive:
        self.stateLabel.setText("Waiting for node heartbeat...")
        self.triggerBtn.setEnabled(False)
        return
    else:
        self.triggerBtn.setEnabled(True)
    # Forward capture progress (state == 1 means "capturing") to listeners.
    self.progress.emit(msg.state == 1, msg.capture_progress)
    # Keep the latest non-empty detection set and its cell size.
    if len(msg.led_all_unfiltered.detections):
        self.unfiltered_leds = msg.led_all_unfiltered
        self.cell_size = msg.cell_size
    # State machine for the UI: 1 = capturing, 2 = processing, 0 = idle.
    if msg.state == 1:
        self.triggerBtn.setVisible(False)
        self.stateLabel.setText("Capture: ")
        # A new capture invalidates any previously shown detections.
        self.unfiltered_leds = None
    elif msg.state == 2:
        self.triggerBtn.setVisible(False)
        self.stateLabel.setText("Processing...")
    elif msg.state == 0:
        self.triggerBtn.setVisible(True)
        if self.unfiltered_leds is not None:
            self.stateLabel.setText("Click on the squares for details or on image to toggle camera/variance map...")
        else:
            self.stateLabel.setText("No detections, press Detect to start.")
def camera_callback(self, msg):
    """Camera frame callback: buffer grayscale frames after a trigger, then process.

    State machine stored in ``self.node_state``:
    0 = waiting for trigger, 1 = capturing, 2 = processing.

    Args:
        msg: compressed camera image (decoded via ``numpy_from_ros_compressed``).
    """
    if not self.active:
        return
    float_time = msg.header.stamp.to_sec()
    debug_msg = LEDDetectionDebugInfo()
    if self.trigger:
        #rospy.loginfo('[%s] GOT TRIGGER! Starting...')
        # Trigger consumed: reset the capture buffer and timing bookkeeping.
        self.trigger = False
        self.data = []
        self.capture_finished = False
        # Start capturing images
        #rospy.loginfo('[%s] Start capturing frames'%self.node_name)
        self.first_timestamp = msg.header.stamp.to_sec()
        self.tinit = time.time()
    elif self.capture_finished:
        self.node_state = 0
        #rospy.loginfo('[%s] Waiting for trigger...' %self.node_name)
    # first_timestamp > 0 means a capture window is open (or just closed).
    if self.first_timestamp > 0:
        # TODO sanity check rel_time positive, restart otherwise
        rel_time = float_time - self.first_timestamp
        # Capturing
        if rel_time < self.capture_time:
            self.node_state = 1
            # Capture image
            rgb = numpy_from_ros_compressed(msg)
            rgb = cv2.cvtColor(rgb, cv2.COLOR_BGRA2GRAY)
            rgb = cv2.resize(rgb, (640 * 1, 480 * 1))
            # Invert intensities; presumably makes LEDs dark on light — TODO confirm.
            rgb = 255 - rgb
            #rospy.loginfo('[%s] Capturing frame %s' %(self.node_name, rel_time))
            # Save image to data
            #if np.size(self.data) == 0:
            #    self.data = rgb
            #else:
            #    self.data = np.dstack((self.data,rgb))
            self.data.append({'timestamp': float_time, 'rgb': rgb[:, :]})
            debug_msg.capture_progress = 100.0 * rel_time / self.capture_time
        # Start processing
        elif not self.capture_finished and self.first_timestamp > 0:
            #rospy.loginfo('[%s] Relative Time %s, processing' %(self.node_name, rel_time))
            self.node_state = 2
            self.capture_finished = True
            self.first_timestamp = 0
            self.sub_cam.unregister()  # IMPORTANT! Explicitly ignore messages
            # while processing, accumulates delay otherwise!
            self.send_state(debug_msg)
            # Process image and publish results
            self.process_and_publish()
    self.send_state(debug_msg)  # TODO move heartbeat to dedicated thread
def camera_callback(self, msg):
    """Cache the latest camera frame for later use.

    Decodes the compressed image, resets ``self.status`` to its default and
    stores the frame in ``self.rgb``. Does nothing while the node is inactive.

    Args:
        msg: compressed camera image (decoded via ``numpy_from_ros_compressed``).
    """
    if not self.active:
        return
    # Removed stale commented-out timestamp extraction (dead code).
    rgb = numpy_from_ros_compressed(msg)
    self.status = "default"
    self.rgb = rgb
    rospy.loginfo("[%s] get image data" % (self.node_name))
def camera_callback(self, msg):
    """Camera frame callback: buffer frames after a trigger, then process.

    State machine stored in ``self.node_state``:
    0 = waiting for trigger, 1 = capturing, 2 = processing.

    Args:
        msg: compressed camera image (decoded via ``numpy_from_ros_compressed``).
    """
    if not self.active:
        return
    float_time = msg.header.stamp.to_sec()
    debug_msg = LEDDetectionDebugInfo()
    if self.trigger:
        # FIX: the original format string '[%s] ...' had no argument bound to
        # %s and would log the literal placeholder; supply the node name.
        rospy.loginfo('[%s] GOT TRIGGER! Starting...' % self.node_name)
        self.trigger = False
        self.data = []
        self.capture_finished = False
        rospy.loginfo('[%s] Start capturing frames' % self.node_name)
        self.first_timestamp = msg.header.stamp.to_sec()
        self.tinit = time.time()
    elif self.capture_finished:
        self.node_state = 0
        rospy.loginfo('[%s] Waiting for trigger...' % self.node_name)
    # first_timestamp > 0 means a capture window is open (or just closed).
    if self.first_timestamp > 0:
        # TODO sanity check rel_time positive, restart otherwise
        rel_time = float_time - self.first_timestamp
        # Capturing
        if rel_time < self.capture_time:
            self.node_state = 1
            rgb = numpy_from_ros_compressed(msg)
            rospy.loginfo('[%s] Capturing frame %s' % (self.node_name, rel_time))
            self.data.append({
                'timestamp': float_time,
                'rgb': rgb[:, :, :]
            })
            debug_msg.capture_progress = 100.0 * rel_time / self.capture_time
        # Start processing
        elif not self.capture_finished and self.first_timestamp > 0:
            rospy.loginfo('[%s] Relative Time %s, processing' % (self.node_name, rel_time))
            self.node_state = 2
            self.capture_finished = True
            self.first_timestamp = 0
            self.sub_cam.unregister()  # IMPORTANT! Explicitly ignore messages
            # while processing, accumulates delay otherwise!
            self.send_state(debug_msg)
            self.process_and_publish()
    self.send_state(debug_msg)  # TODO move heartbeat to dedicated thread
def camera_callback(self, msg):
    """
    Callback that collects images and starts the detection.
    It unregister the subscriber while processing the images. Once finished, it restarts it.

    Args:
        msg (:obj:`sensor_msgs.msg.CompressedImage`): Input image.
    """
    float_time = msg.header.stamp.to_sec()
    if self.trigger:
        # Trigger consumed: reset buffer and timing bookkeeping.
        self.trigger = False
        self.data = []
        self.capture_finished = False
        # Start capturing images
        self.first_timestamp = msg.header.stamp.to_sec()
        self.t_init = rospy.Time.now().to_sec()
    elif self.capture_finished:
        # Idle: wait for the next trigger (node_state 0).
        self.node_state = 0
    # first_timestamp > 0 means a capture window is open (or just closed).
    if self.first_timestamp > 0:
        rel_time = float_time - self.first_timestamp
        # Capturing
        if rel_time < self.params['~capture_time']:
            self.node_state = 1
            # Capture image as inverted grayscale.
            rgb = numpy_from_ros_compressed(msg)
            rgb = cv2.cvtColor(rgb, cv2.COLOR_BGRA2GRAY)
            # Invert intensities; presumably to make blinking LEDs stand out — TODO confirm.
            rgb = 255 - rgb
            self.data.append({'timestamp': float_time, 'rgb': rgb[:, :]})
        # Start processing
        elif not self.capture_finished and self.first_timestamp > 0:
            if self.params['~verbose'] == 2:
                self.log('Relative Time %s, processing' % rel_time)
            self.node_state = 2
            self.capture_finished = True
            self.first_timestamp = 0
            # IMPORTANT! Explicitly ignore messages while processing, accumulates delay otherwise!
            self.sub_cam.unregister()
            # Process image and publish results
            self.process_and_publish()
def camera_callback(self, msg):
    """Camera frame callback: buffer frames after a trigger, then process.

    State machine stored in ``self.node_state``:
    0 = waiting for trigger, 1 = capturing, 2 = processing.

    Args:
        msg: compressed camera image (decoded via ``numpy_from_ros_compressed``).
    """
    if not self.active:
        return
    float_time = msg.header.stamp.to_sec()
    debug_msg = LEDDetectionDebugInfo()
    if self.trigger:
        # FIX: the original format string '[%s] ...' had no argument bound to
        # %s and would log the literal placeholder; supply the node name.
        rospy.loginfo('[%s] GOT TRIGGER! Starting...' % self.node_name)
        self.trigger = False
        self.data = []
        self.capture_finished = False
        rospy.loginfo('[%s] Start capturing frames' % self.node_name)
        self.first_timestamp = msg.header.stamp.to_sec()
        self.tinit = time.time()
    elif self.capture_finished:
        self.node_state = 0
        rospy.loginfo('[%s] Waiting for trigger...' % self.node_name)
    # first_timestamp > 0 means a capture window is open (or just closed).
    if self.first_timestamp > 0:
        # TODO sanity check rel_time positive, restart otherwise
        rel_time = float_time - self.first_timestamp
        # Capturing
        if rel_time < self.capture_time:
            self.node_state = 1
            rgb = numpy_from_ros_compressed(msg)
            rospy.loginfo('[%s] Capturing frame %s' % (self.node_name, rel_time))
            self.data.append({'timestamp': float_time, 'rgb': rgb[:, :, :]})
            debug_msg.capture_progress = 100.0 * rel_time / self.capture_time
        # Start processing
        elif not self.capture_finished and self.first_timestamp > 0:
            rospy.loginfo('[%s] Relative Time %s, processing' % (self.node_name, rel_time))
            self.node_state = 2
            self.capture_finished = True
            self.first_timestamp = 0
            self.sub_cam.unregister()  # IMPORTANT! Explicitly ignore messages
            # while processing, accumulates delay otherwise!
            self.send_state(debug_msg)
            self.process_and_publish()
    self.send_state(debug_msg)  # TODO move heartbeat to dedicated thread
def camera_callback(self, msg):
    """Camera frame callback: buffer frames after a trigger, then process.

    State machine stored in ``self.node_state``:
    0 = waiting for trigger, 1 = capturing, 2 = processing.

    Args:
        msg: compressed camera image (decoded via ``numpy_from_ros_compressed``).
    """
    if not self.active:
        return
    float_time = msg.header.stamp.to_sec()
    debug_msg = LEDDetectionDebugInfo()
    if self.trigger:
        # Trigger consumed: reset buffer and timing bookkeeping.
        self.trigger = False
        self.data = []
        self.capture_finished = False
        rospy.loginfo('Start capturing frames')
        self.first_timestamp = msg.header.stamp.to_sec()
        self.tinit = time.time()
    elif self.capture_finished:
        self.node_state = 0
        # FIX: use rospy.loginfo instead of bare print() for consistency with
        # the rest of this node's logging.
        rospy.loginfo('Waiting for trigger signal...')
    # first_timestamp > 0 means a capture window is open (or just closed).
    if self.first_timestamp > 0:
        # TODO sanity check rel_time positive, restart otherwise
        rel_time = float_time - self.first_timestamp
        # Capturing
        if rel_time < self.capture_time:
            rgb = numpy_from_ros_compressed(msg)
            rospy.loginfo('Capturing frame %s' % rel_time)
            self.data.append({'timestamp': float_time, 'rgb': rgb})
            self.node_state = 1
            debug_msg.capture_progress = 100.0 * rel_time / self.capture_time
        # Start processing
        elif not self.capture_finished and self.first_timestamp > 0:
            self.capture_finished = True
            self.first_timestamp = 0
            self.node_state = 2
            self.send_state(debug_msg)
            self.process_and_publish()
    self.send_state(debug_msg)  # TODO move heartbeat to dedicated thread
def camera_callback(self, msg):
    """Camera frame callback: buffer frames after a trigger, then process.

    State machine stored in ``self.node_state``:
    0 = waiting for trigger, 1 = capturing, 2 = processing.

    Args:
        msg: compressed camera image (decoded via ``numpy_from_ros_compressed``).
    """
    if not self.active:
        return
    float_time = msg.header.stamp.to_sec()
    debug_msg = LEDDetectionDebugInfo()
    if self.trigger:
        # Trigger consumed: reset buffer and timing bookkeeping.
        self.trigger = False
        self.data = []
        self.capture_finished = False
        rospy.loginfo('Start capturing frames')
        self.first_timestamp = msg.header.stamp.to_sec()
        self.tinit = time.time()
    elif self.capture_finished:
        self.node_state = 0
        # FIX: use rospy.loginfo instead of bare print() for consistency with
        # the rest of this node's logging.
        rospy.loginfo('Waiting for trigger signal...')
    # first_timestamp > 0 means a capture window is open (or just closed).
    if self.first_timestamp > 0:
        # TODO sanity check rel_time positive, restart otherwise
        rel_time = float_time - self.first_timestamp
        # Capturing
        if rel_time < self.capture_time:
            rgb = numpy_from_ros_compressed(msg)
            rospy.loginfo('Capturing frame %s' % rel_time)
            self.data.append({'timestamp': float_time, 'rgb': rgb})
            self.node_state = 1
            debug_msg.capture_progress = 100.0 * rel_time / self.capture_time
        # Start processing
        elif not self.capture_finished and self.first_timestamp > 0:
            self.capture_finished = True
            self.first_timestamp = 0
            self.node_state = 2
            self.send_state(debug_msg)
            self.process_and_publish()
    self.send_state(debug_msg)  # TODO move heartbeat to dedicated thread
def cam_callback(self, msg):
    """Display an incoming camera frame in the GUI window.

    Args:
        msg: compressed camera image (decoded via ``numpy_from_ros_compressed``).
    """
    #print('Received camera image')
    npimg = numpy_from_ros_compressed(msg)
    # Store the converted frame on the (module-level) window and repaint.
    win.camera_image = toQImage(npimg)
    win.update()
def cam_callback(self, msg):
    """Push a newly received camera frame into the GUI and trigger a repaint.

    Args:
        msg: compressed camera image (decoded via ``numpy_from_ros_compressed``).
    """
    frame = numpy_from_ros_compressed(msg)
    win.camera_image = toQImage(frame)
    win.update()