def init_robot(self):
    """Bring the (simulated) robot into a state where it accepts motion commands.

    Waits for the driver, releases the brakes via the dashboard server,
    confirms the robot reports RUNNING, then pushes the control script so the
    driver program is loaded and ready.
    """
    # NOTE(review): passing `self` to a bound method looks suspicious — confirm
    # wait_for_driver's expected arguments.
    self.wait_for_driver(self)
    # Give ursim time to finish brake release / power-on before the
    # DashboardClient is contacted.
    time.sleep(2)
    do_dashboard_command(self.app, "brakeRelease")
    wait_for_dc_mode(self.app, "robotmode", "RUNNING")
    do_dashboard_command(self.app, "stop")
    time.sleep(1)

    # Ask the driver to (re)send its control script to the robot controller.
    resend_msg = Message.create_message_builder("BooleanProto")
    resend_msg.proto.flag = True
    self.app.publish("ur.subgraph", "interface", "resend_control_script",
                     resend_msg)
    time.sleep(5)
def test_external_control(self):
    """Testing that it is possible to switch between robot control and driver control.

    Flow: start the driver program, execute a trajectory, stop external
    control, restart it, and then verify that the controller's joint command
    stream matches the measured arm state and stays constant while idle.
    """
    do_dashboard_command(self.app, "play")
    self.wait_for_robot_program_running(True)

    # Command all six joints to -1.0 rad and wait for the trajectory result.
    joint_values = [-1.0] * 6
    target_pos = np.array(joint_values, dtype=np.float64)
    target_pos_msg = Composite.create_composite_message(
        self.position_parser, target_pos)
    self.app.publish("ur.subgraph", "interface", "joint_target",
                     target_pos_msg)
    trajectory_result = self.wait_for_trajectory_result()
    self.assertTrue(trajectory_result.flag,
                    "failed to execute trajectory succesfully")

    # Hand control back to the robot, then reclaim it for the driver.
    stop_control_msg = Message.create_message_builder("BooleanProto")
    stop_control_msg.proto.flag = True
    self.app.publish("ur.subgraph", "interface", "stop_control",
                     stop_control_msg)
    self.wait_for_robot_program_running(False, timeout=1)
    self.wait_for_robot_program_running(True)

    # Snapshot the measured arm state, then give the controller time to
    # settle before sampling its command output.
    joint_states_msg = wait_for_new_message(self.app, "ur.subgraph",
                                            "interface", "arm_state")
    joint_states = Composite.parse_composite_message(joint_states_msg,
                                                     self.position_parser)
    time.sleep(2)

    joint_command_msg = wait_for_new_message(self.app, "ur.controller",
                                             "ScaledMultiJointController",
                                             "joint_command")
    if joint_command_msg is None:
        self.fail("Could not read joint command within timeout {}".format(2))
    joint_command = Composite.parse_composite_message(joint_command_msg,
                                                      self.position_parser)
    # After reconnecting, the commanded position must track the actual state.
    self.assertTrue(
        np.allclose(joint_command, joint_states, rtol=0.001, atol=0.001),
        "current joint command {} is not equal to expected joint states {}"
        .format(joint_command, joint_states))

    new_joint_command_msg = wait_for_new_message(
        self.app, "ur.controller", "ScaledMultiJointController",
        "joint_command")
    # BUGFIX: the original re-checked joint_command_msg here, so a timeout on
    # the second read was never detected.
    if new_joint_command_msg is None:
        self.fail("Could not read joint command within timeout {}".format(2))
    new_joint_command = Composite.parse_composite_message(
        new_joint_command_msg, self.position_parser)
    # With no new target, consecutive commands must be identical.
    self.assertTrue(
        np.allclose(joint_command, new_joint_command, rtol=0.001,
                    atol=0.001),
        "current joint command {} and last joint command is not equal {}".
        format(new_joint_command, joint_command))
def main(args):
    """Convert CVAT XML ground-truth labels into a recorded Isaac cask.

    Parses the CVAT annotation file given by ``args.cvat_xml``, rebuilds each
    image's boxes as Detections2Proto messages, and records them through the
    record subgraph into ``args.base_directory_gt``. Optionally writes RACI
    metadata JSON alongside the cask.
    """
    # Read CVAT XML file
    cvat_xml_path = args.cvat_xml
    if os.path.exists(cvat_xml_path):
        tree = et.parse(cvat_xml_path)
    else:
        print("Please provide a valid XML file from CVAT.")
        return

    # Get image cask UUID that these labels are associated with. The XML file
    # is expected to be named <image_cask_uuid>.xml.
    image_cask_uuid = cvat_xml_path.split('/')[-1].split('.')[0]

    # Start application to record
    app = Application()

    # Add a dummy node to publish the constructed Detections2 messages from
    app.add("node")
    message_ledger = app.nodes["node"].components["MessageLedger"]

    # Load record subgraph and configure the output cask directory
    app.load("packages/cask/apps/record.subgraph.json", prefix="record")
    record_interface = app.nodes["record.interface"].components["input"]
    record_interface.config.base_directory = args.base_directory_gt

    # Connect output of dummy node to recorder
    app.connect(message_ledger, 'in', record_interface, 'bounding_boxes')
    app.start()

    # Loop through each image element in the XML tree
    count = 0
    for image in tree.findall("./image"):
        # "Name" attribute corresponds to the image filepath that was input to
        # the CVAT labeling tool.
        # Convention is: <image_cask_uuid>/<channel>/<acqtime>.png
        image_uuid, channel, png = image.attrib['name'].split('/')

        # Check that the image_uuid corresponds to the one specified by the
        # XML filename; skip labels that belong to a different cask.
        if (image_uuid != image_cask_uuid):
            continue

        # Extract the acquisition time from the zero-padded png filename
        acqtime = int(png.lstrip('0').split('.')[0])

        # Slice the detections of interest for this image
        all_ground_truth_bboxes = image.findall("./box")
        sliced_ground_truth_boxes = slice_detections(all_ground_truth_bboxes,
                                                     args.slice_mode)
        num_sliced_ground_truth_boxes = len(sliced_ground_truth_boxes)

        # Build Detections2Proto message
        detections2 = Message.create_message_builder('Detections2Proto')
        detections2.acqtime = acqtime
        detections2.uuid = str(uuid.uuid1())
        predictions = detections2.proto.init('predictions',
                                             num_sliced_ground_truth_boxes)
        bounding_boxes = detections2.proto.init('boundingBoxes',
                                                num_sliced_ground_truth_boxes)

        # Populate the Detections2Proto and PredictionProto messages per
        # sliced bounding box
        for i in range(num_sliced_ground_truth_boxes):
            box = sliced_ground_truth_boxes[i]
            # Collect the box's <attribute> children plus its XML attributes
            # (xtl/ytl/xbr/ybr coordinates) into one lookup dict.
            row = {
                a.attrib['name']: a.text
                for a in box.findall("./attribute")
            }
            row.update(box.attrib)
            prediction = predictions[i]
            prediction.label = row['label']
            # Ground-truth labels are treated as fully confident.
            prediction.confidence = 1.0
            bbox = bounding_boxes[i]
            # NOTE(review): CVAT x coordinates are assigned to the proto's y
            # fields and vice versa — presumably converting between CVAT's
            # image convention and Isaac's row/column convention; confirm.
            bbox.min.y = float(row['xtl'])
            bbox.min.x = float(row['ytl'])
            bbox.max.y = float(row['xbr'])
            bbox.max.x = float(row['ybr'])

        # Publish the message to the node being recorded
        app.publish('node', 'MessageLedger', 'in', detections2)
        recv_msg = app.receive('node', 'MessageLedger', 'in')
        count += 1
        time.sleep(0.1)    #sleep to make sure we don't lose any messages

    app.stop()
    print("Wrote " + str(count) + " messages")

    # Write metadata to JSON data per output cask. The metadata serves to
    # associate corresponding image and ground truth casks. As per RACI
    # evaluation workflow and data management, image casks and ground truth
    # casks are stored in separate directories.
    if args.raci_metadata:
        # Populate ground truth cask metadata
        ground_truth_metadata_json = {}
        ground_truth_metadata_json["Image_Cask_File"] = image_cask_uuid
        ground_truth_metadata_json["Data_Source"] = "ground_truth"

        # Write ground truth cask metadata
        ground_truth_metadata_path = os.path.join(args.base_directory_gt,
                                                  app.uuid + "_md.json")
        with open(ground_truth_metadata_path, 'w') as f:
            json.dump(ground_truth_metadata_json, f, indent=2)