def chrome_tracing_object_transfer_dump(self, filename=None):
    """Deprecated alias for ``ray.object_transfer_timeline()``.

    Logs a deprecation warning, then forwards the call to the
    replacement API unchanged.

    Args:
        filename: Optional output path, passed straight through to
            ``ray.object_transfer_timeline()``.

    Returns:
        Whatever ``ray.object_transfer_timeline(filename=filename)``
        returns.
    """
    deprecation_message = (
        "ray.global_state.chrome_tracing_object_transfer_dump() is "
        "deprecated and will be removed in a subsequent release. Use "
        "ray.object_transfer_timeline() instead.")
    logger.warning(deprecation_message)
    return ray.object_transfer_timeline(filename=filename)
def test_actor_broadcast(ray_start_cluster_with_resource):
    """Broadcast large objects to actors on every node and check that the
    object-transfer profile records a sane number of sends per object."""
    cluster, num_nodes = ray_start_cluster_with_resource

    @ray.remote
    class Actor:
        def ready(self):
            pass

        def set_weights(self, x):
            pass

    # Spread 30 actors across the nodes using the per-node custom resources.
    actors = [
        Actor._remote(
            args=[],
            kwargs={},
            num_cpus=0.01,
            resources={str(i % num_nodes): 1}) for i in range(30)
    ]

    # Wait for the actors to start up.
    ray.get([actor.ready.remote() for actor in actors])

    broadcast_ids = []
    # Broadcast a large object to all actors.
    for _ in range(5):
        obj_id = ray.put(np.zeros(1024 * 1024, dtype=np.uint8))
        broadcast_ids.append(obj_id)
        # Pass the object into a method for every actor.
        ray.get([actor.set_weights.remote(obj_id) for actor in actors])

    # Wait for profiling information to be pushed to the profile table.
    time.sleep(1)
    transfer_events = ray.object_transfer_timeline()

    # Make sure that each object was transferred a reasonable number of times.
    for obj_id in broadcast_ids:
        send_events = []
        for event in transfer_events:
            if (event["cat"] == "transfer_send"
                    and event["args"][0] == obj_id.hex()
                    and event["args"][2] == 1):
                send_events.append(event)

        # NOTE: Each event currently appears twice because we duplicate the
        # send and receive boxes to underline them with a box (black if it is
        # a send and gray if it is a receive). So we need to remove these
        # extra boxes here.
        deduped_events = [e for e in send_events if e["cname"] != "black"]
        assert len(deduped_events) * 2 == len(send_events)
        send_events = deduped_events

        # Each object must have been broadcast to each remote machine.
        assert len(send_events) >= num_nodes - 1
        # If more object transfers than necessary have been done, print a
        # warning.
        if len(send_events) > num_nodes - 1:
            warnings.warn("This object was transferred {} times, when only {} "
                          "transfers were required.".format(
                              len(send_events), num_nodes - 1))
        # Each object should not have been broadcast more than once from
        # every machine to every other machine. Also, a pair of machines
        # should not both have sent the object to each other.
        assert len(send_events) <= (num_nodes - 1) * num_nodes / 2

        # Make sure that no object was sent multiple times between the same
        # pair of object managers.
        send_counts = defaultdict(int)
        for event in send_events:
            # The pid identifies the sender and the tid identifies the
            # receiver.
            send_counts[(event["pid"], event["tid"])] += 1
        assert all(count == 1 for count in send_counts.values())
# NOTE(review): this chunk begins mid-way through a parser.add_argument(...)
# call — the opening of the call (the option name, presumably something like
# "--dir") is outside the visible portion of the file.
    nargs=1,
    default=None,
    # Optional on-disk store; when omitted, ballots are kept in memory only.
    help=
    "directory to store the encrypted ballots on disk, enables equivalence checking (default: memory only)",
)
# Positional argument: one or more CVR input files.
parser.add_argument(
    "cvr_file",
    type=str,
    nargs="+",
    help="filename(s) for the Dominion-style ballot CVR file",
)
args = parser.parse_args()

# Unpack command-line options.
files = args.cvr_file
# args.dir is a one-element list (nargs=1) when given, or None when absent.
file_dir = args.dir[0] if args.dir else None
use_progressbar = args.progress

# Choose between a single-machine Ray instance and an existing cluster.
if args.local:
    print("Using Ray locally")
    ray_init_localhost()
else:
    print("Using Ray on a cluster")
    ray_init_cluster()

# Run the benchmark once per input CVR file.
for arg in files:
    run_bench(arg, file_dir, use_progressbar)

# Dump Ray's profiling timelines to disk for offline inspection.
print("Writing Ray timelines to disk.")
ray.timeline("ray-timeline.json")
ray.object_transfer_timeline("ray-object-transfer-timeline.json")