def _split_traces(traces):
    """Splits the flattened RandomWalkTraces structure into a list of lists of tensors.

    Parameters
    ----------
    traces : PackedFunc object of RandomWalkTraces structure

    Returns
    -------
    traces : list[list[Tensor]]
        traces[i][j] is the j-th trace generated for the i-th seed.
    """
    # Field 0: number of traces per seed; field 1: length of each trace;
    # field 2: all trace vertices concatenated into one flat tensor.
    trace_counts = F.zerocopy_to_numpy(
        F.zerocopy_from_dlpack(traces(0).to_dlpack())).tolist()
    trace_lengths = F.zerocopy_from_dlpack(traces(1).to_dlpack())
    trace_vertices = F.zerocopy_from_dlpack(traces(2).to_dlpack())
    # Split the flat vertex tensor into one tensor per trace.
    trace_vertices = F.split(trace_vertices,
                             F.zerocopy_to_numpy(trace_lengths).tolist(), 0)

    # Group the per-trace tensors by seed using the per-seed trace counts.
    traces = []
    s = 0
    for c in trace_counts:
        traces.append(trace_vertices[s:s + c])
        s += c

    return traces
def random_walk(g, seeds, num_traces, num_hops):
    """Batch-generate random walk traces of the same length on the given graph.

    Parameters
    ----------
    g : DGLGraph
        The graph.
    seeds : Tensor
        The node ID tensor from which the random walk traces start.
    num_traces : int
        Number of traces to generate for each seed.
    num_hops : int
        Number of hops for each trace.

    Returns
    -------
    traces : Tensor
        A 3-dimensional node ID tensor with shape
        (num_seeds, num_traces, num_hops + 1), where
        traces[i, j, 0] is always the starting node (i.e. seeds[i]).
    """
    if len(seeds) == 0:
        return utils.toindex([]).tousertensor()

    seeds = utils.toindex(seeds).todgltensor()
    traces = _CAPI_DGLRandomWalk(g._graph._handle, seeds,
                                 int(num_traces), int(num_hops))

    return F.zerocopy_from_dlpack(traces.to_dlpack())
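# Usage sketch for random_walk (hypothetical graph and seeds, assuming the
# PyTorch backend; shown as a comment so the module stays import-safe):
#
#   import torch
#   import dgl
#
#   g = dgl.DGLGraph()
#   g.add_nodes(5)
#   g.add_edges([0, 1, 2, 3, 4], [1, 2, 3, 4, 0])
#   seeds = torch.LongTensor([0, 1])
#   traces = random_walk(g, seeds, num_traces=2, num_hops=3)
#   # traces has shape (2, 2, 4) and traces[i, j, 0] == seeds[i]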
def connect(self):
    """Connect to all the KVServer nodes.
    """
    for ID, addr in self._server_namebook.items():
        server_ip, server_port = addr.split(':')
        _add_receiver_addr(self._sender, server_ip, int(server_port), ID)
    _sender_connect(self._sender)

    self._addr = self._get_local_addr()
    client_ip, client_port = self._addr.split(':')

    # find local server nodes
    for ID, addr in self._server_namebook.items():
        server_ip, server_port = addr.split(':')
        if client_ip == server_ip or server_ip == '127.0.0.1':
            self._local_server_id.add(ID)

    # send addr to server nodes
    msg = KVStoreMsg(
        type=KVMsgType.IP_ID,
        rank=0,
        name=self._addr,
        id=None,
        data=None)
    for server_id in range(self._server_count):
        _send_kv_msg(self._sender, msg, server_id)

    _receiver_wait(self._receiver, client_ip, int(client_port), self._server_count)

    # recv client ID from server 0
    msg = _recv_kv_msg(self._receiver)
    assert msg.rank == 0
    self._client_id = int(msg.name)

    # recv names of shared tensors from server 0
    msg = _recv_kv_msg(self._receiver)
    assert msg.rank == 0
    data_str = msg.name.split('|')

    # open shared tensors on the local machine
    for data in data_str:
        if data != '' and not self._close_shared_mem:
            tensor_name, shape, dtype = self._deserialize_shared_tensor(data)
            for server_id in self._local_server_id:
                shared_data = empty_shared_mem(
                    tensor_name + str(server_id), False, shape, dtype)
                dlpack = shared_data.to_dlpack()
                self._data_store[tensor_name] = F.zerocopy_from_dlpack(dlpack)
                self._has_data.add(tensor_name)
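# Usage sketch for connect (hypothetical namebook and constructor arguments,
# shown only for illustration; the actual KVClient setup may differ):
#
#   server_namebook = {0: '127.0.0.1:50051', 1: '127.0.0.1:50052'}
#   client = KVClient(server_namebook=server_namebook)
#   client.connect()  # handshake with every server and map local shared tensors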
def init_data(self, name, data_tensor):
    """Initialize data on KVServer with data name.

    Parameters
    ----------
    name : str
        data name
    data_tensor : tensor (mx.ndarray or torch.tensor)
        data tensor
    """
    assert len(name) > 0, 'name cannot be empty.'
    assert len(data_tensor) > 0, 'data_tensor cannot be empty.'

    shared_data = empty_shared_mem(
        name + '-data-' + str(self._server_id), True, data_tensor.shape, 'float32')
    dlpack = shared_data.to_dlpack()
    self._data_store[name + '-data-'] = F.zerocopy_from_dlpack(dlpack)
    self._data_store[name + '-data-'][:] = data_tensor[:]
    self._has_data.add(name + '-data-')
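# Usage sketch for init_data (hypothetical server object and tensor names,
# for illustration only):
#
#   embed = F.zeros((num_nodes, dim), F.float32, F.cpu())
#   server.init_data(name='embed', data_tensor=embed)
#   # The tensor is copied into shared memory so co-located clients can map it.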
def set_global2local(self, name, global2local):
    """Set a data mapping of global ID to local ID.

    Parameters
    ----------
    name : str
        data name
    global2local : list or tensor (mx.ndarray or torch.tensor)
        A data mapping of global ID to local ID. KVStore will use the
        global ID directly if this global2local mapping has not been set.
    """
    assert len(name) > 0, 'name cannot be empty.'
    assert len(global2local) > 0, 'global2local cannot be empty.'

    if isinstance(global2local, list):
        global2local = F.tensor(global2local)

    shared_data = empty_shared_mem(
        name + '-g2l-' + str(self._server_id), True, global2local.shape, 'int64')
    dlpack = shared_data.to_dlpack()
    self._data_store[name + '-g2l-'] = F.zerocopy_from_dlpack(dlpack)
    self._data_store[name + '-g2l-'][:] = global2local[:]
    self._has_data.add(name + '-g2l-')
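# Usage sketch for set_global2local (hypothetical mapping; the entry at each
# global ID gives the corresponding local row held by this server):
#
#   server.set_global2local(name='embed', global2local=[0, 1, 2, 3])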