import warnings

import paddle.fluid as fluid
import paddle.fluid.layers as L  # "L" alias used by the second variant below

# paddle_helper / op are PGL's utility modules (scatter_add, nested_lod_reset).
from pgl.utils import op
from pgl.utils import paddle_helper


# Older recv variant: the output batch size is inferred from node_ids via
# fill_constant_batch_size_like.
def recv(dst, uniq_dst, bucketing_index, msg, reduce_function, node_ids):
    """Recv message from given msg to dst nodes.
    """
    if reduce_function == "sum":
        if isinstance(msg, dict):
            raise TypeError("The message for built-in function"
                            " should be Tensor not dict.")

        try:
            # Fast path: scatter-add every edge message into its dst row.
            out_dims = msg.shape[-1]
            init_output = fluid.layers.fill_constant_batch_size_like(
                node_ids, shape=[1, out_dims], value=0, dtype="float32")
            init_output.stop_gradient = False
            output = paddle_helper.scatter_add(init_output, dst, msg)
            return output
        except TypeError as e:
            # Older Paddle releases have no scatter_add; fall back to the
            # sequence_pool path below.
            warnings.warn(
                "scatter_add is not supported with paddle version <= 1.5")

            def sum_func(message):
                return fluid.layers.sequence_pool(message, "sum")

            reduce_function = sum_func

    # convert msg into lodtensor
    bucketed_msg = op.nested_lod_reset(msg, bucketing_index)
    # Check dim for bucketed_msg equal to out_dims
    output = reduce_function(bucketed_msg)
    out_dims = output.shape[-1]

    init_output = fluid.layers.fill_constant_batch_size_like(
        node_ids, shape=[1, out_dims], value=0, dtype="float32")
    init_output.stop_gradient = False
    output = fluid.layers.scatter(init_output, uniq_dst, output)
    return output
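# --- Illustrative sketch (not part of PGL) ---
# The "sum" fast path above is semantically a segment-sum: every edge message
# is added into the row of its destination node. The numpy code below mimics
# paddle_helper.scatter_add(init_output, dst, msg) on toy data; all toy_*
# names are invented for this example.
import numpy as np

toy_num_nodes = 4
toy_dst = np.array([1, 1, 3, 0])              # destination node of each edge
toy_msg = np.array([[1., 2.],                 # one out_dims=2 message per edge
                    [3., 4.],
                    [5., 6.],
                    [7., 8.]])

toy_out = np.zeros((toy_num_nodes, toy_msg.shape[-1]), dtype=toy_msg.dtype)
np.add.at(toy_out, toy_dst, toy_msg)          # scatter-add messages by dst
# toy_out[1] == [4., 6.]; nodes receiving no message keep their zero row.
# --- end sketch ---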
# Newer recv variant: takes explicit num_nodes / num_edges so the output shape
# no longer depends on node_ids, and messages are zeroed out when the graph
# has no edges (empty_msg_flag).
def recv(dst, uniq_dst, bucketing_index, msg, reduce_function, num_nodes,
         num_edges):
    """Recv message from given msg to dst nodes.
    """
    if reduce_function == "sum":
        if isinstance(msg, dict):
            raise TypeError("The message for built-in function"
                            " should be Tensor not dict.")

        try:
            # Fast path: scatter-add every edge message into its dst row.
            out_dim = msg.shape[-1]
            init_output = L.fill_constant(
                shape=[num_nodes, out_dim], value=0, dtype=msg.dtype)
            init_output.stop_gradient = False
            empty_msg_flag = L.cast(num_edges > 0, dtype=msg.dtype)
            msg = msg * empty_msg_flag
            output = paddle_helper.scatter_add(init_output, dst, msg)
            return output
        except TypeError as e:
            # Older Paddle releases have no scatter_add; fall back to the
            # sequence_pool path below.
            warnings.warn(
                "scatter_add is not supported with paddle version <= 1.5")

            def sum_func(message):
                return L.sequence_pool(message, "sum")

            reduce_function = sum_func

    bucketed_msg = op.nested_lod_reset(msg, bucketing_index)
    output = reduce_function(bucketed_msg)
    output_dim = output.shape[-1]
    empty_msg_flag = L.cast(num_edges > 0, dtype=output.dtype)
    output = output * empty_msg_flag

    init_output = L.fill_constant(
        shape=[num_nodes, output_dim], value=0, dtype=output.dtype)
    init_output.stop_gradient = True
    final_output = L.scatter(init_output, uniq_dst, output)
    return final_output
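# --- Illustrative sketch (not part of PGL) ---
# The fallback path above relies on the caller having sorted edges by
# destination: nested_lod_reset(msg, bucketing_index) groups consecutive
# messages that share a destination, sequence_pool(..., "sum") reduces each
# group to one row, and L.scatter writes row i into init_output[uniq_dst[i]].
# The numpy code below mimics that pipeline on toy data; all toy_* names are
# invented for this example.
import numpy as np

toy_num_nodes = 4
toy_msg = np.array([[1., 2.],                 # edge messages, sorted by dst
                    [3., 4.],
                    [5., 6.]])
toy_offsets = [0, 2, 3]                       # lod-style bucket offsets
toy_uniq_dst = np.array([1, 3])               # one unique destination per bucket

pooled = np.stack([toy_msg[s:e].sum(axis=0)
                   for s, e in zip(toy_offsets[:-1], toy_offsets[1:])])

toy_out = np.zeros((toy_num_nodes, toy_msg.shape[-1]), dtype=toy_msg.dtype)
toy_out[toy_uniq_dst] = pooled                # scatter pooled sums to uniq_dst
# toy_out[1] == [4., 6.], toy_out[3] == [5., 6.]. When num_edges == 0 the real
# code multiplies by empty_msg_flag, leaving the whole output zero.
# --- end sketch ---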