def flush_summary_writer(name=None):
    """Flush the summary writer.

    Args:
        name: This operator's name
    """
    if name is None:
        name = id_util.UniqueStr("FlushWriter_")
    flow.user_op_builder(name).Op("flush_summary_writer").Build().InferAndTryRun()

def dynamic_loss_scale_schedule(
    count_not_finite, loss_scale, good_step_counter, increment_period, multiplier, name
):
    """Update the dynamic loss scale state in place.

    Args:
        count_not_finite: A 'Blob' holding the number of non-finite values found in the gradients
        loss_scale: A 'Blob' holding the current loss scale
        good_step_counter: A 'Blob' counting consecutive steps without non-finite values
        increment_period: Number of good steps between loss scale increases
        multiplier: Factor by which the loss scale is scaled
        name: This operator's name
    """
    flow.user_op_builder(name).Op("dynamic_loss_scale_schedule").Input(
        "count_not_finite", [count_not_finite]
    ).Input("loss_scale", [loss_scale]).Input(
        "good_step_counter", [good_step_counter]
    ).Attr(
        "increment_period", increment_period
    ).Attr(
        "multiplier", multiplier
    ).Build().InferAndTryRun()

def create_summary_writer(logdir, name=None):
    """Create a summary writer object.

    Args:
        logdir: log dir
        name: This operator's name
    """
    if name is None:
        name = id_util.UniqueStr("CreateWriter_")
    flow.user_op_builder(name).Op("create_summary_writer").Attr(
        "logdir", logdir
    ).Build().InferAndTryRun()

def write_pb(value, step=None, name=None):
    """Write raw protobuf data to log file.

    Args:
        value: A 'Blob' with dtype in 'flow.int8'
        step: A 'Blob' with 1 value and dtype is 'flow.int64'
        name: This operator's name
    """
    if name is None:
        name = id_util.UniqueStr("WritePb_")
    flow.user_op_builder(name).Op("summary_write_pb").Input("in", [value]).Input(
        "step", [step]
    ).Build().InferAndTryRun()

def write_histogram(value, step, tag, name=None):
    """Write histogram to log file.

    Args:
        value: A 'Blob' with dtype in (flow.float, flow.double, flow.int64, flow.int32, flow.int8, flow.uint8)
        step: A 'Blob' with 1 value and dtype is 'flow.int64'
        tag: A 'Blob' with 1 value and dtype is 'flow.int8'
        name: This operator's name
    """
    if name is None:
        name = id_util.UniqueStr("WriteHistogram_")
    flow.user_op_builder(name).Op("summary_write_histogram").Input(
        "in", [value]
    ).Input("step", [step]).Input("tag", [tag]).Build().InferAndTryRun()

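# Usage (a minimal sketch, not part of this file): the summary ops above are
# meant to be called in create -> write -> flush order. The blob arguments are
# hypothetical and must have the dtypes documented in each docstring;
# `write_image` is defined further below.
#
#   create_summary_writer("./log")
#   write_histogram(value_blob, step_blob, tag_blob)
#   write_image(image_blob, step_blob)
#   flush_summary_writer()
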
def pad_grad(
    x: oneflow._oneflow_internal.BlobDesc,
    paddings: Sequence[int],
    constant_value: Union[int, float] = 0,
    name: Optional[str] = None,
) -> oneflow._oneflow_internal.BlobDesc:
    padding_before = []
    padding_after = []
    if isinstance(paddings, (list, tuple)):
        assert len(paddings) == len(x.shape), ValueError(
            "paddings must have the same length as the number of input dims"
        )
        for p in paddings:
            assert isinstance(p, (list, tuple)) and len(p) == 2, ValueError(
                "each element of paddings must be a tuple or a list of length 2"
            )
            padding_before.append(p[0])
            padding_after.append(p[1])
    else:
        raise ValueError("paddings must be a tuple or a list.")
    return (
        flow.user_op_builder(name if name is not None else id_util.UniqueStr("PadGrad_"))
        .Op("pad_grad")
        .Input("dy", [x])
        .Output("dx")
        .Attr("padding_before", padding_before)
        .Attr("padding_after", padding_after)
        .Attr("floating_constant_value", float(constant_value))
        .Attr("integral_constant_value", int(constant_value))
        .Build()
        .InferAndTryRun()
        .RemoteBlobList()[0]
    )

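# Example (a sketch, under the assumption that `pad_grad` is the backward op of
# `flow.pad`): for constant padding, the gradient w.r.t. the input is the
# interior slice of the output gradient `dy`, e.g. for a 1-D blob:
#
#   dx = pad_grad(dy, paddings=[(2, 3)])   # dx.shape[0] == dy.shape[0] - 5
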
def _do_reduce(x, name, op_type_name, keepdims, axis):
    op = (
        flow.user_op_builder(name)
        .Op(op_type_name)
        .Input("input_tensor", [x])
        .Output("output_tensor")
        .Attr("axis", axis)
        .Attr("keepdims", keepdims)
        .Build()
    )
    return op.InferAndTryRun().SoleOutputBlob()

def replication_pad2d(
    x: oneflow._oneflow_internal.BlobDesc,
    padding: Union[int, tuple, list],
    name: Optional[str] = None,
) -> oneflow._oneflow_internal.BlobDesc:
    """Pads the input tensor using replication of the input boundary.

    Args:
        x (oneflow._oneflow_internal.BlobDesc): input blob, only supports "NCHW" format.
        padding (Union[int, tuple, list]): The size or boundary of padding. If it is an int, the same
            padding is used in all dimensions; if it is a 4-element tuple or list, it is interpreted
            as (padding_left, padding_right, padding_top, padding_bottom).
        name (Optional[str], optional): The name for the operation. Defaults to None.

    Returns:
        oneflow._oneflow_internal.BlobDesc: The padded Blob.

    For example:

    .. code-block:: python

        import oneflow.compatible.single_client as flow
        import oneflow.compatible.single_client.typing as tp
        import numpy as np


        @flow.global_function()
        def pad_Job(x: tp.Numpy.Placeholder((1, 2, 3, 3))
        ) -> tp.Numpy:
            return flow.replication_pad2d(x, padding=[2, 2, 1, 1])


        x = np.arange(18).reshape((1, 2, 3, 3)).astype(np.float32)
        out = pad_Job(x)

        # out [[[[ 0.  0.  0.  1.  2.  2.  2.]
        #    [ 0.  0.  0.  1.  2.  2.  2.]
        #    [ 3.  3.  3.  4.  5.  5.  5.]
        #    [ 6.  6.  6.  7.  8.  8.  8.]
        #    [ 6.  6.  6.  7.  8.  8.  8.]]

        #   [[ 9.  9.  9. 10. 11. 11. 11.]
        #    [ 9.  9.  9. 10. 11. 11. 11.]
        #    [12. 12. 12. 13. 14. 14. 14.]
        #    [15. 15. 15. 16. 17. 17. 17.]
        #    [15. 15. 15. 16. 17. 17. 17.]]]]

    """
    if isinstance(padding, (tuple, list)):
        assert len(padding) == len(x.shape), ValueError(
            "padding boundary must have the same size as the number of input dims"
        )
        boundary = [padding[0], padding[1], padding[2], padding[3]]
    elif isinstance(padding, int):
        boundary = [padding, padding, padding, padding]
    else:
        raise ValueError("padding must be an int or a list/tuple!")
    return (
        flow.user_op_builder(
            name if name is not None else id_util.UniqueStr("Replication_Pad2d_")
        )
        .Op("replication_pad2d")
        .Input("x", [x])
        .Output("y")
        .Attr("padding", list(boundary))
        .Build()
        .InferAndTryRun()
        .RemoteBlobList()[0]
    )

def distributed_partial_fc_sample(
    weight: oneflow._oneflow_internal.BlobDesc,
    label: oneflow._oneflow_internal.BlobDesc,
    num_sample: int,
    name: Optional[str] = None,
) -> oneflow._oneflow_internal.BlobDesc:
    parallel_num = flow.current_scope().device_parallel_desc_symbol.parallel_num
    assert num_sample % parallel_num == 0
    assert weight.shape[0] % parallel_num == 0
    return (
        flow.user_op_builder(
            name if name is not None else id_util.UniqueStr("DistributedPartialFcSample_")
        )
        .Op("distributed_partial_fc_sample")
        .Input("weight", [weight])
        .Input("label", [label])
        .Attr("num_sample", num_sample)
        .Output("mapped_label")
        .Output("sampled_label")
        .Output("sampled_weight")
        .Build()
        .InferAndTryRun()
        .RemoteBlobList()
    )

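# Example (a minimal sketch; `fc_weight`, `labels`, and `num_classes` are
# hypothetical names inside a job, and `num_sample` must be divisible by the
# parallel number, as asserted above):
#
#   mapped_label, sampled_label, sampled_weight = distributed_partial_fc_sample(
#       weight=fc_weight,            # (num_classes, embedding_size)
#       label=labels,                # (batch_size,)
#       num_sample=num_classes // 4,
#   )
#   # `sampled_weight` and `mapped_label` can then feed a sampled fc + loss.
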
def tensor_buffer_to_tensor(
    x: oneflow._oneflow_internal.BlobDesc,
    dtype: flow.dtype,
    instance_shape: Sequence[int],
    name: Optional[str] = None,
) -> oneflow._oneflow_internal.BlobDesc:
    """This operator converts the Blob's type from TensorBuffer to Tensor.
    Some operators' output data type is `TensorBuffer`; you can use this operator
    to convert it back to `Tensor`.

    Refer to `Concept Explanation <https://docs.oneflow.org/basics_topics/concept_explanation.html#3tensorbuffer-tensorlist>`_
    for more about TensorBuffer.

    Args:
        x (oneflow._oneflow_internal.BlobDesc): Input `Blob`.
        dtype (flow.dtype): The data dtype.
        instance_shape (Sequence[int]): The shape of each TensorBuffer instance.
        name (Optional[str], optional): The name for the operation. Defaults to None.

    Returns:
        oneflow._oneflow_internal.BlobDesc: A `Blob`.

    For example:

    .. code-block:: python

        import oneflow.compatible.single_client as flow
        import numpy as np
        import oneflow.compatible.single_client.typing as tp


        @flow.global_function()
        def tensor_buffer_to_tensor_Job(x: tp.Numpy.Placeholder(shape=(4, 16, 64, 64), dtype=flow.float32),
        ) -> tp.Numpy:
            x = flow.tensor_to_tensor_buffer(x, instance_dims=2)
            return flow.tensor_buffer_to_tensor(x,
                                                instance_shape=(64, 64),
                                                dtype=flow.float)


        x = np.random.randn(4, 16, 64, 64).astype(np.float32)
        out = tensor_buffer_to_tensor_Job(x)

        # out.shape (4, 16, 64, 64)

    """
    if name is None:
        name = id_util.UniqueStr("TensorBufferToTensor_")
    return (
        flow.user_op_builder(name)
        .Op("tensor_buffer_to_tensor")
        .Input("in", [x])
        .Output("out")
        .Attr("dtype", dtype)
        .Attr("instance_shape", instance_shape)
        .Build()
        .InferAndTryRun()
        .RemoteBlobList()[0]
    )

def reduce_global_stage(x, device_count, axis, keepdims, op_name, name):
    (out, mask) = (
        flow.user_op_builder(name)
        .Op(op_name)
        .Input("in", [x])
        .Input("device_count", [device_count])
        .Output("out")
        .Output("mask")
        .Attr("axis", axis)
        .Attr("keepdims", keepdims)
        .Build()
        .InferAndTryRun()
        .RemoteBlobList()
    )
    return out

def hierarchical_parallel_cast(input, nd_sbp, grad_mode, grad_nd_sbp, name):
    if name is None:
        name = id_util.UniqueStr("HierarchicalParallelCast_")

    def distribute_to_str(dist):
        if dist is None:
            return ""
        elif type(dist) is str:
            return dist
        elif type(dist) is oneflow._oneflow_internal.distribute.SplitDistribute:
            return "S({})".format(dist.axis)
        elif type(dist) is oneflow._oneflow_internal.distribute.BroadcastDistribute:
            return "B"
        else:
            raise ValueError("unsupported distribute")

    op = (
        flow.user_op_builder(name)
        .Op("hierarchical_parallel_cast")
        .Input("in", [input])
        .Output("out")
        .Attr("nd_sbp", list(map(distribute_to_str, nd_sbp)))
        .Attr("grad_mode", grad_mode or "restore")
        .Attr(
            "grad_nd_sbp",
            list(map(distribute_to_str, grad_nd_sbp)) if grad_nd_sbp else [],
        )
        .Build()
    )
    return op.InferAndTryRun().SoleOutputBlob()

def parallel_cast(input, name=None, distribute=None, gradient_distribute=None):
    if name is None:
        name = id_util.UniqueStr("ParallelCast_")

    def distribute_to_str(dist):
        dist_str = ""
        if dist is None:
            pass
        elif type(dist) is oneflow._oneflow_internal.distribute.SplitDistribute:
            dist_str = "S({})".format(dist.axis)
        elif type(dist) is oneflow._oneflow_internal.distribute.BroadcastDistribute:
            dist_str = "B"
        else:
            raise ValueError("unsupported distribute")
        return dist_str

    sbp_parallel = distribute_to_str(distribute)
    grad_sbp_parallel = distribute_to_str(gradient_distribute)
    op = (
        flow.user_op_builder(name)
        .Op("parallel_cast")
        .Input("in", [input])
        .Output("out")
        .Attr("sbp_parallel", sbp_parallel)
        .Attr("grad_sbp_parallel", grad_sbp_parallel)
        .Build()
    )
    return op.InferAndTryRun().SoleOutputBlob()

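# Example (a sketch, assuming the classic single-client distribute API
# `flow.distribute.split(axis)` / `flow.distribute.broadcast()`):
#
#   y = parallel_cast(
#       x,
#       distribute=flow.distribute.broadcast(),
#       gradient_distribute=flow.distribute.split(0),
#   )
#   # hierarchical_parallel_cast (above) is the n-D generalization: nd_sbp
#   # takes one entry per hierarchy level, e.g. nd_sbp=["B", "S(0)"].
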
def TestDataTypeAttr(input, output_type):
    assert output_type in flow.dtypes()
    return (
        flow.user_op_builder("TestDataTypeAttr")
        .Op("TestDataTypeAttr")
        .Input("in", [input])
        .Output("out")
        .Attr("output_type", output_type)
        .Build()
        .InferAndTryRun()
        .RemoteBlobList()[0]
    )

def square_sum(
    x: oneflow._oneflow_internal.BlobDesc, name: Optional[str] = None
) -> oneflow._oneflow_internal.BlobDesc:
    return (
        flow.user_op_builder(
            name if name is not None else id_util.UniqueStr("SquareSum_")
        )
        .Op("square_sum")
        .Input("x", [x])
        .Output("y")
        .Build()
        .InferAndTryRun()
        .RemoteBlobList()[0]
    )

def user_sigmoid_backward(y, dy, name: Optional[str] = None):
    return (
        flow.user_op_builder(
            name if name is not None else flow.util.unique_str("UserSigmoidBackward_")
        )
        .Op("user_sigmoid_backward")
        .Input("y", [y])
        .Input("dy", [dy])
        .Output("dx")
        .Build()
        .InferAndTryRun()
        .RemoteBlobList()[0]
    )

def write_image(value, step=None, tag=None, name=None):
    """Write image to log file.

    Args:
        value: A 'Blob' with dtype in 'flow.uint8'
        step: A 'Blob' with 1 value and dtype is 'flow.int64'
        tag: A 'Blob' with 1 value and dtype is 'flow.int8'
        name: This operator's name
    """
    if name is None:
        name = id_util.UniqueStr("WriteImage_")
    if tag is None:
        tag = "image"
    flow.user_op_builder(name).Op("summary_write_image").Input(
        "in", [value]
    ).Input("step", [step]).Input("tag", [tag]).Build().InferAndTryRun()

def count_not_finite(
    x: oneflow._oneflow_internal.BlobDesc, name: Optional[str] = None
) -> oneflow._oneflow_internal.BlobDesc:
    return (
        flow.user_op_builder(
            name if name is not None else id_util.UniqueStr("CountNotFinite_")
        )
        .Op("count_not_finite")
        .Input("x", [x])
        .Output("y")
        .Build()
        .InferAndTryRun()
        .RemoteBlobList()[0]
    )

def multi_count_not_finite(
    x: Optional[Sequence[oneflow._oneflow_internal.BlobDesc]] = None,
    name: Optional[str] = None,
) -> oneflow._oneflow_internal.BlobDesc:
    return (
        flow.user_op_builder(
            name if name is not None else id_util.UniqueStr("MultiCountNotFinite_")
        )
        .Op("multi_count_not_finite")
        .Input("x", x)
        .Output("y")
        .Build()
        .InferAndTryRun()
        .RemoteBlobList()[0]
    )

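# Example (a sketch of how these ops fit dynamic loss scaling; the blob names
# are hypothetical): count non-finite values across the gradients, then let
# `dynamic_loss_scale_schedule` (above) update the loss scale state.
#
#   not_finite = multi_count_not_finite([grad_a, grad_b])
#   dynamic_loss_scale_schedule(
#       not_finite, loss_scale, good_step_counter,
#       increment_period=2000, multiplier=2.0, name="loss_scale_schedule",
#   )
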
def my_test_source(name, seed):
    return (
        flow.user_op_builder(name)
        .Op("TestRandomSource")
        .Output("out")
        .Attr("seed", seed)
        .Build()
        .InferAndTryRun()
        .RemoteBlobList()[0]
    )

def eager_nccl_all_reduce(
    x: oneflow._oneflow_internal.BlobDesc,
    parallel_conf: str,
    name: Optional[str] = None,
) -> oneflow._oneflow_internal.BlobDesc:
    return (
        flow.user_op_builder(
            name if name is not None else id_util.UniqueStr("EagerNcclAllReduce_")
        )
        .Op("eager_nccl_all_reduce")
        .Input("in", [x])
        .Output("out")
        .Attr("parallel_conf", parallel_conf)
        .Build()
        .InferAndTryRun()
        .RemoteBlobList()[0]
    )

def indexed_slices_reduce_sum(
    indices: input_blob_util.ArgBlobDef,
    values: input_blob_util.ArgBlobDef,
    name: Optional[str] = None,
) -> Tuple[oneflow._oneflow_internal.BlobDesc]:
    op = (
        flow.user_op_builder(
            name if name is not None else id_util.UniqueStr("IndexedSlicesReduceSum_")
        )
        .Op("indexed_slices_reduce_sum")
        .Input("x_indices", [indices])
        .Input("x_values", [values])
        .Output("y_indices")
        .Output("y_values")
        .Output("num_unique")
        .Build()
    )
    return op.InferAndTryRun().RemoteBlobList()

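# Example (a sketch of the op's semantics; the values are illustrative):
# duplicate indices are merged and their value rows summed, so for
#
#   indices = [1, 1, 2]        values = [[1., 1.], [2., 2.], [3., 3.]]
#
# the result is y_indices = [1, 2], y_values = [[3., 3.], [3., 3.]], and
# num_unique = 2, telling downstream ops how many output rows are valid.
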
def my_test_source(name, out_num):
    return (
        flow.user_op_builder(name)
        .Op("TestSourceMultiGpuFixedOutNum")
        .Output("out")
        .Attr("out_num", out_num)
        .Build()
        .InferAndTryRun()
        .RemoteBlobList()[0]
    )

def smooth_l1_loss(
    prediction: oneflow._oneflow_internal.BlobDesc,
    label: oneflow._oneflow_internal.BlobDesc,
    beta: float = 1.0,
    name: Optional[str] = None,
) -> oneflow._oneflow_internal.BlobDesc:
    """This operator computes the smooth l1 loss.

    With :math:`x = prediction - label`, the equation is:

    .. math::

        & out = \\frac{x^2}{2\\beta}, \\quad \\left|x\\right| < \\beta

        & out = \\left|x\\right| - \\frac{\\beta}{2}, \\quad otherwise

    Args:
        prediction (oneflow._oneflow_internal.BlobDesc): The prediction Blob
        label (oneflow._oneflow_internal.BlobDesc): The label Blob
        beta (float, optional): The :math:`\\beta` in the equation. Defaults to 1.0.
        name (Optional[str], optional): The name for the operation. Defaults to None.

    Returns:
        oneflow._oneflow_internal.BlobDesc: The result Blob

    For example:

    .. code-block:: python

        import oneflow.compatible.single_client as flow
        import numpy as np
        import oneflow.compatible.single_client.typing as tp


        @flow.global_function()
        def smooth_l1_loss_Job(prediction: tp.Numpy.Placeholder((5, )),
                               label: tp.Numpy.Placeholder((5, ))
        ) -> tp.Numpy:
            return flow.smooth_l1_loss(prediction=prediction,
                                       label=label)


        prediction = np.array([0.1, 0.4, 0.3, 0.5, 0.9]).astype(np.float32)
        label = np.array([0.3, 0.9, 2.5, 0.4, 0.3]).astype(np.float32)

        out = smooth_l1_loss_Job(prediction, label)

        # out [0.02       0.12499999 1.7        0.005      0.17999998]

    """
    op = (
        flow.user_op_builder(
            name if name is not None else id_util.UniqueStr("SmoothL1Loss_")
        )
        .Op("smooth_l1_loss")
        .Input("input", [prediction])
        .Input("target", [label])
        .Output("out")
    )
    op.Attr("beta", float(beta))
    return op.Build().InferAndTryRun().RemoteBlobList()[0]

def TestMultiInput(x1, x2):
    return (
        flow.user_op_builder("my_test_multi_input")
        .Op("TestMultiInput")
        .Input("x1", [x1])
        .Input("x2", [x2])
        .Output("y")
        .Build()
        .InferAndTryRun()
        .RemoteBlobList()[0]
    )

def _sort_at_last_dim(
    input: oneflow._oneflow_internal.BlobDesc,
    direction: str = "ASCENDING",
    name: Optional[str] = None,
) -> oneflow._oneflow_internal.BlobDesc:
    assert direction in ["ASCENDING", "DESCENDING"]
    return (
        flow.user_op_builder(name if name is not None else id_util.UniqueStr("Sort_"))
        .Op("sort")
        .Input("in", [input])
        .Output("out")
        .Attr("direction", direction)
        .Build()
        .InferAndTryRun()
        .RemoteBlobList()[0]
    )

def TestListDataTypeAndListShapeAndListStringAttr(input, out_shapes, out_types, string_list):
    assert isinstance(out_shapes, list)
    assert isinstance(out_types, list)
    return (
        flow.user_op_builder("TestListDataTypeAndListShapeAndListStringAttr")
        .Op("TestListDataTypeAndListShapeAndListStringAttr")
        .Input("in", [input])
        .Output("out", 3)
        .Attr("out_shapes", out_shapes)
        .Attr("out_types", out_types)
        .Attr("string_list", string_list)
        .Build()
        .InferAndTryRun()
        .RemoteBlobList()
    )

def unpack(input, unpack_num, name=None):
    assert not flow.eager_execution_enabled()
    return (
        flow.user_op_builder(name if name is not None else id_util.UniqueStr("Unpack_"))
        .Op("unpack")
        .Input("in", [input])
        .Output("out")
        .Attr("unpack_num", unpack_num)
        .Build()
        .InferAndTryRun()
        .RemoteBlobList()[0]
    )

def split_like(input, like, name):
    return (
        flow.user_op_builder(name)
        .Op("split_like")
        .Input("in", [input])
        .Input("like", like)
        .Output("out", len(like))
        .Attr("axis", 0)
        .Build()
        .InferAndTryRun()
        .RemoteBlobList()
    )

def acc(one, max_acc_num, name=None):
    assert not flow.eager_execution_enabled()
    return (
        flow.user_op_builder(name if name is not None else id_util.UniqueStr("Acc_"))
        .Op("acc")
        .Input("in", [one])
        .Output("out")
        .Attr("max_acc_num", max_acc_num)
        .Build()
        .InferAndTryRun()
        .RemoteBlobList()[0]
    )
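
# Example (a sketch of how `unpack` and `acc` pair up for micro-batching in
# lazy mode; `some_op` is a hypothetical stand-in): `unpack` splits a batch
# into `unpack_num` micro-batches processed over successive iterations, and
# `acc` sums `max_acc_num` per-iteration results back into one blob.
#
#   micro = unpack(x, unpack_num=4)
#   y = some_op(micro)
#   total = acc(y, max_acc_num=4)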