Example #1
def build(builder):
    # `blob_object` and `ndarray` come from the enclosing scope: pick a mirrored
    # or fixed tensor definition based on the blob's parallel attribute, then
    # feed the numpy value into the eager blob.
    if blob_object.op_arg_parallel_attr.is_mirrored():
        input_blob_def = input_blob_def_util.MirroredTensorDef(
            ndarray.shape,
            dtype=dtype_util.convert_numpy_dtype_to_oneflow_dtype(ndarray.dtype),
        )
    else:
        input_blob_def = input_blob_def_util.FixedTensorDef(
            ndarray.shape,
            dtype=dtype_util.convert_numpy_dtype_to_oneflow_dtype(ndarray.dtype),
        )
    push_util.FeedValueToEagerBlob(blob_object, input_blob_def, ndarray)
Example #2
def _FeedValueToVariable(var_blob: oneflow_api.EagerConsistentBlob,
                         value: ValueContainer) -> None:
    """
    Feed the value of `value` to the variable `var_blob`
    """
    assert isinstance(value, (EagerBlobTrait, FileBackendVariableBlob,
                              np.ndarray)), "Unknown value type: {}".format(
                                  type(value).__name__)

    if isinstance(value, FileBackendVariableBlob):
        if not value.has_meta_info_:
            value = FileBackendVariableBlob(value.var_dir_, var_blob.dtype,
                                            var_blob.shape)
    assert var_blob.shape == value.shape, "{} vs {}".format(
        var_blob.shape, value.shape)
    if isinstance(value, np.ndarray):
        value_flow_dtype = dtype_util.convert_numpy_dtype_to_oneflow_dtype(
            value.dtype)
    else:
        value_flow_dtype = value.dtype
    assert var_blob.dtype == value_flow_dtype, "{} vs {}".format(
        var_blob.dtype, value_flow_dtype)
    for start, stop, slice in _ReadSlice(value):
        slice_value_blob = _GetCpu0VariableBlobFromNumpy(slice, var_blob.dtype)
        _LogicalSliceAssign(
            var_blob,
            slice_value_blob,
            start,
            stop,
        )
Example #3
def construct_tensor(
    data,
    dtype=None,
    device=None,
    requires_grad=False,
    placement=None,
    sbp=None,
    is_consistent=False,
    is_lazy=False,
):
    if _is_scalar(data) or _input_args_is_data(data):
        if (not _input_args_is_numpy(data) and dtype is None
                and _input_dtype_is_float(data)):
            dtype = flow.float32
        data = np.array(data)
        if dtype is None:
            dtype = dtype_util.convert_numpy_dtype_to_oneflow_dtype(data.dtype)
        return Tensor(
            data,
            dtype=dtype,
            device=device,
            requires_grad=requires_grad,
            placement=placement,
            sbp=sbp,
            is_consistent=is_consistent,
            is_lazy=is_lazy,
        )
    else:
        raise TypeError("Construction error, invalid combination of arguments")
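
A quick illustration of what the branch above accepts (hypothetical calls; construct_tensor and its private helpers are assumed importable from the module that defines them): plain Python float data falls back to flow.float32, while a numpy array keeps its own dtype via convert_numpy_dtype_to_oneflow_dtype.

# Hypothetical usage sketch, not part of the original example.
import numpy as np

t1 = construct_tensor([1.0, 2.0, 3.0])               # no dtype given -> flow.float32
t2 = construct_tensor(np.arange(4, dtype=np.int64))  # numpy dtype -> flow.int64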
Example #4
def build(builder):
    # `op_name` and `ndarray` come from the enclosing scope; the input definition
    # is chosen from the blob's attributes: tensor list, mirrored, or fixed.
    blob_object = builder.MakeLazyRefBlobObject(op_name)
    if blob_object.op_arg_blob_attr.is_tensor_list:
        input_blob_def = input_blob_def_util.MirroredTensorListDef(
            [x.shape for x in ndarray],
            dtype=dtype_util.convert_numpy_dtype_to_oneflow_dtype(
                ndarray.dtype),
        )
    elif blob_object.op_arg_parallel_attr.is_mirrored():
        input_blob_def = input_blob_def_util.MirroredTensorDef(
            ndarray.shape,
            dtype=dtype_util.convert_numpy_dtype_to_oneflow_dtype(
                ndarray.dtype),
        )
    else:
        input_blob_def = input_blob_def_util.FixedTensorDef(
            ndarray.shape,
            dtype=dtype_util.convert_numpy_dtype_to_oneflow_dtype(
                ndarray.dtype),
        )
    push_util.FeedValueToEagerBlob(blob_object, input_blob_def, ndarray)
    Yield()
Example #5
def infer_oneflow_data_placeholder(
    self, batch: Tuple[np.ndarray, ...] = None, optimizer_idx: int = 0
):
    # Build a oneflow_typing.Numpy.Placeholder matching the shape and dtype of
    # every numpy array in the batch.
    assert isinstance(batch, tuple), "model.NumpyDataModule must return a tuple."
    data_placeholder_list = []
    for item in batch:
        assert isinstance(
            item, np.ndarray
        ), "model.NumpyDataModule must return a tuple of numpy."
        of_dtype = dtype_util.convert_numpy_dtype_to_oneflow_dtype(item.dtype)
        numpy_placeholder = oneflow_typing.Numpy.Placeholder(
            shape=item.shape, dtype=of_dtype
        )
        data_placeholder_list.append(numpy_placeholder)
    return data_placeholder_list
Example #6
def FeedValueToVariable(
    var_blob: Union[oneflow._oneflow_internal.EagerConsistentBlob,
                    "oneflow.Tensor"],
    value: ValueContainer,
    scope_symbol_id: Optional[int],
) -> None:
    """
    Feed the value of `value` to the variable `var_blob`
    """
    assert isinstance(value,
                      (EagerBlobTrait, FileBackendVariableBlob, np.ndarray,
                       oneflow.Tensor)), "Unknown value type: {}".format(
                           type(value).__name__)

    if isinstance(value, FileBackendVariableBlob):
        if not value.has_meta_info_:
            value = FileBackendVariableBlob(value.var_dir_, var_blob.dtype,
                                            var_blob.shape)
    assert var_blob.shape == value.shape, "{} vs {}".format(
        var_blob.shape, value.shape)
    if isinstance(value, np.ndarray):
        value_flow_dtype = dtype_util.convert_numpy_dtype_to_oneflow_dtype(
            value.dtype)
    else:
        value_flow_dtype = value.dtype
    assert var_blob.dtype == value_flow_dtype, "{} vs {}".format(
        var_blob.dtype, value_flow_dtype)

    if isinstance(var_blob, oneflow.Tensor):
        raise ValueError("Tensor object arguments are not supported")
    else:
        assert isinstance(var_blob, EagerBlobTrait)
        var_blob_object = var_blob.blob_object

    for start, stop, slice in _ReadSlice(value):
        slice_value_blob = _GetCpu0VariableBlobFromNumpy(slice, var_blob.dtype)
        _LogicalSliceAssign(
            var_blob_object,
            slice_value_blob.blob_object,
            start,
            stop,
            scope_symbol_id,
        )
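
All of the examples above funnel a numpy dtype through dtype_util.convert_numpy_dtype_to_oneflow_dtype before handing data to OneFlow. A minimal standalone sketch of that conversion (the import path for dtype_util is an assumption based on how the snippets alias it; adjust to the OneFlow version in use):

# Minimal sketch; the dtype_util import path below is assumed, not taken from the examples.
import numpy as np
import oneflow as flow
from oneflow.python.framework import dtype as dtype_util  # assumed alias

arr = np.zeros((2, 3), dtype=np.float32)
of_dtype = dtype_util.convert_numpy_dtype_to_oneflow_dtype(arr.dtype)
assert of_dtype == flow.float32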