Exemplo n.º 1
0
 def numpy(self) -> np.ndarray:
     """Load this variable's on-disk data into a numpy array.

     Reads the raw bytes from ``self.file_path`` and reshapes them to the
     variable's recorded shape; requires meta info to be present.
     """
     if not self.has_meta_info_:
         raise RuntimeError("This variable does not have meta info")
     # Translate the OneFlow dtype to its numpy equivalent before reading.
     np_dtype = dtype_util.convert_oneflow_dtype_to_numpy_dtype(self.dtype)
     flat = np.fromfile(self.file_path, dtype=np_dtype)
     return flat.reshape(self.shape)
Exemplo n.º 2
0
    def run_job(test_case, device=None, in_type=None, shape=None):
        """Run a train job exercising fused batch-norm + add + relu.

        Builds a trainable variable added to the input, applies
        ``_batch_norm`` + constant add + relu, and minimizes the result
        with SGD on the given ``device``.
        """
        assert shape is not None
        flow.clear_default_session()
        cfg = flow.FunctionConfig()

        @flow.global_function(type="train", function_config=cfg)
        def FuseBnAddReluJob(x: oft.Numpy.Placeholder(
            shape, dtype=in_type)) -> oft.Numpy:
            two = flow.constant_like(x, 2)
            with flow.scope.placement(device, "0:0-0"):
                var = flow.get_variable(
                    "x1",
                    shape=shape,
                    dtype=in_type,
                    initializer=flow.random_uniform_initializer(
                        minval=-10, maxval=10),
                    trainable=True,
                )
                x = var + x
                loss = flow.nn.relu(_batch_norm(x, last=False) + two) + 1
                scheduler = flow.optimizer.PiecewiseConstantScheduler(
                    [], [0.0001])
                flow.optimizer.SGD(scheduler, momentum=0).minimize(loss)
                return loss

        numpy_dtype = dtype_util.convert_oneflow_dtype_to_numpy_dtype(in_type)
        input_arr = (np.random.rand(*shape) * 10).astype(numpy_dtype)
        FuseBnAddReluJob(input_arr)
Exemplo n.º 3
0
    def run_fuse_cast_scale_mlir(test_case,
                                 device=None,
                                 in_type=None,
                                 out_type=None,
                                 shape=None):
        """Check that the fused cast + scalar-multiply job matches numpy.

        The job casts the input to ``out_type`` and multiplies by a
        non-trainable scale variable; the result is compared against the
        equivalent numpy computation.
        """
        flow.clear_default_session()
        cfg = flow.FunctionConfig()

        @flow.global_function(function_config=cfg)
        def FuseCastScaleJob(x: oft.Numpy.Placeholder(
            shape, dtype=in_type)) -> Tuple[oft.Numpy, oft.Numpy]:
            with flow.scope.placement(device, "0:0-0"):
                scale = flow.get_variable(
                    "scale",
                    shape=(1, ),
                    dtype=out_type,
                    initializer=flow.random_uniform_initializer(),
                    trainable=False,
                )
                casted = flow.cast(x, dtype=out_type)
                return (casted * scale, scale)

        numpy_dtype = dtype_util.convert_oneflow_dtype_to_numpy_dtype(in_type)
        input_arr = (np.random.rand(*shape) * 10).astype(numpy_dtype)
        loss, scale = FuseCastScaleJob(input_arr)
        # Reference computation in numpy must agree with the fused job.
        test_case.assertTrue(np.allclose(loss, input_arr * scale))
Exemplo n.º 4
0
    def run_job(test_case, device=None, in_type=None, shape=None):
        """Run a job that should fuse ``tril`` with scalar multiplication.

        Computes tril(x * 3) + tril(x) * 3 on the given ``device`` with a
        random input of the requested shape and dtype.
        """
        assert shape is not None
        flow.clear_default_session()
        cfg = flow.FunctionConfig()

        @flow.global_function(function_config=cfg)
        def FuseTrilScaleJob(x: oft.Numpy.Placeholder(
            shape, dtype=in_type)) -> oft.Numpy:
            with flow.scope.placement(device, "0:0-0"):
                factor = 3.0
                # Scale-then-tril plus tril-then-scale, summed.
                out = flow.math.tril(x * factor) + flow.math.tril(x) * factor
                return out

        numpy_dtype = dtype_util.convert_oneflow_dtype_to_numpy_dtype(in_type)
        input_arr = (np.random.rand(*shape) * 10).astype(numpy_dtype)
        FuseTrilScaleJob(input_arr)
Exemplo n.º 5
0
 def __init__(
     self,
     var_dir: str,
     dtype: Optional[oneflow.dtype] = None,
     shape: Optional[Sequence[int]] = None,
 ):
     """Represent a variable whose data lives on disk under ``var_dir``.

     Shape and dtype come either from the meta-info file found inside
     ``var_dir``, or from the explicit ``shape``/``dtype`` arguments.
     The two sources are mutually exclusive: when meta info exists on
     disk, both arguments must be ``None``; otherwise both must be given
     together (or both omitted, leaving the blob without meta info).

     Raises:
         FileNotFoundError: if ``var_dir`` contains no data file.
         RuntimeError: if exactly one of ``shape``/``dtype`` is given.
     """
     data_path = os.path.join(var_dir, DATA_FILENAME)
     if not os.path.isfile(data_path):
         # Include the offending path so the error is actionable.
         raise FileNotFoundError(data_path)
     self.var_dir_ = var_dir
     meta_info_path = os.path.join(self.var_dir_, META_INFO_FILENAME)
     if os.path.exists(meta_info_path):
         meta_info = variable_meta_info_pb.VariableMetaInfo()
         with open(meta_info_path) as f:
             text_format.Parse(f.read(), meta_info)
         self.has_meta_info_ = True
     else:
         self.has_meta_info_ = False
     if self.has_meta_info_:
         # Meta info on disk is authoritative; callers must not also
         # pass shape/dtype.
         assert dtype is None and shape is None
         self.shape_ = tuple(meta_info.shape.dim)
         self.dtype_ = dtype_util.convert_proto_dtype_to_oneflow_dtype(
             meta_info.data_type)
     elif shape is not None and dtype is not None:
         self.shape_ = shape
         self.dtype_ = dtype
         self.has_meta_info_ = True
     elif shape is not None or dtype is not None:
         raise RuntimeError(
             "both or neither of shape and dtype should be None")
     else:
         pass
     if self.has_meta_info_:
         # Sanity check: the data file size must equal elem_count * itemsize.
         itemsize = np.dtype(
             dtype_util.convert_oneflow_dtype_to_numpy_dtype(
                 self.dtype_)).itemsize
         # Read the attribute assigned above (self.shape_) instead of
         # relying on a ``shape`` property being defined elsewhere.
         assert os.path.getsize(data_path) == np.prod(
             self.shape_).item() * itemsize
Exemplo n.º 6
0
def generate_values_by_initializer(initializer, shape, dtype):
    """Materialize ``initializer`` output as a numpy array.

    Calls ``initializer`` with the total element count for ``shape``,
    then casts the values to the numpy dtype matching the given OneFlow
    ``dtype`` and reshapes them to ``shape``.
    """
    target_dtype = np.dtype(
        dtype_util.convert_oneflow_dtype_to_numpy_dtype(dtype))
    num_elems = _elem_cnt(shape)
    values = np.array(initializer(num_elems))
    return values.astype(target_dtype).reshape(shape)