Example #1
def tensor_getstate(self):
    if save_load_path is not None:
        # A non-None save_load_path means getstate/setstate is being called
        # from inside flow.save or flow.load.
        assert isinstance(save_load_path, Path)
        if consistent_src_dsk_rank is None:
            assert self.is_local
            rel_dir_name = id_util.UniqueStr("tensor_")
            abs_dir_name = save_load_path / rel_dir_name

            tensor = self
        else:
            assert not self.is_local
            rel_dir_name = f"consistent_tensor_{self.consistent_id()}"
            abs_dir_name = save_load_path / rel_dir_name

            tensor = self.to_consistent(sbp=[flow.sbp.broadcast] *
                                        len(self.sbp)).to_local()
        if (consistent_src_dsk_rank is None
                or consistent_src_dsk_rank == flow.env.get_rank()):
            _save_tensor_to_disk(tensor, abs_dir_name)

        return {"path": rel_dir_name}
    else:
        # A None save_load_path means getstate/setstate is being called from
        # somewhere other than flow.save/flow.load, for example copy.deepcopy.
        if self.is_local:
            return {"data": self.numpy(), "dtype": self.dtype}
        else:
            return {
                "data": self.numpy(),
                "dtype": self.dtype,
                "placement": self.placement,
                "sbp": self.sbp,
            }
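
tensor_getstate above is the __getstate__ hook that pickle and copy.deepcopy invoke to obtain a serializable state dict. The sketch below is a minimal, library-agnostic illustration of that protocol using a hypothetical ToyTensor class (not OneFlow's Tensor); it mirrors only the branch taken outside flow.save/flow.load.

import copy

import numpy as np


class ToyTensor:
    def __init__(self, data, dtype=None):
        self.data = np.asarray(data, dtype=dtype)

    def __getstate__(self):
        # Like the non-save/load branch above: hand back plain, picklable
        # pieces instead of the live object.
        return {"data": self.data, "dtype": self.data.dtype}

    def __setstate__(self, state):
        self.data = np.asarray(state["data"], dtype=state["dtype"])


t = ToyTensor([1.0, 2.0])
t_copy = copy.deepcopy(t)  # routed through __getstate__/__setstate__
print(t_copy.data)         # [1. 2.]
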
Example #2
 def __init__(
     self,
     target_width: int,
     target_height: int,
     num_attempts: Optional[int] = 10,
     seed: Optional[int] = 0,
     random_area: Optional[Sequence[float]] = [0.08, 1.0],
     random_aspect_ratio: Optional[Sequence[float]] = [0.75, 1.333333],
     num_workers: Optional[int] = 3,
     warmup_size: Optional[int] = 6400,
     max_num_pixels: Optional[int] = 67108864,
 ):
     super().__init__()
     self.target_width = target_width
     self.target_height = target_height
     self.num_attempts = num_attempts
     self.seed = seed
     assert len(random_area) == 2
     self.random_area = random_area
     assert len(random_aspect_ratio) == 2
     self.random_aspect_ratio = random_aspect_ratio
     self.num_workers = num_workers
     self.warmup_size = warmup_size
     self.max_num_pixels = max_num_pixels
     gpu_decoder_conf = (flow._oneflow_internal.oneflow.core.operator.
                         op_conf.ImageDecoderRandomCropResizeOpConf())
     gpu_decoder_conf.set_in("error_input_need_to_be_replaced")
     gpu_decoder_conf.set_out("out")
     self._op = flow._oneflow_internal.one.ImageDecoderRandomCropResizeOpExpr(
         id_util.UniqueStr("ImageGpuDecoder"), gpu_decoder_conf, ["in"],
         ["out"])
Example #3
 def __init__(self, op_type_name, op_name=None):
     if op_name is None:
         op_name = id_util.UniqueStr(op_type_name)
     self._builder = oneflow._oneflow_internal.one.OpBuilder(
         op_type_name, op_name)
     self._op = None
     self._op_type_name = op_type_name
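
Note that the unique default name is computed inside the body rather than in the signature: Python evaluates default expressions only once, at definition time, and here the default also depends on another parameter. A minimal sketch of the same per-call defaulting idiom, with a hypothetical unique_str helper standing in for id_util.UniqueStr:

import itertools
from typing import Optional

_ids = itertools.count()


def unique_str(prefix: str) -> str:
    # Stand-in for id_util.UniqueStr; not OneFlow's implementation.
    return f"{prefix}_{next(_ids)}"


def make_op(op_type_name: str, op_name: Optional[str] = None):
    if op_name is None:  # resolved on every call, not once at definition
        op_name = unique_str(op_type_name)
    return op_type_name, op_name


print(make_op("relu"))  # ('relu', 'relu_0')
print(make_op("relu"))  # ('relu', 'relu_1')
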
Example #4
def tensor_getstate(self):
    if save_load_path is not None:
        # A non-None save_load_path means getstate/setstate is being called
        # from inside flow.save or flow.load.
        assert isinstance(save_load_path, Path)
        if global_src_dsk_rank is None:
            assert self.is_local
            rel_dir_name = id_util.UniqueStr("tensor_")
            abs_dir_name = save_load_path / rel_dir_name

            tensor = self
        else:
            assert not self.is_local
            rel_dir_name = f"global_tensor_{self.global_id()}"
            abs_dir_name = save_load_path / rel_dir_name

            tensor = self.to_global(
                sbp=flow.sbp.broadcast,
                placement=flow.placement("cpu", [global_src_dsk_rank]),
            ).to_local()
        if (global_src_dsk_rank is None
                or global_src_dsk_rank == flow.env.get_rank()):
            _save_tensor_to_disk(tensor, abs_dir_name)

        return {"path": rel_dir_name}
    else:
        # A None save_load_path means getstate/setstate is being called from
        # somewhere other than flow.save/flow.load, for example copy.deepcopy.
        if self.is_local:
            if self.is_cuda:
                device = "cuda"
            else:
                device = "cpu"
            return {
                "data": self.numpy(),
                "dtype": self.dtype,
                "device": device
            }
        else:
            return {
                "data": self.numpy(),
                "dtype": self.dtype,
                "placement": self.placement,
                "sbp": self.sbp,
            }
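
In the global branch above, the tensor is first gathered onto the designated source rank and only that rank writes to disk, while every rank returns the same relative path so the pickled state stays identical across ranks. The sketch below shows just the rank-gating part with a hypothetical get_rank helper (not flow.env.get_rank) and a numpy array in place of a OneFlow tensor:

from pathlib import Path

import numpy as np


def get_rank() -> int:
    # Hypothetical stand-in for flow.env.get_rank(); single-process, so 0.
    return 0


def save_state(array: np.ndarray, save_load_path: Path, src_rank=None) -> dict:
    rel_dir_name = "tensor_0"  # stand-in for a unique directory name
    abs_dir_name = save_load_path / rel_dir_name
    # Write only on the designated source rank (or on every rank when no
    # source rank is given, i.e. the local-tensor case).
    if src_rank is None or src_rank == get_rank():
        abs_dir_name.mkdir(parents=True, exist_ok=True)
        np.save(abs_dir_name / "data.npy", array)
    return {"path": rel_dir_name}


print(save_state(np.zeros(3), Path("/tmp/ckpt"), src_rank=0))  # {'path': 'tensor_0'}
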
Example #5
 def __init__(
     self,
     target_width: int,
     target_height: int,
     num_attempts: Optional[int] = 10,
     seed: Optional[int] = 0,
     random_area: Optional[Sequence[float]] = [0.08, 1.0],
     random_aspect_ratio: Optional[Sequence[float]] = [0.75, 1.333333],
     num_workers: Optional[int] = 3,
     warmup_size: Optional[int] = 6400,
     max_num_pixels: Optional[int] = 67108864,
 ):
     super().__init__()
     self.target_width = target_width
     self.target_height = target_height
     self.num_attempts = num_attempts
     self.seed = seed
     assert len(random_area) == 2
     self.random_area = random_area
     assert len(random_aspect_ratio) == 2
     self.random_aspect_ratio = random_aspect_ratio
     self.num_workers = num_workers
     self.warmup_size = warmup_size
     self.max_num_pixels = max_num_pixels
     gpu_decoder_conf = (flow.core.operator.op_conf_pb2.
                         ImageDecoderRandomCropResizeOpConf())
     # `in` is a Python keyword, so `gpu_decoder_conf.in = ...` would not even
     # parse; set the field via setattr instead.
     setattr(gpu_decoder_conf, "in", "error_input_need_to_be_replaced")
     gpu_decoder_conf.out = "out"
     # Set default values explicitly, otherwise parsing the conf fails.
     gpu_decoder_conf.target_width = -1
     gpu_decoder_conf.target_height = -1
     gpu_decoder_conf_str = text_format.MessageToString(gpu_decoder_conf)
     self._op = flow._oneflow_internal.one.ImageDecoderRandomCropResizeOpExpr(
         id_util.UniqueStr("ImageGpuDecoder"), gpu_decoder_conf_str, ["in"],
         ["out"])