Example #1
def check_astype_dtype(dtype):
    """Check whether dtype is a valid input, and convert to mstype"""
    all_types = mstype.__dtype__ + ["int", "float", "bool"]
    if isinstance(dtype, str):
        if dtype.lower() not in all_types:
            raise TypeError(f"`{dtype}` not understood.")
        dtype = mstype.pytype_to_dtype(np.dtype(dtype.lower()))
    elif isinstance(dtype, type):
        dtype = mstype.pytype_to_dtype(dtype)
    elif dtype not in mstype.number_type + (mstype.bool_,):
        raise TypeError(f"`{dtype}` not understood.")
    return dtype
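
A hedged usage sketch of the helper above, assuming the snippet's imports (numpy as np, mindspore.common.dtype as mstype); the exact dtypes returned depend on how pytype_to_dtype maps types on the platform:

# Illustrative only; assumes the imports used by the snippet above.
import numpy as np
import mindspore.common.dtype as mstype

check_astype_dtype("float32")     # string input -> mstype.float32
check_astype_dtype(bool)          # Python type -> mstype.bool_ (typical mapping)
check_astype_dtype(mstype.int32)  # an mstype value passes through unchanged
# Any string missing from all_types raises TypeError(f"`{dtype}` not understood.")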
Example #2
def _fill_param_into_net(net, parameter_list):
    """
    Fills parameter_list into net.

    Args:
        net (Cell): train network.
        parameter_list (list): parameters list from ge callback.
    """
    parameter_dict = {}
    for each_param in parameter_list:
        param_name = each_param["name"]
        if isinstance(each_param["data"], Parameter):
            each_param["data"].init_data()
        np_val = each_param["data"].asnumpy()
        if np_val.shape == (1, ):
            parameter_dict[param_name] = Parameter(np_val, name=param_name)
        elif np_val.shape == ():
            parameter_dict[param_name] = Parameter(
                Tensor(np_val.tolist(), mstype.pytype_to_dtype(np_val.dtype)),
                name=param_name)
        else:
            parameter_dict[param_name] = Parameter(Tensor(np_val), name=param_name)

    load_param_into_net(net, parameter_dict)
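
For reference, a hedged sketch of the parameter_list format this helper consumes: a list of dicts with 'name' and 'data' keys, where 'data' is a Parameter (or anything exposing asnumpy()). The names and values below are purely illustrative:

# Illustrative only; `net` stands for some user-defined Cell.
import numpy as np
from mindspore import Tensor, Parameter

parameter_list = [
    # (1,)-shaped and ()-shaped values exercise the first two branches above
    {"name": "step", "data": Parameter(Tensor(np.array([0], np.int32)), name="step")},
    {"name": "fc.weight", "data": Parameter(Tensor(np.ones((2, 2), np.float32)), name="fc.weight")},
]
# _fill_param_into_net(net, parameter_list)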
Example #3
def set_param_type(args, hint_type):
    """
    Find the common type among arguments.

    Args:
        args (dict): dictionary of arguments, {'name':value}.
        hint_type (mindspore.dtype): hint type to return.

    Raises:
        TypeError: if tensors in args are not the same dtype.
    """
    int_type = mstype.int_type + mstype.uint_type
    if hint_type in int_type:
        hint_type = mstype.float32
    common_dtype = None
    for name, arg in args.items():
        if hasattr(arg, 'dtype'):
            if isinstance(arg, np.ndarray):
                cur_dtype = mstype.pytype_to_dtype(arg.dtype)
            else:
                cur_dtype = arg.dtype
            if common_dtype is None:
                common_dtype = cur_dtype
            elif cur_dtype != common_dtype:
                raise TypeError(
                    f"{name} should have the same dtype as other arguments.")
    if common_dtype in int_type or common_dtype == mstype.float64:
        return mstype.float32
    return hint_type if common_dtype is None else common_dtype
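
A hedged example of the promotion rules encoded above: a common integer or float64 dtype is promoted to float32, and hint_type is only used when no argument carries a dtype (the values below are illustrative):

# Illustrative only; assumes numpy and mstype from the snippet's imports.
import numpy as np
import mindspore.common.dtype as mstype

set_param_type({"mean": np.zeros(3, np.float16), "sd": np.ones(3, np.float16)}, mstype.float32)
# -> mstype.float16: the arguments' common dtype wins over the hint
set_param_type({"mean": np.zeros(3, np.int32)}, mstype.float32)
# -> mstype.float32: integer dtypes are promoted
set_param_type({"mean": 1.0, "sd": 2.0}, mstype.int32)
# -> mstype.float32: no argument has a .dtype, and an integer hint is promoted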
Example #4
def get_dataclass_attributes(cls):
    """Get attributes of dataclass."""
    fields = cls.__dataclass_fields__
    attributes = {
        name: pytype_to_dtype(field.type)
        for name, field in fields.items()
    }
    return attributes
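
A hedged sketch, assuming a plain dataclass whose field annotations are concrete Python types that pytype_to_dtype accepts (string annotations, e.g. under `from __future__ import annotations`, would not work here, since field.type would then be a str):

# Illustrative only.
from dataclasses import dataclass

@dataclass
class Sample:
    label: int
    score: float

attrs = get_dataclass_attributes(Sample)
# roughly {'label': <int dtype>, 'score': <float dtype>}; the exact mstype
# values depend on how pytype_to_dtype maps Python int/float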
Example #5
def common_dtype(arg_a, name_a, arg_b, name_b, hint_type):
    """
    check if arg_a and arg_b have the same dtype.
    """
    if hasattr(arg_a, 'dtype') and hasattr(arg_b, 'dtype'):
        if isinstance(arg_a, np.ndarray):
            a_dtype = mstype.pytype_to_dtype(arg_a.dtype)
        else:
            a_dtype = arg_a.dtype
        if isinstance(arg_b, np.ndarray):
            b_dtype = mstype.pytype_to_dtype(arg_b.dtype)
        else:
            b_dtype = arg_b.dtype
        if a_dtype != b_dtype:
            raise TypeError(f"{name_a} and {name_b} should have the same dtype.")
        int_type = mstype.int_type + mstype.uint_type
        if a_dtype in int_type or a_dtype == mstype.float64:
            return mstype.float32
        return a_dtype
    return hint_type
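
A hedged example of this pairwise variant (mirroring set_param_type above, but for exactly two named arguments):

# Illustrative only; assumes numpy and mstype from the snippet's imports.
import numpy as np
import mindspore.common.dtype as mstype

common_dtype(np.zeros(2, np.float64), "a", np.ones(2, np.float64), "b", mstype.float32)
# -> mstype.float32: a shared float64 dtype is downcast
common_dtype(1.0, "a", 2.0, "b", mstype.float16)
# -> mstype.float16: plain floats carry no .dtype, so hint_type is returned
# Mixing dtypes, e.g. an int32 array with a float32 array, raises
# TypeError: a and b should have the same dtype.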
Example #6
def _convert_type(types):
    """
    Convert from numpy type to tensor type.

    Args:
        types (list): Numpy type list of element in dataset.

    Returns:
        list, list of element in dataset.
    """
    ms_types = []
    for np_type in types:
        ms_type = pytype_to_dtype(np_type)
        ms_types.append(ms_type)
    return ms_types
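
A hedged example of the conversion, assuming pytype_to_dtype accepts NumPy scalar types as the snippets above suggest:

# Illustrative only.
import numpy as np

_convert_type([np.float32, np.int32])
# -> roughly [mstype.float32, mstype.int32]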
Example #7
def connect_network_with_dataset(network, dataset_helper):
    """
    Connect the `network` with dataset in `dataset_helper`.

    This function wraps the input network with 'GetNext' so that the data can be fetched automatically from the
    data channel corresponding to the 'queue_name' and passed to the input network during forward computation.

    Note:
        When running the network on Ascend/GPU in graph mode, this function wraps the input network with
        'GetNext'; in other cases, the input network is returned unchanged.
        'GetNext' fetches data only in sink mode, so this function does not apply in non-sink mode.

    Args:
        network (Cell): The training network for dataset.
        dataset_helper (DatasetHelper): A class to process the MindData dataset, it provides the type, shape and queue
            name of the dataset to wrap the `GetNext`.

    Returns:
        Cell, a new network wrapped with 'GetNext' in the case of running the task on Ascend in graph mode, otherwise
        it is the input network.

    Supported Platforms:
        ``Ascend`` ``GPU``

    Examples:
        >>> from mindspore import DatasetHelper
        >>>
        >>> # call create_dataset function to create a regular dataset, refer to mindspore.dataset
        >>> train_dataset = create_custom_dataset()
        >>> dataset_helper = DatasetHelper(train_dataset, dataset_sink_mode=True)
        >>> net = Net()
        >>> net_with_get_next = connect_network_with_dataset(net, dataset_helper)
    """

    dataset_iter = dataset_helper.iter
    dataset = dataset_iter.dataset

    if isinstance(dataset_iter, _DatasetIterNormal):
        raise RuntimeError(
            "Dataset should be connected with network only in sink mode.")

    ms_role = os.getenv("MS_ROLE")
    if ms_role in ("MS_PSERVER", "MS_SCHED"):
        return network

    queue_name = dataset.__transfer_dataset__.queue_name
    if _dynamic_sink_scenario(dataset, dataset_iter):
        if not hasattr(dataset_iter, '__network__'):
            dataset_iter.__network__ = network
        network = dataset_iter.__network__

        dataset_types, dataset_shapes = dataset_helper.get_data_info()
        dataset_types = [pytype_to_dtype(x) for x in dataset_types]

        key = str(dataset_types) + str(dataset_shapes)
        if hasattr(dataset_iter, '__network_manage__') and key in dataset_iter.__network_manage__:
            network = dataset_iter.__network_manage__[key]
        else:
            if _need_to_full():
                device_num = _get_device_num()
                dataset_shapes = _to_full_shapes(dataset_shapes, device_num)

            network = _generate_dataset_sink_mode_net(network, dataset_shapes,
                                                      dataset_types,
                                                      queue_name)
            if not hasattr(dataset_iter, '__network_manage__'):
                dataset_iter.__network_manage__ = dict()
            dataset_iter.__network_manage__[key] = network
        return network

    if not hasattr(dataset, '__me_inited__') and \
       not context.get_context("enable_ge") and \
       context.get_context("device_target") in ("Ascend", "GPU"):
        dataset.__me_inited__ = True
        dataset_types, dataset_shapes = dataset_helper.types_shapes()
        network = _generate_dataset_sink_mode_net(network, dataset_shapes,
                                                  dataset_types, queue_name)

    if hasattr(dataset_iter, "sink_size") and \
       dataset_iter.sink_size == 1 and \
       dataset.get_dataset_size() != 1 and \
       hasattr(dataset_iter, "sink_count") and \
       dataset_iter.sink_count == 1 and \
       context.get_context("device_target") == "Ascend" and \
       context.get_context("mode") == context.PYNATIVE_MODE:
        dataset_helper.get_data_info()

    return network
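
The dynamic-sink branch above memoizes one compiled network per data signature, keyed by the stringified types and shapes, so a new 'GetNext' wrapper is only generated when the signature changes. A minimal standalone sketch of that caching pattern (build_net here stands in for _generate_dataset_sink_mode_net and is purely illustrative):

# Illustrative sketch of the shape-keyed network cache used above.
_network_cache = {}

def get_or_build_network(base_net, dataset_types, dataset_shapes, build_net):
    key = str(dataset_types) + str(dataset_shapes)  # same key scheme as above
    if key not in _network_cache:
        _network_cache[key] = build_net(base_net, dataset_shapes, dataset_types)
    return _network_cache[key]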
Example #8
def connect_network_with_dataset(network, dataset_helper):
    """
    Connect the `network` with dataset in `dataset_helper`.

    This function wraps the input network with 'GetNext' so that the data can be fetched automatically from the
    data channel corresponding to the 'queue_name' and passed to the input network during forward computation.

    Note:
        When running the network on Ascend/GPU in graph mode, this function wraps the input network with
        'GetNext'; in other cases, the input network is returned unchanged.
        'GetNext' fetches data only in sink mode, so this function does not apply in non-sink mode.

    Args:
        network (Cell): The training network for dataset.
        dataset_helper (DatasetHelper): A class to process the MindData dataset; it provides the type, shape and
            queue name of the dataset to wrap the `GetNext`.

    Returns:
        Cell, a new network wrapped with 'GetNext' in the case of running the task on Ascend in graph mode, otherwise
        it is the input network.

    Examples:
        >>> # call create_dataset function to create a regular dataset, refer to mindspore.dataset
        >>> train_dataset = create_dataset()
        >>> dataset_helper = mindspore.DatasetHelper(train_dataset, dataset_sink_mode=True)
        >>> net = Net()
        >>> net_with_get_next = connect_network_with_dataset(net, dataset_helper)
    """
    class _DataWrapper(nn.Cell):
        """
        Wraps the input network with a dataset which automatically fetches data with 'GetNext' function from the
        dataset channel 'queue_name' and performs the forward computation.
        """

        def __init__(self, network, dataset_types, dataset_shapes, queue_name):
            super(_DataWrapper, self).__init__(auto_prefix=False, flags=network.get_flags())
            # Also copy the flag in `network` construct
            flags = getattr(network.__class__.construct, "_mindspore_flags", {})
            self.add_flags(**flags)
            self.get_next = P.GetNext(dataset_types, dataset_shapes, len(dataset_types), queue_name)
            self.network = network

        def construct(self):
            outputs = self.get_next()
            return self.network(*outputs)

    dataset_iter = dataset_helper.iter
    dataset = dataset_iter.dataset

    if isinstance(dataset_iter, _DatasetIterNormal):
        raise RuntimeError("Dataset should be connected with network only in sink mode.")

    if (hasattr(dataset_iter, "sink_size") and dataset_iter.sink_size == 1) \
        and (hasattr(dataset_iter, "sink_count") and dataset_iter.sink_count == 1) \
        and context.get_context("device_target") == "Ascend":

        if not hasattr(dataset, '__network__'):
            dataset.__network__ = network
        network = dataset.__network__

        dataset_types, dataset_shapes = dataset_helper.get_data_info()
        dataset_types = [pytype_to_dtype(x) for x in dataset_types]

        key = str(dataset_types) + str(dataset_shapes)
        if hasattr(dataset, '__network_manage__') and key in dataset.__network_manage__:
            network = dataset.__network_manage__[key]
        else:
            network = _DataWrapper(network, dataset_types, dataset_shapes, dataset.__transfer_dataset__.queue_name)
            if not hasattr(dataset, '__network_manage__'):
                dataset.__network_manage__ = dict()
            dataset.__network_manage__[key] = network

        return network

    if not hasattr(dataset, '__me_inited__') and \
       context.get_context("device_target") in ("Ascend", "GPU") and \
       not context.get_context("enable_ge"):
        dataset.__me_inited__ = True

        dataset_types, dataset_shapes = dataset_helper.types_shapes()
        queue_name = dataset.__transfer_dataset__.queue_name

        network = _DataWrapper(network, dataset_types, dataset_shapes, queue_name)
    return network