Example #1: _get_parameter_broadcast()
# Imports assumed from the surrounding MindSpore module (exact paths may vary
# between MindSpore versions):
from mindspore import log as logger
from mindspore.common.seed import get_seed
from mindspore.parallel._auto_parallel_context import auto_parallel_context


def _get_parameter_broadcast():
    """Get the parameter broadcast."""
    parallel_mode = auto_parallel_context().get_parallel_mode()
    parameter_broadcast = auto_parallel_context().get_parameter_broadcast()

    # Warn when parameters may diverge across devices: broadcast is disabled
    # and no global seed is set, so each device initializes its own weights.
    if parallel_mode in (
            "data_parallel", "hybrid_parallel"
    ) and parameter_broadcast is False and get_seed() is None:
        logger.warning(
            "You are suggested to use mindspore.context.set_auto_parallel_context(parameter_broadcast=True)"
            " or mindspore.common.set_seed() to share parameters among multi-devices."
        )

    return parameter_broadcast
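
A minimal sketch of how the warning above can be avoided, following the two options the message itself suggests; it assumes distributed setup (device configuration, communication init) is handled elsewhere:

import mindspore as ms
from mindspore import context

# Option 1: broadcast parameters from device 0 to all other devices.
context.set_auto_parallel_context(parallel_mode="data_parallel",
                                  parameter_broadcast=True)

# Option 2: set a global seed so every device initializes identical parameters.
ms.set_seed(1)   # after this, get_seed() returns 1 and the warning is skipped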
Example #2: Dropout.__init__()
# Imports and the enclosing class are added so the snippet is self-contained;
# the exact import paths are an assumption and may vary between MindSpore versions.
from mindspore import context
from mindspore._checkparam import Validator
from mindspore.common import dtype as mstype
from mindspore.common.seed import get_seed
from mindspore.nn.cell import Cell
from mindspore.ops import operations as P


class Dropout(Cell):
    def __init__(self, keep_prob=0.5, dtype=mstype.float32):
        super(Dropout, self).__init__()
        if keep_prob <= 0 or keep_prob > 1:
            raise ValueError(
                "dropout probability should be a number in range (0, 1], but got {}"
                .format(keep_prob))
        Validator.check_subclass("dtype", dtype, mstype.number_type,
                                 self.cls_name)
        Validator.check_value_type('keep_prob', keep_prob, [float],
                                   self.cls_name)
        self.keep_prob = keep_prob
        # Use the global seed when one has been set via set_seed(); otherwise fall back to 0.
        seed0 = get_seed()
        self.seed0 = seed0 if seed0 is not None else 0
        self.seed1 = 0
        self.dtype = dtype
        self.get_shape = P.Shape()
        self.dropout_gen_mask = P.DropoutGenMask(Seed0=self.seed0,
                                                 Seed1=self.seed1)
        self.dropout_do_mask = P.DropoutDoMask()
        self.cast = P.Cast()
        self.is_gpu = context.get_context('device_target') in ["GPU"]
        self.dropout = P.Dropout(keep_prob)
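
A brief usage sketch of the seed handling above (a hypothetical call sequence, assuming the snippet is importable as written): once a global seed is set with mindspore.set_seed(), get_seed() returns it, so seed0 becomes deterministic instead of the 0 fallback:

import mindspore as ms

ms.set_seed(42)                  # get_seed() inside __init__ now returns 42
drop = Dropout(keep_prob=0.8)
print(drop.seed0)                # 42, forwarded to DropoutGenMask as Seed0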