Example #1
    def __init__(self,
                 input_dim,
                 hidden_num,
                 hidden_dim,
                 output_dim,
                 mu,
                 lamb,
                 nonlinear="leaky-relu",
                 norm_prod='paths',
                 square_prod=False):
        super(NonlinearGaussANM, self).__init__(input_dim=input_dim,
                                                hidden_num=hidden_num,
                                                hidden_dim=hidden_dim,
                                                output_dim=output_dim,
                                                mu=mu,
                                                lamb=lamb,
                                                nonlinear=nonlinear,
                                                norm_prod=norm_prod,
                                                square_prod=square_prod)

        # extra parameters are log_std
        extra_params = np.ones((self.input_dim, ))
        np.random.shuffle(extra_params)
        extra_params_list = list()
        for i, extra_param in enumerate(extra_params):
            extra_params_list.append(
                Parameter(MsTensor(np.log(extra_param).reshape(1),
                                   dtype=mstype.float32),
                          requires_grad=True,
                          name='e' + str(i)))

        # each element in the list represents a variable,
        # the size of the element is the number of extra_params per var
        self.extra_params = ParameterTuple(extra_params_list)
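For orientation, a minimal instantiation sketch; it assumes NonlinearGaussANM is importable from the surrounding module, and the argument values are placeholders rather than recommended defaults:

# Hypothetical usage; the values below are placeholders, not defaults.
model = NonlinearGaussANM(input_dim=10,   # number of observed variables
                          hidden_num=2,   # hidden layers per conditional network
                          hidden_dim=16,  # units per hidden layer
                          output_dim=1,   # output dimension of each network
                          mu=0.0,         # presumably a penalty coefficient
                          lamb=0.0)       # presumably a Lagrange multiplier
print(len(model.extra_params))  # one log_std Parameter per input variable -> 10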
Example #2
    def __init__(self,
                 data,
                 normalize=False,
                 mean=None,
                 std=None,
                 shuffle=False,
                 train_size=0.8,
                 train=True,
                 random_seed=42):
        self.random = np.random.RandomState(random_seed)

        shuffle_idx = np.arange(data.shape[0])
        if shuffle:
            self.random.shuffle(shuffle_idx)

        if isinstance(train_size, float):
            train_samples = int(data.shape[0] * train_size)
        else:
            raise TypeError("The param train_size must be a float in (0, 1)")
        if train:
            data = data[shuffle_idx[:train_samples]]
        else:
            data = data[shuffle_idx[train_samples:]]
        # as tensor
        self.data_set = MsTensor(data).astype(dtype=mstype.float32)

        # Normalize data
        self.mean, self.std = mean, std
        if normalize:
            if mean is None or std is None:
                self.mean = msnp.mean(self.data_set, 0, keepdims=True)
                self.std = msnp.std(self.data_set, 0, keepdims=True)
            self.data_set = (self.data_set - self.mean) / self.std
        self.n_samples = self.data_set.shape[0]
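The shuffle/split/normalize behaviour of this constructor can be reproduced in plain NumPy; the standalone sketch below is for illustration only and is not part of the class:

import numpy as np

data = np.random.rand(100, 5)              # 100 samples, 5 variables
rng = np.random.RandomState(42)
idx = np.arange(data.shape[0])
rng.shuffle(idx)                           # shuffle=True path
n_train = int(data.shape[0] * 0.8)         # train_size=0.8 -> 80 rows
train, test = data[idx[:n_train]], data[idx[n_train:]]
mean = train.mean(axis=0, keepdims=True)
std = train.std(axis=0, keepdims=True)
train_norm = (train - mean) / std          # same normalization as above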
Example #3
def neighbors_selection(model, all_samples, num_neighbors, thresh):
    """
    Preliminary neighborhood selection
    After pns, just model.adjacency is changed. if nodes > 50, use it.

    Parameters
    ----------
    model: model object
    all_samples: array-like
        2 dimensional array include all samples
    num_neighbors: integer
        variable number or neighbors number you want
    thresh: float
        apply for sklearn.feature_selection.SelectFromModel

    Returns
    -------
    out: model
    """

    model_adj = model.adjacency.asnumpy()
    model_adj = _pns(model_adj, all_samples, num_neighbors, thresh)
    model.adjacency = MsTensor(model_adj).copy()

    return model
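The body of _pns is not shown here. A common way this kind of preliminary neighborhood selection is implemented (a sketch under that assumption, using the sklearn classes named in the docstring) is to rank candidate parents of each variable with a tree ensemble and keep only the strongest ones:

import numpy as np
from sklearn.ensemble import ExtraTreesRegressor
from sklearn.feature_selection import SelectFromModel

def _pns_sketch(model_adj, all_samples, num_neighbors, thresh):
    """Hypothetical stand-in for _pns; the real implementation may differ."""
    num_nodes = model_adj.shape[0]
    for node in range(num_nodes):
        x_other = np.copy(all_samples)
        x_other[:, node] = 0                      # mask the target variable
        reg = ExtraTreesRegressor(n_estimators=500)
        reg.fit(x_other, all_samples[:, node])
        selector = SelectFromModel(reg,
                                   threshold="{}*mean".format(thresh),
                                   prefit=True,
                                   max_features=num_neighbors)
        mask = selector.get_support(indices=False).astype(float)
        model_adj[:, node] *= mask                # prune unlikely parents
    return model_adj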
Example #4
    def sample(self, batch_size):
        """
        sampling from self.dataset

        Parameters
        ----------
        batch_size: int
            batch size of sample

        Returns
        -------
        samples: mindspore.Tensor
            sample data after sampling
        ops.ones_like(samples): mindspore.Tensor
        """
        sample_idxs = self.random.choice(np.arange(int(self.n_samples)),
                                         size=(int(batch_size), ),
                                         replace=False)
        samples = self.data_set[MsTensor(sample_idxs, mstype.int32)]

        return samples, ops.ones_like(samples)
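A hedged usage example, assuming dataset is an instance of the class this method belongs to:

batch, weights = dataset.sample(batch_size=64)   # hypothetical instance name
print(batch.shape)    # (64, number_of_variables)
print(weights.shape)  # same shape as batch, filled with ones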
Example #5
def convert_to_ms_tensor(data):
    """Convert C++ tensor to mindspore tensor."""
    return MsTensor(data)

    def __init__(self, network):
        super(GradNetWrtX, self).__init__(auto_prefix=False)
        self.network = network
        self.grad_op = ops.GradOperation(sens_param=True)
        self.flag = MsTensor(True, dtype=mstype.bool_)
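The snippet stops at __init__; a construct method is not shown. A typical way to complete a GradOperation(sens_param=True) wrapper like this one (an assumption, not the original code) is to pass a sensitivity tensor shaped like the network output along with the inputs:

    def construct(self, x, grad_wrt_output):
        # With sens_param=True, the gradient function expects the network
        # inputs followed by a sensitivity tensor matching the output shape.
        gradient_function = self.grad_op(self.network)
        return gradient_function(x, grad_wrt_output)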