import numpy as np
from sklearn.utils import check_random_state


def init(m_sample=100, n_feature=10):
    """Sample ``m_sample`` points from an ``n_feature``-dimensional Gaussian
    with unit means and identity covariance."""
    n = n_feature
    m = m_sample
    mean = [1] * n
    # Identity covariance: independent features with unit variance.
    cov = np.zeros((n, n))
    for i in range(cov.shape[1]):
        cov[i, i] = 1
    rdd = check_random_state(1)  # fixed seed -> reproducible samples
    X = rdd.multivariate_normal(mean, cov, m)
    return X
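
# Example usage (illustrative, not from the original source): the sampled
# matrix should have column means near 1 and a near-identity covariance.
#
#     X = init(m_sample=200, n_feature=5)
#     X.shape                           # (200, 5)
#     X.mean(axis=0)                    # each entry close to 1
#     np.cov(X, rowvar=False).round(1)  # close to the 5x5 identity
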
def __init__(self, samples, shape_x, random_state=None, method="poisson",
             inter=True, cut_zero=True, re_range=None, question="reg", rank=True):
    """
    Parameters
    ----------
    samples : int
        Number of samples to generate.
    shape_x : int or tuple of int
        Shape of each sample.
    random_state : None or int
        Seed for the random number generator.
    method : "poisson" or None
        Sampling method.
    inter : bool
        Integer values or not.
    cut_zero : bool
        Non-negative values or not.
    re_range : None or tuple of int
        Value range; default is (0, 1).
    question : "reg" or "clf"
        Regression or classification target.
    rank : bool
        Rank the values of y; only used when ``question`` is "reg".
    """
    if isinstance(shape_x, int):
        self.shape_x = (shape_x,)
    else:
        self.shape_x = shape_x
    self.rdd = check_random_state(random_state)
    self.random_state = random_state
    self.samples = samples
    self.method = method
    self.inter = inter
    self.cut_zero = cut_zero
    self.re_range = re_range
    self.question = question
    self.rank = rank
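
# Usage sketch (hypothetical: the enclosing class is not shown above, so
# `DataGenerator` is a placeholder name): a Poisson-based regression generator
# producing 500 non-negative integer samples of 8 features.
#
#     gen = DataGenerator(samples=500, shape_x=8, random_state=0,
#                         method="poisson", inter=True, cut_zero=True,
#                         question="reg", rank=True)
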
def add_noise(s, ratio):
    """Add uniform noise scaled to ``ratio`` of the signal's maximum value."""
    print(s.shape)
    rdd = check_random_state(1)  # fixed seed -> reproducible noise
    return s + rdd.random_sample(s.shape) * np.max(s) * ratio
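
# Example usage (illustrative, not from the original source): perturb a signal
# by up to 10% of its peak value; `random_sample` draws from [0, 1), so the
# added noise lies in [0, 0.1 * max(s)).
#
#     s = np.linspace(0.0, 1.0, 50)
#     s_noisy = add_noise(s, ratio=0.1)  # prints (50,), returns the noisy signal
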
def __init__(
        self, cols=None, return_df=True, lr=1e-2, max_epoch=25, A=10, B=5,
        dropout1=0.1, dropout2=0.1, random_state=1000, verbose=1, n_jobs=-1,
        class_weight=None, batch_size=1024, optimizer="adam", normalize=True,
        copy=True, budget=10
):
    self.budget = budget
    self.normalize = normalize
    self.copy = copy
    self.optimizer = optimizer
    self.batch_size = batch_size
    self.random_state = random_state
    self.class_weight = class_weight
    self.n_jobs = n_jobs
    self.verbose = verbose
    self.lr = lr
    self.dropout2 = dropout2
    self.dropout1 = dropout1
    self.B = B
    self.A = A
    self.max_epoch = max_epoch
    self.return_df = return_df
    self.drop_cols = []
    self.cols = cols
    self._dim = None
    self.feature_names = None
    self.model = None
    self.logger = get_logger(self)
    # Hyper-parameters forwarded to the entity-embedding network.
    self.nn_params = {
        "A": self.A,
        "B": self.B,
        "dropout1": self.dropout1,
        "dropout2": self.dropout2,
    }
    self.rng = check_random_state(self.random_state)
    self.trainer = TrainEntityEmbeddingNN(
        lr=self.lr,
        max_epoch=self.max_epoch,
        n_class=None,
        nn_params=self.nn_params,
        random_state=self.rng,
        batch_size=batch_size,
        optimizer=optimizer,
        n_jobs=self.n_jobs,
        class_weight=class_weight,
    )
    self.scaler = StandardScaler(copy=True)
    self.keep_going = False
    self.iter = 0
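
# Usage sketch (hypothetical: the enclosing class and its fit/transform API are
# not shown above, so `EntityEmbeddingEncoder` and `fit_transform` are
# placeholder names): encode categorical columns of a DataFrame with learned
# entity embeddings.
#
#     enc = EntityEmbeddingEncoder(cols=["city", "device"], lr=1e-2,
#                                  max_epoch=25, batch_size=1024,
#                                  optimizer="adam", random_state=1000)
#     X_emb = enc.fit_transform(X_train, y_train)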