Example #1
0
 def __init__(self, category, learning_rate):
     """Build the categorical model and the per-(edge, op) statistics tables.

     NOTE(review): 'leaning_rate' and 'information_recoder' are misspelled
     attribute names kept as-is — external code may read them.
     """
     self.p_model = Categorical(categories=category)
     self.leaning_rate = learning_rate
     # One (d x Cmax) table per statistic: d edges, Cmax candidate ops.
     table_shape = (self.p_model.d, self.p_model.Cmax)
     self.information_recoder = {
         key: np.zeros(table_shape) for key in ('epoch', 'performance')
     }
Example #2
0
class CategoricalMDENAS:
    """MdeNAS-style controller: keeps a categorical distribution over
    candidate operations per edge, records per-(edge, op) epoch counts and
    performances, and shifts probability mass pairwise in ``update``.
    """

    def __init__(self, category, learning_rate):
        # NOTE(review): 'leaning_rate' / 'information_recoder' are misspelled
        # attribute names kept as-is — other code may read them.
        self.p_model = Categorical(categories=category)
        self.leaning_rate = learning_rate
        # One (d x Cmax) table per statistic: d edges, Cmax candidate ops.
        self.information_recoder = {
            'epoch': np.zeros((self.p_model.d, self.p_model.Cmax)),
            'performance': np.zeros((self.p_model.d, self.p_model.Cmax))
        }

    def sampling(self):
        """Delegate sampling to the underlying Categorical model."""
        return self.p_model.sampling()

    def sampling_index(self):
        """Delegate index sampling to the underlying Categorical model."""
        return self.p_model.sampling_index()

    def record_information(self, sample, performance):
        """Record one evaluation: for each edge i with chosen op sample[i],
        increment its epoch count and overwrite its stored performance with
        the latest scalar value."""
        for i in range(len(sample)):
            self.information_recoder['epoch'][i, sample[i]] += 1
            self.information_recoder['performance'][i, sample[i]] = performance

    def update(self):
        """Pairwise probability update (mutates self.p_model.theta in place).

        For every edge and op pair (i, j): if one op has been trained at
        least as many epochs as the other yet performs strictly worse, move
        ``leaning_rate`` of probability mass from the worse op to the better
        one — or all of its remaining mass when it has less than
        ``leaning_rate`` left.
        """

        # update the probability
        for edges_index in range(self.p_model.d):
            for i in range(self.p_model.Cmax):
                for j in range(i + 1, self.p_model.Cmax):
                    # i trained at least as long as j but performs worse:
                    # shift mass i -> j.
                    if (self.information_recoder['epoch'][edges_index, i] >= self.information_recoder['epoch'][edges_index, j])\
                            and (self.information_recoder['performance'][edges_index, i] < self.information_recoder['performance'][edges_index, j]):
                        if self.p_model.theta[edges_index,
                                              i] > self.leaning_rate:
                            self.p_model.theta[edges_index,
                                               i] -= self.leaning_rate
                            self.p_model.theta[edges_index,
                                               j] += self.leaning_rate
                        else:
                            # i has less than one step of mass left: give all to j.
                            self.p_model.theta[edges_index,
                                               j] += self.p_model.theta[
                                                   edges_index, i]
                            self.p_model.theta[edges_index, i] = 0

                    # Symmetric case (mutually exclusive with the branch
                    # above on the performance comparison): shift j -> i.
                    if (self.information_recoder['epoch'][edges_index, i] <= self.information_recoder['epoch'][edges_index, j]) \
                            and (self.information_recoder['performance'][edges_index, i] > self.information_recoder['performance'][edges_index, j]):
                        if self.p_model.theta[edges_index,
                                              j] > self.leaning_rate:
                            self.p_model.theta[edges_index,
                                               j] -= self.leaning_rate
                            self.p_model.theta[edges_index,
                                               i] += self.leaning_rate
                        else:
                            self.p_model.theta[edges_index,
                                               i] += self.p_model.theta[
                                                   edges_index, j]
                            self.p_model.theta[edges_index, j] = 0
Example #3
0
    def __init__(self,
                 categories,
                 fresh_size=4,
                 init_theta=None,
                 max_mize=True):
        """Initialise search state; theta is reset to a one-hot on
        category 0 in every dimension unless init_theta overrides it."""
        self.p_model = Categorical(categories)
        self.p_model.C = np.array(self.p_model.C)
        # Only dimensions offering more than one category are effective.
        self.valid_d = len(self.p_model.C[self.p_model.C > 1])

        # Reset theta: put all probability mass on category 0.
        for dim in range(self.p_model.d):
            self.p_model.theta[dim, 0] = 1
            self.p_model.theta[dim, 1:self.p_model.C[dim]] = 0

        if init_theta is not None:
            self.p_model.theta = init_theta

        self.fresh_size = fresh_size
        self.sample, self.objective = [], []
        # -1 flips objectives so maximisation is handled as minimisation.
        self.maxmize = -1 if max_mize else 1
        self.obj_optim = float('inf')
        self.training_finish = False

        # Pointer to the distribution point being moved.
        # NOTE(review): aliases theta (no copy) — mutations to theta show
        # through sample_point; presumably intentional, confirm.
        self.sample_point = self.p_model.theta
        self.point = [self.p_model.d - 1, 0]
Example #4
0
    def __init__(self, categories,
                 alpha=1.5, delta_init=1., lam=6,
                 Delta_max=np.inf, init_theta=None, max_mize=True):
        """Adaptive stochastic natural-gradient state over a categorical model."""
        # Total free-parameter count across all categorical dimensions.
        self.N = np.sum(np.array(categories) - 1)
        self.p_model = Categorical(categories)
        self.p_model.C = np.array(self.p_model.C)
        # Only dimensions with more than one category carry information.
        self.valid_d = len(self.p_model.C[self.p_model.C > 1])

        if init_theta is not None:
            self.p_model.theta = init_theta

        # Adaptive-SG hyper-parameters.
        self.alpha = alpha          # threshold for adaptation
        self.delta_init = delta_init
        self.lam = lam              # lambda_theta
        self.Delta_max = Delta_max  # cap on Delta (np.inf allowed)

        self.Delta = 1.
        self.gamma = 0.0            # correction factor
        self.s = np.zeros(self.N)   # averaged stochastic natural gradient
        self.delta = self.delta_init / self.Delta
        self.eps = self.delta

        self.sample, self.objective = [], []
        # -1 flips objectives so maximisation is handled as minimisation.
        self.max_mize = -1 if max_mize else 1
Example #5
0
    def __init__(self,
                 categories,
                 lam=-1,
                 delta_init=1.,
                 step=3,
                 pruning=True,
                 init_theta=None,
                 max_mize=True,
                 sample_with_prob=False,
                 utility_function='picewise',
                 utility_function_hyper=0.5,
                 momentum=True,
                 gamma=0.9,
                 sampling_number_per_edge=1,
                 dynamic_sampling=True):
        """Categorical-distribution optimizer state with dynamic sampling,
        pruning, and optional momentum on the theta updates."""
        self.p_model = Categorical(categories)
        self.lam = lam
        self.p_model.C = np.array(self.p_model.C)
        # Only dimensions with more than one category are effective.
        self.valid_d = len(self.p_model.C[self.p_model.C > 1])

        if init_theta is not None:
            self.p_model.theta = init_theta

        self.delta = delta_init
        self.eps = self.delta

        self.sample, self.objective = [], []
        # -1 flips objectives so maximisation is handled as minimisation.
        self.max_mize = -1 if max_mize else 1

        # Dynamic-distribution bookkeeping.
        self.sample_with_prob = sample_with_prob
        self.ignore_index, self.sample_index, self.pruned_index = [], [], []
        self.pruning = pruning
        self.steps, self.current_step = step, 1
        self.training_finish = False
        self.utility_function = utility_function
        self.utility_function_hyper = utility_function_hyper
        # NOTE(review): capitalised attribute name kept — read elsewhere.
        self.Momentum = momentum
        self.gamma = gamma
        self.velocity = np.zeros(self.p_model.theta.shape)
        self.init_record()
        self.sampling_number_per_edge = sampling_number_per_edge
        self.dynamic_sampling = dynamic_sampling
Example #6
0
    def __init__(self,
                 categories,
                 alpha=1.5,
                 delta_init=1.,
                 lam=6,
                 step=3,
                 pruning=True,
                 Delta_max=np.inf,
                 init_theta=None,
                 max_mize=True,
                 sample_with_prob=False):
        """Adaptive stochastic natural-gradient state plus dynamic-pruning
        bookkeeping over a categorical distribution."""
        # Total free-parameter count across all categorical dimensions.
        self.N = np.sum(np.array(categories) - 1)
        self.p_model = Categorical(categories)
        self.p_model.C = np.array(self.p_model.C)
        # Only dimensions with more than one category carry information.
        self.valid_d = len(self.p_model.C[self.p_model.C > 1])

        if init_theta is not None:
            self.p_model.theta = init_theta

        # Adaptive-SG hyper-parameters.
        self.alpha = alpha          # threshold for adaptation
        self.delta_init = delta_init
        self.lam = lam              # lambda_theta
        self.Delta_max = Delta_max  # cap on Delta (np.inf allowed)

        self.Delta = 1.
        self.gamma = 0.0            # correction factor
        self.s = np.zeros(self.N)   # averaged stochastic natural gradient
        self.delta = self.delta_init / self.Delta
        self.eps = self.delta

        self.sample, self.objective = [], []
        # -1 flips objectives so maximisation is handled as minimisation.
        self.max_mize = -1 if max_mize else 1

        # Dynamic-distribution bookkeeping.
        self.sample_with_prob = sample_with_prob
        self.ignore_index, self.sample_index, self.pruned_index = [], [], []
        self.pruning = pruning
        self.steps, self.current_step = step, 1
        self.training_finish = False
        self.init_record()
Example #7
0
    def __init__(self, categories, delta_init=1., lam=2, init_theta=None, max_mize=True):
        """Natural stochastic-gradient state over a categorical distribution."""
        self.p_model = Categorical(categories)
        self.p_model.C = np.array(self.p_model.C)
        # Only dimensions with more than one category are effective.
        self.valid_d = len(self.p_model.C[self.p_model.C > 1])

        if init_theta is not None:
            self.p_model.theta = init_theta

        # Natural-SG hyper-parameters.
        self.delta = delta_init
        self.lam = lam  # lambda_theta
        self.eps = self.delta
        self.sample, self.objective = [], []
        # -1 flips objectives so maximisation is handled as minimisation.
        self.maxmize = -1 if max_mize else 1
Example #8
0
    def __init__(self,
                 categories,
                 delta_init=1.,
                 opt_type="best",
                 init_theta=None,
                 max_mize=True):
        """State for a categorical-distribution optimizer that tracks the
        best objective seen under the chosen selection strategy."""
        self.p_model = Categorical(categories)
        self.p_model.C = np.array(self.p_model.C)

        if init_theta is not None:
            self.p_model.theta = init_theta

        self.sample_list, self.obj_list = [], []
        # -1 flips objectives so maximisation is handled as minimisation.
        self.max_mize = -1 if max_mize else 1

        self.select = opt_type
        # Sentinel "worst so far" objective under the sign convention above.
        self.best_object = 1e10 * self.max_mize
Example #9
0
 def __init__(self, category, steps, gamma=0.8):
     """Initialise pruning-based controller state.

     category: per-edge category sizes for the Categorical model.
     steps: how many steps before pruning the distribution.
     gamma: momentum coefficient.

     Bug fix: ``gamma`` was previously ignored — the attribute was
     hard-coded to the literal 0.8 regardless of the argument. It is now
     honoured; the default value (0.8) is unchanged, so existing callers
     that relied on the default see identical behaviour.
     """
     self.p_model = Categorical(categories=category)
     # how many steps to pruning the distribution
     self.steps = steps
     self.current_step = 1
     self.ignore_index = []
     self.sample_index = []
     self.pruned_index = []
     self.val_performance = []
     self.sample = []
     self.init_record()
     self.score_decay = 0.5
     self.learning_rate = 0.2
     self.training_finish = False
     self.training_epoch = self.get_training_epoch()
     # Hard-coded operation-index partitions; presumably a DARTS-style op
     # list where ops 0-2 and 7 are parameter-free — TODO confirm.
     self.non_param_index = [0, 1, 2, 7]
     self.param_index = [3, 4, 5, 6]
     self.non_param_index_num = len(self.non_param_index)
     # NOTE(review): counts param_index despite the 'pruned' name — confirm.
     self.pruned_index_num = len(self.param_index)
     self.non_param_index_count = [0] * self.p_model.d
     self.param_index_count = [0] * self.p_model.d
     self.gamma = gamma  # fix: use the parameter instead of the literal 0.8
     self.velocity = np.zeros(self.p_model.theta.shape)