Example #1
    def solve_inner(self, optimizer, data, num_epochs=1, batch_size=32):
        '''Solves local optimization problem'''
        if batch_size == 0:  # batch_size == 0 selects full-batch training
            batch_size = len(data['y'])

        for _ in trange(num_epochs, desc='Epoch: ', leave=False, ncols=120):
            for X, y in batch_data(data, batch_size):
                with self.graph.as_default():
                    self.sess.run(self.train_op,
                                  feed_dict={
                                      self.features: X,
                                      self.labels: y
                                  })
        soln = self.get_params()
        with self.graph.as_default():
            grad = self.sess.run(self.grads,
                                 feed_dict={
                                     self.features: data['x'],
                                     self.labels: data['y']
                                 })
        comp = num_epochs * \
            (len(data['y'])//batch_size) * batch_size * self.flops
        return soln, grad, comp
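
Every example in this listing iterates with a batch_data helper that is not shown. A minimal sketch of such a generator, assuming data is a dict of parallel lists (the shuffle flag matches its use in the test method of Example #2):

    import numpy as np

    def batch_data(data, batch_size, shuffle=True):
        """Hypothetical: yield (X, y) mini-batches from data = {'x': [...], 'y': [...]}."""
        x, y = np.asarray(data['x']), np.asarray(data['y'])
        idx = np.arange(len(y))
        if shuffle:
            np.random.shuffle(idx)
        for start in range(0, len(y), batch_size):
            batch = idx[start:start + batch_size]
            yield x[batch], y[batch]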
Example #2
 def solve_inner_support_query(self, data, client_id, round_i, num_epochs=1, batch_size=32, hide_output=False):
     """
     :param data:
     :param client_id:
     :param round_i:
     :param num_epochs:
     :param batch_size:
     :param hide_output:
     :return:
     """
     grads = []
     num_iter = 0
     with tqdm.trange(num_epochs, disable=hide_output) as t:
         for epoch in t:
             t.set_description(f'Client: {client_id}, Round: {round_i + 1}, Epoch: {epoch + 1}')
             for batch_idx, (X, y) in enumerate(batch_data(data, batch_size)):
                 with self.graph.as_default():
                     iter_grads, _ = self.sess.run([self.grads, self.train_op], feed_dict={self.features: X, self.labels: y})
                 num_iter += 1
                 grads.append(iter_grads)
     comp = num_epochs * (len(data['y']) // batch_size) * batch_size * self.flops
     # Average the gradients: sum across iterations, then divide by the iteration count.
     grads_sum = [np.zeros_like(g) for g in grads[0]]
     for i in grads:
         for j, grad in enumerate(i):
             grads_sum[j] += grad
     grads_mean = [g / num_iter for g in grads_sum]
     return grads_mean, comp
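 # Note: the accumulation above is equivalent to stacking each variable's
 # gradients across iterations and averaging along the iteration axis;
 # a one-line alternative (assuming `grads` is non-empty):
 #     grads_mean = [np.mean(np.stack(per_var), axis=0) for per_var in zip(*grads)]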
 def test(self, data):
     """
     基于某个数据集得到准确个数和平均的损失
     :param data:
     :return: 准确个数, 平均的损失
     """
     data_size = len(data['y'])
     if data_size <= 1000:
         return self._test_all(data)
     # Split the evaluation data into batches
     tot_correct, tot_loss, num_samples = 0, 0.0, 0
     for X, y in batch_data(data, batch_size=200, shuffle=False):
         # Convert X to a feature matrix and y to one-hot labels
         x_vecs = process_x(X)
         labels = process_y(y)
         num_sample = len(labels)
         with self.graph.as_default():
             correct, loss = self.sess.run(
                 [self.eval_metric_ops, self.loss],
                 feed_dict={
                     self.features: x_vecs,
                     self.labels: labels
                 })
         tot_correct += correct
         num_samples += num_sample
         # loss is the mean loss over this batch, so weight it by the batch size
         tot_loss += loss * num_sample
     return tot_correct, (tot_loss / num_samples)
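
A typical caller turns the pair returned by test() into an accuracy. A minimal usage sketch (the client and eval_data names are hypothetical):

    tot_correct, avg_loss = client.test(eval_data)
    accuracy = tot_correct / len(eval_data['y'])
    print(f'accuracy: {accuracy:.4f}, loss: {avg_loss:.4f}')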
Example #4
 def solve_inner(self, data, num_epochs=1, batch_size=32):
     '''Solves local optimization problem'''
     for _ in trange(num_epochs, desc='Epoch: ', leave=False, ncols=120):
         for X, y in batch_data(data, batch_size):
             with self.graph.as_default():
                 self.sess.run(self.train_op, feed_dict={self.features: X, self.labels: y})
     soln = self.get_params()
     comp = num_epochs * (len(data['y'])//batch_size) * batch_size * self.flops
     return soln, comp
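
Note that this comp estimate counts only samples that fall in full batches, because len(data['y']) // batch_size floors the batch count. A worked example of the arithmetic (toy numbers, flops set to 1):

    num_epochs, n, batch_size, flops = 1, 500, 32, 1
    comp = num_epochs * (n // batch_size) * batch_size * flops
    # 500 // 32 = 15 full batches -> 15 * 32 = 480 samples counted;
    # the final 20 samples are excluded from the estimate.
    print(comp)  # 480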
Example #5
File: svm.py Project: s-huu/ditto-1
 def solve_inner(self, data, num_epochs=1, batch_size=32):
     '''Solves local optimization problem'''
     for _ in range(num_epochs):
         for X, y in batch_data(data, batch_size):
             with self.graph.as_default():
                 # predictions are fetched alongside the train op but unused here
                 _, pred = self.sess.run([self.train_op, self.predictions],
                                         feed_dict={self.features: X, self.labels: y})
     soln = self.get_params()
     comp = num_epochs * (len(data['y'])//batch_size) * batch_size * self.flops
     return soln, comp
Example #6
 def solve_inner(self, data, num_epochs=1, batch_size=32):
     '''
     Args:
         data: dict of the form {'x': [list], 'y': [list]}
     Return:
         soln: list of np.ndarray weights, with each weight array
             corresponding to a variable in the graph
         comp: number of FLOPs computed while training given data
     '''
     
     for _ in trange(num_epochs, desc='Epoch: ', leave=False):
         for X, y in batch_data(data, batch_size):
             input_data = process_x(X, self.seq_len)
             target_data = process_y(y)
             with self.graph.as_default():
                 self.sess.run(self.train_op,
                     feed_dict={self.features: input_data, self.labels: target_data})
     soln = self.get_params()
     comp = num_epochs * (len(data['y'])//batch_size) * batch_size * self.flops
     return soln, comp
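
process_x and process_y are not shown in this listing; for a sequence model they typically index/pad the inputs and one-hot the labels. A purely hypothetical sketch of that shape (the 128-symbol vocabulary is an assumption):

    import numpy as np

    def process_x(raw_x, seq_len):
        """Hypothetical: map each string to a fixed-length list of char ids."""
        def encode(s):
            ids = [ord(c) % 128 for c in s[:seq_len]]  # toy char -> id map
            return ids + [0] * (seq_len - len(ids))    # right-pad to seq_len
        return np.array([encode(s) for s in raw_x])

    def process_y(raw_y, num_classes=128):
        """Hypothetical: one-hot encode integer labels."""
        y = np.asarray(raw_y, dtype=int)
        out = np.zeros((len(y), num_classes))
        out[np.arange(len(y)), y] = 1.0
        return out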
Example #7
    def solve_inner(self, data, client_id, round_i, num_epochs=1, batch_size=32, hide_output=False):
        """

        :param data:
        :param client_id:
        :param round_i:
        :param num_epochs:
        :param batch_size:
        :param hide_output:
        :return:
        """
        with tqdm.trange(num_epochs, disable=hide_output) as t:
            for epoch in t:
                t.set_description(f'Client: {client_id}, Round: {round_i + 1}, Epoch: {epoch + 1}')
                for batch_idx, (X, y) in enumerate(batch_data(data, batch_size)):
                    with self.graph.as_default():
                        self.sess.run(self.train_op,
                                      feed_dict={self.features: X, self.labels: y})
        soln = self.get_params()
        comp = num_epochs * (len(data['y']) // batch_size) * batch_size * self.flops
        return soln, comp
Example #8
 def solve_inner(self, data, num_epochs=1, batch_size=32):
     '''
     Args:
         data: dict of the form {'x': [list], 'y': [list]}
     Return:
         soln: trainable variables of the lstm model
         comp: number of FLOPs computed while training given data
     '''
     for _ in trange(num_epochs, desc='Epoch: ', leave=False):
         for X, y in batch_data(data, batch_size):
             input_data = process_x(X)
             target_data = process_y(y)
             with self.graph.as_default():
                 self.sess.run(self.train_op,
                               feed_dict={
                                   self.features: input_data,
                                   self.labels: target_data
                               })
     soln = self.get_params()
     comp = num_epochs * (len(data['y']) //
                          batch_size) * batch_size * self.flops
     return soln, comp
Example #9
    def solve_inner(self, optimizer, data, num_epochs=1, batch_size=32):
        '''Solves local optimization problem'''
        if batch_size == 0:  # batch_size == 0 selects full-batch training
            batch_size = len(data['y'])

        if(optimizer == "fedavg"):
            for _ in trange(num_epochs, desc='Epoch: ', leave=False, ncols=120):
                for X, y in batch_data(data, batch_size):
                    with self.graph.as_default():
                        self.sess.run(self.train_op, feed_dict={
                                      self.features: X, self.labels: y})

        if(optimizer == "fedprox" or optimizer == "fedsgd"):
            data_x, data_y = suffer_data(data)
            for _ in range(num_epochs):  # t = 1,2,3,4,5,...m
                X, y = get_random_batch_sample(data_x, data_y, batch_size)
                with self.graph.as_default():
                    self.sess.run(self.train_op, feed_dict={
                        self.features: X, self.labels: y})

        if(optimizer == "fedsarah" or optimizer == "fedsvrg"):
            data_x, data_y = suffer_data(data)

            wzero = self.get_params()
            w1 = wzero - self.optimizer._lr * np.array(self.vzero)
            w1 = prox_L2(np.array(w1), np.array(wzero), self.optimizer._lr, self.optimizer._lamb)
            self.set_params(w1)

            for e in range(num_epochs - 1):  # t = 1, 2, ..., m
                X, y = get_random_batch_sample(data_x, data_y, batch_size)
                with self.graph.as_default():
                    # get the current weight
                    if(optimizer == "fedsvrg"):
                        current_weight = self.get_params()
                    
                        # compute the gradient at wzero (fw0) first
                        self.set_params(wzero)
                        fwzero = self.sess.run(self.grads, feed_dict={self.features: X, self.labels: y})
                        self.optimizer.set_fwzero(fwzero, self)

                        # return the current weight to the model
                        self.set_params(current_weight)
                        self.sess.run(self.train_op, feed_dict={
                            self.features: X, self.labels: y})
                    elif(optimizer == "fedsarah"):
                        if(e == 0):
                            self.set_params(wzero)
                            grad_w0 = self.sess.run(self.grads, feed_dict={
                                                    self.features: X, self.labels: y})  # grad w0)
                            self.optimizer.set_preG(grad_w0, self)

                            self.set_params(w1)
                            preW = self.get_params()   # the previous iterate is w1

                            self.sess.run(self.train_op, feed_dict={
                                self.features: X, self.labels: y})
                        else:  # e > 0: use the stored previous iterate
                            curW = self.get_params()

                            # get previous grad
                            self.set_params(preW)
                            grad_preW = self.sess.run(self.grads, feed_dict={self.features: X, self.labels: y})  # gradient at the previous weights
                            self.optimizer.set_preG(grad_preW, self)
                            preW = curW

                            # restore the current weights, then take a training step
                            self.set_params(curW)
                            self.sess.run(self.train_op, feed_dict={self.features: X, self.labels: y})
        soln = self.get_params()
        comp = num_epochs * \
            (len(data['y'])//batch_size) * batch_size * self.flops
        return soln, comp
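
prox_L2 above is assumed to implement the closed-form proximal step for the quadratic penalty (lamb/2) * ||w - wzero||^2; a minimal sketch under that assumption:

    import numpy as np

    def prox_L2(w, wzero, lr, lamb):
        """Hypothetical: argmin_v (1/(2*lr)) * ||v - w||^2 + (lamb/2) * ||v - wzero||^2."""
        return (w + lr * lamb * wzero) / (1.0 + lr * lamb)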