def _get_feed_dict(self, xs, ys, *args, **kwargs):
    """Assemble the feed dict for one training/evaluation step.

    The regularization mode (``self.reg``) decides which extra value is
    fed alongside the data and targets:

    * ``'dropout'`` -- a keep probability, taken from the first
      positional argument or ``keep_prob=...``.
    * ``'l2'``      -- an l2 coefficient, taken from the first
      positional argument or ``l2_value=...``.
    * anything else -- only data and targets are fed.
    """
    feed = {self.data_placeholder: xs, self.target_placeholder: ys}
    if self.reg == 'dropout':
        kp = args[0] if args else kwargs['keep_prob']
        # Lazily cache a graph constant on first use (kept for parity
        # with the original behavior; not read inside this method —
        # presumably consumed elsewhere, TODO confirm).
        if not hasattr(self, '_keep_prob'):
            self._keep_prob = tfnn.constant(kp)
        feed[self.keep_prob_placeholder] = kp
    elif self.reg == 'l2':
        l2 = args[0] if args else kwargs['l2_value']
        if not hasattr(self, '_l2_value'):
            self._l2_value = tfnn.constant(l2)
        feed[self.l2_placeholder] = l2
    return feed
def set_learning_rate(self, lr, exp_decay=None):
    """Configure the learning rate, optionally with exponential decay.

    :param lr: base learning rate.
    :param exp_decay: a dict like
        dict(decay_steps=..., decay_rate=..., staircase=False, name=None)
        to enable exponential decay, otherwise None for a constant rate.
    :return: None (sets ``self._lr`` and registers a scalar summary).
    :raises KeyError: if ``exp_decay`` is a dict missing ``decay_steps``
        or ``decay_rate``.
    """
    if isinstance(exp_decay, dict):
        if 'decay_steps' not in exp_decay:
            raise KeyError(
                'Set decay_steps in exp_decay=dict(decay_steps)')
        if 'decay_rate' not in exp_decay:
            # BUG FIX: this message previously said "decay_steps",
            # pointing the user at the wrong missing key.
            raise KeyError('Set decay_rate in exp_decay=dict(decay_rate)')
        # Fill optional keys with their documented defaults.
        exp_decay.setdefault('staircase', False)
        exp_decay.setdefault('name', None)
        self._lr = tfnn.train.exponential_decay(
            lr, self.global_step,
            decay_steps=exp_decay['decay_steps'],
            decay_rate=exp_decay['decay_rate'],
            staircase=exp_decay['staircase'],
            name=exp_decay['name'])
    else:
        self._lr = tfnn.constant(lr)
    tfnn.scalar_summary('learning_rate', self._lr)
def __init__(self, network, ):
    """Build evaluation ops for the given network.

    For a classification network an ``accuracy`` op is created; for a
    regression network an ``r2_score`` (coefficient of determination)
    op is created.  Each metric is also registered as a scalar summary.

    :param network: a tfnn.ClfNetwork or tfnn.RegNetwork instance.
    """
    # NOTE(review): this file appears to contain a second, identical
    # __init__ definition later on; in a single class body the later
    # one silently wins — confirm and deduplicate.
    self.network = network
    if isinstance(self.network, tfnn.ClfNetwork):
        with tfnn.name_scope('accuracy'):
            with tfnn.name_scope('correct_prediction'):
                # Predicted class = argmax over axis 1 of the outputs;
                # compared against argmax of the targets (presumably
                # one-hot encoded — TODO confirm).
                correct_prediction = tfnn.equal(tfnn.argmax(network.predictions, 1),
                                                tfnn.argmax(network.target_placeholder, 1),
                                                name='correct_prediction')
            with tfnn.name_scope('accuracy'):
                # Mean of the 0/1 correctness vector = accuracy.
                self.accuracy = tfnn.reduce_mean(tfnn.cast(correct_prediction, tfnn.float32),
                                                 name='accuracy')
            tfnn.scalar_summary('accuracy', self.accuracy)
    elif isinstance(self.network, tfnn.RegNetwork):
        # First-time flags; not read in this constructor (presumably
        # used by plotting/monitoring methods elsewhere — verify).
        self.first_time_lm = True
        self.first_time_soc = True
        with tfnn.name_scope('r2_score'):
            with tfnn.name_scope('ys_mean'):
                # Per-output-column mean of the targets.
                ys_mean = tfnn.reduce_mean(network.target_placeholder,
                                           reduction_indices=[0],
                                           name='ys_mean')
            with tfnn.name_scope('total_sum_squares'):
                # SS_tot = sum((y - mean(y))^2) per column.
                ss_tot = tfnn.reduce_sum(tfnn.square(network.target_placeholder - ys_mean),
                                         reduction_indices=[0],
                                         name='total_sum_squares')
            # ss_reg = np.sum(np.square(predictions-ys_mean), axis=0)
            with tfnn.name_scope('residual_sum_squares'):
                # SS_res = sum((y - y_hat)^2) per column.
                ss_res = tfnn.reduce_sum(tfnn.square(network.target_placeholder - network.predictions),
                                         reduction_indices=[0],
                                         name='residual_sum_squares')
            with tfnn.name_scope('coefficient_of_determination'):
                # R^2 = 1 - SS_res / SS_tot; the [0] keeps only the
                # first output column (assumes single-output regression
                # — TODO confirm for multi-output networks).
                self.r2_score = tfnn.sub(tfnn.constant(1, dtype=tfnn.float32),
                                         (ss_res / ss_tot)[0],
                                         name='coefficient_of_determination')
            tfnn.scalar_summary('r2_score', self.r2_score)
def set_learning_rate(self, lr, exp_decay=None):
    """Set the learning rate, as a constant or with exponential decay.

    :param lr: base learning rate.
    :param exp_decay: dict(decay_steps=..., decay_rate=...,
        staircase=False, name=None) to enable exponential decay,
        otherwise None.
    :return: None (assigns ``self._lr`` and adds a scalar summary).
    :raises KeyError: when ``exp_decay`` is a dict but lacks
        ``decay_steps`` or ``decay_rate``.
    """
    if isinstance(exp_decay, dict):
        if 'decay_steps' not in exp_decay:
            raise KeyError('Set decay_steps in exp_decay=dict(decay_steps)')
        if 'decay_rate' not in exp_decay:
            # BUG FIX: the original message named decay_steps here,
            # misreporting which required key is missing.
            raise KeyError('Set decay_rate in exp_decay=dict(decay_rate)')
        # Optional keys default to staircase=False, name=None.
        exp_decay.setdefault('staircase', False)
        exp_decay.setdefault('name', None)
        self._lr = tfnn.train.exponential_decay(
            lr, self.global_step,
            decay_steps=exp_decay['decay_steps'],
            decay_rate=exp_decay['decay_rate'],
            staircase=exp_decay['staircase'],
            name=exp_decay['name'])
    else:
        self._lr = tfnn.constant(lr)
    tfnn.scalar_summary('learning_rate', self._lr)
def __init__( self, network, ):
    """Create evaluation ops (accuracy or R^2) for *network*.

    Classification networks get an ``accuracy`` op; regression
    networks get an ``r2_score`` op.  Both are registered as scalar
    summaries.

    :param network: a tfnn.ClfNetwork or tfnn.RegNetwork instance.
    """
    # NOTE(review): duplicates an earlier identical __init__ in this
    # file; if both live in one class body, Python keeps only the
    # later definition — confirm and remove the redundant copy.
    self.network = network
    if isinstance(self.network, tfnn.ClfNetwork):
        with tfnn.name_scope('accuracy'):
            with tfnn.name_scope('correct_prediction'):
                # Elementwise match between predicted class index and
                # target class index (targets presumably one-hot —
                # TODO confirm).
                correct_prediction = tfnn.equal(
                    tfnn.argmax(network.predictions, 1),
                    tfnn.argmax(network.target_placeholder, 1),
                    name='correct_prediction')
            with tfnn.name_scope('accuracy'):
                # Accuracy = mean of the boolean vector cast to float.
                self.accuracy = tfnn.reduce_mean(tfnn.cast(
                    correct_prediction, tfnn.float32), name='accuracy')
            tfnn.scalar_summary('accuracy', self.accuracy)
    elif isinstance(self.network, tfnn.RegNetwork):
        # Flags set here but not read in this constructor; presumably
        # consumed by later plotting helpers — verify.
        self.first_time_lm = True
        self.first_time_soc = True
        with tfnn.name_scope('r2_score'):
            with tfnn.name_scope('ys_mean'):
                # Column-wise mean of the regression targets.
                ys_mean = tfnn.reduce_mean(network.target_placeholder,
                                           reduction_indices=[0],
                                           name='ys_mean')
            with tfnn.name_scope('total_sum_squares'):
                # SS_tot = sum((y - mean(y))^2), per column.
                ss_tot = tfnn.reduce_sum(
                    tfnn.square(network.target_placeholder - ys_mean),
                    reduction_indices=[0], name='total_sum_squares')
            # ss_reg = np.sum(np.square(predictions-ys_mean), axis=0)
            with tfnn.name_scope('residual_sum_squares'):
                # SS_res = sum((y - prediction)^2), per column.
                ss_res = tfnn.reduce_sum(
                    tfnn.square(network.target_placeholder - network.predictions),
                    reduction_indices=[0], name='residual_sum_squares')
            with tfnn.name_scope('coefficient_of_determination'):
                # R^2 = 1 - SS_res/SS_tot; [0] selects the first output
                # column only (assumes single-output regression — TODO
                # confirm).
                self.r2_score = tfnn.sub(tfnn.constant(1, dtype=tfnn.float32),
                                         (ss_res / ss_tot)[0],
                                         name='coefficient_of_determination')
            tfnn.scalar_summary('r2_score', self.r2_score)
def _bias_variable(self, shape):
    """Create a trainable bias variable of the given shape.

    Biases are initialized to the small positive constant 0.1 (in the
    network's input dtype), named 'biases'.
    """
    init_value = tfnn.constant(0.1, shape=shape,
                               dtype=self.input_dtype, name='biases')
    return tfnn.Variable(init_value)