Example #1
0
File: schedule.py  Project: vcj-huy/lingvo
 def Value(self):
   """Returns decay ** floor(global_step / num_steps_per_decay).

   The schedule steps down by a factor of `p.decay` once every
   `p.num_steps_per_decay` global steps.
   """
   p = self.params
   # Number of whole decay periods completed so far.
   step = tf.cast(py_utils.GetGlobalStep(), tf.float32)
   completed_decays = tf.floor(tf.div(step, float(p.num_steps_per_decay)))
   return tf.pow(p.decay, completed_decays)
Example #2
0
def ComputeSplits(batch_size, num_splits):
    """Creates a tensor of size num_splits of number of values per split.

    Assigns each split floor(batch_size/num_splits) and round-robins
    the remainder (if any) to each split.

    Example::

      batch_size: [5]
      num_splits: 3
      returns: [2, 2, 1]

    Args:
      batch_size: tensor of rank 0, size of tensor to be split
      num_splits: number of splits to split tensor into
    Returns:
      tensor of length num_splits containing sizes of each split
    """
    # Base allocation: floor(batch_size / num_splits) for every split.
    # tf.math.floordiv replaces the removed-in-TF2 tf.div and matches the
    # tf.math.floormod call below; identical for the non-negative integer
    # operands used here.
    values = tf.tile(
        tf.math.floordiv([batch_size], num_splits),
        tf.constant([num_splits], dtype=tf.int32))
    # One extra element for each of the first (batch_size % num_splits) splits.
    mods = tf.tile(tf.constant([1]), tf.math.floormod([batch_size],
                                                      num_splits))
    # Pad the remainder markers with zeros so they line up with `values`.
    zeros = tf.tile(tf.constant([0]),
                    tf.subtract(tf.shape(values), tf.shape(mods)))
    mods = tf.concat([mods, zeros], 0)
    ret = tf.add(values, mods)
    # for some reason TF erases shape information if num_splits is 1
    if num_splits == 1:
        ret.set_shape([1])
    return ret
Example #3
0
 def Value(self, step=None):
   """Returns decay ** floor(step / num_steps_per_decay).

   Args:
     step: optional step override; resolved via self.GetStep.
   """
   p = self.params
   # Number of whole decay periods completed at the resolved step.
   current = tf.cast(self.GetStep(step), tf.float32)
   completed_decays = tf.floor(tf.div(current, float(p.num_steps_per_decay)))
   return tf.pow(p.decay, completed_decays)
Example #4
0
File: pruning.py  Project: snsun/lingvo
    def _get_sparsity(self, weight_name):
        """Returns target sparsity for the given layer/weight name."""
        # Collect every per-weight override whose regexp matches this name.
        matches = [
            sparsity
            for regexp, sparsity in self._weight_sparsity_map.items()
            if regexp.search(weight_name)
        ]
        if not matches:
            # No override configured; fall back to the global sparsity.
            return self._sparsity
        if len(matches) > 1:
            raise ValueError(
                'Multiple matches in weight_sparsity_map for weight %s' %
                weight_name)
        # TODO(suyoggupta): This will work when initial_sparsity = 0. Generalize
        # to handle other cases as well.
        scale = tf.div(matches[0], self._spec.target_sparsity)
        return tf.multiply(self._sparsity, scale)
Example #5
0
File: pruning.py  Project: snsun/lingvo
    def _setup_sparsity(self):
        """Builds the polynomially-interpolated sparsity schedule tensor."""
        spec = self._spec
        begin_step = spec.sparsity_function_begin_step
        end_step = spec.sparsity_function_end_step
        initial_sparsity = spec.initial_sparsity
        target_sparsity = spec.target_sparsity
        exponent = spec.sparsity_function_exponent

        with tf.name_scope(spec.name):
            # Fraction of the [begin_step, end_step] window completed,
            # clipped to [0, 1].
            elapsed = tf.cast(self._global_step - begin_step, tf.float32)
            p = tf.minimum(
                1.0,
                tf.maximum(0.0, tf.div(elapsed, end_step - begin_step)))
            # Interpolate from initial_sparsity toward target_sparsity as
            # (1 - p) ** exponent decays from 1 to 0.
            sparsity = tf.add(
                tf.multiply(initial_sparsity - target_sparsity,
                            tf.pow(1 - p, exponent)),
                target_sparsity,
                name='sparsity')

        return sparsity
Example #6
0
File: schedule.py  Project: leozz37/lingvo
 def FProp(self, theta, current_step):
   """Returns decay ** floor(current_step / num_steps_per_decay).

   Args:
     theta: layer weights (unused by this schedule).
     current_step: the step at which to evaluate the schedule.
   """
   p = self.params
   # Number of whole decay periods completed at current_step.
   step = tf.cast(current_step, tf.float32)
   completed_decays = tf.floor(tf.div(step, float(p.num_steps_per_decay)))
   return tf.pow(p.decay, completed_decays)