Code Example #1
File: ada_sgd.py  Project: tchigher/KungFu
    def distributed_initializer(self):
        # Broadcast each variable from rank 0 so that every worker
        # starts from the same model state.
        bcast_ops = []
        for v in self.variables():
            bcast_ops.append(tf.assign(v, broadcast(v)))

        # Order the graph: run all broadcasts, then save the model,
        # then block on a global barrier before training begins.
        with tf.control_dependencies(bcast_ops):
            with tf.control_dependencies([self._save_model_op]):
                return barrier()
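
The nested tf.control_dependencies blocks enforce a strict order: the broadcasts run first, then the model save, and the returned barrier fires last, so no worker leaves the initializer early. A minimal standalone sketch of this TF1 graph-ordering pattern (illustrative variable names, not from KungFu):

import tensorflow as tf

a = tf.Variable(0)
step1 = tf.assign(a, 1)
with tf.control_dependencies([step1]):
    step2 = tf.assign_add(a, 10)   # runs only after step1
    with tf.control_dependencies([step2]):
        done = tf.no_op()          # runs only after step1 and step2

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    sess.run(done)
    print(sess.run(a))  # 11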
Code Example #2
    def _distributed_initializer(self):
        bcast_ops = [tf.assign(v, broadcast(v)) for v in tf.global_variables()]

        # FIXME: tf.trainable_variables() returns a TFOptimizer/iterations:0
        # variable when used with Keras; we need a better way to identify
        # trainable variables.
        # TODO: Can we decouple the distributed initializer from the tensor
        # store init?
        variables = tf.trainable_variables()
        variables = [
            v for v in variables if 'TFOptimizer/iterations' not in v.name
        ]

        with tf.control_dependencies(bcast_ops):
            with tf.control_dependencies([self._build_save_op(variables)]):
                return barrier()
Code Example #3
File: sync_sgd.py  Project: jingmouren/KungFu
    def _distributed_initializer(self):
        # Broadcast all global variables from rank 0 and group the
        # assignments into a single initializer op.
        ops = [tf.assign(v, broadcast(v)) for v in tf.global_variables()]
        return tf.group(ops)
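
All three variants are meant to run once, after local variable initialization and before the training loop, so that every replica starts from identical weights. A minimal TF1-style usage sketch (the session setup is hypothetical; it assumes broadcast is importable from kungfu.tensorflow.ops, as the snippets above suggest):

import tensorflow as tf
from kungfu.tensorflow.ops import broadcast

# ... build the model and optimizer here ...

# Same pattern as Code Example #3: overwrite each worker's variables
# with the values broadcast from rank 0.
init_sync = tf.group(
    [tf.assign(v, broadcast(v)) for v in tf.global_variables()])

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())  # local init first
    sess.run(init_sync)                          # then sync across workers
    # training loop follows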