Ejemplo n.º 1
0
def _broadcast_model(model, optimizer, backend):
    """Synchronize model and optimizer variables from rank 0 to every worker.

    In eager mode the broadcasts run immediately; in graph mode a single
    broadcast op is built and executed through the backend's session.
    """
    if not _executing_eagerly():
        # Graph mode (TF1): build the broadcast op once, run it in the session.
        sync_op = broadcast_variables(_global_variables(), root_rank=0)
        backend.get_session().run(sync_op)
        return
    # Eager mode (TF2): broadcast the model and optimizer variables directly.
    broadcast_variables(model.variables, root_rank=0)
    broadcast_variables(optimizer.variables(), root_rank=0)
Ejemplo n.º 2
0
    def __init__(self, variables=None, session=None, **kwargs):
        """Snapshot TensorFlow variable state and wire up broadcast helpers.

        Args:
            variables: variables to track; falls back to the global variables
                       when not supplied (falsy).
            session: session used for graph-mode evaluation; falls back to the
                     default session when not supplied (falsy).
            **kwargs: forwarded to the parent state class.
        """
        self.variables = variables if variables else _global_variables()
        self.session = session if session else _default_session()
        self._bcast_op = broadcast_variables(self.variables, root_rank=0)
        # Eager mode reads values straight to numpy; graph mode evaluates
        # variables through a session.
        if _executing_eagerly():
            self._eval_fn = self._to_numpy
        else:
            self._eval_fn = self._eval_var
        # TF2 assigns in place; TF1 loads values via the session.
        self._assign_fn = self._assign_var if _IS_TF2 else self._load_var
        self._save_model()

        if _executing_eagerly():
            bcast_obj = broadcast_object
        else:
            # NOTE: deliberately passes the `session` argument (possibly None),
            # not self.session, matching the original wiring.
            bcast_obj = broadcast_object_fn(session=session)

        def broadcast_object_with_session(obj):
            return bcast_obj(obj)

        super(TensorFlowState, self).__init__(bcast_object=broadcast_object_with_session,
                                              get_rank=rank,
                                              **kwargs)
Ejemplo n.º 3
0
    def broadcast_global_variables(root_rank):
        """Broadcast every global variable from ``root_rank`` to all other ranks.

        **NOTE:** deprecated in TensorFlow 2.0.

        Arguments:
            root_rank: rank of the process whose global variables are sent to
                       every other process.

        Raises:
            RuntimeError: when called under eager execution, which this
                deprecated entry point does not support.
        """
        if not _executing_eagerly():
            return broadcast_variables(_global_variables(), root_rank)
        raise RuntimeError(
            "hvd.broadcast_global_variables() does not support eager execution. "
            "Please use `hvd.broadcast_variables(<model/optimizer variables>)` instead."
        )
Ejemplo n.º 4
0
    def __init__(self, model, optimizer=None, backend=None, **kwargs):
        """Track a Keras model/optimizer pair and prepare broadcast hooks.

        Args:
            model: a built Keras model (``model.build(...)`` must have run).
            optimizer: optimizer to track; defaults (falsy) to ``model.optimizer``.
            backend: Keras backend used for graph-mode session access.
            **kwargs: forwarded to the parent state class.

        Raises:
            ValueError: if the model has not been built yet.
        """
        self.model = model
        if not _model_built(model):
            raise ValueError('Model must be built first. Run `model.build(input_shape)`.')

        self.optimizer = optimizer if optimizer else model.optimizer
        self.backend = backend
        self._save_model()

        eager_path = not backend or _executing_eagerly()
        if eager_path:
            self._bcast_model = lambda: _broadcast_model(self.model, self.optimizer, backend=self.backend)
            bcast_object = broadcast_object
        else:
            # TF1 graph mode: build one broadcast op up front and reuse it so
            # variable uids are not incremented on every broadcast.
            bcast_op = broadcast_variables(_global_variables(), root_rank=0)
            self._bcast_model = lambda: self.backend.get_session().run(bcast_op)
            bcast_object = broadcast_object_fn(session=self.backend.get_session())

        super(TensorFlowKerasState, self).__init__(bcast_object=bcast_object,
                                                   get_rank=rank,
                                                   **kwargs)
Ejemplo n.º 5
0
 def _broadcast_model(self):
     """Push rank 0's model and optimizer variables to every worker."""
     for var_collection in (self._model.variables, self._optimizer.variables()):
         broadcast_variables(var_collection, root_rank=0)
Ejemplo n.º 6
0
 def set_broadcast_variables(self, variables):
     """Record ``variables`` and build the broadcast op on first use only.

     Subsequent calls are no-ops once the broadcast op exists, so the op is
     created at most once.
     """
     if self._bcast_op is not None:
         return
     self._variables = variables
     self._bcast_op = broadcast_variables(self._variables, root_rank=0)