Example #1
    def _build_optimizer(self):
        """Creates the optimizer"""
        optimizer = getters.get_optimizer(
            self.optimizer_config.module,
            learning_rate=self.optimizer_config.learning_rate,
            decay_type=self.optimizer_config.decay_type,
            decay_steps=self.optimizer_config.decay_steps,
            decay_rate=self.optimizer_config.decay_rate,
            start_decay_at=self.optimizer_config.start_decay_at,
            stop_decay_at=self.optimizer_config.stop_decay_at,
            min_learning_rate=self.optimizer_config.min_learning_rate,
            staircase=self.optimizer_config.staircase,
            **self.optimizer_config.params)

        # Optionally wrap with SyncReplicasOptimizer
        if self.optimizer_config.sync_replicas > 0:
            optimizer = tf.train.SyncReplicasOptimizer(
                opt=optimizer,
                replicas_to_aggregate=self.optimizer_config.sync_replicas_to_aggregate,
                total_num_replicas=self.optimizer_config.sync_replicas)
            # This is really ugly, but we need to do this to make the optimizer
            # accessible outside of the model.
            configs.SYNC_REPLICAS_OPTIMIZER = optimizer

        return optimizer
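In Example #1, when sync_replicas is greater than zero the base optimizer is wrapped in tf.train.SyncReplicasOptimizer, which accumulates gradients from several worker replicas and applies a single averaged update. A minimal sketch of that wrapping step in isolation is shown below (TF 1.x API); the GradientDescentOptimizer and the replica counts are placeholders for values the method reads from self.optimizer_config.

import tensorflow as tf

# Placeholder base optimizer; the original obtains it via getters.get_optimizer.
base_optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.1)

# Wrap it so gradients from several worker replicas are aggregated before
# a single update is applied (the counts here are illustrative).
optimizer = tf.train.SyncReplicasOptimizer(
    opt=base_optimizer,
    replicas_to_aggregate=4,   # how many worker gradients to average per step
    total_num_replicas=4)      # total number of worker replicas in the cluster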
Example #2
    def _build_optimizer(self):
        """Creates the optimizer"""
        optimizer = getters.get_optimizer(self.optimizer.IDENTIFIER,
                                          **self.optimizer.to_dict())

        # TODO: use the _SyncReplicasOptimizerHook
        # # Optionally wrap with SyncReplicasOptimizer
        # if self.optimizer.sync_replicas > 0:
        #     optimizer = tf.train.SyncReplicasOptimizer(
        #         opt=optimizer,
        #         replicas_to_aggregate=self.optimizer.sync_replicas_to_aggregate,
        #         total_num_replicas=self.optimizer.sync_replicas)
        #     # This is really ugly, but we need to do this to make the optimizer
        #     # accessible outside of the model.
        #     configs.SYNC_REPLICAS_OPTIMIZER = optimizer

        return optimizer
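Example #2 leaves the sync-replica wrapping commented out and points to a _SyncReplicasOptimizerHook instead. The sketch below shows one way such a hook-based setup could look, using the stock make_session_run_hook of tf.train.SyncReplicasOptimizer; this is an assumption about the intended approach, not the project's actual _SyncReplicasOptimizerHook.

import tensorflow as tf

def build_sync_optimizer_and_hook(base_optimizer, num_workers, is_chief):
    """Wraps base_optimizer for synchronous training and exposes a hook.

    The hook replaces the module-level global used in Example #1: instead of
    stashing the optimizer in configs.SYNC_REPLICAS_OPTIMIZER, the caller
    passes the returned hook to tf.train.MonitoredTrainingSession(hooks=[...]).
    """
    sync_optimizer = tf.train.SyncReplicasOptimizer(
        opt=base_optimizer,
        replicas_to_aggregate=num_workers,
        total_num_replicas=num_workers)
    # make_session_run_hook takes care of the queue-runner and token
    # bookkeeping that synchronous replica training needs.
    hook = sync_optimizer.make_session_run_hook(is_chief)
    return sync_optimizer, hook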