def _create_strategy_and_mid_level(self, optimizer_name):
  """Creates a TPU strategy and a mid level embedding API under its scope.

  Args:
    optimizer_name: One of 'sgd', 'adagrad', 'adam', 'ftrl' or
      'adagrad_momentum', selecting which embedding optimizer to construct.

  Returns:
    A `(strategy, mid_level_api, optimizer)` tuple.

  Raises:
    ValueError: If `optimizer_name` is not one of the recognized names.
  """
  strategy = self._get_strategy()
  with strategy.scope():
    if optimizer_name == 'sgd':
      optimizer = tpu_embedding_v2_utils.SGD(learning_rate=0.1)
    elif optimizer_name == 'adagrad':
      optimizer = tpu_embedding_v2_utils.Adagrad(learning_rate=0.1)
    elif optimizer_name == 'adam':
      optimizer = tpu_embedding_v2_utils.Adam(learning_rate=0.1)
    elif optimizer_name == 'ftrl':
      optimizer = tpu_embedding_v2_utils.FTRL(learning_rate=0.1)
    elif optimizer_name == 'adagrad_momentum':
      optimizer = tpu_embedding_v2_utils.AdagradMomentum(
          learning_rate=0.1,
          momentum=0.9,
          use_nesterov=True,
          exponent=3.0,
          epsilon=0.1,
          beta2=0.9)
    else:
      # Bug fix: the original passed two arguments to ValueError, which
      # renders the message as a tuple; format a single string instead.
      raise ValueError(f'optimizer is not recognized: {optimizer_name}')
    mid_level_api = self._create_mid_level(optimizer=optimizer)
  return strategy, mid_level_api, optimizer
def create_strategy_and_mid_level(optimizer_name):
  """Creates a TPU strategy and a mid level embedding API under its scope.

  Args:
    optimizer_name: One of 'sgd', 'adagrad' or 'adam', selecting which
      embedding optimizer to construct.

  Returns:
    A `(strategy, embedding, optimizer)` tuple.

  Raises:
    ValueError: If `optimizer_name` is not one of the recognized names.
  """
  strategy = get_strategy()
  with strategy.scope():
    if optimizer_name == 'sgd':
      optimizer = tpu_embedding_v2_utils.SGD(learning_rate=0.1)
    elif optimizer_name == 'adagrad':
      optimizer = tpu_embedding_v2_utils.Adagrad(learning_rate=0.1)
    elif optimizer_name == 'adam':
      optimizer = tpu_embedding_v2_utils.Adam(learning_rate=0.1)
    else:
      # Bug fix: the original passed two arguments to ValueError, which
      # renders the message as a tuple; format a single string instead.
      raise ValueError(f'optimizer is not recognized: {optimizer_name}')
    embedding = create_mid_level(optimizer=optimizer)
  return strategy, embedding, optimizer
def _create_strategy_and_mid_level(self, optimizer_name):
  """Creates a TPU strategy and a mid level embedding API under its scope.

  Args:
    optimizer_name: One of 'sgd', 'adagrad', 'adam' or 'ftrl', selecting
      which embedding optimizer to construct.

  Returns:
    A `(strategy, mid_level_api, optimizer)` tuple.

  Raises:
    ValueError: If `optimizer_name` is not one of the recognized names.
  """
  strategy = self._get_strategy()
  with strategy.scope():
    if optimizer_name == 'sgd':
      optimizer = tpu_embedding_v2_utils.SGD(learning_rate=0.1)
    elif optimizer_name == 'adagrad':
      optimizer = tpu_embedding_v2_utils.Adagrad(learning_rate=0.1)
    elif optimizer_name == 'adam':
      optimizer = tpu_embedding_v2_utils.Adam(learning_rate=0.1)
    elif optimizer_name == 'ftrl':
      optimizer = tpu_embedding_v2_utils.FTRL(learning_rate=0.1)
    else:
      # Bug fix: the original passed two arguments to ValueError, which
      # renders the message as a tuple; format a single string instead.
      raise ValueError(f'optimizer is not recognized: {optimizer_name}')
    mid_level_api = self._create_mid_level(optimizer=optimizer)
  return strategy, mid_level_api, optimizer
def _create_strategy_and_mid_level(self, optimizer_name):
  """Creates a TPU strategy and a mid level embedding API under its scope.

  Builds a *pair* of optimizers: the Keras optimizer that the test trains
  with, and the matching embedding optimizer whose slot variables are
  created through the Keras optimizer via a slot_variable_creation_fn.

  Args:
    optimizer_name: One of 'sgd', 'adagrad', 'adam' or 'ftrl', selecting
      which optimizer pair to construct.

  Returns:
    A `(strategy, mid_level_api, optimizer)` tuple where `optimizer` is the
    Keras optimizer (the embedding optimizer is held by `mid_level_api`).

  Raises:
    ValueError: If `optimizer_name` is not one of the recognized names.
  """
  strategy = self._get_strategy()
  # Keras optimizers have to be translated to embedding optimizers with the
  # slot variable creation fn properly populated.
  with strategy.scope():
    if optimizer_name == 'sgd':
      optimizer = optimizer_v2.gradient_descent.SGD(learning_rate=0.1)
      # SGD keeps no slot variables, so no creation fn is needed.
      embedding_optimizer = tpu_embedding_v2_utils.SGD(learning_rate=0.1)
    elif optimizer_name == 'adagrad':
      optimizer = optimizer_v2.adagrad.Adagrad(learning_rate=0.1)
      embedding_optimizer = tpu_embedding_v2_utils.Adagrad(
          learning_rate=0.1,
          slot_variable_creation_fn=self._get_slot_variable_creation_fn(
              optimizer))
    elif optimizer_name == 'adam':
      optimizer = optimizer_v2.adam.Adam(learning_rate=0.1)
      embedding_optimizer = tpu_embedding_v2_utils.Adam(
          learning_rate=0.1,
          slot_variable_creation_fn=self._get_slot_variable_creation_fn(
              optimizer))
    elif optimizer_name == 'ftrl':
      optimizer = optimizer_v2.ftrl.Ftrl(learning_rate=0.1)
      embedding_optimizer = tpu_embedding_v2_utils.FTRL(
          learning_rate=0.1,
          slot_variable_creation_fn=self._get_slot_variable_creation_fn(
              optimizer))
    else:
      # Bug fix: the original passed two arguments to ValueError, which
      # renders the message as a tuple; format a single string instead.
      raise ValueError(f'optimizer is not recognized: {optimizer_name}')
    mid_level_api = self._create_mid_level(optimizer=embedding_optimizer)
  return strategy, mid_level_api, optimizer