Example no. 1
0
    def _init_hyper_parameters(self):
        """Load model hyper-parameters from the global env configuration.

        Reads ``hyper_parameters.*`` keys via ``envs.get_global_env`` and
        derives distributed-training flags from the current fleet mode.
        No return value; all results are stored as instance attributes.
        """
        # Direct boolean expressions replace the original if-based flag
        # assignments — same truth values, less state mutation.
        self.is_distributed = envs.get_fleet_mode().upper() == "PSLIB"
        self.distributed_embedding = envs.get_global_env(
            "hyper_parameters.distributed_embedding", 0) == 1

        self.sparse_feature_number = envs.get_global_env(
            "hyper_parameters.sparse_feature_number")
        self.sparse_feature_dim = envs.get_global_env(
            "hyper_parameters.sparse_feature_dim")
        # NOTE(review): attribute is singular but the config key is plural
        # ("sparse_inputs_slots") — looks intentional, confirm against callers.
        self.sparse_inputs_slot = envs.get_global_env(
            "hyper_parameters.sparse_inputs_slots")
        self.dense_input_dim = envs.get_global_env(
            "hyper_parameters.dense_input_dim")
        self.learning_rate = envs.get_global_env(
            "hyper_parameters.optimizer.learning_rate")
        self.fc_sizes = envs.get_global_env("hyper_parameters.fc_sizes")
        self.use_embedding_gate = envs.get_global_env(
            "hyper_parameters.use_embedding_gate")
        self.use_hidden_gate = envs.get_global_env(
            "hyper_parameters.use_hidden_gate")
Example no. 2
0
 def _init_hyper_parameters(self):
     """Load model hyper-parameters from the global env configuration.

     Sets sparse-embedding sizes and the optimizer learning rate as
     instance attributes; ``is_distributed`` is True only under PSLIB
     fleet mode.
     """
     # `condition == value` already yields a bool; the original
     # `True if ... else False` wrapper was redundant.
     self.is_distributed = envs.get_fleet_mode().upper() == "PSLIB"
     self.sparse_feature_number = envs.get_global_env(
         "hyper_parameters.sparse_feature_number")
     self.sparse_feature_dim = envs.get_global_env(
         "hyper_parameters.sparse_feature_dim")
     self.learning_rate = envs.get_global_env(
         "hyper_parameters.optimizer.learning_rate")
Example no. 3
0
 def _init_hyper_parameters(self):
     """Load model hyper-parameters from the global env configuration.

     Reads embedding sizes, L2 regularization strength (default 1e-4)
     and the field count into instance attributes; ``is_distributed``
     is True only under PSLIB fleet mode.
     """
     # Comparison returns a bool directly — no ternary needed.
     self.is_distributed = envs.get_fleet_mode().upper() == "PSLIB"
     self.sparse_feature_number = envs.get_global_env(
         "hyper_parameters.sparse_feature_number", None)
     self.sparse_feature_dim = envs.get_global_env(
         "hyper_parameters.sparse_feature_dim", None)
     self.reg = envs.get_global_env("hyper_parameters.reg", 1e-4)
     self.num_field = envs.get_global_env("hyper_parameters.num_field",
                                          None)
Example no. 4
0
 def _init_hyper_parameters(self):
     """Load model hyper-parameters from the global env configuration.

     Reads embedding sizes, sparsity flag, regularization, field count
     and attention settings into instance attributes; ``is_distributed``
     is True only under PSLIB fleet mode.
     """
     # Comparison returns a bool directly — no ternary needed.
     self.is_distributed = envs.get_fleet_mode().upper() == "PSLIB"
     self.sparse_feature_number = envs.get_global_env(
         "hyper_parameters.sparse_feature_number", None)
     self.sparse_feature_dim = envs.get_global_env(
         "hyper_parameters.sparse_feature_dim", None)
     self.is_sparse = envs.get_global_env("hyper_parameters.is_sparse",
                                          False)
     self.reg = envs.get_global_env("hyper_parameters.reg", 1e-4)
     self.num_field = envs.get_global_env("hyper_parameters.num_field",
                                          None)
     self.hidden1_attention_size = envs.get_global_env(
         "hyper_parameters.hidden1_attention_size", 16)
     # NOTE(review): config key is "act" but attribute is attention_act —
     # presumably the activation for the attention layer; confirm.
     self.attention_act = envs.get_global_env("hyper_parameters.act",
                                              "relu")
Example no. 5
0
 def _init_hyper_parameters(self):
     """Load model hyper-parameters from the global env configuration.

     Reads embedding sizes, negative-sampling count, shuffle-batch flag
     and the optimizer's learning-rate decay schedule into instance
     attributes; ``is_distributed`` is True only under PSLIB fleet mode.
     """
     # Comparison returns a bool directly — no ternary needed.
     self.is_distributed = envs.get_fleet_mode().upper() == "PSLIB"
     self.sparse_feature_number = envs.get_global_env(
         "hyper_parameters.sparse_feature_number")
     self.sparse_feature_dim = envs.get_global_env(
         "hyper_parameters.sparse_feature_dim")
     self.neg_num = envs.get_global_env("hyper_parameters.neg_num")
     self.with_shuffle_batch = envs.get_global_env(
         "hyper_parameters.with_shuffle_batch")
     self.learning_rate = envs.get_global_env(
         "hyper_parameters.optimizer.learning_rate")
     self.decay_steps = envs.get_global_env(
         "hyper_parameters.optimizer.decay_steps")
     self.decay_rate = envs.get_global_env(
         "hyper_parameters.optimizer.decay_rate")
Example no. 6
0
 def _init_hyper_parameters(self):
     """Load model hyper-parameters from the global env configuration.

     Reads embedding sizes, deep-layer settings, regularization, field
     count and activation into instance attributes; ``is_distributed``
     is True only under PSLIB fleet mode.
     """
     # Comparison returns a bool directly — no ternary needed.
     self.is_distributed = envs.get_fleet_mode().upper() == "PSLIB"
     self.sparse_feature_number = envs.get_global_env(
         "hyper_parameters.sparse_feature_number", None)
     self.sparse_feature_dim = envs.get_global_env(
         "hyper_parameters.sparse_feature_dim", None)
     self.deep_input_size = envs.get_global_env(
         "hyper_parameters.deep_input_size", 50)
     self.use_inner_product = envs.get_global_env(
         "hyper_parameters.use_inner_product", None)
     # Config key "fc_sizes" is exposed here as layer_sizes.
     self.layer_sizes = envs.get_global_env("hyper_parameters.fc_sizes",
                                            None)
     self.reg = envs.get_global_env("hyper_parameters.reg", 1e-4)
     self.num_field = envs.get_global_env("hyper_parameters.num_field",
                                          None)
     self.act = envs.get_global_env("hyper_parameters.act", None)
Example no. 7
0
 def _init_hyper_parameters(self):
     """Load model hyper-parameters from the global env configuration.

     Reads embedding sizes plus CNN settings (filters, kernel/pooling
     sizes), batch-norm flag, FC layer sizes, field count and activation
     into instance attributes; ``is_distributed`` is True only under
     PSLIB fleet mode.
     """
     # Comparison returns a bool directly — no ternary needed.
     self.is_distributed = envs.get_fleet_mode().upper() == "PSLIB"
     self.sparse_feature_number = envs.get_global_env(
         "hyper_parameters.sparse_feature_number", None)
     self.sparse_feature_dim = envs.get_global_env(
         "hyper_parameters.sparse_feature_dim", None)
     self.is_sparse = envs.get_global_env("hyper_parameters.is_sparse",
                                          False)
     self.use_batchnorm = envs.get_global_env(
         "hyper_parameters.use_batchnorm", False)
     self.filters = envs.get_global_env("hyper_parameters.filters",
                                        [38, 40, 42, 44])
     self.filter_size = envs.get_global_env("hyper_parameters.filter_size",
                                            [1, 9])
     self.pooling_size = envs.get_global_env(
         "hyper_parameters.pooling_size", [2, 2, 2, 2])
     self.new_filters = envs.get_global_env("hyper_parameters.new_filters",
                                            [3, 3, 3, 3])
     # Config key "fc_sizes" is exposed here as hidden_layers (no default).
     self.hidden_layers = envs.get_global_env("hyper_parameters.fc_sizes")
     self.num_field = envs.get_global_env("hyper_parameters.num_field",
                                          None)
     self.act = envs.get_global_env("hyper_parameters.act", None)
Example no. 8
0
 def _init_hyper_parameters(self):
     """Load model hyper-parameters from the global env configuration.

     Reads embedding sizes, batch-norm/dropout settings, FC layer sizes,
     loss type (default 'logloss'), regularization, field count and
     activation into instance attributes; ``is_distributed`` is True
     only under PSLIB fleet mode.
     """
     # Comparison returns a bool directly — no ternary needed.
     self.is_distributed = envs.get_fleet_mode().upper() == "PSLIB"
     self.sparse_feature_number = envs.get_global_env(
         "hyper_parameters.sparse_feature_number", None)
     self.sparse_feature_dim = envs.get_global_env(
         "hyper_parameters.sparse_feature_dim", None)
     self.is_sparse = envs.get_global_env("hyper_parameters.is_sparse",
                                          False)
     self.use_batchnorm = envs.get_global_env(
         "hyper_parameters.use_batchnorm", False)
     self.use_dropout = envs.get_global_env("hyper_parameters.use_dropout",
                                            False)
     self.dropout_prob = envs.get_global_env(
         "hyper_parameters.dropout_prob", None)
     # Config key "fc_sizes" is exposed here as layer_sizes.
     self.layer_sizes = envs.get_global_env("hyper_parameters.fc_sizes",
                                            None)
     self.loss_type = envs.get_global_env("hyper_parameters.loss_type",
                                          'logloss')
     self.reg = envs.get_global_env("hyper_parameters.reg", 1e-4)
     self.num_field = envs.get_global_env("hyper_parameters.num_field",
                                          None)
     self.act = envs.get_global_env("hyper_parameters.act", None)