def __init__(self, config):
    """Load tree-meta, training, and inference hyper parameters from the
    global env into instance attributes."""
    ModelBase.__init__(self, config)

    # Local aliases keep every lookup on one readable line.
    env = envs.get_global_env
    ns = self._namespace

    # --- tree meta hyper parameters ---
    self.max_layers = env("tree_parameters.max_layers", 4, ns)
    self.node_nums = env("tree_parameters.node_nums", 26, ns)
    self.leaf_node_nums = env("tree_parameters.leaf_node_nums", 13, ns)
    self.output_positive = env("tree_parameters.output_positive", True, ns)
    self.layer_node_num_list = env("tree_parameters.layer_node_num_list",
                                   [2, 4, 7, 12], ns)
    self.child_nums = env("tree_parameters.child_nums", 2, ns)
    # The layer path lives under the startup section, not this namespace.
    self.tree_layer_path = env("tree.tree_layer_path", None, "train.startup")

    # --- model training hyper parameters ---
    self.node_emb_size = env("hyper_parameters.node_emb_size", 64, ns)
    self.input_emb_size = env("hyper_parameters.input_emb_size", 768, ns)
    self.act = env("hyper_parameters.act", "tanh", ns)
    self.neg_sampling_list = env("hyper_parameters.neg_sampling_list",
                                 [1, 2, 3, 4], ns)

    # --- model infer hyper parameters ---
    # NOTE(review): topK intentionally kept reading
    # "hyper_parameters.node_nums" to preserve behavior, but this looks
    # like a copy-paste of the key above — confirm it should not be
    # "hyper_parameters.topK".
    self.topK = env("hyper_parameters.node_nums", 1, ns)
    self.batch_size = env("batch_size", 1, "evaluate.reader")
def __init__(self, config):
    """Set the fixed text-CNN hyper parameters for this model."""
    ModelBase.__init__(self, config)
    # vocabulary size and maximum sequence length
    self.dict_dim, self.max_len = 100, 10
    # convolution window and number of filters
    self.cnn_dim, self.cnn_filter_size = 32, 128
    # embedding width, hidden width, and number of output classes
    self.emb_dim, self.hid_dim, self.class_dim = 8, 128, 2
def __init__(self, config):
    """Set the fixed text-CNN hyper parameters, plus the configurable
    sparse-embedding switch."""
    ModelBase.__init__(self, config)
    # vocabulary size and maximum sequence length
    self.dict_dim, self.max_len = 100, 10
    # convolution window and number of filters
    self.cnn_dim, self.cnn_filter_size = 32, 128
    # embedding width, hidden width, and number of output classes
    self.emb_dim, self.hid_dim, self.class_dim = 8, 128, 2
    # sparse embedding lookup is opt-in via config (defaults to dense)
    self.is_sparse = envs.get_global_env("hyper_parameters.is_sparse", False)
def __init__(self, config):
    """Read every model hyper parameter from this model's namespace in the
    global env (no defaults — missing keys come back as None)."""
    ModelBase.__init__(self, config)
    self.cost = None
    self.metrics = {}
    # Each hyper parameter is stored under an attribute of the same name;
    # all share the None default and this model's namespace.
    for key in ("vocab_text_size", "vocab_tag_size", "emb_dim", "hid_dim",
                "win_size", "margin", "neg_size"):
        setattr(self, key, envs.get_global_env(key, None, self._namespace))
def __init__(self, config):
    """Read every model hyper parameter from the ``hyper_parameters``
    section of the global env."""
    ModelBase.__init__(self, config)
    self.cost = None
    self.metrics = {}
    # Attribute name and config key suffix are identical, so build the
    # "hyper_parameters.<name>" key from the attribute name.
    for name in ("vocab_text_size", "vocab_tag_size", "emb_dim", "hid_dim",
                 "win_size", "margin", "neg_size"):
        setattr(self, name, envs.get_global_env("hyper_parameters." + name))
def __init__(self, config):
    """Build the layer-graph model from its YAML layer file.

    Loads the node definitions from ``config['layer_file']`` and
    initializes the bookkeeping dicts used by the later build phases.

    Args:
        config: dict-like model config; must provide 'name' and
            'layer_file' keys.
    """
    Model.__init__(self, config)
    self._config = config
    self._name = config['name']
    # fix: the original opened the layer file and never closed it,
    # leaking the handle — `with` guarantees deterministic close.
    with open(config['layer_file'], 'r') as f:
        self._build_nodes = yaml.safe_load(f.read())
    # Build phases run in this fixed order.
    self._build_phase = ['input', 'param', 'summary', 'layer']
    # Per-phase scratch space, filled in as the graph is constructed.
    self._build_param = {
        'layer': {},
        'inner_layer': {},
        'layer_extend': {},
        'model': {}
    }
    # Inference-time metadata: layer dependencies and parameter names.
    self._inference_meta = {'dependency': {}, 'params': {}}
def __init__(self, config):
    """Initialize by delegating entirely to ModelBase.

    This model defines no extra state of its own at construction time.
    """
    ModelBase.__init__(self, config)
def __init__(self, config):
    """Initialize via ModelBase, then run this model's own config hook.

    # NOTE(review): init_config is defined elsewhere in this class —
    # presumably it reads hyper parameters from the global env; confirm.
    """
    ModelBase.__init__(self, config)
    self.init_config()