Example #1
def threshold_calculator_func(network):
    # Noise Coefficient
    network.noiseCoefficientCalculator = DecayingParameter(
        name="noise_coefficient_calculator",
        value=0.0,
        decay=0.0,
        decay_period=1,
        min_limit=0.0)
    # Decision Loss Coefficient
    # network.decisionLossCoefficientCalculator = DiscreteParameter(name="decision_loss_coefficient_calculator",
    #                                                               value=0.0,
    #                                                               schedule=[(12000, 1.0)])
    network.decisionLossCoefficientCalculator = FixedParameter(
        name="decision_loss_coefficient_calculator", value=1.0)
    for node in network.topologicalSortedNodes:
        if node.isLeaf:
            continue
        # Probability Threshold
        node_degree = GlobalConstants.TREE_DEGREE_LIST[node.depth]
        initial_value = 1.0 / float(node_degree)
        threshold_name = network.get_variable_name(
            name="prob_threshold_calculator", node=node)
        node.probThresholdCalculator = DecayingParameter(name=threshold_name,
                                                         value=initial_value,
                                                         decay=0.8,
                                                         decay_period=12000,
                                                         min_limit=0.4)
        # Softmax Decay
        decay_name = network.get_variable_name(name="softmax_decay", node=node)
        node.softmaxDecayCalculator = DecayingParameter(
            name=decay_name,
            value=GlobalConstants.SOFTMAX_DECAY_INITIAL,
            decay=GlobalConstants.SOFTMAX_DECAY_COEFFICIENT,
            decay_period=GlobalConstants.SOFTMAX_DECAY_PERIOD,
            min_limit=GlobalConstants.SOFTMAX_DECAY_MIN_LIMIT)
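Example #1 relies on the repository's DecayingParameter and FixedParameter classes, which are not shown on this page. As a rough guide to what the constructor arguments imply, here is a minimal sketch assuming value is multiplied by decay every decay_period iterations and clipped at min_limit; the update(iteration) hook and attribute names are assumptions for illustration, not the repository's actual API.

class FixedParameter:
    def __init__(self, name, value):
        self.name = name
        self.value = value

    def update(self, iteration):
        # A fixed parameter keeps its value for the whole run.
        pass


class DecayingParameter(FixedParameter):
    def __init__(self, name, value, decay, decay_period, min_limit=0.0):
        super().__init__(name=name, value=value)
        self.decay = decay
        self.decayPeriod = decay_period
        self.minLimit = min_limit

    def update(self, iteration):
        # Assumed schedule: scale by `decay` once every `decay_period`
        # iterations, never dropping below `min_limit`.
        if iteration > 0 and iteration % self.decayPeriod == 0:
            self.value = max(self.value * self.decay, self.minLimit)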
Example #2
def create_parameter_from_train_program(parameter_name, train_program):
    value_dict = train_program.load_settings_for_property(property_name=parameter_name)
    if value_dict["type"] == "DecayingParameter":
        param_object = DecayingParameter.from_training_program(name=parameter_name, training_program=train_program)
    elif value_dict["type"] == "FixedParameter":
        param_object = FixedParameter.from_training_program(name=parameter_name, training_program=train_program)
    else:
        raise Exception("Unknown parameter type.")
    return param_object
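Example #3 calls this helper through UtilityFuncs, so it is presumably a static utility there. The dispatch assumes that load_settings_for_property returns a JSON-like dict whose "type" field names the parameter class; a hypothetical settings entry could look like the following (every field except "type" is an assumption for illustration).

example_value_dict = {
    "type": "DecayingParameter",  # selects DecayingParameter.from_training_program
    "value": 0.025,               # hypothetical initial value
    "decay": 0.5,                 # hypothetical decay factor
    "decay_period": 15000         # hypothetical decay interval in iterations
}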
Example #3
def create_global_input_drivers(self):
    # Branching probability
    self.globalInputDrivers[GlobalInputNames.branching_prob_threshold.value] = \
        UtilityFuncs.create_parameter_from_train_program(
            parameter_name=GlobalInputNames.branching_prob_threshold.value, train_program=self.trainProgram)
    # Batch Size
    self.globalInputDrivers[GlobalInputNames.batch_size.value] = \
        UtilityFuncs.create_parameter_from_train_program(
            parameter_name=GlobalInputNames.batch_size.value, train_program=self.trainProgram)
    # Evaluation Batch Size
    self.globalInputDrivers[GlobalInputNames.evaluation_batch_size.value] = \
        UtilityFuncs.create_parameter_from_train_program(
            parameter_name=GlobalInputNames.evaluation_batch_size.value, train_program=self.trainProgram)
    # Epoch Count
    self.globalInputDrivers[GlobalInputNames.epoch_count.value] = \
        UtilityFuncs.create_parameter_from_train_program(
            parameter_name=GlobalInputNames.epoch_count.value, train_program=self.trainProgram)
    # All hyperparameters related to parameter training
    hyper_parameter_set = {GlobalInputNames.wd.value, GlobalInputNames.lr_initial.value,
                           GlobalInputNames.momentum.value, GlobalInputNames.weight_update_interval.value,
                           GlobalInputNames.lr_update_interval.value, GlobalInputNames.lr_decay_ratio.value}
    for node in self.nodes.values():
        for parameter in node.parametersDict.values():
            if parameter.parameterType == parameterTypes.learnable_parameter:
                hyper_parameter_dict = {}
                for hyper_parameter in hyper_parameter_set:
                    parameter_hyper_parameter_name = parameter.get_property_name(property_=hyper_parameter)
                    value_dict = self.trainProgram.load_settings_for_property(property_name=hyper_parameter)
                    hyper_parameter_value = self.trainProgram.decode_json_element_for_parameter(
                        parameter_name=parameter_hyper_parameter_name, json_element=value_dict)
                    hyper_parameter_dict[hyper_parameter] = hyper_parameter_value
                # Decaying Parameter for the learning rate
                lr_hyper_param_name = parameter.get_property_name(property_=GlobalInputNames.lr.value)
                self.globalInputDrivers[lr_hyper_param_name] = \
                    DecayingParameter(name=lr_hyper_param_name,
                                      value=hyper_parameter_dict[GlobalInputNames.lr_initial.value],
                                      decay=hyper_parameter_dict[GlobalInputNames.lr_decay_ratio.value],
                                      decay_period=hyper_parameter_dict[GlobalInputNames.lr_update_interval.value])
                # Fixed Parameter for the weight decay (wd)
                wd_hyper_parameter_name = parameter.get_property_name(property_=GlobalInputNames.wd.value)
                self.globalInputDrivers[wd_hyper_parameter_name] = \
                    FixedParameter(name=wd_hyper_parameter_name,
                                   value=hyper_parameter_dict[GlobalInputNames.wd.value])
                # Fixed Parameter for the momentum value
                momentum_hyper_parameter_name = \
                    parameter.get_property_name(property_=GlobalInputNames.momentum.value)
                self.globalInputDrivers[momentum_hyper_parameter_name] = \
                    FixedParameter(name=momentum_hyper_parameter_name,
                                   value=hyper_parameter_dict[GlobalInputNames.momentum.value])
                # Fixed Parameter for the weight update interval
                weight_update_interval_parameter_name = \
                    parameter.get_property_name(property_=GlobalInputNames.weight_update_interval.value)
                self.globalInputDrivers[weight_update_interval_parameter_name] = \
                    FixedParameter(name=weight_update_interval_parameter_name,
                                   value=hyper_parameter_dict[GlobalInputNames.weight_update_interval.value])
    print("X")