def populate_defaults(input_feature):
    """Fill in baseline defaults for a text-like input feature, then merge
    in any defaults declared by the selected encoder class.

    Mutates ``input_feature`` in place; already-present keys are kept.
    """
    set_default_values(
        input_feature,
        {TIED: None, "encoder": "parallel_cnn", "level": "word"},
    )

    # Look up the encoder class for this feature type and apply its own
    # default parameters, if it declares any.
    encoder_class = get_encoder_cls(input_feature["type"], input_feature["encoder"])
    if hasattr(encoder_class, "default_params"):
        set_default_values(input_feature, encoder_class.default_params)
def populate_defaults(output_feature):
    """Populate defaults for a binary-style output feature in place.

    Fills loss parameters, the decision threshold, and the dependency
    reduction settings when the user did not specify them.
    """
    # If Loss is not defined, set an empty dictionary
    set_default_value(output_feature, LOSS, {})
    # A single set_default_values call covers every loss key; the old code
    # redundantly repeated the same four key/value pairs with individual
    # set_default_value calls afterwards.
    set_default_values(
        output_feature[LOSS],
        {
            "robust_lambda": 0,
            "confidence_penalty": 0,
            "positive_class_weight": 1,
            "weight": 1,
        },
    )
    set_default_values(
        output_feature,
        {
            "threshold": 0.5,
            "dependencies": [],
            "reduce_input": SUM,
            "reduce_dependencies": SUM,
        },
    )
def populate_defaults(output_feature):
    """Populate defaults for a binary-style output feature in place.

    Fills loss parameters, the decision threshold, and the dependency
    reduction settings when the user did not specify them.
    """
    # If Loss is not defined, set an empty dictionary
    set_default_value(output_feature, LOSS, {})
    # The set_default_values call below already covers every loss key; the
    # previous version duplicated the same four defaults with individual
    # set_default_value calls, which have been removed.
    set_default_values(
        output_feature[LOSS],
        {
            'robust_lambda': 0,
            'confidence_penalty': 0,
            'positive_class_weight': 1,
            'weight': 1,
        },
    )
    set_default_values(
        output_feature,
        {
            'threshold': 0.5,
            'dependencies': [],
            'reduce_input': SUM,
            'reduce_dependencies': SUM,
        },
    )
def populate_defaults(input_feature):
    """Ensure the input feature carries the baseline defaults.

    Mutates ``input_feature`` in place; existing keys win.
    """
    defaults = {
        TIED: None,
        "encoder": "parallel_cnn",
    }
    set_default_values(input_feature, defaults)
def populate_defaults(input_feature):
    """Apply the default tied-weights and encoder settings in place.

    Keys already present in ``input_feature`` are left untouched.
    """
    feature_defaults = {
        TIED: None,
        'encoder': 'parallel_cnn',
    }
    set_default_values(input_feature, feature_defaults)
def populate_defaults(input_feature):
    """Default to untied weights and an empty preprocessing config.

    Mutates ``input_feature`` in place; user-specified values win.
    """
    set_default_values(input_feature, {TIED: None, 'preprocessing': {}})
def populate_defaults(input_feature):
    """Apply text-feature defaults, then merge in any defaults declared by
    the encoder class resolved from the feature's encoder registry.

    Mutates ``input_feature`` in place.
    """
    set_default_values(
        input_feature,
        {TIED: None, 'encoder': 'parallel_cnn', 'level': 'word'},
    )

    # Resolve the configured encoder and let it contribute its own defaults.
    encoder_class = get_from_registry(
        input_feature['encoder'], TextInputFeature.encoder_registry
    )
    if hasattr(encoder_class, 'default_params'):
        set_default_values(input_feature, encoder_class.default_params)
def _registry_class_defaults(registry_class):
    # Collect the class-level attributes of `registry_class` as a dict of
    # default parameter values (filtered through get_class_attributes).
    return {
        k: v
        for k, v in registry_class.__dict__.items()
        if k in get_class_attributes(registry_class)
    }


def update_hyperopt_params_with_defaults(hyperopt_params):
    """Fill ``hyperopt_params`` in place with defaults for the strategy,
    executor, split, output feature, metric and goal settings.

    The strategy/executor default extraction was duplicated inline twice;
    it is factored into ``_registry_class_defaults`` above.
    """
    set_default_value(hyperopt_params, STRATEGY, {})
    set_default_value(hyperopt_params, EXECUTOR, {})
    set_default_value(hyperopt_params, "split", VALIDATION)
    set_default_value(hyperopt_params, "output_feature", COMBINED)
    set_default_value(hyperopt_params, "metric", LOSS)
    set_default_value(hyperopt_params, "goal", MINIMIZE)

    # Strategy: default to random sampling, then pull in the defaults the
    # concrete sampler class declares.
    set_default_values(hyperopt_params[STRATEGY], {TYPE: "random"})
    strategy = get_from_registry(hyperopt_params[STRATEGY][TYPE], sampler_registry)
    set_default_values(hyperopt_params[STRATEGY], _registry_class_defaults(strategy))

    # Executor: default to serial execution, then pull in the defaults the
    # concrete executor class declares.
    set_default_values(hyperopt_params[EXECUTOR], {TYPE: "serial"})
    executor = get_from_registry(hyperopt_params[EXECUTOR][TYPE], executor_registry)
    set_default_values(hyperopt_params[EXECUTOR], _registry_class_defaults(executor))
def populate_defaults(output_feature):
    """Populate defaults for a numerical output feature in place."""
    # Start from an empty loss dict: the per-key defaults below handle both
    # a missing LOSS and a partially specified one, so there is no need to
    # repeat the full default dict at the top level (as the old code did).
    set_default_value(output_feature, LOSS, {})
    set_default_values(
        output_feature[LOSS],
        {TYPE: "mean_squared_error", "weight": 1},
    )
    set_default_values(
        output_feature,
        {
            "clip": None,
            "dependencies": [],
            "reduce_input": SUM,
            "reduce_dependencies": SUM,
        },
    )
def populate_defaults(output_feature):
    """Populate defaults for a numerical output feature in place."""
    # Use an empty dict as the top-level LOSS default: the per-key defaults
    # below cover both a missing and a partially specified loss, making the
    # previous duplicated full-dict default unnecessary.
    set_default_value(output_feature, LOSS, {})
    set_default_values(
        output_feature[LOSS],
        {TYPE: 'mean_squared_error', 'weight': 1},
    )
    set_default_values(
        output_feature,
        {
            'clip': None,
            'dependencies': [],
            'reduce_input': SUM,
            'reduce_dependencies': SUM,
        },
    )
def update_hyperopt_params_with_defaults(hyperopt_params):
    """Fill ``hyperopt_params`` in place with defaults, using the Ray
    executor when none is configured."""
    # NOTE(review): imported at function scope, presumably to avoid a
    # circular import at module load time — confirm before hoisting.
    from ludwig.hyperopt.execution import executor_registry

    set_default_value(hyperopt_params, EXECUTOR, {})
    set_default_value(hyperopt_params, "split", VALIDATION)
    set_default_value(hyperopt_params, "output_feature", COMBINED)
    set_default_value(hyperopt_params, "metric", LOSS)
    set_default_value(hyperopt_params, "goal", MINIMIZE)

    # Default executor type, then merge in the defaults declared as class
    # attributes on the resolved executor class.
    set_default_values(hyperopt_params[EXECUTOR], {TYPE: "ray"})
    executor = get_from_registry(hyperopt_params[EXECUTOR][TYPE], executor_registry)
    attribute_names = get_class_attributes(executor)
    executor_defaults = {
        name: value
        for name, value in executor.__dict__.items()
        if name in attribute_names
    }
    set_default_values(hyperopt_params[EXECUTOR], executor_defaults)
def populate_defaults(output_feature):
    """Populate defaults for a category output feature in place, including
    the extra sampling parameters used by sampled softmax cross entropy."""
    # If Loss is not defined, start from an empty dictionary.
    set_default_value(output_feature, LOSS, {})

    # Loss parameters the user did not specify.
    loss_defaults = {
        TYPE: 'softmax_cross_entropy',
        'labels_smoothing': 0,
        'class_weights': 1,
        'robust_lambda': 0,
        'confidence_penalty': 0,
        'class_similarities_temperature': 0,
        'weight': 1,
    }
    set_default_values(output_feature[LOSS], loss_defaults)

    # The sampled variant needs its own sampling configuration.
    if output_feature[LOSS][TYPE] == 'sampled_softmax_cross_entropy':
        sampled_defaults = {
            'sampler': 'log_uniform',
            'unique': False,
            'negative_samples': 25,
            'distortion': 0.75,
        }
        set_default_values(output_feature[LOSS], sampled_defaults)

    set_default_values(
        output_feature,
        {
            'top_k': 3,
            'dependencies': [],
            'reduce_input': SUM,
            'reduce_dependencies': SUM,
        },
    )
def populate_defaults(output_feature):
    """Populate defaults for a category output feature in place, adding the
    sampling parameters required when sampled softmax cross entropy is used."""
    # If Loss is not defined, start from an empty dictionary.
    set_default_value(output_feature, LOSS, {})

    # Fill in loss parameters the user did not specify.
    set_default_values(
        output_feature[LOSS],
        {
            TYPE: "softmax_cross_entropy",
            "labels_smoothing": 0,
            "class_weights": 1,
            "robust_lambda": 0,
            "confidence_penalty": 0,
            "class_similarities_temperature": 0,
            "weight": 1,
        },
    )

    # The sampled loss variant carries additional sampling settings.
    if output_feature[LOSS][TYPE] == "sampled_softmax_cross_entropy":
        set_default_values(
            output_feature[LOSS],
            {
                "sampler": "log_uniform",
                "unique": False,
                "negative_samples": 25,
                "distortion": 0.75,
            },
        )

    set_default_values(
        output_feature,
        {
            "top_k": 3,
            "dependencies": [],
            "reduce_input": SUM,
            "reduce_dependencies": SUM,
        },
    )
def populate_defaults(output_feature):
    """Populate defaults for a categorical output feature in place."""
    # If Loss is not defined, start from an empty dictionary.
    set_default_value(output_feature, LOSS, {})

    # Defaults for every loss parameter the user left unspecified.
    loss_defaults = {
        TYPE: "softmax_cross_entropy",
        "class_weights": 1,
        "robust_lambda": 0,
        "confidence_penalty": 0,
        "class_similarities_temperature": 0,
        "weight": 1,
    }
    set_default_values(output_feature[LOSS], loss_defaults)

    feature_defaults = {
        "top_k": 3,
        "dependencies": [],
        "reduce_input": SUM,
        "reduce_dependencies": SUM,
    }
    set_default_values(output_feature, feature_defaults)