def test_default(config, expected):
    """test parsing of model

    Parametrized three ways (driven by the type of ``expected``):
      - dict: ``parse_default(config, MODEL)`` must produce a matching config;
        on a top-level mismatch, drill into each layer to emit a targeted
        assertion message pinpointing which option/attribute differs.
      - ValueError / TypeError instance: parsing must raise that exception.

    NOTE(review): this module defines several functions named ``test_default``;
    in a single file the later definitions shadow earlier ones — presumably
    these originate from separate test modules. Verify before merging.
    """
    if isinstance(expected, dict):
        formatted_config = parse_default(config, MODEL)
        try:
            # Fast path: whole-config equality. The except branch below only
            # exists to produce a more specific failure message.
            assert expected == formatted_config
        except AssertionError:
            # Walk each parsed layer and compare piecewise so the failure
            # names the offending layer and option.
            for k, d in formatted_config["model"]["layers"].items():
                for opt in ["user_vals"]:
                    try:
                        # Identity check first (`is`): user_vals may hold
                        # objects where identity is the intended comparison.
                        assert (d["options"][opt] is
                                expected["model"]["layers"][k]["options"][opt]
                                ), f"layer {k} does not have matching {opt}"
                    except AssertionError:
                        # Not the same object: compare element-by-element.
                        for i, a in enumerate(d["options"][opt]):
                            b = expected["model"]["layers"][k]["options"][opt][i]
                            try:
                                assert (
                                    a is b
                                ), f"layer {k} does not have matching {opt} for {a} != {b}"
                            except AssertionError:
                                # Regularizer/initializer instances can't be
                                # compared by identity or equality across
                                # constructions — fall back to a class check.
                                if issubclass(
                                        type(b),
                                        tf.keras.regularizers.Regularizer):
                                    # TODO: implement more in depth check
                                    assert issubclass(
                                        type(a),
                                        tf.keras.regularizers.Regularizer)
                                elif issubclass(
                                        type(b),
                                        tf.keras.initializers.Initializer):
                                    # TODO: implement more in depth check
                                    assert issubclass(
                                        type(a),
                                        tf.keras.initializers.Initializer)
                                else:
                                    # Plain values: equality is the contract.
                                    assert (
                                        a == b
                                    ), f"{opt} in layer {k} does not match: {a} != {b}"
                # Callable metadata must match exactly.
                for opt in ["func", "func_args", "func_defaults"]:
                    assert (d["layer_base"][opt] == expected["model"]["layers"]
                            [k]["layer_base"][opt]
                            ), f"layer {k} does not have matching {opt}"
                # Layer wiring (input-name) must match exactly.
                for opt in ["layer_in_name"]:
                    # print(d[opt])
                    assert (d[opt] == expected["model"]["layers"][k][opt]
                            ), f"layer {k} does not have matching {opt}"
    elif isinstance(expected, ValueError):
        with pytest.raises(ValueError):
            formatted_config = parse_default(config, MODEL)
    elif isinstance(expected, TypeError):
        with pytest.raises(TypeError):
            formatted_config = parse_default(config, MODEL)
def test_default(config, expected):
    """test parsing of performance

    The type of ``expected`` selects the check: a dict means parsing must
    succeed and equal it; a ValueError/TypeError instance means parsing
    must raise that exception type.
    """
    if isinstance(expected, dict):
        # Success path: parsed config must equal the expected mapping.
        parsed = parse_default(config, PERFORMANCE)
        assert parsed == expected
        return
    # Failure paths: checked in the same order as the original dispatch.
    for exc_cls in (ValueError, TypeError):
        if isinstance(expected, exc_cls):
            with pytest.raises(exc_cls):
                parse_default(config, PERFORMANCE)
            return
def test_default(config, expected):
    """test parsing of data

    ``expected`` doubles as the test oracle: a dict is compared against the
    parse result, while a ValueError/TypeError instance asserts that parsing
    raises that exception type.
    """
    if isinstance(expected, dict):
        # Valid config: result of parsing must match exactly.
        assert parse_default(config, DATA) == expected
        return
    # Invalid config: ValueError is checked before TypeError, matching the
    # original elif ordering.
    for exc_cls in (ValueError, TypeError):
        if isinstance(expected, exc_cls):
            with pytest.raises(exc_cls):
                parse_default(config, DATA)
            return
def test_default(config, expected):
    """test parsing of hyper parameters

    Dispatch on the type of ``expected``: dict -> compare parsed output;
    ValueError/TypeError instance -> parsing must raise that exception.
    """
    if isinstance(expected, dict):
        # Happy path: the parsed hyper-parameter config must equal expected.
        result = parse_default(config, HYPER_PARAMETERS)
        assert result == expected
        return
    # Error paths, preserving the original ValueError-then-TypeError order.
    for exc_cls in (ValueError, TypeError):
        if isinstance(expected, exc_cls):
            with pytest.raises(exc_cls):
                parse_default(config, HYPER_PARAMETERS)
            return
def test_default(config, expected):
    """test parsing of optimize

    The type of ``expected`` selects the check:
      - dict: ``parse_default(config, OPTIMIZE)`` must succeed and equal it.
      - ValueError / TypeError instance: parsing must raise that exception.

    Fix: removed a leftover debug ``print(formatted_config)`` that polluted
    test output on every successful parametrization.
    """
    if isinstance(expected, dict):
        formatted_config = parse_default(config, OPTIMIZE)
        assert expected == formatted_config
    elif isinstance(expected, ValueError):
        with pytest.raises(ValueError):
            formatted_config = parse_default(config, OPTIMIZE)
    elif isinstance(expected, TypeError):
        with pytest.raises(TypeError):
            formatted_config = parse_default(config, OPTIMIZE)