Example #1
0
def test_get_back(constats_enabled):
    """Round-trip test: a generated expression survives tokenization.

    For 10 random functions, tokenize (extract -> numberize -> flatten),
    reconstruct the string with ``tokenization.get_string``, and check the
    reconstructed function evaluates to the same values as the original.

    Args:
        constats_enabled: fixture flag forwarded to ``DatasetCreator``;
            the numeric comparison only runs when constants are disabled,
            since with constants the reconstructed string need not match.
    """
    x = Symbol('x')
    basis_functions = [
        x, exp, log, sin
    ]  # Pay attention as the order is indeed important, for testing we put it in alphabetical order (apart from x)
    for _ in range(10):
        fun_generator = DatasetCreator(basis_functions,
                                       max_linear_terms=4,
                                       constants_enabled=constats_enabled)
        simpy_output, dictionary, dictionary_clean = fun_generator.generate_fun(
        )
        separated_dict = tokenization.extract_terms(dictionary_clean)
        numberized_dict, mapping = tokenization.numberize_terms(separated_dict)
        final_seq = tokenization.flatten_seq(numberized_dict, mapping=mapping)
        ori_fun = lambdify(x, simpy_output, 'numpy')
        # No bare except / pdb.set_trace() here: if reconstruction fails,
        # let the exception propagate so pytest reports the real traceback.
        get_back = lambdify(x, tokenization.get_string(final_seq), 'numpy')
        input_x = np.arange(-3, 3, 0.1)
        if not constats_enabled:
            ori_y = np.nan_to_num(
                fun_generator.handling_nan_evaluation(input_x, ori_fun))
            new_y = np.nan_to_num(
                fun_generator.handling_nan_evaluation(input_x, get_back))
            # allclose instead of exact ==: both arrays come from separately
            # lambdified expressions and may differ by float rounding.
            assert np.allclose(ori_y, new_y)
Example #2
0
def test_separator():
    """Smoke test for ``tokenization.extract_terms``.

    Generates one random function and checks that the extracted term
    dictionary exposes a ``'Single'`` entry (printed for inspection).
    """
    sym_x = Symbol('x')
    # Order matters here: keep the basis alphabetical (aside from x).
    basis = [sym_x, exp, log, sin]
    creator = DatasetCreator(basis, max_linear_terms=4)
    _, generated_dict, _ = creator.generate_fun()
    terms = tokenization.extract_terms(generated_dict)
    print(terms['Single'])
Example #3
0
def test_final():
    """Smoke test for the full tokenization pipeline.

    Generates one random function and runs extract -> numberize -> flatten,
    printing the resulting flat token sequence for inspection.
    """
    x = Symbol('x')
    basis_functions = [
        x, exp, log, sin
    ]  # Pay attention as the order is indeed important, for testing we put it in alphabetical order (apart from x)
    fun_generator = DatasetCreator(basis_functions, max_linear_terms=4)
    string, dictionary, _ = fun_generator.generate_fun()
    separated_dict = tokenization.extract_terms(dictionary)
    numberized_dict, mapping = tokenization.numberize_terms(separated_dict)
    final_seq = tokenization.flatten_seq(numberized_dict, mapping=mapping)
    print(final_seq)
    # Removed leftover debug statement: print("hello")