Example #1
def score(self, exptree, constants):
    # Assumes `warnings` and the project's `fit` module are imported at module level.
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")  # suppress warnings raised during fitting
        params, err = fit.sym_fit(exptree, constants, self.X, self.Y, self.guesses)
        # A negative error signals a failed fit; report it as infinitely bad.
        if err < 0:
            return params, float("inf")
        return params, err
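The negative-error guard matters because callers presumably pick candidates by comparing the returned error, and mapping failures to float("inf") keeps a failed fit from ever winning. A minimal sketch of such a comparison, with hypothetical names (candidates, scorer, constants) that are not taken from the source:

best_params, best_err = None, float("inf")
for tree in candidates:
    params, err = scorer.score(tree, constants)
    if err < best_err:  # a failed fit (err == inf) never replaces the current best
        best_params, best_err = params, err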
Example #2
def physics_fit(prototypes, x_data, y_data, threshold=0, max_terms=5,
                fit_tries=12, mul_depth=2):
    '''Do a "physicist fit"; expects standard-score-normalized data.
    prototypes - list of base functions
    x_data, y_data - data to fit
    threshold - stop when the fit error drops below this value
    max_terms - stop when the model contains this many prototype expressions
    mul_depth - generates more prototypes by cross-multiplying (unimplemented)
    (A usage sketch follows the function body.)
    '''
    
    debug_info = []

    prototypes = cross_multiply(prototypes, mul_depth)

    c0, c1 = sympy.symbols('c0 c1')

    # zero order model
    opt, cur_err = fit.sym_fit(c0, [c0], x_data, y_data)
    zero_order = opt[0]
    zero_order_error = cur_err
    
    debug_info.append("Zero order model:")
    debug_info.append((zero_order, cur_err))

    debug_info.append("prototype scores:")
    # calculate base error levels for every function prototype
    base_score = []
    for func in prototypes:
        # multiply by constant for better fitting, rename
        func_times_const = c1 * func + c0
        constants = func_times_const.free_symbols
        constants.remove(x)  # x is the module-level independent-variable symbol
        constants = list(constants)
        
        # fit data
        opt, err = fit.sym_fit(func_times_const, constants, x_data,
                               y_data, fit_tries * len(constants))
        
        if err < numpy.inf:
            base_score.append((err, func))
        
        #debug_info.append((func_times_const.subs(zip(constants, opt)),err))
    
    # sort by descending error so pop() yields the lowest-error prototype
    base_score = sorted(base_score, key=lambda pair: pair[0], reverse=True)

    for i in base_score:
        debug_info.append((i[1], i[0]))
   
    debug_info.append("Modeling execution trace:")
    # start modeling
    model = c0
    model_constants = [c0]
    for i in range(0, max_terms):
        if cur_err < threshold: break
        
        # grab the best-scoring (lowest-error) expression
        best_expr = base_score.pop()[1]
        
        # rename constants to avoid name collisions
        best_expr = rename_constants(best_expr, model_constants)
        
        # every prototype term gets a leading multiplicative constant
        model_constants.append(sympy.symbols('c' +
                               str(len(model_constants))))
        best_expr *= model_constants[-1]
        
        # refit
        model += best_expr
        opt, cur_err = fit.sym_fit(model, model_constants, x_data, y_data,
                                   fit_tries * len(model_constants))
        
        debug_info.append((model.subs(zip(model_constants, opt)), cur_err))

        # break if you start doing worse than zero-order model
        if cur_err > zero_order_error: break

    return debug_info
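A hedged usage sketch for physics_fit: the prototype pool and data values below are illustrative, and it is assumed (not confirmed by the source) that sympy, numpy, fit, cross_multiply, rename_constants, and the module-level symbol x are available where physics_fit is defined.

import sympy

x = sympy.symbols('x')  # must match the module-level x used inside physics_fit
prototypes = [x, x**2, sympy.sin(x), sympy.exp(x)]  # hypothetical base functions

# hypothetical standard-score-normalized data
x_data = [-1.5, -0.75, 0.0, 0.75, 1.5]
y_data = [-1.2, -0.6, 0.1, 0.7, 1.3]

trace = physics_fit(prototypes, x_data, y_data, threshold=1e-6, max_terms=3)
for entry in trace:
    print(entry)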
Example #3
from sympy import symbols
from fit import sym_fit
import operations as ops

x, c0, c1, c2 = symbols("x c0 c1 c2")

expr = c1 * x ** 2 + c0

# data generated from y = 2 * x^2 + 1
x_data = [0, 1, 2, 3, 4, 5]
y_data = [1, 3, 9, 19, 33, 51]  # exact
y_err = [1.1, 3.1, 8.9, 19.8, 33.1, 50]  # with some error

print("Attempting to fit c1 * x^2 + c2")

popt, err = sym_fit(expr, [c0, c1], x_data, y_data)
print("Fit to y_data")
print(popt)
print(err)

popt, err = sym_fit(expr, [c0, c1], x_data, y_err)
print("Fit to y_err")
print(popt)
print(err)

print("")

x_data = [1, 2, 3, 4, 5]
y_data = [3, 9, 19, 33, 51]
expr = c0
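The snippet is cut off here. A plausible continuation, following the same call pattern as above and assumed rather than taken from the source, would fit the constant (zero-order) model to the shifted data:

popt, err = sym_fit(expr, [c0], x_data, y_data)
print("Zero-order fit to the shifted data")
print(popt)
print(err)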
Example #4
def score(self, exptree, constants):
    # Fit the expression tree to the stored data; returns (parameters, error) from fit.sym_fit.
    return fit.sym_fit(exptree, constants, self.X, self.Y, self.guesses)