def interpolate(x, y, half_window=10):
    """Fill gaps in a sampled curve by local polynomial fits.

    Sorts (x, y) by x, then wherever two consecutive x values differ by
    more than 1, fits a polynomial to a window of up to ``half_window``
    points on either side of the gap and evaluates it at the missing
    integer x positions.  Returns the gap-filled arrays, sorted by x.

    NOTE(review): assumes x values are (near-)integers -- the missing
    points are generated with range(int(x[i]+1), int(x[i+1])).  Confirm
    against callers.

    :param x: flex.double of sample positions
    :param y: flex.double of sample values, same size as x
    :param half_window: maximum number of points used on each side of a gap
    :returns: (x_all, y_all) flex.double arrays including the filled points
    """
    # Hoisted out of the per-gap loop: the import is loop-invariant.
    from scitbx.math import curve_fitting

    perm = flex.sort_permutation(x)
    x = x.select(perm)
    y = y.select(perm)
    x_all = flex.double()
    y_all = flex.double()
    for i in range(x.size()):
        x_all.append(x[i])
        y_all.append(y[i])
        if i < x.size() - 1 and (x[i + 1] - x[i]) > 1:
            # Window of up to half_window points each side, clamped at the
            # array boundaries.
            window_left = min(half_window, i)
            window_right = min(half_window, x.size() - i)
            x_ = x[i - window_left : i + window_right]
            y_ = y[i - window_left : i + window_right]
            # Fit a first-order (linear) polynomial through the window.
            # NOTE(review): the original comment said "2nd order", but
            # univariate_polynomial(1, 1) constructs a degree-1 polynomial
            # (both coefficients initialised to 1) -- confirm which was
            # intended.
            polynomial = curve_fitting.univariate_polynomial(1, 1)
            fit = curve_fitting.lbfgs_minimiser([polynomial], x_, y_).functions[0]
            missing_x = flex.double(range(int(x[i] + 1), int(x[i + 1])))
            x_all.extend(missing_x)
            y_all.extend(fit(missing_x))

    # Re-sort: the interpolated points were appended out of order.
    perm = flex.sort_permutation(x_all)
    x_all = x_all.select(perm)
    y_all = y_all.select(perm)
    return x_all, y_all
def log_fit(x, y, degree=5):
    """Fit a polynomial to log(y(x)) and return exp() of that fit.

    x, y should be iterables containing floats of the same size. The
    order is the order of polynomial to use for this fit. This will be
    useful for e.g. I/sigma.
    """
    log_y = flex.log(y)
    result = curve_fitting.univariate_polynomial_fit(
        x, log_y, degree=degree, max_iterations=100
    )
    polynomial = curve_fitting.univariate_polynomial(*result.params)
    return flex.exp(polynomial(x))
def log_inv_fit(x, y, degree=5):
    """Fit a polynomial to log(1/y(x)) and return the inverse of exp(fit).

    x, y should be iterables, the order of the polynomial for the
    transformed fit needs to be specified. This will be useful for
    e.g. Rmerge.
    """
    transformed = flex.log(1 / y)
    result = curve_fitting.univariate_polynomial_fit(
        x, transformed, degree=degree, max_iterations=100
    )
    polynomial = curve_fitting.univariate_polynomial(*result.params)
    return 1 / flex.exp(polynomial(x))
def polynomial_fit(x, y, degree=5):
    """Fit a polynomial to the values y(x) and return the fitted values.

    x, y should be iterables containing floats of the same size. The
    order is the order of polynomial to use for this fit. This will be
    useful for e.g. I/sigma.
    """
    result = curve_fitting.univariate_polynomial_fit(
        x, y, degree=degree, max_iterations=100
    )
    polynomial = curve_fitting.univariate_polynomial(*result.params)
    return polynomial(x)
def exercise_polynomial_fit():
    """Exercise univariate_polynomial_fit on fixed and random polynomials.

    NOTE(review): a function with the same name is defined again
    immediately below; this first definition is shadowed and never runs.
    Consider deleting one copy.
    """

    def do_polynomial_fit(x, params):
        # Synthesise y = sum_i params[i] * x**i, then check the fit
        # recovers the original coefficients.
        n_terms = len(params)
        y = flex.double(x.size())
        for i in range(len(params)):
            y += params[i] * flex.pow(x, i)
        fit = curve_fitting.univariate_polynomial_fit(x, y, degree=n_terms - 1)
        assert approx_equal(params, fit.params, eps=1e-4)

    x = flex.double(range(-50, 50))
    do_polynomial_fit(x, (2, 3, 5))  # y = 2 + 3x + 5x^2
    do_polynomial_fit(x, (-0.0002, -1000))  # y = -0.0002 -1000x
    for n_terms in range(1, 6):
        # Random coefficients over a random x range.
        params = [100 * random.random() for i in range(n_terms)]
        x = flex.double(
            frange(-random.randint(1, 10), random.randint(1, 10), 0.1))
        functor = curve_fitting.univariate_polynomial(*params)
        # Analytical partial derivatives must agree with finite differences.
        fd_grads = finite_differences(functor, x)
        assert approx_equal(functor.partial_derivatives(x), fd_grads, 1e-4)
        do_polynomial_fit(x, params)
def exercise_polynomial_fit():
    """Check that polynomial fitting recovers known coefficients and that
    analytical partial derivatives agree with finite differences.
    """

    def check_fit(xs, coefficients):
        # Synthesise y = sum_i c_i * x**i and verify the fit recovers c.
        ys = flex.double(xs.size())
        for power, coefficient in enumerate(coefficients):
            ys += coefficient * flex.pow(xs, power)
        result = curve_fitting.univariate_polynomial_fit(
            xs, ys, degree=len(coefficients) - 1)
        assert approx_equal(coefficients, result.params, eps=1e-4)

    xs = flex.double(range(-50, 50))
    check_fit(xs, (2, 3, 5))  # y = 2 + 3x + 5x^2
    check_fit(xs, (-0.0002, -1000))  # y = -0.0002 -1000x
    for n_terms in range(1, 6):
        coefficients = [100 * random.random() for _ in range(n_terms)]
        # Random sample range; lower bound drawn first, then upper bound,
        # preserving the original random-call order.
        lower = -random.randint(1, 10)
        upper = random.randint(1, 10)
        xs = flex.double(frange(lower, upper, 0.1))
        functor = curve_fitting.univariate_polynomial(*coefficients)
        fd_grads = finite_differences(functor, xs)
        assert approx_equal(functor.partial_derivatives(xs), fd_grads, 1e-4)
        check_fit(xs, coefficients)