Example #1
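# Assumed setup (a sketch): the imports, model functions and data container used
# below are not part of this snippet. The model signatures follow the parameter
# names in the LaTeX assignments further down; the data values are placeholders
# reusing those of Example #2, and the default parameter values double as the
# initial guesses for the fit.
import numpy as np
import matplotlib.pyplot as plt
from kafe2 import XYContainer, Fit, Plot, ContoursProfiler


def linear_model(x, a, b):
    return a * x + b


def exponential_model(x, A0=1., x0=5.):
    return A0 * np.exp(x / x0)


# Placeholder data container with an uncertainty on y.
xy_data = XYContainer(x_data=[1.0, 2.0, 3.0, 4.0], y_data=[2.3, 4.2, 7.5, 9.4])
xy_data.add_error(axis='y', err_val=0.4)
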
# Create 2 Fit objects with the same data but with different model functions
linear_fit = Fit(data=xy_data, model_function=linear_model)
exponential_fit = Fit(data=xy_data, model_function=exponential_model)

# Optional: Assign LaTeX strings to parameters and model functions.
linear_fit.assign_parameter_latex_names(a='a', b='b')
linear_fit.assign_model_function_latex_expression("{a}{x} + {b}")
exponential_fit.assign_parameter_latex_names(A0='A_0', x0='x_0')
exponential_fit.assign_model_function_latex_expression("{A0} e^{{{x}/{x0}}}")

# Perform the fits.
linear_fit.do_fit()
exponential_fit.do_fit()

# Optional: Print out a report on the result of each fit.
linear_fit.report()
exponential_fit.report()

# Optional: Create a plot of the fit results using Plot.
p = Plot(fit_objects=[linear_fit, exponential_fit], separate_figures=False)

# Optional: Customize the plot appearance: only show the data points once.
p.customize('data', 'color', values=['k', 'none'])  # hide points for second fit
p.customize('data', 'label', values=['data points', None])  # no second legend entry

# Do the plotting.
p.plot(fit_info=True)

# Optional: Create a contour plot for the exponential fit to show the parameter correlations.
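# A sketch of that step, using the ContoursProfiler class that also appears in
# the later examples:
cpf = ContoursProfiler(exponential_fit)
cpf.plot_profiles_contours_matrix()  # profile likelihoods and parameter contours

# Show all figures.
plt.show()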
Example #2
from kafe2 import XYContainer, Fit, Plot
import matplotlib.pyplot as plt

# Create an XYContainer object to hold the xy data for the fit.
xy_data = XYContainer(x_data=[1.0, 2.0, 3.0, 4.0], y_data=[2.3, 4.2, 7.5, 9.4])
# x_data and y_data are combined depending on their order.
# The above translates to the points (1.0, 2.3), (2.0, 4.2), (3.0, 7.5), and (4.0, 9.4).

# Important: Specify uncertainties for the data.
xy_data.add_error(axis='x', err_val=0.1)
xy_data.add_error(axis='y', err_val=0.4)

# Create an XYFit object from the xy data container.
# By default, a linear function f=a*x+b will be used as the model function.
line_fit = Fit(data=xy_data)

# Perform the fit: Find values for a and b that minimize the
#     difference between the model function and the data.
line_fit.do_fit()  # This will throw an exception if no errors were specified.

# Optional: Print out a report on the fit results on the console.
line_fit.report()

# Optional: Create a plot of the fit results using Plot.
plot = Plot(fit_objects=line_fit)  # Create a kafe2 plot object.
plot.plot()  # Do the plot.

# Show the fit result.
plt.show()
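
# The numerical results can also be read off programmatically, e.g. via the fit
# properties used in Example #7:
#     a, b = line_fit.parameter_values
#     a_err, b_err = line_fit.parameter_errors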
Example #3
of entries N of the histogram.
"""

import numpy as np
import matplotlib.pyplot as plt
from kafe2 import HistContainer, Fit, Plot


def normal_distribution_pdf(x, mu, sigma):
    return np.exp(-0.5 * ((x - mu) / sigma)**2) / np.sqrt(2.0 * np.pi * sigma**2)


# Random dataset of 100 values drawn from a normal distribution with mu=0 and sigma=1
data = np.random.normal(loc=0, scale=1, size=100)

# Create a histogram from the dataset by specifying the bin range and the number of bins.
# Alternatively, the bin edges can be set explicitly, as in the commented-out sketch below.
histogram = HistContainer(n_bins=10, bin_range=(-5, 5), fill_data=data)
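# The same histogram via explicit bin edges (a sketch, cf. Example #7):
#     histogram = HistContainer(n_bins=10, bin_range=(-5, 5),
#                               bin_edges=np.linspace(-5, 5, 11), fill_data=data)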

# create the Fit object by specifying a density function
fit = Fit(data=histogram, model_function=normal_distribution_pdf)

fit.do_fit()  # do the fit
fit.report()  # Optional: print a report to the terminal

# Optional: create a plot and show it
plot = Plot(fit)
plot.plot()
plt.show()
Example #4
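# Assumed setup (a sketch, not shown in this snippet): a Fit object named `fit`,
# built from time-series data with an oscillating (damped pendulum) model whose
# parameters include y_0, l, r, g and c, plus the measured values l, r, y_0 with
# uncertainties delta_l, delta_r, delta_y_0. The calls below additionally need:
#
#     import matplotlib.pyplot as plt
#     from kafe2 import Fit, Plot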
# Constrain model parameters to measurements
fit.add_parameter_constraint(name='l', value=l, uncertainty=delta_l)
fit.add_parameter_constraint(name='r', value=r, uncertainty=delta_r)
fit.add_parameter_constraint(name='y_0',
                             value=y_0,
                             uncertainty=delta_y_0,
                             relative=True)

# Because the model function is oscillating, the fit needs to be initialized with good
# guesses for the unconstrained parameters in order to converge.
g_initial = 9.81  # initial guess for g
c_initial = 0.01  # initial guess for c
fit.set_parameter_values(g=g_initial, c=c_initial)

# Optional: Set the initial values of the remaining parameters to correspond to their constraint
# values (this may help some minimization algorithms converge)
fit.set_parameter_values(y_0=y_0, l=l, r=r)

# Perform the fit
fit.do_fit()

# Optional: Print out a report on the fit results on the console.
fit.report()

# Optional: plot the fit results
plot = Plot(fit)
plot.plot(fit_info=True)

plt.show()
Example #5
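# Assumed setup (a sketch): imports, an exponential model as in Example #1, and
# the ten x values of the dataset, which are elided here.
#
#     import numpy as np
#     import matplotlib.pyplot as plt
#     from kafe2 import Fit, Plot, ContoursProfiler
#
#     def exponential(x, A0=1., x0=5.):
#         return A0 * np.exp(x / x0)
#
#     x = [...]  # ten x values matching the y values below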
xerr = 3.000000e-01
y = [
    2.650644e-01, 1.472682e-01, 8.077234e-02, 1.850181e-01, 5.326301e-02,
    1.984233e-02, 1.866309e-02, 1.230001e-02, 9.694612e-03, 2.412357e-03
]
yerr = [
    1.060258e-01, 5.890727e-02, 3.230893e-02, 7.400725e-02, 2.130520e-02,
    7.936930e-03, 7.465238e-03, 4.920005e-03, 3.877845e-03, 9.649427e-04
]

# create a fit object from the data arrays
fit = Fit(data=[x, y], model_function=exponential)
fit.add_error(axis='x', err_val=xerr)  # add the x-error to the fit
fit.add_error(axis='y', err_val=yerr)  # add the y-errors to the fit

fit.do_fit()  # perform the fit
fit.report(asymmetric_parameter_errors=True)  # print a report with asymmetric uncertainties

# Optional: create a plot
plot = Plot(fit)
plot.plot(asymmetric_parameter_errors=True, ratio=True)  # add the data/model ratio and asymmetric errors

# Optional: create the contours profiler
cpf = ContoursProfiler(fit)
cpf.plot_profiles_contours_matrix()  # plot the contour profile matrix for all parameters

plt.show()
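Example #6
# The code below starts inside the body of the model function all_from_f1f2d,
# which returns the model values for all measured quantities as one concatenated
# array. Assumed but not shown here: the module imports, the measured values hgs
# and hus, the multi-dimensional data container iData, and the function's def
# line (judging from the xFit variant at the end, something like
# `def all_from_f1f2d(f1=10., f2=20., d1=10., d2=10.):`).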
    m_ds = np.array([d1, d2])
    # - effective focal lengths of the lens system
    m_fs = f1 * f2 / (f1 + f2 - m_ds)
    # - sum of distances of principal planes
    m_hsums = -m_fs * m_ds * m_ds / (f1 * f2)
    # express inputs in terms of model values
    m_hus = m_hsums - hgs
    m_hgs = m_hsums - hus
    return np.concatenate((m_fs, m_hus, m_hgs, m_ds))


f1f2Fit = Fit(iData, all_from_f1f2d)
f1f2Fit.model_label = 'all from f1, f2, d'
f1f2Fit.do_fit()

f1f2Fit.report()

f1f2Plot = Plot(f1f2Fit)
f1f2Plot.plot(residual=True)

print("\n*==*: Fit with PhyPraKit.phyFit/xFit\n")

# the same with PhyPraKit.phyFit.xFit
from PhyPraKit.phyFit import xFit


# define the physics model
#  it looks slightly different, as the data is passed to the model as the 1st argument
def _from_f1f2d(data, f1=10, f2=20, d1=10., d2=10.):
    # calculate f, hu, hg (and d)
    #### data = iData.data
Example #7
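# Assumed module-level imports (np and plt are used inside k2hFit but are not
# imported within it):
import numpy as np
import matplotlib.pyplot as plt
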
def k2hFit(fitf,
           data,
           bin_edges,
           p0=None,
           constraints=None,
           fixPars=None,
           limits=None,
           use_GaussApprox=False,
           plot=True,
           plot_cor=False,
           showplots=True,
           plot_band=True,
           plot_residual=False,
           quiet=True,
           axis_labels=['x', 'counts/bin = f(x, *par)'],
           data_legend='Histogram Data',
           model_legend='Model',
           model_expression=None,
           model_name=None,
           model_band=r'$\pm 1 \sigma$',
           fit_info=True,
           asym_parerrs=True):
    """Wrapper function to fit a density distribution f(x, \*par) 
  to binned data (histogram) with class mnFit 
  
  The cost function is two times the negative log-likelihood of the 
  Poisson  distribution, or - optionally - of the Gaussian approximation.

  Uncertainties are determined from the model values in order to avoid biases 
  and to take account of empty bins of an histogram. 

  Args:
    * fitf: model function to fit, arguments (float:x, float: \*args)
    * data: the data to be histogrammed  
    * bin_edges: bin edges 

    fit options

      * p0: array-like, initial guess of parameters
      * constraints: (nested) list(s) [name or id, value, error] 
      * limits: (nested) list(s) [name or id, min, max]
      * use_GaussApprox: Gaussian approximation instead of Poisson 

    output options

      * plot: show data and model if True
      * plot_cor: show profile likelihoods and confidence contours
      * plot_band: plot uncertainty band around model function
      * plot_residual: also plot residuals w.r.t. model
      * showplots: show plots on screen
      * quiet: suppress printout
      * axis_labels: list of two strings, axis labels for x and y
      * data_legend: legend entry for data
      * model_legend: legend entry for model
      * model_name: latex name for model function
      * model_expression: latex expression for model function
      * model_band: legend entry for model uncertainty band
      * fit_info: controls display of fit results on figure
      * asym_parerrs: show (asymmetric) errors from profile-likelihood scan

  Returns:
    * np-array of float: parameter values
    * np-array of float: negative and positive parameter errors
    * np-array: parameter correlation matrix
    * float: goodness-of-fit (equiv. chi2 for a large number of entries per bin)

  """

    # for fit with kafe2
    from kafe2 import HistContainer, Fit, Plot, ContoursProfiler
    from kafe2.fit.histogram import HistCostFunction_NegLogLikelihood

    # create a data container from input
    nbins = len(bin_edges) - 1
    bin_range = (bin_edges[0], bin_edges[-1])
    hdat = HistContainer(nbins, bin_range, bin_edges=bin_edges, fill_data=data)

    # set up fit object
    if use_GaussApprox:
        print('Gauss Approx. for histogram data not yet implemented - exiting!')
        exit(1)
        ## hfit = Fit(hdat, fitf,
        ##            cost_function=CostFunction_GaussApproximation)
    else:
        hfit = Fit(hdat,
                   fitf,
                   cost_function=HistCostFunction_NegLogLikelihood(
                       data_point_distribution='poisson'))
    # text for labeling
    hfit.assign_model_function_latex_name(model_name)
    hfit.assign_model_function_latex_expression(model_expression)
    hfit.model_label = model_legend

    # - provide text for labeling ...
    hdat.label = data_legend
    hdat.axis_labels = axis_labels

    # initialize and run fit
    if p0 is not None: hfit.set_all_parameter_values(p0)

    if constraints is not None:
        if not (isinstance(constraints[0], tuple)
                or isinstance(constraints[0], list)):
            constraints = (constraints, )
        for c in constraints:
            hfit.add_parameter_constraint(*c)

    if limits is not None:
        if isinstance(limits[1], list):
            for l in limits:
                hfit.limit_parameter(l[0], l[1], l[2])
        else:
            hfit.limit_parameter(limits[0], limits[1], limits[2])

    hfit.do_fit()

    # harvest results
    #  par, perr, cov, chi2 = fit.get_results() # for kafe vers. > 1.1.0
    parn = np.array(hfit.parameter_names)
    parv = np.array(hfit.parameter_values)
    pare = np.array(hfit.parameter_errors)
    cor = np.array(hfit.parameter_cor_mat)
    gof = hfit.goodness_of_fit
    if asym_parerrs:
        parae = np.array(hfit.asymmetric_parameter_errors)
    else:
        parae = np.array(list(zip(-pare, pare)))

    if not quiet:
        hfit.report(asymmetric_parameter_errors=True)

    if plot:
        # plot data, uncertainties, model line and model uncertainties
        kplot = Plot(hfit)
        # set some 'nice' options
        kplot.customize('data', 'marker', ['o'])
        kplot.customize('data', 'markersize', [6])
        kplot.customize('data', 'color', ['darkblue'])
        ## the following not (yet) defined for kafe2 Histogram Fit
        ##    kplot.customize('model_line', 'color', ['darkorange'])
        ##    kplot.customize('model_line', 'linestyle', ['--'])
        ##    if not plot_band:
        ##      kplot.customize('model_error_band', 'hide', [True])
        ##    else:
        ##      kplot.customize('model_error_band', 'color', ['green'])
        ##      kplot.customize('model_error_band', 'label', [model_band])
        ##      kplot.customize('model_error_band', 'alpha', [0.1])

        # plot with defined options
        kplot.plot(fit_info=fit_info,
                   residual=plot_residual,
                   asymmetric_parameter_errors=True)

        if plot_cor:
            cpf = ContoursProfiler(hfit)
            cpf.plot_profiles_contours_matrix()  # plot profile likelihood and contours

        if showplots: plt.show()

    return parv, parae, cor, gof
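

# A short usage sketch under stated assumptions: the Gaussian model, the random
# sample and the bin edges below are hypothetical placeholders; the call follows
# the signature and return values documented above.
if __name__ == "__main__":

    def gauss(x, mu=0., sigma=1.):
        # normalized Gaussian density; defaults double as initial parameter values
        return np.exp(-0.5 * ((x - mu) / sigma)**2) / np.sqrt(2. * np.pi * sigma**2)

    sample = np.random.normal(loc=0., scale=1., size=200)  # hypothetical data
    edges = np.linspace(-3., 3., 13)  # 12 equal-width bins
    parv, parae, cor, gof = k2hFit(gauss, sample, edges, quiet=False)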
Example #8
import matplotlib.pyplot as plt
from kafe2 import XYContainer, Fit, Plot
from kafe2.fit.tools import ContoursProfiler

# Construct a fit with data loaded from a yaml file. The model function is the default of f(x) = a * x + b
nonlinear_fit = Fit(data=XYContainer.from_file('x_errors.yml'))

# The x errors are much bigger than the y errors. Since kafe2 translates them into
# y errors via the derivative of the model function, the fit becomes nonlinear and
# the likelihood function is distorted.
nonlinear_fit.add_error('x', 1.0)
nonlinear_fit.add_error('y', 0.1)

# Perform the fit.
nonlinear_fit.do_fit()

# Optional: Print out a report on the fit results on the console.
# Note the asymmetric_parameter_errors flag
nonlinear_fit.report(asymmetric_parameter_errors=True)

# Optional: Create a plot of the fit results using Plot.
# Note the asymmetric_parameter_errors flag
plot = Plot(nonlinear_fit)
plot.plot(fit_info=True, asymmetric_parameter_errors=True)

# Optional: Calculate a detailed representation of the profile likelihood
# Note how the actual chi2 profile differs from the parabolic approximation that you would expect with a linear fit.
profiler = ContoursProfiler(nonlinear_fit)
profiler.plot_profiles_contours_matrix(show_grid_for='all')

plt.show()