Example #1
def chi2fit(x,y,yerr,func,guesses,label="Data",ax=None,col='k',text=False,draw=True):
    # guesses: dict mapping parameter names (matching func's signature) to starting values
    chi = Chi2Regression(func,x,y,yerr)
    mi = Minuit(chi,pedantic=False,print_level=0,**guesses)
    mi.migrad()
    
    if(draw):
        xx = np.linspace(min(x),max(x),1000)
        if not ax:
            fig, ax = plt.subplots()
        ax.errorbar(x,y,yerr=yerr,fmt = col+'.',capsize = 2,label = label)
        dstr = "Fit values:\n"
        for i,key in enumerate(guesses):
            dstr+=str(key)+r"={:.3}$\pm${:.3} ".format(mi.args[i],mi.errors[i])+"\n"
        dstr+="\n"
        dstr+="$\chi^2$={:.3}\np-value={:.3}".format(mi.fval,stats.chi2.sf(mi.fval,len(y)-len(guesses)))
        ax.plot(xx,func(xx,*mi.args),ls='--',c='b',label = "Fit")
        if(text):
            ax.text(text[0],text[1],dstr,transform=ax.transAxes)
        ax.legend()
    
    return mi,ax
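# A minimal usage sketch for chi2fit above. It assumes the pre-2.0 iminuit API
# (keyword arguments such as pedantic) and that Chi2Regression comes from the
# course's ExternalFunctions helper module; the linear model and the toy data
# below are made up purely for illustration.
import numpy as np
import matplotlib.pyplot as plt
from scipy import stats
from iminuit import Minuit
from ExternalFunctions import Chi2Regression  # course helper, assumed to be on the path

def line(x, a, b):      # hypothetical model; the keys of `guesses` must match these names
    return a * x + b

x = np.linspace(0, 10, 20)
yerr = np.full_like(x, 0.5)
y = line(x, 2.0, 1.0) + np.random.normal(0, 0.5, size=len(x))

mi, ax = chi2fit(x, y, yerr, line, {"a": 1.0, "b": 0.0},
                 label="Toy data", text=(0.05, 0.70))
print(mi.values)        # best-fit parameters
plt.show()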
def linfit(x, y, a0, b0, e):
    # Use a weighted chi2 if positive uncertainties are supplied, otherwise an unweighted one
    if np.any(np.asarray(e) > 0):
        chi2_object = Chi2Regression(fit_function, x, y, e)
    else:
        chi2_object = Chi2Regression(fit_function, x, y)
    minuit = Minuit(chi2_object, pedantic=False, b=b0, a=a0, print_level=0)
    minuit.migrad()
    minuit_output = [minuit.get_fmin(), minuit.get_param_states()]  # Saved in case needed
    b_fit = minuit.values['b']
    a_fit = minuit.values['a']
    sigma_b_fit = minuit.errors['b']
    sigma_a_fit = minuit.errors['a']
    Nvar = 2
    Ndof_fit = len(x) - Nvar
    Chi2_fit = minuit.fval  # The chi2 value
    Prob_fit = stats.chi2.sf(
        Chi2_fit, Ndof_fit)  # The chi2 probability given N degrees of freedom
    return a_fit, b_fit, sigma_a_fit, sigma_b_fit, Chi2_fit, Prob_fit
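# linfit relies on a module-level fit_function whose parameters are named a and b
# (those names are passed to Minuit above). A plausible straight-line definition,
# assumed here rather than taken from the source:
def fit_function(x, a, b):
    return a + b * x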
Example #3
def chi2_ball_on_incline(x, y, err_y):
    # Now we define a ChiSquare to be minimised (using probfit), where we set various settings and starting parameters:
    chi2_object = Chi2Regression(fitting_function, x, y, err_y) 
    minuit = Minuit(chi2_object, pedantic=False, alpha0=0.0, alpha1=0.0, alpha2=0.5, print_level=0)  
    minuit.migrad() # Perform the actual fit
    minuit_output = [minuit.get_fmin(), minuit.get_param_states()] # Save the output parameters in case needed
    
    # Here we extract the fitting parameters and their errors
    alpha0_fit = minuit.values['alpha0']
    alpha1_fit = minuit.values['alpha1']
    alpha2_fit = minuit.values['alpha2']
    sigma_alpha0_fit = minuit.errors['alpha0']
    sigma_alpha1_fit = minuit.errors['alpha1']
    sigma_alpha2_fit = minuit.errors['alpha2']

    Npoints = np.shape(x)[0] # Number of data points
    Nvar = 3 # Number of variables
    Ndof_fit = Npoints - Nvar    # Number of degrees of freedom = Number of data points - Number of variables
    
    Chi2_fit = minuit.fval                          # The chi2 value
    Prob_fit = stats.chi2.sf(Chi2_fit, Ndof_fit)    # The chi2 probability given N degrees of freedom

    # Plotting
    """
    fig, ax = plt.subplots(figsize=(8,8))
    plotting_times = np.linspace(0.0, 0.6, 1000)
    ax.errorbar(x, y, yerr=err_y, fmt='ko', ecolor='k', barsabove=False, elinewidth=1, capsize=2, capthick=1)
    ax.plot(plotting_times, fitting_function(plotting_times, alpha0_fit, alpha1_fit, alpha2_fit), '-r')
    ax.set_xlabel("Time (s)")
    ax.set_ylabel("Position (m)")
    ax.set_xlim(-0.01,0.55)
    ax.set_ylim(-0.01,0.55)
    # Add nice text
    blank = ''
    d = {" Result of the fit:": blank,
        f"  y-intercept    = {'{0:.4f}'.format(alpha0_fit)} \u00B1 {'{0:.4f}'.format(sigma_alpha0_fit)}": blank,
        f"  Start velocity = {'{0:.4f}'.format(alpha1_fit)} \u00B1 {'{0:.4f}'.format(sigma_alpha1_fit)}": blank,
        f"  Acceleration   = {'{0:.4f}'.format(alpha2_fit)} \u00B1 {'{0:.4f}'.format(sigma_alpha2_fit)}": blank,
        f'  Chi2 = {"{0:.4f}".format(Chi2_fit)} and P = {"{0:.4f}".format(Prob_fit)}': blank,
        }
    text = nice_string_output(d, extra_spacing=2, decimals=3)
    add_text_to_ax(0.02, 0.95, text, ax, fontsize=14)
    fig.tight_layout()
    #plt.show()
    """
    # Print results
    #print(f"Acceleration={alpha2_fit} +- {sigma_alpha2_fit}")
    #print(f"Chi2={Chi2_fit}, prop={Prob_fit}")

    return alpha2_fit, sigma_alpha2_fit, Chi2_fit, Prob_fit
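# chi2_ball_on_incline assumes a module-level fitting_function. Judging from the
# parameter labels above (y-intercept, start velocity, acceleration) and the
# quadratic fit_function2 used later in this collection, a plausible definition
# (an assumption, not shown in the source) is:
def fitting_function(t, alpha0, alpha1, alpha2):
    # constant-acceleration position curve: offset + velocity*t + quadratic term*t^2
    return alpha0 + alpha1 * t + alpha2 * t**2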
Example #4
def fit_gauss(x,y,yerr,muguess=None,ax=None,col='k',label = "Data"):
    if muguess is None:
        muguess = (max(x) + min(x)) / 2  # default: midpoint of the x-range
    func = lambda x,mu,sigma,a: stats.norm.pdf(x,mu,sigma)*a
    chi = Chi2Regression(func,x,y,yerr)
    mi = Minuit(chi,pedantic = False,print_level=0,mu = muguess,sigma = 1,a=1)
    mi.migrad()
    mu,sigma,a = mi.args
    
    xx = np.linspace(min(x),max(x),1000)
    if not ax:
        fig, ax = plt.subplots()
    ax.errorbar(x,y,yerr=yerr,fmt = col+'.',capsize = 2,label = label)
    ax.plot(xx,func(xx,*mi.args),ls='--',c='b',label = r"Gauss$(\mu: {:.3},\sigma: {:.3})$".format(mu,sigma))
    ax.legend()
    
    return mi,ax
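# A short usage sketch for fit_gauss: fit a normalised histogram of Gaussian
# samples. density=True keeps the amplitude close to the hard-coded starting
# value a=1; the data are made up and the imports from the first example are
# assumed.
samples = np.random.normal(loc=0.2, scale=1.3, size=2000)
counts, edges = np.histogram(samples, bins=40, density=True)
centers = 0.5 * (edges[:-1] + edges[1:])
binwidth = edges[1] - edges[0]
mask = counts > 0                                           # skip empty bins (zero error)
errs = np.sqrt(counts[mask] / (len(samples) * binwidth))    # Poisson error propagated to the density
mi, ax = fit_gauss(centers[mask], counts[mask], errs, muguess=samples.mean())
plt.show()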
Example #5
def weighted_avg(y,x=None,yerr = None,ax = None,label="Data",col = "k"):
    if x is None:
        x = np.arange(len(y))
    if yerr is None:
        yerr = np.ones(len(y))*np.std(y)
    func = lambda x,a: a + 0*x
    chi = Chi2Regression(func,x,y,yerr)
    mi = Minuit(chi,pedantic = False,print_level=0,a=1)
    mi.migrad()
    a = mi.args[0]
    err = mi.errors[0]
    xx = np.linspace(min(x),max(x),1000)
    
    if not ax:
        fig, ax = plt.subplots()
    ax.errorbar(x,y,yerr=yerr,fmt = col+'.',capsize = 2,label = label)
    ax.plot(xx,func(xx,a),ls='--',c='b',label = r"Average: ${:.3}\pm{:.3}$".format(a,err))
    ax.legend()
    
    return mi,ax
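# Usage sketch for weighted_avg: combine repeated measurements, each with its
# own uncertainty, into a single (chi2-based) weighted mean. Toy numbers,
# imports as in the first example.
measurements = np.array([9.79, 9.84, 9.81, 9.87])
uncertainties = np.array([0.03, 0.05, 0.02, 0.04])
mi, ax = weighted_avg(measurements, yerr=uncertainties)
print(mi.values["a"], mi.errors["a"])   # weighted average and its uncertainty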
Example #6
def fit_multiple(func, x, y, y_std):
    import sys  # Needed to make the course helper module importable
    sys.path.append('External_Functions')
    from ExternalFunctions import Chi2Regression
    import numpy as np
    from iminuit import Minuit
    fit_slope = []
    slope_uncertainty = []
    fit_intersection = []
    intersection_uncertainty = []
    for i in range(len(x)):
        chi2_object = Chi2Regression(func, x[i], y, y_std)
        minuitLin = Minuit(chi2_object,
                           pedantic=False,
                           intersection=0,
                           slope=1.5,
                           print_level=0)
        minuitLin.migrad()
        # Look the parameters up by name so the order of func's arguments does not matter
        fit_slope.append(minuitLin.values["slope"])
        slope_uncertainty.append(minuitLin.errors["slope"])
        fit_intersection.append(minuitLin.values["intersection"])
        intersection_uncertainty.append(minuitLin.errors["intersection"])
    return np.array(fit_slope), np.array(fit_intersection), np.array(
        slope_uncertainty), np.array(intersection_uncertainty)
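# fit_multiple expects a model whose parameters are literally named
# 'intersection' and 'slope', because those names are handed to Minuit above.
# A plausible straight-line definition (an assumption, not from the source):
def straight_line(x, intersection, slope):
    return intersection + slope * x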
Example #7
def double_gauss_fit(mass,
                     bins=100,
                     range=(400, 600),
                     ax=None,
                     verbose=True,
                     guesses=None,
                     max_size=None,
                     plimit=0,
                     color="red"):
    # Fit third degree polynomium to background
    def background_fit(x, a, b, c, d):
        res = a * (x - 498)**3 + b * (x - 498)**2 + c * (x - 498) + d
        return res

    # The double gauss signal
    def add_signal(x, mean, sig, size, ratio, sig_ratio):
        return size * binwidth *  (ratio * norm.pdf(x, mean, sig) + \
                                   (1 - ratio) * norm.pdf(x, mean, sig_ratio * sig))

    # The full fit
    def full_fit(x, mean, sig, size, ratio, sig_ratio, a, b, c, d):
        return background_fit(x, a, b, c, d) + add_signal(
            x, mean, sig, size, ratio, sig_ratio)

    # Make histogram
    vals, edges = np.histogram(mass, bins=bins, range=range)
    xs = (edges[1:] + edges[:-1]) / 2

    binwidth = xs[1] - xs[0]

    mask = vals > 0
    vals = vals[mask]
    xs = xs[mask]
    errs = np.sqrt(vals)

    # Get guesses for a background fit
    if not guesses:
        back_data_mask = abs(xs - xs[np.argmax(vals)]) > 10
        background_guess = [0, 0, (vals[-1] - vals[0]) / 100, vals.min()]

        if len(vals[back_data_mask]) == 0:
            return None, None, None, None

        try:
            vals_b, cov_b = curve_fit(background_fit,
                                      xs[back_data_mask],
                                      vals[back_data_mask],
                                      p0=background_guess)
        except Exception:  # fall back to the initial guesses if curve_fit fails
            vals_b = background_guess
        b1, b2, b3, b4 = vals_b

        bkgr_chi2 = Chi2Regression(background_fit, xs[back_data_mask],
                                   vals[back_data_mask], errs[back_data_mask])
        bkgr_min = Minuit(bkgr_chi2, pedantic=False, a=b1, b=b2, c=b3, d=b4)
        bkgr_min.migrad()
        counter = 0
        while not bkgr_min.valid and counter < 50:
            bkgr_min.migrad()
            counter += 1
        if not bkgr_min.valid: print("No background valid minimum found!")

        #Save guesses
        b1, b2, b3, b4 = bkgr_min.args

        guesses_sig = [498, 7, 2000, 0.5, 2]
        try:
            vals_f, cov_f = curve_fit(full_fit,
                                      xs,
                                      vals,
                                      p0=guesses_sig + [b1, b2, b3, b4])
        except Exception:  # fall back to the combined guesses if curve_fit fails
            vals_f = np.hstack([guesses_sig, vals_b])

        s1, s2, s3, s4, s5, b1, b2, b3, b4 = vals_f
    else:
        s1, s2, s3, s4, s5, b1, b2, b3, b4 = guesses

    full_chi2 = Chi2Regression(full_fit, xs, vals, errs)
    full_min  = Minuit(full_chi2, pedantic = False, a = b1, b = b2, c = b3, d = b4, \
                       mean = s1, sig = s2, size = s3, ratio = s4, sig_ratio = s5, limit_sig_ratio = (1, 4), \
                       limit_ratio = (0, 1.0), limit_mean = (490, 510), limit_size = (0, max_size), limit_sig = (3, 10))
    full_min.migrad()
    counter = 0
    while not full_min.valid and counter < 200:
        full_min.migrad()
        counter += 1
    if not full_min.valid: print("No valid minimum found!")

    # Check fit
    chi = full_min.fval
    pval = chi2.sf(chi, np.sum(mask) - len(full_min.args))

    if verbose:
        print(f"Completed fit with Chi2: {chi:.1f}, p-val: {pval:.3f} and the total amount of signal " + \
            f"{full_min.values['size']:.0f} +/- {full_min.errors['size']:.0f}, background: {len(mass) - int(full_min.values['size'])}")

    if ax:
        ax.plot(xs, vals, alpha=1, color=color)


#         ax.errorbar(xs, vals, errs, elinewidth = 1, color = 'k', capsize = 2, linestyle = 'none', alpha = 0.25)
#         ax.plot(xs, full_fit(xs, *full_min.args), '--', alpha = 0.5)

    if True:  #full_min.errors['size'] < full_min.values['size'] and full_min.valid and pval > plimit:
        return full_min.values['size'], len(mass) - full_min.values[
            'size'], full_min.errors['size'], full_min.args
    else:
        return None, None, None, None
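# double_gauss_fit relies on names imported elsewhere in the original notebook;
# a plausible set of imports (assumed, not shown in the source) would be:
import numpy as np
from scipy.stats import norm, chi2
from scipy.optimize import curve_fit
from iminuit import Minuit
from ExternalFunctions import Chi2Regression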
Example #8
def chi2_fit(func,
             x,
             y,
             yerr,
             get_values=False,
             pedantic=False,
             print_level=0,
             latex_format=False,
             **kwdarg):
    '''
    ChiSquare fit of a given function to a given data set.
    
    Returns the fitted parameters for further plotting.
    
    **kwdarg allows the user to specify initial parameter 
    values and fix values using the syntax from Minuit
    '''
    chi2obj = Chi2Regression(func, x, y, yerr)
    minuit_obj = Minuit(chi2obj,
                        pedantic=pedantic,
                        print_level=print_level,
                        **kwdarg)

    minuit_obj.migrad()

    if (not minuit_obj.get_fmin().is_valid):  # Check if the fit converged
        print("    WARNING: The ChiSquare fit DID NOT converge!!!")

    Chi2_value = minuit_obj.fval  # The Chi2 value
    NvarModel = len(minuit_obj.args)
    Ndof = len(x) - NvarModel
    ProbChi2 = stats.chi2.sf(Chi2_value, Ndof)
    if latex_format:
        print(
            r'''----------------------------------------------------------------------------------
NB! Units, caption, label and sometimes parameter names must be changed in LaTeX.
----------------------------------------------------------------------------------
        
\begin{table}[b]
    \centering
    \begin{tabular}{lrr}
    \hline
    \hline
        Parameter & Value (Unit) & Unc. (Unit) \\
    \hline''')
        for name in minuit_obj.parameters:
            print(
                f'        ${name}$ & ${minuit_obj.values[name]:.5f}$ & ${minuit_obj.errors[name]:.5f}$ \\\\'
            )
        print(r'''    \hline
    \hline''')
        print(
            r'        $\chi^2$-value = {0:.3f} & Ndof = {1} & $\chi^2$-prob = {2:.3f} \\'
            .format(Chi2_value, Ndof, ProbChi2))
        print(r'''    \hline
    \hline
    \end{tabular}
    \caption{Results of $\chi^2$-fit.}
    \label{tab:chi2_fit}
\end{table}''')
    else:
        print(f'''
    _____________________________________________________
    -----------------------------------------------------
               ChiSquare Fit Results
    -----------------------------------------------------
    Chi2-value = {Chi2_value:.3f}
    Ndof       = {Ndof}
    Chi2-prob  = {ProbChi2:.2%}
    -----------------------------------------------------''')
        for name in minuit_obj.parameters:
            print(
                f'\n    Chi2 Fit result:    {name} = {minuit_obj.values[name]:.5f} +/- {minuit_obj.errors[name]:.5f}'
            )
        print('    _____________________________________________________')
    if get_values:
        return minuit_obj.args, Chi2_value, Ndof, ProbChi2
    else:
        return minuit_obj.args
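# Usage sketch for chi2_fit: the **kwdarg passthrough forwards starting values
# and, with the pre-2.0 Minuit keyword syntax, parameter limits (limit_<name>)
# or fixes (fix_<name>). The exponential model and the numbers are made up.
def exp_decay(x, N0, tau):
    return N0 * np.exp(-x / tau)

x = np.linspace(0, 5, 30)
yerr = np.full_like(x, 2.0)
y = exp_decay(x, 100.0, 1.5) + np.random.normal(0, 2.0, size=len(x))

fitted_params = chi2_fit(exp_decay, x, y, yerr,
                         N0=90.0, tau=1.0,          # starting values
                         limit_tau=(0.1, 10.0))     # bound, old-iminuit style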
def fit(infiles):
    array_alpha0 = np.zeros(len(infiles))
    array_alpha1 = np.zeros(len(infiles))
    array_alpha2 = np.zeros(len(infiles))
    array_Chi2 = np.zeros(len(infiles))
    array_Prob = np.zeros(len(infiles))
    array_time = np.zeros((len(infiles), 5))
    array_alpha2_err = np.zeros(len(infiles))
    for iexp in range(len(infiles)):
        time, voltage = read_csv(infiles[iexp])

        time_l, etime = para(time, voltage)

        def fit_function2(x, alpha0, alpha1, alpha2):
            return alpha0 + alpha1 * x + alpha2 * x**2

    # Now we define a ChiSquare to be minimised (using probfit), where we set various settings and starting parameters:

        # d_mean and d_error (the measured values and their uncertainties) are
        # assumed to be defined elsewhere in the original script
        chi2_object = Chi2Regression(fit_function2, time_l, d_mean, d_error)
        minuit = Minuit(chi2_object,
                        pedantic=False,
                        alpha0=3.0,
                        alpha1=0.0,
                        alpha2=0.0,
                        print_level=0)
        minuit.migrad()
        # Perform the actual fit
        minuit_output = [minuit.get_fmin(),
                         minuit.get_param_states()
                         ]  # Save the output parameters in case needed

        # Here we extract the fitting parameters and their errors
        alpha0_fit = minuit.values['alpha0']
        alpha1_fit = minuit.values['alpha1']
        alpha2_fit = minuit.values['alpha2']
        sigma_alpha0_fit = minuit.errors['alpha0']
        sigma_alpha1_fit = minuit.errors['alpha1']
        sigma_alpha2_fit = minuit.errors['alpha2']

        Nvar = 3  # Number of variables (alpha0, alpha1 and alpha2)
        Ndof_fit = len(time_l) - Nvar  # Number of degrees of freedom = Number of data points - Number of variables

        Chi2_fit = minuit.fval  # The chi2 value

        Prob_fit = stats.chi2.sf(
            Chi2_fit,
            Ndof_fit)  # The chi2 probability given N degrees of freedom

        # Fill the arrays with fit results (to produce plots of these at the end):
        array_alpha0[iexp] = alpha0_fit
        array_alpha1[iexp] = alpha1_fit
        array_alpha2[iexp] = alpha2_fit
        array_Chi2[iexp] = Chi2_fit
        array_Prob[iexp] = Prob_fit
        array_time[iexp, :] = time_l
        array_alpha2_err[iexp] = sigma_alpha2_fit

        # Let us see what the fit gives for the first couple of data sets:
        if (iexp < 100):
            print(
                f"  Fit: a0={alpha0_fit:6.4f}+-{sigma_alpha0_fit:5.4f}  a1={alpha1_fit:5.4f}+-{sigma_alpha1_fit:5.4f} a2={alpha2_fit:5.4f}+-{sigma_alpha2_fit:5.4f}  p={Prob_fit:6.4f} Chi2={Chi2_fit:6.4f}"
            )
    # Compute the averages once, after all fits are done
    avg_alpha0 = statistics.mean(array_alpha0)
    avg_alpha1 = statistics.mean(array_alpha1)
    avg_alpha2 = statistics.mean(array_alpha2)
    return array_alpha2, array_alpha2_err, avg_alpha2
N_bins = 50


def gauss_pdf(x, mu, sigma):
    return 1.0 / np.sqrt(2.0 * np.pi) / sigma * np.exp(
        -(x - mu)**2.0 / 2.0 / sigma**2.0)


def gauss_ext(x, N, mu, sigma):
    return N * gauss_pdf(x, mu, sigma)


y_a1, xedge = np.histogram(a1, bins=N_bins, range=(min(a1), max(a1)))
x_a1 = (xedge[1:] + xedge[:-1]) / 2

chi2_object_a1 = Chi2Regression(gauss_ext, x_a1, y_a1)
minuit = Minuit(chi2_object_a1,
                pedantic=False,
                N=1,
                mu=a1avg,
                sigma=statistics.stdev(a1),
                print_level=1)
minuit.migrad()  # Perform the actual fit
minuit_output = [minuit.get_fmin(), minuit.get_param_states()]
Na1 = minuit.values['N']
mua1 = minuit.values['mu']
sigmaa1 = minuit.values['sigma']

print(Na1, mua1, sigmaa1)
Chi2_fit_a1 = minuit.fval
Prob_fit_a1 = stats.chi2.sf(Chi2_fit_a1, len(x_a1) - 3)
Example #11
    def fit_mass(self,
                 mass,
                 ax=None,
                 double=1,
                 poly_degree=3,
                 depth=50,
                 plot=True):
        """Gauss fit. If double we fit with mu_1=mu_2, with polynomial backgground fit:
        Returns fig, ax, the full Minuit object, background amount and signal amount:
        """
        vals, binedges = np.histogram(mass,
                                      bins=self.bins,
                                      range=self.mass_range)
        xs = 0.5 * (binedges[:-1] + binedges[1:])
        mask = vals > 0
        xs, vals, errs = xs[mask], vals[mask], np.sqrt(vals)[mask]

        #look into automatizing the guesses
        #Find peak:

        def find_peak(xs=xs, vals=vals):
            '''Linearly decorrelate and return argmax. 
            Works best for appropriate # of bins'''
            c = np.cov(xs, vals)
            beta = c[0, 1] / c[0, 0]
            alpha = vals.mean() - beta * xs.mean()
            return np.argmax(vals - (beta * xs + alpha))

        mu = xs[find_peak()]  # convert the peak index into a mass value
        sigma = 5  #Arbitrarily set

        #Make a 5 sigma cut so only background is here
        bkgr_mask = (xs < mu - 5 * sigma) | (xs > mu + 5 * sigma)
        guesses_bkgr = np.zeros(poly_degree + 1)
        guesses_bkgr[-1] = (vals[0] + vals[-1]) / 2          # constant term guess
        guesses_bkgr[-2] = (vals[-1] - vals[0]) / self.bins  # linear term guess

        def background_fit(x, a, b, c, d):
            return a * (x - mu)**3 + b * (x - mu)**2 + c * (x - mu) + d

        # Background fit under here
        if sum(bkgr_mask) < poly_degree + 1:  # too few background points to constrain the polynomial

            def background_fit(xs, a, b, c, d):
                return 0 * xs

            b1, b2, b3, b4 = 0, 0, 0, 0
        else:
            vals_b, cov_b = curve_fit(background_fit,
                                      xs[bkgr_mask],
                                      vals[bkgr_mask],
                                      p0=guesses_bkgr)
            b1, b2, b3, b4 = vals_b
            bkgr_chi2 = Chi2Regression(background_fit, xs[bkgr_mask],
                                       vals[bkgr_mask], errs[bkgr_mask])
            bkgr_min = Minuit(bkgr_chi2,
                              pedantic=False,
                              a=b1,
                              b=b2,
                              c=b3,
                              d=b4,
                              limit_d=[0, 2 * b4])
            bkgr_min.migrad()
            counter = 0
            while not bkgr_min.valid and counter < depth:
                bkgr_min.migrad()
                counter += 1
            if not bkgr_min.valid: print("No background valid minimum found!")

        # Save the refined background parameters (only when a background fit was actually performed)
        if sum(bkgr_mask) >= poly_degree + 1:
            b1, b2, b3, b4 = bkgr_min.args

        # The signal fit  Here gauss
        def gauss(x, mean, sig, size):
            return size * norm.pdf(x, mean, sig)

        # def gauss2(x, mean, sig, size):
        #     return size*norm.pdf(x, mean, sig)

        if double:
            # Full fit for double gauss
            def full_fit(x, mean, sig, size, f, sigmp, a, b, c, d):
                return background_fit(
                    x, a, b, c, d) + f * gauss(x, mean, sig, size) + (
                        1 - f) * gauss(x, mean, sigmp * sig, size)
        else:
            # Full fit for single gauss
            def full_fit(x, mean, sig, size, a, b, c, d):
                return background_fit(x, a, b, c, d) + gauss(
                    x, mean, sig, size)