Exemplo n.º 1
0
def _profileObtainAndVerifyBounds(f, df, ddf, x0, lb, ub, full_output=False):
    res = minimize(
        fun=f,
        jac=df,  # hess=ddf,
        x0=x0,
        bounds=np.reshape(np.append(lb, ub), (len(lb), 2), 'F'),
        method='l-bfgs-b',
        options={'maxiter': 1000})

    if res["success"] == False:
        raise EstimateError("Failure in estimation of the profile " +
                            "likelihood: " + res['message'])
    else:
        res["method"] = "Direct Minimization"

    if full_output:
        return res['x'], res
    else:
        return res['x']
Exemplo n.º 2
0
def _profileObtainAndVerify(f, df, x0, full_output=False):
    '''
    Find the solution of the profile likelihood and check
    that the algorithm has converged.
    '''
    x, cov, infodict, mesg, ier = leastsq(func=f,
                                          x0=x0,
                                          Dfun=df,
                                          maxfev=10000,
                                          full_output=True)

    if ier not in (1, 2, 3, 4):
        raise EstimateError(
            "Failure in estimation of the profile likelihood: " + mesg)

    if full_output:
        output = dict()
        output['cov'] = cov
        output['infodict'] = infodict
        output['mesg'] = mesg
        output['ier'] = ier
        return x, output
    else:
        return x
Exemplo n.º 3
0
def _profileObtainViaNuisance(theta,
                              xhat,
                              i,
                              alpha,
                              obj,
                              lb,
                              ub,
                              obtainLB=True,
                              full_output=False):
    '''
    Find the profile likelihood confidence interval by iteratively minimizing
    over the nuisance parameters first then minimizing the parameter of
    interest, rather than tackling them all at the same time.

    Parameters
    ----------
    theta: array like
        current parameter values, used as the starting point
    xhat: array like
        maximum likelihood estimate of the parameters
    i: int
        index of the parameter of interest
    alpha: float
        level passed to the profile objective builders
        (presumably the confidence level — confirm against _profileF/_profileG)
    obj: object
        objective object; must provide gradient() and expose the current
        estimate as _theta
    lb: array like
        lower bounds of the parameters
    ub: array like
        upper bounds of the parameters
    obtainLB: bool, optional
        True (default) targets the lower confidence bound, False the upper
    full_output: bool, optional
        if True, also return the full result of the root finder

    Returns
    -------
    theta: :class:`numpy.ndarray`
        parameters at the confidence bound, and the root finder result
        when full_output is True

    Raises
    ------
    EstimateError
        when the root finder errors or fails, or when the bound found
        lies on the wrong side of the MLE
    '''
    xhatT = theta.copy()
    funcF = _profileF(xhat, i, alpha, obj)
    funcG = _profileG(xhat, i, alpha, obj)

    lbT, ubT = lb.copy(), ub.copy()

    p = len(theta)
    setIndex = set(range(p))
    activeIndex = list(setIndex - {i})

    # note that the bounds here need to be reversed: when searching for
    # the lower bound of parameter i, the nuisance parameters are capped
    # at the MLE (and symmetrically for the upper bound).
    if obtainLB:
        ubT[activeIndex] = xhat[activeIndex]
    else:
        lbT[activeIndex] = xhat[activeIndex]

    # objective in the parameter of interest only: the nuisance
    # parameters are re-optimized internally for every trial value.
    def ABC1(beta):
        xhatT[i] = beta
        xhatT[activeIndex] = _profileOptimizeNuisance(xhatT, i, obj, lbT, ubT)
        return funcG(xhatT)[i]

    # analytic Jacobian of ABC1 (currently unused, see note below).
    # BUG FIX: the original hard-coded parameter index 0 and passed the
    # unrestricted bounds (lb, ub); it now mirrors ABC1 with i/lbT/ubT.
    def ABCJac(beta):
        xhatT[i] = beta
        xhatT[activeIndex] = _profileOptimizeNuisance(xhatT, i, obj, lbT, ubT)
        g = funcG(xhatT)
        return np.array([2 * g[i] * obj.gradient()[i]])

    try:
        res = root(ABC1, xhatT[i])
    except Exception:
        raise EstimateError("Error in using the direct root finder")

    ## alternatives considered: root(ABC1, xhatT[i], jac=ABCJac), or a
    ## bounded scalar minimization of the squared residual

    if not res['success']:
        raise EstimateError(
            "Failure in estimation of the profile likelihood: " +
            res['message'])

    if obtainLB:
        # if we want the lower bound, then the estimate should not
        # be higher than the MLE
        if obj._theta[i] >= xhat[i]:
            raise EstimateError("Estimate higher than MLE")
    else:
        if obj._theta[i] <= xhat[i]:
            raise EstimateError("Estimate lower than MLE")

    res['method'] = 'Nested Minimization'
    if full_output:
        return obj._theta.copy(), res
    return obj._theta.copy()