Example #1
 def solver(dms, regs, nparams=nparams):
     """
     Inverse problem solver for a linear problem: assemble the gradient and
     Hessian from the data modules and regularizers, then take a single step.
     """
     gradientchain = [_zerovector(nparams)]
     gradientchain.extend(itertools.chain(dms, regs))
     gradient = reduce(lambda g, m: m.sum_gradient(g), gradientchain)
     hessianchain = [_zeromatrix((nparams, nparams))]
     hessianchain.extend(itertools.chain(dms, regs))
     hessian = reduce(lambda h, m: m.sum_hessian(h), hessianchain)
     p = linsys_solver(hessian, -1*gradient)
     residuals = [d.data - d.get_predicted(p) for d in dms]
     misfit = sum(d.get_misfit(res) for d, res in itertools.izip(dms,
                  residuals))
     goal = misfit + sum(r.value(p) for r in regs)
     yield {'estimate':p, 'misfits':[misfit], 'goals':[goal],
            'residuals':residuals}
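
The snippet above is a single-step solver: it folds sum_gradient and sum_hessian over the data modules and regularizers starting from zero arrays, then solves the resulting system once, which is only appropriate when the problem is linear. The free names (nparams, _zerovector, _zeromatrix, linsys_solver) come from the enclosing scope in the original code, and the snippets are Python 2 (itertools.izip, xrange, reduce as a builtin). The sketch below is a hypothetical, self-contained reconstruction of the assumed interface using NumPy stand-ins and a toy straight-line fit; ToyLinearData and the helper bindings are illustrative assumptions, not the library's own modules.

import numpy
from functools import reduce  # a builtin in Python 2; the import also covers Python 3


class ToyLinearData(object):
    """Hypothetical data module: least-squares fit of y = a*x + b."""

    def __init__(self, x, data):
        self.x = numpy.asarray(x, dtype=float)
        self.data = numpy.asarray(data, dtype=float)
        self.jacobian = numpy.column_stack([self.x, numpy.ones_like(self.x)])

    def get_predicted(self, p):
        return self.jacobian.dot(p)

    def get_misfit(self, residuals):
        return float(numpy.dot(residuals, residuals))

    def sum_gradient(self, gradient, p=None, residuals=None):
        # Linear problem: the gradient of ||d - J*p||**2 at p = 0 is -2*J.T*d
        res = self.data if residuals is None else residuals
        return gradient - 2.0 * self.jacobian.T.dot(res)

    def sum_hessian(self, hessian, p=None):
        return hessian + 2.0 * self.jacobian.T.dot(self.jacobian)


# Stand-ins for the helpers the snippet takes from its enclosing scope
_zerovector = numpy.zeros
_zeromatrix = numpy.zeros
linsys_solver = numpy.linalg.solve

x = numpy.linspace(0.0, 10.0, 50)
dms = [ToyLinearData(x, 3.0 * x + 1.0)]
regs = []
nparams = 2

# The same single step the generator above performs
gradient = reduce(lambda g, m: m.sum_gradient(g),
                  [_zerovector(nparams)] + dms + regs)
hessian = reduce(lambda h, m: m.sum_hessian(h),
                 [_zeromatrix((nparams, nparams))] + dms + regs)
estimate = linsys_solver(hessian, -1 * gradient)
print(estimate)  # approximately [3.0, 1.0]

Solving the accumulated system H*p = -g with the least-squares gradient and Hessian recovers the slope and intercept in one step, which is the whole point of the single-yield generator above.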
Example #2
 def solver(dms, regs, initial=initial_array, maxit=maxit, tol=tol):
     """
     Inverse problem solver using Newton's method.
     """
     if len(dms) == 0:
         raise ValueError("Need at least 1 data module. None given")
     p = initial
     nparams = len(p)
     residuals = [d.data - d.get_predicted(p) for d in dms]
     misfit = sum(
         d.get_misfit(res) for d, res in itertools.izip(dms, residuals))
     goal = misfit + sum(r.value(p) for r in regs)
     misfits = [misfit]
     goals = [goal]
     yield {
         'estimate': p,
         'misfits': misfits,
         'goals': goals,
         'residuals': residuals
     }
     for it in xrange(maxit):
         gradient = _zerovector(nparams)
         for d, res in itertools.izip(dms, residuals):
             gradient = d.sum_gradient(gradient, p, res)
         for r in regs:
             gradient = r.sum_gradient(gradient, p)
         hessian = _zeromatrix((nparams, nparams))
         for m in itertools.chain(dms, regs):
             hessian = m.sum_hessian(hessian, p)
         p = p + linsys_solver(hessian, -1 * gradient)
         residuals = [d.data - d.get_predicted(p) for d in dms]
         misfit = sum(
             d.get_misfit(res) for d, res in itertools.izip(dms, residuals))
         goal = misfit + sum(r.value(p) for r in regs)
         misfits.append(misfit)
         goals.append(goal)
         yield {
             'estimate': p,
             'misfits': misfits,
             'goals': goals,
             'residuals': residuals
         }
         # Stop if the goal function decreased, but by a relative amount within the tolerance (converged)
         if (goals[-1] < goals[-2] and abs(
             (goals[-1] - goals[-2]) / goals[-2]) <= tol):
             break
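
The Newton solver above is written as a generator: every pass through the loop appends to misfits and goals and yields the same dictionaries, so a caller sees the full convergence history at each iteration and can log, plot, or abort while the inversion is still running. Below is a hedged sketch of that consumption pattern; fake_newton is a stand-in generator with the same yield contract, not part of the library, so the sketch runs on its own.

def fake_newton(maxit=5):
    """Stand-in generator with the same changeset contract as the solver above."""
    goals = []
    estimate = 10.0
    for it in range(maxit):
        estimate *= 0.5              # pretend each iteration halves the estimate
        goals.append(estimate ** 2)  # pretend goal function value
        yield {'estimate': estimate, 'misfits': list(goals),
               'goals': list(goals), 'residuals': []}


changeset = None
for changeset in fake_newton():
    # every yield exposes the history accumulated so far, so progress can be
    # reported while the inversion is still going
    print("iteration %d: goal = %g"
          % (len(changeset['goals']), changeset['goals'][-1]))
print("final estimate: %g" % changeset['estimate'])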
Example #3
 def solver(dms, regs, nparams=nparams):
     """
     Inverse problem solver for a linear problem: assemble the gradient and
     Hessian from the data modules and regularizers, then take a single step.
     """
     gradientchain = [_zerovector(nparams)]
     gradientchain.extend(itertools.chain(dms, regs))
     gradient = reduce(lambda g, m: m.sum_gradient(g), gradientchain)
     hessianchain = [_zeromatrix((nparams, nparams))]
     hessianchain.extend(itertools.chain(dms, regs))
     hessian = reduce(lambda h, m: m.sum_hessian(h), hessianchain)
     p = linsys_solver(hessian, -1 * gradient)
     residuals = [d.data - d.get_predicted(p) for d in dms]
     misfit = sum(
         d.get_misfit(res) for d, res in itertools.izip(dms, residuals))
     goal = misfit + sum(r.value(p) for r in regs)
     yield {
         'estimate': p,
         'misfits': [misfit],
         'goals': [goal],
         'residuals': residuals
     }
Example #4
 def solver(dms, regs, initial=initial_array, maxit=maxit, tol=tol):
     """
     Inverse problem solver using Newton's method.
     """
     if len(dms) == 0:
         raise ValueError("Need at least 1 data module. None given")
     p = initial
     nparams = len(p)
     residuals = [d.data - d.get_predicted(p) for d in dms]
     misfit = sum(d.get_misfit(res)
                  for d, res in itertools.izip(dms, residuals))
     goal = misfit + sum(r.value(p) for r in regs)
     misfits = [misfit]
     goals = [goal]
     yield {'estimate':p, 'misfits':misfits, 'goals':goals,
            'residuals':residuals}
     for it in xrange(maxit):
         gradient = _zerovector(nparams)
         for d, res in itertools.izip(dms, residuals):
             gradient = d.sum_gradient(gradient, p, res)
         for r in regs:
             gradient = r.sum_gradient(gradient, p)
         hessian = _zeromatrix((nparams, nparams))
         for m in itertools.chain(dms, regs):
             hessian = m.sum_hessian(hessian, p)
         p = p + linsys_solver(hessian, -1*gradient)
         residuals = [d.data - d.get_predicted(p) for d in dms]
         misfit = sum(d.get_misfit(res) for d, res in itertools.izip(dms,
                      residuals))
         goal = misfit + sum(r.value(p) for r in regs)
         misfits.append(misfit)
         goals.append(goal)
         yield {'estimate':p, 'misfits':misfits, 'goals':goals,
                'residuals':residuals}
         # Stop if the goal function decreased, but by a relative amount within the tolerance (converged)
         if (goals[-1] < goals[-2] and
             abs((goals[-1] - goals[-2])/goals[-2]) <= tol):
             break
Example #5
 def solver(dms, regs, initial=initial, damp=damp, factor=factor,
     maxsteps=maxsteps, maxit=maxit, tol=tol):
     """
     Inverse problem solver using the Levenberg-Marquardt algorithm.
     """
     if len(dms) == 0:
         raise ValueError("Need at least 1 data module. None given")
     p = initial
     nparams = len(p)
     residuals = [d.data - d.get_predicted(p) for d in dms]
     misfit = sum(d.get_misfit(res)
                  for d, res in itertools.izip(dms, residuals))
     goal = misfit + sum(r.value(p) for r in regs)
     misfits = [misfit]
     goals = [goal]
     yield {'estimate':p, 'misfits':misfits, 'goals':goals,
            'residuals':residuals}
     step = damp
     for it in xrange(maxit):
         gradient = _zerovector(nparams)
         for d, res in itertools.izip(dms, residuals):
             gradient = d.sum_gradient(gradient, p, res)
         for r in regs:
             gradient = r.sum_gradient(gradient, p)
         # Negate the gradient once here so it is not redone inside the step-size loop below
         gradient *= -1
         hessian = _zeromatrix((nparams, nparams))
         for m in itertools.chain(dms, regs):
             hessian = m.sum_hessian(hessian, p)
         hessian_diag = hessian.diagonal()
         stagnation = True
         # The loop to determine the best step size
         for itstep in xrange(maxsteps):
             ptmp = p + linsys_solver(hessian + step*hessian_diag, gradient)
             restmp = [d.data - d.get_predicted(ptmp) for d in dms]
             misfit = sum(d.get_misfit(res) for d, res in itertools.izip(dms,
                          restmp))
             goal = misfit + sum(r.value(ptmp) for r in regs)
             if goal < goals[-1]:
                 # Don't let the damping factor be smaller than this
                 if step > 10.**(-10):
                     step /= factor
                 stagnation = False
                 break
             else:
                 # Don't let the damping factor be larger than this
                 if step < 10**(10):
                     step *= factor
                 else:
                     break
         if stagnation:
             if it == 0:
                 msg = "  Levenberg-Marquardt didn't take any steps"
             else:
                 msg = "  Levenberg-Marquardt finished: couldn't take a step"
             print >> sys.stderr, msg
             break
         p = ptmp
         residuals = restmp
         misfits.append(misfit)
         goals.append(goal)
         yield {'estimate':p, 'misfits':misfits, 'goals':goals,
                'residuals':residuals}
         # Stop if the relative change in the goal function is within the tolerance (converged)
         if abs((goals[-1] - goals[-2])/goals[-2]) <= tol:
             break
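
The inner itstep loop above is the Levenberg-Marquardt damping search: a trial step is computed with the Hessian diagonal scaled by step, accepted only if it lowers the goal function (in which case the damping is relaxed by factor), and otherwise the damping is increased and the step retried, up to maxsteps attempts before declaring stagnation. The standalone sketch below isolates that accept/reject logic on a toy quadratic goal; the toy_* functions, the damping matrix built with numpy.diag, and numpy.linalg.solve are assumptions for illustration, not the library's own modules or linear solver.

import numpy


def toy_goal(p):
    return float((p[0] - 3.0) ** 2 + 10.0 * (p[1] + 1.0) ** 2)


def toy_gradient(p):
    return numpy.array([2.0 * (p[0] - 3.0), 20.0 * (p[1] + 1.0)])


toy_hessian = numpy.diag([2.0, 20.0])

p = numpy.zeros(2)
step, factor, maxsteps, maxit, tol = 1.0, 10.0, 20, 50, 1e-8
goals = [toy_goal(p)]
for it in range(maxit):
    gradient = -toy_gradient(p)
    damping = numpy.diag(toy_hessian.diagonal())  # Marquardt scaling of the diagonal
    for itstep in range(maxsteps):
        ptmp = p + numpy.linalg.solve(toy_hessian + step * damping, gradient)
        goal = toy_goal(ptmp)
        if goal < goals[-1]:
            step /= factor  # the trial step lowered the goal: accept it, relax damping
            break
        step *= factor      # goal went up: damp harder and try a shorter step
    else:
        break               # no downhill step found within maxsteps: stagnation
    p = ptmp
    goals.append(goal)
    if abs((goals[-1] - goals[-2]) / goals[-2]) <= tol:
        break
print(p)  # converges to approximately [3.0, -1.0]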
Example #6
 def solver(dms,
            regs,
            initial=initial,
            damp=damp,
            factor=factor,
            maxsteps=maxsteps,
            maxit=maxit,
            tol=tol):
     """
     Inverse problem solver using the Levenberg-Marquardt algorithm.
     """
     if len(dms) == 0:
         raise ValueError("Need at least 1 data module. None given")
     p = initial
     nparams = len(p)
     residuals = [d.data - d.get_predicted(p) for d in dms]
     misfit = sum(
         d.get_misfit(res) for d, res in itertools.izip(dms, residuals))
     goal = misfit + sum(r.value(p) for r in regs)
     misfits = [misfit]
     goals = [goal]
     yield {
         'estimate': p,
         'misfits': misfits,
         'goals': goals,
         'residuals': residuals
     }
     step = damp
     for it in xrange(maxit):
         gradient = _zerovector(nparams)
         for d, res in itertools.izip(dms, residuals):
             gradient = d.sum_gradient(gradient, p, res)
         for r in regs:
             gradient = r.sum_gradient(gradient, p)
         # Negate the gradient once here so it is not redone inside the step-size loop below
         gradient *= -1
         hessian = _zeromatrix((nparams, nparams))
         for m in itertools.chain(dms, regs):
             hessian = m.sum_hessian(hessian, p)
         hessian_diag = hessian.diagonal()
         stagnation = True
         # The loop to determine the best step size
         for itstep in xrange(maxsteps):
             ptmp = p + linsys_solver(hessian + step * hessian_diag,
                                      gradient)
             restmp = [d.data - d.get_predicted(ptmp) for d in dms]
             misfit = sum(
                 d.get_misfit(res)
                 for d, res in itertools.izip(dms, restmp))
             goal = misfit + sum(r.value(ptmp) for r in regs)
             if goal < goals[-1]:
                 # Don't let the damping factor be smaller than this
                 if step > 10.**(-10):
                     step /= factor
                 stagnation = False
                 break
             else:
                 # Don't let the damping factor be larger than this
                 if step < 10**(10):
                     step *= factor
                 else:
                     break
         if stagnation:
             if it == 0:
                 msg = "  Levenberg-Marquardt didn't take any steps"
             else:
                 msg = "  Levenberg-Marquardt finished: couldn't take a step"
             print >> sys.stderr, msg
             break
         p = ptmp
         residuals = restmp
         misfits.append(misfit)
         goals.append(goal)
         yield {
             'estimate': p,
             'misfits': misfits,
             'goals': goals,
             'residuals': residuals
         }
         # Stop if the relative change in the goal function is within the tolerance (converged)
         if abs((goals[-1] - goals[-2]) / goals[-2]) <= tol:
             break