Example 1
0
  def T(self,x,out=None):
    """Compute the antiderivative of *x*, shifted to have zero mean.

    The running sum of ``self.h * x[k]`` is accumulated in ``self.scratch``
    at the cell endpoints, then adjacent endpoint values are averaged to
    produce the midpoint values written into *out*.

    :param x: input vector; only its ``core()`` array is read.
    :param out: optional vector to receive the result; a fresh
      ``NumpyVector`` of the same shape is allocated when omitted.
    :return: *out*, holding the zero-mean antiderivative.
    """
    x = x.core()
    scratch = self.scratch

    if out is None:
      # np.empty is the documented way to allocate an uninitialized array;
      # the original np.ndarray(shape) constructor call is discouraged by
      # the NumPy docs (same result here: every entry is overwritten below).
      out = NumpyVector(np.empty(x.shape))

    # scratch[k] accumulates the integral up to the k-th grid node.
    scratch[0] = 0
    for k in range(self.N):
      scratch[k+1] = scratch[k] + self.h*x[k]

    # Average adjacent endpoint values to get the midpoint samples.
    y=out.core()
    for k in range(self.N):
      y[k] = 0.5*(scratch[k] + scratch[k+1])

    # Subtract the mean in place so the result has mean zero.
    ybar = sum(y)/self.N
    y -= ybar

    return out
Example 2
0
                    help='standard deviation of added noise')
  parser.add_option("-a","--algorithm",type='choice',choices=['sd','nlcg','ign'],default='nlcg',
                    help="algorithm to use [sd,nlcg,ign]: sd=steepest descent, nlcg=nonlinear conjugate gradient (default), ign=incomplete Gauss-Newton")
  parser.add_option("-t","--test_linearization",action='store_true',
                    help='test linearization and adjoint (and exit)')
  parser.add_option("-d","--discrepancy_fraction",type='float',default=1.0,metavar="D",
                    help='remove the fraction D of the actual error (D<1 to overfit and D>1 to underfit)')

  (options, args) = parser.parse_args()

  # Grid size and noise level come from options declared earlier in the
  # file (not visible in this chunk).
  N = options.node_count
  Linf_error = options.l_infty_noise

  # Build the forward problem on an N-node grid and a synthetic "true"
  # coefficient beta(x) = sin(2*pi*x) on that grid.
  forward_problem = CoeffForwardProblem(N)
  x = forward_problem.x
  beta=NumpyVector((N,))
  beta.core()[:] = np.sin(x*2*pi)


  if options.test_linearization:    
    # Sanity-check the linearization and its adjoint at beta using random
    # directions, then report relative errors.
    # NOTE(review): the two print statements below are Python 2 syntax.
    h = siple.rand.random_vector(beta,scale=1.)
    
    # Presumably testT compares the linearized forward map against a
    # finite-difference approximation with step t — confirm against the
    # definition of forward_problem.testT.
    (Fp1,Fp2) =  forward_problem.testT(beta,h,t=1e-6)
    dF = Fp1.copy(); dF -= Fp2
    print 'Relative T error: %g' % (dF.norm('linf')/Fp1.norm('linf'))
    
    # Adjoint check: testTStar presumably returns the two inner products
    # <T h, g> and <h, T* g>, which should agree — verify against its
    # definition.
    g = siple.rand.random_vector(beta,scale=1.)
    (ip1,ip2) = forward_problem.testTStar(beta,h,g)
    print 'Relative T* error: %g' % (abs(ip1-ip2)/abs(ip1))

  # The inversion branch continues past the end of this chunk.
  else: