Example #1
import numpy as np

def airpls_baseline(intensities, smoothness_param=100, max_iters=10,
                    conv_thresh=0.001, verbose=False):
  '''
  Baseline correction using adaptive iteratively reweighted penalized least
  squares, also known as airPLS (2010).
  http://pubs.rsc.org/EN/content/articlehtml/2010/an/b922045c
  https://code.google.com/p/airpls/
  https://airpls.googlecode.com/svn/trunk/airPLS.py
  '''
  smoother = WhittakerSmoother(intensities, smoothness_param)
  total_intensity = np.abs(intensities).sum()
  w = np.ones(intensities.shape[0])
  for i in range(1, max_iters+1):
    baseline = smoother.smooth(w)
    # Compute error (sum of distances below the baseline).
    corrected = intensities - baseline
    mask = corrected < 0
    baseline_error = -corrected[mask]
    total_error = baseline_error.sum()
    # Check convergence as a fraction of total intensity.
    conv = total_error / total_intensity
    if verbose:
      print(i, conv)
    if conv < conv_thresh:
      break
    # Set peak weights to zero.
    w[~mask] = 0
    # Set baseline weights.
    baseline_error /= total_error
    w[mask] = np.exp(i*baseline_error)
    w[0] = np.exp(i*baseline_error.min())
    w[-1] = w[0]
  else:
    print('airPLS did not converge in %d iterations' % max_iters)
  return baseline
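
All three snippets call a WhittakerSmoother helper that is not shown on this page. The sketch below is a minimal, hypothetical version of such a penalized least squares (Whittaker/Eilers) smoother: the constructor signature matches the calls above, but the body and the default deriv_order are assumptions, not the library's actual implementation.

import numpy as np
import scipy.sparse
import scipy.sparse.linalg

class WhittakerSmoother(object):
  '''Hypothetical sketch of a weighted Whittaker smoother.
  smooth(w) solves (diag(w) + lam * D'D) z = diag(w) y for z, where D is the
  deriv_order-th finite-difference operator and lam is smoothness_param.'''

  def __init__(self, signal, smoothness_param, deriv_order=2):
    self.y = np.asarray(signal, dtype=float)
    n = self.y.shape[0]
    # Build the sparse finite-difference operator of the requested order.
    D = scipy.sparse.eye(n, format='csr')
    for _ in range(deriv_order):
      D = D[1:] - D[:-1]
    # Precompute the fixed penalty term lam * D'D.
    self.penalty = smoothness_param * (D.T @ D)

  def smooth(self, w):
    # Solve the weighted, penalized least squares system for the smooth curve.
    w = np.asarray(w, dtype=float)
    W = scipy.sparse.diags(w, 0)
    A = scipy.sparse.csc_matrix(W + self.penalty)
    return scipy.sparse.linalg.spsolve(A, w * self.y)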
Example #2
import numpy as np

def als_baseline(intensities, asymmetry_param=0.05, smoothness_param=1e6,
                 max_iters=10, conv_thresh=1e-5, verbose=False):
  '''Perform asymmetric least squares baseline removal.
  * http://www.science.uva.nl/~hboelens/publications/draftpub/Eilers_2005.pdf

  smoothness_param: Relative importance of smoothness of the predicted response.
  asymmetry_param (p): if y > z, w = p, otherwise w = 1-p.
                       Setting p=1 is effectively a hinge loss.
  '''
  smoother = WhittakerSmoother(intensities, smoothness_param, deriv_order=2)
  # Rename p for concision.
  p = asymmetry_param
  # Initialize weights.
  w = np.ones(intensities.shape[0])
  for i in range(max_iters):
    z = smoother.smooth(w)
    mask = intensities > z
    # Asymmetric weights: p where the data lies above the fit, 1-p below.
    new_w = p*mask + (1-p)*(~mask)
    conv = np.linalg.norm(new_w - w)
    if verbose:
      print(i+1, conv)
    if conv < conv_thresh:
      break
    w = new_w
  else:
    print('ALS did not converge in %d iterations' % max_iters)
  return z
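
For illustration, a usage sketch on a synthetic spectrum; the made-up signal below and the reliance on the default asymmetry_param and smoothness_param are assumptions for this example, not part of the original source.

import numpy as np

x = np.linspace(0, 10, 500)
# One sharp peak sitting on a sloping, offset baseline.
spectrum = np.exp(-(x - 5)**2 / 0.05) + 0.05 * x + 1.0
baseline = als_baseline(spectrum)
corrected = spectrum - baseline  # slope and offset removed, peak preserved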
Example #3
import scipy.signal

def fabc_baseline(intensities, dilation_param=50, smoothness_param=1e3):
  '''Fully Automatic Baseline Correction, by Carlos Cobas (2006).
  http://www.sciencedirect.com/science/article/pii/S1090780706002266
  '''
  # CWT with a Ricker (Mexican hat) wavelet acts like a smoothed second
  # derivative; its squared magnitude is large at peaks and small on baseline.
  cwt = scipy.signal.cwt(intensities, scipy.signal.ricker, (dilation_param,))
  dY = cwt.ravel()**2

  # Classify low-power points as baseline, forcing both endpoints to baseline.
  is_baseline = iterative_threshold(dY)
  is_baseline[0] = True
  is_baseline[-1] = True

  # Smooth with the mask as weights: non-baseline points get zero weight.
  smoother = WhittakerSmoother(intensities, smoothness_param, deriv_order=1)
  return smoother.smooth(is_baseline)
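
fabc_baseline also relies on an iterative_threshold helper that is not shown here. Following the idea in Cobas (2006) of separating baseline points by thresholding the squared CWT coefficients until the classification stabilizes, a hypothetical stand-in (the name matches the call above, but num_stds, max_iters, and the body are assumptions) could look like this:

import numpy as np

def iterative_threshold(power, num_stds=3, max_iters=100):
  '''Hypothetical sketch of the thresholding helper used by fabc_baseline.
  Points whose power stays below mean + num_stds * std of the current baseline
  set are kept as baseline; repeat until the classification stops changing.'''
  is_baseline = np.ones(power.shape[0], dtype=bool)
  for _ in range(max_iters):
    selected = power[is_baseline]
    thresh = selected.mean() + num_stds * selected.std()
    new_mask = power < thresh
    if np.array_equal(new_mask, is_baseline):
      break
    is_baseline = new_mask
  return is_baseline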