Example #1
from pyms.GCMS.IO.ANDI import ANDI_reader
from pyms.IntensityMatrix import build_intensity_matrix_i
from pyms.Noise.SavitzkyGolay import savitzky_golay_im
from pyms.TopHat import tophat_im


def cdf_processing(fpath, structure):
    # suppress_stdout() is a project-local helper (not part of PyMassSpec)
    # that silences the reader's console output
    with suppress_stdout():
        # Read CDF
        dat = ANDI_reader(fpath)
        # Construct intensity matrix (unit/integer m/z binning), shape ~ (n_scan, n_mz)
        im = build_intensity_matrix_i(dat)
        # Basic noise and baseline corrections
        smooth = savitzky_golay_im(im)
        norm = tophat_im(smooth, struct=structure)
    return norm
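
A minimal usage sketch for the function above; the file path is a placeholder and the "1.5m" structural element simply mirrors the other examples on this page:

# Illustrative call: "data/sample.cdf" is a placeholder path; "1.5m" means a
# structural element of 1.5 minutes, as used in the other examples here.
corrected_im = cdf_processing("data/sample.cdf", structure="1.5m")
# corrected_im is an IntensityMatrix with smoothed, baseline-corrected intensities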
Example #2
def test_tophat_im(im):
    # Use TopHat baseline correction on all IC's in the IM
    im_base_corr = tophat_im(im, struct="1.5m")
    assert isinstance(im_base_corr, IntensityMatrix)

    # find the IC for derivatisation product ion before baseline correction
    ic = im.get_ic_at_index(73)
    assert isinstance(ic, IonChromatogram)

    # find the IC for derivatisation product ion after baseline correction
    ic_base_corr = im_base_corr.get_ic_at_index(73)
    assert isinstance(ic_base_corr, IonChromatogram)
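
The test above assumes imports and an im fixture that are not part of the snippet. A minimal sketch of that scaffolding, assuming the fixture is built from the same gc01_0812_066.cdf file as Example #3 (the file path and fixture scope are assumptions):

import pytest

from pyms.GCMS.IO.ANDI import ANDI_reader
from pyms.IntensityMatrix import IntensityMatrix, build_intensity_matrix
from pyms.IonChromatogram import IonChromatogram
from pyms.TopHat import tophat_im


@pytest.fixture(scope="module")
def im():
    # Read the raw netCDF data once and build the intensity matrix the test expects
    data = ANDI_reader("data/gc01_0812_066.cdf")
    return build_intensity_matrix(data)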
Example #3
"""proc.py
"""

from pyms.GCMS.IO.ANDI import ANDI_reader
from pyms.IntensityMatrix import build_intensity_matrix
from pyms.TopHat import tophat_im

# This file has been replaced by jupyter/BaselineCorrection.ipynb

# read the raw data
andi_file = "data/gc01_0812_066.cdf"
data = ANDI_reader(andi_file)

# build an intensity matrix object from the data
im = build_intensity_matrix(data)

# Use TopHat baseline correction on all IC's in the IM
print("Smoothing ...")
im_base_corr = tophat_im(im, struct="1.5m")
print("Done")

# find the IC for derivatisation product ion before baseline correction
ic = im.get_ic_at_index(73)

# find the IC for derivatisation product ion after baseline correction
ic_base_corr = im_base_corr.get_ic_at_index(73)

ic.write("output/ic.dat", minutes=True)
ic_base_corr.write("output/ic_smooth.dat", minutes=True)
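
One way to see what the correction did is to plot the raw and corrected chromatograms together. A sketch using matplotlib, assuming the time_list and intensity_array accessors available on recent PyMassSpec IonChromatogram objects:

import matplotlib.pyplot as plt

# time_list is in seconds; convert to minutes to match the files written above
times = [t / 60.0 for t in ic.time_list]

plt.plot(times, ic.intensity_array, label="IC at index 73, raw")
plt.plot(times, ic_base_corr.intensity_array, label="IC at index 73, TopHat corrected")
plt.xlabel("Retention time (minutes)")
plt.ylabel("Intensity")
plt.legend()
plt.show()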
Example #4
def test_im_errors(self, im, struct, expects):
    with pytest.raises(expects):
        tophat_im(im, struct)
Example #5
def test_im_errors(self, obj):
    with pytest.raises(TypeError):
        tophat_im(obj, "1m")
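
Examples #4 and #5 are test methods whose parameters come from fixtures or parametrisation not shown on this page. A sketch of parametrisation that could drive Example #4, assuming tophat_im raises TypeError when im is not an IntensityMatrix (consistent with Example #5); the parameter sets are illustrative only:

import pytest

from pyms.TopHat import tophat_im


class TestTopHatIM:

    @pytest.mark.parametrize(
        "im, struct, expects",
        [
            ("not an intensity matrix", "1.5m", TypeError),  # wrong im type
            (None, "1.5m", TypeError),                       # wrong im type
        ],
    )
    def test_im_errors(self, im, struct, expects):
        with pytest.raises(expects):
            tophat_im(im, struct)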