def testLoadMap():
    # loading records the source file name in the peak map meta data
    from_ = u"data/SHORT_MS2_FILE.mzXML"
    ds = ms.loadPeakMap(from_)
    assert osp.basename(ds.meta.get("source")) == osp.basename(from_)

    # round trip with a unicode path
    ms.storePeakMap(ds, u"temp_output/utilstest.mzML")
    ds2 = ms.loadPeakMap(u"temp_output/utilstest.mzML")
    assert len(ds) == len(ds2)

    # round trip with a plain str path
    ms.storePeakMap(ds2, "temp_output/utilstest.mzData")
    ds3 = ms.loadPeakMap("temp_output/utilstest.mzData")
    assert len(ds) == len(ds3)
def process(self, path):
    try:
        print "read ", path
        pm = ms.loadPeakMap(path)
    except Exception, e:
        print e
        print "reading FAILED"
        return None
def testMatchedFilter():
    pm = ms.loadPeakMap("data/test.mzXML")
    table = ms.runMatchedFilters(pm, destination="temp_output", configid="std",
                                 mzdiff=0, fwhm=50, steps=1, step=0.6)
    assert len(table) == 340, len(table)
    assert len(table.getColNames()) == 18, len(table.getColNames())
    assert len(table.getColTypes()) == 18
def process(self, path):
    import ms
    from ms._metabo import metaboFeatureFinder
    try:
        print "read ", path
        ds = ms.loadPeakMap(path)
    except Exception, e:
        print e
        print "reading FAILED"
        return None
def testCentwaveFeatureDetector():
    print "load"
    ds = ms.loadPeakMap("data/test_mini.mzXML")
    print "loaded"
    det = libms.RConnect.CentwaveFeatureDetector(ppm=3, peakwidth=(8, 13),
                                                 snthresh=40,
                                                 prefilter=(8, 10000),
                                                 mzdiff=1.5)
    assert det.__doc__ is not None
    table = det.process(ds)
    table.print_()
    assert len(table) == 1, len(table)
    assert len(table.getColNames()) == 16, len(table.getColNames())
    assert len(table.getColTypes()) == 16, len(table.getColTypes())
    assert "polarity" in table.getColNames()
def testRunCentwave():
    pm = ms.loadPeakMap("data/test_mini.mzXML")
    table = ms.runCentwave(pm, ppm=3, peakwidth=(8, 13), snthresh=40,
                           prefilter=(8, 10000), mzdiff=1.5)
    #runCentwave("data/test.mzXML", destination="temp_output", configid="std")
    #assert len(glob.glob("temp_output/test.csv")) == 1
    ##assert len(tables) == 1
    #table=tables[0]
    assert len(table) == 1, len(table)
    assert len(table.getColNames()) == 16, len(table.getColNames())
    assert len(table.getColTypes()) == 16
def test_ffm():
    import ms
    pm = ms.loadPeakMap("data/test.mzXML")
    pm = pm.extract(rtmin=6, rtmax=12, mzmin=350, mzmax=400)

    #ftab = ms.feature_detectors.runMetaboFeatureFinder(pm)
    #assert len(ftab) == 19, len(ftab)
    #ftab.print_()

    #ftab2 = ms.feature_detectors.runMetaboFeatureFinder(pm, config_id="std")
    #assert len(ftab2) == 30, len(ftab2)
    #ftab2.print_()

    ftab2 = ms.feature_detectors.runMetaboFeatureFinder(pm, ms_level=1)
    assert len(ftab2) == 19, len(ftab2)
    ftab2.print_()

    al1, al2 = ms.rtAlign([ftab2, ftab2], refTable=ftab2, destination=".",
                          nPeaks=10)
def run(integrator, areatobe, rmsetobe):
    assert len(str(integrator)) > 0

    # load the test peak map only once and cache it as a function attribute
    try:
        ds = run.ds
    except AttributeError:
        ds = run.ds = ms.loadPeakMap("data/SHORT_MS2_FILE.mzData")
    integrator.setPeakMap(ds)

    # integration window: rt range of spectra 0..30, m/z range taken from the
    # first spectrum
    rtmin = ds.spectra[0].rt
    rtmax = ds.spectra[30].rt
    mzmin = ds.spectra[0].peaks[10, 0]
    mzmax = ds.spectra[0].peaks[-10, 0]

    result = integrator.integrate(mzmin, mzmax, rtmin, rtmax, 1)
    area = result.get("area")
    rmse = result.get("rmse")
    print "area: is=%e tobe=%e" % (area, areatobe)
    print "rmse: is=%e tobe=%e" % (rmse, rmsetobe)

    # compare against the expected values with 1 % relative tolerance
    if area > 0:
        assert abs(area - areatobe) / areatobe < .01, area
    else:
        assert area == 0.0, area
    if rmse > 0:
        assert abs(rmse - rmsetobe) / rmsetobe < .01, rmse
    else:
        assert rmse == 0.0, rmse

    params = result.get("params")
    rts = [spec.rt for spec in ds.spectra]
    x, y = integrator.getSmoothed(rts, params)
    return x, y, params
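# Usage sketch for the run() helper above: it accepts any integrator object
# that provides setPeakMap(), integrate() and getSmoothed(). The integrator
# class, its import path and the reference area/rmse values below are
# hypothetical placeholders, not part of the original test code -- substitute
# a concrete integrator from libms and the expected numbers for your data.
def testIntegratorSketch():
    from libms.Integration import TrapezoidIntegrator  # hypothetical import
    x, y, params = run(TrapezoidIntegrator(), areatobe=1.2e7, rmsetobe=3.4e3)
    assert len(x) == len(y)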
def testPeakPicking():
    pp = libms.PeakPicking.PeakPickerHiRes()
    ds = ms.loadPeakMap("data/gauss_data.mzML")
    ds2 = pp.pickPeakMap(ds)
    assert len(ds) == len(ds2)
    assert ds2.spectra[0].peaks.shape == (9570, 2)
                              default=0.5)
    mode = gui.ChoiceItem("mode of operation",
                          ["explore", "targeted extraction"], default=1,
                          help="for explore mode no parameter table is needed")
    path_para = gui.FileOpenItem("parameter table", ("csv"))
    path_data = gui.FileOpenItem("data file to process",
                                 ("mzXML", "mzML", "mzData"))

frontend = SRMExplorer()
exit_code = frontend.show()
if exit_code == 1:  # means: ok button pressed
    print "LOAD DATA"
    peakmap = ms.loadPeakMap(frontend.path_data)
    if frontend.mode == 1:
        transitions = ms.loadCSV(frontend.path_para)
        # Edit parameter table
        required_columns = ["name", "fragment", "precursor", "rtmin", "rtmax"]
        assert transitions.hasColumns(*required_columns),\
            "Please check column names in parameter table !!"
        transitions.title = "SRM parameter table"
    else:
        transitions = None
    print "PROCESS DATA"
    result = process_srm_data(peakmap, transitions, frontend.delta_mz)
    # here we build the result:
    for i in range(1, len(table)):
        result = result.leftJoin(table[i])
    return result


def add_postfix_to_column_names(table, postfix):
    mapping = dict((name, name + postfix) for name in table.colNames)
    table.renameColumns(mapping)


if __name__ == "__main__":
    import libms.gui.DialogBuilder as gui

    class SRMExplorer(gui.WorkflowFrontend):
        n_digits = gui.IntItem("no of significant digits\nof precursor m/z",
                               default=2)
        path = gui.FileOpenItem("data file to process",
                                ("mzXML", "mzML", "mzData"))

    frontend = SRMExplorer()
    exit_code = frontend.show()
    if exit_code:
        print "LOAD DATA"
        peakmap = ms.loadPeakMap(frontend.path)
        print "PROCESS DATA"
        result = process_srm_data(peakmap, frontend.n_digits)
        ms.inspect(result)
def test():
    import ms
    pm = ms.loadPeakMap("emzed_files/example1.mzXML")
    t = metaboFeatureFinder(pm, epdet_width_filtering="auto")
    ms.inspect(t)