def kineticsTest():
    """Tests kinetics data for paper.

    Builds a kinetics-specific config, points the pipeline at a single
    replicate folder of Michaelis-Menten data and runs it end to end.
    Fix: the original constructed Pipeline() twice and discarded the
    first instance; only one is created now.
    """
    colheaderlabels = 'wt 5,wt 3,wt 2,68 5,68 3,68 2,138 5,138 3,138 2,248 5,248 3,248 2'
    rowheaderlabels = '3.2,1.6,0.8,0.4,0.2,0.1,0.05,0.025'
    conf = {'format': 'kineticsdata', 'delimeter': 'tab', 'rowstart': 3,
            'colend': 12, 'rowrepeat': 9,
            'colheaderlabels': colheaderlabels,
            'rowheaderlabels': rowheaderlabels,
            'decimalsymbol': ',', 'xformat': '%M:%S',
            'groupbyname': 1, 'parsenamesindex': 2, 'parsemethod': 'numeric',
            'model1': 'linear', 'model2': 'Michaelis-Menten',
            'model3': '1 pKa 2 Chemical shifts',
            'variable1': 'a', 'variable2': 'Km', 'variable3': 'pKa',
            #'xerror':.1,'yerror':0.05,
            }
    p = Pipeline()
    p.createConfig('temp.conf', **conf)
    path = 'testfiles/kineticsdata/jan/setF/MM/rep1'
    p.addFolder(path)
    p.run()
    return
def peakDetectionTest(path=None, noise=0.08):
    """Use pre-processing funcs to detect peaks.

    Creates simulated spectral data with known peaks, runs the pipeline
    with smoothing/baseline-correction/peak-detection functions, then
    scores how many of the known peaks were recovered.

    Args:
        path: directory for the generated data file (defaults to "testfiles").
        noise: noise level passed to the simulated-data generator.

    Returns:
        Mean fraction of true peaks recovered across all datasets.
    """
    if path is None:  # fix: compare to None by identity, not equality
        path = "testfiles"
    names = Utilities.createRandomStrings(8, 6)
    fname = os.path.join(path, 'spectraldata.txt')
    peaks = Utilities.createSimulatedSpectralData(fname, names, noise=noise)
    conf = {'format': 'databycolumn', 'saveplots': 1,
            'marker': '-', 'markers': '-,x', 'alpha': 0.7,
            'normalise': 1,
            'function1': 'smooth', 'function2': 'baselinecorrection',
            'function3': 'detectpeaks'}
    p = Pipeline()
    p.createConfig('temp.conf', **conf)
    p.openRaw(fname)
    results = p.run()
    # compare predicted peaks against the known simulated ones;
    # score = |known ∩ predicted| / |known| for each dataset
    successrates = []
    res = results[results.keys()[0]]
    for name in peaks:
        orig = set(peaks[name])
        pred = set(res[name][0])
        s = float(len(orig.intersection(pred))) / len(orig)
        successrates.append(s)
    return np.mean(successrates)
def replicatesTest():
    """Tests handling of replicates"""
    conf = {'format': 'databycolumn',
            'groupbyname': 1, 'parsenamesindex': 1, 'parsemethod': 'numeric',
            'replicates': 1,
            'model1': 'linear', 'variable1': 'a',
            'model2': 'sigmoid', 'variable2': 'tm'}
    p = Pipeline()
    p.createConfig('temp.conf', **conf)
    # three replicate sub-folders, each filled with the same dataset names
    path = 'testfiles/replicates'
    Utilities.createDirectory(path)
    names = Utilities.createRandomStrings(3, 6)
    for rep in ('rep1', 'rep2', 'rep3'):
        Utilities.createGroupedData(os.path.join(path, rep), names=names)
    p.addFolder(path)
    p.run()
    return
def fitPropagationTest(): """Tests the propagation of fit data direct from a dict - no importing""" start = time.time() p = Pipeline() conf = { 'model1': 'linear', 'model2': 'Michaelis-Menten', 'model3': 'sigmoid', 'variable1': 'a', 'variable2': 'Km', 'variable3': 'tm', #'xerror':.1,'yerror':0.05, } p.createConfig('temp.conf', **conf) data = Utilities.createNestedData() Em = EkinProject() E, fits = p.processFits(data, Em=Em) print 'final fits', fits fname = os.path.join(p.workingdir, 'results') Em.saveProject(fname) p.saveEkinPlotstoImages(Em, fname) print 'completed fit propagation test' print 'took %s seconds' % round((time.time() - start), 2) print '-------------------' return
def multiFolderTest():
    """Handling of multiple folders in a hierarchy with replicates"""
    conf = {'format': 'databycolumn',
            'groupbyname': 1, 'parsenamesindex': 0, 'parsemethod': 'numeric',
            'replicates': 1,
            #'saveplots':1,
            'model1': 'linear', 'variable1': 'a',
            'model2': 'sigmoid', 'variable2': 'tm'}
    p = Pipeline()
    p.createConfig('temp.conf', **conf)
    path = 'testfiles/multifolders'
    Utilities.createDirectory(path)
    names = Utilities.createRandomStrings(3, 6)
    today = str(datetime.date.today())
    # one sub-folder per pH value, three replicate files in each
    for ph in range(2, 10):
        #sigmoid dependence of the slopes on 'ph'
        #so we know we are getting the right results
        val = 1 / (1 + exp((ph - 4) / 1.04))
        folder = os.path.join(path, 'ph' + str(ph))
        Utilities.createDirectory(folder)
        for rep in range(1, 4):
            fname = os.path.join(folder, 'r' + str(rep) + '_' + today + '.txt')
            Utilities.createTempData(fname, names, val)
    p.addFolder(path)
    p.run()
    return
def doTest(info, name='test', path='testfiles'): print 'running test %s' % name p = Pipeline() conf = info[0] filename = info[1] confpath = os.path.join(p.defaultpath, 'temp.conf') p.createConfig(confpath, **conf) lines = p.openRaw(os.path.join(path, filename)) data = p.doImport(lines) if p.model1 != '': p.run() return
def groupbyFieldsTest():
    """Tests grouping by fields function using NMRdata"""
    conf = {'format': 'databycolumn',
            'colheaderlabels': '15N,1H',
            'parsenamesindex': 0, 'parsemethod': 'numeric',
            'delimeter': ' ',
            'groupbyfields': 1,
            'extension': '.inp'}
    pipe = Pipeline()
    pipe.createConfig('temp.conf', **conf)
    pipe.addFolder('testfiles/NMRdata')
    pipe.run()
    return
def multiFileTest():
    """Test handling of single datasets per file with grouping per filename"""
    path = 'testfiles/singlefiles'
    Utilities.createSingleFileData(path)
    p = Pipeline()
    p.createConfig('temp.conf',
                   **{'format': 'databycolumn',
                      'groupbyname': 1,
                      'parsenamesindex': '0,1', 'parsemethod': 'both',
                      'model1': 'linear', 'variable1': 'a',
                      'model2': 'sigmoid', 'variable2': 'tm'})
    p.addFolder(path)
    p.run()
    return
def preProcessingTest():
    """Test processing steps like differentation of the data"""
    # generate simulated CD spectra to pre-process
    datadir = "testfiles"
    labels = Utilities.createRandomStrings(8, 6)
    fname = os.path.join(datadir, 'preprocessingtest.txt')
    Utilities.createCDData(fname, labels, 300, .5)
    conf = {'format': 'databycolumn',
            'model1': 'gaussian',
            'function1': 'differentiate',
            'function2': 'gaussiansmooth',
            'iterations': 100,
            'variable1': 'a',
            'saveplots': 1}
    p = Pipeline()
    p.createConfig('temp.conf', **conf)
    p.openRaw(fname)
    p.run()
    return
def groupedFilesTest():
    """Tests the processing and grouping of multiple files with the same sets
       of datasets in all files"""
    folder = 'testfiles/grouped'
    Utilities.createGroupedData(folder)
    p = Pipeline()
    p.createConfig('temp.conf',
                   format='databycolumn',
                   groupbyname=1,
                   parsenamesindex=0,
                   parsemethod='numeric',
                   model1='linear', variable1='a',
                   model2='sigmoid', variable2='tm')
    p.addFolder(folder)
    p.run()
    return
def main():
    """Command line entry point.

    Parses options for a conf file, a raw data file, a folder of raw
    files, or a saved project file, then configures and runs a Pipeline.
    A --project file overrides all other inputs.
    Fix: compare option values to None with identity (`is not None`)
    rather than equality.
    """
    from optparse import OptionParser
    parser = OptionParser()
    parser.add_option("-c", "--conf", dest="conf",
                      help="Provide a conf file", metavar="FILE")
    parser.add_option("-f", "--file", dest="file",
                      help="Raw file", metavar="FILE")
    parser.add_option("-d", "--dir", dest="directory",
                      help="Folder of raw files")
    parser.add_option("-p", "--project", dest="project",
                      help="Project file", metavar="FILE")
    opts, remainder = parser.parse_args()
    P = Pipeline()
    if opts.project is not None:
        # a saved project replaces the freshly-constructed pipeline
        P = loadProject(opts.project)
    else:
        if opts.conf is not None:
            P.parseConfig(opts.conf)
        if opts.file is not None:
            P.openRaw(opts.file)
        if opts.directory is not None:
            P.addFolder(opts.directory)
    P.run()
def setUp(self):
    """Prepare a fresh Pipeline and the shared conf/test-file paths."""
    self.p = Pipeline()
    self.confpath = os.path.join(self.p.defaultpath, 'temp.conf')
    # test data lives next to the installed DataPipeline module
    pkgdir = os.path.dirname(DataPipeline.__file__)
    self.filepath = os.path.join(pkgdir, 'testfiles')