def main(options):

    # load TraML file
    targeted = pyopenms.TargetedExperiment()
    pyopenms.TraMLFile().load(options.traml_in, targeted)

    # Create empty helper objects: an (unused) empty SWATH map, a default RT
    # transformation and the experiment that will collect the output chromatograms
    empty_swath = pyopenms.MSExperiment()
    trafo = pyopenms.TransformationDescription()
    output = pyopenms.MSExperiment()

    # load input
    for infile in options.infiles:
        exp = pyopenms.MSExperiment()
        pyopenms.FileHandler().loadExperiment(infile, exp)

        transition_exp_used = pyopenms.TargetedExperiment()

        do_continue = True
        if options.is_swath:
            do_continue = pyopenms.OpenSwathHelper(
            ).checkSwathMapAndSelectTransitions(exp, targeted,
                                                transition_exp_used,
                                                options.min_upper_edge_dist)
        else:
            transition_exp_used = targeted

        if do_continue:
            # set up extractor and run
            tmp_out = pyopenms.MSExperiment()
            extractor = pyopenms.ChromatogramExtractor()
            extractor.extractChromatograms(exp, tmp_out, targeted,
                                           options.extraction_window,
                                           options.ppm, trafo,
                                           options.rt_extraction_window,
                                           options.extraction_function)
            # add all chromatograms to the output
            for chrom in tmp_out.getChromatograms():
                output.addChromatogram(chrom)

    dp = pyopenms.DataProcessing()
    pa = pyopenms.ProcessingAction.SMOOTHING
    dp.setProcessingActions(set([pa]))

    chromatograms = output.getChromatograms()
    for chrom in chromatograms:
        this_dp = chrom.getDataProcessing()
        this_dp.append(dp)
        chrom.setDataProcessing(this_dp)

    output.setChromatograms(chromatograms)

    pyopenms.MzMLFile().store(options.outfile, output)
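
# A minimal sketch of how the main() above could be driven from the command
# line; the snippet itself assumes "import pyopenms" at module level. Only the
# attribute names on "options" are taken from the function; the argparse flags,
# defaults and the "tophat" extraction function are assumptions, not part of
# the original script.
import argparse

def parse_options():
    parser = argparse.ArgumentParser(description="Extract chromatograms for a set of transitions")
    parser.add_argument("--traml_in", required=True, help="TraML file with the targeted transitions")
    parser.add_argument("--in", dest="infiles", nargs="+", required=True, help="input mzML file(s)")
    parser.add_argument("--out", dest="outfile", required=True, help="output mzML file")
    parser.add_argument("--is_swath", action="store_true", help="input files are SWATH maps")
    parser.add_argument("--min_upper_edge_dist", type=float, default=1.0)
    parser.add_argument("--extraction_window", type=float, default=0.05)
    parser.add_argument("--ppm", action="store_true", help="extraction window is given in ppm")
    parser.add_argument("--rt_extraction_window", type=float, default=-1.0)
    parser.add_argument("--extraction_function", default="tophat")
    return parser.parse_args()

if __name__ == '__main__':
    main(parse_options())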
Example #2
def algorithm(chromatogram_map, targeted, precursor_tolerance, product_tolerance, allow_unmapped=True, allow_double_mappings=False):

    output = copy.copy(chromatogram_map)
    output.clear(False)
    empty_chromats = []
    output.setChromatograms(empty_chromats)

    notmapped = 0
    for chrom in chromatogram_map.getChromatograms():
        mapped_already = False
        for transition in targeted.getTransitions():
            if (abs(chrom.getPrecursor().getMZ() - transition.getPrecursorMZ()) < precursor_tolerance and
                abs(chrom.getProduct().getMZ() - transition.getProductMZ()) < product_tolerance):
                if mapped_already:
                    this_peptide = targeted.getPeptideByRef(transition.getPeptideRef()).sequence
                    other_peptide = chrom.getPrecursor().getMetaValue("peptide_sequence")
                    print("Found mapping of", chrom.getPrecursor().getMZ(), "/", chrom.getProduct().getMZ(),
                          "to", transition.getPrecursorMZ(), "/", transition.getProductMZ())
                    print("Of peptide", this_peptide)
                    print("But the chromatogram is already mapped to", other_peptide)
                    if not allow_double_mappings:
                        raise Exception("Cannot map twice")
                mapped_already = True
                precursor = chrom.getPrecursor()
                peptide = targeted.getPeptideByRef(transition.getPeptideRef())
                precursor.setMetaValue("peptide_sequence", peptide.sequence)
                chrom.setPrecursor(precursor)
                chrom.setNativeID(transition.getNativeID())
        if not mapped_already:
            notmapped += 1
            print "Did not find a mapping for chromatogram", chrom.getNativeID()
            if not allow_unmapped: raise Exception("No mapping")
        else:
            output.addChromatogram(chrom)

    if notmapped > 0:
        print "Could not find mapping for", notmapped, "chromatogram(s)"


    dp = pyopenms.DataProcessing()
    pa = pyopenms.ProcessingAction.FORMAT_CONVERSION
    dp.setProcessingActions(set([pa]))

    chromatograms = output.getChromatograms()
    for chrom in chromatograms:
        this_dp = chrom.getDataProcessing()
        this_dp.append(dp)
        chrom.setDataProcessing(this_dp)

    output.setChromatograms(chromatograms)
    return output
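
# A possible way to drive algorithm() from files, assuming "import copy" and
# "import pyopenms" at module level. The file names and the 0.05 Th tolerances
# are purely illustrative.
chromatogram_map = pyopenms.MSExperiment()
pyopenms.MzMLFile().load("chromatograms.mzML", chromatogram_map)

targeted = pyopenms.TargetedExperiment()
pyopenms.TraMLFile().load("transitions.TraML", targeted)

mapped = algorithm(chromatogram_map, targeted, 0.05, 0.05,
                   allow_unmapped=True, allow_double_mappings=False)
pyopenms.MzMLFile().store("chromatograms_mapped.mzML", mapped)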
Example #3
def testDataProcessing(dp=pyopenms.DataProcessing()):
    """
    @tests:
     DataProcessing.__init__
     DataProcessing.getKeys
     DataProcessing.getMetaValue
     DataProcessing.getProcessingActions
     DataProcessing.getSoftware
     DataProcessing.isMetaEmpty
     DataProcessing.metaValueExists
     DataProcessing.removeMetaValue
     DataProcessing.setCompletionTime
     DataProcessing.setMetaValue
     DataProcessing.setProcessingActions
     DataProcessing.setSoftware
     DataProcessing.__eq__
     DataProcessing.__ge__
     DataProcessing.__gt__
     DataProcessing.__le__
     DataProcessing.__lt__
     DataProcessing.__ne__
     DataProcessing.clearMetaInfo
     DataProcessing.getCompletionTime
    """

    _testMetaInfoInterface(dp)

    assert dp == dp
    assert not dp != dp

    assert isinstance(dp.getCompletionTime().getDate(), str)
    assert isinstance(dp.getCompletionTime().getTime(), str)
    dp.clearMetaInfo()
    k = []
    dp.getKeys(k)
    assert k == []
    dp.getMetaValue  # attribute access only: checks that the method is exposed
    ac = dp.getProcessingActions()
    assert ac == set(())
    dp.setProcessingActions(ac)
    assert isinstance(dp.getSoftware().getName(), str)
    assert isinstance(dp.getSoftware().getVersion(), str)
    dp.isMetaEmpty()
    dp.metaValueExists  # attribute access only: checks that the method is exposed
    dp.removeMetaValue  # attribute access only: checks that the method is exposed
    dp.setCompletionTime(pyopenms.DateTime.now())
    s = dp.getSoftware()
    s.setName("pyopenms")
    dp.setSoftware(s)

    assert dp.getSoftware().getName() == "pyopenms"
Example #4
def _addDataProcessing(item, params):
    dp = item.getDataProcessing()
    p = pms.DataProcessing()
    p.setProcessingActions(set([pms.ProcessingAction.ALIGNMENT]))
    sw = p.getSoftware()
    sw.setName(os.path.basename(sys.argv[0]))
    sw.setVersion(pms.VersionInfo.getVersion())
    p.setSoftware(sw)
    p.setCompletionTime(pms.DateTime.now())

    for k, v in params.asDict().items():
        p.setMetaValue("parameter: "+k, pms.DataValue(v))

    dp.append(p)
    item.setDataProcessing(dp)
    return item
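
# Sketch of how the helper might be used after processing a feature map,
# assuming "import pyopenms as pms", "import os" and "import sys" at module
# level; the file names are illustrative only.
fmap = pms.FeatureMap()
pms.FeatureXMLFile().load("input.featureXML", fmap)

params = pms.Param()  # parameters of the tool that modified the map
fmap = _addDataProcessing(fmap, params)

pms.FeatureXMLFile().store("output.featureXML", fmap)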
Example #5
def _addDataProcessing(item, params, action):
    dp = item.getDataProcessing()
    p = pms.DataProcessing()
    p.setProcessingActions(set([action]))
    sw = p.getSoftware()
    sw.setName(os.path.basename(sys.argv[0]))
    sw.setVersion(pms.VersionInfo.getVersion())
    p.setSoftware(sw)
    p.setCompletionTime(
        pms.DateTime.now()
    )  # TODO: check if this is the reason for the many data processing entries

    for k, v in params.asDict().items():
        p.setMetaValue(b"parameter: " + k, v)

    dp.append(p)
    item.setDataProcessing(dp)
    return item
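
# Same idea as the previous example, but the processing action is chosen by the
# caller; PEAK_PICKING is just one possible value of the ProcessingAction enum.
fmap = _addDataProcessing(pms.FeatureMap(), pms.Param(),
                          pms.ProcessingAction.PEAK_PICKING)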
Example #6
def testFeatureMap():
    """
    @tests:
     FeatureMap.__init__
     FeatureMap.__add__
     FeatureMap.__iadd__
     FeatureMap.__radd__
     FeatureMap.__getitem__
     FeatureMap.clear
     FeatureMap.clearUniqueId
     FeatureMap.ensureUniqueId
     FeatureMap.getDataProcessing
     FeatureMap.getProteinIdentifications
     FeatureMap.getUnassignedPeptideIdentifications
     FeatureMap.getUniqueId
     FeatureMap.hasInvalidUniqueId
     FeatureMap.hasValidUniqueId
     FeatureMap.push_back
     FeatureMap.setDataProcessing
     FeatureMap.setProteinIdentifications
     FeatureMap.setUnassignedPeptideIdentifications
     FeatureMap.setUniqueIds
     FeatureMap.size
     FeatureMap.sortByIntensity
     FeatureMap.sortByMZ
     FeatureMap.sortByOverallQuality
     FeatureMap.sortByPosition
     FeatureMap.sortByRT
     FeatureMap.swap
     FeatureMap.updateRanges
    """
    fm = pyopenms.FeatureMap()
    _testUniqueIdInterface(fm)
    fm.clear()
    fm.clearUniqueId()

    f = pyopenms.Feature()
    fm.push_back(f)

    assert fm.size() == 1
    assert fm[0] == f

    fm.sortByIntensity()
    assert fm.size() == 1
    assert fm[0] == f

    fm.sortByIntensity(False)
    assert fm.size() == 1
    assert fm[0] == f

    fm.sortByPosition()
    assert fm.size() == 1
    assert fm[0] == f

    fm.sortByRT()
    assert fm.size() == 1
    assert fm[0] == f

    fm.sortByMZ()
    assert fm.size() == 1
    assert fm[0] == f

    fm.sortByOverallQuality()
    assert fm.size() == 1
    assert fm[0] == f

    fm2 = pyopenms.FeatureMap()

    fm.swap(fm2)
    assert fm2.size() == 1
    assert fm2[0] == f

    assert fm.size() == 0

    fm2.updateRanges()

    assert fm2.getProteinIdentifications() == []
    fm2.setProteinIdentifications([])

    assert fm2.getUnassignedPeptideIdentifications() == []
    fm2.setUnassignedPeptideIdentifications([])

    fm2.clear()
    assert fm2.size() == 0

    dp = pyopenms.DataProcessing()
    fm2.setDataProcessing([dp])
    assert fm2.getDataProcessing() == [dp]
    testDataProcessing(dp)

    fm2.setUniqueIds()

    fm += fm
    assert fm + fm != fm
Example #7
def main(options):

    # make sure that the unique ids are reproducible for the test case
    date_time = pyopenms.DateTime()
    if options.test:
        date_time.set("1999-12-31 23:59:59")
        pyopenms.UniqueIdGenerator().setSeed(date_time)
    else:
        date_time = pyopenms.DateTime.now()

    exp = pyopenms.MSExperiment()
    out_map = pyopenms.ConsensusMap()
    pyopenms.FileHandler().loadExperiment(options.infile, exp)
    exp.updateRanges()

    #
    # 1. filter MS1 level (only keep MS1)
    #
    tmp = copy.copy(exp)
    tmp.clear(False)
    for spectrum in exp:
        if spectrum.getMSLevel() == 1:
            tmp.push_back(spectrum)
    exp = tmp
    exp.sortSpectra(True)

    #
    # 2. set parameters
    #
    analyzer = pyopenms.SILACAnalyzer()
    analyzer.initialize(
        # section sample
        options.selected_labels,
        options.charge_min,
        options.charge_max,
        options.missed_cleavages,
        options.isotopes_per_peptide_min,
        options.isotopes_per_peptide_max,
        # section "algorithm"
        options.rt_threshold,
        options.rt_min,
        options.intensity_cutoff,
        options.intensity_correlation,
        options.model_deviation,
        options.allow_missing_peaks,
        # labels
        options.label_identifiers)

    #
    # 3. run
    #
    analyzer.run_all(exp, out_map)

    #
    # 4. set dataprocessing and output meta information
    #
    out_map.sortByPosition()

    dp = out_map.getDataProcessing()
    p = pyopenms.DataProcessing()
    p.setProcessingActions(
        set([
            pyopenms.ProcessingAction.DATA_PROCESSING,
            pyopenms.ProcessingAction.PEAK_PICKING,
            pyopenms.ProcessingAction.FILTERING,
            pyopenms.ProcessingAction.QUANTITATION
        ]))
    p.setCompletionTime(date_time)

    sw = p.getSoftware()
    sw.setName("SILACAnalyzer")
    if options.test:
        sw.setVersion("version_string")
        p.setSoftware(sw)
        p.setMetaValue("parameter: mode", "test_mode")
    else:
        sw.setVersion("pyTOPP v1.10")
        p.setSoftware(sw)
    dp.append(p)
    out_map.setDataProcessing(dp)

    #
    # 5. write output
    #
    analyzer.writeConsensus(pyopenms.String(options.outfile), out_map)
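
# For reference, the attributes this main() reads from "options", grouped as in
# the initialize() call. The values below are placeholders only and are not
# meant as sensible SILACAnalyzer settings.
from types import SimpleNamespace

options = SimpleNamespace(
    infile="input.mzML", outfile="out.consensusXML", test=False,
    # section "sample"
    selected_labels="", charge_min=2, charge_max=4, missed_cleavages=0,
    isotopes_per_peptide_min=3, isotopes_per_peptide_max=5,
    # section "algorithm"
    rt_threshold=30.0, rt_min=0.0, intensity_cutoff=1000.0,
    intensity_correlation=0.7, model_deviation=3.0, allow_missing_peaks=True,
    # labels
    label_identifiers="")
main(options)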