Example #1
 def _prepopulate_averagine_cache(self):
     if 'averagine' in self.ms1_deconvolution_args:
         averagine = self.ms1_deconvolution_args['averagine']
         ms1_truncate_after = self.ms1_deconvolution_args.get(
             'truncate_after', constants.TRUNCATE_AFTER)
         ms1_ignore_below = self.ms1_deconvolution_args.get(
             'ignore_below', constants.IGNORE_BELOW)
         ms1_charge_range = self.ms1_deconvolution_args.get(
             'charge_range', (1, 8))
         ms1_charge_carrier = self.ms1_deconvolution_args.get(
             'charge_carrier', PROTON)
         if isinstance(averagine, (list, tuple)):
             averagine = [
                 AveragineCache(a).populate(
                     truncate_after=ms1_truncate_after,
                     ignore_below=ms1_ignore_below,
                     min_charge=ms1_charge_range[0],
                     max_charge=ms1_charge_range[1],
                     charge_carrier=ms1_charge_carrier) for a in averagine
             ]
         else:
             averagine = AveragineCache(averagine).populate(
                 truncate_after=ms1_truncate_after,
                 ignore_below=ms1_ignore_below,
                 min_charge=ms1_charge_range[0],
                 max_charge=ms1_charge_range[1],
                 charge_carrier=ms1_charge_carrier)
         self.ms1_deconvolution_args['averagine'] = averagine
     if 'averagine' in self.msn_deconvolution_args:
         averagine = self.msn_deconvolution_args['averagine']
         msn_truncate_after = self.msn_deconvolution_args.get(
             'truncate_after', constants.TRUNCATE_AFTER)
         msn_ignore_below = self.msn_deconvolution_args.get(
             'ignore_below', constants.IGNORE_BELOW)
         msn_charge_range = self.msn_deconvolution_args.get(
             'charge_range', (1, 8))
         msn_charge_carrier = self.msn_deconvolution_args.get(
             'charge_carrier', PROTON)
         if isinstance(averagine, (list, tuple)):
             averagine = [
                 AveragineCache(a).populate(
                     truncate_after=msn_truncate_after,
                     ignore_below=msn_ignore_below,
                     min_charge=msn_charge_range[0],
                     max_charge=msn_charge_range[1],
                     charge_carrier=msn_charge_carrier) for a in averagine
             ]
         else:
             averagine = AveragineCache(averagine).populate(
                 truncate_after=msn_truncate_after,
                 ignore_below=msn_ignore_below,
                 min_charge=msn_charge_range[0],
                 max_charge=msn_charge_range[1],
                 charge_carrier=msn_charge_carrier)
         self.msn_deconvolution_args['averagine'] = averagine
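This method eagerly fills the isotopic pattern caches before deconvolution begins, once for the MS1 argument dictionary and once for the MSn one. Below is a minimal sketch of the same populate pattern on a single cache, assuming the usual ms_deisotope.averagine import locations; the parameter values are illustrative only, not taken from the method above.

from ms_deisotope.averagine import AveragineCache, peptide

# Precompute isotopic patterns over the m/z and charge grid up front;
# populate() returns the cache itself, mirroring the assignment above.
cache = AveragineCache(peptide).populate(
    truncate_after=0.95,
    ignore_below=0.01,
    min_charge=1,
    max_charge=8)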
Example #2
    def __init__(self,
                 peaklist,
                 averagines=None,
                 scorer=penalized_msdeconv,
                 use_subtraction=True,
                 scale_method=SCALE_METHOD,
                 merge_isobaric_peaks=True,
                 minimum_intensity=5.,
                 verbose=False,
                 *args,
                 **kwargs):
        self.peaklist = prepare_peaklist(peaklist)
        self.scorer = scorer
        self.use_subtraction = use_subtraction
        self.scale_method = scale_method

        cache_backend = dict
        if averagines is None:
            averagines = [peptide, glycopeptide, glycan]
        averagines = [
            AveragineCache(avg, backend=cache_backend())
            if not isinstance(avg, AveragineCache) else avg
            for avg in averagines
        ]
        self.averagines = averagines
        self.verbose = verbose

        self._deconvoluted_peaks = []

        super(MultiAveragineDeconvoluter,
              self).__init__(use_subtraction, scale_method,
                             merge_isobaric_peaks, minimum_intensity, *args,
                             **kwargs)
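This constructor accepts either plain averagine models or prebuilt AveragineCache instances, wrapping the former in fresh caches backed by a dict. A hedged construction sketch, assuming MultiAveragineDeconvoluter is importable from ms_deisotope.deconvolution and with `peaks` as a hypothetical stand-in for a centroided peak list accepted by prepare_peaklist:

from ms_deisotope.averagine import peptide, glycan
from ms_deisotope.deconvolution import MultiAveragineDeconvoluter

# `peaks` is a hypothetical placeholder for the caller's peak list.
task = MultiAveragineDeconvoluter(peaks, averagines=[peptide, glycan])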
Example #3
 def __init__(self,
              feature_map,
              averagine,
              scorer,
              precursor_map=None,
              minimum_size=3,
              maximum_time_gap=0.25,
              prefer_multiply_charged=True,
              copy=True):
     if precursor_map is None:
         precursor_map = PrecursorMap({})
     if isinstance(feature_map, LCMSFeatureMap):
         if copy:
             feature_map = feature_map.clone(deep=True)
     else:
         feature_map = LCMSFeatureMap(
             [f.clone(deep=True) if copy else f for f in feature_map])
     self.feature_map = feature_map
     self.averagine = AveragineCache(averagine)
     self.prefer_multiply_charged = prefer_multiply_charged
     self.scorer = scorer
     self.precursor_map = precursor_map
     self.minimum_size = minimum_size
     self.maximum_time_gap = maximum_time_gap
     self.dependence_network = FeatureDependenceGraph(self.feature_map)
     self.orphaned_nodes = []
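Here the raw averagine argument is unconditionally wrapped in an AveragineCache, so repeated isotopic pattern lookups during feature processing are memoized. A small illustration of that wrapping, assuming the standard ms_deisotope.averagine names; the m/z and charge values are made up:

from ms_deisotope.averagine import AveragineCache, glycopeptide

cache = AveragineCache(glycopeptide)
# The first call computes and stores the pattern; identical later calls
# are served from the cache's backend.
pattern = cache.isotopic_cluster(1200.0, charge=2)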
Example #4
    def __init__(self, peaklist, averagine=None, scorer=penalized_msdeconv,
                 use_subtraction=True, scale_method=SCALE_METHOD,
                 verbose=False, **kwargs):
        if averagine is None:
            averagine = AveragineCache(peptide, dict())
        else:
            if not isinstance(averagine, AveragineCache):
                averagine = AveragineCache(averagine, dict())
        self.peaklist = prepare_peaklist(peaklist)
        self.averagine = averagine
        self.scorer = scorer
        self._deconvoluted_peaks = []
        self.verbose = verbose

        super(AveragineDeconvoluter, self).__init__(
            use_subtraction, scale_method, merge_isobaric_peaks=True, **kwargs)
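Because a plain averagine model is wrapped in AveragineCache(averagine, dict()) automatically, the caller may also pass a prebuilt cache, for example one shared across several deconvoluter instances. A hedged sketch, assuming AveragineDeconvoluter is importable from ms_deisotope.deconvolution and with `peaks` as a hypothetical peak list:

from ms_deisotope.averagine import AveragineCache, glycan
from ms_deisotope.deconvolution import AveragineDeconvoluter

shared_cache = AveragineCache(glycan, dict())
# `peaks` is a hypothetical placeholder for a centroided peak list.
task = AveragineDeconvoluter(peaks, averagine=shared_cache)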
Example #5
    def test_extraction_cached_averagine(self):
        scan = self.scan
        cache = AveragineCache(peptide)
        cache.populate(truncate_after=0.8)
        peak2, deconvoluter = self.build_deconvoluter(scan, cache)
        deconvoluter._deconvolution_step(0,
                                         truncate_after=0.8,
                                         charge_range=(1, 4))

        with open(datafile("extraction_cached_averagine.pkl"), 'rb') as fh:
            reference_averagine = pickle.load(fh)

        diff = set(deconvoluter.averagine.backend) - set(
            reference_averagine.backend)
        assert len(diff) == 0
        assert len(deconvoluter.averagine.backend) == 23960
        assert reference_averagine == deconvoluter.averagine

        cluster2 = deconvoluter.peak_dependency_network.find_cluster_for(peak2)
        spanned2 = cluster2.fits_using_mz(peak2.mz)
        assert len(cluster2) == 4
        assert len(spanned2) == 2
        assert np.isclose(cluster2.best_fit.monoisotopic_peak.mz, 138.19520)
        assert cluster2.best_fit.charge == 3
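The reference cache compared against in this test is loaded with pickle, which suggests a populated AveragineCache can be serialized and reused across runs. A hedged sketch of writing such a file; the file name here is made up and is not the fixture used by the test:

import pickle
from ms_deisotope.averagine import AveragineCache, peptide

cache = AveragineCache(peptide).populate(truncate_after=0.8)
with open("averagine_cache.pkl", "wb") as fh:
    pickle.dump(cache, fh)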
Example #6
 def __init__(self,
              feature_map,
              averagine,
              scorer,
              precursor_map=None,
              minimum_size=3,
              maximum_time_gap=0.25):
     if precursor_map is None:
         precursor_map = PrecursorMap({})
     self.feature_map = LCMSFeatureMap(
         [f.clone(deep=True) for f in feature_map])
     self.averagine = AveragineCache(averagine)
     self.scorer = scorer
     self.precursor_map = precursor_map
     self.minimum_size = minimum_size
     self.maximum_time_gap = maximum_time_gap
     self.dependence_network = FeatureDependenceGraph(self.feature_map)
     self.orphaned_nodes = []