# Example 1
def _simplify_peak_set(peaks, bin_width=5.0):
    """Reduce ``peaks`` to the locally intense subset.

    The m/z axis is partitioned into ``bin_width``-wide bins; within each
    bin only peaks whose intensity exceeds one third of that bin's maximum
    intensity are kept.

    Parameters
    ----------
    peaks : PeakSet
        Peak collection supporting indexing, ``between(lo, hi)``, and whose
        peaks expose ``mz``, ``intensity`` and ``peak_count``.
    bin_width : float
        Width, in m/z units, of each simplification bin.

    Returns
    -------
    PeakIndex
        An index wrapping the reduced, re-indexed peak set.
    """
    # Robustness: an empty input has no peaks[-1] to anchor the bin range on.
    if len(peaks) == 0:
        empty = PeakSet([])
        empty.reindex()
        return PeakIndex(np.array([]), np.array([]), empty)
    bin_edges = np.arange(0, peaks[-1].mz + bin_width, bin_width)
    bins = []
    for i, bin_edge in enumerate(bin_edges, 1):
        # The final bin extends one full width past the last edge so the
        # highest-m/z peaks are not dropped.
        if i == len(bin_edges):
            next_edge = bin_edges[-1] + bin_width
        else:
            next_edge = bin_edges[i]
        bins.append(peaks.between(bin_edge, next_edge))

    reduced_subsets = {}
    for b in bins:
        if len(b) == 0:
            continue
        # Keep only peaks above 1/3 of the bin's most intense peak.
        # (The original also kept a running count ``k`` and a list of all
        # thresholds; both were never read and have been removed.)
        threshold = max(p.intensity for p in b) / 3.
        for p in b:
            if p.intensity > threshold:
                # Key on peak_count so a peak selected by overlapping bin
                # queries is stored only once.
                reduced_subsets[p.peak_count] = p
    subset_peaks = PeakSet(sorted(reduced_subsets.values(),
                                  key=lambda x: x.mz)).clone()
    subset_peaks.reindex()
    return PeakIndex(np.array([]), np.array([]), subset_peaks)
# Example 2
def convert_map_to_scan_peak_list(feature_map,
                                  peak_loader,
                                  time_precision=4,
                                  deconvoluted=True):
    """Flatten an LC-MS feature map back into per-scan peak lists.

    Peaks from every feature node are grouped by scan time (rounded to
    ``time_precision`` decimals); for each distinct time a
    :class:`ProcessedScan` is built on top of the template scan fetched
    through ``peak_loader``.

    Parameters
    ----------
    feature_map : iterable
        Iterable of features, each itself an iterable of nodes carrying
        ``time`` and ``members`` attributes.
    peak_loader : object
        Scan source supporting ``get_scan_by_time``.
    time_precision : int
        Decimal places used when grouping node times.
    deconvoluted : bool
        When True the accumulated peaks populate the deconvoluted peak set;
        otherwise they populate the plain (centroid) peak set.

    Returns
    -------
    list of ProcessedScan
        Scans ordered by ascending scan time.
    """
    # NOTE(review): the original also built a ``metadata_map`` from
    # ``peak_loader.extended_index.ms1_ids`` keyed by rounded scan time but
    # never read it; that dead computation has been removed.
    scan_accumulator = defaultdict(list)
    for feature in feature_map:
        for node in feature:
            scan_accumulator[round(node.time,
                                   time_precision)].extend(node.members)

    packed = []
    for key, peaks in sorted(scan_accumulator.items(), key=lambda x: x[0]):
        template = peak_loader.get_scan_by_time(key)
        # Exactly one of the two collections is populated; the other is an
        # empty placeholder of the complementary type.
        if deconvoluted:
            peak_set = PeakSet([])
            deconvoluted_peak_set = DeconvolutedPeakSet(peaks)
        else:
            peak_set = PeakSet(peaks)
            deconvoluted_peak_set = DeconvolutedPeakSet([])
        peak_set.reindex()
        deconvoluted_peak_set.reindex()
        scan = ProcessedScan(template.id, template.title, None,
                             template.ms_level, template.scan_time,
                             template.index, peak_set, deconvoluted_peak_set,
                             template.polarity, None)
        packed.append(scan)
    return packed
def convert_map_to_scan_peak_list(feature_map, peak_loader, time_precision=4, deconvoluted=True):
    """Regroup the peaks of a feature map by scan time.

    Every node of every feature contributes its member peaks to the group
    for its (rounded) time; one :class:`ProcessedScan` per group is then
    assembled from the template scan obtained via ``peak_loader``.

    Returns a list of :class:`ProcessedScan` ordered by scan time.
    """
    # metadata_map mirrors the MS1 index by rounded scan time.
    # NOTE(review): it is populated but never read below.
    metadata_map = {}
    for scan_id, metadata in peak_loader.extended_index.ms1_ids.items():
        metadata_map[round(metadata["scan_time"], time_precision)] = metadata

    by_time = defaultdict(list)
    for feature in feature_map:
        for node in feature:
            by_time[round(node.time, time_precision)].extend(node.members)

    scans = []
    for time_key in sorted(by_time):
        member_peaks = by_time[time_key]
        template = peak_loader.get_scan_by_time(time_key)
        if deconvoluted:
            centroids = PeakSet([])
            deconvoluted_peaks = DeconvolutedPeakSet(member_peaks)
        else:
            centroids = PeakSet(member_peaks)
            deconvoluted_peaks = DeconvolutedPeakSet([])
        centroids.reindex()
        deconvoluted_peaks.reindex()
        scans.append(ProcessedScan(
            template.id, template.title, None, template.ms_level,
            template.scan_time, template.index, centroids,
            deconvoluted_peaks, template.polarity, None))
    return scans
# Example 4
 def _pick_peaks_vendor(self, scan, *args, **kwargs):
     """Pick peaks for *scan* using the vendor-supplied centroid stream.

     Raises
     ------
     NotImplementedError
         When the vendor scan reports no centroid stream.
     """
     # NOTE(review): the +1 suggests the vendor API uses 1-based scan
     # numbers — confirm against the Thermo RawFileReader docs.
     scan_info = Business.Scan.FromFile(self._source, scan.scan_number + 1)
     if not scan_info.HasCentroidStream:
         raise NotImplementedError()
     stream = self._source.GetCentroidStream(scan.scan_number + 1, 0)
     masses = stream.Masses
     intensities = stream.Intensities
     centroids = PeakSet(
         [simple_peak(mz, intensities[i], 0.001)
          for i, mz in enumerate(masses)])
     centroids.reindex()
     arrays = self._scan_arrays(scan)
     return PeakIndex(arrays[0], arrays[1], centroids)
# Example 5
def deserialize_peak_set(scan_dict):
    """Rebuild a :class:`PeakIndex` from a serialized scan dictionary.

    ``scan_dict`` must provide parallel ``'m/z array'`` and
    ``'intensity array'`` sequences.
    """
    mzs = scan_dict['m/z array']
    intensities = scan_dict['intensity array']
    # The remaining FittedPeak fields (signal-to-noise, widths, area) were
    # presumably not serialized — they are filled with fixed placeholders.
    fitted = [
        FittedPeak(mz, intensities[i], 1, i, i, 0, intensities[i], 0, 0)
        for i, mz in enumerate(mzs)
    ]
    peak_set = PeakSet(fitted)
    peak_set.reindex()
    return PeakIndex(np.array([]), np.array([]), peak_set)
# Example 6
def envelopes_to_peak_set(self):
    """Convert a set of deconvoluted peaks with fitted isotopic envelopes into a
    set of centroids representing those envelope peaks.

    Each envelope point becomes one centroid carrying its parent peak's
    full width at half max.

    Returns
    -------
    :class:`ms_peak_picker.PeakSet`
    """
    centroids = FittedPeakSet([
        simple_peak(point.mz, point.intensity, peak.full_width_at_half_max)
        for peak in self
        for point in peak.envelope
    ])
    centroids.reindex()
    return centroids
# Example 7
def prepare_peaklist(peaks):
    '''Coerce ``peaks`` into a freshly copied, indexed :class:`~.PeakSet`.

    A deep copy is made because signal subtraction methods modify peaks in
    place. Accepted inputs:

    1. :class:`ms_peak_picker.PeakSet` — copied and indexed
    2. :class:`ms_peak_picker.PeakIndex` — its peaks are extracted and copied
    3. Any other *sequence* of :class:`PeakLike` objects (objects having an
       ``mz`` and ``intensity`` attribute), converted into a
       :class:`ms_peak_picker.PeakSet`
    4. Any *sequence* of :class:`tuple` or :class:`list` with at least two
       entries, converted into a :class:`ms_peak_picker.PeakSet` taking
       ``p[0]`` as each peak's m/z and ``p[1]`` as its intensity; further
       entries are ignored.

    Parameters
    ----------
    peaks: Sequence
        Any sequence of :class:`~.FittedPeak` objects, objects
        with ``mz`` and ``intensity`` attributes, or :class:`list` /
        :class:`tuple` objects containing paired ``mz`` and ``intensity``
        values

    Returns
    -------
    :class:`~.PeakSet`
    '''
    if isinstance(peaks, PeakIndex):
        copied = PeakSet(peaks.peaks).clone()
        copied.reindex()
        return copied

    peaks = tuple(peaks)
    if not peaks:
        return PeakSet([])

    first = peaks[0]
    if not isinstance(first, FittedPeak):
        # The first element decides how the whole sequence is interpreted.
        if is_peak(first):
            peaks = [simple_peak(p.mz, p.intensity, 0.01) for p in peaks]
        elif isinstance(first, (list, tuple)):
            peaks = [simple_peak(p[0], p[1], 0.01) for p in peaks]
        else:
            raise TypeError("Cannot convert peaks into a PeakSet")

    copied = PeakSet(peaks).clone()
    copied.reindex()
    return copied