def convert_map_to_scan_peak_list(feature_map, peak_loader, time_precision=4, deconvoluted=True):
    """Convert an LC-MS feature map back into a list of per-scan peak lists.

    Member peaks of every feature node are grouped by scan time (rounded to
    ``time_precision`` decimal places), and for each distinct time a
    :class:`ProcessedScan` is built from the template scan retrieved from
    ``peak_loader`` at that time.

    Parameters
    ----------
    feature_map : iterable
        Iterable of features, each an iterable of nodes carrying a ``time``
        attribute and a ``members`` peak collection.
    peak_loader : object
        Scan source providing ``get_scan_by_time``.
    time_precision : int
        Number of decimal places used when grouping node times.
    deconvoluted : bool
        If True, accumulated peaks populate the deconvoluted peak set;
        otherwise they populate the fitted (raw) peak set.

    Returns
    -------
    list of ProcessedScan
        Scans ordered by ascending (rounded) scan time.
    """
    # NOTE(review): the original also built a ``metadata_map`` from
    # ``peak_loader.extended_index.ms1_ids`` but never read it — dead code,
    # removed here.
    scan_accumulator = defaultdict(list)
    for feature in feature_map:
        for node in feature:
            scan_accumulator[round(node.time, time_precision)].extend(node.members)
    packed = []
    for key, peaks in sorted(scan_accumulator.items(), key=lambda x: x[0]):
        template = peak_loader.get_scan_by_time(key)
        if deconvoluted:
            peak_set = PeakSet([])
            deconvoluted_peak_set = DeconvolutedPeakSet(peaks)
        else:
            peak_set = PeakSet(peaks)
            deconvoluted_peak_set = DeconvolutedPeakSet([])
        peak_set.reindex()
        deconvoluted_peak_set.reindex()
        scan = ProcessedScan(
            template.id, template.title, None, template.ms_level,
            template.scan_time, template.index, peak_set,
            deconvoluted_peak_set, template.polarity, None)
        packed.append(scan)
    return packed
def threshold_peaks(deconvoluted_peak_set, threshold_fn=lambda peak: True):
    """Return a new deconvoluted peak set holding only the peaks accepted by *threshold_fn*."""
    kept = [peak for peak in deconvoluted_peak_set if threshold_fn(peak)]
    filtered = DeconvolutedPeakSet(kept)
    filtered._reindex()
    return filtered
def build_deconvoluted_peak_set_from_arrays(mz_array, intensity_array, charge_array):
    """Construct a deconvoluted peak set from parallel m/z, intensity, and charge arrays.

    Each position ``i`` becomes a :class:`RankedPeak` whose neutral mass is
    computed from ``mz_array[i]`` at ``charge_array[i]``. The intensity value
    is used both as the peak intensity and as its initial score, and the
    array position becomes the peak's index.

    Parameters
    ----------
    mz_array, intensity_array, charge_array : sequence
        Parallel sequences describing one peak per position.

    Returns
    -------
    DeconvolutedPeakSet
        A freshly reindexed peak set.
    """
    # enumerate(zip(...)) replaces the original range(len(...)) index loop.
    peaks = [
        RankedPeak(neutral_mass(mz, charge), intensity, charge, intensity, i)
        for i, (mz, intensity, charge) in enumerate(
            zip(mz_array, intensity_array, charge_array))
    ]
    peak_set = DeconvolutedPeakSet(peaks)
    peak_set.reindex()
    return peak_set
def rank(self, cache=True):
    """Rank this scan's deconvoluted peaks by intensity and drop unranked peaks.

    When *cache* is true, the ranked peak set is memoized in
    ``self.annotations['ranked_peaks']`` and the stored value is returned on
    subsequent calls; with ``cache=False`` the ranking is always recomputed.
    """
    # Fast path: a cached result exists and caching is requested.
    if cache and 'ranked_peaks' in self.annotations:
        return self.annotations['ranked_peaks']
    peaks = self.deconvoluted_peak_set
    intensity_rank(peaks)
    # Keep only peaks that received a positive rank.
    ranked = DeconvolutedPeakSet([p for p in peaks if p.rank > 0])
    ranked.reindex()
    if cache:
        self.annotations['ranked_peaks'] = ranked
    return ranked
def threshold_peaks(deconvoluted_peak_set, threshold_fn=lambda peak: True):
    """Filter a deconvoluted peak set by a predicate function.

    Parameters
    ----------
    deconvoluted_peak_set : :class:`ms_deisotope.DeconvolutedPeakSet`
        The deconvoluted peaks to filter
    threshold_fn : Callable
        The predicate function used to decide whether a peak is kept.

    Returns
    -------
    :class:`ms_deisotope.DeconvolutedPeakSet`
    """
    accepted = list(filter(threshold_fn, deconvoluted_peak_set))
    result = DeconvolutedPeakSet(accepted)
    result._reindex()
    return result
def convert_map_to_scan_peak_list(feature_map, peak_loader, time_precision=4, deconvoluted=True):
    """Convert an LC-MS feature map back into a list of per-scan peak lists.

    Member peaks of every feature node are grouped by scan time (rounded to
    ``time_precision`` decimal places); for each distinct time a
    :class:`ProcessedScan` is built around the template scan fetched from
    ``peak_loader`` at that time.

    Parameters
    ----------
    feature_map : iterable
        Iterable of features, each an iterable of nodes with a ``time``
        attribute and a ``members`` peak collection.
    peak_loader : object
        Scan source providing ``get_scan_by_time``.
    time_precision : int
        Decimal places used when grouping node times.
    deconvoluted : bool
        If True, accumulated peaks go into the deconvoluted peak set;
        otherwise into the fitted (raw) peak set.

    Returns
    -------
    list of ProcessedScan
        Scans ordered by ascending (rounded) scan time.
    """
    # NOTE(review): the original built an unused ``metadata_map`` from
    # ``peak_loader.extended_index.ms1_ids``; that dead code is removed.
    scan_accumulator = defaultdict(list)
    for feature in feature_map:
        for node in feature:
            scan_accumulator[round(node.time, time_precision)].extend(node.members)
    packed = []
    for key, peaks in sorted(scan_accumulator.items(), key=lambda x: x[0]):
        template = peak_loader.get_scan_by_time(key)
        if deconvoluted:
            peak_set = PeakSet([])
            deconvoluted_peak_set = DeconvolutedPeakSet(peaks)
        else:
            peak_set = PeakSet(peaks)
            deconvoluted_peak_set = DeconvolutedPeakSet([])
        peak_set.reindex()
        deconvoluted_peak_set.reindex()
        scan = ProcessedScan(
            template.id, template.title, None, template.ms_level,
            template.scan_time, template.index, peak_set,
            deconvoluted_peak_set, template.polarity, None)
        packed.append(scan)
    return packed
def convert(self, fitted=True, deconvoluted=True):
    """Rehydrate this database-backed scan record into an in-memory ProcessedScan.

    Parameters
    ----------
    fitted : bool
        When True, load this scan's fitted (centroided) peaks from the
        database; otherwise the fitted peak index is left empty.
    deconvoluted : bool
        When True, load this scan's deconvoluted peaks from the database;
        otherwise the deconvoluted peak set is left empty.

    Returns
    -------
    ProcessedScan
    """
    # Convert the linked precursor record, if any, into its in-memory form.
    precursor_information = self.precursor_information.convert(
    ) if self.precursor_information is not None else None
    # Query through the SQLAlchemy session that owns this ORM instance.
    session = object_session(self)
    conn = session.connection()
    if fitted:
        # Fetch all fitted-peak rows belonging to this scan and convert
        # each row into an in-memory fitted peak object.
        q = conn.execute(select([FittedPeak.__table__]).where(
            FittedPeak.__table__.c.scan_id == self.id)).fetchall()
        peak_set_items = list(
            map(make_memory_fitted_peak, q))
        peak_set = PeakSet(peak_set_items)
        peak_set._index()
        # Wrap in a PeakIndex with empty m/z / intensity arrays — only the
        # peak set itself is populated here.
        peak_index = PeakIndex(np.array([], dtype=np.float64), np.array(
            [], dtype=np.float64), peak_set)
    else:
        # Fitted peaks not requested: provide an empty index.
        peak_index = PeakIndex(np.array([], dtype=np.float64), np.array(
            [], dtype=np.float64), PeakSet([]))
    if deconvoluted:
        # Same pattern for the deconvoluted peaks of this scan.
        q = conn.execute(select([DeconvolutedPeak.__table__]).where(
            DeconvolutedPeak.__table__.c.scan_id == self.id)).fetchall()
        deconvoluted_peak_set_items = list(
            map(make_memory_deconvoluted_peak, q))
        deconvoluted_peak_set = DeconvolutedPeakSet(
            deconvoluted_peak_set_items)
        deconvoluted_peak_set._reindex()
    else:
        deconvoluted_peak_set = DeconvolutedPeakSet([])
    # ``info`` may be NULL in the database; fall back to an empty mapping.
    info = self.info or {}
    scan = ProcessedScan(
        self.scan_id, self.title, precursor_information,
        int(self.ms_level), float(self.scan_time), self.index, peak_index,
        deconvoluted_peak_set, activation=info.get('activation'))
    return scan
def threshold_peaks(deconvoluted_peak_set, threshold_fn=lambda peak: True):
    """Keep only the peaks in *deconvoluted_peak_set* for which *threshold_fn* is true."""
    retained = []
    for peak in deconvoluted_peak_set:
        if threshold_fn(peak):
            retained.append(peak)
    out = DeconvolutedPeakSet(retained)
    out._reindex()
    return out