    def update_signal_ratio(self):
        """Record how much the total feature-map signal changed since the last call.

        Computes the relative decrease in total signal and stores the new
        magnitude for the next comparison.
        """
        self.next_signal_magnitude = sum(f.total_signal for f in self.processor.feature_map)
        self.total_signal_ratio = (
            self.last_signal_magnitude - self.next_signal_magnitude) / self.next_signal_magnitude
        printer("Signal Ratio: %0.3e (%0.3e, %0.3e)" % (
            self.total_signal_ratio, self.last_signal_magnitude, self.next_signal_magnitude))
        self.last_signal_magnitude = self.next_signal_magnitude
        return self.total_signal_ratio

    def map_fits(self, report_interval=5):
        """Evaluate charge state fits for every feature and select the best disjoint set.

        Each feature is passed to the processor's charge state determination
        routine, which registers candidate isotopic fits in the dependence
        network. The best non-overlapping fits are then extracted from that
        network.
        """
        i = 0
        n = len(self.processor.feature_map)
        # Report progress roughly `report_interval` times over the run, but no
        # more often than once per 1000 features.
        interval = int(max(n // report_interval, 1000))
        for feature in sorted(self.processor.feature_map, key=lambda x: x.mz, reverse=True):
            fits = self.processor.charge_state_determination(
                feature,
                error_tolerance=self.error_tolerance,
                charge_range=self.charge_range,
                left_search=self.left_search,
                right_search=self.right_search,
                charge_carrier=self.charge_carrier,
                truncate_after=self.truncate_after,
                max_missed_peaks=self.max_missed_peaks,
                threshold_scale=self.threshold_scale)
            if self.debug:
                print(feature, fits)
            i += 1
            if i % interval == 0:
                printer("\t%0.1f%%" % ((100. * i) / n,))
        self.all_fits = list(self.processor.dependence_network.dependencies)
        self.disjoint_feature_clusters = self.processor.dependence_network.find_non_overlapping_intervals()

        # Optionally fit and apply a relation model over the clustered dependencies.
        if self.relfitter is not None:
            self.relations = self.relfitter.fit(
                (d for cluster in self.disjoint_feature_clusters
                 for d in cluster), self.solutions)
            self.relfitter.predict((d for cluster in self.disjoint_feature_clusters
                                    for d in cluster))
        printer("\tExtracting Fits")
        self.fits = self.processor.select_best_disjoint_subgraphs(self.disjoint_feature_clusters)

    def _map_precursors(self, error_tolerance):
        """Find seed features matching recorded precursor ions.

        Each precursor's retention time is used to look up candidate features,
        and those whose m/z matches within `error_tolerance` are collected as
        seeds.
        """
        printer("\tConstructing Precursor Seeds")
        rt_map = RTMap(self.feature_map)
        cache = dict()
        seeds = set()

        for key, pinfo in self.precursor_map.mapping.items():
            time, ix = key
            # Candidate lookups are cached by retention time since many
            # precursors share the same scan time.
            try:
                candidates = cache[time]
            except KeyError:
                candidates = rt_map.rt_tree.contains_point(time)
                cache[time] = candidates
            mz = pinfo["mz"]
            hits = [c.members[0] for c in candidates if c.contains_mz(mz, error_tolerance)]
            seeds.update(hits)
        return seeds

    def setup(self):
        """Prepare the processor for the next iteration.

        Reports the current total signal, drops peaks below the intensity
        threshold, and rebuilds the dependence network.
        """
        printer("Begin Iteration %d" % (self.iteration_count,))
        printer("Total Signal: %0.3e" %
                (sum(f.total_signal for f in self.processor.feature_map),))
        self.processor.remove_peaks_below_threshold(self.minimum_intensity)
        self.processor.build_dependence_network()