def test_hfsigma():
    "Tests hfsigma & nanhfsigma on clean and NaN-holed arrays"
    # a linear ramp: every first difference is identical
    linear = np.arange(10) * 1.
    assert hfsigma(linear) == np.median(np.diff(linear))

    # nanhfsigma must skip the NaN and match the finite-only computation
    linear[0] = np.nan
    finite = linear[np.isfinite(linear)]
    assert nanhfsigma(linear) == np.median(np.diff(finite))

    # a quadratic ramp: differences grow, exercising the median
    quadratic = 1. * np.arange(20) ** 2
    assert hfsigma(quadratic) == np.median(np.diff(quadratic))

    # interleave NaNs throughout; result must still match finite-only data
    holed = np.insert(quadratic, range(0, 20, 2), np.nan)
    finite = holed[np.isfinite(holed)]
    assert nanhfsigma(holed) == np.median(np.diff(finite))
def apply(signals, phase):
    """
    Aggregates signals into one noise-weighted average signal.

    Each signal is weighted by its high-frequency noise (`nanhfsigma`)
    estimated over the *phase* slice: the noisier the signal, the smaller
    its contribution. Samples where the total weight is ~0 are set to NaN.

    Parameters
    ----------
    signals:
        sequence of 1d float arrays (may contain NaNs)
    phase:
        (start, stop) pair used to slice each signal for noise estimation;
        only read when there are two or more signals

    Returns
    -------
    ``0.`` when *signals* is empty, a copy of the lone signal when there is
    exactly one, otherwise a float32 array as long as the longest signal.
    """
    if len(signals) == 0:
        return 0.

    if len(signals) == 1:
        res = np.copy(signals[0])
    else:
        rng = slice(*phase)
        # per-signal noise estimate, clipped to a plausible range;
        # np.clip propagates NaNs untouched
        wei = np.clip(
            np.array([nanhfsigma(i[rng]) for i in signals], dtype='f4'),
            5e-4, 1e-2)
        # a NaN estimate gets the worst clipped value ...
        wei[np.isnan(wei)] = .01
        # ... so that the inverted weight below is exactly 0
        wei = (.01 - wei)
        wei[np.isnan(wei)] = 0.

        res = np.zeros(max(len(i) for i in signals), dtype='f4')
        tot = np.zeros_like(res)
        for i, j in zip(signals, wei):
            fin = np.isfinite(i)
            res[fin] += i[fin] * j
            # BUG FIX: was `tot[fin] += fin * j`, which adds a len(i)-sized
            # array to the sum(fin)-sized selection and raises a shape error
            # whenever the signal holds NaNs; the weight is a scalar here.
            tot[fin] += j

        good = tot > 1e-6
        res[good] /= tot[good]
        res[~good] = np.nan  # np.NaN alias was removed in NumPy 2.0
    return res
def __cyclebaseinfo(
        self,
        frame: Union[Cycles, Beads],
        key: Tuple[int, int],
        arr: np.ndarray,
        zmag: np.ndarray
) -> Tuple[int, int, float, float, float, float, float]:
    """
    Computes summary statistics for one (bead, cycle) pair.

    Returns a 7-tuple:
    ``(key[0], key[1], % finite samples, corr(arr, zmag),
    extent between self.extentrange percentiles, percentile of the
    |forward - reversed backward| window difference, nanhfsigma(arr))``.

    When *arr* has no finite sample, the three middle statistics are 0.
    """
    # percentage of finite samples; max(1, ...) guards against an empty arr
    pop = float(np.isfinite(arr).sum()*100./max(1, len(arr)))
    if pop == 0.:
        # nothing finite to correlate or rank — only population & sigma are meaningful
        return (key[0], key[1], pop, 0., 0., 0., nanhfsigma(arr))
    # boundary indices: for each configured phase i, the start of phase i
    # and of phase i+1 (presumably two phases in self.phases → 4 indices;
    # the code below assumes pha has at least 4 entries — TODO confirm)
    pha = [frame.phase(key[1], j) for i in self.phases for j in range(i, i+2)]
    # common window length so the two phase windows can be compared sample-by-sample
    delta = min(pha[1]-pha[0], pha[3]-pha[2])
    return (
        key[0], key[1], pop,
        # correlation with the magnet position over the whole cycle
        pd.Series(arr).corr(pd.Series(zmag)),
        # extent: spread between the two configured percentiles
        np.diff(np.nanpercentile(arr, list(self.extentrange)))[0],
        # mismatch between the tail of the first window and the
        # time-reversed head of the second window
        np.nanpercentile(
            np.abs(arr[pha[1]-delta:pha[1]]-arr[pha[2]:pha[2]+delta][::-1]),
            self.extentrange[1]
        ),
        nanhfsigma(arr)
    )
def __call__(self, ibead: int) -> float:
    "Weighted precision over the configured phase ranges, floored at MINPRECISION"
    acc = 0.
    for rng, weight in self.phases:
        acc += nanhfsigma(self.beads[ibead], zip(*rng), self.rate) * weight
    return max(PrecisionAlg.MINPRECISION, acc)
def __call__(self, ibead: int) -> float:
    "Precision estimate over self.phases, floored at MINPRECISION"
    estimate = nanhfsigma(self.beads[ibead], zip(*self.phases), self.rate)
    return max(PrecisionAlg.MINPRECISION, estimate)
def dataframe(self, beads: Beads) -> pd.DataFrame:  # pylint: disable=too-many-locals
    """
    Creates a dataframe for all beads in a track.

    One row per bead, with columns: ``bead``, ``good`` (passes all
    thresholds), ``pop``, ``drops``, and for each of slowext / fastext /
    var / sig the ``mean`` / ``std`` / ``percentile`` aggregates.
    """
    # each statistic keeps three parallel lists: (mean, std, percentile)
    ext1: Tuple[List[float], ...] = ([], [], [])
    ext2: Tuple[List[float], ...] = ([], [], [])
    var: Tuple[List[float], ...] = ([], [], [])
    sig: Tuple[List[float], ...] = ([], [], [])
    pop: List[float] = []
    drops: List[float] = []
    isgood: List[bool] = []
    extfast, extslow = self.__exts(beads)

    def _append(vals, itms):
        # appends (mean, std, percentile) of the finite entries of vals to
        # the three lists in itms; NaN placeholders keep lists aligned
        vals = np.asarray(vals)
        good = vals[np.isfinite(vals)]
        if len(good) == 0:
            for i in itms[:3]:
                i.append(np.NaN)
        elif len(good) == 1:
            # a single sample has no spread or percentile worth reporting
            itms[0].append(good[0])
            itms[1].append(np.NaN)
            itms[2].append(np.NaN)
        else:
            itms[0].append(np.mean(good))
            try:
                itms[1].append(np.std(good))
            except FloatingPointError as fperr:
                # keep lists aligned, report on CI, then re-raise
                itms[1].append(np.NaN)
                if IS_TEAMCITY:
                    _log_error(vals=vals, good=good, track=beads.track)
                raise fperr
            itms[2].append(np.percentile(good, self.threshold))

    with warnings.catch_warnings():
        # all-NaN slices are expected here; silence numpy's warning
        warnings.filterwarnings(action='ignore', category=RuntimeWarning, message='.*slice.*')
        for _, data in beads:
            cycs = self.__cycles(beads, data)
            pop.append(self.population(cycs))
            drops.append(
                self.drops.measure(cycs.track, cast(dict, cycs.data)[0]))
            _append(extslow(cycs), ext1)
            _append(extfast(data), ext2)
            _append(np.diff(self.cyclesock(cycs), axis=0).ravel(), var)
            _append([nanhfsigma(i) for i in cycs.values()], sig)
            # NOTE(review): slow extent is judged on its percentile
            # (ext1[-1]) but fast extent on its mean (ext2[0]) — looks
            # asymmetric; confirm this is intentional
            isgood.append(
                ext1[-1][-1] <= self.maxextent
                and ext2[0][-1] <= self.maxextent
                and var[-1][-1] <= self.maxdiff
                and self.minhfsigma <= sig[-1][-1] <= self.maxhfsigma)

    def _vals(i, j):
        # expands one statistic's triple of lists into named columns
        return {i + k: l for k, l in zip(('mean', 'std', 'percentile'), j)}

    return pd.DataFrame(
        dict(bead=list(beads.keys()), good=isgood, pop=pop, drops=drops,
             **_vals('slowext', ext1), **_vals('fastext', ext2),
             **_vals('var', var), **_vals('sig', sig)))
def peakhfsigma(events: "PeaksArray"):
    """
    Returns the hfsigma on the peak.

    Computes `nanhfsigma` over the concatenated data of every non-empty
    event and returns the median of those values, or NaN when there is
    no non-empty event.
    """
    out = [nanhfsigma(np.concatenate(i['data'])) for i in events if len(i)]
    # np.NaN alias was removed in NumPy 2.0; np.nan is the canonical name.
    # The annotation is a string forward reference so defining this
    # function no longer requires PeaksArray to be imported first.
    return np.nanmedian(out) if len(out) else np.nan