def test_aggregate_deep_path(self):
    """Make sure that the aggregator will work on a deep path."""
    source = TimeSeries(dict(name='events', events=DEEP_EVENT_LIST))
    elist = (
        Pipeline()
        .from_source(source)
        .emit_on('flush')
        .aggregate(dict(out_max={'direction.out': Functions.max()}))
        .to_event_list()
    )
    self.assertEqual(elist[0].get('out_max'), 4)

    # Verify the non-string form aggregates multiple columns at once.
    elist = (
        Pipeline()
        .from_source(TimeSeries(dict(name='events', events=DEEP_EVENT_LIST)))
        .emit_on('flush')
        .aggregate({
            'in_max': {'direction.in': Functions.max()},
            'out_max': {'direction.out': Functions.max()},
        })
        .to_event_list()
    )
    self.assertEqual(elist[0].get('out_max'), 4)
    self.assertEqual(elist[0].get('in_max'), 8)
def test_nested_wire_format(self):
    """make sure nested format round trips correctly."""
    payload = dict(
        name="traffic",
        columns=["time", "direction"],
        points=[
            [1400425947000, {'in': 1, 'out': 2}],
            [1400425948000, {'in': 3, 'out': 4}],
            [1400425949000, {'in': 5, 'out': 6}],
            [1400425950000, {'in': 7, 'out': 8}],
        ],
    )
    original = TimeSeries(payload)
    wire = self._call_interop_script('event', original.to_string())
    round_tripped = TimeSeries(wire)
    # first two points should survive the round trip intact
    for idx, (in_val, out_val) in enumerate([(1, 2), (3, 4)]):
        direction = round_tripped.at(idx).value('direction')
        self.assertEqual(direction.get('in'), in_val)
        self.assertEqual(direction.get('out'), out_val)
def test_fixed_window(self):
    """Test fixed window rollup"""
    series = TimeSeries(SEPT_2014_DATA)
    daily = series.fixed_window_rollup(
        '1d', dict(value=dict(value=Functions.avg())))
    self.assertEqual(daily.size(), 5)
    self.assertEqual(daily.at(0).value(), 46.875)
    self.assertEqual(daily.at(2).value(), 54.083333333333336)
    self.assertEqual(daily.at(4).value(), 51.85)

    # Not really a rollup: every data point lands in its own
    # aggregation index.
    series = TimeSeries(SEPT_2014_DATA)
    hourly = series.hourly_rollup(dict(value=dict(value=Functions.avg())))
    self.assertEqual(hourly.size(), len(SEPT_2014_DATA.get('points')))
    self.assertEqual(hourly.at(0).value(), 80.0)
    self.assertEqual(hourly.at(2).value(), 52.0)
    self.assertEqual(hourly.at(4).value(), 26.0)
def test_merge_sum_and_map(self):
    """test the time series merging/map static methods."""
    t_in = TimeSeries(TRAFFIC_DATA_IN)
    t_out = TimeSeries(TRAFFIC_DATA_OUT)

    merged = TimeSeries.timeseries_list_merge(
        dict(name='traffic'), [t_in, t_out])
    self.assertEqual(merged.at(2).get('in'), 26)
    self.assertEqual(merged.at(2).get('out'), 67)
    self.assertEqual(merged.name(), 'traffic')

    summed = TimeSeries.timeseries_list_sum(
        dict(name='traffic'), [t_in, t_in], 'in')
    self.assertEqual(summed.at(0).get('in'), 104)
    self.assertEqual(summed.at(1).get('in'), 36)

    # more variations for coverage: index-based series
    idx_data = dict(
        name="availability",
        columns=["index", "uptime"],
        points=[
            ["2014-07", 100], ["2014-08", 88], ["2014-09", 95],
            ["2014-10", 99], ["2014-11", 91], ["2014-12", 99],
            ["2015-01", 100], ["2015-02", 92], ["2015-03", 99],
            ["2015-04", 87], ["2015-05", 92], ["2015-06", 100],
        ],
    )
    t_idx = TimeSeries(idx_data)
    idx_sum = TimeSeries.timeseries_list_sum(
        dict(name='available'), [t_idx, t_idx], 'uptime')
    self.assertEqual(idx_sum.at(0).get('uptime'), 200)
    self.assertEqual(idx_sum.at(1).get('uptime'), 176)
    self.assertEqual(idx_sum.at(2).get('uptime'), 190)

    # and timerange-based series
    outage_data = dict(
        name="outages",
        columns=["timerange", "length", "esnet_ticket"],
        points=[
            [[1429673400000, 1429707600000], 23, "ESNET-20080101-001"],
            [[1429673500000, 1429707700000], 54, "ESNET-20080101-002"],
        ],
    )
    t_tr = TimeSeries(outage_data)
    tr_sum = TimeSeries.timeseries_list_sum(
        dict(name='outage length'), [t_tr, t_tr], 'length')
    self.assertEqual(tr_sum.at(0).get('length'), 46)
    self.assertEqual(tr_sum.at(1).get('length'), 108)
def test_indexed_event_series(self):
    """test a series of IndexedEvent objects."""
    uptime_series = dict(
        name="availability",
        columns=["index", "uptime"],
        points=[
            ["2014-07", "100%"], ["2014-08", "88%"], ["2014-09", "95%"],
            ["2014-10", "99%"], ["2014-11", "91%"], ["2014-12", "99%"],
            ["2015-01", "100%"], ["2015-02", "92%"], ["2015-03", "99%"],
            ["2015-04", "87%"], ["2015-05", "92%"], ["2015-06", "100%"],
        ],
    )
    series = TimeSeries(uptime_series)
    wire = self._call_interop_script('indexed_event', series.to_string())
    round_trip = TimeSeries(wire).to_json()
    self._validate_wire_points(uptime_series, round_trip)
    self.assertTrue(round_trip.get('utc'))

    # again with more involved data
    availability_series = dict(
        name="availability",
        columns=["index", "uptime", "notes", "outages"],
        points=[
            ["2014-08", 88, "", 17],
            ["2014-09", 100, "", 2],
            ["2014-09", 95, "", 6],
            ["2014-10", 99, "", 3],
            ["2014-11", 91, "", 14],
            ["2014-12", 99, "", 3],
            ["2015-01", 100, "", 0],
            ["2015-02", 92, "", 12],
            ["2015-03", 99, "Minor outage March 2", 4],
            ["2015-04", 87, "Planned downtime in April", 82],
            ["2015-05", 92, "Router failure June 12", 26],
            ["2015-06", 100, "", 0],
        ],
    )
    series = TimeSeries(availability_series)
    wire = self._call_interop_script('indexed_event', series.to_string())
    self._validate_wire_points(availability_series,
                               TimeSeries(wire).to_json())
def setUp(self):
    """setup."""
    # canned collection shared by the tests
    self._canned_collection = Collection(EVENT_LIST)
    # canned series objects built from the collection and wire data
    self._canned_event_series = TimeSeries(
        {'name': 'collection', 'collection': self._canned_collection})
    self._canned_wire_series = TimeSeries(DATA)
    # canned index-based series
    self._canned_index_series = TimeSeries(INDEXED_DATA)
def test_simple_take(self):
    """take 10 events in batch."""
    source = TimeSeries(SEPT_2014_DATA)
    keyed = (Pipeline()
             .from_source(source)
             .take(10)
             .to_keyed_collections())
    result = TimeSeries({'name': 'result', 'collection': keyed.get('all')})
    self.assertEqual(result.size(), 10)
def test_single_select(self):
    """select a single column."""
    source = TimeSeries(IN_OUT_DATA)
    keyed = (Pipeline()
             .from_source(source)
             .select('in')
             .to_keyed_collections())
    result = TimeSeries(
        {'name': 'new_timeseries', 'collection': keyed.get('all')})
    self.assertEqual(result.columns(), ['in'])
def test_subset_select(self):
    """select multiple columns."""
    source = TimeSeries(IN_OUT_DATA)
    keyed = (Pipeline()
             .from_source(source)
             .select(['out', 'perpendicular'])
             .to_keyed_collections())
    result = TimeSeries(
        {'name': 'new_timeseries', 'collection': keyed.get('all')})
    # column order is not guaranteed, so compare as sets
    self.assertEqual(set(result.columns()), set(['out', 'perpendicular']))
def test_fill_event_variants(self):
    """fill time range and indexed events."""
    # build six TimeRangeEvents; two hold None and should be zero-filled
    range_events = []
    for minutes, value in ((1, 100), (2, None), (3, None),
                           (4, 90), (5, 80), (6, 70)):
        begin = aware_utcnow()
        end = aware_utcnow() + datetime.timedelta(minutes=minutes)
        range_events.append(TimeRangeEvent((begin, end), {'in': value}))

    rts = TimeSeries(dict(name='collection',
                          collection=Collection(range_events)))
    filled_rts = rts.fill(field_spec='in')
    self.assertEqual(filled_rts.at(1).get('in'), 0)
    self.assertEqual(filled_rts.at(2).get('in'), 0)

    # indexed events
    indexed_events = [
        IndexedEvent('1d-12355', {'value': 42}),
        IndexedEvent('1d-12356', {'value': None}),
        IndexedEvent('1d-12357', {'value': None}),
        IndexedEvent('1d-12358', {'value': 52}),
        IndexedEvent('1d-12359', {'value': 55}),
        IndexedEvent('1d-12360', {'value': 58}),
    ]
    its = TimeSeries(dict(name='collection',
                          collection=Collection(indexed_events)))
    filled_its = its.fill()
    self.assertEqual(filled_its.at(1).get(), 0)
    self.assertEqual(filled_its.at(2).get(), 0)
def test_equality_methods(self):
    """test equal/same static methods."""
    first = TimeSeries(DATA)
    second = TimeSeries(DATA)
    # identical object: equal and same
    self.assertTrue(TimeSeries.equal(first, first))
    self.assertTrue(TimeSeries.same(first, first))
    # distinct objects built from the same data: same values, not equal
    self.assertFalse(TimeSeries.equal(first, second))
    self.assertTrue(TimeSeries.same(first, second))
    # copy constructor yields an equal but distinct object
    duplicate = TimeSeries(first)
    self.assertTrue(TimeSeries.equal(duplicate, first))
    self.assertFalse(duplicate is first)
def test_rate_bins(self):
    """replicate basic esmond rates."""
    # | 100 | | | | 200 | v
    # | | | | | | | |
    # 60 89 90 120 150 180 181 210 t ->
    # | | | | | |
    # |<- ? --------->|<- 1.08/s --->|<- 1.08/s --->|<- 1.08/s --->|<- ? ------->| result
    counters = dict(
        name="traffic",
        columns=["time", "value"],
        points=[[89000, 100], [181000, 200]],
    )
    rates = TimeSeries(counters).align(window='30s').rate()
    self.assertEqual(rates.size(), 3)
    self.assertEqual(rates.at(0).get('value_rate'), 1.0869565217391313)
    self.assertEqual(rates.at(1).get('value_rate'), 1.0869565217391293)
    self.assertEqual(rates.at(2).get('value_rate'), 1.0869565217391313)
def parse(self):
    """Parse the provided XML data."""
    points = []
    for item in self.xml.findall('query-reply/item'):
        addr = item.find('key').get('id')
        # resolve before redaction so the lookup sees the full address
        if self.resolve_dns:
            dns_name = self._dns_lookup(addr)
        else:
            dns_name = "[DNS resolution not enabled]"
        if self.redact:
            # keep the first three octets, mask the last one
            addr = '.'.join(addr.split('.')[:3]) + ".xxx"
        max_val = int(item.find('max').get('value'))
        tstamp = int(item.find('time').get('value')) * self.timestamp_scalar
        points.append([tstamp, addr, dns_name, max_val])
    points.sort()
    return TimeSeries({
        "name": "top talkers",
        "columns": ["time", "ip_addr", "dns_name", "max"],
        "points": points,
    })
def _items_to_timeseries(self, item):
    """Convert items to timeseries. INTERNAL USE ONLY."""
    keys = item.findall("key")
    name = keys[1].get("name") if len(keys) == 2 else item.get("name")

    # if we have an AS based report, augment the name with the ASN
    if self.filter2 and self.filter2.startswith("as_"):
        asn = item.get("id").split("|")[1]
        name += "|AS%s" % (asn, )
    elif self.filter1 and self.filter1.startswith("as_"):
        name += "|AS%s" % (item.get("id"), )

    raw_points = self._get_points(item)
    points = []
    for i, data_in in enumerate(raw_points["in"]):
        # timestamps are evenly spaced from self.begin at self.frequency
        tstamp = (self.begin + (i * self.frequency)) * self.timestamp_scalar
        points.append([tstamp, data_in, raw_points["out"][i]])

    return TimeSeries({
        "name": name,
        "columns": ["time", "in", "out"],
        "points": points,
    })
def test_bad_args(self):
    """error states for coverage."""
    # various bad values should all raise
    for bad_call in (
            lambda: Align(dict()),
            lambda: Rate(dict()),
            lambda: self._simple_ts.align(method='bogus'),
            lambda: self._simple_ts.align(limit='bogus'),
    ):
        with self.assertRaises(ProcessorException):
            bad_call()

    # non event types can't be aligned or rated
    ticket_range = dict(
        name="outages",
        columns=["timerange", "title", "esnet_ticket"],
        points=[
            [[1429673400000, 1429707600000], "BOOM", "ESNET-20080101-001"],
            [[1429673400000, 1429707600000], "BAM!", "ESNET-20080101-002"],
        ],
    )
    ts = TimeSeries(ticket_range)
    with self.assertRaises(ProcessorException):
        ts.align()
    with self.assertRaises(ProcessorException):
        ts.rate()
def test_negative_derivatives(self):
    """Test behavior on counter resets."""
    counters = dict(
        name="traffic",
        columns=["time", "value"],
        points=[[89000, 100], [181000, 50]],
    )
    ts = TimeSeries(counters)

    # a decreasing counter will produce negative derivatives
    rates = ts.align(window='30s').rate()
    self.assertEqual(rates.size(), 3)
    self.assertEqual(rates.at(0).get('value_rate'), -0.5434782608695656)
    self.assertEqual(rates.at(1).get('value_rate'), -0.5434782608695646)
    self.assertEqual(rates.at(2).get('value_rate'), -0.5434782608695653)

    # disallowing negatives yields None in their place
    clamped = ts.align(window='30s').rate(allow_negative=False)
    self.assertEqual(clamped.size(), 3)
    for idx in range(3):
        self.assertEqual(clamped.at(idx).get('value_rate'), None)
def test_quantile(self):
    """test TimeSeries.quantile()"""
    series = TimeSeries(
        dict(name="Sensor values",
             columns=["time", "temperature"],
             points=[[1400425951000, 22.3], [1400425952000, 32.4],
                     [1400425953000, 12.1], [1400425955000, 76.8],
                     [1400425956000, 87.3], [1400425957000, 54.6],
                     [1400425958000, 45.5], [1400425959000, 87.9]]))

    # quartiles under each interpolation method
    cases = [
        (dict(), [29.875, 50.05, 79.425]),
        (dict(method='linear'), [29.875, 50.05, 79.425]),
        (dict(method='lower'), [22.3, 45.5, 76.8]),
        (dict(method='higher'), [32.4, 54.6, 87.3]),
        (dict(method='nearest'), [32.4, 54.6, 76.8]),
        (dict(method='midpoint'), [27.35, 50.05, 82.05]),
    ]
    for kwargs, expected in cases:
        self.assertEqual(
            series.quantile(4, field_path='temperature', **kwargs),
            expected)

    # a single division produces no interior quantiles
    self.assertEqual(series.quantile(1, 'temperature', 'linear'), [])

    with self.assertRaises(CollectionException):
        series.quantile(15, field_path='temperature')
def test_non_fixed_rollups(self):
    """Work the calendar rollup logic / utc / etc."""
    series = TimeSeries(SEPT_2014_DATA)
    first_ts = SEPT_2014_DATA.get('points')[0][0]
    agg_spec = dict(value=dict(value=Functions.avg()))

    # just silence the warnings, not do anything with them.
    with warnings.catch_warnings(record=True):
        daily_avg = series.daily_rollup(agg_spec)
        self.assertEqual(
            Index.get_daily_index_string(dt_from_ms(first_ts), utc=False),
            daily_avg.at(0).index().to_string())

        monthly_avg = series.monthly_rollup(agg_spec)
        self.assertEqual(
            Index.get_monthly_index_string(dt_from_ms(first_ts), utc=False),
            monthly_avg.at(0).index().to_string())

        yearly_avg = series.yearly_rollup(agg_spec)
        self.assertEqual(
            Index.get_yearly_index_string(dt_from_ms(first_ts), utc=False),
            yearly_avg.at(0).index().to_string())
def test_invalid_point(self):
    """make sure non-numeric values are handled properly."""
    corrupted = copy.deepcopy(SIMPLE_GAP_DATA)
    # poison the second-to-last point with a non-numeric value
    corrupted.get('points')[-2][1] = 'non_numeric_value'
    ts = TimeSeries(corrupted)

    with warnings.catch_warnings(record=True) as wrn:
        aligned = ts.align(window='1m')
        self.assertEqual(len(wrn), 1)
        self.assertTrue(issubclass(wrn[0].category, ProcessorWarning))

    self.assertEqual(aligned.size(), 8)
    expected = [1.25, 1.8571428571428572, 1.2857142857142856,
                1.0, 1.0, 1.0,
                None,   # bad value
                None]   # bad value
    for idx, value in enumerate(expected):
        self.assertEqual(aligned.at(idx).get(), value)

    with warnings.catch_warnings(record=True) as wrn:
        a_diff = aligned.rate()
        self.assertEqual(len(wrn), 1)
        self.assertTrue(issubclass(wrn[0].category, ProcessorWarning))

    self.assertEqual(a_diff.at(5).get(), None)  # bad value
    self.assertEqual(a_diff.at(6).get(), None)  # bad value
def test_bad_args(self):
    """Trigger exceptions and warnings, etc."""
    unbounded = Stream()

    # windowing an unbounded source with utc=False warns
    with warnings.catch_warnings(record=True) as wrn:
        Pipeline().from_source(unbounded).window_by('1h', utc=False)
        self.assertEqual(len(wrn), 1)
        self.assertTrue(issubclass(wrn[0].category, PipelineWarning))

    # bad arg
    with self.assertRaises(PipelineException):
        Pipeline().from_source(dict())

    # no source
    with self.assertRaises(PipelineException):
        Pipeline().to_keyed_collections()

    # can't iterate on unbounded source
    with self.assertRaises(PipelineIOException):
        list(unbounded.events())

    # bad emit on type
    with self.assertRaises(PipelineIOException):
        (Pipeline()
         .from_source(
             TimeSeries(dict(name='events', events=DEEP_EVENT_LIST)))
         .emit_on('BOGUS')
         .aggregate({'max_in': {'direction.in': Functions.max()}})
         .to_event_list())
def test_series_creation(self):
    """test timeseries creation.

    Calls to to_json() are to trigger coverage for different variants.
    """
    # from a wire format event list
    wire_events = TimeSeries(DATA)
    self.assertEqual(wire_events.size(), len(DATA.get('points')))

    # from a wire format index
    avail = TimeSeries(AVAILABILITY_DATA)
    self.assertEqual(avail.size(), len(AVAILABILITY_DATA.get('points')))
    self.assertEqual(avail.to_json().get('name'), 'availability')

    # from a list of events
    from_events = TimeSeries(dict(name='events', events=EVENT_LIST))
    self.assertEqual(from_events.size(), len(EVENT_LIST))

    # from a collection
    from_coll = TimeSeries(
        dict(name='collection', collection=self._canned_collection))
    self.assertEqual(from_coll.size(), self._canned_collection.size())

    # copy constructor
    copied = TimeSeries(from_coll)
    self.assertEqual(from_coll.size(), copied.size())

    # from a wire format time range
    outages = TimeSeries(TICKET_RANGE)
    self.assertEqual(outages.size(), len(TICKET_RANGE.get('points')))
    self.assertEqual(outages.to_json().get('name'), 'outages')

    # non-utc indexed data variant mostly for coverage
    idxd = copy.deepcopy(INDEXED_DATA)
    idxd['utc'] = False
    non_utc = TimeSeries(idxd)
    self.assertFalse(non_utc.is_utc())
    self.assertFalse(non_utc.to_json().get('utc'))

    # indexed data variant using Index object - for coverage as well
    idxd2 = copy.deepcopy(INDEXED_DATA)
    idxd2['index'] = Index(idxd2.get('index'))
    with_index_obj = TimeSeries(idxd2)
    self.assertEqual(with_index_obj.to_json().get('index'), '1d-625')

    # make sure complex/deep/nested wire format is being handled correctly.
    nested = TimeSeries(DATA_FLOW)
    self.assertEqual(nested.at(0).value('direction').get('in'), 1)
    self.assertEqual(nested.at(0).value('direction').get('out'), 2)
    self.assertEqual(nested.at(1).value('direction').get('in'), 3)
    self.assertEqual(nested.at(1).value('direction').get('out'), 4)
def test_fixed_window_collect(self):
    """Make collections for each day in the timeseries."""
    series = TimeSeries(SEPT_2014_DATA)
    daily_collections = series.collect_by_fixed_window('1d')
    # spot-check the sizes of two of the daily buckets
    self.assertEqual(daily_collections.get('1d-16314').size(), 24)
    self.assertEqual(daily_collections.get('1d-16318').size(), 20)
def test_zero_fill(self):
    """test using the filler to fill missing values with zero."""
    gappy = dict(
        name="traffic",
        columns=["time", "direction"],
        points=[
            [1400425947000, {'in': 1, 'out': None}],
            [1400425948000, {'in': None, 'out': 4}],
            [1400425949000, {'in': 5, 'out': None}],
            [1400425950000, {'in': None, 'out': 8}],
            [1400425960000, {'in': 9, 'out': None}],
            [1400425970000, {'in': None, 'out': 12}],
        ],
    )
    ts = TimeSeries(gappy)

    # fill all invalid values
    both_filled = ts.fill(field_spec=['direction.in', 'direction.out'])
    self.assertEqual(both_filled.size(), 6)
    self.assertEqual(both_filled.at(0).get('direction.out'), 0)
    self.assertEqual(both_filled.at(2).get('direction.out'), 0)
    self.assertEqual(both_filled.at(1).get('direction.in'), 0)

    # fill one column; the other keeps its Nones
    in_filled = ts.fill(field_spec='direction.in')
    self.assertEqual(in_filled.size(), 6)
    self.assertEqual(in_filled.at(1).get('direction.in'), 0)
    self.assertEqual(in_filled.at(3).get('direction.in'), 0)
    self.assertIsNone(in_filled.at(0).get('direction.out'))
    self.assertIsNone(in_filled.at(2).get('direction.out'))
def test_assymetric_linear_fill(self):
    """Test new chained/assymetric linear default fill in TimeSeries."""
    gappy = dict(
        name="traffic",
        columns=["time", "direction"],
        points=[
            [1400425947000, {'in': 1, 'out': None}],
            [1400425948000, {'in': None, 'out': None}],
            [1400425949000, {'in': None, 'out': None}],
            [1400425950000, {'in': 3, 'out': 8}],
            [1400425960000, {'in': None, 'out': None}],
            [1400425970000, {'in': 5, 'out': 12}],
            [1400425980000, {'in': 6, 'out': 13}],
        ],
    )
    filled = TimeSeries(gappy).fill(
        method='linear', field_spec=['direction.in', 'direction.out'])

    # 'in' interpolates from index 0; 'out' has no leading value, so the
    # first three points can't be filled and stay None.
    expected_in = [1, 1.6666666666666665, 2.333333333333333, 3, 4.0, 5]
    expected_out = [None, None, None, 8, 10.0, 12]
    for idx in range(6):
        self.assertEqual(filled.at(idx).get('direction.in'),
                         expected_in[idx])
        self.assertEqual(filled.at(idx).get('direction.out'),
                         expected_out[idx])
def test_percentile_empty(self):
    """percentile of an empty timeseries."""
    empty_series = TimeSeries(
        dict(name="Sensor values",
             columns=["time", "temperature"],
             points=[]))
    # no data -> no percentile at either extreme
    self.assertIsNone(empty_series.percentile(0, 'temperature'))
    self.assertIsNone(empty_series.percentile(100, 'temperature'))
def setUp(self):
    """set up for all tests."""
    # canned collection
    self._canned_collection = Collection(EVENT_LIST)
    # canned series objects
    self._canned_event_series = TimeSeries(
        {'name': 'collection', 'collection': self._canned_collection})
    # reset the module-level result holder before every test
    global RESULTS
    RESULTS = None
def test_various_bad_args(self):
    """ensure proper exceptions are being raised."""
    series = TimeSeries(DATA)
    # an empty aggregation spec is rejected
    with self.assertRaises(CollectionException):
        series.aggregate(dict())
    # as are misordered/extra arguments
    with self.assertRaises(CollectionException):
        series.aggregate(Functions.sum(), dict())
def test_ts_offset_chain(self):
    """test running the offset chain directly from the TimeSeries."""
    keyed = (TimeSeries(DATA)
             .pipeline()
             .offset_by(1, 'value')
             .offset_by(2)
             .to_keyed_collections())
    result = keyed['all']
    # each original value shifted up by a total of 3
    for idx, expected in enumerate((55, 21, 29, 96)):
        self.assertEqual(result.at(idx).get(), expected)
def test_percentile_single(self):
    """percentile of an timeseries with one point."""
    single = TimeSeries(
        dict(name="Sensor values",
             columns=["time", "temperature"],
             points=[[1400425951000, 22.3]]))
    # with a single sample every percentile collapses to that value
    for pctl in (0, 50, 100):
        self.assertEqual(single.percentile(pctl, 'temperature'), 22.3)
def test_simple_collapse(self):
    """collapse a subset of columns."""
    source = TimeSeries(IN_OUT_DATA)
    keyed = (Pipeline()
             .from_source(source)
             .collapse(['in', 'out'], 'in_out_sum', Functions.sum())
             .emit_on('flush')
             .to_keyed_collections())
    summed = keyed.get('all')
    for idx, expected in enumerate((117, 110, 108)):
        self.assertEqual(summed.at(idx).get('in_out_sum'), expected)