def test_data_accessors(self):
    """Exercise the metadata/accessor methods on the canned series."""
    wire = self._canned_wire_series
    indexed = self._canned_index_series
    evt_series = self._canned_event_series

    self.assertEqual(wire.name(), 'traffic')
    self.assertTrue(wire.is_utc())

    # index accessors
    self.assertEqual(indexed.index_as_string(), INDEXED_DATA.get('index'))
    self.assertTrue(isinstance(indexed.index(), Index))
    self.assertEqual(indexed.index_as_range().to_json(),
                     [54000000000, 54086400000])

    # metadata accessors
    self.assertEqual(wire.meta(), {'utc': True, 'name': 'traffic'})
    self.assertEqual(wire.meta('name'), 'traffic')

    self.assertEqual(len(list(wire.events())), 4)
    self.assertEqual(
        evt_series.collection(),
        evt_series._collection)  # pylint: disable=protected-access
    self.assertEqual(evt_series.size_valid('in'), 3)

    # at_first() / at_last() / at_time()
    self.assertTrue(Event.same(evt_series.at_first(), EVENT_LIST[0]))
    self.assertTrue(Event.same(evt_series.at_last(), EVENT_LIST[2]))
    ref_dtime = EVENT_LIST[1].timestamp() + datetime.timedelta(seconds=3)
    self.assertTrue(Event.same(evt_series.at_time(ref_dtime), EVENT_LIST[1]))
def test_sum_avg_with_filtering(self):
    """test summing multiple events together via combine on the back end."""
    payloads = [
        {'a': 5, 'b': 6, 'c': 7},
        {'a': None, 'b': None, 'c': 4},
        {'a': 1, 'b': 2, 'c': 3},
    ]
    events = [self._create_event(self.aware_ts, pay) for pay in payloads]

    # zero_missing: None counts as zero, so 'a' sums to 5 + 0 + 1
    summed = Event.sum(events, filter_func=Filters.zero_missing)
    self.assertEqual(summed[0].get('a'), 6)

    # propagate_missing: a single None poisons the whole column
    summed = Event.sum(events, filter_func=Filters.propagate_missing)
    self.assertIsNone(summed[0].get('a'))

    # ignore_missing: average only the present values, (6 + 2) / 2
    averaged = Event.avg(events, filter_func=Filters.ignore_missing)
    self.assertEqual(averaged[0].get('b'), 4)

    averaged = Event.avg(events, filter_func=Filters.propagate_missing)
    self.assertIsNone(averaged[0].get('b'))
def test_data_accessors(self):
    """methods to get metadata and such."""
    self.assertEqual(self._canned_wire_series.name(), 'traffic')
    self.assertTrue(self._canned_wire_series.is_utc())

    # index accessors on the indexed series
    idx_series = self._canned_index_series
    self.assertEqual(idx_series.index_as_string(), INDEXED_DATA.get('index'))
    self.assertTrue(isinstance(idx_series.index(), Index))
    self.assertEqual(idx_series.index_as_range().to_json(),
                     [54000000000, 54086400000])

    # metadata on the wire series
    self.assertEqual(self._canned_wire_series.meta(),
                     {'utc': True, 'name': 'traffic'})
    self.assertEqual(self._canned_wire_series.meta('name'), 'traffic')
    self.assertEqual(len(list(self._canned_wire_series.events())), 4)

    # the event series exposes its underlying collection
    self.assertEqual(
        self._canned_event_series.collection(),
        self._canned_event_series._collection)  # pylint: disable=protected-access
    self.assertEqual(self._canned_event_series.size_valid('in'), 3)

    # at_first() / at_last() / at_time()
    self.assertTrue(
        Event.same(self._canned_event_series.at_first(), EVENT_LIST[0]))
    self.assertTrue(
        Event.same(self._canned_event_series.at_last(), EVENT_LIST[2]))
    probe = EVENT_LIST[1].timestamp() + datetime.timedelta(seconds=3)
    self.assertTrue(
        Event.same(self._canned_event_series.at_time(probe), EVENT_LIST[1]))
def test_event_same(self):
    """test Event.same() static method."""
    original = copy.copy(self.canned_event)
    duplicate = copy.copy(self.canned_event)
    self.assertTrue(Event.same(original, duplicate))

    # identical payload but a fresh timestamp is no longer "same"
    retimed = Event(freeze(dict(time=self.aware_ts, data=original.data())))
    self.assertFalse(Event.same(original, retimed))
def test_event_map_function_arg_and_reduce(self):  # pylint: disable=invalid-name
    """Test Event.map() with a custom function and Event.reduce().

    The previous assertions wrapped both sides in set(), which reduces a
    dict to its KEYS only, so the mapped/reduced values were never
    actually verified.  Compare the full dicts instead.
    """

    def map_sum(event):  # pylint: disable=missing-docstring
        # per-event sum of the two traffic columns
        return dict(sum=event.get('in') + event.get('out'))

    result = Event.map(self._get_event_series(), map_sum)
    # in [2,4,6,8] + out [11,13,15,18] -> [13,17,21,26]
    self.assertEqual(result, {'sum': [13, 17, 21, 26]})

    res = Event.reduce(result, Functions.avg())
    # avg of [13,17,21,26] is 19.25
    self.assertEqual(res, {'sum': 19.25})
def test_nested_merge(self):
    """trigger merging nested data."""
    now = aware_utcnow()
    left = Event(now, dict(payload=dict(a=1)))
    right = Event(now, dict(payload=dict(b=2)))

    merged = Event.merge([left, right])
    # nested keys are reachable through dotted paths after the merge
    self.assertEqual(merged[0].get('payload.a'), 1)
    self.assertEqual(merged[0].get('payload.b'), 2)
def test_event_merge(self):
    """Test Event.merge()/merge_events()"""
    # two events at the same timestamp carrying disjoint payloads
    payload_one = dict(foo='bar', baz='quux')
    payload_two = dict(foo2='bar', baz2='quux')
    first = Event(self.aware_ts, payload_one)
    second = Event(self.aware_ts, payload_two)

    merged = Event.merge([first, second])
    combined = dict(payload_one, **payload_two)
    self.assertEqual(set(thaw(merged[0].data())), set(combined))
def test_mutators(self):
    """test collection mutation."""
    extra = Event(1429673580000, {'in': 7, 'out': 8})
    grown = self._canned_collection.add_event(extra)
    self.assertEqual(grown.size(), 4)

    # slice() is exercised here because this collection is longer
    window = grown.slice(1, 3)
    self.assertEqual(window.size(), 2)
    self.assertTrue(Event.same(window.at(0), EVENT_LIST[1]))

    # exercise stdev/median on the grown collection as well
    self.assertEqual(grown.stdev('in'), 2.23606797749979)
    self.assertEqual(grown.median('in'), 4)
def test_event_valid(self):
    """test Event.is_valid_value()"""
    payload = dict(
        good='good',
        also_good=[],
        none=None,
        nan=float('NaN'),
        empty_string='',  # presume this is undefined
    )
    event = Event(self.aware_ts, payload)

    # non-None, non-NaN, non-empty-string values are valid
    for valid_key in ('good', 'also_good'):
        self.assertTrue(Event.is_valid_value(event, valid_key))
    for invalid_key in ('none', 'nan', 'empty_string'):
        self.assertFalse(Event.is_valid_value(event, invalid_key))
def setUp(self):
    """Build one canned event of each flavor for the converter tests.

    Creates a point Event, a TimeRangeEvent spanning one hour (ms
    epoch bounds), and an IndexedEvent on a 1-hour index string.
    """
    super(TestConverter, self).setUp()
    # plain event at 2015-03-14T07:00:00Z (1426316400000 ms)
    self._event = Event(dt_from_ms(1426316400000), 3)
    # time-range event over the same hour
    self._tre = TimeRangeEvent(TimeRange([1426316400000, 1426320000000]), 3)
    # indexed event on the matching 1h index bucket
    self._idxe = IndexedEvent("1h-396199", 3)
def test_event_map_no_key_map_all(self):
    """Test Event.map() with no field key - it will map everything.

    Compare the whole result dict; the previous set(dict) comparison
    only checked the keys, never the mapped value lists.
    """
    result = Event.map(self._get_event_series())
    self.assertEqual(
        result,
        {
            'in': [2, 4, 6, 8],
            'name': ['source1', 'source1', 'source1', 'source1'],
            'out': [11, 13, 15, 18],
        })
def test_event_map_multi_key(self):
    """Test Event.map() with multiple field keys.

    Compare the whole result dict; the previous set(dict) comparison
    only checked the keys, never the mapped value lists.
    """
    result = Event.map(self._get_event_series(), ['in', 'out'])
    self.assertEqual(result, {'out': [11, 13, 15, 18], 'in': [2, 4, 6, 8]})
def test_indexed_event_merge(self):
    """test merging indexed events."""
    index = '1h-396206'
    left = IndexedEvent(index, {'a': 5, 'b': 6})
    right = IndexedEvent(index, freeze({'c': 2}))  # pmap for coverage

    merged = Event.merge([left, right])
    for field, expected in (('a', 5), ('b', 6), ('c', 2)):
        self.assertEqual(merged[0].get(field), expected)

    # bad merges - wrong length/etc return an empty list
    self.assertEqual(Event.merge({}), [])
    self.assertEqual(Event.merge([]), [])
def test_regular_with_event_copy(self):
    """create a regular event with copy constructor/existing event."""
    payload = {'a': 3, 'b': 6}
    source = self._create_event(self.aware_ts, payload)
    # passing an existing Event invokes the copy constructor
    duplicate = Event(source)
    self._base_checks(duplicate, payload, dtime=self.aware_ts)
def test_event_selector(self):
    """test Event.selector()"""
    deep = dict({'WestRoute': {'in': 567, 'out': 890}}, **DEEP_EVENT_DATA)
    event = self._create_event(self.aware_ts, deep)

    # select a single key
    selected = Event.selector(event, 'NorthRoute')
    self.assertEqual(len(list(selected.data().keys())), 1)
    self.assertIsNotNone(selected.data().get('NorthRoute'))

    # select a list of keys
    selected = Event.selector(event, ['WestRoute', 'SouthRoute'])
    self.assertEqual(len(list(selected.data().keys())), 2)
    self.assertIsNotNone(selected.data().get('SouthRoute'))
    self.assertIsNotNone(selected.data().get('WestRoute'))

    # bad args for coverage - the event comes back unchanged
    self.assertEqual(event, Event.selector(event, 2))
def test_event_map_no_key_map_all(self):
    """Test Event.map() with no field key - it will map everything.

    Assert on the full dict: wrapping both sides in set() compared only
    the dict keys, so the mapped value lists were never checked.
    """
    result = Event.map(self._get_event_series())
    self.assertEqual(
        result,
        {
            'in': [2, 4, 6, 8],
            'name': ['source1', 'source1', 'source1', 'source1'],
            'out': [11, 13, 15, 18],
        })
def test_time_range_event_merge(self):
    """Test merging."""
    window = TimeRange(self.test_begin_ts, self.test_end_ts)
    merged = Event.merge([
        TimeRangeEvent(window, dict(a=5, b=6)),
        TimeRangeEvent(window, dict(c=2)),
    ])
    for field, expected in (('a', 5), ('b', 6), ('c', 2)):
        self.assertEqual(merged[0].get(field), expected)
def test_sum_avg_with_filtering(self):
    """test summing multiple events together via combine on the back end."""
    events = [
        self._create_event(self.aware_ts, {'a': 5, 'b': 6, 'c': 7}),
        self._create_event(self.aware_ts, {'a': None, 'b': None, 'c': 4}),
        self._create_event(self.aware_ts, {'a': 1, 'b': 2, 'c': 3}),
    ]

    # sum: zero_missing treats None as 0; propagate_missing yields None
    self.assertEqual(
        Event.sum(events, filter_func=Filters.zero_missing)[0].get('a'), 6)
    self.assertIsNone(
        Event.sum(events, filter_func=Filters.propagate_missing)[0].get('a'))

    # avg: ignore_missing averages present values only; propagate -> None
    self.assertEqual(
        Event.avg(events, filter_func=Filters.ignore_missing)[0].get('b'), 4)
    self.assertIsNone(
        Event.avg(events, filter_func=Filters.propagate_missing)[0].get('b'))
def test_event_selector(self):
    """test Event.selector()"""
    merged = dict({'WestRoute': {'in': 567, 'out': 890}}, **DEEP_EVENT_DATA)
    event = self._create_event(self.aware_ts, merged)

    # a single string key keeps exactly that branch
    north_only = Event.selector(event, 'NorthRoute')
    self.assertEqual(len(list(north_only.data().keys())), 1)
    self.assertIsNotNone(north_only.data().get('NorthRoute'))

    # a list of keys keeps each of them
    two_routes = Event.selector(event, ['WestRoute', 'SouthRoute'])
    self.assertEqual(len(list(two_routes.data().keys())), 2)
    self.assertIsNotNone(two_routes.data().get('SouthRoute'))
    self.assertIsNotNone(two_routes.data().get('WestRoute'))

    # bad args for coverage
    unchanged = Event.selector(event, 2)
    self.assertEqual(event, unchanged)
def test_aggregation_filtering(self):
    """Test the new filtering methods for cleaning stuff."""
    coll = Collection([
        Event(1429673400000, {'in': 1, 'out': 1}),
        Event(1429673460000, {'in': 2, 'out': 5}),
        Event(1429673520000, {'in': 3, 'out': None}),
    ])

    self.assertEqual(coll.aggregate(Functions.sum(), 'in'), 6)

    # (aggregation function, field, expected value)
    cases = [
        (Functions.sum(Filters.propagate_missing), 'in', 6),
        (Functions.sum(Filters.propagate_missing), 'out', None),
        (Functions.avg(Filters.ignore_missing), 'in', 2),
        (Functions.avg(Filters.ignore_missing), 'out', 3),
        (Functions.avg(Filters.zero_missing), 'in', 2),
        (Functions.avg(Filters.zero_missing), 'out', 2),
    ]
    for func, field, expected in cases:
        self.assertEqual(coll.aggregate(func, field), expected)
def test_aggregation_filtering(self):
    """test the filtering modifers to the agg functions."""
    series = TimeSeries(dict(
        name='events',
        events=[
            Event(1429673400000, {'in': 1, 'out': 2}),
            Event(1429673460000, {'in': 3, 'out': None}),
            Event(1429673520000, {'in': 5, 'out': 6}),
        ]))

    self.assertEqual(series.sum('out', Filters.ignore_missing), 8)
    self.assertEqual(series.avg('out', Filters.ignore_missing), 4)
    self.assertEqual(series.min('out', Filters.zero_missing), 0)
    self.assertEqual(series.max('out', Filters.propagate_missing), None)
    self.assertEqual(series.mean('out', Filters.ignore_missing), 4)
    self.assertEqual(series.median('out', Filters.zero_missing), 2)
    self.assertEqual(series.stdev('out', Filters.zero_missing),
                     2.494438257849294)

    # none_if_empty turns an empty input into None
    self.assertIsNone(Functions.avg(Filters.none_if_empty)([]))

    # an unrecognized filter function raises
    def bad_filtering_function():  # pylint: disable=missing-docstring
        pass

    with self.assertRaises(FilterException):
        series.sum('out', bad_filtering_function)
def test_linear_stream(self):
    """Test streaming on linear fill"""

    def cback(collection, window_key, group_by):
        """the callback"""
        global RESULTS  # pylint: disable=global-statement
        RESULTS = collection

    events = [
        Event(1400425947000, 1),
        Event(1400425948000, 2),
        Event(1400425949000, dict(value=None)),
        Event(1400425950000, dict(value=None)),
        Event(1400425951000, dict(value=None)),
        Event(1400425952000, 5),
        Event(1400425953000, 6),
        Event(1400425954000, 7),
    ]

    stream = Stream()
    (
        Pipeline()
        .from_source(stream)
        .fill(method='linear', field_spec='value')
        .to(CollectionOut, cback)
    )

    for event in events:
        stream.add_event(event)

    self.assertEqual(RESULTS.size(), len(events))

    # the three None gaps are linearly interpolated between 2 and 5
    expected = [1, 2, 2.75, 3.5, 4.25, 5, 6, 7]
    for position, value in enumerate(expected):
        self.assertEqual(RESULTS.at(position).get(), value)
def test_slices_and_permutations(self):
    """methods that slice/etc the underlying series."""
    # bisect: find the event just before a probe timestamp
    probe = dt_from_ms(1400425949000 + 30)
    pos = self._canned_wire_series.bisect(probe)

    # bisect with a bad search argument raises
    with self.assertRaises(CollectionException):
        self._canned_wire_series.bisect(datetime.datetime.now())

    self.assertEqual(
        self._canned_wire_series.at(pos).data().get('status'), 'fail')

    # clean
    self.assertEqual(self._canned_event_series.clean('in').size(), 3)
    self.assertEqual(
        self._canned_event_series.clean('bogus_value').size(), 0)

    # slice
    window = self._canned_event_series.slice(1, 3)
    self.assertEqual(window.size(), 2)
    self.assertTrue(Event.same(window.at(0), EVENT_LIST[1]))
def test_accessor_methods(self):
    """test various access methods. Mostly for coverage."""
    col = self._canned_collection

    # basic accessors
    self.assertEqual(col.to_json(), EVENT_LIST)
    self.assertEqual(len(json.loads(col.to_string())), 3)

    # at() - corollary to array index
    self.assertTrue(Event.same(col.at(2), EVENT_LIST[2]))
    # indexing past the end raises
    with self.assertRaises(CollectionException):
        col.at(5)

    # at_time(): probe just past each event's timestamp - covers the
    # middle event, overshooting the end, and returning index 0
    for idx in (1, 2, 0):
        probe = EVENT_LIST[idx].timestamp() + datetime.timedelta(seconds=3)
        self.assertTrue(Event.same(col.at_time(probe), EVENT_LIST[idx]))

    # hit a timestamp dead on for coverage
    self.assertEqual(col.at_time(EVENT_LIST[1].timestamp()).get('in'), 3)

    # an empty collection returns None from at_time()
    empty_coll = Collection(col, copy_events=False)
    self.assertIsNone(empty_coll.at_time(probe))

    # at_first() and at_last()
    self.assertTrue(Event.same(col.at_first(), EVENT_LIST[0]))
    self.assertTrue(Event.same(col.at_last(), EVENT_LIST[2]))

    # the raw event list accessors
    self.assertTrue(is_pvector(col.event_list()))
    self.assertTrue(isinstance(col.event_list_as_list(), list))
def test_slices_and_permutations(self):
    """methods that slice/etc the underlying series."""
    wire = self._canned_wire_series
    evt_series = self._canned_event_series

    # bisect
    search = dt_from_ms(1400425949000 + 30)
    bsect_idx = wire.bisect(search)

    # bisect with a bad search argument raises
    bad_search = datetime.datetime.now()
    with self.assertRaises(CollectionException):
        wire.bisect(bad_search)

    self.assertEqual(wire.at(bsect_idx).data().get('status'), 'fail')

    # clean
    self.assertEqual(evt_series.clean('in').size(), 3)
    self.assertEqual(evt_series.clean('bogus_value').size(), 0)

    # slice
    sliced = evt_series.slice(1, 3)
    self.assertEqual(sliced.size(), 2)
    self.assertTrue(Event.same(sliced.at(0), EVENT_LIST[1]))
def test_aggregate_and_conversion(self):
    """Aggregate/average and convert to TimeRangeEvent.

    Feeds five events straddling an hour boundary into a streaming
    pipeline that windows by 1h, averages 'in'/'out' per window, and
    emits the aggregates as TimeRangeEvents.  Results land in the
    module-global RESULTS dict keyed by ms timestamp.
    """
    # three events in the 01:00 hour, two in the 02:00 hour
    events_in = [
        Event(
            aware_dt_from_args(
                dict(year=2015, month=3, day=14, hour=1, minute=57)),
            {'in': 3, 'out': 1}),
        Event(
            aware_dt_from_args(
                dict(year=2015, month=3, day=14, hour=1, minute=58)),
            {'in': 9, 'out': 2}),
        Event(
            aware_dt_from_args(
                dict(year=2015, month=3, day=14, hour=1, minute=59)),
            {'in': 6, 'out': 6}),
        Event(
            aware_dt_from_args(
                dict(year=2015, month=3, day=14, hour=2, minute=0)),
            {'in': 4, 'out': 7}),
        Event(
            aware_dt_from_args(
                dict(year=2015, month=3, day=14, hour=2, minute=1)),
            {'in': 5, 'out': 9}),
    ]

    def cback(event):
        """callback to pass in."""
        # collect emitted events in the module-global RESULTS,
        # keyed by the event's timestamp in ms
        global RESULTS  # pylint: disable=global-statement
        if RESULTS is None:
            RESULTS = dict()
        RESULTS['{0}'.format(ms_from_dt(event.timestamp()))] = event

    uin = Stream()
    # window hourly, re-emit the aggregate on every incoming event,
    # and convert to TimeRangeEvents aligned to the window start (lag)
    (Pipeline()
     .from_source(uin)
     .window_by('1h')
     .emit_on('eachEvent')
     .aggregate({
         'in_avg': {'in': Functions.avg()},
         'out_avg': {'out': Functions.avg()},
     })
     .as_time_range_events(dict(alignment='lag'))
     .to(EventOut, cback))

    for i in events_in:
        uin.add_event(i)

    # 1426294800000 = 01:00 window; 1426298400000 = 02:00 window
    self.assertEqual(RESULTS.get('1426294800000').get('in_avg'), 6)
    self.assertEqual(RESULTS.get('1426294800000').get('out_avg'), 3)
    self.assertEqual(RESULTS.get('1426298400000').get('in_avg'), 4.5)
    self.assertEqual(RESULTS.get('1426298400000').get('out_avg'), 8)
def in_only(event):
    """Return a new event carrying only the 'in' field - same as .select() basically."""
    payload = {'in': event.get('in')}
    return Event(event.timestamp(), payload)
def test_event_map_single_key(self):
    """Test Event.map() with single field key.

    Compare the whole result dict; the previous set(dict) comparison
    only checked the keys, never the mapped value list.
    """
    result = Event.map(self._get_event_series(), 'in')
    self.assertEqual(result, {'in': [2, 4, 6, 8]})
FilterException, PipelineIOException, TimeSeriesException, ) from pypond.functions import Functions, Filters from pypond.index import Index from pypond.indexed_event import IndexedEvent from pypond.range import TimeRange from pypond.series import TimeSeries from pypond.timerange_event import TimeRangeEvent from pypond.util import is_pvector, ms_from_dt, aware_utcnow, dt_from_ms # taken from the pipeline tests EVENT_LIST = [ Event(1429673400000, { 'in': 1, 'out': 2 }), Event(1429673460000, { 'in': 3, 'out': 4 }), Event(1429673520000, { 'in': 5, 'out': 6 }), ] # taken from the series tests DATA = dict(name="traffic", columns=["time", "value", "status"], points=[[1400425947000, 52, "ok"], [1400425948000, 18, "ok"],
def test_simple_map_reduce(self):
    """test simple map/reduce.

    Compare the whole result dict; the previous set(dict) comparison
    only checked the keys, never the reduced averages.
    """
    result = Event.map_reduce(self._get_event_series(), ['in', 'out'],
                              Functions.avg())
    # avg(in [2,4,6,8]) = 5.0, avg(out [11,13,15,18]) = 14.25
    self.assertEqual(result, {'in': 5.0, 'out': 14.25})
def test_is_duplicate(self):
    """Test Event.is_duplicate()"""
    # pylint: disable=invalid-name
    now = aware_utcnow()

    # plain events: same ts + same value is always a duplicate; with
    # ignore_values=False the payload must match too
    e1 = Event(now, 23)
    e2 = Event(now, 23)
    self.assertTrue(Event.is_duplicate(e1, e2))
    self.assertTrue(Event.is_duplicate(e1, e2, ignore_values=False))

    e3 = Event(now, 25)
    self.assertTrue(Event.is_duplicate(e1, e3))
    self.assertFalse(Event.is_duplicate(e1, e3, ignore_values=False))

    # indexed events
    ie1 = IndexedEvent('1d-12355', {'value': 42})
    ie2 = IndexedEvent('1d-12355', {'value': 42})
    self.assertTrue(Event.is_duplicate(ie1, ie2))
    self.assertTrue(Event.is_duplicate(ie1, ie2, ignore_values=False))

    ie3 = IndexedEvent('1d-12355', {'value': 44})
    self.assertTrue(Event.is_duplicate(ie1, ie3))
    self.assertFalse(Event.is_duplicate(ie1, ie3, ignore_values=False))

    # time range events over a 12 hour window
    end_ts = aware_utcnow()
    begin_ts = end_ts - datetime.timedelta(hours=12)
    end_ms = ms_from_dt(end_ts)
    begin_ms = ms_from_dt(begin_ts)

    tre1 = TimeRangeEvent((begin_ms, end_ms), 11)
    tre2 = TimeRangeEvent((begin_ms, end_ms), 11)
    self.assertTrue(Event.is_duplicate(tre1, tre2))
    self.assertTrue(Event.is_duplicate(tre1, tre2, ignore_values=False))

    tre3 = TimeRangeEvent((begin_ms, end_ms), 22)
    self.assertTrue(Event.is_duplicate(tre1, tre3))
    self.assertFalse(Event.is_duplicate(tre1, tre3, ignore_values=False))
def test_sum_events_with_combine(self):
    """test summing multiple events together via combine on the back end."""
    events = [
        self._create_event(self.aware_ts, {'a': 5, 'b': 6, 'c': 7}),
        self._create_event(self.aware_ts, {'a': 2, 'b': 3, 'c': 4}),
        self._create_event(self.aware_ts, {'a': 1, 'b': 2, 'c': 3}),
    ]

    # sum every field
    total = Event.sum(events)
    self.assertEqual(total[0].get('a'), 8)
    self.assertEqual(total[0].get('b'), 11)
    self.assertEqual(total[0].get('c'), 14)

    # sum a single field - the rest are dropped
    total = Event.sum(events, 'a')
    self.assertEqual(total[0].get('a'), 8)
    self.assertIsNone(total[0].get('b'))
    self.assertIsNone(total[0].get('c'))

    # sum a subset of fields
    total = Event.sum(events, ['a', 'c'])
    self.assertEqual(total[0].get('a'), 8)
    self.assertIsNone(total[0].get('b'))
    self.assertEqual(total[0].get('c'), 14)

    # average over four events: (7 + 4 + 3 + 2) / 4
    averaged = Event.avg(
        events + [self._create_event(self.aware_ts,
                                     {'a': 1, 'b': 1, 'c': 2})],
        'c')
    self.assertEqual(averaged[0].get('c'), 4)

    # empty input returns an empty list
    self.assertEqual(Event.sum([]), [])
    self.assertEqual(Event.avg([]), [])

    # work the extra reducer functions in the Functions module
    reducer_cases = [
        (Functions.max(), 7),
        (Functions.min(), 3),
        (Functions.count(), 3),
        (Functions.first(), 7),
        (Functions.last(), 3),
        (Functions.difference(), 4),
    ]
    for func, expected in reducer_cases:
        combined = Event.combine(events, 'c', func)
        self.assertEqual(combined[0].get('c'), expected)

    # first/last on an empty sequence yield None
    self.assertIsNone(Functions.first()([]))
    self.assertIsNone(Functions.last()([]))
def test_event_series(self):
    """test a series that contains basic event objects.

    Round-trips three flavors of series through the external interop
    script (self._call_interop_script) and verifies the points and
    metadata survive the wire format.
    """
    # simple series of canned points
    event_series = dict(
        name="traffic",
        columns=["time", "value", "status"],
        points=[
            [1400425947000, 52, "ok"],
            [1400425948000, 18, "ok"],
            [1400425949000, 26, "fail"],
            [1400425950000, 93, "offline"]
        ]
    )
    series = TimeSeries(event_series)
    wire = self._call_interop_script('event', series.to_string())
    new_series = TimeSeries(wire)
    new_json = new_series.to_json()
    self._validate_wire_points(event_series, new_json)
    # utc flag survives the round trip
    self.assertTrue(new_json.get('utc'))

    # try something a bit fancier with different types in the metadata
    interface_series = dict(
        name="star-cr5:to_anl_ip-a_v4",
        description="star-cr5->anl(as683):100ge:site-ex:show:intercloud",
        device="star-cr5",
        id=169,
        interface="to_anl_ip-a_v4",
        is_ipv6=False,
        is_oscars=False,
        oscars_id=None,
        resource_uri="",
        site="anl",
        site_device="noni",
        site_interface="et-1/0/0",
        stats_type="Standard",
        title=None,
        columns=["time", "in", "out"],
        points=[
            [1400425947000, 52, 34],
            [1400425948000, 18, 13],
            [1400425949000, 26, 67],
            [1400425950000, 93, 91]
        ]
    )
    series = TimeSeries(interface_series)
    wire = self._call_interop_script('event', series.to_string())
    new_series = TimeSeries(wire)
    new_json = new_series.to_json()
    self._validate_wire_points(interface_series, new_json)

    # Now with a list of events rather than wire-format points
    event_objects = [
        Event(1429673400000, {'in': 1, 'out': 2}),
        Event(1429673460000, {'in': 3, 'out': 4}),
        Event(1429673520000, {'in': 5, 'out': 6}),
    ]
    series = TimeSeries(dict(name='events', events=event_objects))
    wire = self._call_interop_script('event', series.to_string())
    new_series = TimeSeries(wire)
    # each original event must equal its round-tripped counterpart
    for i in enumerate(event_objects):
        self.assertTrue(Event.same(i[1], new_series.at(i[0])))
def test_collect_and_aggregate(self):
    """collect events together and aggregate.

    Streams five typed events into a pipeline that groups by 'type',
    windows into fixed 1h capsules, and averages 'in'/'out' per
    (window, type) group.  Emitted events land in the module-global
    RESULTS dict keyed by '<index>:<type>'.
    """
    # events of type a/b straddling the 07:00 -> 08:00 hour boundary
    events_in = [
        Event(
            aware_dt_from_args(
                dict(year=2015, month=3, day=14, hour=7, minute=57)),
            {'type': 'a', 'in': 3, 'out': 1}),
        Event(
            aware_dt_from_args(
                dict(year=2015, month=3, day=14, hour=7, minute=58)),
            {'type': 'a', 'in': 9, 'out': 2}),
        Event(
            aware_dt_from_args(
                dict(year=2015, month=3, day=14, hour=7, minute=59)),
            {'type': 'b', 'in': 6, 'out': 6}),
        Event(
            aware_dt_from_args(
                dict(year=2015, month=3, day=14, hour=8, minute=0)),
            {'type': 'a', 'in': 4, 'out': 7}),
        Event(
            aware_dt_from_args(
                dict(year=2015, month=3, day=14, hour=8, minute=1)),
            {'type': 'b', 'in': 5, 'out': 9}),
    ]

    def cback(event):
        """callback to pass in."""
        # stash emitted aggregates keyed by '<window index>:<type>'
        global RESULTS  # pylint: disable=global-statement
        if RESULTS is None:
            RESULTS = dict()
        RESULTS['{0}:{1}'.format(event.index(), event.get('type'))] = event

    uin = Stream()
    # group by type, fixed 1h windows, aggregate on every event;
    # Functions.keep() carries the group key through to the output
    (Pipeline()
     .from_source(uin)
     .group_by('type')
     .window_by(Capsule(duration='1h', type='fixed'))
     .emit_on('eachEvent')
     .aggregate({
         'type': {'type': Functions.keep()},
         'in_avg': {'in': Functions.avg()},
         'out_avg': {'out': Functions.avg()}
     })
     .to(EventOut, cback))

    for i in events_in:
        uin.add_event(i)

    # 1h-396199 = 07:00 window, 1h-396200 = 08:00 window
    self.assertEqual(RESULTS.get('1h-396199:a').get('in_avg'), 6)
    self.assertEqual(RESULTS.get('1h-396199:a').get('out_avg'), 1.5)
    self.assertEqual(RESULTS.get('1h-396199:b').get('in_avg'), 6)
    self.assertEqual(RESULTS.get('1h-396199:b').get('out_avg'), 6)
    self.assertEqual(RESULTS.get('1h-396200:a').get('in_avg'), 4)
    self.assertEqual(RESULTS.get('1h-396200:a').get('out_avg'), 7)
    self.assertEqual(RESULTS.get('1h-396200:b').get('in_avg'), 5)
    self.assertEqual(RESULTS.get('1h-396200:b').get('out_avg'), 9)
def test_windowed_average(self):
    """aggregate events into by windowed avg.

    Streams five events spanning an hour boundary into a pipeline
    that windows by 1h and averages 'in'/'out' per window.  Emitted
    IndexedEvents land in the module-global RESULTS dict keyed by
    the window index string.
    """
    # three events in the 07:00 hour, two in the 08:00 hour
    events_in = [
        Event(
            aware_dt_from_args(
                dict(year=2015, month=3, day=14, hour=7, minute=57)),
            {'in': 3, 'out': 1}),
        Event(
            aware_dt_from_args(
                dict(year=2015, month=3, day=14, hour=7, minute=58)),
            {'in': 9, 'out': 2}),
        Event(
            aware_dt_from_args(
                dict(year=2015, month=3, day=14, hour=7, minute=59)),
            {'in': 6, 'out': 6}),
        Event(
            aware_dt_from_args(
                dict(year=2015, month=3, day=14, hour=8, minute=0)),
            {'in': 4, 'out': 7}),
        Event(
            aware_dt_from_args(
                dict(year=2015, month=3, day=14, hour=8, minute=1)),
            {'in': 5, 'out': 9}),
    ]

    def cback(event):
        """callback to pass in."""
        # collect emitted aggregates keyed by their window index
        global RESULTS  # pylint: disable=global-statement
        if RESULTS is None:
            RESULTS = dict()
        RESULTS['{0}'.format(event.index())] = event

    uin = Stream()
    # hourly windows, aggregate re-emitted on each incoming event
    (Pipeline()
     .from_source(uin)
     .window_by('1h')
     .emit_on('eachEvent')
     .aggregate({
         'in_avg': {'in': Functions.avg()},
         'out_avg': {'out': Functions.avg()}
     })
     .to(EventOut, cback))

    for i in events_in:
        uin.add_event(i)

    # 1h-396199 = 07:00 window, 1h-396200 = 08:00 window
    self.assertEqual(RESULTS.get('1h-396199').get('in_avg'), 6)
    self.assertEqual(RESULTS.get('1h-396199').get('out_avg'), 3)
    self.assertEqual(RESULTS.get('1h-396200').get('in_avg'), 4.5)
    self.assertEqual(RESULTS.get('1h-396200').get('out_avg'), 8)
DATA = dict(name="traffic", columns=["time", "value", "status"], points=[[1400425947000, 52, "ok"], [1400425948000, 18, "ok"], [1400425949000, 26, "fail"], [1400425950000, 93, "offline"]]) def _strp(dstr): """decode some existing test ts strings from js tests.""" fmt = '%Y-%m-%dT%H:%M:%SZ' return datetime.datetime.strptime(dstr, fmt).replace(tzinfo=pytz.UTC) EVENTLIST1 = [ Event(_strp("2015-04-22T03:30:00Z"), { 'in': 1, 'out': 2 }), Event(_strp("2015-04-22T03:31:00Z"), { 'in': 3, 'out': 4 }), Event(_strp("2015-04-22T03:32:00Z"), { 'in': 5, 'out': 6 }), ] SEPT_2014_DATA = dict(name="traffic", columns=["time", "value"], points=[[1409529600000, 80], [1409533200000, 88], [1409536800000, 52], [1409540400000, 80],
def test_sum_events_with_combine(self):
    """test summing multiple events together via combine on the back end."""
    event_list = [
        self._create_event(self.aware_ts, {'a': 5, 'b': 6, 'c': 7}),
        self._create_event(self.aware_ts, {'a': 2, 'b': 3, 'c': 4}),
        self._create_event(self.aware_ts, {'a': 1, 'b': 2, 'c': 3}),
    ]

    # combine them all
    res = Event.sum(event_list)
    self.assertEqual(res[0].get('a'), 8)
    self.assertEqual(res[0].get('b'), 11)
    self.assertEqual(res[0].get('c'), 14)

    # combine a single field
    res = Event.sum(event_list, 'a')
    self.assertEqual(res[0].get('a'), 8)
    self.assertIsNone(res[0].get('b'))
    self.assertIsNone(res[0].get('c'))

    # grab multiple fields
    res = Event.sum(event_list, ['a', 'c'])
    self.assertEqual(res[0].get('a'), 8)
    self.assertIsNone(res[0].get('b'))
    self.assertEqual(res[0].get('c'), 14)

    # average 'c' across four events
    extra = self._create_event(self.aware_ts, {'a': 1, 'b': 1, 'c': 2})
    res = Event.avg(event_list + [extra], 'c')
    self.assertEqual(res[0].get('c'), 4)

    # empty input is a no-op
    self.assertEqual(Event.sum([]), [])
    self.assertEqual(Event.avg([]), [])

    # work the extra reducer functions in the Functions module
    res = Event.combine(event_list, 'c', Functions.max())
    self.assertEqual(res[0].get('c'), 7)
    res = Event.combine(event_list, 'c', Functions.min())
    self.assertEqual(res[0].get('c'), 3)
    res = Event.combine(event_list, 'c', Functions.count())
    self.assertEqual(res[0].get('c'), 3)
    res = Event.combine(event_list, 'c', Functions.first())
    self.assertEqual(res[0].get('c'), 7)
    res = Event.combine(event_list, 'c', Functions.last())
    self.assertEqual(res[0].get('c'), 3)
    res = Event.combine(event_list, 'c', Functions.difference())
    self.assertEqual(res[0].get('c'), 4)

    self.assertIsNone(Functions.first()([]))
    self.assertIsNone(Functions.last()([]))
def _create_event(self, arg1, arg2=None):  # pylint: disable=no-self-use
    """Factory shim: forward the args straight to the Event constructor."""
    return Event(arg1, arg2)
def in_only(event):
    """Build a new event keeping only the 'in' field from the original."""
    trimmed = {'in': event.get('in')}
    return Event(event.timestamp(), trimmed)
def test_event_map_multi_key(self):
    """Test Event.map() with multiple field keys.

    Assert on the full result dict: wrapping both sides in set()
    compared only the dict keys, so the mapped value lists were
    never checked.
    """
    result = Event.map(self._get_event_series(), ['in', 'out'])
    self.assertEqual(result, {'out': [11, 13, 15, 18], 'in': [2, 4, 6, 8]})
def test_new_same(self):
    """trigger an error for coverage.

    Comparing a plain Event with an IndexedEvent duplicate must
    report not-same (mismatched event types).
    """
    self.assertFalse(Event.same(EVENT_LIST[0], IDX_EVENT_DUP[0]))