def test_data_with__timestamp_gets_a__period_start_ats(self):
    """A record built from data with a _timestamp gains week and month start metadata."""
    record = Record({'foo': 'bar', '_timestamp': d_tz(2013, 2, 2, 0, 0, 0)})

    assert_that(record.meta["_week_start_at"], is_(d_tz(2013, 1, 28)))
    assert_that(record.meta["_month_start_at"], is_(d_tz(2013, 2, 1)))
def test_period_group_query_adds_missing_periods_in_correct_order(self):
    """Grouped weekly results are zero-filled, in order, for weeks absent from storage."""
    self.mock_storage.execute_query.return_value = [
        {'some_group': 'val1', '_week_start_at': d(2013, 1, 14), '_count': 23},
        {'some_group': 'val1', '_week_start_at': d(2013, 1, 21), '_count': 41},
        {'some_group': 'val2', '_week_start_at': d(2013, 1, 14), '_count': 31},
        {'some_group': 'val2', '_week_start_at': d(2013, 1, 28), '_count': 12},
    ]

    data = self.data_set.execute_query(
        Query.create(period=WEEK,
                     group_by=['some_group'],
                     start_at=d_tz(2013, 1, 7, 0, 0, 0),
                     end_at=d_tz(2013, 2, 4, 0, 0, 0)))

    week_starts = [d_tz(2013, 1, 7), d_tz(2013, 1, 14),
                   d_tz(2013, 1, 21), d_tz(2013, 1, 28)]
    # Missing weeks appear with a zero count, in chronological position.
    for group, counts in [("val1", [0, 23, 41, 0]),
                          ("val2", [0, 31, 0, 12])]:
        assert_that(data, has_item(has_entries({
            "some_group": group,
            "values": contains(*[
                has_entries({"_start_at": start, "_count": count})
                for start, count in zip(week_starts, counts)]),
        })))
def test_basic_query_with_time_limits(self):
    """Queries can be bounded below, above, or on both sides by timestamp."""
    self._save_all('foo_bar',
                   {'_timestamp': d_tz(2012, 12, 12)},
                   {'_timestamp': d_tz(2012, 12, 14)},
                   {'_timestamp': d_tz(2012, 12, 11)})

    # lower bound only: just the latest record qualifies
    lower_bounded = self.engine.execute_query(
        'foo_bar', Query.create(start_at=d_tz(2012, 12, 12, 13)))
    assert_that(lower_bounded,
                contains(has_entry('_timestamp', d_tz(2012, 12, 14))))

    # upper bound only: just the earliest record qualifies
    upper_bounded = self.engine.execute_query(
        'foo_bar', Query.create(end_at=d_tz(2012, 12, 11, 13)))
    assert_that(upper_bounded,
                contains(has_entry('_timestamp', d_tz(2012, 12, 11))))

    # both bounds: only the middle record falls inside the window
    windowed = self.engine.execute_query(
        'foo_bar', Query.create(start_at=d_tz(2012, 12, 11, 12),
                                end_at=d_tz(2012, 12, 12, 12)))
    assert_that(windowed,
                contains(has_entry('_timestamp', d_tz(2012, 12, 12))))
def test_month_and_group_query_with_start_and_end_at(self):
    """A monthly grouped query clips each group's values to [start_at, end_at).

    Storage returns five buckets across two groups; the July bucket falls
    outside the range and is dropped, while missing months inside the range
    are filled, leaving four values per group.
    """
    self.mock_storage.execute_query.return_value = [
        {'some_group': 'val1', '_month_start_at': d(2013, 1, 1), '_count': 1},
        {'some_group': 'val1', '_month_start_at': d(2013, 2, 1), '_count': 5},
        {'some_group': 'val2', '_month_start_at': d(2013, 3, 1), '_count': 2},
        {'some_group': 'val2', '_month_start_at': d(2013, 4, 1), '_count': 6},
        {'some_group': 'val2', '_month_start_at': d(2013, 7, 1), '_count': 6},
    ]

    data = self.data_set.execute_query(
        Query.create(period=MONTH, group_by=['some_group'],
                     start_at=d(2013, 1, 1), end_at=d(2013, 4, 2)))

    # The original asserted the same has_item matcher twice (the second copy
    # re-checked nothing); assert each group's length explicitly instead.
    assert_that(data[0]["values"], has_length(4))
    assert_that(data[1]["values"], has_length(4))

    first_group = data[0]["values"]
    assert_that(first_group,
                has_item(has_entries({"_start_at": d_tz(2013, 3, 1)})))
    assert_that(first_group,
                has_item(has_entries({"_start_at": d_tz(2013, 4, 1)})))

    # The original rebound first_group here; use a distinct name.
    second_group = data[1]["values"]
    assert_that(second_group,
                has_item(has_entries({"_start_at": d_tz(2013, 1, 1)})))
    assert_that(second_group,
                has_item(has_entries({"_start_at": d_tz(2013, 2, 1)})))
def test_basic_query_with_time_limits(self):
    """start_at, end_at, and both together restrict results by _timestamp."""
    self._save_all('foo_bar',
                   {'_timestamp': d_tz(2012, 12, 12)},
                   {'_timestamp': d_tz(2012, 12, 14)},
                   {'_timestamp': d_tz(2012, 12, 11)})

    cases = [
        # (query kwargs, the single timestamp expected back)
        (dict(start_at=d_tz(2012, 12, 12, 13)), d_tz(2012, 12, 14)),
        (dict(end_at=d_tz(2012, 12, 11, 13)), d_tz(2012, 12, 11)),
        (dict(start_at=d_tz(2012, 12, 11, 12),
              end_at=d_tz(2012, 12, 12, 12)), d_tz(2012, 12, 12)),
    ]
    for kwargs, expected_timestamp in cases:
        results = self.engine.execute_query('foo_bar',
                                            Query.create(**kwargs))
        assert_that(results,
                    contains(has_entry('_timestamp', expected_timestamp)))
def test_month_and_group_query_with_start_and_end_at(self):
    """A monthly grouped bucket query clips each group's values to the range.

    The July subgroup bucket falls outside [start_at, end_at) and is dropped;
    missing months inside the range are filled, leaving four values per group.
    """
    self.mock_repository.multi_group.return_value = [
        {
            "some_group": "val1",
            "_count": 6,
            "_group_count": 2,
            "_subgroup": [
                {"_month_start_at": d(2013, 1, 1, 0, 0, 0), "_count": 1},
                {"_month_start_at": d(2013, 2, 1, 0, 0, 0), "_count": 5},
            ]
        },
        {
            "some_group": "val2",
            "_count": 8,
            "_group_count": 2,
            "_subgroup": [
                {"_month_start_at": d(2013, 3, 1, 0, 0, 0), "_count": 2},
                {"_month_start_at": d(2013, 4, 1, 0, 0, 0), "_count": 6},
                {"_month_start_at": d(2013, 7, 1, 0, 0, 0), "_count": 6},
            ]
        }
    ]

    query_result = self.bucket.query(
        Query.create(period=MONTH, group_by="some_group",
                     start_at=d(2013, 1, 1), end_at=d(2013, 4, 2)))
    data = query_result.data()

    # The original asserted the same has_item matcher twice (the second copy
    # re-checked nothing); assert each group's length explicitly instead.
    assert_that(data[0]["values"], has_length(4))
    assert_that(data[1]["values"], has_length(4))

    first_group = data[0]["values"]
    assert_that(first_group,
                has_item(has_entries({"_start_at": d_tz(2013, 3, 1)})))
    assert_that(first_group,
                has_item(has_entries({"_start_at": d_tz(2013, 4, 1)})))

    # The original rebound first_group here; use a distinct name.
    second_group = data[1]["values"]
    assert_that(second_group,
                has_item(has_entries({"_start_at": d_tz(2013, 1, 1)})))
    assert_that(second_group,
                has_item(has_entries({"_start_at": d_tz(2013, 2, 1)})))
def test__month_start_at_is_always_the_start_of_the_month(self):
    """_month_start_at is snapped to the first day of the timestamp's month."""
    for timestamp, expected_month_start in [
            (d_tz(2013, 2, 7), d_tz(2013, 2, 1)),
            (d_tz(2013, 3, 14), d_tz(2013, 3, 1))]:
        record = Record({'foo': 'bar', '_timestamp': timestamp})
        assert_that(record.meta["_month_start_at"],
                    is_(expected_month_start))
def test_datetime_is_converted_to_utc(self):
    """Parsing an aware non-UTC datetime yields the equivalent UTC instant."""
    # NOTE(review): constructing a datetime with a pytz zone via tzinfo=
    # normally yields the zone's LMT offset (-04:56 for US/Eastern) rather
    # than EST (-05:00), yet the expected value assumes a clean -5h shift —
    # presumably the d_tz helper localizes properly; confirm against d_tz.
    us_eastern_time = d_tz(2012, 12, 12, 12,
                           tzinfo=pytz.timezone("US/Eastern"))
    assert_that(parse_time_as_utc(us_eastern_time),
                equal_to(d_tz(2012, 12, 12, 17)))
def test_period_group_query_adds_missing_periods_in_correct_order(self):
    """Grouped weekly bucket queries are padded with zero-count weeks, in order."""
    def weekly(start, count):
        # One subgroup bucket as the repository would return it.
        return {"_week_start_at": start, "_count": count}

    self.mock_repository.multi_group.return_value = [
        {
            "some_group": "val1",
            "_count": 6,
            "_group_count": 2,
            "_subgroup": [weekly(d(2013, 1, 14, 0, 0, 0), 23),
                          weekly(d(2013, 1, 21, 0, 0, 0), 41)],
        },
        {
            "some_group": "val2",
            "_count": 8,
            "_group_count": 2,
            "_subgroup": [weekly(d(2013, 1, 14, 0, 0, 0), 31),
                          weekly(d(2013, 1, 28, 0, 0, 0), 12)],
        },
    ]

    query_result = self.bucket.query(
        Query.create(period="week", group_by="some_group",
                     start_at=d_tz(2013, 1, 7, 0, 0, 0),
                     end_at=d_tz(2013, 2, 4, 0, 0, 0))).data()

    week_starts = [d_tz(2013, 1, 7), d_tz(2013, 1, 14),
                   d_tz(2013, 1, 21), d_tz(2013, 1, 28)]
    for group, counts in [("val1", [0, 23, 41, 0]),
                          ("val2", [0, 31, 0, 12])]:
        assert_that(query_result, has_item(has_entries({
            "some_group": group,
            "values": contains(*[
                has_entries({"_start_at": start, "_count": count})
                for start, count in zip(week_starts, counts)]),
        })))
def test_save_document_adding_timestamps(self, now):
    """save() stamps the stored document with _updated_at from the clock."""
    frozen_now = d_tz(2013, 4, 9, 13, 32, 5)
    now.return_value = frozen_now

    self.repo.save({"name": "Gummo"})

    self.mongo.save.assert_called_once_with(
        {"name": "Gummo", "_updated_at": frozen_now})
def test_month_and_group_query_with_start_and_end_at(self):
    """A "month"-period grouped query clips each group's values to the range.

    The July subgroup bucket falls outside [start_at, end_at) and is dropped;
    missing months inside the range are filled, leaving four values per group.
    """
    self.mock_repository.multi_group.return_value = [
        {
            "some_group": "val1",
            "_count": 6,
            "_group_count": 2,
            "_subgroup": [
                {"_month_start_at": d(2013, 1, 1, 0, 0, 0), "_count": 1},
                {"_month_start_at": d(2013, 2, 1, 0, 0, 0), "_count": 5},
            ]
        },
        {
            "some_group": "val2",
            "_count": 8,
            "_group_count": 2,
            "_subgroup": [
                {"_month_start_at": d(2013, 3, 1, 0, 0, 0), "_count": 2},
                {"_month_start_at": d(2013, 4, 1, 0, 0, 0), "_count": 6},
                {"_month_start_at": d(2013, 7, 1, 0, 0, 0), "_count": 6},
            ]
        }
    ]

    query_result = self.bucket.query(
        Query.create(period="month", group_by="some_group",
                     start_at=d(2013, 1, 1), end_at=d(2013, 4, 2)))
    data = query_result.data()

    # The original asserted the same has_item matcher twice (the second copy
    # re-checked nothing); assert each group's length explicitly instead.
    assert_that(data[0]["values"], has_length(4))
    assert_that(data[1]["values"], has_length(4))

    first_group = data[0]["values"]
    assert_that(first_group,
                has_item(has_entries({"_start_at": d_tz(2013, 3, 1)})))
    assert_that(first_group,
                has_item(has_entries({"_start_at": d_tz(2013, 4, 1)})))

    # The original rebound first_group here; use a distinct name.
    second_group = data[1]["values"]
    assert_that(second_group,
                has_item(has_entries({"_start_at": d_tz(2013, 1, 1)})))
    assert_that(second_group,
                has_item(has_entries({"_start_at": d_tz(2013, 2, 1)})))
def test_period_queries_get_sorted_by__week_start_at(self):
    """Weekly period results come back ordered by week start."""
    self.setup__timestamp_data()

    result = Query.create(period="week").execute(self.bucket.repository)

    ordered_week_starts = [d_tz(2012, 12, 31),
                           d_tz(2013, 1, 28),
                           d_tz(2013, 2, 25)]
    assert_that(result.data(), contains(
        *[has_entry('_start_at', week) for week in ordered_week_starts]))
def test_period_queries_get_sorted_by__week_start_at(self):
    """Weekly results are returned in chronological week order."""
    self.setup__timestamp_data()

    result = self.data_set.execute_query(Query.create(period=WEEK))

    assert_that(result, contains(
        has_entry('_start_at', d_tz(2012, 12, 31)),
        has_entry('_start_at', d_tz(2013, 1, 28)),
        has_entry('_start_at', d_tz(2013, 2, 25))))
def test_period_queries_get_sorted_by__week_start_at(self):
    """Weekly query output is ordered by week start, earliest first."""
    self.setup__timestamp_data()

    result = self.data_set.execute_query(Query.create(period=WEEK))

    ordered_week_starts = [d_tz(2012, 12, 31), d_tz(2013, 1, 28),
                           d_tz(2013, 2, 25)]
    assert_that(result, contains(
        *[has_entry('_start_at', week) for week in ordered_week_starts]))
def test_period_queries_get_sorted_by__week_start_at(self):
    """Executing a weekly query yields results sorted by week start."""
    self.setup__timestamp_data()

    result = Query.create(period="week").execute(self.bucket.repository)

    expected = (d_tz(2012, 12, 31), d_tz(2013, 1, 28), d_tz(2013, 2, 25))
    assert_that(result.data(), contains(
        *[has_entry('_start_at', start) for start in expected]))
def test_period_query_is_executed(self, mock_query):
    """GET with period/start_at/end_at params builds the matching Query."""
    mock_query.return_value = NoneData()

    start = urllib.quote("2012-11-05T00:00:00Z")
    end = urllib.quote("2012-12-03T00:00:00Z")
    self.app.get('/data/some-group/some-type?period=week'
                 '&start_at=%s&end_at=%s' % (start, end))

    mock_query.assert_called_with(
        Query.create(period=WEEK,
                     start_at=d_tz(2012, 11, 5),
                     end_at=d_tz(2012, 12, 3)))
def test_build_query_with_start_and_end_at(self):
    """to_mongo_query() maps start_at/end_at to a $gte/$lt _timestamp range."""
    start = d_tz(2012, 3, 17, 17, 10, 6)
    end = d_tz(2012, 3, 19, 17, 10, 6)

    query = Query.create(start_at=start, end_at=end)

    assert_that(query.to_mongo_query(),
                is_({"_timestamp": {"$gte": start, "$lt": end}}))
def test_adding_mongo_doc_to_collection_expands_month_start_at(self):
    """A monthly bucket gains _start_at and an _end_at one month later."""
    period_data = PeriodData(
        [{"_month_start_at": d(2013, 4, 1), "_count": 5}], period="month")

    assert_that(len(period_data.data()), is_(1))
    expanded = period_data.data()[0]
    assert_that(expanded, has_entry("_count", 5))
    assert_that(expanded, has_entry("_start_at", d_tz(2013, 4, 1)))
    assert_that(expanded, has_entry("_end_at", d_tz(2013, 5, 1)))
def test_period_query_is_executed(self, mock_query):
    """Week-period GET parameters are parsed into the expected Query."""
    mock_query.return_value = NoneData()

    url = ('/data/some-group/some-type?period=week'
           '&start_at=' + urllib.quote("2012-11-05T00:00:00Z") +
           '&end_at=' + urllib.quote("2012-12-03T00:00:00Z"))
    self.app.get(url)

    mock_query.assert_called_with(Query.create(
        period=WEEK,
        start_at=d_tz(2012, 11, 5),
        end_at=d_tz(2012, 12, 3)))
def test_build_query_with_start_and_end_at(self):
    """The mongo query bounds _timestamp with inclusive $gte / exclusive $lt."""
    start = d_tz(2012, 3, 17, 17, 10, 6)
    end = d_tz(2012, 3, 19, 17, 10, 6)

    mongo_query = Query.create(start_at=start, end_at=end).to_mongo_query()

    assert_that(mongo_query,
                is_({"_timestamp": {"$gte": start, "$lt": end}}))
def test_adding_mongo_document_to_collection_expands_week_start_at(self):
    """A weekly bucket gains _start_at and an _end_at seven days later."""
    period_data = PeriodData(
        [{"_week_start_at": d(2013, 5, 6), "_count": 42}], period="week")

    assert_that(len(period_data.data()), is_(1))
    expanded = period_data.data()[0]
    assert_that(expanded, has_entry("_count", 42))
    assert_that(expanded, has_entry("_start_at", d_tz(2013, 5, 6)))
    assert_that(expanded, has_entry("_end_at", d_tz(2013, 5, 13)))
def test_week_start_at_gets_expanded_in_subgroups_when_added(self):
    """Subgroup week buckets are expanded to _start_at/_end_at spans."""
    document = {
        "_subgroup": [{"_week_start_at": d(2013, 4, 1), "_count": 5}],
    }

    values = PeriodGroupedData([document], WEEK).data()[0]['values']

    assert_that(values, has_item(has_entry("_start_at", d_tz(2013, 4, 1))))
    assert_that(values, has_item(has_entry("_end_at", d_tz(2013, 4, 8))))
    assert_that(values, has_item(has_entry("_count", 5)))
def test_shift_query_backwards(self):
    """get_shifted_query(-5) moves a 6-day daily window five days earlier."""
    original = Query.create(start_at=d_tz(2014, 1, 9, 0, 0, 0),
                            period=Day(),
                            duration=6)

    shifted = original.get_shifted_query(-5)

    assert_that(shifted.start_at, is_(d_tz(2014, 1, 4, 0, 0, 0)))
    assert_that(shifted.end_at, is_(d_tz(2014, 1, 10, 0, 0, 0)))
def test_basic_query_with_inclusive_time_limits(self):
    """With inclusive=True, records on both the start_at and end_at bounds are returned."""
    # The original used leading-zero literals (e.g. d_tz(2014, 12, 01)):
    # octal syntax in Python 2 and a SyntaxError in Python 3. Replaced with
    # plain decimal literals — same values.
    self._save_all('foo_bar',
                   {'_timestamp': d_tz(2014, 12, 1)},
                   {'_timestamp': d_tz(2014, 12, 2)},
                   {'_timestamp': d_tz(2014, 12, 3)})

    results = self.engine.execute_query('foo_bar', Query.create(
        start_at=d_tz(2014, 12, 1),
        end_at=d_tz(2014, 12, 3),
        inclusive=True))

    assert_that(len(results), is_(3))
def test_shift_query_backwards(self):
    """Shifting by -5 days moves both window edges five days earlier."""
    shifted = Query.create(
        start_at=d_tz(2014, 1, 9, 0, 0, 0),
        period=Day(),
        duration=6,
    ).get_shifted_query(-5)

    assert_that(shifted.start_at, is_(d_tz(2014, 1, 4, 0, 0, 0)))
    assert_that(shifted.end_at, is_(d_tz(2014, 1, 10, 0, 0, 0)))
def test_week_start_at_gets_expanded_in_subgroups_when_added(self):
    """Adding a document expands subgroup week buckets into full week spans."""
    stub_document = {"_subgroup": [{"_week_start_at": d(2013, 4, 1),
                                    "_count": 5}]}
    grouped = PeriodGroupedData([stub_document], WEEK)

    values = grouped.data()[0]['values']

    for matcher in (has_entry("_start_at", d_tz(2013, 4, 1)),
                    has_entry("_end_at", d_tz(2013, 4, 8)),
                    has_entry("_count", 5)):
        assert_that(values, has_item(matcher))
def test_batch_last_updated(self):
    """batch_last_updated stamps each data set with its max _timestamp; empty sets get none."""
    records = {
        # timestamps in ascending order
        'some_data': [
            {'_timestamp': d_tz(2018, 1, 1)},
            {'_timestamp': d_tz(2019, 1, 1)},
            {'_timestamp': d_tz(2020, 1, 1)},
        ],
        # timestamps in descending order
        'some_other_data': [
            {'_timestamp': d_tz(2017, 1, 1)},
            {'_timestamp': d_tz(2016, 1, 1)},
            {'_timestamp': d_tz(2015, 1, 1)},
        ],
    }
    for name, items in records.iteritems():
        self.engine.create_data_set(name, 0)
        for item in items:
            self.engine.save_record(name, item)

    data_sets = [DataSet(self.engine, {'name': name})
                 for name in ('some_data', 'some_other_data',
                              'yet_another_data')]
    self.engine.batch_last_updated(data_sets)

    ascending_set, descending_set, empty_set = data_sets
    # The maximum timestamp wins regardless of insertion order.
    assert_that(ascending_set.get_last_updated(),
                is_(d_tz(2020, 1, 1, 0, 0, 0)))
    assert_that(descending_set.get_last_updated(),
                is_(d_tz(2017, 1, 1, 0, 0, 0)))
    # A data set with no records has no last-updated time at all.
    assert_that(empty_set.get_last_updated(), is_(none()))
def test_adding_mongo_document_to_collection_expands_week_start_at(self):
    """A week bucket document expands to a Monday-to-Monday _start_at/_end_at span."""
    stub_doc = {"_week_start_at": d(2013, 5, 6), "_count": 42}
    period_data = PeriodData([stub_doc], period="week")

    assert_that(len(period_data.data()), is_(1))
    for key, expected in [("_count", 42),
                          ("_start_at", d_tz(2013, 5, 6)),
                          ("_end_at", d_tz(2013, 5, 13))]:
        assert_that(period_data.data()[0], has_entry(key, expected))
def test_adding_mongo_doc_to_collection_expands_month_start_at(self):
    """A month bucket document expands to a full-month _start_at/_end_at span."""
    period_data = PeriodData(
        [{"_month_start_at": d(2013, 4, 1), "_count": 5}],
        period="month")

    assert_that(len(period_data.data()), is_(1))
    bucket = period_data.data()[0]
    assert_that(bucket, has_entry("_count", 5))
    assert_that(bucket, has_entry("_start_at", d_tz(2013, 4, 1)))
    assert_that(bucket, has_entry("_end_at", d_tz(2013, 5, 1)))
def test_that_it_expands_the_limits_of_the_range_if_midweek(self):
    """WEEK.range snaps mid-week endpoints outward to whole Monday-to-Monday weeks."""
    # Renamed the local from `range`, which shadowed the builtin.
    week_range = WEEK.range(d_tz(2013, 4, 3), d_tz(2013, 4, 19))

    assert_that(list(week_range), contains(
        (d_tz(2013, 4, 1), d_tz(2013, 4, 8)),
        (d_tz(2013, 4, 8), d_tz(2013, 4, 15)),
        (d_tz(2013, 4, 15), d_tz(2013, 4, 22)),
    ))
def test__timestamp_is_returned_as_datetime(self):
    """The _timestamp survives the round trip through Record as a datetime."""
    timestamp = d_tz(2013, 2, 2, 0, 2, 0)
    record = Record({'foo': 'bar', '_timestamp': timestamp})

    assert_that(record.data['_timestamp'], is_(d_tz(2013, 2, 2, 0, 2, 0)))
def test_query_grouped_by_period(self):
    """A daily period query counts records per day bucket."""
    self._save_all_with_periods(
        'foo_bar',
        {'_timestamp': d_tz(2012, 12, 12, 12)},
        {'_timestamp': d_tz(2012, 12, 12, 15)},
        {'_timestamp': d_tz(2012, 12, 13, 12)})

    results = self.engine.execute_query('foo_bar',
                                        Query.create(period=DAY))

    # Two records on the 12th, one on the 13th.
    assert_that(results, contains_inanyorder(
        has_entries({'_day_start_at': d_tz(2012, 12, 12), '_count': 2}),
        has_entries({'_day_start_at': d_tz(2012, 12, 13), '_count': 1})))
def test_seconds_out_of_date_shows_correct_number_of_seconds_out_of_date(self):
    """Staleness is reported as seconds beyond the max_age_expected allowance."""
    one_day = 86400

    with freeze_time('2014-01-28'):
        # Expected to always be fresh, but last updated yesterday:
        # one full day's worth of seconds out of date.
        self.setup_config({'max_age_expected': 0})
        self.mock_storage.get_last_updated.return_value = d_tz(2014, 1, 27)
        assert_that(self.data_set.get_seconds_out_of_date(), is_(one_day))

    with freeze_time('2014-01-28'):
        # Allowed to be a day old, but last updated three days ago:
        # two days past the sell-by date.
        self.setup_config({'max_age_expected': one_day})
        self.mock_storage.get_last_updated.return_value = d_tz(2014, 1, 25)
        assert_that(self.data_set.get_seconds_out_of_date(),
                    is_(2 * one_day))
def test_data_with__timestamp_gets_a__period_start_ats(self):
    """Records derive _week_start_at and _month_start_at from _timestamp."""
    meta = Record({
        'foo': 'bar',
        '_timestamp': d_tz(2013, 2, 2, 0, 0, 0),
    }).meta

    assert_that(meta["_week_start_at"], is_(d_tz(2013, 1, 28)))
    assert_that(meta["_month_start_at"], is_(d_tz(2013, 2, 1)))
def setUpPeopleLocationData(self):
    """Seed the repository with six person/place/hair-colour sightings.

    The original defined `people`, `places`, `hair` and `times` lists that
    were never used; they have been removed. The saved data is unchanged.
    """
    super(TestRepositoryIntegration_Grouping, self).setUp()

    self._save_location("Jack", "Kettering", "red", d_tz(2013, 3, 11))
    self._save_location("Jill", "Kennington", "blond", d_tz(2013, 3, 25))
    self._save_location("John", "Kettering", "blond", d_tz(2013, 3, 18))
    self._save_location("John", "Kettering", "dark", d_tz(2013, 3, 18))
    self._save_location("John", "Kennington", "dark", d_tz(2013, 3, 11))
    self._save_location("Jane", "Kingston", "red", d_tz(2013, 3, 18))
def test_adding_more_mongo_documents_to_collection(self):
    """Each weekly bucket expands to its own Monday-to-Monday span."""
    docs = [{"_week_start_at": d(2013, 5, 6), "_count": 42},
            {"_week_start_at": d(2013, 5, 13), "_count": 66}]
    period_data = PeriodData(docs, period="week")

    assert_that(len(period_data.data()), is_(2))
    expected_spans = [(d_tz(2013, 5, 6), d_tz(2013, 5, 13)),
                      (d_tz(2013, 5, 13), d_tz(2013, 5, 20))]
    for item, (start, end) in zip(period_data.data(), expected_spans):
        assert_that(item, has_entry("_start_at", start))
        assert_that(item, has_entry("_end_at", end))
def test_adding_documents_converts_timestamps_to_utc(self):
    """SimpleData normalises naive _timestamp values to UTC-aware datetimes."""
    data = SimpleData([{"_timestamp": d(2014, 1, 1)}])

    assert_that(data.data(), has_length(1))
    assert_that(data.data()[0],
                has_entry("_timestamp", d_tz(2014, 1, 1)))
def test__period_start_ats_get_time_zeroed(self):
    """Derived week/month start metadata carries midnight, not the record's time."""
    meta = Record({'foo': 'bar',
                   '_timestamp': d_tz(2013, 2, 7, 7, 7, 7)}).meta

    midnight = datetime.time(0, 0, 0)
    assert_that(meta['_week_start_at'].time(), equal_to(midnight))
    assert_that(meta['_month_start_at'].time(), equal_to(midnight))
def test_converts_customer_satisfaction_raw_data_to_normalised_data(self):
    """Raw satisfaction rows become timestamped records; header rows and
    the trailing "Total Result" summary row are discarded."""
    may, june, july = d_tz(2013, 5, 1), d_tz(2013, 6, 1), d_tz(2013, 7, 1)
    raw_data = self.ignore_rows(4) + [
        [may, 0.1, 0.2],
        [june, 0.3, 0.4],
        [july, 0.5, 0.6],
        ["Total Result", 1, 2],
    ]

    data = list(customer_satisfaction(raw_data))

    assert_that(data, is_([
        ["_timestamp", "_id", "satisfaction_tax_disc", "satisfaction_sorn"],
        ["2013-05-01T00:00:00+00:00", "2013-05-01", 0.1, 0.2],
        ["2013-06-01T00:00:00+00:00", "2013-06-01", 0.3, 0.4],
        ["2013-07-01T00:00:00+00:00", "2013-07-01", 0.5, 0.6],
    ]))
def test_datetimes_are_returned_as_utc(self):
    """Naive stored datetimes come back timezone-aware in UTC."""
    self._save_all('foo_bar',
                   {'_timestamp': datetime.datetime(2012, 8, 8)})

    results = self.engine.execute_query('foo_bar', Query.create())

    assert_that(results,
                contains(has_entries({'_timestamp': d_tz(2012, 8, 8)})))
def test_to_mongo(self):
    """to_mongo() keeps the data fields and adds derived period metadata."""
    record = Record({
        'name': 'bob',
        '_timestamp': d_tz(2013, 4, 4, 4, 4, 4),
    })

    assert_that(record.to_mongo(), has_key('name'))
    assert_that(record.to_mongo(), has_key('_timestamp'))
    assert_that(record.to_mongo(), has_key('_week_start_at'))
def test_timestamp_gets_parsed(self):
    """An ISO 8601 _timestamp string is stored as a parsed datetime.

    Unit tests for the parsing itself, including failure modes, live in
    the backdrop.core.records module.
    """
    self.data_set.store([{'_timestamp': '2012-12-12T00:00:00+00:00'}])

    self.mock_storage.save_record.assert_called_with(
        'test_data_set',
        match(has_entry('_timestamp', d_tz(2012, 12, 12))))
def test_get_last_updated(self):
    """get_last_updated reports the latest write time, not the most recent write."""
    self.engine.create_data_set('foo_bar', 0)
    with freeze_time('2012-12-12'):
        self.engine.save_record('foo_bar', {'foo': 'first'})
    # The second write carries an *earlier* clock time on purpose.
    with freeze_time('2012-11-12'):
        self.engine.save_record('foo_bar', {'foo': 'second'})

    assert_that(self.engine.get_last_updated('foo_bar'),
                is_(d_tz(2012, 12, 12)))
def test_flattened_sorted_week_and_group_query(self):
    """Flattened grouped weekly results are sorted by _count descending."""
    self.mock_storage.execute_query.return_value = [
        {'some_group': 'val1', '_week_start_at': d(2013, 1, 7), '_count': 1},
        {'some_group': 'val1', '_week_start_at': d(2013, 1, 14), '_count': 5},
        {'some_group': 'val2', '_week_start_at': d(2013, 1, 7), '_count': 2},
        {'some_group': 'val2', '_week_start_at': d(2013, 1, 14), '_count': 6},
    ]

    data = self.data_set.execute_query(
        Query.create(period=WEEK, group_by=['some_group'],
                     sort_by=["_count", "descending"], flatten=True))

    # Both week-of-the-14th rows carry the higher counts, so they come first.
    expected_starts = [d_tz(2013, 1, 14), d_tz(2013, 1, 14),
                       d_tz(2013, 1, 7), d_tz(2013, 1, 7)]
    assert_that(data, contains(
        *[has_entries({'_start_at': start}) for start in expected_starts]))
def test_datetimes_are_returned_as_utc(self):
    """Records stored with naive datetimes are read back UTC-aware."""
    naive_timestamp = datetime.datetime(2012, 8, 8)
    self._save_all('foo_bar', {'_timestamp': naive_timestamp})

    results = self.engine.execute_query('foo_bar', Query.create())

    assert_that(results, contains(
        has_entries({'_timestamp': d_tz(2012, 8, 8)})))
def test_filling_data_for_missing_weeks(self):
    """fill_missing_periods inserts buckets for weeks absent from the subgroup."""
    stub_document = {"_subgroup": [
        {"_count": 1, "_week_start_at": d(2013, 4, 1)},
        {"_count": 5, "_week_start_at": d(2013, 4, 15)},
    ]}
    data = PeriodGroupedData([stub_document], WEEK)

    data.fill_missing_periods(d(2013, 4, 1), d(2013, 4, 16))

    values = data.data()[0]["values"]
    assert_that(values, has_length(3))
    # The week of the 8th was missing and has been filled in.
    assert_that(values, has_items(
        has_entry("_start_at", d_tz(2013, 4, 1)),
        has_entry("_start_at", d_tz(2013, 4, 8)),
        has_entry("_start_at", d_tz(2013, 4, 15))))
def test__month_start_at_is_always_the_start_of_the_month(self):
    """_month_start_at is the first of the month regardless of the day."""
    record_for_feb_7th = Record({'foo': 'bar',
                                 '_timestamp': d_tz(2013, 2, 7)})
    record_for_mar_14th = Record({'foo': 'bar',
                                  '_timestamp': d_tz(2013, 3, 14)})

    assert_that(record_for_feb_7th.meta["_month_start_at"],
                is_(d_tz(2013, 2, 1)))
    assert_that(record_for_mar_14th.meta["_month_start_at"],
                is_(d_tz(2013, 3, 1)))