Example #1
File: api.py  Project: alphagov/backdrop
def _empty_data_set(data_set_config):
    audit_delete(data_set_config['name'])
    data_set = DataSet(storage, data_set_config)
    data_set.create_if_not_exists()
    data_set.empty()
    return jsonify(
        status='ok',
        message='{} now contains 0 records'.format(data_set_config['name']))
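A minimal caller-side sketch, illustrative only: storage and the Flask request context come from api.py, and the config dict simply mirrors the shape used in Example #2 below.

# Hypothetical usage sketch, not code from the project.
config = {
    'name': 'my_data_set',          # made-up data set name
    'data_group': 'group',
    'data_type': 'type',
    'max_age_expected': 1000,
}
response = _empty_data_set(config)
# -> JSON body: {"status": "ok", "message": "my_data_set now contains 0 records"}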
Example #2
    def setUp(self):
        self.storage = MongoStorageEngine.create(HOSTS, PORT, DB_NAME)

        self.config = {
            'name': DATA_SET,
            'data_group': "group",
            'data_type': "type",
            'max_age_expected': 1000,
        }

        self.data_set = DataSet(self.storage, self.config)

        self.mongo_collection = MongoClient(HOSTS, PORT)[DB_NAME][DATA_SET]
Example #3
    def test_batch_last_updated(self):
        records = {
            # timestamps in ascending order
            'some_data': [
                {
                    '_timestamp': d_tz(2018, 1, 1)
                },
                {
                    '_timestamp': d_tz(2019, 1, 1)
                },
                {
                    '_timestamp': d_tz(2020, 1, 1)
                },
            ],
            # timestamps in descending order
            'some_other_data': [
                {
                    '_timestamp': d_tz(2017, 1, 1)
                },
                {
                    '_timestamp': d_tz(2016, 1, 1)
                },
                {
                    '_timestamp': d_tz(2015, 1, 1)
                },
            ]
        }

        for key, items in records.items():
            self.engine.create_data_set(key, 0)
            for item in items:
                self.engine.save_record(key, item)

        some_data_set = DataSet(self.engine, {'name': 'some_data'})
        some_other_data_set = DataSet(self.engine, {'name': 'some_other_data'})
        yet_another_data_set = DataSet(self.engine,
                                       {'name': 'yet_another_data'})

        self.engine.batch_last_updated(
            [some_data_set, some_other_data_set, yet_another_data_set])

        some_data_set_last_updated = some_data_set.get_last_updated()
        some_other_data_set_last_updated = some_other_data_set.get_last_updated()
        yet_another_data_set_last_updated = yet_another_data_set.get_last_updated()

        assert_that(some_data_set_last_updated, is_(d_tz(2020, 1, 1, 0, 0, 0)))
        assert_that(some_other_data_set_last_updated,
                    is_(d_tz(2017, 1, 1, 0, 0, 0)))
        assert_that(yet_another_data_set_last_updated, is_(none()))
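A condensed reading of what this test pins down, as a hedged usage sketch (names are illustrative, and this describes the expected behaviour, not the engine's internals): batch_last_updated stamps each DataSet with the newest _timestamp among its records, and a data set with no records reports None.

data_sets = [DataSet(engine, {'name': name})
             for name in ('some_data', 'some_other_data', 'yet_another_data')]
engine.batch_last_updated(data_sets)        # one pass covers every data set
for data_set in data_sets:
    print(data_set.get_last_updated())      # newest _timestamp, or None if the set is empty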
Example #4
    def test_batch_last_updated(self):
        timestamp = time_as_utc(datetime.datetime.utcnow())
        self.engine.create_data_set('some_data', 0)
        self.engine.save_record('some_data', {
            '_timestamp': timestamp,
        })

        data_set = DataSet(self.engine, {'name': 'some_data'})

        self.engine.batch_last_updated([data_set])
        last_updated = data_set.get_last_updated()

        assert_that(last_updated.year, is_(timestamp.year))
        assert_that(last_updated.month, is_(timestamp.month))
        assert_that(last_updated.day, is_(timestamp.day))
        assert_that(last_updated.hour, is_(timestamp.hour))
        assert_that(last_updated.minute, is_(timestamp.minute))
        assert_that(last_updated.second, is_(timestamp.second))
Example #5
    def test_batch_last_updated(self):
        records = {
            # timestamps in ascending order
            'some_data': [
                {'_timestamp': d_tz(2018, 1, 1)},
                {'_timestamp': d_tz(2019, 1, 1)},
                {'_timestamp': d_tz(2020, 1, 1)},
            ],
            # timestamps in descending order
            'some_other_data': [
                {'_timestamp': d_tz(2017, 1, 1)},
                {'_timestamp': d_tz(2016, 1, 1)},
                {'_timestamp': d_tz(2015, 1, 1)},
            ]
        }

        for key, items in records.items():
            self.engine.create_data_set(key, 0)
            for item in items:
                self.engine.save_record(key, item)

        some_data_set = DataSet(self.engine, {'name': 'some_data'})
        some_other_data_set = DataSet(self.engine, {'name': 'some_other_data'})
        yet_another_data_set = DataSet(self.engine, {'name': 'yet_another_data'})

        self.engine.batch_last_updated([some_data_set, some_other_data_set, yet_another_data_set])

        some_data_set_last_updated = some_data_set.get_last_updated()
        some_other_data_set_last_updated = some_other_data_set.get_last_updated()
        yet_another_data_set_last_updated = yet_another_data_set.get_last_updated()

        assert_that(some_data_set_last_updated, is_(d_tz(2020, 1, 1, 0, 0, 0)))
        assert_that(some_other_data_set_last_updated, is_(d_tz(2017, 1, 1, 0, 0, 0)))
        assert_that(yet_another_data_set_last_updated, is_(none()))
Example #6
def _empty_data_set(data_set_config):
    data_set = DataSet(storage, data_set_config)
    data_set.create_if_not_exists()
    data_set.empty()
    return jsonify(
        status='ok',
        message='{} now contains 0 records'.format(data_set_config['name']))
Example #7
    def setUp(self):
        self.storage = MongoStorageEngine.create(HOSTS, PORT, DB_NAME)

        self.config = {
            'name': DATA_SET,
            'data_group': "group",
            'data_type': "type",
            'max_age_expected': 1000,
        }

        self.data_set = DataSet(self.storage, self.config)

        self.mongo_collection = MongoClient(HOSTS, PORT)[DB_NAME][DATA_SET]
Example #8
    def setUp(self):
        self.storage = PostgresStorageEngine(DATABASE_URL)

        self.config = {
            'name': DATA_SET,
            'data_group': "group",
            'data_type': "type",
            'max_age_expected': 1000,
        }

        self.data_set = DataSet(self.storage, self.config)

        self.storage.create_table_and_indices()
Example #9
    def setUp(self):
        self.storage = MongoStorageEngine.create(DATABASE_URL)

        self.config = {
            'name': DATA_SET,
            'data_group': "group",
            'data_type': "type",
            'max_age_expected': 1000,
        }

        self.data_set = DataSet(self.storage, self.config)

        database = MongoClient(DATABASE_URL).get_database()
        self.mongo_collection = database[DATA_SET]
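The three setUp variants above differ only in the storage engine they build; the DataSet facade is the same either way. A hedged sketch of switching backends (USE_POSTGRES is an illustrative flag, not part of backdrop's configuration):

# Either engine is handed to DataSet unchanged; only construction differs.
if USE_POSTGRES:
    storage = PostgresStorageEngine(DATABASE_URL)
    storage.create_table_and_indices()
else:
    storage = MongoStorageEngine.create(DATABASE_URL)

data_set = DataSet(storage, {
    'name': DATA_SET,
    'data_group': 'group',
    'data_type': 'type',
    'max_age_expected': 1000,
})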
Example #10
class TestDataSetIntegration(unittest.TestCase):

    def setUp(self):
        self.storage = MongoStorageEngine.create(HOSTS, PORT, DB_NAME)

        self.config = {
            'name': DATA_SET,
            'data_group': "group",
            'data_type': "type",
            'max_age_expected': 1000,
        }

        self.data_set = DataSet(self.storage, self.config)

        self.mongo_collection = MongoClient(HOSTS, PORT)[DB_NAME][DATA_SET]

    def setup__timestamp_data(self):
        self.mongo_collection.save({
            "_id": 'last',
            "_timestamp": d_tz(2013, 3, 1),
            "_week_start_at": d_tz(2013, 2, 25),
            "_updated_at": d_tz(2013, 8, 10)
        })
        self.mongo_collection.save({
            "_id": 'first',
            "_timestamp": d_tz(2013, 1, 1),
            "_week_start_at": d_tz(2012, 12, 31),
            "_updated_at": d_tz(2013, 9, 10)
        })
        self.mongo_collection.save({
            "_id": 'second',
            "_timestamp": d_tz(2013, 2, 1),
            "_week_start_at": d_tz(2013, 1, 28),
            "_updated_at": d_tz(2013, 10, 10)
        })

    def tearDown(self):
        self.mongo_collection.drop()

    def test_period_queries_get_sorted_by__week_start_at(self):
        self.setup__timestamp_data()
        query = Query.create(period=WEEK)
        result = self.data_set.execute_query(query)
        assert_that(result, contains(
            has_entry('_start_at', d_tz(2012, 12, 31)),
            has_entry('_start_at', d_tz(2013, 1, 28)),
            has_entry('_start_at', d_tz(2013, 2, 25))
        ))

    def test_data_set_is_recent_enough(self):
        self.mongo_collection.save({
            "_id": "first",
            "_updated_at": datetime.datetime.now() - datetime.timedelta(seconds=500)
        })
        assert_that(self.data_set.is_recent_enough())

    def test_data_set_is_not_recent_enough(self):
        self.mongo_collection.save({
            "_id": "first",
            "_updated_at": datetime.datetime.now() - datetime.timedelta(seconds=50000)
        })
        assert_that(not self.data_set.is_recent_enough())
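The two freshness tests above hinge on max_age_expected (1000 seconds in self.config): a record whose _updated_at is 500 seconds old passes, one that is 50000 seconds old does not. A small sketch of that arithmetic, assuming is_recent_enough simply compares the age of the newest _updated_at against max_age_expected (the real implementation may apply its own tolerance):

import datetime

max_age_expected = 1000  # seconds, as in self.config above

def is_recent_enough_sketch(updated_at, now=None):
    # Hypothetical restatement of the check the two tests exercise.
    now = now or datetime.datetime.now()
    return (now - updated_at).total_seconds() <= max_age_expected

assert is_recent_enough_sketch(
    datetime.datetime.now() - datetime.timedelta(seconds=500))       # recent enough
assert not is_recent_enough_sketch(
    datetime.datetime.now() - datetime.timedelta(seconds=50000))     # too old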
Example #11
def _patch_data_set(data_set_config, data_set_id, data):
    audit_patch(data_set_config['name'], data)
    data_set = DataSet(storage, data_set_config)
    return data_set.patch(data_set_id, data)
Example #12
class TestDataSetIntegration(unittest.TestCase):
    def setUp(self):
        self.storage = MongoStorageEngine.create(HOSTS, PORT, DB_NAME)

        self.config = {
            'name': DATA_SET,
            'data_group': "group",
            'data_type': "type",
            'max_age_expected': 1000,
        }

        self.data_set = DataSet(self.storage, self.config)

        self.mongo_collection = MongoClient(HOSTS, PORT)[DB_NAME][DATA_SET]

    def setup__timestamp_data(self):
        self.mongo_collection.save({
            "_id": 'last',
            "_timestamp": d_tz(2013, 3, 1),
            "_week_start_at": d_tz(2013, 2, 25),
            "_updated_at": d_tz(2013, 8, 10)
        })
        self.mongo_collection.save({
            "_id": 'first',
            "_timestamp": d_tz(2013, 1, 1),
            "_week_start_at": d_tz(2012, 12, 31),
            "_updated_at": d_tz(2013, 9, 10)
        })
        self.mongo_collection.save({
            "_id": 'second',
            "_timestamp": d_tz(2013, 2, 1),
            "_week_start_at": d_tz(2013, 1, 28),
            "_updated_at": d_tz(2013, 10, 10)
        })

    def tearDown(self):
        self.mongo_collection.drop()

    def test_period_queries_get_sorted_by__week_start_at(self):
        self.setup__timestamp_data()
        query = Query.create(period=WEEK)
        result = self.data_set.execute_query(query)
        assert_that(
            result,
            contains(has_entry('_start_at', d_tz(2012, 12, 31)),
                     has_entry('_start_at', d_tz(2013, 1, 28)),
                     has_entry('_start_at', d_tz(2013, 2, 25))))

    def test_data_set_is_recent_enough(self):
        self.mongo_collection.save({
            "_id":
            "first",
            "_updated_at":
            datetime.datetime.now() - datetime.timedelta(seconds=500)
        })
        assert_that(self.data_set.is_recent_enough())

    def test_data_set_is_not_recent_enough(self):
        self.mongo_collection.save({
            "_id":
            "first",
            "_updated_at":
            datetime.datetime.now() - datetime.timedelta(seconds=50000)
        })
        assert_that(not self.data_set.is_recent_enough())
Example #13
def _append_to_data_set(data_set_config, data):
    data_set = DataSet(storage, data_set_config)
    data_set.create_if_not_exists()
    return data_set.store(data)
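A hedged caller-side sketch: data is a list of record dicts, and the tests above suggest records normally carry an _timestamp field. Here data_set_config would be a config dict shaped like the one in Example #2, and the 'count' payload field is made up for illustration.

import datetime

records = [
    {'_timestamp': datetime.datetime(2020, 1, 1), 'count': 42},  # 'count' is an illustrative field
]
_append_to_data_set(data_set_config, records)   # creates the data set if needed, then stores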
Example #14
File: api.py  Project: alphagov/backdrop
def _delete_data_set(data_set_config, data_set_id):
    data_set = DataSet(storage, data_set_config)
    return data_set.delete(data_set_id)
Example #15
File: api.py  Project: alphagov/backdrop
def _patch_data_set(data_set_config, data_set_id, data):
    audit_patch(data_set_config['name'], data)
    data_set = DataSet(storage, data_set_config)
    return data_set.patch(data_set_id, data)
Example #16
File: api.py  Project: alphagov/backdrop
def _append_to_data_set(data_set_config, data):
    audit_append(data_set_config['name'], data)
    data_set = DataSet(storage, data_set_config)
    data_set.create_if_not_exists()
    return data_set.store(data)
Example #17
def _delete_data_set(data_set_config, data_set_id):
    data_set = DataSet(storage, data_set_config)
    return data_set.delete(data_set_id)