def test_can_insert_fact_record(empty_warehouse):
    """Insert one fact directly via BoringEvent.insert and verify the row.

    Builds the fact table plus its date/place dimensions, inserts a single
    event, then reads the table back and checks the stored column values.
    """
    Warehouse.use(empty_warehouse)
    BoringEvent.build()
    Date.update()
    Place.update()

    fact_1 = BoringEvent()
    fact_1.date = date(2000, 7, 16)
    fact_1.place = "MOON"
    fact_1.people = 3
    fact_1.duration = 10.7
    fact_1.very_boring = False
    BoringEvent.insert(fact_1)

    connection = Warehouse.get()
    # closing() guarantees the cursor is released even if execute/fetchall
    # raises; the manual close() it replaces leaked on that path.  This also
    # matches the style used by the staging-source test in this file.
    with closing(connection.cursor(dictionary=True)) as cursor:
        cursor.execute("select * from %s" % BoringEvent.__tablename__)
        data = cursor.fetchall()

    assert len(data) == 1
    datum = data[0]
    assert datum["num_people"] == 3
    assert datum["duration"] == 10.7
    # MySQL stores booleans as tinyint; normalise before comparing.
    assert bool(datum["very_boring"]) is False
def test_can_insert_fact_record_from_staging_source(empty_warehouse):
    """End-to-end staging flow: stage a raw event, run the fact update, and
    verify the resulting row, including columns expanded from extra_table.
    """
    Warehouse.use(empty_warehouse)
    Staging.build()
    BoringEvent.build()

    # Prepare expansion data ready for expansion.
    # The non-ASCII colour value exercises utf8 round-tripping below.
    connection = Warehouse.get()
    with closing(connection.cursor()) as cursor:
        cursor.execute("""\
        create table extra_table (
            id int primary key,
            colour varchar(20),
            size varchar(20)
        ) charset=utf8 collate=utf8_bin
        """)
        cursor.execute("""\
        insert into extra_table (id, colour, size)
        values (12, 'grün', '37kg'), (13, 'orange', '9 miles')
        """)
    connection.commit()

    # Insert staging record.
    # "pointless_ignored_value" checks that unmapped keys are dropped;
    # "expansion_key_1" should join against extra_table id 12.
    Staging.insert(Staging("boring", {
        "when": date(2000, 7, 16).isoformat(),
        "where": "MOON",
        "num_people": 3,
        "duration": 10.7,
        "very_boring": False,
        "pointless_ignored_value": "spoon",
        "expansion_key_1": 12,
    }))

    # Perform update.
    BoringEvent.update()

    # Check a record has been correctly inserted.
    with closing(connection.cursor(dictionary=True)) as cursor:
        cursor.execute("select * from %s" % BoringEvent.__tablename__)
        data = cursor.fetchall()
    assert len(data) == 1
    datum = data[0]
    assert datum["num_people"] == 3
    assert datum["duration"] == 10.7
    assert bool(datum["very_boring"]) is False
    # mysql returns unicode as bytearrays.
    assert datum["colour_of_stuff"].decode('utf8') == u"grün"
    assert datum["size_of_stuff"].decode('utf8') == "37kg"
def fetch(cls, since=None, historical=False):
    """
    Create date instances as there's no remote data source for this one.

    Reads MAX(`date`) from the dimension table; if the table is empty,
    generation starts at cls.start_date.  Yields a Date for each day up
    to and including cls.end_date.

    :param since: unused here; kept for interface compatibility.
    :param historical: unused here; kept for interface compatibility.
    """
    table_name = cls.__tablename__
    log.info("Fetching data from the depths of time itself",
             extra={"table_name": table_name})

    # Get the last inserted date.
    sql = "SELECT MAX(`date`) FROM %s" % escaped(table_name)
    connection = Warehouse.get()
    # closing() releases the cursor even if execute/fetchall raises; the
    # manual close() it replaces leaked on that path.
    with closing(connection.cursor()) as cursor:
        cursor.execute(sql)
        cur_date = cursor.fetchall()[0][0]

    if cur_date is None:
        # Build history.
        cur_date = cls.start_date
    # NOTE(review): when the table already holds rows, the first value
    # yielded repeats the stored MAX(date) — confirm whether the intent
    # was to resume from the following day instead.
    while cur_date <= cls.end_date:
        yield Date(cur_date)
        cur_date = cur_date + timedelta(days=1)
def _fetch_store_row_count(self):
    """Return the number of rows currently in the store table."""
    count_query = 'SELECT COUNT(*) FROM %s' % Store.__tablename__
    with closing(Warehouse.get().cursor()) as cursor:
        cursor.execute(count_query)
        result_rows = cursor.fetchall()
    # COUNT(*) yields a single row with a single column.
    return result_rows[0][0]
def _fetch_store_row_count(self):
    """Count how many rows the store table currently holds."""
    db_connection = Warehouse.get()
    cursor = db_connection.cursor()
    try:
        cursor.execute('SELECT COUNT(*) FROM %s' % Store.__tablename__)
        all_rows = cursor.fetchall()
    finally:
        # Explicit try/finally mirrors what closing() would do.
        cursor.close()
    return all_rows[0][0]
def _get_rows(self):
    """Fetch every row of the person table as a list of dicts."""
    db = Warehouse.get()
    # dictionary=True makes the driver return each row as a column->value dict.
    with closing(db.cursor(dictionary=True)) as cursor:
        cursor.execute('SELECT * FROM %s' % Person.__tablename__)
        return cursor.fetchall()