async def test_newer_items_superceed_older(
    self, db_session, temperature_datapoints, populated_db, get_data, cleaner
):
    """Cleaning should prefer the most recently collected values per deployment.

    Inserts a newer Location/Temperature pair for one deployment and checks
    that the cleaner's output contains the new values, not the original ones.
    """
    # Pick any of the deployment ids and find the existing pair of
    # DataPoints (Location + Temperature) recorded for that deployment.
    deployment_1 = temperature_datapoints[0].deployment_id
    existing = [
        datapoint
        for datapoint in temperature_datapoints
        if datapoint.deployment_id == deployment_1
    ]
    old_location = [
        datapoint for datapoint in existing if datapoint.sensor_name == "Location"
    ][0]
    old_temperature = [
        datapoint for datapoint in existing if datapoint.sensor_name == "Temperature"
    ][0]
    # A newer reading pair for the same deployment, collected far in the
    # future relative to the fixture data so it sorts strictly later.
    new_location = DataPoint(
        sensor_name="Location",
        deployment_id=deployment_1,
        collected_at=datetime.datetime(2031, 4, 1, 12, 0, 0),
        data=(-10.5, 150.1),
    )
    new_temperature = DataPoint(
        sensor_name="Temperature",
        deployment_id=deployment_1,
        collected_at=datetime.datetime(2031, 4, 1, 12, 0, 0),
        data=21,
    )
    # Insert the new points directly through the db session, bypassing
    # the collection pipeline.
    for data in [new_location, new_temperature]:
        insert = datapoint_table.insert().values(**data._asdict())
        db_session.execute(insert)
    data = get_data()
    deployments = 0
    async for deployment, data_points in cleaner(data):
        deployments += 1
        # NOTE(review): cleaner is applied a second time to the
        # per-deployment data_points — presumably the first pass groups by
        # deployment and the second produces plottable values; confirm
        # against the cleaner fixture's contract.
        cleaned = {a async for a in cleaner(data_points)}
    # There should only be one deployment
    assert deployments == 1
    # We expect the newer one, not the older
    assert (new_location.data, new_temperature.data) in cleaned
    assert (old_location.data, old_temperature.data) not in cleaned
    # The cleaner converts two data points to one plottable
    assert len(cleaned) == len(temperature_datapoints) / 2
async def get_data() -> t.AsyncIterator[DataPoint]:
    """Yield every DataPoint stored in the database.

    The blocking SQLAlchemy query is executed in the default thread-pool
    executor so the event loop is not stalled while rows are fetched.
    """
    session = db_session_var.get()
    query = session.query(datapoint_table)
    # Run the blocking .all() call off the event loop thread.
    results = await asyncio.get_running_loop().run_in_executor(None, query.all)
    for result in results:
        yield DataPoint.from_sql_result(result)
async def get_data(
    sensor_name: t.Optional[str] = None,
    deployment_id: t.Optional[UUID] = None,
    collected_before: t.Optional[datetime.datetime] = None,
    collected_after: t.Optional[datetime.datetime] = None,
) -> t.AsyncIterator[DataPoint]:
    """Yield stored DataPoints, optionally filtered by the given criteria.

    Any combination of sensor name, deployment id and collection-time window
    may be supplied; omitted criteria are not applied.  Results are ordered
    by deployment id, then sensor name, then collection time.  The blocking
    query runs in the default executor to keep the event loop responsive.
    """
    session = db_session_var.get()
    query = session.query(datapoint_table)
    # Equality filters: apply only the ones the caller supplied.
    for value, column in (
        (sensor_name, datapoint_table.c.sensor_name),
        (deployment_id, datapoint_table.c.deployment_id),
    ):
        if value:
            query = query.filter(column == value)
    # Collection-time window filters (exclusive bounds).
    if collected_before:
        query = query.filter(datapoint_table.c.collected_at < collected_before)
    if collected_after:
        query = query.filter(datapoint_table.c.collected_at > collected_after)
    query = query.order_by(
        datapoint_table.c.deployment_id,
        datapoint_table.c.sensor_name,
        datapoint_table.c.collected_at,
    )
    # Fetch all matching rows without blocking the event loop.
    results = await asyncio.get_running_loop().run_in_executor(None, query.all)
    for result in results:
        yield DataPoint.from_sql_result(result)
async def generate_datapoints(datas):
    """Asynchronously yield a DataPoint per (time, value) pair in *datas*.

    Every yielded point shares one freshly generated deployment id and the
    sensor name "TestSensor"; ids are assigned sequentially starting at 1.
    """
    shared_deployment = uuid.uuid4()
    counter = 1
    for when, value in datas:
        yield DataPoint(
            id=counter,
            collected_at=when,
            sensor_name="TestSensor",
            data=value,
            deployment_id=shared_deployment,
        )
        counter += 1
def temperature_datapoints(self, migrated_db, db_session, temperature_data):
    """Build a Location/Temperature DataPoint pair per entry in temperature_data.

    Each (coordinate, temperature) item gets its own deployment id; both
    points in a pair share that id and a single common timestamp.
    """
    now = datetime.datetime.now()
    points = []
    for coord, temp in temperature_data.items():
        # Both sensors of a pair belong to the same simulated deployment.
        shared_deployment = uuid.uuid4()
        points.extend(
            [
                DataPoint(
                    sensor_name="Location",
                    deployment_id=shared_deployment,
                    collected_at=now,
                    data=coord,
                ),
                DataPoint(
                    sensor_name="Temperature",
                    deployment_id=shared_deployment,
                    collected_at=now,
                    data=temp,
                ),
            ]
        )
    return points
def generate_points(time_to_wait):
    """Endlessly yield fake DataPoints, sleeping time_to_wait seconds between each.

    Each point is stamped with the current time and carries a random value
    drawn from {1, 2, 3}.  This generator never terminates on its own.
    """
    while True:
        # Blocking pause between readings to simulate a slow sensor.
        time.sleep(time_to_wait)
        reading = random.choice([1, 2, 3])
        yield DataPoint(
            sensor_name="Fake",
            collected_at=datetime.now(),
            data=reading,
        )