Example #1 (0)
async def _insert_sum_statistics(
    hass: HomeAssistant,
    metadata: StatisticMetaData,
    start: datetime.datetime,
    end: datetime.datetime,
    max_diff: float,
):
    """Insert fake hourly sum statistics from *start* to *end*.

    Continues from the last recorded sum for the statistic (if any) and
    grows it by a random amount of at most *max_diff* per hour.
    """
    statistic_id = metadata["statistic_id"]
    running_sum = 0.0

    # Resume from the most recent recorded sum so the series keeps growing.
    last_stats = await get_instance(hass).async_add_executor_job(
        get_last_statistics, hass, 1, statistic_id, True)
    if statistic_id in last_stats:
        running_sum = last_stats[statistic_id][0]["sum"] or 0

    rows: list[StatisticData] = []
    cursor = start
    while cursor < end:
        running_sum += random() * max_diff
        rows.append({"start": cursor, "sum": running_sum})
        cursor += datetime.timedelta(hours=1)

    async_add_external_statistics(hass, metadata, rows)
Example #2 (0)
File: test_init.py  Project: jbouwh/core
async def test_demo_statistics_growth(hass, recorder_mock):
    """Test that the demo sum statistics adds to the previous state."""
    now = dt_util.now()
    last_week_midnight = (now - datetime.timedelta(days=7)).replace(
        hour=0, minute=0, second=0, microsecond=0)

    statistic_id = f"{DOMAIN}:energy_consumption_kwh"
    metadata = {
        "source": DOMAIN,
        "name": "Energy consumption 1",
        "statistic_id": statistic_id,
        "unit_of_measurement": "kWh",
        "has_mean": False,
        "has_sum": True,
    }
    # Seed the recorder with a large pre-existing sum one week back.
    seed_sum = 2**20
    async_add_external_statistics(
        hass, metadata, [{"start": last_week_midnight, "sum": seed_sum}])
    await async_wait_recording_done(hass)

    assert await async_setup_component(hass, DOMAIN, {DOMAIN: {}})
    await hass.async_block_till_done()
    await hass.async_start()
    await async_wait_recording_done(hass)

    # The demo integration must continue from the seeded sum, not reset it.
    statistics = await get_instance(hass).async_add_executor_job(
        get_last_statistics, hass, 1, statistic_id, False)
    assert statistics[statistic_id][0]["sum"] > seed_sum
Example #3 (0)
async def _insert_statistics(hass):
    """Insert some fake statistics.

    Creates mean temperature statistics and sum energy statistics for
    yesterday, continuing the energy sum from the last recorded value.
    """
    now = dt_util.now()
    yesterday = now - datetime.timedelta(days=1)
    yesterday_midnight = yesterday.replace(hour=0,
                                           minute=0,
                                           second=0,
                                           microsecond=0)

    # Fake yesterday's temperatures
    metadata = {
        "source": DOMAIN,
        "statistic_id": f"{DOMAIN}:temperature_outdoor",
        "unit_of_measurement": "°C",
        "has_mean": True,
        "has_sum": False,
    }
    statistics = _generate_mean_statistics(
        yesterday_midnight, yesterday_midnight + datetime.timedelta(days=1),
        15, 1)
    async_add_external_statistics(hass, metadata, statistics)

    # Fake yesterday's energy consumption
    statistic_id = f"{DOMAIN}:energy_consumption"
    metadata = {
        "source": DOMAIN,
        "statistic_id": statistic_id,
        "unit_of_measurement": "kWh",
        "has_mean": False,
        "has_sum": True,
    }
    sum_ = 0
    last_stats = await hass.async_add_executor_job(get_last_statistics, hass,
                                                   1, statistic_id, True)
    # Fix: look up the statistic actually being generated. The previous code
    # checked the hard-coded key "domain:energy_consumption" and then read
    # "domain.electricity_total", neither of which ever matched, so the sum
    # always restarted at 0. Also index [0]: get_last_statistics returns a
    # list of rows per statistic_id (see _insert_sum_statistics).
    if statistic_id in last_stats:
        sum_ = last_stats[statistic_id][0]["sum"] or 0
    statistics = _generate_sum_statistics(
        yesterday_midnight, yesterday_midnight + datetime.timedelta(days=1),
        sum_, 1)
    async_add_external_statistics(hass, metadata, statistics)
Example #4 (0)
def test_duplicate_statistics_handle_integrity_error(hass_recorder, caplog):
    """Test the recorder does not blow up if statistics is duplicated."""
    hass = hass_recorder()
    wait_recording_done(hass)

    period1 = dt_util.as_utc(dt_util.parse_datetime("2021-09-01 00:00:00"))
    period2 = dt_util.as_utc(dt_util.parse_datetime("2021-09-30 23:00:00"))

    external_energy_metadata_1 = {
        "has_mean": False,
        "has_sum": True,
        "name": "Total imported energy",
        "source": "test",
        "statistic_id": "test:total_energy_import_tariff_1",
        "unit_of_measurement": "kWh",
    }
    external_energy_statistics_1 = [
        {"start": period1, "last_reset": None, "state": 3, "sum": 5},
    ]
    external_energy_statistics_2 = [
        {"start": period2, "last_reset": None, "state": 3, "sum": 6},
    ]

    # Force the "row already exists" check to miss so every insert is
    # attempted, while spying on the real insert implementation.
    with patch.object(
        statistics, "_statistics_exists", return_value=False
    ), patch.object(
        statistics, "_insert_statistics", wraps=statistics._insert_statistics
    ) as insert_statistics_mock:
        for batch in (
            external_energy_statistics_1,
            external_energy_statistics_1,  # deliberate duplicate
            external_energy_statistics_2,
        ):
            async_add_external_statistics(hass, external_energy_metadata_1,
                                          batch)
        wait_recording_done(hass)
        assert insert_statistics_mock.call_count == 3

    # Only two rows survive: the duplicated insert hit the integrity error.
    with session_scope(hass=hass) as session:
        assert len(session.query(recorder.models.Statistics).all()) == 2

    assert "Blocked attempt to insert duplicated statistic rows" in caplog.text
Example #5 (0)
async def test_fossil_energy_consumption(hass, hass_ws_client):
    """Test fossil_energy_consumption with co2 sensor data."""
    # Seeds two external energy "sum" series plus a CO2 "mean" series, then
    # queries energy/fossil_energy_consumption grouped by hour, day and month.
    now = dt_util.utcnow()
    later = dt_util.as_utc(dt_util.parse_datetime("2022-09-01 00:00:00"))

    await hass.async_add_executor_job(init_recorder_component, hass)
    await async_setup_component(hass, "history", {})
    await async_setup_component(hass, "sensor", {})

    # Two hourly samples in September and two in October 2021.
    period1 = dt_util.as_utc(dt_util.parse_datetime("2021-09-01 00:00:00"))
    period2 = dt_util.as_utc(dt_util.parse_datetime("2021-09-30 23:00:00"))
    period2_day_start = dt_util.as_utc(
        dt_util.parse_datetime("2021-09-30 00:00:00"))
    period3 = dt_util.as_utc(dt_util.parse_datetime("2021-10-01 00:00:00"))
    period4 = dt_util.as_utc(dt_util.parse_datetime("2021-10-31 23:00:00"))
    period4_day_start = dt_util.as_utc(
        dt_util.parse_datetime("2021-10-31 00:00:00"))

    # Tariff 1: sums 2, 3, 4, 5 at the four period starts.
    external_energy_statistics_1 = (
        {
            "start": period1,
            "last_reset": None,
            "state": 0,
            "sum": 2,
        },
        {
            "start": period2,
            "last_reset": None,
            "state": 1,
            "sum": 3,
        },
        {
            "start": period3,
            "last_reset": None,
            "state": 2,
            "sum": 4,
        },
        {
            "start": period4,
            "last_reset": None,
            "state": 3,
            "sum": 5,
        },
    )
    external_energy_metadata_1 = {
        "has_mean": False,
        "has_sum": True,
        "name": "Total imported energy",
        "source": "test",
        "statistic_id": "test:total_energy_import_tariff_1",
        "unit_of_measurement": "kWh",
    }
    # Tariff 2: ten times tariff 1 (sums 20, 30, 40, 50).
    external_energy_statistics_2 = (
        {
            "start": period1,
            "last_reset": None,
            "state": 0,
            "sum": 20,
        },
        {
            "start": period2,
            "last_reset": None,
            "state": 1,
            "sum": 30,
        },
        {
            "start": period3,
            "last_reset": None,
            "state": 2,
            "sum": 40,
        },
        {
            "start": period4,
            "last_reset": None,
            "state": 3,
            "sum": 50,
        },
    )
    external_energy_metadata_2 = {
        "has_mean": False,
        "has_sum": True,
        "name": "Total imported energy",
        "source": "test",
        "statistic_id": "test:total_energy_import_tariff_2",
        "unit_of_measurement": "kWh",
    }
    # CO2 fossil percentage (mean): 10, 30, 60, 90 %.
    external_co2_statistics = (
        {
            "start": period1,
            "last_reset": None,
            "mean": 10,
        },
        {
            "start": period2,
            "last_reset": None,
            "mean": 30,
        },
        {
            "start": period3,
            "last_reset": None,
            "mean": 60,
        },
        {
            "start": period4,
            "last_reset": None,
            "mean": 90,
        },
    )
    external_co2_metadata = {
        "has_mean": True,
        "has_sum": False,
        "name": "Fossil percentage",
        "source": "test",
        "statistic_id": "test:fossil_percentage",
        "unit_of_measurement": "%",
    }

    async_add_external_statistics(hass, external_energy_metadata_1,
                                  external_energy_statistics_1)
    async_add_external_statistics(hass, external_energy_metadata_2,
                                  external_energy_statistics_2)
    async_add_external_statistics(hass, external_co2_metadata,
                                  external_co2_statistics)
    await async_wait_recording_done_without_instance(hass)

    # Hourly grouping: fossil energy per period is the delta of the combined
    # (tariff 1 + tariff 2) sum multiplied by the mean fossil fraction.
    client = await hass_ws_client()
    await client.send_json({
        "id":
        1,
        "type":
        "energy/fossil_energy_consumption",
        "start_time":
        now.isoformat(),
        "end_time":
        later.isoformat(),
        "energy_statistic_ids": [
            "test:total_energy_import_tariff_1",
            "test:total_energy_import_tariff_2",
        ],
        "co2_statistic_id":
        "test:fossil_percentage",
        "period":
        "hour",
    })
    response = await client.receive_json()
    assert response["success"]
    assert response["result"] == {
        period2.isoformat(): pytest.approx((33.0 - 22.0) * 0.3),
        period3.isoformat(): pytest.approx((44.0 - 33.0) * 0.6),
        period4.isoformat(): pytest.approx((55.0 - 44.0) * 0.9),
    }

    # Daily grouping: same values, keyed by the start of each day.
    await client.send_json({
        "id":
        2,
        "type":
        "energy/fossil_energy_consumption",
        "start_time":
        now.isoformat(),
        "end_time":
        later.isoformat(),
        "energy_statistic_ids": [
            "test:total_energy_import_tariff_1",
            "test:total_energy_import_tariff_2",
        ],
        "co2_statistic_id":
        "test:fossil_percentage",
        "period":
        "day",
    })
    response = await client.receive_json()
    assert response["success"]
    assert response["result"] == {
        period2_day_start.isoformat(): pytest.approx((33.0 - 22.0) * 0.3),
        period3.isoformat(): pytest.approx((44.0 - 33.0) * 0.6),
        period4_day_start.isoformat(): pytest.approx((55.0 - 44.0) * 0.9),
    }

    # Monthly grouping: October's two deltas collapse into one bucket.
    await client.send_json({
        "id":
        3,
        "type":
        "energy/fossil_energy_consumption",
        "start_time":
        now.isoformat(),
        "end_time":
        later.isoformat(),
        "energy_statistic_ids": [
            "test:total_energy_import_tariff_1",
            "test:total_energy_import_tariff_2",
        ],
        "co2_statistic_id":
        "test:fossil_percentage",
        "period":
        "month",
    })
    response = await client.receive_json()
    assert response["success"]
    assert response["result"] == {
        period1.isoformat():
        pytest.approx((33.0 - 22.0) * 0.3),
        period3.isoformat():
        pytest.approx(((44.0 - 33.0) * 0.6) + ((55.0 - 44.0) * 0.9)),
    }
Example #6 (0)
async def test_fossil_energy_consumption_no_data(hass, hass_ws_client):
    """Test fossil_energy_consumption when there is no data."""
    # Seeds statistics (some with sum=None) but queries statistic ids that do
    # not exist, expecting an empty result for every grouping period.
    now = dt_util.utcnow()
    later = dt_util.as_utc(dt_util.parse_datetime("2022-09-01 00:00:00"))

    await hass.async_add_executor_job(init_recorder_component, hass)
    await async_setup_component(hass, "history", {})
    await async_setup_component(hass, "sensor", {})

    period1 = dt_util.as_utc(dt_util.parse_datetime("2021-09-01 00:00:00"))
    period2 = dt_util.as_utc(dt_util.parse_datetime("2021-09-30 23:00:00"))
    period3 = dt_util.as_utc(dt_util.parse_datetime("2021-10-01 00:00:00"))
    period4 = dt_util.as_utc(dt_util.parse_datetime("2021-10-31 23:00:00"))

    # Tariff 1: first sample has sum=None (no data yet).
    external_energy_statistics_1 = (
        {
            "start": period1,
            "last_reset": None,
            "state": 0,
            "sum": None,
        },
        {
            "start": period2,
            "last_reset": None,
            "state": 1,
            "sum": 3,
        },
        {
            "start": period3,
            "last_reset": None,
            "state": 2,
            "sum": 5,
        },
        {
            "start": period4,
            "last_reset": None,
            "state": 3,
            "sum": 8,
        },
    )
    external_energy_metadata_1 = {
        "has_mean": False,
        "has_sum": True,
        "name": "Total imported energy",
        "source": "test",
        "statistic_id": "test:total_energy_import_tariff_1",
        "unit_of_measurement": "kWh",
    }
    # Tariff 2: second sample has sum=None.
    external_energy_statistics_2 = (
        {
            "start": period1,
            "last_reset": None,
            "state": 0,
            "sum": 20,
        },
        {
            "start": period2,
            "last_reset": None,
            "state": 1,
            "sum": None,
        },
        {
            "start": period3,
            "last_reset": None,
            "state": 2,
            "sum": 50,
        },
        {
            "start": period4,
            "last_reset": None,
            "state": 3,
            "sum": 80,
        },
    )
    external_energy_metadata_2 = {
        "has_mean": False,
        "has_sum": True,
        "name": "Total imported energy",
        "source": "test",
        "statistic_id": "test:total_energy_import_tariff_2",
        "unit_of_measurement": "kWh",
    }

    async_add_external_statistics(hass, external_energy_metadata_1,
                                  external_energy_statistics_1)
    async_add_external_statistics(hass, external_energy_metadata_2,
                                  external_energy_statistics_2)
    await async_wait_recording_done_without_instance(hass)

    # All queries below reference "*_missing" ids that were never inserted,
    # so each period grouping must return an empty mapping.
    client = await hass_ws_client()
    await client.send_json({
        "id":
        1,
        "type":
        "energy/fossil_energy_consumption",
        "start_time":
        now.isoformat(),
        "end_time":
        later.isoformat(),
        "energy_statistic_ids": [
            "test:total_energy_import_tariff_1_missing",
            "test:total_energy_import_tariff_2_missing",
        ],
        "co2_statistic_id":
        "test:co2_ratio_missing",
        "period":
        "hour",
    })
    response = await client.receive_json()
    assert response["success"]
    assert response["result"] == {}

    await client.send_json({
        "id":
        2,
        "type":
        "energy/fossil_energy_consumption",
        "start_time":
        now.isoformat(),
        "end_time":
        later.isoformat(),
        "energy_statistic_ids": [
            "test:total_energy_import_tariff_1_missing",
            "test:total_energy_import_tariff_2_missing",
        ],
        "co2_statistic_id":
        "test:co2_ratio_missing",
        "period":
        "day",
    })
    response = await client.receive_json()
    assert response["success"]
    assert response["result"] == {}

    await client.send_json({
        "id":
        3,
        "type":
        "energy/fossil_energy_consumption",
        "start_time":
        now.isoformat(),
        "end_time":
        later.isoformat(),
        "energy_statistic_ids": [
            "test:total_energy_import_tariff_1_missing",
            "test:total_energy_import_tariff_2_missing",
        ],
        "co2_statistic_id":
        "test:co2_ratio_missing",
        "period":
        "month",
    })
    response = await client.receive_json()
    assert response["success"]
    assert response["result"] == {}
async def test_get_statistics_metadata(hass, hass_ws_client, units, attributes, unit):
    """Test get_statistics_metadata."""
    # Parametrized via units/attributes/unit fixtures (declared elsewhere):
    # verifies recorder/get_statistics_metadata both while the entity state
    # exists and after it is removed (metadata then comes from the database).
    now = dt_util.utcnow()

    hass.config.units = units
    await hass.async_add_executor_job(init_recorder_component, hass)
    await async_setup_component(hass, "history", {"history": {}})
    await async_setup_component(hass, "sensor", {})
    await async_init_recorder_component(hass)
    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)

    # No statistics recorded yet: metadata list is empty.
    client = await hass_ws_client()
    await client.send_json({"id": 1, "type": "recorder/get_statistics_metadata"})
    response = await client.receive_json()
    assert response["success"]
    assert response["result"] == []

    period1 = dt_util.as_utc(dt_util.parse_datetime("2021-09-01 00:00:00"))
    period2 = dt_util.as_utc(dt_util.parse_datetime("2021-09-30 23:00:00"))
    period3 = dt_util.as_utc(dt_util.parse_datetime("2021-10-01 00:00:00"))
    period4 = dt_util.as_utc(dt_util.parse_datetime("2021-10-31 23:00:00"))
    external_energy_statistics_1 = (
        {
            "start": period1,
            "last_reset": None,
            "state": 0,
            "sum": 2,
        },
        {
            "start": period2,
            "last_reset": None,
            "state": 1,
            "sum": 3,
        },
        {
            "start": period3,
            "last_reset": None,
            "state": 2,
            "sum": 5,
        },
        {
            "start": period4,
            "last_reset": None,
            "state": 3,
            "sum": 8,
        },
    )
    external_energy_metadata_1 = {
        "has_mean": False,
        "has_sum": True,
        "name": "Total imported energy",
        "source": "test",
        "statistic_id": "test:total_gas",
        "unit_of_measurement": unit,
    }

    async_add_external_statistics(
        hass, external_energy_metadata_1, external_energy_statistics_1
    )

    # Create two sensor states so the recorder picks them up.
    hass.states.async_set("sensor.test", 10, attributes=attributes)
    await hass.async_block_till_done()

    await hass.async_add_executor_job(trigger_db_commit, hass)
    await hass.async_block_till_done()

    hass.states.async_set("sensor.test2", 10, attributes=attributes)
    await hass.async_block_till_done()

    await hass.async_add_executor_job(trigger_db_commit, hass)
    await hass.async_block_till_done()

    # Metadata filtered to sensor.test while the state still exists.
    await client.send_json(
        {
            "id": 2,
            "type": "recorder/get_statistics_metadata",
            "statistic_ids": ["sensor.test"],
        }
    )
    response = await client.receive_json()
    assert response["success"]
    assert response["result"] == [
        {
            "statistic_id": "sensor.test",
            "has_mean": False,
            "has_sum": True,
            "name": None,
            "source": "recorder",
            "unit_of_measurement": unit,
        }
    ]

    hass.data[recorder.DATA_INSTANCE].do_adhoc_statistics(start=now)
    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
    # Remove the state, statistics will now be fetched from the database
    hass.states.async_remove("sensor.test")
    await hass.async_block_till_done()

    # Same metadata must still be returned from the database.
    await client.send_json(
        {
            "id": 3,
            "type": "recorder/get_statistics_metadata",
            "statistic_ids": ["sensor.test"],
        }
    )
    response = await client.receive_json()
    assert response["success"]
    assert response["result"] == [
        {
            "statistic_id": "sensor.test",
            "has_mean": False,
            "has_sum": True,
            "name": None,
            "source": "recorder",
            "unit_of_measurement": unit,
        }
    ]
def test_monthly_statistics(hass_recorder, caplog, timezone):
    """Test inserting external statistics."""
    dt_util.set_default_time_zone(dt_util.get_time_zone(timezone))

    hass = hass_recorder()
    wait_recording_done(hass)
    assert "Compiling statistics for" not in caplog.text
    assert "Statistics already compiled" not in caplog.text

    zero = dt_util.utcnow()
    period1 = dt_util.as_utc(dt_util.parse_datetime("2021-09-01 00:00:00"))
    period2 = dt_util.as_utc(dt_util.parse_datetime("2021-09-30 23:00:00"))
    period3 = dt_util.as_utc(dt_util.parse_datetime("2021-10-01 00:00:00"))
    period4 = dt_util.as_utc(dt_util.parse_datetime("2021-10-31 23:00:00"))

    # Two samples in September 2021, two in October; state n, sum n + 2.
    external_statistics = tuple(
        {"start": start, "last_reset": None, "state": state, "sum": state + 2}
        for state, start in enumerate((period1, period2, period3, period4))
    )
    external_metadata = {
        "has_mean": False,
        "has_sum": True,
        "name": "Total imported energy",
        "source": "test",
        "statistic_id": "test:total_energy_import",
        "unit_of_measurement": "kWh",
    }

    async_add_external_statistics(hass, external_metadata, external_statistics)
    wait_recording_done(hass)

    # Monthly grouping keeps the last state/sum of each month.
    stats = statistics_during_period(hass, zero, period="month")
    sep_start = dt_util.as_utc(dt_util.parse_datetime("2021-09-01 00:00:00"))
    sep_end = dt_util.as_utc(dt_util.parse_datetime("2021-10-01 00:00:00"))
    oct_start = dt_util.as_utc(dt_util.parse_datetime("2021-10-01 00:00:00"))
    oct_end = dt_util.as_utc(dt_util.parse_datetime("2021-11-01 00:00:00"))
    assert stats == {
        "test:total_energy_import": [
            {
                "statistic_id": "test:total_energy_import",
                "start": sep_start.isoformat(),
                "end": sep_end.isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(1.0),
                "sum": approx(3.0),
            },
            {
                "statistic_id": "test:total_energy_import",
                "start": oct_start.isoformat(),
                "end": oct_end.isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(3.0),
                "sum": approx(5.0),
            },
        ]
    }

    # Restore the default time zone for subsequent tests.
    dt_util.set_default_time_zone(dt_util.get_time_zone("UTC"))
def test_external_statistics_errors(hass_recorder, caplog):
    """Test validation of external statistics."""
    hass = hass_recorder()
    wait_recording_done(hass)
    assert "Compiling statistics for" not in caplog.text
    assert "Statistics already compiled" not in caplog.text

    zero = dt_util.utcnow()
    period1 = zero.replace(minute=0, second=0,
                           microsecond=0) + timedelta(hours=1)

    _external_statistics = {
        "start": period1,
        "last_reset": None,
        "state": 0,
        "sum": 2,
    }
    _external_metadata = {
        "has_mean": False,
        "has_sum": True,
        "name": "Total imported energy",
        "source": "test",
        "statistic_id": "test:total_energy_import",
        "unit_of_measurement": "kWh",
    }

    def _assert_rejected(metadata, stats, checked_id):
        """Insert must raise and leave the recorder database untouched."""
        with pytest.raises(HomeAssistantError):
            async_add_external_statistics(hass, metadata, (stats, ))
        wait_recording_done(hass)
        assert statistics_during_period(hass, zero, period="hour") == {}
        assert list_statistic_ids(hass) == []
        assert get_metadata(hass, statistic_ids=(checked_id, )) == {}

    # Attempt to insert statistics for an entity
    _assert_rejected(
        {**_external_metadata, "statistic_id": "sensor.total_energy_import"},
        {**_external_statistics},
        "sensor.total_energy_import",
    )

    # Attempt to insert statistics for the wrong domain
    _assert_rejected(
        {**_external_metadata, "source": "other"},
        {**_external_statistics},
        "test:total_energy_import",
    )

    # Attempt to insert statistics with a naive starting time
    _assert_rejected(
        {**_external_metadata},
        {**_external_statistics, "start": period1.replace(tzinfo=None)},
        "test:total_energy_import",
    )

    # Attempt to insert statistics with an invalid (non-top-of-hour) start
    _assert_rejected(
        {**_external_metadata},
        {**_external_statistics, "start": period1.replace(minute=1)},
        "test:total_energy_import",
    )
async def test_external_statistics(hass, hass_ws_client, caplog):
    """Test inserting external statistics."""
    # Covers initial insert, partial update (overwriting one period), full
    # update (adding max/mean/min), and the adjust_sum_statistics command.
    client = await hass_ws_client()
    await async_init_recorder_component(hass)

    assert "Compiling statistics for" not in caplog.text
    assert "Statistics already compiled" not in caplog.text

    zero = dt_util.utcnow()
    # Two consecutive top-of-hour periods after "zero".
    period1 = zero.replace(minute=0, second=0,
                           microsecond=0) + timedelta(hours=1)
    period2 = zero.replace(minute=0, second=0,
                           microsecond=0) + timedelta(hours=2)

    external_statistics1 = {
        "start": period1,
        "last_reset": None,
        "state": 0,
        "sum": 2,
    }
    external_statistics2 = {
        "start": period2,
        "last_reset": None,
        "state": 1,
        "sum": 3,
    }

    external_metadata = {
        "has_mean": False,
        "has_sum": True,
        "name": "Total imported energy",
        "source": "test",
        "statistic_id": "test:total_energy_import",
        "unit_of_measurement": "kWh",
    }

    async_add_external_statistics(hass, external_metadata,
                                  (external_statistics1, external_statistics2))
    await async_wait_recording_done_without_instance(hass)
    # Both inserted periods must be readable back with the same values.
    stats = statistics_during_period(hass, zero, period="hour")
    assert stats == {
        "test:total_energy_import": [
            {
                "statistic_id": "test:total_energy_import",
                "start": period1.isoformat(),
                "end": (period1 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(0.0),
                "sum": approx(2.0),
            },
            {
                "statistic_id": "test:total_energy_import",
                "start": period2.isoformat(),
                "end": (period2 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(1.0),
                "sum": approx(3.0),
            },
        ]
    }
    statistic_ids = list_statistic_ids(hass)
    assert statistic_ids == [{
        "has_mean": False,
        "has_sum": True,
        "statistic_id": "test:total_energy_import",
        "name": "Total imported energy",
        "source": "test",
        "unit_of_measurement": "kWh",
    }]
    metadata = get_metadata(hass, statistic_ids=("test:total_energy_import", ))
    assert metadata == {
        "test:total_energy_import": (
            1,
            {
                "has_mean": False,
                "has_sum": True,
                "name": "Total imported energy",
                "source": "test",
                "statistic_id": "test:total_energy_import",
                "unit_of_measurement": "kWh",
            },
        )
    }
    # get_last_statistics returns only the most recent period (period2).
    last_stats = get_last_statistics(hass, 1, "test:total_energy_import", True)
    assert last_stats == {
        "test:total_energy_import": [
            {
                "statistic_id": "test:total_energy_import",
                "start": period2.isoformat(),
                "end": (period2 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(1.0),
                "sum": approx(3.0),
            },
        ]
    }

    # Update the previously inserted statistics
    external_statistics = {
        "start": period1,
        "last_reset": None,
        "state": 5,
        "sum": 6,
    }
    async_add_external_statistics(hass, external_metadata,
                                  (external_statistics, ))
    await async_wait_recording_done_without_instance(hass)
    # period1 row is overwritten (state 5, sum 6); period2 is untouched.
    stats = statistics_during_period(hass, zero, period="hour")
    assert stats == {
        "test:total_energy_import": [
            {
                "statistic_id": "test:total_energy_import",
                "start": period1.isoformat(),
                "end": (period1 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(5.0),
                "sum": approx(6.0),
            },
            {
                "statistic_id": "test:total_energy_import",
                "start": period2.isoformat(),
                "end": (period2 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(1.0),
                "sum": approx(3.0),
            },
        ]
    }

    # Update the previously inserted statistics
    external_statistics = {
        "start": period1,
        "max": 1,
        "mean": 2,
        "min": 3,
        "last_reset": None,
        "state": 4,
        "sum": 5,
    }
    async_add_external_statistics(hass, external_metadata,
                                  (external_statistics, ))
    await async_wait_recording_done_without_instance(hass)
    # period1 now also carries max/mean/min values.
    stats = statistics_during_period(hass, zero, period="hour")
    assert stats == {
        "test:total_energy_import": [
            {
                "statistic_id": "test:total_energy_import",
                "start": period1.isoformat(),
                "end": (period1 + timedelta(hours=1)).isoformat(),
                "max": approx(1.0),
                "mean": approx(2.0),
                "min": approx(3.0),
                "last_reset": None,
                "state": approx(4.0),
                "sum": approx(5.0),
            },
            {
                "statistic_id": "test:total_energy_import",
                "start": period2.isoformat(),
                "end": (period2 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(1.0),
                "sum": approx(3.0),
            },
        ]
    }

    # Adjust the sum from period2 onwards by +1000.
    await client.send_json({
        "id": 1,
        "type": "recorder/adjust_sum_statistics",
        "statistic_id": "test:total_energy_import",
        "start_time": period2.isoformat(),
        "adjustment": 1000.0,
    })
    response = await client.receive_json()
    assert response["success"]

    await async_wait_recording_done_without_instance(hass)
    # Only period2's sum changes (3 -> 1003); period1 is before start_time.
    stats = statistics_during_period(hass, zero, period="hour")
    assert stats == {
        "test:total_energy_import": [
            {
                "statistic_id": "test:total_energy_import",
                "start": period1.isoformat(),
                "end": (period1 + timedelta(hours=1)).isoformat(),
                "max": approx(1.0),
                "mean": approx(2.0),
                "min": approx(3.0),
                "last_reset": None,
                "state": approx(4.0),
                "sum": approx(5.0),
            },
            {
                "statistic_id": "test:total_energy_import",
                "start": period2.isoformat(),
                "end": (period2 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(1.0),
                "sum": approx(1003.0),
            },
        ]
    }
示例#11
0
    async def _insert_statistics(self):
        """Insert Tibber consumption and cost statistics.

        For each home with hourly consumption data, build long-term
        statistics for energy "consumption" (kWh) and "totalCost" (home
        currency) and hand them to the recorder as external statistics.
        The running ``sum`` continues from the last stored statistic so
        repeated calls never double-count.
        """
        for home in self._tibber_connection.get_homes():
            if not home.hourly_consumption_data:
                continue
            for sensor_type in (
                    "consumption",
                    "totalCost",
            ):
                # External statistic id, e.g.
                # "tibber:energy_consumption_<home id without dashes>".
                statistic_id = (f"{TIBBER_DOMAIN}:energy_"
                                f"{sensor_type.lower()}_"
                                f"{home.home_id.replace('-', '')}")

                # Most recent stored statistic (if any) for this id.
                last_stats = await get_instance(
                    self.hass).async_add_executor_job(get_last_statistics,
                                                      self.hass, 1,
                                                      statistic_id, True)

                if not last_stats:
                    # First time we insert 5 years of data (if available)
                    hourly_consumption_data = await home.get_historic_data(
                        5 * 365 * 24)

                    _sum = 0
                    last_stats_time = None
                else:
                    # hourly_consumption_data contains the last 30 days
                    # of consumption data.
                    # We update the statistics with the last 30 days
                    # of data to handle corrections in the data.
                    hourly_consumption_data = home.hourly_consumption_data

                    # Read the stored sum at the hour just before the first
                    # data point so the running total can continue from it.
                    start = dt_util.parse_datetime(
                        hourly_consumption_data[0]["from"]) - timedelta(
                            hours=1)
                    stat = await get_instance(self.hass
                                              ).async_add_executor_job(
                                                  statistics_during_period,
                                                  self.hass,
                                                  start,
                                                  None,
                                                  [statistic_id],
                                                  "hour",
                                                  True,
                                              )
                    _sum = stat[statistic_id][0]["sum"]
                    last_stats_time = stat[statistic_id][0]["start"]

                statistics = []

                for data in hourly_consumption_data:
                    # Skip hours with no reading for this sensor type.
                    if data.get(sensor_type) is None:
                        continue

                    start = dt_util.parse_datetime(data["from"])
                    # Skip data points already covered by stored statistics.
                    if last_stats_time is not None and start <= last_stats_time:
                        continue

                    _sum += data[sensor_type]

                    statistics.append(
                        StatisticData(
                            start=start,
                            state=data[sensor_type],
                            sum=_sum,
                        ))

                if sensor_type == "consumption":
                    unit = ENERGY_KILO_WATT_HOUR
                else:
                    # Cost statistics are reported in the home's currency.
                    unit = home.currency
                metadata = StatisticMetaData(
                    has_mean=False,
                    has_sum=True,
                    name=f"{home.name} {sensor_type}",
                    source=TIBBER_DOMAIN,
                    statistic_id=statistic_id,
                    unit_of_measurement=unit,
                )
                async_add_external_statistics(self.hass, metadata, statistics)
示例#12
0
async def test_fossil_energy_consumption_hole(hass, hass_ws_client,
                                              recorder_mock):
    """Test fossil_energy_consumption when some data points lack sum."""
    now = dt_util.utcnow()
    later = dt_util.as_utc(dt_util.parse_datetime("2022-09-01 00:00:00"))

    await async_setup_component(hass, "history", {})
    await async_setup_component(hass, "sensor", {})
    await async_recorder_block_till_done(hass)

    period1 = dt_util.as_utc(dt_util.parse_datetime("2021-09-01 00:00:00"))
    period2 = dt_util.as_utc(dt_util.parse_datetime("2021-09-30 23:00:00"))
    period2_day_start = dt_util.as_utc(
        dt_util.parse_datetime("2021-09-30 00:00:00"))
    period3 = dt_util.as_utc(dt_util.parse_datetime("2021-10-01 00:00:00"))
    period4 = dt_util.as_utc(dt_util.parse_datetime("2021-10-31 23:00:00"))
    period4_day_start = dt_util.as_utc(
        dt_util.parse_datetime("2021-10-31 00:00:00"))

    def _row(start, state, sum_):
        """Build one external statistics data point."""
        return {
            "start": start,
            "last_reset": None,
            "state": state,
            "sum": sum_,
        }

    # Tariff 1 has no sum for the very first hour.
    external_energy_statistics_1 = (
        _row(period1, 0, None),
        _row(period2, 1, 3),
        _row(period3, 2, 5),
        _row(period4, 3, 8),
    )
    external_energy_metadata_1 = {
        "has_mean": False,
        "has_sum": True,
        "name": "Total imported energy",
        "source": "test",
        "statistic_id": "test:total_energy_import_tariff_1",
        "unit_of_measurement": "kWh",
    }
    # Tariff 2 has no sum for the second hour.
    external_energy_statistics_2 = (
        _row(period1, 0, 20),
        _row(period2, 1, None),
        _row(period3, 2, 50),
        _row(period4, 3, 80),
    )
    external_energy_metadata_2 = {
        "has_mean": False,
        "has_sum": True,
        "name": "Total imported energy",
        "source": "test",
        "statistic_id": "test:total_energy_import_tariff_2",
        "unit_of_measurement": "kWh",
    }

    async_add_external_statistics(hass, external_energy_metadata_1,
                                  external_energy_statistics_1)
    async_add_external_statistics(hass, external_energy_metadata_2,
                                  external_energy_statistics_2)
    await async_wait_recording_done(hass)

    client = await hass_ws_client()

    # Hourly consumption.
    await client.send_json({
        "id": 1,
        "type": "energy/fossil_energy_consumption",
        "start_time": now.isoformat(),
        "end_time": later.isoformat(),
        "energy_statistic_ids": [
            "test:total_energy_import_tariff_1",
            "test:total_energy_import_tariff_2",
        ],
        "co2_statistic_id": "test:co2_ratio_missing",
        "period": "hour",
    })
    response = await client.receive_json()
    assert response["success"]
    assert response["result"] == {
        period2.isoformat(): pytest.approx(3.0 - 20.0),
        period3.isoformat(): pytest.approx(55.0 - 3.0),
        period4.isoformat(): pytest.approx(88.0 - 55.0),
    }

    # Daily consumption.
    await client.send_json({
        "id": 2,
        "type": "energy/fossil_energy_consumption",
        "start_time": now.isoformat(),
        "end_time": later.isoformat(),
        "energy_statistic_ids": [
            "test:total_energy_import_tariff_1",
            "test:total_energy_import_tariff_2",
        ],
        "co2_statistic_id": "test:co2_ratio_missing",
        "period": "day",
    })
    response = await client.receive_json()
    assert response["success"]
    assert response["result"] == {
        period2_day_start.isoformat(): pytest.approx(3.0 - 20.0),
        period3.isoformat(): pytest.approx(55.0 - 3.0),
        period4_day_start.isoformat(): pytest.approx(88.0 - 55.0),
    }

    # Monthly consumption.
    await client.send_json({
        "id": 3,
        "type": "energy/fossil_energy_consumption",
        "start_time": now.isoformat(),
        "end_time": later.isoformat(),
        "energy_statistic_ids": [
            "test:total_energy_import_tariff_1",
            "test:total_energy_import_tariff_2",
        ],
        "co2_statistic_id": "test:co2_ratio_missing",
        "period": "month",
    })
    response = await client.receive_json()
    assert response["success"]
    assert response["result"] == {
        period1.isoformat(): pytest.approx(3.0 - 20.0),
        period3.isoformat(): pytest.approx((55.0 - 3.0) + (88.0 - 55.0)),
    }
示例#13
0
async def _insert_statistics(hass: HomeAssistant) -> None:
    """Insert some fake statistics."""
    now = dt_util.now()
    yesterday = now - datetime.timedelta(days=1)
    yesterday_midnight = yesterday.replace(hour=0,
                                           minute=0,
                                           second=0,
                                           microsecond=0)
    today_midnight = yesterday_midnight + datetime.timedelta(days=1)

    # Fake yesterday's temperatures
    temperature_metadata: StatisticMetaData = {
        "source": DOMAIN,
        "name": "Outdoor temperature",
        "statistic_id": f"{DOMAIN}:temperature_outdoor",
        "unit_of_measurement": "°C",
        "has_mean": True,
        "has_sum": False,
    }
    temperature_stats = _generate_mean_statistics(yesterday_midnight,
                                                  today_midnight, 15, 1)
    async_add_external_statistics(hass, temperature_metadata,
                                  temperature_stats)

    # Fake consumption statistics, (name, statistic id suffix, unit,
    # max hourly increase). Only the kWh energy and m³ gas entries use
    # units that the energy dashboard can pick; the MWh and ft³ entries
    # deliberately use units it can not.
    sum_specs = (
        # ~ 12 kWh / day
        ("Energy consumption 1", "energy_consumption_kwh", "kWh", 2),
        # ~ 12 kWh / day, expressed in MWh
        ("Energy consumption 2", "energy_consumption_mwh", "MWh", 0.002),
        # ~ 6 m3 / day
        ("Gas consumption 1", "gas_consumption_m3", "m³", 1),
        # ~ 180 ft3 / day
        ("Gas consumption 2", "gas_consumption_ft3", "ft³", 30),
    )
    for name, suffix, unit, max_diff in sum_specs:
        sum_metadata: StatisticMetaData = {
            "source": DOMAIN,
            "name": name,
            "statistic_id": f"{DOMAIN}:{suffix}",
            "unit_of_measurement": unit,
            "has_mean": False,
            "has_sum": True,
        }
        await _insert_sum_statistics(hass, sum_metadata, yesterday_midnight,
                                     today_midnight, max_diff)
示例#14
0
def test_external_statistics(hass_recorder, caplog):
    """Test inserting external statistics."""
    hass = hass_recorder()
    wait_recording_done(hass)
    assert "Compiling statistics for" not in caplog.text
    assert "Statistics already compiled" not in caplog.text

    zero = dt_util.utcnow()
    hour_start = zero.replace(minute=0, second=0, microsecond=0)
    period1 = hour_start + timedelta(hours=1)
    period2 = hour_start + timedelta(hours=2)

    def _expected_row(period, **values):
        """Build one expected statistics row, overriding fields per call."""
        row = {
            "statistic_id": "test:total_energy_import",
            "start": period.isoformat(),
            "end": (period + timedelta(hours=1)).isoformat(),
            "max": None,
            "mean": None,
            "min": None,
            "last_reset": None,
            "state": None,
            "sum": None,
        }
        row.update(values)
        return row

    external_metadata = {
        "has_mean": False,
        "has_sum": True,
        "name": "Total imported energy",
        "source": "test",
        "statistic_id": "test:total_energy_import",
        "unit_of_measurement": "kWh",
    }

    # Insert two hours of statistics.
    async_add_external_statistics(
        hass,
        external_metadata,
        (
            {"start": period1, "last_reset": None, "state": 0, "sum": 2},
            {"start": period2, "last_reset": None, "state": 1, "sum": 3},
        ),
    )
    wait_recording_done(hass)
    stats = statistics_during_period(hass, zero, period="hour")
    assert stats == {
        "test:total_energy_import": [
            _expected_row(period1, state=approx(0.0), sum=approx(2.0)),
            _expected_row(period2, state=approx(1.0), sum=approx(3.0)),
        ]
    }
    statistic_ids = list_statistic_ids(hass)
    assert statistic_ids == [
        {
            "statistic_id": "test:total_energy_import",
            "name": "Total imported energy",
            "source": "test",
            "unit_of_measurement": "kWh",
        }
    ]
    metadata = get_metadata(hass, statistic_ids=("test:total_energy_import",))
    assert metadata == {
        "test:total_energy_import": (
            1,
            {
                "has_mean": False,
                "has_sum": True,
                "name": "Total imported energy",
                "source": "test",
                "statistic_id": "test:total_energy_import",
                "unit_of_measurement": "kWh",
            },
        )
    }

    # Update the previously inserted statistics
    async_add_external_statistics(
        hass,
        external_metadata,
        ({"start": period1, "last_reset": None, "state": 5, "sum": 6},),
    )
    wait_recording_done(hass)
    stats = statistics_during_period(hass, zero, period="hour")
    assert stats == {
        "test:total_energy_import": [
            _expected_row(period1, state=approx(5.0), sum=approx(6.0)),
            _expected_row(period2, state=approx(1.0), sum=approx(3.0)),
        ]
    }

    # Update the previously inserted statistics
    async_add_external_statistics(
        hass,
        external_metadata,
        (
            {
                "start": period1,
                "max": 1,
                "mean": 2,
                "min": 3,
                "last_reset": None,
                "state": 4,
                "sum": 5,
            },
        ),
    )
    wait_recording_done(hass)
    stats = statistics_during_period(hass, zero, period="hour")
    assert stats == {
        "test:total_energy_import": [
            _expected_row(
                period1,
                max=approx(1.0),
                mean=approx(2.0),
                min=approx(3.0),
                state=approx(4.0),
                sum=approx(5.0),
            ),
            _expected_row(period2, state=approx(1.0), sum=approx(3.0)),
        ]
    }
示例#15
0
    async def _insert_statistics(reset=False):
        """Insert edata energy consumption statistics.

        Builds hourly long-term statistics (total plus per-tariff-period
        p1/p2/p3) from the consumption data exposed by ``api`` and hands
        them to the recorder. When ``reset`` is True, previously stored
        statistics are cleared and the running sums restart at zero.
        """
        scopes = ("total", "p1", "p2", "p3")
        statistic_id = {
            "total": f"{DOMAIN}:{scups.lower()}_consumption",
            "p1": f"{DOMAIN}:{scups.lower()}_p1_consumption",
            "p2": f"{DOMAIN}:{scups.lower()}_p2_consumption",
            "p3": f"{DOMAIN}:{scups.lower()}_p3_consumption",
        }

        # Fetch the most recent stored statistic per scope so new data can
        # continue the accumulated sums instead of restarting them.
        last_stats = {
            x: await hass.async_add_executor_job(get_last_statistics, hass, 1,
                                                 statistic_id[x], True)
            for x in scopes
        }

        # Seed the running sums from stored data; `or 0` guards against a
        # stored sum of None (`.get("sum", 0)` only defaults when the key
        # is missing), which would make `_sum += ...` raise TypeError.
        _sum = {
            x: (last_stats[x][statistic_id[x]][0].get("sum") or 0)
            if last_stats[x] and not reset else 0
            for x in scopes
        }

        statistics = {x: [] for x in scopes}

        if reset:
            # Lazy %-formatting: only rendered if the record is emitted.
            _LOGGER.warning("clearing statistics for %s",
                            list(statistic_id.values()))
            await hass.async_add_executor_job(
                clear_statistics, hass.data[DATA_INSTANCE],
                list(statistic_id.values()))

        try:
            last_stats_time = last_stats["total"][
                statistic_id["total"]][0]["end"]
        except KeyError:
            # No previous statistics stored yet.
            last_stats_time = None

        for data in api.data.get("consumptions", {}):
            # Only append data points newer than the last stored statistic
            # (or everything when resetting).
            if reset or last_stats_time is None or dt_util.as_local(
                    data["datetime"]) >= dt_util.parse_datetime(
                        last_stats_time):
                _p = du.get_pvpc_tariff(data["datetime"])
                start = dt_util.as_local(data["datetime"])
                state = data["value_kWh"]
                # Each data point contributes to the total and to exactly
                # one tariff-period statistic.
                _sum["total"] += state
                statistics["total"].append(
                    StatisticData(start=start, state=state,
                                  sum=_sum["total"]))
                _sum[_p] += state
                statistics[_p].append(
                    StatisticData(start=start, state=state, sum=_sum[_p]))

        for _scope in ("p1", "p2", "p3", "total"):
            metadata = StatisticMetaData(
                has_mean=False,
                has_sum=True,
                name=f"{DOMAIN}_{scups} {_scope} energy consumption",
                source=DOMAIN,
                statistic_id=statistic_id[_scope],
                unit_of_measurement=ENERGY_KILO_WATT_HOUR,
            )
            async_add_external_statistics(hass, metadata, statistics[_scope])