1
mirror of https://github.com/home-assistant/core synced 2024-08-15 18:25:44 +02:00

Keep storing statistics for sensors which change device class (#79155)

This commit is contained in:
Erik Montnemery 2022-09-27 20:33:28 +02:00 committed by GitHub
parent a561b608bf
commit 9084beda32
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
2 changed files with 90 additions and 11 deletions

View File

@ -156,10 +156,16 @@ def _normalize_states(
entity_id: str,
) -> tuple[str | None, str | None, list[tuple[float, State]]]:
"""Normalize units."""
old_metadata = old_metadatas[entity_id][1] if entity_id in old_metadatas else None
state_unit: str | None = None
if device_class not in UNIT_CONVERTERS:
# We're not normalizing this device class, return the states as they are
if device_class not in UNIT_CONVERTERS or (
old_metadata
and old_metadata["unit_of_measurement"]
!= UNIT_CONVERTERS[device_class].NORMALIZED_UNIT
):
# We're either not normalizing this device class or this entity is not stored
# normalized, return the states as they are
fstates = []
for state in entity_history:
try:
@ -176,10 +182,10 @@ def _normalize_states(
if entity_id not in hass.data[WARN_UNSTABLE_UNIT]:
hass.data[WARN_UNSTABLE_UNIT].add(entity_id)
extra = ""
if old_metadata := old_metadatas.get(entity_id):
if old_metadata:
extra = (
" and matches the unit of already compiled statistics "
f"({old_metadata[1]['unit_of_measurement']})"
f"({old_metadata['unit_of_measurement']})"
)
_LOGGER.warning(
"The unit of %s is changing, got multiple %s, generation of long term "

View File

@ -2142,9 +2142,9 @@ def test_compile_hourly_statistics_changing_units_3(
@pytest.mark.parametrize(
"device_class, state_unit, statistic_unit, unit_class, mean, min, max",
"device_class, state_unit, statistic_unit, unit_class, mean1, mean2, min, max",
[
("power", "kW", "W", None, 13.050847, -10, 30),
("power", "kW", "W", None, 13.050847, 13.333333, -10, 30),
],
)
def test_compile_hourly_statistics_changing_device_class_1(
@ -2154,7 +2154,8 @@ def test_compile_hourly_statistics_changing_device_class_1(
state_unit,
statistic_unit,
unit_class,
mean,
mean1,
mean2,
min,
max,
):
@ -2194,7 +2195,7 @@ def test_compile_hourly_statistics_changing_device_class_1(
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(zero),
"end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=5)),
"mean": approx(mean),
"mean": approx(mean1),
"min": approx(min),
"max": approx(max),
"last_reset": None,
@ -2204,7 +2205,7 @@ def test_compile_hourly_statistics_changing_device_class_1(
]
}
# Update device class and record additional states
# Update device class and record additional states in the original UoM
attributes["device_class"] = device_class
four, _states = record_states(
hass, zero + timedelta(minutes=5), "sensor.test1", attributes
@ -2220,6 +2221,65 @@ def test_compile_hourly_statistics_changing_device_class_1(
# Run statistics again; the device class changed but the unit did not, so
# additional statistics are generated in the original unit
do_adhoc_statistics(hass, start=zero + timedelta(minutes=10))
wait_recording_done(hass)
statistic_ids = list_statistic_ids(hass)
assert statistic_ids == [
{
"statistic_id": "sensor.test1",
"display_unit_of_measurement": state_unit,
"has_mean": True,
"has_sum": False,
"name": None,
"source": "recorder",
"statistics_unit_of_measurement": state_unit,
"unit_class": unit_class,
},
]
stats = statistics_during_period(hass, zero, period="5minute")
assert stats == {
"sensor.test1": [
{
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(zero),
"end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=5)),
"mean": approx(mean1),
"min": approx(min),
"max": approx(max),
"last_reset": None,
"state": None,
"sum": None,
},
{
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(
zero + timedelta(minutes=10)
),
"end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=15)),
"mean": approx(mean2),
"min": approx(min),
"max": approx(max),
"last_reset": None,
"state": None,
"sum": None,
},
]
}
# Update device class and record additional states in a different UoM
attributes["unit_of_measurement"] = statistic_unit
four, _states = record_states(
hass, zero + timedelta(minutes=15), "sensor.test1", attributes
)
states["sensor.test1"] += _states["sensor.test1"]
four, _states = record_states(
hass, zero + timedelta(minutes=20), "sensor.test1", attributes
)
states["sensor.test1"] += _states["sensor.test1"]
hist = history.get_significant_states(hass, zero, four)
assert dict(states) == dict(hist)
# Run statistics again, we get a warning, and no additional statistics are generated
do_adhoc_statistics(hass, start=zero + timedelta(minutes=20))
wait_recording_done(hass)
assert (
f"The normalized unit of sensor.test1 ({statistic_unit}) does not match the "
f"unit of already compiled statistics ({state_unit})" in caplog.text
@ -2244,13 +2304,26 @@ def test_compile_hourly_statistics_changing_device_class_1(
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(zero),
"end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=5)),
"mean": approx(mean),
"mean": approx(mean1),
"min": approx(min),
"max": approx(max),
"last_reset": None,
"state": None,
"sum": None,
}
},
{
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(
zero + timedelta(minutes=10)
),
"end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=15)),
"mean": approx(mean2),
"min": approx(min),
"max": approx(max),
"last_reset": None,
"state": None,
"sum": None,
},
]
}
assert "Error while processing event StatisticsTask" not in caplog.text