Increase coverage and optimize history skip_initial_state (#42014)

Avoids a database query when skip_initial_state is set and none of the
requested entities have changed state within the requested time window.
Author: J. Nick Koston
Date: 2020-10-19 02:53:47 -05:00 (committed by GitHub)
parent 388a5d8c91
commit 9649525fe6
2 changed files with 61 additions and 2 deletions
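
The core observation behind the change: the in-memory state machine already records last_changed for every entity it holds, so when skip_initial_state is set and none of the requested entities has a last_changed newer than the requested start time, the recorder database has nothing to return for that window and the query can be skipped outright. Below is a minimal, self-contained sketch of that check, using a plain dict of timestamps as a stand-in for hass.states; every identifier in it is illustrative and not part of the commit itself.

from datetime import datetime, timedelta, timezone
from typing import Iterable, Mapping, Optional

# Hypothetical stand-in for the state machine: entity_id -> last_changed timestamp.
LastChanged = Mapping[str, datetime]


def may_have_state_changes_after(
    states: LastChanged, entity_ids: Iterable[str], start_time: datetime
) -> bool:
    """Return False only when every entity provably has no changes after start_time."""
    for entity_id in entity_ids:
        last_changed: Optional[datetime] = states.get(entity_id)
        # An entity we know nothing about might have changed, so stay conservative.
        if last_changed is None or last_changed > start_time:
            return True
    return False


now = datetime.now(timezone.utc)
states = {"light.kitchen": now - timedelta(hours=2), "light.cow": now - timedelta(hours=3)}
window_start = now - timedelta(hours=1)

# Neither entity changed inside the window -> the history query can be skipped.
assert not may_have_state_changes_after(states, ["light.kitchen", "light.cow"], window_start)

# An entity missing from the state machine forces the full query.
assert may_have_state_changes_after(states, ["light.unknown"], window_start)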

View File

@@ -1,11 +1,11 @@
 """Provide pre-made queries on top of the recorder component."""
 from collections import defaultdict
-from datetime import timedelta
+from datetime import datetime as dt, timedelta
 from itertools import groupby
 import json
 import logging
 import time
-from typing import Optional, cast
+from typing import Iterable, Optional, cast
 
 from aiohttp import web
 from sqlalchemy import and_, bindparam, func, not_, or_
@@ -33,6 +33,7 @@ from homeassistant.helpers.entityfilter import (
     CONF_ENTITY_GLOBS,
     INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA,
 )
+from homeassistant.helpers.typing import HomeAssistantType
 import homeassistant.util.dt as dt_util
 
 # mypy: allow-untyped-defs, no-check-untyped-defs
@@ -502,6 +503,13 @@ class HistoryPeriodView(HomeAssistantView):
         hass = request.app["hass"]
 
+        if (
+            not include_start_time_state
+            and entity_ids
+            and not _entities_may_have_state_changes_after(hass, entity_ids, start_time)
+        ):
+            return self.json([])
+
         return cast(
             web.Response,
             await hass.async_add_executor_job(
@@ -660,6 +668,19 @@ def _glob_to_like(glob_str):
     return States.entity_id.like(glob_str.translate(GLOB_TO_SQL_CHARS))
 
 
+def _entities_may_have_state_changes_after(
+    hass: HomeAssistantType, entity_ids: Iterable, start_time: dt
+) -> bool:
+    """Check the state machine to see if entities have changed since start time."""
+    for entity_id in entity_ids:
+        state = hass.states.get(entity_id)
+
+        if state is None or state.last_changed > start_time:
+            return True
+
+    return False
+
+
 class LazyState(State):
     """A lazy version of core State."""

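Between the implementation above and the tests below, it may help to see the fast path from a client's point of view. A rough sketch, assuming an ordinary Home Assistant instance reachable over HTTP; the base URL, token, and entity ids are placeholders, and the endpoint shape mirrors the one the new test hits.

from datetime import datetime, timedelta, timezone

import requests

BASE_URL = "http://localhost:8123"       # placeholder
TOKEN = "YOUR_LONG_LIVED_ACCESS_TOKEN"   # placeholder

start = (datetime.now(timezone.utc) - timedelta(hours=1)).isoformat()
response = requests.get(
    f"{BASE_URL}/api/history/period/{start}",
    headers={"Authorization": f"Bearer {TOKEN}"},
    params={
        "filter_entity_id": "light.kitchen,light.cow",
        "skip_initial_state": "",  # presence of the flag is what matters
    },
)
response.raise_for_status()

# With this change, the response is [] without touching the recorder database
# when neither entity has changed state since `start`.
print(response.json())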
View File

@@ -970,3 +970,41 @@ async def test_entity_ids_limit_via_api(hass, hass_client):
     assert len(response_json) == 2
     assert response_json[0][0]["entity_id"] == "light.kitchen"
     assert response_json[1][0]["entity_id"] == "light.cow"
+
+
+async def test_entity_ids_limit_via_api_with_skip_initial_state(hass, hass_client):
+    """Test limiting history to entity_ids with skip_initial_state."""
+    await hass.async_add_executor_job(init_recorder_component, hass)
+    await async_setup_component(
+        hass,
+        "history",
+        {"history": {}},
+    )
+    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
+    hass.states.async_set("light.kitchen", "on")
+    hass.states.async_set("light.cow", "on")
+    hass.states.async_set("light.nomatch", "on")
+
+    await hass.async_block_till_done()
+
+    await hass.async_add_executor_job(trigger_db_commit, hass)
+    await hass.async_block_till_done()
+    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
+
+    client = await hass_client()
+    response = await client.get(
+        f"/api/history/period/{dt_util.utcnow().isoformat()}?filter_entity_id=light.kitchen,light.cow&skip_initial_state",
+    )
+    assert response.status == 200
+    response_json = await response.json()
+    assert len(response_json) == 0
+
+    when = dt_util.utcnow() - timedelta(minutes=1)
+    response = await client.get(
+        f"/api/history/period/{when.isoformat()}?filter_entity_id=light.kitchen,light.cow&skip_initial_state",
+    )
+    assert response.status == 200
+    response_json = await response.json()
+    assert len(response_json) == 2
+    assert response_json[0][0]["entity_id"] == "light.kitchen"
+    assert response_json[1][0]["entity_id"] == "light.cow"
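
Two design points are visible in the diff: the early return applies only when include_start_time_state is false (i.e. skip_initial_state was passed) and an explicit entity_ids filter was supplied, since without a bounded entity list there is nothing to check against the state machine. And because _entities_may_have_state_changes_after reports True for any entity that is missing from the state machine or that changed after start_time, the shortcut can only skip a query that would have returned no rows; it never hides results the full query would have produced.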