Typing and code quality for recorder history (#68647)

J. Nick Koston 2022-03-24 14:58:38 -10:00 committed by GitHub
parent 20c0a5a838
commit c5c34bc0d7
7 changed files with 138 additions and 71 deletions

.strict-typing

@@ -163,6 +163,7 @@ homeassistant.components.pure_energie.*
homeassistant.components.rainmachine.*
homeassistant.components.rdw.*
homeassistant.components.recollect_waste.*
homeassistant.components.recorder.history
homeassistant.components.recorder.purge
homeassistant.components.recorder.repack
homeassistant.components.recorder.statistics

homeassistant/components/recorder/__init__.py

@@ -227,7 +227,7 @@ def is_entity_recorded(hass: HomeAssistant, entity_id: str) -> bool:
return hass.data[DATA_INSTANCE].entity_filter(entity_id)
def run_information(hass, point_in_time: datetime | None = None):
def run_information(hass, point_in_time: datetime | None = None) -> RecorderRuns | None:
"""Return information about current run.
There is also the run that covers point_in_time.
@@ -240,7 +240,9 @@ def run_information(hass, point_in_time: datetime | None = None):
return run_information_with_session(session, point_in_time)
def run_information_from_instance(hass, point_in_time: datetime | None = None):
def run_information_from_instance(
hass, point_in_time: datetime | None = None
) -> RecorderRuns | None:
"""Return information about current run from the existing instance.
Does not query the database for older runs.
@@ -249,9 +251,12 @@ def run_information_from_instance(hass, point_in_time: datetime | None = None):
if point_in_time is None or point_in_time > ins.recording_start:
return ins.run_info
return None
def run_information_with_session(session, point_in_time: datetime | None = None):
def run_information_with_session(
session, point_in_time: datetime | None = None
) -> RecorderRuns | None:
"""Return information about current run from the database."""
recorder_runs = RecorderRuns
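
The run-lookup helpers above now return RecorderRuns | None instead of an implicit Any, so mypy forces every caller to narrow away None before touching the run. A minimal sketch of that contract, with a dataclass standing in for the real SQLAlchemy RecorderRuns model (names here are illustrative, not Home Assistant code):

    from __future__ import annotations

    from dataclasses import dataclass
    from datetime import datetime

    @dataclass
    class RecorderRuns:  # stand-in for the real SQLAlchemy model
        start: datetime

    def run_information(point_in_time: datetime | None = None) -> RecorderRuns | None:
        """Return the run covering point_in_time, or None if nothing does."""
        runs = [RecorderRuns(start=datetime(2022, 1, 1))]
        if point_in_time is None:
            return runs[-1]
        return next((run for run in runs if run.start <= point_in_time), None)

    run = run_information(datetime(2021, 6, 1))
    if run is None:  # mypy now requires this narrowing before attribute access
        print("history did not run before that point")
    else:
        print(run.start)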

homeassistant/components/recorder/history.py

@@ -2,6 +2,7 @@
from __future__ import annotations
from collections import defaultdict
from collections.abc import Iterable, MutableMapping
from datetime import datetime
from itertools import groupby
import logging
@@ -10,6 +11,7 @@ from typing import Any
from sqlalchemy import Column, Text, and_, bindparam, func, or_
from sqlalchemy.ext import baked
from sqlalchemy.orm.session import Session
from sqlalchemy.sql.expression import literal
from homeassistant.components import recorder
@@ -18,6 +20,7 @@ import homeassistant.util.dt as dt_util
from .models import (
LazyState,
RecorderRuns,
StateAttributes,
States,
process_timestamp_to_utc_isoformat,
@@ -123,29 +126,50 @@ def bake_query_and_join_attributes(
return bakery(lambda session: session.query(*QUERY_STATES)), True
def async_setup(hass):
def async_setup(hass: HomeAssistant) -> None:
"""Set up the history hooks."""
hass.data[HISTORY_BAKERY] = baked.bakery()
def get_significant_states(hass, *args, **kwargs):
def get_significant_states(
hass: HomeAssistant,
start_time: datetime,
end_time: datetime | None = None,
entity_ids: list[str] | None = None,
filters: Any | None = None,
include_start_time_state: bool = True,
significant_changes_only: bool = True,
minimal_response: bool = False,
no_attributes: bool = False,
) -> MutableMapping[str, Iterable[LazyState | State | dict[str, Any]]]:
"""Wrap get_significant_states_with_session with an sql session."""
with session_scope(hass=hass) as session:
return get_significant_states_with_session(hass, session, *args, **kwargs)
return get_significant_states_with_session(
hass,
session,
start_time,
end_time,
entity_ids,
filters,
include_start_time_state,
significant_changes_only,
minimal_response,
no_attributes,
)
def get_significant_states_with_session(
hass,
session,
start_time,
end_time=None,
entity_ids=None,
filters=None,
include_start_time_state=True,
significant_changes_only=True,
minimal_response=False,
no_attributes=False,
):
hass: HomeAssistant,
session: Session,
start_time: datetime,
end_time: datetime | None = None,
entity_ids: list[str] | None = None,
filters: Any = None,
include_start_time_state: bool = True,
significant_changes_only: bool = True,
minimal_response: bool = False,
no_attributes: bool = False,
) -> MutableMapping[str, Iterable[LazyState | State | dict[str, Any]]]:
"""
Return states changes during UTC period start_time - end_time.
@@ -238,7 +262,7 @@ def state_changes_during_period(
descending: bool = False,
limit: int | None = None,
include_start_time_state: bool = True,
) -> dict[str, list[State]]:
) -> MutableMapping[str, Iterable[LazyState | State | dict[str, Any]]]:
"""Return states changes during UTC period start_time - end_time."""
with session_scope(hass=hass) as session:
baked_query, join_attributes = bake_query_and_join_attributes(
@@ -288,7 +312,9 @@ def state_changes_during_period(
)
def get_last_state_changes(hass, number_of_states, entity_id):
def get_last_state_changes(
hass: HomeAssistant, number_of_states: int, entity_id: str
) -> MutableMapping[str, Iterable[LazyState | State | dict[str, Any]]]:
"""Return the last number_of_states."""
start_time = dt_util.utcnow()
@@ -330,20 +356,21 @@ def get_last_state_changes(hass, number_of_states, entity_id):
def get_states(
hass,
utc_point_in_time,
entity_ids=None,
run=None,
filters=None,
no_attributes=False,
):
hass: HomeAssistant,
utc_point_in_time: datetime,
entity_ids: list[str] | None = None,
run: RecorderRuns | None = None,
filters: Any = None,
no_attributes: bool = False,
) -> list[LazyState]:
"""Return the states at a specific point in time."""
if run is None:
run = recorder.run_information_from_instance(hass, utc_point_in_time)
if (
run is None
and (run := (recorder.run_information_from_instance(hass, utc_point_in_time)))
is None
):
# History did not run before utc_point_in_time
if run is None:
return []
return []
with session_scope(hass=hass) as session:
return _get_states_with_session(
@@ -352,26 +379,27 @@ def get_states(
def _get_states_with_session(
hass,
session,
utc_point_in_time,
entity_ids=None,
run=None,
filters=None,
no_attributes=False,
):
hass: HomeAssistant,
session: Session,
utc_point_in_time: datetime,
entity_ids: list[str] | None = None,
run: RecorderRuns | None = None,
filters: Any | None = None,
no_attributes: bool = False,
) -> list[LazyState]:
"""Return the states at a specific point in time."""
if entity_ids and len(entity_ids) == 1:
return _get_single_entity_states_with_session(
hass, session, utc_point_in_time, entity_ids[0], no_attributes
)
if run is None:
run = recorder.run_information_with_session(session, utc_point_in_time)
if (
run is None
and (run := (recorder.run_information_from_instance(hass, utc_point_in_time)))
is None
):
# History did not run before utc_point_in_time
if run is None:
return []
return []
# We have more than one entity to look at so we need to do a query on states
# since the last recorder run started.
@@ -444,13 +472,17 @@ def _get_states_with_session(
StateAttributes, (States.attributes_id == StateAttributes.attributes_id)
)
attr_cache = {}
attr_cache: dict[str, dict[str, Any]] = {}
return [LazyState(row, attr_cache) for row in execute(query)]
def _get_single_entity_states_with_session(
hass, session, utc_point_in_time, entity_id, no_attributes=False
):
hass: HomeAssistant,
session: Session,
utc_point_in_time: datetime,
entity_id: str,
no_attributes: bool = False,
) -> list[LazyState]:
# Use an entirely different (and extremely fast) query if we only
# have a single entity id
baked_query, join_attributes = bake_query_and_join_attributes(hass, no_attributes)
@@ -473,16 +505,16 @@ def _get_single_entity_states_with_session(
def _sorted_states_to_dict(
hass,
session,
states,
start_time,
entity_ids,
filters=None,
include_start_time_state=True,
minimal_response=False,
no_attributes=False,
):
hass: HomeAssistant,
session: Session,
states: Iterable[States],
start_time: datetime,
entity_ids: list[str] | None,
filters: Any = None,
include_start_time_state: bool = True,
minimal_response: bool = False,
no_attributes: bool = False,
) -> MutableMapping[str, Iterable[LazyState | State | dict[str, Any]]]:
"""Convert SQL results into JSON friendly data structure.
This takes our state list and turns it into a JSON friendly data
@@ -494,7 +526,7 @@ def _sorted_states_to_dict(
each list of states, otherwise our graphs won't start on the Y
axis correctly.
"""
result = defaultdict(list)
result: dict[str, list[LazyState | dict[str, Any]]] = defaultdict(list)
# Set all entity IDs to empty lists in result set to maintain the order
if entity_ids is not None:
for ent_id in entity_ids:
@@ -526,10 +558,10 @@ def _sorted_states_to_dict(
_process_timestamp_to_utc_isoformat = process_timestamp_to_utc_isoformat
# Append all changes to it
for ent_id, group in groupby(states, lambda state: state.entity_id):
for ent_id, group in groupby(states, lambda state: state.entity_id): # type: ignore[no-any-return]
domain = split_entity_id(ent_id)[0]
ent_results = result[ent_id]
attr_cache = {}
attr_cache: dict[str, dict[str, Any]] = {}
if not minimal_response or domain in NEED_ATTRIBUTE_DOMAINS:
ent_results.extend(LazyState(db_state, attr_cache) for db_state in group)
@@ -542,6 +574,7 @@ def _sorted_states_to_dict(
ent_results.append(LazyState(next(group), attr_cache))
prev_state = ent_results[-1]
assert isinstance(prev_state, LazyState)
initial_state_count = len(ent_results)
for db_state in group:
@@ -570,7 +603,13 @@ def _sorted_states_to_dict(
return {key: val for key, val in result.items() if val}
def get_state(hass, utc_point_in_time, entity_id, run=None, no_attributes=False):
def get_state(
hass: HomeAssistant,
utc_point_in_time: datetime,
entity_id: str,
run: RecorderRuns | None = None,
no_attributes: bool = False,
) -> LazyState | None:
"""Return a state at a specific point in time."""
states = get_states(hass, utc_point_in_time, (entity_id,), run, None, no_attributes)
states = get_states(hass, utc_point_in_time, [entity_id], run, None, no_attributes)
return states[0] if states else None
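
Two patterns in the history.py changes above are worth spelling out. The *args/**kwargs pass-through on get_significant_states was replaced by an explicit, fully typed signature, and the two-step None check in get_states collapsed into a single condition using an assignment expression. A small self-contained sketch of the walrus refactor (lookup_run is a hypothetical stand-in for recorder.run_information_from_instance):

    from __future__ import annotations

    def lookup_run(point: int) -> int | None:
        """Hypothetical stand-in for recorder.run_information_from_instance."""
        return point if point > 0 else None

    def get_states_before(run: int | None, point: int) -> list[str]:
        if run is None:
            run = lookup_run(point)
        if run is None:
            return []
        return [f"state@{run}"]

    def get_states_after(run: int | None, point: int) -> list[str]:
        # Single guard: assign and test in one expression.
        if run is None and (run := lookup_run(point)) is None:
            return []  # history did not run before the requested point
        return [f"state@{run}"]

    assert get_states_before(None, -1) == get_states_after(None, -1) == []
    assert get_states_before(None, 5) == get_states_after(None, 5) == ["state@5"]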

homeassistant/components/recorder/models.py

@@ -22,6 +22,7 @@ from sqlalchemy import (
distinct,
)
from sqlalchemy.dialects import mysql, oracle, postgresql
from sqlalchemy.engine.row import Row
from sqlalchemy.ext.declarative import declared_attr
from sqlalchemy.orm import declarative_base, relationship
from sqlalchemy.orm.session import Session
@@ -536,7 +537,9 @@ class LazyState(State):
"_attr_cache",
]
def __init__(self, row, attr_cache=None): # pylint: disable=super-init-not-called
def __init__( # pylint: disable=super-init-not-called
self, row: Row, attr_cache: dict[str, dict[str, Any]] | None = None
) -> None:
"""Init the lazy state."""
self._row = row
self.entity_id = self._row.entity_id
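
LazyState.__init__ above now types row as a SQLAlchemy Row and attr_cache as dict[str, dict[str, Any]]: the cache maps a raw attributes payload to its decoded dict, so identical payloads shared by many rows are parsed only once. An illustrative sketch of that idea, with SimpleNamespace standing in for Row (this is not the real LazyState code):

    from __future__ import annotations

    import json
    from types import SimpleNamespace
    from typing import Any

    def decode_attributes(
        row: SimpleNamespace, attr_cache: dict[str, dict[str, Any]]
    ) -> dict[str, Any]:
        """Decode row.attributes once per distinct payload."""
        if (cached := attr_cache.get(row.attributes)) is not None:
            return cached
        attr_cache[row.attributes] = attrs = json.loads(row.attributes)
        return attrs

    cache: dict[str, dict[str, Any]] = {}
    rows = [SimpleNamespace(attributes='{"unit": "W"}') for _ in range(3)]
    decoded = [decode_attributes(row, cache) for row in rows]
    assert len(cache) == 1  # parsed once, then served from the cache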

homeassistant/components/recorder/util.py

@@ -114,7 +114,7 @@ def commit(session, work):
def execute(
qry: Query, to_native: bool = False, validate_entity_ids: bool = True
) -> list | None:
) -> list:
"""Query the database and convert the objects to HA native form.
This method also retries a few times in the case of stale connections.
@@ -157,7 +157,7 @@ def execute(
raise
time.sleep(QUERY_RETRY_WAIT)
return None
assert False # unreachable
def validate_or_move_away_sqlite_database(dburl: str) -> bool:
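
The execute() change above tightens the return type from list | None to list. That is sound because the final retry either returns a result or re-raises, so the old trailing return None was dead code; the assert False after the loop convinces mypy the function cannot fall off the end. A sketch of the same retry shape under those assumptions (flaky_query is a made-up demo function):

    import time

    RETRIES = 3
    QUERY_RETRY_WAIT = 0.0  # keep the demo instant

    def flaky_query(attempts: list[int]) -> list[int]:
        """Fail twice with a stale connection, then succeed."""
        attempts.append(1)
        if len(attempts) < 3:
            raise ConnectionError("stale connection")
        return attempts

    def execute_with_retry(attempts: list[int]) -> list[int]:
        for tryno in range(RETRIES):
            try:
                return flaky_query(attempts)
            except ConnectionError:
                if tryno == RETRIES - 1:
                    raise  # last attempt: propagate instead of returning None
                time.sleep(QUERY_RETRY_WAIT)
        assert False  # unreachable; satisfies mypy without a None return

    assert execute_with_retry([]) == [1, 1, 1]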

homeassistant/components/sensor/recorder.py

@@ -2,12 +2,12 @@
from __future__ import annotations
from collections import defaultdict
from collections.abc import Callable, Iterable
from collections.abc import Callable, Iterable, MutableMapping
import datetime
import itertools
import logging
import math
from typing import Any
from typing import Any, cast
from sqlalchemy.orm.session import Session
@@ -19,6 +19,7 @@ from homeassistant.components.recorder import (
)
from homeassistant.components.recorder.const import DOMAIN as RECORDER_DOMAIN
from homeassistant.components.recorder.models import (
LazyState,
StatisticData,
StatisticMetaData,
StatisticResult,
@@ -416,9 +417,9 @@ def _compile_statistics( # noqa: C901
entities_full_history = [
i.entity_id for i in sensor_states if "sum" in wanted_statistics[i.entity_id]
]
history_list = {}
history_list: MutableMapping[str, Iterable[LazyState | State | dict[str, Any]]] = {}
if entities_full_history:
history_list = history.get_significant_states_with_session( # type: ignore[no-untyped-call]
history_list = history.get_significant_states_with_session(
hass,
session,
start - datetime.timedelta.resolution,
@@ -432,7 +433,7 @@ def _compile_statistics( # noqa: C901
if "sum" not in wanted_statistics[i.entity_id]
]
if entities_significant_history:
_history_list = history.get_significant_states_with_session( # type: ignore[no-untyped-call]
_history_list = history.get_significant_states_with_session(
hass,
session,
start - datetime.timedelta.resolution,
@@ -455,7 +456,14 @@ def _compile_statistics( # noqa: C901
device_class = _state.attributes.get(ATTR_DEVICE_CLASS)
entity_history = history_list[entity_id]
unit, fstates = _normalize_states(
hass, session, old_metadatas, entity_history, device_class, entity_id
hass,
session,
old_metadatas,
# entity_history does not contain minimal responses
# so we must cast here
cast(list[State], entity_history),
device_class,
entity_id,
)
if not fstates:
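
The cast in _compile_statistics above is purely a type-checker assertion: this call path requests full states, never the minimal-response dicts that the broader history return type allows, and cast() tells mypy so without any runtime conversion. A minimal demonstration of that property:

    from typing import Any, cast

    history_list: dict[str, list[Any]] = {"sensor.power": ["on", "off"]}

    # Narrow list[Any] to list[str] for the type checker only; the object
    # is returned unchanged, with no copy, check, or conversion at runtime.
    states = cast(list[str], history_list["sensor.power"])
    assert states is history_list["sensor.power"]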

mypy.ini

@@ -1595,6 +1595,17 @@ no_implicit_optional = true
warn_return_any = true
warn_unreachable = true
[mypy-homeassistant.components.recorder.history]
check_untyped_defs = true
disallow_incomplete_defs = true
disallow_subclassing_any = true
disallow_untyped_calls = true
disallow_untyped_decorators = true
disallow_untyped_defs = true
no_implicit_optional = true
warn_return_any = true
warn_unreachable = true
[mypy-homeassistant.components.recorder.purge]
check_untyped_defs = true
disallow_incomplete_defs = true
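
The new [mypy-homeassistant.components.recorder.history] section opts that module into the same strict flags used elsewhere in this file, pairing with the .strict-typing entry at the top of the diff. The practical effect of disallow_untyped_defs, for example, is that mypy rejects any function left without annotations (a toy illustration, not Home Assistant code):

    # With disallow_untyped_defs, mypy flags the first definition
    # ("Function is missing a type annotation") and accepts the second;
    # runtime behavior is identical.
    def setup_untyped(hass):
        return None

    def setup_typed(hass: object) -> None:
        return None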