
Tibber cost statistics (#63626)

* Tibber cost statistics

Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>

* Tibber cost statistics

Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>

* unit

Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>

* unit

Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>

* Update homeassistant/components/tibber/sensor.py

Co-authored-by: Martin Hjelmare <marhje52@gmail.com>

* Update homeassistant/components/tibber/sensor.py

Co-authored-by: Martin Hjelmare <marhje52@gmail.com>

* break lines

Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>

Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
Daniel Hjelseth Høyer authored 2022-01-09 12:20:53 +01:00, committed by GitHub
parent 1d24fb7ad9
commit 0e0ef0aa17
2 changed files with 98 additions and 57 deletions
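
For orientation: this change teaches the Tibber data coordinator to publish a second set of external statistics per home, total cost in the home's currency, alongside the existing energy consumption in kWh. A minimal sketch of the statistic-ID scheme used in the diff below; external_statistic_id is a hypothetical helper for illustration (the diff builds the ID inline), and TIBBER_DOMAIN resolving to "tibber" is confirmed by the test's expected IDs:

    TIBBER_DOMAIN = "tibber"

    def external_statistic_id(home_id: str, sensor_type: str) -> str:
        """Build the external statistic ID, e.g. "tibber:energy_totalcost_home_id"."""
        return f"{TIBBER_DOMAIN}:energy_{sensor_type.lower()}_{home_id.replace('-', '')}"

    assert external_statistic_id("home_id", "consumption") == "tibber:energy_consumption_home_id"
    assert external_statistic_id("home_id", "totalCost") == "tibber:energy_totalcost_home_id"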

homeassistant/components/tibber/sensor.py

@@ -561,67 +561,78 @@ class TibberDataCoordinator(update_coordinator.DataUpdateCoordinator):
         for home in self._tibber_connection.get_homes():
             if not home.hourly_consumption_data:
                 continue
-            statistic_id = (
-                f"{TIBBER_DOMAIN}:energy_consumption_{home.home_id.replace('-', '')}"
-            )
-            last_stats = await self.hass.async_add_executor_job(
-                get_last_statistics, self.hass, 1, statistic_id, True
-            )
-            if not last_stats:
-                # First time we insert 5 years of data (if available)
-                hourly_consumption_data = await home.get_historic_data(5 * 365 * 24)
-                _sum = 0
-                last_stats_time = None
-            else:
-                # hourly_consumption_data contains the last 30 days of consumption data.
-                # We update the statistics with the last 30 days of data to handle corrections in the data.
-                hourly_consumption_data = home.hourly_consumption_data
-                start = dt_util.parse_datetime(
-                    hourly_consumption_data[0]["from"]
-                ) - timedelta(hours=1)
-                stat = await self.hass.async_add_executor_job(
-                    statistics_during_period,
-                    self.hass,
-                    start,
-                    None,
-                    [statistic_id],
-                    "hour",
-                    True,
-                )
-                _sum = stat[statistic_id][0]["sum"]
-                last_stats_time = stat[statistic_id][0]["start"]
-            statistics = []
-            for data in hourly_consumption_data:
-                if data.get("consumption") is None:
-                    continue
-                start = dt_util.parse_datetime(data["from"])
-                if last_stats_time is not None and start <= last_stats_time:
-                    continue
-                _sum += data["consumption"]
-                statistics.append(
-                    StatisticData(
-                        start=start,
-                        state=data["consumption"],
-                        sum=_sum,
-                    )
-                )
-            metadata = StatisticMetaData(
-                has_mean=False,
-                has_sum=True,
-                name=f"{home.name} consumption",
-                source=TIBBER_DOMAIN,
-                statistic_id=statistic_id,
-                unit_of_measurement=ENERGY_KILO_WATT_HOUR,
-            )
-            async_add_external_statistics(self.hass, metadata, statistics)
+            for sensor_type in (
+                "consumption",
+                "totalCost",
+            ):
+                statistic_id = (
+                    f"{TIBBER_DOMAIN}:energy_"
+                    f"{sensor_type.lower()}_"
+                    f"{home.home_id.replace('-', '')}"
+                )
+                last_stats = await self.hass.async_add_executor_job(
+                    get_last_statistics, self.hass, 1, statistic_id, True
+                )
+                if not last_stats:
+                    # First time we insert 5 years of data (if available)
+                    hourly_consumption_data = await home.get_historic_data(
+                        5 * 365 * 24
+                    )
+                    _sum = 0
+                    last_stats_time = None
+                else:
+                    # hourly_consumption_data contains the last 30 days
+                    # of consumption data.
+                    # We update the statistics with the last 30 days
+                    # of data to handle corrections in the data.
+                    hourly_consumption_data = home.hourly_consumption_data
+                    start = dt_util.parse_datetime(
+                        hourly_consumption_data[0]["from"]
+                    ) - timedelta(hours=1)
+                    stat = await self.hass.async_add_executor_job(
+                        statistics_during_period,
+                        self.hass,
+                        start,
+                        None,
+                        [statistic_id],
+                        "hour",
+                        True,
+                    )
+                    _sum = stat[statistic_id][0]["sum"]
+                    last_stats_time = stat[statistic_id][0]["start"]
+                statistics = []
+                for data in hourly_consumption_data:
+                    if data.get(sensor_type) is None:
+                        continue
+                    start = dt_util.parse_datetime(data["from"])
+                    if last_stats_time is not None and start <= last_stats_time:
+                        continue
+                    _sum += data[sensor_type]
+                    statistics.append(
+                        StatisticData(
+                            start=start,
+                            state=data[sensor_type],
+                            sum=_sum,
+                        )
+                    )
+                if sensor_type == "consumption":
+                    unit = ENERGY_KILO_WATT_HOUR
+                else:
+                    unit = home.currency
+                metadata = StatisticMetaData(
+                    has_mean=False,
+                    has_sum=True,
+                    name=f"{home.name} {sensor_type}",
+                    source=TIBBER_DOMAIN,
+                    statistic_id=statistic_id,
+                    unit_of_measurement=unit,
+                )
+                async_add_external_statistics(self.hass, metadata, statistics)
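
The loop above follows the recorder's external-statistics pattern: each hourly value is stored together with a monotonically increasing sum, and on later refreshes only hours newer than the last stored statistic are appended, so re-fetching the 30-day window cannot double count. A standalone sketch of that accumulation step, assuming plain dicts in place of StatisticData and a hypothetical build_statistics helper:

    from datetime import datetime
    from typing import Any

    def build_statistics(
        hourly_data: list[dict[str, Any]],
        sensor_type: str,  # "consumption" or "totalCost", as in the diff
        last_time: datetime | None,  # start of the newest stored statistic, if any
        last_sum: float,  # running sum stored with that statistic
    ) -> list[dict[str, Any]]:
        """Accumulate hourly values into (state, sum) rows, skipping stored hours."""
        statistics = []
        _sum = last_sum
        for data in hourly_data:
            if data.get(sensor_type) is None:
                continue
            start = datetime.fromisoformat(data["from"])
            if last_time is not None and start <= last_time:
                continue  # the recorder already has this hour
            _sum += data[sensor_type]
            statistics.append({"start": start, "state": data[sensor_type], "sum": _sum})
        return statistics

    # Two new hours of cost data on top of a previously stored sum of 10.0.
    rows = build_statistics(
        [
            {"from": "2022-01-09T00:00:00+01:00", "totalCost": 1.5},
            {"from": "2022-01-09T01:00:00+01:00", "totalCost": 2.0},
        ],
        "totalCost",
        last_time=None,
        last_sum=10.0,
    )
    assert [r["sum"] for r in rows] == [11.5, 13.5]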

tests/components/tibber/ (Tibber test module)

@@ -33,7 +33,9 @@ async def test_async_setup_entry(hass):
     def _get_homes():
         tibber_home = AsyncMock()
         tibber_home.name = "Name"
         tibber_home.home_id = "home_id"
+        tibber_home.currency = "NOK"
         tibber_home.get_historic_data.return_value = _CONSUMPTION_DATA_1
         return [tibber_home]
@@ -46,6 +48,7 @@ async def test_async_setup_entry(hass):
     await coordinator._async_update_data()
     await async_wait_recording_done_without_instance(hass)
+    # Validate consumption
     statistic_id = "tibber:energy_consumption_home_id"
     stats = await hass.async_add_executor_job(
@@ -71,3 +74,30 @@ async def test_async_setup_entry(hass):
         _sum += _CONSUMPTION_DATA_1[k]["consumption"]
         assert stat["sum"] == _sum
+
+    # Validate cost
+    statistic_id = "tibber:energy_totalcost_home_id"
+    stats = await hass.async_add_executor_job(
+        statistics_during_period,
+        hass,
+        dt_util.parse_datetime(_CONSUMPTION_DATA_1[0]["from"]),
+        None,
+        [statistic_id],
+        "hour",
+        True,
+    )
+    assert len(stats) == 1
+    assert len(stats[statistic_id]) == 3
+    _sum = 0
+    for k, stat in enumerate(stats[statistic_id]):
+        assert stat["start"] == dt_util.parse_datetime(_CONSUMPTION_DATA_1[k]["from"])
+        assert stat["state"] == _CONSUMPTION_DATA_1[k]["totalCost"]
+        assert stat["mean"] is None
+        assert stat["min"] is None
+        assert stat["max"] is None
+        assert stat["last_reset"] is None
+        _sum += _CONSUMPTION_DATA_1[k]["totalCost"]
+        assert stat["sum"] == _sum