1
mirror of https://github.com/home-assistant/core synced 2024-07-12 07:21:24 +02:00

Add new sensors for energy produced (via Tibber) (#76165)

The new sensors tibber:energy_(production|profit)_<home_id> are like the
existing consumption/totalCost ones, except that they report outgoing
energy instead of incoming.
This commit is contained in:
Erik Flodin 2022-08-23 10:02:58 +02:00 committed by GitHub
parent 680a477009
commit 9e66b30af9
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
3 changed files with 77 additions and 71 deletions

View File

@ -558,17 +558,21 @@ class TibberDataCoordinator(DataUpdateCoordinator):
async def _async_update_data(self):
    """Update data via API.

    Fetches the latest consumption and production data for all active
    homes from the Tibber connection, then inserts the results as
    long-term statistics via _insert_statistics().
    """
    await self._tibber_connection.fetch_consumption_data_active_homes()
    await self._tibber_connection.fetch_production_data_active_homes()
    await self._insert_statistics()
async def _insert_statistics(self):
"""Insert Tibber statistics."""
for home in self._tibber_connection.get_homes():
if not home.hourly_consumption_data:
continue
for sensor_type in (
"consumption",
"totalCost",
):
sensors = []
if home.hourly_consumption_data:
sensors.append(("consumption", False, ENERGY_KILO_WATT_HOUR))
sensors.append(("totalCost", False, home.currency))
if home.hourly_production_data:
sensors.append(("production", True, ENERGY_KILO_WATT_HOUR))
sensors.append(("profit", True, home.currency))
for sensor_type, is_production, unit in sensors:
statistic_id = (
f"{TIBBER_DOMAIN}:energy_"
f"{sensor_type.lower()}_"
@ -581,20 +585,26 @@ class TibberDataCoordinator(DataUpdateCoordinator):
if not last_stats:
# First time we insert 5 years of data (if available)
hourly_consumption_data = await home.get_historic_data(5 * 365 * 24)
hourly_data = await home.get_historic_data(
5 * 365 * 24, production=is_production
)
_sum = 0
last_stats_time = None
else:
# hourly_consumption_data contains the last 30 days
# of consumption data.
# hourly_consumption/production_data contains the last 30 days
# of consumption/production data.
# We update the statistics with the last 30 days
# of data to handle corrections in the data.
hourly_consumption_data = home.hourly_consumption_data
hourly_data = (
home.hourly_production_data
if is_production
else home.hourly_consumption_data
)
start = dt_util.parse_datetime(
hourly_consumption_data[0]["from"]
) - timedelta(hours=1)
start = dt_util.parse_datetime(hourly_data[0]["from"]) - timedelta(
hours=1
)
stat = await get_instance(self.hass).async_add_executor_job(
statistics_during_period,
self.hass,
@ -609,7 +619,7 @@ class TibberDataCoordinator(DataUpdateCoordinator):
statistics = []
for data in hourly_consumption_data:
for data in hourly_data:
if data.get(sensor_type) is None:
continue
@ -627,10 +637,6 @@ class TibberDataCoordinator(DataUpdateCoordinator):
)
)
if sensor_type == "consumption":
unit = ENERGY_KILO_WATT_HOUR
else:
unit = home.currency
metadata = StatisticMetaData(
has_mean=False,
has_sum=True,

View File

@ -20,6 +20,24 @@ CONSUMPTION_DATA_1 = [
},
]
PRODUCTION_DATA_1 = [
{
"from": "2022-01-03T00:00:00.000+01:00",
"profit": 0.1,
"production": 3.1,
},
{
"from": "2022-01-03T01:00:00.000+01:00",
"profit": 0.2,
"production": 3.2,
},
{
"from": "2022-01-03T02:00:00.000+01:00",
"profit": 0.3,
"production": 3.3,
},
]
def mock_get_homes(only_active=True):
"""Return a list of mocked Tibber homes."""
@ -32,5 +50,10 @@ def mock_get_homes(only_active=True):
tibber_home.country = "NO"
tibber_home.last_cons_data_timestamp = dt.datetime(2016, 1, 1, 12, 44, 57)
tibber_home.last_data_timestamp = dt.datetime(2016, 1, 1, 12, 48, 57)
tibber_home.get_historic_data.return_value = CONSUMPTION_DATA_1
def get_historic_data(n_data, resolution="HOURLY", production=False):
return PRODUCTION_DATA_1 if production else CONSUMPTION_DATA_1
tibber_home.get_historic_data.side_effect = get_historic_data
return [tibber_home]

View File

@ -5,7 +5,7 @@ from homeassistant.components.recorder.statistics import statistics_during_perio
from homeassistant.components.tibber.sensor import TibberDataCoordinator
from homeassistant.util import dt as dt_util
from .test_common import CONSUMPTION_DATA_1, mock_get_homes
from .test_common import CONSUMPTION_DATA_1, PRODUCTION_DATA_1, mock_get_homes
from tests.components.recorder.common import async_wait_recording_done
@ -15,62 +15,39 @@ async def test_async_setup_entry(hass, recorder_mock):
tibber_connection = AsyncMock()
tibber_connection.name = "tibber"
tibber_connection.fetch_consumption_data_active_homes.return_value = None
tibber_connection.fetch_production_data_active_homes.return_value = None
tibber_connection.get_homes = mock_get_homes
coordinator = TibberDataCoordinator(hass, tibber_connection)
await coordinator._async_update_data()
await async_wait_recording_done(hass)
# Validate consumption
statistic_id = "tibber:energy_consumption_home_id"
for (statistic_id, data, key) in (
("tibber:energy_consumption_home_id", CONSUMPTION_DATA_1, "consumption"),
("tibber:energy_totalcost_home_id", CONSUMPTION_DATA_1, "totalCost"),
("tibber:energy_production_home_id", PRODUCTION_DATA_1, "production"),
("tibber:energy_profit_home_id", PRODUCTION_DATA_1, "profit"),
):
stats = await hass.async_add_executor_job(
statistics_during_period,
hass,
dt_util.parse_datetime(data[0]["from"]),
None,
[statistic_id],
"hour",
True,
)
stats = await hass.async_add_executor_job(
statistics_during_period,
hass,
dt_util.parse_datetime(CONSUMPTION_DATA_1[0]["from"]),
None,
[statistic_id],
"hour",
True,
)
assert len(stats) == 1
assert len(stats[statistic_id]) == 3
_sum = 0
for k, stat in enumerate(stats[statistic_id]):
assert stat["start"] == dt_util.parse_datetime(data[k]["from"])
assert stat["state"] == data[k][key]
assert stat["mean"] is None
assert stat["min"] is None
assert stat["max"] is None
assert stat["last_reset"] is None
assert len(stats) == 1
assert len(stats[statistic_id]) == 3
_sum = 0
for k, stat in enumerate(stats[statistic_id]):
assert stat["start"] == dt_util.parse_datetime(CONSUMPTION_DATA_1[k]["from"])
assert stat["state"] == CONSUMPTION_DATA_1[k]["consumption"]
assert stat["mean"] is None
assert stat["min"] is None
assert stat["max"] is None
assert stat["last_reset"] is None
_sum += CONSUMPTION_DATA_1[k]["consumption"]
assert stat["sum"] == _sum
# Validate cost
statistic_id = "tibber:energy_totalcost_home_id"
stats = await hass.async_add_executor_job(
statistics_during_period,
hass,
dt_util.parse_datetime(CONSUMPTION_DATA_1[0]["from"]),
None,
[statistic_id],
"hour",
True,
)
assert len(stats) == 1
assert len(stats[statistic_id]) == 3
_sum = 0
for k, stat in enumerate(stats[statistic_id]):
assert stat["start"] == dt_util.parse_datetime(CONSUMPTION_DATA_1[k]["from"])
assert stat["state"] == CONSUMPTION_DATA_1[k]["totalCost"]
assert stat["mean"] is None
assert stat["min"] is None
assert stat["max"] is None
assert stat["last_reset"] is None
_sum += CONSUMPTION_DATA_1[k]["totalCost"]
assert stat["sum"] == _sum
_sum += data[k][key]
assert stat["sum"] == _sum