commit efe91815fb
Franck Nijhof, 2024-04-12 13:20:48 +02:00 (committed by GitHub)
GPG Key ID: B5690EEEBB952194 (no known key found for this signature in database)
44 changed files with 486 additions and 102 deletions


@ -2,10 +2,10 @@
from __future__ import annotations
import logging
from typing import Any
from aranet4.client import Aranet4Advertisement, Version as AranetVersion
from bluetooth_data_tools import human_readable_name
import voluptuous as vol
from homeassistant.components.bluetooth import (
@ -18,11 +18,15 @@ from homeassistant.data_entry_flow import AbortFlow
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
MIN_VERSION = AranetVersion(1, 2, 0)
def _title(discovery_info: BluetoothServiceInfoBleak) -> str:
return discovery_info.device.name or human_readable_name(
None, "Aranet", discovery_info.address
)
class AranetConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Aranet."""
@ -61,11 +65,8 @@ class AranetConfigFlow(ConfigFlow, domain=DOMAIN):
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Confirm discovery."""
assert self._discovered_device is not None
adv = self._discovered_device
assert self._discovery_info is not None
discovery_info = self._discovery_info
title = adv.readings.name if adv.readings else discovery_info.name
title = _title(self._discovery_info)
if user_input is not None:
return self.async_create_entry(title=title, data={})
@ -101,10 +102,7 @@ class AranetConfigFlow(ConfigFlow, domain=DOMAIN):
discovery_info.device, discovery_info.advertisement
)
if adv.manufacturer_data:
self._discovered_devices[address] = (
adv.readings.name if adv.readings else discovery_info.name,
adv,
)
self._discovered_devices[address] = (_title(discovery_info), adv)
if not self._discovered_devices:
return self.async_abort(reason="no_devices_found")
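
For context, a minimal sketch of what the new `_title()` fallback produces. The stand-in below is not the integration code and does not use `bluetooth_data_tools`; it only mimics the "Aranet (EEFF)" style title that the tests added in this commit expect, so `_short_address` and `title_for` are hypothetical helpers.

```python
def _short_address(address: str) -> str:
    """Last two octets of the MAC, upper-cased, e.g. 'EEFF'."""
    return address.replace(":", "")[-4:].upper()

def title_for(name: str | None, address: str) -> str:
    """Prefer the advertised device name; otherwise build 'Aranet (XXYY)'."""
    return name or f"Aranet ({_short_address(address)})"

assert title_for("Aranet4 12345", "aa:bb:cc:dd:ee:ff") == "Aranet4 12345"
assert title_for(None, "aa:bb:cc:dd:ee:ff") == "Aranet (EEFF)"
```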


@ -19,5 +19,5 @@
"documentation": "https://www.home-assistant.io/integrations/aranet",
"integration_type": "device",
"iot_class": "local_push",
"requirements": ["aranet4==2.2.2"]
"requirements": ["aranet4==2.3.3"]
}


@ -812,6 +812,22 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
"""Log helper callback."""
self._logger.log(level, "%s %s", msg, self.name, **kwargs)
async def _async_trigger_if_enabled(
self,
run_variables: dict[str, Any],
context: Context | None = None,
skip_condition: bool = False,
) -> ScriptRunResult | None:
"""Trigger automation if enabled.
If the trigger starts but has a delay, the automation will be triggered
when the delay has passed, so we need to make sure it's still enabled before
executing the action.
"""
if not self._is_enabled:
return None
return await self.async_trigger(run_variables, context, skip_condition)
async def _async_attach_triggers(
self, home_assistant_start: bool
) -> Callable[[], None] | None:
@ -835,7 +851,7 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
return await async_initialize_triggers(
self.hass,
self._trigger_config,
self.async_trigger,
self._async_trigger_if_enabled,
DOMAIN,
str(self.name),
self._log_callback,
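
A minimal, self-contained sketch of the race this guards against, assuming nothing about Home Assistant's internals: the delayed callback re-checks an enabled flag at fire time, so a turn-off that lands during the delay suppresses the action. `DelayedTrigger` is a hypothetical stand-in, not the `AutomationEntity` code.

```python
import asyncio

class DelayedTrigger:
    """Hypothetical stand-in for an automation with a delayed ("for:") trigger."""

    def __init__(self) -> None:
        self.enabled = True
        self.fired = 0

    async def _fire_if_enabled(self) -> None:
        # Mirrors _async_trigger_if_enabled: re-check at fire time, not attach time.
        if not self.enabled:
            return
        self.fired += 1

    async def trigger_after(self, delay: float) -> None:
        await asyncio.sleep(delay)        # the "for:" delay
        await self._fire_if_enabled()

async def main() -> None:
    trigger = DelayedTrigger()
    task = asyncio.create_task(trigger.trigger_after(0.05))
    trigger.enabled = False               # turn_off arrives while the delay is pending
    await task
    assert trigger.fired == 0             # the action is skipped

asyncio.run(main())
```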


@ -15,7 +15,7 @@
"quality_scale": "internal",
"requirements": [
"bleak==0.21.1",
"bleak-retry-connector==3.4.0",
"bleak-retry-connector==3.5.0",
"bluetooth-adapters==0.18.0",
"bluetooth-auto-recovery==1.4.0",
"bluetooth-data-tools==1.19.0",


@ -34,6 +34,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
username=entry.data[CONF_USERNAME],
password=entry.data[CONF_PASSWORD],
ssl_verify_cert=entry.data[CONF_VERIFY_SSL],
timeout=10,
)
try:
await hass.async_add_executor_job(client.principal)


@ -7,5 +7,5 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"quality_scale": "platinum",
"requirements": ["forecast-solar==3.0.0"]
"requirements": ["forecast-solar==3.1.0"]
}


@ -20,5 +20,5 @@
"documentation": "https://www.home-assistant.io/integrations/frontend",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["home-assistant-frontend==20240404.1"]
"requirements": ["home-assistant-frontend==20240404.2"]
}


@ -112,8 +112,9 @@ class AsyncConfigEntryAuth:
raise GoogleTasksApiError(
f"Google Tasks API responded with error ({exception.status_code})"
) from exception
data = json.loads(response)
_raise_if_error(data)
if response:
data = json.loads(response)
_raise_if_error(data)
for task_id in task_ids:
batch.add(
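
A hedged sketch of the guard added above, using only the standard library: delete calls in a batch typically come back with an empty body, so `json.loads` is only attempted when a body is actually present. `parse_batch_body` is an illustrative helper, not the integration's API.

```python
import json

def parse_batch_body(body: bytes | str | None) -> dict | list | None:
    """Decode a batch sub-response only when it actually carries a body."""
    if not body:                 # None or empty: nothing to decode, nothing to validate
        return None
    return json.loads(body)

assert parse_batch_body(None) is None
assert parse_batch_body(b"") is None
assert parse_batch_body('{"status": "completed"}') == {"status": "completed"}
```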


@ -196,7 +196,7 @@ SCHEMA_BACKUP_PARTIAL = SCHEMA_BACKUP_FULL.extend(
{
vol.Optional(ATTR_HOMEASSISTANT): cv.boolean,
vol.Optional(ATTR_FOLDERS): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(ATTR_ADDONS): vol.All(cv.ensure_list, [cv.slug]),
vol.Optional(ATTR_ADDONS): vol.All(cv.ensure_list, [VALID_ADDON_SLUG]),
}
)
@ -211,7 +211,7 @@ SCHEMA_RESTORE_PARTIAL = SCHEMA_RESTORE_FULL.extend(
{
vol.Optional(ATTR_HOMEASSISTANT): cv.boolean,
vol.Optional(ATTR_FOLDERS): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(ATTR_ADDONS): vol.All(cv.ensure_list, [cv.slug]),
vol.Optional(ATTR_ADDONS): vol.All(cv.ensure_list, [VALID_ADDON_SLUG]),
}
)


@ -22,7 +22,7 @@ from .const import (
from .handler import async_apply_suggestion
from .issues import Issue, Suggestion
SUGGESTION_CONFIRMATION_REQUIRED = {"system_execute_reboot"}
SUGGESTION_CONFIRMATION_REQUIRED = {"system_adopt_data_disk", "system_execute_reboot"}
EXTRA_PLACEHOLDERS = {
"issue_mount_mount_failed": {


@ -51,8 +51,15 @@
"title": "Multiple data disks detected",
"fix_flow": {
"step": {
"system_rename_data_disk": {
"description": "`{reference}` is a filesystem with the name hassos-data and is not the active data disk. This can cause Home Assistant to choose the wrong data disk at system reboot.\n\nUse the fix option to rename the filesystem to prevent this. Alternatively you can move the data disk to the drive (overwriting its contents) or remove the drive from the system."
"fix_menu": {
"description": "`{reference}` is a filesystem with the name hassos-data and is not the active data disk. This can cause Home Assistant to choose the wrong data disk at system reboot.\n\nUse the 'Rename' option to rename the filesystem to prevent this. Use the 'Adopt' option to make that your data disk and rename the existing one. Alternatively you can move the data disk to the drive (overwriting its contents) or remove the drive from the system.",
"menu_options": {
"system_rename_data_disk": "Rename",
"system_adopt_data_disk": "Adopt"
}
},
"system_adopt_data_disk": {
"description": "This fix will initiate a system reboot which will make Home Assistant and all the Add-ons inaccessible for a brief period. After the reboot `{reference}` will be the data disk of Home Assistant and your existing data disk will be renamed and ignored."
}
},
"abort": {


@ -2,15 +2,36 @@
from __future__ import annotations
from functools import partial
from holidays import country_holidays
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.const import CONF_COUNTRY, Platform
from homeassistant.core import HomeAssistant
from homeassistant.setup import SetupPhases, async_pause_setup
from .const import CONF_PROVINCE
PLATFORMS: list[Platform] = [Platform.CALENDAR]
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up Holiday from a config entry."""
country: str = entry.data[CONF_COUNTRY]
province: str | None = entry.data.get(CONF_PROVINCE)
# We only import here to ensure that it's not imported later
# in the event loop since the platforms will call country_holidays
# which loads python code from disk.
with async_pause_setup(hass, SetupPhases.WAIT_IMPORT_PACKAGES):
# import executor job is used here because multiple integrations use
# the holidays library and it is not thread safe to import it in parallel
# https://github.com/python/cpython/issues/83065
await hass.async_add_import_executor_job(
partial(country_holidays, country, subdiv=province)
)
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
return True
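
The idea behind the change, sketched without Home Assistant APIs: funnel the blocking import through a single-threaded executor so the event loop stays free and the import is never attempted from two threads at once (see the linked CPython issue). `import_module_off_loop` below is a hypothetical helper, not `async_add_import_executor_job`.

```python
import asyncio
import importlib
from concurrent.futures import ThreadPoolExecutor

# A single worker guarantees the import never runs in two threads at once.
_IMPORT_EXECUTOR = ThreadPoolExecutor(max_workers=1)

async def import_module_off_loop(name: str):
    """Import a module in the dedicated executor instead of on the event loop."""
    loop = asyncio.get_running_loop()
    return await loop.run_in_executor(_IMPORT_EXECUTOR, importlib.import_module, name)

async def main() -> None:
    json_mod = await import_module_off_loop("json")
    assert json_mod.loads("[1, 2]") == [1, 2]

asyncio.run(main())
```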


@ -7,5 +7,5 @@
"iot_class": "calculated",
"loggers": ["yt_dlp"],
"quality_scale": "internal",
"requirements": ["yt-dlp==2024.03.10"]
"requirements": ["yt-dlp==2024.04.09"]
}


@ -440,6 +440,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
async def async_reset_platform(hass: HomeAssistant, integration_name: str) -> None:
"""Release modbus resources."""
if DOMAIN not in hass.data:
_LOGGER.error("Modbus cannot reload, because it was never loaded")
return
_LOGGER.info("Modbus reloading")
hubs = hass.data[DOMAIN]
for name in hubs:


@ -6,5 +6,5 @@
"iot_class": "local_polling",
"loggers": ["pymodbus"],
"quality_scale": "platinum",
"requirements": ["pymodbus==3.6.6"]
"requirements": ["pymodbus==3.6.7"]
}


@ -7,5 +7,5 @@
"documentation": "https://www.home-assistant.io/integrations/neato",
"iot_class": "cloud_polling",
"loggers": ["pybotvac"],
"requirements": ["pybotvac==0.0.24"]
"requirements": ["pybotvac==0.0.25"]
}


@ -123,7 +123,8 @@ class RainBirdSwitch(CoordinatorEntity[RainbirdUpdateCoordinator], SwitchEntity)
# The device reflects the old state for a few moments. Update the
# state manually and trigger a refresh after a short debounced delay.
self.coordinator.data.active_zones.remove(self._zone)
if self.is_on:
self.coordinator.data.active_zones.remove(self._zone)
self.async_write_ha_state()
await self.coordinator.async_request_refresh()
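
A minimal sketch of the guard, assuming `active_zones` is a plain set of zone numbers as the surrounding code suggests: removing a zone that is not present would raise, so local state is only mutated when the switch is actually on. `turn_off` here is illustrative, not the entity method.

```python
active_zones: set[int] = {1, 2}          # zones currently running

def turn_off(zone: int) -> None:
    """Only mutate local state when the zone is on (mirrors `if self.is_on:`)."""
    if zone in active_zones:
        active_zones.remove(zone)

turn_off(3)                              # already off: no KeyError
turn_off(1)
assert active_zones == {2}
```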


@ -38,7 +38,9 @@ from homeassistant.helpers.storage import Store
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import (
CONF_CONCURRENCY,
DATA_COORDINATOR,
DEFAULT_CONCURRENCY,
DEFAULT_SCAN_INTERVAL,
DOMAIN,
EVENTS_COORDINATOR,
@ -85,7 +87,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def _async_setup_local_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
data = entry.data
risco = RiscoLocal(data[CONF_HOST], data[CONF_PORT], data[CONF_PIN])
concurrency = entry.options.get(CONF_CONCURRENCY, DEFAULT_CONCURRENCY)
risco = RiscoLocal(
data[CONF_HOST], data[CONF_PORT], data[CONF_PIN], concurrency=concurrency
)
try:
await risco.connect()
@ -96,7 +101,7 @@ async def _async_setup_local_entry(hass: HomeAssistant, entry: ConfigEntry) -> b
return False
async def _error(error: Exception) -> None:
_LOGGER.error("Error in Risco library: %s", error)
_LOGGER.error("Error in Risco library", exc_info=error)
entry.async_on_unload(risco.add_error_handler(_error))
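
A small sketch of the logging change: passing the exception through `exc_info=` makes the standard logger emit the full traceback rather than only the message text, which is the point of dropping the `%s` interpolation. The logger name below is illustrative.

```python
import logging

logging.basicConfig(level=logging.ERROR)
_LOGGER = logging.getLogger("risco.sketch")    # illustrative logger name

try:
    raise ValueError("boom")
except ValueError as err:
    # Old style: only the message text ends up in the log record.
    _LOGGER.error("Error in Risco library: %s", err)
    # New style: exc_info attaches the exception, so the traceback is logged too.
    _LOGGER.error("Error in Risco library", exc_info=err)
```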


@ -35,8 +35,10 @@ from .const import (
CONF_CODE_ARM_REQUIRED,
CONF_CODE_DISARM_REQUIRED,
CONF_COMMUNICATION_DELAY,
CONF_CONCURRENCY,
CONF_HA_STATES_TO_RISCO,
CONF_RISCO_STATES_TO_HA,
DEFAULT_ADVANCED_OPTIONS,
DEFAULT_OPTIONS,
DOMAIN,
MAX_COMMUNICATION_DELAY,
@ -225,11 +227,8 @@ class RiscoOptionsFlowHandler(OptionsFlow):
self._data = {**DEFAULT_OPTIONS, **config_entry.options}
def _options_schema(self) -> vol.Schema:
return vol.Schema(
schema = vol.Schema(
{
vol.Required(
CONF_SCAN_INTERVAL, default=self._data[CONF_SCAN_INTERVAL]
): int,
vol.Required(
CONF_CODE_ARM_REQUIRED, default=self._data[CONF_CODE_ARM_REQUIRED]
): bool,
@ -239,6 +238,19 @@ class RiscoOptionsFlowHandler(OptionsFlow):
): bool,
}
)
if self.show_advanced_options:
self._data = {**DEFAULT_ADVANCED_OPTIONS, **self._data}
schema = schema.extend(
{
vol.Required(
CONF_SCAN_INTERVAL, default=self._data[CONF_SCAN_INTERVAL]
): int,
vol.Required(
CONF_CONCURRENCY, default=self._data[CONF_CONCURRENCY]
): int,
}
)
return schema
async def async_step_init(
self, user_input: dict[str, Any] | None = None
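
A hedged sketch of the schema pattern used above: build the base options schema, then `extend()` it with the advanced-only fields when advanced mode is shown. The option names and defaults mirror the diff; the `options_schema` function itself is illustrative, not the flow handler's method.

```python
import voluptuous as vol

BASE = vol.Schema(
    {
        vol.Required("code_arm_required", default=False): bool,
        vol.Required("code_disarm_required", default=False): bool,
    }
)

def options_schema(show_advanced: bool) -> vol.Schema:
    """Return the base schema, extended with advanced-only options on demand."""
    if not show_advanced:
        return BASE
    return BASE.extend(
        {
            vol.Required("scan_interval", default=30): int,
            vol.Required("concurrency", default=4): int,
        }
    )

assert "concurrency" in options_schema(True).schema
assert "concurrency" not in options_schema(False).schema
```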


@ -14,6 +14,7 @@ DATA_COORDINATOR = "risco"
EVENTS_COORDINATOR = "risco_events"
DEFAULT_SCAN_INTERVAL = 30
DEFAULT_CONCURRENCY = 4
TYPE_LOCAL = "local"
@ -25,6 +26,7 @@ CONF_CODE_DISARM_REQUIRED = "code_disarm_required"
CONF_RISCO_STATES_TO_HA = "risco_states_to_ha"
CONF_HA_STATES_TO_RISCO = "ha_states_to_risco"
CONF_COMMUNICATION_DELAY = "communication_delay"
CONF_CONCURRENCY = "concurrency"
RISCO_GROUPS = ["A", "B", "C", "D"]
RISCO_ARM = "arm"
@ -44,9 +46,13 @@ DEFAULT_HA_STATES_TO_RISCO = {
}
DEFAULT_OPTIONS = {
CONF_SCAN_INTERVAL: DEFAULT_SCAN_INTERVAL,
CONF_CODE_ARM_REQUIRED: False,
CONF_CODE_DISARM_REQUIRED: False,
CONF_RISCO_STATES_TO_HA: DEFAULT_RISCO_STATES_TO_HA,
CONF_HA_STATES_TO_RISCO: DEFAULT_HA_STATES_TO_RISCO,
}
DEFAULT_ADVANCED_OPTIONS = {
CONF_SCAN_INTERVAL: DEFAULT_SCAN_INTERVAL,
CONF_CONCURRENCY: DEFAULT_CONCURRENCY,
}


@ -36,7 +36,8 @@
"init": {
"title": "Configure options",
"data": {
"scan_interval": "How often to poll Risco (in seconds)",
"scan_interval": "How often to poll Risco Cloud (in seconds)",
"concurrency": "Maximum concurrent requests in Risco local",
"code_arm_required": "Require PIN to arm",
"code_disarm_required": "Require PIN to disarm"
}


@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "cloud_push",
"loggers": ["whirlpool"],
"requirements": ["whirlpool-sixth-sense==0.18.7"]
"requirements": ["whirlpool-sixth-sense==0.18.8"]
}


@ -11,6 +11,7 @@ from homeassistant.const import CONF_COUNTRY, CONF_LANGUAGE
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryError
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
from homeassistant.setup import SetupPhases, async_pause_setup
from .const import CONF_PROVINCE, DOMAIN, PLATFORMS
@ -23,7 +24,11 @@ async def _async_validate_country_and_province(
if not country:
return
try:
await hass.async_add_executor_job(country_holidays, country)
with async_pause_setup(hass, SetupPhases.WAIT_IMPORT_PACKAGES):
# import executor job is used here because multiple integrations use
# the holidays library and it is not thread safe to import it in parallel
# https://github.com/python/cpython/issues/83065
await hass.async_add_import_executor_job(country_holidays, country)
except NotImplementedError as ex:
async_create_issue(
hass,
@ -41,9 +46,13 @@ async def _async_validate_country_and_province(
if not province:
return
try:
await hass.async_add_executor_job(
partial(country_holidays, country, subdiv=province)
)
with async_pause_setup(hass, SetupPhases.WAIT_IMPORT_PACKAGES):
# import executor job is used here because multiple integrations use
# the holidays library and it is not thread safe to import it in parallel
# https://github.com/python/cpython/issues/83065
await hass.async_add_import_executor_job(
partial(country_holidays, country, subdiv=province)
)
except NotImplementedError as ex:
async_create_issue(
hass,
@ -73,9 +82,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
await _async_validate_country_and_province(hass, entry, country, province)
if country and CONF_LANGUAGE not in entry.options:
cls: HolidayBase = await hass.async_add_executor_job(
partial(country_holidays, country, subdiv=province)
)
with async_pause_setup(hass, SetupPhases.WAIT_IMPORT_PACKAGES):
# import executor job is used here because multiple integrations use
# the holidays library and it is not thread safe to import it in parallel
# https://github.com/python/cpython/issues/83065
cls: HolidayBase = await hass.async_add_import_executor_job(
partial(country_holidays, country, subdiv=province)
)
default_language = cls.default_language
new_options = entry.options.copy()
new_options[CONF_LANGUAGE] = default_language


@ -24,7 +24,7 @@
"bellows==0.38.1",
"pyserial==3.5",
"pyserial-asyncio==0.6",
"zha-quirks==0.0.113",
"zha-quirks==0.0.114",
"zigpy-deconz==0.23.1",
"zigpy==0.63.5",
"zigpy-xbee==0.20.1",


@ -18,7 +18,7 @@ from .util.signal_type import SignalType
APPLICATION_NAME: Final = "HomeAssistant"
MAJOR_VERSION: Final = 2024
MINOR_VERSION: Final = 4
PATCH_VERSION: Final = "2"
PATCH_VERSION: Final = "3"
__short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__: Final = f"{__short_version__}.{PATCH_VERSION}"
REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0)


@ -4,7 +4,7 @@ aiodhcpwatcher==1.0.0
aiodiscover==2.0.0
aiohttp-fast-url-dispatcher==0.3.0
aiohttp-zlib-ng==0.3.1
aiohttp==3.9.3
aiohttp==3.9.4
aiohttp_cors==0.7.0
astral==2.2
async-interrupt==1.1.1
@ -13,7 +13,7 @@ atomicwrites-homeassistant==1.4.1
attrs==23.2.0
awesomeversion==24.2.0
bcrypt==4.1.2
bleak-retry-connector==3.4.0
bleak-retry-connector==3.5.0
bleak==0.21.1
bluetooth-adapters==0.18.0
bluetooth-auto-recovery==1.4.0
@ -30,7 +30,7 @@ habluetooth==2.4.2
hass-nabucasa==0.78.0
hassil==1.6.1
home-assistant-bluetooth==1.12.0
home-assistant-frontend==20240404.1
home-assistant-frontend==20240404.2
home-assistant-intents==2024.4.3
httpx==0.27.0
ifaddr==0.2.0


@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
[project]
name = "homeassistant"
version = "2024.4.2"
version = "2024.4.3"
license = {text = "Apache-2.0"}
description = "Open-source home automation platform running on Python 3."
readme = "README.rst"
@ -23,7 +23,7 @@ classifiers = [
]
requires-python = ">=3.12.0"
dependencies = [
"aiohttp==3.9.3",
"aiohttp==3.9.4",
"aiohttp_cors==0.7.0",
"aiohttp-fast-url-dispatcher==0.3.0",
"aiohttp-zlib-ng==0.3.1",


@ -3,7 +3,7 @@
-c homeassistant/package_constraints.txt
# Home Assistant Core
aiohttp==3.9.3
aiohttp==3.9.4
aiohttp_cors==0.7.0
aiohttp-fast-url-dispatcher==0.3.0
aiohttp-zlib-ng==0.3.1


@ -467,7 +467,7 @@ aprslib==0.7.0
aqualogic==2.6
# homeassistant.components.aranet
aranet4==2.2.2
aranet4==2.3.3
# homeassistant.components.arcam_fmj
arcam-fmj==1.4.0
@ -556,7 +556,7 @@ bizkaibus==0.1.1
bleak-esphome==1.0.0
# homeassistant.components.bluetooth
bleak-retry-connector==3.4.0
bleak-retry-connector==3.5.0
# homeassistant.components.bluetooth
bleak==0.21.1
@ -883,7 +883,7 @@ fnv-hash-fast==0.5.0
foobot_async==1.0.0
# homeassistant.components.forecast_solar
forecast-solar==3.0.0
forecast-solar==3.1.0
# homeassistant.components.fortios
fortiosapi==1.0.5
@ -1077,7 +1077,7 @@ hole==0.8.0
holidays==0.46
# homeassistant.components.frontend
home-assistant-frontend==20240404.1
home-assistant-frontend==20240404.2
# homeassistant.components.conversation
home-assistant-intents==2024.4.3
@ -1718,7 +1718,7 @@ pybbox==0.0.5-alpha
pyblackbird==0.6
# homeassistant.components.neato
pybotvac==0.0.24
pybotvac==0.0.25
# homeassistant.components.braviatv
pybravia==0.3.3
@ -1973,7 +1973,7 @@ pymitv==1.4.3
pymochad==0.2.0
# homeassistant.components.modbus
pymodbus==3.6.6
pymodbus==3.6.7
# homeassistant.components.monoprice
pymonoprice==0.4
@ -2850,7 +2850,7 @@ webmin-xmlrpc==0.0.2
webrtc-noise-gain==1.2.3
# homeassistant.components.whirlpool
whirlpool-sixth-sense==0.18.7
whirlpool-sixth-sense==0.18.8
# homeassistant.components.whois
whois==0.9.27
@ -2919,7 +2919,7 @@ youless-api==1.0.1
youtubeaio==1.1.5
# homeassistant.components.media_extractor
yt-dlp==2024.03.10
yt-dlp==2024.04.09
# homeassistant.components.zamg
zamg==0.3.6
@ -2934,7 +2934,7 @@ zeroconf==0.132.0
zeversolar==0.3.1
# homeassistant.components.zha
zha-quirks==0.0.113
zha-quirks==0.0.114
# homeassistant.components.zhong_hong
zhong-hong-hvac==1.0.12


@ -428,7 +428,7 @@ apprise==1.7.4
aprslib==0.7.0
# homeassistant.components.aranet
aranet4==2.2.2
aranet4==2.3.3
# homeassistant.components.arcam_fmj
arcam-fmj==1.4.0
@ -478,7 +478,7 @@ bimmer-connected[china]==0.14.6
bleak-esphome==1.0.0
# homeassistant.components.bluetooth
bleak-retry-connector==3.4.0
bleak-retry-connector==3.5.0
# homeassistant.components.bluetooth
bleak==0.21.1
@ -721,7 +721,7 @@ fnv-hash-fast==0.5.0
foobot_async==1.0.0
# homeassistant.components.forecast_solar
forecast-solar==3.0.0
forecast-solar==3.1.0
# homeassistant.components.freebox
freebox-api==1.1.0
@ -876,7 +876,7 @@ hole==0.8.0
holidays==0.46
# homeassistant.components.frontend
home-assistant-frontend==20240404.1
home-assistant-frontend==20240404.2
# homeassistant.components.conversation
home-assistant-intents==2024.4.3
@ -1350,7 +1350,7 @@ pybalboa==1.0.1
pyblackbird==0.6
# homeassistant.components.neato
pybotvac==0.0.24
pybotvac==0.0.25
# homeassistant.components.braviatv
pybravia==0.3.3
@ -1533,7 +1533,7 @@ pymeteoclimatic==0.1.0
pymochad==0.2.0
# homeassistant.components.modbus
pymodbus==3.6.6
pymodbus==3.6.7
# homeassistant.components.monoprice
pymonoprice==0.4
@ -2197,7 +2197,7 @@ webmin-xmlrpc==0.0.2
webrtc-noise-gain==1.2.3
# homeassistant.components.whirlpool
whirlpool-sixth-sense==0.18.7
whirlpool-sixth-sense==0.18.8
# homeassistant.components.whois
whois==0.9.27
@ -2257,7 +2257,7 @@ youless-api==1.0.1
youtubeaio==1.1.5
# homeassistant.components.media_extractor
yt-dlp==2024.03.10
yt-dlp==2024.04.09
# homeassistant.components.zamg
zamg==0.3.6
@ -2269,7 +2269,7 @@ zeroconf==0.132.0
zeversolar==0.3.1
# homeassistant.components.zha
zha-quirks==0.0.113
zha-quirks==0.0.114
# homeassistant.components.zha
zigpy-deconz==0.23.1


@ -58,6 +58,14 @@ VALID_DATA_SERVICE_INFO = fake_service_info(
},
)
VALID_DATA_SERVICE_INFO_WITH_NO_NAME = fake_service_info(
None,
"0000fce0-0000-1000-8000-00805f9b34fb",
{
1794: b'\x21\x00\x02\x01\x00\x00\x00\x01\x8a\x02\xa5\x01\xb1&"Y\x01,\x01\xe8\x00\x88'
},
)
VALID_ARANET2_DATA_SERVICE_INFO = fake_service_info(
"Aranet2 12345",
"0000fce0-0000-1000-8000-00805f9b34fb",


@ -12,6 +12,7 @@ from . import (
NOT_ARANET4_SERVICE_INFO,
OLD_FIRMWARE_SERVICE_INFO,
VALID_DATA_SERVICE_INFO,
VALID_DATA_SERVICE_INFO_WITH_NO_NAME,
)
from tests.common import MockConfigEntry
@ -36,6 +37,25 @@ async def test_async_step_bluetooth_valid_device(hass: HomeAssistant) -> None:
assert result2["result"].unique_id == "aa:bb:cc:dd:ee:ff"
async def test_async_step_bluetooth_device_without_name(hass: HomeAssistant) -> None:
"""Test discovery via bluetooth with a valid device that has no name."""
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_BLUETOOTH},
data=VALID_DATA_SERVICE_INFO_WITH_NO_NAME,
)
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "bluetooth_confirm"
with patch("homeassistant.components.aranet.async_setup_entry", return_value=True):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={}
)
assert result2["type"] is FlowResultType.CREATE_ENTRY
assert result2["title"] == "Aranet (EEFF)"
assert result2["data"] == {}
assert result2["result"].unique_id == "aa:bb:cc:dd:ee:ff"
async def test_async_step_bluetooth_not_aranet4(hass: HomeAssistant) -> None:
"""Test that we reject discovery via Bluetooth for an unrelated device."""
result = await hass.config_entries.flow.async_init(


@ -2650,3 +2650,83 @@ def test_deprecated_constants(
import_and_test_deprecated_constant(
caplog, automation, constant_name, replacement.__name__, replacement, "2025.1"
)
async def test_automation_turns_off_other_automation(
hass: HomeAssistant, caplog: pytest.LogCaptureFixture
) -> None:
"""Test an automation that turns off another automation."""
hass.set_state(CoreState.not_running)
calls = async_mock_service(hass, "persistent_notification", "create")
hass.states.async_set("binary_sensor.presence", "on")
await hass.async_block_till_done()
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: [
{
"trigger": {
"platform": "state",
"entity_id": "binary_sensor.presence",
"from": "on",
},
"action": {
"service": "automation.turn_off",
"target": {
"entity_id": "automation.automation_1",
},
"data": {
"stop_actions": True,
},
},
"id": "automation_0",
"mode": "single",
},
{
"trigger": {
"platform": "state",
"entity_id": "binary_sensor.presence",
"from": "on",
"for": {
"hours": 0,
"minutes": 0,
"seconds": 5,
},
},
"action": {
"service": "persistent_notification.create",
"metadata": {},
"data": {
"message": "Test race",
},
},
"id": "automation_1",
"mode": "single",
},
]
},
)
await hass.async_start()
await hass.async_block_till_done()
hass.states.async_set("binary_sensor.presence", "off")
await hass.async_block_till_done()
assert len(calls) == 0
async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=5))
await hass.async_block_till_done()
assert len(calls) == 0
await hass.services.async_call(
"automation",
"turn_on",
{"entity_id": "automation.automation_1"},
blocking=True,
)
hass.states.async_set("binary_sensor.presence", "off")
await hass.async_block_till_done()
assert len(calls) == 0
async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=5))
await hass.async_block_till_done()
assert len(calls) == 0


@ -90,9 +90,9 @@ async def test_upload_large_file(
file_upload.TEMP_DIR_NAME + f"-{getrandbits(10):03x}",
),
patch(
# Patch one megabyte to 8 bytes to prevent having to use big files in tests
# Patch one megabyte to 50 bytes to prevent having to use big files in tests
"homeassistant.components.file_upload.ONE_MEGABYTE",
8,
50,
),
):
res = await client.post("/api/file_upload", data={"file": large_file_io})
@ -152,9 +152,9 @@ async def test_upload_large_file_fails(
file_upload.TEMP_DIR_NAME + f"-{getrandbits(10):03x}",
),
patch(
# Patch one megabyte to 8 bytes to prevent having to use big files in tests
# Patch one megabyte to 50 bytes to prevent having to use big files in tests
"homeassistant.components.file_upload.ONE_MEGABYTE",
8,
50,
),
patch(
"homeassistant.components.file_upload.Path.open", return_value=_mock_open()

View File

@ -156,7 +156,7 @@ def create_response_object(api_response: dict | list) -> tuple[Response, bytes]:
def create_batch_response_object(
content_ids: list[str], api_responses: list[dict | list | Response]
content_ids: list[str], api_responses: list[dict | list | Response | None]
) -> tuple[Response, bytes]:
"""Create a batch response in the multipart/mixed format."""
assert len(api_responses) == len(content_ids)
@ -166,7 +166,7 @@ def create_batch_response_object(
body = ""
if isinstance(api_response, Response):
status = api_response.status
else:
elif api_response is not None:
body = json.dumps(api_response)
content.extend(
[
@ -194,7 +194,7 @@ def create_batch_response_object(
def create_batch_response_handler(
api_responses: list[dict | list | Response],
api_responses: list[dict | list | Response | None],
) -> Callable[[Any], tuple[Response, bytes]]:
"""Create a fake http2lib response handler that supports generating batch responses.
@ -598,11 +598,11 @@ async def test_partial_update_status(
[
LIST_TASK_LIST_RESPONSE,
LIST_TASKS_RESPONSE_MULTIPLE,
[EMPTY_RESPONSE, EMPTY_RESPONSE, EMPTY_RESPONSE], # Delete batch
[None, None, None], # Delete batch empty responses
LIST_TASKS_RESPONSE, # refresh after delete
]
)
)
),
],
)
async def test_delete_todo_list_item(


@ -674,3 +674,116 @@ async def test_supervisor_issue_docker_config_repair_flow(
str(aioclient_mock.mock_calls[-1][1])
== "http://127.0.0.1/resolution/suggestion/1235"
)
async def test_supervisor_issue_repair_flow_multiple_data_disks(
hass: HomeAssistant,
aioclient_mock: AiohttpClientMocker,
hass_client: ClientSessionGenerator,
issue_registry: ir.IssueRegistry,
all_setup_requests,
) -> None:
"""Test fix flow for multiple data disks supervisor issue."""
mock_resolution_info(
aioclient_mock,
issues=[
{
"uuid": "1234",
"type": "multiple_data_disks",
"context": "system",
"reference": "/dev/sda1",
"suggestions": [
{
"uuid": "1235",
"type": "rename_data_disk",
"context": "system",
"reference": "/dev/sda1",
},
{
"uuid": "1236",
"type": "adopt_data_disk",
"context": "system",
"reference": "/dev/sda1",
},
],
},
],
)
assert await async_setup_component(hass, "hassio", {})
repair_issue = issue_registry.async_get_issue(domain="hassio", issue_id="1234")
assert repair_issue
client = await hass_client()
resp = await client.post(
"/api/repairs/issues/fix",
json={"handler": "hassio", "issue_id": repair_issue.issue_id},
)
assert resp.status == HTTPStatus.OK
data = await resp.json()
flow_id = data["flow_id"]
assert data == {
"type": "menu",
"flow_id": flow_id,
"handler": "hassio",
"step_id": "fix_menu",
"data_schema": [
{
"type": "select",
"options": [
["system_rename_data_disk", "system_rename_data_disk"],
["system_adopt_data_disk", "system_adopt_data_disk"],
],
"name": "next_step_id",
}
],
"menu_options": ["system_rename_data_disk", "system_adopt_data_disk"],
"description_placeholders": {"reference": "/dev/sda1"},
}
resp = await client.post(
f"/api/repairs/issues/fix/{flow_id}",
json={"next_step_id": "system_adopt_data_disk"},
)
assert resp.status == HTTPStatus.OK
data = await resp.json()
flow_id = data["flow_id"]
assert data == {
"type": "form",
"flow_id": flow_id,
"handler": "hassio",
"step_id": "system_adopt_data_disk",
"data_schema": [],
"errors": None,
"description_placeholders": {"reference": "/dev/sda1"},
"last_step": True,
"preview": None,
}
resp = await client.post(f"/api/repairs/issues/fix/{flow_id}")
assert resp.status == HTTPStatus.OK
data = await resp.json()
flow_id = data["flow_id"]
assert data == {
"type": "create_entry",
"flow_id": flow_id,
"handler": "hassio",
"description": None,
"description_placeholders": None,
}
assert not issue_registry.async_get_issue(domain="hassio", issue_id="1234")
assert aioclient_mock.mock_calls[-1][0] == "post"
assert (
str(aioclient_mock.mock_calls[-1][1])
== "http://127.0.0.1/resolution/suggestion/1236"
)


@ -52,6 +52,15 @@ def mock_pymodbus_fixture():
"""Mock pymodbus."""
mock_pb = mock.AsyncMock()
mock_pb.close = mock.MagicMock()
read_result = ReadResult([])
mock_pb.read_coils.return_value = read_result
mock_pb.read_discrete_inputs.return_value = read_result
mock_pb.read_input_registers.return_value = read_result
mock_pb.read_holding_registers.return_value = read_result
mock_pb.write_register.return_value = read_result
mock_pb.write_registers.return_value = read_result
mock_pb.write_coil.return_value = read_result
mock_pb.write_coils.return_value = read_result
with (
mock.patch(
"homeassistant.components.modbus.modbus.AsyncModbusTcpClient",
@ -156,7 +165,7 @@ async def mock_pymodbus_exception_fixture(hass, do_exception, mock_modbus):
@pytest.fixture(name="mock_pymodbus_return")
async def mock_pymodbus_return_fixture(hass, register_words, mock_modbus):
"""Trigger update call with time_changed event."""
read_result = ReadResult(register_words) if register_words else None
read_result = ReadResult(register_words if register_words else [])
mock_modbus.read_coils.return_value = read_result
mock_modbus.read_discrete_inputs.return_value = read_result
mock_modbus.read_input_registers.return_value = read_result
@ -165,6 +174,7 @@ async def mock_pymodbus_return_fixture(hass, register_words, mock_modbus):
mock_modbus.write_registers.return_value = read_result
mock_modbus.write_coil.return_value = read_result
mock_modbus.write_coils.return_value = read_result
return mock_modbus
@pytest.fixture(name="mock_do_cycle")


@ -3,3 +3,7 @@ modbus:
host: "testHost"
port: 5001
name: "testModbus"
sensors:
- name: "dummy"
address: 117
slave: 0


@ -25,6 +25,7 @@ import voluptuous as vol
from homeassistant import config as hass_config
from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN
from homeassistant.components.modbus import async_reset_platform
from homeassistant.components.modbus.const import (
ATTR_ADDRESS,
ATTR_HUB,
@ -1560,7 +1561,7 @@ async def test_shutdown(
],
)
async def test_stop_restart(
hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_modbus
hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_pymodbus_return
) -> None:
"""Run test for service stop."""
@ -1571,7 +1572,7 @@ async def test_stop_restart(
await hass.async_block_till_done()
assert hass.states.get(entity_id).state == "17"
mock_modbus.reset_mock()
mock_pymodbus_return.reset_mock()
caplog.clear()
data = {
ATTR_HUB: TEST_MODBUS_NAME,
@ -1579,23 +1580,23 @@ async def test_stop_restart(
await hass.services.async_call(DOMAIN, SERVICE_STOP, data, blocking=True)
await hass.async_block_till_done()
assert hass.states.get(entity_id).state == STATE_UNAVAILABLE
assert mock_modbus.close.called
assert mock_pymodbus_return.close.called
assert f"modbus {TEST_MODBUS_NAME} communication closed" in caplog.text
mock_modbus.reset_mock()
mock_pymodbus_return.reset_mock()
caplog.clear()
await hass.services.async_call(DOMAIN, SERVICE_RESTART, data, blocking=True)
await hass.async_block_till_done()
assert not mock_modbus.close.called
assert mock_modbus.connect.called
assert not mock_pymodbus_return.close.called
assert mock_pymodbus_return.connect.called
assert f"modbus {TEST_MODBUS_NAME} communication open" in caplog.text
mock_modbus.reset_mock()
mock_pymodbus_return.reset_mock()
caplog.clear()
await hass.services.async_call(DOMAIN, SERVICE_RESTART, data, blocking=True)
await hass.async_block_till_done()
assert mock_modbus.close.called
assert mock_modbus.connect.called
assert mock_pymodbus_return.close.called
assert mock_pymodbus_return.connect.called
assert f"modbus {TEST_MODBUS_NAME} communication closed" in caplog.text
assert f"modbus {TEST_MODBUS_NAME} communication open" in caplog.text
@ -1625,7 +1626,7 @@ async def test_write_no_client(hass: HomeAssistant, mock_modbus) -> None:
async def test_integration_reload(
hass: HomeAssistant,
caplog: pytest.LogCaptureFixture,
mock_modbus,
mock_pymodbus_return,
freezer: FrozenDateTimeFactory,
) -> None:
"""Run test for integration reload."""
@ -1646,7 +1647,7 @@ async def test_integration_reload(
@pytest.mark.parametrize("do_config", [{}])
async def test_integration_reload_failed(
hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_modbus
hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_pymodbus_return
) -> None:
"""Run test for integration connect failure on reload."""
caplog.set_level(logging.INFO)
@ -1655,7 +1656,9 @@ async def test_integration_reload_failed(
yaml_path = get_fixture_path("configuration.yaml", "modbus")
with (
mock.patch.object(hass_config, "YAML_CONFIG_FILE", yaml_path),
mock.patch.object(mock_modbus, "connect", side_effect=ModbusException("error")),
mock.patch.object(
mock_pymodbus_return, "connect", side_effect=ModbusException("error")
),
):
await hass.services.async_call(DOMAIN, SERVICE_RELOAD, blocking=True)
await hass.async_block_till_done()
@ -1666,7 +1669,7 @@ async def test_integration_reload_failed(
@pytest.mark.parametrize("do_config", [{}])
async def test_integration_setup_failed(
hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_modbus
hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_pymodbus_return
) -> None:
"""Run test for integration setup on reload."""
with mock.patch.object(
@ -1694,3 +1697,9 @@ async def test_no_entities(hass: HomeAssistant) -> None:
]
}
assert await async_setup_component(hass, DOMAIN, config) is False
async def test_reset_platform(hass: HomeAssistant) -> None:
"""Run test for async_reset_platform."""
await async_reset_platform(hass, "modbus")
assert DOMAIN not in hass.data


@ -146,20 +146,24 @@ async def test_switch_on(
@pytest.mark.parametrize(
"zone_state_response",
[ZONE_3_ON_RESPONSE],
("zone_state_response", "start_state"),
[
(ZONE_3_ON_RESPONSE, "on"),
(ZONE_OFF_RESPONSE, "off"), # Already off
],
)
async def test_switch_off(
hass: HomeAssistant,
aioclient_mock: AiohttpClientMocker,
responses: list[AiohttpClientMockResponse],
start_state: str,
) -> None:
"""Test turning off irrigation switch."""
# Initially the test zone is on
zone = hass.states.get("switch.rain_bird_sprinkler_3")
assert zone is not None
assert zone.state == "on"
assert zone.state == start_state
aioclient_mock.mock_calls.clear()
responses.extend(

View File

@ -46,11 +46,15 @@ TEST_HA_TO_RISCO = {
}
TEST_OPTIONS = {
"scan_interval": 10,
"code_arm_required": True,
"code_disarm_required": True,
}
TEST_ADVANCED_OPTIONS = {
"scan_interval": 10,
"concurrency": 3,
}
async def test_cloud_form(hass: HomeAssistant) -> None:
"""Test we get the cloud form."""
@ -387,6 +391,53 @@ async def test_options_flow(hass: HomeAssistant) -> None:
}
async def test_advanced_options_flow(hass: HomeAssistant) -> None:
"""Test options flow."""
entry = MockConfigEntry(
domain=DOMAIN,
unique_id=TEST_CLOUD_DATA["username"],
data=TEST_CLOUD_DATA,
)
entry.add_to_hass(hass)
result = await hass.config_entries.options.async_init(
entry.entry_id, context={"show_advanced_options": True}
)
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "init"
assert "concurrency" in result["data_schema"].schema
assert "scan_interval" in result["data_schema"].schema
result = await hass.config_entries.options.async_configure(
result["flow_id"], user_input={**TEST_OPTIONS, **TEST_ADVANCED_OPTIONS}
)
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "risco_to_ha"
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input=TEST_RISCO_TO_HA,
)
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "ha_to_risco"
with patch("homeassistant.components.risco.async_setup_entry", return_value=True):
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input=TEST_HA_TO_RISCO,
)
assert result["type"] is FlowResultType.CREATE_ENTRY
assert entry.options == {
**TEST_OPTIONS,
**TEST_ADVANCED_OPTIONS,
"risco_states_to_ha": TEST_RISCO_TO_HA,
"ha_states_to_risco": TEST_HA_TO_RISCO,
}
async def test_ha_to_risco_schema(hass: HomeAssistant) -> None:
"""Test that the schema for the ha-to-risco mapping step is generated properly."""
entry = MockConfigEntry(


@ -221,7 +221,7 @@ async def test_auth_close_after_revoke(
hass.auth.async_remove_refresh_token(refresh_token)
msg = await websocket_client.receive()
assert msg.type == aiohttp.WSMsgType.CLOSED
assert msg.type is aiohttp.WSMsgType.CLOSE
assert websocket_client.closed
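
For reference, a small sketch of the enum comparison these assertions switch to: `aiohttp.WSMsgType` is an `IntEnum`, so members are singletons and `is` checks the exact member. The tests above suggest that with the bumped aiohttp the client now receives a `CLOSE` message rather than `CLOSED`; this snippet only demonstrates the comparison, not that behavior.

```python
from aiohttp import WSMsgType

msg_type = WSMsgType.CLOSE
assert msg_type is WSMsgType.CLOSE              # identity: exact enum member
assert msg_type == WSMsgType.CLOSE              # equality also holds for IntEnum
assert WSMsgType.CLOSE is not WSMsgType.CLOSED  # distinct members, hence the test change
```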


@ -43,7 +43,7 @@ async def test_pending_msg_overflow(
for idx in range(10):
await websocket_client.send_json({"id": idx + 1, "type": "ping"})
msg = await websocket_client.receive()
assert msg.type == WSMsgType.CLOSED
assert msg.type is WSMsgType.CLOSE
async def test_cleanup_on_cancellation(
@ -249,7 +249,7 @@ async def test_pending_msg_peak(
)
msg = await websocket_client.receive()
assert msg.type == WSMsgType.CLOSED
assert msg.type is WSMsgType.CLOSE
assert "Client unable to keep up with pending messages" in caplog.text
assert "Stayed over 5 for 5 seconds" in caplog.text
assert "overload" in caplog.text
@ -297,7 +297,7 @@ async def test_pending_msg_peak_recovery(
msg = await websocket_client.receive()
assert msg.type == WSMsgType.TEXT
msg = await websocket_client.receive()
assert msg.type == WSMsgType.CLOSED
assert msg.type is WSMsgType.CLOSE
assert "Client unable to keep up with pending messages" not in caplog.text


@ -41,7 +41,7 @@ async def test_quiting_hass(hass: HomeAssistant, websocket_client) -> None:
msg = await websocket_client.receive()
assert msg.type == WSMsgType.CLOSED
assert msg.type is WSMsgType.CLOSE
async def test_unknown_command(websocket_client) -> None: