Spelling updates (#82867)

This commit is contained in:
Marc Mueller 2022-11-28 16:51:43 +01:00 committed by GitHub
parent d72802cfb0
commit 63d519c1a8
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
25 changed files with 27 additions and 27 deletions

View File

@ -225,7 +225,7 @@ class AmcrestCam(Camera):
# Amcrest cameras only support one snapshot command at a time.
# Hence need to wait if a previous snapshot has not yet finished.
# Also need to check that camera is online and turned on before each wait
# and before initiating shapshot.
# and before initiating snapshot.
while self._snapshot_task:
self._check_snapshot_ok()
_LOGGER.debug("Waiting for previous snapshot from %s", self._name)

View File

@ -111,7 +111,7 @@ class EbusdData:
raise RuntimeError(err) from err
def write(self, call: ServiceCall) -> None:
"""Call write methon on ebusd."""
"""Call write method on ebusd."""
name = call.data.get("name")
value = call.data.get("value")

View File

@ -183,7 +183,7 @@ class Router:
if not self.subscriptions.get(key):
return
if key in self.inflight_gets:
_LOGGER.debug("Skipping already inflight get for %s", key)
_LOGGER.debug("Skipping already in-flight get for %s", key)
return
self.inflight_gets.add(key)
_LOGGER.debug("Getting %s for subscribers %s", key, self.subscriptions[key])

View File

@ -267,7 +267,7 @@ class JewishCalendarSensor(SensorEntity):
class JewishCalendarTimeSensor(JewishCalendarSensor):
"""Implement attrbutes for sensors returning times."""
"""Implement attributes for sensors returning times."""
_attr_device_class = SensorDeviceClass.TIMESTAMP

View File

@ -134,7 +134,7 @@ def number_limit_sub_validator(entity_config: OrderedDict) -> OrderedDict:
if dpt_class is None:
raise vol.Invalid(f"'type: {value_type}' is not a valid numeric sensor type.")
# Inifinity is not supported by Home Assistant frontend so user defined
# Infinity is not supported by Home Assistant frontend so user defined
# config is required if xknx DPTNumeric subclass defines it as limit.
if min_config is None and dpt_class.value_min == float("-inf"):
raise vol.Invalid(f"'min' key required for value type '{value_type}'")

View File

@ -178,7 +178,7 @@ class MeteoFranceWeather(CoordinatorEntity, WeatherEntity):
)
else:
for forecast in self.coordinator.data.daily_forecast:
# stop when we don't have a weather condition (can happen around last days of forcast, max 14)
# stop when we don't have a weather condition (can happen around last days of forecast, max 14)
if not forecast.get("weather12H"):
break
forecast_data.append(

View File

@ -122,7 +122,7 @@ class NextBusDepartureSensor(SensorEntity):
both the route and the stop.
This is possibly a little convoluted to provide as it requires making a
request to the service to get these values. Perhaps it can be simplifed in
request to the service to get these values. Perhaps it can be simplified in
the future using fuzzy logic and matching.
"""

View File

@ -176,7 +176,7 @@ class EventManager:
).total_seconds() < 7200:
await self.async_renew()
except RemoteProtocolError:
# Likley a shutdown event, nothing to see here
# Likely a shutdown event, nothing to see here
return
except SUBSCRIPTION_ERRORS as err:
LOGGER.warning(

View File

@ -73,7 +73,7 @@ class RTSPToWebRTCConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
return None
async def async_step_hassio(self, discovery_info: HassioServiceInfo) -> FlowResult:
"""Prepare confiugration for the RTSPtoWebRTC server add-on discovery."""
"""Prepare configuration for the RTSPtoWebRTC server add-on discovery."""
if self._async_current_entries():
return self.async_abort(reason="single_instance_allowed")

View File

@ -112,7 +112,7 @@ def create_stream(
dynamic_stream_settings: DynamicStreamSettings,
stream_label: str | None = None,
) -> Stream:
"""Create a stream with the specified identfier based on the source url.
"""Create a stream with the specified identifier based on the source url.
The stream_source is typically an rtsp url (though any url accepted by ffmpeg is fine) and
options (see STREAM_OPTIONS_SCHEMA) are converted and passed into pyav / ffmpeg.

View File

@ -19,7 +19,7 @@ class Diagnostics:
self._values: dict[str, Any] = {}
def increment(self, key: str) -> None:
"""Increment a counter for the spcified key/event."""
"""Increment a counter for the specified key/event."""
self._counter.update(Counter({key: 1}))
def set_value(self, key: str, value: Any) -> None:

View File

@ -102,7 +102,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
for platform in PLATFORMS:
hass.data.pop(DATA_REMOVE_DISCOVER_COMPONENT.format(platform))()
# deattach device triggers
# detach device triggers
device_registry = dr.async_get(hass)
devices = async_entries_for_config_entry(device_registry, entry.entry_id)
for device in devices:

View File

@ -43,7 +43,7 @@ def wilight_trigger(value: Any) -> str | None:
if (step == 6) & result_60:
step = 7
err_desc = "Active part shoul be less than 2"
err_desc = "Active part should be less than 2"
if (step == 7) & result_2:
return value

View File

@ -437,7 +437,7 @@ def _load_services_file(hass: HomeAssistant, integration: Integration) -> JSON_T
def _load_services_files(
hass: HomeAssistant, integrations: Iterable[Integration]
) -> list[JSON_TYPE]:
"""Load service files for multiple intergrations."""
"""Load service files for multiple integrations."""
return [_load_services_file(hass, integration) for integration in integrations]

View File

@ -177,7 +177,7 @@ def find_non_referenced_integrations(
integration: Integration,
references: dict[Path, set[str]],
) -> set[str]:
"""Find intergrations that are not allowed to be referenced."""
"""Find integrations that are not allowed to be referenced."""
allowed_references = calc_allowed_references(integration)
referenced = set()
for path, refs in references.items():

View File

@ -27,7 +27,7 @@ def mock_config_entry() -> MockConfigEntry:
def mock_cpuinfo_config_flow() -> Generator[MagicMock, None, None]:
"""Return a mocked get_cpu_info.
It is only used to check thruthy or falsy values, so it is mocked
It is only used to check truthy or falsy values, so it is mocked
to return True.
"""
with patch(

View File

@ -56,7 +56,7 @@ async def test_step_import_existing_host(hass):
mock_data.update({CONF_SK_NUM_TRIES: 3, CONF_DIM_MODE: 50})
mock_entry = MockConfigEntry(domain=DOMAIN, data=mock_data)
mock_entry.add_to_hass(hass)
# Inititalize a config flow with different data but same host address
# Initialize a config flow with different data but same host address
with patch("pypck.connection.PchkConnectionManager.async_connect"):
imported_data = IMPORT_DATA.copy()
result = await hass.config_entries.flow.async_init(

View File

@ -913,7 +913,7 @@ async def test_subscribe_unsubscribe_logbook_stream_included_entities(
async def test_logbook_stream_excluded_entities_inherits_filters_from_recorder(
recorder_mock, hass, hass_ws_client
):
"""Test subscribe/unsubscribe logbook stream inherts filters from recorder."""
"""Test subscribe/unsubscribe logbook stream inherits filters from recorder."""
now = dt_util.utcnow()
await asyncio.gather(
*[

View File

@ -654,7 +654,7 @@ async def test_pubsub_subscriber_config_entry_reauth(
result = await oauth.async_reauth(config_entry)
await oauth.async_oauth_web_flow(result)
# Entering an updated access token refreshs the config entry.
# Entering an updated access token refreshes the config entry.
entry = await oauth.async_finish_setup(result, {"code": "1234"})
entry.data["token"].pop("expires_at")
assert entry.unique_id == PROJECT_ID

View File

@ -1,4 +1,4 @@
"""Tests for RTSPtoWebRTC inititalization."""
"""Tests for RTSPtoWebRTC initialization."""
from __future__ import annotations

View File

@ -1,4 +1,4 @@
"""Tests for RTSPtoWebRTC inititalization."""
"""Tests for RTSPtoWebRTC initialization."""
from __future__ import annotations

View File

@ -486,7 +486,7 @@ async def test_climate_hvac_action_pi_demand(hass, device_climate):
),
)
async def test_hvac_mode(hass, device_climate, sys_mode, hvac_mode):
"""Test HVAC modee."""
"""Test HVAC mode."""
thrm_cluster = device_climate.device.endpoints[1].thermostat
entity_id = await find_entity_id(Platform.CLIMATE, device_climate, hass)

View File

@ -186,11 +186,11 @@ def test_with_include_domain_glob_filtering_case4a_include_strong():
)
assert testfilter("sensor.working")
assert testfilter("sensor.notworking") is True # iclude is stronger
assert testfilter("sensor.notworking") is True # include is stronger
assert testfilter("light.test")
assert testfilter("light.notworking") is True # iclude is stronger
assert testfilter("light.notworking") is True # include is stronger
assert testfilter("light.ignoreme") is False
assert testfilter("binary_sensor.not_working") is True # iclude is stronger
assert testfilter("binary_sensor.not_working") is True # include is stronger
assert testfilter("binary_sensor.another") is False
assert testfilter("binary_sensor.specificly_included") is True
assert testfilter("sun.sun") is False

View File

@ -327,7 +327,7 @@ async def test_external_step(hass, manager):
"refresh": True,
}
# Frontend refreshses the flow
# Frontend refreshes the flow
result = await manager.async_configure(result["flow_id"])
assert result["type"] == data_entry_flow.FlowResultType.CREATE_ENTRY
assert result["title"] == "Hello"

View File

@ -639,7 +639,7 @@ async def test_integration_logs_is_custom(hass, caplog):
async def test_async_get_loaded_integrations(hass):
"""Test we can enumerate loaded integations."""
"""Test we can enumerate loaded integrations."""
hass.config.components.add("notbase")
hass.config.components.add("switch")
hass.config.components.add("notbase.switch")