Enable Ruff TRY201 (#114269)

* Enable Ruff TRY201

* remove redundant rules
This commit is contained in:
Sid 2024-03-28 10:18:07 +01:00 committed by GitHub
parent 071c3abb69
commit f7b7f74d10
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
36 changed files with 71 additions and 66 deletions

View File

@@ -87,7 +87,7 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
_LOGGER.error(
"Unknown error occurred from %s: %s", discovery_info.address, err
)
raise err
raise
return data
async def async_step_bluetooth(

View File

@@ -41,8 +41,8 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> None:
)
try:
await acc.login()
except (ClientError, TimeoutError, Aladdin.ConnectionError) as ex:
raise ex
except (ClientError, TimeoutError, Aladdin.ConnectionError):
raise
except Aladdin.InvalidPasswordError as ex:
raise InvalidAuth from ex

View File

@@ -46,9 +46,9 @@ def validate_and_connect(
ret[ATTR_MODEL] = f"{client.version()} ({client.pn()})"
ret[ATTR_FIRMWARE] = client.firmware(1)
_LOGGER.info("Returning device info=%s", ret)
except AuroraError as err:
except AuroraError:
_LOGGER.warning("Could not connect to device=%s", comport)
raise err
raise
finally:
if client.serline.isOpen():
client.close()

View File

@@ -65,7 +65,7 @@ def trace_automation(
except Exception as ex:
if automation_id:
trace.set_error(ex)
raise ex
raise
finally:
if automation_id:
trace.finished()

View File

@@ -63,5 +63,5 @@ class DelugeDataUpdateCoordinator(
"Credentials for Deluge client are not valid"
) from ex
LOGGER.error("Unknown error connecting to Deluge: %s", ex)
raise ex
raise
return data

View File

@@ -303,7 +303,7 @@ class EcovacsConfigFlow(ConfigFlow, domain=DOMAIN):
except AbortFlow as ex:
if ex.reason == "already_configured":
create_repair()
raise ex
raise
if errors := result.get("errors"):
error = errors["base"]

View File

@@ -87,7 +87,7 @@ async def get_hosts_list_if_supported(
)
else:
raise err
raise
return supports_hosts, fbx_devices

View File

@@ -152,9 +152,9 @@ class FroniusInverterUpdateCoordinator(FroniusCoordinatorBase):
data = await self.solar_net.fronius.current_inverter_data(
self.inverter_info.solar_net_id
)
except BadStatusError as err:
except BadStatusError:
if silent_retry == (self.SILENT_RETRIES - 1):
raise err
raise
continue
break
# wrap a single devices data in a dict with solar_net_id key for

View File

@@ -291,7 +291,7 @@ async def async_test_stream(
return {CONF_STREAM_SOURCE: "stream_no_route_to_host"}
if err.errno == EIO: # input/output error
return {CONF_STREAM_SOURCE: "stream_io_error"}
raise err
raise
return {}

View File

@@ -70,7 +70,7 @@ async def async_send_text_commands(
except aiohttp.ClientResponseError as err:
if 400 <= err.status < 500:
entry.async_start_reauth(hass)
raise err
raise
credentials = Credentials(session.token[CONF_ACCESS_TOKEN])
language_code = entry.options.get(CONF_LANGUAGE_CODE, default_language_code(hass))

View File

@@ -96,9 +96,9 @@ async def async_setup_service(hass: HomeAssistant) -> None:
service = Client(Credentials(entry.data[CONF_TOKEN][CONF_ACCESS_TOKEN]))
try:
sheet = service.open_by_key(entry.unique_id)
except RefreshError as ex:
except RefreshError:
entry.async_start_reauth(hass)
raise ex
raise
except APIError as ex:
raise HomeAssistantError("Failed to write data") from ex

View File

@@ -33,7 +33,7 @@ def setup_headers(app: Application, use_x_frame_options: bool) -> None:
except HTTPException as err:
for key, value in added_headers.items():
err.headers[key] = value
raise err
raise
for key, value in added_headers.items():
response.headers[key] = value

View File

@@ -147,8 +147,8 @@ class LaMetricFlowHandler(AbstractOAuth2FlowHandler, domain=DOMAIN):
return await self._async_step_create_entry(
host, user_input[CONF_API_KEY]
)
except AbortFlow as ex:
raise ex
except AbortFlow:
raise
except LaMetricConnectionError as ex:
LOGGER.error("Error connecting to LaMetric: %s", ex)
errors["base"] = "cannot_connect"
@@ -209,8 +209,8 @@ class LaMetricFlowHandler(AbstractOAuth2FlowHandler, domain=DOMAIN):
return await self._async_step_create_entry(
str(device.ip), device.api_key
)
except AbortFlow as ex:
raise ex
except AbortFlow:
raise
except LaMetricConnectionError as ex:
LOGGER.error("Error connecting to LaMetric: %s", ex)
errors["base"] = "cannot_connect"

View File

@@ -143,7 +143,7 @@ def determine_zones(receiver):
_LOGGER.debug("Zone 2 not available")
except ValueError as error:
if str(error) != TIMEOUT_MESSAGE:
raise error
raise
_LOGGER.debug("Zone 2 timed out, assuming no functionality")
try:
_LOGGER.debug("Checking for zone 3 capability")
@@ -154,7 +154,7 @@ def determine_zones(receiver):
_LOGGER.debug("Zone 3 not available")
except ValueError as error:
if str(error) != TIMEOUT_MESSAGE:
raise error
raise
_LOGGER.debug("Zone 3 timed out, assuming no functionality")
except AssertionError:
_LOGGER.error("Zone 3 detection failed")

View File

@@ -311,7 +311,7 @@ class OnvifFlowHandler(ConfigFlow, domain=DOMAIN):
self.device_id = interface.Info.HwAddress
except Fault as fault:
if "not implemented" not in fault.message:
raise fault
raise
LOGGER.debug(
"%s: Could not get network interfaces: %s",
self.onvif_config[CONF_NAME],

View File

@@ -344,7 +344,7 @@ class ONVIFDevice:
mac = interface.Info.HwAddress
except Fault as fault:
if "not implemented" not in fault.message:
raise fault
raise
LOGGER.debug(
"Couldn't get network interfaces from ONVIF device '%s'. Error: %s",

View File

@@ -351,7 +351,7 @@ class ReolinkHost:
await self._api.subscribe(sub_type=SubType.long_poll)
except NotSupportedError as err:
if initial:
raise err
raise
# make sure the long_poll_task is always created to try again later
if not self._lost_subscription:
self._lost_subscription = True
@@ -552,7 +552,7 @@ class ReolinkHost:
"Unexpected exception while requesting ONVIF pull point: %s", ex
)
await self._api.unsubscribe(sub_type=SubType.long_poll)
raise ex
raise
self._long_poll_error = False

View File

@@ -103,9 +103,9 @@ async def validate_local_input(
)
try:
await risco.connect()
except CannotConnectError as e:
except CannotConnectError:
if comm_delay >= MAX_COMMUNICATION_DELAY:
raise e
raise
comm_delay += 1
else:
break

View File

@@ -115,7 +115,7 @@ async def setup_device(
)
_LOGGER.debug(err)
await mqtt_client.async_release()
raise err
raise
coordinator = RoborockDataUpdateCoordinator(
hass, device, networking, product_info, mqtt_client
)

View File

@@ -40,7 +40,7 @@ def trace_script(
except Exception as ex:
if item_id:
trace.set_error(ex)
raise ex
raise
finally:
if item_id:
trace.finished()

View File

@@ -94,7 +94,7 @@ class SignalNotificationService(BaseNotificationService):
data = DATA_SCHEMA(data)
except vol.Invalid as ex:
_LOGGER.error("Invalid message data: %s", ex)
raise ex
raise
filenames = self.get_filenames(data)
attachments_as_bytes = self.get_attachments_as_bytes(
@@ -107,7 +107,7 @@ class SignalNotificationService(BaseNotificationService):
)
except SignalCliRestApiError as ex:
_LOGGER.error("%s", ex)
raise ex
raise
@staticmethod
def get_filenames(data: Any) -> list[str] | None:
@@ -174,7 +174,7 @@ class SignalNotificationService(BaseNotificationService):
attachments_as_bytes.append(chunks)
except Exception as ex:
_LOGGER.error("%s", ex)
raise ex
raise
if not attachments_as_bytes:
return None

View File

@@ -583,9 +583,9 @@ def stream_worker(
# dts. Use "or 1" to deal with this.
start_dts = next_video_packet.dts - (next_video_packet.duration or 1)
first_keyframe.dts = first_keyframe.pts = start_dts
except StreamWorkerError as ex:
except StreamWorkerError:
container.close()
raise ex
raise
except StopIteration as ex:
container.close()
raise StreamEndedError("Stream ended; no additional packets") from ex
@@ -612,8 +612,8 @@ def stream_worker(
while not quit_event.is_set():
try:
packet = next(container_packets)
except StreamWorkerError as ex:
raise ex
except StreamWorkerError:
raise
except StopIteration as ex:
raise StreamEndedError("Stream ended; no additional packets") from ex
except av.AVError as ex:

View File

@@ -42,7 +42,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
printer.url,
exc_info=api_error,
)
raise api_error
raise
# if the printer is offline, we raise an UpdateFailed
if printer.is_unknown_state():

View File

@@ -129,7 +129,7 @@ class SynoApi:
self._entry.unique_id,
err,
)
raise err
raise
@callback
def subscribe(self, api_key: str, unique_id: str) -> Callable[[], None]:
@@ -268,7 +268,7 @@ class SynoApi:
LOGGER.debug(
"Error from '%s': %s", self._entry.unique_id, err, exc_info=True
)
raise err
raise
async def async_reboot(self) -> None:
"""Reboot NAS."""

View File

@@ -126,7 +126,7 @@ class TankUtilitySensor(SensorEntity):
self._token = auth.get_token(self._email, self._password, force=True)
data = tank_monitor.get_device_data(self._token, self.device)
else:
raise http_error
raise
data.update(data.pop("device", {}))
data.update(data.pop("lastReading", {}))
return data

View File

@@ -66,7 +66,7 @@ class TessieStateUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
if e.status == HTTPStatus.UNAUTHORIZED:
# Auth Token is no longer valid
raise ConfigEntryAuthFailed from e
raise e
raise
return self._flatten(vehicle)

View File

@@ -154,7 +154,7 @@ class OAuth2FlowHandler(
await self.async_set_unique_id(user.id)
try:
self._abort_if_unique_id_configured()
except AbortFlow as err:
except AbortFlow:
async_create_issue(
self.hass,
DOMAIN,
@@ -168,7 +168,7 @@ class OAuth2FlowHandler(
"integration_title": "Twitch",
},
)
raise err
raise
async_create_issue(
self.hass,
HOMEASSISTANT_DOMAIN,

View File

@@ -445,9 +445,9 @@ class PipelineRtpDatagramProtocol(RtpDatagramProtocol):
async with asyncio.timeout(tts_seconds + self.tts_extra_timeout):
# TTS audio is 16Khz 16-bit mono
await self._async_send_audio(audio_bytes)
except TimeoutError as err:
except TimeoutError:
_LOGGER.warning("TTS timeout")
raise err
raise
finally:
# Signal pipeline to restart
self._tts_done.set()

View File

@@ -154,7 +154,7 @@ class WallboxCoordinator(DataUpdateCoordinator[dict[str, Any]]):
except requests.exceptions.HTTPError as wallbox_connection_error:
if wallbox_connection_error.response.status_code == 403:
raise InvalidAuth from wallbox_connection_error
raise wallbox_connection_error
raise
async def async_set_charging_current(self, charging_current: float) -> None:
"""Set maximum charging current for Wallbox."""
@@ -185,7 +185,7 @@ class WallboxCoordinator(DataUpdateCoordinator[dict[str, Any]]):
except requests.exceptions.HTTPError as wallbox_connection_error:
if wallbox_connection_error.response.status_code == 403:
raise InvalidAuth from wallbox_connection_error
raise wallbox_connection_error
raise
async def async_set_lock_unlock(self, lock: bool) -> None:
"""Set wallbox to locked or unlocked."""

View File

@@ -113,9 +113,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
try:
await coordinator.async_config_entry_first_refresh()
except ConfigEntryNotReady as err:
except ConfigEntryNotReady:
await bulb.async_close()
raise err
raise
async def _async_shutdown_on_stop(event: Event) -> None:
await bulb.async_close()

View File

@@ -164,7 +164,7 @@ def trace_condition(variables: TemplateVarsType) -> Generator[TraceElement, None
yield trace_element
except Exception as ex:
trace_element.set_error(ex)
raise ex
raise
finally:
if should_pop:
trace_stack_pop(trace_stack_cv)

View File

@@ -240,16 +240,16 @@ async def trace_action(
yield trace_element
except _AbortScript as ex:
trace_element.set_error(ex.__cause__ or ex)
raise ex
except _ConditionFail as ex:
raise
except _ConditionFail:
# Clear errors which may have been set when evaluating the condition
trace_element.set_error(None)
raise ex
except _StopScript as ex:
raise ex
raise
except _StopScript:
raise
except Exception as ex:
trace_element.set_error(ex)
raise ex
raise
finally:
trace_stack_pop(trace_stack_cv)
@@ -469,7 +469,7 @@ class _ScriptRun:
if not self._script.top_level:
# We already consumed the response, do not pass it on
err.response = None
raise err
raise
except Exception:
script_execution_set("error")
raise

View File

@@ -2559,9 +2559,9 @@ def make_logging_undefined(
def _fail_with_undefined_error(self, *args, **kwargs):
try:
return super()._fail_with_undefined_error(*args, **kwargs)
except self._undefined_exception as ex:
except self._undefined_exception:
_log_fn(logging.ERROR, self._undefined_message)
raise ex
raise
def __str__(self) -> str:
"""Log undefined __str___."""

View File

@@ -378,7 +378,7 @@ class DataUpdateCoordinator(BaseDataUpdateCoordinatorProtocol, Generic[_DataT]):
self.config_entry.async_start_reauth(self.hass)
except NotImplementedError as err:
self.last_exception = err
raise err
raise
except Exception as err: # pylint: disable=broad-except
self.last_exception = err

View File

@@ -672,8 +672,7 @@ select = [
"T100", # Trace found: {name} used
"T20", # flake8-print
"TID251", # Banned imports
"TRY004", # Prefer TypeError exception for invalid type
"TRY302", # Remove exception handler; error is immediately re-raised
"TRY", # tryceratops
"UP", # pyupgrade
"W", # pycodestyle
]
@@ -701,6 +700,8 @@ ignore = [
"SIM102", # Use a single if statement instead of nested if statements
"SIM108", # Use ternary operator {contents} instead of if-else-block
"SIM115", # Use context handler for opening files
"TRY003", # Avoid specifying long messages outside the exception class
"TRY400", # Use `logging.exception` instead of `logging.error`
"UP006", # keep type annotation style as is
"UP007", # keep type annotation style as is
# Ignored due to performance: https://github.com/charliermarsh/ruff/issues/2923
@@ -724,7 +725,11 @@ ignore = [
"PLE0605",
# temporarily disabled
"PT019"
"PT019",
"TRY002",
"TRY301",
"TRY300",
"TRY401"
]
[tool.ruff.lint.flake8-import-conventions.extend-aliases]

View File

@@ -76,9 +76,9 @@ async def async_exec(*args, display=False):
if display:
kwargs["stderr"] = asyncio.subprocess.PIPE
proc = await asyncio.create_subprocess_exec(*args, **kwargs)
except FileNotFoundError as err:
except FileNotFoundError:
printc(FAIL, f"Could not execute {args[0]}. Did you install test requirements?")
raise err
raise
if not display:
# Readin stdout into log