Improve flake8 configuration and apply pydocstyle fixes (#1742)

Franck Nijhof 2020-05-22 13:38:57 +02:00 committed by GitHub
parent db0593f0b2
commit f0d46e8671
24 changed files with 56 additions and 40 deletions

View File

@ -14,5 +14,17 @@ use_parentheses = true
known_first_party = supervisor,tests
[flake8]
exclude = .venv,.git,.tox,docs,venv,bin,lib,deps,build
doctests = True
max-line-length = 88
ignore = E501, W503
# E501: line too long
# W503: Line break occurred before a binary operator
# E203: Whitespace before ':'
# D202 No blank lines allowed after function docstring
# W504 line break after binary operator
ignore =
E501,
W503,
E203,
D202,
W504
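
For context: E203 and W503 are the two checks that clash with Black-style formatting (the max-line-length of 88 above is Black's default), which is why they land on the ignore list. The snippet below is illustrative only and not part of this diff; it shows the kind of code Black itself produces but that flake8 would flag without these exceptions.

# Illustrative only: Black-formatted code that trips the newly ignored checks.
head = list(range(10))
offset, limit = 2, 8
middle = head[offset + 1 : limit]  # E203: whitespace before ':'
is_open, is_busy = True, False
ready = (
    is_open
    and not is_busy  # W503: line break before a binary operator
)
print(middle, ready)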

View File

@ -31,7 +31,7 @@ class AddonBuild(JsonConfig, CoreSysAttributes):
@property
def base_image(self) -> str:
"""Base images for this add-on."""
"""Return base image for this add-on."""
return self._data[ATTR_BUILD_FROM].get(
self.sys_arch.default, f"homeassistant/{self.sys_arch.default}-base:latest"
)
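
For context, the lookup above reads the add-on's build-from mapping and falls back to the default Home Assistant base image for the current architecture. A minimal sketch of that logic, with a hypothetical mapping (the image tags and architectures here are illustrative, not taken from this commit):

# Hypothetical data behind ATTR_BUILD_FROM; the fallback mirrors the code above.
build_from = {
    "amd64": "homeassistant/amd64-base:latest",
    "armv7": "homeassistant/armv7-base:latest",
}
default_arch = "aarch64"  # not pinned in the mapping, so the fallback applies
base_image = build_from.get(default_arch, f"homeassistant/{default_arch}-base:latest")
print(base_image)  # -> homeassistant/aarch64-base:latest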

View File

@ -170,7 +170,7 @@ MACHINE_ALL = [
def _simple_startup(value):
"""Simple startup schema."""
"""Define startup schema."""
if value == "before":
return STARTUP_SERVICES
if value == "after":

View File

@ -182,7 +182,7 @@ def migrate_system_env(coresys: CoreSys):
def initialize_logging():
"""Setup the logging."""
"""Initialize the logging."""
logging.basicConfig(level=logging.INFO)
fmt = "%(asctime)s %(levelname)s (%(threadName)s) [%(name)s] %(message)s"
colorfmt = f"%(log_color)s{fmt}%(reset)s"
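
The colorfmt string above is typically handed to a colorized formatter; a minimal sketch of that wiring, assuming the third-party colorlog package (the handler setup here is an illustration, not taken from this diff):

import logging

import colorlog  # assumption: colorlog is available in the environment

logging.basicConfig(level=logging.INFO)
fmt = "%(asctime)s %(levelname)s (%(threadName)s) [%(name)s] %(message)s"
colorfmt = f"%(log_color)s{fmt}%(reset)s"
# Swap the root handler's formatter for a colorized one.
logging.getLogger().handlers[0].setFormatter(
    colorlog.ColoredFormatter(colorfmt, datefmt="%y-%m-%d %H:%M:%S")
)
logging.getLogger(__name__).info("colorized logging ready")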
@ -256,7 +256,7 @@ def reg_signal(loop):
def supervisor_debugger(coresys: CoreSys) -> None:
"""Setup debugger if needed."""
"""Start debugger if needed."""
if not coresys.config.debug:
return
# pylint: disable=import-outside-toplevel

View File

@ -57,7 +57,7 @@ class Core(CoreSysAttributes):
)
async def setup(self):
"""Setup supervisor orchestration."""
"""Start setting up supervisor orchestration."""
self.state = CoreStates.STARTUP
# Load DBus

View File

@ -550,9 +550,9 @@ class CoreSysAttributes:
return self.coresys.hassos
def sys_run_in_executor(self, funct, *args) -> asyncio.Future:
"""Wrapper for executor pool."""
"""Add an job to the executor pool."""
return self.sys_loop.run_in_executor(None, funct, *args)
def sys_create_task(self, coroutine) -> asyncio.Task:
"""Wrapper for async task."""
"""Create an async task."""
return self.sys_loop.create_task(coroutine)
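
These two helpers are thin wrappers around the event loop; a self-contained sketch of what they boil down to (the function names below are made up for illustration):

import asyncio

def blocking_io() -> str:
    return "ran in executor"

async def reload() -> str:
    return "ran as task"

async def main() -> None:
    loop = asyncio.get_running_loop()
    # sys_run_in_executor(funct, *args) is equivalent to:
    result = await loop.run_in_executor(None, blocking_io)
    # sys_create_task(coroutine) is equivalent to:
    task = loop.create_task(reload())
    print(result, await task)

asyncio.run(main())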

View File

@ -4,7 +4,7 @@ from ..exceptions import DBusNotConnectedError
def dbus_connected(method):
"""Wrapper for check if D-Bus is connected."""
"""Wrap check if D-Bus is connected."""
def wrap_dbus(api, *args, **kwargs):
"""Check if D-Bus is connected before call a method."""

View File

@ -97,7 +97,7 @@ class DockerAPI:
ipv4: Optional[IPv4Address] = None,
**kwargs: Dict[str, Any],
) -> docker.models.containers.Container:
""""Create a Docker container and run it.
"""Create a Docker container and run it.
Need run inside executor.
"""

View File

@ -164,7 +164,7 @@ class DockerAddon(DockerInterface):
@property
def security_opt(self) -> List[str]:
"""Controlling security options."""
"""Control security options."""
security = []
# AppArmor
@ -175,7 +175,7 @@ class DockerAddon(DockerInterface):
security.append(f"apparmor={self.addon.slug}")
# Disable Seccomp / We don't support it official and it
# make troubles on some kind of host systems.
# causes problems on some types of host systems.
security.append("seccomp=unconfined")
return security

View File

@ -227,9 +227,9 @@ class DockerInterface(CoreSysAttributes):
return self.sys_run_in_executor(self._remove)
def _remove(self) -> None:
"""remove docker images.
"""Remove docker images.
Need run inside executor.
Needs to run inside executor.
"""
# Cleanup container
with suppress(DockerAPIError):

View File

@ -483,7 +483,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
return ConfigResult(True, log)
async def ensure_access_token(self) -> None:
"""Ensures there is an access token."""
"""Ensure there is an access token."""
if (
self.access_token is not None
and self._access_token_expires > datetime.utcnow()

View File

@ -20,7 +20,7 @@ class ServiceManager(CoreSysAttributes):
self._services = set()
def __iter__(self):
"""Iterator trought services."""
"""Iterate through services."""
return iter(self._services)
def _check_dbus(self, unit=None):

View File

@ -414,6 +414,7 @@ class CoreDNS(JsonConfig, CoreSysAttributes):
def is_fails(self) -> Awaitable[bool]:
"""Return True if a Docker container is fails state.
Return a coroutine.
"""
return self.instance.is_fails()

View File

@ -192,6 +192,7 @@ class Multicast(JsonConfig, CoreSysAttributes):
def is_fails(self) -> Awaitable[bool]:
"""Return True if a Docker container is fails state.
Return a coroutine.
"""
return self.instance.is_fails()

View File

@ -58,7 +58,7 @@ class SnapshotManager(CoreSysAttributes):
self.snapshots_obj = {}
async def _load_snapshot(tar_file):
"""Internal function to load snapshot."""
"""Load the snapshot."""
snapshot = Snapshot(self.coresys, tar_file)
if await snapshot.load():
self.snapshots_obj[snapshot.slug] = snapshot

View File

@ -356,7 +356,7 @@ class Snapshot(CoreSysAttributes):
folder_list = set(folder_list or ALL_FOLDERS)
def _folder_save(name):
"""Internal function to snapshot a folder."""
"""Take snapshot of a folder."""
slug_name = name.replace("/", "_")
tar_name = Path(self._tmp.name, f"{slug_name}.tar.gz")
origin_dir = Path(self.sys_config.path_supervisor, name)

View File

@ -55,7 +55,7 @@ class StoreManager(CoreSysAttributes):
# add new repository
async def _add_repository(url):
"""Helper function to async add repository."""
"""Add a repository."""
repository = Repository(self.coresys, url)
if not await repository.load():
_LOGGER.error("Can't load from repository %s", url)

View File

@ -34,7 +34,8 @@ def process_lock(method):
class AsyncThrottle:
"""
"""A class for throttling the execution of tasks.
Decorator that prevents a function from being called more than once every
time period with blocking.
"""
@ -46,10 +47,10 @@ class AsyncThrottle:
self.synchronize: Optional[asyncio.Lock] = None
def __call__(self, method):
"""Throttle function"""
"""Throttle function."""
async def wrapper(*args, **kwargs):
"""Throttle function wrapper"""
"""Throttle function wrapper."""
if not self.synchronize:
self.synchronize = asyncio.Lock()
@ -65,7 +66,8 @@ class AsyncThrottle:
class AsyncCallFilter:
"""
"""A class for throttling the execution of tasks, with a filter.
Decorator that prevents a function from being called more than once every
time period.
"""
@ -76,10 +78,10 @@ class AsyncCallFilter:
self.time_of_last_call = datetime.min
def __call__(self, method):
"""Throttle function"""
"""Throttle function."""
async def wrapper(*args, **kwargs):
"""Throttle function wrapper"""
"""Throttle function wrapper."""
now = datetime.now()
time_since_last_call = now - self.time_of_last_call
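
Both classes are used as decorators on async methods; a usage sketch assuming the constructor takes the throttle period as a timedelta (the signature and import path are assumptions, not shown in this diff):

import asyncio
from datetime import timedelta

from supervisor.utils import AsyncThrottle  # assumed module path

class Fetcher:
    """Sketch: refresh remote data at most once per period."""

    @AsyncThrottle(timedelta(seconds=30))
    async def fetch_data(self) -> None:
        await asyncio.sleep(0)  # placeholder for the real network call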

View File

@ -223,7 +223,7 @@ class DBus:
return signal
def __getattr__(self, name: str) -> DBusCallWrapper:
"""Mapping to dbus method."""
"""Map to dbus method."""
return getattr(DBusCallWrapper(self, self.bus_name), name)
@ -236,12 +236,12 @@ class DBusCallWrapper:
self.interface: str = interface
def __call__(self) -> None:
"""Should never be called."""
"""Catch this method from being called."""
_LOGGER.error("DBus method %s not exists!", self.interface)
raise DBusFatalError()
def __getattr__(self, name: str):
"""Mapping to dbus method."""
"""Map to dbus method."""
interface = f"{self.interface}.{name}"
if interface not in self.dbus.methods:

View File

@ -140,7 +140,7 @@ def exclude_filter(
"""Create callable filter function to check TarInfo for add."""
def my_filter(tar: tarfile.TarInfo) -> Optional[tarfile.TarInfo]:
"""Custom exclude filter."""
"""Filter to filter excludes."""
file_path = Path(tar.name)
for exclude in exclude_list:
if not file_path.match(exclude):
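
The callable returned here plugs directly into the filter hook of tarfile.TarFile.add; an illustrative use (archive name and patterns are made up):

import tarfile

# Skip temp files and Python caches while archiving a directory tree.
with tarfile.open("backup.tar.gz", "w:gz") as backup:
    backup.add("data", filter=exclude_filter(["*.tmp", "*/__pycache__/*"]))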

View File

@ -8,7 +8,7 @@ def schema_or(schema):
"""Allow schema or empty."""
def _wrapper(value):
"""Wrapper for validator."""
"""Define a wrapper for validator."""
if not value:
return value
return schema(value)
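
A usage sketch with voluptuous, assuming a validator such as dns_server_list from the validate module further down (the option name and default are illustrative):

import voluptuous as vol

# Accept an empty value as-is, otherwise run the real validator.
SCHEMA = vol.Schema(
    {vol.Optional("servers", default=list): schema_or(dns_server_list)}
)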

View File

@ -52,7 +52,7 @@ token = vol.Match(r"^[0-9a-f]{32,256}$")
def dns_url(url: str) -> str:
""" takes a DNS url (str) and validates that it matches the scheme dns://<ip address>."""
"""Take a DNS url (str) and validates that it matches the scheme dns://<ip address>."""
if not url.lower().startswith("dns://"):
raise vol.Invalid("Doesn't start with dns://")
address: str = url[6:] # strip the dns:// off
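
In short, the behavior the tests below exercise (values are illustrative):

import voluptuous as vol

assert dns_url("dns://8.8.8.8")  # valid scheme and IPv4 address
try:
    dns_url("https://example.com")
except vol.Invalid:
    pass  # rejected: does not start with dns://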

View File

@ -48,7 +48,7 @@ def test_invalid_repository():
def test_valid_repository():
"""Validate basic config with different valid repositories"""
"""Validate basic config with different valid repositories."""
config = load_json_fixture("basic-addon-config.json")
custom_registry = "registry.gitlab.com/company/add-ons/core/test-example"
@ -58,7 +58,7 @@ def test_valid_repository():
def test_valid_map():
"""Validate basic config with different valid maps"""
"""Validate basic config with different valid maps."""
config = load_json_fixture("basic-addon-config.json")
config["map"] = ["backup:rw", "ssl:ro", "config"]

View File

@ -19,29 +19,29 @@ BAD = ["hello world", "https://foo.bar", "", "dns://example.com"]
async def test_dns_url_v4_good():
""" tests the DNS validator with known-good ipv6 DNS URLs """
"""Test the DNS validator with known-good ipv6 DNS URLs."""
for url in GOOD_V4:
assert supervisor.validate.dns_url(url)
async def test_dns_url_v6_good():
""" tests the DNS validator with known-good ipv6 DNS URLs """
"""Test the DNS validator with known-good ipv6 DNS URLs."""
for url in GOOD_V6:
assert supervisor.validate.dns_url(url)
async def test_dns_server_list_v4():
""" test a list with v4 addresses """
"""Test a list with v4 addresses."""
assert supervisor.validate.dns_server_list(GOOD_V4)
async def test_dns_server_list_v6():
""" test a list with v6 addresses """
"""Test a list with v6 addresses."""
assert supervisor.validate.dns_server_list(GOOD_V6)
async def test_dns_server_list_combined():
""" test a list with both v4 and v6 addresses """
"""Test a list with both v4 and v6 addresses."""
combined = GOOD_V4 + GOOD_V6
# test the matches
assert supervisor.validate.dns_server_list(combined)
@ -53,14 +53,14 @@ async def test_dns_server_list_combined():
async def test_dns_server_list_bad():
""" test the bad list """
"""Test the bad list."""
# test the matches
with pytest.raises(voluptuous.error.Invalid):
assert supervisor.validate.dns_server_list(BAD)
async def test_dns_server_list_bad_combined():
""" test the bad list, combined with the good """
"""Test the bad list, combined with the good."""
combined = GOOD_V4 + GOOD_V6 + BAD
with pytest.raises(voluptuous.error.Invalid):