Refactor code / object handling (#289)

* Refactor code / object handling

* Next step

* fix lint

* Step 2

* Cleanup API code

* cleanup addons code

* cleanup data handling

* Cleanup addons data handling

* Cleanup docker api

* clean docker api p2

* next cleanup round

* cleanup start on snapshots

* update format strings

* fix setup

* fix lint

* fix lint

* fix lint

* fix tox

* Fix wrong import of datetime module

* Fix bug with attributes

* fix extraction

* Update core

* Update logs

* Expand scheduler

* add support for time interval objects

* next updates on tasks

* Fix some things

* Cleanup code / supervisor

* fix lint

* Fix some code styles

* rename stuff

* cleanup api call reload

* fix lock replacement

* fix lint

* fix lint

* fix bug

* fix wrong config links

* fix bugs

* fix bug

* Update version on startup

* Fix some bugs

* fix bug

* Fix snapshot

* Add wait boot options

* fix lint

* fix default config

* fix snapshot

* fix snapshot

* load snapshots on startup

* add log message at the end

* Some cleanups

* fix bug

* add logger

* add logger for supervisor update

* Add more logger
This commit is contained in:
Pascal Vizeli 2018-01-02 21:21:29 +01:00 committed by GitHub
parent 34d1f4725d
commit 1c49351e66
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
55 changed files with 1439 additions and 1194 deletions

6
API.md
View File

@ -36,6 +36,7 @@ The addons from `addons` are only installed one.
"arch": "armhf|aarch64|i386|amd64",
"beta_channel": "true|false",
"timezone": "TIMEZONE",
"wait_boot": "int",
"addons": [
{
"name": "xy bla",
@ -72,7 +73,8 @@ Optional:
"timezone": "TIMEZONE",
"addons_repositories": [
"REPO_URL"
]
],
"wait_boot": "int"
}
```
@ -256,6 +258,8 @@ Optional:
}
```
- POST `/host/reload`
### Network
- GET `/network/info`

View File

@ -33,10 +33,10 @@ if __name__ == "__main__":
loop.set_default_executor(executor)
_LOGGER.info("Initialize Hassio setup")
config = bootstrap.initialize_system_data()
hassio = core.HassIO(loop, config)
coresys = bootstrap.initialize_coresys(loop)
hassio = core.HassIO(coresys)
bootstrap.migrate_system_env(config)
bootstrap.migrate_system_env(coresys)
_LOGGER.info("Setup HassIO")
loop.run_until_complete(hassio.setup())

View File

@ -6,45 +6,44 @@ from .addon import Addon
from .repository import Repository
from .data import Data
from ..const import REPOSITORY_CORE, REPOSITORY_LOCAL, BOOT_AUTO
from ..coresys import CoreSysAttributes
_LOGGER = logging.getLogger(__name__)
BUILTIN_REPOSITORIES = set((REPOSITORY_CORE, REPOSITORY_LOCAL))
class AddonManager(object):
class AddonManager(CoreSysAttributes):
"""Manage addons inside HassIO."""
def __init__(self, config, loop, docker):
def __init__(self, coresys):
"""Initialize docker base wrapper."""
self.loop = loop
self.config = config
self.docker = docker
self.data = Data(config)
self.addons = {}
self.repositories = {}
self.coresys = coresys
self.data = Data(coresys)
self.addons_obj = {}
self.repositories_obj = {}
@property
def list_addons(self):
"""Return a list of all addons."""
return list(self.addons.values())
return list(self.addons_obj.values())
@property
def list_repositories(self):
"""Return list of addon repositories."""
return list(self.repositories.values())
return list(self.repositories_obj.values())
def get(self, addon_slug):
"""Return a adddon from slug."""
return self.addons.get(addon_slug)
return self.addons_obj.get(addon_slug)
async def prepare(self):
async def load(self):
"""Startup addon management."""
self.data.reload()
# init hassio built-in repositories
repositories = \
set(self.config.addons_repositories) | BUILTIN_REPOSITORIES
set(self._config.addons_repositories) | BUILTIN_REPOSITORIES
# init custom repositories & load addons
await self.load_repositories(repositories)
@ -52,9 +51,9 @@ class AddonManager(object):
async def reload(self):
"""Update addons from repo and reload list."""
tasks = [repository.update() for repository in
self.repositories.values()]
self.repositories_obj.values()]
if tasks:
await asyncio.wait(tasks, loop=self.loop)
await asyncio.wait(tasks, loop=self._loop)
# read data from repositories
self.data.reload()
@ -65,29 +64,29 @@ class AddonManager(object):
async def load_repositories(self, list_repositories):
"""Add a new custom repository."""
new_rep = set(list_repositories)
old_rep = set(self.repositories)
old_rep = set(self.repositories_obj)
# add new repository
async def _add_repository(url):
"""Helper function to async add repository."""
repository = Repository(self.config, self.loop, self.data, url)
repository = Repository(self.coresys, url)
if not await repository.load():
_LOGGER.error("Can't load from repository %s", url)
return
self.repositories[url] = repository
self.repositories_obj[url] = repository
# don't add built-in repository to config
if url not in BUILTIN_REPOSITORIES:
self.config.add_addon_repository(url)
self._config.add_addon_repository(url)
tasks = [_add_repository(url) for url in new_rep - old_rep]
if tasks:
await asyncio.wait(tasks, loop=self.loop)
await asyncio.wait(tasks, loop=self._loop)
# del new repository
for url in old_rep - new_rep - BUILTIN_REPOSITORIES:
self.repositories.pop(url).remove()
self.config.drop_addon_repository(url)
self.repositories_obj.pop(url).remove()
self._config.drop_addon_repository(url)
# update data
self.data.reload()
@ -98,8 +97,8 @@ class AddonManager(object):
all_addons = set(self.data.system) | set(self.data.cache)
# calc diff
add_addons = all_addons - set(self.addons)
del_addons = set(self.addons) - all_addons
add_addons = all_addons - set(self.addons_obj)
del_addons = set(self.addons_obj) - all_addons
_LOGGER.info("Load addons: %d all - %d new - %d remove",
len(all_addons), len(add_addons), len(del_addons))
@ -107,27 +106,27 @@ class AddonManager(object):
# new addons
tasks = []
for addon_slug in add_addons:
addon = Addon(
self.config, self.loop, self.docker, self.data, addon_slug)
addon = Addon(self.coresys, addon_slug)
tasks.append(addon.load())
self.addons[addon_slug] = addon
self.addons_obj[addon_slug] = addon
if tasks:
await asyncio.wait(tasks, loop=self.loop)
await asyncio.wait(tasks, loop=self._loop)
# remove
for addon_slug in del_addons:
self.addons.pop(addon_slug)
self.addons_obj.pop(addon_slug)
async def auto_boot(self, stage):
"""Boot addons with mode auto."""
tasks = []
for addon in self.addons.values():
for addon in self.addons_obj.values():
if addon.is_installed and addon.boot == BOOT_AUTO and \
addon.startup == stage:
tasks.append(addon.start())
_LOGGER.info("Startup %s run %d addons", stage, len(tasks))
if tasks:
await asyncio.wait(tasks, loop=self.loop)
await asyncio.wait(tasks, loop=self._loop)
await asyncio.sleep(self._config.wait_boot, loop=self._loop)

View File

@ -13,6 +13,7 @@ from voluptuous.humanize import humanize_error
from .validate import (
validate_options, SCHEMA_ADDON_SNAPSHOT, RE_VOLUME)
from .utils import check_installed
from ..const import (
ATTR_NAME, ATTR_VERSION, ATTR_SLUG, ATTR_DESCRIPTON, ATTR_BOOT, ATTR_MAP,
ATTR_OPTIONS, ATTR_PORTS, ATTR_SCHEMA, ATTR_IMAGE, ATTR_REPOSITORY,
@ -23,9 +24,9 @@ from ..const import (
ATTR_HASSIO_API, ATTR_AUDIO, ATTR_AUDIO_OUTPUT, ATTR_AUDIO_INPUT,
ATTR_GPIO, ATTR_HOMEASSISTANT_API, ATTR_STDIN, ATTR_LEGACY, ATTR_HOST_IPC,
ATTR_HOST_DBUS, ATTR_AUTO_UART)
from .util import check_installed
from ..dock.addon import DockerAddon
from ..tools import write_json_file, read_json_file
from ..coresys import CoreSysAttributes
from ..docker.addon import DockerAddon
from ..utils.json import write_json_file, read_json_file
_LOGGER = logging.getLogger(__name__)
@ -34,22 +35,20 @@ RE_WEBUI = re.compile(
r":\/\/\[HOST\]:\[PORT:(?P<t_port>\d+)\](?P<s_suffix>.*)$")
class Addon(object):
class Addon(CoreSysAttributes):
"""Hold data for addon inside HassIO."""
def __init__(self, config, loop, docker, data, slug):
def __init__(self, coresys, slug):
"""Initialize data holder."""
self.loop = loop
self.config = config
self.data = data
self._id = slug
self.coresys = coresys
self.instance = DockerAddon(coresys, slug)
self.docker = DockerAddon(config, loop, docker, self)
self._id = slug
async def load(self):
"""Async initialize of object."""
if self.is_installed:
await self.docker.attach()
await self.instance.attach()
@property
def slug(self):
@ -59,91 +58,96 @@ class Addon(object):
@property
def _mesh(self):
"""Return addon data from system or cache."""
return self.data.system.get(self._id, self.data.cache.get(self._id))
return self._data.system.get(self._id, self._data.cache.get(self._id))
@property
def _data(self):
"""Return addons data storage."""
return self._addons.data
@property
def is_installed(self):
"""Return True if a addon is installed."""
return self._id in self.data.system
return self._id in self._data.system
@property
def is_detached(self):
"""Return True if addon is detached."""
return self._id not in self.data.cache
return self._id not in self._data.cache
@property
def version_installed(self):
"""Return installed version."""
return self.data.user.get(self._id, {}).get(ATTR_VERSION)
return self._data.user.get(self._id, {}).get(ATTR_VERSION)
def _set_install(self, version):
"""Set addon as installed."""
self.data.system[self._id] = deepcopy(self.data.cache[self._id])
self.data.user[self._id] = {
self._data.system[self._id] = deepcopy(self._data.cache[self._id])
self._data.user[self._id] = {
ATTR_OPTIONS: {},
ATTR_VERSION: version,
}
self.data.save()
self._data.save()
def _set_uninstall(self):
"""Set addon as uninstalled."""
self.data.system.pop(self._id, None)
self.data.user.pop(self._id, None)
self.data.save()
self._data.system.pop(self._id, None)
self._data.user.pop(self._id, None)
self._data.save()
def _set_update(self, version):
"""Update version of addon."""
self.data.system[self._id] = deepcopy(self.data.cache[self._id])
self.data.user[self._id][ATTR_VERSION] = version
self.data.save()
self._data.system[self._id] = deepcopy(self._data.cache[self._id])
self._data.user[self._id][ATTR_VERSION] = version
self._data.save()
def _restore_data(self, user, system):
"""Restore data to addon."""
self.data.user[self._id] = deepcopy(user)
self.data.system[self._id] = deepcopy(system)
self.data.save()
self._data.user[self._id] = deepcopy(user)
self._data.system[self._id] = deepcopy(system)
self._data.save()
@property
def options(self):
"""Return options with local changes."""
if self.is_installed:
return {
**self.data.system[self._id][ATTR_OPTIONS],
**self.data.user[self._id][ATTR_OPTIONS]
**self._data.system[self._id][ATTR_OPTIONS],
**self._data.user[self._id][ATTR_OPTIONS]
}
return self.data.cache[self._id][ATTR_OPTIONS]
return self._data.cache[self._id][ATTR_OPTIONS]
@options.setter
def options(self, value):
"""Store user addon options."""
self.data.user[self._id][ATTR_OPTIONS] = deepcopy(value)
self.data.save()
self._data.user[self._id][ATTR_OPTIONS] = deepcopy(value)
self._data.save()
@property
def boot(self):
"""Return boot config with prio local settings."""
if ATTR_BOOT in self.data.user.get(self._id, {}):
return self.data.user[self._id][ATTR_BOOT]
if ATTR_BOOT in self._data.user.get(self._id, {}):
return self._data.user[self._id][ATTR_BOOT]
return self._mesh[ATTR_BOOT]
@boot.setter
def boot(self, value):
"""Store user boot options."""
self.data.user[self._id][ATTR_BOOT] = value
self.data.save()
self._data.user[self._id][ATTR_BOOT] = value
self._data.save()
@property
def auto_update(self):
"""Return if auto update is enable."""
if ATTR_AUTO_UPDATE in self.data.user.get(self._id, {}):
return self.data.user[self._id][ATTR_AUTO_UPDATE]
if ATTR_AUTO_UPDATE in self._data.user.get(self._id, {}):
return self._data.user[self._id][ATTR_AUTO_UPDATE]
return None
@auto_update.setter
def auto_update(self, value):
"""Set auto update."""
self.data.user[self._id][ATTR_AUTO_UPDATE] = value
self.data.save()
self._data.user[self._id][ATTR_AUTO_UPDATE] = value
self._data.save()
@property
def name(self):
@ -159,7 +163,7 @@ class Addon(object):
def api_token(self):
"""Return a API token for this add-on."""
if self.is_installed:
return self.data.user[self._id][ATTR_UUID]
return self._data.user[self._id][ATTR_UUID]
return None
@property
@ -188,8 +192,8 @@ class Addon(object):
@property
def last_version(self):
"""Return version of addon."""
if self._id in self.data.cache:
return self.data.cache[self._id][ATTR_VERSION]
if self._id in self._data.cache:
return self._data.cache[self._id][ATTR_VERSION]
return self.version_installed
@property
@ -204,24 +208,24 @@ class Addon(object):
return None
if not self.is_installed or \
ATTR_NETWORK not in self.data.user[self._id]:
ATTR_NETWORK not in self._data.user[self._id]:
return self._mesh[ATTR_PORTS]
return self.data.user[self._id][ATTR_NETWORK]
return self._data.user[self._id][ATTR_NETWORK]
@ports.setter
def ports(self, value):
"""Set custom ports of addon."""
if value is None:
self.data.user[self._id].pop(ATTR_NETWORK, None)
self._data.user[self._id].pop(ATTR_NETWORK, None)
else:
new_ports = {}
for container_port, host_port in value.items():
if container_port in self._mesh.get(ATTR_PORTS, {}):
new_ports[container_port] = host_port
self.data.user[self._id][ATTR_NETWORK] = new_ports
self._data.user[self._id][ATTR_NETWORK] = new_ports
self.data.save()
self._data.save()
@property
def webui(self):
@ -240,7 +244,7 @@ class Addon(object):
if self.ports is None:
port = t_port
else:
port = self.ports.get("{}/tcp".format(t_port), t_port)
port = self.ports.get(f"{t_port}/tcp", t_port)
# for interface config or port lists
if isinstance(port, (tuple, list)):
@ -252,7 +256,7 @@ class Addon(object):
else:
proto = s_prefix
return "{}://[HOST]:{}{}".format(proto, port, s_suffix)
return f"{proto}://[HOST]:{port}{s_suffix}"
@property
def host_network(self):
@ -330,19 +334,20 @@ class Addon(object):
if not self.with_audio:
return None
setting = self.config.audio_output
if self.is_installed and ATTR_AUDIO_OUTPUT in self.data.user[self._id]:
setting = self.data.user[self._id][ATTR_AUDIO_OUTPUT]
setting = self._config.audio_output
if self.is_installed and \
ATTR_AUDIO_OUTPUT in self._data.user[self._id]:
setting = self._data.user[self._id][ATTR_AUDIO_OUTPUT]
return setting
@audio_output.setter
def audio_output(self, value):
"""Set/remove custom audio output settings."""
if value is None:
self.data.user[self._id].pop(ATTR_AUDIO_OUTPUT, None)
self._data.user[self._id].pop(ATTR_AUDIO_OUTPUT, None)
else:
self.data.user[self._id][ATTR_AUDIO_OUTPUT] = value
self.data.save()
self._data.user[self._id][ATTR_AUDIO_OUTPUT] = value
self._data.save()
@property
def audio_input(self):
@ -350,19 +355,19 @@ class Addon(object):
if not self.with_audio:
return None
setting = self.config.audio_input
if self.is_installed and ATTR_AUDIO_INPUT in self.data.user[self._id]:
setting = self.data.user[self._id][ATTR_AUDIO_INPUT]
setting = self._config.audio_input
if self.is_installed and ATTR_AUDIO_INPUT in self._data.user[self._id]:
setting = self._data.user[self._id][ATTR_AUDIO_INPUT]
return setting
@audio_input.setter
def audio_input(self, value):
"""Set/remove custom audio input settings."""
if value is None:
self.data.user[self._id].pop(ATTR_AUDIO_INPUT, None)
self._data.user[self._id].pop(ATTR_AUDIO_INPUT, None)
else:
self.data.user[self._id][ATTR_AUDIO_INPUT] = value
self.data.save()
self._data.user[self._id][ATTR_AUDIO_INPUT] = value
self._data.save()
@property
def url(self):
@ -391,11 +396,11 @@ class Addon(object):
# Repository with dockerhub images
if ATTR_IMAGE in addon_data:
return addon_data[ATTR_IMAGE].format(arch=self.config.arch)
return addon_data[ATTR_IMAGE].format(arch=self._arch)
# local build
return "{}/{}-addon-{}".format(
addon_data[ATTR_REPOSITORY], self.config.arch,
addon_data[ATTR_REPOSITORY], self._arch,
addon_data[ATTR_SLUG])
@property
@ -416,12 +421,12 @@ class Addon(object):
@property
def path_data(self):
"""Return addon data path inside supervisor."""
return Path(self.config.path_addons_data, self._id)
return Path(self._config.path_addons_data, self._id)
@property
def path_extern_data(self):
"""Return addon data path external for docker."""
return PurePath(self.config.path_extern_addons_data, self._id)
return PurePath(self._config.path_extern_addons_data, self._id)
@property
def path_options(self):
@ -452,7 +457,7 @@ class Addon(object):
schema(options)
return write_json_file(self.path_options, options)
except vol.Invalid as ex:
_LOGGER.error("Addon %s have wrong options -> %s", self._id,
_LOGGER.error("Addon %s have wrong options: %s", self._id,
humanize_error(options, ex))
return False
@ -472,8 +477,8 @@ class Addon(object):
return True
# load next schema
new_raw_schema = self.data.cache[self._id][ATTR_SCHEMA]
default_options = self.data.cache[self._id][ATTR_OPTIONS]
new_raw_schema = self._data.cache[self._id][ATTR_SCHEMA]
default_options = self._data.cache[self._id][ATTR_OPTIONS]
# if disabled
if isinstance(new_raw_schema, bool):
@ -481,7 +486,7 @@ class Addon(object):
# merge options
options = {
**self.data.user[self._id][ATTR_OPTIONS],
**self._data.user[self._id][ATTR_OPTIONS],
**default_options,
}
@ -498,9 +503,9 @@ class Addon(object):
async def install(self):
"""Install a addon."""
if self.config.arch not in self.supported_arch:
if self._arch not in self.supported_arch:
_LOGGER.error(
"Addon %s not supported on %s", self._id, self.config.arch)
"Addon %s not supported on %s", self._id, self._arch)
return False
if self.is_installed:
@ -512,7 +517,7 @@ class Addon(object):
"Create Home-Assistant addon data folder %s", self.path_data)
self.path_data.mkdir()
if not await self.docker.install(self.last_version):
if not await self.instance.install(self.last_version):
return False
self._set_install(self.last_version)
@ -521,7 +526,7 @@ class Addon(object):
@check_installed
async def uninstall(self):
"""Remove a addon."""
if not await self.docker.remove():
if not await self.instance.remove():
return False
if self.path_data.is_dir():
@ -537,7 +542,7 @@ class Addon(object):
if not self.is_installed:
return STATE_NONE
if await self.docker.is_running():
if await self.instance.is_running():
return STATE_STARTED
return STATE_STOPPED
@ -547,7 +552,7 @@ class Addon(object):
Return a coroutine.
"""
return self.docker.run()
return self.instance.run()
@check_installed
def stop(self):
@ -555,7 +560,7 @@ class Addon(object):
Return a coroutine.
"""
return self.docker.stop()
return self.instance.stop()
@check_installed
async def update(self):
@ -563,17 +568,16 @@ class Addon(object):
last_state = await self.state()
if self.last_version == self.version_installed:
_LOGGER.warning(
"No update available for Addon %s", self._id)
_LOGGER.info("No update available for Addon %s", self._id)
return False
if not await self.docker.update(self.last_version):
if not await self.instance.update(self.last_version):
return False
self._set_update(self.last_version)
# restore state
if last_state == STATE_STARTED:
await self.docker.run()
await self.instance.run()
return True
@check_installed
@ -582,7 +586,7 @@ class Addon(object):
Return a coroutine.
"""
return self.docker.restart()
return self.instance.restart()
@check_installed
def logs(self):
@ -590,7 +594,7 @@ class Addon(object):
Return a coroutine.
"""
return self.docker.logs()
return self.instance.logs()
@check_installed
async def rebuild(self):
@ -602,15 +606,15 @@ class Addon(object):
return False
# remove docker container but not addon config
if not await self.docker.remove():
if not await self.instance.remove():
return False
if not await self.docker.install(self.version_installed):
if not await self.instance.install(self.version_installed):
return False
# restore state
if last_state == STATE_STARTED:
await self.docker.run()
await self.instance.run()
return True
@check_installed
@ -623,20 +627,20 @@ class Addon(object):
_LOGGER.error("Add-on don't support write to stdin!")
return False
return await self.docker.write_stdin(data)
return await self.instance.write_stdin(data)
@check_installed
async def snapshot(self, tar_file):
"""Snapshot a state of a addon."""
with TemporaryDirectory(dir=str(self.config.path_tmp)) as temp:
with TemporaryDirectory(dir=str(self._config.path_tmp)) as temp:
# store local image
if self.need_build and not await \
self.docker.export_image(Path(temp, "image.tar")):
self.instance.export_image(Path(temp, "image.tar")):
return False
data = {
ATTR_USER: self.data.user.get(self._id, {}),
ATTR_SYSTEM: self.data.system.get(self._id, {}),
ATTR_USER: self._data.user.get(self._id, {}),
ATTR_SYSTEM: self._data.system.get(self._id, {}),
ATTR_VERSION: self.version_installed,
ATTR_STATE: await self.state(),
}
@ -656,9 +660,9 @@ class Addon(object):
try:
_LOGGER.info("Build snapshot for addon %s", self._id)
await self.loop.run_in_executor(None, _create_tar)
await self._loop.run_in_executor(None, _create_tar)
except tarfile.TarError as err:
_LOGGER.error("Can't write tarfile %s -> %s", tar_file, err)
_LOGGER.error("Can't write tarfile %s: %s", tar_file, err)
return False
_LOGGER.info("Finish snapshot for addon %s", self._id)
@ -666,7 +670,7 @@ class Addon(object):
async def restore(self, tar_file):
"""Restore a state of a addon."""
with TemporaryDirectory(dir=str(self.config.path_tmp)) as temp:
with TemporaryDirectory(dir=str(self._config.path_tmp)) as temp:
# extract snapshot
def _extract_tar():
"""Extract tar snapshot."""
@ -674,39 +678,41 @@ class Addon(object):
snapshot.extractall(path=Path(temp))
try:
await self.loop.run_in_executor(None, _extract_tar)
await self._loop.run_in_executor(None, _extract_tar)
except tarfile.TarError as err:
_LOGGER.error("Can't read tarfile %s -> %s", tar_file, err)
_LOGGER.error("Can't read tarfile %s: %s", tar_file, err)
return False
# read snapshot data
try:
data = read_json_file(Path(temp, "addon.json"))
except (OSError, json.JSONDecodeError) as err:
_LOGGER.error("Can't read addon.json -> %s", err)
_LOGGER.error("Can't read addon.json: %s", err)
# validate
try:
data = SCHEMA_ADDON_SNAPSHOT(data)
except vol.Invalid as err:
_LOGGER.error("Can't validate %s, snapshot data -> %s",
_LOGGER.error("Can't validate %s, snapshot data: %s",
self._id, humanize_error(data, err))
return False
# restore data / reload addon
_LOGGER.info("Restore config for addon %s", self._id)
self._restore_data(data[ATTR_USER], data[ATTR_SYSTEM])
# check version / restore image
version = data[ATTR_VERSION]
if version != self.docker.version:
if version != self.instance.version:
_LOGGER.info("Restore image for addon %s", self._id)
image_file = Path(temp, "image.tar")
if image_file.is_file():
await self.docker.import_image(image_file, version)
await self.instance.import_image(image_file, version)
else:
if await self.docker.install(version):
await self.docker.cleanup()
if await self.instance.install(version):
await self.instance.cleanup()
else:
await self.docker.stop()
await self.instance.stop()
# restore data
def _restore_data():
@ -717,9 +723,9 @@ class Addon(object):
try:
_LOGGER.info("Restore data for addon %s", self._id)
await self.loop.run_in_executor(None, _restore_data)
await self._loop.run_in_executor(None, _restore_data)
except shutil.Error as err:
_LOGGER.error("Can't restore origin data -> %s", err)
_LOGGER.error("Can't restore origin data: %s", err)
return False
# run addon

View File

@ -3,15 +3,16 @@ from pathlib import Path
from .validate import SCHEMA_BUILD_CONFIG, BASE_IMAGE
from ..const import ATTR_SQUASH, ATTR_BUILD_FROM, ATTR_ARGS, META_ADDON
from ..tools import JsonConfig
from ..coresys import CoreSysAttributes
from ..utils.json import JsonConfig
class AddonBuild(JsonConfig):
class AddonBuild(JsonConfig, CoreSysAttributes):
"""Handle build options for addons."""
def __init__(self, config, addon):
def __init__(self, coresys, addon):
"""Initialize addon builder."""
self.config = config
self.coresys = coresys
self.addon = addon
super().__init__(
@ -25,7 +26,7 @@ class AddonBuild(JsonConfig):
def base_image(self):
"""Base images for this addon."""
return self._data[ATTR_BUILD_FROM].get(
self.config.arch, BASE_IMAGE[self.config.arch])
self._arch, BASE_IMAGE[self._arch])
@property
def squash(self):
@ -41,13 +42,13 @@ class AddonBuild(JsonConfig):
"""Create a dict with docker build arguments."""
args = {
'path': str(self.addon.path_location),
'tag': "{}:{}".format(self.addon.image, version),
'tag': f"{self.addon.image}:{version}",
'pull': True,
'forcerm': True,
'squash': self.squash,
'labels': {
'io.hass.version': version,
'io.hass.arch': self.config.arch,
'io.hass.arch': self._arch,
'io.hass.type': META_ADDON,
'io.hass.name': self.addon.name,
'io.hass.description': self.addon.description,
@ -55,7 +56,7 @@ class AddonBuild(JsonConfig):
'buildargs': {
'BUILD_FROM': self.base_image,
'BUILD_VERSION': version,
'BUILD_ARCH': self.config.arch,
'BUILD_ARCH': self._arch,
**self.additional_args,
}
}

View File

@ -7,24 +7,25 @@ from pathlib import Path
import voluptuous as vol
from voluptuous.humanize import humanize_error
from .util import extract_hash_from_path
from .utils import extract_hash_from_path
from .validate import (
SCHEMA_ADDON_CONFIG, SCHEMA_ADDON_FILE, SCHEMA_REPOSITORY_CONFIG)
from ..const import (
FILE_HASSIO_ADDONS, ATTR_VERSION, ATTR_SLUG, ATTR_REPOSITORY, ATTR_LOCATON,
REPOSITORY_CORE, REPOSITORY_LOCAL, ATTR_USER, ATTR_SYSTEM)
from ..tools import JsonConfig, read_json_file
from ..coresys import CoreSysAttributes
from ..utils.json import JsonConfig, read_json_file
_LOGGER = logging.getLogger(__name__)
class Data(JsonConfig):
class Data(JsonConfig, CoreSysAttributes):
"""Hold data for addons inside HassIO."""
def __init__(self, config):
def __init__(self, coresys):
"""Initialize data holder."""
super().__init__(FILE_HASSIO_ADDONS, SCHEMA_ADDON_FILE)
self.config = config
self.coresys = coresys
self._repositories = {}
self._cache = {}
@ -55,17 +56,17 @@ class Data(JsonConfig):
# read core repository
self._read_addons_folder(
self.config.path_addons_core, REPOSITORY_CORE)
self._config.path_addons_core, REPOSITORY_CORE)
# read local repository
self._read_addons_folder(
self.config.path_addons_local, REPOSITORY_LOCAL)
self._config.path_addons_local, REPOSITORY_LOCAL)
# add built-in repositories information
self._set_builtin_repositories()
# read custom git repositories
for repository_element in self.config.path_addons_git.iterdir():
for repository_element in self._config.path_addons_git.iterdir():
if repository_element.is_dir():
self._read_git_repository(repository_element)
@ -118,7 +119,7 @@ class Data(JsonConfig):
_LOGGER.warning("Can't read %s", addon)
except vol.Invalid as ex:
_LOGGER.warning("Can't read %s -> %s", addon,
_LOGGER.warning("Can't read %s: %s", addon,
humanize_error(addon_config, ex))
def _set_builtin_repositories(self):
@ -127,7 +128,7 @@ class Data(JsonConfig):
builtin_file = Path(__file__).parent.joinpath('built-in.json')
builtin_data = read_json_file(builtin_file)
except (OSError, json.JSONDecodeError) as err:
_LOGGER.warning("Can't read built-in.json -> %s", err)
_LOGGER.warning("Can't read built-in json: %s", err)
return
# core repository

View File

@ -7,33 +7,33 @@ import shutil
import git
from .util import get_hash_from_repository
from .utils import get_hash_from_repository
from ..const import URL_HASSIO_ADDONS
from ..coresys import CoreSysAttributes
_LOGGER = logging.getLogger(__name__)
class GitRepo(object):
class GitRepo(CoreSysAttributes):
"""Manage addons git repo."""
def __init__(self, config, loop, path, url):
def __init__(self, coresys, path, url):
"""Initialize git base wrapper."""
self.config = config
self.loop = loop
self.coresys = coresys
self.repo = None
self.path = path
self.url = url
self._lock = asyncio.Lock(loop=loop)
self.lock = asyncio.Lock(loop=coresys.loop)
async def load(self):
"""Init git addon repo."""
if not self.path.is_dir():
return await self.clone()
async with self._lock:
async with self.lock:
try:
_LOGGER.info("Load addon %s repository", self.path)
self.repo = await self.loop.run_in_executor(
self.repo = await self._loop.run_in_executor(
None, git.Repo, str(self.path))
except (git.InvalidGitRepositoryError, git.NoSuchPathError,
@ -45,10 +45,10 @@ class GitRepo(object):
async def clone(self):
"""Clone git addon repo."""
async with self._lock:
async with self.lock:
try:
_LOGGER.info("Clone addon %s repository", self.url)
self.repo = await self.loop.run_in_executor(
self.repo = await self._loop.run_in_executor(
None, ft.partial(
git.Repo.clone_from, self.url, str(self.path),
recursive=True))
@ -62,14 +62,14 @@ class GitRepo(object):
async def pull(self):
"""Pull git addon repo."""
if self._lock.locked():
if self.lock.locked():
_LOGGER.warning("It is already a task in progress.")
return False
async with self._lock:
async with self.lock:
try:
_LOGGER.info("Pull addon %s repository", self.url)
await self.loop.run_in_executor(
await self._loop.run_in_executor(
None, self.repo.remotes.origin.pull)
except (git.InvalidGitRepositoryError, git.NoSuchPathError,
@ -83,20 +83,22 @@ class GitRepo(object):
class GitRepoHassIO(GitRepo):
"""HassIO addons repository."""
def __init__(self, config, loop):
def __init__(self, coresys):
"""Initialize git hassio addon repository."""
super().__init__(
config, loop, config.path_addons_core, URL_HASSIO_ADDONS)
coresys, coresys.config.path_addons_core, URL_HASSIO_ADDONS)
class GitRepoCustom(GitRepo):
"""Custom addons repository."""
def __init__(self, config, loop, url):
def __init__(self, coresys, url):
"""Initialize git hassio addon repository."""
path = Path(config.path_addons_git, get_hash_from_repository(url))
path = Path(
coresys.config.path_addons_git,
get_hash_from_repository(url))
super().__init__(config, loop, path, url)
super().__init__(coresys, path, url)
def remove(self):
"""Remove a custom addon."""

View File

@ -1,18 +1,19 @@
"""Represent a HassIO repository."""
from .git import GitRepoHassIO, GitRepoCustom
from .util import get_hash_from_repository
from .utils import get_hash_from_repository
from ..const import (
REPOSITORY_CORE, REPOSITORY_LOCAL, ATTR_NAME, ATTR_URL, ATTR_MAINTAINER)
from ..coresys import CoreSysAttributes
UNKNOWN = 'unknown'
class Repository(object):
class Repository(CoreSysAttributes):
"""Repository in HassIO."""
def __init__(self, config, loop, data, repository):
def __init__(self, coresys, repository):
"""Initialize repository object."""
self.data = data
self.coresys = coresys
self.source = None
self.git = None
@ -20,16 +21,16 @@ class Repository(object):
self._id = repository
elif repository == REPOSITORY_CORE:
self._id = repository
self.git = GitRepoHassIO(config, loop)
self.git = GitRepoHassIO(coresys)
else:
self._id = get_hash_from_repository(repository)
self.git = GitRepoCustom(config, loop, repository)
self.git = GitRepoCustom(coresys, repository)
self.source = repository
@property
def _mesh(self):
"""Return data struct repository."""
return self.data.repositories.get(self._id, {})
return self._addons.data.repositories.get(self._id, {})
@property
def slug(self):

View File

@ -209,8 +209,7 @@ def validate_options(raw_schema):
# normal value
options[key] = _single_validate(typ, value, key)
except (IndexError, KeyError):
raise vol.Invalid(
"Type error for {}.".format(key)) from None
raise vol.Invalid(f"Type error for {key}") from None
_check_missing_options(raw_schema, options, 'root')
return options
@ -224,7 +223,7 @@ def _single_validate(typ, value, key):
"""Validate a single element."""
# if required argument
if value is None:
raise vol.Invalid("Missing required option '{}'.".format(key))
raise vol.Invalid(f"Missing required option '{key}'")
# parse extend data from type
match = RE_SCHEMA_ELEMENT.match(typ)
@ -253,7 +252,7 @@ def _single_validate(typ, value, key):
elif typ.startswith(V_MATCH):
return vol.Match(match.group('match'))(str(value))
raise vol.Invalid("Fatal error for {} type {}".format(key, typ))
raise vol.Invalid(f"Fatal error for {key} type {typ}")
def _nested_validate_list(typ, data_list, key):
@ -299,5 +298,4 @@ def _check_missing_options(origin, exists, root):
if isinstance(origin[miss_opt], str) and \
origin[miss_opt].endswith("?"):
continue
raise vol.Invalid(
"Missing option {} in {}".format(miss_opt, root))
raise vol.Invalid(f"Missing option {miss_opt} in {root}")

View File

@ -12,26 +12,39 @@ from .proxy import APIProxy
from .supervisor import APISupervisor
from .security import APISecurity
from .snapshots import APISnapshots
from ..coresys import CoreSysAttributes
_LOGGER = logging.getLogger(__name__)
class RestAPI(object):
class RestAPI(CoreSysAttributes):
"""Handle rest api for hassio."""
def __init__(self, config, loop):
def __init__(self, coresys):
"""Initialize docker base wrapper."""
self.config = config
self.loop = loop
self.webapp = web.Application(loop=self.loop)
self.coresys = coresys
self.webapp = web.Application(loop=self._loop)
# service stuff
self._handler = None
self.server = None
def register_host(self, host_control, hardware):
async def load(self):
"""Register REST API Calls."""
self._register_supervisor()
self._register_host()
self._register_homeassistant()
self._register_proxy()
self._register_panel()
self._register_addons()
self._register_snapshots()
self._register_security()
self._register_network()
def _register_host(self):
"""Register hostcontrol function."""
api_host = APIHost(self.config, self.loop, host_control, hardware)
api_host = APIHost()
api_host.coresys = self.coresys
self.webapp.router.add_get('/host/info', api_host.info)
self.webapp.router.add_get('/host/hardware', api_host.hardware)
@ -39,20 +52,20 @@ class RestAPI(object):
self.webapp.router.add_post('/host/shutdown', api_host.shutdown)
self.webapp.router.add_post('/host/update', api_host.update)
self.webapp.router.add_post('/host/options', api_host.options)
self.webapp.router.add_post('/host/reload', api_host.reload)
def register_network(self, host_control):
def _register_network(self):
"""Register network function."""
api_net = APINetwork(self.config, self.loop, host_control)
api_net = APINetwork()
api_net.coresys = self.coresys
self.webapp.router.add_get('/network/info', api_net.info)
self.webapp.router.add_post('/network/options', api_net.options)
def register_supervisor(self, supervisor, snapshots, addons, host_control,
updater):
def _register_supervisor(self):
"""Register supervisor function."""
api_supervisor = APISupervisor(
self.config, self.loop, supervisor, snapshots, addons,
host_control, updater)
api_supervisor = APISupervisor()
api_supervisor.coresys = self.coresys
self.webapp.router.add_get('/supervisor/ping', api_supervisor.ping)
self.webapp.router.add_get('/supervisor/info', api_supervisor.info)
@ -64,9 +77,10 @@ class RestAPI(object):
'/supervisor/options', api_supervisor.options)
self.webapp.router.add_get('/supervisor/logs', api_supervisor.logs)
def register_homeassistant(self, homeassistant):
def _register_homeassistant(self):
"""Register homeassistant function."""
api_hass = APIHomeAssistant(self.config, self.loop, homeassistant)
api_hass = APIHomeAssistant()
api_hass.coresys = self.coresys
self.webapp.router.add_get('/homeassistant/info', api_hass.info)
self.webapp.router.add_get('/homeassistant/logs', api_hass.logs)
@ -77,9 +91,10 @@ class RestAPI(object):
self.webapp.router.add_post('/homeassistant/start', api_hass.start)
self.webapp.router.add_post('/homeassistant/check', api_hass.check)
def register_proxy(self, homeassistant):
def _register_proxy(self):
"""Register HomeAssistant API Proxy."""
api_proxy = APIProxy(self.loop, homeassistant)
api_proxy = APIProxy()
api_proxy.coresys = self.coresys
self.webapp.router.add_get(
'/homeassistant/api/websocket', api_proxy.websocket)
@ -92,9 +107,10 @@ class RestAPI(object):
self.webapp.router.add_get(
'/homeassistant/api', api_proxy.api)
def register_addons(self, addons):
def _register_addons(self):
"""Register homeassistant function."""
api_addons = APIAddons(self.config, self.loop, addons)
api_addons = APIAddons()
api_addons.coresys = self.coresys
self.webapp.router.add_get('/addons', api_addons.list)
self.webapp.router.add_post('/addons/reload', api_addons.reload)
@ -120,18 +136,20 @@ class RestAPI(object):
'/addons/{addon}/changelog', api_addons.changelog)
self.webapp.router.add_post('/addons/{addon}/stdin', api_addons.stdin)
def register_security(self):
def _register_security(self):
"""Register security function."""
api_security = APISecurity(self.config, self.loop)
api_security = APISecurity()
api_security.coresys = self.coresys
self.webapp.router.add_get('/security/info', api_security.info)
self.webapp.router.add_post('/security/options', api_security.options)
self.webapp.router.add_post('/security/totp', api_security.totp)
self.webapp.router.add_post('/security/session', api_security.session)
def register_snapshots(self, snapshots):
def _register_snapshots(self):
"""Register snapshots function."""
api_snapshots = APISnapshots(self.config, self.loop, snapshots)
api_snapshots = APISnapshots()
api_snapshots.coresys = self.coresys
self.webapp.router.add_get('/snapshots', api_snapshots.list)
self.webapp.router.add_post('/snapshots/reload', api_snapshots.reload)
@ -151,11 +169,11 @@ class RestAPI(object):
'/snapshots/{snapshot}/restore/partial',
api_snapshots.restore_partial)
def register_panel(self):
def _register_panel(self):
"""Register panel for homeassistant."""
def create_panel_response(build_type):
"""Create a function to generate a response."""
path = Path(__file__).parents[1].joinpath(
path = Path(__file__).parent.joinpath(
'panel/hassio-main-{}.html'.format(build_type))
return lambda request: web.FileResponse(path)
@ -168,10 +186,10 @@ class RestAPI(object):
async def start(self):
"""Run rest api webserver."""
self._handler = self.webapp.make_handler(loop=self.loop)
self._handler = self.webapp.make_handler(loop=self._loop)
try:
self.server = await self.loop.create_server(
self.server = await self._loop.create_server(
self._handler, "0.0.0.0", "80")
except OSError as err:
_LOGGER.fatal(

View File

@ -5,7 +5,7 @@ import logging
import voluptuous as vol
from voluptuous.humanize import humanize_error
from .util import api_process, api_process_raw, api_validate
from .utils import api_process, api_process_raw, api_validate
from ..const import (
ATTR_VERSION, ATTR_LAST_VERSION, ATTR_STATE, ATTR_BOOT, ATTR_OPTIONS,
ATTR_URL, ATTR_DESCRIPTON, ATTR_DETACHED, ATTR_NAME, ATTR_REPOSITORY,
@ -16,6 +16,7 @@ from ..const import (
ATTR_GPIO, ATTR_HOMEASSISTANT_API, ATTR_STDIN, BOOT_AUTO, BOOT_MANUAL,
ATTR_CHANGELOG, ATTR_HOST_IPC, ATTR_HOST_DBUS, ATTR_LONG_DESCRIPTION,
CONTENT_TYPE_PNG, CONTENT_TYPE_BINARY, CONTENT_TYPE_TEXT)
from ..coresys import CoreSysAttributes
from ..validate import DOCKER_PORTS
_LOGGER = logging.getLogger(__name__)
@ -32,18 +33,12 @@ SCHEMA_OPTIONS = vol.Schema({
})
class APIAddons(object):
class APIAddons(CoreSysAttributes):
"""Handle rest api for addons functions."""
def __init__(self, config, loop, addons):
"""Initialize homeassistant rest api part."""
self.config = config
self.loop = loop
self.addons = addons
def _extract_addon(self, request, check_installed=True):
"""Return addon and if not exists trow a exception."""
addon = self.addons.get(request.match_info.get('addon'))
addon = self._addons.get(request.match_info.get('addon'))
if not addon:
raise RuntimeError("Addon not exists")
@ -64,7 +59,7 @@ class APIAddons(object):
async def list(self, request):
"""Return all addons / repositories ."""
data_addons = []
for addon in self.addons.list_addons:
for addon in self._addons.list_addons:
data_addons.append({
ATTR_NAME: addon.name,
ATTR_SLUG: addon.slug,
@ -80,7 +75,7 @@ class APIAddons(object):
})
data_repositories = []
for repository in self.addons.list_repositories:
for repository in self._addons.list_repositories:
data_repositories.append({
ATTR_SLUG: repository.slug,
ATTR_NAME: repository.name,
@ -97,7 +92,7 @@ class APIAddons(object):
@api_process
async def reload(self, request):
"""Reload all addons data."""
await asyncio.shield(self.addons.reload(), loop=self.loop)
await asyncio.shield(self._addons.reload(), loop=self._loop)
return True
@api_process
@ -167,13 +162,13 @@ class APIAddons(object):
def install(self, request):
"""Install addon."""
addon = self._extract_addon(request, check_installed=False)
return asyncio.shield(addon.install(), loop=self.loop)
return asyncio.shield(addon.install(), loop=self._loop)
@api_process
def uninstall(self, request):
"""Uninstall addon."""
addon = self._extract_addon(request)
return asyncio.shield(addon.uninstall(), loop=self.loop)
return asyncio.shield(addon.uninstall(), loop=self._loop)
@api_process
def start(self, request):
@ -187,13 +182,13 @@ class APIAddons(object):
except vol.Invalid as ex:
raise RuntimeError(humanize_error(options, ex)) from None
return asyncio.shield(addon.start(), loop=self.loop)
return asyncio.shield(addon.start(), loop=self._loop)
@api_process
def stop(self, request):
"""Stop addon."""
addon = self._extract_addon(request)
return asyncio.shield(addon.stop(), loop=self.loop)
return asyncio.shield(addon.stop(), loop=self._loop)
@api_process
def update(self, request):
@ -203,13 +198,13 @@ class APIAddons(object):
if addon.last_version == addon.version_installed:
raise RuntimeError("No update available!")
return asyncio.shield(addon.update(), loop=self.loop)
return asyncio.shield(addon.update(), loop=self._loop)
@api_process
def restart(self, request):
"""Restart addon."""
addon = self._extract_addon(request)
return asyncio.shield(addon.restart(), loop=self.loop)
return asyncio.shield(addon.restart(), loop=self._loop)
@api_process
def rebuild(self, request):
@ -218,7 +213,7 @@ class APIAddons(object):
if not addon.need_build:
raise RuntimeError("Only local build addons are supported")
return asyncio.shield(addon.rebuild(), loop=self.loop)
return asyncio.shield(addon.rebuild(), loop=self._loop)
@api_process_raw(CONTENT_TYPE_BINARY)
def logs(self, request):
@ -254,4 +249,4 @@ class APIAddons(object):
raise RuntimeError("STDIN not supported by addons")
data = await request.read()
return await asyncio.shield(addon.write_stdin(data), loop=self.loop)
return await asyncio.shield(addon.write_stdin(data), loop=self._loop)

View File

@ -4,11 +4,12 @@ import logging
import voluptuous as vol
from .util import api_process, api_process_raw, api_validate
from .utils import api_process, api_process_raw, api_validate
from ..const import (
ATTR_VERSION, ATTR_LAST_VERSION, ATTR_DEVICES, ATTR_IMAGE, ATTR_CUSTOM,
ATTR_BOOT, ATTR_PORT, ATTR_PASSWORD, ATTR_SSL, ATTR_WATCHDOG,
CONTENT_TYPE_BINARY)
from ..coresys import CoreSysAttributes
from ..validate import HASS_DEVICES, NETWORK_PORT
_LOGGER = logging.getLogger(__name__)
@ -32,28 +33,22 @@ SCHEMA_VERSION = vol.Schema({
})
class APIHomeAssistant(object):
class APIHomeAssistant(CoreSysAttributes):
"""Handle rest api for homeassistant functions."""
def __init__(self, config, loop, homeassistant):
"""Initialize homeassistant rest api part."""
self.config = config
self.loop = loop
self.homeassistant = homeassistant
@api_process
async def info(self, request):
"""Return host information."""
return {
ATTR_VERSION: self.homeassistant.version,
ATTR_LAST_VERSION: self.homeassistant.last_version,
ATTR_IMAGE: self.homeassistant.image,
ATTR_DEVICES: self.homeassistant.devices,
ATTR_CUSTOM: self.homeassistant.is_custom_image,
ATTR_BOOT: self.homeassistant.boot,
ATTR_PORT: self.homeassistant.api_port,
ATTR_SSL: self.homeassistant.api_ssl,
ATTR_WATCHDOG: self.homeassistant.watchdog,
ATTR_VERSION: self._homeassistant.version,
ATTR_LAST_VERSION: self._homeassistant.last_version,
ATTR_IMAGE: self._homeassistant.image,
ATTR_DEVICES: self._homeassistant.devices,
ATTR_CUSTOM: self._homeassistant.is_custom_image,
ATTR_BOOT: self._homeassistant.boot,
ATTR_PORT: self._homeassistant.api_port,
ATTR_SSL: self._homeassistant.api_ssl,
ATTR_WATCHDOG: self._homeassistant.watchdog,
}
@api_process
@ -62,26 +57,26 @@ class APIHomeAssistant(object):
body = await api_validate(SCHEMA_OPTIONS, request)
if ATTR_DEVICES in body:
self.homeassistant.devices = body[ATTR_DEVICES]
self._homeassistant.devices = body[ATTR_DEVICES]
if ATTR_IMAGE in body:
self.homeassistant.set_custom(
self._homeassistant.set_custom(
body[ATTR_IMAGE], body[ATTR_LAST_VERSION])
if ATTR_BOOT in body:
self.homeassistant.boot = body[ATTR_BOOT]
self._homeassistant.boot = body[ATTR_BOOT]
if ATTR_PORT in body:
self.homeassistant.api_port = body[ATTR_PORT]
self._homeassistant.api_port = body[ATTR_PORT]
if ATTR_PASSWORD in body:
self.homeassistant.api_password = body[ATTR_PASSWORD]
self._homeassistant.api_password = body[ATTR_PASSWORD]
if ATTR_SSL in body:
self.homeassistant.api_ssl = body[ATTR_SSL]
self._homeassistant.api_ssl = body[ATTR_SSL]
if ATTR_WATCHDOG in body:
self.homeassistant.watchdog = body[ATTR_WATCHDOG]
self._homeassistant.watchdog = body[ATTR_WATCHDOG]
return True
@ -89,38 +84,38 @@ class APIHomeAssistant(object):
async def update(self, request):
"""Update homeassistant."""
body = await api_validate(SCHEMA_VERSION, request)
version = body.get(ATTR_VERSION, self.homeassistant.last_version)
version = body.get(ATTR_VERSION, self._homeassistant.last_version)
if version == self.homeassistant.version:
if version == self._homeassistant.version:
raise RuntimeError("Version {} is already in use".format(version))
return await asyncio.shield(
self.homeassistant.update(version), loop=self.loop)
self._homeassistant.update(version), loop=self._loop)
@api_process
def stop(self, request):
"""Stop homeassistant."""
return asyncio.shield(self.homeassistant.stop(), loop=self.loop)
return asyncio.shield(self._homeassistant.stop(), loop=self._loop)
@api_process
def start(self, request):
"""Start homeassistant."""
return asyncio.shield(self.homeassistant.run(), loop=self.loop)
return asyncio.shield(self._homeassistant.run(), loop=self._loop)
@api_process
def restart(self, request):
"""Restart homeassistant."""
return asyncio.shield(self.homeassistant.restart(), loop=self.loop)
return asyncio.shield(self._homeassistant.restart(), loop=self._loop)
@api_process_raw(CONTENT_TYPE_BINARY)
def logs(self, request):
"""Return homeassistant docker logs."""
return self.homeassistant.logs()
return self._homeassistant.logs()
@api_process
async def check(self, request):
"""Check config of homeassistant."""
code, message = await self.homeassistant.check_config()
code, message = await self._homeassistant.check_config()
if not code:
raise RuntimeError(message)

View File

@ -4,11 +4,12 @@ import logging
import voluptuous as vol
from .util import api_process_hostcontrol, api_process, api_validate
from .utils import api_process_hostcontrol, api_process, api_validate
from ..const import (
ATTR_VERSION, ATTR_LAST_VERSION, ATTR_TYPE, ATTR_HOSTNAME, ATTR_FEATURES,
ATTR_OS, ATTR_SERIAL, ATTR_INPUT, ATTR_DISK, ATTR_AUDIO, ATTR_AUDIO_INPUT,
ATTR_AUDIO_OUTPUT, ATTR_GPIO)
from ..coresys import CoreSysAttributes
from ..validate import ALSA_CHANNEL
_LOGGER = logging.getLogger(__name__)
@ -23,26 +24,19 @@ SCHEMA_OPTIONS = vol.Schema({
})
class APIHost(object):
class APIHost(CoreSysAttributes):
"""Handle rest api for host functions."""
def __init__(self, config, loop, host_control, hardware):
"""Initialize host rest api part."""
self.config = config
self.loop = loop
self.host_control = host_control
self.local_hw = hardware
@api_process
async def info(self, request):
"""Return host information."""
return {
ATTR_TYPE: self.host_control.type,
ATTR_VERSION: self.host_control.version,
ATTR_LAST_VERSION: self.host_control.last_version,
ATTR_FEATURES: self.host_control.features,
ATTR_HOSTNAME: self.host_control.hostname,
ATTR_OS: self.host_control.os_info,
ATTR_TYPE: self._host_control.type,
ATTR_VERSION: self._host_control.version,
ATTR_LAST_VERSION: self._host_control.last_version,
ATTR_FEATURES: self._host_control.features,
ATTR_HOSTNAME: self._host_control.hostname,
ATTR_OS: self._host_control.os_info,
}
@api_process
@ -51,41 +45,47 @@ class APIHost(object):
body = await api_validate(SCHEMA_OPTIONS, request)
if ATTR_AUDIO_OUTPUT in body:
self.config.audio_output = body[ATTR_AUDIO_OUTPUT]
self._config.audio_output = body[ATTR_AUDIO_OUTPUT]
if ATTR_AUDIO_INPUT in body:
self.config.audio_input = body[ATTR_AUDIO_INPUT]
self._config.audio_input = body[ATTR_AUDIO_INPUT]
return True
@api_process_hostcontrol
def reboot(self, request):
"""Reboot host."""
return self.host_control.reboot()
return self._host_control.reboot()
@api_process_hostcontrol
def shutdown(self, request):
"""Poweroff host."""
return self.host_control.shutdown()
return self._host_control.shutdown()
@api_process_hostcontrol
async def reload(self, request):
"""Reload host data."""
await self._host_control.load()
return True
@api_process_hostcontrol
async def update(self, request):
"""Update host OS."""
body = await api_validate(SCHEMA_VERSION, request)
version = body.get(ATTR_VERSION, self.host_control.last_version)
version = body.get(ATTR_VERSION, self._host_control.last_version)
if version == self.host_control.version:
raise RuntimeError("Version {} is already in use".format(version))
if version == self._host_control.version:
raise RuntimeError(f"Version {version} is already in use")
return await asyncio.shield(
self.host_control.update(version=version), loop=self.loop)
self._host_control.update(version=version), loop=self._loop)
@api_process
async def hardware(self, request):
"""Return local hardware infos."""
return {
ATTR_SERIAL: list(self.local_hw.serial_devices),
ATTR_INPUT: list(self.local_hw.input_devices),
ATTR_DISK: list(self.local_hw.disk_devices),
ATTR_GPIO: list(self.local_hw.gpio_devices),
ATTR_AUDIO: self.local_hw.audio_devices,
ATTR_SERIAL: list(self._hardware.serial_devices),
ATTR_INPUT: list(self._hardware.input_devices),
ATTR_DISK: list(self._hardware.disk_devices),
ATTR_GPIO: list(self._hardware.gpio_devices),
ATTR_AUDIO: self._hardware.audio_devices,
}

View File

@ -3,8 +3,9 @@ import logging
import voluptuous as vol
from .util import api_process, api_process_hostcontrol, api_validate
from .utils import api_process, api_process_hostcontrol, api_validate
from ..const import ATTR_HOSTNAME
from ..coresys import CoreSysAttributes
_LOGGER = logging.getLogger(__name__)
@ -14,20 +15,14 @@ SCHEMA_OPTIONS = vol.Schema({
})
class APINetwork(object):
class APINetwork(CoreSysAttributes):
"""Handle rest api for network functions."""
def __init__(self, config, loop, host_control):
"""Initialize network rest api part."""
self.config = config
self.loop = loop
self.host_control = host_control
@api_process
async def info(self, request):
"""Show network settings."""
return {
ATTR_HOSTNAME: self.host_control.hostname,
ATTR_HOSTNAME: self._host_control.hostname,
}
@api_process_hostcontrol
@ -37,7 +32,7 @@ class APINetwork(object):
# hostname
if ATTR_HOSTNAME in body:
if self.host_control.hostname != body[ATTR_HOSTNAME]:
await self.host_control.set_hostname(body[ATTR_HOSTNAME])
if self._host_control.hostname != body[ATTR_HOSTNAME]:
await self._host_control.set_hostname(body[ATTR_HOSTNAME])
return True

View File

@ -9,40 +9,33 @@ from aiohttp.hdrs import CONTENT_TYPE
import async_timeout
from ..const import HEADER_HA_ACCESS
from ..coresys import CoreSysAttributes
_LOGGER = logging.getLogger(__name__)
class APIProxy(object):
class APIProxy(CoreSysAttributes):
"""API Proxy for Home-Assistant."""
def __init__(self, loop, homeassistant):
"""Initialize api proxy."""
self.loop = loop
self.homeassistant = homeassistant
# Use homeassistant websession to ignore SSL
self.websession = homeassistant.websession
async def _api_client(self, request, path, timeout=300):
"""Return a client request with proxy origin for Home-Assistant."""
url = f"{self.homeassistant.api_url}/api/{path}"
url = f"{self._homeassistant.api_url}/api/{path}"
try:
data = None
headers = {}
method = getattr(self.websession, request.method.lower())
method = getattr(self._websession_ssl, request.method.lower())
# read data
with async_timeout.timeout(30, loop=self.loop):
with async_timeout.timeout(30, loop=self._loop):
data = await request.read()
if data:
headers.update({CONTENT_TYPE: request.content_type})
# need api password?
if self.homeassistant.api_password:
headers = {HEADER_HA_ACCESS: self.homeassistant.api_password}
if self._homeassistant.api_password:
headers = {HEADER_HA_ACCESS: self._homeassistant.api_password}
# reset headers
if not headers:
@ -110,7 +103,7 @@ class APIProxy(object):
url = f"{self.homeassistant.api_url}/api/websocket"
try:
client = await self.websession.ws_connect(
client = await self._websession_ssl.ws_connect(
url, heartbeat=60, verify_ssl=False)
# handle authentication
@ -121,7 +114,7 @@ class APIProxy(object):
elif data.get('type') == 'auth_required':
await client.send_json({
'type': 'auth',
'api_password': self.homeassistant.api_password,
'api_password': self._homeassistant.api_password,
})
_LOGGER.error("Authentication to Home-Assistant websocket")
@ -154,15 +147,15 @@ class APIProxy(object):
while not server.closed and not client.closed:
if not client_read:
client_read = asyncio.ensure_future(
client.receive_str(), loop=self.loop)
client.receive_str(), loop=self._loop)
if not server_read:
server_read = asyncio.ensure_future(
server.receive_str(), loop=self.loop)
server.receive_str(), loop=self._loop)
# wait until data need to be processed
await asyncio.wait(
[client_read, server_read],
loop=self.loop, return_when=asyncio.FIRST_COMPLETED
loop=self._loop, return_when=asyncio.FIRST_COMPLETED
)
# server

View File

@ -10,8 +10,9 @@ import voluptuous as vol
import pyotp
import pyqrcode
from .util import api_process, api_validate, hash_password
from .utils import api_process, api_validate, hash_password
from ..const import ATTR_INITIALIZE, ATTR_PASSWORD, ATTR_TOTP, ATTR_SESSION
from ..coresys import CoreSysAttributes
_LOGGER = logging.getLogger(__name__)
@ -24,29 +25,24 @@ SCHEMA_SESSION = SCHEMA_PASSWORD.extend({
})
class APISecurity(object):
class APISecurity(CoreSysAttributes):
"""Handle rest api for security functions."""
def __init__(self, config, loop):
"""Initialize security rest api part."""
self.config = config
self.loop = loop
def _check_password(self, body):
"""Check if password is valid and security is initialize."""
if not self.config.security_initialize:
if not self._config.security_initialize:
raise RuntimeError("First set a password")
password = hash_password(body[ATTR_PASSWORD])
if password != self.config.security_password:
if password != self._config.security_password:
raise RuntimeError("Wrong password")
@api_process
async def info(self, request):
"""Return host information."""
return {
ATTR_INITIALIZE: self.config.security_initialize,
ATTR_TOTP: self.config.security_totp is not None,
ATTR_INITIALIZE: self._config.security_initialize,
ATTR_TOTP: self._config.security_totp is not None,
}
@api_process
@ -54,11 +50,11 @@ class APISecurity(object):
"""Set options / password."""
body = await api_validate(SCHEMA_PASSWORD, request)
if self.config.security_initialize:
if self._config.security_initialize:
raise RuntimeError("Password is already set!")
self.config.security_password = hash_password(body[ATTR_PASSWORD])
self.config.security_initialize = True
self._config.security_password = hash_password(body[ATTR_PASSWORD])
self._config.security_initialize = True
return True
@api_process
@ -78,7 +74,7 @@ class APISecurity(object):
qrcode.svg(buff)
# finish
self.config.security_totp = totp_init_key
self._config.security_totp = totp_init_key
return web.Response(body=buff.getvalue(), content_type='image/svg+xml')
@api_process
@ -88,8 +84,8 @@ class APISecurity(object):
self._check_password(body)
# check TOTP
if self.config.security_totp:
totp = pyotp.TOTP(self.config.security_totp)
if self._config.security_totp:
totp = pyotp.TOTP(self._config.security_totp)
if body[ATTR_TOTP] != totp.now():
raise RuntimeError("Invalid TOTP token!")
@ -98,5 +94,5 @@ class APISecurity(object):
session = hashlib.sha256(os.urandom(54)).hexdigest()
# store session
self.config.add_security_session(session, valid_until)
self._config.add_security_session(session, valid_until)
return {ATTR_SESSION: session}

View File

@ -4,12 +4,13 @@ import logging
import voluptuous as vol
from .util import api_process, api_validate
from .utils import api_process, api_validate
from ..snapshots.validate import ALL_FOLDERS
from ..const import (
ATTR_NAME, ATTR_SLUG, ATTR_DATE, ATTR_ADDONS, ATTR_REPOSITORIES,
ATTR_HOMEASSISTANT, ATTR_VERSION, ATTR_SIZE, ATTR_FOLDERS, ATTR_TYPE,
ATTR_DEVICES, ATTR_SNAPSHOTS)
from ..coresys import CoreSysAttributes
_LOGGER = logging.getLogger(__name__)
@ -31,18 +32,12 @@ SCHEMA_SNAPSHOT_PARTIAL = SCHEMA_SNAPSHOT_FULL.extend({
})
class APISnapshots(object):
class APISnapshots(CoreSysAttributes):
"""Handle rest api for snapshot functions."""
def __init__(self, config, loop, snapshots):
"""Initialize network rest api part."""
self.config = config
self.loop = loop
self.snapshots = snapshots
def _extract_snapshot(self, request):
"""Return addon and if not exists trow a exception."""
snapshot = self.snapshots.get(request.match_info.get('snapshot'))
snapshot = self._snapshots.get(request.match_info.get('snapshot'))
if not snapshot:
raise RuntimeError("Snapshot not exists")
return snapshot
@ -51,7 +46,7 @@ class APISnapshots(object):
async def list(self, request):
"""Return snapshot list."""
data_snapshots = []
for snapshot in self.snapshots.list_snapshots:
for snapshot in self._snapshots.list_snapshots:
data_snapshots.append({
ATTR_SLUG: snapshot.slug,
ATTR_NAME: snapshot.name,
@ -65,7 +60,7 @@ class APISnapshots(object):
@api_process
async def reload(self, request):
"""Reload snapshot list."""
await asyncio.shield(self.snapshots.reload(), loop=self.loop)
await asyncio.shield(self._snapshots.reload(), loop=self._loop)
return True
@api_process
@ -101,21 +96,21 @@ class APISnapshots(object):
"""Full-Snapshot a snapshot."""
body = await api_validate(SCHEMA_SNAPSHOT_FULL, request)
return await asyncio.shield(
self.snapshots.do_snapshot_full(**body), loop=self.loop)
self._snapshots.do_snapshot_full(**body), loop=self._loop)
@api_process
async def snapshot_partial(self, request):
"""Partial-Snapshot a snapshot."""
body = await api_validate(SCHEMA_SNAPSHOT_PARTIAL, request)
return await asyncio.shield(
self.snapshots.do_snapshot_partial(**body), loop=self.loop)
self._snapshots.do_snapshot_partial(**body), loop=self._loop)
@api_process
def restore_full(self, request):
"""Full-Restore a snapshot."""
snapshot = self._extract_snapshot(request)
return asyncio.shield(
self.snapshots.do_restore_full(snapshot), loop=self.loop)
self._snapshots.do_restore_full(snapshot), loop=self._loop)
@api_process
async def restore_partial(self, request):
@ -124,12 +119,12 @@ class APISnapshots(object):
body = await api_validate(SCHEMA_SNAPSHOT_PARTIAL, request)
return await asyncio.shield(
self.snapshots.do_restore_partial(snapshot, **body),
loop=self.loop
self._snapshots.do_restore_partial(snapshot, **body),
loop=self._loop
)
@api_process
async def remove(self, request):
"""Remove a snapshot."""
snapshot = self._extract_snapshot(request)
return self.snapshots.remove(snapshot)
return self._snapshots.remove(snapshot)

View File

@ -4,13 +4,14 @@ import logging
import voluptuous as vol
from .util import api_process, api_process_raw, api_validate
from .utils import api_process, api_process_raw, api_validate
from ..const import (
ATTR_ADDONS, ATTR_VERSION, ATTR_LAST_VERSION, ATTR_BETA_CHANNEL, ATTR_ARCH,
HASSIO_VERSION, ATTR_ADDONS_REPOSITORIES, ATTR_LOGO, ATTR_REPOSITORY,
ATTR_DESCRIPTON, ATTR_NAME, ATTR_SLUG, ATTR_INSTALLED, ATTR_TIMEZONE,
ATTR_STATE, CONTENT_TYPE_BINARY)
from ..validate import validate_timezone
ATTR_STATE, ATTR_WAIT_BOOT, CONTENT_TYPE_BINARY)
from ..coresys import CoreSysAttributes
from ..validate import validate_timezone, WAIT_BOOT
_LOGGER = logging.getLogger(__name__)
@ -19,6 +20,7 @@ SCHEMA_OPTIONS = vol.Schema({
vol.Optional(ATTR_BETA_CHANNEL): vol.Boolean(),
vol.Optional(ATTR_ADDONS_REPOSITORIES): [vol.Url()],
vol.Optional(ATTR_TIMEZONE): validate_timezone,
vol.Optional(ATTR_WAIT_BOOT): WAIT_BOOT,
})
SCHEMA_VERSION = vol.Schema({
@ -26,20 +28,9 @@ SCHEMA_VERSION = vol.Schema({
})
class APISupervisor(object):
class APISupervisor(CoreSysAttributes):
"""Handle rest api for supervisor functions."""
def __init__(self, config, loop, supervisor, snapshots, addons,
host_control, updater):
"""Initialize supervisor rest api part."""
self.config = config
self.loop = loop
self.supervisor = supervisor
self.addons = addons
self.snapshots = snapshots
self.host_control = host_control
self.updater = updater
@api_process
async def ping(self, request):
"""Return ok for signal that the api is ready."""
@ -49,7 +40,7 @@ class APISupervisor(object):
async def info(self, request):
"""Return host information."""
list_addons = []
for addon in self.addons.list_addons:
for addon in self._addons.list_addons:
if addon.is_installed:
list_addons.append({
ATTR_NAME: addon.name,
@ -64,12 +55,13 @@ class APISupervisor(object):
return {
ATTR_VERSION: HASSIO_VERSION,
ATTR_LAST_VERSION: self.updater.version_hassio,
ATTR_BETA_CHANNEL: self.updater.beta_channel,
ATTR_ARCH: self.config.arch,
ATTR_TIMEZONE: self.config.timezone,
ATTR_LAST_VERSION: self._updater.version_hassio,
ATTR_BETA_CHANNEL: self._updater.beta_channel,
ATTR_ARCH: self._arch,
ATTR_WAIT_BOOT: self._config.wait_boot,
ATTR_TIMEZONE: self._config.timezone,
ATTR_ADDONS: list_addons,
ATTR_ADDONS_REPOSITORIES: self.config.addons_repositories,
ATTR_ADDONS_REPOSITORIES: self._config.addons_repositories,
}
@api_process
@ -78,14 +70,17 @@ class APISupervisor(object):
body = await api_validate(SCHEMA_OPTIONS, request)
if ATTR_BETA_CHANNEL in body:
self.updater.beta_channel = body[ATTR_BETA_CHANNEL]
self._updater.beta_channel = body[ATTR_BETA_CHANNEL]
if ATTR_TIMEZONE in body:
self.config.timezone = body[ATTR_TIMEZONE]
self._config.timezone = body[ATTR_TIMEZONE]
if ATTR_WAIT_BOOT in body:
self._config.wait_boot = body[ATTR_WAIT_BOOT]
if ATTR_ADDONS_REPOSITORIES in body:
new = set(body[ATTR_ADDONS_REPOSITORIES])
await asyncio.shield(self.addons.load_repositories(new))
await asyncio.shield(self._addons.load_repositories(new))
return True
@ -93,25 +88,22 @@ class APISupervisor(object):
async def update(self, request):
"""Update supervisor OS."""
body = await api_validate(SCHEMA_VERSION, request)
version = body.get(ATTR_VERSION, self.updater.version_hassio)
version = body.get(ATTR_VERSION, self._updater.version_hassio)
if version == self.supervisor.version:
if version == self._supervisor.version:
raise RuntimeError("Version {} is already in use".format(version))
return await asyncio.shield(
self.supervisor.update(version), loop=self.loop)
self._supervisor.update(version), loop=self._loop)
@api_process
async def reload(self, request):
"""Reload addons, config ect."""
tasks = [
self.addons.reload(),
self.snapshots.reload(),
self.updater.fetch_data(),
self.host_control.load()
self._updater.reload(),
]
results, _ = await asyncio.shield(
asyncio.wait(tasks, loop=self.loop), loop=self.loop)
asyncio.wait(tasks, loop=self._loop), loop=self._loop)
for result in results:
if result.exception() is not None:
@ -122,4 +114,4 @@ class APISupervisor(object):
@api_process_raw(CONTENT_TYPE_BINARY)
def logs(self, request):
"""Return supervisor docker logs."""
return self.supervisor.logs()
return self._supervisor.logs()

View File

@ -49,7 +49,8 @@ def api_process_hostcontrol(method):
"""Wrap HostControl calls to rest api."""
async def wrap_hostcontrol(api, *args, **kwargs):
"""Return host information."""
if not api.host_control.active:
# pylint: disable=protected-access
if not api._host_control.active:
raise HTTPServiceUnavailable()
try:

View File

@ -7,15 +7,41 @@ from pathlib import Path
from colorlog import ColoredFormatter
from .addons import AddonManager
from .api import RestAPI
from .const import SOCKET_DOCKER
from .config import CoreConfig
from .coresys import CoreSys
from .supervisor import Supervisor
from .homeassistant import HomeAssistant
from .snapshots import SnapshotsManager
from .tasks import Tasks
from .updater import Updater
_LOGGER = logging.getLogger(__name__)
def initialize_system_data():
def initialize_coresys(loop):
"""Initialize HassIO coresys/objects."""
coresys = CoreSys(loop)
# Initialize core objects
coresys.updater = Updater(coresys)
coresys.api = RestAPI(coresys)
coresys.supervisor = Supervisor(coresys)
coresys.homeassistant = HomeAssistant(coresys)
coresys.addons = AddonManager(coresys)
coresys.snapshots = SnapshotsManager(coresys)
coresys.tasks = Tasks(coresys)
# bootstrap config
initialize_system_data(coresys)
return coresys
def initialize_system_data(coresys):
"""Setup default config and create folders."""
config = CoreConfig()
config = coresys.config
# homeassistant config folder
if not config.path_config.is_dir():
@ -62,8 +88,9 @@ def initialize_system_data():
return config
def migrate_system_env(config):
def migrate_system_env(coresys):
"""Cleanup some stuff after update."""
config = coresys.config
# hass.io 0.37 -> 0.38
old_build = Path(config.path_hassio, "addons/build")

View File

@ -7,8 +7,9 @@ from pathlib import Path, PurePath
from .const import (
FILE_HASSIO_CONFIG, HASSIO_DATA, ATTR_SECURITY, ATTR_SESSIONS,
ATTR_PASSWORD, ATTR_TOTP, ATTR_TIMEZONE, ATTR_ADDONS_CUSTOM_LIST,
ATTR_AUDIO_INPUT, ATTR_AUDIO_OUTPUT, ATTR_LAST_BOOT)
from .tools import JsonConfig, parse_datetime
ATTR_AUDIO_INPUT, ATTR_AUDIO_OUTPUT, ATTR_LAST_BOOT, ATTR_WAIT_BOOT)
from .utils.dt import parse_datetime
from .utils.json import JsonConfig
from .validate import SCHEMA_HASSIO_CONFIG
_LOGGER = logging.getLogger(__name__)
@ -35,7 +36,6 @@ class CoreConfig(JsonConfig):
def __init__(self):
"""Initialize config object."""
super().__init__(FILE_HASSIO_CONFIG, SCHEMA_HASSIO_CONFIG)
self.arch = None
@property
def timezone(self):
@ -48,6 +48,17 @@ class CoreConfig(JsonConfig):
self._data[ATTR_TIMEZONE] = value
self.save()
@property
def wait_boot(self):
"""Return wait time for auto boot stages."""
return self._data[ATTR_WAIT_BOOT]
@wait_boot.setter
def wait_boot(self, value):
"""Set wait boot time."""
self._data[ATTR_WAIT_BOOT] = value
self.save()
@property
def last_boot(self):
"""Return last boot datetime."""

View File

@ -11,15 +11,6 @@ URL_HASSIO_ADDONS = 'https://github.com/home-assistant/hassio-addons'
HASSIO_DATA = Path("/data")
RUN_UPDATE_INFO_TASKS = 28800
RUN_UPDATE_SUPERVISOR_TASKS = 29100
RUN_UPDATE_ADDONS_TASKS = 57600
RUN_RELOAD_ADDONS_TASKS = 28800
RUN_RELOAD_SNAPSHOTS_TASKS = 72000
RUN_WATCHDOG_HOMEASSISTANT_DOCKER = 15
RUN_WATCHDOG_HOMEASSISTANT_API = 300
RUN_CLEANUP_API_SESSIONS = 900
FILE_HASSIO_ADDONS = Path(HASSIO_DATA, "addons.json")
FILE_HASSIO_CONFIG = Path(HASSIO_DATA, "config.json")
FILE_HASSIO_HOMEASSISTANT = Path(HASSIO_DATA, "homeassistant.json")
@ -53,6 +44,7 @@ CONTENT_TYPE_JSON = 'application/json'
CONTENT_TYPE_TEXT = 'text/plain'
HEADER_HA_ACCESS = 'x-ha-access'
ATTR_WAIT_BOOT = 'wait_boot'
ATTR_WATCHDOG = 'watchdog'
ATTR_CHANGELOG = 'changelog'
ATTR_DATE = 'date'

View File

@ -2,190 +2,108 @@
import asyncio
import logging
import aiohttp
from .addons import AddonManager
from .api import RestAPI
from .host_control import HostControl
from .coresys import CoreSysAttributes
from .const import (
RUN_UPDATE_INFO_TASKS, RUN_RELOAD_ADDONS_TASKS,
RUN_UPDATE_SUPERVISOR_TASKS, RUN_WATCHDOG_HOMEASSISTANT_DOCKER,
RUN_CLEANUP_API_SESSIONS, STARTUP_SYSTEM, STARTUP_SERVICES,
STARTUP_APPLICATION, STARTUP_INITIALIZE, RUN_RELOAD_SNAPSHOTS_TASKS,
RUN_UPDATE_ADDONS_TASKS)
from .hardware import Hardware
from .homeassistant import HomeAssistant
from .scheduler import Scheduler
from .dock import DockerAPI
from .dock.supervisor import DockerSupervisor
from .dns import DNSForward
from .snapshots import SnapshotsManager
from .updater import Updater
from .tasks import (
hassio_update, homeassistant_watchdog_docker, api_sessions_cleanup,
addons_update)
from .tools import fetch_timezone
STARTUP_SYSTEM, STARTUP_SERVICES, STARTUP_APPLICATION, STARTUP_INITIALIZE)
from .utils.dt import fetch_timezone
_LOGGER = logging.getLogger(__name__)
class HassIO(object):
class HassIO(CoreSysAttributes):
"""Main object of hassio."""
def __init__(self, loop, config):
def __init__(self, coresys):
"""Initialize hassio object."""
self.exit_code = 0
self.loop = loop
self.config = config
self.websession = aiohttp.ClientSession(loop=loop)
self.updater = Updater(config, loop, self.websession)
self.scheduler = Scheduler(loop)
self.api = RestAPI(config, loop)
self.hardware = Hardware()
self.docker = DockerAPI(self.hardware)
self.dns = DNSForward(loop)
# init basic docker container
self.supervisor = DockerSupervisor(
config, loop, self.docker, self.stop)
# init homeassistant
self.homeassistant = HomeAssistant(
config, loop, self.docker, self.updater)
# init HostControl
self.host_control = HostControl(loop)
# init addon system
self.addons = AddonManager(config, loop, self.docker)
# init snapshot system
self.snapshots = SnapshotsManager(
config, loop, self.scheduler, self.addons, self.homeassistant)
self.coresys = coresys
async def setup(self):
"""Setup HassIO orchestration."""
# supervisor
if not await self.supervisor.attach():
_LOGGER.fatal("Can't setup supervisor docker container!")
await self.supervisor.cleanup()
# set running arch
self.config.arch = self.supervisor.arch
# update timezone
if self.config.timezone == 'UTC':
self.config.timezone = await fetch_timezone(self.websession)
if self._config.timezone == 'UTC':
self._config.timezone = await fetch_timezone(self._websession)
# supervisor
await self._supervisor.load()
# hostcontrol
await self.host_control.load()
# schedule update info tasks
self.scheduler.register_task(
self.host_control.load, RUN_UPDATE_INFO_TASKS)
# rest api views
self.api.register_host(self.host_control, self.hardware)
self.api.register_network(self.host_control)
self.api.register_supervisor(
self.supervisor, self.snapshots, self.addons, self.host_control,
self.updater)
self.api.register_homeassistant(self.homeassistant)
self.api.register_proxy(self.homeassistant)
self.api.register_addons(self.addons)
self.api.register_security()
self.api.register_snapshots(self.snapshots)
self.api.register_panel()
# schedule api session cleanup
self.scheduler.register_task(
api_sessions_cleanup(self.config), RUN_CLEANUP_API_SESSIONS,
now=True)
await self._host_control.load()
# Load homeassistant
await self.homeassistant.prepare()
await self._homeassistant.load()
# Load addons
await self.addons.prepare()
await self._addons.load()
# schedule addon update task
self.scheduler.register_task(
self.addons.reload, RUN_RELOAD_ADDONS_TASKS, now=True)
self.scheduler.register_task(
addons_update(self.loop, self.addons), RUN_UPDATE_ADDONS_TASKS)
# rest api views
await self._api.load()
# schedule self update task
self.scheduler.register_task(
hassio_update(self.supervisor, self.updater),
RUN_UPDATE_SUPERVISOR_TASKS)
# load last available data
await self._updater.load()
# schedule snapshot update tasks
self.scheduler.register_task(
self.snapshots.reload, RUN_RELOAD_SNAPSHOTS_TASKS, now=True)
# load last available data
await self._snapshots.load()
# start dns forwarding
self.loop.create_task(self.dns.start())
self._loop.create_task(self._dns.start())
# start addon mark as initialize
await self.addons.auto_boot(STARTUP_INITIALIZE)
await self._addons.auto_boot(STARTUP_INITIALIZE)
async def start(self):
"""Start HassIO orchestration."""
# on release channel, try update itself
# on beta channel, only read new versions
await asyncio.wait(
[hassio_update(self.supervisor, self.updater)()],
loop=self.loop
)
if not self._updater.beta_channel:
await self._supervisor.update()
else:
_LOGGER.info("Ignore Hass.io auto updates on beta mode")
# start api
await self.api.start()
_LOGGER.info("Start hassio api on %s", self.docker.network.supervisor)
await self._api.start()
_LOGGER.info("Start API on %s", self._docker.network.supervisor)
try:
# HomeAssistant is already running / supervisor have only reboot
if self.hardware.last_boot == self.config.last_boot:
_LOGGER.info("HassIO reboot detected")
if self._hardware.last_boot == self._config.last_boot:
_LOGGER.info("Hass.io reboot detected")
return
# start addon mark as system
await self.addons.auto_boot(STARTUP_SYSTEM)
await self._addons.auto_boot(STARTUP_SYSTEM)
# start addon mark as services
await self.addons.auto_boot(STARTUP_SERVICES)
await self._addons.auto_boot(STARTUP_SERVICES)
# run HomeAssistant
if self.homeassistant.boot:
await self.homeassistant.run()
if self._homeassistant.boot:
await self._homeassistant.run()
# start addon mark as application
await self.addons.auto_boot(STARTUP_APPLICATION)
await self._addons.auto_boot(STARTUP_APPLICATION)
# store new last boot
self.config.last_boot = self.hardware.last_boot
self._config.last_boot = self._hardware.last_boot
finally:
# schedule homeassistant watchdog
self.scheduler.register_task(
homeassistant_watchdog_docker(self.loop, self.homeassistant),
RUN_WATCHDOG_HOMEASSISTANT_DOCKER)
# self.scheduler.register_task(
# homeassistant_watchdog_api(self.loop, self.homeassistant),
# RUN_WATCHDOG_HOMEASSISTANT_API)
# Add core tasks into scheduler
await self._tasks.load()
# If landingpage / run upgrade in background
if self.homeassistant.version == 'landingpage':
self.loop.create_task(self.homeassistant.install())
if self._homeassistant.version == 'landingpage':
self._loop.create_task(self._homeassistant.install())
_LOGGER.info("Hass.io is up and running")
async def stop(self):
"""Stop a running orchestration."""
# don't process scheduler anymore
self.scheduler.suspend = True
self._scheduler.suspend = True
# process stop tasks
self.websession.close()
self.homeassistant.websession.close()
self._websession.close()
self._websession_ssl.close()
# process async stop tasks
await asyncio.wait([self.api.stop(), self.dns.stop()], loop=self.loop)
await asyncio.wait(
[self._api.stop(), self._dns.stop()], loop=self._loop)

190
hassio/coresys.py Normal file
View File

@ -0,0 +1,190 @@
"""Handle core shared data."""
import aiohttp
from .config import CoreConfig
from .docker import DockerAPI
from .dns import DNSForward
from .hardware import Hardware
from .host_control import HostControl
from .scheduler import Scheduler
class CoreSys(object):
    """Class that handle all shared data.

    Central registry for every long-lived Hass.io object.  The external
    and global objects are created here; the internal pointers are wired
    exactly once during bootstrap (see initialize_coresys) and guarded
    against accidental re-assignment.
    """

    def __init__(self, loop):
        """Initialize coresys.

        loop: the asyncio event loop shared by all components.
        """
        # Static attributes
        self.exit_code = 0

        # External objects
        self._loop = loop
        self._websession = aiohttp.ClientSession(loop=loop)
        # Second session with certificate verification disabled, used for
        # endpoints with self-signed certificates.
        self._websession_ssl = aiohttp.ClientSession(
            connector=aiohttp.TCPConnector(verify_ssl=False), loop=loop)

        # Global objects
        self._config = CoreConfig()
        self._hardware = Hardware()
        self._docker = DockerAPI()
        self._scheduler = Scheduler(loop=loop)
        self._dns = DNSForward(loop=loop)
        self._host_control = HostControl(loop=loop)

        # Internal objects pointers (filled in by bootstrap, write-once)
        self._homeassistant = None
        self._supervisor = None
        self._addons = None
        self._api = None
        self._updater = None
        self._snapshots = None
        self._tasks = None

    def _set_once(self, attr, label, value):
        """Assign a write-once internal pointer.

        Raise RuntimeError if the slot named *attr* is already filled,
        protecting the one-time wiring done during bootstrap.
        """
        if getattr(self, attr):
            raise RuntimeError(f"{label} already set!")
        setattr(self, attr, value)

    @property
    def arch(self):
        """Return running arch of hass.io system."""
        # The architecture is only known after the supervisor container
        # has been attached; before that, report None.
        if self._supervisor:
            return self._supervisor.arch
        return None

    @property
    def loop(self):
        """Return loop object."""
        return self._loop

    @property
    def websession(self):
        """Return websession object."""
        return self._websession

    @property
    def websession_ssl(self):
        """Return websession object with disabled SSL."""
        return self._websession_ssl

    @property
    def config(self):
        """Return CoreConfig object."""
        return self._config

    @property
    def hardware(self):
        """Return Hardware object."""
        return self._hardware

    @property
    def docker(self):
        """Return DockerAPI object."""
        return self._docker

    @property
    def scheduler(self):
        """Return Scheduler object."""
        return self._scheduler

    @property
    def dns(self):
        """Return DNSForward object."""
        return self._dns

    @property
    def host_control(self):
        """Return HostControl object."""
        return self._host_control

    @property
    def homeassistant(self):
        """Return HomeAssistant object."""
        return self._homeassistant

    @homeassistant.setter
    def homeassistant(self, value):
        """Set a HomeAssistant object."""
        self._set_once('_homeassistant', 'HomeAssistant', value)

    @property
    def supervisor(self):
        """Return Supervisor object."""
        return self._supervisor

    @supervisor.setter
    def supervisor(self, value):
        """Set a Supervisor object."""
        self._set_once('_supervisor', 'Supervisor', value)

    @property
    def api(self):
        """Return API object."""
        return self._api

    @api.setter
    def api(self, value):
        """Set a API object."""
        self._set_once('_api', 'API', value)

    @property
    def updater(self):
        """Return Updater object."""
        return self._updater

    @updater.setter
    def updater(self, value):
        """Set a Updater object."""
        self._set_once('_updater', 'Updater', value)

    @property
    def addons(self):
        """Return AddonManager object."""
        return self._addons

    @addons.setter
    def addons(self, value):
        """Set a AddonManager object."""
        self._set_once('_addons', 'AddonManager', value)

    @property
    def snapshots(self):
        """Return SnapshotsManager object."""
        return self._snapshots

    @snapshots.setter
    def snapshots(self, value):
        """Set a SnapshotsManager object."""
        self._set_once('_snapshots', 'SnapshotsManager', value)

    @property
    def tasks(self):
        """Return Tasks object."""
        return self._tasks

    @tasks.setter
    def tasks(self, value):
        """Set a Tasks object."""
        self._set_once('_tasks', 'Tasks', value)
class CoreSysAttributes(object):
    """Inherit basic CoreSys attributes.

    Mixin that forwards underscore-prefixed attribute access to the
    shared CoreSys instance: ``self._loop`` resolves to
    ``self.coresys.loop`` and so on.
    """

    coresys = None

    def __getattr__(self, name):
        """Mapping to coresys."""
        # Only forward private-style names (self._foo -> coresys.foo).
        # Without the underscore guard, a typo'd public attribute such as
        # self.foo would be looked up as coresys.oo and could silently
        # resolve to the wrong object.
        if name.startswith('_') and hasattr(self.coresys, name[1:]):
            return getattr(self.coresys, name[1:])
        raise AttributeError(f"Can't find {name} on {self.__class__}")

View File

@ -27,7 +27,7 @@ class DNSForward(object):
loop=self.loop
)
except OSError as err:
_LOGGER.error("Can't start DNS forwarding -> %s", err)
_LOGGER.error("Can't start DNS forwarding: %s", err)
else:
_LOGGER.info("Start DNS port forwarding for host add-ons")

View File

@ -1,77 +0,0 @@
"""Init file for HassIO docker object."""
import logging
import os
import docker
from .interface import DockerInterface
from .util import docker_process
_LOGGER = logging.getLogger(__name__)
class DockerSupervisor(DockerInterface):
    """Docker hassio wrapper for the supervisor container itself.

    The supervisor runs inside a container it does not manage: it can
    only attach to it and trigger an image update; all other lifecycle
    operations are explicitly unsupported.
    """

    def __init__(self, config, loop, api, stop_callback, image=None):
        """Initialize docker base wrapper.

        stop_callback: callable to shut down the core orchestration
        (kept for callers; not invoked inside this class).
        """
        super().__init__(config, loop, api, image=image)
        self.stop_callback = stop_callback

    @property
    def name(self):
        """Return name of docker container."""
        # The own container name is injected via environment by whatever
        # started the supervisor container.
        return os.environ['SUPERVISOR_NAME']

    def _attach(self):
        """Attach to running docker container.

        Need run inside executor.

        Returns True on success, False if the own container cannot be
        found or network attachment fails.
        """
        try:
            container = self.docker.containers.get(self.name)
        except docker.errors.DockerException:
            return False

        self.process_metadata(container.attrs)
        _LOGGER.info("Attach to supervisor %s with version %s",
                     self.image, self.version)

        # if already attach
        if container in self.docker.network.containers:
            return True

        # attach to network
        return self.docker.network.attach_container(
            container, alias=['hassio'], ipv4=self.docker.network.supervisor)

    @docker_process
    async def update(self, tag):
        """Update a supervisor docker image.

        Pull the new tag and, on success, stop the event loop one second
        later — presumably so the restarted container comes back up on
        the new image (TODO confirm restart is handled by the host).
        """
        _LOGGER.info("Update supervisor docker to %s:%s", self.image, tag)

        if await self.loop.run_in_executor(None, self._install, tag):
            self.loop.call_later(1, self.loop.stop)
            return True

        return False

    async def run(self):
        """Run docker image.

        Unsupported: the supervisor container is managed from outside.
        """
        raise RuntimeError("Not support on supervisor docker container!")

    async def install(self, tag):
        """Pull docker image.

        Unsupported: use update() instead for the supervisor image.
        """
        raise RuntimeError("Not support on supervisor docker container!")

    async def stop(self):
        """Stop/remove docker container.

        Unsupported: the supervisor cannot stop its own container.
        """
        raise RuntimeError("Not support on supervisor docker container!")

    async def remove(self):
        """Remove docker image.

        Unsupported: the supervisor cannot remove its own image.
        """
        raise RuntimeError("Not support on supervisor docker container!")

    async def restart(self):
        """Restart docker container.

        Unsupported: the supervisor cannot restart its own container.
        """
        raise RuntimeError("Not support on supervisor docker container!")

View File

@ -16,12 +16,11 @@ class DockerAPI(object):
This class is not AsyncIO safe!
"""
def __init__(self, hardware):
def __init__(self):
"""Initialize docker base wrapper."""
self.docker = docker.DockerClient(
base_url="unix:/{}".format(str(SOCKET_DOCKER)), version='auto')
self.network = DockerNetwork(self.docker)
self.hardware = hardware
@property
def images(self):
@ -57,7 +56,7 @@ class DockerAPI(object):
try:
container = self.docker.containers.create(image, **kwargs)
except docker.errors.DockerException as err:
_LOGGER.error("Can't create container from %s -> %s", name, err)
_LOGGER.error("Can't create container from %s: %s", name, err)
return False
# attach network
@ -72,7 +71,7 @@ class DockerAPI(object):
try:
container.start()
except docker.errors.DockerException as err:
_LOGGER.error("Can't start %s -> %s", name, err)
_LOGGER.error("Can't start %s: %s", name, err)
return False
return True
@ -99,7 +98,7 @@ class DockerAPI(object):
output = container.logs(stdout=stdout, stderr=stderr)
except docker.errors.DockerException as err:
_LOGGER.error("Can't execute command -> %s", err)
_LOGGER.error("Can't execute command: %s", err)
return (None, b"")
# cleanup container

View File

@ -6,7 +6,7 @@ import docker
import requests
from .interface import DockerInterface
from .util import docker_process
from .utils import docker_process
from ..addons.build import AddonBuild
from ..const import (
MAP_CONFIG, MAP_SSL, MAP_ADDONS, MAP_BACKUP, MAP_SHARE)
@ -19,27 +19,39 @@ AUDIO_DEVICE = "/dev/snd:/dev/snd:rwm"
class DockerAddon(DockerInterface):
"""Docker hassio wrapper for HomeAssistant."""
def __init__(self, config, loop, api, addon):
def __init__(self, coresys, slug):
"""Initialize docker homeassistant wrapper."""
super().__init__(
config, loop, api, image=addon.image, timeout=addon.timeout)
self.addon = addon
super().__init__(coresys)
self._id = slug
# pylint: disable=inconsistent-return-statements
def process_metadata(self, metadata, force=False):
"""Use addon data instead meta data with legacy."""
@property
def addon(self):
"""Return name of docker image."""
return self._addons.get(self._id)
@property
def image(self):
"""Return name of docker image."""
return self.addon.image
@property
def timeout(self):
"""Return timeout for docker actions."""
return self.addon.timeout
@property
def version(self):
"""Return version of docker image."""
if not self.addon.legacy:
return super().process_metadata(metadata, force=force)
return super().version
return self.addon.version_installed
# set meta data
if not self.version or force:
if force: # called on install/update/build
self.version = self.addon.last_version
else:
self.version = self.addon.version_installed
if not self.arch:
self.arch = self.config.arch
@property
def arch(self):
"""Return arch of docker image."""
if not self.addon.legacy:
return super().arch
return self._arch
@property
def name(self):
@ -74,7 +86,7 @@ class DockerAddon(DockerInterface):
return {
**addon_env,
'TZ': self.config.timezone,
'TZ': self._config.timezone,
}
@property
@ -88,7 +100,7 @@ class DockerAddon(DockerInterface):
# Auto mapping UART devices
if self.addon.auto_uart:
for uart_dev in self.docker.hardware.serial_devices:
for uart_dev in self._hardware.serial_devices:
devices.append("{0}:{0}:rwm".format(uart_dev))
# Return None if no devices is present
@ -130,8 +142,8 @@ class DockerAddon(DockerInterface):
def network_mapping(self):
"""Return hosts mapping."""
return {
'homeassistant': self.docker.network.gateway,
'hassio': self.docker.network.supervisor,
'homeassistant': self._docker.network.gateway,
'hassio': self._docker.network.supervisor,
}
@property
@ -154,31 +166,31 @@ class DockerAddon(DockerInterface):
# setup config mappings
if MAP_CONFIG in addon_mapping:
volumes.update({
str(self.config.path_extern_config): {
str(self._config.path_extern_config): {
'bind': "/config", 'mode': addon_mapping[MAP_CONFIG]
}})
if MAP_SSL in addon_mapping:
volumes.update({
str(self.config.path_extern_ssl): {
str(self._config.path_extern_ssl): {
'bind': "/ssl", 'mode': addon_mapping[MAP_SSL]
}})
if MAP_ADDONS in addon_mapping:
volumes.update({
str(self.config.path_extern_addons_local): {
str(self._config.path_extern_addons_local): {
'bind': "/addons", 'mode': addon_mapping[MAP_ADDONS]
}})
if MAP_BACKUP in addon_mapping:
volumes.update({
str(self.config.path_extern_backup): {
str(self._config.path_extern_backup): {
'bind': "/backup", 'mode': addon_mapping[MAP_BACKUP]
}})
if MAP_SHARE in addon_mapping:
volumes.update({
str(self.config.path_extern_share): {
str(self._config.path_extern_share): {
'bind': "/share", 'mode': addon_mapping[MAP_SHARE]
}})
@ -217,7 +229,7 @@ class DockerAddon(DockerInterface):
if not self.addon.write_options():
return False
ret = self.docker.run(
ret = self._docker.run(
self.image,
name=self.name,
hostname=self.hostname,
@ -257,17 +269,17 @@ class DockerAddon(DockerInterface):
Need run inside executor.
"""
build_env = AddonBuild(self.config, self.addon)
build_env = AddonBuild(self.coresys, self.addon)
_LOGGER.info("Start build %s:%s", self.image, tag)
try:
image = self.docker.images.build(**build_env.get_docker_args(tag))
image = self._docker.images.build(**build_env.get_docker_args(tag))
image.tag(self.image, tag='latest')
self.process_metadata(image.attrs, force=True)
self._meta = image.attrs
except (docker.errors.DockerException) as err:
_LOGGER.error("Can't build %s:%s -> %s", self.image, tag, err)
_LOGGER.error("Can't build %s:%s: %s", self.image, tag, err)
return False
_LOGGER.info("Build %s:%s done", self.image, tag)
@ -276,7 +288,7 @@ class DockerAddon(DockerInterface):
@docker_process
def export_image(self, path):
"""Export current images into a tar file."""
return self.loop.run_in_executor(None, self._export_image, path)
return self._loop.run_in_executor(None, self._export_image, path)
def _export_image(self, tar_file):
"""Export current images into a tar file.
@ -284,9 +296,9 @@ class DockerAddon(DockerInterface):
Need run inside executor.
"""
try:
image = self.docker.api.get_image(self.image)
image = self._docker.api.get_image(self.image)
except docker.errors.DockerException as err:
_LOGGER.error("Can't fetch image %s -> %s", self.image, err)
_LOGGER.error("Can't fetch image %s: %s", self.image, err)
return False
try:
@ -294,7 +306,7 @@ class DockerAddon(DockerInterface):
for chunk in image.stream():
write_tar.write(chunk)
except (OSError, requests.exceptions.ReadTimeout) as err:
_LOGGER.error("Can't write tar file %s -> %s", tar_file, err)
_LOGGER.error("Can't write tar file %s: %s", tar_file, err)
return False
_LOGGER.info("Export image %s to %s", self.image, tar_file)
@ -303,7 +315,7 @@ class DockerAddon(DockerInterface):
@docker_process
def import_image(self, path, tag):
"""Import a tar file as image."""
return self.loop.run_in_executor(None, self._import_image, path, tag)
return self._loop.run_in_executor(None, self._import_image, path, tag)
def _import_image(self, tar_file, tag):
"""Import a tar file as image.
@ -312,16 +324,16 @@ class DockerAddon(DockerInterface):
"""
try:
with tar_file.open("rb") as read_tar:
self.docker.api.load_image(read_tar)
self._docker.api.load_image(read_tar)
image = self.docker.images.get(self.image)
image = self._docker.images.get(self.image)
image.tag(self.image, tag=tag)
except (docker.errors.DockerException, OSError) as err:
_LOGGER.error("Can't import image %s -> %s", self.image, err)
_LOGGER.error("Can't import image %s: %s", self.image, err)
return False
_LOGGER.info("Import image %s and tag %s", tar_file, tag)
self.process_metadata(image.attrs, force=True)
self._meta = image.attrs
self._cleanup()
return True
@ -337,7 +349,7 @@ class DockerAddon(DockerInterface):
@docker_process
def write_stdin(self, data):
"""Write to add-on stdin."""
return self.loop.run_in_executor(None, self._write_stdin, data)
return self._loop.run_in_executor(None, self._write_stdin, data)
def _write_stdin(self, data):
"""Write to add-on stdin.
@ -349,10 +361,10 @@ class DockerAddon(DockerInterface):
try:
# load needed docker objects
container = self.docker.containers.get(self.name)
container = self._docker.containers.get(self.name)
socket = container.attach_socket(params={'stdin': 1, 'stream': 1})
except docker.errors.DockerException as err:
_LOGGER.error("Can't attach to %s stdin -> %s", self.name, err)
_LOGGER.error("Can't attach to %s stdin: %s", self.name, err)
return False
try:
@ -361,7 +373,7 @@ class DockerAddon(DockerInterface):
os.write(socket.fileno(), data)
socket.close()
except OSError as err:
_LOGGER.error("Can't write to %s stdin -> %s", self.name, err)
_LOGGER.error("Can't write to %s stdin: %s", self.name, err)
return False
return True

View File

@ -13,10 +13,10 @@ HASS_DOCKER_NAME = 'homeassistant'
class DockerHomeAssistant(DockerInterface):
"""Docker hassio wrapper for HomeAssistant."""
def __init__(self, config, loop, api, data):
"""Initialize docker homeassistant wrapper."""
super().__init__(config, loop, api, image=data.image)
self.data = data
@property
def image(self):
"""Return name of docker image."""
return self._homeassistant.image
@property
def name(self):
@ -26,11 +26,11 @@ class DockerHomeAssistant(DockerInterface):
@property
def devices(self):
"""Create list of special device to map into docker."""
if not self.data.devices:
if not self._homeassistant.devices:
return None
devices = []
for device in self.data.devices:
for device in self._homeassistant.devices:
devices.append("/dev/{0}:/dev/{0}:rwm".format(device))
return devices
@ -46,7 +46,7 @@ class DockerHomeAssistant(DockerInterface):
# cleanup
self._stop()
ret = self.docker.run(
ret = self._docker.run(
self.image,
name=self.name,
hostname=self.name,
@ -56,15 +56,15 @@ class DockerHomeAssistant(DockerInterface):
devices=self.devices,
network_mode='host',
environment={
'HASSIO': self.docker.network.supervisor,
'TZ': self.config.timezone,
'HASSIO': self._docker.network.supervisor,
'TZ': self._config.timezone,
},
volumes={
str(self.config.path_extern_config):
str(self._config.path_extern_config):
{'bind': '/config', 'mode': 'rw'},
str(self.config.path_extern_ssl):
str(self._config.path_extern_ssl):
{'bind': '/ssl', 'mode': 'ro'},
str(self.config.path_extern_share):
str(self._config.path_extern_share):
{'bind': '/share', 'mode': 'rw'},
}
)
@ -80,26 +80,26 @@ class DockerHomeAssistant(DockerInterface):
Need run inside executor.
"""
return self.docker.run_command(
return self._docker.run_command(
self.image,
command,
detach=True,
stdout=True,
stderr=True,
environment={
'TZ': self.config.timezone,
'TZ': self._config.timezone,
},
volumes={
str(self.config.path_extern_config):
str(self._config.path_extern_config):
{'bind': '/config', 'mode': 'ro'},
str(self.config.path_extern_ssl):
str(self._config.path_extern_ssl):
{'bind': '/ssl', 'mode': 'ro'},
}
)
def is_initialize(self):
"""Return True if docker container exists."""
return self.loop.run_in_executor(None, self._is_initialize)
return self._loop.run_in_executor(None, self._is_initialize)
def _is_initialize(self):
"""Return True if docker container exists.
@ -107,7 +107,7 @@ class DockerHomeAssistant(DockerInterface):
Need run inside executor.
"""
try:
self.docker.containers.get(self.name)
self._docker.containers.get(self.name)
except docker.errors.DockerException:
return False

View File

@ -5,59 +5,62 @@ import logging
import docker
from .util import docker_process
from .utils import docker_process
from ..const import LABEL_VERSION, LABEL_ARCH
from ..coresys import CoreSysAttributes
_LOGGER = logging.getLogger(__name__)
class DockerInterface(object):
class DockerInterface(CoreSysAttributes):
"""Docker hassio interface."""
def __init__(self, config, loop, api, image=None, timeout=30):
def __init__(self, coresys):
"""Initialize docker base wrapper."""
self.config = config
self.loop = loop
self.docker = api
self.coresys = coresys
self._meta = None
self.lock = asyncio.Lock(loop=self._loop)
self.image = image
self.timeout = timeout
self.version = None
self.arch = None
self._lock = asyncio.Lock(loop=loop)
@property
def timeout(self):
"""Return timeout for docker actions."""
return 30
@property
def name(self):
"""Return name of docker container."""
return None
@property
def image(self):
"""Return name of docker image."""
if not self._meta:
return None
return self._meta['Config']['Image']
@property
def version(self):
"""Return version of docker image."""
if self._meta and LABEL_VERSION in self._meta['Config']['Labels']:
return self._meta['Config']['Labels'][LABEL_VERSION]
return None
@property
def arch(self):
"""Return arch of docker image."""
if self._meta and LABEL_ARCH in self._meta['Config']['Labels']:
return self._meta['Config']['Labels'][LABEL_ARCH]
return None
@property
def in_progress(self):
"""Return True if a task is in progress."""
return self._lock.locked()
def process_metadata(self, metadata, force=False):
"""Read metadata and set it to object."""
# read image
if not self.image:
self.image = metadata['Config']['Image']
# read version
need_version = force or not self.version
if need_version and LABEL_VERSION in metadata['Config']['Labels']:
self.version = metadata['Config']['Labels'][LABEL_VERSION]
elif need_version:
_LOGGER.warning("Can't read version from %s", self.name)
# read arch
need_arch = force or not self.arch
if need_arch and LABEL_ARCH in metadata['Config']['Labels']:
self.arch = metadata['Config']['Labels'][LABEL_ARCH]
return self.lock.locked()
@docker_process
def install(self, tag):
"""Pull docker image."""
return self.loop.run_in_executor(None, self._install, tag)
return self._loop.run_in_executor(None, self._install, tag)
def _install(self, tag):
"""Pull docker image.
@ -66,10 +69,10 @@ class DockerInterface(object):
"""
try:
_LOGGER.info("Pull image %s tag %s.", self.image, tag)
image = self.docker.images.pull("{}:{}".format(self.image, tag))
image = self._docker.images.pull("{}:{}".format(self.image, tag))
image.tag(self.image, tag='latest')
self.process_metadata(image.attrs, force=True)
self._meta = image.attrs
except docker.errors.APIError as err:
_LOGGER.error("Can't install %s:%s -> %s.", self.image, tag, err)
return False
@ -79,7 +82,7 @@ class DockerInterface(object):
def exists(self):
"""Return True if docker image exists in local repo."""
return self.loop.run_in_executor(None, self._exists)
return self._loop.run_in_executor(None, self._exists)
def _exists(self):
"""Return True if docker image exists in local repo.
@ -87,7 +90,7 @@ class DockerInterface(object):
Need run inside executor.
"""
try:
self.docker.images.get(self.image)
self._docker.images.get(self.image)
except docker.errors.DockerException:
return False
@ -98,7 +101,7 @@ class DockerInterface(object):
Return a Future.
"""
return self.loop.run_in_executor(None, self._is_running)
return self._loop.run_in_executor(None, self._is_running)
def _is_running(self):
"""Return True if docker is Running.
@ -106,8 +109,8 @@ class DockerInterface(object):
Need run inside executor.
"""
try:
container = self.docker.containers.get(self.name)
image = self.docker.images.get(self.image)
container = self._docker.containers.get(self.name)
image = self._docker.images.get(self.image)
except docker.errors.DockerException:
return False
@ -124,7 +127,7 @@ class DockerInterface(object):
@docker_process
def attach(self):
"""Attach to running docker container."""
return self.loop.run_in_executor(None, self._attach)
return self._loop.run_in_executor(None, self._attach)
def _attach(self):
"""Attach to running docker container.
@ -133,13 +136,12 @@ class DockerInterface(object):
"""
try:
if self.image:
obj_data = self.docker.images.get(self.image).attrs
self._meta = self._docker.images.get(self.image).attrs
else:
obj_data = self.docker.containers.get(self.name).attrs
self._meta = self._docker.containers.get(self.name).attrs
except docker.errors.DockerException:
return False
self.process_metadata(obj_data)
_LOGGER.info(
"Attach to image %s with version %s", self.image, self.version)
@ -148,7 +150,7 @@ class DockerInterface(object):
@docker_process
def run(self):
"""Run docker image."""
return self.loop.run_in_executor(None, self._run)
return self._loop.run_in_executor(None, self._run)
def _run(self):
"""Run docker image.
@ -160,7 +162,7 @@ class DockerInterface(object):
@docker_process
def stop(self):
"""Stop/remove docker container."""
return self.loop.run_in_executor(None, self._stop)
return self._loop.run_in_executor(None, self._stop)
def _stop(self):
"""Stop/remove and remove docker container.
@ -168,7 +170,7 @@ class DockerInterface(object):
Need run inside executor.
"""
try:
container = self.docker.containers.get(self.name)
container = self._docker.containers.get(self.name)
except docker.errors.DockerException:
return False
@ -186,7 +188,7 @@ class DockerInterface(object):
@docker_process
def remove(self):
"""Remove docker images."""
return self.loop.run_in_executor(None, self._remove)
return self._loop.run_in_executor(None, self._remove)
def _remove(self):
"""remove docker images.
@ -201,27 +203,24 @@ class DockerInterface(object):
try:
with suppress(docker.errors.ImageNotFound):
self.docker.images.remove(
self._docker.images.remove(
image="{}:latest".format(self.image), force=True)
with suppress(docker.errors.ImageNotFound):
self.docker.images.remove(
self._docker.images.remove(
image="{}:{}".format(self.image, self.version), force=True)
except docker.errors.DockerException as err:
_LOGGER.warning("Can't remove image %s -> %s", self.image, err)
_LOGGER.warning("Can't remove image %s: %s", self.image, err)
return False
# clean metadata
self.version = None
self.arch = None
self._meta = None
return True
@docker_process
def update(self, tag):
"""Update a docker image."""
return self.loop.run_in_executor(None, self._update, tag)
return self._loop.run_in_executor(None, self._update, tag)
def _update(self, tag):
"""Update a docker image.
@ -246,7 +245,7 @@ class DockerInterface(object):
Return a Future.
"""
return self.loop.run_in_executor(None, self._logs)
return self._loop.run_in_executor(None, self._logs)
def _logs(self):
"""Return docker logs of container.
@ -254,19 +253,19 @@ class DockerInterface(object):
Need run inside executor.
"""
try:
container = self.docker.containers.get(self.name)
container = self._docker.containers.get(self.name)
except docker.errors.DockerException:
return b""
try:
return container.logs(tail=100, stdout=True, stderr=True)
except docker.errors.DockerException as err:
_LOGGER.warning("Can't grap logs from %s -> %s", self.image, err)
_LOGGER.warning("Can't grap logs from %s: %s", self.image, err)
@docker_process
def restart(self):
"""Restart docker container."""
return self.loop.run_in_executor(None, self._restart)
return self._loop.run_in_executor(None, self._restart)
def _restart(self):
"""Restart docker container.
@ -274,7 +273,7 @@ class DockerInterface(object):
Need run inside executor.
"""
try:
container = self.docker.containers.get(self.name)
container = self._docker.containers.get(self.name)
except docker.errors.DockerException:
return False
@ -283,7 +282,7 @@ class DockerInterface(object):
try:
container.restart(timeout=self.timeout)
except docker.errors.DockerException as err:
_LOGGER.warning("Can't restart %s -> %s", self.image, err)
_LOGGER.warning("Can't restart %s: %s", self.image, err)
return False
return True
@ -291,7 +290,7 @@ class DockerInterface(object):
@docker_process
def cleanup(self):
"""Check if old version exists and cleanup."""
return self.loop.run_in_executor(None, self._cleanup)
return self._loop.run_in_executor(None, self._cleanup)
def _cleanup(self):
"""Check if old version exists and cleanup.
@ -299,25 +298,25 @@ class DockerInterface(object):
Need run inside executor.
"""
try:
latest = self.docker.images.get(self.image)
latest = self._docker.images.get(self.image)
except docker.errors.DockerException:
_LOGGER.warning("Can't find %s for cleanup", self.image)
return False
for image in self.docker.images.list(name=self.image):
for image in self._docker.images.list(name=self.image):
if latest.id == image.id:
continue
with suppress(docker.errors.DockerException):
_LOGGER.info("Cleanup docker images: %s", image.tags)
self.docker.images.remove(image.id, force=True)
self._docker.images.remove(image.id, force=True)
return True
@docker_process
def execute_command(self, command):
"""Create a temporary container and run command."""
return self.loop.run_in_executor(None, self._execute_command, command)
return self._loop.run_in_executor(None, self._execute_command, command)
def _execute_command(self, command):
"""Create a temporary container and run command.

View File

@ -66,7 +66,7 @@ class DockerNetwork(object):
try:
self.network.connect(container, aliases=alias, ipv4_address=ipv4)
except docker.errors.APIError as err:
_LOGGER.error("Can't link container to hassio-net -> %s", err)
_LOGGER.error("Can't link container to hassio-net: %s", err)
return False
self.network.reload()
@ -86,4 +86,4 @@ class DockerNetwork(object):
except docker.errors.APIError as err:
_LOGGER.warning(
"Can't disconnect container from default -> %s", err)
"Can't disconnect container from default: %s", err)

View File

@ -0,0 +1,41 @@
"""Init file for HassIO docker object."""
import logging
import os
import docker
from .interface import DockerInterface
from ..coresys import CoreSysAttributes
_LOGGER = logging.getLogger(__name__)
class DockerSupervisor(DockerInterface, CoreSysAttributes):
"""Docker hassio wrapper for HomeAssistant."""
@property
def name(self):
"""Return name of docker container."""
return os.environ['SUPERVISOR_NAME']
def _attach(self):
"""Attach to running docker container.
Need run inside executor.
"""
try:
container = self._docker.containers.get(self.name)
except docker.errors.DockerException:
return False
self._meta = container.attrs
_LOGGER.info("Attach to supervisor %s with version %s",
self.image, self.version)
# if already attach
if container in self._docker.network.containers:
return True
# attach to network
return self._docker.network.attach_container(
container, alias=['hassio'], ipv4=self._docker.network.supervisor)

View File

@ -9,12 +9,12 @@ def docker_process(method):
"""Wrap function with only run once."""
async def wrap_api(api, *args, **kwargs):
"""Return api wrapper."""
if api._lock.locked():
if api.lock.locked():
_LOGGER.error(
"Can't excute %s while a task is in progress", method.__name__)
return False
async with api._lock:
async with api.lock:
return await method(api, *args, **kwargs)
return wrap_api

View File

@ -69,7 +69,7 @@ class Hardware(object):
with ASOUND_DEVICES.open('r') as devices_file:
devices = devices_file.read()
except OSError as err:
_LOGGER.error("Can't read asound data -> %s", err)
_LOGGER.error("Can't read asound data: %s", err)
return None
audio_list = {}
@ -109,7 +109,7 @@ class Hardware(object):
with PROC_STAT.open("r") as stat_file:
stats = stat_file.read()
except OSError as err:
_LOGGER.error("Can't read stat data -> %s", err)
_LOGGER.error("Can't read stat data: %s", err)
return None
# parse stat file

View File

@ -6,14 +6,15 @@ import re
import aiohttp
from aiohttp.hdrs import CONTENT_TYPE
import async_timeout
from .const import (
FILE_HASSIO_HOMEASSISTANT, ATTR_DEVICES, ATTR_IMAGE, ATTR_LAST_VERSION,
ATTR_VERSION, ATTR_BOOT, ATTR_PASSWORD, ATTR_PORT, ATTR_SSL, ATTR_WATCHDOG,
HEADER_HA_ACCESS, CONTENT_TYPE_JSON)
from .dock.homeassistant import DockerHomeAssistant
from .tools import JsonConfig, convert_to_ascii
from .coresys import CoreSysAttributes
from .docker.homeassistant import DockerHomeAssistant
from .utils import convert_to_ascii
from .utils.json import JsonConfig
from .validate import SCHEMA_HASS_CONFIG
_LOGGER = logging.getLogger(__name__)
@ -21,30 +22,30 @@ _LOGGER = logging.getLogger(__name__)
RE_YAML_ERROR = re.compile(r"homeassistant\.util\.yaml")
class HomeAssistant(JsonConfig):
class HomeAssistant(JsonConfig, CoreSysAttributes):
"""Hass core object for handle it."""
def __init__(self, config, loop, docker, updater):
def __init__(self, coresys):
"""Initialize hass object."""
super().__init__(FILE_HASSIO_HOMEASSISTANT, SCHEMA_HASS_CONFIG)
self.config = config
self.loop = loop
self.updater = updater
self.docker = DockerHomeAssistant(config, loop, docker, self)
self.api_ip = docker.network.gateway
self.websession = aiohttp.ClientSession(
connector=aiohttp.TCPConnector(verify_ssl=False), loop=loop)
self.coresys = coresys
self.instance = DockerHomeAssistant(coresys)
async def prepare(self):
async def load(self):
"""Prepare HomeAssistant object."""
if not await self.docker.exists():
if not await self.instance.exists():
_LOGGER.info("No HomeAssistant docker %s found.", self.image)
if self.is_custom_image:
await self.install()
else:
await self.install_landingpage()
else:
await self.docker.attach()
await self.instance.attach()
@property
def api_ip(self):
"""Return IP of HomeAssistant instance."""
return self._docker.network.gateway
@property
def api_port(self):
@ -100,14 +101,14 @@ class HomeAssistant(JsonConfig):
@property
def version(self):
"""Return version of running homeassistant."""
return self.docker.version
return self.instance.version
@property
def last_version(self):
"""Return last available version of homeassistant."""
if self.is_custom_image:
return self._data.get(ATTR_LAST_VERSION)
return self.updater.version_homeassistant
return self._updater.version_homeassistant
@property
def image(self):
@ -150,11 +151,11 @@ class HomeAssistant(JsonConfig):
self._data.pop(ATTR_IMAGE, None)
self._data.pop(ATTR_VERSION, None)
self.docker.image = self.image
self.instance.image = self.image
else:
if image:
self._data[ATTR_IMAGE] = image
self.docker.image = image
self.instance.image = image
if version:
self._data[ATTR_VERSION] = version
self.save()
@ -163,13 +164,13 @@ class HomeAssistant(JsonConfig):
"""Install a landingpage."""
_LOGGER.info("Setup HomeAssistant landingpage")
while True:
if await self.docker.install('landingpage'):
if await self.instance.install('landingpage'):
break
_LOGGER.warning("Fails install landingpage, retry after 60sec")
await asyncio.sleep(60, loop=self.loop)
await asyncio.sleep(60, loop=self._loop)
# run landingpage after installation
await self.docker.run()
await self.instance.run()
async def install(self):
"""Install a landingpage."""
@ -177,85 +178,85 @@ class HomeAssistant(JsonConfig):
while True:
# read homeassistant tag and install it
if not self.last_version:
await self.updater.fetch_data()
await self._updater.reload()
tag = self.last_version
if tag and await self.docker.install(tag):
if tag and await self.instance.install(tag):
break
_LOGGER.warning("Error on install HomeAssistant. Retry in 60sec")
await asyncio.sleep(60, loop=self.loop)
await asyncio.sleep(60, loop=self._loop)
# finishing
_LOGGER.info("HomeAssistant docker now installed")
if self.boot:
await self.docker.run()
await self.docker.cleanup()
await self.instance.run()
await self.instance.cleanup()
async def update(self, version=None):
"""Update HomeAssistant version."""
version = version or self.last_version
running = await self.docker.is_running()
running = await self.instance.is_running()
if version == self.docker.version:
_LOGGER.warning("Version %s is already installed", version)
if version == self.instance.version:
_LOGGER.info("Version %s is already installed", version)
return False
try:
return await self.docker.update(version)
return await self.instance.update(version)
finally:
if running:
await self.docker.run()
await self.instance.run()
def run(self):
"""Run HomeAssistant docker.
Return a coroutine.
"""
return self.docker.run()
return self.instance.run()
def stop(self):
"""Stop HomeAssistant docker.
Return a coroutine.
"""
return self.docker.stop()
return self.instance.stop()
def restart(self):
"""Restart HomeAssistant docker.
Return a coroutine.
"""
return self.docker.restart()
return self.instance.restart()
def logs(self):
"""Get HomeAssistant docker logs.
Return a coroutine.
"""
return self.docker.logs()
return self.instance.logs()
def is_running(self):
"""Return True if docker container is running.
Return a coroutine.
"""
return self.docker.is_running()
return self.instance.is_running()
def is_initialize(self):
"""Return True if a docker container is exists.
Return a coroutine.
"""
return self.docker.is_initialize()
return self.instance.is_initialize()
@property
def in_progress(self):
"""Return True if a task is in progress."""
return self.docker.in_progress
return self.instance.in_progress
async def check_config(self):
"""Run homeassistant config check."""
exit_code, log = await self.docker.execute_command(
exit_code, log = await self.instance.execute_command(
"python3 -m homeassistant -c /config --script check_config"
)
@ -271,16 +272,17 @@ class HomeAssistant(JsonConfig):
async def check_api_state(self):
"""Check if Home-Assistant up and running."""
url = "{}/api/".format(self.api_url)
url = f"{self.api_url}/api/"
header = {CONTENT_TYPE: CONTENT_TYPE_JSON}
if self.api_password:
header.update({HEADER_HA_ACCESS: self.api_password})
try:
async with async_timeout.timeout(30, loop=self.loop):
async with self.websession.get(url, headers=header) as request:
status = request.status
# pylint: disable=bad-continuation
async with self._websession_ssl.get(
url, headers=header, timeout=30) as request:
status = request.status
except (asyncio.TimeoutError, aiohttp.ClientError):
return False

View File

@ -1,9 +1,10 @@
"""Schedule for HassIO."""
import logging
from datetime import date, datetime, time, timedelta
_LOGGER = logging.getLogger(__name__)
SEC = 'seconds'
INTERVAL = 'interval'
REPEAT = 'repeat'
CALL = 'callback'
TASK = 'task'
@ -18,39 +19,57 @@ class Scheduler(object):
self._data = {}
self.suspend = False
def register_task(self, coro_callback, seconds, repeat=True,
now=False):
def register_task(self, coro_callback, interval, repeat=True):
"""Schedule a coroutine.
The coroutien need to be a callback without arguments.
"""
idx = hash(coro_callback)
task_id = hash(coro_callback)
# generate data
opts = {
CALL: coro_callback,
SEC: seconds,
INTERVAL: interval,
REPEAT: repeat,
}
self._data[idx] = opts
# schedule task
if now:
self._run_task(idx)
else:
task = self.loop.call_later(seconds, self._run_task, idx)
self._data[idx][TASK] = task
self._data[task_id] = opts
self._schedule_task(interval, task_id)
return idx
return task_id
def _run_task(self, idx):
def _run_task(self, task_id):
"""Run a scheduled task."""
data = self._data.pop(idx)
data = self._data[task_id]
if not self.suspend:
self.loop.create_task(data[CALL]())
if data[REPEAT]:
task = self.loop.call_later(data[SEC], self._run_task, idx)
data[TASK] = task
self._data[idx] = data
self._schedule_task(data[INTERVAL], task_id)
else:
self._data.pop(task_id)
def _schedule_task(self, interval, task_id):
"""Schedule a task on loop."""
if isinstance(interval, (int, float)):
job = self.loop.call_later(interval, self._run_task, task_id)
elif isinstance(interval, time):
today = datetime.combine(date.today(), interval)
tomorrow = datetime.combine(
date.today() + timedelta(days=1), interval)
# check if we run it today or next day
if today > datetime.today():
calc = today
else:
calc = tomorrow
job = self.loop.call_at(calc.timestamp(), self._run_task, task_id)
else:
_LOGGER.fatal("Unknow interval %s (type: %s) for scheduler %s",
interval, type(interval), task_id)
# Store job
self._data[task_id][TASK] = job

View File

@ -6,95 +6,99 @@ from pathlib import Path
import tarfile
from .snapshot import Snapshot
from .util import create_slug
from .utils import create_slug
from ..const import (
ATTR_SLUG, FOLDER_HOMEASSISTANT, SNAPSHOT_FULL, SNAPSHOT_PARTIAL)
from ..coresys import CoreSysAttributes
_LOGGER = logging.getLogger(__name__)
class SnapshotsManager(object):
class SnapshotsManager(CoreSysAttributes):
"""Manage snapshots."""
def __init__(self, config, loop, sheduler, addons, homeassistant):
def __init__(self, coresys):
"""Initialize a snapshot manager."""
self.config = config
self.loop = loop
self.sheduler = sheduler
self.addons = addons
self.homeassistant = homeassistant
self.snapshots = {}
self._lock = asyncio.Lock(loop=loop)
self.coresys = coresys
self.snapshots_obj = {}
self.lock = asyncio.Lock(loop=coresys.loop)
@property
def list_snapshots(self):
"""Return a list of all snapshot object."""
return set(self.snapshots.values())
return set(self.snapshots_obj.values())
def get(self, slug):
"""Return snapshot object."""
return self.snapshots.get(slug)
return self.snapshots_obj.get(slug)
def _create_snapshot(self, name, sys_type):
"""Initialize a new snapshot object from name."""
date_str = datetime.utcnow().isoformat()
slug = create_slug(name, date_str)
tar_file = Path(self.config.path_backup, "{}.tar".format(slug))
tar_file = Path(self._config.path_backup, "{}.tar".format(slug))
# init object
snapshot = Snapshot(self.config, self.loop, tar_file)
snapshot = Snapshot(self.coresys, tar_file)
snapshot.create(slug, name, date_str, sys_type)
# set general data
snapshot.snapshot_homeassistant(self.homeassistant)
snapshot.repositories = self.config.addons_repositories
snapshot.store_homeassistant()
snapshot.store_repositories()
return snapshot
def load(self):
"""Load exists snapshots data.
Return a coroutine.
"""
return self.reload()
async def reload(self):
"""Load exists backups."""
self.snapshots = {}
self.snapshots_obj = {}
async def _load_snapshot(tar_file):
"""Internal function to load snapshot."""
snapshot = Snapshot(self.config, self.loop, tar_file)
snapshot = Snapshot(self.coresys, tar_file)
if await snapshot.load():
self.snapshots[snapshot.slug] = snapshot
self.snapshots_obj[snapshot.slug] = snapshot
tasks = [_load_snapshot(tar_file) for tar_file in
self.config.path_backup.glob("*.tar")]
self._config.path_backup.glob("*.tar")]
_LOGGER.info("Found %d snapshot files", len(tasks))
if tasks:
await asyncio.wait(tasks, loop=self.loop)
await asyncio.wait(tasks, loop=self._loop)
def remove(self, snapshot):
"""Remove a snapshot."""
try:
snapshot.tar_file.unlink()
self.snapshots.pop(snapshot.slug, None)
self.snapshots_obj.pop(snapshot.slug, None)
except OSError as err:
_LOGGER.error("Can't remove snapshot %s -> %s", snapshot.slug, err)
_LOGGER.error("Can't remove snapshot %s: %s", snapshot.slug, err)
return False
return True
async def do_snapshot_full(self, name=""):
"""Create a full snapshot."""
if self._lock.locked():
if self.lock.locked():
_LOGGER.error("It is already a snapshot/restore process running")
return False
snapshot = self._create_snapshot(name, SNAPSHOT_FULL)
_LOGGER.info("Full-Snapshot %s start", snapshot.slug)
try:
self.sheduler.suspend = True
await self._lock.acquire()
self._scheduler.suspend = True
await self.lock.acquire()
async with snapshot:
# snapshot addons
tasks = []
for addon in self.addons.list_addons:
for addon in self._addons.list_addons:
if not addon.is_installed:
continue
tasks.append(snapshot.import_addon(addon))
@ -102,27 +106,27 @@ class SnapshotsManager(object):
if tasks:
_LOGGER.info("Full-Snapshot %s run %d addons",
snapshot.slug, len(tasks))
await asyncio.wait(tasks, loop=self.loop)
await asyncio.wait(tasks, loop=self._loop)
# snapshot folders
_LOGGER.info("Full-Snapshot %s store folders", snapshot.slug)
await snapshot.store_folders()
_LOGGER.info("Full-Snapshot %s done", snapshot.slug)
self.snapshots[snapshot.slug] = snapshot
self.snapshots_obj[snapshot.slug] = snapshot
return True
except (OSError, ValueError, tarfile.TarError) as err:
_LOGGER.info("Full-Snapshot %s error -> %s", snapshot.slug, err)
_LOGGER.info("Full-Snapshot %s error: %s", snapshot.slug, err)
return False
finally:
self.sheduler.suspend = False
self._lock.release()
self._scheduler.suspend = False
self.lock.release()
async def do_snapshot_partial(self, name="", addons=None, folders=None):
"""Create a partial snapshot."""
if self._lock.locked():
if self.lock.locked():
_LOGGER.error("It is already a snapshot/restore process running")
return False
@ -132,21 +136,21 @@ class SnapshotsManager(object):
_LOGGER.info("Partial-Snapshot %s start", snapshot.slug)
try:
self.sheduler.suspend = True
await self._lock.acquire()
self._scheduler.suspend = True
await self.lock.acquire()
async with snapshot:
# snapshot addons
tasks = []
for slug in addons:
addon = self.addons.get(slug)
addon = self._addons.get(slug)
if addon.is_installed:
tasks.append(snapshot.import_addon(addon))
if tasks:
_LOGGER.info("Partial-Snapshot %s run %d addons",
snapshot.slug, len(tasks))
await asyncio.wait(tasks, loop=self.loop)
await asyncio.wait(tasks, loop=self._loop)
# snapshot folders
_LOGGER.info("Partial-Snapshot %s store folders %s",
@ -154,20 +158,20 @@ class SnapshotsManager(object):
await snapshot.store_folders(folders)
_LOGGER.info("Partial-Snapshot %s done", snapshot.slug)
self.snapshots[snapshot.slug] = snapshot
self.snapshots_obj[snapshot.slug] = snapshot
return True
except (OSError, ValueError, tarfile.TarError) as err:
_LOGGER.info("Partial-Snapshot %s error -> %s", snapshot.slug, err)
_LOGGER.info("Partial-Snapshot %s error: %s", snapshot.slug, err)
return False
finally:
self.sheduler.suspend = False
self._lock.release()
self._scheduler.suspend = False
self.lock.release()
async def do_restore_full(self, snapshot):
"""Restore a snapshot."""
if self._lock.locked():
if self.lock.locked():
_LOGGER.error("It is already a snapshot/restore process running")
return False
@ -178,19 +182,19 @@ class SnapshotsManager(object):
_LOGGER.info("Full-Restore %s start", snapshot.slug)
try:
self.sheduler.suspend = True
await self._lock.acquire()
self._scheduler.suspend = True
await self.lock.acquire()
async with snapshot:
# stop system
tasks = []
tasks.append(self.homeassistant.stop())
tasks.append(self._homeassistant.stop())
for addon in self.addons.list_addons:
for addon in self._addons.list_addons:
if addon.is_installed:
tasks.append(addon.stop())
await asyncio.wait(tasks, loop=self.loop)
await asyncio.wait(tasks, loop=self._loop)
# restore folders
_LOGGER.info("Full-Restore %s restore folders", snapshot.slug)
@ -199,17 +203,19 @@ class SnapshotsManager(object):
# start homeassistant restore
_LOGGER.info("Full-Restore %s restore Home-Assistant",
snapshot.slug)
snapshot.restore_homeassistant(self.homeassistant)
task_hass = self.loop.create_task(
self.homeassistant.update(snapshot.homeassistant_version))
snapshot.restore_homeassistant()
task_hass = self._loop.create_task(
self._homeassistant.update(snapshot.homeassistant_version))
# restore repositories
await self.addons.load_repositories(snapshot.repositories)
_LOGGER.info("Full-Restore %s restore Repositories",
snapshot.slug)
await snapshot.restore_repositories()
# restore addons
tasks = []
actual_addons = \
set(addon.slug for addon in self.addons.list_addons
set(addon.slug for addon in self._addons.list_addons
if addon.is_installed)
restore_addons = \
set(data[ATTR_SLUG] for data in snapshot.addons)
@ -219,14 +225,14 @@ class SnapshotsManager(object):
snapshot.slug, restore_addons, remove_addons)
for slug in remove_addons:
addon = self.addons.get(slug)
addon = self._addons.get(slug)
if addon:
tasks.append(addon.uninstall())
else:
_LOGGER.warning("Can't remove addon %s", slug)
for slug in restore_addons:
addon = self.addons.get(slug)
addon = self._addons.get(slug)
if addon:
tasks.append(snapshot.export_addon(addon))
else:
@ -235,29 +241,29 @@ class SnapshotsManager(object):
if tasks:
_LOGGER.info("Full-Restore %s restore addons tasks %d",
snapshot.slug, len(tasks))
await asyncio.wait(tasks, loop=self.loop)
await asyncio.wait(tasks, loop=self._loop)
# finish homeassistant task
_LOGGER.info("Full-Restore %s wait until homeassistant ready",
snapshot.slug)
await task_hass
await self.homeassistant.run()
await self._homeassistant.run()
_LOGGER.info("Full-Restore %s done", snapshot.slug)
return True
except (OSError, ValueError, tarfile.TarError) as err:
_LOGGER.info("Full-Restore %s error -> %s", slug, err)
_LOGGER.info("Full-Restore %s error: %s", slug, err)
return False
finally:
self.sheduler.suspend = False
self._lock.release()
self._scheduler.suspend = False
self.lock.release()
async def do_restore_partial(self, snapshot, homeassistant=False,
addons=None, folders=None):
"""Restore a snapshot."""
if self._lock.locked():
if self.lock.locked():
_LOGGER.error("It is already a snapshot/restore process running")
return False
@ -266,14 +272,14 @@ class SnapshotsManager(object):
_LOGGER.info("Partial-Restore %s start", snapshot.slug)
try:
self.sheduler.suspend = True
await self._lock.acquire()
self._scheduler.suspend = True
await self.lock.acquire()
async with snapshot:
tasks = []
if FOLDER_HOMEASSISTANT in folders:
await self.homeassistant.stop()
await self._homeassistant.stop()
if folders:
_LOGGER.info("Partial-Restore %s restore folders %s",
@ -283,12 +289,12 @@ class SnapshotsManager(object):
if homeassistant:
_LOGGER.info("Partial-Restore %s restore Home-Assistant",
snapshot.slug)
snapshot.restore_homeassistant(self.homeassistant)
tasks.append(self.homeassistant.update(
snapshot.restore_homeassistant()
tasks.append(self._homeassistant.update(
snapshot.homeassistant_version))
for slug in addons:
addon = self.addons.get(slug)
addon = self._addons.get(slug)
if addon:
tasks.append(snapshot.export_addon(addon))
else:
@ -297,18 +303,18 @@ class SnapshotsManager(object):
if tasks:
_LOGGER.info("Partial-Restore %s run %d tasks",
snapshot.slug, len(tasks))
await asyncio.wait(tasks, loop=self.loop)
await asyncio.wait(tasks, loop=self._loop)
# make sure homeassistant run agen
await self.homeassistant.run()
await self._homeassistant.run()
_LOGGER.info("Partial-Restore %s done", snapshot.slug)
return True
except (OSError, ValueError, tarfile.TarError) as err:
_LOGGER.info("Partial-Restore %s error -> %s", slug, err)
_LOGGER.info("Partial-Restore %s error: %s", slug, err)
return False
finally:
self.sheduler.suspend = False
self._lock.release()
self._scheduler.suspend = False
self.lock.release()

View File

@ -10,23 +10,23 @@ import voluptuous as vol
from voluptuous.humanize import humanize_error
from .validate import SCHEMA_SNAPSHOT, ALL_FOLDERS
from .util import remove_folder
from .utils import remove_folder
from ..const import (
ATTR_SLUG, ATTR_NAME, ATTR_DATE, ATTR_ADDONS, ATTR_REPOSITORIES,
ATTR_HOMEASSISTANT, ATTR_FOLDERS, ATTR_VERSION, ATTR_TYPE, ATTR_DEVICES,
ATTR_IMAGE, ATTR_PORT, ATTR_SSL, ATTR_PASSWORD, ATTR_WATCHDOG, ATTR_BOOT)
from ..tools import write_json_file
from ..coresys import CoreSysAttributes
from ..utils.json import write_json_file
_LOGGER = logging.getLogger(__name__)
class Snapshot(object):
class Snapshot(CoreSysAttributes):
"""A signle hassio snapshot."""
def __init__(self, config, loop, tar_file):
def __init__(self, coresys, tar_file):
"""Initialize a snapshot."""
self.loop = loop
self.config = config
self.coresys = coresys
self.tar_file = tar_file
self._data = {}
self._tmp = None
@ -166,43 +166,8 @@ class Snapshot(object):
self._data[ATTR_DATE] = date
self._data[ATTR_TYPE] = sys_type
# init other constructs
self._data[ATTR_HOMEASSISTANT] = {}
self._data[ATTR_ADDONS] = []
self._data[ATTR_REPOSITORIES] = []
self._data[ATTR_FOLDERS] = []
def snapshot_homeassistant(self, homeassistant):
"""Read all data from homeassistant object."""
self.homeassistant_version = homeassistant.version
self.homeassistant_devices = homeassistant.devices
self.homeassistant_watchdog = homeassistant.watchdog
self.homeassistant_boot = homeassistant.boot
# custom image
if homeassistant.is_custom_image:
self.homeassistant_image = homeassistant.image
# api
self.homeassistant_port = homeassistant.api_port
self.homeassistant_ssl = homeassistant.api_ssl
self.homeassistant_password = homeassistant.api_password
def restore_homeassistant(self, homeassistant):
"""Write all data to homeassistant object."""
homeassistant.devices = self.homeassistant_devices
homeassistant.watchdog = self.homeassistant_watchdog
homeassistant.boot = self.homeassistant_boot
# custom image
if self.homeassistant_image:
homeassistant.set_custom(
self.homeassistant_image, self.homeassistant_version)
# api
homeassistant.api_port = self.homeassistant_port
homeassistant.api_ssl = self.homeassistant_ssl
homeassistant.api_password = self.homeassistant_password
# Add defaults
self._data = SCHEMA_SNAPSHOT(self._data)
async def load(self):
"""Read snapshot.json from tar file."""
@ -218,24 +183,24 @@ class Snapshot(object):
# read snapshot.json
try:
raw = await self.loop.run_in_executor(None, _load_file)
raw = await self._loop.run_in_executor(None, _load_file)
except (tarfile.TarError, KeyError) as err:
_LOGGER.error(
"Can't read snapshot tarfile %s -> %s", self.tar_file, err)
"Can't read snapshot tarfile %s: %s", self.tar_file, err)
return False
# parse data
try:
raw_dict = json.loads(raw)
except json.JSONDecodeError as err:
_LOGGER.error("Can't read data for %s -> %s", self.tar_file, err)
_LOGGER.error("Can't read data for %s: %s", self.tar_file, err)
return False
# validate
try:
self._data = SCHEMA_SNAPSHOT(raw_dict)
except vol.Invalid as err:
_LOGGER.error("Can't validate data for %s -> %s", self.tar_file,
_LOGGER.error("Can't validate data for %s: %s", self.tar_file,
humanize_error(raw_dict, err))
return False
@ -243,7 +208,7 @@ class Snapshot(object):
async def __aenter__(self):
"""Async context to open a snapshot."""
self._tmp = TemporaryDirectory(dir=str(self.config.path_tmp))
self._tmp = TemporaryDirectory(dir=str(self._config.path_tmp))
# create a snapshot
if not self.tar_file.is_file():
@ -255,7 +220,7 @@ class Snapshot(object):
with tarfile.open(self.tar_file, "r:") as tar:
tar.extractall(path=self._tmp.name)
await self.loop.run_in_executor(None, _extract_snapshot)
await self._loop.run_in_executor(None, _extract_snapshot)
async def __aexit__(self, exception_type, exception_value, traceback):
"""Async context to close a snapshot."""
@ -268,7 +233,7 @@ class Snapshot(object):
try:
self._data = SCHEMA_SNAPSHOT(self._data)
except vol.Invalid as err:
_LOGGER.error("Invalid data for %s -> %s", self.tar_file,
_LOGGER.error("Invalid data for %s: %s", self.tar_file,
humanize_error(self._data, err))
raise ValueError("Invalid config") from None
@ -279,7 +244,7 @@ class Snapshot(object):
tar.add(self._tmp.name, arcname=".")
if write_json_file(Path(self._tmp.name, "snapshot.json"), self._data):
await self.loop.run_in_executor(None, _create_snapshot)
await self._loop.run_in_executor(None, _create_snapshot)
else:
_LOGGER.error("Can't write snapshot.json")
@ -320,7 +285,7 @@ class Snapshot(object):
"""Intenal function to snapshot a folder."""
slug_name = name.replace("/", "_")
snapshot_tar = Path(self._tmp.name, "{}.tar.gz".format(slug_name))
origin_dir = Path(self.config.path_hassio, name)
origin_dir = Path(self._config.path_hassio, name)
try:
_LOGGER.info("Snapshot folder %s", name)
@ -331,13 +296,13 @@ class Snapshot(object):
self._data[ATTR_FOLDERS].append(name)
except tarfile.TarError as err:
_LOGGER.warning("Can't snapshot folder %s -> %s", name, err)
_LOGGER.warning("Can't snapshot folder %s: %s", name, err)
# run tasks
tasks = [self.loop.run_in_executor(None, _folder_save, folder)
tasks = [self._loop.run_in_executor(None, _folder_save, folder)
for folder in folder_list]
if tasks:
await asyncio.wait(tasks, loop=self.loop)
await asyncio.wait(tasks, loop=self._loop)
async def restore_folders(self, folder_list=None):
"""Backup hassio data into snapshot."""
@ -347,7 +312,7 @@ class Snapshot(object):
"""Intenal function to restore a folder."""
slug_name = name.replace("/", "_")
snapshot_tar = Path(self._tmp.name, "{}.tar.gz".format(slug_name))
origin_dir = Path(self.config.path_hassio, name)
origin_dir = Path(self._config.path_hassio, name)
# clean old stuff
if origin_dir.is_dir():
@ -359,10 +324,53 @@ class Snapshot(object):
tar_file.extractall(path=origin_dir)
_LOGGER.info("Restore folder %s done", name)
except tarfile.TarError as err:
_LOGGER.warning("Can't restore folder %s -> %s", name, err)
_LOGGER.warning("Can't restore folder %s: %s", name, err)
# run tasks
tasks = [self.loop.run_in_executor(None, _folder_restore, folder)
tasks = [self._loop.run_in_executor(None, _folder_restore, folder)
for folder in folder_list]
if tasks:
await asyncio.wait(tasks, loop=self.loop)
await asyncio.wait(tasks, loop=self._loop)
def store_homeassistant(self):
    """Copy the current Home-Assistant settings into this snapshot.

    Reads version, devices, watchdog/boot flags and API settings from
    the homeassistant object and stores them on the snapshot.
    """
    self.homeassistant_version = self._homeassistant.version
    self.homeassistant_devices = self._homeassistant.devices
    self.homeassistant_watchdog = self._homeassistant.watchdog
    self.homeassistant_boot = self._homeassistant.boot

    # custom image: only recorded when a non-default image is in use
    if self._homeassistant.is_custom_image:
        self.homeassistant_image = self._homeassistant.image

    # api
    self.homeassistant_port = self._homeassistant.api_port
    self.homeassistant_ssl = self._homeassistant.api_ssl
    self.homeassistant_password = self._homeassistant.api_password
def restore_homeassistant(self):
    """Write the Home-Assistant settings stored in this snapshot back
    to the homeassistant object.

    Counterpart of store_homeassistant. The version is only applied
    together with a custom image (via set_custom); otherwise the
    running installation keeps its own version.
    """
    self._homeassistant.devices = self.homeassistant_devices
    self._homeassistant.watchdog = self.homeassistant_watchdog
    self._homeassistant.boot = self.homeassistant_boot

    # custom image
    if self.homeassistant_image:
        self._homeassistant.set_custom(
            self.homeassistant_image, self.homeassistant_version)

    # api
    self._homeassistant.api_port = self.homeassistant_port
    self._homeassistant.api_ssl = self.homeassistant_ssl
    self._homeassistant.api_password = self.homeassistant_password
def store_repositories(self):
    """Store the configured add-on repository list into this snapshot."""
    self.repositories = self._config.addons_repositories
def restore_repositories(self):
    """Restore add-on repositories from this snapshot.

    Return a coroutine.
    """
    return self._addons.load_repositories(self.repositories)

View File

@ -18,7 +18,7 @@ SCHEMA_SNAPSHOT = vol.Schema({
vol.Required(ATTR_TYPE): vol.In([SNAPSHOT_FULL, SNAPSHOT_PARTIAL]),
vol.Required(ATTR_NAME): vol.Coerce(str),
vol.Required(ATTR_DATE): vol.Coerce(str),
vol.Required(ATTR_HOMEASSISTANT): vol.Schema({
vol.Optional(ATTR_HOMEASSISTANT, default={}): vol.Schema({
vol.Required(ATTR_VERSION): vol.Coerce(str),
vol.Optional(ATTR_DEVICES, default=[]): HASS_DEVICES,
vol.Optional(ATTR_IMAGE): vol.Coerce(str),

61
hassio/supervisor.py Normal file
View File

@ -0,0 +1,61 @@
"""HomeAssistant control object."""
import logging
from .coresys import CoreSysAttributes
from .docker.supervisor import DockerSupervisor
_LOGGER = logging.getLogger(__name__)
class Supervisor(CoreSysAttributes):
    """Hass.io supervisor core object: manages the supervisor's own
    docker container (attach, inspect, self-update)."""

    def __init__(self, coresys):
        """Initialize the supervisor handler."""
        self.coresys = coresys
        self.instance = DockerSupervisor(coresys)

    async def load(self):
        """Attach to the already-running supervisor docker container
        and clean up old container/image leftovers."""
        if not await self.instance.attach():
            _LOGGER.fatal("Can't setup supervisor docker container!")
        await self.instance.cleanup()

    @property
    def version(self):
        """Return version of the running supervisor."""
        return self.instance.version

    @property
    def last_version(self):
        """Return latest available supervisor version."""
        return self._updater.version_hassio

    @property
    def image(self):
        """Return docker image name of the supervisor container."""
        return self.instance.image

    @property
    def arch(self):
        """Return arch of the supervisor container."""
        return self.instance.arch

    async def update(self, version=None):
        """Update supervisor to *version* (defaults to latest known).

        On a successful install the event loop is stopped shortly
        after, so the new supervisor container can take over.
        """
        version = version or self.last_version

        if version == self._supervisor.version:
            _LOGGER.info("Version %s is already installed", version)
            return

        _LOGGER.info("Update supervisor to version %s", version)
        if await self.instance.install(version):
            # stop the loop so the updated container takes over
            self._loop.call_later(1, self._loop.stop)
        else:
            _LOGGER.error("Update of hass.io fails!")

    @property
    def in_progress(self):
        """Return True if a task is in progress."""
        return self.instance.in_progress

View File

@ -3,27 +3,68 @@ import asyncio
from datetime import datetime
import logging
from .coresys import CoreSysAttributes
_LOGGER = logging.getLogger(__name__)
def api_sessions_cleanup(config):
"""Create scheduler task for cleanup api sessions."""
async def _api_sessions_cleanup():
class Tasks(CoreSysAttributes):
    """Handle periodic tasks (updates, reloads, watchdogs) inside HassIO."""

    # Scheduler intervals, in seconds
    RUN_UPDATE_SUPERVISOR = 29100
    RUN_UPDATE_ADDONS = 57600

    RUN_RELOAD_ADDONS = 21600
    RUN_RELOAD_SNAPSHOTS = 72000
    RUN_RELOAD_HOST_CONTROL = 72000
    RUN_RELOAD_UPDATER = 21600

    RUN_WATCHDOG_HOMEASSISTANT_DOCKER = 15
    RUN_WATCHDOG_HOMEASSISTANT_API = 300

    RUN_CLEANUP_API_SESSIONS = 900

    def __init__(self, coresys):
        """Initialize Tasks."""
        self.coresys = coresys
        # handles of registered scheduler jobs
        self.jobs = set()
        # runtime state shared between task runs (e.g. watchdog retry counter)
        self._data = {}

    async def load(self):
        """Register all periodic tasks with the scheduler."""
        self.jobs.add(self._scheduler.register_task(
            self._update_addons, self.RUN_UPDATE_ADDONS))
        self.jobs.add(self._scheduler.register_task(
            self._update_supervisor, self.RUN_UPDATE_SUPERVISOR))

        self.jobs.add(self._scheduler.register_task(
            self._addons.reload, self.RUN_RELOAD_ADDONS))
        self.jobs.add(self._scheduler.register_task(
            self._updater.reload, self.RUN_RELOAD_UPDATER))
        self.jobs.add(self._scheduler.register_task(
            self._snapshots.reload, self.RUN_RELOAD_SNAPSHOTS))
        self.jobs.add(self._scheduler.register_task(
            self._host_control.load, self.RUN_RELOAD_HOST_CONTROL))

        self.jobs.add(self._scheduler.register_task(
            self._watchdog_homeassistant_docker,
            self.RUN_WATCHDOG_HOMEASSISTANT_DOCKER))
        self.jobs.add(self._scheduler.register_task(
            self._watchdog_homeassistant_api,
            self.RUN_WATCHDOG_HOMEASSISTANT_API))
async def _cleanup_sessions(self):
"""Cleanup old api sessions."""
now = datetime.now()
for session, until_valid in config.security_sessions.items():
for session, until_valid in self._config.security_sessions.items():
if now >= until_valid:
config.drop_security_session(session)
self._config.drop_security_session(session)
return _api_sessions_cleanup
def addons_update(loop, addons):
"""Create scheduler task for auto update addons."""
async def _addons_update():
async def _update_addons(self):
"""Check if a update is available of a addon and update it."""
tasks = []
for addon in addons.list_addons:
for addon in self._addons.list_addons:
if not addon.is_installed or not addon.auto_update:
continue
@ -38,78 +79,62 @@ def addons_update(loop, addons):
if tasks:
_LOGGER.info("Addon auto update process %d tasks", len(tasks))
await asyncio.wait(tasks, loop=loop)
await asyncio.wait(tasks, loop=self._loop)
return _addons_update
def hassio_update(supervisor, updater):
"""Create scheduler task for update of supervisor hassio."""
async def _hassio_update():
async def _update_supervisor(self):
"""Check and run update of supervisor hassio."""
await updater.fetch_data()
if updater.version_hassio == supervisor.version:
await self._updater.reload()
if self._supervisor.last_version == self._supervisor.version:
return
# don't perform a update on beta/dev channel
if updater.beta_channel:
_LOGGER.warning("Ignore Hass.IO update on beta upstream!")
if self._updater.beta_channel:
_LOGGER.warning("Ignore Hass.io update on beta upstream!")
return
_LOGGER.info("Found new HassIO version %s.", updater.version_hassio)
await supervisor.update(updater.version_hassio)
_LOGGER.info("Found new Hass.io version")
await self._supervisor.update()
return _hassio_update
def homeassistant_watchdog_docker(loop, homeassistant):
"""Create scheduler task for montoring running state of docker."""
async def _homeassistant_watchdog_docker():
async def _watchdog_homeassistant_docker(self):
"""Check running state of docker and start if they is close."""
# if Home-Assistant is active
if not await homeassistant.is_initialize() or \
not homeassistant.watchdog:
if not await self._homeassistant.is_initialize() or \
not self._homeassistant.watchdog:
return
# if Home-Assistant is running
if homeassistant.in_progress or await homeassistant.is_running():
if self._homeassistant.in_progress or \
await self._homeassistant.is_running():
return
loop.create_task(homeassistant.run())
_LOGGER.error("Watchdog found a problem with Home-Assistant docker!")
_LOGGER.warning("Watchdog found a problem with Home-Assistant docker!")
await self._homeassistant.run()
return _homeassistant_watchdog_docker
async def _watchdog_homeassistant_api(self):
"""Create scheduler task for monitoring running state of API.
Try 2 times to call API before we restart Home-Assistant. Maybe we had
a delay in our system.
"""
retry_scan = self._data.get('HASS_WATCHDOG_API', 0)
def homeassistant_watchdog_api(loop, homeassistant):
"""Create scheduler task for montoring running state of API.
Try 2 times to call API before we restart Home-Assistant. Maybe we had a
delay in our system.
"""
retry_scan = 0
async def _homeassistant_watchdog_api():
"""Check running state of API and start if they is close."""
nonlocal retry_scan
# if Home-Assistant is active
if not await homeassistant.is_initialize() or \
not homeassistant.watchdog:
# If Home-Assistant is active
if not await self._homeassistant.is_initialize() or \
not self._homeassistant.watchdog:
return
# if Home-Assistant API is up
if homeassistant.in_progress or await homeassistant.check_api_state():
# If Home-Assistant API is up
if self._homeassistant.in_progress or \
await self._homeassistant.check_api_state():
return
# Look like we run into a problem
retry_scan += 1
# Retry active
if retry_scan == 1:
self._data['HASS_WATCHDOG_API'] = retry_scan
_LOGGER.warning("Watchdog miss API response from Home-Assistant")
return
loop.create_task(homeassistant.restart())
_LOGGER.error("Watchdog found a problem with Home-Assistant API!")
retry_scan = 0
return _homeassistant_watchdog_api
await self._homeassistant.restart()
self._data['HASS_WATCHDOG_API'] = 0

View File

@ -1,167 +0,0 @@
"""Tools file for HassIO."""
import asyncio
from contextlib import suppress
from datetime import datetime, timedelta, timezone
import json
import logging
import re
import aiohttp
import async_timeout
import pytz
import voluptuous as vol
from voluptuous.humanize import humanize_error
_LOGGER = logging.getLogger(__name__)
FREEGEOIP_URL = "https://freegeoip.io/json/"
RE_STRING = re.compile(r"\x1b(\[.*?[@-~]|\].*?(\x07|\x1b\\))")
# Copyright (c) Django Software Foundation and individual contributors.
# All rights reserved.
# https://github.com/django/django/blob/master/LICENSE
DATETIME_RE = re.compile(
r'(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})'
r'[T ](?P<hour>\d{1,2}):(?P<minute>\d{1,2})'
r'(?::(?P<second>\d{1,2})(?:\.(?P<microsecond>\d{1,6})\d{0,6})?)?'
r'(?P<tzinfo>Z|[+-]\d{2}(?::?\d{2})?)?$'
)
def write_json_file(jsonfile, data):
"""Write a json file."""
try:
json_str = json.dumps(data, indent=2)
with jsonfile.open('w') as conf_file:
conf_file.write(json_str)
except (OSError, json.JSONDecodeError):
return False
return True
def read_json_file(jsonfile):
"""Read a json file and return a dict."""
with jsonfile.open('r') as cfile:
return json.loads(cfile.read())
async def fetch_timezone(websession):
"""Read timezone from freegeoip."""
data = {}
with suppress(aiohttp.ClientError, asyncio.TimeoutError,
json.JSONDecodeError, KeyError):
with async_timeout.timeout(10, loop=websession.loop):
async with websession.get(FREEGEOIP_URL) as request:
data = await request.json()
return data.get('time_zone', 'UTC')
def convert_to_ascii(raw):
"""Convert binary to ascii and remove colors."""
return RE_STRING.sub("", raw.decode())
# Copyright (c) Django Software Foundation and individual contributors.
# All rights reserved.
# https://github.com/django/django/blob/master/LICENSE
def parse_datetime(dt_str):
"""Parse a string and return a datetime.datetime.
This function supports time zone offsets. When the input contains one,
the output uses a timezone with a fixed offset from UTC.
Raises ValueError if the input is well formatted but not a valid datetime.
Returns None if the input isn't well formatted.
"""
match = DATETIME_RE.match(dt_str)
if not match:
return None
kws = match.groupdict() # type: Dict[str, Any]
if kws['microsecond']:
kws['microsecond'] = kws['microsecond'].ljust(6, '0')
tzinfo_str = kws.pop('tzinfo')
tzinfo = None # type: Optional[dt.tzinfo]
if tzinfo_str == 'Z':
tzinfo = pytz.utc
elif tzinfo_str is not None:
offset_mins = int(tzinfo_str[-2:]) if len(tzinfo_str) > 3 else 0
offset_hours = int(tzinfo_str[1:3])
offset = timedelta(hours=offset_hours, minutes=offset_mins)
if tzinfo_str[0] == '-':
offset = -offset
tzinfo = timezone(offset)
else:
tzinfo = None
kws = {k: int(v) for k, v in kws.items() if v is not None}
kws['tzinfo'] = tzinfo
return datetime(**kws)
class JsonConfig(object):
"""Hass core object for handle it."""
def __init__(self, json_file, schema):
"""Initialize hass object."""
self._file = json_file
self._schema = schema
self._data = {}
# init or load data
if self._file.is_file():
try:
self._data = read_json_file(self._file)
except (OSError, json.JSONDecodeError):
_LOGGER.warning("Can't read %s", self._file)
self._data = {}
# validate
try:
self._data = self._schema(self._data)
except vol.Invalid as ex:
_LOGGER.error("Can't parse %s -> %s",
self._file, humanize_error(self._data, ex))
# reset data to default
self._data = self._schema({})
def save(self):
"""Store data to config file."""
# validate
try:
self._data = self._schema(self._data)
except vol.Invalid as ex:
_LOGGER.error("Can't parse data -> %s",
humanize_error(self._data, ex))
return False
# write
if not write_json_file(self._file, self._data):
_LOGGER.error("Can't store config in %s", self._file)
return False
return True
class AsyncThrottle(object):
"""
Decorator that prevents a function from being called more than once every
time period.
"""
def __init__(self, delta):
"""Initialize async throttle."""
self.throttle_period = delta
self.time_of_last_call = datetime.min
def __call__(self, method):
"""Throttle function"""
async def wrapper(*args, **kwargs):
"""Throttle function wrapper"""
now = datetime.now()
time_since_last_call = now - self.time_of_last_call
if time_since_last_call > self.throttle_period:
self.time_of_last_call = now
return await method(*args, **kwargs)
return wrapper

View File

@ -10,21 +10,28 @@ import async_timeout
from .const import (
URL_HASSIO_VERSION, FILE_HASSIO_UPDATER, ATTR_HOMEASSISTANT, ATTR_HASSIO,
ATTR_BETA_CHANNEL)
from .tools import AsyncThrottle, JsonConfig
from .coresys import CoreSysAttributes
from .utils import AsyncThrottle
from .utils.json import JsonConfig
from .validate import SCHEMA_UPDATER_CONFIG
_LOGGER = logging.getLogger(__name__)
class Updater(JsonConfig):
class Updater(JsonConfig, CoreSysAttributes):
"""Fetch last versions from version.json."""
def __init__(self, config, loop, websession):
def __init__(self, coresys):
"""Initialize updater."""
super().__init__(FILE_HASSIO_UPDATER, SCHEMA_UPDATER_CONFIG)
self.config = config
self.loop = loop
self.websession = websession
self.coresys = coresys
def load(self):
"""Update internal data.
Return a coroutine.
"""
return self.reload()
@property
def version_homeassistant(self):
@ -55,7 +62,7 @@ class Updater(JsonConfig):
self.save()
@AsyncThrottle(timedelta(seconds=60))
async def fetch_data(self):
async def reload(self):
"""Fetch current versions from github.
Is a coroutine.
@ -63,16 +70,16 @@ class Updater(JsonConfig):
url = URL_HASSIO_VERSION.format(self.upstream)
try:
_LOGGER.info("Fetch update data from %s", url)
with async_timeout.timeout(10, loop=self.loop):
async with self.websession.get(url) as request:
with async_timeout.timeout(10, loop=self._loop):
async with self._websession.get(url) as request:
data = await request.json(content_type=None)
except (aiohttp.ClientError, asyncio.TimeoutError, KeyError) as err:
_LOGGER.warning("Can't fetch versions from %s -> %s", url, err)
_LOGGER.warning("Can't fetch versions from %s: %s", url, err)
return
except json.JSONDecodeError as err:
_LOGGER.warning("Can't parse versions from %s -> %s", url, err)
_LOGGER.warning("Can't parse versions from %s: %s", url, err)
return
# data valid?

34
hassio/utils/__init__.py Normal file
View File

@ -0,0 +1,34 @@
"""Tools file for HassIO."""
from datetime import datetime
import re
# Matches ANSI escape sequences (colors, cursor control) in terminal output.
RE_STRING = re.compile(r"\x1b(\[.*?[@-~]|\].*?(\x07|\x1b\\))")


def convert_to_ascii(raw):
    """Convert raw log/terminal output bytes to clean text.

    Decodes *raw* and strips ANSI escape sequences. Invalid byte
    sequences are replaced (U+FFFD) instead of raising
    UnicodeDecodeError, since log streams may carry arbitrary binary.
    """
    return RE_STRING.sub("", raw.decode(errors="replace"))
class AsyncThrottle(object):
    """Decorator that prevents a coroutine from being executed more
    than once every time period.

    Calls made within ``delta`` of the last executed call are skipped
    and return None.
    """

    def __init__(self, delta):
        """Initialize async throttle.

        delta: datetime.timedelta minimum interval between executions.
        """
        self.throttle_period = delta
        # datetime.min guarantees the first call always executes
        self.time_of_last_call = datetime.min

    def __call__(self, method):
        """Wrap *method* with the throttling logic."""
        # local import: keep module-level dependencies unchanged
        from functools import wraps

        @wraps(method)
        async def wrapper(*args, **kwargs):
            """Execute *method* only if the throttle period elapsed."""
            now = datetime.now()
            if now - self.time_of_last_call > self.throttle_period:
                self.time_of_last_call = now
                return await method(*args, **kwargs)
            # Throttled: skip the call, implicitly return None.

        return wrapper

76
hassio/utils/dt.py Normal file
View File

@ -0,0 +1,76 @@
"""Tools file for HassIO."""
import asyncio
from datetime import datetime, timedelta, timezone
import logging
import re
import aiohttp
import async_timeout
import pytz
_LOGGER = logging.getLogger(__name__)
FREEGEOIP_URL = "https://freegeoip.io/json/"
# Copyright (c) Django Software Foundation and individual contributors.
# All rights reserved.
# https://github.com/django/django/blob/master/LICENSE
DATETIME_RE = re.compile(
    r'(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})'
    r'[T ](?P<hour>\d{1,2}):(?P<minute>\d{1,2})'
    r'(?::(?P<second>\d{1,2})(?:\.(?P<microsecond>\d{1,6})\d{0,6})?)?'
    r'(?P<tzinfo>Z|[+-]\d{2}(?::?\d{2})?)?$'
)


async def fetch_timezone(websession):
    """Read the local timezone name from the freegeoip web service.

    websession: aiohttp ClientSession used for the request.
    Returns the timezone string, or 'UTC' on any failure.
    """
    data = {}
    try:
        with async_timeout.timeout(10, loop=websession.loop):
            async with websession.get(FREEGEOIP_URL) as request:
                data = await request.json()

    except (aiohttp.ClientError, asyncio.TimeoutError, KeyError) as err:
        _LOGGER.warning("Can't fetch freegeoip data: %s", err)

    except ValueError as err:
        _LOGGER.warning("Error on parse freegeoip data: %s", err)

    return data.get('time_zone', 'UTC')


# Copyright (c) Django Software Foundation and individual contributors.
# All rights reserved.
# https://github.com/django/django/blob/master/LICENSE
def parse_datetime(dt_str):
    """Parse a string and return a datetime.datetime.

    This function supports time zone offsets. When the input contains
    one, the output uses a timezone with a fixed offset from UTC.

    Raises ValueError if the input is well formatted but not a valid
    datetime. Returns None if the input isn't well formatted.
    """
    match = DATETIME_RE.match(dt_str)
    if not match:
        return None

    kws = match.groupdict()  # type: Dict[str, Any]
    if kws['microsecond']:
        # pad to 6 digits so e.g. ".5" becomes 500000 microseconds
        kws['microsecond'] = kws['microsecond'].ljust(6, '0')
    tzinfo_str = kws.pop('tzinfo')

    tzinfo = None  # type: Optional[dt.tzinfo]
    if tzinfo_str == 'Z':
        # stdlib timezone.utc instead of pytz.utc: same UTC offset,
        # consistent with the fixed-offset branch below
        tzinfo = timezone.utc
    elif tzinfo_str is not None:
        # "+HH", "+HHMM" or "+HH:MM" — minutes only present if len > 3
        offset_mins = int(tzinfo_str[-2:]) if len(tzinfo_str) > 3 else 0
        offset_hours = int(tzinfo_str[1:3])
        offset = timedelta(hours=offset_hours, minutes=offset_mins)
        if tzinfo_str[0] == '-':
            offset = -offset
        tzinfo = timezone(offset)

    kws = {k: int(v) for k, v in kws.items() if v is not None}
    kws['tzinfo'] = tzinfo
    return datetime(**kws)

69
hassio/utils/json.py Normal file
View File

@ -0,0 +1,69 @@
"""Tools file for HassIO."""
import json
import logging
import voluptuous as vol
from voluptuous.humanize import humanize_error
_LOGGER = logging.getLogger(__name__)
def write_json_file(jsonfile, data):
    """Write *data* as pretty-printed JSON to *jsonfile* (a Path).

    Returns True on success, False when the data is not
    JSON-serializable or the file cannot be written.
    """
    try:
        json_str = json.dumps(data, indent=2)
        with jsonfile.open('w') as conf_file:
            conf_file.write(json_str)
    # json.dumps raises TypeError (non-serializable object) or
    # ValueError (e.g. circular reference) — not JSONDecodeError,
    # which is only raised while *reading* JSON.
    except (OSError, TypeError, ValueError):
        return False

    return True
def read_json_file(jsonfile):
    """Read a JSON file (a Path) and return the parsed data."""
    with jsonfile.open('r') as json_handle:
        return json.load(json_handle)
class JsonConfig(object):
    """Base class for objects persisted as a schema-validated JSON file."""

    def __init__(self, json_file, schema):
        """Initialize the config store.

        json_file: pathlib.Path of the backing JSON file.
        schema: voluptuous schema used to validate the data.
        """
        self._file = json_file
        self._schema = schema
        self._data = {}

        # init or load data
        if self._file.is_file():
            try:
                self._data = read_json_file(self._file)
            except (OSError, json.JSONDecodeError):
                _LOGGER.warning("Can't read %s", self._file)
                self._data = {}

        # validate
        try:
            self._data = self._schema(self._data)
        except vol.Invalid as ex:
            _LOGGER.error("Can't parse %s: %s",
                          self._file, humanize_error(self._data, ex))

            # reset data to default
            self._data = self._schema({})

    def save(self):
        """Validate and store data to the config file.

        Returns True on success, False when validation or writing fails.
        """
        # validate
        try:
            self._data = self._schema(self._data)
        except vol.Invalid as ex:
            _LOGGER.error("Can't parse data: %s",
                          humanize_error(self._data, ex))
            return False

        # write
        if not write_json_file(self._file, self._data):
            _LOGGER.error("Can't store config in %s", self._file)
            return False

        return True

View File

@ -8,12 +8,13 @@ from .const import (
ATTR_TOTP, ATTR_SECURITY, ATTR_BETA_CHANNEL, ATTR_TIMEZONE,
ATTR_ADDONS_CUSTOM_LIST, ATTR_AUDIO_OUTPUT, ATTR_AUDIO_INPUT,
ATTR_HOMEASSISTANT, ATTR_HASSIO, ATTR_BOOT, ATTR_LAST_BOOT, ATTR_SSL,
ATTR_PORT, ATTR_WATCHDOG)
ATTR_PORT, ATTR_WATCHDOG, ATTR_WAIT_BOOT)
NETWORK_PORT = vol.All(vol.Coerce(int), vol.Range(min=1, max=65535))
HASS_DEVICES = [vol.Match(r"^[^/]*$")]
ALSA_CHANNEL = vol.Match(r"\d+,\d+")
WAIT_BOOT = vol.All(vol.Coerce(int), vol.Range(min=1, max=60))
def validate_timezone(timezone):
@ -90,4 +91,5 @@ SCHEMA_HASSIO_CONFIG = vol.Schema({
vol.Schema({vol.Coerce(str): vol.Coerce(str)}),
vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_CHANNEL,
vol.Optional(ATTR_AUDIO_INPUT): ALSA_CHANNEL,
vol.Optional(ATTR_WAIT_BOOT, default=5): WAIT_BOOT,
}, extra=vol.REMOVE_EXTRA)

View File

@ -31,7 +31,8 @@ setup(
platforms='any',
packages=[
'hassio',
'hassio.dock',
'hassio.utils',
'hassio.docker',
'hassio.api',
'hassio.addons',
'hassio.snapshots'

View File

@ -2,8 +2,6 @@
envlist = lint
[testenv]
setenv =
PYTHONPATH = {toxinidir}:{toxinidir}/hassio
deps =
flake8
pylint
@ -12,5 +10,5 @@ deps =
basepython = python3
ignore_errors = True
commands =
flake8 hassio
pylint hassio
flake8
pylint --rcfile pylintrc hassio