Support armv7 and allow support of multiple arch types per CPU (#892)

* Support armv7 and first abstraction

* Change layout

* Add more type hints

* Fix imports

* Update

* move forward

* add tests

* fix type

* fix lint & tests

* fix tests

* Fix unittests

* Fix create folder

* cleanup

* Fix import order

* cleanup loop parameter

* cleanup init function

* Allow changeable image name

* fix setup

* Fix load of arch

* Fix lint

* Add typing

* fix init

* fix hassos cli problem & stick on supervisor arch

* address comments

* cleanup

* Fix image selfheal

* Add comment

* update uvloop

* remove uvloop

* fix tagging

* Fix install name

* Fix validate build config

* Abstract image_name from system cache
Pascal Vizeli 2019-01-31 18:47:44 +01:00 committed by GitHub
parent 118a2e1951
commit 35aae69f23
33 changed files with 1019 additions and 457 deletions

API.md

@ -346,6 +346,7 @@ Load host configs from a USB stick.
{
"version": "INSTALL_VERSION",
"last_version": "LAST_VERSION",
"arch": "arch",
"machine": "Image machine type",
"image": "str",
"custom": "bool -> if custom image",
@ -675,6 +676,7 @@ return:
"hostname": "name",
"machine": "type",
"arch": "arch",
"supported_arch": ["arch1", "arch2"],
"channel": "stable|beta|dev"
}
```
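
For API consumers, the new `arch` and `supported_arch` fields can be read like any other `/info` data. A hedged sketch with aiohttp follows; the `http://hassio` base URL, the `X-HASSIO-KEY` header, the `HASSIO_TOKEN` variable and the `data` envelope are assumptions about the surrounding API, not part of the excerpt above.

```python
import asyncio
import os

import aiohttp


async def show_arch_info():
    # Assumed add-on-facing endpoint and auth header; adjust to your setup.
    headers = {"X-HASSIO-KEY": os.environ.get("HASSIO_TOKEN", "")}
    async with aiohttp.ClientSession(headers=headers) as session:
        async with session.get("http://hassio/info") as resp:
            body = await resp.json()
    info = body["data"]
    print(info["arch"], info["supported_arch"])


asyncio.run(show_arch_info())
```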


@ -9,7 +9,7 @@ from hassio import bootstrap
_LOGGER = logging.getLogger(__name__)
def attempt_use_uvloop():
def initialize_event_loop():
"""Attempt to use uvloop."""
try:
import uvloop
@ -17,13 +17,17 @@ def attempt_use_uvloop():
except ImportError:
pass
return asyncio.get_event_loop()
# pylint: disable=invalid-name
if __name__ == "__main__":
bootstrap.initialize_logging()
attempt_use_uvloop()
loop = asyncio.get_event_loop()
# Init async event loop
loop = initialize_event_loop()
# Check if all information are available to setup Hass.io
if not bootstrap.check_environment():
sys.exit(1)
@ -32,7 +36,7 @@ if __name__ == "__main__":
loop.set_default_executor(executor)
_LOGGER.info("Initialize Hass.io setup")
coresys = bootstrap.initialize_coresys(loop)
coresys = loop.run_until_complete(bootstrap.initialize_coresys())
bootstrap.migrate_system_env(coresys)
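
Because the rendering above drops the +/- diff markers, here is a consolidated sketch of the startup flow after this change; the uvloop policy line is elided in the hunk and assumed here.

```python
import asyncio


def initialize_event_loop():
    """Attempt to use uvloop, then hand back the event loop."""
    try:
        import uvloop
        asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())  # assumed elided line
    except ImportError:
        pass
    return asyncio.get_event_loop()


# __main__ now owns the loop and runs the async bootstrap on it:
# loop = initialize_event_loop()
# coresys = loop.run_until_complete(bootstrap.initialize_coresys())
```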


@ -1,8 +1,8 @@
"""Init file for Hass.io add-ons."""
from contextlib import suppress
from copy import deepcopy
import logging
import json
import logging
from pathlib import Path, PurePath
import re
import shutil
@ -12,30 +12,30 @@ from tempfile import TemporaryDirectory
import voluptuous as vol
from voluptuous.humanize import humanize_error
from .validate import (
validate_options, SCHEMA_ADDON_SNAPSHOT, RE_VOLUME, RE_SERVICE,
MACHINE_ALL)
from .utils import check_installed, remove_data
from ..const import (
ATTR_NAME, ATTR_VERSION, ATTR_SLUG, ATTR_DESCRIPTON, ATTR_BOOT, ATTR_MAP,
ATTR_OPTIONS, ATTR_PORTS, ATTR_SCHEMA, ATTR_IMAGE, ATTR_REPOSITORY,
ATTR_URL, ATTR_ARCH, ATTR_LOCATON, ATTR_DEVICES, ATTR_ENVIRONMENT,
ATTR_HOST_NETWORK, ATTR_TMPFS, ATTR_PRIVILEGED, ATTR_STARTUP, ATTR_UUID,
STATE_STARTED, STATE_STOPPED, STATE_NONE, ATTR_USER, ATTR_SYSTEM,
ATTR_STATE, ATTR_TIMEOUT, ATTR_AUTO_UPDATE, ATTR_NETWORK, ATTR_WEBUI,
ATTR_HASSIO_API, ATTR_AUDIO, ATTR_AUDIO_OUTPUT, ATTR_AUDIO_INPUT,
ATTR_GPIO, ATTR_HOMEASSISTANT_API, ATTR_STDIN, ATTR_LEGACY, ATTR_HOST_IPC,
ATTR_HOST_DBUS, ATTR_AUTO_UART, ATTR_DISCOVERY, ATTR_SERVICES,
ATTR_APPARMOR, ATTR_DEVICETREE, ATTR_DOCKER_API, ATTR_FULL_ACCESS,
ATTR_PROTECTED, ATTR_ACCESS_TOKEN, ATTR_HOST_PID, ATTR_HASSIO_ROLE,
ATTR_MACHINE, ATTR_AUTH_API, ATTR_KERNEL_MODULES,
SECURITY_PROFILE, SECURITY_DISABLE, SECURITY_DEFAULT)
ATTR_ACCESS_TOKEN, ATTR_APPARMOR, ATTR_ARCH, ATTR_AUDIO, ATTR_AUDIO_INPUT,
ATTR_AUDIO_OUTPUT, ATTR_AUTH_API, ATTR_AUTO_UART, ATTR_AUTO_UPDATE,
ATTR_BOOT, ATTR_DESCRIPTON, ATTR_DEVICES, ATTR_DEVICETREE, ATTR_DISCOVERY,
ATTR_DOCKER_API, ATTR_ENVIRONMENT, ATTR_FULL_ACCESS, ATTR_GPIO,
ATTR_HASSIO_API, ATTR_HASSIO_ROLE, ATTR_HOMEASSISTANT_API, ATTR_HOST_DBUS,
ATTR_HOST_IPC, ATTR_HOST_NETWORK, ATTR_HOST_PID, ATTR_IMAGE,
ATTR_KERNEL_MODULES, ATTR_LEGACY, ATTR_LOCATON, ATTR_MACHINE, ATTR_MAP,
ATTR_NAME, ATTR_NETWORK, ATTR_OPTIONS, ATTR_PORTS, ATTR_PRIVILEGED,
ATTR_PROTECTED, ATTR_REPOSITORY, ATTR_SCHEMA, ATTR_SERVICES, ATTR_SLUG,
ATTR_STARTUP, ATTR_STATE, ATTR_STDIN, ATTR_SYSTEM, ATTR_TIMEOUT,
ATTR_TMPFS, ATTR_URL, ATTR_USER, ATTR_UUID, ATTR_VERSION, ATTR_WEBUI,
SECURITY_DEFAULT, SECURITY_DISABLE, SECURITY_PROFILE, STATE_NONE,
STATE_STARTED, STATE_STOPPED)
from ..coresys import CoreSysAttributes
from ..docker.addon import DockerAddon
from ..utils import create_token
from ..utils.json import write_json_file, read_json_file
from ..utils.apparmor import adjust_profile
from ..exceptions import HostAppArmorError
from ..utils import create_token
from ..utils.apparmor import adjust_profile
from ..utils.json import read_json_file, write_json_file
from .utils import check_installed, remove_data
from .validate import (
MACHINE_ALL, RE_SERVICE, RE_VOLUME, SCHEMA_ADDON_SNAPSHOT,
validate_options)
_LOGGER = logging.getLogger(__name__)
@ -56,8 +56,14 @@ class Addon(CoreSysAttributes):
async def load(self):
"""Async initialize of object."""
if self.is_installed:
await self.instance.attach()
if not self.is_installed:
return
await self.instance.attach()
# NOTE: Can't be removed after soon
if ATTR_IMAGE not in self._data.user[self._id]:
self._data.user[self._id][ATTR_IMAGE] = self.image_name
self.save_data()
@property
def slug(self):
@ -87,10 +93,14 @@ class Addon(CoreSysAttributes):
@property
def available(self):
"""Return True if this add-on is available on this platform."""
if self.sys_arch not in self.supported_arch:
# Architecture
if not self.sys_arch.is_supported(self.supported_arch):
return False
# Machine / Hardware
if self.sys_machine not in self.supported_machine:
return False
return True
@property
@ -104,26 +114,27 @@ class Addon(CoreSysAttributes):
self._data.user[self._id] = {
ATTR_OPTIONS: {},
ATTR_VERSION: version,
ATTR_IMAGE: self.image_name,
}
self._data.save_data()
self.save_data()
def _set_uninstall(self):
"""Set add-on as uninstalled."""
self._data.system.pop(self._id, None)
self._data.user.pop(self._id, None)
self._data.save_data()
self.save_data()
def _set_update(self, version):
"""Update version of add-on."""
self._data.system[self._id] = deepcopy(self._data.cache[self._id])
self._data.user[self._id][ATTR_VERSION] = version
self._data.save_data()
self.save_data()
def _restore_data(self, user, system):
"""Restore data to add-on."""
self._data.user[self._id] = deepcopy(user)
self._data.system[self._id] = deepcopy(system)
self._data.save_data()
self.save_data()
@property
def options(self):
@ -496,16 +507,29 @@ class Addon(CoreSysAttributes):
@property
def image(self):
"""Return image name of add-on."""
addon_data = self._mesh
if self.is_installed:
# NOTE: cleanup
if ATTR_IMAGE in self._data.user[self._id]:
return self._data.user[self._id][ATTR_IMAGE]
return self.image_name
@property
def image_name(self):
"""Return image name for install/update."""
if self.is_detached:
addon_data = self._data.system.get(self._id)
else:
addon_data = self._data.cache.get(self._id)
# Repository with Dockerhub images
if ATTR_IMAGE in addon_data:
return addon_data[ATTR_IMAGE].format(arch=self.sys_arch)
arch = self.sys_arch.match(addon_data[ATTR_ARCH])
return addon_data[ATTR_IMAGE].format(arch=arch)
# local build
return "{}/{}-addon-{}".format(
addon_data[ATTR_REPOSITORY], self.sys_arch,
addon_data[ATTR_SLUG])
return (f"{addon_data[ATTR_REPOSITORY]}/"
f"{self.sys_arch.default}-"
f"addon-{addon_data[ATTR_SLUG]}")
@property
def need_build(self):
@ -680,7 +704,7 @@ class Addon(CoreSysAttributes):
if not self.available:
_LOGGER.error(
"Add-on %s not supported on %s with %s architecture",
self._id, self.sys_machine, self.sys_arch)
self._id, self.sys_machine, self.sys_arch.supported)
return False
if self.is_installed:
@ -695,7 +719,8 @@ class Addon(CoreSysAttributes):
# Setup/Fix AppArmor profile
await self._install_apparmor()
if not await self.instance.install(self.last_version):
if not await self.instance.install(
self.last_version, self.image_name):
return False
self._set_install(self.last_version)
@ -746,7 +771,7 @@ class Addon(CoreSysAttributes):
# Access Token
self._data.user[self._id][ATTR_ACCESS_TOKEN] = create_token()
self._data.save_data()
self.save_data()
# Options
if not self.write_options():
@ -775,7 +800,8 @@ class Addon(CoreSysAttributes):
_LOGGER.warning("No update available for add-on %s", self._id)
return False
if not await self.instance.update(self.last_version):
if not await self.instance.update(
self.last_version, self.image_name):
return False
self._set_update(self.last_version)
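
Since the interleaved diff makes the new `image` / `image_name` split hard to follow, here is a hedged sketch of the resolution order with sample data; the names below are illustrative, not real add-on config. The resolved name is also persisted into the user data at install time, so already-installed add-ons keep pulling the image they were installed with.

```python
# Illustrative resolution of the pull/build image name (sample data only).
addon_data = {
    "image": "homeassistant/{arch}-addon-example",   # optional Dockerhub template
    "arch": ["armv7", "armhf", "aarch64"],           # archs the add-on ships
    "repository": "local",
    "slug": "example",
}


def image_name(data, arch_match, default_arch):
    if "image" in data:
        # Repository ships prebuilt images: insert the best-matching arch.
        return data["image"].format(arch=arch_match)
    # Locally built add-on: derive the name from repository, default arch and slug.
    return f"{data['repository']}/{default_arch}-addon-{data['slug']}"


# With CpuArch.match() picking "armv7" on an armv7-capable board:
print(image_name(addon_data, "armv7", "armv7"))
# -> homeassistant/armv7-addon-example
```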


@ -1,19 +1,24 @@
"""Hass.io add-on build environment."""
from __future__ import annotations
from pathlib import Path
from typing import TYPE_CHECKING, Dict
from .validate import SCHEMA_BUILD_CONFIG, BASE_IMAGE
from ..const import ATTR_SQUASH, ATTR_BUILD_FROM, ATTR_ARGS, META_ADDON
from ..coresys import CoreSysAttributes
from ..const import ATTR_ARGS, ATTR_BUILD_FROM, ATTR_SQUASH, META_ADDON
from ..coresys import CoreSys, CoreSysAttributes
from ..utils.json import JsonConfig
from .validate import SCHEMA_BUILD_CONFIG
if TYPE_CHECKING:
from .addon import Addon
class AddonBuild(JsonConfig, CoreSysAttributes):
"""Handle build options for add-ons."""
def __init__(self, coresys, slug):
def __init__(self, coresys: CoreSys, slug: str) -> None:
"""Initialize Hass.io add-on builder."""
self.coresys = coresys
self._id = slug
self.coresys: CoreSys = coresys
self._id: str = slug
super().__init__(
Path(self.addon.path_location, 'build.json'), SCHEMA_BUILD_CONFIG)
@ -22,23 +27,24 @@ class AddonBuild(JsonConfig, CoreSysAttributes):
"""Ignore save function."""
@property
def addon(self):
def addon(self) -> Addon:
"""Return add-on of build data."""
return self.sys_addons.get(self._id)
@property
def base_image(self):
def base_image(self) -> str:
"""Base images for this add-on."""
return self._data[ATTR_BUILD_FROM].get(
self.sys_arch, BASE_IMAGE[self.sys_arch])
self.sys_arch.default,
f"homeassistant/{self.sys_arch.default}-base:latest")
@property
def squash(self):
def squash(self) -> bool:
"""Return True or False if squash is active."""
return self._data[ATTR_SQUASH]
@property
def additional_args(self):
def additional_args(self) -> Dict[str, str]:
"""Return additional Docker build arguments."""
return self._data[ATTR_ARGS]
@ -52,7 +58,7 @@ class AddonBuild(JsonConfig, CoreSysAttributes):
'squash': self.squash,
'labels': {
'io.hass.version': version,
'io.hass.arch': self.sys_arch,
'io.hass.arch': self.sys_arch.default,
'io.hass.type': META_ADDON,
'io.hass.name': self._fix_label('name'),
'io.hass.description': self._fix_label('description'),
@ -60,7 +66,7 @@ class AddonBuild(JsonConfig, CoreSysAttributes):
'buildargs': {
'BUILD_FROM': self.base_image,
'BUILD_VERSION': version,
'BUILD_ARCH': self.sys_arch,
'BUILD_ARCH': self.sys_arch.default,
**self.additional_args,
}
}
@ -70,7 +76,7 @@ class AddonBuild(JsonConfig, CoreSysAttributes):
return args
def _fix_label(self, label_name):
def _fix_label(self, label_name: str) -> str:
"""Remove characters they are not supported."""
label = getattr(self.addon, label_name, "")
return label.replace("'", "")
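
The hard-coded `BASE_IMAGE` table is gone; the fallback base image is now derived from the platform's default arch. A minimal sketch with sample values:

```python
# Sample build.json contents; the mapping may simply be empty.
build_from = {"armv7": "homeassistant/armv7-base:3.9"}
default_arch = "armv7"

base_image = build_from.get(
    default_arch, f"homeassistant/{default_arch}-base:latest")
print(base_image)  # homeassistant/armv7-base:3.9, else homeassistant/armv7-base:latest
```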


@ -124,7 +124,7 @@ class AddonsData(JsonConfig, CoreSysAttributes):
def _set_builtin_repositories(self):
"""Add local built-in repository into dataset."""
try:
builtin_file = Path(__file__).parent.joinpath('built-in.json')
builtin_file = Path(__file__).parent.joinpath("built-in.json")
builtin_data = read_json_file(builtin_file)
except (OSError, json.JSONDecodeError) as err:
_LOGGER.warning("Can't read built-in json: %s", err)


@ -1,21 +1,25 @@
"""Util add-ons functions."""
from __future__ import annotations
import asyncio
import hashlib
import logging
from pathlib import Path
import re
from typing import TYPE_CHECKING
from ..const import (
SECURITY_DISABLE, SECURITY_PROFILE, PRIVILEGED_NET_ADMIN,
PRIVILEGED_SYS_ADMIN, PRIVILEGED_SYS_RAWIO, PRIVILEGED_SYS_PTRACE,
PRIVILEGED_DAC_READ_SEARCH, PRIVILEGED_SYS_MODULE, ROLE_ADMIN,
ROLE_MANAGER)
from ..const import (PRIVILEGED_DAC_READ_SEARCH, PRIVILEGED_NET_ADMIN,
PRIVILEGED_SYS_ADMIN, PRIVILEGED_SYS_MODULE,
PRIVILEGED_SYS_PTRACE, PRIVILEGED_SYS_RAWIO, ROLE_ADMIN,
ROLE_MANAGER, SECURITY_DISABLE, SECURITY_PROFILE)
if TYPE_CHECKING:
from .addon import Addon
RE_SHA1 = re.compile(r"[a-f0-9]{8}")
_LOGGER = logging.getLogger(__name__)
def rating_security(addon):
def rating_security(addon: Addon) -> int:
"""Return 1-6 for security rating.
1 = not secure
@ -34,17 +38,16 @@ def rating_security(addon):
rating += 1
# Privileged options
# pylint: disable=bad-continuation
if any(
privilege in addon.privileged
for privilege in (
PRIVILEGED_NET_ADMIN,
PRIVILEGED_SYS_ADMIN,
PRIVILEGED_SYS_RAWIO,
PRIVILEGED_SYS_PTRACE,
PRIVILEGED_SYS_MODULE,
PRIVILEGED_DAC_READ_SEARCH,
)
):
privilege in addon.privileged for privilege in (
PRIVILEGED_NET_ADMIN,
PRIVILEGED_SYS_ADMIN,
PRIVILEGED_SYS_RAWIO,
PRIVILEGED_SYS_PTRACE,
PRIVILEGED_SYS_MODULE,
PRIVILEGED_DAC_READ_SEARCH,
)):
rating += -1
# API Hass.io role
@ -72,19 +75,19 @@ def rating_security(addon):
return max(min(6, rating), 1)
def get_hash_from_repository(name):
def get_hash_from_repository(name: str) -> str:
"""Generate a hash from repository."""
key = name.lower().encode()
return hashlib.sha1(key).hexdigest()[:8]
def extract_hash_from_path(path):
def extract_hash_from_path(path: Path) -> str:
"""Extract repo id from path."""
repo_dir = path.parts[-1]
repository_dir = path.parts[-1]
if not RE_SHA1.match(repo_dir):
return get_hash_from_repository(repo_dir)
return repo_dir
if not RE_SHA1.match(repository_dir):
return get_hash_from_repository(repository_dir)
return repository_dir
def check_installed(method):
@ -100,12 +103,11 @@ def check_installed(method):
return wrap_check
async def remove_data(folder):
async def remove_data(folder: Path) -> None:
"""Remove folder and reset privileged."""
try:
proc = await asyncio.create_subprocess_exec(
"rm", "-rf", str(folder), stdout=asyncio.subprocess.DEVNULL
)
"rm", "-rf", str(folder), stdout=asyncio.subprocess.DEVNULL)
_, error_msg = await proc.communicate()
except OSError as err:


@ -6,29 +6,24 @@ import uuid
import voluptuous as vol
from ..const import (
ATTR_NAME, ATTR_VERSION, ATTR_SLUG, ATTR_DESCRIPTON, ATTR_STARTUP,
ATTR_BOOT, ATTR_MAP, ATTR_OPTIONS, ATTR_PORTS, STARTUP_ONCE,
STARTUP_SYSTEM, STARTUP_SERVICES, STARTUP_APPLICATION, STARTUP_INITIALIZE,
BOOT_AUTO, BOOT_MANUAL, ATTR_SCHEMA, ATTR_IMAGE, ATTR_URL, ATTR_MAINTAINER,
ATTR_ARCH, ATTR_DEVICES, ATTR_ENVIRONMENT, ATTR_HOST_NETWORK, ARCH_ARMHF,
ARCH_AARCH64, ARCH_AMD64, ARCH_I386, ATTR_TMPFS, ATTR_PRIVILEGED,
ATTR_USER, ATTR_STATE, ATTR_SYSTEM, STATE_STARTED, STATE_STOPPED,
ATTR_LOCATON, ATTR_REPOSITORY, ATTR_TIMEOUT, ATTR_NETWORK, ATTR_UUID,
ATTR_AUTO_UPDATE, ATTR_WEBUI, ATTR_AUDIO, ATTR_AUDIO_INPUT, ATTR_HOST_IPC,
ATTR_AUDIO_OUTPUT, ATTR_HASSIO_API, ATTR_BUILD_FROM, ATTR_SQUASH,
ATTR_ARGS, ATTR_GPIO, ATTR_HOMEASSISTANT_API, ATTR_STDIN, ATTR_LEGACY,
ATTR_HOST_DBUS, ATTR_AUTO_UART, ATTR_SERVICES, ATTR_DISCOVERY,
ATTR_APPARMOR, ATTR_DEVICETREE, ATTR_DOCKER_API, ATTR_PROTECTED,
ATTR_FULL_ACCESS, ATTR_ACCESS_TOKEN, ATTR_HOST_PID, ATTR_HASSIO_ROLE,
ATTR_MACHINE, ATTR_AUTH_API, ATTR_KERNEL_MODULES,
PRIVILEGED_NET_ADMIN, PRIVILEGED_SYS_ADMIN, PRIVILEGED_SYS_RAWIO,
PRIVILEGED_IPC_LOCK, PRIVILEGED_SYS_TIME, PRIVILEGED_SYS_NICE,
PRIVILEGED_SYS_RESOURCE, PRIVILEGED_SYS_PTRACE, PRIVILEGED_DAC_READ_SEARCH,
PRIVILEGED_SYS_MODULE, ROLE_DEFAULT, ROLE_HOMEASSISTANT, ROLE_MANAGER,
ROLE_ADMIN, ROLE_BACKUP)
from ..validate import (
NETWORK_PORT, DOCKER_PORTS, ALSA_DEVICE, UUID_MATCH, SHA256)
ARCH_ALL, ATTR_ACCESS_TOKEN, ATTR_APPARMOR, ATTR_ARCH, ATTR_ARGS,
ATTR_AUDIO, ATTR_AUDIO_INPUT, ATTR_AUDIO_OUTPUT, ATTR_AUTH_API,
ATTR_AUTO_UART, ATTR_AUTO_UPDATE, ATTR_BOOT, ATTR_BUILD_FROM,
ATTR_DESCRIPTON, ATTR_DEVICES, ATTR_DEVICETREE, ATTR_DISCOVERY,
ATTR_DOCKER_API, ATTR_ENVIRONMENT, ATTR_FULL_ACCESS, ATTR_GPIO,
ATTR_HASSIO_API, ATTR_HASSIO_ROLE, ATTR_HOMEASSISTANT_API, ATTR_HOST_DBUS,
ATTR_HOST_IPC, ATTR_HOST_NETWORK, ATTR_HOST_PID, ATTR_IMAGE,
ATTR_KERNEL_MODULES, ATTR_LEGACY, ATTR_LOCATON, ATTR_MACHINE,
ATTR_MAINTAINER, ATTR_MAP, ATTR_NAME, ATTR_NETWORK, ATTR_OPTIONS,
ATTR_PORTS, ATTR_PRIVILEGED, ATTR_PROTECTED, ATTR_REPOSITORY, ATTR_SCHEMA,
ATTR_SERVICES, ATTR_SLUG, ATTR_SQUASH, ATTR_STARTUP, ATTR_STATE,
ATTR_STDIN, ATTR_SYSTEM, ATTR_TIMEOUT, ATTR_TMPFS, ATTR_URL, ATTR_USER,
ATTR_UUID, ATTR_VERSION, ATTR_WEBUI, BOOT_AUTO, BOOT_MANUAL,
PRIVILEGED_ALL, ROLE_ALL, ROLE_DEFAULT, STARTUP_ALL, STARTUP_APPLICATION,
STARTUP_SERVICES, STATE_STARTED, STATE_STOPPED)
from ..services.validate import DISCOVERY_SERVICES
from ..validate import (
ALSA_DEVICE, DOCKER_PORTS, NETWORK_PORT, SHA256, UUID_MATCH)
_LOGGER = logging.getLogger(__name__)
@ -54,54 +49,20 @@ RE_SCHEMA_ELEMENT = re.compile(
r")\??$"
)
RE_DOCKER_IMAGE = re.compile(
r"^([a-zA-Z\-\.:\d{}]+/)*?([\-\w{}]+)/([\-\w{}]+)$")
RE_DOCKER_IMAGE_BUILD = re.compile(
r"^([a-zA-Z\-\.:\d{}]+/)*?([\-\w{}]+)/([\-\w{}]+)(:[\.\-\w{}]+)?$")
SCHEMA_ELEMENT = vol.Match(RE_SCHEMA_ELEMENT)
ARCH_ALL = [
ARCH_ARMHF, ARCH_AARCH64, ARCH_AMD64, ARCH_I386
]
MACHINE_ALL = [
'intel-nuc',
'odroid-c2', 'odroid-xu',
'orangepi-prime',
'qemux86', 'qemux86-64', 'qemuarm', 'qemuarm-64',
'raspberrypi', 'raspberrypi2', 'raspberrypi3', 'raspberrypi3-64',
'tinker',
'intel-nuc', 'odroid-c2', 'odroid-xu', 'orangepi-prime', 'qemux86',
'qemux86-64', 'qemuarm', 'qemuarm-64', 'raspberrypi', 'raspberrypi2',
'raspberrypi3', 'raspberrypi3-64', 'tinker',
]
STARTUP_ALL = [
STARTUP_ONCE, STARTUP_INITIALIZE, STARTUP_SYSTEM, STARTUP_SERVICES,
STARTUP_APPLICATION
]
PRIVILEGED_ALL = [
PRIVILEGED_NET_ADMIN,
PRIVILEGED_SYS_ADMIN,
PRIVILEGED_SYS_RAWIO,
PRIVILEGED_IPC_LOCK,
PRIVILEGED_SYS_TIME,
PRIVILEGED_SYS_NICE,
PRIVILEGED_SYS_RESOURCE,
PRIVILEGED_SYS_PTRACE,
PRIVILEGED_SYS_MODULE,
PRIVILEGED_DAC_READ_SEARCH,
]
ROLE_ALL = [
ROLE_DEFAULT,
ROLE_HOMEASSISTANT,
ROLE_BACKUP,
ROLE_MANAGER,
ROLE_ADMIN,
]
BASE_IMAGE = {
ARCH_ARMHF: "homeassistant/armhf-base:latest",
ARCH_AARCH64: "homeassistant/aarch64-base:latest",
ARCH_I386: "homeassistant/i386-base:latest",
ARCH_AMD64: "homeassistant/amd64-base:latest",
}
def _simple_startup(value):
"""Simple startup schema."""
@ -166,7 +127,7 @@ SCHEMA_ADDON_CONFIG = vol.Schema({
}))
}), False),
vol.Optional(ATTR_IMAGE):
vol.Match(r"^([a-zA-Z\-\.:\d{}]+/)*?([\-\w{}]+)/([\-\w{}]+)$"),
vol.Match(RE_DOCKER_IMAGE),
vol.Optional(ATTR_TIMEOUT, default=10):
vol.All(vol.Coerce(int), vol.Range(min=10, max=120)),
}, extra=vol.REMOVE_EXTRA)
@ -182,8 +143,8 @@ SCHEMA_REPOSITORY_CONFIG = vol.Schema({
# pylint: disable=no-value-for-parameter
SCHEMA_BUILD_CONFIG = vol.Schema({
vol.Optional(ATTR_BUILD_FROM, default=BASE_IMAGE): vol.Schema({
vol.In(ARCH_ALL): vol.Match(r"(?:^[\w{}]+/)?[\-\w{}]+:[\.\-\w{}]+$"),
vol.Optional(ATTR_BUILD_FROM, default=dict): vol.Schema({
vol.In(ARCH_ALL): vol.Match(RE_DOCKER_IMAGE_BUILD),
}),
vol.Optional(ATTR_SQUASH, default=False): vol.Boolean(),
vol.Optional(ATTR_ARGS, default=dict): vol.Schema({
@ -195,6 +156,7 @@ SCHEMA_BUILD_CONFIG = vol.Schema({
# pylint: disable=no-value-for-parameter
SCHEMA_ADDON_USER = vol.Schema({
vol.Required(ATTR_VERSION): vol.Coerce(str),
vol.Optional(ATTR_IMAGE): vol.Maybe(vol.Coerce(str)),
vol.Optional(ATTR_UUID, default=lambda: uuid.uuid4().hex): UUID_MATCH,
vol.Optional(ATTR_ACCESS_TOKEN): SHA256,
vol.Optional(ATTR_OPTIONS, default=dict): dict,
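
The Docker image patterns are now shared constants; a quick check of what each accepts, with the patterns copied from the hunk above (add-on images may keep an `{arch}` placeholder, build images may carry a tag):

```python
import re

RE_DOCKER_IMAGE = re.compile(
    r"^([a-zA-Z\-\.:\d{}]+/)*?([\-\w{}]+)/([\-\w{}]+)$")
RE_DOCKER_IMAGE_BUILD = re.compile(
    r"^([a-zA-Z\-\.:\d{}]+/)*?([\-\w{}]+)/([\-\w{}]+)(:[\.\-\w{}]+)?$")

assert RE_DOCKER_IMAGE.match("homeassistant/{arch}-addon-example")
assert RE_DOCKER_IMAGE_BUILD.match("homeassistant/{arch}-base:latest")
assert not RE_DOCKER_IMAGE.match("homeassistant/{arch}-base:latest")  # no tag allowed here
```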


@ -30,7 +30,7 @@ class RestAPI(CoreSysAttributes):
self.coresys = coresys
self.security = SecurityMiddleware(coresys)
self.webapp = web.Application(
middlewares=[self.security.token_validation], loop=coresys.loop)
middlewares=[self.security.token_validation])
# service stuff
self._runner = web.AppRunner(self.webapp)
@ -66,10 +66,10 @@ class RestAPI(CoreSysAttributes):
web.get('/host/services', api_host.services),
web.post('/host/services/{service}/stop', api_host.service_stop),
web.post('/host/services/{service}/start', api_host.service_start),
web.post(
'/host/services/{service}/restart', api_host.service_restart),
web.post(
'/host/services/{service}/reload', api_host.service_reload),
web.post('/host/services/{service}/restart',
api_host.service_restart),
web.post('/host/services/{service}/reload',
api_host.service_reload),
])
def _register_hassos(self):
@ -224,8 +224,7 @@ class RestAPI(CoreSysAttributes):
self.webapp.add_routes([
web.get('/discovery', api_discovery.list),
web.get('/discovery/{uuid}', api_discovery.get_discovery),
web.delete('/discovery/{uuid}',
api_discovery.del_discovery),
web.delete('/discovery/{uuid}', api_discovery.del_discovery),
web.post('/discovery', api_discovery.set_discovery),
])
@ -239,8 +238,8 @@ class RestAPI(CoreSysAttributes):
return lambda request: web.FileResponse(path)
# This route is for backwards compatibility with HA < 0.58
self.webapp.add_routes([
web.get('/panel', create_response('hassio-main-es5'))])
self.webapp.add_routes(
[web.get('/panel', create_response('hassio-main-es5'))])
# This route is for backwards compatibility with HA 0.58 - 0.61
self.webapp.add_routes([
@ -266,8 +265,8 @@ class RestAPI(CoreSysAttributes):
try:
await self._site.start()
except OSError as err:
_LOGGER.fatal(
"Failed to create HTTP server at 0.0.0.0:80 -> %s", err)
_LOGGER.fatal("Failed to create HTTP server at 0.0.0.0:80 -> %s",
err)
else:
_LOGGER.info("Start API on %s", self.sys_docker.network.supervisor)


@ -4,34 +4,39 @@ import logging
import voluptuous as vol
from .utils import api_process, api_process_raw, api_validate
from ..const import (
ATTR_VERSION, ATTR_LAST_VERSION, ATTR_IMAGE, ATTR_CUSTOM, ATTR_BOOT,
ATTR_PORT, ATTR_PASSWORD, ATTR_SSL, ATTR_WATCHDOG, ATTR_CPU_PERCENT,
ATTR_MEMORY_USAGE, ATTR_MEMORY_LIMIT, ATTR_NETWORK_RX, ATTR_NETWORK_TX,
ATTR_BLK_READ, ATTR_BLK_WRITE, ATTR_WAIT_BOOT, ATTR_MACHINE,
ATTR_REFRESH_TOKEN, CONTENT_TYPE_BINARY)
ATTR_ARCH, ATTR_BLK_READ, ATTR_BLK_WRITE, ATTR_BOOT, ATTR_CPU_PERCENT,
ATTR_CUSTOM, ATTR_IMAGE, ATTR_LAST_VERSION, ATTR_MACHINE, ATTR_MEMORY_LIMIT,
ATTR_MEMORY_USAGE, ATTR_NETWORK_RX, ATTR_NETWORK_TX, ATTR_PASSWORD,
ATTR_PORT, ATTR_REFRESH_TOKEN, ATTR_SSL, ATTR_VERSION, ATTR_WAIT_BOOT,
ATTR_WATCHDOG, CONTENT_TYPE_BINARY)
from ..coresys import CoreSysAttributes
from ..validate import NETWORK_PORT, DOCKER_IMAGE
from ..exceptions import APIError
from ..validate import DOCKER_IMAGE, NETWORK_PORT
from .utils import api_process, api_process_raw, api_validate
_LOGGER = logging.getLogger(__name__)
# pylint: disable=no-value-for-parameter
SCHEMA_OPTIONS = vol.Schema({
vol.Optional(ATTR_BOOT): vol.Boolean(),
vol.Optional(ATTR_BOOT):
vol.Boolean(),
vol.Inclusive(ATTR_IMAGE, 'custom_hass'):
vol.Maybe(vol.Coerce(str)),
vol.Inclusive(ATTR_LAST_VERSION, 'custom_hass'):
vol.Any(None, DOCKER_IMAGE),
vol.Optional(ATTR_PORT): NETWORK_PORT,
vol.Optional(ATTR_PASSWORD): vol.Maybe(vol.Coerce(str)),
vol.Optional(ATTR_SSL): vol.Boolean(),
vol.Optional(ATTR_WATCHDOG): vol.Boolean(),
vol.Optional(ATTR_PORT):
NETWORK_PORT,
vol.Optional(ATTR_PASSWORD):
vol.Maybe(vol.Coerce(str)),
vol.Optional(ATTR_SSL):
vol.Boolean(),
vol.Optional(ATTR_WATCHDOG):
vol.Boolean(),
vol.Optional(ATTR_WAIT_BOOT):
vol.All(vol.Coerce(int), vol.Range(min=60)),
vol.Optional(ATTR_REFRESH_TOKEN): vol.Maybe(vol.Coerce(str)),
vol.Optional(ATTR_REFRESH_TOKEN):
vol.Maybe(vol.Coerce(str)),
})
SCHEMA_VERSION = vol.Schema({
@ -49,6 +54,7 @@ class APIHomeAssistant(CoreSysAttributes):
ATTR_VERSION: self.sys_homeassistant.version,
ATTR_LAST_VERSION: self.sys_homeassistant.last_version,
ATTR_MACHINE: self.sys_homeassistant.machine,
ATTR_ARCH: self.sys_homeassistant.arch,
ATTR_IMAGE: self.sys_homeassistant.image,
ATTR_CUSTOM: self.sys_homeassistant.is_custom_image,
ATTR_BOOT: self.sys_homeassistant.boot,


@ -1,11 +1,11 @@
"""Init file for Hass.io info RESTful API."""
import logging
from .utils import api_process
from ..const import (
ATTR_HOMEASSISTANT, ATTR_SUPERVISOR, ATTR_MACHINE, ATTR_ARCH, ATTR_HASSOS,
ATTR_CHANNEL, ATTR_HOSTNAME)
from ..const import (ATTR_ARCH, ATTR_CHANNEL, ATTR_HASSOS, ATTR_HOMEASSISTANT,
ATTR_HOSTNAME, ATTR_MACHINE, ATTR_SUPERVISOR,
ATTR_SUPPORTED_ARCH)
from ..coresys import CoreSysAttributes
from .utils import api_process
_LOGGER = logging.getLogger(__name__)
@ -22,6 +22,7 @@ class APIInfo(CoreSysAttributes):
ATTR_HASSOS: self.sys_hassos.version,
ATTR_HOSTNAME: self.sys_host.info.hostname,
ATTR_MACHINE: self.sys_machine,
ATTR_ARCH: self.sys_arch,
ATTR_ARCH: self.sys_arch.default,
ATTR_SUPPORTED_ARCH: self.sys_arch.supported,
ATTR_CHANNEL: self.sys_updater.channel,
}


@ -61,7 +61,7 @@ class APISupervisor(CoreSysAttributes):
ATTR_VERSION: HASSIO_VERSION,
ATTR_LAST_VERSION: self.sys_updater.version_hassio,
ATTR_CHANNEL: self.sys_updater.channel,
ATTR_ARCH: self.sys_arch,
ATTR_ARCH: self.sys_supervisor.arch,
ATTR_WAIT_BOOT: self.sys_config.wait_boot,
ATTR_TIMEZONE: self.sys_config.timezone,
ATTR_ADDONS: list_addons,
@ -116,8 +116,7 @@ class APISupervisor(CoreSysAttributes):
if version == self.sys_supervisor.version:
raise APIError("Version {} is already in use".format(version))
return await asyncio.shield(
self.sys_supervisor.update(version))
return await asyncio.shield(self.sys_supervisor.update(version))
@api_process
async def reload(self, request):
@ -125,8 +124,7 @@ class APISupervisor(CoreSysAttributes):
tasks = [
self.sys_updater.reload(),
]
results, _ = await asyncio.shield(
asyncio.wait(tasks))
results, _ = await asyncio.shield(asyncio.wait(tasks))
for result in results:
if result.exception() is not None:

hassio/arch.json (new file)

@ -0,0 +1,44 @@
{
"raspberrypi": [
"armhf"
],
"raspberrypi2": [
"armhf"
],
"raspberrypi3": [
"armhf"
],
"raspberrypi3-64": [
"aarch64",
"armhf"
],
"tinker": [
"armhf"
],
"odroid-c2": [
"aarch64"
],
"odroid-xu": [
"armhf"
],
"orangepi-prime": [
"aarch64"
],
"qemux86": [
"i386"
],
"qemux86-64": [
"amd64",
"i386"
],
"qemuarm": [
"armhf"
],
"qemuarm-64": [
"aarch64"
],
"intel-nuc": [
"amd64",
"i386"
]
}

hassio/arch.py (new file)

@ -0,0 +1,67 @@
"""Handle Arch for underlay maschine/platforms."""
import json
import logging
from typing import List
from pathlib import Path
from .coresys import CoreSysAttributes, CoreSys
from .exceptions import HassioArchNotFound
from .utils.json import read_json_file
_LOGGER = logging.getLogger(__name__)
class CpuArch(CoreSysAttributes):
"""Manage available architectures."""
def __init__(self, coresys: CoreSys) -> None:
"""Initialize CPU Architecture handler."""
self.coresys = coresys
self._supported_arch: List[str] = []
self._default_arch: str
@property
def default(self) -> str:
"""Return system default arch."""
return self._default_arch
@property
def supervisor(self) -> str:
"""Return supervisor arch."""
return self.sys_supervisor.arch
@property
def supported(self) -> List[str]:
"""Return support arch by CPU/Machine."""
return self._supported_arch
async def load(self) -> None:
"""Load data and initialize default arch."""
try:
arch_file = Path(__file__).parent.joinpath("arch.json")
arch_data = read_json_file(arch_file)
except (OSError, json.JSONDecodeError) as err:
_LOGGER.warning("Can't read arch json: %s", err)
return
# Evaluate current CPU/Platform
if not self.sys_machine or self.sys_machine not in arch_data:
_LOGGER.warning("Can't detect underlay machine type!")
self._default_arch = self.sys_supervisor.arch
self._supported_arch.append(self.default)
return
# Use configs from arch.json
self._supported_arch.extend(arch_data[self.sys_machine])
self._default_arch = self.supported[0]
def is_supported(self, arch_list: List[str]) -> bool:
"""Return True if there is a supported arch by this platform."""
return not set(self.supported).isdisjoint(set(arch_list))
def match(self, arch_list: List[str]) -> str:
"""Return best match for this CPU/Platform."""
for self_arch in self.supported:
if self_arch in arch_list:
return self_arch
raise HassioArchNotFound()
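
A short usage sketch of the new handler, mimicking its semantics with the `raspberrypi3-64` entry from `arch.json`; the class below is a standalone stand-in, not the real `CpuArch` import.

```python
# Stand-alone mimic of CpuArch for the "raspberrypi3-64" entry in arch.json.
class ArchSketch:
    def __init__(self, supported):
        self.supported = supported        # ordered: first entry is preferred
        self.default = supported[0]

    def is_supported(self, arch_list):
        """True if at least one of the add-on's archs runs on this machine."""
        return not set(self.supported).isdisjoint(set(arch_list))

    def match(self, arch_list):
        """Best arch for this machine, preferring the platform default."""
        for arch in self.supported:
            if arch in arch_list:
                return arch
        raise LookupError("no matching arch")   # HassioArchNotFound in Hass.io


arch = ArchSketch(["aarch64", "armhf"])
print(arch.default)                        # aarch64
print(arch.is_supported(["armhf"]))        # True
print(arch.match(["armhf", "aarch64"]))    # aarch64 (prefers the platform default)
```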


@ -1,44 +1,46 @@
"""Bootstrap Hass.io."""
import logging
import os
import signal
import shutil
from pathlib import Path
import shutil
import signal
from colorlog import ColoredFormatter
from .core import HassIO
from .auth import Auth
from .addons import AddonManager
from .api import RestAPI
from .arch import CpuArch
from .auth import Auth
from .const import SOCKET_DOCKER
from .core import HassIO
from .coresys import CoreSys
from .supervisor import Supervisor
from .dbus import DBusManager
from .discovery import Discovery
from .hassos import HassOS
from .homeassistant import HomeAssistant
from .host import HostManager
from .services import ServiceManager
from .snapshots import SnapshotManager
from .supervisor import Supervisor
from .tasks import Tasks
from .updater import Updater
from .services import ServiceManager
from .discovery import Discovery
from .host import HostManager
from .dbus import DBusManager
from .hassos import HassOS
_LOGGER = logging.getLogger(__name__)
ENV_SHARE = 'SUPERVISOR_SHARE'
ENV_NAME = 'SUPERVISOR_NAME'
ENV_REPO = 'HOMEASSISTANT_REPOSITORY'
ENV_SHARE = "SUPERVISOR_SHARE"
ENV_NAME = "SUPERVISOR_NAME"
ENV_REPO = "HOMEASSISTANT_REPOSITORY"
MACHINE_ID = Path('/etc/machine-id')
MACHINE_ID = Path("/etc/machine-id")
def initialize_coresys(loop):
async def initialize_coresys():
"""Initialize HassIO coresys/objects."""
coresys = CoreSys(loop)
coresys = CoreSys()
# Initialize core objects
coresys.core = HassIO(coresys)
coresys.arch = CpuArch(coresys)
coresys.auth = Auth(coresys)
coresys.updater = Updater(coresys)
coresys.api = RestAPI(coresys)
@ -69,9 +71,8 @@ def initialize_system_data(coresys):
# Home Assistant configuration folder
if not config.path_homeassistant.is_dir():
_LOGGER.info(
"Create Home Assistant configuration folder %s",
config.path_homeassistant)
_LOGGER.info("Create Home Assistant configuration folder %s",
config.path_homeassistant)
config.path_homeassistant.mkdir()
# hassio ssl folder
@ -81,8 +82,8 @@ def initialize_system_data(coresys):
# hassio addon data folder
if not config.path_addons_data.is_dir():
_LOGGER.info(
"Create Hass.io Add-on data folder %s", config.path_addons_data)
_LOGGER.info("Create Hass.io Add-on data folder %s",
config.path_addons_data)
config.path_addons_data.mkdir(parents=True)
if not config.path_addons_local.is_dir():
@ -134,26 +135,26 @@ def migrate_system_env(coresys):
def initialize_logging():
"""Setup the logging."""
logging.basicConfig(level=logging.INFO)
fmt = ("%(asctime)s %(levelname)s (%(threadName)s) "
"[%(name)s] %(message)s")
colorfmt = "%(log_color)s{}%(reset)s".format(fmt)
datefmt = '%y-%m-%d %H:%M:%S'
fmt = "%(asctime)s %(levelname)s (%(threadName)s) [%(name)s] %(message)s"
colorfmt = f"%(log_color)s{fmt}%(reset)s"
datefmt = "%y-%m-%d %H:%M:%S"
# suppress overly verbose logs from libraries that aren't helpful
logging.getLogger("aiohttp.access").setLevel(logging.WARNING)
logging.getLogger().handlers[0].setFormatter(ColoredFormatter(
colorfmt,
datefmt=datefmt,
reset=True,
log_colors={
'DEBUG': 'cyan',
'INFO': 'green',
'WARNING': 'yellow',
'ERROR': 'red',
'CRITICAL': 'red',
}
))
logging.getLogger().handlers[0].setFormatter(
ColoredFormatter(
colorfmt,
datefmt=datefmt,
reset=True,
log_colors={
"DEBUG": "cyan",
"INFO": "green",
"WARNING": "yellow",
"ERROR": "red",
"CRITICAL": "red",
},
))
def check_environment():
@ -172,12 +173,12 @@ def check_environment():
return False
# check socat exec
if not shutil.which('socat'):
if not shutil.which("socat"):
_LOGGER.fatal("Can't find socat!")
return False
# check socat exec
if not shutil.which('gdbus'):
if not shutil.which("gdbus"):
_LOGGER.fatal("Can't find gdbus!")
return False
@ -187,19 +188,19 @@ def check_environment():
def reg_signal(loop):
"""Register SIGTERM and SIGKILL to stop system."""
try:
loop.add_signal_handler(
signal.SIGTERM, lambda: loop.call_soon(loop.stop))
loop.add_signal_handler(signal.SIGTERM,
lambda: loop.call_soon(loop.stop))
except (ValueError, RuntimeError):
_LOGGER.warning("Could not bind to SIGTERM")
try:
loop.add_signal_handler(
signal.SIGHUP, lambda: loop.call_soon(loop.stop))
loop.add_signal_handler(signal.SIGHUP,
lambda: loop.call_soon(loop.stop))
except (ValueError, RuntimeError):
_LOGGER.warning("Could not bind to SIGHUP")
try:
loop.add_signal_handler(
signal.SIGINT, lambda: loop.call_soon(loop.stop))
loop.add_signal_handler(signal.SIGINT,
lambda: loop.call_soon(loop.stop))
except (ValueError, RuntimeError):
_LOGGER.warning("Could not bind to SIGINT")


@ -8,10 +8,8 @@ URL_HASSIO_ADDONS = "https://github.com/home-assistant/hassio-addons"
URL_HASSIO_VERSION = "https://s3.amazonaws.com/hassio-version/{channel}.json"
URL_HASSIO_APPARMOR = "https://s3.amazonaws.com/hassio-version/apparmor.txt"
URL_HASSOS_OTA = (
"https://github.com/home-assistant/hassos/releases/download/"
"{version}/hassos_{board}-{version}.raucb"
)
URL_HASSOS_OTA = ("https://github.com/home-assistant/hassos/releases/download/"
"{version}/hassos_{board}-{version}.raucb")
HASSIO_DATA = Path("/data")
@ -187,6 +185,7 @@ ATTR_HASSIO_ROLE = "hassio_role"
ATTR_SUPERVISOR = "supervisor"
ATTR_AUTH_API = "auth_api"
ATTR_KERNEL_MODULES = "kernel_modules"
ATTR_SUPPORTED_ARCH = "supported_arch"
SERVICE_MQTT = "mqtt"
PROVIDE_SERVICE = "provide"
@ -199,6 +198,11 @@ STARTUP_SERVICES = "services"
STARTUP_APPLICATION = "application"
STARTUP_ONCE = "once"
STARTUP_ALL = [
STARTUP_ONCE, STARTUP_INITIALIZE, STARTUP_SYSTEM, STARTUP_SERVICES,
STARTUP_APPLICATION
]
BOOT_AUTO = "auto"
BOOT_MANUAL = "manual"
@ -213,10 +217,13 @@ MAP_BACKUP = "backup"
MAP_SHARE = "share"
ARCH_ARMHF = "armhf"
ARCH_ARMV7 = "armv7"
ARCH_AARCH64 = "aarch64"
ARCH_AMD64 = "amd64"
ARCH_I386 = "i386"
ARCH_ALL = [ARCH_ARMHF, ARCH_ARMV7, ARCH_AARCH64, ARCH_AMD64, ARCH_I386]
CHANNEL_STABLE = "stable"
CHANNEL_BETA = "beta"
CHANNEL_DEV = "dev"
@ -249,6 +256,19 @@ PRIVILEGED_SYS_RESOURCE = "SYS_RESOURCE"
PRIVILEGED_SYS_PTRACE = "SYS_PTRACE"
PRIVILEGED_DAC_READ_SEARCH = "DAC_READ_SEARCH"
PRIVILEGED_ALL = [
PRIVILEGED_NET_ADMIN,
PRIVILEGED_SYS_ADMIN,
PRIVILEGED_SYS_RAWIO,
PRIVILEGED_IPC_LOCK,
PRIVILEGED_SYS_TIME,
PRIVILEGED_SYS_NICE,
PRIVILEGED_SYS_RESOURCE,
PRIVILEGED_SYS_PTRACE,
PRIVILEGED_SYS_MODULE,
PRIVILEGED_DAC_READ_SEARCH,
]
FEATURES_SHUTDOWN = "shutdown"
FEATURES_REBOOT = "reboot"
FEATURES_HASSOS = "hassos"
@ -261,5 +281,13 @@ ROLE_BACKUP = "backup"
ROLE_MANAGER = "manager"
ROLE_ADMIN = "admin"
ROLE_ALL = [
ROLE_DEFAULT,
ROLE_HOMEASSISTANT,
ROLE_BACKUP,
ROLE_MANAGER,
ROLE_ADMIN,
]
CHAN_ID = "chan_id"
CHAN_TYPE = "chan_type"


@ -6,8 +6,8 @@ import logging
import async_timeout
from .coresys import CoreSysAttributes
from .const import (
STARTUP_SYSTEM, STARTUP_SERVICES, STARTUP_APPLICATION, STARTUP_INITIALIZE)
from .const import (STARTUP_SYSTEM, STARTUP_SERVICES, STARTUP_APPLICATION,
STARTUP_INITIALIZE)
from .exceptions import HassioError, HomeAssistantError
_LOGGER = logging.getLogger(__name__)
@ -31,12 +31,15 @@ class HassIO(CoreSysAttributes):
# Load Host
await self.sys_host.load()
# Load HassOS
await self.sys_hassos.load()
# Load Home Assistant
await self.sys_homeassistant.load()
# Load CPU/Arch
await self.sys_arch.load()
# Load HassOS
await self.sys_hassos.load()
# Load Add-ons
await self.sys_addons.load()


@ -1,300 +1,455 @@
"""Handle core shared data."""
from __future__ import annotations
import asyncio
from typing import TYPE_CHECKING
import aiohttp
from .const import CHANNEL_DEV
from .config import CoreConfig
from .const import CHANNEL_DEV
from .docker import DockerAPI
from .misc.dns import DNSForward
from .misc.hardware import Hardware
from .misc.scheduler import Scheduler
if TYPE_CHECKING:
from .addons import AddonManager
from .api import RestAPI
from .arch import CpuArch
from .auth import Auth
from .core import HassIO
from .dbus import DBusManager
from .discovery import Discovery
from .hassos import HassOS
from .homeassistant import HomeAssistant
from .host import HostManager
from .services import ServiceManager
from .snapshots import SnapshotManager
from .supervisor import Supervisor
from .tasks import Tasks
from .updater import Updater
class CoreSys:
"""Class that handle all shared data."""
def __init__(self, loop):
def __init__(self):
"""Initialize coresys."""
# Static attributes
self.exit_code = 0
self.machine_id = None
self.machine_id: str = None
# External objects
self._loop = loop
self._websession = aiohttp.ClientSession(loop=loop)
self._websession_ssl = aiohttp.ClientSession(
connector=aiohttp.TCPConnector(verify_ssl=False), loop=loop)
self._loop: asyncio.BaseEventLoop = asyncio.get_running_loop()
self._websession: aiohttp.ClientSession = aiohttp.ClientSession()
self._websession_ssl: aiohttp.ClientSession = aiohttp.ClientSession(
connector=aiohttp.TCPConnector(ssl=False))
# Global objects
self._config = CoreConfig()
self._hardware = Hardware()
self._docker = DockerAPI()
self._scheduler = Scheduler(loop=loop)
self._dns = DNSForward(loop=loop)
self._config: CoreConfig = CoreConfig()
self._hardware: Hardware = Hardware()
self._docker: DockerAPI = DockerAPI()
self._scheduler: Scheduler = Scheduler()
self._dns: DNSForward = DNSForward()
# Internal objects pointers
self._core = None
self._auth = None
self._homeassistant = None
self._supervisor = None
self._addons = None
self._api = None
self._updater = None
self._snapshots = None
self._tasks = None
self._host = None
self._dbus = None
self._hassos = None
self._services = None
self._discovery = None
self._core: HassIO = None
self._arch: CpuArch = None
self._auth: Auth = None
self._homeassistant: HomeAssistant = None
self._supervisor: Supervisor = None
self._addons: AddonManager = None
self._api: RestAPI = None
self._updater: Updater = None
self._snapshots: SnapshotManager = None
self._tasks: Tasks = None
self._host: HostManager = None
self._dbus: DBusManager = None
self._hassos: HassOS = None
self._services: ServiceManager = None
self._discovery: Discovery = None
@property
def arch(self):
"""Return running arch of the Hass.io system."""
if self._supervisor:
return self._supervisor.arch
return None
@property
def machine(self):
def machine(self) -> str:
"""Return running machine type of the Hass.io system."""
if self._homeassistant:
return self._homeassistant.machine
return None
@property
def dev(self):
def dev(self) -> str:
"""Return True if we run dev mode."""
return self._updater.channel == CHANNEL_DEV
@property
def timezone(self):
def timezone(self) -> str:
"""Return timezone."""
return self._config.timezone
@property
def loop(self):
def loop(self) -> asyncio.BaseEventLoop:
"""Return loop object."""
return self._loop
@property
def websession(self):
def websession(self) -> aiohttp.ClientSession:
"""Return websession object."""
return self._websession
@property
def websession_ssl(self):
def websession_ssl(self) -> aiohttp.ClientSession:
"""Return websession object with disabled SSL."""
return self._websession_ssl
@property
def config(self):
def config(self) -> CoreConfig:
"""Return CoreConfig object."""
return self._config
@property
def hardware(self):
def hardware(self) -> Hardware:
"""Return Hardware object."""
return self._hardware
@property
def docker(self):
def docker(self) -> DockerAPI:
"""Return DockerAPI object."""
return self._docker
@property
def scheduler(self):
def scheduler(self) -> Scheduler:
"""Return Scheduler object."""
return self._scheduler
@property
def dns(self):
def dns(self) -> DNSForward:
"""Return DNSForward object."""
return self._dns
@property
def core(self):
def core(self) -> HassIO:
"""Return HassIO object."""
return self._core
@core.setter
def core(self, value):
def core(self, value: HassIO):
"""Set a Hass.io object."""
if self._core:
raise RuntimeError("Hass.io already set!")
self._core = value
@property
def auth(self):
def arch(self) -> CpuArch:
"""Return CpuArch object."""
return self._arch
@arch.setter
def arch(self, value: CpuArch):
"""Set a CpuArch object."""
if self._arch:
raise RuntimeError("CpuArch already set!")
self._arch = value
@property
def auth(self) -> Auth:
"""Return Auth object."""
return self._auth
@auth.setter
def auth(self, value):
def auth(self, value: Auth):
"""Set a Auth object."""
if self._auth:
raise RuntimeError("Auth already set!")
self._auth = value
@property
def homeassistant(self):
def homeassistant(self) -> HomeAssistant:
"""Return Home Assistant object."""
return self._homeassistant
@homeassistant.setter
def homeassistant(self, value):
def homeassistant(self, value: HomeAssistant):
"""Set a HomeAssistant object."""
if self._homeassistant:
raise RuntimeError("Home Assistant already set!")
self._homeassistant = value
@property
def supervisor(self):
def supervisor(self) -> Supervisor:
"""Return Supervisor object."""
return self._supervisor
@supervisor.setter
def supervisor(self, value):
def supervisor(self, value: Supervisor):
"""Set a Supervisor object."""
if self._supervisor:
raise RuntimeError("Supervisor already set!")
self._supervisor = value
@property
def api(self):
def api(self) -> RestAPI:
"""Return API object."""
return self._api
@api.setter
def api(self, value):
def api(self, value: RestAPI):
"""Set an API object."""
if self._api:
raise RuntimeError("API already set!")
self._api = value
@property
def updater(self):
def updater(self) -> Updater:
"""Return Updater object."""
return self._updater
@updater.setter
def updater(self, value):
def updater(self, value: Updater):
"""Set a Updater object."""
if self._updater:
raise RuntimeError("Updater already set!")
self._updater = value
@property
def addons(self):
def addons(self) -> AddonManager:
"""Return AddonManager object."""
return self._addons
@addons.setter
def addons(self, value):
def addons(self, value: AddonManager):
"""Set a AddonManager object."""
if self._addons:
raise RuntimeError("AddonManager already set!")
self._addons = value
@property
def snapshots(self):
def snapshots(self) -> SnapshotManager:
"""Return SnapshotManager object."""
return self._snapshots
@snapshots.setter
def snapshots(self, value):
def snapshots(self, value: SnapshotManager):
"""Set a SnapshotManager object."""
if self._snapshots:
raise RuntimeError("SnapshotsManager already set!")
self._snapshots = value
@property
def tasks(self):
def tasks(self) -> Tasks:
"""Return Tasks object."""
return self._tasks
@tasks.setter
def tasks(self, value):
def tasks(self, value: Tasks):
"""Set a Tasks object."""
if self._tasks:
raise RuntimeError("Tasks already set!")
self._tasks = value
@property
def services(self):
def services(self) -> ServiceManager:
"""Return ServiceManager object."""
return self._services
@services.setter
def services(self, value):
def services(self, value: ServiceManager):
"""Set a ServiceManager object."""
if self._services:
raise RuntimeError("Services already set!")
self._services = value
@property
def discovery(self):
def discovery(self) -> Discovery:
"""Return ServiceManager object."""
return self._discovery
@discovery.setter
def discovery(self, value):
def discovery(self, value: Discovery):
"""Set a Discovery object."""
if self._discovery:
raise RuntimeError("Discovery already set!")
self._discovery = value
@property
def dbus(self):
def dbus(self) -> DBusManager:
"""Return DBusManager object."""
return self._dbus
@dbus.setter
def dbus(self, value):
def dbus(self, value: DBusManager):
"""Set a DBusManager object."""
if self._dbus:
raise RuntimeError("DBusManager already set!")
self._dbus = value
@property
def host(self):
def host(self) -> HostManager:
"""Return HostManager object."""
return self._host
@host.setter
def host(self, value):
def host(self, value: HostManager):
"""Set a HostManager object."""
if self._host:
raise RuntimeError("HostManager already set!")
self._host = value
@property
def hassos(self):
def hassos(self) -> HassOS:
"""Return HassOS object."""
return self._hassos
@hassos.setter
def hassos(self, value):
def hassos(self, value: HassOS):
"""Set a HassOS object."""
if self._hassos:
raise RuntimeError("HassOS already set!")
self._hassos = value
def run_in_executor(self, funct, *args):
"""Wrapper for executor pool."""
return self._loop.run_in_executor(None, funct, *args)
def create_task(self, coroutine):
"""Wrapper for async task."""
return self._loop.create_task(coroutine)
class CoreSysAttributes:
"""Inheret basic CoreSysAttributes."""
coresys = None
def __getattr__(self, name):
"""Mapping to coresys."""
if name.startswith("sys_") and hasattr(self.coresys, name[4:]):
return getattr(self.coresys, name[4:])
raise AttributeError(f"Can't resolve {name} on {self}")
@property
def sys_machine(self) -> str:
"""Return running machine type of the Hass.io system."""
return self.coresys.machine
@property
def sys_dev(self) -> str:
"""Return True if we run dev mode."""
return self.coresys.dev
@property
def sys_timezone(self) -> str:
"""Return timezone."""
return self.coresys.timezone
@property
def sys_machine_id(self) -> str:
"""Return timezone."""
return self.coresys.machine_id
@property
def sys_loop(self) -> asyncio.BaseEventLoop:
"""Return loop object."""
return self.coresys.loop
@property
def sys_websession(self) -> aiohttp.ClientSession:
"""Return websession object."""
return self.coresys.websession
@property
def sys_websession_ssl(self) -> aiohttp.ClientSession:
"""Return websession object with disabled SSL."""
return self.coresys.websession_ssl
@property
def sys_config(self) -> CoreConfig:
"""Return CoreConfig object."""
return self.coresys.config
@property
def sys_hardware(self) -> Hardware:
"""Return Hardware object."""
return self.coresys.hardware
@property
def sys_docker(self) -> DockerAPI:
"""Return DockerAPI object."""
return self.coresys.docker
@property
def sys_scheduler(self) -> Scheduler:
"""Return Scheduler object."""
return self.coresys.scheduler
@property
def sys_dns(self) -> DNSForward:
"""Return DNSForward object."""
return self.coresys.dns
@property
def sys_core(self) -> HassIO:
"""Return HassIO object."""
return self.coresys.core
@property
def sys_arch(self) -> CpuArch:
"""Return CpuArch object."""
return self.coresys.arch
@property
def sys_auth(self) -> Auth:
"""Return Auth object."""
return self.coresys.auth
@property
def sys_homeassistant(self) -> HomeAssistant:
"""Return Home Assistant object."""
return self.coresys.homeassistant
@property
def sys_supervisor(self) -> Supervisor:
"""Return Supervisor object."""
return self.coresys.supervisor
@property
def sys_api(self) -> RestAPI:
"""Return API object."""
return self.coresys.api
@property
def sys_updater(self) -> Updater:
"""Return Updater object."""
return self.coresys.updater
@property
def sys_addons(self) -> AddonManager:
"""Return AddonManager object."""
return self.coresys.addons
@property
def sys_snapshots(self) -> SnapshotManager:
"""Return SnapshotManager object."""
return self.coresys.snapshots
@property
def sys_tasks(self) -> Tasks:
"""Return Tasks object."""
return self.coresys.tasks
@property
def sys_services(self) -> ServiceManager:
"""Return ServiceManager object."""
return self.coresys.services
@property
def sys_discovery(self) -> Discovery:
"""Return ServiceManager object."""
return self.coresys.discovery
@property
def sys_dbus(self) -> DBusManager:
"""Return DBusManager object."""
return self.coresys.dbus
@property
def sys_host(self) -> HostManager:
"""Return HostManager object."""
return self.coresys.host
@property
def sys_hassos(self) -> HassOS:
"""Return HassOS object."""
return self.coresys.hassos
def sys_run_in_executor(self, funct, *args) -> asyncio.Future:
"""Wrapper for executor pool."""
return self.sys_loop.run_in_executor(None, funct, *args)
def sys_create_task(self, coroutine) -> asyncio.Task:
"""Wrapper for async task."""
return self.sys_loop.create_task(coroutine)
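
A small sketch of why `initialize_coresys()` became a coroutine: `CoreSys()` now calls `asyncio.get_running_loop()` itself, so it can only be constructed inside a running loop. The class below is an illustrative stand-in, not the real constructor.

```python
import asyncio


class LoopBound:
    """Illustrative stand-in for CoreSys: grabs the running loop on construction."""

    def __init__(self):
        self._loop = asyncio.get_running_loop()  # raises RuntimeError outside a loop


async def build():
    # Mirrors the new async bootstrap.initialize_coresys()
    return LoopBound()


obj = asyncio.get_event_loop().run_until_complete(build())  # ok: loop is running

try:
    LoopBound()                                  # no running loop here
except RuntimeError as err:
    print(f"must be created from async code: {err}")
```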


@ -7,9 +7,8 @@ import requests
from .interface import DockerInterface
from ..addons.build import AddonBuild
from ..const import (
MAP_CONFIG, MAP_SSL, MAP_ADDONS, MAP_BACKUP, MAP_SHARE, ENV_TOKEN,
ENV_TIME, SECURITY_PROFILE, SECURITY_DISABLE)
from ..const import (MAP_CONFIG, MAP_SSL, MAP_ADDONS, MAP_BACKUP, MAP_SHARE,
ENV_TOKEN, ENV_TIME, SECURITY_PROFILE, SECURITY_DISABLE)
from ..utils import process_lock
_LOGGER = logging.getLogger(__name__)
@ -43,16 +42,16 @@ class DockerAddon(DockerInterface):
@property
def version(self):
"""Return version of Docker image."""
if not self.addon.legacy:
return super().version
return self.addon.version_installed
if self.addon.legacy:
return self.addon.version_installed
return super().version
@property
def arch(self):
"""Return arch of Docker image."""
if not self.addon.legacy:
return super().arch
return self.sys_arch
if self.addon.legacy:
return self.sys_arch.default
return super().arch
@property
def name(self):
@ -178,8 +177,10 @@ class DockerAddon(DockerInterface):
"""Generate volumes for mappings."""
volumes = {
str(self.addon.path_extern_data): {
'bind': "/data", 'mode': 'rw'
}}
'bind': "/data",
'mode': 'rw'
}
}
addon_mapping = self.addon.map_volumes
@ -187,32 +188,42 @@ class DockerAddon(DockerInterface):
if MAP_CONFIG in addon_mapping:
volumes.update({
str(self.sys_config.path_extern_homeassistant): {
'bind': "/config", 'mode': addon_mapping[MAP_CONFIG]
}})
'bind': "/config",
'mode': addon_mapping[MAP_CONFIG]
}
})
if MAP_SSL in addon_mapping:
volumes.update({
str(self.sys_config.path_extern_ssl): {
'bind': "/ssl", 'mode': addon_mapping[MAP_SSL]
}})
'bind': "/ssl",
'mode': addon_mapping[MAP_SSL]
}
})
if MAP_ADDONS in addon_mapping:
volumes.update({
str(self.sys_config.path_extern_addons_local): {
'bind': "/addons", 'mode': addon_mapping[MAP_ADDONS]
}})
'bind': "/addons",
'mode': addon_mapping[MAP_ADDONS]
}
})
if MAP_BACKUP in addon_mapping:
volumes.update({
str(self.sys_config.path_extern_backup): {
'bind': "/backup", 'mode': addon_mapping[MAP_BACKUP]
}})
'bind': "/backup",
'mode': addon_mapping[MAP_BACKUP]
}
})
if MAP_SHARE in addon_mapping:
volumes.update({
str(self.sys_config.path_extern_share): {
'bind': "/share", 'mode': addon_mapping[MAP_SHARE]
}})
'bind': "/share",
'mode': addon_mapping[MAP_SHARE]
}
})
# Init other hardware mappings
@ -221,7 +232,8 @@ class DockerAddon(DockerInterface):
for gpio_path in ("/sys/class/gpio", "/sys/devices/platform/soc"):
volumes.update({
gpio_path: {
'bind': gpio_path, 'mode': 'rw'
'bind': gpio_path,
'mode': 'rw'
},
})
@ -229,7 +241,8 @@ class DockerAddon(DockerInterface):
if self.addon.with_devicetree:
volumes.update({
"/sys/firmware/devicetree/base": {
'bind': "/device-tree", 'mode': 'ro'
'bind': "/device-tree",
'mode': 'ro'
},
})
@ -237,7 +250,8 @@ class DockerAddon(DockerInterface):
if self.addon.with_kernel_modules:
volumes.update({
"/lib/modules": {
'bind': "/lib/modules", 'mode': 'ro'
'bind': "/lib/modules",
'mode': 'ro'
},
})
@ -245,7 +259,8 @@ class DockerAddon(DockerInterface):
if not self.addon.protected and self.addon.access_docker_api:
volumes.update({
"/var/run/docker.sock": {
'bind': "/var/run/docker.sock", 'mode': 'ro'
'bind': "/var/run/docker.sock",
'mode': 'ro'
},
})
@ -253,15 +268,19 @@ class DockerAddon(DockerInterface):
if self.addon.host_dbus:
volumes.update({
"/var/run/dbus": {
'bind': "/var/run/dbus", 'mode': 'rw'
}})
'bind': "/var/run/dbus",
'mode': 'rw'
}
})
# ALSA configuration
if self.addon.with_audio:
volumes.update({
str(self.addon.path_extern_asound): {
'bind': "/etc/asound.conf", 'mode': 'ro'
}})
'bind': "/etc/asound.conf",
'mode': 'ro'
}
})
return volumes
@ -275,8 +294,8 @@ class DockerAddon(DockerInterface):
# Security check
if not self.addon.protected:
_LOGGER.warning(
"%s run with disabled protected mode!", self.addon.name)
_LOGGER.warning("%s run with disabled protected mode!",
self.addon.name)
# cleanup
self._stop()
@ -299,16 +318,15 @@ class DockerAddon(DockerInterface):
security_opt=self.security_opt,
environment=self.environment,
volumes=self.volumes,
tmpfs=self.tmpfs
)
tmpfs=self.tmpfs)
if ret:
_LOGGER.info("Start Docker add-on %s with version %s",
self.image, self.version)
_LOGGER.info("Start Docker add-on %s with version %s", self.image,
self.version)
return ret
def _install(self, tag):
def _install(self, tag, image=None):
"""Pull Docker image or build it.
Need run inside executor.
@ -316,7 +334,7 @@ class DockerAddon(DockerInterface):
if self.addon.need_build:
return self._build(tag)
return super()._install(tag)
return super()._install(tag, image)
def _build(self, tag):
"""Build a Docker container.
@ -328,8 +346,7 @@ class DockerAddon(DockerInterface):
_LOGGER.info("Start build %s:%s", self.image, tag)
try:
image, log = self.sys_docker.images.build(
use_config_proxy=False,
**build_env.get_docker_args(tag))
use_config_proxy=False, **build_env.get_docker_args(tag))
_LOGGER.debug("Build %s:%s done: %s", self.image, tag, log)
image.tag(self.image, tag='latest')


@ -3,8 +3,8 @@ import logging
import docker
from .interface import DockerInterface
from ..coresys import CoreSysAttributes
from .interface import DockerInterface
_LOGGER = logging.getLogger(__name__)
@ -15,7 +15,7 @@ class DockerHassOSCli(DockerInterface, CoreSysAttributes):
@property
def image(self):
"""Return name of HassOS CLI image."""
return f"homeassistant/{self.sys_arch}-hassio-cli"
return f"homeassistant/{self.sys_arch.supervisor}-hassio-cli"
def _stop(self):
"""Don't need stop."""
@ -33,5 +33,5 @@ class DockerHassOSCli(DockerInterface, CoreSysAttributes):
else:
self._meta = image.attrs
_LOGGER.info("Found HassOS CLI %s with version %s",
self.image, self.version)
_LOGGER.info("Found HassOS CLI %s with version %s", self.image,
self.version)


@ -65,26 +65,28 @@ class DockerInterface(CoreSysAttributes):
return self.lock.locked()
@process_lock
def install(self, tag):
def install(self, tag, image=None):
"""Pull docker image."""
return self.sys_run_in_executor(self._install, tag)
return self.sys_run_in_executor(self._install, tag, image)
def _install(self, tag):
def _install(self, tag, image=None):
"""Pull Docker image.
Need run inside executor.
"""
try:
_LOGGER.info("Pull image %s tag %s.", self.image, tag)
image = self.sys_docker.images.pull(f"{self.image}:{tag}")
image = image or self.image
image.tag(self.image, tag='latest')
self._meta = image.attrs
try:
_LOGGER.info("Pull image %s tag %s.", image, tag)
docker_image = self.sys_docker.images.pull(f"{image}:{tag}")
docker_image.tag(image, tag='latest')
self._meta = docker_image.attrs
except docker.errors.APIError as err:
_LOGGER.error("Can't install %s:%s -> %s.", self.image, tag, err)
_LOGGER.error("Can't install %s:%s -> %s.", image, tag, err)
return False
_LOGGER.info("Tag image %s with version %s as latest", self.image, tag)
_LOGGER.info("Tag image %s with version %s as latest", image, tag)
return True
def exists(self):
@ -97,8 +99,8 @@ class DockerInterface(CoreSysAttributes):
Need run inside executor.
"""
try:
image = self.sys_docker.images.get(self.image)
assert f"{self.image}:{self.version}" in image.tags
docker_image = self.sys_docker.images.get(self.image)
assert f"{self.image}:{self.version}" in docker_image.tags
except (docker.errors.DockerException, AssertionError):
return False
@ -117,17 +119,17 @@ class DockerInterface(CoreSysAttributes):
Need run inside executor.
"""
try:
container = self.sys_docker.containers.get(self.name)
image = self.sys_docker.images.get(self.image)
docker_container = self.sys_docker.containers.get(self.name)
docker_image = self.sys_docker.images.get(self.image)
except docker.errors.DockerException:
return False
# container is not running
if container.status != 'running':
if docker_container.status != 'running':
return False
# we run on an old image, stop and start it
if container.image.id != image.id:
if docker_container.image.id != docker_image.id:
return False
return True
@ -150,8 +152,8 @@ class DockerInterface(CoreSysAttributes):
except docker.errors.DockerException:
return False
_LOGGER.info(
"Attach to image %s with version %s", self.image, self.version)
_LOGGER.info("Attach to image %s with version %s", self.image,
self.version)
return True
@ -178,18 +180,18 @@ class DockerInterface(CoreSysAttributes):
Need run inside executor.
"""
try:
container = self.sys_docker.containers.get(self.name)
docker_container = self.sys_docker.containers.get(self.name)
except docker.errors.DockerException:
return False
if container.status == 'running':
if docker_container.status == 'running':
_LOGGER.info("Stop %s Docker application", self.image)
with suppress(docker.errors.DockerException):
container.stop(timeout=self.timeout)
docker_container.stop(timeout=self.timeout)
with suppress(docker.errors.DockerException):
_LOGGER.info("Clean %s Docker application", self.image)
container.remove(force=True)
docker_container.remove(force=True)
return True
@ -206,8 +208,8 @@ class DockerInterface(CoreSysAttributes):
# Cleanup container
self._stop()
_LOGGER.info(
"Remove Docker %s with latest and %s", self.image, self.version)
_LOGGER.info("Remove Docker %s with latest and %s", self.image,
self.version)
try:
with suppress(docker.errors.ImageNotFound):
@ -226,20 +228,22 @@ class DockerInterface(CoreSysAttributes):
return True
@process_lock
def update(self, tag):
def update(self, tag, image=None):
"""Update a Docker image."""
return self.sys_run_in_executor(self._update, tag)
return self.sys_run_in_executor(self._update, tag, image)
def _update(self, tag):
def _update(self, tag, image=None):
"""Update a docker image.
Need run inside executor.
"""
_LOGGER.info(
"Update Docker %s with %s:%s", self.version, self.image, tag)
image = image or self.image
_LOGGER.info("Update Docker %s:%s to %s:%s", self.image, self.version,
image, tag)
# Update docker image
if not self._install(tag):
if not self._install(tag, image):
return False
# Stop container & cleanup
@ -261,12 +265,12 @@ class DockerInterface(CoreSysAttributes):
Need run inside executor.
"""
try:
container = self.sys_docker.containers.get(self.name)
docker_container = self.sys_docker.containers.get(self.name)
except docker.errors.DockerException:
return b""
try:
return container.logs(tail=100, stdout=True, stderr=True)
return docker_container.logs(tail=100, stdout=True, stderr=True)
except docker.errors.DockerException as err:
_LOGGER.warning("Can't grep logs from %s: %s", self.image, err)
@ -318,12 +322,12 @@ class DockerInterface(CoreSysAttributes):
Need run inside executor.
"""
try:
container = self.sys_docker.containers.get(self.name)
docker_container = self.sys_docker.containers.get(self.name)
except docker.errors.DockerException:
return None
try:
stats = container.stats(stream=False)
stats = docker_container.stats(stream=False)
return DockerStats(stats)
except docker.errors.DockerException as err:
_LOGGER.error("Can't read stats from %s: %s", self.name, err)


@ -11,6 +11,7 @@ class HassioNotSupportedError(HassioError):
# HomeAssistant
class HomeAssistantError(HassioError):
"""Home Assistant exception."""
@ -29,6 +30,7 @@ class HomeAssistantAuthError(HomeAssistantAPIError):
# HassOS
class HassOSError(HassioError):
"""HassOS exception."""
@ -41,20 +43,30 @@ class HassOSNotSupportedError(HassioNotSupportedError):
"""Function not supported by HassOS."""
# Arch
class HassioArchNotFound(HassioNotSupportedError):
"""No matches with exists arch."""
# Updater
class HassioUpdaterError(HassioError):
"""Error on Updater."""
# Auth
class AuthError(HassioError):
"""Auth errors."""
# Host
class HostError(HassioError):
"""Internal Host error."""
@ -73,6 +85,7 @@ class HostAppArmorError(HostError):
# API
class APIError(HassioError, RuntimeError):
"""API errors."""
@ -83,6 +96,7 @@ class APIForbidden(APIError):
# Service / Discovery
class DiscoveryError(HassioError):
"""Discovery Errors."""
@ -93,6 +107,7 @@ class ServicesError(HassioError):
# utils/gdbus
class DBusError(HassioError):
"""DBus generic error."""
@ -111,6 +126,7 @@ class DBusParseError(DBusError):
# util/apparmor
class AppArmorError(HostAppArmorError):
"""General AppArmor error."""


@ -13,16 +13,14 @@ import aiohttp
from aiohttp import hdrs
import attr
from .const import (
FILE_HASSIO_HOMEASSISTANT, ATTR_IMAGE, ATTR_LAST_VERSION, ATTR_UUID,
ATTR_BOOT, ATTR_PASSWORD, ATTR_PORT, ATTR_SSL, ATTR_WATCHDOG,
ATTR_WAIT_BOOT, ATTR_REFRESH_TOKEN, ATTR_ACCESS_TOKEN,
HEADER_HA_ACCESS)
from .const import (FILE_HASSIO_HOMEASSISTANT, ATTR_IMAGE, ATTR_LAST_VERSION,
ATTR_UUID, ATTR_BOOT, ATTR_PASSWORD, ATTR_PORT, ATTR_SSL,
ATTR_WATCHDOG, ATTR_WAIT_BOOT, ATTR_REFRESH_TOKEN,
ATTR_ACCESS_TOKEN, HEADER_HA_ACCESS)
from .coresys import CoreSysAttributes
from .docker.homeassistant import DockerHomeAssistant
from .exceptions import (
HomeAssistantUpdateError, HomeAssistantError, HomeAssistantAPIError,
HomeAssistantAuthError)
from .exceptions import (HomeAssistantUpdateError, HomeAssistantError,
HomeAssistantAPIError, HomeAssistantAuthError)
from .utils import convert_to_ascii, process_lock, create_token
from .utils.json import JsonConfig
from .validate import SCHEMA_HASS_CONFIG
@ -66,6 +64,11 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
"""Return the system machines."""
return self.instance.machine
@property
def arch(self):
"""Return arch of running Home Assistant."""
return self.instance.arch
@property
def error_state(self):
"""Return True if system is in error."""
@ -109,9 +112,8 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
@property
def api_url(self):
"""Return API url to Home Assistant."""
return "{}://{}:{}".format(
'https' if self.api_ssl else 'http', self.api_ip, self.api_port
)
return "{}://{}:{}".format('https' if self.api_ssl else 'http',
self.api_ip, self.api_port)
@property
def watchdog(self):
@ -171,8 +173,8 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
@property
def is_custom_image(self):
"""Return True if a custom image is used."""
return all(attr in self._data for attr in
(ATTR_IMAGE, ATTR_LAST_VERSION))
return all(
attr in self._data for attr in (ATTR_IMAGE, ATTR_LAST_VERSION))
@property
def boot(self):
@ -349,8 +351,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
async def check_config(self):
"""Run Home Assistant config check."""
result = await self.instance.execute_command(
"python3 -m homeassistant -c /config --script check_config"
)
"python3 -m homeassistant -c /config --script check_config")
# if not valid
if result.exit_code is None:
@ -379,8 +380,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
data={
"grant_type": "refresh_token",
"refresh_token": self.refresh_token
}
) as resp:
}) as resp:
if resp.status != 200:
_LOGGER.error("Can't update Home Assistant access token!")
raise HomeAssistantAuthError()
@ -392,8 +392,13 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
datetime.utcnow() + timedelta(seconds=tokens['expires_in'])
@asynccontextmanager
async def make_request(self, method, path, json=None, content_type=None,
data=None, timeout=30):
async def make_request(self,
method,
path,
json=None,
content_type=None,
data=None,
timeout=30):
"""Async context manager to make a request with right auth."""
url = f"{self.api_url}/{path}"
headers = {}
@ -415,8 +420,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
try:
async with getattr(self.sys_websession_ssl, method)(
url, data=data, timeout=timeout, json=json,
headers=headers
) as resp:
headers=headers) as resp:
# Access token expired
if resp.status == 401 and self.refresh_token:
self.access_token = None
@ -444,8 +448,8 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
"""Block until Home-Assistant is booting up or startup timeout."""
start_time = time.monotonic()
migration_progress = False
migration_file = Path(
self.sys_config.path_homeassistant, '.migration_progress')
migration_file = Path(self.sys_config.path_homeassistant,
'.migration_progress')
def check_port():
"""Check if port is mapped."""
@ -488,8 +492,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
# 4: Timeout
if time.monotonic() - start_time > self.wait_boot:
_LOGGER.warning(
"Don't wait anymore of Home Assistant startup!")
_LOGGER.warning("Don't wait anymore of Home Assistant startup!")
break
self._error_state = True
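A hedged usage sketch of the make_request async context manager reformatted above, assuming `homeassistant` is the HomeAssistant object held by coresys; the "api/" path is only illustrative:

async def fetch_api_root(homeassistant):
    """Return the Home Assistant API root payload, or None on failure."""
    async with homeassistant.make_request('get', 'api/') as resp:
        if resp.status == 200:
            return await resp.json()
    return None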


@ -13,9 +13,8 @@ COMMAND = "socat UDP-RECVFROM:53,fork UDP-SENDTO:127.0.0.11:53"
class DNSForward:
"""Manage DNS forwarding to internal DNS."""
def __init__(self, loop):
def __init__(self):
"""Initialize DNS forwarding."""
self.loop = loop
self.proc = None
async def start(self):
@ -25,9 +24,7 @@ class DNSForward:
*shlex.split(COMMAND),
stdin=asyncio.subprocess.DEVNULL,
stdout=asyncio.subprocess.DEVNULL,
stderr=asyncio.subprocess.DEVNULL,
loop=self.loop
)
stderr=asyncio.subprocess.DEVNULL)
except OSError as err:
_LOGGER.error("Can't start DNS forwarding: %s", err)
else:


@ -1,6 +1,7 @@
"""Schedule for Hass.io."""
import logging
import asyncio
from datetime import date, datetime, time, timedelta
import logging
_LOGGER = logging.getLogger(__name__)
@ -13,9 +14,9 @@ TASK = 'task'
class Scheduler:
"""Schedule task inside Hass.io."""
def __init__(self, loop):
def __init__(self):
"""Initialize task schedule."""
self.loop = loop
self.loop = asyncio.get_running_loop()
self._data = {}
self.suspend = False
@ -57,8 +58,8 @@ class Scheduler:
job = self.loop.call_later(interval, self._run_task, task_id)
elif isinstance(interval, time):
today = datetime.combine(date.today(), interval)
tomorrow = datetime.combine(
date.today() + timedelta(days=1), interval)
tomorrow = datetime.combine(date.today() + timedelta(days=1),
interval)
# Check if we run it today or next day
if today > datetime.today():
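Dropping the explicit loop argument means the Scheduler must now be constructed while an event loop is already running, because asyncio.get_running_loop() raises RuntimeError otherwise. A self-contained sketch of that constraint, with toy names rather than the project's classes:

import asyncio


class Periodic:
    """Toy scheduler that grabs the running loop in its constructor."""

    def __init__(self):
        self.loop = asyncio.get_running_loop()

    def call_soon(self, callback):
        """Schedule a callback on the captured loop."""
        self.loop.call_soon(callback)


async def main():
    periodic = Periodic()  # fine: constructed inside a running loop
    periodic.call_soon(lambda: print("tick"))
    await asyncio.sleep(0)


asyncio.run(main())
# Calling Periodic() at module level would raise "RuntimeError: no running event loop"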

requirements_tests.txt (new file)

@ -0,0 +1,5 @@
flake8==3.6.0
pylint==2.2.2
pytest==4.1.1
pytest-timeout==1.3.3
pytest-aiohttp==0.3.0

setup.cfg (new file)

@ -0,0 +1,17 @@
[isort]
multi_line_output = 4
indent = " "
not_skip = __init__.py
force_sort_within_sections = true
sections = FUTURE,STDLIB,INBETWEENS,THIRDPARTY,FIRSTPARTY,LOCALFOLDER
default_section = THIRDPARTY
forced_separate = tests
combine_as_imports = true
use_parentheses = true
[yapf]
based_on_style = chromium
indent_width = 4
[flake8]
max-line-length = 80


@ -2,7 +2,6 @@ from setuptools import setup
from hassio.const import HASSIO_VERSION
setup(
name='HassIO',
version=HASSIO_VERSION,
@ -11,9 +10,9 @@ setup(
author_email='hello@home-assistant.io',
url='https://home-assistant.io/',
description=('Open-source private cloud os for Home-Assistant'
' based on ResinOS'),
' based on HassOS'),
long_description=('A maintainless private cloud operator system that'
'setup a Home-Assistant instance. Based on ResinOS'),
'setup a Home-Assistant instance. Based on HassOS'),
classifiers=[
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Developers',
@ -30,13 +29,7 @@ setup(
zip_safe=False,
platforms='any',
packages=[
'hassio',
'hassio.docker',
'hassio.addons',
'hassio.api',
'hassio.misc',
'hassio.utils',
'hassio.snapshots'
'hassio', 'hassio.docker', 'hassio.addons', 'hassio.api', 'hassio.misc',
'hassio.utils', 'hassio.snapshots'
],
include_package_data=True
)
include_package_data=True)


@ -40,10 +40,12 @@ def test_invalid_repository():
with pytest.raises(vol.Invalid):
vd.SCHEMA_ADDON_CONFIG(config)
config['image'] = "registry.gitlab.com/company/add-ons/test-example/text-example:no-tag-allow"
config[
'image'] = "registry.gitlab.com/company/add-ons/test-example/text-example:no-tag-allow"
with pytest.raises(vol.Invalid):
vd.SCHEMA_ADDON_CONFIG(config)
def test_valid_repository():
"""Validate basic config with different valid repositories"""
config = load_json_fixture("basic-addon-config.json")
@ -59,4 +61,11 @@ def test_valid_map():
config = load_json_fixture("basic-addon-config.json")
config['map'] = ['backup:rw', 'ssl:ro', 'config']
valid_config = vd.SCHEMA_ADDON_CONFIG(config)
vd.SCHEMA_ADDON_CONFIG(config)
def test_valid_basic_build():
"""Validate basic build config."""
config = load_json_fixture("basic-build-config.json")
vd.SCHEMA_BUILD_CONFIG(config)

tests/conftest.py (new file)

@ -0,0 +1,42 @@
"""Common test functions."""
from unittest.mock import patch, PropertyMock, MagicMock
import pytest
from hassio.bootstrap import initialize_coresys
# pylint: disable=redefined-outer-name
@pytest.fixture
def docker():
"""Mock Docker API."""
with patch('hassio.coresys.DockerAPI') as mock:
yield mock
@pytest.fixture
async def coresys(loop, docker):
"""Create a CoreSys Mock."""
with patch('hassio.bootstrap.initialize_system_data'):
coresys_obj = await initialize_coresys()
yield coresys_obj
@pytest.fixture
def sys_machine():
"""Mock sys_machine."""
with patch(
'hassio.coresys.CoreSys.machine',
new_callable=PropertyMock) as mock:
yield mock
@pytest.fixture
def sys_supervisor():
"""Mock sys_supervisor."""
with patch(
'hassio.coresys.CoreSys.supervisor',
new_callable=PropertyMock) as mock:
mock.return_value = MagicMock()
yield MagicMock

tests/fixtures/basic-build-config.json (new file)

@ -0,0 +1,11 @@
{
"build_from": {
"armhf": "mycustom/base-image:latest",
"aarch64": "mycustom/base-image",
"amd64": "homeassistant/amd64-base-ubuntu:18.04"
},
"squash": false,
"args": {
"my_build_arg": "xy"
}
}
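This fixture exercises the multi-arch build configuration: build_from maps each supported architecture to its base image. A hedged sketch of how such a map could be consumed when building, assuming the conventional BUILD_FROM build argument (names are illustrative, not the commit's builder code):

import json
from pathlib import Path


def resolve_build_args(config_path: Path, arch: str) -> dict:
    """Return Docker build args for the given arch from a build config."""
    config = json.loads(config_path.read_text())
    base_image = config["build_from"][arch]
    return {"BUILD_FROM": base_image, **config.get("args", {})}


# resolve_build_args(Path("basic-build-config.json"), "amd64")
# -> {'BUILD_FROM': 'homeassistant/amd64-base-ubuntu:18.04', 'my_build_arg': 'xy'}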

tests/test_arch.py (new file)

@ -0,0 +1,149 @@
"""Test arch object."""
async def test_machine_not_exits(coresys, sys_machine, sys_supervisor):
"""Test arch for raspberrypi."""
sys_machine.return_value = None
sys_supervisor.arch = "amd64"
await coresys.arch.load()
assert coresys.arch.default == "amd64"
assert coresys.arch.supported == ["amd64"]
async def test_machine_not_exits_in_db(coresys, sys_machine, sys_supervisor):
"""Test arch for raspberrypi."""
sys_machine.return_value = "jedi-master-knight"
sys_supervisor.arch = "amd64"
await coresys.arch.load()
assert coresys.arch.default == "amd64"
assert coresys.arch.supported == ["amd64"]
async def test_supervisor_arch(coresys, sys_machine, sys_supervisor):
"""Test arch for raspberrypi."""
sys_machine.return_value = None
sys_supervisor.arch = "amd64"
assert coresys.arch.supervisor == "amd64"
await coresys.arch.load()
assert coresys.arch.supervisor == "amd64"
async def test_raspberrypi_arch(coresys, sys_machine):
"""Test arch for raspberrypi."""
sys_machine.return_value = "raspberrypi"
await coresys.arch.load()
assert coresys.arch.default == "armhf"
assert coresys.arch.supported == ["armhf"]
async def test_raspberrypi2_arch(coresys, sys_machine):
"""Test arch for raspberrypi2."""
sys_machine.return_value = "raspberrypi2"
await coresys.arch.load()
assert coresys.arch.default == "armhf"
assert coresys.arch.supported == ["armhf"]
async def test_raspberrypi3_arch(coresys, sys_machine):
"""Test arch for raspberrypi3."""
sys_machine.return_value = "raspberrypi3"
await coresys.arch.load()
assert coresys.arch.default == "armhf"
assert coresys.arch.supported == ["armhf"]
async def test_raspberrypi3_64_arch(coresys, sys_machine):
"""Test arch for raspberrypi3_64."""
sys_machine.return_value = "raspberrypi3-64"
await coresys.arch.load()
assert coresys.arch.default == "aarch64"
assert coresys.arch.supported == ["aarch64", "armhf"]
async def test_tinker_arch(coresys, sys_machine):
"""Test arch for tinker."""
sys_machine.return_value = "tinker"
await coresys.arch.load()
assert coresys.arch.default == "armhf"
assert coresys.arch.supported == ["armhf"]
async def test_odroid_c2_arch(coresys, sys_machine):
"""Test arch for odroid-c2."""
sys_machine.return_value = "odroid-c2"
await coresys.arch.load()
assert coresys.arch.default == "aarch64"
assert coresys.arch.supported == ["aarch64"]
async def test_odroid_xu_arch(coresys, sys_machine):
"""Test arch for odroid-xu."""
sys_machine.return_value = "odroid-xu"
await coresys.arch.load()
assert coresys.arch.default == "armhf"
assert coresys.arch.supported == ["armhf"]
async def test_orangepi_prime_arch(coresys, sys_machine):
"""Test arch for orangepi_prime."""
sys_machine.return_value = "orangepi-prime"
await coresys.arch.load()
assert coresys.arch.default == "aarch64"
assert coresys.arch.supported == ["aarch64"]
async def test_intel_nuc_arch(coresys, sys_machine):
"""Test arch for intel-nuc."""
sys_machine.return_value = "intel-nuc"
await coresys.arch.load()
assert coresys.arch.default == "amd64"
assert coresys.arch.supported == ["amd64", "i386"]
async def test_qemux86_arch(coresys, sys_machine):
"""Test arch for qemux86."""
sys_machine.return_value = "qemux86"
await coresys.arch.load()
assert coresys.arch.default == "i386"
assert coresys.arch.supported == ["i386"]
async def test_qemux86_64_arch(coresys, sys_machine):
"""Test arch for qemux86-64."""
sys_machine.return_value = "qemux86-64"
await coresys.arch.load()
assert coresys.arch.default == "amd64"
assert coresys.arch.supported == ["amd64", "i386"]
async def test_qemuarm_arch(coresys, sys_machine):
"""Test arch for qemuarm."""
sys_machine.return_value = "qemuarm"
await coresys.arch.load()
assert coresys.arch.default == "armhf"
assert coresys.arch.supported == ["armhf"]
async def test_qemuarm_64_arch(coresys, sys_machine):
"""Test arch for qemuarm-64."""
sys_machine.return_value = "qemuarm-64"
await coresys.arch.load()
assert coresys.arch.default == "aarch64"
assert coresys.arch.supported == ["aarch64"]


@ -3,9 +3,7 @@ envlist = lint, tests
[testenv]
deps =
flake8==3.6.0
pylint==2.2.2
pytest==4.1.1
-r{toxinidir}/requirements_tests.txt
-r{toxinidir}/requirements.txt
[testenv:lint]


@ -1,4 +0,0 @@
{
"hassio": "108",
"homeassistant": "0.70.0"
}