#!/usr/bin/env python3
"""Generate updated constraint and requirements files."""
from __future__ import annotations
import difflib
import importlib
from operator import itemgetter
import os
from pathlib import Path
import pkgutil
import re
import sys
import tomllib
from typing import Any
from homeassistant.util.yaml.loader import load_yaml
from script.hassfest.model import Integration
# Requirements which can't be installed on all systems because they rely on additional
# system packages. Requirements listed in EXCLUDED_REQUIREMENTS_ALL will be commented-out
# in requirements_all.txt and requirements_test_all.txt.
EXCLUDED_REQUIREMENTS_ALL: set[str] = {
    "atenpdu",  # depends on pysnmp which is not maintained at this time
    "avea",  # depends on bluepy
    "avion",
    "beacontools",
    "beewi-smartclim",  # depends on bluepy
    "bluepy",
    "decora",
    "decora-wifi",
    "evdev",
    "face-recognition",
    "pybluez",
    "pycocotools",
    "pycups",
    "python-gammu",
    "python-lirc",
    "pyuserinput",
    "tensorflow",
    "tf-models-official",
}
# Requirements excluded by EXCLUDED_REQUIREMENTS_ALL which should be included when
# building integration wheels for all architectures.
INCLUDED_REQUIREMENTS_WHEELS: set[str] = {
    "decora-wifi",
    "evdev",
    "pycups",
    "python-gammu",
    "pyuserinput",
}
# Requirements to exclude or include when running github actions.
# Requirements listed in "exclude" will be commented-out in
# requirements_all_{action}.txt
# Requirements listed in "include" must be listed in EXCLUDED_REQUIREMENTS_ALL, and
# will be included in requirements_all_{action}.txt
OVERRIDDEN_REQUIREMENTS_ACTIONS: dict[str, dict[str, set[str]]] = {
    "pytest": {"exclude": set(), "include": {"python-gammu"}},
    "wheels_aarch64": {"exclude": set(), "include": INCLUDED_REQUIREMENTS_WHEELS},
    # Pandas has issues building on armhf, it is expected they
    # will drop the platform in the near future (they consider it
    # "flimsy" on 386). The following packages depend on pandas,
    # so we comment them out.
    "wheels_armhf": {
        "exclude": {"env-canada", "noaa-coops", "pyezviz", "pykrakenapi"},
        "include": INCLUDED_REQUIREMENTS_WHEELS,
    },
    "wheels_armv7": {"exclude": set(), "include": INCLUDED_REQUIREMENTS_WHEELS},
    "wheels_amd64": {"exclude": set(), "include": INCLUDED_REQUIREMENTS_WHEELS},
    "wheels_i386": {"exclude": set(), "include": INCLUDED_REQUIREMENTS_WHEELS},
}
# Requirements that are exempt from the "must be pinned with ==" rule
# enforced by process_requirements().
IGNORE_PIN = ("colorlog>2.1,<3", "urllib3")
# Developer documentation referenced in the "please pin" error message.
URL_PIN = (
    "https://developers.home-assistant.io/docs/"
    "creating_platform_code_review.html#1-requirements"
)
# Path to the generated package constraints file, relative to this script.
CONSTRAINT_PATH = os.path.join(
    os.path.dirname(__file__), "../homeassistant/package_constraints.txt"
)
# Extra constraints appended verbatim to the generated
# homeassistant/package_constraints.txt. The "#" lines inside this string are
# content of the generated file, not Python comments — do not edit casually.
CONSTRAINT_BASE = """
# Constrain pycryptodome to avoid vulnerability
# see https://github.com/home-assistant/core/pull/16238
pycryptodome>=3.6.6
# Constrain httplib2 to protect against GHSA-93xj-8mrv-444m
# https://github.com/advisories/GHSA-93xj-8mrv-444m
httplib2>=0.19.0
# gRPC is an implicit dependency that we want to make explicit so we manage
# upgrades intentionally. It is a large package to build from source and we
# want to ensure we have wheels built.
grpcio==1.59.0
grpcio-status==1.59.0
grpcio-reflection==1.59.0
# libcst >=0.4.0 requires a newer Rust than we currently have available,
# thus our wheels builds fail. This pins it to the last working version,
# which at this point satisfies our needs.
libcst==0.3.23
# This is a old unmaintained library and is replaced with pycryptodome
pycrypto==1000000000.0.0
# This is a old unmaintained library and is replaced with faust-cchardet
cchardet==1000000000.0.0
# To remove reliance on typing
btlewrap>=0.0.10
# This overrides a built-in Python package
enum34==1000000000.0.0
typing==1000000000.0.0
uuid==1000000000.0.0
# regex causes segfault with version 2021.8.27
# https://bitbucket.org/mrabarnett/mrab-regex/issues/421/2021827-results-in-fatal-python-error
# This is fixed in 2021.8.28
regex==2021.8.28
# httpx requires httpcore, and httpcore requires anyio and h11, but the version constraints on
# these requirements are quite loose. As the entire stack has some outstanding issues, and
# even newer versions seem to introduce new issues, it's useful for us to pin all these
# requirements so we can directly link HA versions to these library versions.
anyio==4.3.0
h11==0.14.0
httpcore==1.0.5
# Ensure we have a hyperframe version that works in Python 3.10
# 5.2.0 fixed a collections abc deprecation
hyperframe>=5.2.0
# Ensure we run compatible with musllinux build env
numpy==1.26.0
# Prevent dependency conflicts between sisyphus-control and aioambient
# until upper bounds for sisyphus-control have been updated
# https://github.com/jkeljo/sisyphus-control/issues/6
python-engineio>=3.13.1,<4.0
python-socketio>=4.6.0,<5.0
# Constrain multidict to avoid typing issues
# https://github.com/home-assistant/core/pull/67046
multidict>=6.0.2
# Version 2.0 added typing, prevent accidental fallbacks
backoff>=2.0
# Required to avoid breaking (#101042).
# v2 has breaking changes (#99218).
pydantic==1.10.15
# Breaks asyncio
# https://github.com/pubnub/python/issues/130
pubnub!=6.4.0
# Package's __init__.pyi stub has invalid syntax and breaks mypy
# https://github.com/dahlia/iso4217/issues/16
iso4217!=1.10.20220401
# pyOpenSSL 24.0.0 or later required to avoid import errors when
# cryptography 42.0.0 is installed with botocore
pyOpenSSL>=24.0.0
# protobuf must be in package constraints for the wheel
# builder to build binary wheels
protobuf==4.25.1
# faust-cchardet: Ensure we have a version we can build wheels
# 2.1.18 is the first version that works with our wheel builder
faust-cchardet>=2.1.18
# websockets 11.0 is missing files in the source distribution
# which break wheel builds so we need at least 11.0.1
# https://github.com/aaugustin/websockets/issues/1329
websockets>=11.0.1
# pysnmplib is no longer maintained and does not work with newer
# python
pysnmplib==1000000000.0.0
# pysnmp is no longer maintained and does not work with newer
# python
pysnmp==1000000000.0.0
# The get-mac package has been replaced with getmac. Installing get-mac alongside getmac
# breaks getmac due to them both sharing the same python package name inside 'getmac'.
get-mac==1000000000.0.0
# We want to skip the binary wheels for the 'charset-normalizer' packages.
# They are build with mypyc, but causes issues with our wheel builder.
# In order to do so, we need to constrain the version.
charset-normalizer==3.2.0
# dacite: Ensure we have a version that is able to handle type unions for
# Roborock, NAM, Brother, and GIOS.
dacite>=1.7.0
# Musle wheels for pandas 2.2.0 cannot be build for any architecture.
pandas==2.1.4
# chacha20poly1305-reuseable==0.12.0 is incompatible with cryptography==42.0.x
chacha20poly1305-reuseable>=0.12.1
# pycountry<23.12.11 imports setuptools at run time
# https://github.com/pycountry/pycountry/blob/ea69bab36f00df58624a0e490fdad4ccdc14268b/HISTORY.txt#L39
pycountry>=23.12.11
# scapy<2.5.0 will not work with python3.12
scapy>=2.5.0
# tuf isn't updated to deal with breaking changes in securesystemslib==1.0.
# Only tuf>=4 includes a constraint to <1.0.
# https://github.com/theupdateframework/python-tuf/releases/tag/v4.0.0
tuf>=4.0.0
"""
# Header prepended to every generated file.
GENERATED_MESSAGE = (
    f"# Automatically generated by {Path(__file__).name}, do not edit\n\n"
)
# Pre-commit hook ids that must not be turned into pip requirements in
# requirements_test_pre_commit.txt.
IGNORE_PRE_COMMIT_HOOK_ID = (
    "check-executables-have-shebangs",
    "check-json",
    "no-commit-to-branch",
    "prettier",
    "python-typing-update",
    "ruff-format",  # it's just ruff
)
# Captures the package name from a requirement line that is pinned with "==",
# optionally skipping a leading "--<flag> " prefix.
PACKAGE_REGEX = re.compile(r"^(?:--.+\s)?([-_\.\w\d]+).*==.+$")
def has_tests(module: str) -> bool:
    """Check whether a module has a matching test package.

    Module format: homeassistant.components.hue
    Test if exists: tests/components/hue/__init__.py
    """
    # Map the module path to its mirror under tests/ and probe for __init__.py.
    relative = module.replace(".", "/").replace("homeassistant", "tests")
    init_file = Path(relative, "__init__.py")
    return init_file.exists()
def explore_module(package: str, explore_children: bool) -> list[str]:
    """Return dotted names of the submodules of *package*.

    When explore_children is True, each submodule's own submodules are
    appended immediately after it (one extra level, not fully recursive).
    """
    module = importlib.import_module(package)
    # Plain (non-package) modules have no __path__ and thus no submodules.
    if not hasattr(module, "__path__"):
        return []
    discovered: list[str] = []
    for _, child, _ in pkgutil.iter_modules(module.__path__, f"{package}."):
        discovered.append(child)
        if explore_children:
            # Interleave: children of a submodule follow it directly.
            discovered.extend(explore_module(child, False))
    return discovered
def core_requirements() -> list[str]:
    """Read the core dependency list from pyproject.toml (cwd must be repo root)."""
    with open("pyproject.toml", "rb") as pyproject:
        project_data = tomllib.load(pyproject)
    return project_data["project"]["dependencies"]
def gather_recursive_requirements(
    domain: str, seen: set[str] | None = None
) -> set[str]:
    """Recursively gather requirements from an integration and its dependencies.

    Args:
        domain: Integration domain, e.g. "mqtt".
        seen: Domains already visited, used to avoid re-walking shared
            dependencies and to break dependency cycles.

    Returns:
        The union of requirement strings, excluding any requirement that
        already appears verbatim inside CONSTRAINT_BASE (substring test —
        this matches the script's historical behavior).
    """
    if seen is None:
        seen = set()
    seen.add(domain)
    integration = Integration(Path(f"homeassistant/components/{domain}"))
    integration.load_manifest()
    reqs = {x for x in integration.requirements if x not in CONSTRAINT_BASE}
    for dep_domain in integration.dependencies:
        # Bug fix: `seen` used to be tracked but never consulted, so a
        # dependency cycle recursed forever and shared dependencies were
        # re-walked repeatedly. Skipping visited domains yields the same
        # result for acyclic graphs while terminating on cycles.
        if dep_domain not in seen:
            reqs.update(gather_recursive_requirements(dep_domain, seen))
    return reqs
def _normalize_package_name(package_name: str) -> str:
"""Normalize a package name."""
# pipdeptree needs lowercase and dash instead of underscore or period as separator
return package_name.lower().replace("_", "-").replace(".", "-")
def normalize_package_name(requirement: str) -> str:
    """Return the normalized package name parsed from a requirement string.

    Returns an empty string when the requirement does not match the pinned
    "name==version" shape. This function is also used in hassfest.
    """
    if (match := PACKAGE_REGEX.search(requirement)) is None:
        return ""
    # pipdeptree needs lowercase and dash instead of underscore or period
    return _normalize_package_name(match.group(1))
def comment_requirement(req: str) -> bool:
    """Return True when *req* must be commented out (not installable everywhere)."""
    normalized = normalize_package_name(req)
    return normalized in EXCLUDED_REQUIREMENTS_ALL
def process_action_requirement(req: str, action: str) -> str:
    """Return *req*, commented out when excluded for the given github action."""
    overrides = OVERRIDDEN_REQUIREMENTS_ACTIONS[action]
    name = normalize_package_name(req)
    # Per-action overrides take precedence over the global exclusion list.
    if name in overrides["exclude"]:
        return f"# {req}"
    if name in overrides["include"]:
        return req
    return f"# {req}" if name in EXCLUDED_REQUIREMENTS_ALL else req
def gather_modules() -> dict[str, list[str]] | None:
    """Collect the requirement -> consuming-modules mapping.

    Returns None (after printing the offenders) when any module failed
    to import.
    """
    requirements: dict[str, list[str]] = {}
    import_errors: list[str] = []
    gather_requirements_from_manifests(import_errors, requirements)
    gather_requirements_from_modules(import_errors, requirements)
    # Sort consumers by dotted-path depth first, then alphabetically.
    for requirement, modules in requirements.items():
        requirements[requirement] = sorted(
            modules, key=lambda name: (len(name.split(".")), name)
        )
    if not import_errors:
        return requirements
    print("******* ERROR")
    print("Errors while importing: ", ", ".join(import_errors))
    return None
def gather_requirements_from_manifests(
    errors: list[str], reqs: dict[str, list[str]]
) -> None:
    """Gather all of the requirements from integration manifests."""
    integrations = Integration.load_dir(Path("homeassistant/components"))
    for domain, integration in sorted(integrations.items()):
        # Disabled integrations contribute no requirements.
        if integration.disabled:
            continue
        process_requirements(
            errors, integration.requirements, f"homeassistant.components.{domain}", reqs
        )
def gather_requirements_from_modules(
    errors: list[str], reqs: dict[str, list[str]]
) -> None:
    """Collect the requirements declared via module-level REQUIREMENTS."""
    packages = explore_module("homeassistant.scripts", True)
    packages += explore_module("homeassistant.auth", True)
    for package in sorted(packages):
        try:
            module = importlib.import_module(package)
        except ImportError as err:
            # Record the failure; gather_modules() reports it later.
            print(f"{package.replace('.', '/')}.py: {err}")
            errors.append(package)
            continue
        requirements = getattr(module, "REQUIREMENTS", None)
        if requirements:
            process_requirements(errors, requirements, package, reqs)
def process_requirements(
    errors: list[str],
    module_requirements: list[str],
    package: str,
    reqs: dict[str, list[str]],
) -> None:
    """Validate each requirement and record which package consumes it.

    Invalid requirements are appended to *errors* but still recorded in
    *reqs*, matching the original behavior.
    """
    for req in module_requirements:
        # URL-based requirements cannot be published to PyPI.
        if "://" in req:
            errors.append(f"{package}[Only pypi dependencies are allowed: {req}]")
        pinned = "==" in req
        if not pinned and req not in IGNORE_PIN:
            errors.append(f"{package}[Please pin requirement {req}, see {URL_PIN}]")
        reqs.setdefault(req, []).append(package)
def generate_requirements_list(reqs: dict[str, list[str]]) -> str:
    """Generate pip-file text from the requirement -> consumers mapping."""
    parts: list[str] = []
    for pkg in sorted(reqs):
        # Each requirement is preceded by a comment per consuming module.
        for consumer in sorted(reqs[pkg]):
            parts.append(f"\n# {consumer}")
        if comment_requirement(pkg):
            parts.append(f"\n# {pkg}\n")
        else:
            parts.append(f"\n{pkg}\n")
    return "".join(parts)
def generate_action_requirements_list(reqs: dict[str, list[str]], action: str) -> str:
    """Generate pip-file text with per-action include/exclude overrides applied."""
    parts: list[str] = []
    for pkg in sorted(reqs):
        for consumer in sorted(reqs[pkg]):
            parts.append(f"\n# {consumer}")
        parts.append(f"\n{process_action_requirement(pkg, action)}\n")
    return "".join(parts)
def requirements_output() -> str:
    """Generate the content of requirements.txt."""
    core = "\n".join(core_requirements())
    return (
        GENERATED_MESSAGE
        + "-c homeassistant/package_constraints.txt\n"
        + "\n"
        + "# Home Assistant Core\n"
        + core
        + "\n"
    )
def requirements_all_output(reqs: dict[str, list[str]]) -> str:
    """Generate the content of requirements_all.txt."""
    header = (
        "# Home Assistant Core, full dependency set\n"
        f"{GENERATED_MESSAGE}"
        "-r requirements.txt\n"
    )
    return header + generate_requirements_list(reqs)
def requirements_all_action_output(reqs: dict[str, list[str]], action: str) -> str:
    """Generate the content of requirements_all_{action}.txt."""
    header = (
        f"# Home Assistant Core, full dependency set for {action}\n"
        f"{GENERATED_MESSAGE}"
        "-r requirements.txt\n"
    )
    return header + generate_action_requirements_list(reqs, action)
def requirements_test_all_output(reqs: dict[str, list[str]]) -> str:
    """Generate the content of requirements_test_all.txt."""

    def wanted(modules: list[str]) -> bool:
        # Keep a requirement when any consumer is a non-integration module,
        # or an integration that actually has tests.
        return any(
            not mdl.startswith("homeassistant.components.") or has_tests(mdl)
            for mdl in modules
        )

    filtered = {req: mods for req, mods in reqs.items() if wanted(mods)}
    return (
        "# Home Assistant tests, full dependency set\n"
        + GENERATED_MESSAGE
        + "-r requirements_test.txt\n"
        + generate_requirements_list(filtered)
    )
def requirements_pre_commit_output() -> str:
    """Generate the content of requirements_test_pre_commit.txt.

    Each pinned pre-commit repo (one with a "rev") contributes its hook ids
    pinned to the repo revision, plus any additional dependencies, except
    hooks listed in IGNORE_PRE_COMMIT_HOOK_ID.
    """
    source = ".pre-commit-config.yaml"
    pre_commit_conf: dict[str, list[dict[str, Any]]]
    pre_commit_conf = load_yaml(source)  # type: ignore[assignment]
    reqs: list[str] = []
    hook: dict[str, Any]
    for repo in (x for x in pre_commit_conf["repos"] if x.get("rev")):
        rev: str = repo["rev"]
        for hook in repo["hooks"]:
            if hook["id"] not in IGNORE_PRE_COMMIT_HOOK_ID:
                # NOTE(review): lstrip strips every leading "v"; assumed fine
                # for tags like "v1.2.3" — confirm no repo uses "vv..." tags.
                reqs.append(f"{hook['id']}=={rev.lstrip('v')}")
                # Fixed: was a redundant generator `x for x in hook.get(...)`.
                reqs.extend(hook.get("additional_dependencies", ()))
    output = [
        f"# Automatically generated "
        f"from {source} by {Path(__file__).name}, do not edit",
        "",
    ]
    output.extend(sorted(reqs))
    return "\n".join(output) + "\n"
def gather_constraints() -> str:
    """Construct the content of the package constraints file."""
    requirements = {
        *core_requirements(),
        *gather_recursive_requirements("default_config"),
        *gather_recursive_requirements("mqtt"),
    }
    # The trailing "" produces a final newline before CONSTRAINT_BASE.
    body = "\n".join([*sorted(requirements, key=str.lower), ""])
    return GENERATED_MESSAGE + body + CONSTRAINT_BASE
def diff_file(filename: str, content: str) -> list[str]:
    """Return a context diff between the file on disk and *content*.

    An empty list means the file is already up to date.
    """

    def as_lines(text: str) -> list[str]:
        # context_diff wants newline-terminated lines.
        return [f"{line}\n" for line in text.split("\n")]

    on_disk = as_lines(Path(filename).read_text())
    generated = as_lines(content)
    return list(difflib.context_diff(on_disk, generated, filename, "generated"))
def main(validate: bool, ci: bool) -> int:
    """Run the script: regenerate (or, with validate, diff) requirement files.

    Returns a process exit code (0 on success, 1 on failure).
    """
    if not os.path.isfile("requirements_all.txt"):
        print("Run this from HA root dir")
        return 1
    data = gather_modules()
    if data is None:
        return 1
    # Generate every output up front, in the original evaluation order.
    reqs_file = requirements_output()
    reqs_all_file = requirements_all_output(data)
    reqs_all_action_files = {
        action: requirements_all_action_output(data, action)
        for action in OVERRIDDEN_REQUIREMENTS_ACTIONS
    }
    reqs_test_all_file = requirements_test_all_output(data)
    # Always calling requirements_pre_commit_output is intentional to ensure
    # the code is called by the pre-commit hooks.
    reqs_pre_commit_file = requirements_pre_commit_output()
    constraints = gather_constraints()
    files: list[tuple[str, str]] = [
        ("requirements.txt", reqs_file),
        ("requirements_all.txt", reqs_all_file),
        ("requirements_test_pre_commit.txt", reqs_pre_commit_file),
        ("requirements_test_all.txt", reqs_test_all_file),
        ("homeassistant/package_constraints.txt", constraints),
    ]
    if ci:
        # Per-action files are only generated/validated in CI mode.
        for action, content in reqs_all_action_files.items():
            files.append((f"requirements_all_{action}.txt", content))
    if validate:
        diffs = [
            "".join(delta)
            for filename, content in files
            if (delta := diff_file(filename, content))
        ]
        if diffs:
            print("ERROR - FOUND THE FOLLOWING DIFFERENCES")
            print()
            print()
            print("\n\n".join(diffs))
            print()
            print("Please run python3 -m script.gen_requirements_all")
            return 1
        return 0
    for filename, content in files:
        Path(filename).write_text(content)
    return 0
if __name__ == "__main__":
    # The last CLI argument selects the mode: "validate" diffs generated
    # content against the files on disk; "ci" additionally generates the
    # per-action requirement files; anything else rewrites files in place.
    _VAL = sys.argv[-1] == "validate"
    _CI = sys.argv[-1] == "ci"
    sys.exit(main(_VAL, _CI))