home-assistant-core/script/gen_requirements_all.py

#!/usr/bin/env python3
"""Generate updated constraint and requirements files."""
from __future__ import annotations
import difflib
import importlib
from operator import itemgetter
import os
from pathlib import Path
import pkgutil
import re
import sys
import tomllib
from typing import Any
from homeassistant.util.yaml.loader import load_yaml
from script.hassfest.model import Integration
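# Requirements that are written as comments in the generated requirements files
# because they do not install cleanly on all supported systems.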
COMMENT_REQUIREMENTS = (
"Adafruit-BBIO",
"atenpdu", # depends on pysnmp which is not maintained at this time
"avea", # depends on bluepy
"avion",
"beacontools",
"beewi-smartclim", # depends on bluepy
"bluepy",
"decora",
"decora-wifi",
"evdev",
"face-recognition",
"opencv-python-headless",
"pybluez",
"pycocotools",
"pycups",
"python-gammu",
"python-lirc",
"pyuserinput",
"tensorflow",
"tf-models-official",
)
COMMENT_REQUIREMENTS_NORMALIZED = {
commented.lower().replace("_", "-") for commented in COMMENT_REQUIREMENTS
}
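# Requirements that may stay unpinned without triggering the "please pin" error
# in process_requirements().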
IGNORE_PIN = ("colorlog>2.1,<3", "urllib3")
URL_PIN = (
"https://developers.home-assistant.io/docs/"
"creating_platform_code_review.html#1-requirements"
)
CONSTRAINT_PATH = os.path.join(
os.path.dirname(__file__), "../homeassistant/package_constraints.txt"
)
CONSTRAINT_BASE = """
# Constrain pycryptodome to avoid a vulnerability
# see https://github.com/home-assistant/core/pull/16238
pycryptodome>=3.6.6
# Constrain httplib2 to protect against GHSA-93xj-8mrv-444m
# https://github.com/advisories/GHSA-93xj-8mrv-444m
httplib2>=0.19.0
# gRPC is an implicit dependency that we want to make explicit so we manage
# upgrades intentionally. It is a large package to build from source and we
# want to ensure we have wheels built.
grpcio==1.59.0
grpcio-status==1.59.0
grpcio-reflection==1.59.0
# libcst >=0.4.0 requires a newer Rust than we currently have available,
# thus our wheel builds fail. This pins it to the last working version,
# which at this point satisfies our needs.
libcst==0.3.23
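# Pinning a package to the bogus version 1000000000.0.0 makes it impossible for
# pip to resolve it, effectively blocking the package from ever being installed.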
# This is an old unmaintained library and is replaced with pycryptodome
pycrypto==1000000000.0.0
# This is an old unmaintained library and is replaced with faust-cchardet
cchardet==1000000000.0.0
# To remove reliance on typing
btlewrap>=0.0.10
# These override built-in Python modules
enum34==1000000000.0.0
typing==1000000000.0.0
uuid==1000000000.0.0
# regex causes segfault with version 2021.8.27
# https://bitbucket.org/mrabarnett/mrab-regex/issues/421/2021827-results-in-fatal-python-error
# This is fixed in 2021.8.28
regex==2021.8.28
# httpx requires httpcore, and httpcore requires anyio and h11, but the version constraints on
# these requirements are quite loose. As the entire stack has some outstanding issues, and
# even newer versions seem to introduce new issues, it's useful for us to pin all these
# requirements so we can directly link HA versions to these library versions.
anyio==4.1.0
h11==0.14.0
httpcore==1.0.2
# Ensure we have a hyperframe version that works in Python 3.10
# 5.2.0 fixed a collections abc deprecation
hyperframe>=5.2.0
# Ensure we stay compatible with the musllinux build env
numpy==1.26.0
# Prevent dependency conflicts between sisyphus-control and aioambient
# until upper bounds for sisyphus-control have been updated
# https://github.com/jkeljo/sisyphus-control/issues/6
python-engineio>=3.13.1,<4.0
python-socketio>=4.6.0,<5.0
# Constrain multidict to avoid typing issues
# https://github.com/home-assistant/core/pull/67046
multidict>=6.0.2
# Version 2.0 added typing, prevent accidental fallbacks
backoff>=2.0
# Required to avoid breakage (#101042).
# v2 has breaking changes (#99218).
pydantic==1.10.12
# Breaks asyncio
# https://github.com/pubnub/python/issues/130
pubnub!=6.4.0
# Package's __init__.pyi stub has invalid syntax and breaks mypy
# https://github.com/dahlia/iso4217/issues/16
iso4217!=1.10.20220401
# Matplotlib 3.6.2 has issues building wheels on armhf/armv7
# We need at least >=2.1.0 (tensorflow integration -> pycocotools)
matplotlib==3.6.1
# pyOpenSSL 24.0.0 or later required to avoid import errors when
# cryptography 42.0.0 is installed with botocore
pyOpenSSL>=24.0.0
# protobuf must be in package constraints for the wheel
# builder to build binary wheels
protobuf==4.25.1
# faust-cchardet: Ensure we have a version we can build wheels for
# 2.1.18 is the first version that works with our wheel builder
faust-cchardet>=2.1.18
# websockets 11.0 is missing files in the source distribution
# which break wheel builds so we need at least 11.0.1
# https://github.com/aaugustin/websockets/issues/1329
websockets>=11.0.1
# pyasn1 0.5.0 has breaking changes which cause pysnmplib to fail.
# Until they are resolved, we need to pin pyasn1 to 0.4.8 and
# pysnmplib to 5.0.21 to avoid the issue.
# https://github.com/pyasn1/pyasn1/pull/30#issuecomment-1517564335
# https://github.com/pysnmp/pysnmp/issues/51
pyasn1==0.4.8
pysnmplib==5.0.21
# pysnmp is no longer maintained and does not work with newer
# Python versions
pysnmp==1000000000.0.0
# The get-mac package has been replaced with getmac. Installing get-mac alongside getmac
# breaks getmac because both ship a top-level Python package named 'getmac'.
get-mac==1000000000.0.0
# We want to skip the binary wheels for the 'charset-normalizer' package.
# They are built with mypyc, which causes issues with our wheel builder.
# To do so, we need to constrain the version.
charset-normalizer==3.2.0
# dacite: Ensure we have a version that is able to handle type unions for
# Roborock, NAM, Brother, and GIOS.
dacite>=1.7.0
# Musl wheels for pandas 2.2.0 cannot be built for any architecture.
pandas==2.1.4
# chacha20poly1305-reuseable==0.12.0 is incompatible with cryptography==42.0.x
chacha20poly1305-reuseable>=0.12.1
"""
GENERATED_MESSAGE = (
f"# Automatically generated by {Path(__file__).name}, do not edit\n\n"
)
IGNORE_PRE_COMMIT_HOOK_ID = (
"check-executables-have-shebangs",
"check-json",
"no-commit-to-branch",
"prettier",
"python-typing-update",
"ruff-format", # it's just ruff
)
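# Matches a pinned requirement line such as "some-package==1.0"; an optional leading
# pip option ("--...") is skipped and group 1 captures the raw package name.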
PACKAGE_REGEX = re.compile(r"^(?:--.+\s)?([-_\.\w\d]+).*==.+$")
def has_tests(module: str) -> bool:
"""Test if a module has tests.
Module format: homeassistant.components.hue
Test if exists: tests/components/hue/__init__.py
"""
path = (
Path(module.replace(".", "/").replace("homeassistant", "tests")) / "__init__.py"
)
return path.exists()
def explore_module(package: str, explore_children: bool) -> list[str]:
"""Explore the modules."""
module = importlib.import_module(package)
found: list[str] = []
if not hasattr(module, "__path__"):
return found
for _, name, _ in pkgutil.iter_modules(module.__path__, f"{package}."):
found.append(name)
if explore_children:
found.extend(explore_module(name, False))
return found
def core_requirements() -> list[str]:
"""Gather core requirements out of pyproject.toml."""
with open("pyproject.toml", "rb") as fp:
data = tomllib.load(fp)
dependencies: list[str] = data["project"]["dependencies"]
return dependencies
def gather_recursive_requirements(
domain: str, seen: set[str] | None = None
) -> set[str]:
"""Recursively gather requirements from a module."""
if seen is None:
seen = set()
seen.add(domain)
integration = Integration(Path(f"homeassistant/components/{domain}"))
integration.load_manifest()
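    # Skip requirements that already appear in CONSTRAINT_BASE (note: this is a plain
    # substring check against the constraint text).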
reqs = {x for x in integration.requirements if x not in CONSTRAINT_BASE}
for dep_domain in integration.dependencies:
reqs.update(gather_recursive_requirements(dep_domain, seen))
return reqs
def normalize_package_name(requirement: str) -> str:
"""Return a normalized package name from a requirement string."""
# This function is also used in hassfest.
match = PACKAGE_REGEX.search(requirement)
if not match:
return ""
# pipdeptree needs lowercase and dash instead of underscore as separator
package = match.group(1).lower().replace("_", "-")
return package
def comment_requirement(req: str) -> bool:
"""Comment out requirement. Some don't install on all systems."""
return any(
normalize_package_name(req) == ign for ign in COMMENT_REQUIREMENTS_NORMALIZED
)
def gather_modules() -> dict[str, list[str]] | None:
"""Collect the information."""
reqs: dict[str, list[str]] = {}
errors: list[str] = []
gather_requirements_from_manifests(errors, reqs)
gather_requirements_from_modules(errors, reqs)
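    # Sort the modules attributed to each requirement by dotted-path depth, then by name.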
for key in reqs:
reqs[key] = sorted(reqs[key], key=lambda name: (len(name.split(".")), name))
if errors:
print("******* ERROR")
print("Errors while importing: ", ", ".join(errors))
return None
return reqs
def gather_requirements_from_manifests(
errors: list[str], reqs: dict[str, list[str]]
) -> None:
"""Gather all of the requirements from manifests."""
integrations = Integration.load_dir(Path("homeassistant/components"))
for domain in sorted(integrations):
integration = integrations[domain]
if integration.disabled:
continue
process_requirements(
errors, integration.requirements, f"homeassistant.components.{domain}", reqs
)
def gather_requirements_from_modules(
errors: list[str], reqs: dict[str, list[str]]
) -> None:
"""Collect the requirements from the modules directly."""
for package in sorted(
explore_module("homeassistant.scripts", True)
+ explore_module("homeassistant.auth", True)
):
try:
module = importlib.import_module(package)
except ImportError as err:
print(f"{package.replace('.', '/')}.py: {err}")
errors.append(package)
continue
if getattr(module, "REQUIREMENTS", None):
process_requirements(errors, module.REQUIREMENTS, package, reqs)
def process_requirements(
errors: list[str],
module_requirements: list[str],
package: str,
reqs: dict[str, list[str]],
) -> None:
"""Process all of the requirements."""
for req in module_requirements:
if "://" in req:
errors.append(f"{package}[Only pypi dependencies are allowed: {req}]")
if req.partition("==")[1] == "" and req not in IGNORE_PIN:
errors.append(f"{package}[Please pin requirement {req}, see {URL_PIN}]")
reqs.setdefault(req, []).append(package)
def generate_requirements_list(reqs: dict[str, list[str]]) -> str:
"""Generate a pip file based on requirements."""
output = []
for pkg, requirements in sorted(reqs.items(), key=itemgetter(0)):
for req in sorted(requirements):
output.append(f"\n# {req}")
if comment_requirement(pkg):
output.append(f"\n# {pkg}\n")
else:
output.append(f"\n{pkg}\n")
return "".join(output)
def requirements_output() -> str:
"""Generate output for requirements."""
output = [
GENERATED_MESSAGE,
"-c homeassistant/package_constraints.txt\n",
"\n",
"# Home Assistant Core\n",
]
output.append("\n".join(core_requirements()))
output.append("\n")
return "".join(output)
def requirements_all_output(reqs: dict[str, list[str]]) -> str:
"""Generate output for requirements_all."""
output = [
"# Home Assistant Core, full dependency set\n",
GENERATED_MESSAGE,
"-r requirements.txt\n",
]
output.append(generate_requirements_list(reqs))
return "".join(output)
def requirements_test_all_output(reqs: dict[str, list[str]]) -> str:
"""Generate output for test_requirements."""
output = [
"# Home Assistant tests, full dependency set\n",
GENERATED_MESSAGE,
"-r requirements_test.txt\n",
]
filtered = {
requirement: modules
for requirement, modules in reqs.items()
if any(
# Always install requirements that are not part of integrations
not mdl.startswith("homeassistant.components.")
or
# Install tests for integrations that have tests
has_tests(mdl)
for mdl in modules
)
}
output.append(generate_requirements_list(filtered))
return "".join(output)
def requirements_pre_commit_output() -> str:
"""Generate output for pre-commit dependencies."""
source = ".pre-commit-config.yaml"
pre_commit_conf: dict[str, list[dict[str, Any]]]
pre_commit_conf = load_yaml(source) # type: ignore[assignment]
reqs: list[str] = []
hook: dict[str, Any]
for repo in (x for x in pre_commit_conf["repos"] if x.get("rev")):
rev: str = repo["rev"]
for hook in repo["hooks"]:
if hook["id"] not in IGNORE_PRE_COMMIT_HOOK_ID:
reqs.append(f"{hook['id']}=={rev.lstrip('v')}")
reqs.extend(x for x in hook.get("additional_dependencies", ()))
output = [
f"# Automatically generated "
f"from {source} by {Path(__file__).name}, do not edit",
"",
]
output.extend(sorted(reqs))
return "\n".join(output) + "\n"
def gather_constraints() -> str:
"""Construct output for constraint file."""
return (
GENERATED_MESSAGE
+ "\n".join(
sorted(
{
*core_requirements(),
*gather_recursive_requirements("default_config"),
*gather_recursive_requirements("mqtt"),
},
key=str.lower,
)
+ [""]
)
+ CONSTRAINT_BASE
)
def diff_file(filename: str, content: str) -> list[str]:
"""Diff a file."""
return list(
difflib.context_diff(
[f"{line}\n" for line in Path(filename).read_text().split("\n")],
[f"{line}\n" for line in content.split("\n")],
filename,
"generated",
)
)
def main(validate: bool) -> int:
"""Run the script."""
if not os.path.isfile("requirements_all.txt"):
print("Run this from HA root dir")
return 1
data = gather_modules()
if data is None:
return 1
reqs_file = requirements_output()
reqs_all_file = requirements_all_output(data)
reqs_test_all_file = requirements_test_all_output(data)
reqs_pre_commit_file = requirements_pre_commit_output()
constraints = gather_constraints()
files = (
("requirements.txt", reqs_file),
("requirements_all.txt", reqs_all_file),
("requirements_test_pre_commit.txt", reqs_pre_commit_file),
("requirements_test_all.txt", reqs_test_all_file),
("homeassistant/package_constraints.txt", constraints),
)
if validate:
errors = []
for filename, content in files:
diff = diff_file(filename, content)
if diff:
errors.append("".join(diff))
if errors:
print("ERROR - FOUND THE FOLLOWING DIFFERENCES")
print()
print()
print("\n\n".join(errors))
print()
print("Please run python3 -m script.gen_requirements_all")
return 1
return 0
for filename, content in files:
Path(filename).write_text(content)
return 0
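# Run "python3 -m script.gen_requirements_all" to regenerate the files, or pass
# "validate" as the last argument to only check for differences.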
if __name__ == "__main__":
_VAL = sys.argv[-1] == "validate"
sys.exit(main(_VAL))