|
|
|
@ -15,92 +15,117 @@ |
|
|
|
|
# nixpkgs' python3Packages are searched for appropriate names. |
|
|
|
|
# Then, a Nix attribute set mapping integration name to dependencies is created. |
|
|
|
|
|
|
|
|
|
from io import BytesIO |
|
|
|
|
import json |
|
|
|
|
import pathlib |
|
|
|
|
import os |
|
|
|
|
import pathlib |
|
|
|
|
import re |
|
|
|
|
import subprocess |
|
|
|
|
import sys |
|
|
|
|
import tempfile |
|
|
|
|
import tarfile |
|
|
|
|
import tempfile |
|
|
|
|
from io import BytesIO |
|
|
|
|
from urllib.request import urlopen |
|
|
|
|
|
|
|
|
|
# Prefix under which Home Assistant integrations live in its package namespace.
COMPONENT_PREFIX = "homeassistant.components"

# Nixpkgs attribute set that is searched for matching Python packages.
PKG_SET = "python3Packages"

# If some requirements are matched by multiple python packages,
# the following can be used to choose one of them
PKG_PREFERENCES = {
    # Use python3Packages.youtube-dl-light instead of python3Packages.youtube-dl
    "youtube-dl": "youtube-dl-light",
    "tensorflow-bin": "tensorflow",
    "tensorflowWithoutCuda": "tensorflow",
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def get_version():
    """Return the Home Assistant version string from the sibling default.nix.

    Reads ``default.nix`` next to this script (located via ``sys.argv[0]``)
    and extracts the value of its ``hassVersion`` binding.

    Raises:
        AttributeError: if no ``hassVersion = "...";`` line is present
            (``re.search`` returns ``None``).
    """
    with open(os.path.dirname(sys.argv[0]) + "/default.nix") as f:
        # A version consists of digits, dots, and possibly a "b" (for beta)
        m = re.search('hassVersion = "([\\d\\.b]+)";', f.read())
        return m.group(1)
|
|
|
|
|
|
|
|
|
def parse_components(version="master"):
    """Download the Home Assistant source tree for *version* and return a
    dict mapping integration domain name -> its ``manifest`` dict.

    The release tarball is fetched from GitHub, unpacked into a temporary
    directory, and Home Assistant's own ``script.hassfest.model.Integration``
    helper is used to load every integration manifest.

    Args:
        version: git tag or branch of home-assistant to fetch (default "master").
    """
    components = {}
    with tempfile.TemporaryDirectory() as tmp:
        with urlopen(
            f"https://github.com/home-assistant/home-assistant/archive/{version}.tar.gz"
        ) as response:
            # NOTE(review): extractall on a downloaded archive trusts GitHub
            # not to serve path-traversing member names.
            tarfile.open(fileobj=BytesIO(response.read())).extractall(tmp)
        # Use part of a script from the Home Assistant codebase
        sys.path.append(os.path.join(tmp, f"home-assistant-{version}"))
        from script.hassfest.model import Integration

        integrations = Integration.load_dir(
            pathlib.Path(
                os.path.join(tmp, f"home-assistant-{version}", "homeassistant/components")
            )
        )
        for domain in sorted(integrations):
            integration = integrations[domain]
            components[domain] = integration.manifest
    return components
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Recursively get the requirements of a component and its dependencies
def get_reqs(components, component):
    """Return the set of requirement strings for *component*, including the
    requirements of every (transitive) dependency.

    Args:
        components: dict mapping component name -> manifest dict with
            "requirements" (list of str) and "dependencies" (list of names).
        component: name of the component to resolve.

    Raises:
        KeyError: if *component* or one of its dependencies is missing
            from *components*.
    """
    requirements = set(components[component]["requirements"])
    for dependency in components[component]["dependencies"]:
        requirements.update(get_reqs(components, dependency))
    return requirements
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Store a JSON dump of Nixpkgs' python3Packages
# (the nixpkgs checkout is three directories above this script).
output = subprocess.check_output(
    [
        "nix-env",
        "-f",
        os.path.dirname(sys.argv[0]) + "/../../..",
        "-qa",
        "-A",
        PKG_SET,
        "--json",
    ]
)
# Maps attribute path (e.g. "python3Packages.foo") -> package metadata dict.
packages = json.loads(output)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def name_to_attr_path(req):
    """Map a Python requirement name to its Nixpkgs attribute path.

    Searches the module-level ``packages`` dump for derivations whose name
    matches ``python<ver>-<req>-<version>`` (case-insensitive, "-" and "_"
    interchangeable). Ambiguities are resolved via PKG_PREFERENCES.

    Args:
        req: requirement name, e.g. "python-mpd2".

    Returns:
        The attribute path (e.g. "python3Packages.mpd2") or None if no
        package matches.

    Raises:
        AssertionError: if more than one derivation still matches after
            applying PKG_PREFERENCES.
    """
    attr_paths = set()
    names = [req]
    # E.g. python-mpd2 is actually called python3.6-mpd2
    # instead of python-3.6-python-mpd2 inside Nixpkgs
    if req.startswith("python-") or req.startswith("python_"):
        names.append(req[len("python-") :])
    for name in names:
        # treat "-" and "_" equally
        name = re.sub("[-_]", "[-_]", name)
        pattern = re.compile("^python\\d\\.\\d-{}-\\d".format(name), re.I)
        for attr_path, package in packages.items():
            if pattern.match(package["name"]):
                attr_paths.add(attr_path)
    if len(attr_paths) > 1:
        for to_replace, replacement in PKG_PREFERENCES.items():
            try:
                attr_paths.remove(PKG_SET + "." + to_replace)
                attr_paths.add(PKG_SET + "." + replacement)
            except KeyError:
                # Preference entry not among the candidates; try the next one.
                pass
    # Let's hope there's only one derivation with a matching name
    assert len(attr_paths) <= 1, "{} matches more than one derivation: {}".format(
        req, attr_paths
    )
    if len(attr_paths) == 1:
        return attr_paths.pop()
    else:
        return None
|
|
|
|
|
|
|
|
|
|
|
|
|
|
version = get_version()
print("Generating component-packages.nix for version {}".format(version))
components = parse_components(version=version)
# Maps component name -> list of python3Packages attribute names it needs.
build_inputs = {}
|
|
|
|
for component in sorted(components.keys()): |
|
|
|
@ -110,12 +135,12 @@ for component in sorted(components.keys()): |
|
|
|
|
for req in reqs: |
|
|
|
|
# Some requirements are specified by url, e.g. https://example.org/foobar#xyz==1.0.0 |
|
|
|
|
# Therefore, if there's a "#" in the line, only take the part after it |
|
|
|
|
req = req[req.find('#') + 1:] |
|
|
|
|
name = req.split('==')[0] |
|
|
|
|
req = req[req.find("#") + 1 :] |
|
|
|
|
name = req.split("==")[0] |
|
|
|
|
attr_path = name_to_attr_path(name) |
|
|
|
|
if attr_path is not None: |
|
|
|
|
# Add attribute path without "python3Packages." prefix |
|
|
|
|
attr_paths.append(attr_path[len(PKG_SET + '.'):]) |
|
|
|
|
attr_paths.append(attr_path[len(PKG_SET + ".") :]) |
|
|
|
|
else: |
|
|
|
|
missing_reqs.append(name) |
|
|
|
|
else: |
|
|
|
@ -125,15 +150,15 @@ for component in sorted(components.keys()): |
|
|
|
|
print("Component {} is missing {} dependencies".format(component, n_diff)) |
|
|
|
|
print("missing requirements: {}".format(missing_reqs)) |
|
|
|
|
|
|
|
|
|
# Emit component-packages.nix next to this script: a Nix attrset mapping
# each component to a function selecting its deps from python3Packages.
with open(os.path.dirname(sys.argv[0]) + "/component-packages.nix", "w") as f:
    f.write("# Generated by parse-requirements.py\n")
    f.write("# Do not edit!\n\n")
    f.write("{\n")
    f.write(f'  version = "{version}";\n')
    f.write("  components = {\n")
    for component, attr_paths in build_inputs.items():
        # BUG FIX: the reformatted line dropped the f-string prefix, which
        # would write the literal text "{component}" for every entry.
        f.write(f'    "{component}" = ps: with ps; [ ')
        f.write(" ".join(attr_paths))
        f.write(" ];\n")
    f.write("  };\n")
    f.write("}\n")
|
|
|
|