about summary refs log tree commit diff
path: root/pkgs/servers/x11/xorg/update.py
blob: 6928837c76fa3f4c24f601ba8740ff2fd8671608 (plain) (blame)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
#!/usr/bin/env nix-shell
#!nix-shell --pure --keep NIX_PATH -i python3 -p nix git "python3.withPackages (ps: [ ps.packaging ps.beautifulsoup4 ps.requests ])"

# Usage: Run ./update.py from the directory containing tarballs.list. The script checks for the
# latest versions of all packages, updates the expressions if any update is found, and commits
# any changes.

import subprocess

import requests
from bs4 import BeautifulSoup
from packaging import version

mirror = "mirror://xorg/"

# Maps "<mirror-prefix>/<pname>" -> ([parsed versions], {raw version string -> tarball URL}).
# The raw string is kept alongside the parsed version because packaging may
# normalize version strings, and we need the exact string to build URLs.
allversions = {}

print("Downloading latest version info...")

# xorg packages: scrape each release directory index for tarball links.
for component in [
    "individual/app",
    "individual/data",
    "individual/data/xkeyboard-config",
    "individual/doc",
    "individual/driver",
    "individual/font",
    "individual/lib",
    "individual/proto",
    "individual/util",
    "individual/xcb",
    "individual/xserver",
]:
    url = f"https://xorg.freedesktop.org/releases/{component}/"
    r = requests.get(url)
    # Fail loudly on HTTP errors rather than silently parsing an error page
    # (which would look like an empty release index).
    r.raise_for_status()
    soup = BeautifulSoup(r.text, "html.parser")
    for a in soup.table.find_all("a"):
        href = a["href"]
        if not href.endswith((".tar.bz2", ".tar.gz", ".tar.xz")):
            continue

        # e.g. "libX11-1.8.7.tar.xz" -> pname "libX11", ver "1.8.7"
        pname, rem = href.rsplit("-", 1)
        ver, _, ext = rem.rsplit(".", 2)

        # Skip release candidates.
        if "rc" in ver:
            continue

        entry = allversions.setdefault(f"{mirror}{component}/{pname}", ([], {}))
        entry[0].append(version.parse(ver))
        entry[1][ver] = f"{mirror}{component}/{href}"

# luit: scrape the invisible-island mirror index; releases are ".tgz" files.
lurl = "https://invisible-mirror.net/archives/luit/"
r = requests.get(lurl)
# Fail loudly on HTTP errors, consistent with the xorg scrape above.
r.raise_for_status()
soup = BeautifulSoup(r.text, "html.parser")
for a in soup.find_all("a"):
    href = a["href"]

    if not href.endswith(".tgz"):
        continue

    # e.g. "luit-20240102.tgz" -> pname "luit", ver "20240102"
    pname, rem = href.rsplit("-", 1)
    ver, _ = rem.rsplit(".", 1)

    entry = allversions.setdefault(f"{lurl}{pname}", ([], {}))
    entry[0].append(version.parse(ver))
    entry[1][ver] = f"{lurl}{href}"

print("Finding updated versions...")

with open("./tarballs.list") as f:
    lines_tarballs = f.readlines()

updated_tarballs = []   # every line of tarballs.list, with updated URLs substituted
changes = {}            # pname key -> new tarball URL, only for packages that changed
changes_text = []       # human-readable "pname: old -> new" lines for the commit message
for line in lines_tarballs:
    line = line.rstrip("\n")

    # Only lines pointing at one of the mirrors scraped above can be updated;
    # everything else is passed through unchanged.
    if any(line.startswith(frag) for frag in [mirror, lurl]):
        pname, rem = line.rsplit("-", 1)
        if line.startswith(mirror):
            ver, _, _ = rem.rsplit(".", 2)   # strip ".tar.<ext>"
        else:
            ver, _ = rem.rsplit(".", 1)      # luit tarballs end in ".tgz"

        if pname not in allversions:
            print(f"# WARNING: no version found for {pname}")
            continue

        # Pick the *raw* version string whose parsed form is highest.  Looking the
        # URL up by str(parsed) would raise KeyError whenever packaging normalizes
        # the string (e.g. "1.02" parses and re-renders as "1.2"), since the URL
        # dict is keyed by the raw string taken from the href.
        versions_by_raw = allversions[pname][1]
        highest_raw = max(versions_by_raw, key=version.parse)
        if version.parse(highest_raw) > version.parse(ver):
            line = versions_by_raw[highest_raw]
            text = f"{pname.split('/')[-1]}: {ver} -> {highest_raw}"
            print(f"    Updating {text}")
            changes[pname] = line
            changes_text.append(text)

    updated_tarballs.append(line)

if not changes:
    print("No updates found")
    # raise SystemExit instead of exit(): the latter is an interactive helper
    # injected by the site module and is absent under `python -S`.
    raise SystemExit

print("Updating tarballs.list...")

with open("./tarballs.list", "w") as f:
    f.writelines(f"{tarball}\n" for tarball in updated_tarballs)

print("Generating updated expr (slow)...")

subprocess.run(["./generate-expr-from-tarballs.pl", "tarballs.list"], check=True)

print("Committing...")

subprocess.run(["git", "add", "default.nix", "tarballs.list"], check=True)
# Commit title plus one "pname: old -> new" line per updated package.
commit_message = "xorg.*: update\n\n" + "\n".join(changes_text)
subprocess.run(["git", "commit", "-m", commit_message], check=True)