Diffstat (limited to 'pkgs/servers/x11/xorg/update.py')
-rwxr-xr-x  pkgs/servers/x11/xorg/update.py  116
1 file changed, 116 insertions(+), 0 deletions(-)
diff --git a/pkgs/servers/x11/xorg/update.py b/pkgs/servers/x11/xorg/update.py
new file mode 100755
index 0000000000000..6928837c76fa3
--- /dev/null
+++ b/pkgs/servers/x11/xorg/update.py
@@ -0,0 +1,116 @@
+#!/usr/bin/env nix-shell
+#!nix-shell --pure --keep NIX_PATH -i python3 -p nix git "python3.withPackages (ps: [ ps.packaging ps.beautifulsoup4 ps.requests ])"
+
+# Usage: Run ./update.py from the directory containing tarballs.list. The script checks for the
+# latest versions of all packages, updates the expressions if any update is found, and commits
+# any changes.
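+#
+# Example invocation (from a nixpkgs checkout, using the path of this file):
+#   cd pkgs/servers/x11/xorg && ./update.py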
+
+import subprocess
+
+import requests
+from bs4 import BeautifulSoup
+from packaging import version
+
+mirror = "mirror://xorg/"
+allversions = {}
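+# allversions maps a versionless URL prefix (directory URL + package name) to a tuple of
+# ([parsed versions], {version string: full tarball URL}).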
+
+print("Downloading latest version info...")
+
+# xorg packages
+for component in [
+    "individual/app",
+    "individual/data",
+    "individual/data/xkeyboard-config",
+    "individual/doc",
+    "individual/driver",
+    "individual/font",
+    "individual/lib",
+    "individual/proto",
+    "individual/util",
+    "individual/xcb",
+    "individual/xserver",
+]:
+    url = "https://xorg.freedesktop.org/releases/{}/".format(component)
+    r = requests.get(url)
+    soup = BeautifulSoup(r.text, "html.parser")
+    for a in soup.table.find_all("a"):
+        href = a["href"]
+        if not href.endswith((".tar.bz2", ".tar.gz", ".tar.xz")):
+            continue
+
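+        # Illustrative example: "libXi-1.8.tar.bz2" splits into pname "libXi", ver "1.8", ext "bz2".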
+        pname, rem = href.rsplit("-", 1)
+        ver, _, ext = rem.rsplit(".", 2)
+
+        if "rc" in ver:
+            continue
+
+        entry = allversions.setdefault(f"{mirror}{component}/{pname}", ([], {}))
+        entry[0].append(version.parse(ver))
+        entry[1][ver] = f"{mirror}{component}/{href}"
+
+# luit
+lurl = "https://invisible-mirror.net/archives/luit/"
+r = requests.get(lurl)
+soup = BeautifulSoup(r.text, "html.parser")
+for a in soup.find_all("a"):
+    href = a["href"]
+
+    if not href.endswith(".tgz"):
+        continue
+
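+    # luit tarballs use a single-part ".tgz" extension, e.g. "luit-20190106.tgz" (illustrative name).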
+    pname, rem = href.rsplit("-", 1)
+    ver, _ = rem.rsplit(".", 1)
+
+    entry = allversions.setdefault(f"{lurl}{pname}", ([], {}))
+    entry[0].append(version.parse(ver))
+    entry[1][ver] = f"{lurl}{href}"
+
+print("Finding updated versions...")
+
+with open("./tarballs.list") as f:
+    lines_tarballs = f.readlines()
+
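+# updated_tarballs collects the (possibly rewritten) lines for tarballs.list; changes maps
+# URL prefix -> new URL, and changes_text records "pname: old -> new" lines for the commit message.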
+updated_tarballs = []
+changes = {}
+changes_text = []
+for line in lines_tarballs:
+    line = line.rstrip("\n")
+
+    if any(line.startswith(frag) for frag in [mirror, lurl]):
+        pname, rem = line.rsplit("-", 1)
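+        # mirror URLs carry a two-part extension (.tar.bz2/.tar.gz/.tar.xz), luit URLs a single ".tgz".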
+        if line.startswith(mirror):
+            ver, _, _ = rem.rsplit(".", 2)
+        else:
+            ver, _ = rem.rsplit(".", 1)
+
+        if pname not in allversions:
+            print("# WARNING: no version found for {}".format(pname))
+            continue
+
+        highest = max(allversions[pname][0])
+        if highest > version.parse(ver):
+            line = allversions[pname][1][str(highest)]
+            text = f"{pname.split('/')[-1]}: {ver} -> {highest}"
+            print(f"    Updating {text}")
+            changes[pname] = line
+            changes_text.append(text)
+
+    updated_tarballs.append(line)
+
+if len(changes) == 0:
+    print("No updates found")
+    exit()
+
+print("Updating tarballs.list...")
+
+with open("./tarballs.list", "w") as f:
+    f.writelines(f'{tarball}\n' for tarball in updated_tarballs)
+
+print("Generating updated expr (slow)...")
+
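+# generate-expr-from-tarballs.pl regenerates default.nix from the rewritten tarballs.list.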
+subprocess.run(["./generate-expr-from-tarballs.pl", "tarballs.list"], check=True)
+
+print("Committing...")
+
+subprocess.run(["git", "add", "default.nix", "tarballs.list"], check=True)
+subprocess.run(["git", "commit", "-mxorg.*: update\n\n%s" % "\n".join(changes_text)], check=True)