1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
|
#!/usr/bin/env nix-shell
#!nix-shell -i python3 -p git "python3.withPackages (ps: with ps; [ gitpython packaging beautifulsoup4 pandas lxml ])"
import bs4
import git
import io
import json
import os
import packaging.version
import pandas
import re
import subprocess
import sys
import tempfile
import typing
import urllib.request
# Matches one line of `newvers.sh -v` output, e.g. REVISION="13.2":
# group 1 = variable name, group 2 = quoted value.
_QUERY_VERSION_PATTERN = re.compile('^([A-Z]+)="(.+)"$')
# Directory containing this script; versions.json is read/written here.
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
# Releases older than this are skipped (newvers.sh -v only works on 13+).
MIN_VERSION = packaging.version.Version("13.0.0")
MAIN_BRANCH = "main"
# Release tags look like "release/13.2.0"; VERSION_PATTERN requires
# re.VERBOSE because it contains whitespace and comments.
TAG_PATTERN = re.compile(
f"^release/({packaging.version.VERSION_PATTERN})$", re.IGNORECASE | re.VERBOSE
)
REMOTE = "origin"
# Remote release branches look like "origin/stable/13" or "origin/releng/13.2":
# group 1 = name without remote prefix, group 3 = version component.
BRANCH_PATTERN = re.compile(
f"^{REMOTE}/((stable|releng)/({packaging.version.VERSION_PATTERN}))$",
re.IGNORECASE | re.VERBOSE,
)
def request_supported_refs() -> list[str]:
    """Scrape https://www.freebsd.org/security for the "Branch" column of
    the "Supported FreeBSD Releases" table.

    Returns the branch names (e.g. "releng/13.2") as a list of strings.
    Raises RuntimeError if the page layout changed and the heading cannot
    be found.
    """
    # Looks pretty shady but I think this should work with every version of the page in the last 20 years
    # Raw string: "\d" in a plain literal is an invalid escape sequence
    # (SyntaxWarning on Python 3.12+, slated to become an error).
    r = re.compile(r"^h\d$", re.IGNORECASE)
    soup = bs4.BeautifulSoup(
        urllib.request.urlopen("https://www.freebsd.org/security"), features="lxml"
    )
    header = soup.find(
        lambda tag: r.match(tag.name) is not None
        and tag.text.lower() == "supported freebsd releases"
    )
    if header is None:
        # Fail loudly instead of an opaque AttributeError on None below.
        raise RuntimeError("could not locate 'Supported FreeBSD releases' heading")
    table = header.find_next("table")
    df = pandas.read_html(io.StringIO(table.prettify()))[0]
    return list(df["Branch"])
def query_version(repo: git.Repo) -> dict[str, typing.Any]:
    """Run ``sys/conf/newvers.sh -v`` in *repo* and parse its output.

    Each VAR="value" line becomes a lower-cased key in the returned dict;
    a parsed integer "major" field is derived from "revision".
    Note: this only works on FreeBSD 13 and later.
    """
    script = os.path.join(repo.working_dir, "sys", "conf", "newvers.sh")
    output = subprocess.check_output(["bash", script, "-v"]).decode("utf-8").strip()
    fields: dict[str, typing.Any] = {}
    for raw_line in output.splitlines():
        match = _QUERY_VERSION_PATTERN.match(raw_line)
        if match is not None:
            fields[match[1].lower()] = match[2]
    fields["major"] = packaging.version.parse(fields["revision"]).major
    return fields
def handle_commit(
    repo: git.Repo,
    rev: git.objects.commit.Commit,
    ref_name: str,
    ref_type: str,
    supported_refs: list[str],
    old_versions: dict[str, typing.Any],
) -> dict[str, typing.Any]:
    """Check out *rev* in *repo* and build its versions.json entry.

    If *old_versions* already records the same commit hash for *ref_name*,
    the cached entry is returned unchanged, skipping the expensive
    checkout and `nix hash` run.
    """
    if old_versions.get(ref_name, {}).get("rev", None) == rev.hexsha:
        print(f"{ref_name}: revision still {rev.hexsha}, skipping")
        return old_versions[ref_name]
    repo.git.checkout(rev)
    print(f"{ref_name}: checked out {rev.hexsha}")
    # SRI hash of the checked-out tree, as consumed by Nix fetchers.
    full_hash = (
        subprocess.check_output(["nix", "hash", "path", "--sri", repo.working_dir])
        .decode("utf-8")
        .strip()
    )
    print(f"{ref_name}: hash is {full_hash}")
    version = query_version(repo)
    print(f"{ref_name}: version is {version['version']}")
    return {
        "rev": rev.hexsha,
        "hash": full_hash,
        "ref": ref_name,
        "refType": ref_type,
        "supported": ref_name in supported_refs,
        # Reuse the result computed above — the original called
        # query_version() a second time here, re-running newvers.sh.
        "version": version,
    }
def main() -> None:
    """Refresh versions.json with the hash/version of every supported
    FreeBSD release tag, stable/releng branch, and the main branch.

    Optionally takes an existing src.git checkout as argv[1]; otherwise
    clones https://git.FreeBSD.org/src.git into a temp dir.
    """
    # Normally uses /run/user/*, which is on a tmpfs and too small
    temp_dir = tempfile.TemporaryDirectory(dir="/tmp")
    print(f"Selected temporary directory {temp_dir.name}")
    if len(sys.argv) >= 2:
        # Reuse a pre-existing clone and just fetch updates.
        orig_repo = git.Repo(sys.argv[1])
        print(f"Fetching updates on {orig_repo.git_dir}")
        orig_repo.remote("origin").fetch()
    else:
        print("Cloning source repo")
        orig_repo = git.Repo.clone_from(
            "https://git.FreeBSD.org/src.git", to_path=os.path.join(temp_dir.name, "orig")
        )
    supported_refs = request_supported_refs()
    print(f"Supported refs are: {' '.join(supported_refs)}")
    print("Doing git crimes, do not run `git worktree prune` until after script finishes!")
    # Order below is load-bearing: add the worktree, build the Repo object,
    # THEN delete the .git pointer file.
    workdir = os.path.join(temp_dir.name, "work")
    git.cmd.Git(orig_repo.git_dir).worktree("add", "--orphan", workdir)
    # Have to create object before removing .git otherwise it will complain
    repo = git.Repo(workdir)
    repo.git.set_persistent_git_options(git_dir=repo.git_dir)
    # Remove so that nix hash doesn't see the file
    os.remove(os.path.join(workdir, ".git"))
    print(f"Working in directory {repo.working_dir} with git directory {repo.git_dir}")
    # Previous run's output lets handle_commit skip unchanged revisions.
    try:
        with open(os.path.join(BASE_DIR, "versions.json"), "r") as f:
            old_versions = json.load(f)
    except FileNotFoundError:
        old_versions = dict()
    versions = dict()
    # Release tags: release/<version>, skipping anything older than 13.
    for tag in repo.tags:
        m = TAG_PATTERN.match(tag.name)
        if m is None:
            continue
        version = packaging.version.parse(m[1])
        if version < MIN_VERSION:
            print(f"Skipping old tag {tag.name} ({version})")
            continue
        print(f"Trying tag {tag.name} ({version})")
        result = handle_commit(
            repo, tag.commit, tag.name, "tag", supported_refs, old_versions
        )
        versions[tag.name] = result
    # Remote branches: stable/<v>, releng/<v>, plus the development branch.
    for branch in repo.remote("origin").refs:
        m = BRANCH_PATTERN.match(branch.name)
        if m is not None:
            fullname = m[1]
            version = packaging.version.parse(m[3])
            if version < MIN_VERSION:
                print(f"Skipping old branch {fullname} ({version})")
                continue
            print(f"Trying branch {fullname} ({version})")
        elif branch.name == f"{REMOTE}/{MAIN_BRANCH}":
            fullname = MAIN_BRANCH
            print(f"Trying development branch {fullname}")
        else:
            continue
        result = handle_commit(
            repo, branch.commit, fullname, "branch", supported_refs, old_versions
        )
        versions[fullname] = result
    with open(os.path.join(BASE_DIR, "versions.json"), "w") as out:
        json.dump(versions, out, sort_keys=True, indent=2)
        out.write("\n")
if __name__ == "__main__":
    main()
|