| author | Urja (ARMLFS builder)
<urja+armlfs@urja.dev> 2025-12-06 14:14:22 UTC |
| committer | Urja (ARMLFS builder)
<urja+armlfs@urja.dev> 2025-12-06 14:14:22 UTC |
| parent | 3d901456683d2675f91ad3e4498807beccb449f8 |
| .gitignore | +1 | -0 |
| mesa.py | +187 | -0 |
#!/usr/bin/env python3
"""Auto-update the mesa package from the mesa-announce mailing list.

Downloads the current and previous month's mailman archives from
lists.freedesktop.org, parses the release announcements, picks the best
candidate version according to our version policy (see ver_comp), and —
if it beats the version currently in the PKGBUILD — bumps the PKGBUILD
and rebuilds the package for both architectures.
"""

from urllib.request import urlopen, Request
import os
import sys
from subprocess import run, DEVNULL, STDOUT
from datetime import date, timedelta
from tempfile import NamedTemporaryFile
import gzip
import shutil
import mailbox
import email
# NOTE(review): star import — provides at least ssl_ctx, pkgbuild_ver,
# pkgbuild_new_ver and mpkg; confirm against autoupdater_helpers.
from autoupdater_helpers import *

# Exactly how the mailman archive urls spell the months
# (1-based indexing via the None ;) )
months = (None, "January", "February", "March", "April", "May", "June",
          "July", "August", "September", "October", "November", "December")

urlbase = "https://lists.freedesktop.org/archives/mesa-announce/"
user_agent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:145.0) Gecko/20100101 Firefox/145.0"
# Example urls: (current month, prev month)
# https://lists.freedesktop.org/archives/mesa-announce/2025-December.txt
# https://lists.freedesktop.org/archives/mesa-announce/2025-November.txt.gz


def yearmon_fn(y, m):
    """Archive file name for year *y* and 1-based month *m*."""
    return f"{y}-{months[m]}.txt"


def this_month_url():
    """URL of the (still growing, uncompressed) archive for the current month."""
    d = date.today()
    return urlbase + yearmon_fn(d.year, d.month)


def last_month():
    """Return a date that falls inside the previous month."""
    d = date.today()
    # Subtracting the day-of-month lands on the last day of the previous month.
    return d - timedelta(days=d.day)


def last_month_url():
    """URL of the gzipped (finalized) archive for the previous month."""
    lm = last_month()
    return urlbase + yearmon_fn(lm.year, lm.month) + ".gz"


def fetch_store(url):
    """Download *url* into the current directory (if needed) and return a
    binary file object over its decompressed contents.

    Gzipped archives are immutable once published, so an existing local
    copy is reused without hitting the network again.
    """
    _, fn = url.rsplit("/", maxsplit=1)
    if url.endswith(".gz") and os.path.exists(fn):
        return gzip.open(fn, 'rb')
    # aggressive caching that is not appropriate for production use, but
    # helpful in dev to avoid repeatedly poking the internet for no reason:
    # if os.path.exists(fn):
    #     return open(fn, 'rb')
    fnnew = fn + ".new"
    print(f"Downloading {fn}..")
    with open(fnnew, 'wb') as f_store:
        req = Request(url, headers={'User-Agent': user_agent})
        with urlopen(req, context=ssl_ctx()) as f_read:
            shutil.copyfileobj(f_read, f_store)
    # Atomically move the finished download into place.
    os.replace(fnnew, fn)
    if url.endswith(".gz"):
        # Once last month's archive is final (.gz), drop the partial .txt
        # we may have fetched while that month was still the current one.
        old_partial_fn = fn[:-3]
        if os.path.exists(old_partial_fn):
            os.unlink(old_partial_fn)
        return gzip.open(fn, 'rb')
    return open(fn, 'rb')


def get_body(message: email.message.Message, encoding: str = "utf-8") -> str:
    """Extract the plain-text body of a mail message as a string.

    For multipart messages, takes the first text/plain part that is not
    an attachment; otherwise decodes the whole payload.
    """
    body_in_bytes = bytes()
    if message.is_multipart():
        for part in message.walk():
            ctype = part.get_content_type()
            cdispo = str(part.get("Content-Disposition"))
            # Take the first text/plain part that is NOT an attachment.
            if ctype == "text/plain" and "attachment" not in cdispo:
                # get_payload(decode=True) may return None for empty parts.
                body_in_bytes = part.get_payload(decode=True) or b""
                break
    else:
        # not multipart - i.e. plain text, no attachments, fingers crossed
        body_in_bytes = message.get_payload(decode=True) or b""
    return body_in_bytes.decode(encoding)


def fetch_messages(url):
    """Return a list of (subject, body) for every message in the archive at *url*."""
    m = []
    with fetch_store(url) as f:
        # mailbox.mbox wants a path, so spool the (possibly gzip-wrapped)
        # stream out to a temporary file first.
        ftmp = NamedTemporaryFile(delete=False)
        try:
            shutil.copyfileobj(f, ftmp)
            ftmp.close()
            for message in mailbox.mbox(ftmp.name):
                m.append((message['subject'], get_body(message)))
        finally:
            # Remove the spool file even if parsing blows up.
            os.unlink(ftmp.name)
    return m


def parse_ver(vers):
    """Parse 'A.B.C' into an (A, B, C) int tuple, or None if unsuitable.

    Rejects pre-releases ('-rc...'), anything not exactly three components,
    and A.B.0 releases (we never build a .0).
    """
    # Ignore -rc or similar
    if "-" in vers:
        return None
    vparts = vers.split('.')
    # Expect A.B.C
    if len(vparts) != 3:
        return None
    t = tuple(int(x) for x in vparts)
    # Never even consider A.B.0
    if t[2] == 0:
        return None
    return t


def parse_subj_ver(subj):
    """Extract a version tuple from an '[ANNOUNCE] mesa A.B.C' subject, or None."""
    parts = subj.strip().split()
    if len(parts) != 3 or parts[0] != '[ANNOUNCE]' or parts[1] != 'mesa':
        return None
    return parse_ver(parts[2])


def parse_hash_msg(msg):
    """Return the checksum from the first 'SHA256: <sum> ...' line, or None."""
    for line in msg.splitlines():
        if line.startswith('SHA256: '):
            # Indexed split instead of 3-way unpacking: tolerates lines with
            # more (or fewer trailing) tokens than 'SHA256: <sum> <file>'.
            return line.split()[1]
    return None


def ver_comp(a, b):
    """True if version tuple *a* is better than *b* (either may be None).

    This encapsulates a bunch of our version policy:
    like, 25.2.8 > 25.3.1, but 25.3.2 > 25.2.x — yet 26.1.1 > 25.3.1 still
    holds. Basically, we don't want to build a .1 if there's a .2 (or
    bigger), but if we're comparing between .1's we still take the newer.
    """
    # None is never an improvement. Checking *a* first fixes the
    # (None, None) case, which previously returned True and could have
    # triggered an "update" to version None when no announcements parsed.
    if a is None:
        return False
    if b is None:
        return True
    if a[2] == 1 and b[2] == 1:
        # Tuple comparison happens left-to-right the way we want
        return a > b
    # Yes, any .2+ is better than a .1
    if b[2] == 1:
        return True
    # and vice versa
    if a[2] == 1:
        return False
    # Finally, this is not covered by any special case
    return a > b


def ver_str(v):
    """Format a version tuple back into 'A.B.C'."""
    return '.'.join(str(x) for x in v)


def main():
    """Fetch announcements, pick the best version, and update/build if newer."""
    os.chdir("mesa-announce")

    m = fetch_messages(this_month_url())
    m += fetch_messages(last_month_url())

    # (version_tuple, sha256) of the best candidate seen so far.
    best = (None, None)
    for sub, msg in m:
        ver = parse_subj_ver(sub)
        if ver:
            candidate = (ver, parse_hash_msg(msg))
            if ver_comp(candidate[0], best[0]):
                best = candidate

    print("best:", best)

    os.chdir("/sources/base-pkgbuilds/mesa")
    prev_ver = pkgbuild_ver()

    if not ver_comp(best[0], parse_ver(prev_ver)):
        print("No update necessary.")
        sys.exit(0)

    new_ver = ver_str(best[0])
    pkgbuild_new_ver(new_ver, sha256sum=best[1])
    os.chdir("..")

    print(f"Updating {prev_ver} to {new_ver}")
    mpkg("mesa", new_ver)
    mpkg("mesa", new_ver, carch="aarch64")


if __name__ == "__main__":
    main()