import fcntl
import glob
import logging.config
import os
import pathlib
import re
import shutil
import subprocess
import sys
import time
from queue import Queue, Empty

import semver
import yaml

regex_pkgver = re.compile(r"^pkgver\s*=\s*(.+)$", re.MULTILINE)
regex_pkgrel = re.compile(r"^pkgrel\s*=\s*(.+)$", re.MULTILINE)
regex_march = re.compile(r"(-march=)(.+?) ", re.MULTILINE)
regex_validkeys = re.compile(r"^validpgpkeys\+?=\((.*?)\)", re.MULTILINE | re.DOTALL)

fp = None
q = Queue()
update_last = time.time()


def already_running():
    # Take an exclusive, non-blocking lock on /tmp/alhp.lock; if another
    # instance already holds it, lockf() raises and we report True.
    global fp
    fp = os.open("/tmp/alhp.lock", os.O_WRONLY | os.O_CREAT)
    try:
        fcntl.lockf(fp, fcntl.LOCK_EX | fcntl.LOCK_NB)
        return False
    except OSError:
        return True


def find_all_files_for_pkg(name, repo):
    # Built packages are copied flat into <repo>/os/<arch>/, so match on the
    # "name-pkgver-pkgrel-arch" filename prefix there.
    searchpath = os.path.join(config["basedir"]["repo"], repo, "os", config["arch"]) + "/" + name + "-*.pkg.*"
    # logging.debug("Search for packages with %s", searchpath)
    pkgs = glob.glob(searchpath)
    return pkgs


def build(pkgbuild, repo):
    start_time = time.time()
    name = pathlib.Path(pkgbuild).parts[-4]
    logging.info("[%s/%s] Build starting", repo, name)

    # set up build flags for this repo's -march target
    setup_makepkg(repo)

    # import PGP keys listed in the PKGBUILD
    import_keys(pkgbuild)

    # build with devtools
    os.chdir(pathlib.Path(pkgbuild).parent)
    res = subprocess.run(["sudo", "extra-x86_64-build"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)

    if res.returncode:
        logging.warning("[%s/%s] Build failed: %s", repo, name, res)

        # record the package name in the per-repo failed list
        with open(os.path.join(config["basedir"]["repo"], repo + "_failed.txt"), "a") as f:
            f.write(name + "\n")

        # write the build log to file
        os.makedirs(os.path.join(config["basedir"]["repo"], "logs", repo), exist_ok=True)
        with open(os.path.join(config["basedir"]["repo"], "logs", repo, name + ".log"), "w") as logfile:
            logfile.write(res.stdout.decode())

        subprocess.run(["git", "clean", "-xdf"], check=True, capture_output=True)
        os.chdir(sys.path[0])
        return

    # sign the freshly built packages
    pkgs = glob.glob("*.pkg.tar.zst")
    for pkg in pkgs:
        s_res = subprocess.run(["gpg", "--batch", "--detach-sign", pkg], capture_output=True)
        if s_res.returncode:
            logging.error("[%s/%s] Signing failed: %s", repo, name, s_res)
            subprocess.run(["git", "clean", "-xdf"], check=True, capture_output=True)
            os.chdir(sys.path[0])
            return

    # copy packages and signatures into the repo tree
    pkgs.extend(glob.glob("*.pkg.tar.zst.sig"))
    for pkg in pkgs:
        logging.debug("[%s/%s] Copy %s to %s", repo, name, pkg,
                      os.path.join(config["basedir"]["repo"], repo, "os", config["arch"] + "/"))
        shutil.copy2(pkg, os.path.join(config["basedir"]["repo"], repo, "os", config["arch"] + "/"))

    # add the built package to the repo database
    r_res = subprocess.run(["repo-add", "-s", "-v",
                            os.path.join(config["basedir"]["repo"], repo, "os", config["arch"],
                                         repo + ".db.tar.xz"),
                            pkgs[0]], capture_output=True)
    if r_res.returncode:
        logging.error("[%s/%s] Repo action failed: %s", repo, name, r_res)
        subprocess.run(["git", "clean", "-xdf"], check=True, capture_output=True)
        os.chdir(sys.path[0])
        return

    # drop all but the most recent package version from the repo dir
    p_res = subprocess.run(
        ["paccache", "-rc", os.path.join(config["basedir"]["repo"], repo, "os", config["arch"]), "-k", "1"],
        capture_output=True)
    if p_res.returncode:
        logging.error("[%s/%s] Repo cleanup failed: %s", repo, name, p_res)
        subprocess.run(["git", "clean", "-xdf"], check=True, capture_output=True)
        os.chdir(sys.path[0])
        return

    # cleanup
    subprocess.run(["git", "clean", "-xdf"], check=True, capture_output=True)
    os.chdir(sys.path[0])
    logging.info("[%s/%s] Build successful (%.2fs)", repo, name, time.time() - start_time)
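
# The helper below is an illustrative sketch (it is not called anywhere in the
# pipeline) of the flag rewriting that setup_makepkg() performs on
# makepkg.conf. The sample CFLAGS line and the "extra-znver1" repo name are
# assumptions for demonstration only.
def _demo_march_rewrite():
    sample = 'CFLAGS="-march=x86-64 -mtune=generic -O2 -pipe"\n'
    rewritten = sample.replace("-mtune=generic", "").replace("-O2", "-O3")
    # regex_march swaps the value after -march= for the repo's march suffix
    rewritten = regex_march.sub(r"\1" + "extra-znver1".split("-")[1] + " ", rewritten)
    return rewritten  # 'CFLAGS="-march=znver1  -O3 -pipe"\n'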
def setup_makepkg(repo):
    # Rewrite makepkg.conf for the target march: drop -mtune=generic, raise
    # -O2 to -O3, and point -march at the repo's march suffix
    # (e.g. "extra-znver1" -> -march=znver1).
    with open(config["basedir"]["makepkg"]) as conf:
        c_all = conf.read()

    c_all = c_all.replace("-mtune=generic", "")
    c_all = c_all.replace("-O2", "-O3")
    c_all = regex_march.sub(r"\1" + repo.split("-")[1] + " ", c_all)

    with open(config["basedir"]["makepkg"], "w") as conf:
        conf.write(c_all)


def import_keys(pkgbuild):
    # Collect all 40-character fingerprints from validpgpkeys arrays and
    # fetch them from the keyserver.
    with open(pkgbuild) as pkgb:
        keys_s = regex_validkeys.findall(pkgb.read())

    if keys_s:
        keys = []
        for k in keys_s:
            keys.extend(k.split(" "))

        for k in keys:
            k = k.strip().replace("'", "").replace("\"", "")
            if len(k) == 40:
                logging.debug(subprocess.run(
                    ["gpg", "--keyserver", "keyserver.ubuntu.com", "--recv-keys", k],
                    capture_output=True))
                logging.info("[GPG] Imported key %s", k)


def package_exists(name, repo):
    pkgs = find_all_files_for_pkg(name, repo)
    return len(pkgs) > 0


def update_git2svn():
    # Clone the svntogit mirror on first run, otherwise clean and pull it.
    if not os.path.exists(config["basedir"]["svn2git"]):
        logging.debug(subprocess.run(
            ["git", "clone", "https://github.com/archlinux/svntogit-packages.git", config["basedir"]["svn2git"]],
            check=True, capture_output=True))
    else:
        os.chdir(config["basedir"]["svn2git"])
        logging.debug(subprocess.run(["git", "clean", "-xdf"], check=True, capture_output=True))
        logging.debug(subprocess.run(["git", "pull"], check=True, capture_output=True))
        os.chdir("..")


def parse_pkgbuild(pkgbuild_file):
    with open(pkgbuild_file) as p:
        pkgbuild_str = p.read()

    # findall() returns lists; take the first (and only expected) match each
    pkgver = regex_pkgver.findall(pkgbuild_str)[0]
    pkgrel = regex_pkgrel.findall(pkgbuild_str)[0]
    return semver.VersionInfo.parse("{}-{}".format(pkgver, pkgrel))


def parse_repo(name, repo):
    # Package filenames look like name-pkgver-pkgrel-arch.pkg.tar.zst, so the
    # version is in the third- and second-to-last dash-separated fields.
    ver_split = find_all_files_for_pkg(name, repo)[0].split("-")
    return semver.VersionInfo.parse(ver_split[-3] + "-" + ver_split[-2])


def sync_marchs_with_config():
    # Compare the repo dirs on disk against the repo/march combinations from
    # the config, creating missing ones and deleting stale ones. The logs dir
    # lives under the same basedir and must never be treated as a repo.
    repos = [dI for dI in os.listdir(config["basedir"]["repo"])
             if dI != "logs" and os.path.isdir(os.path.join(config["basedir"]["repo"], dI))]

    repo_quota = []
    for r, a in ((x, y) for x in config["repos"] for y in config["march"]):
        repo_quota.append("{}-{}".format(r, a))

    logging.info("Repos: %s", repo_quota)

    repos_create = list(set(repo_quota) - set(repos))
    repos_delete = list(set(repos) - set(repo_quota))

    for repo in repos_create:
        logging.debug("Create repo %s: %s", repo, os.path.join(config["basedir"]["repo"], repo, "os/x86_64"))
        pathlib.Path(os.path.join(config["basedir"]["repo"], repo, "os/x86_64")).mkdir(parents=True, exist_ok=True)

    for repo in repos_delete:
        logging.debug("Delete repo %s: %s", repo, os.path.join(config["basedir"]["repo"], repo))
        shutil.rmtree(os.path.join(config["basedir"]["repo"], repo))


def fill_queue():
    all_pkgbuild = glob.glob(os.path.join(config["basedir"]["svn2git"]) + "/**/PKGBUILD", recursive=True)
    to_delete = []

    for pkgbuild in all_pkgbuild:
        path_split = pkgbuild.split("/")
        # ignore pkgbuild if in trunk, not in our repos, an -any package, or on the blacklist
        if path_split[-2] == "trunk" or path_split[-2].split("-")[0] not in config["repos"] \
                or "any" in path_split[-2] or path_split[-4] in config["blacklist"]:
            to_delete.append(pkgbuild)

    final_pkgbuilds = list(set(all_pkgbuild) - set(to_delete))

    for pkgb in final_pkgbuilds:
        for march in config["march"]:
            # negative indices keep this working for basedirs with more than
            # one path component (paths are <base>/<name>/repos/<repo-arch>/PKGBUILD)
            path_split = pkgb.split("/")
            name = path_split[-4]
            repo = path_split[-2].split("-")[0] + "-" + march

            if not package_exists(name, repo):
                q.put((pkgb, repo))
                logging.debug("[%s/%s] Build queued (package not built yet)", repo, name)
            elif parse_repo(name, repo) < parse_pkgbuild(pkgb):
                q.put((pkgb, repo))
                logging.debug("[%s/%s] Build queued (new version available %s < %s)", repo, name,
                              parse_repo(name, repo), parse_pkgbuild(pkgb))

    logging.info("Queue size after fill: %s", q.qsize())
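
# Illustrative sketch (not called anywhere): how the parse_pkgbuild() and
# parse_repo() results compare once pkgver and pkgrel are glued into a semver
# string. The version numbers here are made up; note that the pkgrel lands in
# semver's prerelease slot, and that pkgvers which are not valid semver
# (e.g. a plain date like "20210501") make VersionInfo.parse() raise ValueError.
def _demo_version_compare():
    built = semver.VersionInfo.parse("5.12.1-1")     # version currently in the repo
    pkgbuild = semver.VersionInfo.parse("5.12.1-2")  # version in the PKGBUILD
    return built < pkgbuild  # True: a pkgrel bump alone triggers a rebuild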
open("config.yaml") as c: config = yaml.safe_load(c) logging.config.dictConfig(config["logging"]) logging.getLogger("ALHP") if already_running(): logging.error("Another instance is already running") sys.exit(2) if not os.path.exists(config["basedir"]["repo"]): pathlib.Path(config["basedir"]["repo"]).mkdir(parents=True, exist_ok=True) sync_marchs_with_config() update_git2svn() fill_queue() while True: if q.qsize() > 0: try: build(*q.get_nowait()) except Empty: pass else: time.sleep(60) if time.time() - update_last > 900: update_git2svn() update_last = time.time() fill_queue()