#!/usr/bin/env python3
"""ALHP build master.

Walks the Arch Linux svn2git mirrors, rebuilds eligible packages with
-march-optimized makepkg configurations inside devtools chroots, signs the
results and publishes them into per-march pacman repositories.
"""
import fcntl
import glob
import logging.config
import os
import pathlib
import re
import shutil
import signal
import subprocess
import sys
import time
from multiprocessing import Pool, current_process, JoinableQueue, Lock

import yaml
from humanfriendly import format_timespan
from packaging import version
from packaging.version import LegacyVersion

regex_pkgver = re.compile(r"^_?pkgver\s*=\s*(.+)$", re.MULTILINE)
regex_pkgrel = re.compile(r"^pkgrel\s*=\s*(.+)$", re.MULTILINE)
regex_epoch = re.compile(r"^epoch\s*=\s*(.+)$", re.MULTILINE)
regex_march = re.compile(r"(-march=)(.+?) ", re.MULTILINE)
regex_validkeys = re.compile(r"^validpgpkeys\+?=\((.*?)\)", re.MULTILINE | re.DOTALL)
regex_pkg_repo = re.compile(r"^(.*)-.*-.*-(?:x86_64|any)\.pkg\.tar\.zst(?:\.sig)*$", re.MULTILINE)

fp = None
update_last = time.time()
copy_l = Lock()


def git_clean() -> None:
    # drop all untracked files and build artifacts from the current git checkout
    logging.debug("[GIT] %s", subprocess.run(["git", "clean", "-xdff"], check=False, stdout=subprocess.PIPE,
                                             stderr=subprocess.STDOUT).stdout.decode(errors="ignore"))


def build(pkgbuild: str, repo: str) -> None:
    start_time = time.time()
    name = pathlib.Path(pkgbuild).parts[-4]
    process_name = current_process().name
    logging.info("[%s/%s/%s] Build starting (Queue ~= %s)", process_name, repo, name, q.qsize())

    # generate the march-specific makepkg config if it does not exist yet
    setup_makepkg(repo)

    # import the pgp keys listed in validpgpkeys
    import_keys(pkgbuild)

    # bump pkgrel so our rebuild sorts newer than the upstream package
    increase_pkgrel(pkgbuild)

    # build with devtools; the repo name minus its first segment is the march suffix
    makepkg_conf = os.path.join(config["basedir"]["makepkg"],
                                "makepkg-" + "-".join(repo.split("-")[1:]) + ".conf")
    os.chdir(pathlib.Path(pkgbuild).parent)
    res = subprocess.run(["makechrootpkg", "-c", "-D", config["basedir"]["makepkg"], "-l", process_name,
                          "-r", config["basedir"]["chroot"], "--", "--config", makepkg_conf],
                         stdout=subprocess.PIPE, stderr=subprocess.STDOUT)

    if res.returncode:
        logging.warning("[%s/%s/%s] Build failed. Check repo/logs for more information.",
                        process_name, repo, name)

        # remember the package name so we do not retry a known-bad build
        with open(os.path.join(config["basedir"]["repo"], repo + "_failed.txt"), "a") as f:
            f.write(name + "\n")

        # save the build log
        log_dir = os.path.join(config["basedir"]["repo"], "logs", repo)
        pathlib.Path(log_dir).mkdir(parents=True, exist_ok=True)
        with open(os.path.join(log_dir, name + ".log"), "w") as log:
            log.write(res.stdout.decode(errors="ignore"))

        git_clean()
        return

    # sign all built packages
    pkgs = glob.glob("*.pkg.tar.zst")
    for pkg in pkgs:
        s_res = subprocess.run(["gpg", "--batch", "--detach-sign", pkg],
                               stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        if s_res.returncode:
            logging.error("[%s/%s/%s] Signing failed: %s", process_name, repo, name,
                          s_res.stdout.decode(errors="ignore"))
            git_clean()
            return

    # copy packages and signatures into the repo; copy_l keeps this from racing
    # the shutdown path in __main__
    pkgs.extend(glob.glob("*.pkg.tar.zst.sig"))
    with copy_l:
        for pkg in pkgs:
            logging.debug("[%s/%s/%s] Copy %s to %s", process_name, repo, name, pkg,
                          os.path.join(config["basedir"]["repo"], repo, "os", config["arch"] + "/"))
            shutil.copy2(pkg, os.path.join(config["basedir"]["repo"], repo, "os", config["arch"] + "/"))

    git_clean()
    logging.info("[%s/%s/%s] Build successful (%s)", process_name, repo, name,
                 format_timespan(time.time() - start_time))
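

# For illustration: with assumed config values basedir.makepkg=/srv/alhp/makepkg and
# basedir.chroot=/srv/alhp/chroot (placeholders, not shipped defaults), a build of
# repo "extra-x86-64-v3" in worker ForkPoolWorker-1 runs roughly:
#
#   makechrootpkg -c -D /srv/alhp/makepkg -l ForkPoolWorker-1 \
#       -r /srv/alhp/chroot -- --config /srv/alhp/makepkg/makepkg-x86-64-v3.conf
#
# -c rebuilds the worker's chroot copy from a clean state, -l names the copy after the
# worker so parallel builds do not collide, and -D bind-mounts the makepkg dir so the
# generated config is readable inside the chroot.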


def run_worker() -> None:
    # pool initializer that never returns: each pool process just consumes the shared
    # queue forever, so the Pool is used purely for process management
    os.nice(20)
    while True:
        try:
            build(*q.get(block=True))
        except Exception as e:
            logging.error("Error in worker: %s", e)
        finally:
            q.task_done()
            os.chdir(sys.path[0])


def do_repo_work() -> None:
    # the on-disk repos are the <repo>-<march> directories created by
    # sync_marchs_with_config(), so iterate over the same product here
    for repo in ("{}-{}".format(r, m) for r in config["repos"] for m in config["march"]):
        repo_path = os.path.join(config["basedir"]["repo"], repo, "os", config["arch"])

        # subprocess does not expand shell globs, so collect the packages ourselves
        pkgs = glob.glob(os.path.join(repo_path, "*.zst"))
        if pkgs:
            r_res = subprocess.run(["repo-add", "-s", "-v", "-p", "-n",
                                    os.path.join(repo_path, repo + ".db.tar.xz")] + pkgs,
                                   stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
            logging.debug("[REPO-ADD] %s", r_res.stdout.decode(errors="ignore"))
            if r_res.returncode:
                logging.error("[REPO/%s] Repo action failed: %s", repo, r_res.stdout.decode(errors="ignore"))

        p_res = subprocess.run(["paccache", "-rc", repo_path, "-k", "1"],
                               stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        logging.debug("[PACCACHE] %s", p_res.stdout.decode(errors="ignore"))
        if p_res.returncode:
            logging.error("[REPO/%s] Repo cleanup failed: %s", repo, p_res.stdout.decode(errors="ignore"))

    os.chdir(sys.path[0])


def already_running() -> bool:
    global fp
    # keep the descriptor open for the lifetime of the process so the lock stays held
    fp = os.open("/tmp/alhp.lock", os.O_WRONLY | os.O_CREAT)
    try:
        fcntl.lockf(fp, fcntl.LOCK_EX | fcntl.LOCK_NB)
        return False
    except OSError:
        return True


def find_all_files_for_pkg(name: str, repo: str) -> list:
    pkgs = []
    for root, _, files in os.walk(os.path.join(config["basedir"]["repo"], repo, "os", config["arch"])):
        for file in files:
            res = regex_pkg_repo.match(file)
            if res and res.group(1) == name:
                pkgs.append(os.path.join(root, file))

    # filter signatures in a separate pass: removing from a list while iterating it skips entries
    return [p for p in pkgs if not p.endswith(".sig")]


def get_failed_packages(repo: str) -> list:
    failed_file = os.path.join(config["basedir"]["repo"], repo + "_failed.txt")
    if os.path.exists(failed_file):
        with open(failed_file) as p:
            return p.read().splitlines()
    return []


def setup_chroot() -> None:
    if not os.path.exists(os.path.join(config["basedir"]["chroot"], "root")):
        pathlib.Path(config["basedir"]["chroot"]).mkdir(parents=True, exist_ok=True)
        s = subprocess.run(["mkarchroot", "-C", "/usr/share/devtools/pacman-extra.conf",
                            os.path.join(config["basedir"]["chroot"], "root"), "base-devel"],
                           stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        logging.debug("[MKCHROOT] %s", s.stdout.decode(errors="ignore"))
        if s.returncode:
            logging.fatal("[MKCHROOT] Failed to create root chroot: %s", s.stdout.decode(errors="ignore"))
            sys.exit(2)
    else:
        logging.debug("[NSPAWN] %s", subprocess.run(
            ["arch-nspawn", os.path.join(config["basedir"]["chroot"], "root"), "pacman", "-Syuu", "--noconfirm"],
            stdout=subprocess.PIPE, stderr=subprocess.STDOUT).stdout.decode(errors="ignore"))


def setup_makepkg(repo) -> None:
    makepkg_repo = os.path.join(config["basedir"]["makepkg"],
                                "makepkg-" + "-".join(repo.split("-")[1:]) + ".conf")
    if not os.path.exists(makepkg_repo):
        pathlib.Path(config["basedir"]["makepkg"]).mkdir(parents=True, exist_ok=True)
        shutil.copyfile("makepkg.tmpl", makepkg_repo)

        with open(makepkg_repo) as conf:
            c_all = conf.read()

        c_all = c_all.replace("-mtune=generic", "")
        c_all = c_all.replace("-O2", "-O3")
        # re-add the trailing space the pattern consumed
        c_all = regex_march.sub(r"\1" + "-".join(repo.split("-")[1:]) + " ", c_all)

        with open(makepkg_repo, "w") as conf:
            conf.write(c_all)
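

# For illustration, assuming makepkg.tmpl carries the stock Arch flags line
#   CFLAGS="-march=x86-64 -mtune=generic -O2 -pipe ..."
# setup_makepkg("extra-x86-64-v3") rewrites it to roughly
#   CFLAGS="-march=x86-64-v3 -O3 -pipe ..."
# dropping -mtune=generic, raising -O2 to -O3 and swapping the -march value for the
# repo's march suffix.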
"--recv-keys", k], stdout=subprocess.PIPE, stderr=subprocess.STDOUT) logging.debug("[GPG] %s", s.stdout.decode(errors='ignore')) if s.returncode: logging.warning("[GPG] Import of key %s failed: %s", k, s.stdout.decode(errors="ignore")) return False else: logging.info("[GPG] Imported key %s", k) return True def package_exists(name, repo) -> bool: pkgs = find_all_files_for_pkg(name, repo) return len(pkgs) > 0 def update_svn2git() -> None: if not os.path.exists(config["basedir"]["upstream"]): pathlib.Path(config["basedir"]["upstream"]).mkdir(parents=True, exist_ok=True) for git_dir, git_url in config["svn2git"].items(): git_path = os.path.join(config["basedir"]["upstream"], git_dir) if not os.path.exists(git_path): logging.debug("[GIT] %s", subprocess.run(["git", "clone", "--depth=1", git_url, git_path], check=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT).stdout.decode( errors="ignore")) else: os.chdir(git_path) logging.debug("[GIT] %s", subprocess.run(["git", "clean", "-xdff"], check=False, stdout=subprocess.PIPE, stderr=subprocess.STDOUT).stdout.decode(errors="ignore")) logging.debug("[GIT] %s", subprocess.run(["git", "reset", "--hard"], check=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT).stdout.decode(errors="ignore")) logging.debug("[GIT] %s", subprocess.run(["git", "pull"], check=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT).stdout.decode(errors="ignore")) os.chdir(sys.path[0]) def parse_pkgbuild(pkgbuild_file) -> LegacyVersion: with open(pkgbuild_file, errors='ignore') as p: pkgbuild_str = p.read() pkgver = regex_pkgver.findall(pkgbuild_str) pkgrel = regex_pkgrel.findall(pkgbuild_str) epoch = regex_epoch.findall(pkgbuild_str) if not pkgver or not pkgrel: logging.warning("[%s] Failed to parse pkgbuild", pkgbuild_file.split("/")[-4]) return version.parse("") if epoch: return LegacyVersion("{}:{}-{}".format(epoch[0], pkgver[0], pkgrel[0])) return LegacyVersion("{}-{}".format(pkgver[0], pkgrel[0])) def increase_pkgrel(pkgbuild_file) -> None: with open(pkgbuild_file, errors='ignore') as p: pkgbuild_str = p.read() pkgbuild_str = regex_pkgrel.sub(r"pkgrel=\1.1", pkgbuild_str) with open(pkgbuild_file, "w") as pkg: pkg.write(pkgbuild_str) def parse_repo(name, repo) -> LegacyVersion: ver_split = find_all_files_for_pkg(name, repo)[0].split("-") return LegacyVersion(ver_split[-3] + "-" + ver_split[-2]) def sync_marchs_with_config() -> None: repos = [] with os.scandir(config["basedir"]["repo"]) as it: entry: os.DirEntry for entry in it: if not entry.name.startswith('logs') and entry.is_dir(): repos.append(entry.name) repo_quota = [] for r, a in ((x, y) for x in config["repos"] for y in config["march"]): repo_quota.append("{}-{}".format(r, a)) logging.info("Repos: %s", repo_quota) repos_create = list(set(repo_quota) - set(repos)) repos_delete = list(set(repos) - set(repo_quota)) for repo in repos_create: logging.debug("Create repo %s: %s", repo, os.path.join(config["basedir"]["repo"], repo, "os/x86_64")) pathlib.Path(os.path.join(config["basedir"]["repo"], repo, "os/x86_64")).mkdir(parents=True, exist_ok=True) setup_makepkg(repo) for repo in repos_delete: logging.debug("Delete repo %s: %s", repo, os.path.join(config["basedir"]["repo"], repo)) shutil.rmtree(os.path.join(config["basedir"]["repo"], repo)) os.remove(os.path.join(config["basedir"]["makepkg"], "makepkg-" + repo + ".conf")) def fill_queue() -> None: all_pkgbuild = [] for git_dir, git_url in config["svn2git"].items(): all_pkgbuild.extend( glob.glob(os.path.join(config["basedir"]["upstream"], git_dir) + 
"/**/PKGBUILD", recursive=True)) to_delete = [] for pkgbuild in all_pkgbuild: path_split = pkgbuild.split("/") # ignore pkgbuild if in trunk, -any package, not in repos, or on blacklist if path_split[-2] == "trunk" or path_split[-2].split("-")[0] not in config["repos"] or "any" in path_split[-2] \ or path_split[-4] in config["blacklist"] or "i686" in path_split[-2]: to_delete.append(pkgbuild) final_pkgbuilds = list(set(all_pkgbuild) - set(to_delete)) for pkgb in final_pkgbuilds: for march in config["march"]: path_split = pkgb.split("/") name = path_split[-4] repo = path_split[-2].split("-")[0] + "-" + march if name in get_failed_packages(repo): logging.info("[%s/%s] Skipped due to failing build", repo, name) continue if package_exists(name, repo): logging.debug("[SEMVER] Comparing %s=%s - %s=%s", name, parse_repo(name, repo), name, parse_pkgbuild(pkgb)) if not package_exists(name, repo): q.put((pkgb, repo)) logging.info("[%s/%s] Build queued (package not build yet)", repo, name) elif parse_repo(name, repo) < parse_pkgbuild(pkgb): q.put((pkgb, repo)) logging.info("[%s/%s] Build queued (new version available %s < %s)", repo, name, parse_repo(name, repo), parse_pkgbuild(pkgb)) logging.info("Build queue size: %s", q.qsize()) if __name__ == '__main__': with open("config.yaml") as c: config = yaml.safe_load(c) logging.config.dictConfig(config["logging"]) logging.getLogger("ALHP") if already_running(): logging.error("Another instance is already running") sys.exit(2) if not os.path.exists(config["basedir"]["repo"]): pathlib.Path(config["basedir"]["repo"]).mkdir(parents=True, exist_ok=True) os.nice(5) setup_chroot() sync_marchs_with_config() update_svn2git() q = JoinableQueue() with Pool(config["build"]["worker"], initializer=run_worker) as pool: fill_queue() signal.signal(signal.SIGINT, signal.default_int_handler) while True: try: if time.time() - update_last > 900 and q.empty(): logging.info("[SVN2GIT] Waiting for queue to finish...") q.join() update_last = time.time() update_svn2git() setup_chroot() fill_queue() if q.qsize() > 0: logging.info("[SVN2GIT] New Queue size: %d", q.qsize()) else: time.sleep(300) do_repo_work() except KeyboardInterrupt: with copy_l: pool.close() pool.terminate() q.close() do_repo_work() sys.exit(0)