diff --git a/config.yaml b/config.yaml
index faca850..91022ec 100644
--- a/config.yaml
+++ b/config.yaml
@@ -9,10 +9,10 @@ svn2git:
   upstream-community: "https://github.com/archlinux/svntogit-community.git"
 
 basedir:
-  repo: /tmp/repo/
-  chroot: /tmp/chroot/
-  makepkg: /tmp/makepkg/
-  upstream: /tmp/upstream/
+  repo: /home/harting/repo/
+  chroot: /home/harting/chroot/
+  makepkg: /home/harting/makepkg/
+  upstream: /home/harting/upstream/
 
 march:
   - x86-64-v3
@@ -21,6 +21,9 @@ blacklist:
   - pacman
   - tensorflow
   - tensorflow-cuda
+  - brotli
+  - libarchive
+  - libb2
 
 build:
   worker: 4
diff --git a/master.py b/master.py
index e2f67a9..6e45f06 100644
--- a/master.py
+++ b/master.py
@@ -11,7 +11,7 @@ import signal
 import subprocess
 import sys
 import time
-from multiprocessing import Pool, current_process, JoinableQueue, Lock
+from multiprocessing import Pool, current_process, JoinableQueue, Lock, Manager
 
 import yaml
 from humanfriendly import format_timespan
@@ -26,11 +26,9 @@ regex_validkeys = re.compile(r"^validpgpkeys\+?=\((.*?)\)", re.MULTILINE | re.DO
 fp = None
 update_last = time.time()
 copy_l = Lock()
-to_add = {}
-to_add_l = Lock()
 
 
-def build(pkgbuild, repo) -> None:
+def build(pkgbuild: str, repo: str, todo: dict) -> None:
     start_time = time.time()
     name = pathlib.Path(pkgbuild).parts[-4]
     process_name = current_process().name
@@ -64,7 +62,7 @@ def build(pkgbuild, repo) -> None:
 
         pathlib.Path(os.path.join(config["basedir"]["repo"], "logs", repo)).mkdir(parents=True, exist_ok=True)
         with open(os.path.join(config["basedir"]["repo"], "logs", repo, name + ".log"), "w") as log:
-            log.write(res.stdout.decode())
+            log.write(res.stdout.decode(errors="ignore"))
 
         return
 
@@ -74,7 +72,8 @@ def build(pkgbuild, repo) -> None:
         s_res = subprocess.run(["gpg", "--batch", "--detach-sign", pkg], stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT)
         if s_res.returncode:
-            logging.error("[%s/%s/%s] Signing failed: %s", process_name, repo, name, s_res.stdout.decode())
+            logging.error("[%s/%s/%s] Signing failed: %s", process_name, repo, name,
+                          s_res.stdout.decode(errors="ignore"))
             return
 
     # copying
@@ -86,18 +85,19 @@ def build(pkgbuild, repo) -> None:
         shutil.copy2(pkg, os.path.join(config["basedir"]["repo"], repo, "os", config["arch"] + "/"))
 
     # repo
-    with to_add_l:
-        to_add[repo].extend(glob.glob("*.pkg.tar.zst"))
+    logging.debug("[%s/%s/%s] Adding packages to todo list: %s", process_name, repo, name,
+                  ", ".join(glob.glob("*.pkg.tar.zst")))
+    todo[repo].extend(glob.glob("*.pkg.tar.zst"))
 
     logging.info("[%s/%s/%s] Build successful (%s)", process_name, repo, name,
                  format_timespan(time.time() - start_time))
 
 
-def run_worker() -> None:
+def run_worker(todo: dict) -> None:
     os.nice(20)
     while True:
         try:
-            build(*q.get(block=True))
+            build(*q.get(block=True), todo=todo)
         except Exception as e:
             logging.error("Error in worker: %s", e)
         finally:
@@ -106,25 +106,28 @@ def run_worker() -> None:
 
 
 def do_repo_work():
-    with to_add_l:
-        for repo in to_add:
-            if to_add[repo]:
-                logging.info("Adding to %s: %s", repo, ", ".join(to_add[repo]))
-                args = ["repo-add", "-s", "-v",
-                        os.path.join(config["basedir"]["repo"], repo, "os", config["arch"], repo + ".db.tar.xz")]
-                args.extend(to_add[repo])
-                r_res = subprocess.run(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-                logging.debug("[REPO-ADD] %s", r_res.stdout.decode())
-                if r_res.returncode:
-                    logging.error("[%s] Repo action failed: %s", repo, r_res.stdout.decode())
+    for repo in d:
+        if d[repo]:
+            logging.info("[REPO] Adding %s to %s", ", ".join(d[repo]), repo)
+            os.chdir(os.path.join(config["basedir"]["repo"], repo, "os", config["arch"]))
+            args = ["repo-add", "-s", "-v",
+                    os.path.join(config["basedir"]["repo"], repo, "os", config["arch"], repo + ".db.tar.xz")]
+            args.extend(d[repo])
+            r_res = subprocess.run(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+            logging.debug("[REPO-ADD] %s", r_res.stdout.decode(errors="ignore"))
+            if r_res.returncode:
+                logging.error("[REPO/%s] Repo action failed: %s", repo, r_res.stdout.decode(errors="ignore"))
 
-                p_res = subprocess.run(
-                    ["paccache", "-rc", os.path.join(config["basedir"]["repo"], repo, "os", config["arch"]), "-k", "1"],
-                    stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-                logging.debug("[PACCACHE] %s", p_res.stdout.decode())
-                if p_res.returncode:
-                    logging.error("[%s] Repo cleanup failed: %s", repo, p_res.stdout.decode())
-                to_add[repo].clear()
+            p_res = subprocess.run(
+                ["paccache", "-rc", os.path.join(config["basedir"]["repo"], repo, "os", config["arch"]), "-k", "1"],
+                stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+            logging.debug("[PACCACHE] %s", p_res.stdout.decode(errors="ignore"))
+            if p_res.returncode:
+                logging.error("[REPO/%s] Repo cleanup failed: %s", repo, p_res.stdout.decode(errors="ignore"))
+            d[repo][:] = []
+            os.chdir(sys.path[0])
+        else:
+            logging.debug("[REPO] Nothing to do for %s", repo)
 
 
 def already_running() -> bool:
@@ -160,10 +163,13 @@ def get_failed_packages(repo) -> list:
 def setup_chroot():
     if not os.path.exists(os.path.join(config["basedir"]["chroot"], "root")):
         pathlib.Path(config["basedir"]["chroot"]).mkdir(parents=True, exist_ok=True)
-        logging.debug("[MKCHROOT] %s",
-                      subprocess.run(
-                          ["mkarchroot", os.path.join(config["basedir"]["chroot"], "root"), "base-devel"],
-                          stdout=subprocess.PIPE, stderr=subprocess.STDOUT).stdout.decode(errors='ignore'))
+        s = subprocess.run(["mkarchroot", "-C", "/usr/share/devtools/pacman-extra.conf",
+                            os.path.join(config["basedir"]["chroot"], "root"), "base-devel"],
+                           stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+        logging.debug("[MKCHROOT] %s", s.stdout.decode(errors='ignore'))
+        if s.returncode:
+            logging.fatal("[MKCHROOT] Failed to create root chroot: %s", s.stdout.decode(errors="ignore"))
+            sys.exit(2)
     else:
         logging.debug("[NSPAWN] %s", subprocess.run(
             ["arch-nspawn", os.path.join(config["basedir"]["chroot"], "root"), "pacman", "-Syuu", "--noconfirm"]))
@@ -227,15 +233,16 @@ def update_svn2git() -> None:
 
         if not os.path.exists(git_path):
             logging.debug("[GIT] %s", subprocess.run(["git", "clone", "--depth=1", git_url, git_path], check=True,
-                                                     stdout=subprocess.PIPE, stderr=subprocess.STDOUT).stdout.decode())
+                                                     stdout=subprocess.PIPE, stderr=subprocess.STDOUT).stdout.decode(
+                errors="ignore"))
         else:
             os.chdir(git_path)
             logging.debug("[GIT] %s", subprocess.run(["git", "clean", "-xdf"], check=False, stdout=subprocess.PIPE,
-                                                     stderr=subprocess.STDOUT).stdout.decode())
+                                                     stderr=subprocess.STDOUT).stdout.decode(errors="ignore"))
            logging.debug("[GIT] %s", subprocess.run(["git", "reset", "--hard"], check=True, stdout=subprocess.PIPE,
-                                                     stderr=subprocess.STDOUT).stdout.decode())
+                                                     stderr=subprocess.STDOUT).stdout.decode(errors="ignore"))
             logging.debug("[GIT] %s", subprocess.run(["git", "pull"], check=True, stdout=subprocess.PIPE,
-                                                     stderr=subprocess.STDOUT).stdout.decode())
+                                                     stderr=subprocess.STDOUT).stdout.decode(errors="ignore"))
             os.chdir(sys.path[0])
 
 
@@ -288,7 +295,7 @@ def sync_marchs_with_config() -> None:
     repos_create = list(set(repo_quota) - set(repos))
     repos_delete = list(set(repos) - set(repo_quota))
     for repo in repo_quota:
-        to_add[repo] = []
+        d[repo] = m.list()
 
     for repo in repos_create:
         logging.debug("Create repo %s: %s", repo, os.path.join(config["basedir"]["repo"], repo, "os/x86_64"))
@@ -361,31 +368,34 @@ if __name__ == '__main__':
 
     os.nice(5)
 
-    setup_chroot()
-    sync_marchs_with_config()
-    update_svn2git()
-    q = JoinableQueue()
+    with Manager() as m:
+        d = m.dict()
+        setup_chroot()
+        sync_marchs_with_config()
+        update_svn2git()
+        q = JoinableQueue()
 
-    with Pool(config["build"]["worker"], initializer=run_worker) as pool:
-        fill_queue()
-        signal.signal(signal.SIGINT, signal.default_int_handler)
+        with Pool(config["build"]["worker"], initializer=run_worker, initargs=(d,)) as pool:
+            fill_queue()
+            signal.signal(signal.SIGINT, signal.default_int_handler)
 
-        while True:
-            try:
-                if time.time() - update_last > 900 and q.qsize() == 0:
-                    q.join()
-                    update_last = time.time()
-                    update_svn2git()
-                    fill_queue()
-                    if q.qsize() > 0:
-                        logging.info("New Queue size: %d", q.qsize())
-                else:
-                    do_repo_work()
-                    time.sleep(60)
-            except KeyboardInterrupt:
-                with copy_l:
-                    pool.close()
-                    pool.terminate()
-                    q.close()
-                    do_repo_work()
-                    sys.exit(0)
+            while True:
+                try:
+                    if time.time() - update_last > 900 and q.empty():
+                        q.join()
+                        do_repo_work()
+                        update_last = time.time()
+                        update_svn2git()
+                        fill_queue()
+                        if q.qsize() > 0:
+                            logging.info("New Queue size: %d", q.qsize())
+                    else:
+                        time.sleep(60)
+                        do_repo_work()
+                except KeyboardInterrupt:
+                    with copy_l:
+                        pool.close()
+                        pool.terminate()
+                        q.close()
+                        do_repo_work()
+                        sys.exit(0)
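Note on the shared-state change in this patch: the Lock-guarded module-level to_add dict is replaced by a Manager()-backed dict of manager lists (d, with m.list() values) handed to the pool workers via initializer/initargs. The stand-alone sketch below shows only that sharing pattern in isolation; the names (_init_worker, collect, the example repo and package names) are illustrative and not taken from master.py.

# Minimal sketch of the Manager-backed sharing pattern used above (assumptions noted in the lead-in).
from multiprocessing import Manager, Pool

todo = None  # set per worker by _init_worker


def _init_worker(shared):
    # Runs once in each pool worker; stores the DictProxy passed via initargs.
    global todo
    todo = shared


def collect(repo):
    # Appends go through the manager process, so they are visible to the parent.
    todo[repo].append(repo + "-example.pkg.tar.zst")
    return repo


if __name__ == "__main__":
    repos = ["core-x86-64-v3", "extra-x86-64-v3"]  # illustrative repo names
    with Manager() as m:
        shared = m.dict()
        for repo in repos:
            shared[repo] = m.list()  # one proxied list per repo, as in sync_marchs_with_config

        with Pool(2, initializer=_init_worker, initargs=(shared,)) as pool:
            pool.map(collect, repos * 2)

        for repo in repos:
            print(repo, list(shared[repo]))

Because each proxy method call is routed through the manager process, single operations like append/extend on the shared lists do not need the old to_add_l lock; only compound read-modify-write sequences would still require explicit synchronization.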