diff --git a/config.yaml b/config.yaml
index 02a95ed..4f3b7d8 100644
--- a/config.yaml
+++ b/config.yaml
@@ -4,20 +4,24 @@ repos:
   - extra
   - community
 
+svn2git:
+  upstream-core-extra: "https://github.com/archlinux/svntogit-packages.git"
+  upstream-community: "https://github.com/archlinux/svntogit-community.git"
+
 basedir:
-  repo: /tmp/alhp/
-  svn2git: upstream/
-  makepkg: /usr/share/devtools/makepkg-x86_64.conf
+  repo: /home/harting/repo/
+  chroot: /home/harting/chroot/
+  makepkg: /home/harting/Projects/ALHP/makepkg/
 
 march:
-  - znver2
-  - sandybridge
-  - ivybridge
-  - skylake
+  - x86-64-v3
 
 blacklist:
   - pacman
 
+build:
+  worker: 4
+
 logging:
   version: 1
   disable_existing_loggers: True
diff --git a/makepkg-x86_64.conf b/makepkg.tmpl
similarity index 91%
rename from makepkg-x86_64.conf
rename to makepkg.tmpl
index 4c895fc..78ba1c7 100644
--- a/makepkg-x86_64.conf
+++ b/makepkg.tmpl
@@ -1,6 +1,4 @@
 #!/hint/bash
-# shellcheck disable=2034
-
 #
 # /etc/makepkg.conf
 #
@@ -38,15 +36,20 @@ CARCH="x86_64"
 CHOST="x86_64-pc-linux-gnu"
 
 #-- Compiler and Linker Flags
-CPPFLAGS="-D_FORTIFY_SOURCE=2"
-CFLAGS="-march=skylake -O3 -pipe -fno-plt"
-CXXFLAGS="-march=skylake -O3 -pipe -fno-plt"
+#CPPFLAGS=""
+CFLAGS="-march=x86-64 -mtune=generic -O2 -pipe -fno-plt -fexceptions \
+        -Wp,-D_FORTIFY_SOURCE=2,-D_GLIBCXX_ASSERTIONS \
+        -Wformat -Werror=format-security \
+        -fstack-clash-protection -fcf-protection"
+CXXFLAGS="$CFLAGS"
 LDFLAGS="-Wl,-O1,--sort-common,--as-needed,-z,relro,-z,now"
+#RUSTFLAGS="-C opt-level=2"
 #-- Make Flags: change this for DistCC/SMP systems
-MAKEFLAGS="-j5"
+MAKEFLAGS="-j2"
 #-- Debugging flags
 DEBUG_CFLAGS="-g -fvar-tracking-assignments"
 DEBUG_CXXFLAGS="-g -fvar-tracking-assignments"
+#DEBUG_RUSTFLAGS="-C debuginfo=2"
 
 #########################################################################
 # BUILD ENVIRONMENT
@@ -89,7 +92,7 @@ BUILDENV=(!distcc !color !ccache !check !sign)
 #
 OPTIONS=(strip docs !libtool !staticlibs emptydirs zipman purge !debug)
 
-#-- File integrity checks to use. Valid: md5, sha1, sha256, sha384, sha512
+#-- File integrity checks to use. Valid: md5, sha1, sha224, sha256, sha384, sha512, b2
 INTEGRITY_CHECK=(md5)
 #-- Options to be used when stripping binaries. See `man strip' for details.
 STRIP_BINARIES="--strip-all"
@@ -121,7 +124,7 @@ DBGSRCDIR="/usr/src/debug"
 #-- Log files: specify a fixed directory where all log files will be placed
 #LOGDEST=/home/makepkglogs
 #-- Packager: name/email of the person or organization building packages
-PACKAGER="Archlinux CIE "
+#PACKAGER="John Doe "
 #-- Specify a key to use for package signing
 #GPGKEY=""
 
@@ -132,7 +135,7 @@ PACKAGER="Archlinux CIE "
 COMPRESSGZ=(gzip -c -f -n)
 COMPRESSBZ2=(bzip2 -c -f)
 COMPRESSXZ=(xz -c -z -)
-COMPRESSZST=(zstd -c -T0 --ultra -20 -)
+COMPRESSZST=(zstd -c -z -q -)
 COMPRESSLRZ=(lrzip -q)
 COMPRESSLZO=(lzop -q)
 COMPRESSZ=(compress -c -f)
@@ -144,6 +147,4 @@ COMPRESSLZ=(lzip -c -f)
 #########################################################################
 #
 PKGEXT='.pkg.tar.zst'
-SRCEXT='.src.tar.gz'
-
-# vim: set ft=sh ts=2 sw=2 et:
+SRCEXT='.src.tar.gz'
\ No newline at end of file
diff --git a/master.py b/master.py
index 8b8399d..d92beed 100644
--- a/master.py
+++ b/master.py
@@ -7,10 +7,11 @@ import os
 import pathlib
 import re
 import shutil
+import signal
 import subprocess
 import sys
 import time
-from queue import Queue, Empty
+from multiprocessing import Pool, Queue, current_process
 
 import yaml
 from packaging import version
@@ -20,10 +21,93 @@ regex_pkgrel = re.compile(r"^pkgrel\s*=\s*(.+)$", re.MULTILINE)
 regex_march = re.compile(r"(-march=)(.+?) ", re.MULTILINE)
 regex_validkeys = re.compile(r"^validpgpkeys\+?=\((.*?)\)", re.MULTILINE | re.DOTALL)
 fp = None
-q = Queue()
 update_last = time.time()
 
 
+def build(pkgbuild, repo):
+    start_time = time.time()
+    name = pathlib.Path(pkgbuild).parts[-4]
+    process_name = current_process().name
+    logging.info("[%s/%s] Build starting (Queue ~= %s)", repo, name, q.qsize())
+
+    # setup makepkg
+    setup_makepkg(repo)
+
+    # import pgp keys
+    import_keys(pkgbuild)
+
+    # build with devtools
+    os.chdir(pathlib.Path(pkgbuild).parent)
+    res = subprocess.run(
+        ["makechrootpkg", "-D", os.path.join(sys.path[0], config["basedir"]["makepkg"]), "-l", process_name, "-r",
+         os.path.join(sys.path[0], config["basedir"]["chroot"]), "--", "--config",
+         os.path.join(sys.path[0], config["basedir"]["makepkg"]) + "makepkg-" + '-'.join(
+             repo.split("-")[1:]) + ".conf"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    if res.returncode:
+        logging.warning("[%s/%s] Build failed. Check repo/logs for more information.", repo, name)
+
+        # write packagename to failed list
+        with open(os.path.join(config["basedir"]["repo"], repo + "_failed.txt"), "a") as f:
+            f.write(name + "\n")
+
+        # write logs
+        if not os.path.exists(os.path.join(config["basedir"]["repo"], "logs", repo)):
+            pathlib.Path(os.path.join(sys.path[0], config["basedir"]["repo"], "logs", repo)).mkdir(parents=True,
+                                                                                                   exist_ok=True)
+        with open(os.path.join(config["basedir"]["repo"], "logs", repo, name + ".log"), "w") as log:
+            log.write(res.stdout.decode())
+
+        build_cleanup()
+        return
+
+    # signing
+    pkgs = glob.glob("*.pkg.tar.zst")
+    for pkg in pkgs:
+        s_res = subprocess.run(["gpg", "--batch", "--detach-sign", pkg], stdout=subprocess.PIPE,
+                               stderr=subprocess.STDOUT)
+        if s_res.returncode:
+            logging.error("[%s/%s] Signing failed: %s", repo, name, s_res.stdout.decode())
+            build_cleanup()
+            return
+
+    # copying
+    pkgs.extend(glob.glob("*.pkg.tar.zst.sig"))
+    for pkg in pkgs:
+        logging.debug("[%s/%s] Copy %s to %s", repo, name, pkg,
+                      os.path.join(config["basedir"]["repo"], repo, "os", config["arch"] + "/"))
+        shutil.copy2(pkg, os.path.join(config["basedir"]["repo"], repo, "os", config["arch"] + "/"))
+
+    # repo
+    r_res = subprocess.run(["repo-add", "-s", "-v",
+                            os.path.join(config["basedir"]["repo"], repo, "os", config["arch"],
+                                         repo + ".db.tar.xz"),
+                            pkgs[0]], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    logging.debug("[REPO-ADD] %s", r_res.stdout.decode())
+    if r_res.returncode:
+        logging.error("[%s/%s] Repo action failed: %s", repo, name, r_res.stdout.decode())
+        build_cleanup()
+        return
+
+    p_res = subprocess.run(
+        ["paccache", "-rc", os.path.join(config["basedir"]["repo"], repo, "os", config["arch"]), "-k", "1"],
+        stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    logging.debug("[PACCACHE] %s", p_res.stdout.decode())
+    if p_res.returncode:
+        logging.error("[%s/%s] Repo cleanup failed: %s", repo, name, p_res.stdout.decode())
+        build_cleanup()
+        return
+
+    # cleanup
+    build_cleanup()
+    logging.info("[%s/%s] Build successful (%s)", repo, name, int(time.time() - start_time))
+
+
+def run_worker() -> None:
+    os.nice(20)
+    while True:
+        build(*q.get(block=True))
+
+
 def already_running():
     global fp
     fp = os.open(f"/tmp/alhp.lock", os.O_WRONLY | os.O_CREAT)
@@ -63,85 +147,29 @@ def build_cleanup():
     os.chdir(sys.path[0])
 
 
-def build(pkgbuild, repo):
-    start_time = time.time()
-    name = pathlib.Path(pkgbuild).parts[-4]
-    logging.info("[%s/%s] Build starting (Queue ~= %s)", repo, name, q.qsize())
-
-    # setup buildflags
-    setup_makepkg(repo)
-
-    # import pgp keys
-    import_keys(pkgbuild)
-
-    # build with devtools
-    os.chdir(pathlib.Path(pkgbuild).parent)
-    res = subprocess.run(["sudo", "extra-x86_64-build"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-    if res.returncode:
-        logging.warning("[%s/%s] Build failed. Check repo/logs for more information.", repo, name)
-
-        # write packagename to failed list
-        with open(os.path.join(config["basedir"]["repo"], repo + "_failed.txt"), "a") as f:
-            f.write(name + "\n")
-
-        # write logs
-        if not os.path.exists(os.path.join(config["basedir"]["repo"], "logs", repo)):
-            pathlib.Path(os.path.join(config["basedir"]["repo"], "logs", repo)).mkdir(parents=True, exist_ok=True)
-        with open(os.path.join(config["basedir"]["repo"], "logs", repo, name + ".log"), "w") as log:
-            log.write(res.stdout.decode())
-
-        build_cleanup()
-        return
-
-    # signing
-    pkgs = glob.glob("*.pkg.tar.zst")
-    for pkg in pkgs:
-        s_res = subprocess.run(["gpg", "--batch", "--detach-sign", pkg], stdout=subprocess.PIPE,
-                               stderr=subprocess.STDOUT)
-        if s_res.returncode:
-            logging.error("[%s/%s] Signing failed: %s", repo, name, s_res.stdout.decode())
-            build_cleanup()
-            return
-
-    # copying
-    pkgs.extend(glob.glob("*.pkg.tar.zst.sig"))
-    for pkg in pkgs:
-        logging.debug("[%s/%s] Copy %s to %s", repo, name, pkg,
-                      os.path.join(config["basedir"]["repo"], repo, "os", config["arch"] + "/"))
-        shutil.copy2(pkg, os.path.join(config["basedir"]["repo"], repo, "os", config["arch"] + "/"))
-
-    # repo
-    r_res = subprocess.run(["repo-add", "-s", "-v",
-                            os.path.join(config["basedir"]["repo"], repo, "os", config["arch"], repo + ".db.tar.xz"),
-                            pkgs[0]], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-    logging.debug("[REPO-ADD] %s", r_res.stdout.decode())
-    if r_res.returncode:
-        logging.error("[%s/%s] Repo action failed: %s", repo, name, r_res.stdout.decode())
-        build_cleanup()
-        return
-
-    p_res = subprocess.run(
-        ["paccache", "-rc", os.path.join(config["basedir"]["repo"], repo, "os", config["arch"]), "-k", "1"],
-        stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-    logging.debug("[PACCACHE] %s", p_res.stdout.decode())
-    if p_res.returncode:
-        logging.error("[%s/%s] Repo cleanup failed: %s", repo, name, p_res.stdout.decode())
-        build_cleanup()
-        return
-
-    # cleanup
-    build_cleanup()
-    logging.info("[%s/%s] Build successful (%s)", repo, name, int(time.time() - start_time))
+def setup_chroot():
+    if not os.path.exists(os.path.join(config["basedir"]["chroot"], "root")):
+        pathlib.Path(config["basedir"]["chroot"]).mkdir(parents=True, exist_ok=True)
+        logging.debug("[MKCHROOT] %s",
+                      subprocess.run(
+                          ["mkarchroot", os.path.join(config["basedir"]["chroot"], "root"), "base-devel"],
+                          stdout=subprocess.PIPE, stderr=subprocess.STDOUT).stdout.decode(errors='ignore'))
 
 
 def setup_makepkg(repo):
-    with open(config["basedir"]["makepkg"]) as conf:
-        c_all = conf.read()
-        c_all = c_all.replace("-mtune=generic", "")
-        c_all = c_all.replace("-O2", "-O3")
-        c_all = regex_march.sub(r"\1" + repo.split("-")[1] + " ", c_all)
-    with open(config["basedir"]["makepkg"], "w") as conf:
-        conf.write(c_all)
+    makepkg_repo = os.path.join(config["basedir"]["makepkg"], "makepkg-" + '-'.join(repo.split("-")[1:]) + ".conf")
+
+    if not os.path.exists(makepkg_repo):
+        pathlib.Path(config["basedir"]["makepkg"]).mkdir(parents=True, exist_ok=True)
+        shutil.copyfile("makepkg.tmpl", makepkg_repo)
+
+    with open(makepkg_repo) as conf:
+        c_all = conf.read()
+        c_all = c_all.replace("-mtune=generic", "")
+        c_all = c_all.replace("-O2", "-O3")
+        c_all = regex_march.sub(r"\1" + '-'.join(repo.split("-")[1:]) + " ", c_all)
+    with open(makepkg_repo, "w") as conf:
+        conf.write(c_all)
 
 
 def import_keys(pkgbuild):
@@ -174,17 +202,18 @@ def package_exists(name, repo):
 
 
 def update_git2svn():
-    if not os.path.exists(config["basedir"]["svn2git"]):
-        logging.debug("[GIT] %s", subprocess.run(
-            ["git", "clone", "https://github.com/archlinux/svntogit-packages.git", config["basedir"]["svn2git"]],
-            check=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT).stdout.decode())
-    else:
-        os.chdir(config["basedir"]["svn2git"])
-        logging.debug("[GIT] %s", subprocess.run(["git", "clean", "-xdf"], check=True, stdout=subprocess.PIPE,
-                                                 stderr=subprocess.STDOUT).stdout.decode())
-        logging.debug("[GIT] %s", subprocess.run(["git", "pull"], check=True, stdout=subprocess.PIPE,
-                                                 stderr=subprocess.STDOUT).stdout.decode())
-    os.chdir("..")
+    for git_dir, git_url in config["svn2git"].items():
+        if not os.path.exists(git_dir):
+            logging.debug("[GIT] %s",
+                          subprocess.run(["git", "clone", git_url, git_dir], check=True, stdout=subprocess.PIPE,
+                                         stderr=subprocess.STDOUT).stdout.decode())
+        else:
+            os.chdir(git_dir)
+            logging.debug("[GIT] %s", subprocess.run(["git", "clean", "-xdf"], check=True, stdout=subprocess.PIPE,
+                                                     stderr=subprocess.STDOUT).stdout.decode())
+            logging.debug("[GIT] %s", subprocess.run(["git", "pull"], check=True, stdout=subprocess.PIPE,
+                                                     stderr=subprocess.STDOUT).stdout.decode())
+        os.chdir("..")
 
 
 def parse_pkgbuild(pkgbuild_file):
@@ -226,22 +255,28 @@ def sync_marchs_with_config():
     for repo in repos_create:
         logging.debug("Create repo %s: %s", repo, os.path.join(config["basedir"]["repo"], repo, "os/x86_64"))
         pathlib.Path(os.path.join(config["basedir"]["repo"], repo, "os/x86_64")).mkdir(parents=True, exist_ok=True)
+        setup_makepkg(repo)
 
     for repo in repos_delete:
        logging.debug("Delete repo %s: %s", repo, os.path.join(config["basedir"]["repo"], repo))
        shutil.rmtree(os.path.join(config["basedir"]["repo"], repo))
+       os.remove(os.path.join(config["basedir"]["makepkg"], "makepkg-" + repo + ".conf"))
 
 
 def fill_queue():
-    all_pkgbuild = glob.glob(os.path.join(config["basedir"]["svn2git"]) + "/**/PKGBUILD", recursive=True)
+    all_pkgbuild = []
+
+    for git_dir, git_url in config["svn2git"].items():
+        all_pkgbuild.extend(glob.glob(os.path.join(git_dir) + "/**/PKGBUILD", recursive=True))
+
     to_delete = []
 
     for pkgbuild in all_pkgbuild:
         path_split = pkgbuild.split("/")
 
         # ignore pkgbuild if in trunk, -any package, not in repos, or on blacklist
-        if path_split[-2] == "trunk" or path_split[-2].split("-")[0] not in config[
-            "repos"] or "any" in path_split[-2] or path_split[-4] in config["blacklist"]:
+        if path_split[-2] == "trunk" or path_split[-2].split("-")[0] not in config["repos"] or "any" in path_split[-2] \
+                or path_split[-4] in config["blacklist"]:
             to_delete.append(pkgbuild)
 
     final_pkgbuilds = list(set(all_pkgbuild) - set(to_delete))
@@ -285,20 +320,27 @@ if __name__ == '__main__':
     if not os.path.exists(config["basedir"]["repo"]):
         pathlib.Path(config["basedir"]["repo"]).mkdir(parents=True, exist_ok=True)
 
+    os.nice(5)
+
+    setup_chroot()
     sync_marchs_with_config()
     update_git2svn()
-    fill_queue()
+    q = Queue()
 
-    while True:
-        if q.qsize() > 0:
+    with Pool(config["build"]["worker"], initializer=run_worker) as pool:
+        fill_queue()
+        signal.signal(signal.SIGINT, signal.default_int_handler)
+
+        while True:
             try:
-                build(*q.get_nowait())
-            except Empty:
-                pass
-        else:
-            time.sleep(60)
-
-        if time.time() - update_last > 900:
-            update_git2svn()
-            update_last = time.time()
-            fill_queue()
+                if time.time() - update_last > 900:
+                    update_last = time.time()
+                    update_git2svn()
+                    fill_queue()
+                else:
+                    time.sleep(60)
+            except KeyboardInterrupt:
+                pool.close()
+                pool.terminate()
+                q.close()
+                sys.exit(0)