This repository has been archived on 2021-06-12. You can view files and clone it, but cannot push or open issues or pull requests.
Files
alhp/master.py
2021-06-01 16:47:36 +02:00

360 lines
14 KiB
Python

#!/usr/bin/env python3
import fcntl
import glob
import logging.config
import os
import pathlib
import re
import shutil
import signal
import subprocess
import sys
import time
import traceback
from multiprocessing import Pool, current_process, JoinableQueue, Lock
import yaml
from humanfriendly import format_timespan
from packaging.version import LegacyVersion
from utils import parse_pkgbuild, parse_pkgbuild_ver, import_keys
# Patterns for pulling fields out of PKGBUILD text (MULTILINE: anchored per line).
regex_pkgver = re.compile(r"^_?pkgver\s*=\s*(.+)$", re.MULTILINE)
regex_pkgrel = re.compile(r"^pkgrel\s*=\s*(.+)$", re.MULTILINE)
regex_epoch = re.compile(r"^epoch\s*=\s*(.+)$", re.MULTILINE)
# Captures "-march=" and its value separately so the value can be swapped via sub().
regex_march = re.compile(r"(-march=)(.+?) ", re.MULTILINE)
regex_validkeys = re.compile(r"^validpgpkeys\+?=\((.*?)\)", re.MULTILINE | re.DOTALL)
# Group 1 = package name from "<name>-<pkgver>-<pkgrel>-<arch>.pkg.tar.zst[.sig]".
regex_pkg_repo = re.compile(r"^(.*)-.*-.*-(?:x86_64|any)\.pkg\.tar\.zst(?:\.sig)*$", re.MULTILINE)
fp = None  # lock-file descriptor held open by already_running() for process lifetime
update_last = time.time()  # timestamp of the last svn2git refresh in the main loop
copy_l = Lock()  # serializes copying built packages vs. shutdown (see build / __main__)
repos = []  # active "<repo>-<march>" names, filled by sync_marchs_with_config()
def build(pkgbuild: str, repo: str) -> None:
    """Build one package from *pkgbuild* for *repo* inside a clean chroot.

    Workflow: prepare the march-specific makepkg conf, import PGP keys,
    bump pkgrel, run makechrootpkg; on failure record the pkgbase in
    <repo>_failed.txt and store the build log, on success sign the
    packages and copy them (with signatures) into the repo directory.
    The git checkout is cleaned with ``git clean -xdff`` on every exit path.
    """
    start_time = time.time()
    # svn2git layout .../<pkgbase>/repos/<repo-arch>/PKGBUILD -> parts[-4] is the pkgbase
    name = pathlib.Path(pkgbuild).parts[-4]
    process_name = current_process().name
    logging.info("[%s/%s/%s] Build starting (Queue ~= %s)", process_name, repo, name, q.qsize())
    setup_makepkg(repo)
    import_keys(pkgbuild)
    increase_pkgrel(pkgbuild)
    os.chdir(pathlib.Path(pkgbuild).parent)
    # FIX: build the conf path with os.path.join instead of string concatenation,
    # which produced ".../makepkgmakepkg-<march>.conf" whenever the configured
    # basedir lacked a trailing slash; setup_makepkg writes it with os.path.join.
    makepkg_conf = os.path.join(config["basedir"]["makepkg"],
                                "makepkg-" + '-'.join(repo.split("-")[1:]) + ".conf")
    res = subprocess.run(
        ["makechrootpkg", "-c", "-D", config["basedir"]["makepkg"], "-l", process_name,
         "-r", config["basedir"]["chroot"], "--", "--config", makepkg_conf],
        stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    if res.returncode:
        logging.warning("[%s/%s/%s] Build failed. Check repo/logs for more information.", process_name, repo, name)
        # remember the failure so future queue fills skip this package
        with open(os.path.join(config["basedir"]["repo"], repo + "_failed.txt"), "a") as f:
            f.write(name + "\n")
        # keep the full build log for debugging (mkdir is idempotent; no exists() check needed)
        log_dir = os.path.join(config["basedir"]["repo"], "logs", repo)
        pathlib.Path(log_dir).mkdir(parents=True, exist_ok=True)
        with open(os.path.join(log_dir, name + ".log"), "w") as log:
            log.write(res.stdout.decode(errors="ignore"))
        logging.debug("[GIT] %s", subprocess.run(["git", "clean", "-xdff"], check=False, stdout=subprocess.PIPE,
                                                 stderr=subprocess.STDOUT).stdout.decode(errors="ignore"))
        return
    # sign every built package; bail out (after cleaning) if any signature fails
    pkgs = glob.glob("*.pkg.tar.zst")
    for pkg in pkgs:
        s_res = subprocess.run(["gpg", "--batch", "--detach-sign", pkg], stdout=subprocess.PIPE,
                               stderr=subprocess.STDOUT)
        if s_res.returncode:
            logging.error("[%s/%s/%s] Signing failed: %s", process_name, repo, name,
                          s_res.stdout.decode(errors="ignore"))
            logging.debug("[GIT] %s", subprocess.run(["git", "clean", "-xdff"], check=False, stdout=subprocess.PIPE,
                                                     stderr=subprocess.STDOUT).stdout.decode(errors="ignore"))
            return
    # copy packages + signatures under copy_l so shutdown (which also takes
    # copy_l in __main__) cannot interrupt a half-finished copy set
    pkgs.extend(glob.glob("*.pkg.tar.zst.sig"))
    with copy_l:
        for pkg in pkgs:
            dest = os.path.join(config["basedir"]["repo"], repo, "os", config["arch"] + "/")
            logging.debug("[%s/%s/%s] Copy %s to %s", process_name, repo, name, pkg, dest)
            shutil.copy2(pkg, dest)
    logging.debug("[GIT] %s", subprocess.run(["git", "clean", "-xdff"], check=False, stdout=subprocess.PIPE,
                                             stderr=subprocess.STDOUT).stdout.decode(errors="ignore"))
    logging.info("[%s/%s/%s] Build successful (%s)", process_name, repo, name,
                 format_timespan(time.time() - start_time))
def run_worker() -> None:
    # Worker-process entry point (Pool initializer): consume (pkgbuild, repo)
    # tuples from the global JoinableQueue *q* forever.
    # Lowest scheduling priority so builds do not starve the host.
    os.nice(20)
    while True:
        try:
            build(*q.get(block=True))
        except Exception as e:
            # Keep the worker alive: one failed build must not kill the pool.
            logging.error("Error in worker: %s", e)
            traceback.print_exc()
        finally:
            # Always balance the q.get() so q.join() in the main loop can return.
            q.task_done()
def do_repo_work() -> None:
    """Regenerate the pacman database for every active repo and prune old packages."""
    for repo in repos:
        pkgs = glob.glob(os.path.join(config["basedir"]["repo"], repo, "os", config["arch"], "*.zst"))
        # repo-add flags: -s sign, -v verify, -p/-n presumably prevent-downgrade /
        # only-add-newer — confirm against repo-add(8)
        args = ["repo-add", "-s", "-v", "-p", "-n",
                os.path.join(config["basedir"]["repo"], repo, "os", config["arch"], repo + ".db.tar.xz")]
        args.extend(pkgs)
        r_res = subprocess.run(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        logging.debug("[REPO-ADD] %s", r_res.stdout.decode(errors="ignore"))
        if r_res.returncode:
            logging.error("[REPO/%s] Repo action failed: %s", repo, r_res.stdout.decode(errors="ignore"))
        # paccache -rc <dir> -k 1: remove cached packages, keeping 1 version each
        p_res = subprocess.run(
            ["paccache", "-rc", os.path.join(config["basedir"]["repo"], repo, "os", config["arch"]), "-k", "1"],
            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        logging.debug("[PACCACHE] %s", p_res.stdout.decode(errors="ignore"))
        if p_res.returncode:
            logging.error("[REPO/%s] Repo cleanup failed: %s", repo, p_res.stdout.decode(errors="ignore"))
def already_running() -> bool:
    """Return True if another instance already holds the global lock file.

    The descriptor is stored in the module-level *fp* so the exclusive
    lock stays held for the lifetime of this process.
    """
    global fp
    # plain string: the original used an f-string with no placeholders
    fp = os.open("/tmp/alhp.lock", os.O_WRONLY | os.O_CREAT)
    try:
        # non-blocking: raises OSError immediately when the lock is taken
        fcntl.lockf(fp, fcntl.LOCK_EX | fcntl.LOCK_NB)
        return False
    except OSError:
        return True
def package_exists(names: list, repo) -> bool:
    """Return True when every name in *names* has at least one built package
    file present in *repo* (vacuously True for an empty list)."""
    return all(find_all_files_for_pkg(pkg_name, repo) for pkg_name in names)
def find_all_files_for_pkg(name: str, repo: str) -> list:
    """Return paths of every non-signature package file for *name* inside *repo*."""
    arch_dir = os.path.join(config["basedir"]["repo"], repo, "os", config["arch"])
    found = []
    for root, _dirs, filenames in os.walk(arch_dir):
        for filename in filenames:
            # signatures match the filename regex too; skip them up front
            if filename.endswith(".sig"):
                continue
            for candidate in regex_pkg_repo.findall(filename):
                if candidate == name:
                    found.append(os.path.join(root, filename))
    return found
def get_failed_packages(repo: str) -> list:
    """Return the pkgbases recorded as failed for *repo*.

    Reads <repo>_failed.txt under the repo basedir; an absent file means
    no recorded failures. EAFP open avoids the exists()/open() race of
    the original LBYL check.
    """
    failed_file = os.path.join(config["basedir"]["repo"], repo + "_failed.txt")
    try:
        with open(failed_file) as p:
            return p.read().splitlines()
    except FileNotFoundError:
        return []
def setup_chroot() -> None:
    """Create the root build chroot if missing, otherwise update it.

    Exits the whole program when the initial mkarchroot fails, since no
    build can proceed without the root chroot.
    """
    root = os.path.join(config["basedir"]["chroot"], "root")
    if not os.path.exists(root):
        pathlib.Path(config["basedir"]["chroot"]).mkdir(parents=True, exist_ok=True)
        s = subprocess.run(["mkarchroot", "-C", "/usr/share/devtools/pacman-extra.conf",
                            root, "base-devel"],
                           stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        logging.debug("[MKCHROOT] %s", s.stdout.decode(errors='ignore'))
        if s.returncode:
            logging.fatal("[MKCHROOT] Failed to create root chroot: %s", s.stdout.decode(errors="ignore"))
            sys.exit(2)
    else:
        # FIX: capture and log pacman's actual output instead of logging the
        # CompletedProcess repr (which also let pacman write straight to our
        # stdout), and surface a non-zero exit as a warning.
        u = subprocess.run(["arch-nspawn", root, "pacman", "-Syuu", "--noconfirm"],
                           stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        logging.debug("[NSPAWN] %s", u.stdout.decode(errors="ignore"))
        if u.returncode:
            logging.warning("[NSPAWN] Chroot update failed: %s", u.stdout.decode(errors="ignore"))
def setup_makepkg(repo) -> None:
    """Ensure a march-specific makepkg conf exists for *repo* and (re)apply
    the march/optimization substitutions to it."""
    march = '-'.join(repo.split("-")[1:])
    conf_path = os.path.join(config["basedir"]["makepkg"], "makepkg-" + march + ".conf")
    if not os.path.exists(conf_path):
        pathlib.Path(config["basedir"]["makepkg"]).mkdir(parents=True, exist_ok=True)
        shutil.copyfile("makepkg.tmpl", conf_path)
    # rewrite the conf on every call: drop mtune, bump -O2 to -O3, set -march
    with open(conf_path) as conf:
        content = conf.read()
    content = content.replace("-mtune=generic", "").replace("-O2", "-O3")
    content = regex_march.sub(r"\1" + march, content)
    with open(conf_path, "w") as conf:
        conf.write(content)
def update_svn2git() -> None:
    """Clone or update every configured svn2git mirror of upstream PKGBUILDs."""
    if not os.path.exists(config["basedir"]["upstream"]):
        pathlib.Path(config["basedir"]["upstream"]).mkdir(parents=True, exist_ok=True)
    for git_dir, git_url in config["svn2git"].items():
        git_path = os.path.join(config["basedir"]["upstream"], git_dir)
        if not os.path.exists(git_path):
            # first run: shallow clone is enough, history is never used
            logging.debug("[GIT] %s",
                          subprocess.run(["git", "clone", "--depth=1", git_url, git_path], check=True,
                                         stdout=subprocess.PIPE, stderr=subprocess.STDOUT).stdout.decode(
                              errors="ignore"))
        else:
            # existing checkout: drop build leftovers and local edits, then pull
            os.chdir(git_path)
            logging.debug("[GIT] %s", subprocess.run(["git", "clean", "-xdff"], check=False, stdout=subprocess.PIPE,
                                                     stderr=subprocess.STDOUT).stdout.decode(errors="ignore"))
            logging.debug("[GIT] %s", subprocess.run(["git", "reset", "--hard"], check=True, stdout=subprocess.PIPE,
                                                     stderr=subprocess.STDOUT).stdout.decode(errors="ignore"))
            logging.debug("[GIT] %s", subprocess.run(["git", "pull"], check=True, stdout=subprocess.PIPE,
                                                     stderr=subprocess.STDOUT).stdout.decode(errors="ignore"))
def increase_pkgrel(pkgbuild_file) -> None:
    """Append ".1" to pkgrel in *pkgbuild_file* in place, marking this as a
    rebuild distinct from the upstream package."""
    parsed = parse_pkgbuild(pkgbuild_file)
    with open(pkgbuild_file, errors='ignore') as src:
        content = src.read()
    bumped = regex_pkgrel.sub("pkgrel=" + parsed["pkgrel"] + ".1", content)
    with open(pkgbuild_file, "w", errors='ignore') as dst:
        dst.write(bumped)
def parse_repo(name, repo) -> LegacyVersion:
ver_split = find_all_files_for_pkg(name, repo)[0].split("-")
return LegacyVersion(ver_split[-3] + "-" + ver_split[-2])
def sync_marchs_with_config() -> None:
    """Reconcile on-disk repo directories with the repo/march product in config.

    Creates missing "<repo>-<march>" directories (plus their makepkg conf),
    deletes directories no longer configured, and refreshes the module-level
    *repos* list.
    """
    existing_repos = []
    with os.scandir(config["basedir"]["repo"]) as it:
        entry: os.DirEntry
        for entry in it:
            # 'logs' lives next to the repo dirs and must not be treated as one
            if not entry.name.startswith('logs') and entry.is_dir():
                existing_repos.append(entry.name)
    # cartesian product of configured repos and march levels
    repo_quota = []
    for r, a in ((x, y) for x in config["repos"] for y in config["march"]):
        repo_quota.append("{}-{}".format(r, a))
    logging.info("Repos: %s", repo_quota)
    global repos
    repos = repo_quota
    repos_create = list(set(repo_quota) - set(existing_repos))
    repos_delete = list(set(existing_repos) - set(repo_quota))
    for repo in repos_create:
        logging.debug("Create repo %s: %s", repo, os.path.join(config["basedir"]["repo"], repo, "os/x86_64"))
        pathlib.Path(os.path.join(config["basedir"]["repo"], repo, "os/x86_64")).mkdir(parents=True, exist_ok=True)
        setup_makepkg(repo)
    for repo in repos_delete:
        logging.debug("Delete repo %s: %s", repo, os.path.join(config["basedir"]["repo"], repo))
        shutil.rmtree(os.path.join(config["basedir"]["repo"], repo))
        # FIX: setup_makepkg names the conf "makepkg-<march>.conf", not
        # "makepkg-<repo>.conf" as the original tried to delete (which raised
        # FileNotFoundError). The conf is shared by all repos of the same
        # march, so only remove it when no active repo still needs it.
        march = '-'.join(repo.split("-")[1:])
        if not any('-'.join(active.split("-")[1:]) == march for active in repo_quota):
            march_conf = os.path.join(config["basedir"]["makepkg"], "makepkg-" + march + ".conf")
            if os.path.exists(march_conf):
                os.remove(march_conf)
def fill_queue() -> None:
    """Scan all svn2git checkouts and queue every PKGBUILD that needs a (re)build."""
    all_pkgbuild = []
    for git_dir, git_url in config["svn2git"].items():
        all_pkgbuild.extend(
            glob.glob(os.path.join(config["basedir"]["upstream"], git_dir) + "/**/PKGBUILD", recursive=True))
    for pkgbuild in all_pkgbuild:
        # path layout: .../<pkgbase>/repos/<repo-arch>/PKGBUILD -> [-2] is "<repo>-<arch>"
        path_split = pkgbuild.split("/")
        parsed = parse_pkgbuild(pkgbuild)
        # ignore pkgbuild if in trunk, -any package, not in repos, on blacklist, not for current arch
        if path_split[-2] == "trunk" or path_split[-2].split("-")[0] not in config["repos"] or "any" in parsed["arch"] \
                or parsed["pkgbase"] in config["blacklist"] or "i686" in path_split[-2]:
            # TODO: delete packages not to build
            pass
        else:
            # queue one build per configured march level
            for march in config["march"]:
                repo = path_split[-2].split("-")[0] + "-" + march
                if parsed["pkgbase"] in get_failed_packages(repo):
                    logging.info("[%s/%s] Skipped due to failing build", repo, parsed["pkgbase"])
                    continue
                ver = parse_pkgbuild_ver(parsed=parsed)
                packages = list(parsed["packages"])
                if package_exists(packages, repo):
                    # already published: rebuild only when upstream version is newer
                    logging.debug("[SEMVER] Comparing %s=%s - %s=%s", packages[0], parse_repo(packages[0], repo),
                                  packages[0], ver)
                    rv = parse_repo(packages[0], repo)
                    if rv < ver:
                        q.put((pkgbuild, repo))
                        logging.info("[%s/%s] Build queued (new version available %s < %s)", repo, parsed["pkgbase"],
                                     rv, ver)
                else:
                    q.put((pkgbuild, repo))
                    logging.info("[%s/%s] Build queued (package not build yet)", repo, parsed["pkgbase"])
    logging.info("Build queue size: %s", q.qsize())
if __name__ == '__main__':
    # Load configuration and set up logging from it.
    with open("config.yaml") as c:
        config = yaml.safe_load(c)
    logging.config.dictConfig(config["logging"])
    logging.getLogger("ALHP")
    # Single-instance guard via the /tmp lock file.
    if already_running():
        logging.error("Another instance is already running")
        sys.exit(2)
    if not os.path.exists(config["basedir"]["repo"]):
        pathlib.Path(config["basedir"]["repo"]).mkdir(parents=True, exist_ok=True)
    os.nice(5)
    # One-time setup: chroot, repo directories, databases, upstream checkouts.
    setup_chroot()
    sync_marchs_with_config()
    do_repo_work()
    update_svn2git()
    q = JoinableQueue()
    # Workers consume q forever inside run_worker (pool initializer).
    with Pool(config["build"]["worker"], initializer=run_worker) as pool:
        fill_queue()
        signal.signal(signal.SIGINT, signal.default_int_handler)
        while True:
            try:
                # Refresh upstream every 15 minutes, but only once the queue is idle.
                if time.time() - update_last > 900 and q.empty():
                    logging.info("[SVN2GIT] Waiting for queue to finish...")
                    q.join()
                    update_last = time.time()
                    update_svn2git()
                    setup_chroot()
                    fill_queue()
                    if q.qsize() > 0:
                        logging.info("[SVN2GIT] New Queue size: %d", q.qsize())
                else:
                    time.sleep(300)
                do_repo_work()
            except KeyboardInterrupt:
                # Wait for any in-flight package copy before shutting down.
                with copy_l:
                    pool.close()
                    pool.terminate()
                    q.close()
                    do_repo_work()
                    sys.exit(0)