#!/usr/bin/env python3
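"""ALHP build master: rebuilds Arch Linux packages from the upstream
svn2git mirrors with march-specific makepkg flags, signs the results
and publishes them into per-march pacman repositories."""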
import fcntl
import glob
import logging.config
import os
import pathlib
import re
import shutil
import signal
import subprocess
import sys
import time
from multiprocessing import Pool, current_process, Lock, JoinableQueue
from queue import Empty

import yaml
from humanfriendly import format_timespan
from packaging import version
from packaging.version import LegacyVersion

regex_pkgver = re.compile(r"^_?pkgver\s*=\s*(.+)$", re.MULTILINE)
regex_pkgrel = re.compile(r"^pkgrel\s*=\s*(.+)$", re.MULTILINE)
regex_march = re.compile(r"(-march=)(.+?) ", re.MULTILINE)
regex_validkeys = re.compile(r"^validpgpkeys\+?=\((.*?)\)", re.MULTILINE | re.DOTALL)

fp = None
update_last = time.time()
repo_lock = Lock()
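

# build(): run the full pipeline for a single package: write the
# march-specific makepkg.conf, import the PKGBUILD's PGP keys, bump
# pkgrel, build in a clean chroot via makechrootpkg, sign the resulting
# packages, copy them into the repo and update the repo database.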
def build(pkgbuild, repo):
    start_time = time.time()
    name = pathlib.Path(pkgbuild).parts[-4]
    process_name = current_process().name
    logging.info("[%s/%s/%s] Build starting (Queue ~= %s)", process_name, repo, name, q.qsize())

    # setup makepkg
    setup_makepkg(repo)
    # import pgp keys
    import_keys(pkgbuild)
    # increase pkgrel
    increase_pkgrel(pkgbuild)

    # build with devtools: -c cleans the chroot copy, -l names the
    # per-worker copy, -D bind-mounts the makepkg config dir, -r points
    # at the chroot base
    os.chdir(pathlib.Path(pkgbuild).parent)
    res = subprocess.run(
        ["makechrootpkg", "-c", "-D", config["basedir"]["makepkg"], "-l", process_name, "-r",
         config["basedir"]["chroot"], "--", "--config",
         os.path.join(config["basedir"]["makepkg"],
                      "makepkg-" + '-'.join(repo.split("-")[1:]) + ".conf")],
        stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    if res.returncode:
        logging.warning("[%s/%s/%s] Build failed. Check repo/logs for more information.", process_name, repo, name)
        # write package name to the failed list
        with open(os.path.join(config["basedir"]["repo"], repo + "_failed.txt"), "a") as f:
            f.write(name + "\n")
        # write logs
        if not os.path.exists(os.path.join(config["basedir"]["repo"], "logs", repo)):
            pathlib.Path(os.path.join(config["basedir"]["repo"], "logs", repo)).mkdir(parents=True,
                                                                                      exist_ok=True)
        with open(os.path.join(config["basedir"]["repo"], "logs", repo, name + ".log"), "w") as log:
            log.write(res.stdout.decode())
        return

    # signing
    pkgs = glob.glob("*.pkg.tar.zst")
    for pkg in pkgs:
        s_res = subprocess.run(["gpg", "--batch", "--detach-sign", pkg], stdout=subprocess.PIPE,
                               stderr=subprocess.STDOUT)
        if s_res.returncode:
            logging.error("[%s/%s/%s] Signing failed: %s", process_name, repo, name, s_res.stdout.decode())
            return

    # copying
    pkgs.extend(glob.glob("*.pkg.tar.zst.sig"))
    for pkg in pkgs:
        logging.debug("[%s/%s/%s] Copy %s to %s", process_name, repo, name, pkg,
                      os.path.join(config["basedir"]["repo"], repo, "os", config["arch"] + "/"))
        shutil.copy2(pkg, os.path.join(config["basedir"]["repo"], repo, "os", config["arch"] + "/"))

    # repo database update, serialized across workers via repo_lock
    repo_lock.acquire()
    r_res = subprocess.run(["repo-add", "-s", "-v",
                            os.path.join(config["basedir"]["repo"], repo, "os", config["arch"],
                                         repo + ".db.tar.xz"),
                            pkgs[0]], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    logging.debug("[REPO-ADD] %s", r_res.stdout.decode())
    if r_res.returncode:
        logging.error("[%s/%s/%s] Repo action failed: %s", process_name, repo, name, r_res.stdout.decode())
        repo_lock.release()
        return

    # drop superseded package versions from the repo dir, keeping one
    p_res = subprocess.run(
        ["paccache", "-rc", os.path.join(config["basedir"]["repo"], repo, "os", config["arch"]), "-k", "1"],
        stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    logging.debug("[PACCACHE] %s", p_res.stdout.decode())
    repo_lock.release()
    if p_res.returncode:
        logging.error("[%s/%s/%s] Repo cleanup failed: %s", process_name, repo, name, p_res.stdout.decode())
        return

    logging.info("[%s/%s/%s] Build successful (%s)", process_name, repo, name,
                 format_timespan(time.time() - start_time))
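

# run_worker(): process-pool initializer; each worker loops forever,
# pulling (pkgbuild, repo) tuples off the shared JoinableQueue and
# returning to the script directory after every build.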
def run_worker() -> None:
    os.nice(20)
    while True:
        try:
            build(*q.get(block=True))
        except Exception as e:
            logging.error("Error in worker: %s", e)
        finally:
            q.task_done()
            os.chdir(sys.path[0])
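

# already_running(): single-instance guard via an advisory lock on
# /tmp/alhp.lock; the descriptor is kept in the global fp so the lock
# is held for the lifetime of the process.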
def already_running():
    global fp
    fp = os.open("/tmp/alhp.lock", os.O_WRONLY | os.O_CREAT)
    try:
        fcntl.lockf(fp, fcntl.LOCK_EX | fcntl.LOCK_NB)
        return False
    except OSError:
        return True


def find_all_files_for_pkg(name, repo):
    searchpath = os.path.join(config["basedir"]["repo"], repo, "os", config["arch"], name + "-*.pkg.*")
    # filter signatures out instead of removing from the list while
    # iterating over it, which would skip entries
    return [p for p in glob.glob(searchpath) if not p.endswith(".sig")]


def get_failed_packages(repo):
    if os.path.exists(os.path.join(config["basedir"]["repo"], repo + "_failed.txt")):
        with open(os.path.join(config["basedir"]["repo"], repo + "_failed.txt")) as p:
            return p.read().splitlines()
    return []


def setup_chroot():
    if not os.path.exists(os.path.join(config["basedir"]["chroot"], "root")):
        pathlib.Path(config["basedir"]["chroot"]).mkdir(parents=True, exist_ok=True)
        logging.debug("[MKCHROOT] %s",
                      subprocess.run(
                          ["mkarchroot", os.path.join(config["basedir"]["chroot"], "root"), "base-devel"],
                          stdout=subprocess.PIPE, stderr=subprocess.STDOUT).stdout.decode(errors='ignore'))
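

# setup_makepkg(): write the per-march makepkg.conf from makepkg.tmpl:
# drop -mtune=generic, raise -O2 to -O3 and substitute the march taken
# from the repo name into -march=.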
def setup_makepkg(repo):
    makepkg_repo = os.path.join(config["basedir"]["makepkg"], "makepkg-" + '-'.join(repo.split("-")[1:]) + ".conf")
    if not os.path.exists(makepkg_repo):
        pathlib.Path(config["basedir"]["makepkg"]).mkdir(parents=True, exist_ok=True)
        shutil.copyfile("makepkg.tmpl", makepkg_repo)
        with open(makepkg_repo) as conf:
            c_all = conf.read()
        c_all = c_all.replace("-mtune=generic", "")
        c_all = c_all.replace("-O2", "-O3")
        c_all = regex_march.sub(r"\1" + '-'.join(repo.split("-")[1:]), c_all)
        with open(makepkg_repo, "w") as conf:
            conf.write(c_all)
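

# import_keys(): collect validpgpkeys entries from the PKGBUILD and
# fetch anything that looks like a full 40-character fingerprint from
# the Ubuntu keyserver so source signatures can be verified.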
def import_keys(pkgbuild):
    with open(pkgbuild, errors='ignore') as pkgb:
        keys_s = regex_validkeys.findall(pkgb.read())
    if keys_s:
        keys = []
        for k in keys_s:
            keys.extend(k.split(" "))
        for k in keys:
            k = k.strip().replace("'", "").replace("\"", "")
            if len(k) == 40:
                logging.debug("[GPG] %s",
                              subprocess.run(
                                  ["gpg", "--keyserver", "keyserver.ubuntu.com", "--recv-keys", k],
                                  stdout=subprocess.PIPE, stderr=subprocess.STDOUT).stdout.decode(errors='ignore'))
                logging.info("[GPG] Imported key %s", k)


def package_exists(name, repo):
    return len(find_all_files_for_pkg(name, repo)) > 0
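

# update_svn2git(): keep the upstream svn2git package mirrors current;
# shallow-clone on first run, otherwise clean, reset and pull.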
def update_svn2git():
    if not os.path.exists(config["basedir"]["upstream"]):
        pathlib.Path(config["basedir"]["upstream"]).mkdir(parents=True, exist_ok=True)
    for git_dir, git_url in config["svn2git"].items():
        git_path = os.path.join(config["basedir"]["upstream"], git_dir)
        if not os.path.exists(git_path):
            logging.debug("[GIT] %s",
                          subprocess.run(["git", "clone", "--depth=1", git_url, git_path], check=True,
                                         stdout=subprocess.PIPE, stderr=subprocess.STDOUT).stdout.decode())
        else:
            os.chdir(git_path)
            logging.debug("[GIT] %s", subprocess.run(["git", "clean", "-xdf"], check=False, stdout=subprocess.PIPE,
                                                     stderr=subprocess.STDOUT).stdout.decode())
            logging.debug("[GIT] %s", subprocess.run(["git", "reset", "--hard"], check=True, stdout=subprocess.PIPE,
                                                     stderr=subprocess.STDOUT).stdout.decode())
            logging.debug("[GIT] %s", subprocess.run(["git", "pull"], check=True, stdout=subprocess.PIPE,
                                                     stderr=subprocess.STDOUT).stdout.decode())
            os.chdir(sys.path[0])
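

# parse_pkgbuild(): read pkgver/pkgrel with regexes instead of sourcing
# the shell script; logs a warning and returns an empty version if
# either field is missing.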
def parse_pkgbuild(pkgbuild_file):
    with open(pkgbuild_file, errors='ignore') as p:
        pkgbuild_str = p.read()
    pkgver = regex_pkgver.findall(pkgbuild_str)
    pkgrel = regex_pkgrel.findall(pkgbuild_str)
    if not pkgver or not pkgrel:
        logging.warning("[%s] Failed to parse PKGBUILD", pkgbuild_file.split("/")[-4])
        return version.parse("")
    return LegacyVersion("{}-{}".format(pkgver[0], pkgrel[0]))


def increase_pkgrel(pkgbuild_file):
    # append ".1" to pkgrel so the rebuilt package supersedes upstream's
    with open(pkgbuild_file, errors='ignore') as p:
        pkgbuild_str = p.read()
    pkgbuild_str = regex_pkgrel.sub(r"pkgrel=\1.1", pkgbuild_str)
    with open(pkgbuild_file, "w") as pkg:
        pkg.write(pkgbuild_str)
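

# parse_repo(): derive the built version from the first matching package
# file, assuming the standard name-pkgver-pkgrel-arch.pkg.* file layout.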
def parse_repo(name, repo):
    ver_split = find_all_files_for_pkg(name, repo)[0].split("-")
    return LegacyVersion(ver_split[-3] + "-" + ver_split[-2])
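

# sync_marchs_with_config(): reconcile the on-disk repo layout with the
# configured repos x march matrix, creating missing repos and deleting
# ones that are no longer configured.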
def sync_marchs_with_config():
    repos = []
    with os.scandir(config["basedir"]["repo"]) as it:
        entry: os.DirEntry
        for entry in it:
            if not entry.name.startswith('logs') and entry.is_dir():
                repos.append(entry.name)

    repo_quota = []
    for r, a in ((x, y) for x in config["repos"] for y in config["march"]):
        repo_quota.append("{}-{}".format(r, a))
    logging.info("Repos: %s", repo_quota)

    repos_create = list(set(repo_quota) - set(repos))
    repos_delete = list(set(repos) - set(repo_quota))

    for repo in repos_create:
        logging.debug("Create repo %s: %s", repo, os.path.join(config["basedir"]["repo"], repo, "os/x86_64"))
        pathlib.Path(os.path.join(config["basedir"]["repo"], repo, "os/x86_64")).mkdir(parents=True, exist_ok=True)
        setup_makepkg(repo)

    for repo in repos_delete:
        logging.debug("Delete repo %s: %s", repo, os.path.join(config["basedir"]["repo"], repo))
        shutil.rmtree(os.path.join(config["basedir"]["repo"], repo))
        # remove the march config under the same name setup_makepkg()
        # creates it with; it may already be gone if shared with another repo
        makepkg_conf = os.path.join(config["basedir"]["makepkg"],
                                    "makepkg-" + '-'.join(repo.split("-")[1:]) + ".conf")
        if os.path.exists(makepkg_conf):
            os.remove(makepkg_conf)
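

# fill_queue(): scan all PKGBUILDs in the upstream mirrors and queue one
# build per configured march, skipping trunk, -any and i686 packages,
# unconfigured repos, blacklisted names and known-failing builds; a
# package is queued when it is missing from the repo or upstream is newer.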
def fill_queue():
    all_pkgbuild = []
    for git_dir, git_url in config["svn2git"].items():
        all_pkgbuild.extend(
            glob.glob(os.path.join(config["basedir"]["upstream"], git_dir) + "/**/PKGBUILD", recursive=True))

    to_delete = []
    for pkgbuild in all_pkgbuild:
        path_split = pkgbuild.split("/")
        # ignore pkgbuild if in trunk, an -any or i686 package, not in a
        # configured repo, or on the blacklist
        if path_split[-2] == "trunk" or path_split[-2].split("-")[0] not in config["repos"] \
                or "any" in path_split[-2] or "i686" in path_split[-2] \
                or path_split[-4] in config["blacklist"]:
            to_delete.append(pkgbuild)

    final_pkgbuilds = list(set(all_pkgbuild) - set(to_delete))
    for pkgb in final_pkgbuilds:
        for march in config["march"]:
            path_split = pkgb.split("/")
            name = path_split[-4]
            repo = path_split[-2].split("-")[0] + "-" + march
            if name in get_failed_packages(repo):
                logging.info("[%s/%s] Skipped due to failing build", repo, name)
                continue
            if package_exists(name, repo):
                logging.debug("[SEMVER] Comparing %s=%s - %s=%s", name, parse_repo(name, repo), name,
                              parse_pkgbuild(pkgb))
            if not package_exists(name, repo):
                q.put((pkgb, repo))
                logging.info("[%s/%s] Build queued (package not built yet)", repo, name)
            elif parse_repo(name, repo) < parse_pkgbuild(pkgb):
                q.put((pkgb, repo))
                logging.info("[%s/%s] Build queued (new version available %s < %s)", repo, name,
                             parse_repo(name, repo), parse_pkgbuild(pkgb))

    logging.info("Build queue size: %s", q.qsize())
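

# main: load config, ensure single instance, set up chroot and repos,
# then run a worker pool fed from the queue. The supervisor loop drains
# and rebuilds the queue when disk usage passes 90%, and otherwise
# refreshes the mirrors every 15 minutes once the queue is empty.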
if __name__ == '__main__':
    with open("config.yaml") as c:
        config = yaml.safe_load(c)
    logging.config.dictConfig(config["logging"])
    logging.getLogger("ALHP")

    if already_running():
        logging.error("Another instance is already running")
        sys.exit(2)

    if not os.path.exists(config["basedir"]["repo"]):
        pathlib.Path(config["basedir"]["repo"]).mkdir(parents=True, exist_ok=True)

    os.nice(5)
    setup_chroot()
    sync_marchs_with_config()
    update_svn2git()

    q = JoinableQueue()
    with Pool(config["build"]["worker"], initializer=run_worker) as pool:
        fill_queue()
        signal.signal(signal.SIGINT, signal.default_int_handler)
        while True:
            try:
                du = shutil.disk_usage(config["basedir"]["upstream"])
                if (du.used / du.total) > 0.9:
                    logging.warning("Less than 10% disk space remaining, performing cleanup...")
                    # drain pending builds so the queue can be rebuilt from scratch
                    while not q.empty():
                        try:
                            q.get(False)
                            q.task_done()
                        except Empty:
                            continue
                    logging.info("Waiting for remaining queue items to finish...")
                    q.join()
                    logging.info("Cleared queue, resetting upstream repos...")
                    update_svn2git()
                    logging.info("Cleanup done, refilling queue")
                    fill_queue()
                    time.sleep(60)
                elif time.time() - update_last > 900 and q.qsize() == 0:
                    update_last = time.time()
                    update_svn2git()
                    fill_queue()
                else:
                    time.sleep(60)
            except KeyboardInterrupt:
                pool.close()
                pool.terminate()
                q.close()
                sys.exit(0)