# NOTE: the upstream repository was archived on 2021-06-12 (read-only:
# viewable and cloneable, but no pushes, issues, or pull requests).
# File: alhp/master.py (304 lines, 10 KiB, Python)
#!/usr/bin/env python3
import fcntl
import glob
import logging.config
import os
import pathlib
import re
import shutil
import subprocess
import sys
import time
from queue import Queue, Empty
import yaml
from packaging import version
# Pre-compiled patterns for pulling fields out of PKGBUILD files and for
# rewriting makepkg.conf build flags.
regex_pkgver = re.compile(r"^pkgver\s*=\s*(.+)$", re.MULTILINE)  # pkgver=<value> line
regex_pkgrel = re.compile(r"^pkgrel\s*=\s*(.+)$", re.MULTILINE)  # pkgrel=<value> line
regex_march = re.compile(r"(-march=)(.+?) ", re.MULTILINE)  # -march=<target> inside CFLAGS
regex_validkeys = re.compile(r"^validpgpkeys\+?=\((.*?)\)", re.MULTILINE | re.DOTALL)  # validpgpkeys=( ... ) array
fp = None  # lock-file descriptor; held open for the process lifetime (see already_running)
q = Queue()  # build queue of (pkgbuild_path, repo) tuples, filled by fill_queue()
update_last = time.time()  # timestamp of the last svntogit sync (see main loop)
def already_running():
    """Return True if another ALHP instance already holds the lock.

    Takes a non-blocking exclusive lock on /tmp/alhp.lock; the descriptor
    is stored in the module-global ``fp`` so the lock stays held for the
    lifetime of this process.
    """
    global fp
    # plain string literal — the original used an f-string with nothing to
    # interpolate (F541)
    fp = os.open("/tmp/alhp.lock", os.O_WRONLY | os.O_CREAT)
    try:
        # LOCK_NB: fail immediately instead of blocking on the other instance
        fcntl.lockf(fp, fcntl.LOCK_EX | fcntl.LOCK_NB)
        return False
    except OSError:
        return True
def find_all_files_for_pkg(name, repo):
    """Return all built package files for *name* in *repo*, excluding .sig files.

    Globs <repo-basedir>/<repo>/os/<arch>/<name>-*.pkg.* under the configured
    repo root.
    """
    searchpath = os.path.join(config["basedir"]["repo"], repo, "os", config["arch"]) + "/" + name + "-*.pkg.*"
    # Build a filtered list instead of list.remove() inside the iteration:
    # removing while iterating skips the element that follows each removed
    # one, so two consecutive .sig files would leave the second in the result.
    return [p for p in glob.glob(searchpath) if not p.endswith(".sig")]
def get_failed_packages(repo):
    """Return the package names recorded in <repo>_failed.txt, or [] if absent."""
    failed_file = os.path.join(config["basedir"]["repo"], repo + "_failed.txt")
    if not os.path.exists(failed_file):
        return []
    with open(failed_file) as fh:
        return fh.read().splitlines()
def build_cleanup():
    """Remove untracked build artifacts from the checkout and return to the script dir."""
    result = subprocess.run(["git", "clean", "-xdf"], check=True,
                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    logging.debug("[GIT] %s", result.stdout.decode())
    os.chdir(sys.path[0])
def build(pkgbuild, repo):
    """Build *pkgbuild* for *repo* with devtools, then sign, copy and index it.

    On any step failure the error is logged and the checkout is cleaned; a
    failed compile is additionally recorded in <repo>_failed.txt so that
    fill_queue() skips the package on later runs.
    """
    start_time = time.time()
    # svntogit layout .../<name>/repos/<repo-arch>/PKGBUILD -> parts[-4] is the package name
    name = pathlib.Path(pkgbuild).parts[-4]
    logging.info("[%s/%s] Build starting (Queue ~= %s)", repo, name, q.qsize())
    # setup buildflags (rewrites makepkg.conf for this repo's -march)
    setup_makepkg(repo)
    # import pgp keys listed in the PKGBUILD so source signature checks pass
    import_keys(pkgbuild)
    # build with devtools; artifacts land in the PKGBUILD's directory
    os.chdir(pathlib.Path(pkgbuild).parent)
    res = subprocess.run(["sudo", "extra-x86_64-build"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    if res.returncode:
        logging.warning("[%s/%s] Build failed. Check repo/logs for more information.", repo, name)
        # write packagename to failed list (append: file accumulates across runs)
        with open(os.path.join(config["basedir"]["repo"], repo + "_failed.txt"), "a") as f:
            f.write(name + "\n")
        # write logs (captured stdout+stderr of the failed build)
        if not os.path.exists(os.path.join(config["basedir"]["repo"], "logs", repo)):
            pathlib.Path(os.path.join(config["basedir"]["repo"], "logs", repo)).mkdir(parents=True, exist_ok=True)
        with open(os.path.join(config["basedir"]["repo"], "logs", repo, name + ".log"), "w") as log:
            log.write(res.stdout.decode())
        build_cleanup()
        return
    # signing: detach-sign every produced package file
    pkgs = glob.glob("*.pkg.tar.zst")
    for pkg in pkgs:
        s_res = subprocess.run(["gpg", "--batch", "--detach-sign", pkg], stdout=subprocess.PIPE,
                               stderr=subprocess.STDOUT)
        if s_res.returncode:
            logging.error("[%s/%s] Signing failed: %s", repo, name, s_res.stdout.decode())
            build_cleanup()
            return
    # copying packages and their signatures into the repo tree
    pkgs.extend(glob.glob("*.pkg.tar.zst.sig"))
    for pkg in pkgs:
        logging.debug("[%s/%s] Copy %s to %s", repo, name, pkg,
                      os.path.join(config["basedir"]["repo"], repo, "os", config["arch"] + "/"))
        shutil.copy2(pkg, os.path.join(config["basedir"]["repo"], repo, "os", config["arch"] + "/"))
    # repo: add the package to the repo database (-s: sign db, -v: verify)
    # NOTE(review): only pkgs[0] is handed to repo-add — for split packages the
    # remaining artifacts are copied but never indexed; confirm this is intended.
    r_res = subprocess.run(["repo-add", "-s", "-v",
                            os.path.join(config["basedir"]["repo"], repo, "os", config["arch"], repo + ".db.tar.xz"),
                            pkgs[0]], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    logging.debug("[REPO-ADD] %s", r_res.stdout.decode())
    if r_res.returncode:
        logging.error("[%s/%s] Repo action failed: %s", repo, name, r_res.stdout.decode())
        build_cleanup()
        return
    # prune old package versions, keeping only the newest (-k 1)
    p_res = subprocess.run(
        ["paccache", "-rc", os.path.join(config["basedir"]["repo"], repo, "os", config["arch"]), "-k", "1"],
        stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    logging.debug("[PACCACHE] %s", p_res.stdout.decode())
    if p_res.returncode:
        logging.error("[%s/%s] Repo cleanup failed: %s", repo, name, p_res.stdout.decode())
        build_cleanup()
        return
    # cleanup
    build_cleanup()
    logging.info("[%s/%s] Build successful (%s)", repo, name, int(time.time() - start_time))
def setup_makepkg(repo):
    """Rewrite makepkg.conf flags for *repo* (a "<reponame>-<march>" string):
    drop -mtune=generic, raise -O2 to -O3 and retarget -march."""
    target_march = repo.split("-")[1]
    with open(config["basedir"]["makepkg"]) as fh:
        content = fh.read()
    content = content.replace("-mtune=generic", "").replace("-O2", "-O3")
    content = regex_march.sub(r"\1" + target_march + " ", content)
    with open(config["basedir"]["makepkg"], "w") as fh:
        fh.write(content)
def import_keys(pkgbuild):
    """Import every full-length PGP fingerprint listed in the PKGBUILD's
    validpgpkeys=() array from keyserver.ubuntu.com."""
    with open(pkgbuild, errors='ignore') as fh:
        matches = regex_validkeys.findall(fh.read())
    if not matches:
        return
    candidates = []
    for group in matches:
        candidates.extend(group.split(" "))
    for key in candidates:
        cleaned = key.strip().replace("'", "").replace("\"", "")
        # only full 40-character fingerprints are fetched
        if len(cleaned) == 40:
            res = subprocess.run(
                ["gpg", "--keyserver", "keyserver.ubuntu.com", "--recv-keys", cleaned],
                stdout=subprocess.PIPE,
                stderr=subprocess.STDOUT)
            logging.debug("[GPG] %s", res.stdout.decode())
            logging.info("[GPG] Imported key %s", cleaned)
def package_exists(name, repo):
    """Return True when at least one built package file for *name* exists in *repo*."""
    return bool(find_all_files_for_pkg(name, repo))
def update_git2svn():
    """Clone the svntogit-packages mirror, or clean and pull it if it already exists."""
    if not os.path.exists(config["basedir"]["svn2git"]):
        logging.debug("[GIT] %s", subprocess.run(
            ["git", "clone", "https://github.com/archlinux/svntogit-packages.git", config["basedir"]["svn2git"]],
            check=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT).stdout.decode())
    else:
        os.chdir(config["basedir"]["svn2git"])
        # drop any local leftovers (e.g. from interrupted builds) before pulling
        logging.debug("[GIT] %s", subprocess.run(["git", "clean", "-xdf"], check=True, stdout=subprocess.PIPE,
                                                 stderr=subprocess.STDOUT).stdout.decode())
        logging.debug("[GIT] %s", subprocess.run(["git", "pull"], check=True, stdout=subprocess.PIPE,
                                                 stderr=subprocess.STDOUT).stdout.decode())
        # undo the chdir above; NOTE(review): assumes svn2git is a direct child
        # of the previous working directory — chdir(sys.path[0]) would be safer
        os.chdir("..")
def parse_pkgbuild(pkgbuild_file):
    """Extract pkgver/pkgrel from *pkgbuild_file* and return a parsed version.

    Returns ``version.parse("<pkgver>-<pkgrel>")`` on success, or None after
    logging a warning when either field is missing.  The original indexed
    ``findall(...)[0]`` unconditionally, which raised IndexError on a missing
    field before the "Failed to parse" warning could ever fire.
    """
    with open(pkgbuild_file, errors='ignore') as p:
        pkgbuild_str = p.read()
    pkgver_match = regex_pkgver.search(pkgbuild_str)
    pkgrel_match = regex_pkgrel.search(pkgbuild_str)
    if not pkgver_match or not pkgrel_match:
        # path layout .../<name>/repos/<repo-arch>/PKGBUILD -> [-4] is the name
        logging.warning("[%s] Failed to parse pkgbuild", pkgbuild_file.split("/")[-4])
        return None
    return version.parse("{}-{}".format(pkgver_match.group(1), pkgrel_match.group(1)))
def parse_repo(name, repo):
    """Parse "<pkgver>-<pkgrel>" out of the first repo file found for *name*."""
    # filename shape: ...-<pkgver>-<pkgrel>-<arch>.pkg.tar.zst
    fragments = find_all_files_for_pkg(name, repo)[0].split("-")
    return version.parse("-".join(fragments[-3:-1]))
def sync_marchs_with_config():
    """Create/delete per-march repo directories so they match the config.

    The desired set is the cross product of config["repos"] and
    config["march"]; any other directory under the repo basedir (except
    the logs dir) is deleted.
    """
    existing = []
    with os.scandir(config["basedir"]["repo"]) as it:
        entry: os.DirEntry
        for entry in it:
            if not entry.name.startswith('logs') and entry.is_dir():
                existing.append(entry.name)
    wanted = ["{}-{}".format(r, m) for r in config["repos"] for m in config["march"]]
    logging.info("Repos: %s", wanted)
    for repo in set(wanted) - set(existing):
        # use config["arch"] — the original hard-coded "os/x86_64" here while
        # every other function (find_all_files_for_pkg, build) uses the config
        path = os.path.join(config["basedir"]["repo"], repo, "os", config["arch"])
        logging.debug("Create repo %s: %s", repo, path)
        pathlib.Path(path).mkdir(parents=True, exist_ok=True)
    for repo in set(existing) - set(wanted):
        logging.debug("Delete repo %s: %s", repo, os.path.join(config["basedir"]["repo"], repo))
        shutil.rmtree(os.path.join(config["basedir"]["repo"], repo))
def fill_queue():
    """Scan the svntogit checkout and queue every PKGBUILD that needs building.

    A (pkgbuild_path, repo) tuple is queued per configured march when the
    package is not yet in the repo or the PKGBUILD version is newer.
    """
    all_pkgbuild = glob.glob(os.path.join(config["basedir"]["svn2git"]) + "/**/PKGBUILD", recursive=True)
    to_delete = []
    for pkgbuild in all_pkgbuild:
        path_split = pkgbuild.split("/")
        # ignore pkgbuild if in trunk, -any package, not in repos, or on blacklist
        if path_split[-2] == "trunk" or path_split[-2].split("-")[0] not in config[
                "repos"] or "any" in path_split[-2] or path_split[-4] in config["blacklist"]:
            to_delete.append(pkgbuild)
    final_pkgbuilds = list(set(all_pkgbuild) - set(to_delete))
    for pkgb in final_pkgbuilds:
        for march in config["march"]:
            path_split = pkgb.split("/")
            # layout .../<name>/repos/<repo-arch>/PKGBUILD
            name = path_split[-4]
            repo = path_split[-2].split("-")[0] + "-" + march
            if name in get_failed_packages(repo):
                logging.info("[%s/%s] Skipped due to failing build", repo, name)
                continue
            if package_exists(name, repo):
                logging.debug("[SEMVER] Comparing %s=%s - %s=%s", name, parse_repo(name, repo), name,
                              parse_pkgbuild(pkgb))
            if not package_exists(name, repo):
                q.put((pkgb, repo))
                logging.debug("[%s/%s] Build queued (package not build yet)", repo, name)
            elif parse_repo(name, repo) < parse_pkgbuild(pkgb):
                # repo copy is older than the PKGBUILD -> rebuild
                q.put((pkgb, repo))
                logging.debug("[%s/%s] Build queued (new version available %s < %s)", repo, name,
                              parse_repo(name, repo), parse_pkgbuild(pkgb))
    logging.info("Build queue size: %s", q.qsize())
if __name__ == '__main__':
    # load configuration and set up logging before anything else
    with open("config.yaml") as c:
        config = yaml.safe_load(c)
    logging.config.dictConfig(config["logging"])
    logging.getLogger("ALHP")
    # single-instance guard via /tmp/alhp.lock
    if already_running():
        logging.error("Another instance is already running")
        sys.exit(2)
    if not os.path.exists(config["basedir"]["repo"]):
        pathlib.Path(config["basedir"]["repo"]).mkdir(parents=True, exist_ok=True)
    sync_marchs_with_config()
    update_git2svn()
    fill_queue()
    # main loop: drain the build queue; when idle, poll for upstream changes
    while True:
        if q.qsize() > 0:
            try:
                build(*q.get_nowait())
            except Empty:
                pass
        else:
            time.sleep(60)
            # refresh the svntogit mirror at most every 15 minutes while idle
            # NOTE(review): indentation reconstructed — this refresh is assumed
            # to live in the idle branch; confirm against the original layout
            if time.time() - update_last > 900:
                update_git2svn()
                update_last = time.time()
                fill_queue()