added multiple workers, added community

This commit is contained in:
2021-05-18 19:36:41 +02:00
parent 1ff6c8ec2b
commit 9025738c55
3 changed files with 171 additions and 124 deletions

View File

@@ -4,20 +4,24 @@ repos:
- extra - extra
- community - community
svn2git:
upstream-core-extra: "https://github.com/archlinux/svntogit-packages.git"
upstream-community: "https://github.com/archlinux/svntogit-community.git"
basedir: basedir:
repo: /tmp/alhp/ repo: /home/harting/repo/
svn2git: upstream/ chroot: /home/harting/chroot/
makepkg: /usr/share/devtools/makepkg-x86_64.conf makepkg: /home/harting/Projects/ALHP/makepkg/
march: march:
- znver2 - x86-64-v3
- sandybridge
- ivybridge
- skylake
blacklist: blacklist:
- pacman - pacman
build:
worker: 4
logging: logging:
version: 1 version: 1
disable_existing_loggers: True disable_existing_loggers: True

View File

@@ -1,6 +1,4 @@
#!/hint/bash #!/hint/bash
# shellcheck disable=2034
# #
# /etc/makepkg.conf # /etc/makepkg.conf
# #
@@ -38,15 +36,20 @@ CARCH="x86_64"
CHOST="x86_64-pc-linux-gnu" CHOST="x86_64-pc-linux-gnu"
#-- Compiler and Linker Flags #-- Compiler and Linker Flags
CPPFLAGS="-D_FORTIFY_SOURCE=2" #CPPFLAGS=""
CFLAGS="-march=skylake -O3 -pipe -fno-plt" CFLAGS="-march=x86-64 -mtune=generic -O2 -pipe -fno-plt -fexceptions \
CXXFLAGS="-march=skylake -O3 -pipe -fno-plt" -Wp,-D_FORTIFY_SOURCE=2,-D_GLIBCXX_ASSERTIONS \
-Wformat -Werror=format-security \
-fstack-clash-protection -fcf-protection"
CXXFLAGS="$CFLAGS"
LDFLAGS="-Wl,-O1,--sort-common,--as-needed,-z,relro,-z,now" LDFLAGS="-Wl,-O1,--sort-common,--as-needed,-z,relro,-z,now"
#RUSTFLAGS="-C opt-level=2"
#-- Make Flags: change this for DistCC/SMP systems #-- Make Flags: change this for DistCC/SMP systems
MAKEFLAGS="-j5" MAKEFLAGS="-j2"
#-- Debugging flags #-- Debugging flags
DEBUG_CFLAGS="-g -fvar-tracking-assignments" DEBUG_CFLAGS="-g -fvar-tracking-assignments"
DEBUG_CXXFLAGS="-g -fvar-tracking-assignments" DEBUG_CXXFLAGS="-g -fvar-tracking-assignments"
#DEBUG_RUSTFLAGS="-C debuginfo=2"
######################################################################### #########################################################################
# BUILD ENVIRONMENT # BUILD ENVIRONMENT
@@ -89,7 +92,7 @@ BUILDENV=(!distcc !color !ccache !check !sign)
# #
OPTIONS=(strip docs !libtool !staticlibs emptydirs zipman purge !debug) OPTIONS=(strip docs !libtool !staticlibs emptydirs zipman purge !debug)
#-- File integrity checks to use. Valid: md5, sha1, sha256, sha384, sha512 #-- File integrity checks to use. Valid: md5, sha1, sha224, sha256, sha384, sha512, b2
INTEGRITY_CHECK=(md5) INTEGRITY_CHECK=(md5)
#-- Options to be used when stripping binaries. See `man strip' for details. #-- Options to be used when stripping binaries. See `man strip' for details.
STRIP_BINARIES="--strip-all" STRIP_BINARIES="--strip-all"
@@ -121,7 +124,7 @@ DBGSRCDIR="/usr/src/debug"
#-- Log files: specify a fixed directory where all log files will be placed #-- Log files: specify a fixed directory where all log files will be placed
#LOGDEST=/home/makepkglogs #LOGDEST=/home/makepkglogs
#-- Packager: name/email of the person or organization building packages #-- Packager: name/email of the person or organization building packages
PACKAGER="Archlinux CIE <cie@harting.dev>" #PACKAGER="John Doe <john@doe.com>"
#-- Specify a key to use for package signing #-- Specify a key to use for package signing
#GPGKEY="" #GPGKEY=""
@@ -132,7 +135,7 @@ PACKAGER="Archlinux CIE <cie@harting.dev>"
COMPRESSGZ=(gzip -c -f -n) COMPRESSGZ=(gzip -c -f -n)
COMPRESSBZ2=(bzip2 -c -f) COMPRESSBZ2=(bzip2 -c -f)
COMPRESSXZ=(xz -c -z -) COMPRESSXZ=(xz -c -z -)
COMPRESSZST=(zstd -c -T0 --ultra -20 -) COMPRESSZST=(zstd -c -z -q -)
COMPRESSLRZ=(lrzip -q) COMPRESSLRZ=(lrzip -q)
COMPRESSLZO=(lzop -q) COMPRESSLZO=(lzop -q)
COMPRESSZ=(compress -c -f) COMPRESSZ=(compress -c -f)
@@ -144,6 +147,4 @@ COMPRESSLZ=(lzip -c -f)
######################################################################### #########################################################################
# #
PKGEXT='.pkg.tar.zst' PKGEXT='.pkg.tar.zst'
SRCEXT='.src.tar.gz' SRCEXT='.src.tar.gz'
# vim: set ft=sh ts=2 sw=2 et:

252
master.py
View File

@@ -7,10 +7,11 @@ import os
import pathlib import pathlib
import re import re
import shutil import shutil
import signal
import subprocess import subprocess
import sys import sys
import time import time
from queue import Queue, Empty from multiprocessing import Pool, Queue, current_process
import yaml import yaml
from packaging import version from packaging import version
@@ -20,10 +21,93 @@ regex_pkgrel = re.compile(r"^pkgrel\s*=\s*(.+)$", re.MULTILINE)
regex_march = re.compile(r"(-march=)(.+?) ", re.MULTILINE) regex_march = re.compile(r"(-march=)(.+?) ", re.MULTILINE)
regex_validkeys = re.compile(r"^validpgpkeys\+?=\((.*?)\)", re.MULTILINE | re.DOTALL) regex_validkeys = re.compile(r"^validpgpkeys\+?=\((.*?)\)", re.MULTILINE | re.DOTALL)
fp = None fp = None
q = Queue()
update_last = time.time() update_last = time.time()
def build(pkgbuild, repo):
    """Build one package in the chroot with devtools, then sign the results,
    copy them into the repo tree and refresh the repo database.

    pkgbuild -- path to the package's PKGBUILD inside the svn2git checkout
    repo     -- target repo name, e.g. "extra-x86-64-v3"; everything after the
                first dash selects the makepkg-<march>.conf to build with
    """
    start_time = time.time()
    # svn2git layout is .../<pkgname>/repos/<repo>/PKGBUILD, so parts[-4]
    # is the package name
    name = pathlib.Path(pkgbuild).parts[-4]
    process_name = current_process().name

    logging.info("[%s/%s] Build starting (Queue ~= %s)", repo, name, q.qsize())

    # ensure the march-specific makepkg config exists and is up to date
    setup_makepkg(repo)

    # import the PKGBUILD's validpgpkeys so source signature checks pass
    import_keys(pkgbuild)

    # build with devtools; -l gives every worker its own chroot copy so the
    # pool members do not trample each other
    os.chdir(pathlib.Path(pkgbuild).parent)
    # join the conf name as a path component instead of raw string
    # concatenation, which only worked when the configured dir had a
    # trailing slash
    makepkg_conf = os.path.join(sys.path[0], config["basedir"]["makepkg"],
                                "makepkg-" + '-'.join(repo.split("-")[1:]) + ".conf")
    res = subprocess.run(
        ["makechrootpkg", "-D", os.path.join(sys.path[0], config["basedir"]["makepkg"]), "-l", process_name, "-r",
         os.path.join(sys.path[0], config["basedir"]["chroot"]), "--", "--config", makepkg_conf],
        stdout=subprocess.PIPE, stderr=subprocess.STDOUT)

    if res.returncode:
        logging.warning("[%s/%s] Build failed. Check repo/logs for more information.", repo, name)

        # write packagename to failed list
        with open(os.path.join(config["basedir"]["repo"], repo + "_failed.txt"), "a") as f:
            f.write(name + "\n")

        # write logs; create the directory at the same path the log file is
        # written to (the original checked one path but created another,
        # prefixed with sys.path[0])
        log_dir = os.path.join(config["basedir"]["repo"], "logs", repo)
        pathlib.Path(log_dir).mkdir(parents=True, exist_ok=True)
        with open(os.path.join(log_dir, name + ".log"), "w") as log:
            log.write(res.stdout.decode())

        build_cleanup()
        return

    # signing
    pkgs = glob.glob("*.pkg.tar.zst")
    if not pkgs:
        # guard: a "successful" run that produced no packages would
        # otherwise crash below on pkgs[0]
        logging.error("[%s/%s] Build succeeded but produced no packages", repo, name)
        build_cleanup()
        return
    for pkg in pkgs:
        s_res = subprocess.run(["gpg", "--batch", "--detach-sign", pkg], stdout=subprocess.PIPE,
                               stderr=subprocess.STDOUT)
        if s_res.returncode:
            logging.error("[%s/%s] Signing failed: %s", repo, name, s_res.stdout.decode())
            build_cleanup()
            return

    # copy packages and their signatures into the repo tree
    pkgs.extend(glob.glob("*.pkg.tar.zst.sig"))
    target_dir = os.path.join(config["basedir"]["repo"], repo, "os", config["arch"] + "/")
    for pkg in pkgs:
        logging.debug("[%s/%s] Copy %s to %s", repo, name, pkg, target_dir)
        shutil.copy2(pkg, target_dir)

    # update the repo database
    # NOTE(review): only the first .zst is added; split packages from one
    # PKGBUILD would not all be indexed — confirm this is intended
    r_res = subprocess.run(["repo-add", "-s", "-v",
                            os.path.join(config["basedir"]["repo"], repo, "os", config["arch"],
                                         repo + ".db.tar.xz"),
                            pkgs[0]], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    logging.debug("[REPO-ADD] %s", r_res.stdout.decode())
    if r_res.returncode:
        logging.error("[%s/%s] Repo action failed: %s", repo, name, r_res.stdout.decode())
        build_cleanup()
        return

    # drop all but the newest cached version of each package
    p_res = subprocess.run(
        ["paccache", "-rc", os.path.join(config["basedir"]["repo"], repo, "os", config["arch"]), "-k", "1"],
        stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    logging.debug("[PACCACHE] %s", p_res.stdout.decode())
    if p_res.returncode:
        logging.error("[%s/%s] Repo cleanup failed: %s", repo, name, p_res.stdout.decode())
        build_cleanup()
        return

    # cleanup
    build_cleanup()
    logging.info("[%s/%s] Build successful (%s)", repo, name, int(time.time() - start_time))
def run_worker() -> None:
    """Pool worker entry point.

    Lowers the worker's scheduling priority, then loops forever pulling
    (pkgbuild, repo) jobs off the shared queue and building them; the get
    blocks while the queue is empty.
    """
    os.nice(20)
    while True:
        job = q.get(block=True)
        build(*job)
def already_running(): def already_running():
global fp global fp
fp = os.open(f"/tmp/alhp.lock", os.O_WRONLY | os.O_CREAT) fp = os.open(f"/tmp/alhp.lock", os.O_WRONLY | os.O_CREAT)
@@ -63,85 +147,29 @@ def build_cleanup():
os.chdir(sys.path[0]) os.chdir(sys.path[0])
def build(pkgbuild, repo): def setup_chroot():
start_time = time.time() if not os.path.exists(os.path.join(config["basedir"]["chroot"], "root")):
name = pathlib.Path(pkgbuild).parts[-4] pathlib.Path(config["basedir"]["chroot"]).mkdir(parents=True, exist_ok=True)
logging.info("[%s/%s] Build starting (Queue ~= %s)", repo, name, q.qsize()) logging.debug("[MKCHROOT] %s",
subprocess.run(
# setup buildflags ["mkarchroot", os.path.join(config["basedir"]["chroot"], "root"), "base-devel"],
setup_makepkg(repo) stdout=subprocess.PIPE, stderr=subprocess.STDOUT).stdout.decode(errors='ignore'))
# import pgp keys
import_keys(pkgbuild)
# build with devtools
os.chdir(pathlib.Path(pkgbuild).parent)
res = subprocess.run(["sudo", "extra-x86_64-build"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
if res.returncode:
logging.warning("[%s/%s] Build failed. Check repo/logs for more information.", repo, name)
# write packagename to failed list
with open(os.path.join(config["basedir"]["repo"], repo + "_failed.txt"), "a") as f:
f.write(name + "\n")
# write logs
if not os.path.exists(os.path.join(config["basedir"]["repo"], "logs", repo)):
pathlib.Path(os.path.join(config["basedir"]["repo"], "logs", repo)).mkdir(parents=True, exist_ok=True)
with open(os.path.join(config["basedir"]["repo"], "logs", repo, name + ".log"), "w") as log:
log.write(res.stdout.decode())
build_cleanup()
return
# signing
pkgs = glob.glob("*.pkg.tar.zst")
for pkg in pkgs:
s_res = subprocess.run(["gpg", "--batch", "--detach-sign", pkg], stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
if s_res.returncode:
logging.error("[%s/%s] Signing failed: %s", repo, name, s_res.stdout.decode())
build_cleanup()
return
# copying
pkgs.extend(glob.glob("*.pkg.tar.zst.sig"))
for pkg in pkgs:
logging.debug("[%s/%s] Copy %s to %s", repo, name, pkg,
os.path.join(config["basedir"]["repo"], repo, "os", config["arch"] + "/"))
shutil.copy2(pkg, os.path.join(config["basedir"]["repo"], repo, "os", config["arch"] + "/"))
# repo
r_res = subprocess.run(["repo-add", "-s", "-v",
os.path.join(config["basedir"]["repo"], repo, "os", config["arch"], repo + ".db.tar.xz"),
pkgs[0]], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
logging.debug("[REPO-ADD] %s", r_res.stdout.decode())
if r_res.returncode:
logging.error("[%s/%s] Repo action failed: %s", repo, name, r_res.stdout.decode())
build_cleanup()
return
p_res = subprocess.run(
["paccache", "-rc", os.path.join(config["basedir"]["repo"], repo, "os", config["arch"]), "-k", "1"],
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
logging.debug("[PACCACHE] %s", p_res.stdout.decode())
if p_res.returncode:
logging.error("[%s/%s] Repo cleanup failed: %s", repo, name, p_res.stdout.decode())
build_cleanup()
return
# cleanup
build_cleanup()
logging.info("[%s/%s] Build successful (%s)", repo, name, int(time.time() - start_time))
def setup_makepkg(repo): def setup_makepkg(repo):
with open(config["basedir"]["makepkg"]) as conf: makepkg_repo = os.path.join(config["basedir"]["makepkg"], "makepkg-" + '-'.join(repo.split("-")[1:]) + ".conf")
c_all = conf.read()
c_all = c_all.replace("-mtune=generic", "") if not os.path.exists(makepkg_repo):
c_all = c_all.replace("-O2", "-O3") pathlib.Path(config["basedir"]["makepkg"]).mkdir(parents=True, exist_ok=True)
c_all = regex_march.sub(r"\1" + repo.split("-")[1] + " ", c_all) shutil.copyfile("makepkg.tmpl", makepkg_repo)
with open(config["basedir"]["makepkg"], "w") as conf:
conf.write(c_all) with open(makepkg_repo) as conf:
c_all = conf.read()
c_all = c_all.replace("-mtune=generic", "")
c_all = c_all.replace("-O2", "-O3")
c_all = regex_march.sub(r"\1" + '-'.join(repo.split("-")[1:]) + " ", c_all)
with open(makepkg_repo, "w") as conf:
conf.write(c_all)
def import_keys(pkgbuild): def import_keys(pkgbuild):
@@ -174,17 +202,18 @@ def package_exists(name, repo):
def update_git2svn(): def update_git2svn():
if not os.path.exists(config["basedir"]["svn2git"]): for git_dir, git_url in config["svn2git"].items():
logging.debug("[GIT] %s", subprocess.run( if not os.path.exists(git_dir):
["git", "clone", "https://github.com/archlinux/svntogit-packages.git", config["basedir"]["svn2git"]], logging.debug("[GIT] %s",
check=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT).stdout.decode()) subprocess.run(["git", "clone", git_url, git_dir], check=True, stdout=subprocess.PIPE,
else: stderr=subprocess.STDOUT).stdout.decode())
os.chdir(config["basedir"]["svn2git"]) else:
logging.debug("[GIT] %s", subprocess.run(["git", "clean", "-xdf"], check=True, stdout=subprocess.PIPE, os.chdir(git_dir)
stderr=subprocess.STDOUT).stdout.decode()) logging.debug("[GIT] %s", subprocess.run(["git", "clean", "-xdf"], check=True, stdout=subprocess.PIPE,
logging.debug("[GIT] %s", subprocess.run(["git", "pull"], check=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT).stdout.decode())
stderr=subprocess.STDOUT).stdout.decode()) logging.debug("[GIT] %s", subprocess.run(["git", "pull"], check=True, stdout=subprocess.PIPE,
os.chdir("..") stderr=subprocess.STDOUT).stdout.decode())
os.chdir("..")
def parse_pkgbuild(pkgbuild_file): def parse_pkgbuild(pkgbuild_file):
@@ -226,22 +255,28 @@ def sync_marchs_with_config():
for repo in repos_create: for repo in repos_create:
logging.debug("Create repo %s: %s", repo, os.path.join(config["basedir"]["repo"], repo, "os/x86_64")) logging.debug("Create repo %s: %s", repo, os.path.join(config["basedir"]["repo"], repo, "os/x86_64"))
pathlib.Path(os.path.join(config["basedir"]["repo"], repo, "os/x86_64")).mkdir(parents=True, exist_ok=True) pathlib.Path(os.path.join(config["basedir"]["repo"], repo, "os/x86_64")).mkdir(parents=True, exist_ok=True)
setup_makepkg(repo)
for repo in repos_delete: for repo in repos_delete:
logging.debug("Delete repo %s: %s", repo, os.path.join(config["basedir"]["repo"], repo)) logging.debug("Delete repo %s: %s", repo, os.path.join(config["basedir"]["repo"], repo))
shutil.rmtree(os.path.join(config["basedir"]["repo"], repo)) shutil.rmtree(os.path.join(config["basedir"]["repo"], repo))
os.remove(os.path.join(config["basedir"]["makepkg"], "makepkg-" + repo + ".conf"))
def fill_queue(): def fill_queue():
all_pkgbuild = glob.glob(os.path.join(config["basedir"]["svn2git"]) + "/**/PKGBUILD", recursive=True) all_pkgbuild = []
for git_dir, git_url in config["svn2git"].items():
all_pkgbuild.extend(glob.glob(os.path.join(git_dir) + "/**/PKGBUILD", recursive=True))
to_delete = [] to_delete = []
for pkgbuild in all_pkgbuild: for pkgbuild in all_pkgbuild:
path_split = pkgbuild.split("/") path_split = pkgbuild.split("/")
# ignore pkgbuild if in trunk, -any package, not in repos, or on blacklist # ignore pkgbuild if in trunk, -any package, not in repos, or on blacklist
if path_split[-2] == "trunk" or path_split[-2].split("-")[0] not in config[ if path_split[-2] == "trunk" or path_split[-2].split("-")[0] not in config["repos"] or "any" in path_split[-2] \
"repos"] or "any" in path_split[-2] or path_split[-4] in config["blacklist"]: or path_split[-4] in config["blacklist"]:
to_delete.append(pkgbuild) to_delete.append(pkgbuild)
final_pkgbuilds = list(set(all_pkgbuild) - set(to_delete)) final_pkgbuilds = list(set(all_pkgbuild) - set(to_delete))
@@ -285,20 +320,27 @@ if __name__ == '__main__':
if not os.path.exists(config["basedir"]["repo"]): if not os.path.exists(config["basedir"]["repo"]):
pathlib.Path(config["basedir"]["repo"]).mkdir(parents=True, exist_ok=True) pathlib.Path(config["basedir"]["repo"]).mkdir(parents=True, exist_ok=True)
os.nice(5)
setup_chroot()
sync_marchs_with_config() sync_marchs_with_config()
update_git2svn() update_git2svn()
fill_queue() q = Queue()
while True: with Pool(config["build"]["worker"], initializer=run_worker) as pool:
if q.qsize() > 0: fill_queue()
signal.signal(signal.SIGINT, signal.default_int_handler)
while True:
try: try:
build(*q.get_nowait()) if time.time() - update_last > 900:
except Empty: update_last = time.time()
pass update_git2svn()
else: fill_queue()
time.sleep(60) else:
time.sleep(60)
if time.time() - update_last > 900: except KeyboardInterrupt:
update_git2svn() pool.close()
update_last = time.time() pool.terminate()
fill_queue() q.close()
sys.exit(0)