Compare commits

...

48 Commits

SHA1 Message Date
ed150e0433 added deprecation notice 2021-06-12 20:02:21 +02:00
2fb7387cf8 fixed truncate 2021-06-04 17:06:07 +02:00
6af405ca14 remove old pkg versions from failed 2021-06-04 16:07:42 +02:00
6d2e371500 add version to failed package lists, so that a later version may be built again 2021-06-03 20:01:45 +02:00
208f70472c reverted previous blacklist changes 2021-06-02 14:06:25 +02:00
1d5bc7922d only look at relevant pkgbuilds 2021-06-02 13:46:11 +02:00
d3402c188e improved logging 2021-06-02 13:38:16 +02:00
e09f46b4e5 fixed some issues introduced with delete_package 2021-06-02 12:57:32 +02:00
783575045f delete packages failing to build or not meant to build 2021-06-02 01:44:23 +02:00
e625fcc4fc optimize fill_queue 2021-06-01 16:47:36 +02:00
d007ea6bf7 fade out sys.path II 2021-06-01 14:02:06 +02:00
f18830c894 fade out sys.path 2021-06-01 13:56:28 +02:00
5863c42532 don't change back to script dir if not changed beforehand 2021-06-01 13:44:03 +02:00
eeeab58ac9 do repo before and after build 2021-06-01 13:40:34 +02:00
6391cf8789 improved logging, 2nd edition 2021-05-31 20:49:45 +02:00
2e7129a776 improved logging 2021-05-31 20:29:12 +02:00
b772c5d320 handle failed makepkg call 2021-05-31 17:23:14 +02:00
d3898b2b82 build and fail pkgbuilds, compare pkgnames for version and existence 2021-05-31 15:30:47 +02:00
c77a632653 fixed parse_pkgbuild not returning anything; some logic fixes 2021-05-31 14:35:19 +02:00
ce43b426ac moved functions to utils; reworked PKGBUILD parsing 2021-05-31 14:17:41 +02:00
1e224335ea some more repo-add fixes 2021-05-30 20:26:27 +02:00
48de5f1782 a shell for repo-add 2021-05-30 19:50:46 +02:00
79e305f713 this needs to be executed in the right folder 2021-05-30 19:19:45 +02:00
387260480a signing needs to go into the correct path 2021-05-30 18:46:14 +02:00
1094efcec5 fixed repo parsing 2021-05-30 18:15:40 +02:00
b6dfbbb643 changed way signing works 2021-05-30 18:09:47 +02:00
9f557b9f6b fixed multipkgs not getting parsed correctly 2021-05-30 18:02:23 +02:00
718906efe6 refresh root chroot while running 2021-05-29 15:05:51 +02:00
d7eab66500 add to repo-add queue while in copy lock 2021-05-28 04:25:16 +02:00
8f396ead36 clean working dir after build 2021-05-28 03:54:20 +02:00
df931bece2 more logging for refresh timer 2021-05-27 16:46:41 +02:00
e2c3369e5b removed not working prio, needs to be reimplemented 2021-05-26 19:49:50 +02:00
d019772eed added type hints 2021-05-26 17:46:33 +02:00
65afb113ec unify logging in repo_worker 2021-05-26 17:43:01 +02:00
4573b2b72d sane defaults, again 2021-05-26 17:16:48 +02:00
f658702f32 prioritise rebuild over new packages 2021-05-26 17:14:19 +02:00
137b963969 fixed repo maintenance 2021-05-26 17:09:46 +02:00
528f5c387b added more logging 2021-05-26 12:21:38 +02:00
d8dfce570f fixed up repo_work 2021-05-26 12:19:16 +02:00
340e97d4bb join queue before updating svn2git 2021-05-26 11:26:38 +02:00
bb257d06e8 moved adding to db to main thread
In there we can bundle all adds and shut down more cleanly.
Also this fixes split-packages not getting added.
2021-05-25 20:08:18 +02:00
1b5c0912b8 be more verbose if gpg key import fails 2021-05-25 19:17:55 +02:00
1257e6e49b update root chroot on start (runtime updating on todo) 2021-05-24 22:50:24 +02:00
d1789f8d96 added type hints; cleaner shutdown; add epoch parsing 2021-05-24 21:57:26 +02:00
ebf9a7c9b3 add warning for unsupported cpus 2021-05-23 17:39:50 +02:00
dd4e0d26f3 added -c to makechrootpkg
maybe fixed deadlocking if over 90% usage on start?
2021-05-23 11:35:44 +02:00
a1db6ef4d0 added tensorflow to blacklist
based makepkg.tmpl on makepkg.conf from devtools
2021-05-22 01:38:38 +02:00
7e17608dfa updated .gitignore 2021-05-21 22:58:38 +02:00
6 changed files with 285 additions and 188 deletions

.gitignore

@@ -1,4 +1,3 @@
# Created by https://www.toptal.com/developers/gitignore/api/linux,python,pycharm+all,windows
# Edit at https://www.toptal.com/developers/gitignore?templates=linux,python,pycharm+all,windows
@@ -269,5 +268,3 @@ $RECYCLE.BIN/
*.lnk
# End of https://www.toptal.com/developers/gitignore/api/linux,python,pycharm+all,windows
al_upstream/

README.md

@@ -1,9 +1,27 @@
# Deprecation notice
This was replaced with [ALHP.GO](https://git.harting.dev/anonfunc/ALHP.GO); please open any issues or PRs there.
# alhp
Build script for Arch Linux instruction-set-enabled repos.
All packages are built with `-march=<cpu-set>` and `-O3`. Some packages will not build with `-O3`; those are simply provided from the official repos as usual.
## Check your system for support
**Important**: Before you enable any of these repos, check whether your system supports x86-64-v3. You can do that with `/lib/ld-linux-x86-64.so.2 --help`. If you don't check beforehand, you might end up with an unbootable system and have to downgrade any packages you upgraded.
Example output snippet for a system supporting up to `x86-64-v3`:
```
Subdirectories of glibc-hwcaps directories, in priority order:
  x86-64-v4
  x86-64-v3 (supported, searched)
  x86-64-v2 (supported, searched)
```
## Enable Repos
To enable these complementary repos, add them above the regular repos in `/etc/pacman.conf`.
### Example pacman.conf
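A minimal sketch of what such an entry could look like. The repo name follows the `<repo>-<march>` scheme used above; the `Server` URL is a placeholder, not the project's actual mirror. Pacman uses the first repo that provides a package, so the complement entry must sit above the stock one.

```
[core-x86-64-v3]
# placeholder mirror URL, substitute the real ALHP server
Server = https://example.com/$repo/os/$arch

[core]
Include = /etc/pacman.d/mirrorlist
```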


@@ -9,16 +9,19 @@ svn2git:
  upstream-community: "https://github.com/archlinux/svntogit-community.git"
basedir:
  repo: /tmp/repo/
  chroot: /tmp/chroot/
  makepkg: /tmp/makepkg/
  upstream: /tmp/upstream/
  repo: /var/lib/alhp/repo/
  chroot: /var/lib/alhp/chroot/
  makepkg: /var/lib/alhp/makepkg/
  upstream: /var/lib/alhp/upstream/
march:
  - x86-64-v3
blacklist:
  - pacman
  - tensorflow
  - tensorflow-cuda
  - gcc
build:
  worker: 4

makepkg.tmpl

@@ -1,4 +1,6 @@
#!/hint/bash
# shellcheck disable=2034
#
# /etc/makepkg.conf
#
@@ -13,7 +15,7 @@ DLAGENTS=('file::/usr/bin/curl -gqC - -o %o %u'
'ftp::/usr/bin/curl -gqfC - --ftp-pasv --retry 3 --retry-delay 3 -o %o %u'
'http::/usr/bin/curl -gqb "" -fLC - --retry 3 --retry-delay 3 -o %o %u'
'https::/usr/bin/curl -gqb "" -fLC - --retry 3 --retry-delay 3 -o %o %u'
'rsync::/usr/bin/rsync --no-motd -z %u %o'
'rsync::/usr/bin/rsync --no-motd -zz %u %o'
'scp::/usr/bin/scp -C %u %o')
# Other common tools:
@@ -36,20 +38,15 @@ CARCH="x86_64"
CHOST="x86_64-pc-linux-gnu"
#-- Compiler and Linker Flags
#CPPFLAGS=""
CFLAGS="-march=x86-64 -mtune=generic -O2 -pipe -fno-plt -fexceptions \
-Wp,-D_FORTIFY_SOURCE=2,-D_GLIBCXX_ASSERTIONS \
-Wformat -Werror=format-security \
-fstack-clash-protection -fcf-protection"
CXXFLAGS="$CFLAGS"
CPPFLAGS="-D_FORTIFY_SOURCE=2"
CFLAGS="-march=x86-64 -mtune=generic -O2 -pipe -fno-plt"
CXXFLAGS="-march=x86-64 -mtune=generic -O2 -pipe -fno-plt"
LDFLAGS="-Wl,-O1,--sort-common,--as-needed,-z,relro,-z,now"
#RUSTFLAGS="-C opt-level=2"
#-- Make Flags: change this for DistCC/SMP systems
MAKEFLAGS="-j2"
#-- Debugging flags
DEBUG_CFLAGS="-g -fvar-tracking-assignments"
DEBUG_CXXFLAGS="-g -fvar-tracking-assignments"
#DEBUG_RUSTFLAGS="-C debuginfo=2"
#########################################################################
# BUILD ENVIRONMENT
@@ -92,7 +89,7 @@ BUILDENV=(!distcc !color !ccache !check !sign)
#
OPTIONS=(strip docs !libtool !staticlibs emptydirs zipman purge !debug)
#-- File integrity checks to use. Valid: md5, sha1, sha224, sha256, sha384, sha512, b2
#-- File integrity checks to use. Valid: md5, sha1, sha256, sha384, sha512
INTEGRITY_CHECK=(md5)
#-- Options to be used when stripping binaries. See `man strip' for details.
STRIP_BINARIES="--strip-all"
@@ -135,7 +132,7 @@ PACKAGER="ALHP Buildbot <alhp@anonfunc.net>"
COMPRESSGZ=(gzip -c -f -n)
COMPRESSBZ2=(bzip2 -c -f)
COMPRESSXZ=(xz -c -z -)
COMPRESSZST=(zstd -c -z -q -)
COMPRESSZST=(zstd -c -T0 --ultra -20 -)
COMPRESSLRZ=(lrzip -q)
COMPRESSLZO=(lzop -q)
COMPRESSZ=(compress -c -f)
@@ -147,4 +144,6 @@ COMPRESSLZ=(lzip -c -f)
#########################################################################
#
PKGEXT='.pkg.tar.zst'
SRCEXT='.src.tar.gz'
SRCEXT='.src.tar.gz'
# vim: set ft=sh ts=2 sw=2 et:

master.py

@@ -11,26 +11,32 @@ import signal
import subprocess
import sys
import time
from multiprocessing import Pool, current_process, Lock, JoinableQueue
from queue import Empty
import traceback
from multiprocessing import Pool, current_process, JoinableQueue, Lock
from typing import AnyStr
import yaml
from humanfriendly import format_timespan
from packaging import version
from packaging.version import LegacyVersion
from utils import parse_pkgbuild, parse_pkgbuild_ver, import_keys, increase_pkgrel
regex_pkgver = re.compile(r"^_?pkgver\s*=\s*(.+)$", re.MULTILINE)
regex_pkgrel = re.compile(r"^pkgrel\s*=\s*(.+)$", re.MULTILINE)
regex_epoch = re.compile(r"^epoch\s*=\s*(.+)$", re.MULTILINE)
regex_march = re.compile(r"(-march=)(.+?) ", re.MULTILINE)
regex_validkeys = re.compile(r"^validpgpkeys\+?=\((.*?)\)", re.MULTILINE | re.DOTALL)
regex_pkg_repo = re.compile(r"^(.*)-.*-.*-(?:x86_64|any)\.pkg\.tar\.zst(?:\.sig)*$", re.MULTILINE)
fp = None
update_last = time.time()
repo_lock = Lock()
copy_l = Lock()
failed_l = Lock()
repos = []
def build(pkgbuild, repo):
def build(pkgbuild: str, repo: str) -> None:
start_time = time.time()
name = pathlib.Path(pkgbuild).parts[-4]
parsed = parse_pkgbuild(pkgbuild)
name = parsed["pkgbase"]
process_name = current_process().name
logging.info("[%s/%s/%s] Build starting (Queue ~= %s)", process_name, repo, name, q.qsize())
@@ -41,12 +47,12 @@ def build(pkgbuild, repo):
import_keys(pkgbuild)
# increase pkgrel
increase_pkgrel(pkgbuild)
increase_pkgrel(pkgbuild, parsed)
# build with devtools
os.chdir(pathlib.Path(pkgbuild).parent)
res = subprocess.run(
["makechrootpkg", "-D", os.path.join(config["basedir"]["makepkg"]), "-l", process_name, "-r",
["makechrootpkg", "-c", "-D", os.path.join(config["basedir"]["makepkg"]), "-l", process_name, "-r",
os.path.join(config["basedir"]["chroot"]), "--", "--config",
os.path.join(config["basedir"]["makepkg"]) + "makepkg-" + '-'.join(
repo.split("-")[1:]) + ".conf"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
@@ -54,15 +60,18 @@ def build(pkgbuild, repo):
logging.warning("[%s/%s/%s] Build failed. Check repo/logs for more information.", process_name, repo, name)
# write packagename to failed list
with open(os.path.join(config["basedir"]["repo"], repo + "_failed.txt"), "a") as f:
f.write(name + "\n")
with failed_l, open(os.path.join(config["basedir"]["repo"], repo + "_failed.txt"), "a") as f:
f.write(name + "==" + str(parse_pkgbuild_ver(parsed=parsed)) + "\n")
# write logs
if not os.path.exists(os.path.join(config["basedir"]["repo"], "logs", repo)):
pathlib.Path(os.path.join(config["basedir"]["repo"], "logs", repo)).mkdir(parents=True,
exist_ok=True)
with open(os.path.join(config["basedir"]["repo"], "logs", repo, name + ".log"), "w") as log:
log.write(res.stdout.decode())
log.write(res.stdout.decode(errors="ignore"))
logging.debug("[GIT] %s", subprocess.run(["git", "clean", "-xdff"], check=False, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT).stdout.decode(errors="ignore"))
return
@@ -72,36 +81,26 @@ def build(pkgbuild, repo):
s_res = subprocess.run(["gpg", "--batch", "--detach-sign", pkg], stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
if s_res.returncode:
logging.error("[%s/%s/%s] Signing failed: %s", process_name, repo, name, s_res.stdout.decode())
logging.error("[%s/%s/%s] Signing failed: %s", process_name, repo, name,
s_res.stdout.decode(errors="ignore"))
logging.debug("[GIT] %s", subprocess.run(["git", "clean", "-xdff"], check=False, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT).stdout.decode(errors="ignore"))
return
# copying
pkgs.extend(glob.glob("*.pkg.tar.zst.sig"))
for pkg in pkgs:
logging.debug("[%s/%s/%s] Copy %s to %s", process_name, repo, name, pkg,
os.path.join(config["basedir"]["repo"], repo, "os", config["arch"] + "/"))
shutil.copy2(pkg, os.path.join(config["basedir"]["repo"], repo, "os", config["arch"] + "/"))
with copy_l:
for pkg in pkgs:
logging.debug("[%s/%s/%s] Copy %s to %s", process_name, repo, name, pkg,
os.path.join(config["basedir"]["repo"], repo, "os", config["arch"] + "/"))
shutil.copy2(pkg, os.path.join(config["basedir"]["repo"], repo, "os", config["arch"] + "/"))
# repo
repo_lock.acquire()
r_res = subprocess.run(["repo-add", "-s", "-v",
os.path.join(config["basedir"]["repo"], repo, "os", config["arch"],
repo + ".db.tar.xz"),
pkgs[0]], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
logging.debug("[REPO-ADD] %s", r_res.stdout.decode())
if r_res.returncode:
logging.error("[%s/%s/%s] Repo action failed: %s", process_name, repo, name, r_res.stdout.decode())
repo_lock.release()
return
logging.debug("[GIT] %s", subprocess.run(["git", "clean", "-xdff"], check=False, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT).stdout.decode(errors="ignore"))
p_res = subprocess.run(
["paccache", "-rc", os.path.join(config["basedir"]["repo"], repo, "os", config["arch"]), "-k", "1"],
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
logging.debug("[PACCACHE] %s", p_res.stdout.decode())
repo_lock.release()
if p_res.returncode:
logging.error("[%s/%s/%s] Repo cleanup failed: %s", process_name, repo, name, p_res.stdout.decode())
return
logpath = pathlib.Path(os.path.join(config["basedir"]["repo"], "logs", repo, name + ".log"))
if logpath.exists():
os.remove(logpath)
logging.info("[%s/%s/%s] Build successful (%s)", process_name, repo, name,
format_timespan(time.time() - start_time))
@@ -114,12 +113,31 @@ def run_worker() -> None:
build(*q.get(block=True))
except Exception as e:
logging.error("Error in worker: %s", e)
traceback.print_exc()
finally:
q.task_done()
os.chdir(sys.path[0])
def already_running():
def do_repo_work() -> None:
for repo in repos:
pkgs = glob.glob(os.path.join(config["basedir"]["repo"], repo, "os", config["arch"], "*.zst"))
args = ["repo-add", "-s", "-v", "-p", "-n",
os.path.join(config["basedir"]["repo"], repo, "os", config["arch"], repo + ".db.tar.xz")]
args.extend(pkgs)
r_res = subprocess.run(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
logging.debug("[REPO-ADD] %s", r_res.stdout.decode(errors="ignore"))
if r_res.returncode:
logging.error("[REPO/%s] Repo action failed: %s", repo, r_res.stdout.decode(errors="ignore"))
p_res = subprocess.run(
["paccache", "-rc", os.path.join(config["basedir"]["repo"], repo, "os", config["arch"]), "-k", "1"],
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
logging.debug("[PACCACHE] %s", p_res.stdout.decode(errors="ignore"))
if p_res.returncode:
logging.error("[REPO/%s] Repo cleanup failed: %s", repo, p_res.stdout.decode(errors="ignore"))
def already_running() -> bool:
global fp
fp = os.open(f"/tmp/alhp.lock", os.O_WRONLY | os.O_CREAT)
@@ -130,35 +148,90 @@ def already_running():
return True
def find_all_files_for_pkg(name, repo):
searchpath = os.path.join(config["basedir"]["repo"], repo, "os", config["arch"]) + "/" + name + "-*.pkg.*"
pkgs = glob.glob(searchpath)
def delete_package(parsed_pkgbuild: dict, repo: list[AnyStr]):
for pkg in parsed_pkgbuild["packages"]:
pkg_f = find_package_files(pkg, repo)
for p in pkgs:
if p.endswith(".sig"):
pkgs.remove(p)
if pkg_f:
for f in pkg_f:
base = pathlib.Path(f).parent
repo = f.split("/")[-4]
logging.info("[%s/%s] Deleting package files: %s", repo, parsed_pkgbuild["pkgbase"],
pathlib.Path(f).name)
args = ["repo-remove", "-s", "-v", os.path.join(base, repo + ".db.tar.xz"), pkg]
r_res = subprocess.run(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
logging.debug("[REPO-REMOVE] %s", r_res.stdout.decode(errors="ignore"))
if r_res.returncode:
logging.error("[REPO/%s] Repo action failed: %s", repo, r_res.stdout.decode(errors="ignore"))
continue
os.remove(f)
os.remove(f + ".sig")
def find_package_files(name: str, repo: list[AnyStr]) -> list[AnyStr]:
pkgs = []
for t_repo in repo:
files: list[str]
root: str
for root, dirs, files in os.walk(os.path.join(config["basedir"]["repo"], t_repo, "os", config["arch"])):
for file in files:
if file.endswith(".sig"):
continue
res = regex_pkg_repo.search(file)
if res and res.group(1) == name:
pkgs.append(os.path.join(root, file))
return pkgs
def get_failed_packages(repo):
def is_package_failed(package: str, ver: LegacyVersion, repo: str):
pkgs = get_failed_packages(repo)
p: str
for p in pkgs:
s = p.split("==")
if s[0] == package:
if ver > LegacyVersion(s[1]):
with failed_l, open(os.path.join(config["basedir"]["repo"], repo + "_failed.txt"), "r+") as f:
d = f.readlines()
f.seek(0)
f.truncate()
for i in d:
if i.strip("\n") != p:
f.write(i)
return False
else:
return True
return False
def get_failed_packages(repo: str) -> list:
if os.path.exists(os.path.join(config["basedir"]["repo"], repo + "_failed.txt")):
with open(os.path.join(config["basedir"]["repo"], repo + "_failed.txt")) as p:
return p.read().splitlines()
with failed_l, open(os.path.join(config["basedir"]["repo"], repo + "_failed.txt")) as p:
return p.readlines()
else:
return []
def setup_chroot():
def setup_chroot() -> None:
if not os.path.exists(os.path.join(config["basedir"]["chroot"], "root")):
pathlib.Path(config["basedir"]["chroot"]).mkdir(parents=True, exist_ok=True)
logging.debug("[MKCHROOT] %s",
subprocess.run(
["mkarchroot", os.path.join(config["basedir"]["chroot"], "root"), "base-devel"],
stdout=subprocess.PIPE, stderr=subprocess.STDOUT).stdout.decode(errors='ignore'))
s = subprocess.run(["mkarchroot", "-C", "/usr/share/devtools/pacman-extra.conf",
os.path.join(config["basedir"]["chroot"], "root"), "base-devel"],
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
logging.debug("[MKCHROOT] %s", s.stdout.decode(errors='ignore'))
if s.returncode:
logging.fatal("[MKCHROOT] Failed to create root chroot: %s", s.stdout.decode(errors="ignore"))
sys.exit(2)
else:
logging.debug("[NSPAWN] %s", subprocess.run(
["arch-nspawn", os.path.join(config["basedir"]["chroot"], "root"), "pacman", "-Syuu", "--noconfirm"]))
def setup_makepkg(repo):
def setup_makepkg(repo) -> None:
makepkg_repo = os.path.join(config["basedir"]["makepkg"], "makepkg-" + '-'.join(repo.split("-")[1:]) + ".conf")
if not os.path.exists(makepkg_repo):
@@ -174,36 +247,7 @@ def setup_makepkg(repo):
conf.write(c_all)
def import_keys(pkgbuild):
with open(pkgbuild, errors='ignore') as pkgb:
keys_s = regex_validkeys.findall(pkgb.read())
if keys_s:
keys = []
for k in keys_s:
keys.extend(k.split(" "))
for k in keys:
k = k.strip()
k = k.replace("'", "")
k = k.replace("\"", "")
if len(k) == 40:
logging.debug("[GPG] %s",
subprocess.run(
["gpg", "--keyserver", "keyserver.ubuntu.com", "--recv-keys", k],
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT).stdout.decode(errors='ignore'))
logging.info("[GPG] Imported key %s", k)
def package_exists(name, repo):
pkgs = find_all_files_for_pkg(name, repo)
return len(pkgs) > 0
def update_svn2git():
def update_svn2git() -> None:
if not os.path.exists(config["basedir"]["upstream"]):
pathlib.Path(config["basedir"]["upstream"]).mkdir(parents=True, exist_ok=True)
@@ -212,54 +256,31 @@ def update_svn2git():
if not os.path.exists(git_path):
logging.debug("[GIT] %s",
subprocess.run(["git", "clone", "--depth=1", git_url, git_path], check=True,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT).stdout.decode())
stdout=subprocess.PIPE, stderr=subprocess.STDOUT).stdout.decode(
errors="ignore"))
else:
os.chdir(git_path)
logging.debug("[GIT] %s", subprocess.run(["git", "clean", "-xdf"], check=False, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT).stdout.decode())
logging.debug("[GIT] %s", subprocess.run(["git", "clean", "-xdff"], check=False, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT).stdout.decode(errors="ignore"))
logging.debug("[GIT] %s", subprocess.run(["git", "reset", "--hard"], check=True, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT).stdout.decode())
stderr=subprocess.STDOUT).stdout.decode(errors="ignore"))
logging.debug("[GIT] %s", subprocess.run(["git", "pull"], check=True, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT).stdout.decode())
os.chdir(sys.path[0])
stderr=subprocess.STDOUT).stdout.decode(errors="ignore"))
def parse_pkgbuild(pkgbuild_file):
with open(pkgbuild_file, errors='ignore') as p:
pkgbuild_str = p.read()
pkgver = regex_pkgver.findall(pkgbuild_str)
pkgrel = regex_pkgrel.findall(pkgbuild_str)
if not pkgver or not pkgrel:
logging.warning("[%s] Failed to parse pkgbuild", pkgbuild_file.split("/")[-4])
return version.parse("")
return LegacyVersion("{}-{}".format(pkgver[0], pkgrel[0]))
def increase_pkgrel(pkgbuild_file):
with open(pkgbuild_file, errors='ignore') as p:
pkgbuild_str = p.read()
pkgbuild_str = regex_pkgrel.sub(r"pkgrel=\1.1", pkgbuild_str)
with open(pkgbuild_file, "w") as pkg:
pkg.write(pkgbuild_str)
def parse_repo(name, repo):
ver_split = find_all_files_for_pkg(name, repo)[0].split("-")
def parse_repo(name, repo) -> LegacyVersion:
ver_split = find_package_files(name, [repo, ])[0].split("-")
return LegacyVersion(ver_split[-3] + "-" + ver_split[-2])
def sync_marchs_with_config():
repos = []
def sync_marchs_with_config() -> None:
existing_repos = []
with os.scandir(config["basedir"]["repo"]) as it:
entry: os.DirEntry
for entry in it:
if not entry.name.startswith('logs') and entry.is_dir():
repos.append(entry.name)
existing_repos.append(entry.name)
repo_quota = []
@@ -267,8 +288,10 @@ def sync_marchs_with_config():
repo_quota.append("{}-{}".format(r, a))
logging.info("Repos: %s", repo_quota)
repos_create = list(set(repo_quota) - set(repos))
repos_delete = list(set(repos) - set(repo_quota))
global repos
repos = repo_quota
repos_create = list(set(repo_quota) - set(existing_repos))
repos_delete = list(set(existing_repos) - set(repo_quota))
for repo in repos_create:
logging.debug("Create repo %s: %s", repo, os.path.join(config["basedir"]["repo"], repo, "os/x86_64"))
@@ -281,46 +304,48 @@ def sync_marchs_with_config():
os.remove(os.path.join(config["basedir"]["makepkg"], "makepkg-" + repo + ".conf"))
def fill_queue():
def fill_queue() -> None:
all_pkgbuild = []
for git_dir, git_url in config["svn2git"].items():
all_pkgbuild.extend(
glob.glob(os.path.join(config["basedir"]["upstream"], git_dir) + "/**/PKGBUILD", recursive=True))
to_delete = []
for pkgbuild in all_pkgbuild:
path_split = pkgbuild.split("/")
# ignore pkgbuild if in trunk, -any package, not in repos, or on blacklist
if path_split[-2] == "trunk" or path_split[-2].split("-")[0] not in config["repos"] or "any" in path_split[-2] \
or path_split[-4] in config["blacklist"] or "i686" in path_split[-2]:
to_delete.append(pkgbuild)
if path_split[-2] == "trunk" or path_split[-2].split("-")[0] not in config["repos"] or "i686" in path_split[-2]:
continue
final_pkgbuilds = list(set(all_pkgbuild) - set(to_delete))
parsed_pkgb = parse_pkgbuild(pkgbuild)
for pkgb in final_pkgbuilds:
for march in config["march"]:
path_split = pkgb.split("/")
name = path_split[-4]
repo = path_split[-2].split("-")[0] + "-" + march
# ignore pkgbuild if in trunk, -any package, not in repos, on blacklist, not for current arch
if "any" in parsed_pkgb["arch"] or parsed_pkgb["pkgbase"] in config["blacklist"]:
delete_package(parsed_pkgb, repos)
else:
for march in config["march"]:
repo = path_split[-2].split("-")[0] + "-" + march
ver = parse_pkgbuild_ver(parsed=parsed_pkgb)
if name in get_failed_packages(repo):
logging.info("[%s/%s] Skipped due to failing build", repo, name)
continue
if is_package_failed(parsed_pkgb["pkgbase"], ver, repo):
logging.info("[%s/%s] Skipped due to failing build", repo, parsed_pkgb["pkgbase"])
delete_package(parsed_pkgb, [repo, ])
continue
if package_exists(name, repo):
logging.debug("[SEMVER] Comparing %s=%s - %s=%s", name, parse_repo(name, repo), name,
parse_pkgbuild(pkgb))
if not package_exists(name, repo):
q.put((pkgb, repo))
logging.info("[%s/%s] Build queued (package not build yet)", repo, name)
elif parse_repo(name, repo) < parse_pkgbuild(pkgb):
q.put((pkgb, repo))
logging.info("[%s/%s] Build queued (new version available %s < %s)", repo, name,
parse_repo(name, repo), parse_pkgbuild(pkgb))
packages = list(parsed_pkgb["packages"])
pkg_f = find_package_files(packages[0], [repo, ])
if pkg_f:
logging.debug("[SEMVER] Comparing %s=%s - %s=%s", packages[0], parse_repo(packages[0], repo),
packages[0], ver)
rv = parse_repo(packages[0], repo)
if rv < ver:
q.put((pkgbuild, repo))
logging.info("[%s/%s] Build queued (new version available %s < %s)", repo,
parsed_pkgb["pkgbase"],
rv, ver)
else:
q.put((pkgbuild, repo))
logging.info("[%s/%s] Build queued (package not build yet)", repo, parsed_pkgb["pkgbase"])
logging.info("Build queue size: %s", q.qsize())
@@ -343,6 +368,7 @@ if __name__ == '__main__':
setup_chroot()
sync_marchs_with_config()
do_repo_work()
update_svn2git()
q = JoinableQueue()
@@ -352,32 +378,22 @@ if __name__ == '__main__':
while True:
try:
du = shutil.disk_usage(config["basedir"]["upstream"])
if (du[1] / du[0]) > 0.9:
logging.warning("Less then 10% disk space remaining, performing cleanup...")
while not q.empty():
try:
q.get(False)
except Empty:
continue
q.task_done()
if time.time() - update_last > 900 and q.empty():
logging.info("[SVN2GIT] Waiting for queue to finish...")
q.join()
logging.info("Cleared Queue, clearing upstream repos...")
update_svn2git()
logging.info("Cleanup done, refill queue")
fill_queue()
time.sleep(60)
elif time.time() - update_last > 900 and q.qsize() == 0:
update_last = time.time()
update_svn2git()
setup_chroot()
fill_queue()
if q.qsize() > 0:
logging.info("[SVN2GIT] New Queue size: %d", q.qsize())
else:
time.sleep(60)
time.sleep(300)
do_repo_work()
except KeyboardInterrupt:
pool.close()
pool.terminate()
q.close()
sys.exit(0)
with copy_l, failed_l:
pool.close()
pool.terminate()
q.close()
do_repo_work()
sys.exit(0)

utils.py

@@ -0,0 +1,64 @@
import logging
import os
import pathlib
import re
import subprocess

from packaging.version import LegacyVersion
from srcinfo.parse import parse_srcinfo

regex_pkgrel = re.compile(r"^pkgrel\s*=\s*(.+)$", re.MULTILINE)


def import_keys(pkgbuild: str) -> bool:
    parsed = parse_pkgbuild(pkgbuild)

    if "validpgpkeys" in parsed:
        for k in parsed["validpgpkeys"]:
            s = subprocess.run(["gpg", "--keyserver", "keyserver.ubuntu.com", "--recv-keys", k],
                               stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
            logging.debug("[GPG] %s", s.stdout.decode(errors='ignore'))
            if s.returncode:
                logging.warning("[GPG] Import of key %s failed: %s", k, s.stdout.decode(errors="ignore"))
                return False
            else:
                logging.info("[GPG] Imported key %s", k)
    return True


def increase_pkgrel(pkgbuild_file: str, parsed: dict = None) -> None:
    if not parsed:
        parsed = parse_pkgbuild(pkgbuild_file)

    with open(pkgbuild_file, "r+", errors='ignore') as p:
        pkgbuild_str = p.read()
        p.seek(0)
        p.truncate(0)
        pkgbuild_str = regex_pkgrel.sub("pkgrel=" + parsed["pkgrel"] + ".1", pkgbuild_str)
        p.write(pkgbuild_str)


def parse_pkgbuild(pkgbuild_file: str) -> dict:
    pkgbuild_path = pathlib.Path(pkgbuild_file)
    os.chdir(pkgbuild_path.parent)

    res = subprocess.run(["makepkg", "--printsrcinfo"], check=False, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    if res.returncode:
        logging.warning("[PKGBUILD/%s] makepkg failed: %s", pkgbuild_path.name, res.stdout.decode(errors="ignore"))
        return {}

    (parsed, errors) = parse_srcinfo(res.stdout.decode(errors="ignore"))
    if errors:
        logging.warning("[PKGBUILD/%s] Failed to parse: %s", pkgbuild_path.name, errors)
        return {}

    return parsed


def parse_pkgbuild_ver(pkgbuild_file: str = None, parsed: dict = None) -> LegacyVersion:
    if not parsed:
        parsed = parse_pkgbuild(pkgbuild_file)

    if "epoch" in parsed:
        return LegacyVersion("{}:{}-{}".format(parsed["epoch"], parsed["pkgver"], parsed["pkgrel"]))
    return LegacyVersion("{}-{}".format(parsed["pkgver"], parsed["pkgrel"]))