Compare commits

...

18 Commits

SHA1 Message Date
ed150e0433 added deprecation notice 2021-06-12 20:02:21 +02:00
2fb7387cf8 fixed truncate 2021-06-04 17:06:07 +02:00
6af405ca14 remove old pkg versions from failed 2021-06-04 16:07:42 +02:00
6d2e371500 add version to failed package lists, so that a later version may be build again 2021-06-03 20:01:45 +02:00
208f70472c reverted previous blacklist changes 2021-06-02 14:06:25 +02:00
1d5bc7922d only look at relevant pkgbuilds 2021-06-02 13:46:11 +02:00
d3402c188e improved logging 2021-06-02 13:38:16 +02:00
e09f46b4e5 fixed some issues introduced with delete_package 2021-06-02 12:57:32 +02:00
783575045f delete packages failing to build or not meant to build 2021-06-02 01:44:23 +02:00
e625fcc4fc optimize fill_queue 2021-06-01 16:47:36 +02:00
d007ea6bf7 fade out sys.path II 2021-06-01 14:02:06 +02:00
f18830c894 fade out sys.path 2021-06-01 13:56:28 +02:00
5863c42532 don't change back to script dir if not changed beforehand 2021-06-01 13:44:03 +02:00
eeeab58ac9 do repo before and after build 2021-06-01 13:40:34 +02:00
6391cf8789 improved logging, 2. Edition 2021-05-31 20:49:45 +02:00
2e7129a776 improved logging 2021-05-31 20:29:12 +02:00
b772c5d320 handle failed makepkg call 2021-05-31 17:23:14 +02:00
d3898b2b82 build and fail pkgbuilds, compare pkgnames for version and exists 2021-05-31 15:30:47 +02:00
4 changed files with 122 additions and 66 deletions

README.md

@@ -1,3 +1,7 @@
+# Deprecation notice
+This was replaced with [ALHP.GO](https://git.harting.dev/anonfunc/ALHP.GO), please open any issues or PRs there.
+
 # alhp
 Build script for archlinux instructionset enabled repos.

View File

@@ -21,9 +21,6 @@ blacklist:
 - pacman
 - tensorflow
 - tensorflow-cuda
-- brotli
-- libarchive
-- libb2
 - gcc

 build:

master.py (151 changed lines)

@@ -13,15 +13,15 @@ import sys
 import time
 import traceback
 from multiprocessing import Pool, current_process, JoinableQueue, Lock
+from typing import AnyStr

 import yaml
 from humanfriendly import format_timespan
 from packaging.version import LegacyVersion

-from utils import parse_pkgbuild, parse_pkgbuild_ver, import_keys
+from utils import parse_pkgbuild, parse_pkgbuild_ver, import_keys, increase_pkgrel

 regex_pkgver = re.compile(r"^_?pkgver\s*=\s*(.+)$", re.MULTILINE)
-regex_pkgrel = re.compile(r"^pkgrel\s*=\s*(.+)$", re.MULTILINE)
 regex_epoch = re.compile(r"^epoch\s*=\s*(.+)$", re.MULTILINE)
 regex_march = re.compile(r"(-march=)(.+?) ", re.MULTILINE)
 regex_validkeys = re.compile(r"^validpgpkeys\+?=\((.*?)\)", re.MULTILINE | re.DOTALL)
@@ -29,12 +29,14 @@ regex_pkg_repo = re.compile(r"^(.*)-.*-.*-(?:x86_64|any)\.pkg\.tar\.zst(?:\.sig)
 fp = None
 update_last = time.time()
 copy_l = Lock()
+failed_l = Lock()
 repos = []


 def build(pkgbuild: str, repo: str) -> None:
     start_time = time.time()
-    name = pathlib.Path(pkgbuild).parts[-4]
+    parsed = parse_pkgbuild(pkgbuild)
+    name = parsed["pkgbase"]
     process_name = current_process().name

     logging.info("[%s/%s/%s] Build starting (Queue ~= %s)", process_name, repo, name, q.qsize())
@@ -45,7 +47,7 @@ def build(pkgbuild: str, repo: str) -> None:
     import_keys(pkgbuild)

     # increase pkgrel
-    increase_pkgrel(pkgbuild)
+    increase_pkgrel(pkgbuild, parsed)

     # build with devtools
     os.chdir(pathlib.Path(pkgbuild).parent)
@@ -58,8 +60,8 @@ def build(pkgbuild: str, repo: str) -> None:
         logging.warning("[%s/%s/%s] Build failed. Check repo/logs for more information.", process_name, repo, name)

         # write packagename to failed list
-        with open(os.path.join(config["basedir"]["repo"], repo + "_failed.txt"), "a") as f:
-            f.write(name + "\n")
+        with failed_l, open(os.path.join(config["basedir"]["repo"], repo + "_failed.txt"), "a") as f:
+            f.write(name + "==" + str(parse_pkgbuild_ver(parsed=parsed)) + "\n")

         # write logs
         if not os.path.exists(os.path.join(config["basedir"]["repo"], "logs", repo)):
@@ -96,6 +98,10 @@ def build(pkgbuild: str, repo: str) -> None:
     logging.debug("[GIT] %s", subprocess.run(["git", "clean", "-xdff"], check=False, stdout=subprocess.PIPE,
                                              stderr=subprocess.STDOUT).stdout.decode(errors="ignore"))

+    logpath = pathlib.Path(os.path.join(config["basedir"]["repo"], "logs", repo, name + ".log"))
+    if logpath.exists():
+        os.remove(logpath)
+
     logging.info("[%s/%s/%s] Build successful (%s)", process_name, repo, name,
                  format_timespan(time.time() - start_time))
@@ -110,7 +116,6 @@ def run_worker() -> None:
             traceback.print_exc()
         finally:
             q.task_done()
-            os.chdir(sys.path[0])


 def do_repo_work() -> None:
@@ -130,7 +135,6 @@ def do_repo_work() -> None:
         logging.debug("[PACCACHE] %s", p_res.stdout.decode(errors="ignore"))
         if p_res.returncode:
             logging.error("[REPO/%s] Repo cleanup failed: %s", repo, p_res.stdout.decode(errors="ignore"))
-    os.chdir(sys.path[0])


 def already_running() -> bool:
@@ -144,32 +148,70 @@ def already_running() -> bool:
         return True


-def package_exists(name, repo) -> bool:
-    pkgs = find_all_files_for_pkg(name, repo)
-
-    return len(pkgs) > 0
+def delete_package(parsed_pkgbuild: dict, repo: list[AnyStr]):
+    for pkg in parsed_pkgbuild["packages"]:
+        pkg_f = find_package_files(pkg, repo)
+        if pkg_f:
+            for f in pkg_f:
+                base = pathlib.Path(f).parent
+                repo = f.split("/")[-4]
+                logging.info("[%s/%s] Deleting package files: %s", repo, parsed_pkgbuild["pkgbase"],
+                             pathlib.Path(f).name)
+                args = ["repo-remove", "-s", "-v", os.path.join(base, repo + ".db.tar.xz"), pkg]
+                r_res = subprocess.run(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+                logging.debug("[REPO-REMOVE] %s", r_res.stdout.decode(errors="ignore"))
+                if r_res.returncode:
+                    logging.error("[REPO/%s] Repo action failed: %s", repo, r_res.stdout.decode(errors="ignore"))
+                    continue
+                os.remove(f)
+                os.remove(f + ".sig")


-def find_all_files_for_pkg(name: str, repo: str) -> list:
+def find_package_files(name: str, repo: list[AnyStr]) -> list[AnyStr]:
     pkgs = []
-    for root, dirs, files in os.walk(os.path.join(config["basedir"]["repo"], repo, "os", config["arch"])):
-        for file in files:
-            res = regex_pkg_repo.findall(file)
-            for r in res:
-                if r == name:
-                    pkgs.append(os.path.join(root, file))
-    for p in pkgs:
-        if p.endswith(".sig"):
-            pkgs.remove(p)
+
+    for t_repo in repo:
+        files: list[str]
+        root: str
+        for root, dirs, files in os.walk(os.path.join(config["basedir"]["repo"], t_repo, "os", config["arch"])):
+            for file in files:
+                if file.endswith(".sig"):
+                    continue
+                res = regex_pkg_repo.search(file)
+                if res and res.group(1) == name:
+                    pkgs.append(os.path.join(root, file))

     return pkgs


+def is_package_failed(package: str, ver: LegacyVersion, repo: str):
+    pkgs = get_failed_packages(repo)
+    p: str
+    for p in pkgs:
+        s = p.split("==")
+        if s[0] == package:
+            if ver > LegacyVersion(s[1]):
+                with failed_l, open(os.path.join(config["basedir"]["repo"], repo + "_failed.txt"), "r+") as f:
+                    d = f.readlines()
+                    f.seek(0)
+                    f.truncate()
+                    for i in d:
+                        if i.strip("\n") != p:
+                            f.write(i)
+                return False
+            else:
+                return True
+    return False
+
+
 def get_failed_packages(repo: str) -> list:
     if os.path.exists(os.path.join(config["basedir"]["repo"], repo + "_failed.txt")):
-        with open(os.path.join(config["basedir"]["repo"], repo + "_failed.txt")) as p:
-            return p.read().splitlines()
+        with failed_l, open(os.path.join(config["basedir"]["repo"], repo + "_failed.txt")) as p:
+            return p.readlines()
     else:
         return []
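Together with the build() change above, a failed build is now recorded as pkgbase==version, and is_package_failed only keeps skipping a package while the PKGBUILD version is not newer than the recorded failure. A minimal sketch of that decision, assuming packaging < 22 (where LegacyVersion still exists); the entry and versions are illustrative:

    from packaging.version import LegacyVersion

    failed_entry = "zstd==1.5.0-1\n"  # one line of <repo>_failed.txt
    name, _, failed_ver = failed_entry.strip("\n").partition("==")

    ver = LegacyVersion("1.5.1-1")  # version currently in the PKGBUILD
    if ver > LegacyVersion(failed_ver):
        print(name, "is retried and its entry pruned from the list")
    else:
        print(name, "stays skipped")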
@@ -224,22 +266,10 @@ def update_svn2git() -> None:
                                                  stderr=subprocess.STDOUT).stdout.decode(errors="ignore"))
     logging.debug("[GIT] %s", subprocess.run(["git", "pull"], check=True, stdout=subprocess.PIPE,
                                              stderr=subprocess.STDOUT).stdout.decode(errors="ignore"))
-    os.chdir(sys.path[0])
-
-
-def increase_pkgrel(pkgbuild_file) -> None:
-    parsed = parse_pkgbuild(pkgbuild_file)
-    with open(pkgbuild_file, "r+", errors='ignore') as p:
-        pkgbuild_str = p.read()
-        p.truncate(0)
-        p.seek(0, 0)
-        pkgbuild_str = regex_pkgrel.sub("pkgrel=" + parsed["pkgrel"] + ".1", pkgbuild_str)
-        p.write(pkgbuild_str)


 def parse_repo(name, repo) -> LegacyVersion:
-    ver_split = find_all_files_for_pkg(name, repo)[0].split("-")
+    ver_split = find_package_files(name, [repo, ])[0].split("-")
     return LegacyVersion(ver_split[-3] + "-" + ver_split[-2])
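parse_repo still reads the repo-side version out of a package file name by splitting on "-"; a worked example with an illustrative file name:

    from packaging.version import LegacyVersion  # packaging < 22

    f = "zstd-1.5.0-2-x86_64.pkg.tar.zst"  # illustrative makepkg output name
    ver_split = f.split("-")  # ["zstd", "1.5.0", "2", "x86_64.pkg.tar.zst"]
    print(LegacyVersion(ver_split[-3] + "-" + ver_split[-2]))  # 1.5.0-2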
@@ -283,33 +313,39 @@ def fill_queue() -> None:
     for pkgbuild in all_pkgbuild:
         path_split = pkgbuild.split("/")
-        parsed = parse_pkgbuild(pkgbuild)
+
+        if path_split[-2] == "trunk" or path_split[-2].split("-")[0] not in config["repos"] or "i686" in path_split[-2]:
+            continue
+
+        parsed_pkgb = parse_pkgbuild(pkgbuild)

         # ignore pkgbuild if in trunk, -any package, not in repos, on blacklist, not for current arch
-        if path_split[-2] == "trunk" or path_split[-2].split("-")[0] not in config["repos"] or "any" in parsed["arch"] \
-                or parsed["pkgbase"] in config["blacklist"] or "i686" in path_split[-2]:
-            # TODO: delete pkgs not build anymore
-            pass
+        if "any" in parsed_pkgb["arch"] or parsed_pkgb["pkgbase"] in config["blacklist"]:
+            delete_package(parsed_pkgb, repos)
         else:
             for march in config["march"]:
                 repo = path_split[-2].split("-")[0] + "-" + march
+                ver = parse_pkgbuild_ver(parsed=parsed_pkgb)

-                for pkgname in list(parsed["packages"]):
-                    if pkgname in get_failed_packages(repo):
-                        logging.info("[%s/%s] Skipped due to failing build", repo, pkgname)
-                        continue
-                    if package_exists(pkgname, repo):
-                        logging.debug("[SEMVER] Comparing %s=%s - %s=%s", pkgname, parse_repo(pkgname, repo), pkgname,
-                                      parse_pkgbuild_ver(pkgbuild))
+                if is_package_failed(parsed_pkgb["pkgbase"], ver, repo):
+                    logging.info("[%s/%s] Skipped due to failing build", repo, parsed_pkgb["pkgbase"])
+                    delete_package(parsed_pkgb, [repo, ])
+                    continue

-                    if not package_exists(pkgname, repo):
-                        q.put((pkgbuild, repo))
-                        logging.info("[%s/%s] Build queued (package not build yet)", repo, pkgname)
-                    elif parse_repo(pkgname, repo) < parse_pkgbuild_ver(pkgbuild):
-                        q.put((pkgbuild, repo))
-                        logging.info("[%s/%s] Build queued (new version available %s < %s)", repo, pkgname,
-                                     parse_repo(pkgname, repo), parse_pkgbuild(pkgbuild))
+                packages = list(parsed_pkgb["packages"])
+                pkg_f = find_package_files(packages[0], [repo, ])
+                if pkg_f:
+                    logging.debug("[SEMVER] Comparing %s=%s - %s=%s", packages[0], parse_repo(packages[0], repo),
+                                  packages[0], ver)
+                    rv = parse_repo(packages[0], repo)
+                    if rv < ver:
+                        q.put((pkgbuild, repo))
+                        logging.info("[%s/%s] Build queued (new version available %s < %s)", repo,
+                                     parsed_pkgb["pkgbase"],
+                                     rv, ver)
+                else:
+                    q.put((pkgbuild, repo))
+                    logging.info("[%s/%s] Build queued (package not build yet)", repo, parsed_pkgb["pkgbase"])

     logging.info("Build queue size: %s", q.qsize())
@@ -332,6 +368,7 @@ if __name__ == '__main__':
     setup_chroot()
     sync_marchs_with_config()
+    do_repo_work()
     update_svn2git()

     q = JoinableQueue()
@@ -354,7 +391,7 @@ if __name__ == '__main__':
                 time.sleep(300)
                 do_repo_work()
     except KeyboardInterrupt:
-        with copy_l:
+        with copy_l, failed_l:
             pool.close()
             pool.terminate()
             q.close()

utils.py

@@ -1,12 +1,14 @@
 import logging
 import os
 import pathlib
+import re
 import subprocess
+import sys

 from packaging.version import LegacyVersion
 from srcinfo.parse import parse_srcinfo

+regex_pkgrel = re.compile(r"^pkgrel\s*=\s*(.+)$", re.MULTILINE)


 def import_keys(pkgbuild: str) -> bool:
     parsed = parse_pkgbuild(pkgbuild)
@@ -25,22 +27,38 @@ def import_keys(pkgbuild: str) -> bool:
     return True


+def increase_pkgrel(pkgbuild_file: str, parsed: dict = None) -> None:
+    if not parsed:
+        parsed = parse_pkgbuild(pkgbuild_file)
+    with open(pkgbuild_file, "r+", errors='ignore') as p:
+        pkgbuild_str = p.read()
+        p.seek(0)
+        p.truncate(0)
+        pkgbuild_str = regex_pkgrel.sub("pkgrel=" + parsed["pkgrel"] + ".1", pkgbuild_str)
+        p.write(pkgbuild_str)
+
+
 def parse_pkgbuild(pkgbuild_file: str) -> dict:
     pkgbuild_path = pathlib.Path(pkgbuild_file)
     os.chdir(pkgbuild_path.parent)
-    res = subprocess.run(["makepkg", "--printsrcinfo"], check=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    res = subprocess.run(["makepkg", "--printsrcinfo"], check=False, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    os.chdir(sys.path[0])
+    if res.returncode:
+        logging.warning("[PKGBUILD/%s] makepkg failed: %s", pkgbuild_path.name, res.stdout.decode(errors="ignore"))
+        return {}
     (parsed, errors) = parse_srcinfo(res.stdout.decode(errors="ignore"))
     if errors:
-        logging.warning("[PKGBUILD] Failed to parse: %s", pkgbuild_path.name)
+        logging.warning("[PKGBUILD/%s] Failed to parse: %s", pkgbuild_path.name, errors)
         return {}
     return parsed


-def parse_pkgbuild_ver(pkgbuild_file: str) -> LegacyVersion:
-    parsed = parse_pkgbuild(pkgbuild_file)
+def parse_pkgbuild_ver(pkgbuild_file: str = None, parsed: dict = None) -> LegacyVersion:
+    if not parsed:
+        parsed = parse_pkgbuild(pkgbuild_file)
     if "epoch" in parsed:
         return LegacyVersion("{}:{}-{}".format(parsed["epoch"], parsed["pkgver"], parsed["pkgrel"]))
     return LegacyVersion("{}-{}".format(parsed["pkgver"], parsed["pkgrel"]))