diff --git a/master.py b/master.py
index f2205d9..4342df0 100644
--- a/master.py
+++ b/master.py
@@ -5,6 +5,7 @@ import glob
 import logging.config
 import os
 import pathlib
+import pprint
 import re
 import shutil
 import signal
@@ -17,6 +18,9 @@ import yaml
 from humanfriendly import format_timespan
 from packaging import version
 from packaging.version import LegacyVersion
+from srcinfo.parse import parse_srcinfo
+
+from utils import parse_pkgbuild, parse_pkgbuild_ver
 
 regex_pkgver = re.compile(r"^_?pkgver\s*=\s*(.+)$", re.MULTILINE)
 regex_pkgrel = re.compile(r"^pkgrel\s*=\s*(.+)$", re.MULTILINE)
@@ -141,6 +145,12 @@ def already_running() -> bool:
     return True
 
 
+def package_exists(name, repo) -> bool:
+    pkgs = find_all_files_for_pkg(name, repo)
+
+    return len(pkgs) > 0
+
+
 def find_all_files_for_pkg(name: str, repo: str) -> list:
     pkgs = []
     for root, dirs, files in os.walk(os.path.join(config["basedir"]["repo"], repo, "os", config["arch"])):
@@ -150,9 +160,6 @@
             if r == name:
                 pkgs.append(os.path.join(root, file))
 
-    # searchpath = os.path.join(config["basedir"]["repo"], repo, "os", config["arch"]) + "/" + name + "-*.pkg.*"
-    # pkgs = glob.glob(searchpath)
-
     for p in pkgs:
         if p.endswith(".sig"):
             pkgs.remove(p)
@@ -199,39 +206,6 @@ def setup_makepkg(repo) -> None:
         conf.write(c_all)
 
 
-def import_keys(pkgbuild) -> bool:
-    with open(pkgbuild, errors='ignore') as pkgb:
-        keys_s = regex_validkeys.findall(pkgb.read())
-
-    if keys_s:
-        keys = []
-
-        for k in keys_s:
-            keys.extend(k.split(" "))
-
-        for k in keys:
-            k = k.strip()
-            k = k.replace("'", "")
-            k = k.replace("\"", "")
-            if len(k) == 40:
-                s = subprocess.run(["gpg", "--keyserver", "keyserver.ubuntu.com", "--recv-keys", k],
-                                   stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-                logging.debug("[GPG] %s", s.stdout.decode(errors='ignore'))
-                if s.returncode:
-                    logging.warning("[GPG] Import of key %s failed: %s", k, s.stdout.decode(errors="ignore"))
-                    return False
-                else:
-                    logging.info("[GPG] Imported key %s", k)
-
-    return True
-
-
-def package_exists(name, repo) -> bool:
-    pkgs = find_all_files_for_pkg(name, repo)
-
-    return len(pkgs) > 0
-
-
 def update_svn2git() -> None:
     if not os.path.exists(config["basedir"]["upstream"]):
         pathlib.Path(config["basedir"]["upstream"]).mkdir(parents=True, exist_ok=True)
@@ -254,30 +228,12 @@ def update_svn2git() -> None:
     os.chdir(sys.path[0])
 
 
-def parse_pkgbuild(pkgbuild_file) -> LegacyVersion:
-    with open(pkgbuild_file, errors='ignore') as p:
-        pkgbuild_str = p.read()
-
-    pkgver = regex_pkgver.findall(pkgbuild_str)
-    pkgrel = regex_pkgrel.findall(pkgbuild_str)
-    epoch = regex_epoch.findall(pkgbuild_str)
-    if not pkgver or not pkgrel:
-        logging.warning("[%s] Failed to parse pkgbuild", pkgbuild_file.split("/")[-4])
-        return version.parse("")
-
-    if epoch:
-        return LegacyVersion("{}:{}-{}".format(epoch[0], pkgver[0], pkgrel[0]))
-    return LegacyVersion("{}-{}".format(pkgver[0], pkgrel[0]))
-
-
 def increase_pkgrel(pkgbuild_file) -> None:
-    with open(pkgbuild_file, errors='ignore') as p:
+    with open(pkgbuild_file, "r+", errors='ignore') as p:
         pkgbuild_str = p.read()
 
-    pkgbuild_str = regex_pkgrel.sub(r"pkgrel=\1.1", pkgbuild_str)
-
-    with open(pkgbuild_file, "w") as pkg:
-        pkg.write(pkgbuild_str)
+        p.seek(0)
+        p.write(regex_pkgrel.sub(r"pkgrel=\1.1", pkgbuild_str))
 
 
 def parse_repo(name, repo) -> LegacyVersion:
@@ -323,39 +279,35 @@ def fill_queue() -> None:
         all_pkgbuild.extend(
             glob.glob(os.path.join(config["basedir"]["upstream"], git_dir) + "/**/PKGBUILD", recursive=True))
 
-    to_delete = []
-
     for pkgbuild in all_pkgbuild:
         path_split = pkgbuild.split("/")
+        parsed = parse_pkgbuild(pkgbuild)
 
-        # ignore pkgbuild if in trunk, -any package, not in repos, or on blacklist
-        if path_split[-2] == "trunk" or path_split[-2].split("-")[0] not in config["repos"] or "any" in path_split[-2] \
-                or path_split[-4] in config["blacklist"] or "i686" in path_split[-2]:
-            to_delete.append(pkgbuild)
+        # ignore pkgbuild if in trunk, -any package, not in repos, on blacklist, or not for current arch
+        if path_split[-2] == "trunk" or path_split[-2].split("-")[0] not in config["repos"] or "any" in parsed["arch"] \
+                or parsed["pkgbase"] in config["blacklist"] or "i686" in path_split[-2]:
+            # TODO: delete pkgs that are not built anymore
+            pass
+        else:
+            for march in config["march"]:
+                repo = path_split[-2].split("-")[0] + "-" + march
 
-    final_pkgbuilds = list(set(all_pkgbuild) - set(to_delete))
+                for pkgname in list(parsed["packages"]):
+                    if pkgname in get_failed_packages(repo):
+                        logging.info("[%s/%s] Skipped due to failing build", repo, pkgname)
+                        continue
 
-    for pkgb in final_pkgbuilds:
-        for march in config["march"]:
-            path_split = pkgb.split("/")
-            name = path_split[-4]
-            repo = path_split[-2].split("-")[0] + "-" + march
+                    if package_exists(pkgname, repo):
+                        logging.debug("[SEMVER] Comparing %s=%s - %s=%s", pkgname, parse_repo(pkgname, repo), pkgname,
+                                      parse_pkgbuild_ver(pkgbuild))
 
-            if name in get_failed_packages(repo):
-                logging.info("[%s/%s] Skipped due to failing build", repo, name)
-                continue
-
-            if package_exists(name, repo):
-                logging.debug("[SEMVER] Comparing %s=%s - %s=%s", name, parse_repo(name, repo), name,
-                              parse_pkgbuild(pkgb))
-
-            if not package_exists(name, repo):
-                q.put((pkgb, repo))
-                logging.info("[%s/%s] Build queued (package not build yet)", repo, name)
-            elif parse_repo(name, repo) < parse_pkgbuild(pkgb):
-                q.put((pkgb, repo))
-                logging.info("[%s/%s] Build queued (new version available %s < %s)", repo, name,
-                             parse_repo(name, repo), parse_pkgbuild(pkgb))
+                    if not package_exists(pkgname, repo):
+                        q.put((pkgbuild, repo))
+                        logging.info("[%s/%s] Build queued (package not built yet)", repo, pkgname)
+                    elif parse_repo(pkgname, repo) < parse_pkgbuild_ver(pkgbuild):
+                        q.put((pkgbuild, repo))
+                        logging.info("[%s/%s] Build queued (new version available %s < %s)", repo, pkgname,
+                                     parse_repo(pkgname, repo), parse_pkgbuild_ver(pkgbuild))
 
     logging.info("Build queue size: %s", q.qsize())
 
diff --git a/utils.py b/utils.py
new file mode 100644
index 0000000..e1cb218
--- /dev/null
+++ b/utils.py
@@ -0,0 +1,48 @@
+import logging
+import os
+import pathlib
+import pprint
+import subprocess
+import sys
+
+from packaging.version import LegacyVersion
+from srcinfo.parse import parse_srcinfo
+
+
+def import_keys(pkgbuild: str) -> bool:
+    parsed = parse_pkgbuild(pkgbuild)
+
+    if "validpgpkeys" in parsed:
+        for k in parsed["validpgpkeys"]:
+            s = subprocess.run(["gpg", "--keyserver", "keyserver.ubuntu.com", "--recv-keys", k],
+                               stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+            logging.debug("[GPG] %s", s.stdout.decode(errors='ignore'))
+            if s.returncode:
+                logging.warning("[GPG] Import of key %s failed: %s", k, s.stdout.decode(errors="ignore"))
+                return False
+            else:
+                logging.info("[GPG] Imported key %s", k)
+
+    return True
+
+
+def parse_pkgbuild(pkgbuild_file: str) -> dict:
+    pkgbuild_path = pathlib.Path(pkgbuild_file)
+    os.chdir(pkgbuild_path.parent)
+    res = subprocess.run(["makepkg", "--printsrcinfo"], check=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    os.chdir(sys.path[0])
+
+    (parsed, errors) = parse_srcinfo(res.stdout.decode(errors="ignore"))
+
+    if errors:
+        logging.warning("[PKGBUILD] Failed to parse %s: %s", pkgbuild_file, errors)
+        return {}
+
+    return parsed
+
+
+def parse_pkgbuild_ver(pkgbuild_file: str) -> LegacyVersion:
+    parsed = parse_pkgbuild(pkgbuild_file)
+    if "epoch" in parsed:
+        return LegacyVersion("{}:{}-{}".format(parsed["epoch"], parsed["pkgver"], parsed["pkgrel"]))
+    return LegacyVersion("{}-{}".format(parsed["pkgver"], parsed["pkgrel"]))