Compare commits: 1e224335ea...c77a632653 (2 commits)

Commits in range: c77a632653, ce43b426ac
master.py (126 changed lines)
@@ -11,13 +11,15 @@ import signal
 import subprocess
 import sys
 import time
-from multiprocessing import Pool, current_process, JoinableQueue, Lock, Manager
+import traceback
+from multiprocessing import Pool, current_process, JoinableQueue, Lock
 
 import yaml
 from humanfriendly import format_timespan
-from packaging import version
 from packaging.version import LegacyVersion
 
+from utils import parse_pkgbuild, parse_pkgbuild_ver, import_keys
+
 regex_pkgver = re.compile(r"^_?pkgver\s*=\s*(.+)$", re.MULTILINE)
 regex_pkgrel = re.compile(r"^pkgrel\s*=\s*(.+)$", re.MULTILINE)
 regex_epoch = re.compile(r"^epoch\s*=\s*(.+)$", re.MULTILINE)
@@ -105,6 +107,7 @@ def run_worker() -> None:
             build(*q.get(block=True))
         except Exception as e:
             logging.error("Error in worker: %s", e)
+            traceback.print_exc()
         finally:
             q.task_done()
             os.chdir(sys.path[0])
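The only functional change in run_worker() is the added traceback.print_exc(). A minimal standalone illustration of what it adds over the plain logging call (the RuntimeError is made up for the example):

    import logging
    import traceback

    try:
        raise RuntimeError("build failed")          # stand-in for a failing build()
    except Exception as e:
        logging.error("Error in worker: %s", e)     # logs the message only
        traceback.print_exc()                       # also prints the full stack trace to stderr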
@@ -141,6 +144,12 @@ def already_running() -> bool:
     return True
 
 
+def package_exists(name, repo) -> bool:
+    pkgs = find_all_files_for_pkg(name, repo)
+
+    return len(pkgs) > 0
+
+
 def find_all_files_for_pkg(name: str, repo: str) -> list:
     pkgs = []
     for root, dirs, files in os.walk(os.path.join(config["basedir"]["repo"], repo, "os", config["arch"])):
@@ -150,9 +159,6 @@ def find_all_files_for_pkg(name: str, repo: str) -> list:
             if r == name:
                 pkgs.append(os.path.join(root, file))
 
-    # searchpath = os.path.join(config["basedir"]["repo"], repo, "os", config["arch"]) + "/" + name + "-*.pkg.*"
-    # pkgs = glob.glob(searchpath)
-
     for p in pkgs:
         if p.endswith(".sig"):
             pkgs.remove(p)
@@ -199,39 +205,6 @@ def setup_makepkg(repo) -> None:
         conf.write(c_all)
 
 
-def import_keys(pkgbuild) -> bool:
-    with open(pkgbuild, errors='ignore') as pkgb:
-        keys_s = regex_validkeys.findall(pkgb.read())
-
-    if keys_s:
-        keys = []
-
-        for k in keys_s:
-            keys.extend(k.split(" "))
-
-        for k in keys:
-            k = k.strip()
-            k = k.replace("'", "")
-            k = k.replace("\"", "")
-            if len(k) == 40:
-                s = subprocess.run(["gpg", "--keyserver", "keyserver.ubuntu.com", "--recv-keys", k],
-                                   stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-                logging.debug("[GPG] %s", s.stdout.decode(errors='ignore'))
-                if s.returncode:
-                    logging.warning("[GPG] Import of key %s failed: %s", k, s.stdout.decode(errors="ignore"))
-                    return False
-                else:
-                    logging.info("[GPG] Imported key %s", k)
-
-    return True
-
-
-def package_exists(name, repo) -> bool:
-    pkgs = find_all_files_for_pkg(name, repo)
-
-    return len(pkgs) > 0
-
-
 def update_svn2git() -> None:
     if not os.path.exists(config["basedir"]["upstream"]):
         pathlib.Path(config["basedir"]["upstream"]).mkdir(parents=True, exist_ok=True)
@@ -254,30 +227,15 @@ def update_svn2git() -> None:
     os.chdir(sys.path[0])
 
 
-def parse_pkgbuild(pkgbuild_file) -> LegacyVersion:
-    with open(pkgbuild_file, errors='ignore') as p:
-        pkgbuild_str = p.read()
-
-    pkgver = regex_pkgver.findall(pkgbuild_str)
-    pkgrel = regex_pkgrel.findall(pkgbuild_str)
-    epoch = regex_epoch.findall(pkgbuild_str)
-    if not pkgver or not pkgrel:
-        logging.warning("[%s] Failed to parse pkgbuild", pkgbuild_file.split("/")[-4])
-        return version.parse("")
-
-    if epoch:
-        return LegacyVersion("{}:{}-{}".format(epoch[0], pkgver[0], pkgrel[0]))
-    return LegacyVersion("{}-{}".format(pkgver[0], pkgrel[0]))
-
-
 def increase_pkgrel(pkgbuild_file) -> None:
-    with open(pkgbuild_file, errors='ignore') as p:
+    parsed = parse_pkgbuild(pkgbuild_file)
+    with open(pkgbuild_file, "r+", errors='ignore') as p:
         pkgbuild_str = p.read()
+        p.truncate(0)
+        p.seek(0, 0)
 
-    pkgbuild_str = regex_pkgrel.sub(r"pkgrel=\1.1", pkgbuild_str)
-
-    with open(pkgbuild_file, "w") as pkg:
-        pkg.write(pkgbuild_str)
+        pkgbuild_str = regex_pkgrel.sub("pkgrel=" + parsed["pkgrel"] + ".1", pkgbuild_str)
+        p.write(pkgbuild_str)
 
 
 def parse_repo(name, repo) -> LegacyVersion:
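For reference, the rewritten increase_pkgrel() builds the replacement string from the parsed pkgrel value instead of a regex backreference. A small sketch of that substitution in isolation (the sample PKGBUILD text and parsed dict are made up):

    import re

    regex_pkgrel = re.compile(r"^pkgrel\s*=\s*(.+)$", re.MULTILINE)

    pkgbuild_str = "pkgname=foo\npkgver=1.0\npkgrel=2\n"
    parsed = {"pkgrel": "2"}  # assumed output of parse_pkgbuild()

    bumped = regex_pkgrel.sub("pkgrel=" + parsed["pkgrel"] + ".1", pkgbuild_str)
    print(bumped)  # the pkgrel=2 line becomes pkgrel=2.1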
@@ -323,39 +281,35 @@ def fill_queue() -> None:
         all_pkgbuild.extend(
             glob.glob(os.path.join(config["basedir"]["upstream"], git_dir) + "/**/PKGBUILD", recursive=True))
 
-    to_delete = []
-
     for pkgbuild in all_pkgbuild:
         path_split = pkgbuild.split("/")
+        parsed = parse_pkgbuild(pkgbuild)
 
-        # ignore pkgbuild if in trunk, -any package, not in repos, or on blacklist
-        if path_split[-2] == "trunk" or path_split[-2].split("-")[0] not in config["repos"] or "any" in path_split[-2] \
-                or path_split[-4] in config["blacklist"] or "i686" in path_split[-2]:
-            to_delete.append(pkgbuild)
-
-    final_pkgbuilds = list(set(all_pkgbuild) - set(to_delete))
-
-    for pkgb in final_pkgbuilds:
-        for march in config["march"]:
-            path_split = pkgb.split("/")
-            name = path_split[-4]
-            repo = path_split[-2].split("-")[0] + "-" + march
-
-            if name in get_failed_packages(repo):
-                logging.info("[%s/%s] Skipped due to failing build", repo, name)
-                continue
-
-            if package_exists(name, repo):
-                logging.debug("[SEMVER] Comparing %s=%s - %s=%s", name, parse_repo(name, repo), name,
-                              parse_pkgbuild(pkgb))
-
-            if not package_exists(name, repo):
-                q.put((pkgb, repo))
-                logging.info("[%s/%s] Build queued (package not build yet)", repo, name)
-            elif parse_repo(name, repo) < parse_pkgbuild(pkgb):
-                q.put((pkgb, repo))
-                logging.info("[%s/%s] Build queued (new version available %s < %s)", repo, name,
-                             parse_repo(name, repo), parse_pkgbuild(pkgb))
+        # ignore pkgbuild if in trunk, -any package, not in repos, on blacklist, not for current arch
+        if path_split[-2] == "trunk" or path_split[-2].split("-")[0] not in config["repos"] or "any" in parsed["arch"] \
+                or parsed["pkgbase"] in config["blacklist"] or "i686" in path_split[-2]:
+            # TODO: delete pkgs not build anymore
+            pass
+        else:
+            for march in config["march"]:
+                repo = path_split[-2].split("-")[0] + "-" + march
+
+                for pkgname in list(parsed["packages"]):
+                    if pkgname in get_failed_packages(repo):
+                        logging.info("[%s/%s] Skipped due to failing build", repo, pkgname)
+                        continue
+
+                    if package_exists(pkgname, repo):
+                        logging.debug("[SEMVER] Comparing %s=%s - %s=%s", pkgname, parse_repo(pkgname, repo), pkgname,
+                                      parse_pkgbuild_ver(pkgbuild))
+
+                    if not package_exists(pkgname, repo):
+                        q.put((pkgbuild, repo))
+                        logging.info("[%s/%s] Build queued (package not build yet)", repo, pkgname)
+                    elif parse_repo(pkgname, repo) < parse_pkgbuild_ver(pkgbuild):
+                        q.put((pkgbuild, repo))
+                        logging.info("[%s/%s] Build queued (new version available %s < %s)", repo, pkgname,
+                                     parse_repo(pkgname, repo), parse_pkgbuild(pkgbuild))
 
     logging.info("Build queue size: %s", q.qsize())
 
utils.py (new file, 46 lines)
@@ -0,0 +1,46 @@
+import logging
+import os
+import pathlib
+import subprocess
+import sys
+
+from packaging.version import LegacyVersion
+from srcinfo.parse import parse_srcinfo
+
+
+def import_keys(pkgbuild: str) -> bool:
+    parsed = parse_pkgbuild(pkgbuild)
+
+    if "validpgpkeys" in parsed:
+        for k in parsed["validpgpkeys"]:
+            s = subprocess.run(["gpg", "--keyserver", "keyserver.ubuntu.com", "--recv-keys", k],
+                               stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+            logging.debug("[GPG] %s", s.stdout.decode(errors='ignore'))
+            if s.returncode:
+                logging.warning("[GPG] Import of key %s failed: %s", k, s.stdout.decode(errors="ignore"))
+                return False
+            else:
+                logging.info("[GPG] Imported key %s", k)
+
+    return True
+
+
+def parse_pkgbuild(pkgbuild_file: str) -> dict:
+    pkgbuild_path = pathlib.Path(pkgbuild_file)
+    os.chdir(pkgbuild_path.parent)
+    res = subprocess.run(["makepkg", "--printsrcinfo"], check=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    os.chdir(sys.path[0])
+
+    (parsed, errors) = parse_srcinfo(res.stdout.decode(errors="ignore"))
+
+    if errors:
+        logging.warning("[PKGBUILD] Failed to parse: %s", pkgbuild_path.name)
+        return {}
+    return parsed
+
+
+def parse_pkgbuild_ver(pkgbuild_file: str) -> LegacyVersion:
+    parsed = parse_pkgbuild(pkgbuild_file)
+    if "epoch" in parsed:
+        return LegacyVersion("{}:{}-{}".format(parsed["epoch"], parsed["pkgver"], parsed["pkgrel"]))
+    return LegacyVersion("{}-{}".format(parsed["pkgver"], parsed["pkgrel"]))