Compare commits


2 Commits

bb257d06e8  2021-05-25 20:08:18 +02:00
    moved adding to db to main thread

    In there we can bundle all adds and can shut down more cleanly.
    Also this fixes split packages not getting added.

1b5c0912b8  2021-05-25 19:17:55 +02:00
    be more verbose if gpg key import fails
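
The first commit moves the repo-add calls out of the build workers: each worker only records what it built in a shared per-repo list, and the main process flushes everything in one batched call. A minimal sketch of that pattern, assuming multiprocessing workers (a plain module-level dict is not shared across processes, so this sketch uses a Manager; all names are illustrative, not the project's):

    from multiprocessing import Manager, Pool


    def build_one(args):
        # stand-in for build(): pretend we produced one package, then queue it
        repo, pkg, shared, lock = args
        with lock:
            shared[repo] = shared[repo] + [pkg]  # reassign so the proxy records it


    def flush(shared, lock):
        # stand-in for the batched repo-add pass in the main process
        with lock:
            for repo in shared.keys():
                if shared[repo]:
                    print("repo-add", repo + ".db.tar.xz", *shared[repo])
                    shared[repo] = []


    if __name__ == "__main__":
        with Manager() as m:
            shared = m.dict({"extra-x86-64-v3": []})
            lock = m.Lock()
            with Pool(2) as pool:
                pool.map(build_one,
                         [("extra-x86-64-v3", f"pkg{i}-1-1-x86_64.pkg.tar.zst", shared, lock)
                          for i in range(4)])
            flush(shared, lock)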


@@ -11,7 +11,7 @@ import signal
 import subprocess
 import sys
 import time
-from multiprocessing import Pool, current_process, Lock, JoinableQueue
+from multiprocessing import Pool, current_process, JoinableQueue, Lock

 import yaml
 from humanfriendly import format_timespan
@@ -25,7 +25,9 @@ regex_march = re.compile(r"(-march=)(.+?) ", re.MULTILINE)
 regex_validkeys = re.compile(r"^validpgpkeys\+?=\((.*?)\)", re.MULTILINE | re.DOTALL)
 fp = None
 update_last = time.time()
-repo_lock = Lock()
+copy_l = Lock()
+to_add = {}
+to_add_l = Lock()


 def build(pkgbuild, repo) -> None:
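
The single repo_lock is split into two narrower locks: copy_l serializes copying finished packages into the repo tree, while to_add_l only guards the shared to_add dict. Holding a lock just for the dict mutation, never around long subprocess calls, is the point; a minimal sketch of that idiom (the lock names mirror the diff, the rest is illustrative):

    from multiprocessing import Lock

    to_add = {"extra-x86-64-v3": []}
    to_add_l = Lock()


    def queue_packages(repo, pkgs):
        # keep the critical section to the mutation itself; the expensive
        # repo-add call happens later, outside any per-build lock
        with to_add_l:
            to_add[repo].extend(pkgs)


    queue_packages("extra-x86-64-v3", ["foo-1.0-1-x86_64.pkg.tar.zst"])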
@@ -77,31 +79,15 @@ def build(pkgbuild, repo) -> None:
     # copying
     pkgs.extend(glob.glob("*.pkg.tar.zst.sig"))
-    for pkg in pkgs:
-        logging.debug("[%s/%s/%s] Copy %s to %s", process_name, repo, name, pkg,
-                      os.path.join(config["basedir"]["repo"], repo, "os", config["arch"] + "/"))
-        shutil.copy2(pkg, os.path.join(config["basedir"]["repo"], repo, "os", config["arch"] + "/"))
+    with copy_l:
+        for pkg in pkgs:
+            logging.debug("[%s/%s/%s] Copy %s to %s", process_name, repo, name, pkg,
+                          os.path.join(config["basedir"]["repo"], repo, "os", config["arch"] + "/"))
+            shutil.copy2(pkg, os.path.join(config["basedir"]["repo"], repo, "os", config["arch"] + "/"))

     # repo
-    repo_lock.acquire()
-    r_res = subprocess.run(["repo-add", "-s", "-v",
-                            os.path.join(config["basedir"]["repo"], repo, "os", config["arch"],
-                                         repo + ".db.tar.xz"),
-                            pkgs[0]], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-    logging.debug("[REPO-ADD] %s", r_res.stdout.decode())
-    if r_res.returncode:
-        logging.error("[%s/%s/%s] Repo action failed: %s", process_name, repo, name, r_res.stdout.decode())
-        repo_lock.release()
-        return
-    p_res = subprocess.run(
-        ["paccache", "-rc", os.path.join(config["basedir"]["repo"], repo, "os", config["arch"]), "-k", "1"],
-        stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-    logging.debug("[PACCACHE] %s", p_res.stdout.decode())
-    repo_lock.release()
-    if p_res.returncode:
-        logging.error("[%s/%s/%s] Repo cleanup failed: %s", process_name, repo, name, p_res.stdout.decode())
-        return
+    with to_add_l:
+        to_add[repo].extend(glob.glob("*.pkg.tar.zst"))

     logging.info("[%s/%s/%s] Build successful (%s)", process_name, repo, name,
                  format_timespan(time.time() - start_time))
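
This hunk is also the split-package fix from the commit message: the old code handed only pkgs[0] to repo-add, so a PKGBUILD that builds several packages got just its first artifact registered. Queueing every *.pkg.tar.zst match picks them all up. A toy illustration with hypothetical filenames:

    import glob

    # a split PKGBUILD leaves several artifacts in the build directory, e.g.
    # foo-1.0-1-x86_64.pkg.tar.zst and foo-docs-1.0-1-x86_64.pkg.tar.zst
    pkgs = sorted(glob.glob("*.pkg.tar.zst"))
    old_selection = pkgs[:1]  # old behaviour: only the first package reached repo-add
    new_selection = pkgs      # new behaviour: the whole list is queued
    print(old_selection, new_selection)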
@@ -119,6 +105,28 @@ def run_worker() -> None:
     os.chdir(sys.path[0])


+def do_repo_work():
+    with to_add_l:
+        for repo in to_add:
+            if to_add[repo]:
+                args = ["repo-add", "-s", "-v",
+                        os.path.join(config["basedir"]["repo"], repo, "os", config["arch"], repo + ".db.tar.xz")]
+                args.extend(to_add[repo])
+                r_res = subprocess.run(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+                logging.debug("[REPO-ADD] %s", r_res.stdout.decode())
+                if r_res.returncode:
+                    logging.error("[%s] Repo action failed: %s", repo, r_res.stdout.decode())
+                    return
+                p_res = subprocess.run(
+                    ["paccache", "-rc", os.path.join(config["basedir"]["repo"], repo, "os", config["arch"]), "-k", "1"],
+                    stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+                logging.debug("[PACCACHE] %s", p_res.stdout.decode())
+                if p_res.returncode:
+                    logging.error("[%s] Repo cleanup failed: %s", repo, p_res.stdout.decode())
+                    return
+
+
 def already_running() -> bool:
     global fp
     fp = os.open(f"/tmp/alhp.lock", os.O_WRONLY | os.O_CREAT)
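
do_repo_work() leans on repo-add accepting several package files in one invocation, so a single database update (and one paccache pass) covers everything queued since the last flush. The call shape, reduced to its essentials (paths and filenames are illustrative):

    import subprocess

    db = "/srv/repo/extra-x86-64-v3/os/x86_64/extra-x86-64-v3.db.tar.xz"
    queued = ["foo-1.0-1-x86_64.pkg.tar.zst", "foo-docs-1.0-1-x86_64.pkg.tar.zst"]

    # -s signs the database, -v verifies signatures; all queued packages
    # go into one repo-add call instead of one call per finished build
    res = subprocess.run(["repo-add", "-s", "-v", db, *queued],
                         stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    print(res.stdout.decode())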
@@ -177,7 +185,7 @@ def setup_makepkg(repo) -> None:
         conf.write(c_all)


-def import_keys(pkgbuild) -> None:
+def import_keys(pkgbuild) -> bool:
     with open(pkgbuild, errors='ignore') as pkgb:
         keys_s = regex_validkeys.findall(pkgb.read())
@@ -192,12 +200,17 @@ def import_keys(pkgbuild) -> None:
             k = k.replace("'", "")
             k = k.replace("\"", "")
             if len(k) == 40:
-                logging.debug("[GPG] %s",
-                              subprocess.run(
-                                  ["gpg", "--keyserver", "keyserver.ubuntu.com", "--recv-keys", k],
-                                  stdout=subprocess.PIPE, stderr=subprocess.STDOUT).stdout.decode(errors='ignore'))
-                logging.info("[GPG] Imported key %s", k)
+                s = subprocess.run(["gpg", "--keyserver", "keyserver.ubuntu.com", "--recv-keys", k],
+                                   stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+                logging.debug("[GPG] %s", s.stdout.decode(errors='ignore'))
+                if s.returncode:
+                    logging.warning("[GPG] Import of key %s failed: %s", k, s.stdout.decode(errors="ignore"))
+                    return False
+                else:
+                    logging.info("[GPG] Imported key %s", k)
+    return True


 def package_exists(name, repo) -> bool:
     pkgs = find_all_files_for_pkg(name, repo)
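
Since import_keys() now reports failure instead of only logging it, a caller can skip a doomed build up front rather than failing later inside makepkg. A self-contained, single-key sketch of the new behaviour, assuming gpg is installed (import_key and the call site are hypothetical, not part of this diff):

    import logging
    import subprocess

    logging.basicConfig(level=logging.DEBUG)


    def import_key(key_id):
        # mirrors the diff: log gpg's output, warn and report failure on a bad import
        s = subprocess.run(["gpg", "--keyserver", "keyserver.ubuntu.com", "--recv-keys", key_id],
                           stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        logging.debug("[GPG] %s", s.stdout.decode(errors="ignore"))
        if s.returncode:
            logging.warning("[GPG] Import of key %s failed: %s", key_id, s.stdout.decode(errors="ignore"))
            return False
        logging.info("[GPG] Imported key %s", key_id)
        return True


    if not import_key("0" * 40):  # an invalid id, exercising the failure path
        print("skip this PKGBUILD instead of failing inside makepkg")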
@@ -274,6 +287,8 @@ def sync_marchs_with_config() -> None:
     logging.info("Repos: %s", repo_quota)
     repos_create = list(set(repo_quota) - set(repos))
     repos_delete = list(set(repos) - set(repo_quota))
+    for repo in repo_quota:
+        to_add[repo] = []

     for repo in repos_create:
         logging.debug("Create repo %s: %s", repo, os.path.join(config["basedir"]["repo"], repo, "os/x86_64"))
@@ -364,10 +379,12 @@ if __name__ == '__main__':
             if q.qsize() > 0:
                 logging.info("New Queue size: %d", q.qsize())
             else:
+                do_repo_work()
                 time.sleep(60)
     except KeyboardInterrupt:
-        repo_lock.acquire()
-        pool.close()
-        pool.terminate()
-        q.close()
+        with copy_l:
+            pool.close()
+            pool.terminate()
+            q.close()
+            do_repo_work()
         sys.exit(0)
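
The main loop now flushes pending database work whenever the build queue is idle, and the KeyboardInterrupt handler runs one final flush after stopping the pool, so nothing queued is lost on shutdown. The control flow, stripped to its shape (a sketch with stand-in names, not the project's loop; runs until Ctrl-C):

    import queue
    import time


    def do_repo_work():
        print("flush queued repo-add work")  # stand-in for the real batch flush


    q = queue.Queue()
    try:
        while True:
            if q.qsize() > 0:
                print("New Queue size: %d" % q.qsize())
            else:
                do_repo_work()  # flush while idle
                time.sleep(60)
    except KeyboardInterrupt:
        do_repo_work()  # final flush before exit
        raise SystemExit(0)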