Compare commits

...

5 Commits

4 changed files with 45 additions and 15 deletions

View File

@@ -1,3 +1,7 @@
# Deprecation notice
This was replaced with [ALHP.GO](https://git.harting.dev/anonfunc/ALHP.GO), please open any issues or PRs there.
# alhp # alhp
Build script for archlinux instructionset enabled repos. Build script for archlinux instructionset enabled repos.

View File

@@ -21,9 +21,6 @@ blacklist:
- pacman - pacman
- tensorflow - tensorflow
- tensorflow-cuda - tensorflow-cuda
- brotli
- libarchive
- libb2
- gcc - gcc
build: build:

View File

@@ -29,12 +29,14 @@ regex_pkg_repo = re.compile(r"^(.*)-.*-.*-(?:x86_64|any)\.pkg\.tar\.zst(?:\.sig)
fp = None fp = None
update_last = time.time() update_last = time.time()
copy_l = Lock() copy_l = Lock()
failed_l = Lock()
repos = [] repos = []
def build(pkgbuild: str, repo: str) -> None: def build(pkgbuild: str, repo: str) -> None:
start_time = time.time() start_time = time.time()
name = pathlib.Path(pkgbuild).parts[-4] parsed = parse_pkgbuild(pkgbuild)
name = parsed["pkgbase"]
process_name = current_process().name process_name = current_process().name
logging.info("[%s/%s/%s] Build starting (Queue ~= %s)", process_name, repo, name, q.qsize()) logging.info("[%s/%s/%s] Build starting (Queue ~= %s)", process_name, repo, name, q.qsize())
@@ -45,7 +47,7 @@ def build(pkgbuild: str, repo: str) -> None:
import_keys(pkgbuild) import_keys(pkgbuild)
# increase pkgrel # increase pkgrel
increase_pkgrel(pkgbuild) increase_pkgrel(pkgbuild, parsed)
# build with devtools # build with devtools
os.chdir(pathlib.Path(pkgbuild).parent) os.chdir(pathlib.Path(pkgbuild).parent)
@@ -58,8 +60,8 @@ def build(pkgbuild: str, repo: str) -> None:
logging.warning("[%s/%s/%s] Build failed. Check repo/logs for more information.", process_name, repo, name) logging.warning("[%s/%s/%s] Build failed. Check repo/logs for more information.", process_name, repo, name)
# write packagename to failed list # write packagename to failed list
with open(os.path.join(config["basedir"]["repo"], repo + "_failed.txt"), "a") as f: with failed_l, open(os.path.join(config["basedir"]["repo"], repo + "_failed.txt"), "a") as f:
f.write(name + "\n") f.write(name + "==" + str(parse_pkgbuild_ver(parsed=parsed)) + "\n")
# write logs # write logs
if not os.path.exists(os.path.join(config["basedir"]["repo"], "logs", repo)): if not os.path.exists(os.path.join(config["basedir"]["repo"], "logs", repo)):
@@ -96,6 +98,10 @@ def build(pkgbuild: str, repo: str) -> None:
logging.debug("[GIT] %s", subprocess.run(["git", "clean", "-xdff"], check=False, stdout=subprocess.PIPE, logging.debug("[GIT] %s", subprocess.run(["git", "clean", "-xdff"], check=False, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT).stdout.decode(errors="ignore")) stderr=subprocess.STDOUT).stdout.decode(errors="ignore"))
logpath = pathlib.Path(os.path.join(config["basedir"]["repo"], "logs", repo, name + ".log"))
if logpath.exists():
os.remove(logpath)
logging.info("[%s/%s/%s] Build successful (%s)", process_name, repo, name, logging.info("[%s/%s/%s] Build successful (%s)", process_name, repo, name,
format_timespan(time.time() - start_time)) format_timespan(time.time() - start_time))
@@ -180,10 +186,32 @@ def find_package_files(name: str, repo: list[AnyStr]) -> list[AnyStr]:
return pkgs return pkgs
def is_package_failed(package: str, ver: LegacyVersion, repo: str) -> bool:
    """Check whether *package* is on *repo*'s failed list at version >= *ver*.

    Returns True when a recorded failure exists for the same (or a newer)
    version, meaning the build should be skipped. If the recorded failure is
    for an OLDER version, the stale entry is pruned from ``<repo>_failed.txt``
    (under ``failed_l``) and False is returned so the package is retried.

    :param package: pkgbase name to look up.
    :param ver: version about to be built (LegacyVersion for lenient compare).
    :param repo: repo name; selects ``<repo>_failed.txt`` in the repo basedir.
    """
    for entry in get_failed_packages(repo):
        # get_failed_packages() yields raw readlines() output, so each entry
        # still carries its trailing newline -- normalize before comparing.
        line = entry.strip("\n")
        name, sep, failed_ver = line.partition("==")
        if name != package:
            continue
        if not sep:
            # Legacy entry written as a bare name (old format, no "==ver"):
            # treat it as an unknown old failure so the package is retried
            # and the stale entry gets cleaned up below.
            failed_ver = "0"
        if ver > LegacyVersion(failed_ver):
            # A newer version is available: drop the stale failed entry so
            # the rebuild is attempted. failed_l serializes writers.
            failed_path = os.path.join(config["basedir"]["repo"], repo + "_failed.txt")
            with failed_l, open(failed_path, "r+") as f:
                # Compare stripped-to-stripped; the original compared a
                # stripped line against an unstripped entry and never matched.
                kept = [i for i in f.readlines() if i.strip("\n") != line]
                f.seek(0)
                f.truncate()
                f.writelines(kept)
            return False
        return True
    return False
def get_failed_packages(repo: str) -> list: def get_failed_packages(repo: str) -> list:
if os.path.exists(os.path.join(config["basedir"]["repo"], repo + "_failed.txt")): if os.path.exists(os.path.join(config["basedir"]["repo"], repo + "_failed.txt")):
with open(os.path.join(config["basedir"]["repo"], repo + "_failed.txt")) as p: with failed_l, open(os.path.join(config["basedir"]["repo"], repo + "_failed.txt")) as p:
return p.read().splitlines() return p.readlines()
else: else:
return [] return []
@@ -297,13 +325,13 @@ def fill_queue() -> None:
else: else:
for march in config["march"]: for march in config["march"]:
repo = path_split[-2].split("-")[0] + "-" + march repo = path_split[-2].split("-")[0] + "-" + march
ver = parse_pkgbuild_ver(parsed=parsed_pkgb)
if parsed_pkgb["pkgbase"] in get_failed_packages(repo): if is_package_failed(parsed_pkgb["pkgbase"], ver, repo):
logging.info("[%s/%s] Skipped due to failing build", repo, parsed_pkgb["pkgbase"]) logging.info("[%s/%s] Skipped due to failing build", repo, parsed_pkgb["pkgbase"])
delete_package(parsed_pkgb, [repo, ]) delete_package(parsed_pkgb, [repo, ])
continue continue
ver = parse_pkgbuild_ver(parsed=parsed_pkgb)
packages = list(parsed_pkgb["packages"]) packages = list(parsed_pkgb["packages"])
pkg_f = find_package_files(packages[0], [repo, ]) pkg_f = find_package_files(packages[0], [repo, ])
if pkg_f: if pkg_f:
@@ -363,7 +391,7 @@ if __name__ == '__main__':
time.sleep(300) time.sleep(300)
do_repo_work() do_repo_work()
except KeyboardInterrupt: except KeyboardInterrupt:
with copy_l: with copy_l, failed_l:
pool.close() pool.close()
pool.terminate() pool.terminate()
q.close() q.close()

View File

@@ -27,12 +27,13 @@ def import_keys(pkgbuild: str) -> bool:
return True return True
def increase_pkgrel(pkgbuild_file) -> None: def increase_pkgrel(pkgbuild_file: str, parsed: dict = None) -> None:
parsed = parse_pkgbuild(pkgbuild_file) if not parsed:
parsed = parse_pkgbuild(pkgbuild_file)
with open(pkgbuild_file, "r+", errors='ignore') as p: with open(pkgbuild_file, "r+", errors='ignore') as p:
pkgbuild_str = p.read() pkgbuild_str = p.read()
p.seek(0)
p.truncate(0) p.truncate(0)
p.seek(0, 0)
pkgbuild_str = regex_pkgrel.sub("pkgrel=" + parsed["pkgrel"] + ".1", pkgbuild_str) pkgbuild_str = regex_pkgrel.sub("pkgrel=" + parsed["pkgrel"] + ".1", pkgbuild_str)
p.write(pkgbuild_str) p.write(pkgbuild_str)