improved handling of full disk space
1 changed file: master.py (36 lines changed)
@@ -12,6 +12,7 @@ import subprocess
 import sys
 import time
 from multiprocessing import Pool, Queue, current_process, Lock
+from queue import Empty
 
 import yaml
 from humanfriendly import format_timespan
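
Note: the new import is queue.Empty, the exception a non-blocking get() raises on an empty queue; multiprocessing queues raise this same exception, and the main loop added at the bottom of this commit catches it while draining the work queue. A minimal standalone sketch of that behaviour, using a plain queue.Queue for illustration:

    import queue

    q = queue.Queue()
    q.put("demo-item")

    print(q.get(False))    # non-blocking get returns the queued item
    try:
        q.get(False)       # non-blocking get on an empty queue...
    except queue.Empty:
        print("empty")     # ...raises queue.Empty instead of blocking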
@@ -62,7 +63,6 @@ def build(pkgbuild, repo):
         with open(os.path.join(config["basedir"]["repo"], "logs", repo, name + ".log"), "w") as log:
             log.write(res.stdout.decode())
 
-        build_cleanup()
         return
 
     # signing
@@ -72,7 +72,6 @@ def build(pkgbuild, repo):
                            stderr=subprocess.STDOUT)
     if s_res.returncode:
         logging.error("[%s/%s/%s] Signing failed: %s", process_name, repo, name, s_res.stdout.decode())
-        build_cleanup()
         return
 
     # copying
@@ -92,7 +91,6 @@ def build(pkgbuild, repo):
     if r_res.returncode:
         logging.error("[%s/%s/%s] Repo action failed: %s", process_name, repo, name, r_res.stdout.decode())
         repo_lock.release()
-        build_cleanup()
         return
 
     p_res = subprocess.run(
@@ -102,11 +100,8 @@ def build(pkgbuild, repo):
     repo_lock.release()
     if p_res.returncode:
         logging.error("[%s/%s/%s] Repo cleanup failed: %s", process_name, repo, name, p_res.stdout.decode())
-        build_cleanup()
         return
 
-    # cleanup
-    build_cleanup()
     logging.info("[%s/%s/%s] Build successful (%s)", process_name, repo, name,
                  format_timespan(time.time() - start_time))
 
@@ -118,6 +113,9 @@ def run_worker() -> None:
             build(*q.get(block=True))
         except Exception as e:
             logging.error("Error in worker: %s", e)
+        finally:
+            q.task_done()
+            os.chdir(sys.path[0])
 
 
 def already_running():
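
Note: this finally block is what makes the build_cleanup() calls removed above, and the helper removed below, unnecessary: marking the queue item done and changing back to the script's directory now happens once per task, whether build() succeeds, fails early, or raises. A minimal sketch of the pattern, with a plain queue.Queue standing in for the shared multiprocessing work queue (the task tuple and print are illustrative):

    import logging
    import os
    import sys
    import queue

    q = queue.Queue()                      # stand-in for the shared work queue
    q.put(("some-pkgbuild", "extra"))

    def run_worker() -> None:
        while True:
            try:
                pkgbuild, repo = q.get(block=True)
                print("would build", pkgbuild, "for", repo)   # stand-in for build()
            except Exception as e:
                logging.error("Error in worker: %s", e)
            finally:
                q.task_done()              # runs on success, failure, or exception
                os.chdir(sys.path[0])      # always return to the script directory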
@@ -150,10 +148,6 @@ def get_failed_packages(repo):
     return []
 
 
-def build_cleanup():
-    os.chdir(sys.path[0])
-
-
 def setup_chroot():
     if not os.path.exists(os.path.join(config["basedir"]["chroot"], "root")):
         pathlib.Path(config["basedir"]["chroot"]).mkdir(parents=True, exist_ok=True)
@@ -300,7 +294,7 @@ def fill_queue():
 
         # ignore pkgbuild if in trunk, -any package, not in repos, or on blacklist
         if path_split[-2] == "trunk" or path_split[-2].split("-")[0] not in config["repos"] or "any" in path_split[-2] \
-                or path_split[-4] in config["blacklist"] or "core-i686" in path_split[-2]:
+                or path_split[-4] in config["blacklist"] or "i686" in path_split[-2]:
             to_delete.append(pkgbuild)
 
     final_pkgbuilds = list(set(all_pkgbuild) - set(to_delete))
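
Note: the only change in this hunk widens the last filter from the literal "core-i686" directory to anything containing "i686", so 32-bit package directories are now skipped for every repo rather than only for core. For orientation, a sketch of which path components the indices refer to, assuming the usual svn2git checkout layout of <pkgname>/repos/<repo>-<arch>/PKGBUILD (the example path is made up):

    pkgbuild = "upstream/packages/linux/repos/core-i686/PKGBUILD"   # hypothetical path
    path_split = pkgbuild.split("/")

    print(path_split[-2])   # "core-i686" -> repo/arch directory, checked against config["repos"] and "i686"
    print(path_split[-4])   # "linux"     -> package name, checked against config["blacklist"]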
@@ -357,7 +351,25 @@ if __name__ == '__main__':
 
     while True:
         try:
-            if time.time() - update_last > 900 and q.qsize() == 0:
+            du = shutil.disk_usage(config["basedir"]["upstream"])
+            if (du[1] / du[0]) > 0.9:
+                logging.warning("Less then 10% disk space remaining, performing cleanup...")
+
+                while not q.empty():
+                    try:
+                        q.get(False)
+                    except Empty:
+                        continue
+                    q.task_done()
+                q.join()
+
+                logging.info("Cleared Queue, clearing upstream repos...")
+
+                update_svn2git()
+                logging.info("Cleanup done, refill queue")
+                fill_queue()
+                time.sleep(60)
+            elif time.time() - update_last > 900 and q.qsize() == 0:
                 update_last = time.time()
                 update_svn2git()
                 fill_queue()
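
Note: this is the core of the commit. Before the regular 15-minute refresh (900 seconds), the main loop now checks how full the disk holding the upstream checkout is. shutil.disk_usage() returns a named tuple (total, used, free) in bytes, so du[1] / du[0] is the used fraction and a value above 0.9 means less than 10% of the space is left. In that case the pending queue is drained with non-blocking get() calls (catching the newly imported Empty and marking each drained item done), q.join() waits for tasks already handed to workers, the upstream repos are re-synced with update_svn2git(), and the queue is refilled. A minimal sketch of just the threshold check (the path is illustrative):

    import shutil

    du = shutil.disk_usage("/var/lib/upstream")   # hypothetical path; returns (total, used, free) in bytes
    used_fraction = du.used / du.total            # equivalent to du[1] / du[0]

    if used_fraction > 0.9:
        print(f"less than 10% free ({used_fraction:.1%} used)")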