@@ -11,26 +11,32 @@ import signal
 import subprocess
 import sys
 import time
-from multiprocessing import Pool, current_process, JoinableQueue, Lock, Manager
+import traceback
+from multiprocessing import Pool, current_process, JoinableQueue, Lock
+from typing import AnyStr
 
 import yaml
 from humanfriendly import format_timespan
-from packaging import version
 from packaging.version import LegacyVersion
 
+from utils import parse_pkgbuild, parse_pkgbuild_ver, import_keys, increase_pkgrel
 
 regex_pkgver = re.compile(r"^_?pkgver\s*=\s*(.+)$", re.MULTILINE)
 regex_pkgrel = re.compile(r"^pkgrel\s*=\s*(.+)$", re.MULTILINE)
 regex_epoch = re.compile(r"^epoch\s*=\s*(.+)$", re.MULTILINE)
 regex_march = re.compile(r"(-march=)(.+?) ", re.MULTILINE)
 regex_validkeys = re.compile(r"^validpgpkeys\+?=\((.*?)\)", re.MULTILINE | re.DOTALL)
+regex_pkg_repo = re.compile(r"^(.*)-.*-.*-(?:x86_64|any)\.pkg\.tar\.zst(?:\.sig)*$", re.MULTILINE)
 fp = None
 update_last = time.time()
 copy_l = Lock()
+failed_l = Lock()
+repos = []
 
 
-def build(pkgbuild: str, repo: str, todo: dict) -> None:
+def build(pkgbuild: str, repo: str) -> None:
     start_time = time.time()
-    name = pathlib.Path(pkgbuild).parts[-4]
+    parsed = parse_pkgbuild(pkgbuild)
+    name = parsed["pkgbase"]
     process_name = current_process().name
     logging.info("[%s/%s/%s] Build starting (Queue ~= %s)", process_name, repo, name, q.qsize())
@@ -41,7 +47,7 @@ def build(pkgbuild: str, repo: str, todo: dict) -> None:
     import_keys(pkgbuild)
 
     # increase pkgrel
-    increase_pkgrel(pkgbuild)
+    increase_pkgrel(pkgbuild, parsed)
 
     # build with devtools
     os.chdir(pathlib.Path(pkgbuild).parent)
@@ -54,8 +60,8 @@ def build(pkgbuild: str, repo: str, todo: dict) -> None:
         logging.warning("[%s/%s/%s] Build failed. Check repo/logs for more information.", process_name, repo, name)
 
         # write packagename to failed list
-        with open(os.path.join(config["basedir"]["repo"], repo + "_failed.txt"), "a") as f:
-            f.write(name + "\n")
+        with failed_l, open(os.path.join(config["basedir"]["repo"], repo + "_failed.txt"), "a") as f:
+            f.write(name + "==" + str(parse_pkgbuild_ver(parsed=parsed)) + "\n")
 
         # write logs
         if not os.path.exists(os.path.join(config["basedir"]["repo"], "logs", repo)):
@@ -89,38 +95,35 @@ def build(pkgbuild: str, repo: str, todo: dict) -> None:
-                         os.path.join(config["basedir"]["repo"], repo, "os", config["arch"] + "/"))
+            shutil.copy2(pkg, os.path.join(config["basedir"]["repo"], repo, "os", config["arch"] + "/"))
 
-        # repo
-        logging.debug("[%s/%s/%s] Adding packages to todo list: %s", process_name, repo, name,
-                      ", ".join(glob.glob("*.pkg.tar.zst")))
-        todo[repo].extend(glob.glob("*.pkg.tar.zst"))
 
     logging.debug("[GIT] %s", subprocess.run(["git", "clean", "-xdff"], check=False, stdout=subprocess.PIPE,
                                              stderr=subprocess.STDOUT).stdout.decode(errors="ignore"))
 
+    logpath = pathlib.Path(os.path.join(config["basedir"]["repo"], "logs", repo, name + ".log"))
+    if logpath.exists():
+        os.remove(logpath)
 
     logging.info("[%s/%s/%s] Build successful (%s)", process_name, repo, name,
                  format_timespan(time.time() - start_time))
 
 
-def run_worker(todo: dict) -> None:
+def run_worker() -> None:
     os.nice(20)
     while True:
         try:
-            build(*q.get(block=True), todo=todo)
+            build(*q.get(block=True))
         except Exception as e:
             logging.error("Error in worker: %s", e)
+            traceback.print_exc()
         finally:
             q.task_done()
             os.chdir(sys.path[0])
 
 
 def do_repo_work() -> None:
-    for repo in d:
-        if d[repo]:
-            logging.info("[REPO/%s] Adding %s", repo, ", ".join(d[repo]))
-            os.chdir(os.path.join(config["basedir"]["repo"], repo, "os", config["arch"]))
-            args = ["repo-add", "-s", "-v",
+    for repo in repos:
+        pkgs = glob.glob(os.path.join(config["basedir"]["repo"], repo, "os", config["arch"], "*.zst"))
+        args = ["repo-add", "-s", "-v", "-p", "-n",
                 os.path.join(config["basedir"]["repo"], repo, "os", config["arch"], repo + ".db.tar.xz")]
-        args.extend(d[repo])
+        args.extend(pkgs)
         r_res = subprocess.run(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
         logging.debug("[REPO-ADD] %s", r_res.stdout.decode(errors="ignore"))
         if r_res.returncode:
@@ -132,10 +135,6 @@ def do_repo_work() -> None:
         logging.debug("[PACCACHE] %s", p_res.stdout.decode(errors="ignore"))
         if p_res.returncode:
             logging.error("[REPO/%s] Repo cleanup failed: %s", repo, p_res.stdout.decode(errors="ignore"))
-        d[repo][:] = []
-        os.chdir(sys.path[0])
-    else:
-        logging.debug("[REPO/%s] Nothing to do", repo)
 
 
 def already_running() -> bool:
@@ -149,21 +148,70 @@ def already_running() -> bool:
     return True
 
 
-def find_all_files_for_pkg(name: str, repo: str) -> list:
-    searchpath = os.path.join(config["basedir"]["repo"], repo, "os", config["arch"]) + "/" + name + "-*.pkg.*"
-    pkgs = glob.glob(searchpath)
+def delete_package(parsed_pkgbuild: dict, repo: list[AnyStr]):
+    for pkg in parsed_pkgbuild["packages"]:
+        pkg_f = find_package_files(pkg, repo)
 
-    for p in pkgs:
-        if p.endswith(".sig"):
-            pkgs.remove(p)
-
+        if pkg_f:
+            for f in pkg_f:
+                base = pathlib.Path(f).parent
+                repo = f.split("/")[-4]
+                logging.info("[%s/%s] Deleting package files: %s", repo, parsed_pkgbuild["pkgbase"],
+                             pathlib.Path(f).name)
+
+                args = ["repo-remove", "-s", "-v", os.path.join(base, repo + ".db.tar.xz"), pkg]
+                r_res = subprocess.run(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+                logging.debug("[REPO-REMOVE] %s", r_res.stdout.decode(errors="ignore"))
+                if r_res.returncode:
+                    logging.error("[REPO/%s] Repo action failed: %s", repo, r_res.stdout.decode(errors="ignore"))
+                    continue
+                os.remove(f)
+                os.remove(f + ".sig")
+
+
+def find_package_files(name: str, repo: list[AnyStr]) -> list[AnyStr]:
+    pkgs = []
+
+    for t_repo in repo:
+        files: list[str]
+        root: str
+        for root, dirs, files in os.walk(os.path.join(config["basedir"]["repo"], t_repo, "os", config["arch"])):
+            for file in files:
+                if file.endswith(".sig"):
+                    continue
+
+                res = regex_pkg_repo.search(file)
+                if res and res.group(1) == name:
+                    pkgs.append(os.path.join(root, file))
     return pkgs
 
 
+def is_package_failed(package: str, ver: LegacyVersion, repo: str):
+    pkgs = get_failed_packages(repo)
+
+    p: str
+    for p in pkgs:
+        s = p.split("==")
+
+        if s[0] == package:
+            if ver > LegacyVersion(s[1]):
+                with failed_l, open(os.path.join(config["basedir"]["repo"], repo + "_failed.txt"), "r+") as f:
+                    d = f.readlines()
+                    f.seek(0)
+                    f.truncate()
+                    for i in d:
+                        if i.strip("\n") != p:
+                            f.write(i)
+                return False
+            else:
+                return True
+    return False
+
+
 def get_failed_packages(repo: str) -> list:
     if os.path.exists(os.path.join(config["basedir"]["repo"], repo + "_failed.txt")):
-        with open(os.path.join(config["basedir"]["repo"], repo + "_failed.txt")) as p:
-            return p.read().splitlines()
+        with failed_l, open(os.path.join(config["basedir"]["repo"], repo + "_failed.txt")) as p:
+            return p.readlines()
     else:
         return []
@@ -199,39 +247,6 @@ def setup_makepkg(repo) -> None:
         conf.write(c_all)
 
 
-def import_keys(pkgbuild) -> bool:
-    with open(pkgbuild, errors='ignore') as pkgb:
-        keys_s = regex_validkeys.findall(pkgb.read())
-
-    if keys_s:
-        keys = []
-
-        for k in keys_s:
-            keys.extend(k.split(" "))
-
-        for k in keys:
-            k = k.strip()
-            k = k.replace("'", "")
-            k = k.replace("\"", "")
-            if len(k) == 40:
-                s = subprocess.run(["gpg", "--keyserver", "keyserver.ubuntu.com", "--recv-keys", k],
-                                   stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-                logging.debug("[GPG] %s", s.stdout.decode(errors='ignore'))
-                if s.returncode:
-                    logging.warning("[GPG] Import of key %s failed: %s", k, s.stdout.decode(errors="ignore"))
-                    return False
-                else:
-                    logging.info("[GPG] Imported key %s", k)
-
-    return True
-
-
-def package_exists(name, repo) -> bool:
-    pkgs = find_all_files_for_pkg(name, repo)
-
-    return len(pkgs) > 0
-
-
 def update_svn2git() -> None:
     if not os.path.exists(config["basedir"]["upstream"]):
         pathlib.Path(config["basedir"]["upstream"]).mkdir(parents=True, exist_ok=True)
@@ -251,48 +266,21 @@ def update_svn2git() -> None:
                                                   stderr=subprocess.STDOUT).stdout.decode(errors="ignore"))
     logging.debug("[GIT] %s", subprocess.run(["git", "pull"], check=True, stdout=subprocess.PIPE,
                                              stderr=subprocess.STDOUT).stdout.decode(errors="ignore"))
     os.chdir(sys.path[0])
 
 
-def parse_pkgbuild(pkgbuild_file) -> LegacyVersion:
-    with open(pkgbuild_file, errors='ignore') as p:
-        pkgbuild_str = p.read()
-
-    pkgver = regex_pkgver.findall(pkgbuild_str)
-    pkgrel = regex_pkgrel.findall(pkgbuild_str)
-    epoch = regex_epoch.findall(pkgbuild_str)
-    if not pkgver or not pkgrel:
-        logging.warning("[%s] Failed to parse pkgbuild", pkgbuild_file.split("/")[-4])
-        return version.parse("")
-
-    if epoch:
-        return LegacyVersion("{}:{}-{}".format(epoch[0], pkgver[0], pkgrel[0]))
-    return LegacyVersion("{}-{}".format(pkgver[0], pkgrel[0]))
-
-
-def increase_pkgrel(pkgbuild_file) -> None:
-    with open(pkgbuild_file, errors='ignore') as p:
-        pkgbuild_str = p.read()
-
-    pkgbuild_str = regex_pkgrel.sub(r"pkgrel=\1.1", pkgbuild_str)
-
-    with open(pkgbuild_file, "w") as pkg:
-        pkg.write(pkgbuild_str)
-
-
 def parse_repo(name, repo) -> LegacyVersion:
-    ver_split = find_all_files_for_pkg(name, repo)[0].split("-")
+    ver_split = find_package_files(name, [repo, ])[0].split("-")
 
     return LegacyVersion(ver_split[-3] + "-" + ver_split[-2])
 
 
 def sync_marchs_with_config() -> None:
-    repos = []
+    existing_repos = []
     with os.scandir(config["basedir"]["repo"]) as it:
         entry: os.DirEntry
         for entry in it:
             if not entry.name.startswith('logs') and entry.is_dir():
-                repos.append(entry.name)
+                existing_repos.append(entry.name)
 
     repo_quota = []
@@ -300,10 +288,10 @@ def sync_marchs_with_config() -> None:
             repo_quota.append("{}-{}".format(r, a))
 
     logging.info("Repos: %s", repo_quota)
-    repos_create = list(set(repo_quota) - set(repos))
-    repos_delete = list(set(repos) - set(repo_quota))
-    for repo in repo_quota:
-        d[repo] = m.list()
+    global repos
+    repos = repo_quota
+    repos_create = list(set(repo_quota) - set(existing_repos))
+    repos_delete = list(set(existing_repos) - set(repo_quota))
 
     for repo in repos_create:
         logging.debug("Create repo %s: %s", repo, os.path.join(config["basedir"]["repo"], repo, "os/x86_64"))
@@ -323,39 +311,41 @@ def fill_queue() -> None:
         all_pkgbuild.extend(
             glob.glob(os.path.join(config["basedir"]["upstream"], git_dir) + "/**/PKGBUILD", recursive=True))
 
-    to_delete = []
-
     for pkgbuild in all_pkgbuild:
         path_split = pkgbuild.split("/")
 
-        # ignore pkgbuild if in trunk, -any package, not in repos, or on blacklist
-        if path_split[-2] == "trunk" or path_split[-2].split("-")[0] not in config["repos"] or "any" in path_split[-2] \
-                or path_split[-4] in config["blacklist"] or "i686" in path_split[-2]:
-            to_delete.append(pkgbuild)
-
-    final_pkgbuilds = list(set(all_pkgbuild) - set(to_delete))
-
-    for pkgb in final_pkgbuilds:
-        for march in config["march"]:
-            path_split = pkgb.split("/")
-            name = path_split[-4]
-            repo = path_split[-2].split("-")[0] + "-" + march
-
-            if name in get_failed_packages(repo):
-                logging.info("[%s/%s] Skipped due to failing build", repo, name)
+        if path_split[-2] == "trunk" or path_split[-2].split("-")[0] not in config["repos"] or "i686" in path_split[-2]:
             continue
 
-            if package_exists(name, repo):
-                logging.debug("[SEMVER] Comparing %s=%s - %s=%s", name, parse_repo(name, repo), name,
-                              parse_pkgbuild(pkgb))
+        parsed_pkgb = parse_pkgbuild(pkgbuild)
 
-            if not package_exists(name, repo):
-                q.put((pkgb, repo))
-                logging.info("[%s/%s] Build queued (package not build yet)", repo, name)
-            elif parse_repo(name, repo) < parse_pkgbuild(pkgb):
-                q.put((pkgb, repo))
-                logging.info("[%s/%s] Build queued (new version available %s < %s)", repo, name,
-                             parse_repo(name, repo), parse_pkgbuild(pkgb))
+        # ignore pkgbuild if in trunk, -any package, not in repos, on blacklist, not for current arch
+        if "any" in parsed_pkgb["arch"] or parsed_pkgb["pkgbase"] in config["blacklist"]:
+            delete_package(parsed_pkgb, repos)
+        else:
+            for march in config["march"]:
+                repo = path_split[-2].split("-")[0] + "-" + march
+                ver = parse_pkgbuild_ver(parsed=parsed_pkgb)
+
+                if is_package_failed(parsed_pkgb["pkgbase"], ver, repo):
+                    logging.info("[%s/%s] Skipped due to failing build", repo, parsed_pkgb["pkgbase"])
+                    delete_package(parsed_pkgb, [repo, ])
+                    continue
+
+                packages = list(parsed_pkgb["packages"])
+                pkg_f = find_package_files(packages[0], [repo, ])
+                if pkg_f:
+                    logging.debug("[SEMVER] Comparing %s=%s - %s=%s", packages[0], parse_repo(packages[0], repo),
+                                  packages[0], ver)
+                    rv = parse_repo(packages[0], repo)
+                    if rv < ver:
+                        q.put((pkgbuild, repo))
+                        logging.info("[%s/%s] Build queued (new version available %s < %s)", repo,
+                                     parsed_pkgb["pkgbase"],
+                                     rv, ver)
+                else:
+                    q.put((pkgbuild, repo))
+                    logging.info("[%s/%s] Build queued (package not build yet)", repo, parsed_pkgb["pkgbase"])
 
     logging.info("Build queue size: %s", q.qsize())
@@ -376,14 +366,13 @@ if __name__ == '__main__':
     os.nice(5)
 
-    with Manager() as m:
-        d = m.dict()
     setup_chroot()
     sync_marchs_with_config()
+    do_repo_work()
     update_svn2git()
     q = JoinableQueue()
 
-    with Pool(config["build"]["worker"], initializer=run_worker, initargs=(d,)) as pool:
+    with Pool(config["build"]["worker"], initializer=run_worker) as pool:
         fill_queue()
         signal.signal(signal.SIGINT, signal.default_int_handler)
@@ -392,7 +381,6 @@ if __name__ == '__main__':
                 if time.time() - update_last > 900 and q.empty():
                     logging.info("[SVN2GIT] Waiting for queue to finish...")
                     q.join()
-                    do_repo_work()
                     update_last = time.time()
                     update_svn2git()
                     setup_chroot()
@@ -400,10 +388,10 @@ if __name__ == '__main__':
                     if q.qsize() > 0:
                         logging.info("[SVN2GIT] New Queue size: %d", q.qsize())
                 else:
-                    time.sleep(60)
+                    time.sleep(300)
+                do_repo_work()
         except KeyboardInterrupt:
-            with copy_l:
+            with copy_l, failed_l:
                 pool.close()
                 pool.terminate()
                 q.close()