#!/usr/bin/env python3
import fcntl
import glob
import logging.config
import os
import pathlib
import re
import shutil
import signal
import subprocess
import sys
import time
import traceback
from multiprocessing import Pool, current_process, JoinableQueue, Lock
import yaml
from humanfriendly import format_timespan
from packaging.version import LegacyVersion
from utils import parse_pkgbuild, parse_pkgbuild_ver, import_keys, increase_pkgrel
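
# regexes for extracting PKGBUILD fields, the -march flag in makepkg.conf,
# and package names from repo package file names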
regex_pkgver = re.compile(r"^_?pkgver\s*=\s*(.+)$", re.MULTILINE)
regex_epoch = re.compile(r"^epoch\s*=\s*(.+)$", re.MULTILINE)
regex_march = re.compile(r"(-march=)(.+?) ", re.MULTILINE)
regex_validkeys = re.compile(r"^validpgpkeys\+?=\((.*?)\)", re.MULTILINE | re.DOTALL)
regex_pkg_repo = re.compile(r"^(.*)-.*-.*-(?:x86_64|any)\.pkg\.tar\.zst(?:\.sig)?$", re.MULTILINE)
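
# module-level state shared with the forked worker processes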
fp = None
update_last = time.time()
copy_l = Lock()
failed_l = Lock()
repos = []
def build(pkgbuild: str, repo: str) -> None:
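    """Build a single PKGBUILD for the given repo inside its chroot.

    Sets up the march-specific makepkg config, imports PGP keys, bumps the
    pkgrel and runs makechrootpkg. Successful builds are signed and copied
    into the repo; failures are recorded in <repo>_failed.txt together with
    their build log.
    """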
start_time = time.time()
parsed = parse_pkgbuild(pkgbuild)
name = parsed["pkgbase"]
process_name = current_process().name
logging.info("[%s/%s/%s] Build starting (Queue ~= %s)", process_name, repo, name, q.qsize())
# setup makepkg
setup_makepkg(repo)
# import pgp keys
import_keys(pkgbuild)
# increase pkgrel
increase_pkgrel(pkgbuild, parsed)
# build with devtools
os.chdir(pathlib.Path(pkgbuild).parent)
    res = subprocess.run(
        ["makechrootpkg", "-c", "-D", config["basedir"]["makepkg"], "-l", process_name, "-r",
         config["basedir"]["chroot"], "--", "--config",
         os.path.join(config["basedir"]["makepkg"],
                      "makepkg-" + '-'.join(repo.split("-")[1:]) + ".conf")],
        stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
if res.returncode:
logging.warning("[%s/%s/%s] Build failed. Check repo/logs for more information.", process_name, repo, name)
# write packagename to failed list
with failed_l, open(os.path.join(config["basedir"]["repo"], repo + "_failed.txt"), "a") as f:
f.write(name + "==" + str(parse_pkgbuild_ver(parsed=parsed)) + "\n")
# write logs
        pathlib.Path(os.path.join(config["basedir"]["repo"], "logs", repo)).mkdir(parents=True, exist_ok=True)
with open(os.path.join(config["basedir"]["repo"], "logs", repo, name + ".log"), "w") as log:
log.write(res.stdout.decode(errors="ignore"))
logging.debug("[GIT] %s", subprocess.run(["git", "clean", "-xdff"], check=False, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT).stdout.decode(errors="ignore"))
return
# signing
pkgs = glob.glob("*.pkg.tar.zst")
for pkg in pkgs:
s_res = subprocess.run(["gpg", "--batch", "--detach-sign", pkg], stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
if s_res.returncode:
logging.error("[%s/%s/%s] Signing failed: %s", process_name, repo, name,
s_res.stdout.decode(errors="ignore"))
logging.debug("[GIT] %s", subprocess.run(["git", "clean", "-xdff"], check=False, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT).stdout.decode(errors="ignore"))
return
# copying
pkgs.extend(glob.glob("*.pkg.tar.zst.sig"))
with copy_l:
for pkg in pkgs:
logging.debug("[%s/%s/%s] Copy %s to %s", process_name, repo, name, pkg,
os.path.join(config["basedir"]["repo"], repo, "os", config["arch"] + "/"))
shutil.copy2(pkg, os.path.join(config["basedir"]["repo"], repo, "os", config["arch"] + "/"))
logging.debug("[GIT] %s", subprocess.run(["git", "clean", "-xdff"], check=False, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT).stdout.decode(errors="ignore"))
logpath = pathlib.Path(os.path.join(config["basedir"]["repo"], "logs", repo, name + ".log"))
if logpath.exists():
os.remove(logpath)
logging.info("[%s/%s/%s] Build successful (%s)", process_name, repo, name,
format_timespan(time.time() - start_time))
def run_worker() -> None:
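    """Endless worker loop run as the Pool initializer in each worker process.

    Pulls (pkgbuild, repo) tuples from the shared JoinableQueue inherited
    from the main process; exceptions are logged so a single failed build
    does not kill the worker.
    """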
os.nice(20)
while True:
try:
build(*q.get(block=True))
except Exception as e:
logging.error("Error in worker: %s", e)
traceback.print_exc()
finally:
q.task_done()
def do_repo_work() -> None:
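    """Rebuild each repo database with repo-add and prune superseded
    packages with paccache (keeping one version per package)."""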
for repo in repos:
pkgs = glob.glob(os.path.join(config["basedir"]["repo"], repo, "os", config["arch"], "*.zst"))
args = ["repo-add", "-s", "-v", "-p", "-n",
os.path.join(config["basedir"]["repo"], repo, "os", config["arch"], repo + ".db.tar.xz")]
args.extend(pkgs)
r_res = subprocess.run(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
logging.debug("[REPO-ADD] %s", r_res.stdout.decode(errors="ignore"))
if r_res.returncode:
logging.error("[REPO/%s] Repo action failed: %s", repo, r_res.stdout.decode(errors="ignore"))
p_res = subprocess.run(
["paccache", "-rc", os.path.join(config["basedir"]["repo"], repo, "os", config["arch"]), "-k", "1"],
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
logging.debug("[PACCACHE] %s", p_res.stdout.decode(errors="ignore"))
if p_res.returncode:
logging.error("[REPO/%s] Repo cleanup failed: %s", repo, p_res.stdout.decode(errors="ignore"))
def already_running() -> bool:
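    """Return True if another instance already holds the exclusive lock on
    /tmp/alhp.lock."""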
global fp
fp = os.open(f"/tmp/alhp.lock", os.O_WRONLY | os.O_CREAT)
try:
fcntl.lockf(fp, fcntl.LOCK_EX | fcntl.LOCK_NB)
return False
except OSError:
return True
def delete_package(parsed_pkgbuild: dict, repo: list[str]) -> None:
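    """Delete all built package files (and their signatures) belonging to a
    pkgbase from the given repos, updating each repo database with
    repo-remove."""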
for pkg in parsed_pkgbuild["packages"]:
pkg_f = find_package_files(pkg, repo)
if pkg_f:
for f in pkg_f:
base = pathlib.Path(f).parent
repo = f.split("/")[-4]
logging.info("[%s/%s] Deleting package files: %s", repo, parsed_pkgbuild["pkgbase"],
pathlib.Path(f).name)
args = ["repo-remove", "-s", "-v", os.path.join(base, repo + ".db.tar.xz"), pkg]
r_res = subprocess.run(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
logging.debug("[REPO-REMOVE] %s", r_res.stdout.decode(errors="ignore"))
if r_res.returncode:
logging.error("[REPO/%s] Repo action failed: %s", repo, r_res.stdout.decode(errors="ignore"))
continue
os.remove(f)
os.remove(f + ".sig")
def find_package_files(name: str, repo: list[str]) -> list[str]:
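    """Return the paths of all package files in the given repos whose
    package name matches exactly."""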
pkgs = []
for t_repo in repo:
files: list[str]
root: str
for root, dirs, files in os.walk(os.path.join(config["basedir"]["repo"], t_repo, "os", config["arch"])):
for file in files:
if file.endswith(".sig"):
continue
res = regex_pkg_repo.search(file)
if res and res.group(1) == name:
pkgs.append(os.path.join(root, file))
return pkgs
def is_package_failed(package: str, ver: LegacyVersion, repo: str) -> bool:
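    """Check whether this package version is recorded as failed for a repo.

    If a newer version is now available, the stale entry is dropped from
    <repo>_failed.txt and False is returned so the package is rebuilt.
    """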
pkgs = get_failed_packages(repo)
p: str
for p in pkgs:
s = p.split("==")
if s[0] == package:
if ver > LegacyVersion(s[1]):
with failed_l, open(os.path.join(config["basedir"]["repo"], repo + "_failed.txt"), "r+") as f:
d = f.readlines()
f.seek(0)
f.truncate()
for i in d:
                        if i.strip("\n") != p.strip("\n"):
f.write(i)
return False
else:
return True
return False
def get_failed_packages(repo: str) -> list[str]:
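    """Return the raw 'name==version' lines from <repo>_failed.txt, or an
    empty list if no failed list exists yet."""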
if os.path.exists(os.path.join(config["basedir"]["repo"], repo + "_failed.txt")):
with failed_l, open(os.path.join(config["basedir"]["repo"], repo + "_failed.txt")) as p:
return p.readlines()
else:
return []
def setup_chroot() -> None:
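    """Create the root build chroot with mkarchroot on first run, otherwise
    update it in place with arch-nspawn."""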
if not os.path.exists(os.path.join(config["basedir"]["chroot"], "root")):
pathlib.Path(config["basedir"]["chroot"]).mkdir(parents=True, exist_ok=True)
s = subprocess.run(["mkarchroot", "-C", "/usr/share/devtools/pacman-extra.conf",
os.path.join(config["basedir"]["chroot"], "root"), "base-devel"],
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
logging.debug("[MKCHROOT] %s", s.stdout.decode(errors='ignore'))
if s.returncode:
logging.fatal("[MKCHROOT] Failed to create root chroot: %s", s.stdout.decode(errors="ignore"))
sys.exit(2)
else:
logging.debug("[NSPAWN] %s", subprocess.run(
["arch-nspawn", os.path.join(config["basedir"]["chroot"], "root"), "pacman", "-Syuu", "--noconfirm"]))
def setup_makepkg(repo: str) -> None:
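    """Generate the march-specific makepkg config from makepkg.tmpl:
    drop -mtune=generic, raise -O2 to -O3 and set -march to the repo's
    target architecture."""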
makepkg_repo = os.path.join(config["basedir"]["makepkg"], "makepkg-" + '-'.join(repo.split("-")[1:]) + ".conf")
if not os.path.exists(makepkg_repo):
pathlib.Path(config["basedir"]["makepkg"]).mkdir(parents=True, exist_ok=True)
shutil.copyfile("makepkg.tmpl", makepkg_repo)
with open(makepkg_repo) as conf:
c_all = conf.read()
c_all = c_all.replace("-mtune=generic", "")
c_all = c_all.replace("-O2", "-O3")
c_all = regex_march.sub(r"\1" + '-'.join(repo.split("-")[1:]), c_all)
with open(makepkg_repo, "w") as conf:
conf.write(c_all)
def update_svn2git() -> None:
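    """Clone the configured svn2git mirrors on first run, otherwise clean,
    reset and pull each checkout."""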
if not os.path.exists(config["basedir"]["upstream"]):
pathlib.Path(config["basedir"]["upstream"]).mkdir(parents=True, exist_ok=True)
for git_dir, git_url in config["svn2git"].items():
git_path = os.path.join(config["basedir"]["upstream"], git_dir)
if not os.path.exists(git_path):
logging.debug("[GIT] %s",
subprocess.run(["git", "clone", "--depth=1", git_url, git_path], check=True,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT).stdout.decode(
errors="ignore"))
else:
os.chdir(git_path)
logging.debug("[GIT] %s", subprocess.run(["git", "clean", "-xdff"], check=False, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT).stdout.decode(errors="ignore"))
logging.debug("[GIT] %s", subprocess.run(["git", "reset", "--hard"], check=True, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT).stdout.decode(errors="ignore"))
logging.debug("[GIT] %s", subprocess.run(["git", "pull"], check=True, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT).stdout.decode(errors="ignore"))
def parse_repo(name: str, repo: str) -> LegacyVersion:
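    """Return the version (pkgver-pkgrel) of the package currently in the
    repo, parsed from its package file name."""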
ver_split = find_package_files(name, [repo, ])[0].split("-")
return LegacyVersion(ver_split[-3] + "-" + ver_split[-2])
def sync_marchs_with_config() -> None:
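    """Create repo directories for every configured repo/march combination
    and remove repos (and their makepkg configs) that are no longer
    configured."""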
existing_repos = []
with os.scandir(config["basedir"]["repo"]) as it:
entry: os.DirEntry
for entry in it:
if not entry.name.startswith('logs') and entry.is_dir():
existing_repos.append(entry.name)
    repo_quota = ["{}-{}".format(r, a) for r in config["repos"] for a in config["march"]]
logging.info("Repos: %s", repo_quota)
global repos
repos = repo_quota
repos_create = list(set(repo_quota) - set(existing_repos))
repos_delete = list(set(existing_repos) - set(repo_quota))
    for repo in repos_create:
        repo_path = os.path.join(config["basedir"]["repo"], repo, "os", config["arch"])
        logging.debug("Create repo %s: %s", repo, repo_path)
        pathlib.Path(repo_path).mkdir(parents=True, exist_ok=True)
        setup_makepkg(repo)
for repo in repos_delete:
logging.debug("Delete repo %s: %s", repo, os.path.join(config["basedir"]["repo"], repo))
shutil.rmtree(os.path.join(config["basedir"]["repo"], repo))
os.remove(os.path.join(config["basedir"]["makepkg"], "makepkg-" + repo + ".conf"))
def fill_queue() -> None:
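    """Walk all upstream PKGBUILDs and queue a build for every package that
    is missing from its repo or has a newer upstream version."""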
all_pkgbuild = []
for git_dir, git_url in config["svn2git"].items():
all_pkgbuild.extend(
glob.glob(os.path.join(config["basedir"]["upstream"], git_dir) + "/**/PKGBUILD", recursive=True))
for pkgbuild in all_pkgbuild:
        path_split = pkgbuild.split("/")
        # skip trunk checkouts, repos we do not build for, and i686 packages
        if path_split[-2] == "trunk" or path_split[-2].split("-")[0] not in config["repos"] \
                or "i686" in path_split[-2]:
            continue
        parsed_pkgb = parse_pkgbuild(pkgbuild)
        # arch=any and blacklisted packages are removed from all repos instead
        if "any" in parsed_pkgb["arch"] or parsed_pkgb["pkgbase"] in config["blacklist"]:
delete_package(parsed_pkgb, repos)
else:
for march in config["march"]:
repo = path_split[-2].split("-")[0] + "-" + march
ver = parse_pkgbuild_ver(parsed=parsed_pkgb)
if is_package_failed(parsed_pkgb["pkgbase"], ver, repo):
logging.info("[%s/%s] Skipped due to failing build", repo, parsed_pkgb["pkgbase"])
delete_package(parsed_pkgb, [repo, ])
continue
packages = list(parsed_pkgb["packages"])
pkg_f = find_package_files(packages[0], [repo, ])
if pkg_f:
logging.debug("[SEMVER] Comparing %s=%s - %s=%s", packages[0], parse_repo(packages[0], repo),
packages[0], ver)
rv = parse_repo(packages[0], repo)
if rv < ver:
q.put((pkgbuild, repo))
logging.info("[%s/%s] Build queued (new version available %s < %s)", repo,
parsed_pkgb["pkgbase"],
rv, ver)
else:
q.put((pkgbuild, repo))
logging.info("[%s/%s] Build queued (package not build yet)", repo, parsed_pkgb["pkgbase"])
logging.info("Build queue size: %s", q.qsize())
if __name__ == '__main__':
with open("config.yaml") as c:
config = yaml.safe_load(c)
logging.config.dictConfig(config["logging"])
logging.getLogger("ALHP")
if already_running():
logging.error("Another instance is already running")
sys.exit(2)
if not os.path.exists(config["basedir"]["repo"]):
pathlib.Path(config["basedir"]["repo"]).mkdir(parents=True, exist_ok=True)
os.nice(5)
setup_chroot()
sync_marchs_with_config()
do_repo_work()
update_svn2git()
q = JoinableQueue()
with Pool(config["build"]["worker"], initializer=run_worker) as pool:
fill_queue()
signal.signal(signal.SIGINT, signal.default_int_handler)
while True:
try:
if time.time() - update_last > 900 and q.empty():
logging.info("[SVN2GIT] Waiting for queue to finish...")
q.join()
update_last = time.time()
update_svn2git()
setup_chroot()
fill_queue()
if q.qsize() > 0:
logging.info("[SVN2GIT] New Queue size: %d", q.qsize())
else:
time.sleep(300)
do_repo_work()
except KeyboardInterrupt:
with copy_l, failed_l:
pool.close()
pool.terminate()
q.close()
do_repo_work()
sys.exit(0)