From 3b3e5e14233d88a51e50fc79b52502d587049f64 Mon Sep 17 00:00:00 2001
From: Giovanni Harting <539@idlegandalf.com>
Date: Wed, 12 Aug 2020 09:05:00 +0200
Subject: [PATCH] initial commit

---
 alhp.service |  14 ++++
 config.yaml  |  43 ++++++++++
 master.py    | 224 +++++++++++++++++++++++++++++++++++++++++++++++++++
 3 files changed, 281 insertions(+)
 create mode 100644 alhp.service
 create mode 100644 config.yaml
 create mode 100644 master.py

diff --git a/alhp.service b/alhp.service
new file mode 100644
index 0000000..aa11289
--- /dev/null
+++ b/alhp.service
@@ -0,0 +1,14 @@
+[Unit]
+Description=Python-based Arch Linux instruction-set enabled repo build manager.
+After=network.target
+
+[Service]
+User=build
+Group=build
+WorkingDirectory=/home/build
+ExecStart=/opt/ALHP/master.py
+MemoryHigh=10G
+CPUQuota=60%
+
+[Install]
+WantedBy=multi-user.target
\ No newline at end of file
diff --git a/config.yaml b/config.yaml
new file mode 100644
index 0000000..27f0044
--- /dev/null
+++ b/config.yaml
@@ -0,0 +1,43 @@
+arch: x86_64
+repos:
+  - core
+  - extra
+  - community
+
+basedir:
+  repo: /tmp/www/alhp/
+  svn2git: al_upstream/
+  build: build/
+  makepkg: /usr/share/devtools/makepkg-x86_64.conf
+
+march:
+  - znver2
+  - sandybridge
+  - ivybridge
+
+blacklist:
+  - pacman
+
+logging:
+  version: 1
+  disable_existing_loggers: True
+  formatters:
+    simple:
+      format: '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
+
+  handlers:
+    console:
+      class: logging.StreamHandler
+      level: DEBUG
+      formatter: simple
+      stream: ext://sys.stdout
+
+  loggers:
+    ALHP:
+      level: DEBUG
+      handlers: [console]
+      propagate: yes
+
+  root:
+    level: DEBUG
+    handlers: [console]
\ No newline at end of file
diff --git a/master.py b/master.py
new file mode 100644
index 0000000..4578215
--- /dev/null
+++ b/master.py
@@ -0,0 +1,224 @@
+import fcntl
+import glob
+import logging.config
+import os
+import pathlib
+import re
+import shutil
+import subprocess
+import sys
+import time
+from queue import Queue
+
+import semver
+import yaml
+
+regex_pkgver = re.compile(r"^pkgver\s*=\s*(.+)$", re.MULTILINE)
+regex_pkgrel = re.compile(r"^pkgrel\s*=\s*(.+)$", re.MULTILINE)
+regex_march = re.compile(r"(-march=)(.+?) ")
+fp = None
+q = Queue()
+update_last = time.time()
+
+
+def already_running():
+    global fp
+    fp = os.open("/tmp/alhp.lock", os.O_WRONLY | os.O_CREAT)
+
+    try:
+        fcntl.lockf(fp, fcntl.LOCK_EX | fcntl.LOCK_NB)
+        return False
+    except OSError:
+        return True
+
+
+def find_all_files_for_pkg(name, repo):
+    searchpath = os.path.join(config["basedir"]["repo"], repo, "os", config["arch"]) + "/" + name + "-*.pkg.*"
+    # logging.debug("Search for packages with %s", searchpath)
+    pkgs = glob.glob(searchpath)
+
+    return pkgs
+
+
+def build(pkgbuild, repo):
+    start_time = time.time()
+    logging.info("[%s] Build starting", pathlib.Path(pkgbuild).parts[-4])
+
+    # setup buildflags
+    setup_makepkg(repo)
+
+    # build with devtools
+    os.chdir(pathlib.Path(pkgbuild).parent)
+    res = subprocess.run(["sudo", "extra-x86_64-build"], capture_output=True)
+    if res.returncode:
+        logging.warning("[%s] Build failed: %s", pathlib.Path(pkgbuild).parts[-4], str(res.stderr))
+        subprocess.run(["git", "clean", "-xdf"], check=True, capture_output=True)
+        os.chdir(sys.path[0])
+        return
+
+    # signing
+    pkgs = glob.glob("*.pkg.tar.zst")
+    for pkg in pkgs:
+        s_res = subprocess.run(["gpg", "--batch", "--detach-sign", pkg], capture_output=True)
+        if s_res.returncode:
+            logging.error("[%s] Signing failed: %s", pathlib.Path(pkgbuild).parts[-4], s_res.stderr)
+            subprocess.run(["git", "clean", "-xdf"], check=True, capture_output=True)
+            os.chdir(sys.path[0])
+            return
+
+    # copying
+    pkgs.extend(glob.glob("*.pkg.tar.zst.sig"))
+    for pkg in pkgs:
+        logging.debug("[%s] Copy %s to %s", pathlib.Path(pkgbuild).parts[-4], pkg,
+                      os.path.join(config["basedir"]["repo"], repo, "os", config["arch"] + "/"))
+        shutil.copy2(pkg, os.path.join(config["basedir"]["repo"], repo, "os", config["arch"] + "/"))
+
+    # repo
+    r_res = subprocess.run(["repo-add", "-s", "-v",
+                            os.path.join(config["basedir"]["repo"], repo, "os", config["arch"], repo + ".db.tar.xz"),
+                            pkgs[0]], capture_output=True)
+    if r_res.returncode:
+        logging.error("[%s] Repo action failed: %s", pathlib.Path(pkgbuild).parts[-4], r_res)
+        subprocess.run(["git", "clean", "-xdf"], check=True, capture_output=True)
+        os.chdir(sys.path[0])
+        return
+
+    p_res = subprocess.run(
+        ["paccache", "-r", "-k", "1", "-c", os.path.join(config["basedir"]["repo"], repo, "os", config["arch"])],
+        capture_output=True)
+    if p_res.returncode:
+        logging.error("[%s] Repo cleanup failed: %s", pathlib.Path(pkgbuild).parts[-4], p_res)
+        subprocess.run(["git", "clean", "-xdf"], check=True, capture_output=True)
+        os.chdir(sys.path[0])
+        return
+
+    # cleanup
+    subprocess.run(["git", "clean", "-xdf"], check=True, capture_output=True)
+    os.chdir(sys.path[0])
+    logging.info("[%s] Build successful (%s)", pathlib.Path(pkgbuild).parts[-4], time.time() - start_time)
+
+
+def setup_makepkg(repo):
+    with open(config["basedir"]["makepkg"]) as conf:
+        c_all = conf.read()
+    c_all = c_all.replace("-mtune=generic", "")
+    c_all = c_all.replace("-O2", "-O3")
+    c_all = regex_march.sub(r"\1" + repo.split("-")[1] + " ", c_all)
+    with open(config["basedir"]["makepkg"], "w") as conf:
+        conf.write(c_all)
+
+
+def package_exists(name, repo):
+    pkgs = find_all_files_for_pkg(name, repo)
+
+    return len(pkgs) > 0
+
+
+def update_git2svn():
+    if not os.path.exists(config["basedir"]["svn2git"]):
+        logging.debug(subprocess.run(
+            ["git", "clone", "https://github.com/archlinux/svntogit-packages.git", config["basedir"]["svn2git"]],
+            check=True, capture_output=True))
+    else:
+        os.chdir(config["basedir"]["svn2git"])
+        logging.debug(subprocess.run(["git", "pull"], check=True, capture_output=True))
+        os.chdir("..")
+
+
+def parse_pkgbuild(pkgbuild_file):
+    with open(pkgbuild_file) as p:
+        pkgbuild_str = p.read()
+    pkgver = regex_pkgver.findall(pkgbuild_str)
+    pkgrel = regex_pkgrel.findall(pkgbuild_str)
+
+    return semver.VersionInfo.parse("{}-{}".format(pkgver[0], pkgrel[0]))
+
+
+def parse_repo(name, repo):
+    ver_split = find_all_files_for_pkg(name, repo)[0].split("-")
+
+    return semver.VersionInfo.parse(ver_split[-3] + "-" + ver_split[-2])
+
+
+def sync_marchs_with_config():
+    repos = [dI for dI in os.listdir(config["basedir"]["repo"]) if
+             os.path.isdir(os.path.join(config["basedir"]["repo"], dI))]
+
+    repo_quota = []
+
+    for r, a in ((x, y) for x in config["repos"] for y in config["march"]):
+        repo_quota.append("{}-{}".format(r, a))
+
+    logging.info("Repos: %s", repo_quota)
+    repos_create = list(set(repo_quota) - set(repos))
+    repos_delete = list(set(repos) - set(repo_quota))
+
+    for repo in repos_create:
+        logging.debug("Create repo %s: %s", repo, os.path.join(config["basedir"]["repo"], repo, "os/x86_64"))
+        pathlib.Path(os.path.join(config["basedir"]["repo"], repo, "os/x86_64")).mkdir(parents=True, exist_ok=True)
+
+    for repo in repos_delete:
+        logging.debug("Delete repo %s: %s", repo, os.path.join(config["basedir"]["repo"], repo))
+        shutil.rmtree(os.path.join(config["basedir"]["repo"], repo))
+
+
+def fill_queue():
+    all_pkgbuild = glob.glob(os.path.join(config["basedir"]["svn2git"]) + "/**/PKGBUILD", recursive=True)
+    to_delete = []
+
+    for pkgbuild in all_pkgbuild:
+        path_split = pkgbuild.split("/")
+
+        # ignore pkgbuild if in trunk, -any package, testing or on blacklist
+        if len(path_split) < 5 or "any" in path_split[3] or "testing" in path_split[3] or path_split[1] in config[
+                "blacklist"]:
+            to_delete.append(pkgbuild)
+
+    final_pkgbuilds = list(set(all_pkgbuild) - set(to_delete))
+
+    for pkgb in final_pkgbuilds:
+        for march in config["march"]:
+            path_split = pkgb.split("/")
+            name = path_split[1]
+            repo = path_split[3].split("-")[0] + "-" + march
+
+            if not package_exists(name, repo) or parse_repo(name, repo) < parse_pkgbuild(pkgb):
+                q.put((pkgb, repo))
+
+    logging.info("Queue size after fill: %s", q.qsize())
+
+
+if __name__ == '__main__':
+    with open("config.yaml") as c:
+        config = yaml.safe_load(c)
+
+    logging.config.dictConfig(config["logging"])
+    logging.getLogger("ALHP")
+
+    if already_running():
+        logging.error("Another instance is already running")
+        sys.exit(2)
+
+    if os.path.exists(config["basedir"]["build"]):
+        shutil.rmtree(config["basedir"]["build"])
+
+    os.mkdir(config["basedir"]["build"])
+    logging.debug("Build dir created")
+
+    if not os.path.exists(config["basedir"]["repo"]):
+        pathlib.Path(config["basedir"]["repo"]).mkdir(parents=True, exist_ok=True)
+
+    sync_marchs_with_config()
+    update_git2svn()
+    fill_queue()
+
+    while True:
+        if q.qsize() > 0:
+            build(*q.get())
+        else:
+            time.sleep(60)
+
+        if time.time() - update_last > 900:
+            update_git2svn()
+            update_last = time.time()
+            fill_queue()