This repository has been archived on 2021-06-12. You can view files and clone it. You cannot open issues or pull requests or push a commit.
Files
alhp/master.py

249 lines
8.0 KiB
Python
Raw Normal View History

2020-08-12 09:05:00 +02:00
import fcntl
import glob
import logging.config
import os
import pathlib
import re
import shutil
import subprocess
import sys
import time
2020-08-12 09:23:11 +02:00
from queue import Queue, Empty
2020-08-12 09:05:00 +02:00
import semver
import yaml
# Regexes for pulling fields out of a PKGBUILD. The pkgver/pkgrel/validkeys
# patterns are line-anchored and are applied to the whole file as one string,
# so they need re.MULTILINE — without it "^" only matches at the start of the
# string. (The original regex_pkgver also matched "pkgrel" — copy/paste bug.)
regex_pkgver = re.compile(r"^pkgver\s*=\s*(.+)$", re.MULTILINE)
regex_pkgrel = re.compile(r"^pkgrel\s*=\s*(.+)$", re.MULTILINE)
regex_march = re.compile(r"(-march=)(.+?) ")
regex_validkeys = re.compile(r"^validpgpkeys=\((.*)\).*$", re.MULTILINE)

fp = None  # lock-file descriptor, kept open by already_running() for the process lifetime
q = Queue()  # pending (pkgbuild_path, repo) build jobs
update_last = time.time()  # timestamp of the last svn2git refresh
def already_running():
    """Return True when another ALHP instance already holds the lock.

    Opens /tmp/alhp.lock and attempts an exclusive, non-blocking lock.
    The descriptor is stored in the module-global ``fp`` so the lock
    remains held for the lifetime of this process.
    """
    global fp
    fp = os.open("/tmp/alhp.lock", os.O_WRONLY | os.O_CREAT)
    try:
        fcntl.lockf(fp, fcntl.LOCK_EX | fcntl.LOCK_NB)
    except OSError:
        # somebody else holds the lock
        return True
    return False
def find_all_files_for_pkg(name, repo):
    """Return every built package file for *name* in *repo*'s pool directory."""
    pattern = os.path.join(
        config["basedir"]["repo"], repo, "os", config["arch"], name, "*.pkg.*")
    # logging.debug("Search for packages with %s", pattern)
    return glob.glob(pattern)
def _reset_workdir():
    """Drop untracked build artifacts and return to the script directory."""
    subprocess.run(["git", "clean", "-xdf"], check=True, capture_output=True)
    os.chdir(sys.path[0])


def build(pkgbuild, repo):
    """Build *pkgbuild* with devtools, sign the packages and publish to *repo*.

    Steps: rewrite makepkg.conf for the repo's march, import PGP keys,
    build in a clean chroot, detach-sign every package, copy packages and
    signatures into the repo pool, update the repo database, prune old
    versions. On any failure the build dir is cleaned and we return early.
    """
    start_time = time.time()
    # .../<name>/repos/<repo>-<arch>/PKGBUILD -> parts[-4] is the package name
    name = pathlib.Path(pkgbuild).parts[-4]
    logging.info("[%s/%s] Build starting", repo, name)

    # setup buildflags
    setup_makepkg(repo)

    # import pgp keys
    import_keys(pkgbuild)

    # build with devtools
    os.chdir(pathlib.Path(pkgbuild).parent)
    res = subprocess.run(["sudo", "extra-x86_64-build"], capture_output=True)
    if res.returncode:
        logging.warning("[%s/%s] Build failed: %s", repo, name, res)
        _reset_workdir()
        return

    # signing
    pkgs = glob.glob("*.pkg.tar.zst")
    for pkg in pkgs:
        s_res = subprocess.run(["gpg", "--batch", "--detach-sign", pkg], capture_output=True)
        if s_res.returncode:
            logging.error("[%s/%s] Signing failed: %s", repo, name, s_res)
            _reset_workdir()
            return

    # copying packages and their signatures into the repo pool
    built_pkgs = list(pkgs)  # package files only, before the signatures are appended
    pkgs.extend(glob.glob("*.pkg.tar.zst.sig"))
    for pkg in pkgs:
        logging.debug("[%s/%s] Copy %s to %s", repo, name, pkg,
                      os.path.join(config["basedir"]["repo"], repo, "os", config["arch"] + "/"))
        shutil.copy2(pkg, os.path.join(config["basedir"]["repo"], repo, "os", config["arch"] + "/"))

    # repo: add ALL built packages to the database (previously only pkgs[0]
    # was added, silently dropping the remaining packages of a split PKGBUILD)
    r_res = subprocess.run(["repo-add", "-s", "-v",
                            os.path.join(config["basedir"]["repo"], repo, "os", config["arch"],
                                         repo + ".db.tar.xz")] + built_pkgs,
                           capture_output=True)
    if r_res.returncode:
        logging.error("[%s/%s] Repo action failed: %s", repo, name, r_res)
        _reset_workdir()
        return

    # prune old versions from the pool; "-k" and "1" must be separate argv
    # elements — the original single "-k 1" string is not parsed by paccache
    p_res = subprocess.run(
        ["paccache", "-rc", os.path.join(config["basedir"]["repo"], repo, "os", config["arch"]), "-k", "1"],
        capture_output=True)
    if p_res.returncode:
        logging.error("[%s/%s] Repo cleanup failed: %s", repo, name, p_res)
        _reset_workdir()
        return

    # cleanup
    _reset_workdir()
    logging.info("[%s/%s] Build successful (%s)", repo, name, time.time() - start_time)
def setup_makepkg(repo):
    """Rewrite makepkg.conf so builds target the march encoded in *repo*.

    *repo* has the form "<reponame>-<march>" (e.g. "extra-x86-64-v3").
    Everything after the FIRST dash is the march, which may itself contain
    dashes — hence split("-", 1) and not split("-") (the latter would yield
    "x86" for "extra-x86-64-v3").
    """
    march = repo.split("-", 1)[1]
    with open(config["basedir"]["makepkg"]) as conf:
        c_all = conf.read()
    # drop -mtune, raise the optimisation level, pin -march to the target
    c_all = c_all.replace("-mtune=generic", "")
    c_all = c_all.replace("-O2", "-O3")
    c_all = regex_march.sub(r"\1" + march + " ", c_all)
    with open(config["basedir"]["makepkg"], "w") as conf:
        conf.write(c_all)
def import_keys(pkgbuild):
    """Import every key listed in *pkgbuild*'s validpgpkeys array into gpg."""
    with open(pkgbuild) as p:
        keys_s = regex_validkeys.findall(p.read())
    logging.debug("Found raw keys: %s", keys_s)
    if not keys_s:
        return
    # Each match is the raw body of one validpgpkeys=(...) array; split it into
    # individual entries. (The original kept a list of lists here and then
    # called str.replace on the inner LISTS, which raised AttributeError.)
    keys = []
    for match in keys_s:
        keys.extend(match.split(" "))
    logging.debug("Found keys: %s", keys)
    for key in keys:
        nk = key.replace("'", "")  # strip shell quoting
        if not nk:
            continue
        # "--keyserver" and its value must be separate argv elements; as one
        # string gpg treats the whole thing as an unknown option
        logging.debug(subprocess.run(
            ["gpg", "--keyserver", "keyserver.ubuntu.com", "--recv-keys", nk],
            check=True, capture_output=True))
        logging.debug("Imported key %s", nk)
def package_exists(name, repo):
    """Return True when at least one built package for *name* is in *repo*."""
    return bool(find_all_files_for_pkg(name, repo))
def update_git2svn():
    """Clone or refresh the svntogit-packages checkout.

    Runs git via ``cwd=`` instead of os.chdir: the original chdir'd into the
    checkout and then ``os.chdir("..")``, which only returns to the previous
    working directory when the configured path has exactly one component.
    """
    svn2git_dir = config["basedir"]["svn2git"]
    if not os.path.exists(svn2git_dir):
        logging.debug(subprocess.run(
            ["git", "clone", "https://github.com/archlinux/svntogit-packages.git", svn2git_dir],
            check=True, capture_output=True))
    else:
        # drop leftover build artifacts before pulling
        logging.debug(subprocess.run(["git", "clean", "-xdf"],
                                     cwd=svn2git_dir, check=True, capture_output=True))
        logging.debug(subprocess.run(["git", "pull"],
                                     cwd=svn2git_dir, check=True, capture_output=True))
def parse_pkgbuild(pkgbuild_file):
    """Parse pkgver/pkgrel out of *pkgbuild_file* into a semver VersionInfo.

    ``findall`` returns lists; the original interpolated the lists themselves
    (producing "['1.2.3']-['1']"), which semver can never parse — take the
    first match of each instead. Raises IndexError when a field is missing.
    """
    with open(pkgbuild_file) as p:
        pkgbuild_str = p.read()
    pkgver = regex_pkgver.findall(pkgbuild_str)[0]
    pkgrel = regex_pkgrel.findall(pkgbuild_str)[0]
    return semver.VersionInfo.parse("{}-{}".format(pkgver, pkgrel))
def parse_repo(name, repo):
    """Return the version of *name* currently published in *repo*.

    Package files are named ``<name>-<pkgver>-<pkgrel>-<arch>.pkg.*``, so the
    version is in the third- and second-last dash-separated fields. They are
    joined with "-" to match parse_pkgbuild's "<pkgver>-<pkgrel>" format —
    the original concatenated them with no separator (e.g. "1.2.3" + "1"
    became "1.2.31"), breaking the comparison.
    """
    ver_split = find_all_files_for_pkg(name, repo)[0].split("-")
    return semver.VersionInfo.parse(ver_split[-3] + "-" + ver_split[-2])
def sync_marchs_with_config():
    """Create/delete per-march repo directories to match the config.

    The wanted set is the cross product of config["repos"] and
    config["march"]. Directories on disk but no longer wanted are removed;
    missing ones are created. Uses config["arch"] for the arch path component
    — the original hard-coded "os/x86_64" here while every other path in this
    file is built from config["arch"].
    """
    existing = [d for d in os.listdir(config["basedir"]["repo"])
                if os.path.isdir(os.path.join(config["basedir"]["repo"], d))]
    wanted = ["{}-{}".format(r, m) for r in config["repos"] for m in config["march"]]
    logging.info("Repos: %s", wanted)
    for repo in set(wanted) - set(existing):
        path = os.path.join(config["basedir"]["repo"], repo, "os", config["arch"])
        logging.debug("Create repo %s: %s", repo, path)
        pathlib.Path(path).mkdir(parents=True, exist_ok=True)
    for repo in set(existing) - set(wanted):
        logging.debug("Delete repo %s: %s", repo, os.path.join(config["basedir"]["repo"], repo))
        shutil.rmtree(os.path.join(config["basedir"]["repo"], repo))
def fill_queue():
    """Queue a (pkgbuild, repo) build job for every eligible PKGBUILD.

    Walks the svn2git checkout and skips PKGBUILDs that live in trunk, build
    "-any" packages, belong to repos we don't track, or are blacklisted. For
    each remaining PKGBUILD and each configured march, a job is queued when
    the package is missing from that repo or older than the PKGBUILD version.
    """
    all_pkgbuild = glob.glob(os.path.join(config["basedir"]["svn2git"]) + "/**/PKGBUILD", recursive=True)
    to_delete = []
    for pkgbuild in all_pkgbuild:
        path_split = pkgbuild.split("/")
        # ignore pkgbuild if in trunk, -any package, not in repos, or on blacklist
        if path_split[-2] == "trunk" or path_split[-2].split("-")[0] not in config[
            "repos"] or "any" in path_split[-2] or path_split[-4] in config["blacklist"]:
            to_delete.append(pkgbuild)
    final_pkgbuilds = list(set(all_pkgbuild) - set(to_delete))
    for pkgb in final_pkgbuilds:
        for march in config["march"]:
            path_split = pkgb.split("/")
            # Index from the END of the path (.../<name>/repos/<repo>-<arch>/PKGBUILD),
            # matching the filter above and build(): the original used [1]/[3],
            # which only works when the configured svn2git base dir is a single
            # relative path component.
            name = path_split[-4]
            repo = path_split[-2].split("-")[0] + "-" + march
            if not package_exists(name, repo) or parse_repo(name, repo) < parse_pkgbuild(pkgb):
                q.put((pkgb, repo))
    logging.info("Queue size after fill: %s", q.qsize())
if __name__ == '__main__':
    # load configuration and wire up logging
    with open("config.yaml") as cfg_file:
        config = yaml.safe_load(cfg_file)
    logging.config.dictConfig(config["logging"])
    logging.getLogger("ALHP")

    # single-instance guard via the lock file
    if already_running():
        logging.error("Another instance is already running")
        sys.exit(2)

    if not os.path.exists(config["basedir"]["repo"]):
        pathlib.Path(config["basedir"]["repo"]).mkdir(parents=True, exist_ok=True)

    sync_marchs_with_config()
    update_git2svn()
    fill_queue()

    # main loop: drain the build queue; while idle, refresh the checkout
    # every 15 minutes and look for new work
    while True:
        if q.qsize() > 0:
            try:
                build(*q.get_nowait())
            except Empty:
                pass
        else:
            time.sleep(60)
            if time.time() - update_last > 900:
                update_git2svn()
                update_last = time.time()
                fill_queue()