// ALHP.GO/proto_package.go

package main
import (
	"bytes"
	"context"
	"errors"
	"fmt"
	"github.com/Jguer/go-alpm/v2"
	"github.com/Morganamilo/go-srcinfo"
	"github.com/c2h5oh/datasize"
	"github.com/google/uuid"
	"github.com/otiai10/copy"
	"github.com/sethvargo/go-retry"
	log "github.com/sirupsen/logrus"
	"io"
	"os"
	"os/exec"
	"path/filepath"
	"somegit.dev/ALHP/ALHP.GO/ent"
	"somegit.dev/ALHP/ALHP.GO/ent/dbpackage"
	"strconv"
	"strings"
	"syscall"
	"time"
)
type ProtoPackage struct {
	Pkgbase   string
	Srcinfo   *srcinfo.Srcinfo
	Arch      string
	PkgFiles  []string
	Repo      dbpackage.Repository
	March     string
	FullRepo  string
	Version   string
	DBPackage *ent.DBPackage
	Pkgbuild  string
	State     *StateInfo
}

var (
	ErrorNotEligible = errors.New("package is not eligible")
)
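// isEligible decides whether the package should be queued for a build.
// It skips any-arch packages, packages matching the no-build glob list,
// packages that previously exceeded the memory limit, packages whose last
// build failed, and Haskell packages, persisting the skip reason to the
// database. It returns false if the package is skipped or the repository
// already contains a newer version than the PKGBUILD.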
func (p *ProtoPackage) isEligible(ctx context.Context) bool {
	globMatch, err := MatchGlobList(p.Pkgbase, conf.Blacklist.Packages)
	if err != nil {
		log.Errorf("error parsing glob from no-build list: %v", err)
	}

	skipping := false
	switch {
	case p.Arch == "any":
		log.Debugf("skipped %s: any-package", p.Pkgbase)
		p.DBPackage.SkipReason = "arch = any"
		p.DBPackage.Status = dbpackage.StatusSkipped
		skipping = true
	case globMatch:
		log.Debugf("skipped %s: package on no-build list", p.Pkgbase)
		p.DBPackage.SkipReason = "blacklisted"
		p.DBPackage.Status = dbpackage.StatusSkipped
		skipping = true
	case p.DBPackage.MaxRss != nil && datasize.ByteSize(*p.DBPackage.MaxRss)*datasize.KB > conf.Build.MemoryLimit:
		log.Debugf("skipped %s: memory limit exceeded (%s)", p.Pkgbase, datasize.ByteSize(*p.DBPackage.MaxRss)*datasize.KB)
		p.DBPackage.SkipReason = "memory limit exceeded"
		p.DBPackage.Status = dbpackage.StatusSkipped
		skipping = true
	case p.isPkgFailed():
		log.Debugf("skipped %s: failed build", p.Pkgbase)
		skipping = true
	case p.Srcinfo != nil:
		// skip haskell packages, since they cannot be optimized currently (no -O3 & march has no effect as far as I know)
		if Contains(p.Srcinfo.MakeDepends, "ghc") || Contains(p.Srcinfo.MakeDepends, "haskell-ghc") ||
			Contains(p.Srcinfo.Depends, "ghc") || Contains(p.Srcinfo.Depends, "haskell-ghc") {
			log.Debugf("skipped %s: haskell", p.Pkgbase)
			p.DBPackage.SkipReason = "haskell"
			p.DBPackage.Status = dbpackage.StatusSkipped
			skipping = true
		}
	}

	if skipping {
		p.DBPackage = p.DBPackage.Update().SetUpdated(time.Now()).SetVersion(p.Version).SetStatus(p.DBPackage.Status).
			SetSkipReason(p.DBPackage.SkipReason).SetTagRev(p.State.TagRev).SaveX(ctx)
		return false
	}
	p.DBPackage = p.DBPackage.Update().SetUpdated(time.Now()).SetVersion(p.Version).SaveX(ctx)

	if Contains(conf.Blacklist.LTO, p.Pkgbase) && p.DBPackage.Lto != dbpackage.LtoDisabled {
		p.DBPackage = p.DBPackage.Update().SetLto(dbpackage.LtoDisabled).SaveX(ctx)
	}

	repoVer, err := p.repoVersion()
	if err != nil {
		p.DBPackage = p.DBPackage.Update().ClearRepoVersion().SaveX(ctx)
	} else if alpm.VerCmp(repoVer, p.Version) > 0 {
		log.Debugf("skipped %s: version in repo higher than in PKGBUILD (%s < %s)", p.Pkgbase, p.Version, repoVer)
		p.DBPackage = p.DBPackage.Update().SetStatus(dbpackage.StatusLatest).ClearSkipReason().SetTagRev(p.State.TagRev).SaveX(ctx)
		return false
	}

	return true
}
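// build runs a full package build: it checks the packaging repo out into a
// fresh build directory, regenerates the .SRCINFO, verifies that all
// dependencies are up to date on the mirror, bumps the pkgrel for ALHP, runs
// makechrootpkg in a throwaway chroot, signs the resulting artifacts with gpg
// and stages them in the holding directory. Resource usage (max RSS, I/O,
// CPU time) is recorded in the database. It returns the build duration and an
// error (ErrorNotEligible for soft skips such as Haskell or delayed packages).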
func (p *ProtoPackage) build(ctx context.Context) (time.Duration, error) {
	start := time.Now().UTC()
	chroot := "build_" + uuid.New().String()

	buildFolder, err := p.setupBuildDir(ctx)
	if err != nil {
		return time.Since(start), fmt.Errorf("error setting up build folder: %w", err)
	}
	defer func() {
		chroot := chroot
		log.Debugf("removing chroot %s", chroot)
		err := cleanBuildDir(buildFolder, filepath.Join(conf.Basedir.Work, chrootDir, chroot))
		if err != nil {
			log.Errorf("error removing builddir/chroot %s/%s: %v", buildDir, chroot, err)
		}
	}()

	err = p.genSrcinfo()
	if err != nil {
		return time.Since(start), fmt.Errorf("error generating srcinfo: %w", err)
	}
	p.Version = constructVersion(p.Srcinfo.Pkgver, p.Srcinfo.Pkgrel, p.Srcinfo.Epoch)
	p.DBPackage = p.DBPackage.Update().SetPackages(packages2slice(p.Srcinfo.Packages)).SaveX(ctx)

	// skip haskell packages, since they cannot be optimized currently (no -O3 & march has no effect as far as I know)
	if Contains(p.Srcinfo.MakeDepends, "ghc") || Contains(p.Srcinfo.MakeDepends, "haskell-ghc") ||
		Contains(p.Srcinfo.Depends, "ghc") || Contains(p.Srcinfo.Depends, "haskell-ghc") {
		p.DBPackage = p.DBPackage.Update().SetStatus(dbpackage.StatusSkipped).SetSkipReason("haskell").SetTagRev(p.State.TagRev).SaveX(ctx)
		buildManager.repoPurge[p.FullRepo] <- []*ProtoPackage{p}
		return time.Since(start), ErrorNotEligible
	}
	isLatest, local, syncVersion, err := p.isMirrorLatest(alpmHandle)
	if err != nil {
		var multipleStateFilesError MultipleStateFilesError
		var unableToSatisfyError UnableToSatisfyError
		switch {
		default:
			return time.Since(start), fmt.Errorf("error solving deps: %w", err)
		case errors.As(err, &multipleStateFilesError):
			log.Infof("skipped %s: multiple PKGBUILDs for dependency found: %v", p.Srcinfo.Pkgbase, err)
			p.DBPackage = p.DBPackage.Update().SetStatus(dbpackage.StatusSkipped).SetSkipReason("multiple PKGBUILD for dep. found").SaveX(ctx)
			return time.Since(start), err
		case errors.As(err, &unableToSatisfyError):
			log.Infof("skipped %s: unable to resolve dependencies: %v", p.Srcinfo.Pkgbase, err)
			p.DBPackage = p.DBPackage.Update().SetStatus(dbpackage.StatusSkipped).SetSkipReason("unable to resolve dependencies").SaveX(ctx)
			return time.Since(start), ErrorNotEligible
		}
	}

	if !isLatest {
		if local != nil {
			log.Infof("delayed %s: not all dependencies are up to date (local: %s==%s, sync: %s==%s)",
				p.Srcinfo.Pkgbase, local.Name(), local.Version(), local.Name(), syncVersion)
			p.DBPackage.Update().SetStatus(dbpackage.StatusDelayed).
				SetSkipReason(fmt.Sprintf("waiting for %s==%s", local.Name(), syncVersion)).ExecX(ctx)

			// Returning an error here causes the package to be purged.
			// Purge delayed packages in case delay is caused by inconsistencies in state.
			// Worst case would be clients downloading a package update twice, once from their official mirror,
			// and then after build from ALHP. Best case we prevent a not buildable package from staying in the repos
			// in an outdated version.
			if time.Since(local.BuildDate()).Hours() >= 48 && p.DBPackage.RepoVersion != "" {
				return time.Since(start), errors.New("overdue package waiting")
			}
		} else {
			log.Infof("delayed %s: not all dependencies are up to date or resolvable", p.Srcinfo.Pkgbase)
			p.DBPackage.Update().SetStatus(dbpackage.StatusDelayed).SetSkipReason("waiting for mirror").ExecX(ctx)
		}
		return time.Since(start), ErrorNotEligible
	}
	log.Infof("[P] build starting: %s->%s->%s", p.FullRepo, p.Pkgbase, p.Version)

	p.DBPackage = p.DBPackage.Update().SetStatus(dbpackage.StatusBuilding).ClearSkipReason().SaveX(ctx)

	err = p.importKeys()
	if err != nil {
		log.Warningf("[P] failed to import pgp keys for %s->%s->%s: %v", p.FullRepo, p.Pkgbase, p.Version, err)
	}

	buildNo := 1
	versionSlice := strings.Split(p.DBPackage.LastVersionBuild, ".")
	if strings.Join(versionSlice[:len(versionSlice)-1], ".") == p.Version {
		buildNo, err = strconv.Atoi(versionSlice[len(versionSlice)-1])
		if err != nil {
			return time.Since(start), fmt.Errorf("error while reading buildNo from pkgrel: %w", err)
		}
		buildNo++
	}

	err = p.increasePkgRel(buildNo)
	if err != nil {
		return time.Since(start), fmt.Errorf("error while increasing pkgrel: %w", err)
	}

	p.PkgFiles = []string{}

	// default to LTO
	makepkgFile := makepkg
	if p.DBPackage.Lto == dbpackage.LtoDisabled || p.DBPackage.Lto == dbpackage.LtoAutoDisabled {
		// use non-lto makepkg.conf if LTO is blacklisted for this package
		makepkgFile = makepkgLTO
	}
	cmd := exec.CommandContext(ctx, "makechrootpkg", "-c", "-D", filepath.Join(conf.Basedir.Work, makepkgDir), //nolint:gosec
		"-l", chroot, "-r", filepath.Join(conf.Basedir.Work, chrootDir), "--", "-m", "--noprogressbar", "--config",
		filepath.Join(conf.Basedir.Work, makepkgDir, fmt.Sprintf(makepkgFile, p.March)))
	cmd.Dir = filepath.Dir(p.Pkgbuild)
	var out bytes.Buffer
	cmd.Stdout = &out
	cmd.Stderr = &out

	err = cmd.Start()
	if err != nil {
		return time.Since(start), fmt.Errorf("error starting build: %w", err)
	}

	err = cmd.Wait()

	Rusage, ok := cmd.ProcessState.SysUsage().(*syscall.Rusage)
	if !ok {
		log.Panicf("rusage is not of type *syscall.Rusage, are we running on unix-like?")
	}

	if err != nil {
		if ctx.Err() != nil {
			return time.Since(start), ctx.Err()
		}

		if p.DBPackage.Lto != dbpackage.LtoAutoDisabled && p.DBPackage.Lto != dbpackage.LtoDisabled &&
			(reLdError.MatchString(out.String()) || reRustLTOError.MatchString(out.String())) {
			p.DBPackage.Update().SetStatus(dbpackage.StatusQueued).SetSkipReason("non-LTO rebuild").SetLto(dbpackage.LtoAutoDisabled).ExecX(ctx)
			return time.Since(start), errors.New("ld/lto-incompatibility error detected, LTO disabled")
		}

		if reDownloadError.MatchString(out.String()) || reDownloadError2.MatchString(out.String()) ||
			rePortError.MatchString(out.String()) || reSigError.MatchString(out.String()) {
			p.DBPackage.Update().SetStatus(dbpackage.StatusQueued).ExecX(ctx)
			return time.Since(start), errors.New("known build error detected")
		}

		err = os.MkdirAll(filepath.Join(conf.Basedir.Repo, logDir, p.March), 0o755)
		if err != nil {
			return time.Since(start), fmt.Errorf("error creating logdir: %w", err)
		}
		err = os.WriteFile(filepath.Join(conf.Basedir.Repo, logDir, p.March, p.Pkgbase+".log"), //nolint:gosec
			[]byte(strings.ToValidUTF8(out.String(), "")), 0o644)
		if err != nil {
			return time.Since(start), fmt.Errorf("error writing to logdir: %w", err)
		}

		p.DBPackage.Update().
			SetStatus(dbpackage.StatusFailed).
			ClearSkipReason().
			SetBuildTimeStart(start).
			ClearMaxRss().
			ClearLastVersionBuild().
			ClearIoOut().
			ClearIoIn().
			ClearUTime().
			ClearSTime().
			SetTagRev(p.State.TagRev).
			ExecX(ctx)
		return time.Since(start), fmt.Errorf("build failed: exit code %d", cmd.ProcessState.ExitCode())
	}
	pkgFiles, err := filepath.Glob(filepath.Join(filepath.Dir(p.Pkgbuild), "*.pkg.tar.zst"))
	if err != nil {
		return time.Since(start), fmt.Errorf("error scanning builddir for artifacts: %w", err)
	}

	if len(pkgFiles) == 0 {
		return time.Since(start), errors.New("no build-artifacts found")
	}

	for _, file := range pkgFiles {
		cmd = exec.Command("gpg", "--batch", "--detach-sign", file)
		res, err := cmd.CombinedOutput()
		if err != nil {
			return time.Since(start), fmt.Errorf("error while signing artifact: %w (%s)", err, string(res))
		}
	}

	copyFiles, err := filepath.Glob(filepath.Join(filepath.Dir(p.Pkgbuild), "*.pkg.tar.zst*"))
	if err != nil {
		return time.Since(start), fmt.Errorf("error scanning builddir for artifacts: %w", err)
	}

	holdingDir := filepath.Join(conf.Basedir.Work, waitingDir, p.FullRepo)
	for _, file := range copyFiles {
		err = os.MkdirAll(holdingDir, 0o755)
		if err != nil {
			return time.Since(start), fmt.Errorf("error creating %s: %w", holdingDir, err)
		}
		err = copy.Copy(file, filepath.Join(holdingDir, filepath.Base(file)))
		if err != nil {
			return time.Since(start), fmt.Errorf("error while copying file to %s: %w", filepath.Join(holdingDir, filepath.Base(file)), err)
		}

		if filepath.Ext(file) != ".sig" {
			p.PkgFiles = append(p.PkgFiles, filepath.Join(holdingDir, filepath.Base(file)))
		}
	}

	if _, err := os.Stat(filepath.Join(conf.Basedir.Repo, logDir, p.March, p.Pkgbase+".log")); err == nil {
		err := os.Remove(filepath.Join(conf.Basedir.Repo, logDir, p.March, p.Pkgbase+".log"))
		if err != nil {
			return time.Since(start), fmt.Errorf("error removing log: %w", err)
		}
	}

	updatePkg := p.DBPackage.Update().
		SetStatus(dbpackage.StatusBuilt).
		SetBuildTimeStart(start).
		SetLastVersionBuild(p.Version).
		SetTagRev(p.State.TagRev).
		SetMaxRss(Rusage.Maxrss).
		SetIoOut(Rusage.Oublock).
		SetIoIn(Rusage.Inblock).
		SetUTime(Rusage.Utime.Sec).
		SetSTime(Rusage.Stime.Sec)

	// only mark LTO as enabled if it was not explicitly or automatically disabled for this package
	if p.DBPackage.Lto != dbpackage.LtoDisabled && p.DBPackage.Lto != dbpackage.LtoAutoDisabled {
		updatePkg.SetLto(dbpackage.LtoEnabled)
	}

	updatePkg.ExecX(ctx)

	return time.Since(start), nil
}
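// setupBuildDir prepares a per-package build directory: it removes any
// leftover directory for this pkgbase/version, recreates it, derives the
// GitLab project path from the pkgbase (the packaging repos escape "+", "_"
// and other special characters) and shallow-clones the state tag from
// gitlab.archlinux.org with Fibonacci-backoff retries. It returns the build
// directory and points p.Pkgbuild at the contained PKGBUILD.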
func (p *ProtoPackage) setupBuildDir(ctx context.Context) (string, error) {
	buildDir := filepath.Join(conf.Basedir.Work, buildDir, p.March, p.Pkgbase+"-"+p.Version)
	err := cleanBuildDir(buildDir, "")
	if err != nil {
		return "", fmt.Errorf("removing old builddir failed: %w", err)
	}

	err = os.MkdirAll(buildDir, 0o755)
	if err != nil {
		return "", err
	}

	gitlabPath := reReplaceSinglePlus.ReplaceAllString(p.Pkgbase, "$1-$2")
	gitlabPath = reReplaceRemainingPlus.ReplaceAllString(gitlabPath, "plus")
	gitlabPath = reReplaceSpecialChars.ReplaceAllString(gitlabPath, "-")
	gitlabPath = reReplaceUnderscore.ReplaceAllString(gitlabPath, "-")
	gitlabPath = reReplaceTree.ReplaceAllString(gitlabPath, "unix-tree")

	gr := retry.NewFibonacci(10 * time.Second)
	gr = retry.WithMaxRetries(conf.MaxCloneRetries, gr)

	if err := retry.Do(ctx, gr, func(ctx context.Context) error {
		cmd := exec.CommandContext(ctx, "git", "clone", "--depth", "1", "--branch", p.State.TagVer, //nolint:gosec
			fmt.Sprintf("https://gitlab.archlinux.org/archlinux/packaging/packages/%s.git", gitlabPath), buildDir)
		res, err := cmd.CombinedOutput()
		log.Debug(string(res))
		if err != nil {
			return retry.RetryableError(err)
		}
		return nil
	}); err != nil {
		return "", err
	}

	p.Pkgbuild = filepath.Join(buildDir, "PKGBUILD")
	return buildDir, nil
}
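// repoVersion extracts the version of the first package file currently in the
// target repo from its file name. For illustration (hypothetical file name,
// not taken from a real repo): for
//
//	foo-1.2.3-4.1-x86_64.pkg.tar.zst
//
// splitting on "-" yields [foo 1.2.3 4.1 x86_64.pkg.tar.zst], so the
// third- and second-to-last fields are joined to "1.2.3-4.1".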
func (p *ProtoPackage) repoVersion() (string, error) {
	if err := p.findPkgFiles(); err != nil {
		return "", err
	}

	if len(p.PkgFiles) == 0 {
		return "", errors.New("not found")
	}

	fNameSplit := strings.Split(p.PkgFiles[0], "-")
	return fNameSplit[len(fNameSplit)-3] + "-" + fNameSplit[len(fNameSplit)-2], nil
}
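// increasePkgRel appends an ALHP build number to pkgrel so rebuilt packages
// always upgrade cleanly over the official ones. A sketch of the effect
// (hypothetical values): with pkgrel=2 and buildNo=1 the PKGBUILD is rewritten
// to pkgrel=2.1 and p.Version changes from "1.0-2" to "1.0-2.1"; if pkgrel is
// already "2.1", the trailing build number is added instead, giving "2.2" for
// buildNo=1.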
func (p *ProtoPackage) increasePkgRel(buildNo int) error {
	if p.Srcinfo == nil {
		err := p.genSrcinfo()
		if err != nil {
			return fmt.Errorf("error generating srcinfo: %w", err)
		}
	}

	if p.Version == "" {
		p.Version = constructVersion(p.Srcinfo.Pkgver, p.Srcinfo.Pkgrel, p.Srcinfo.Epoch)
	}

	f, err := os.OpenFile(p.Pkgbuild, os.O_RDWR, 0o644)
	if err != nil {
		return err
	}
	defer func(f *os.File) {
		err := f.Close()
		if err != nil {
			panic(err)
		}
	}(f)

	fStr, err := io.ReadAll(f)
	if err != nil {
		return err
	}

	// increase buildno if already existing
	var nStr string
	if strings.Contains(p.Srcinfo.Pkgrel, ".") {
		pkgRelSplit := strings.Split(p.Srcinfo.Pkgrel, ".")
		pkgRelBuildNo, err := strconv.Atoi(pkgRelSplit[len(pkgRelSplit)-1])
		if err != nil {
			return err
		}

		nStr = rePkgRel.ReplaceAllLiteralString(string(fStr), "pkgrel="+pkgRelSplit[0]+"."+strconv.Itoa(buildNo+pkgRelBuildNo))
		versionSplit := strings.Split(p.Version, "-")
		versionSplit[len(versionSplit)-1] = pkgRelSplit[0] + "." + strconv.Itoa(buildNo+pkgRelBuildNo)
		p.Version = strings.Join(versionSplit, "-")
	} else {
		nStr = rePkgRel.ReplaceAllLiteralString(string(fStr), "pkgrel="+p.Srcinfo.Pkgrel+"."+strconv.Itoa(buildNo))
		p.Version += "." + strconv.Itoa(buildNo)
	}

	_, err = f.Seek(0, 0)
	if err != nil {
		return err
	}
	err = f.Truncate(0)
	if err != nil {
		return err
	}

	_, err = f.WriteString(nStr)
	if err != nil {
		return err
	}

	return nil
}
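// importKeys fetches the PGP keys listed in validpgpkeys of the srcinfo from
// keyserver.ubuntu.com so makepkg can verify signed sources.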
func (p *ProtoPackage) importKeys() error {
	if p.Srcinfo == nil {
		err := p.genSrcinfo()
		if err != nil {
			return fmt.Errorf("error generating srcinfo: %w", err)
		}
	}

	if p.Srcinfo.ValidPGPKeys != nil {
		args := []string{"--keyserver", "keyserver.ubuntu.com", "--recv-keys"}
		args = append(args, p.Srcinfo.ValidPGPKeys...)
		cmd := exec.Command("gpg", args...)
		_, err := cmd.CombinedOutput()
		return err
	}
	return nil
}
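// isAvailable reports whether the package is present in the configured sync
// repositories. It resolves the first split package via the srcinfo or the
// database record when available, and otherwise falls back to querying
// pacsift against the pristine chroot; repo, pkgbase and architecture of the
// resolved package must match this ProtoPackage.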
func (p *ProtoPackage) isAvailable(h *alpm.Handle) bool {
	dbs, err := h.SyncDBs()
	if err != nil {
		return false
	}

	buildManager.alpmMutex.Lock()
	defer buildManager.alpmMutex.Unlock()

	var pkg alpm.IPackage
	switch {
	case p.Srcinfo != nil:
		pkg, err = dbs.FindSatisfier(p.Srcinfo.Packages[0].Pkgname)
	case p.DBPackage != nil && len(p.DBPackage.Packages) > 0:
		pkg, err = dbs.FindSatisfier(p.DBPackage.Packages[0])
	default:
		cmd := exec.Command("unbuffer", "pacsift", "--exact", "--base="+p.Pkgbase, "--repo="+p.Repo.String(), //nolint:gosec
			"--sysroot="+filepath.Join(conf.Basedir.Work, chrootDir, pristineChroot))
		var res []byte
		res, err = cmd.Output()
		if err != nil {
			log.Warningf("error getting packages from pacsift for %s: %v", p.Pkgbase, err)
			return false
		} else if len(res) == 0 {
			return false
		}

		// workaround for https://github.com/andrewgregory/pacutils/issues/66
		// TODO: remove once fixed
		rRes := reReplacePacsiftWarning.ReplaceAllString(string(res), "")
		if strings.TrimSpace(rRes) == "" {
			return false
		}

		if len(strings.Split(strings.TrimSpace(rRes), "\n")) > 0 {
			pacsiftLines := strings.Split(strings.TrimSpace(rRes), "\n")

			var splitPkgs []string
			for _, line := range pacsiftLines {
				splitPkgs = append(splitPkgs, strings.Split(line, "/")[1])
			}

			if p.DBPackage != nil {
				p.DBPackage = p.DBPackage.Update().SetPackages(splitPkgs).SaveX(context.Background())
			}
			pkg, err = dbs.FindSatisfier(splitPkgs[0])
		} else {
			log.Warningf("error getting packages from pacsift for %s", p.Pkgbase)
			return false
		}
	}
	if err != nil {
		log.Debugf("error resolving %s: %v", p.Pkgbase, err)
		return false
	}

	if pkg.DB().Name() != p.Repo.String() || pkg.Base() != p.Pkgbase {
		log.Debugf("%s: repo (%s!=%s) or pkgbase (%s!=%s) does not match", p.Pkgbase, pkg.DB().Name(), p.Repo.String(), pkg.Base(), p.Pkgbase)
		return false
	}

	if p.Srcinfo != nil && (!Contains(p.Srcinfo.Arch, pkg.Architecture()) || p.Srcinfo.Pkgbase != pkg.Base()) {
		log.Debugf("%s: arch (%s!=%s) or pkgbase (%s!=%s) does not match", p.Pkgbase, p.Srcinfo.Arch[0],
			pkg.Architecture(), pkg.Base(), p.Pkgbase)
		return false
	}

	return true
}
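// GitVersion determines the upstream version of the package from the Arch
// state repository. If multiple state files match the pkgbase, the ambiguity
// is resolved by asking the mirror which repo actually provides the package;
// the chosen state file is then parsed and its pkgver returned.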
func (p *ProtoPackage) GitVersion(h *alpm.Handle) (string, error) {
	if p.Pkgbase == "" {
		return "", errors.New("invalid arguments")
	}

	stateFiles, _ := Glob(filepath.Join(conf.Basedir.Work, stateDir, "**/"+p.Pkgbase))

	var fStateFiles []string
	for _, stateFile := range stateFiles {
		_, subRepo, _, err := stateFileMeta(stateFile)
		if err != nil {
			continue
		}

		if subRepo != nil {
			continue
		}

		if !Contains(fStateFiles, stateFile) {
			fStateFiles = append(fStateFiles, stateFile)
		}
	}

	if len(fStateFiles) > 1 {
		log.Infof("%s: multiple statefiles found, try resolving from mirror", p.Pkgbase)
		dbs, err := h.SyncDBs()
		if err != nil {
			return "", err
		}

		buildManager.alpmMutex.Lock()
		iPackage, err := dbs.FindSatisfier(p.Pkgbase)
		buildManager.alpmMutex.Unlock()
		if err != nil {
			return "", err
		}

		for _, stateFile := range fStateFiles {
			repo, _, _, err := stateFileMeta(stateFile)
			if err != nil {
				continue
			}

			if iPackage.DB().Name() == repo {
				fStateFiles = []string{stateFile}
				break
			}
		}

		if len(fStateFiles) > 1 {
			return "", MultipleStateFilesError{fmt.Errorf("%s: multiple statefiles found: %s", p.Pkgbase, fStateFiles)}
		}
		log.Infof("%s: resolving successful: MirrorRepo=%s; statefile chosen: %s", p.Pkgbase, iPackage.DB().Name(), fStateFiles[0])
	} else if len(fStateFiles) == 0 {
		return "", fmt.Errorf("%s: no matching statefile found (searched: %s, candidates: %s)", p.Pkgbase,
			filepath.Join(conf.Basedir.Work, stateDir, "**/"+p.Pkgbase), stateFiles)
	}

	rawState, err := os.ReadFile(fStateFiles[0])
	if err != nil {
		return "", fmt.Errorf("error reading statefile %s: %w", fStateFiles[0], err)
	}

	state, err := parseState(string(rawState))
	if err != nil {
		return "", fmt.Errorf("error parsing statefile: %w", err)
	}

	return state.PkgVer, nil
}
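// isPkgFailed reports whether the last recorded build of this (or a newer)
// version failed. Older failed versions do not count, so a new upstream
// release gets another build attempt.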
func (p *ProtoPackage) isPkgFailed() bool {
	if p.DBPackage.Version == "" {
		return false
	}

	if alpm.VerCmp(p.DBPackage.Version, p.Version) < 0 {
		return false
	}

	return p.DBPackage.Status == dbpackage.StatusFailed
}
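// genSrcinfo populates p.Srcinfo by running "makepkg --printsrcinfo" on the
// checked-out PKGBUILD, unless a srcinfo has already been parsed.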
func (p *ProtoPackage) genSrcinfo() error {
	if p.Srcinfo != nil {
		return nil
	}

	cmd := exec.Command("makepkg", "--printsrcinfo", "-p", filepath.Base(p.Pkgbuild)) //nolint:gosec
	cmd.Dir = filepath.Dir(p.Pkgbuild)
	res, err := cmd.CombinedOutput()
	if err != nil {
		return fmt.Errorf("makepkg exit non-zero (PKGBUILD: %s): %w (%s)", p.Pkgbuild, err, string(res))
	}

	info, err := srcinfo.Parse(string(res))
	if err != nil {
		return err
	}
	p.Srcinfo = info

	return nil
}
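// findPkgFiles scans the target repository directory for package files that
// belong to this pkgbase (matched against the split-package names from the
// database or the srcinfo) and stores their paths in p.PkgFiles.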
func (p *ProtoPackage) findPkgFiles() error {
	pkgs, err := os.ReadDir(filepath.Join(conf.Basedir.Repo, p.FullRepo, "os", conf.Arch))
	if err != nil {
		return err
	}

	if p.DBPackage == nil && p.Srcinfo == nil {
		return errors.New("unable to find pkgfiles without dbpkg or srcinfo present")
	}

	var realPkgs []string
	if p.DBPackage != nil {
		realPkgs = append(realPkgs, p.DBPackage.Packages...)
	} else {
		for _, realPkg := range p.Srcinfo.Packages {
			realPkgs = append(realPkgs, realPkg.Pkgname)
		}
	}

	var fPkg []string
	for _, file := range pkgs {
		if !file.IsDir() && !strings.HasSuffix(file.Name(), ".sig") {
			matches := rePkgFile.FindStringSubmatch(file.Name())
			if len(matches) > 1 && Contains(realPkgs, matches[1]) {
				fPkg = append(fPkg, filepath.Join(conf.Basedir.Repo, p.FullRepo, "os", conf.Arch, file.Name()))
			}
		}
	}

	p.PkgFiles = fPkg
	return nil
}
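// toDBPackage loads the matching database row for this pkgbase/march/repo
// into p.DBPackage, optionally creating it if it does not exist yet.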
func (p *ProtoPackage) toDBPackage(create bool) error {
	if p.DBPackage != nil {
		return nil
	}

	dbPkg, err := db.DBPackage.Query().Where(
		dbpackage.Pkgbase(p.Pkgbase),
		dbpackage.March(p.March),
		dbpackage.RepositoryEQ(p.Repo),
	).Only(context.Background())
	if err != nil && ent.IsNotFound(err) && create {
		dbPkg = db.DBPackage.Create().
			SetPkgbase(p.Pkgbase).
			SetMarch(p.March).
			SetRepository(p.Repo).
			SaveX(context.Background())
	} else if err != nil && !ent.IsNotFound(err) {
		return err
	}

	p.DBPackage = dbPkg
	return nil
}
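// exists reports whether a database entry for this pkgbase and march exists,
// regardless of repository.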
func (p *ProtoPackage) exists() (bool, error) {
	dbPkg, err := db.DBPackage.Query().Where(dbpackage.And(dbpackage.Pkgbase(p.Pkgbase), dbpackage.March(p.March))).Exist(context.Background())
	if err != nil {
		return false, err
	}
	return dbPkg, nil
}
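// isMirrorLatest checks whether every dependency of the package is as new on
// the local mirror as in the sync databases. It returns the first outdated
// local package and the sync version it is waiting for, so callers can delay
// the build until the mirror has caught up.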
func (p *ProtoPackage) isMirrorLatest(h *alpm.Handle) (latest bool, foundPkg *alpm.Package, version string, err error) {
	dbs, err := h.SyncDBs()
	if err != nil {
		return false, nil, "", err
	}

	allDepends := p.Srcinfo.Depends
	allDepends = append(allDepends, p.Srcinfo.MakeDepends...)
	// add gcc to the dependency list, since we can't know for sure if it's in use