summaryrefslogtreecommitdiff
path: root/poky/meta/classes-global
diff options
context:
space:
mode:
Diffstat (limited to 'poky/meta/classes-global')
-rw-r--r--poky/meta/classes-global/insane.bbclass332
-rw-r--r--poky/meta/classes-global/license.bbclass18
-rw-r--r--poky/meta/classes-global/mirrors.bbclass2
-rw-r--r--poky/meta/classes-global/package.bbclass2
-rw-r--r--poky/meta/classes-global/package_rpm.bbclass19
-rw-r--r--poky/meta/classes-global/retain.bbclass182
-rw-r--r--poky/meta/classes-global/sanity.bbclass24
-rw-r--r--poky/meta/classes-global/sstate.bbclass33
-rw-r--r--poky/meta/classes-global/staging.bbclass9
-rw-r--r--poky/meta/classes-global/utils.bbclass4
10 files changed, 384 insertions, 241 deletions
diff --git a/poky/meta/classes-global/insane.bbclass b/poky/meta/classes-global/insane.bbclass
index 6888fa06f0..1691d96b64 100644
--- a/poky/meta/classes-global/insane.bbclass
+++ b/poky/meta/classes-global/insane.bbclass
@@ -26,26 +26,25 @@
# Elect whether a given type of error is a warning or error, they may
# have been set by other files.
-WARN_QA ?= " libdir xorg-driver-abi buildpaths \
+WARN_QA ?= "32bit-time native-last pep517-backend"
+ERROR_QA ?= "dev-so debug-deps dev-deps debug-files arch pkgconfig la \
textrel incompatible-license files-invalid \
infodir build-deps src-uri-bad symlink-to-sysroot multilib \
invalid-packageconfig host-user-contaminated uppercase-pn \
mime mime-xdg unlisted-pkg-lics unhandled-features-check \
- missing-update-alternatives native-last missing-ptest \
+ missing-update-alternatives missing-ptest \
license-exists license-no-generic license-syntax license-format \
- license-incompatible license-file-missing obsolete-license \
- 32bit-time virtual-slash \
- "
-ERROR_QA ?= "dev-so debug-deps dev-deps debug-files arch pkgconfig la \
- perms dep-cmp pkgvarcheck perm-config perm-line perm-link \
- split-strip packages-list pkgv-undefined var-undefined \
+ license-exception license-file-missing obsolete-license \
+ libdir xorg-driver-abi buildpaths \
+ dep-cmp pkgvarcheck perm-config perm-line perm-link \
+ packages-list pkgv-undefined var-undefined \
version-going-backwards expanded-d invalid-chars \
license-checksum dev-elf file-rdeps configure-unsafe \
configure-gettext perllocalpod shebang-size \
- already-stripped installed-vs-shipped ldflags compile-host-path \
- install-host-path pn-overrides unknown-configure-option \
+ already-stripped installed-vs-shipped ldflags \
+ pn-overrides unknown-configure-option \
useless-rpaths rpaths staticdev empty-dirs \
- patch-fuzz patch-status \
+ patch-fuzz patch-status virtual-slash \
"
# Add usrmerge QA check based on distro feature
ERROR_QA:append = "${@bb.utils.contains('DISTRO_FEATURES', 'usrmerge', ' usrmerge', '', d)}"
@@ -55,8 +54,6 @@ FAKEROOT_QA = "host-user-contaminated"
FAKEROOT_QA[doc] = "QA tests which need to run under fakeroot. If any \
enabled tests are listed here, the do_package_qa task will run under fakeroot."
-ALL_QA = "${WARN_QA} ${ERROR_QA}"
-
UNKNOWN_CONFIGURE_OPT_IGNORE ?= "--enable-nls --disable-nls --disable-silent-rules --disable-dependency-tracking --disable-static"
# This is a list of directories that are expected to be empty.
@@ -84,7 +81,7 @@ def package_qa_clean_path(path, d, pkg=None):
return path.replace(d.getVar("TMPDIR"), "/").replace("//", "/")
QAPATHTEST[shebang-size] = "package_qa_check_shebang_size"
-def package_qa_check_shebang_size(path, name, d, elf, messages):
+def package_qa_check_shebang_size(path, name, d, elf):
import stat
if os.path.islink(path) or stat.S_ISFIFO(os.stat(path).st_mode) or elf:
return
@@ -103,25 +100,22 @@ def package_qa_check_shebang_size(path, name, d, elf, messages):
return
if len(stanza) > 129:
- oe.qa.add_message(messages, "shebang-size", "%s: %s maximum shebang size exceeded, the maximum size is 128." % (name, package_qa_clean_path(path, d, name)))
+ oe.qa.handle_error("shebang-size", "%s: %s maximum shebang size exceeded, the maximum size is 128." % (name, package_qa_clean_path(path, d, name)), d)
return
QAPATHTEST[libexec] = "package_qa_check_libexec"
-def package_qa_check_libexec(path,name, d, elf, messages):
+def package_qa_check_libexec(path,name, d, elf):
# Skip the case where the default is explicitly /usr/libexec
libexec = d.getVar('libexecdir')
if libexec == "/usr/libexec":
- return True
+ return
if 'libexec' in path.split(os.path.sep):
- oe.qa.add_message(messages, "libexec", "%s: %s is using libexec please relocate to %s" % (name, package_qa_clean_path(path, d, name), libexec))
- return False
-
- return True
+ oe.qa.handle_error("libexec", "%s: %s is using libexec please relocate to %s" % (name, package_qa_clean_path(path, d, name), libexec), d)
QAPATHTEST[rpaths] = "package_qa_check_rpath"
-def package_qa_check_rpath(file,name, d, elf, messages):
+def package_qa_check_rpath(file,name, d, elf):
"""
Check for dangerous RPATHs
"""
@@ -143,10 +137,10 @@ def package_qa_check_rpath(file,name, d, elf, messages):
rpath = m.group(1)
for dir in bad_dirs:
if dir in rpath:
- oe.qa.add_message(messages, "rpaths", "package %s contains bad RPATH %s in file %s" % (name, rpath, file))
+ oe.qa.handle_error("rpaths", "package %s contains bad RPATH %s in file %s" % (name, rpath, file), d)
QAPATHTEST[useless-rpaths] = "package_qa_check_useless_rpaths"
-def package_qa_check_useless_rpaths(file, name, d, elf, messages):
+def package_qa_check_useless_rpaths(file, name, d, elf):
"""
Check for RPATHs that are useless but not dangerous
"""
@@ -173,31 +167,31 @@ def package_qa_check_useless_rpaths(file, name, d, elf, messages):
if rpath_eq(rpath, libdir) or rpath_eq(rpath, base_libdir):
# The dynamic linker searches both these places anyway. There is no point in
# looking there again.
- oe.qa.add_message(messages, "useless-rpaths", "%s: %s contains probably-redundant RPATH %s" % (name, package_qa_clean_path(file, d, name), rpath))
+ oe.qa.handle_error("useless-rpaths", "%s: %s contains probably-redundant RPATH %s" % (name, package_qa_clean_path(file, d, name), rpath), d)
QAPATHTEST[dev-so] = "package_qa_check_dev"
-def package_qa_check_dev(path, name, d, elf, messages):
+def package_qa_check_dev(path, name, d, elf):
"""
Check for ".so" library symlinks in non-dev packages
"""
if not name.endswith("-dev") and not name.endswith("-dbg") and not name.endswith("-ptest") and not name.startswith("nativesdk-") and path.endswith(".so") and os.path.islink(path):
- oe.qa.add_message(messages, "dev-so", "non -dev/-dbg/nativesdk- package %s contains symlink .so '%s'" % \
- (name, package_qa_clean_path(path, d, name)))
+ oe.qa.handle_error("dev-so", "non -dev/-dbg/nativesdk- package %s contains symlink .so '%s'" % \
+ (name, package_qa_clean_path(path, d, name)), d)
QAPATHTEST[dev-elf] = "package_qa_check_dev_elf"
-def package_qa_check_dev_elf(path, name, d, elf, messages):
+def package_qa_check_dev_elf(path, name, d, elf):
"""
Check that -dev doesn't contain real shared libraries. The test has to
check that the file is not a link and is an ELF object as some recipes
install link-time .so files that are linker scripts.
"""
if name.endswith("-dev") and path.endswith(".so") and not os.path.islink(path) and elf:
- oe.qa.add_message(messages, "dev-elf", "-dev package %s contains non-symlink .so '%s'" % \
- (name, package_qa_clean_path(path, d, name)))
+ oe.qa.handle_error("dev-elf", "-dev package %s contains non-symlink .so '%s'" % \
+ (name, package_qa_clean_path(path, d, name)), d)
QAPATHTEST[staticdev] = "package_qa_check_staticdev"
-def package_qa_check_staticdev(path, name, d, elf, messages):
+def package_qa_check_staticdev(path, name, d, elf):
"""
Check for ".a" library in non-staticdev packages
There are a number of exceptions to this rule, -pic packages can contain
@@ -206,22 +200,22 @@ def package_qa_check_staticdev(path, name, d, elf, messages):
"""
if not name.endswith("-pic") and not name.endswith("-staticdev") and not name.endswith("-ptest") and path.endswith(".a") and not path.endswith("_nonshared.a") and not '/usr/lib/debug-static/' in path and not '/.debug-static/' in path:
- oe.qa.add_message(messages, "staticdev", "non -staticdev package contains static .a library: %s path '%s'" % \
- (name, package_qa_clean_path(path, d, name)))
+ oe.qa.handle_error("staticdev", "non -staticdev package contains static .a library: %s path '%s'" % \
+ (name, package_qa_clean_path(path, d, name)), d)
QAPATHTEST[mime] = "package_qa_check_mime"
-def package_qa_check_mime(path, name, d, elf, messages):
+def package_qa_check_mime(path, name, d, elf):
"""
Check if package installs mime types to /usr/share/mime/packages
while no inheriting mime.bbclass
"""
if d.getVar("datadir") + "/mime/packages" in path and path.endswith('.xml') and not bb.data.inherits_class("mime", d):
- oe.qa.add_message(messages, "mime", "package contains mime types but does not inherit mime: %s path '%s'" % \
- (name, package_qa_clean_path(path, d, name)))
+ oe.qa.handle_error("mime", "package contains mime types but does not inherit mime: %s path '%s'" % \
+ (name, package_qa_clean_path(path, d, name)), d)
QAPATHTEST[mime-xdg] = "package_qa_check_mime_xdg"
-def package_qa_check_mime_xdg(path, name, d, elf, messages):
+def package_qa_check_mime_xdg(path, name, d, elf):
"""
Check if package installs desktop file containing MimeType and requires
mime-types.bbclass to create /usr/share/applications/mimeinfo.cache
@@ -244,10 +238,10 @@ def package_qa_check_mime_xdg(path, name, d, elf, messages):
if name == d.getVar('PN'):
pkgname = '${PN}'
wstr += "If yes: add \'inhert mime-xdg\' and \'MIME_XDG_PACKAGES += \"%s\"\' / if no add \'INSANE_SKIP:%s += \"mime-xdg\"\' to recipe." % (pkgname, pkgname)
- oe.qa.add_message(messages, "mime-xdg", wstr)
+ oe.qa.handle_error("mime-xdg", wstr, d)
if mime_type_found:
- oe.qa.add_message(messages, "mime-xdg", "%s: contains desktop file with key 'MimeType' but does not inhert mime-xdg: %s" % \
- (name, package_qa_clean_path(path, d, name)))
+ oe.qa.handle_error("mime-xdg", "%s: contains desktop file with key 'MimeType' but does not inhert mime-xdg: %s" % \
+ (name, package_qa_clean_path(path, d, name)), d)
def package_qa_check_libdir(d):
"""
@@ -313,18 +307,18 @@ def package_qa_check_libdir(d):
oe.qa.handle_error("libdir", "\n".join(messages), d)
QAPATHTEST[debug-files] = "package_qa_check_dbg"
-def package_qa_check_dbg(path, name, d, elf, messages):
+def package_qa_check_dbg(path, name, d, elf):
"""
Check for ".debug" files or directories outside of the dbg package
"""
if not "-dbg" in name and not "-ptest" in name:
if '.debug' in path.split(os.path.sep):
- oe.qa.add_message(messages, "debug-files", "%s: non debug package contains .debug directory %s" % \
- (name, package_qa_clean_path(path, d, name)))
+ oe.qa.handle_error("debug-files", "%s: non debug package contains .debug directory %s" % \
+ (name, package_qa_clean_path(path, d, name)), d)
QAPATHTEST[arch] = "package_qa_check_arch"
-def package_qa_check_arch(path,name,d, elf, messages):
+def package_qa_check_arch(path,name,d, elf):
"""
Check if archs are compatible
"""
@@ -340,7 +334,7 @@ def package_qa_check_arch(path,name,d, elf, messages):
if host_arch == "allarch":
pn = d.getVar('PN')
- oe.qa.add_message(messages, "arch", pn + ": Recipe inherits the allarch class, but has packaged architecture-specific binaries")
+ oe.qa.handle_error("arch", pn + ": Recipe inherits the allarch class, but has packaged architecture-specific binaries", d)
return
# avoid following links to /usr/bin (e.g. on udev builds)
@@ -358,17 +352,18 @@ def package_qa_check_arch(path,name,d, elf, messages):
host_os == "linux-gnu_ilp32" or re.match(r'mips64.*32', d.getVar('DEFAULTTUNE')))
is_bpf = (oe.qa.elf_machine_to_string(elf.machine()) == "BPF")
if not ((machine == elf.machine()) or is_32 or is_bpf):
- oe.qa.add_message(messages, "arch", "Architecture did not match (%s, expected %s) in %s" % \
- (oe.qa.elf_machine_to_string(elf.machine()), oe.qa.elf_machine_to_string(machine), package_qa_clean_path(path, d, name)))
+ oe.qa.handle_error("arch", "Architecture did not match (%s, expected %s) in %s" % \
+ (oe.qa.elf_machine_to_string(elf.machine()), oe.qa.elf_machine_to_string(machine), package_qa_clean_path(path, d, name)), d)
elif not ((bits == elf.abiSize()) or is_32 or is_bpf):
- oe.qa.add_message(messages, "arch", "Bit size did not match (%d, expected %d) in %s" % \
- (elf.abiSize(), bits, package_qa_clean_path(path, d, name)))
+ oe.qa.handle_error("arch", "Bit size did not match (%d, expected %d) in %s" % \
+ (elf.abiSize(), bits, package_qa_clean_path(path, d, name)), d)
elif not ((littleendian == elf.isLittleEndian()) or is_bpf):
- oe.qa.add_message(messages, "arch", "Endiannes did not match (%d, expected %d) in %s" % \
- (elf.isLittleEndian(), littleendian, package_qa_clean_path(path, d, name)))
+ oe.qa.handle_error("arch", "Endiannes did not match (%d, expected %d) in %s" % \
+ (elf.isLittleEndian(), littleendian, package_qa_clean_path(path, d, name)), d)
+package_qa_check_arch[vardepsexclude] = "DEFAULTTUNE"
QAPATHTEST[desktop] = "package_qa_check_desktop"
-def package_qa_check_desktop(path, name, d, elf, messages):
+def package_qa_check_desktop(path, name, d, elf):
"""
Run all desktop files through desktop-file-validate.
"""
@@ -377,10 +372,10 @@ def package_qa_check_desktop(path, name, d, elf, messages):
output = os.popen("%s %s" % (desktop_file_validate, path))
# This only produces output on errors
for l in output:
- oe.qa.add_message(messages, "desktop", "Desktop file issue: " + l.strip())
+ oe.qa.handle_error("desktop", "Desktop file issue: " + l.strip(), d)
QAPATHTEST[textrel] = "package_qa_textrel"
-def package_qa_textrel(path, name, d, elf, messages):
+def package_qa_textrel(path, name, d, elf):
"""
Check if the binary contains relocations in .text
"""
@@ -392,21 +387,17 @@ def package_qa_textrel(path, name, d, elf, messages):
return
phdrs = elf.run_objdump("-p", d)
- sane = True
import re
textrel_re = re.compile(r"\s+TEXTREL\s+")
for line in phdrs.split("\n"):
if textrel_re.match(line):
- sane = False
- break
-
- if not sane:
- path = package_qa_clean_path(path, d, name)
- oe.qa.add_message(messages, "textrel", "%s: ELF binary %s has relocations in .text" % (name, path))
+ path = package_qa_clean_path(path, d, name)
+ oe.qa.handle_error("textrel", "%s: ELF binary %s has relocations in .text" % (name, path), d)
+ return
QAPATHTEST[ldflags] = "package_qa_hash_style"
-def package_qa_hash_style(path, name, d, elf, messages):
+def package_qa_hash_style(path, name, d, elf):
"""
Check if the binary has the right hash style...
"""
@@ -438,11 +429,12 @@ def package_qa_hash_style(path, name, d, elf, messages):
sane = True
if has_syms and not sane:
path = package_qa_clean_path(path, d, name)
- oe.qa.add_message(messages, "ldflags", "File %s in package %s doesn't have GNU_HASH (didn't pass LDFLAGS?)" % (path, name))
+ oe.qa.handle_error("ldflags", "File %s in package %s doesn't have GNU_HASH (didn't pass LDFLAGS?)" % (path, name), d)
+package_qa_hash_style[vardepsexclude] = "TCLIBC"
QAPATHTEST[buildpaths] = "package_qa_check_buildpaths"
-def package_qa_check_buildpaths(path, name, d, elf, messages):
+def package_qa_check_buildpaths(path, name, d, elf):
"""
Check for build paths inside target files and error if paths are not
explicitly ignored.
@@ -459,11 +451,11 @@ def package_qa_check_buildpaths(path, name, d, elf, messages):
file_content = f.read()
if tmpdir in file_content:
path = package_qa_clean_path(path, d, name)
- oe.qa.add_message(messages, "buildpaths", "File %s in package %s contains reference to TMPDIR" % (path, name))
+ oe.qa.handle_error("buildpaths", "File %s in package %s contains reference to TMPDIR" % (path, name), d)
QAPATHTEST[xorg-driver-abi] = "package_qa_check_xorg_driver_abi"
-def package_qa_check_xorg_driver_abi(path, name, d, elf, messages):
+def package_qa_check_xorg_driver_abi(path, name, d, elf):
"""
Check that all packages containing Xorg drivers have ABI dependencies
"""
@@ -478,20 +470,20 @@ def package_qa_check_xorg_driver_abi(path, name, d, elf, messages):
for rdep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + name) or ""):
if rdep.startswith("%sxorg-abi-" % mlprefix):
return
- oe.qa.add_message(messages, "xorg-driver-abi", "Package %s contains Xorg driver (%s) but no xorg-abi- dependencies" % (name, os.path.basename(path)))
+ oe.qa.handle_error("xorg-driver-abi", "Package %s contains Xorg driver (%s) but no xorg-abi- dependencies" % (name, os.path.basename(path)), d)
QAPATHTEST[infodir] = "package_qa_check_infodir"
-def package_qa_check_infodir(path, name, d, elf, messages):
+def package_qa_check_infodir(path, name, d, elf):
"""
Check that /usr/share/info/dir isn't shipped in a particular package
"""
infodir = d.expand("${infodir}/dir")
if infodir in path:
- oe.qa.add_message(messages, "infodir", "The %s file is not meant to be shipped in a particular package." % infodir)
+ oe.qa.handle_error("infodir", "The %s file is not meant to be shipped in a particular package." % infodir, d)
QAPATHTEST[symlink-to-sysroot] = "package_qa_check_symlink_to_sysroot"
-def package_qa_check_symlink_to_sysroot(path, name, d, elf, messages):
+def package_qa_check_symlink_to_sysroot(path, name, d, elf):
"""
Check that the package doesn't contain any absolute symlinks to the sysroot.
"""
@@ -501,10 +493,10 @@ def package_qa_check_symlink_to_sysroot(path, name, d, elf, messages):
tmpdir = d.getVar('TMPDIR')
if target.startswith(tmpdir):
path = package_qa_clean_path(path, d, name)
- oe.qa.add_message(messages, "symlink-to-sysroot", "Symlink %s in %s points to TMPDIR" % (path, name))
+ oe.qa.handle_error("symlink-to-sysroot", "Symlink %s in %s points to TMPDIR" % (path, name), d)
QAPATHTEST[32bit-time] = "check_32bit_symbols"
-def check_32bit_symbols(path, packagename, d, elf, messages):
+def check_32bit_symbols(path, packagename, d, elf):
"""
Check that ELF files do not use any 32 bit time APIs from glibc.
"""
@@ -623,11 +615,9 @@ def check_32bit_symbols(path, packagename, d, elf, messages):
if not allowed:
msgformat = elfpath + " uses 32-bit api '%s'"
for sym in usedapis:
- oe.qa.add_message(messages, '32bit-time', msgformat % sym)
- oe.qa.add_message(
- messages, '32bit-time',
- 'Suppress with INSANE_SKIP = "32bit-time"'
- )
+ oe.qa.handle_error('32bit-time', msgformat % sym, d)
+ oe.qa.handle_error('32bit-time', 'Suppress with INSANE_SKIP = "32bit-time"', d)
+check_32bit_symbols[vardepsexclude] = "OVERRIDES"
# Check license variables
do_populate_lic[postfuncs] += "populate_lic_qa_checksum"
@@ -788,54 +778,14 @@ def qa_check_staged(path,d):
oe.qa.handle_error("pkgconfig", error_msg, d)
if not skip_shebang_size:
- errors = {}
- package_qa_check_shebang_size(path, "", d, None, errors)
- for e in errors:
- oe.qa.handle_error(e, errors[e], d)
-
-
-# Run all package-wide warnfuncs and errorfuncs
-def package_qa_package(warnfuncs, errorfuncs, package, d):
- warnings = {}
- errors = {}
-
- for func in warnfuncs:
- func(package, d, warnings)
- for func in errorfuncs:
- func(package, d, errors)
-
- for w in warnings:
- oe.qa.handle_error(w, warnings[w], d)
- for e in errors:
- oe.qa.handle_error(e, errors[e], d)
-
- return len(errors) == 0
-
-# Run all recipe-wide warnfuncs and errorfuncs
-def package_qa_recipe(warnfuncs, errorfuncs, pn, d):
- warnings = {}
- errors = {}
-
- for func in warnfuncs:
- func(pn, d, warnings)
- for func in errorfuncs:
- func(pn, d, errors)
-
- for w in warnings:
- oe.qa.handle_error(w, warnings[w], d)
- for e in errors:
- oe.qa.handle_error(e, errors[e], d)
-
- return len(errors) == 0
+ package_qa_check_shebang_size(path, "", d, None)
def prepopulate_objdump_p(elf, d):
output = elf.run_objdump("-p", d)
return (elf.name, output)
# Walk over all files in a directory and call func
-def package_qa_walk(warnfuncs, errorfuncs, package, d):
- warnings = {}
- errors = {}
+def package_qa_walk(checkfuncs, package, d):
elves = {}
for path in pkgfiles[package]:
elf = None
@@ -856,18 +806,11 @@ def package_qa_walk(warnfuncs, errorfuncs, package, d):
for path in pkgfiles[package]:
if path in elves:
elves[path].open()
- for func in warnfuncs:
- func(path, package, d, elves.get(path), warnings)
- for func in errorfuncs:
- func(path, package, d, elves.get(path), errors)
+ for func in checkfuncs:
+ func(path, package, d, elves.get(path))
if path in elves:
elves[path].close()
- for w in warnings:
- oe.qa.handle_error(w, warnings[w], d)
- for e in errors:
- oe.qa.handle_error(e, errors[e], d)
-
def package_qa_check_rdepends(pkg, pkgdest, skip, taskdeps, packages, d):
# Don't do this check for kernel/module recipes, there aren't too many debug/development
# packages and you can get false positives e.g. on kernel-module-lirc-dev
@@ -988,20 +931,18 @@ def package_qa_check_deps(pkg, pkgdest, d):
check_valid_deps('RCONFLICTS')
QAPKGTEST[usrmerge] = "package_qa_check_usrmerge"
-def package_qa_check_usrmerge(pkg, d, messages):
-
+def package_qa_check_usrmerge(pkg, d):
pkgdest = d.getVar('PKGDEST')
pkg_dir = pkgdest + os.sep + pkg + os.sep
merged_dirs = ['bin', 'sbin', 'lib'] + d.getVar('MULTILIB_VARIANTS').split()
for f in merged_dirs:
if os.path.exists(pkg_dir + f) and not os.path.islink(pkg_dir + f):
msg = "%s package is not obeying usrmerge distro feature. /%s should be relocated to /usr." % (pkg, f)
- oe.qa.add_message(messages, "usrmerge", msg)
- return False
- return True
+ oe.qa.handle_error("usrmerge", msg, d)
+ return
QAPKGTEST[perllocalpod] = "package_qa_check_perllocalpod"
-def package_qa_check_perllocalpod(pkg, d, messages):
+def package_qa_check_perllocalpod(pkg, d):
"""
Check that the recipe didn't ship a perlocal.pod file, which shouldn't be
installed in a distribution package. cpan.bbclass sets NO_PERLLOCAL=1 to
@@ -1015,54 +956,47 @@ def package_qa_check_perllocalpod(pkg, d, messages):
if matches:
matches = [package_qa_clean_path(path, d, pkg) for path in matches]
msg = "%s contains perllocal.pod (%s), should not be installed" % (pkg, " ".join(matches))
- oe.qa.add_message(messages, "perllocalpod", msg)
+ oe.qa.handle_error("perllocalpod", msg, d)
QAPKGTEST[expanded-d] = "package_qa_check_expanded_d"
-def package_qa_check_expanded_d(package, d, messages):
+def package_qa_check_expanded_d(package, d):
"""
Check for the expanded D (${D}) value in pkg_* and FILES
variables, warn the user to use it correctly.
"""
- sane = True
expanded_d = d.getVar('D')
for var in 'FILES','pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm':
bbvar = d.getVar(var + ":" + package) or ""
if expanded_d in bbvar:
if var == 'FILES':
- oe.qa.add_message(messages, "expanded-d", "FILES in %s recipe should not contain the ${D} variable as it references the local build directory not the target filesystem, best solution is to remove the ${D} reference" % package)
- sane = False
+ oe.qa.handle_error("expanded-d", "FILES in %s recipe should not contain the ${D} variable as it references the local build directory not the target filesystem, best solution is to remove the ${D} reference" % package, d)
else:
- oe.qa.add_message(messages, "expanded-d", "%s in %s recipe contains ${D}, it should be replaced by $D instead" % (var, package))
- sane = False
- return sane
+ oe.qa.handle_error("expanded-d", "%s in %s recipe contains ${D}, it should be replaced by $D instead" % (var, package), d)
QAPKGTEST[unlisted-pkg-lics] = "package_qa_check_unlisted_pkg_lics"
-def package_qa_check_unlisted_pkg_lics(package, d, messages):
+def package_qa_check_unlisted_pkg_lics(package, d):
"""
Check that all licenses for a package are among the licenses for the recipe.
"""
pkg_lics = d.getVar('LICENSE:' + package)
if not pkg_lics:
- return True
+ return
recipe_lics_set = oe.license.list_licenses(d.getVar('LICENSE'))
package_lics = oe.license.list_licenses(pkg_lics)
unlisted = package_lics - recipe_lics_set
if unlisted:
- oe.qa.add_message(messages, "unlisted-pkg-lics",
+ oe.qa.handle_error("unlisted-pkg-lics",
"LICENSE:%s includes licenses (%s) that are not "
- "listed in LICENSE" % (package, ' '.join(unlisted)))
- return False
+ "listed in LICENSE" % (package, ' '.join(unlisted)), d)
obsolete = set(oe.license.obsolete_license_list()) & package_lics - recipe_lics_set
if obsolete:
- oe.qa.add_message(messages, "obsolete-license",
- "LICENSE:%s includes obsolete licenses %s" % (package, ' '.join(obsolete)))
- return False
- return True
+ oe.qa.handle_error("obsolete-license",
+ "LICENSE:%s includes obsolete licenses %s" % (package, ' '.join(obsolete)), d)
QAPKGTEST[empty-dirs] = "package_qa_check_empty_dirs"
-def package_qa_check_empty_dirs(pkg, d, messages):
+def package_qa_check_empty_dirs(pkg, d):
"""
Check for the existence of files in directories that are expected to be
empty.
@@ -1075,7 +1009,7 @@ def package_qa_check_empty_dirs(pkg, d, messages):
recommendation = (d.getVar('QA_EMPTY_DIRS_RECOMMENDATION:' + dir) or
"but it is expected to be empty")
msg = "%s installs files in %s, %s" % (pkg, dir, recommendation)
- oe.qa.add_message(messages, "empty-dirs", msg)
+ oe.qa.handle_error("empty-dirs", msg, d)
def package_qa_check_encoding(keys, encode, d):
def check_encoding(key, enc):
@@ -1099,7 +1033,7 @@ HOST_USER_UID := "${@os.getuid()}"
HOST_USER_GID := "${@os.getgid()}"
QAPATHTEST[host-user-contaminated] = "package_qa_check_host_user"
-def package_qa_check_host_user(path, name, d, elf, messages):
+def package_qa_check_host_user(path, name, d, elf):
"""Check for paths outside of /home which are owned by the user running bitbake."""
if not os.path.lexists(path):
@@ -1120,17 +1054,14 @@ def package_qa_check_host_user(path, name, d, elf, messages):
else:
check_uid = int(d.getVar('HOST_USER_UID'))
if stat.st_uid == check_uid:
- oe.qa.add_message(messages, "host-user-contaminated", "%s: %s is owned by uid %d, which is the same as the user running bitbake. This may be due to host contamination" % (pn, package_qa_clean_path(path, d, name), check_uid))
- return False
+ oe.qa.handle_error("host-user-contaminated", "%s: %s is owned by uid %d, which is the same as the user running bitbake. This may be due to host contamination" % (pn, package_qa_clean_path(path, d, name), check_uid), d)
check_gid = int(d.getVar('HOST_USER_GID'))
if stat.st_gid == check_gid:
- oe.qa.add_message(messages, "host-user-contaminated", "%s: %s is owned by gid %d, which is the same as the user running bitbake. This may be due to host contamination" % (pn, package_qa_clean_path(path, d, name), check_gid))
- return False
- return True
+ oe.qa.handle_error("host-user-contaminated", "%s: %s is owned by gid %d, which is the same as the user running bitbake. This may be due to host contamination" % (pn, package_qa_clean_path(path, d, name), check_gid), d)
QARECIPETEST[unhandled-features-check] = "package_qa_check_unhandled_features_check"
-def package_qa_check_unhandled_features_check(pn, d, messages):
+def package_qa_check_unhandled_features_check(pn, d):
if not bb.data.inherits_class('features_check', d):
var_set = False
for kind in ['DISTRO', 'MACHINE', 'COMBINED']:
@@ -1141,13 +1072,32 @@ def package_qa_check_unhandled_features_check(pn, d, messages):
oe.qa.handle_error("unhandled-features-check", "%s: recipe doesn't inherit features_check" % pn, d)
QARECIPETEST[missing-update-alternatives] = "package_qa_check_missing_update_alternatives"
-def package_qa_check_missing_update_alternatives(pn, d, messages):
+def package_qa_check_missing_update_alternatives(pn, d):
# Look at all packages and find out if any of those sets ALTERNATIVE variable
# without inheriting update-alternatives class
for pkg in (d.getVar('PACKAGES') or '').split():
if d.getVar('ALTERNATIVE:%s' % pkg) and not bb.data.inherits_class('update-alternatives', d):
oe.qa.handle_error("missing-update-alternatives", "%s: recipe defines ALTERNATIVE:%s but doesn't inherit update-alternatives. This might fail during do_rootfs later!" % (pn, pkg), d)
+def parse_test_matrix(matrix_name, skip, d):
+ testmatrix = d.getVarFlags(matrix_name) or {}
+ g = globals()
+ checks = []
+ for w in (d.getVar("WARN_QA") or "").split():
+ if w in skip:
+ continue
+ if w in testmatrix and testmatrix[w] in g:
+ checks.append(g[testmatrix[w]])
+
+ for e in (d.getVar("ERROR_QA") or "").split():
+ if e in skip:
+ continue
+ if e in testmatrix and testmatrix[e] in g:
+ checks.append(g[testmatrix[e]])
+ return checks
+parse_test_matrix[vardepsexclude] = "ERROR_QA WARN_QA"
+
+
# The PACKAGE FUNC to scan each package
python do_package_qa () {
import subprocess
@@ -1203,24 +1153,6 @@ python do_package_qa () {
for dep in taskdepdata:
taskdeps.add(taskdepdata[dep][0])
- def parse_test_matrix(matrix_name):
- testmatrix = d.getVarFlags(matrix_name) or {}
- g = globals()
- warnchecks = []
- for w in (d.getVar("WARN_QA") or "").split():
- if w in skip:
- continue
- if w in testmatrix and testmatrix[w] in g:
- warnchecks.append(g[testmatrix[w]])
-
- errorchecks = []
- for e in (d.getVar("ERROR_QA") or "").split():
- if e in skip:
- continue
- if e in testmatrix and testmatrix[e] in g:
- errorchecks.append(g[testmatrix[e]])
- return warnchecks, errorchecks
-
for package in packages:
skip = set((d.getVar('INSANE_SKIP') or "").split() +
(d.getVar('INSANE_SKIP:' + package) or "").split())
@@ -1233,20 +1165,21 @@ python do_package_qa () {
oe.qa.handle_error("pkgname",
"%s doesn't match the [a-z0-9.+-]+ regex" % package, d)
- warn_checks, error_checks = parse_test_matrix("QAPATHTEST")
- package_qa_walk(warn_checks, error_checks, package, d)
+ checks = parse_test_matrix("QAPATHTEST", skip, d)
+ package_qa_walk(checks, package, d)
- warn_checks, error_checks = parse_test_matrix("QAPKGTEST")
- package_qa_package(warn_checks, error_checks, package, d)
+ checks = parse_test_matrix("QAPKGTEST", skip, d)
+ for func in checks:
+ func(package, d)
package_qa_check_rdepends(package, pkgdest, skip, taskdeps, packages, d)
package_qa_check_deps(package, pkgdest, d)
- warn_checks, error_checks = parse_test_matrix("QARECIPETEST")
- package_qa_recipe(warn_checks, error_checks, pn, d)
+ checks = parse_test_matrix("QARECIPETEST", skip, d)
+ for func in checks:
+ func(pn, d)
- if 'libdir' in d.getVar("ALL_QA").split():
- package_qa_check_libdir(d)
+ package_qa_check_libdir(d)
oe.qa.exit_if_errors(d)
}
@@ -1264,6 +1197,10 @@ python() {
pkgs = (d.getVar('PACKAGES') or '').split()
for pkg in pkgs:
d.appendVarFlag("do_package_qa", "vardeps", " INSANE_SKIP:{}".format(pkg))
+ funcs = d.getVarFlags("QAPATHTEST")
+ funcs.update(d.getVarFlags("QAPKGTEST"))
+ funcs.update(d.getVarFlags("QARECIPETEST"))
+ d.appendVarFlag("do_package_qa", "vardeps", " ".join(funcs.values()))
}
SSTATETASKS += "do_package_qa"
@@ -1363,10 +1300,10 @@ python do_qa_patch() {
srcdir = d.getVar('S')
if not bb.utils.contains('DISTRO_FEATURES', 'ptest', True, False, d):
pass
+ elif not (bb.utils.contains('ERROR_QA', 'unimplemented-ptest', True, False, d) or bb.utils.contains('WARN_QA', 'unimplemented-ptest', True, False, d)):
+ pass
elif bb.data.inherits_class('ptest', d):
bb.note("Package %s QA: skipping unimplemented-ptest: ptest implementation detected" % d.getVar('PN'))
- elif srcdir == d.getVar('WORKDIR'):
- bb.note("Package %s QA: skipping unimplemented-ptest: This check is not supported for recipe with \"S = \"${WORKDIR}\"" % d.getVar('PN'))
# Detect perl Test:: based tests
elif os.path.exists(os.path.join(srcdir, "t")) and any(filename.endswith('.t') for filename in os.listdir(os.path.join(srcdir, 't'))):
@@ -1562,8 +1499,7 @@ do_unpack[postfuncs] += "do_qa_unpack"
python () {
import re
- tests = d.getVar('ALL_QA').split()
- if "desktop" in tests:
+ if bb.utils.contains('ERROR_QA', 'desktop', True, False, d) or bb.utils.contains('WARN_QA', 'desktop', True, False, d):
d.appendVar("PACKAGE_DEPENDS", " desktop-file-utils-native")
###########################################################################
@@ -1613,11 +1549,10 @@ python () {
oe.qa.handle_error("pkgvarcheck", "recipe uses DEPENDS:${PN}, should use DEPENDS", d)
# virtual/ is meaningless for these variables
- if "virtual-slash" in (d.getVar("ALL_QA") or "").split():
- for k in ['RDEPENDS', 'RPROVIDES']:
- for var in bb.utils.explode_deps(d.getVar(k + ':' + pn) or ""):
- if var.startswith("virtual/"):
- oe.qa.handle_error("virtual-slash", "%s is set to %s but the substring 'virtual/' holds no meaning in this context. It only works for build time dependencies, not runtime ones. It is suggested to use 'VIRTUAL-RUNTIME_' variables instead." % (k, var), d)
+ for k in ['RDEPENDS', 'RPROVIDES']:
+ for var in bb.utils.explode_deps(d.getVar(k + ':' + pn) or ""):
+ if var.startswith("virtual/"):
+ oe.qa.handle_error("virtual-slash", "%s is set to %s but the substring 'virtual/' holds no meaning in this context. It only works for build time dependencies, not runtime ones. It is suggested to use 'VIRTUAL-RUNTIME_' variables instead." % (k, var), d)
issues = []
if (d.getVar('PACKAGES') or "").split():
@@ -1627,8 +1562,7 @@ python () {
if d.getVar(var, False):
issues.append(var)
- fakeroot_tests = d.getVar('FAKEROOT_QA').split()
- if set(tests) & set(fakeroot_tests):
+ if bb.utils.contains('ERROR_QA', 'host-user-contaminated', True, False, d) or bb.utils.contains('WARN_QA', 'host-user-contaminated', True, False, d):
d.setVarFlag('do_package_qa', 'fakeroot', '1')
d.appendVarFlag('do_package_qa', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
else:
diff --git a/poky/meta/classes-global/license.bbclass b/poky/meta/classes-global/license.bbclass
index b2e0d3faba..043715fcc3 100644
--- a/poky/meta/classes-global/license.bbclass
+++ b/poky/meta/classes-global/license.bbclass
@@ -18,8 +18,14 @@ LICENSE_CREATE_PACKAGE ??= "0"
LICENSE_PACKAGE_SUFFIX ??= "-lic"
LICENSE_FILES_DIRECTORY ??= "${datadir}/licenses/"
+LICENSE_DEPLOY_PATHCOMPONENT = "${SSTATE_PKGARCH}"
+LICENSE_DEPLOY_PATHCOMPONENT:class-cross = "native"
+LICENSE_DEPLOY_PATHCOMPONENT:class-native = "native"
+# Ensure the *value* of SSTATE_PKGARCH is captured as it is used in the output paths
+LICENSE_DEPLOY_PATHCOMPONENT[vardepvalue] += "${LICENSE_DEPLOY_PATHCOMPONENT}"
+
addtask populate_lic after do_patch before do_build
-do_populate_lic[dirs] = "${LICSSTATEDIR}/${PN}"
+do_populate_lic[dirs] = "${LICSSTATEDIR}/${LICENSE_DEPLOY_PATHCOMPONENT}/${PN}"
do_populate_lic[cleandirs] = "${LICSSTATEDIR}"
python do_populate_lic() {
@@ -29,7 +35,7 @@ python do_populate_lic() {
lic_files_paths = find_license_files(d)
# The base directory we wrangle licenses to
- destdir = os.path.join(d.getVar('LICSSTATEDIR'), d.getVar('SSTATE_PKGARCH'), d.getVar('PN'))
+ destdir = os.path.join(d.getVar('LICSSTATEDIR'), d.getVar('LICENSE_DEPLOY_PATHCOMPONENT'), d.getVar('PN'))
copy_license_files(lic_files_paths, destdir)
info = get_recipe_info(d)
with open(os.path.join(destdir, "recipeinfo"), "w") as f:
@@ -39,7 +45,7 @@ python do_populate_lic() {
}
PSEUDO_IGNORE_PATHS .= ",${@','.join(((d.getVar('COMMON_LICENSE_DIR') or '') + ' ' + (d.getVar('LICENSE_PATH') or '') + ' ' + d.getVar('COREBASE') + '/meta/COPYING').split())}"
-# it would be better to copy them in do_install:append, but find_license_filesa is python
+# it would be better to copy them in do_install:append, but find_license_files is python
python perform_packagecopy:prepend () {
enabled = oe.data.typed_value('LICENSE_CREATE_PACKAGE', d)
if d.getVar('CLASSOVERRIDE') == 'class-target' and enabled:
@@ -149,14 +155,14 @@ def find_license_files(d):
# and "with exceptions" being *
# we'll just strip out the modifier and put
# the base license.
- find_license(node.s.replace("+", "").replace("*", ""))
+ find_licenses(node.s.replace("+", "").replace("*", ""))
self.generic_visit(node)
def visit_Constant(self, node):
- find_license(node.value.replace("+", "").replace("*", ""))
+ find_licenses(node.value.replace("+", "").replace("*", ""))
self.generic_visit(node)
- def find_license(license_type):
+ def find_licenses(license_type):
try:
bb.utils.mkdirhier(gen_lic_dest)
except:
diff --git a/poky/meta/classes-global/mirrors.bbclass b/poky/meta/classes-global/mirrors.bbclass
index 862648eec5..d68d30b0f2 100644
--- a/poky/meta/classes-global/mirrors.bbclass
+++ b/poky/meta/classes-global/mirrors.bbclass
@@ -88,7 +88,7 @@ git://.*/.* git://HOST/git/PATH;protocol=https \
BB_GIT_SHALLOW:pn-binutils = "1"
BB_GIT_SHALLOW:pn-binutils-cross-${TARGET_ARCH} = "1"
BB_GIT_SHALLOW:pn-binutils-cross-canadian-${TRANSLATED_TARGET_ARCH} = "1"
-BB_GIT_SHALLOW:pn-binutils-cross-testsuite = "1"
+BB_GIT_SHALLOW:pn-binutils-testsuite = "1"
BB_GIT_SHALLOW:pn-binutils-crosssdk-${SDK_SYS} = "1"
BB_GIT_SHALLOW:pn-binutils-native = "1"
BB_GIT_SHALLOW:pn-nativesdk-binutils = "1"
diff --git a/poky/meta/classes-global/package.bbclass b/poky/meta/classes-global/package.bbclass
index aa1eb5e901..6cd8c0140f 100644
--- a/poky/meta/classes-global/package.bbclass
+++ b/poky/meta/classes-global/package.bbclass
@@ -476,7 +476,7 @@ python do_package () {
# cache. This is useful if an item this class depends on changes in a
# way that the output of this class changes. rpmdeps is a good example
# as any change to rpmdeps requires this to be rerun.
- # PACKAGE_BBCLASS_VERSION = "5"
+ # PACKAGE_BBCLASS_VERSION = "6"
# Init cachedpath
global cpath
diff --git a/poky/meta/classes-global/package_rpm.bbclass b/poky/meta/classes-global/package_rpm.bbclass
index 474d2491eb..021c53593f 100644
--- a/poky/meta/classes-global/package_rpm.bbclass
+++ b/poky/meta/classes-global/package_rpm.bbclass
@@ -10,7 +10,7 @@ IMAGE_PKGTYPE ?= "rpm"
RPM = "rpm"
RPMBUILD = "rpmbuild"
-RPMBUILD_COMPMODE ?= "${@'w19T%d.zstdio' % int(d.getVar('ZSTD_THREADS'))}"
+RPMBUILD_COMPMODE ?= "${@'w3T%d.zstdio' % int(d.getVar('ZSTD_THREADS'))}"
PKGWRITEDIRRPM = "${WORKDIR}/deploy-rpms"
@@ -201,14 +201,22 @@ python write_specfile () {
try:
owner = pwd.getpwuid(stat_f.st_uid).pw_name
except Exception as e:
- bb.error("Content of /etc/passwd in sysroot:\n{}".format(
- open(d.getVar("RECIPE_SYSROOT") +"/etc/passwd").read()))
+ filename = d.getVar('RECIPE_SYSROOT') + '/etc/passwd'
+ if os.path.exists(filename):
+ bb.error("Content of /etc/passwd in sysroot:\n{}".format(
+ open(filename).read()))
+ else:
+ bb.error("File {} doesn't exist in sysroot!".format(filename))
raise e
try:
group = grp.getgrgid(stat_f.st_gid).gr_name
except Exception as e:
- bb.error("Content of /etc/group in sysroot:\n{}".format(
- open(d.getVar("RECIPE_SYSROOT") +"/etc/group").read()))
+ filename = d.getVar("RECIPE_SYSROOT") +"/etc/group"
+ if os.path.exists(filename):
+ bb.error("Content of /etc/group in sysroot:\n{}".format(
+ open(filename).read()))
+ else:
+ bb.error("File {} doesn't exist in sysroot!".format(filename))
raise e
return "%attr({:o},{},{}) ".format(mode, owner, group)
@@ -688,6 +696,7 @@ python do_package_rpm () {
cmd = cmd + " --define '_use_internal_dependency_generator 0'"
cmd = cmd + " --define '_binaries_in_noarch_packages_terminate_build 0'"
cmd = cmd + " --define '_build_id_links none'"
+ cmd = cmd + " --define '_smp_ncpus_max 4'"
cmd = cmd + " --define '_source_payload %s'" % rpmbuild_compmode
cmd = cmd + " --define '_binary_payload %s'" % rpmbuild_compmode
cmd = cmd + " --define 'clamp_mtime_to_source_date_epoch 1'"
diff --git a/poky/meta/classes-global/retain.bbclass b/poky/meta/classes-global/retain.bbclass
new file mode 100644
index 0000000000..46e8c256cf
--- /dev/null
+++ b/poky/meta/classes-global/retain.bbclass
@@ -0,0 +1,182 @@
+# Creates a tarball of the work directory for a recipe when one of its
+# tasks fails, or any other nominated directories.
+# Useful in cases where the environment in which builds are run is
+# ephemeral or otherwise inaccessible for examination during
+# debugging.
+#
+# To enable, simply add the following to your configuration:
+#
+# INHERIT += "retain"
+#
+# You can specify the recipe-specific directories to save upon failure
+# or always (space-separated) e.g.:
+#
+# RETAIN_DIRS_FAILURE = "${WORKDIR};prefix=workdir" # default
+# RETAIN_DIRS_ALWAYS = "${T}"
+#
+# Naturally you can use overrides to limit it to a specific recipe:
+# RETAIN_DIRS_ALWAYS:pn-somerecipe = "${T}"
+#
+# You can also specify global (non-recipe-specific) directories to save:
+#
+# RETAIN_DIRS_GLOBAL_FAILURE = "${LOG_DIR}"
+# RETAIN_DIRS_GLOBAL_ALWAYS = "${BUILDSTATS_BASE}"
+#
+# If you wish to use a different tarball name prefix than the default of
+# the directory name, you can do so by specifying a ;prefix= followed by
+# the desired prefix (no spaces) in any of the RETAIN_DIRS_* variables.
+# e.g. to always save the log files with a "recipelogs" as the prefix for
+# the tarball of ${T} you would do this:
+#
+# RETAIN_DIRS_ALWAYS = "${T};prefix=recipelogs"
+#
+# Notes:
+# * For this to be useful you also need corresponding logic in your build
+# orchestration tool to pick up any files written out to RETAIN_OUTDIR
+# (with the other assumption being that no files are present there at
+# the start of the build, since there is no logic to purge old files).
+# * Work directories can be quite large, so saving them can take some time
+# and of course space.
+# * Tarball creation is deferred to the end of the build, thus you will
+# get the state at the end, not immediately upon failure.
+# * Extra directories must naturally be populated at the time the retain
+# class goes to save them (build completion); to try to ensure this for
+# things that are also saved on build completion (e.g. buildstats), put
+# the INHERIT += "retain" after the INHERIT += lines for the class that
+# is writing out the data that you wish to save.
+# * The tarballs have the tarball name as a top-level directory so that
+# multiple tarballs can be extracted side-by-side easily.
+#
+# Copyright (c) 2020, 2024 Microsoft Corporation
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+RETAIN_OUTDIR ?= "${TMPDIR}/retained"
+RETAIN_DIRS_FAILURE ?= "${WORKDIR};prefix=workdir"
+RETAIN_DIRS_ALWAYS ?= ""
+RETAIN_DIRS_GLOBAL_FAILURE ?= ""
+RETAIN_DIRS_GLOBAL_ALWAYS ?= ""
+RETAIN_TARBALL_SUFFIX ?= "${DATETIME}.tar.gz"
+RETAIN_ENABLED ?= "1"
+
+
+def retain_retain_dir(desc, tarprefix, path, tarbasepath, d):
+ import datetime
+
+ outdir = d.getVar('RETAIN_OUTDIR')
+ bb.utils.mkdirhier(outdir)
+ suffix = d.getVar('RETAIN_TARBALL_SUFFIX')
+ tarname = '%s_%s' % (tarprefix, suffix)
+ tarfp = os.path.join(outdir, '%s' % tarname)
+ tardir = os.path.relpath(path, tarbasepath)
+ cmdargs = ['tar', 'cfa', tarfp]
+ # Prefix paths within the tarball with the tarball name so that
+ # multiple tarballs can be extracted side-by-side
+ tarname_noext = os.path.splitext(tarname)[0]
+ if tarname_noext.endswith('.tar'):
+ tarname_noext = tarname_noext[:-4]
+ cmdargs += ['--transform', 's:^:%s/:' % tarname_noext]
+ cmdargs += [tardir]
+ try:
+ bb.process.run(cmdargs, cwd=tarbasepath)
+ except bb.process.ExecutionError as e:
+ # It is possible for other tasks to be writing to the workdir
+ # while we are tarring it up, in which case tar will return 1,
+ # but we don't care in this situation (tar returns 2 for other
+ # errors so we will see those)
+ if e.exitcode != 1:
+ bb.warn('retain: error saving %s: %s' % (desc, str(e)))
+
+
+addhandler retain_task_handler
+retain_task_handler[eventmask] = "bb.build.TaskFailed bb.build.TaskSucceeded"
+
+addhandler retain_build_handler
+retain_build_handler[eventmask] = "bb.event.BuildStarted bb.event.BuildCompleted"
+
+python retain_task_handler() {
+ if d.getVar('RETAIN_ENABLED') != '1':
+ return
+
+ dirs = d.getVar('RETAIN_DIRS_ALWAYS')
+ if isinstance(e, bb.build.TaskFailed):
+ dirs += ' ' + d.getVar('RETAIN_DIRS_FAILURE')
+
+ dirs = dirs.strip().split()
+ if dirs:
+ outdir = d.getVar('RETAIN_OUTDIR')
+ bb.utils.mkdirhier(outdir)
+ dirlist_file = os.path.join(outdir, 'retain_dirs.list')
+ pn = d.getVar('PN')
+ taskname = d.getVar('BB_CURRENTTASK')
+ with open(dirlist_file, 'a') as f:
+ for entry in dirs:
+ f.write('%s %s %s\n' % (pn, taskname, entry))
+}
+
+python retain_build_handler() {
+ outdir = d.getVar('RETAIN_OUTDIR')
+ dirlist_file = os.path.join(outdir, 'retain_dirs.list')
+
+ if isinstance(e, bb.event.BuildStarted):
+ if os.path.exists(dirlist_file):
+ os.remove(dirlist_file)
+ return
+
+ if d.getVar('RETAIN_ENABLED') != '1':
+ return
+
+ savedirs = {}
+ try:
+ with open(dirlist_file, 'r') as f:
+ for line in f:
+ pn, _, path = line.rstrip().split()
+ if not path in savedirs:
+ savedirs[path] = pn
+ os.remove(dirlist_file)
+ except FileNotFoundError:
+ pass
+
+ if e.getFailures():
+ for path in (d.getVar('RETAIN_DIRS_GLOBAL_FAILURE') or '').strip().split():
+ savedirs[path] = ''
+
+ for path in (d.getVar('RETAIN_DIRS_GLOBAL_ALWAYS') or '').strip().split():
+ savedirs[path] = ''
+
+ if savedirs:
+ bb.plain('NOTE: retain: retaining build output...')
+ count = 0
+ for path, pn in savedirs.items():
+ prefix = None
+ if ';' in path:
+ pathsplit = path.split(';')
+ path = pathsplit[0]
+ for param in pathsplit[1:]:
+ if '=' in param:
+ name, value = param.split('=', 1)
+ if name == 'prefix':
+ prefix = value
+ else:
+ bb.error('retain: invalid parameter "%s" in RETAIN_* variable value' % param)
+ return
+ else:
+ bb.error('retain: parameter "%s" missing value in RETAIN_* variable value' % param)
+ return
+ if prefix:
+ itemname = prefix
+ else:
+ itemname = os.path.basename(path)
+ if pn:
+ # Always add the recipe name in front
+ itemname = pn + '_' + itemname
+ if os.path.exists(path):
+ retain_retain_dir(itemname, itemname, path, os.path.dirname(path), d)
+ count += 1
+ else:
+ bb.warn('retain: path %s does not currently exist' % path)
+ if count:
+ item = 'archive' if count == 1 else 'archives'
+ bb.plain('NOTE: retain: saved %d %s to %s' % (count, item, outdir))
+}
diff --git a/poky/meta/classes-global/sanity.bbclass b/poky/meta/classes-global/sanity.bbclass
index 1d242f0f0a..72dab0fea2 100644
--- a/poky/meta/classes-global/sanity.bbclass
+++ b/poky/meta/classes-global/sanity.bbclass
@@ -475,6 +475,29 @@ def check_wsl(d):
bb.warn("You are running bitbake under WSLv2, this works properly but you should optimize your VHDX file eventually to avoid running out of storage space")
return None
+def check_userns():
+ """
+ Check that user namespaces are functional, as they're used for network isolation.
+ """
+
+ # There is a known failure case with AppArmor where the unshare() call
+ # succeeds (at which point the uid is nobody) but writing to the uid_map
+ # fails (so the uid isn't reset back to the user's uid). We can detect this.
+ parentuid = os.getuid()
+ pid = os.fork()
+ if not pid:
+ try:
+ bb.utils.disable_network()
+ except:
+ pass
+ os._exit(parentuid != os.getuid())
+
+ ret = os.waitpid(pid, 0)[1]
+ if ret:
+ bb.fatal("User namespaces are not usable by BitBake, possibly due to AppArmor.\n"
+ "See https://discourse.ubuntu.com/t/ubuntu-24-04-lts-noble-numbat-release-notes/39890#unprivileged-user-namespace-restrictions for more information.")
+
+
# Require at least gcc version 8.0
#
# This can be fixed on CentOS-7 with devtoolset-6+
@@ -641,6 +664,7 @@ def check_sanity_version_change(status, d):
status.addresult(check_git_version(d))
status.addresult(check_perl_modules(d))
status.addresult(check_wsl(d))
+ status.addresult(check_userns())
missing = ""
diff --git a/poky/meta/classes-global/sstate.bbclass b/poky/meta/classes-global/sstate.bbclass
index beb22f424e..8e0391c666 100644
--- a/poky/meta/classes-global/sstate.bbclass
+++ b/poky/meta/classes-global/sstate.bbclass
@@ -4,7 +4,7 @@
# SPDX-License-Identifier: MIT
#
-SSTATE_VERSION = "12"
+SSTATE_VERSION = "14"
SSTATE_ZSTD_CLEVEL ??= "8"
@@ -103,7 +103,6 @@ SSTATECREATEFUNCS[vardeps] = "SSTATE_SCAN_FILES"
SSTATEPOSTCREATEFUNCS = ""
SSTATEPREINSTFUNCS = ""
SSTATEPOSTUNPACKFUNCS = "sstate_hardcode_path_unpack"
-SSTATEPOSTINSTFUNCS = ""
EXTRA_STAGING_FIXMES ?= "HOSTTOOLS_DIR"
# Check whether sstate exists for tasks that support sstate and are in the
@@ -161,7 +160,10 @@ python () {
d.setVar('SSTATETASKS', " ".join(unique_tasks))
for task in unique_tasks:
d.prependVarFlag(task, 'prefuncs', "sstate_task_prefunc ")
- d.appendVarFlag(task, 'postfuncs', " sstate_task_postfunc")
+ # Generally sstate should be last, except for buildhistory functions
+ postfuncs = (d.getVarFlag(task, 'postfuncs') or "").split()
+ newpostfuncs = [p for p in postfuncs if "buildhistory" not in p] + ["sstate_task_postfunc"] + [p for p in postfuncs if "buildhistory" in p]
+ d.setVarFlag(task, 'postfuncs', " ".join(newpostfuncs))
d.setVarFlag(task, 'network', '1')
d.setVarFlag(task + "_setscene", 'network', '1')
}
@@ -349,15 +351,10 @@ def sstate_install(ss, d):
prepdir(dest)
bb.utils.rename(src, dest)
- for postinst in (d.getVar('SSTATEPOSTINSTFUNCS') or '').split():
- # All hooks should run in the SSTATE_INSTDIR
- bb.build.exec_func(postinst, d, (sstateinst,))
-
for lock in locks:
bb.utils.unlockfile(lock)
-sstate_install[vardepsexclude] += "SSTATE_ALLOW_OVERLAP_FILES SSTATE_MANMACH SSTATE_MANFILEPREFIX"
-sstate_install[vardeps] += "${SSTATEPOSTINSTFUNCS}"
+sstate_install[vardepsexclude] += "SSTATE_ALLOW_OVERLAP_FILES SSTATE_MANMACH SSTATE_MANFILEPREFIX STAMP"
def sstate_installpkg(ss, d):
from oe.gpg_sign import get_signer
@@ -644,15 +641,6 @@ def sstate_package(ss, d):
tmpdir = d.getVar('TMPDIR')
- fixtime = False
- if ss['task'] == "package":
- fixtime = True
-
- def fixtimestamp(root, path):
- f = os.path.join(root, path)
- if os.lstat(f).st_mtime > sde:
- os.utime(f, (sde, sde), follow_symlinks=False)
-
sstatebuild = d.expand("${WORKDIR}/sstate-build-%s/" % ss['task'])
sde = int(d.getVar("SOURCE_DATE_EPOCH") or time.time())
d.setVar("SSTATE_CURRTASK", ss['task'])
@@ -667,8 +655,6 @@ def sstate_package(ss, d):
# to sstate tasks but there aren't many of these so better just avoid them entirely.
for walkroot, dirs, files in os.walk(state[1]):
for file in files + dirs:
- if fixtime:
- fixtimestamp(walkroot, file)
srcpath = os.path.join(walkroot, file)
if not os.path.islink(srcpath):
continue
@@ -690,11 +676,6 @@ def sstate_package(ss, d):
bb.utils.mkdirhier(plain)
bb.utils.mkdirhier(pdir)
bb.utils.rename(plain, pdir)
- if fixtime:
- fixtimestamp(pdir, "")
- for walkroot, dirs, files in os.walk(pdir):
- for file in files + dirs:
- fixtimestamp(walkroot, file)
d.setVar('SSTATE_BUILDDIR', sstatebuild)
d.setVar('SSTATE_INSTDIR', sstatebuild)
@@ -727,7 +708,7 @@ def sstate_package(ss, d):
return
-sstate_package[vardepsexclude] += "SSTATE_SIG_KEY"
+sstate_package[vardepsexclude] += "SSTATE_SIG_KEY SSTATE_PKG"
def pstaging_fetch(sstatefetch, d):
import bb.fetch2
diff --git a/poky/meta/classes-global/staging.bbclass b/poky/meta/classes-global/staging.bbclass
index 3678a1b441..c2213ffa2b 100644
--- a/poky/meta/classes-global/staging.bbclass
+++ b/poky/meta/classes-global/staging.bbclass
@@ -652,10 +652,17 @@ python do_prepare_recipe_sysroot () {
addtask do_prepare_recipe_sysroot before do_configure after do_fetch
python staging_taskhandler() {
+ EXCLUDED_TASKS = (
+ "do_prepare_recipe_sysroot",
+ "do_create_spdx",
+ )
bbtasks = e.tasklist
for task in bbtasks:
+ if task in EXCLUDED_TASKS:
+ continue
+
deps = d.getVarFlag(task, "depends")
- if task != 'do_prepare_recipe_sysroot' and (task == "do_configure" or (deps and "populate_sysroot" in deps)):
+ if task == "do_configure" or (deps and "populate_sysroot" in deps):
d.prependVarFlag(task, "prefuncs", "extend_recipe_sysroot ")
}
staging_taskhandler[eventmask] = "bb.event.RecipeTaskPreProcess"
diff --git a/poky/meta/classes-global/utils.bbclass b/poky/meta/classes-global/utils.bbclass
index 957389928f..c9cae8930f 100644
--- a/poky/meta/classes-global/utils.bbclass
+++ b/poky/meta/classes-global/utils.bbclass
@@ -15,7 +15,7 @@ oe_soinstall() {
;;
esac
install -m 755 $1 $2/$libname
- sonamelink=`${READELF} -d $1 |grep 'Library soname:' |sed -e 's/.*\[\(.*\)\].*/\1/'`
+ sonamelink=`${OBJDUMP} -p $1 | grep SONAME | awk '{print $2}'`
if [ -z $sonamelink ]; then
bbfatal "oe_soinstall: $libname is missing ELF tag 'SONAME'."
fi
@@ -147,7 +147,7 @@ oe_libinstall() {
# special case hack for non-libtool .so.#.#.# links
baselibfile=`basename "$libfile"`
if (echo $baselibfile | grep -qE '^lib.*\.so\.[0-9.]*$'); then
- sonamelink=`${READELF} -d $libfile |grep 'Library soname:' |sed -e 's/.*\[\(.*\)\].*/\1/'`
+ sonamelink=`${OBJDUMP} -p $libfile | grep SONAME | awk '{print $2}'`
solink=`echo $baselibfile | sed -e 's/\.so\..*/.so/'`
if [ -n "$sonamelink" -a x"$baselibfile" != x"$sonamelink" ]; then
__runcmd ln -sf $baselibfile $destpath/$sonamelink