Diffstat (limited to 'poky/meta/classes')
-rw-r--r--  poky/meta/classes/archiver.bbclass        |   2
-rw-r--r--  poky/meta/classes/buildhistory.bbclass    |  85
-rw-r--r--  poky/meta/classes/create-spdx-2.2.bbclass |  72
-rw-r--r--  poky/meta/classes/create-spdx-3.0.bbclass | 192
-rw-r--r--  poky/meta/classes/create-spdx.bbclass     |   2
-rw-r--r--  poky/meta/classes/cve-check.bbclass       | 300
-rw-r--r--  poky/meta/classes/spdx-common.bbclass     | 204
-rw-r--r--  poky/meta/classes/vex.bbclass             | 311
8 files changed, 718 insertions(+), 450 deletions(-)
diff --git a/poky/meta/classes/archiver.bbclass b/poky/meta/classes/archiver.bbclass
index 9d286224d6..df271feddd 100644
--- a/poky/meta/classes/archiver.bbclass
+++ b/poky/meta/classes/archiver.bbclass
@@ -339,7 +339,7 @@ python do_ar_mirror() {
dl_dir = d.getVar('DL_DIR')
mirror_exclusions = (d.getVar('ARCHIVER_MIRROR_EXCLUDE') or '').split()
mirror_mode = d.getVarFlag('ARCHIVER_MODE', 'mirror')
- have_mirror_tarballs = d.getVar('BB_GENERATE_MIRROR_TARBALLS')
+ have_mirror_tarballs = oe.types.boolean(d.getVar('BB_GENERATE_MIRROR_TARBALLS'))
if mirror_mode == 'combined':
destdir = d.getVar('ARCHIVER_COMBINED_MIRRORDIR')
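
The conversion to oe.types.boolean() above matters because d.getVar() returns the raw string value of BB_GENERATE_MIRROR_TARBALLS, and any non-empty string, including "0", is truthy in Python. A minimal standalone sketch of the difference; the boolean() helper below is only a stand-in approximating the OE-Core function, not its actual implementation:

raw = "0"                     # the kind of string d.getVar('BB_GENERATE_MIRROR_TARBALLS') can return
print(bool(raw))              # True - any non-empty string is truthy, so "0" would count as enabled

def boolean(value):
    # Stand-in for oe.types.boolean(): map common spellings to real booleans
    if value.lower() in ("y", "yes", "1", "true", "t"):
        return True
    if value.lower() in ("n", "no", "0", "false", "f"):
        return False
    raise ValueError("Invalid boolean value '%s'" % value)

print(boolean(raw))           # False - a "0" setting is now honoured as disabled
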
diff --git a/poky/meta/classes/buildhistory.bbclass b/poky/meta/classes/buildhistory.bbclass
index fd53e92402..ce3abaa69d 100644
--- a/poky/meta/classes/buildhistory.bbclass
+++ b/poky/meta/classes/buildhistory.bbclass
@@ -47,18 +47,25 @@ BUILDHISTORY_PUSH_REPO ?= ""
BUILDHISTORY_TAG ?= "build"
BUILDHISTORY_PATH_PREFIX_STRIP ?= ""
-SSTATEPOSTINSTFUNCS:append = " buildhistory_emit_pkghistory"
-# We want to avoid influencing the signatures of sstate tasks - first the function itself:
-sstate_install[vardepsexclude] += "buildhistory_emit_pkghistory"
-# then the value added to SSTATEPOSTINSTFUNCS:
-SSTATEPOSTINSTFUNCS[vardepvalueexclude] .= "| buildhistory_emit_pkghistory"
+# We want to avoid influencing the signatures of the task so use vardepsexclude
+do_populate_sysroot[postfuncs] += "buildhistory_emit_sysroot"
+do_populate_sysroot_setscene[postfuncs] += "buildhistory_emit_sysroot"
+do_populate_sysroot[vardepsexclude] += "buildhistory_emit_sysroot"
+
+do_package[postfuncs] += "buildhistory_list_pkg_files"
+do_package_setscene[postfuncs] += "buildhistory_list_pkg_files"
+do_package[vardepsexclude] += "buildhistory_list_pkg_files"
+
+do_packagedata[postfuncs] += "buildhistory_emit_pkghistory"
+do_packagedata_setscene[postfuncs] += "buildhistory_emit_pkghistory"
+do_packagedata[vardepsexclude] += "buildhistory_emit_pkghistory"
# Similarly for our function that gets the output signatures
SSTATEPOSTUNPACKFUNCS:append = " buildhistory_emit_outputsigs"
sstate_installpkgdir[vardepsexclude] += "buildhistory_emit_outputsigs"
SSTATEPOSTUNPACKFUNCS[vardepvalueexclude] .= "| buildhistory_emit_outputsigs"
-# All items excepts those listed here will be removed from a recipe's
+# All items except those listed here will be removed from a recipe's
# build history directory by buildhistory_emit_pkghistory(). This is
# necessary because some of these items (package directories, files that
# we no longer emit) might be obsolete.
@@ -91,25 +98,14 @@ buildhistory_emit_sysroot() {
# Write out metadata about this package for comparison when writing future packages
#
python buildhistory_emit_pkghistory() {
- if d.getVar('BB_CURRENTTASK') in ['populate_sysroot', 'populate_sysroot_setscene']:
- bb.build.exec_func("buildhistory_emit_sysroot", d)
- return 0
-
- if not "package" in (d.getVar('BUILDHISTORY_FEATURES') or "").split():
- return 0
-
- if d.getVar('BB_CURRENTTASK') in ['package', 'package_setscene']:
- # Create files-in-<package-name>.txt files containing a list of files of each recipe's package
- bb.build.exec_func("buildhistory_list_pkg_files", d)
- return 0
-
- if not d.getVar('BB_CURRENTTASK') in ['packagedata', 'packagedata_setscene']:
- return 0
-
import re
import json
import shlex
import errno
+ import shutil
+
+ if not "package" in (d.getVar('BUILDHISTORY_FEATURES') or "").split():
+ return 0
pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE')
oldpkghistdir = d.getVar('BUILDHISTORY_OLD_DIR_PACKAGE')
@@ -153,7 +149,7 @@ python buildhistory_emit_pkghistory() {
# Variables that need to be written to their own separate file
self.filevars = dict.fromkeys(['pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm'])
- # Should check PACKAGES here to see if anything removed
+ # Should check PACKAGES here to see if anything was removed
def readPackageInfo(pkg, histfile):
pkginfo = PackageInfo(pkg)
@@ -223,6 +219,20 @@ python buildhistory_emit_pkghistory() {
items.sort()
return ' '.join(items)
+ def preservebuildhistoryfiles(pkg, preserve):
+ if os.path.exists(os.path.join(oldpkghistdir, pkg)):
+ listofobjs = os.listdir(os.path.join(oldpkghistdir, pkg))
+ for obj in listofobjs:
+ if obj not in preserve:
+ continue
+ try:
+ bb.utils.mkdirhier(os.path.join(pkghistdir, pkg))
+ shutil.copyfile(os.path.join(oldpkghistdir, pkg, obj), os.path.join(pkghistdir, pkg, obj))
+ except IOError as e:
+ bb.note("Unable to copy file. %s" % e)
+ except EnvironmentError as e:
+ bb.note("Unable to copy file. %s" % e)
+
pn = d.getVar('PN')
pe = d.getVar('PE') or "0"
pv = d.getVar('PV')
@@ -250,6 +260,14 @@ python buildhistory_emit_pkghistory() {
if not os.path.exists(pkghistdir):
bb.utils.mkdirhier(pkghistdir)
else:
+ # We need to make sure that all files kept in
+ # buildhistory/old are restored successfully
+            # otherwise the next block of code won't have files to
+ # check and purge
+ if d.getVar("BUILDHISTORY_RESET"):
+ for pkg in packagelist:
+ preservebuildhistoryfiles(pkg, preserve)
+
# Remove files for packages that no longer exist
for item in os.listdir(pkghistdir):
if item not in preserve:
@@ -535,7 +553,7 @@ buildhistory_get_installed() {
grep -v kernel-module $1/depends-nokernel-nolibc-noupdate.dot > $1/depends-nokernel-nolibc-noupdate-nomodules.dot
fi
- # add complementary package information
+ # Add complementary package information
if [ -e ${WORKDIR}/complementary_pkgs.txt ]; then
cp ${WORKDIR}/complementary_pkgs.txt $1
fi
@@ -573,7 +591,7 @@ buildhistory_get_sdk_installed_target() {
buildhistory_list_files() {
# List the files in the specified directory, but exclude date/time etc.
- # This is somewhat messy, but handles where the size is not printed for device files under pseudo
+ # This is somewhat messy, but handles cases where the size is not printed for device files under pseudo
( cd $1
find_cmd='find . ! -path . -printf "%M %-10u %-10g %10s %p -> %l\n"'
if [ "$3" = "fakeroot" ] ; then
@@ -587,7 +605,7 @@ buildhistory_list_files_no_owners() {
# List the files in the specified directory, but exclude date/time etc.
# Also don't output the ownership data, but instead output just - - so
# that the same parsing code as for _list_files works.
- # This is somewhat messy, but handles where the size is not printed for device files under pseudo
+ # This is somewhat messy, but handles cases where the size is not printed for device files under pseudo
( cd $1
find_cmd='find . ! -path . -printf "%M - - %10s %p -> %l\n"'
if [ "$3" = "fakeroot" ] ; then
@@ -598,16 +616,17 @@ buildhistory_list_files_no_owners() {
}
buildhistory_list_pkg_files() {
+ if [ "${@bb.utils.contains('BUILDHISTORY_FEATURES', 'package', '1', '0', d)}" = "0" ] ; then
+ return
+ fi
+
# Create individual files-in-package for each recipe's package
- for pkgdir in $(find ${PKGDEST}/* -maxdepth 0 -type d); do
+ pkgdirlist=$(find ${PKGDEST}/* -maxdepth 0 -type d)
+ for pkgdir in $pkgdirlist; do
pkgname=$(basename $pkgdir)
outfolder="${BUILDHISTORY_DIR_PACKAGE}/$pkgname"
outfile="$outfolder/files-in-package.txt"
- # Make sure the output folder exists so we can create the file
- if [ ! -d $outfolder ] ; then
- bbdebug 2 "Folder $outfolder does not exist, file $outfile not created"
- continue
- fi
+ mkdir -p $outfolder
buildhistory_list_files $pkgdir $outfile fakeroot
done
}
@@ -855,7 +874,7 @@ END
CMDLINE="${@buildhistory_get_cmdline(d)}"
if [ "$repostatus" != "" ] ; then
git add -A .
- # porcelain output looks like "?? packages/foo/bar"
+ # Porcelain output looks like "?? packages/foo/bar"
# Ensure we commit metadata-revs with the first commit
buildhistory_single_commit "$CMDLINE" "$HOSTNAME" dummy
git gc --auto --quiet
@@ -990,7 +1009,7 @@ def write_latest_ptest_result(d, histdir):
output_ptest = os.path.join(histdir, 'ptest')
if os.path.exists(input_ptest):
try:
- # Lock it avoid race issue
+ # Lock it to avoid race issue
lock = bb.utils.lockfile(output_ptest + "/ptest.lock")
bb.utils.mkdirhier(output_ptest)
oe.path.copytree(input_ptest, output_ptest)
diff --git a/poky/meta/classes/create-spdx-2.2.bbclass b/poky/meta/classes/create-spdx-2.2.bbclass
index a2d688ec47..795ba1a882 100644
--- a/poky/meta/classes/create-spdx-2.2.bbclass
+++ b/poky/meta/classes/create-spdx-2.2.bbclass
@@ -8,6 +8,14 @@ inherit spdx-common
SPDX_VERSION = "2.2"
+SPDX_ORG ??= "OpenEmbedded ()"
+SPDX_SUPPLIER ??= "Organization: ${SPDX_ORG}"
+SPDX_SUPPLIER[doc] = "The SPDX PackageSupplier field for SPDX packages created from \
+    this recipe. For SPDX documents created using this class during the build, this \
+ is the contact information for the person or organization who is doing the \
+ build."
+
+
def get_namespace(d, name):
import uuid
namespace_uuid = uuid.uuid5(uuid.NAMESPACE_DNS, d.getVar("SPDX_UUID_NAMESPACE"))
@@ -30,11 +38,16 @@ def recipe_spdx_is_native(d, recipe):
a.annotator == "Tool: %s - %s" % (d.getVar("SPDX_TOOL_NAME"), d.getVar("SPDX_TOOL_VERSION")) and
a.comment == "isNative" for a in recipe.annotations)
-def convert_license_to_spdx(lic, document, d, existing={}):
+def get_json_indent(d):
+ if d.getVar("SPDX_PRETTY") == "1":
+ return 2
+ return None
+
+
+def convert_license_to_spdx(lic, license_data, document, d, existing={}):
from pathlib import Path
import oe.spdx
- license_data = d.getVar("SPDX_LICENSE_DATA")
extracted = {}
def add_extracted_license(ident, name):
@@ -105,6 +118,7 @@ def convert_license_to_spdx(lic, document, d, existing={}):
def add_package_files(d, doc, spdx_pkg, topdir, get_spdxid, get_types, *, archive=None, ignore_dirs=[], ignore_top_level_dirs=[]):
from pathlib import Path
import oe.spdx
+ import oe.spdx_common
import hashlib
source_date_epoch = d.getVar("SOURCE_DATE_EPOCH")
@@ -157,7 +171,7 @@ def add_package_files(d, doc, spdx_pkg, topdir, get_spdxid, get_types, *, archiv
))
if "SOURCE" in spdx_file.fileTypes:
- extracted_lics = extract_licenses(filepath)
+ extracted_lics = oe.spdx_common.extract_licenses(filepath)
if extracted_lics:
spdx_file.licenseInfoInFiles = extracted_lics
@@ -215,7 +229,8 @@ def add_package_sources_from_debug(d, package_doc, spdx_package, package, packag
debugsrc_path = search / debugsrc.replace('/usr/src/kernel/', '')
else:
debugsrc_path = search / debugsrc.lstrip("/")
- if not debugsrc_path.exists():
+ # We can only hash files below, skip directories, links, etc.
+ if not os.path.isfile(debugsrc_path):
continue
file_sha256 = bb.utils.sha256_file(debugsrc_path)
@@ -248,14 +263,15 @@ def collect_dep_recipes(d, doc, spdx_recipe):
from pathlib import Path
import oe.sbom
import oe.spdx
+ import oe.spdx_common
deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
- package_archs = d.getVar("SSTATE_ARCHS").split()
+ package_archs = d.getVar("SPDX_MULTILIB_SSTATE_ARCHS").split()
package_archs.reverse()
dep_recipes = []
- deps = get_spdx_deps(d)
+ deps = oe.spdx_common.get_spdx_deps(d)
for dep_pn, dep_hashfn, in_taskhash in deps:
# If this dependency is not calculated in the taskhash skip it.
@@ -295,7 +311,7 @@ def collect_dep_recipes(d, doc, spdx_recipe):
return dep_recipes
-collect_dep_recipes[vardepsexclude] = "SSTATE_ARCHS"
+collect_dep_recipes[vardepsexclude] = "SPDX_MULTILIB_SSTATE_ARCHS"
def collect_dep_sources(d, dep_recipes):
import oe.sbom
@@ -368,16 +384,24 @@ def add_download_packages(d, doc, recipe):
# but this should be sufficient for now
doc.add_relationship(package, "BUILD_DEPENDENCY_OF", recipe)
+def get_license_list_version(license_data, d):
+ # Newer versions of the SPDX license list are SemVer ("MAJOR.MINOR.MICRO"),
+ # but SPDX 2 only uses "MAJOR.MINOR".
+ return ".".join(license_data["licenseListVersion"].split(".")[:2])
+
python do_create_spdx() {
from datetime import datetime, timezone
import oe.sbom
import oe.spdx
+ import oe.spdx_common
import uuid
from pathlib import Path
from contextlib import contextmanager
import oe.cve_check
+ license_data = oe.spdx_common.load_spdx_license_data(d)
+
@contextmanager
def optional_tarfile(name, guard, mode="w"):
import tarfile
@@ -409,7 +433,7 @@ python do_create_spdx() {
doc.documentNamespace = get_namespace(d, doc.name)
doc.creationInfo.created = creation_time
doc.creationInfo.comment = "This document was created by analyzing recipe files during the build."
- doc.creationInfo.licenseListVersion = d.getVar("SPDX_LICENSE_DATA")["licenseListVersion"]
+ doc.creationInfo.licenseListVersion = get_license_list_version(license_data, d)
doc.creationInfo.creators.append("Tool: OpenEmbedded Core create-spdx.bbclass")
doc.creationInfo.creators.append("Organization: %s" % d.getVar("SPDX_ORG"))
doc.creationInfo.creators.append("Person: N/A ()")
@@ -428,7 +452,7 @@ python do_create_spdx() {
license = d.getVar("LICENSE")
if license:
- recipe.licenseDeclared = convert_license_to_spdx(license, doc, d)
+ recipe.licenseDeclared = convert_license_to_spdx(license, license_data, doc, d)
summary = d.getVar("SUMMARY")
if summary:
@@ -465,10 +489,10 @@ python do_create_spdx() {
add_download_packages(d, doc, recipe)
- if process_sources(d) and include_sources:
+ if oe.spdx_common.process_sources(d) and include_sources:
recipe_archive = deploy_dir_spdx / "recipes" / (doc.name + ".tar.zst")
with optional_tarfile(recipe_archive, archive_sources) as archive:
- spdx_get_src(d)
+ oe.spdx_common.get_patched_src(d)
add_package_files(
d,
@@ -513,7 +537,7 @@ python do_create_spdx() {
package_doc.documentNamespace = get_namespace(d, package_doc.name)
package_doc.creationInfo.created = creation_time
package_doc.creationInfo.comment = "This document was created by analyzing packages created during the build."
- package_doc.creationInfo.licenseListVersion = d.getVar("SPDX_LICENSE_DATA")["licenseListVersion"]
+ package_doc.creationInfo.licenseListVersion = get_license_list_version(license_data, d)
package_doc.creationInfo.creators.append("Tool: OpenEmbedded Core create-spdx.bbclass")
package_doc.creationInfo.creators.append("Organization: %s" % d.getVar("SPDX_ORG"))
package_doc.creationInfo.creators.append("Person: N/A ()")
@@ -526,7 +550,7 @@ python do_create_spdx() {
spdx_package.SPDXID = oe.sbom.get_package_spdxid(pkg_name)
spdx_package.name = pkg_name
spdx_package.versionInfo = d.getVar("PV")
- spdx_package.licenseDeclared = convert_license_to_spdx(package_license, package_doc, d, found_licenses)
+ spdx_package.licenseDeclared = convert_license_to_spdx(package_license, license_data, package_doc, d, found_licenses)
spdx_package.supplier = d.getVar("SPDX_SUPPLIER")
package_doc.packages.append(spdx_package)
@@ -575,6 +599,7 @@ python do_create_runtime_spdx() {
from datetime import datetime, timezone
import oe.sbom
import oe.spdx
+ import oe.spdx_common
import oe.packagedata
from pathlib import Path
@@ -584,9 +609,11 @@ python do_create_runtime_spdx() {
creation_time = datetime.now(tz=timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
- providers = collect_package_providers(d)
+ license_data = oe.spdx_common.load_spdx_license_data(d)
+
+ providers = oe.spdx_common.collect_package_providers(d)
pkg_arch = d.getVar("SSTATE_PKGARCH")
- package_archs = d.getVar("SSTATE_ARCHS").split()
+ package_archs = d.getVar("SPDX_MULTILIB_SSTATE_ARCHS").split()
package_archs.reverse()
if not is_native:
@@ -620,7 +647,7 @@ python do_create_runtime_spdx() {
runtime_doc.documentNamespace = get_namespace(localdata, runtime_doc.name)
runtime_doc.creationInfo.created = creation_time
runtime_doc.creationInfo.comment = "This document was created by analyzing package runtime dependencies."
- runtime_doc.creationInfo.licenseListVersion = d.getVar("SPDX_LICENSE_DATA")["licenseListVersion"]
+ runtime_doc.creationInfo.licenseListVersion = get_license_list_version(license_data, d)
runtime_doc.creationInfo.creators.append("Tool: OpenEmbedded Core create-spdx.bbclass")
runtime_doc.creationInfo.creators.append("Organization: %s" % d.getVar("SPDX_ORG"))
runtime_doc.creationInfo.creators.append("Person: N/A ()")
@@ -692,7 +719,7 @@ python do_create_runtime_spdx() {
oe.sbom.write_doc(d, runtime_doc, pkg_arch, "runtime", spdx_deploy, indent=get_json_indent(d))
}
-do_create_runtime_spdx[vardepsexclude] += "OVERRIDES SSTATE_ARCHS"
+do_create_runtime_spdx[vardepsexclude] += "OVERRIDES SPDX_MULTILIB_SSTATE_ARCHS"
addtask do_create_runtime_spdx after do_create_spdx before do_build do_rm_work
SSTATETASKS += "do_create_runtime_spdx"
@@ -765,6 +792,7 @@ def combine_spdx(d, rootfs_name, rootfs_deploydir, rootfs_spdxid, packages, spdx
import os
import oe.spdx
import oe.sbom
+ import oe.spdx_common
import io
import json
from datetime import timezone, datetime
@@ -772,8 +800,10 @@ def combine_spdx(d, rootfs_name, rootfs_deploydir, rootfs_spdxid, packages, spdx
import tarfile
import bb.compress.zstd
- providers = collect_package_providers(d)
- package_archs = d.getVar("SSTATE_ARCHS").split()
+ license_data = oe.spdx_common.load_spdx_license_data(d)
+
+ providers = oe.spdx_common.collect_package_providers(d)
+ package_archs = d.getVar("SPDX_MULTILIB_SSTATE_ARCHS").split()
package_archs.reverse()
creation_time = datetime.now(tz=timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
@@ -785,7 +815,7 @@ def combine_spdx(d, rootfs_name, rootfs_deploydir, rootfs_spdxid, packages, spdx
doc.documentNamespace = get_namespace(d, doc.name)
doc.creationInfo.created = creation_time
doc.creationInfo.comment = "This document was created by analyzing the source of the Yocto recipe during the build."
- doc.creationInfo.licenseListVersion = d.getVar("SPDX_LICENSE_DATA")["licenseListVersion"]
+ doc.creationInfo.licenseListVersion = get_license_list_version(license_data, d)
doc.creationInfo.creators.append("Tool: OpenEmbedded Core create-spdx.bbclass")
doc.creationInfo.creators.append("Organization: %s" % d.getVar("SPDX_ORG"))
doc.creationInfo.creators.append("Person: N/A ()")
@@ -925,4 +955,4 @@ def combine_spdx(d, rootfs_name, rootfs_deploydir, rootfs_spdxid, packages, spdx
tar.addfile(info, fileobj=index_str)
-combine_spdx[vardepsexclude] += "BB_NUMBER_THREADS SSTATE_ARCHS"
+combine_spdx[vardepsexclude] += "BB_NUMBER_THREADS SPDX_MULTILIB_SSTATE_ARCHS"
diff --git a/poky/meta/classes/create-spdx-3.0.bbclass b/poky/meta/classes/create-spdx-3.0.bbclass
new file mode 100644
index 0000000000..c1241e5856
--- /dev/null
+++ b/poky/meta/classes/create-spdx-3.0.bbclass
@@ -0,0 +1,192 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+inherit spdx-common
+
+SPDX_VERSION = "3.0.0"
+
+# The list of SPDX profiles generated documents will conform to
+SPDX_PROFILES ?= "core build software simpleLicensing security"
+
+SPDX_INCLUDE_BUILD_VARIABLES ??= "0"
+SPDX_INCLUDE_BUILD_VARIABLES[doc] = "If set to '1', the bitbake variables for a \
+ recipe will be included in the Build object. This will most likely result \
+ in non-reproducible SPDX output"
+
+SPDX_INCLUDE_BITBAKE_PARENT_BUILD ??= "0"
+SPDX_INCLUDE_BITBAKE_PARENT_BUILD[doc] = "Report the parent invocation of bitbake \
+ for each Build object. This allows you to know who invoked bitbake to perform \
+ a build, but will result in non-reproducible SPDX output."
+
+SPDX_PACKAGE_ADDITIONAL_PURPOSE ?= ""
+SPDX_PACKAGE_ADDITIONAL_PURPOSE[doc] = "The list of additional purposes to assign to \
+ the generated packages for a recipe. The primary purpose is always `install`. \
+    Package overrides are allowed to override the additional purposes for \
+ individual packages."
+
+SPDX_IMAGE_PURPOSE ?= "filesystemImage"
+SPDX_IMAGE_PURPOSE[doc] = "The list of purposes to assign to the generated images. \
+ The first listed item will be the Primary Purpose and all additional items will \
+ be added as additional purposes"
+
+SPDX_SDK_PURPOSE ?= "install"
+SPDX_SDK_PURPOSE[doc] = "The list of purposes to assign to the generated SDK installer. \
+ The first listed item will be the Primary Purpose and all additional items will \
+ be added as additional purposes"
+
+SPDX_INCLUDE_VEX ??= "current"
+SPDX_INCLUDE_VEX[doc] = "Controls what VEX information is in the output. Set to \
+ 'none' to disable all VEX data. Set to 'current' to only include VEX data \
+ for vulnerabilities not already fixed in the upstream source code \
+ (recommended). Set to 'all' to get all known historical vulnerabilities, \
+ including those already fixed upstream (warning: This can be large and \
+ slow)."
+
+SPDX_INCLUDE_TIMESTAMPS ?= "0"
+SPDX_INCLUDE_TIMESTAMPS[doc] = "Include time stamps in SPDX output. This is \
+ useful if you want to know when artifacts were produced and when builds \
+ occurred, but will result in non-reproducible SPDX output"
+
+SPDX_IMPORTS ??= ""
+SPDX_IMPORTS[doc] = "SPDX_IMPORTS is the base variable that describes how to \
+    reference external SPDX IDs. Each import is defined as a key in this \
+    variable; additional variables that use the key as a suffix are looked up \
+    for more information about the import. Each key can have the following variables: \
+    SPDX_IMPORTS_<key>_spdxid: The fully qualified SPDX ID of the object \
+    SPDX_IMPORTS_<key>_uri: The URI where the SPDX Document that contains \
+        the external object can be found. Optional but recommended \
+    SPDX_IMPORTS_<key>_hash_<hash>: The checksum of the SPDX Document that \
+        contains the External ID. <hash> must be one of the valid SPDX hashing \
+        algorithms, as described by the HashAlgorithm vocabulary in the \
+        SPDX 3 spec. Optional but recommended"
+
+# Agents
+# Bitbake variables can be used to describe an SPDX Agent that may be used
+# during the build. An Agent is specified using a set of variables which all
+# start with some common base name:
+#
+# <BASE>_name: The name of the Agent (required)
+# <BASE>_type: The type of Agent. Must be one of "person", "organization",
+# "software", or "agent" (the default if not specified)
+# <BASE>_comment: The comment for the Agent (optional)
+# <BASE>_id_<ID>: An External Identifier for the Agent. <ID> must be a valid
+# ExternalIdentifierType from the SPDX 3 spec. Commonly, an E-mail address
+# can be specified with <BASE>_id_email
+#
+# Alternatively, an Agent can be an external reference by referencing a key
+# in SPDX_IMPORTS like so:
+#
+# <BASE>_import = "<key>"
+#
+# Finally, the same agent described by another set of agent variables can be
+# referenced by specifying the basename of the variable that should be
+# referenced:
+#
+# SPDX_PACKAGE_SUPPLIER_ref = "SPDX_AUTHORS_openembedded"
+
+SPDX_AUTHORS ??= "openembedded"
+SPDX_AUTHORS[doc] = "A space separated list of the document authors. Each item \
+ is used to name a base variable like SPDX_AUTHORS_<AUTHOR> that \
+ describes the author."
+
+SPDX_AUTHORS_openembedded_name = "OpenEmbedded"
+SPDX_AUTHORS_openembedded_type = "organization"
+
+SPDX_BUILD_HOST[doc] = "The base variable name to describe the build host on \
+ which a build is running. Must be an SPDX_IMPORTS key. Requires \
+ SPDX_INCLUDE_BITBAKE_PARENT_BUILD. NOTE: Setting this will result in \
+ non-reproducible SPDX output"
+
+SPDX_INVOKED_BY[doc] = "The base variable name to describe the Agent that \
+ invoked the build, which builds will link to if specified. Requires \
+ SPDX_INCLUDE_BITBAKE_PARENT_BUILD. NOTE: Setting this will likely result in \
+ non-reproducible SPDX output"
+
+SPDX_ON_BEHALF_OF[doc] = "The base variable name to describe the Agent on whose \
+ behalf the invoking Agent (SPDX_INVOKED_BY) is running the build. Requires \
+ SPDX_INCLUDE_BITBAKE_PARENT_BUILD. NOTE: Setting this will likely result in \
+ non-reproducible SPDX output"
+
+SPDX_PACKAGE_SUPPLIER[doc] = "The base variable name to describe the Agent who \
+ is supplying artifacts produced by the build"
+
+
+IMAGE_CLASSES:append = " create-spdx-image-3.0"
+SDK_CLASSES += "create-spdx-sdk-3.0"
+
+oe.spdx30_tasks.set_timestamp_now[vardepsexclude] = "SPDX_INCLUDE_TIMESTAMPS"
+oe.spdx30_tasks.get_package_sources_from_debug[vardepsexclude] += "STAGING_KERNEL_DIR"
+oe.spdx30_tasks.collect_dep_objsets[vardepsexclude] = "SPDX_MULTILIB_SSTATE_ARCHS"
+
+
+
+python do_create_spdx() {
+ import oe.spdx30_tasks
+ oe.spdx30_tasks.create_spdx(d)
+}
+do_create_spdx[vardepsexclude] += "BB_NUMBER_THREADS"
+addtask do_create_spdx after \
+ do_collect_spdx_deps \
+ do_deploy_source_date_epoch \
+ do_populate_sysroot do_package do_packagedata \
+ ${create_spdx_source_deps(d)} \
+ before do_populate_sdk do_populate_sdk_ext do_build do_rm_work
+
+def create_spdx_source_deps(d):
+ deps = []
+ if d.getVar("SPDX_INCLUDE_SOURCES") == "1":
+ deps.extend([
+ # do_unpack is a hack for now; we only need it to get the
+ # dependencies do_unpack already has so we can extract the source
+ # ourselves
+ "do_unpack",
+ # For kernel source code
+ "do_shared_workdir",
+ ])
+ return " ".join(deps)
+
+SSTATETASKS += "do_create_spdx"
+do_create_spdx[sstate-inputdirs] = "${SPDXDEPLOY}"
+do_create_spdx[sstate-outputdirs] = "${DEPLOY_DIR_SPDX}"
+
+python do_create_spdx_setscene () {
+ sstate_setscene(d)
+}
+addtask do_create_spdx_setscene
+
+do_create_spdx[dirs] = "${SPDXWORK}"
+do_create_spdx[cleandirs] = "${SPDXDEPLOY} ${SPDXWORK}"
+do_create_spdx[depends] += "${PATCHDEPENDENCY}"
+
+python do_create_package_spdx() {
+ import oe.spdx30_tasks
+ oe.spdx30_tasks.create_package_spdx(d)
+}
+do_create_package_spdx[vardepsexclude] += "OVERRIDES SPDX_MULTILIB_SSTATE_ARCHS"
+
+addtask do_create_package_spdx after do_create_spdx before do_build do_rm_work
+SSTATETASKS += "do_create_package_spdx"
+do_create_package_spdx[sstate-inputdirs] = "${SPDXRUNTIMEDEPLOY}"
+do_create_package_spdx[sstate-outputdirs] = "${DEPLOY_DIR_SPDX}"
+
+python do_create_package_spdx_setscene () {
+ sstate_setscene(d)
+}
+addtask do_create_package_spdx_setscene
+
+do_create_package_spdx[dirs] = "${SPDXRUNTIMEDEPLOY}"
+do_create_package_spdx[cleandirs] = "${SPDXRUNTIMEDEPLOY}"
+do_create_package_spdx[rdeptask] = "do_create_spdx"
+
+python spdx30_build_started_handler () {
+ import oe.spdx30_tasks
+ d = e.data.createCopy()
+ oe.spdx30_tasks.write_bitbake_spdx(d)
+}
+
+addhandler spdx30_build_started_handler
+spdx30_build_started_handler[eventmask] = "bb.event.BuildStarted"
+
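
The variables documented in the new class above are normally set from local.conf or a distro configuration. A hypothetical fragment showing the documented naming patterns; the import key "buildhost", the URLs and the supplier details are invented for illustration, only the variable naming scheme comes from the class comments:

INHERIT += "create-spdx"

SPDX_INCLUDE_VEX = "current"
SPDX_INCLUDE_BITBAKE_PARENT_BUILD = "1"

# Declare an external SPDX object and use it as the build host reference
SPDX_IMPORTS += "buildhost"
SPDX_IMPORTS_buildhost_spdxid = "http://spdx.example.com/agents/host-01"
SPDX_IMPORTS_buildhost_uri = "http://spdx.example.com/host-01.spdx.json"
SPDX_BUILD_HOST = "buildhost"

# Describe the supplier Agent using the <BASE>_* pattern from the comments above
SPDX_PACKAGE_SUPPLIER_name = "Example Corp"
SPDX_PACKAGE_SUPPLIER_type = "organization"
SPDX_PACKAGE_SUPPLIER_id_email = "sbom@example.com"
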
diff --git a/poky/meta/classes/create-spdx.bbclass b/poky/meta/classes/create-spdx.bbclass
index 19c6c0ff0b..b604973ae0 100644
--- a/poky/meta/classes/create-spdx.bbclass
+++ b/poky/meta/classes/create-spdx.bbclass
@@ -5,4 +5,4 @@
#
# Include this class when you don't care what version of SPDX you get; it will
# be updated to the latest stable version that is supported
-inherit create-spdx-2.2
+inherit create-spdx-3.0
diff --git a/poky/meta/classes/cve-check.bbclass b/poky/meta/classes/cve-check.bbclass
index 56ba8bceef..0c92b87f52 100644
--- a/poky/meta/classes/cve-check.bbclass
+++ b/poky/meta/classes/cve-check.bbclass
@@ -31,25 +31,22 @@
CVE_PRODUCT ??= "${BPN}"
CVE_VERSION ??= "${PV}"
-CVE_CHECK_DB_DIR ?= "${DL_DIR}/CVE_CHECK"
-CVE_CHECK_DB_FILE ?= "${CVE_CHECK_DB_DIR}/nvdcve_2-1.db"
+CVE_CHECK_DB_FILENAME ?= "nvdcve_2-2.db"
+CVE_CHECK_DB_DIR ?= "${STAGING_DIR}/CVE_CHECK"
+CVE_CHECK_DB_FILE ?= "${CVE_CHECK_DB_DIR}/${CVE_CHECK_DB_FILENAME}"
CVE_CHECK_DB_FILE_LOCK ?= "${CVE_CHECK_DB_FILE}.lock"
-CVE_CHECK_LOG ?= "${T}/cve.log"
-CVE_CHECK_TMP_FILE ?= "${TMPDIR}/cve_check"
CVE_CHECK_SUMMARY_DIR ?= "${LOG_DIR}/cve"
CVE_CHECK_SUMMARY_FILE_NAME ?= "cve-summary"
-CVE_CHECK_SUMMARY_FILE ?= "${CVE_CHECK_SUMMARY_DIR}/${CVE_CHECK_SUMMARY_FILE_NAME}"
CVE_CHECK_SUMMARY_FILE_NAME_JSON = "cve-summary.json"
CVE_CHECK_SUMMARY_INDEX_PATH = "${CVE_CHECK_SUMMARY_DIR}/cve-summary-index.txt"
CVE_CHECK_LOG_JSON ?= "${T}/cve.json"
CVE_CHECK_DIR ??= "${DEPLOY_DIR}/cve"
-CVE_CHECK_RECIPE_FILE ?= "${CVE_CHECK_DIR}/${PN}"
CVE_CHECK_RECIPE_FILE_JSON ?= "${CVE_CHECK_DIR}/${PN}_cve.json"
-CVE_CHECK_MANIFEST ?= "${IMGDEPLOYDIR}/${IMAGE_NAME}.cve"
-CVE_CHECK_MANIFEST_JSON ?= "${IMGDEPLOYDIR}/${IMAGE_NAME}.json"
+CVE_CHECK_MANIFEST_JSON_SUFFIX ?= "json"
+CVE_CHECK_MANIFEST_JSON ?= "${IMGDEPLOYDIR}/${IMAGE_NAME}.${CVE_CHECK_MANIFEST_JSON_SUFFIX}"
CVE_CHECK_COPY_FILES ??= "1"
CVE_CHECK_CREATE_MANIFEST ??= "1"
@@ -58,9 +55,6 @@ CVE_CHECK_REPORT_PATCHED ??= "1"
CVE_CHECK_SHOW_WARNINGS ??= "1"
-# Provide text output
-CVE_CHECK_FORMAT_TEXT ??= "1"
-
# Provide JSON output
CVE_CHECK_FORMAT_JSON ??= "1"
@@ -150,20 +144,11 @@ python cve_save_summary_handler () {
import datetime
from oe.cve_check import update_symlinks
- cve_tmp_file = d.getVar("CVE_CHECK_TMP_FILE")
-
cve_summary_name = d.getVar("CVE_CHECK_SUMMARY_FILE_NAME")
cvelogpath = d.getVar("CVE_CHECK_SUMMARY_DIR")
bb.utils.mkdirhier(cvelogpath)
timestamp = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
- cve_summary_file = os.path.join(cvelogpath, "%s-%s.txt" % (cve_summary_name, timestamp))
-
- if os.path.exists(cve_tmp_file):
- shutil.copyfile(cve_tmp_file, cve_summary_file)
- cvefile_link = os.path.join(cvelogpath, cve_summary_name)
- update_symlinks(cve_summary_file, cvefile_link)
- bb.plain("Complete CVE report summary created at: %s" % cvefile_link)
if d.getVar("CVE_CHECK_FORMAT_JSON") == "1":
json_summary_link_name = os.path.join(cvelogpath, d.getVar("CVE_CHECK_SUMMARY_FILE_NAME_JSON"))
@@ -187,24 +172,23 @@ python do_cve_check () {
patched_cves = get_patched_cves(d)
except FileNotFoundError:
bb.fatal("Failure in searching patches")
- ignored, patched, unpatched, status = check_cves(d, patched_cves)
- if patched or unpatched or (d.getVar("CVE_CHECK_COVERAGE") == "1" and status):
- cve_data = get_cve_info(d, patched + unpatched + ignored)
- cve_write_data(d, patched, unpatched, ignored, cve_data, status)
+ cve_data, status = check_cves(d, patched_cves)
+ if len(cve_data) or (d.getVar("CVE_CHECK_COVERAGE") == "1" and status):
+ get_cve_info(d, cve_data)
+ cve_write_data(d, cve_data, status)
else:
bb.note("No CVE database found, skipping CVE check")
}
addtask cve_check before do_build
-do_cve_check[depends] = "cve-update-nvd2-native:do_fetch"
+do_cve_check[depends] = "cve-update-nvd2-native:do_unpack"
do_cve_check[nostamp] = "1"
python cve_check_cleanup () {
"""
Delete the file used to gather all the CVE information.
"""
- bb.utils.remove(e.data.getVar("CVE_CHECK_TMP_FILE"))
bb.utils.remove(e.data.getVar("CVE_CHECK_SUMMARY_INDEX_PATH"))
}
@@ -222,9 +206,6 @@ python cve_check_write_rootfs_manifest () {
from oe.cve_check import cve_check_merge_jsons, update_symlinks
if d.getVar("CVE_CHECK_COPY_FILES") == "1":
- deploy_file = d.getVar("CVE_CHECK_RECIPE_FILE")
- if os.path.exists(deploy_file):
- bb.utils.remove(deploy_file)
deploy_file_json = d.getVar("CVE_CHECK_RECIPE_FILE_JSON")
if os.path.exists(deploy_file_json):
bb.utils.remove(deploy_file_json)
@@ -244,19 +225,13 @@ python cve_check_write_rootfs_manifest () {
json_data = {"version":"1", "package": []}
text_data = ""
enable_json = d.getVar("CVE_CHECK_FORMAT_JSON") == "1"
- enable_text = d.getVar("CVE_CHECK_FORMAT_TEXT") == "1"
save_pn = d.getVar("PN")
for pkg in recipies:
- # To be able to use the CVE_CHECK_RECIPE_FILE variable we have to evaluate
+ # To be able to use the CVE_CHECK_RECIPE_FILE_JSON variable we have to evaluate
# it with the different PN names set each time.
d.setVar("PN", pkg)
- if enable_text:
- pkgfilepath = d.getVar("CVE_CHECK_RECIPE_FILE")
- if os.path.exists(pkgfilepath):
- with open(pkgfilepath) as pfile:
- text_data += pfile.read()
if enable_json:
pkgfilepath = d.getVar("CVE_CHECK_RECIPE_FILE_JSON")
@@ -267,18 +242,9 @@ python cve_check_write_rootfs_manifest () {
d.setVar("PN", save_pn)
- if enable_text:
- link_path = os.path.join(deploy_dir, "%s.cve" % link_name)
- manifest_name = d.getVar("CVE_CHECK_MANIFEST")
-
- with open(manifest_name, "w") as f:
- f.write(text_data)
-
- update_symlinks(manifest_name, link_path)
- bb.plain("Image CVE report stored in: %s" % manifest_name)
-
if enable_json:
- link_path = os.path.join(deploy_dir, "%s.json" % link_name)
+ manifest_name_suffix = d.getVar("CVE_CHECK_MANIFEST_JSON_SUFFIX")
+ link_path = os.path.join(deploy_dir, "%s.%s" % (link_name, manifest_name_suffix))
manifest_name = d.getVar("CVE_CHECK_MANIFEST_JSON")
with open(manifest_name, "w") as f:
@@ -292,7 +258,51 @@ ROOTFS_POSTPROCESS_COMMAND:prepend = "${@'cve_check_write_rootfs_manifest ' if d
do_rootfs[recrdeptask] += "${@'do_cve_check' if d.getVar('CVE_CHECK_CREATE_MANIFEST') == '1' else ''}"
do_populate_sdk[recrdeptask] += "${@'do_cve_check' if d.getVar('CVE_CHECK_CREATE_MANIFEST') == '1' else ''}"
-def check_cves(d, patched_cves):
+def cve_is_ignored(d, cve_data, cve):
+ if cve not in cve_data:
+ return False
+ if cve_data[cve]['abbrev-status'] == "Ignored":
+ return True
+ return False
+
+def cve_is_patched(d, cve_data, cve):
+ if cve not in cve_data:
+ return False
+ if cve_data[cve]['abbrev-status'] == "Patched":
+ return True
+ return False
+
+def cve_update(d, cve_data, cve, entry):
+ # If no entry, just add it
+ if cve not in cve_data:
+ cve_data[cve] = entry
+ return
+ # If we are updating, there might be change in the status
+ bb.debug("Trying CVE entry update for %s from %s to %s" % (cve, cve_data[cve]['abbrev-status'], entry['abbrev-status']))
+ if cve_data[cve]['abbrev-status'] == "Unknown":
+ cve_data[cve] = entry
+ return
+ if cve_data[cve]['abbrev-status'] == entry['abbrev-status']:
+ return
+ # Update like in {'abbrev-status': 'Patched', 'status': 'version-not-in-range'} to {'abbrev-status': 'Unpatched', 'status': 'version-in-range'}
+ if entry['abbrev-status'] == "Unpatched" and cve_data[cve]['abbrev-status'] == "Patched":
+ if entry['status'] == "version-in-range" and cve_data[cve]['status'] == "version-not-in-range":
+ # New result from the scan, vulnerable
+ cve_data[cve] = entry
+ bb.debug("CVE entry %s update from Patched to Unpatched from the scan result" % cve)
+ return
+ if entry['abbrev-status'] == "Patched" and cve_data[cve]['abbrev-status'] == "Unpatched":
+ if entry['status'] == "version-not-in-range" and cve_data[cve]['status'] == "version-in-range":
+ # Range does not match the scan, but we already have a vulnerable match, ignore
+ bb.debug("CVE entry %s update from Patched to Unpatched from the scan result - not applying" % cve)
+ return
+ # If we have an "Ignored", it has a priority
+ if cve_data[cve]['abbrev-status'] == "Ignored":
+ bb.debug("CVE %s not updating because Ignored" % cve)
+ return
+ bb.warn("Unhandled CVE entry update for %s from %s to %s" % (cve, cve_data[cve], entry))
+
+def check_cves(d, cve_data):
"""
Connect to the NVD database and find unpatched cves.
"""
@@ -302,28 +312,19 @@ def check_cves(d, patched_cves):
real_pv = d.getVar("PV")
suffix = d.getVar("CVE_VERSION_SUFFIX")
- cves_unpatched = []
- cves_ignored = []
cves_status = []
cves_in_recipe = False
# CVE_PRODUCT can contain more than one product (eg. curl/libcurl)
products = d.getVar("CVE_PRODUCT").split()
# If this has been unset then we're not scanning for CVEs here (for example, image recipes)
if not products:
- return ([], [], [], [])
+ return ([], [])
pv = d.getVar("CVE_VERSION").split("+git")[0]
# If the recipe has been skipped/ignored we return empty lists
if pn in d.getVar("CVE_CHECK_SKIP_RECIPE").split():
bb.note("Recipe has been skipped by cve-check")
- return ([], [], [], [])
-
- # Convert CVE_STATUS into ignored CVEs and check validity
- cve_ignore = []
- for cve in (d.getVarFlags("CVE_STATUS") or {}):
- decoded_status, _, _ = decode_cve_status(d, cve)
- if decoded_status == "Ignored":
- cve_ignore.append(cve)
+ return ([], [])
import sqlite3
db_file = d.expand("file:${CVE_CHECK_DB_FILE}?mode=ro")
@@ -342,11 +343,10 @@ def check_cves(d, patched_cves):
for cverow in cve_cursor:
cve = cverow[0]
- if cve in cve_ignore:
+ if cve_is_ignored(d, cve_data, cve):
bb.note("%s-%s ignores %s" % (product, pv, cve))
- cves_ignored.append(cve)
continue
- elif cve in patched_cves:
+ elif cve_is_patched(d, cve_data, cve):
bb.note("%s has been patched" % (cve))
continue
# Write status once only for each product
@@ -362,7 +362,7 @@ def check_cves(d, patched_cves):
for row in product_cursor:
(_, _, _, version_start, operator_start, version_end, operator_end) = row
#bb.debug(2, "Evaluating row " + str(row))
- if cve in cve_ignore:
+ if cve_is_ignored(d, cve_data, cve):
ignored = True
version_start = convert_cve_version(version_start)
@@ -401,16 +401,16 @@ def check_cves(d, patched_cves):
if vulnerable:
if ignored:
bb.note("%s is ignored in %s-%s" % (cve, pn, real_pv))
- cves_ignored.append(cve)
+ cve_update(d, cve_data, cve, {"abbrev-status": "Ignored"})
else:
bb.note("%s-%s is vulnerable to %s" % (pn, real_pv, cve))
- cves_unpatched.append(cve)
+ cve_update(d, cve_data, cve, {"abbrev-status": "Unpatched", "status": "version-in-range"})
break
product_cursor.close()
if not vulnerable:
bb.note("%s-%s is not vulnerable to %s" % (pn, real_pv, cve))
- patched_cves.add(cve)
+ cve_update(d, cve_data, cve, {"abbrev-status": "Patched", "status": "version-not-in-range"})
cve_cursor.close()
if not cves_in_product:
@@ -418,123 +418,39 @@ def check_cves(d, patched_cves):
cves_status.append([product, False])
conn.close()
- diff_ignore = list(set(cve_ignore) - set(cves_ignored))
- if diff_ignore:
- oe.qa.handle_error("cve_status_not_in_db", "Found CVE (%s) with CVE_STATUS set that are not found in database for this component" % " ".join(diff_ignore), d)
if not cves_in_recipe:
bb.note("No CVE records for products in recipe %s" % (pn))
- return (list(cves_ignored), list(patched_cves), cves_unpatched, cves_status)
+ return (cve_data, cves_status)
-def get_cve_info(d, cves):
+def get_cve_info(d, cve_data):
"""
Get CVE information from the database.
"""
import sqlite3
- cve_data = {}
db_file = d.expand("file:${CVE_CHECK_DB_FILE}?mode=ro")
conn = sqlite3.connect(db_file, uri=True)
- for cve in cves:
+ for cve in cve_data:
cursor = conn.execute("SELECT * FROM NVD WHERE ID IS ?", (cve,))
for row in cursor:
- cve_data[row[0]] = {}
- cve_data[row[0]]["summary"] = row[1]
- cve_data[row[0]]["scorev2"] = row[2]
- cve_data[row[0]]["scorev3"] = row[3]
- cve_data[row[0]]["modified"] = row[4]
- cve_data[row[0]]["vector"] = row[5]
- cve_data[row[0]]["vectorString"] = row[6]
+            # The CVE itself has been added already
+ if row[0] not in cve_data:
+ bb.note("CVE record %s not present" % row[0])
+ continue
+ #cve_data[row[0]] = {}
+ cve_data[row[0]]["NVD-summary"] = row[1]
+ cve_data[row[0]]["NVD-scorev2"] = row[2]
+ cve_data[row[0]]["NVD-scorev3"] = row[3]
+ cve_data[row[0]]["NVD-scorev4"] = row[4]
+ cve_data[row[0]]["NVD-modified"] = row[5]
+ cve_data[row[0]]["NVD-vector"] = row[6]
+ cve_data[row[0]]["NVD-vectorString"] = row[7]
cursor.close()
conn.close()
- return cve_data
-
-def cve_write_data_text(d, patched, unpatched, ignored, cve_data):
- """
- Write CVE information in WORKDIR; and to CVE_CHECK_DIR, and
- CVE manifest if enabled.
- """
-
- from oe.cve_check import decode_cve_status
-
- cve_file = d.getVar("CVE_CHECK_LOG")
- fdir_name = d.getVar("FILE_DIRNAME")
- layer = fdir_name.split("/")[-3]
-
- include_layers = d.getVar("CVE_CHECK_LAYER_INCLUDELIST").split()
- exclude_layers = d.getVar("CVE_CHECK_LAYER_EXCLUDELIST").split()
-
- report_all = d.getVar("CVE_CHECK_REPORT_PATCHED") == "1"
-
- if exclude_layers and layer in exclude_layers:
- return
-
- if include_layers and layer not in include_layers:
- return
-
- # Early exit, the text format does not report packages without CVEs
- if not patched+unpatched+ignored:
- return
-
- nvd_link = "https://nvd.nist.gov/vuln/detail/"
- write_string = ""
- unpatched_cves = []
- bb.utils.mkdirhier(os.path.dirname(cve_file))
-
- for cve in sorted(cve_data):
- is_patched = cve in patched
- is_ignored = cve in ignored
-
- status = "Unpatched"
- if (is_patched or is_ignored) and not report_all:
- continue
- if is_ignored:
- status = "Ignored"
- elif is_patched:
- status = "Patched"
- else:
- # default value of status is Unpatched
- unpatched_cves.append(cve)
-
- write_string += "LAYER: %s\n" % layer
- write_string += "PACKAGE NAME: %s\n" % d.getVar("PN")
- write_string += "PACKAGE VERSION: %s%s\n" % (d.getVar("EXTENDPE"), d.getVar("PV"))
- write_string += "CVE: %s\n" % cve
- write_string += "CVE STATUS: %s\n" % status
- _, detail, description = decode_cve_status(d, cve)
- if detail:
- write_string += "CVE DETAIL: %s\n" % detail
- if description:
- write_string += "CVE DESCRIPTION: %s\n" % description
- write_string += "CVE SUMMARY: %s\n" % cve_data[cve]["summary"]
- write_string += "CVSS v2 BASE SCORE: %s\n" % cve_data[cve]["scorev2"]
- write_string += "CVSS v3 BASE SCORE: %s\n" % cve_data[cve]["scorev3"]
- write_string += "VECTOR: %s\n" % cve_data[cve]["vector"]
- write_string += "VECTORSTRING: %s\n" % cve_data[cve]["vectorString"]
- write_string += "MORE INFORMATION: %s%s\n\n" % (nvd_link, cve)
-
- if unpatched_cves and d.getVar("CVE_CHECK_SHOW_WARNINGS") == "1":
- bb.warn("Found unpatched CVE (%s), for more information check %s" % (" ".join(unpatched_cves),cve_file))
-
- with open(cve_file, "w") as f:
- bb.note("Writing file %s with CVE information" % cve_file)
- f.write(write_string)
-
- if d.getVar("CVE_CHECK_COPY_FILES") == "1":
- deploy_file = d.getVar("CVE_CHECK_RECIPE_FILE")
- bb.utils.mkdirhier(os.path.dirname(deploy_file))
- with open(deploy_file, "w") as f:
- f.write(write_string)
-
- if d.getVar("CVE_CHECK_CREATE_MANIFEST") == "1":
- cvelogpath = d.getVar("CVE_CHECK_SUMMARY_DIR")
- bb.utils.mkdirhier(cvelogpath)
-
- with open(d.getVar("CVE_CHECK_TMP_FILE"), "a") as f:
- f.write("%s" % write_string)
def cve_check_write_json_output(d, output, direct_file, deploy_file, manifest_file):
"""
@@ -566,13 +482,11 @@ def cve_check_write_json_output(d, output, direct_file, deploy_file, manifest_fi
with open(index_path, "a+") as f:
f.write("%s\n" % fragment_path)
-def cve_write_data_json(d, patched, unpatched, ignored, cve_data, cve_status):
+def cve_write_data_json(d, cve_data, cve_status):
"""
Prepare CVE data for the JSON format, then write it.
"""
- from oe.cve_check import decode_cve_status
-
output = {"version":"1", "package": []}
nvd_link = "https://nvd.nist.gov/vuln/detail/"
@@ -590,8 +504,6 @@ def cve_write_data_json(d, patched, unpatched, ignored, cve_data, cve_status):
if include_layers and layer not in include_layers:
return
- unpatched_cves = []
-
product_data = []
for s in cve_status:
p = {"product": s[0], "cvesInRecord": "Yes"}
@@ -606,39 +518,33 @@ def cve_write_data_json(d, patched, unpatched, ignored, cve_data, cve_status):
"version" : package_version,
"products": product_data
}
+
cve_list = []
for cve in sorted(cve_data):
- is_patched = cve in patched
- is_ignored = cve in ignored
- status = "Unpatched"
- if (is_patched or is_ignored) and not report_all:
+ if not report_all and (cve_data[cve]["abbrev-status"] == "Patched" or cve_data[cve]["abbrev-status"] == "Ignored"):
continue
- if is_ignored:
- status = "Ignored"
- elif is_patched:
- status = "Patched"
- else:
- # default value of status is Unpatched
- unpatched_cves.append(cve)
-
issue_link = "%s%s" % (nvd_link, cve)
cve_item = {
"id" : cve,
- "summary" : cve_data[cve]["summary"],
- "scorev2" : cve_data[cve]["scorev2"],
- "scorev3" : cve_data[cve]["scorev3"],
- "vector" : cve_data[cve]["vector"],
- "vectorString" : cve_data[cve]["vectorString"],
- "status" : status,
- "link": issue_link
+ "status" : cve_data[cve]["abbrev-status"],
+ "link": issue_link,
}
- _, detail, description = decode_cve_status(d, cve)
- if detail:
- cve_item["detail"] = detail
- if description:
- cve_item["description"] = description
+ if 'NVD-summary' in cve_data[cve]:
+ cve_item["summary"] = cve_data[cve]["NVD-summary"]
+ cve_item["scorev2"] = cve_data[cve]["NVD-scorev2"]
+ cve_item["scorev3"] = cve_data[cve]["NVD-scorev3"]
+ cve_item["scorev4"] = cve_data[cve]["NVD-scorev4"]
+ cve_item["modified"] = cve_data[cve]["NVD-modified"]
+ cve_item["vector"] = cve_data[cve]["NVD-vector"]
+ cve_item["vectorString"] = cve_data[cve]["NVD-vectorString"]
+ if 'status' in cve_data[cve]:
+ cve_item["detail"] = cve_data[cve]["status"]
+ if 'justification' in cve_data[cve]:
+ cve_item["description"] = cve_data[cve]["justification"]
+ if 'resource' in cve_data[cve]:
+ cve_item["patch-file"] = cve_data[cve]["resource"]
cve_list.append(cve_item)
package_data["issue"] = cve_list
@@ -650,12 +556,10 @@ def cve_write_data_json(d, patched, unpatched, ignored, cve_data, cve_status):
cve_check_write_json_output(d, output, direct_file, deploy_file, manifest_file)
-def cve_write_data(d, patched, unpatched, ignored, cve_data, status):
+def cve_write_data(d, cve_data, status):
"""
Write CVE data in each enabled format.
"""
- if d.getVar("CVE_CHECK_FORMAT_TEXT") == "1":
- cve_write_data_text(d, patched, unpatched, ignored, cve_data)
if d.getVar("CVE_CHECK_FORMAT_JSON") == "1":
- cve_write_data_json(d, patched, unpatched, ignored, cve_data, status)
+ cve_write_data_json(d, cve_data, status)
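
With this rework the per-recipe results flow through a single cve_data dictionary keyed by CVE ID instead of the separate patched/unpatched/ignored lists; each entry carries an 'abbrev-status' ("Patched", "Unpatched", "Ignored" or "Unknown"), an optional 'status' detail, and the NVD-prefixed fields that get_cve_info() fills in from the database. An illustrative entry, with all values invented for the example:

cve_data = {
    "CVE-2024-12345": {
        "abbrev-status": "Unpatched",            # set by check_cves() via cve_update()
        "status": "version-in-range",            # scan detail, reported as "detail" in the JSON output
        "NVD-summary": "Example out-of-bounds read in libexample",
        "NVD-scorev2": "5.0",
        "NVD-scorev3": "7.5",
        "NVD-scorev4": "",
        "NVD-modified": "2024-01-01T00:00:00",
        "NVD-vector": "NETWORK",
        "NVD-vectorString": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:N/A:N",
    },
}
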
diff --git a/poky/meta/classes/spdx-common.bbclass b/poky/meta/classes/spdx-common.bbclass
index 03f1d0cc27..cd9cc0db98 100644
--- a/poky/meta/classes/spdx-common.bbclass
+++ b/poky/meta/classes/spdx-common.bbclass
@@ -17,6 +17,7 @@ SPDXDEPLOY = "${SPDXDIR}/deploy"
SPDXWORK = "${SPDXDIR}/work"
SPDXIMAGEWORK = "${SPDXDIR}/image-work"
SPDXSDKWORK = "${SPDXDIR}/sdk-work"
+SPDXSDKEXTWORK = "${SPDXDIR}/sdk-ext-work"
SPDXDEPS = "${SPDXDIR}/deps.json"
SPDX_TOOL_NAME ??= "oe-spdx-creator"
@@ -36,100 +37,7 @@ SPDX_LICENSES ??= "${COREBASE}/meta/files/spdx-licenses.json"
SPDX_CUSTOM_ANNOTATION_VARS ??= ""
-SPDX_ORG ??= "OpenEmbedded ()"
-SPDX_SUPPLIER ??= "Organization: ${SPDX_ORG}"
-SPDX_SUPPLIER[doc] = "The SPDX PackageSupplier field for SPDX packages created from \
- this recipe. For SPDX documents create using this class during the build, this \
- is the contact information for the person or organization who is doing the \
- build."
-
-def extract_licenses(filename):
- import re
-
- lic_regex = re.compile(rb'^\W*SPDX-License-Identifier:\s*([ \w\d.()+-]+?)(?:\s+\W*)?$', re.MULTILINE)
-
- try:
- with open(filename, 'rb') as f:
- size = min(15000, os.stat(filename).st_size)
- txt = f.read(size)
- licenses = re.findall(lic_regex, txt)
- if licenses:
- ascii_licenses = [lic.decode('ascii') for lic in licenses]
- return ascii_licenses
- except Exception as e:
- bb.warn(f"Exception reading {filename}: {e}")
- return []
-
-def is_work_shared_spdx(d):
- return bb.data.inherits_class('kernel', d) or ('work-shared' in d.getVar('WORKDIR'))
-
-def get_json_indent(d):
- if d.getVar("SPDX_PRETTY") == "1":
- return 2
- return None
-
-python() {
- import json
- if d.getVar("SPDX_LICENSE_DATA"):
- return
-
- with open(d.getVar("SPDX_LICENSES"), "r") as f:
- data = json.load(f)
- # Transform the license array to a dictionary
- data["licenses"] = {l["licenseId"]: l for l in data["licenses"]}
- d.setVar("SPDX_LICENSE_DATA", data)
-}
-
-def process_sources(d):
- pn = d.getVar('PN')
- assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
- if pn in assume_provided:
- for p in d.getVar("PROVIDES").split():
- if p != pn:
- pn = p
- break
-
- # glibc-locale: do_fetch, do_unpack and do_patch tasks have been deleted,
- # so avoid archiving source here.
- if pn.startswith('glibc-locale'):
- return False
- if d.getVar('PN') == "libtool-cross":
- return False
- if d.getVar('PN') == "libgcc-initial":
- return False
- if d.getVar('PN') == "shadow-sysroot":
- return False
-
- # We just archive gcc-source for all the gcc related recipes
- if d.getVar('BPN') in ['gcc', 'libgcc']:
- bb.debug(1, 'spdx: There is bug in scan of %s is, do nothing' % pn)
- return False
-
- return True
-
-def collect_direct_deps(d, dep_task):
- current_task = "do_" + d.getVar("BB_CURRENTTASK")
- pn = d.getVar("PN")
-
- taskdepdata = d.getVar("BB_TASKDEPDATA", False)
-
- for this_dep in taskdepdata.values():
- if this_dep[0] == pn and this_dep[1] == current_task:
- break
- else:
- bb.fatal(f"Unable to find this {pn}:{current_task} in taskdepdata")
-
- deps = set()
-
- for dep_name in this_dep.deps:
- dep_data = taskdepdata[dep_name]
- if dep_data.taskname == dep_task and dep_data.pn != pn:
- deps.add((dep_data.pn, dep_data.hashfn, dep_name in this_dep.taskhash_deps))
-
- return sorted(deps)
-
-collect_direct_deps[vardepsexclude] += "BB_TASKDEPDATA"
-collect_direct_deps[vardeps] += "DEPENDS"
+SPDX_MULTILIB_SSTATE_ARCHS ??= "${SSTATE_ARCHS}"
python do_collect_spdx_deps() {
# This task calculates the build time dependencies of the recipe, and is
@@ -139,11 +47,12 @@ python do_collect_spdx_deps() {
# do_create_spdx reads in the found dependencies when writing the actual
# SPDX document
import json
+ import oe.spdx_common
from pathlib import Path
spdx_deps_file = Path(d.getVar("SPDXDEPS"))
- deps = collect_direct_deps(d, "do_create_spdx")
+ deps = oe.spdx_common.collect_direct_deps(d, "do_create_spdx")
with spdx_deps_file.open("w") as f:
json.dump(deps, f)
@@ -154,104 +63,7 @@ do_collect_spdx_deps[depends] += "${PATCHDEPENDENCY}"
do_collect_spdx_deps[deptask] = "do_create_spdx"
do_collect_spdx_deps[dirs] = "${SPDXDIR}"
-def get_spdx_deps(d):
- import json
- from pathlib import Path
-
- spdx_deps_file = Path(d.getVar("SPDXDEPS"))
-
- with spdx_deps_file.open("r") as f:
- return json.load(f)
-
-def collect_package_providers(d):
- from pathlib import Path
- import oe.sbom
- import oe.spdx
- import json
-
- deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
-
- providers = {}
-
- deps = collect_direct_deps(d, "do_create_spdx")
- deps.append((d.getVar("PN"), d.getVar("BB_HASHFILENAME"), True))
-
- for dep_pn, dep_hashfn, _ in deps:
- localdata = d
- recipe_data = oe.packagedata.read_pkgdata(dep_pn, localdata)
- if not recipe_data:
- localdata = bb.data.createCopy(d)
- localdata.setVar("PKGDATA_DIR", "${PKGDATA_DIR_SDK}")
- recipe_data = oe.packagedata.read_pkgdata(dep_pn, localdata)
-
- for pkg in recipe_data.get("PACKAGES", "").split():
-
- pkg_data = oe.packagedata.read_subpkgdata_dict(pkg, localdata)
- rprovides = set(n for n, _ in bb.utils.explode_dep_versions2(pkg_data.get("RPROVIDES", "")).items())
- rprovides.add(pkg)
-
- if "PKG" in pkg_data:
- pkg = pkg_data["PKG"]
- rprovides.add(pkg)
-
- for r in rprovides:
- providers[r] = (pkg, dep_hashfn)
-
- return providers
-
-collect_package_providers[vardepsexclude] += "BB_TASKDEPDATA"
-
-def spdx_get_src(d):
- """
- save patched source of the recipe in SPDX_WORKDIR.
- """
- import shutil
- spdx_workdir = d.getVar('SPDXWORK')
- spdx_sysroot_native = d.getVar('STAGING_DIR_NATIVE')
- pn = d.getVar('PN')
-
- workdir = d.getVar("WORKDIR")
-
- try:
- # The kernel class functions require it to be on work-shared, so we dont change WORKDIR
- if not is_work_shared_spdx(d):
- # Change the WORKDIR to make do_unpack do_patch run in another dir.
- d.setVar('WORKDIR', spdx_workdir)
- # Restore the original path to recipe's native sysroot (it's relative to WORKDIR).
- d.setVar('STAGING_DIR_NATIVE', spdx_sysroot_native)
-
- # The changed 'WORKDIR' also caused 'B' changed, create dir 'B' for the
- # possibly requiring of the following tasks (such as some recipes's
- # do_patch required 'B' existed).
- bb.utils.mkdirhier(d.getVar('B'))
-
- bb.build.exec_func('do_unpack', d)
- # Copy source of kernel to spdx_workdir
- if is_work_shared_spdx(d):
- share_src = d.getVar('WORKDIR')
- d.setVar('WORKDIR', spdx_workdir)
- d.setVar('STAGING_DIR_NATIVE', spdx_sysroot_native)
- src_dir = spdx_workdir + "/" + d.getVar('PN')+ "-" + d.getVar('PV') + "-" + d.getVar('PR')
- bb.utils.mkdirhier(src_dir)
- if bb.data.inherits_class('kernel',d):
- share_src = d.getVar('STAGING_KERNEL_DIR')
- cmd_copy_share = "cp -rf " + share_src + "/* " + src_dir + "/"
- cmd_copy_shared_res = os.popen(cmd_copy_share).read()
- bb.note("cmd_copy_shared_result = " + cmd_copy_shared_res)
-
- git_path = src_dir + "/.git"
- if os.path.exists(git_path):
- shutils.rmtree(git_path)
-
- # Make sure gcc and kernel sources are patched only once
- if not (d.getVar('SRC_URI') == "" or is_work_shared_spdx(d)):
- bb.build.exec_func('do_patch', d)
-
- # Some userland has no source.
- if not os.path.exists( spdx_workdir ):
- bb.utils.mkdirhier(spdx_workdir)
- finally:
- d.setVar("WORKDIR", workdir)
-
-spdx_get_src[vardepsexclude] += "STAGING_KERNEL_DIR"
-
+oe.spdx_common.collect_direct_deps[vardepsexclude] += "BB_TASKDEPDATA"
+oe.spdx_common.collect_direct_deps[vardeps] += "DEPENDS"
+oe.spdx_common.collect_package_providers[vardepsexclude] += "BB_TASKDEPDATA"
+oe.spdx_common.get_patched_src[vardepsexclude] += "STAGING_KERNEL_DIR"
diff --git a/poky/meta/classes/vex.bbclass b/poky/meta/classes/vex.bbclass
new file mode 100644
index 0000000000..01d4e52051
--- /dev/null
+++ b/poky/meta/classes/vex.bbclass
@@ -0,0 +1,311 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+# This class is used to generate metadata needed by external
+# tools to check for vulnerabilities, for example CVEs.
+#
+# In order to use this class just inherit the class in the
+# local.conf file and it will add the generate_vex task for
+# every recipe. If an image is built it will generate a report
+# in DEPLOY_DIR_IMAGE for all the packages used; it will also
+# generate a file for all recipes used in the build.
+#
+# Variables use CVE_CHECK prefix to keep compatibility with
+# the cve-check class
+#
+# Example:
+# bitbake -c generate_vex openssl
+# bitbake core-image-sato
+# bitbake -k -c generate_vex universe
+#
+# The product name that the CVE database uses defaults to BPN, but may need to
+# be overridden per recipe (for example tiff.bb sets CVE_PRODUCT=libtiff).
+CVE_PRODUCT ??= "${BPN}"
+CVE_VERSION ??= "${PV}"
+
+CVE_CHECK_SUMMARY_DIR ?= "${LOG_DIR}/cve"
+
+CVE_CHECK_SUMMARY_FILE_NAME_JSON = "cve-summary.json"
+CVE_CHECK_SUMMARY_INDEX_PATH = "${CVE_CHECK_SUMMARY_DIR}/cve-summary-index.txt"
+
+CVE_CHECK_DIR ??= "${DEPLOY_DIR}/cve"
+CVE_CHECK_RECIPE_FILE_JSON ?= "${CVE_CHECK_DIR}/${PN}_cve.json"
+CVE_CHECK_MANIFEST_JSON ?= "${IMGDEPLOYDIR}/${IMAGE_NAME}.json"
+
+# Skip CVE Check for packages (PN)
+CVE_CHECK_SKIP_RECIPE ?= ""
+
+# Replace NVD DB check status for a given CVE. Each of CVE has to be mentioned
+# separately with optional detail and description for this status.
+#
+# CVE_STATUS[CVE-1234-0001] = "not-applicable-platform: Issue only applies on Windows"
+# CVE_STATUS[CVE-1234-0002] = "fixed-version: Fixed externally"
+#
+# Setting the same status and reason for multiple CVEs is possible
+# via CVE_STATUS_GROUPS variable.
+#
+# CVE_STATUS_GROUPS = "CVE_STATUS_WIN CVE_STATUS_PATCHED"
+#
+# CVE_STATUS_WIN = "CVE-1234-0001 CVE-1234-0003"
+# CVE_STATUS_WIN[status] = "not-applicable-platform: Issue only applies on Windows"
+# CVE_STATUS_PATCHED = "CVE-1234-0002 CVE-1234-0004"
+# CVE_STATUS_PATCHED[status] = "fixed-version: Fixed externally"
+#
+# All possible CVE statuses could be found in cve-check-map.conf
+# CVE_CHECK_STATUSMAP[not-applicable-platform] = "Ignored"
+# CVE_CHECK_STATUSMAP[fixed-version] = "Patched"
+#
+# CVE_CHECK_IGNORE is deprecated and CVE_STATUS has to be used instead.
+# Keep CVE_CHECK_IGNORE until other layers migrate to new variables
+CVE_CHECK_IGNORE ?= ""
+
+# Layers to be excluded
+CVE_CHECK_LAYER_EXCLUDELIST ??= ""
+
+# Layers to be included
+CVE_CHECK_LAYER_INCLUDELIST ??= ""
+
+
+# set to "alphabetical" for version using single alphabetical character as increment release
+CVE_VERSION_SUFFIX ??= ""
+
+python () {
+ if bb.data.inherits_class("cve-check", d):
+ raise bb.parse.SkipRecipe("Skipping recipe: found incompatible combination of cve-check and vex enabled at the same time.")
+
+ # Fallback all CVEs from CVE_CHECK_IGNORE to CVE_STATUS
+ cve_check_ignore = d.getVar("CVE_CHECK_IGNORE")
+ if cve_check_ignore:
+ bb.warn("CVE_CHECK_IGNORE is deprecated in favor of CVE_STATUS")
+ for cve in (d.getVar("CVE_CHECK_IGNORE") or "").split():
+ d.setVarFlag("CVE_STATUS", cve, "ignored")
+
+ # Process CVE_STATUS_GROUPS to set multiple statuses and optional detail or description at once
+ for cve_status_group in (d.getVar("CVE_STATUS_GROUPS") or "").split():
+ cve_group = d.getVar(cve_status_group)
+ if cve_group is not None:
+ for cve in cve_group.split():
+ d.setVarFlag("CVE_STATUS", cve, d.getVarFlag(cve_status_group, "status"))
+ else:
+ bb.warn("CVE_STATUS_GROUPS contains undefined variable %s" % cve_status_group)
+}
+
+def generate_json_report(d, out_path, link_path):
+ if os.path.exists(d.getVar("CVE_CHECK_SUMMARY_INDEX_PATH")):
+ import json
+ from oe.cve_check import cve_check_merge_jsons, update_symlinks
+
+ bb.note("Generating JSON CVE summary")
+ index_file = d.getVar("CVE_CHECK_SUMMARY_INDEX_PATH")
+ summary = {"version":"1", "package": []}
+ with open(index_file) as f:
+ filename = f.readline()
+ while filename:
+ with open(filename.rstrip()) as j:
+ data = json.load(j)
+ cve_check_merge_jsons(summary, data)
+ filename = f.readline()
+
+ summary["package"].sort(key=lambda d: d['name'])
+
+ with open(out_path, "w") as f:
+ json.dump(summary, f, indent=2)
+
+ update_symlinks(out_path, link_path)
+
+python vex_save_summary_handler () {
+ import shutil
+ import datetime
+ from oe.cve_check import update_symlinks
+
+ cvelogpath = d.getVar("CVE_CHECK_SUMMARY_DIR")
+
+ bb.utils.mkdirhier(cvelogpath)
+ timestamp = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
+
+ json_summary_link_name = os.path.join(cvelogpath, d.getVar("CVE_CHECK_SUMMARY_FILE_NAME_JSON"))
+ json_summary_name = os.path.join(cvelogpath, "cve-summary-%s.json" % (timestamp))
+ generate_json_report(d, json_summary_name, json_summary_link_name)
+ bb.plain("Complete CVE JSON report summary created at: %s" % json_summary_link_name)
+}
+
+addhandler vex_save_summary_handler
+vex_save_summary_handler[eventmask] = "bb.event.BuildCompleted"
+
+python do_generate_vex () {
+ """
+ Generate metadata needed for vulnerability checking for
+ the current recipe
+ """
+ from oe.cve_check import get_patched_cves
+
+ try:
+ patched_cves = get_patched_cves(d)
+ cves_status = []
+ products = d.getVar("CVE_PRODUCT").split()
+ for product in products:
+ if ":" in product:
+ _, product = product.split(":", 1)
+ cves_status.append([product, False])
+
+ except FileNotFoundError:
+ bb.fatal("Failure in searching patches")
+
+ cve_write_data_json(d, patched_cves, cves_status)
+}
+
+addtask generate_vex before do_build
+
+python vex_cleanup () {
+ """
+ Delete the file used to gather all the CVE information.
+ """
+ bb.utils.remove(e.data.getVar("CVE_CHECK_SUMMARY_INDEX_PATH"))
+}
+
+addhandler vex_cleanup
+vex_cleanup[eventmask] = "bb.event.BuildCompleted"
+
+python vex_write_rootfs_manifest () {
+ """
+ Create VEX/CVE manifest when building an image
+ """
+
+ import json
+ from oe.rootfs import image_list_installed_packages
+ from oe.cve_check import cve_check_merge_jsons, update_symlinks
+
+ deploy_file_json = d.getVar("CVE_CHECK_RECIPE_FILE_JSON")
+ if os.path.exists(deploy_file_json):
+ bb.utils.remove(deploy_file_json)
+
+    # Create a list of relevant recipes
+ recipies = set()
+ for pkg in list(image_list_installed_packages(d)):
+ pkg_info = os.path.join(d.getVar('PKGDATA_DIR'),
+ 'runtime-reverse', pkg)
+ pkg_data = oe.packagedata.read_pkgdatafile(pkg_info)
+ recipies.add(pkg_data["PN"])
+
+ bb.note("Writing rootfs VEX manifest")
+ deploy_dir = d.getVar("IMGDEPLOYDIR")
+ link_name = d.getVar("IMAGE_LINK_NAME")
+
+ json_data = {"version":"1", "package": []}
+ text_data = ""
+
+ save_pn = d.getVar("PN")
+
+ for pkg in recipies:
+ # To be able to use the CVE_CHECK_RECIPE_FILE_JSON variable we have to evaluate
+ # it with the different PN names set each time.
+ d.setVar("PN", pkg)
+
+ pkgfilepath = d.getVar("CVE_CHECK_RECIPE_FILE_JSON")
+ if os.path.exists(pkgfilepath):
+ with open(pkgfilepath) as j:
+ data = json.load(j)
+ cve_check_merge_jsons(json_data, data)
+
+ d.setVar("PN", save_pn)
+
+ link_path = os.path.join(deploy_dir, "%s.json" % link_name)
+ manifest_name = d.getVar("CVE_CHECK_MANIFEST_JSON")
+
+ with open(manifest_name, "w") as f:
+ json.dump(json_data, f, indent=2)
+
+ update_symlinks(manifest_name, link_path)
+ bb.plain("Image VEX JSON report stored in: %s" % manifest_name)
+}
+
+ROOTFS_POSTPROCESS_COMMAND:prepend = "vex_write_rootfs_manifest; "
+do_rootfs[recrdeptask] += "do_generate_vex "
+do_populate_sdk[recrdeptask] += "do_generate_vex "
+
+def cve_write_data_json(d, cve_data, cve_status):
+ """
+ Prepare CVE data for the JSON format, then write it.
+ Done for each recipe.
+ """
+
+ from oe.cve_check import get_cpe_ids
+ import json
+
+ output = {"version":"1", "package": []}
+ nvd_link = "https://nvd.nist.gov/vuln/detail/"
+
+ fdir_name = d.getVar("FILE_DIRNAME")
+ layer = fdir_name.split("/")[-3]
+
+ include_layers = d.getVar("CVE_CHECK_LAYER_INCLUDELIST").split()
+ exclude_layers = d.getVar("CVE_CHECK_LAYER_EXCLUDELIST").split()
+
+ if exclude_layers and layer in exclude_layers:
+ return
+
+ if include_layers and layer not in include_layers:
+ return
+
+ product_data = []
+ for s in cve_status:
+ p = {"product": s[0], "cvesInRecord": "Yes"}
+ if s[1] == False:
+ p["cvesInRecord"] = "No"
+ product_data.append(p)
+ product_data = list({p['product']:p for p in product_data}.values())
+
+ package_version = "%s%s" % (d.getVar("EXTENDPE"), d.getVar("PV"))
+ cpes = get_cpe_ids(d.getVar("CVE_PRODUCT"), d.getVar("CVE_VERSION"))
+ package_data = {
+ "name" : d.getVar("PN"),
+ "layer" : layer,
+ "version" : package_version,
+ "products": product_data,
+ "cpes": cpes
+ }
+
+ cve_list = []
+
+ for cve in sorted(cve_data):
+ issue_link = "%s%s" % (nvd_link, cve)
+
+ cve_item = {
+ "id" : cve,
+ "status" : cve_data[cve]["abbrev-status"],
+ "link": issue_link,
+ }
+ if 'NVD-summary' in cve_data[cve]:
+ cve_item["summary"] = cve_data[cve]["NVD-summary"]
+ cve_item["scorev2"] = cve_data[cve]["NVD-scorev2"]
+ cve_item["scorev3"] = cve_data[cve]["NVD-scorev3"]
+ cve_item["scorev4"] = cve_data[cve]["NVD-scorev4"]
+ cve_item["vector"] = cve_data[cve]["NVD-vector"]
+ cve_item["vectorString"] = cve_data[cve]["NVD-vectorString"]
+ if 'status' in cve_data[cve]:
+ cve_item["detail"] = cve_data[cve]["status"]
+ if 'justification' in cve_data[cve]:
+ cve_item["description"] = cve_data[cve]["justification"]
+ if 'resource' in cve_data[cve]:
+ cve_item["patch-file"] = cve_data[cve]["resource"]
+ cve_list.append(cve_item)
+
+ package_data["issue"] = cve_list
+ output["package"].append(package_data)
+
+ deploy_file = d.getVar("CVE_CHECK_RECIPE_FILE_JSON")
+
+ write_string = json.dumps(output, indent=2)
+
+ cvelogpath = d.getVar("CVE_CHECK_SUMMARY_DIR")
+ index_path = d.getVar("CVE_CHECK_SUMMARY_INDEX_PATH")
+ bb.utils.mkdirhier(cvelogpath)
+ fragment_file = os.path.basename(deploy_file)
+ fragment_path = os.path.join(cvelogpath, fragment_file)
+ with open(fragment_path, "w") as f:
+ f.write(write_string)
+ with open(index_path, "a+") as f:
+ f.write("%s\n" % fragment_path)