Diffstat (limited to 'poky/meta/classes')
27 files changed, 1151 insertions, 120 deletions
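The bulk of this change is the new create-spdx.bbclass, which writes one SPDX JSON document per recipe and per package under DEPLOY_DIR_SPDX, and whose image_combine_spdx() combines them into <image>.spdx.tar.zst together with an index.json listing each document's filename, documentNamespace and sha1. As a rough illustration of consuming that output (not part of this commit; the image filename below is hypothetical and the third-party "zstandard" Python module is assumed), the archive can be walked like this:

    import json
    import tarfile
    import zstandard

    # Stream-decompress the zstd tarball produced by image_combine_spdx()
    # and print the document index it contains.
    with open("example-image.spdx.tar.zst", "rb") as f:  # hypothetical filename
        with zstandard.ZstdDecompressor().stream_reader(f) as stream:
            with tarfile.open(fileobj=stream, mode="r|") as tar:
                for entry in tar:
                    if entry.name == "index.json":
                        index = json.loads(tar.extractfile(entry).read())
                        for doc in index["documents"]:
                            print(doc["filename"], doc["sha1"])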
diff --git a/poky/meta/classes/autotools.bbclass b/poky/meta/classes/autotools.bbclass
index 2c7968e65..bc0c2ea83 100644
--- a/poky/meta/classes/autotools.bbclass
+++ b/poky/meta/classes/autotools.bbclass
@@ -145,7 +145,13 @@ ACLOCALEXTRAPATH:class-target = " -I ${STAGING_DATADIR_NATIVE}/aclocal/"
 ACLOCALEXTRAPATH:class-nativesdk = " -I ${STAGING_DATADIR_NATIVE}/aclocal/"
 
 python autotools_aclocals () {
-    d.setVar("CONFIG_SITE", siteinfo_get_files(d, sysrootcache=True))
+    sitefiles, searched = siteinfo_get_files(d, sysrootcache=True)
+    d.setVar("CONFIG_SITE", " ".join(sitefiles))
+}
+
+python () {
+    sitefiles, searched = siteinfo_get_files(d, sysrootcache=False)
+    d.appendVarFlag("do_configure", "file-checksums", " " + " ".join(searched))
 }
 
 CONFIGURE_FILES = "${S}/configure.in ${S}/configure.ac ${S}/config.h.in ${S}/acinclude.m4 Makefile.am"
diff --git a/poky/meta/classes/cpan-base.bbclass b/poky/meta/classes/cpan-base.bbclass
index 9758065bf..93d11e1be 100644
--- a/poky/meta/classes/cpan-base.bbclass
+++ b/poky/meta/classes/cpan-base.bbclass
@@ -20,7 +20,7 @@ PERLLIBDIRS:class-native = "${libdir}/perl5"
 
 def cpan_upstream_check_pattern(d):
     for x in (d.getVar('SRC_URI') or '').split(' '):
         if x.startswith("https://cpan.metacpan.org"):
-            _pattern = x.split('/')[-1].replace(d.getVar('PV'), '(?P<pver>\d+.\d+)')
+            _pattern = x.split('/')[-1].replace(d.getVar('PV'), r'(?P<pver>\d+.\d+)')
             return _pattern
     return ''
diff --git a/poky/meta/classes/create-spdx.bbclass b/poky/meta/classes/create-spdx.bbclass
new file mode 100644
index 000000000..3c73c21c0
--- /dev/null
+++ b/poky/meta/classes/create-spdx.bbclass
@@ -0,0 +1,931 @@
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+DEPLOY_DIR_SPDX ??= "${DEPLOY_DIR}/spdx/${MACHINE}"
+
+# The product name that the CVE database uses. Defaults to BPN, but may need to
+# be overridden per recipe (for example tiff.bb sets CVE_PRODUCT=libtiff).
+CVE_PRODUCT ??= "${BPN}"
+CVE_VERSION ??= "${PV}"
+
+SPDXDIR ??= "${WORKDIR}/spdx"
+SPDXDEPLOY = "${SPDXDIR}/deploy"
+SPDXWORK = "${SPDXDIR}/work"
+
+SPDXRUNTIMEDEPLOY = "${SPDXDIR}/runtime-deploy"
+
+SPDX_INCLUDE_SOURCES ??= "0"
+SPDX_INCLUDE_PACKAGED ??= "0"
+SPDX_ARCHIVE_SOURCES ??= "0"
+SPDX_ARCHIVE_PACKAGED ??= "0"
+
+SPDX_UUID_NAMESPACE ??= "sbom.openembedded.org"
+SPDX_NAMESPACE_PREFIX ??= "http://spdx.org/spdxdoc"
+
+SPDX_LICENSES ??= "${COREBASE}/meta/files/spdx-licenses.json"
+
+do_image_complete[depends] = "virtual/kernel:do_create_spdx"
+
+def get_doc_namespace(d, doc):
+    import uuid
+    namespace_uuid = uuid.uuid5(uuid.NAMESPACE_DNS, d.getVar("SPDX_UUID_NAMESPACE"))
+    return "%s/%s-%s" % (d.getVar("SPDX_NAMESPACE_PREFIX"), doc.name, str(uuid.uuid5(namespace_uuid, doc.name)))
+
+
+def is_work_shared(d):
+    pn = d.getVar('PN')
+    return bb.data.inherits_class('kernel', d) or pn.startswith('gcc-source')
+
+
+python() {
+    import json
+    if d.getVar("SPDX_LICENSE_DATA"):
+        return
+
+    with open(d.getVar("SPDX_LICENSES"), "r") as f:
+        data = json.load(f)
+        # Transform the license array to a dictionary
+        data["licenses"] = {l["licenseId"]: l for l in data["licenses"]}
+        d.setVar("SPDX_LICENSE_DATA", data)
+}
+
+def convert_license_to_spdx(lic, document, d, existing={}):
+    from pathlib import Path
+    import oe.spdx
+
+    available_licenses = d.getVar("AVAILABLE_LICENSES").split()
+    license_data = d.getVar("SPDX_LICENSE_DATA")
+    extracted = {}
+
+    def add_extracted_license(ident, name):
+        nonlocal document
+
+        if name in extracted:
+            return
+
+        extracted_info = oe.spdx.SPDXExtractedLicensingInfo()
+        extracted_info.name = name
+        extracted_info.licenseId = ident
+        extracted_info.extractedText = None
+
+        if name == "PD":
+            # Special-case this.
+            extracted_info.extractedText = "Software released to the public domain"
+        elif name in available_licenses:
+            # This license can be found in COMMON_LICENSE_DIR or LICENSE_PATH
+            for directory in [d.getVar('COMMON_LICENSE_DIR')] + d.getVar('LICENSE_PATH').split():
+                try:
+                    with (Path(directory) / name).open(errors="replace") as f:
+                        extracted_info.extractedText = f.read()
+                        break
+                except FileNotFoundError:
+                    pass
+            if extracted_info.extractedText is None:
+                # Error out, as the license was in available_licenses so should
+                # be on disk somewhere.
+                bb.error("Cannot find text for license %s" % name)
+        else:
+            # If it's not SPDX, or PD, or in available licenses, then NO_GENERIC_LICENSE must be set
+            filename = d.getVarFlag('NO_GENERIC_LICENSE', name)
+            if filename:
+                filename = d.expand("${S}/" + filename)
+                with open(filename, errors="replace") as f:
+                    extracted_info.extractedText = f.read()
+            else:
+                bb.error("Cannot find any text for license %s" % name)
+
+        extracted[name] = extracted_info
+        document.hasExtractedLicensingInfos.append(extracted_info)
+
+    def convert(l):
+        if l == "(" or l == ")":
+            return l
+
+        if l == "&":
+            return "AND"
+
+        if l == "|":
+            return "OR"
+
+        if l == "CLOSED":
+            return "NONE"
+
+        spdx_license = d.getVarFlag("SPDXLICENSEMAP", l) or l
+        if spdx_license in license_data["licenses"]:
+            return spdx_license
+
+        try:
+            spdx_license = existing[l]
+        except KeyError:
+            spdx_license = "LicenseRef-" + l
+            add_extracted_license(spdx_license, l)
+
+        return spdx_license
+
+    lic_split = lic.replace("(", " ( ").replace(")", " ) ").split()
+
+    return ' '.join(convert(l) for l in lic_split)
+
+
+def process_sources(d):
+    pn = d.getVar('PN')
+    assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
+    if pn in assume_provided:
+        for p in d.getVar("PROVIDES").split():
+            if p != pn:
+                pn = p
+                break
+
+    # glibc-locale: do_fetch, do_unpack and do_patch tasks have been deleted,
+    # so avoid archiving source here.
+    if pn.startswith('glibc-locale'):
+        return False
+    if d.getVar('PN') == "libtool-cross":
+        return False
+    if d.getVar('PN') == "libgcc-initial":
+        return False
+    if d.getVar('PN') == "shadow-sysroot":
+        return False
+
+    # We just archive gcc-source for all the gcc related recipes
+    if d.getVar('BPN') in ['gcc', 'libgcc']:
+        bb.debug(1, 'spdx: There is a bug in the scan of %s, do nothing' % pn)
+        return False
+
+    return True
+
+
+def add_package_files(d, doc, spdx_pkg, topdir, get_spdxid, get_types, *, archive=None, ignore_dirs=[], ignore_top_level_dirs=[]):
+    from pathlib import Path
+    import oe.spdx
+    import hashlib
+
+    source_date_epoch = d.getVar("SOURCE_DATE_EPOCH")
+    if source_date_epoch:
+        source_date_epoch = int(source_date_epoch)
+
+    sha1s = []
+    spdx_files = []
+
+    file_counter = 1
+    for subdir, dirs, files in os.walk(topdir):
+        dirs[:] = [d for d in dirs if d not in ignore_dirs]
+        if subdir == str(topdir):
+            dirs[:] = [d for d in dirs if d not in ignore_top_level_dirs]
+
+        for file in files:
+            filepath = Path(subdir) / file
+            filename = str(filepath.relative_to(topdir))
+
+            if filepath.is_file() and not filepath.is_symlink():
+                spdx_file = oe.spdx.SPDXFile()
+                spdx_file.SPDXID = get_spdxid(file_counter)
+                for t in get_types(filepath):
+                    spdx_file.fileTypes.append(t)
+                spdx_file.fileName = filename
+
+                if archive is not None:
+                    with filepath.open("rb") as f:
+                        info = archive.gettarinfo(fileobj=f)
+                        info.name = filename
+                        info.uid = 0
+                        info.gid = 0
+                        info.uname = "root"
+                        info.gname = "root"
+
+                        if source_date_epoch is not None and info.mtime > source_date_epoch:
+                            info.mtime = source_date_epoch
+
+                        archive.addfile(info, f)
+
+                sha1 = bb.utils.sha1_file(filepath)
+                sha1s.append(sha1)
+                spdx_file.checksums.append(oe.spdx.SPDXChecksum(
+                    algorithm="SHA1",
+                    checksumValue=sha1,
+                ))
+                spdx_file.checksums.append(oe.spdx.SPDXChecksum(
+                    algorithm="SHA256",
+                    checksumValue=bb.utils.sha256_file(filepath),
+                ))
+
+                doc.files.append(spdx_file)
+                doc.add_relationship(spdx_pkg, "CONTAINS", spdx_file)
+                spdx_pkg.hasFiles.append(spdx_file.SPDXID)
+
+                spdx_files.append(spdx_file)
+
+                file_counter += 1
+
+    sha1s.sort()
+    verifier = hashlib.sha1()
+    for v in sha1s:
+        verifier.update(v.encode("utf-8"))
+    spdx_pkg.packageVerificationCode.packageVerificationCodeValue = verifier.hexdigest()
+
+    return spdx_files
+
+
+def add_package_sources_from_debug(d, package_doc, spdx_package, package, package_files, sources):
+    from pathlib import Path
+    import hashlib
+    import oe.packagedata
+    import oe.spdx
+
+    debug_search_paths = [
+        Path(d.getVar('PKGD')),
+        Path(d.getVar('STAGING_DIR_TARGET')),
+        Path(d.getVar('STAGING_DIR_NATIVE')),
+    ]
+
+    pkg_data = oe.packagedata.read_subpkgdata_extended(package, d)
+
+    if pkg_data is None:
+        return
+
+    for file_path, file_data in pkg_data["files_info"].items():
+        if not "debugsrc" in file_data:
+            continue
+
+        for pkg_file in package_files:
+            if file_path.lstrip("/") == pkg_file.fileName.lstrip("/"):
+                break
+        else:
+            bb.fatal("No package file found for %s" % str(file_path))
+            continue
+
+        for debugsrc in file_data["debugsrc"]:
+            ref_id = "NOASSERTION"
+            for search in debug_search_paths:
+                debugsrc_path = search / debugsrc.lstrip("/")
+                if not debugsrc_path.exists():
+                    continue
+
+                file_sha256 = bb.utils.sha256_file(debugsrc_path)
+
+                if file_sha256 in sources:
+                    source_file = sources[file_sha256]
+
+                    doc_ref = package_doc.find_external_document_ref(source_file.doc.documentNamespace)
+                    if doc_ref is None:
+                        doc_ref = oe.spdx.SPDXExternalDocumentRef()
+                        doc_ref.externalDocumentId = "DocumentRef-dependency-" + source_file.doc.name
+                        doc_ref.spdxDocument = source_file.doc.documentNamespace
+                        doc_ref.checksum.algorithm = "SHA1"
+                        doc_ref.checksum.checksumValue = source_file.doc_sha1
+                        package_doc.externalDocumentRefs.append(doc_ref)
+
+                    ref_id = "%s:%s" % (doc_ref.externalDocumentId, source_file.file.SPDXID)
+                else:
+                    bb.debug(1, "Debug source %s with SHA256 %s not found in any dependency" % (str(debugsrc_path), file_sha256))
+                break
+            else:
+                bb.debug(1, "Debug source %s not found" % debugsrc)
+
+            package_doc.add_relationship(pkg_file, "GENERATED_FROM", ref_id, comment=debugsrc)
+
+def collect_dep_recipes(d, doc, spdx_recipe):
+    from pathlib import Path
+    import oe.sbom
+    import oe.spdx
+
+    deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
+
+    dep_recipes = []
+    taskdepdata = d.getVar("BB_TASKDEPDATA", False)
+    deps = sorted(set(
+        dep[0] for dep in taskdepdata.values() if
+        dep[1] == "do_create_spdx" and dep[0] != d.getVar("PN")
+    ))
+    for dep_pn in deps:
+        dep_recipe_path = deploy_dir_spdx / "recipes" / ("recipe-%s.spdx.json" % dep_pn)
+
+        spdx_dep_doc, spdx_dep_sha1 = oe.sbom.read_doc(dep_recipe_path)
+
+        for pkg in spdx_dep_doc.packages:
+            if pkg.name == dep_pn:
+                spdx_dep_recipe = pkg
+                break
+        else:
+            continue
+
+        dep_recipes.append(oe.sbom.DepRecipe(spdx_dep_doc, spdx_dep_sha1, spdx_dep_recipe))
+
+        dep_recipe_ref = oe.spdx.SPDXExternalDocumentRef()
+        dep_recipe_ref.externalDocumentId = "DocumentRef-dependency-" + spdx_dep_doc.name
+        dep_recipe_ref.spdxDocument = spdx_dep_doc.documentNamespace
+        dep_recipe_ref.checksum.algorithm = "SHA1"
+        dep_recipe_ref.checksum.checksumValue = spdx_dep_sha1
+
+        doc.externalDocumentRefs.append(dep_recipe_ref)
+
+        doc.add_relationship(
+            "%s:%s" % (dep_recipe_ref.externalDocumentId, spdx_dep_recipe.SPDXID),
+            "BUILD_DEPENDENCY_OF",
+            spdx_recipe
+        )
+
+    return dep_recipes
+
+collect_dep_recipes[vardepsexclude] += "BB_TASKDEPDATA"
+
+
+def collect_dep_sources(d, dep_recipes):
+    import oe.sbom
+
+    sources = {}
+    for dep in dep_recipes:
+        recipe_files = set(dep.recipe.hasFiles)
+
+        for spdx_file in dep.doc.files:
+            if spdx_file.SPDXID not in recipe_files:
+                continue
+
+            if "SOURCE" in spdx_file.fileTypes:
+                for checksum in spdx_file.checksums:
+                    if checksum.algorithm == "SHA256":
+                        sources[checksum.checksumValue] = oe.sbom.DepSource(dep.doc, dep.doc_sha1, dep.recipe, spdx_file)
+                        break
+
+    return sources
+
+
+python do_create_spdx() {
+    from datetime import datetime, timezone
+    import oe.sbom
+    import oe.spdx
+    import uuid
+    from pathlib import Path
+    from contextlib import contextmanager
+    import oe.cve_check
+
+    @contextmanager
+    def optional_tarfile(name, guard, mode="w"):
+        import tarfile
+        import bb.compress.zstd
+
+        num_threads = int(d.getVar("BB_NUMBER_THREADS"))
+
+        if guard:
+            name.parent.mkdir(parents=True, exist_ok=True)
+            with bb.compress.zstd.open(name, mode=mode + "b", num_threads=num_threads) as f:
+                with tarfile.open(fileobj=f, mode=mode + "|") as tf:
+                    yield tf
+        else:
+            yield None
+
+
+    deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
+    spdx_workdir = Path(d.getVar("SPDXWORK"))
+    include_packaged = d.getVar("SPDX_INCLUDE_PACKAGED") == "1"
+    include_sources = d.getVar("SPDX_INCLUDE_SOURCES") == "1"
+    archive_sources = d.getVar("SPDX_ARCHIVE_SOURCES") == "1"
+    archive_packaged = d.getVar("SPDX_ARCHIVE_PACKAGED") == "1"
+    is_native = bb.data.inherits_class("native", d)
+
+    creation_time = datetime.now(tz=timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
+
+    doc = oe.spdx.SPDXDocument()
+
+    doc.name = "recipe-" + d.getVar("PN")
+    doc.documentNamespace = get_doc_namespace(d, doc)
+    doc.creationInfo.created = creation_time
+    doc.creationInfo.comment = "This document was created by analyzing recipe files during the build."
+    doc.creationInfo.licenseListVersion = d.getVar("SPDX_LICENSE_DATA")["licenseListVersion"]
+    doc.creationInfo.creators.append("Tool: OpenEmbedded Core create-spdx.bbclass")
+    doc.creationInfo.creators.append("Organization: OpenEmbedded ()")
+    doc.creationInfo.creators.append("Person: N/A ()")
+
+    recipe = oe.spdx.SPDXPackage()
+    recipe.name = d.getVar("PN")
+    recipe.versionInfo = d.getVar("PV")
+    recipe.SPDXID = oe.sbom.get_recipe_spdxid(d)
+
+    for s in d.getVar('SRC_URI').split():
+        if not s.startswith("file://"):
+            recipe.downloadLocation = s
+            break
+    else:
+        recipe.downloadLocation = "NOASSERTION"
+
+    homepage = d.getVar("HOMEPAGE")
+    if homepage:
+        recipe.homepage = homepage
+
+    license = d.getVar("LICENSE")
+    if license:
+        recipe.licenseDeclared = convert_license_to_spdx(license, doc, d)
+
+    summary = d.getVar("SUMMARY")
+    if summary:
+        recipe.summary = summary
+
+    description = d.getVar("DESCRIPTION")
+    if description:
+        recipe.description = description
+
+    # Some CVEs may be patched during the build process without incrementing the version number,
+    # so querying for CVEs based on the CPE id can lead to false positives. To account for this,
+    # save the CVEs fixed by patches to the source information field in the SPDX.
+    patched_cves = oe.cve_check.get_patched_cves(d)
+    patched_cves = list(patched_cves)
+    patched_cves = ' '.join(patched_cves)
+    if patched_cves:
+        recipe.sourceInfo = "CVEs fixed: " + patched_cves
+
+    cpe_ids = oe.cve_check.get_cpe_ids(d.getVar("CVE_PRODUCT"), d.getVar("CVE_VERSION"))
+    if cpe_ids:
+        for cpe_id in cpe_ids:
+            cpe = oe.spdx.SPDXExternalReference()
+            cpe.referenceCategory = "SECURITY"
+            cpe.referenceType = "http://spdx.org/rdf/references/cpe23Type"
+            cpe.referenceLocator = cpe_id
+            recipe.externalRefs.append(cpe)
+
+    doc.packages.append(recipe)
+    doc.add_relationship(doc, "DESCRIBES", recipe)
+
+    if process_sources(d) and include_sources:
+        recipe_archive = deploy_dir_spdx / "recipes" / (doc.name + ".tar.zst")
+        with optional_tarfile(recipe_archive, archive_sources) as archive:
+            spdx_get_src(d)
+
+            add_package_files(
+                d,
+                doc,
+                recipe,
+                spdx_workdir,
+                lambda file_counter: "SPDXRef-SourceFile-%s-%d" % (d.getVar("PN"), file_counter),
+                lambda filepath: ["SOURCE"],
+                ignore_dirs=[".git"],
+                ignore_top_level_dirs=["temp"],
+                archive=archive,
+            )
+
+            if archive is not None:
+                recipe.packageFileName = str(recipe_archive.name)
+
+    dep_recipes = collect_dep_recipes(d, doc, recipe)
+
+    doc_sha1 = oe.sbom.write_doc(d, doc, "recipes")
+    dep_recipes.append(oe.sbom.DepRecipe(doc, doc_sha1, recipe))
+
+    recipe_ref = oe.spdx.SPDXExternalDocumentRef()
+    recipe_ref.externalDocumentId = "DocumentRef-recipe-" + recipe.name
+    recipe_ref.spdxDocument = doc.documentNamespace
+    recipe_ref.checksum.algorithm = "SHA1"
+    recipe_ref.checksum.checksumValue = doc_sha1
+
+    sources = collect_dep_sources(d, dep_recipes)
+    found_licenses = {license.name:recipe_ref.externalDocumentId + ":" + license.licenseId for license in doc.hasExtractedLicensingInfos}
+
+    if not is_native:
+        bb.build.exec_func("read_subpackage_metadata", d)
+
+        pkgdest = Path(d.getVar("PKGDEST"))
+        for package in d.getVar("PACKAGES").split():
+            if not oe.packagedata.packaged(package, d):
+                continue
+
+            package_doc = oe.spdx.SPDXDocument()
+            pkg_name = d.getVar("PKG:%s" % package) or package
+            package_doc.name = pkg_name
+            package_doc.documentNamespace = get_doc_namespace(d, package_doc)
+            package_doc.creationInfo.created = creation_time
+            package_doc.creationInfo.comment = "This document was created by analyzing packages created during the build."
+            package_doc.creationInfo.licenseListVersion = d.getVar("SPDX_LICENSE_DATA")["licenseListVersion"]
+            package_doc.creationInfo.creators.append("Tool: OpenEmbedded Core create-spdx.bbclass")
+            package_doc.creationInfo.creators.append("Organization: OpenEmbedded ()")
+            package_doc.creationInfo.creators.append("Person: N/A ()")
+            package_doc.externalDocumentRefs.append(recipe_ref)
+
+            package_license = d.getVar("LICENSE:%s" % package) or d.getVar("LICENSE")
+
+            spdx_package = oe.spdx.SPDXPackage()
+
+            spdx_package.SPDXID = oe.sbom.get_package_spdxid(pkg_name)
+            spdx_package.name = pkg_name
+            spdx_package.versionInfo = d.getVar("PV")
+            spdx_package.licenseDeclared = convert_license_to_spdx(package_license, package_doc, d, found_licenses)
+
+            package_doc.packages.append(spdx_package)
+
+            package_doc.add_relationship(spdx_package, "GENERATED_FROM", "%s:%s" % (recipe_ref.externalDocumentId, recipe.SPDXID))
+            package_doc.add_relationship(package_doc, "DESCRIBES", spdx_package)
+
+            package_archive = deploy_dir_spdx / "packages" / (package_doc.name + ".tar.zst")
+            with optional_tarfile(package_archive, archive_packaged) as archive:
+                package_files = add_package_files(
+                    d,
+                    package_doc,
+                    spdx_package,
+                    pkgdest / package,
+                    lambda file_counter: oe.sbom.get_packaged_file_spdxid(pkg_name, file_counter),
+                    lambda filepath: ["BINARY"],
+                    archive=archive,
+                )
+
+                if archive is not None:
+                    spdx_package.packageFileName = str(package_archive.name)
+
+            add_package_sources_from_debug(d, package_doc, spdx_package, package, package_files, sources)
+
+            oe.sbom.write_doc(d, package_doc, "packages")
+}
+# NOTE: depending on do_unpack is a hack that is necessary to get its dependencies for archiving the source
+addtask do_create_spdx after do_package do_packagedata do_unpack before do_build do_rm_work
+
+SSTATETASKS += "do_create_spdx"
+do_create_spdx[sstate-inputdirs] = "${SPDXDEPLOY}"
+do_create_spdx[sstate-outputdirs] = "${DEPLOY_DIR_SPDX}"
+
+python do_create_spdx_setscene () {
+    sstate_setscene(d)
+}
+addtask do_create_spdx_setscene
+
+do_create_spdx[dirs] = "${SPDXDEPLOY} ${SPDXWORK}"
+do_create_spdx[cleandirs] = "${SPDXDEPLOY} ${SPDXWORK}"
+do_create_spdx[depends] += "${PATCHDEPENDENCY}"
+do_create_spdx[deptask] = "do_create_spdx"
+
+def collect_package_providers(d):
+    from pathlib import Path
+    import oe.sbom
+    import oe.spdx
+    import json
+
+    deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
+
+    providers = {}
+
+    taskdepdata = d.getVar("BB_TASKDEPDATA", False)
+    deps = sorted(set(
+        dep[0] for dep in taskdepdata.values() if dep[0] != d.getVar("PN")
+    ))
+    deps.append(d.getVar("PN"))
+
+    for dep_pn in deps:
+        recipe_data = oe.packagedata.read_pkgdata(dep_pn, d)
+
+        for pkg in recipe_data.get("PACKAGES", "").split():
+
+            pkg_data = oe.packagedata.read_subpkgdata_dict(pkg, d)
+            rprovides = set(n for n, _ in bb.utils.explode_dep_versions2(pkg_data.get("RPROVIDES", "")).items())
+            rprovides.add(pkg)
+
+            for r in rprovides:
+                providers[r] = pkg
+
+    return providers
+
+collect_package_providers[vardepsexclude] += "BB_TASKDEPDATA"
+
+python do_create_runtime_spdx() {
+    from datetime import datetime, timezone
+    import oe.sbom
+    import oe.spdx
+    import oe.packagedata
+    from pathlib import Path
+
+    deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
+    spdx_deploy = Path(d.getVar("SPDXRUNTIMEDEPLOY"))
+    is_native = bb.data.inherits_class("native", d)
+
+    creation_time = datetime.now(tz=timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
+
+    providers = collect_package_providers(d)
+
+    if not is_native:
+        bb.build.exec_func("read_subpackage_metadata", d)
+
+        dep_package_cache = {}
+
+        pkgdest = Path(d.getVar("PKGDEST"))
+        for package in d.getVar("PACKAGES").split():
+            localdata = bb.data.createCopy(d)
+            pkg_name = d.getVar("PKG:%s" % package) or package
+            localdata.setVar("PKG", pkg_name)
+            localdata.setVar('OVERRIDES', d.getVar("OVERRIDES", False) + ":" + package)
+
+            if not oe.packagedata.packaged(package, localdata):
+                continue
+
+            pkg_spdx_path = deploy_dir_spdx / "packages" / (pkg_name + ".spdx.json")
+
+            package_doc, package_doc_sha1 = oe.sbom.read_doc(pkg_spdx_path)
+
+            for p in package_doc.packages:
+                if p.name == pkg_name:
+                    spdx_package = p
+                    break
+            else:
+                bb.fatal("Package '%s' not found in %s" % (pkg_name, pkg_spdx_path))
+
+            runtime_doc = oe.spdx.SPDXDocument()
+            runtime_doc.name = "runtime-" + pkg_name
+            runtime_doc.documentNamespace = get_doc_namespace(localdata, runtime_doc)
+            runtime_doc.creationInfo.created = creation_time
+            runtime_doc.creationInfo.comment = "This document was created by analyzing package runtime dependencies."
+            runtime_doc.creationInfo.licenseListVersion = d.getVar("SPDX_LICENSE_DATA")["licenseListVersion"]
+            runtime_doc.creationInfo.creators.append("Tool: OpenEmbedded Core create-spdx.bbclass")
+            runtime_doc.creationInfo.creators.append("Organization: OpenEmbedded ()")
+            runtime_doc.creationInfo.creators.append("Person: N/A ()")
+
+            package_ref = oe.spdx.SPDXExternalDocumentRef()
+            package_ref.externalDocumentId = "DocumentRef-package-" + package
+            package_ref.spdxDocument = package_doc.documentNamespace
+            package_ref.checksum.algorithm = "SHA1"
+            package_ref.checksum.checksumValue = package_doc_sha1
+
+            runtime_doc.externalDocumentRefs.append(package_ref)
+
+            runtime_doc.add_relationship(
+                runtime_doc.SPDXID,
+                "AMENDS",
+                "%s:%s" % (package_ref.externalDocumentId, package_doc.SPDXID)
+            )
+
+            deps = bb.utils.explode_dep_versions2(localdata.getVar("RDEPENDS") or "")
+            seen_deps = set()
+            for dep, _ in deps.items():
+                if dep in seen_deps:
+                    continue
+
+                dep = providers[dep]
+
+                if not oe.packagedata.packaged(dep, localdata):
+                    continue
+
+                dep_pkg_data = oe.packagedata.read_subpkgdata_dict(dep, d)
+                dep_pkg = dep_pkg_data["PKG"]
+
+                if dep in dep_package_cache:
+                    (dep_spdx_package, dep_package_ref) = dep_package_cache[dep]
+                else:
+                    dep_path = deploy_dir_spdx / "packages" / ("%s.spdx.json" % dep_pkg)
+
+                    spdx_dep_doc, spdx_dep_sha1 = oe.sbom.read_doc(dep_path)
+
+                    for pkg in spdx_dep_doc.packages:
+                        if pkg.name == dep_pkg:
+                            dep_spdx_package = pkg
+                            break
+                    else:
+                        bb.fatal("Package '%s' not found in %s" % (dep_pkg, dep_path))
+
+                    dep_package_ref = oe.spdx.SPDXExternalDocumentRef()
+                    dep_package_ref.externalDocumentId = "DocumentRef-runtime-dependency-" + spdx_dep_doc.name
+                    dep_package_ref.spdxDocument = spdx_dep_doc.documentNamespace
+                    dep_package_ref.checksum.algorithm = "SHA1"
+                    dep_package_ref.checksum.checksumValue = spdx_dep_sha1
+
+                    dep_package_cache[dep] = (dep_spdx_package, dep_package_ref)
+
+                runtime_doc.externalDocumentRefs.append(dep_package_ref)
+
+                runtime_doc.add_relationship(
+                    "%s:%s" % (dep_package_ref.externalDocumentId, dep_spdx_package.SPDXID),
+                    "RUNTIME_DEPENDENCY_OF",
+                    "%s:%s" % (package_ref.externalDocumentId, spdx_package.SPDXID)
+                )
+                seen_deps.add(dep)
+
+            oe.sbom.write_doc(d, runtime_doc, "runtime", spdx_deploy)
+}
+
+addtask do_create_runtime_spdx after do_create_spdx before do_build do_rm_work
+SSTATETASKS += "do_create_runtime_spdx"
+do_create_runtime_spdx[sstate-inputdirs] = "${SPDXRUNTIMEDEPLOY}"
+do_create_runtime_spdx[sstate-outputdirs] = "${DEPLOY_DIR_SPDX}"
+
+python do_create_runtime_spdx_setscene () {
+    sstate_setscene(d)
+}
+addtask do_create_runtime_spdx_setscene
+
+do_create_runtime_spdx[dirs] = "${SPDXRUNTIMEDEPLOY}"
+do_create_runtime_spdx[cleandirs] = "${SPDXRUNTIMEDEPLOY}"
+do_create_runtime_spdx[rdeptask] = "do_create_spdx"
+
+def spdx_get_src(d):
+    """
+    save patched source of the recipe in SPDXWORK.
+    """
+    import shutil
+    spdx_workdir = d.getVar('SPDXWORK')
+    spdx_sysroot_native = d.getVar('STAGING_DIR_NATIVE')
+    pn = d.getVar('PN')
+
+    workdir = d.getVar("WORKDIR")
+
+    try:
+        # The kernel class functions require it to be on work-shared, so we don't change WORKDIR
+        if not is_work_shared(d):
+            # Change the WORKDIR to make do_unpack do_patch run in another dir.
+            d.setVar('WORKDIR', spdx_workdir)
+            # Restore the original path to recipe's native sysroot (it's relative to WORKDIR).
+            d.setVar('STAGING_DIR_NATIVE', spdx_sysroot_native)
+
+            # Changing 'WORKDIR' also changes 'B'; create the directory 'B'
+            # since some of the following tasks may require it to exist
+            # (e.g. some recipes' do_patch requires 'B').
+            bb.utils.mkdirhier(d.getVar('B'))
+
+            bb.build.exec_func('do_unpack', d)
+        # Copy source of kernel to spdx_workdir
+        if is_work_shared(d):
+            d.setVar('WORKDIR', spdx_workdir)
+            d.setVar('STAGING_DIR_NATIVE', spdx_sysroot_native)
+            src_dir = spdx_workdir + "/" + d.getVar('PN')+ "-" + d.getVar('PV') + "-" + d.getVar('PR')
+            bb.utils.mkdirhier(src_dir)
+            if bb.data.inherits_class('kernel',d):
+                share_src = d.getVar('STAGING_KERNEL_DIR')
+            cmd_copy_share = "cp -rf " + share_src + "/* " + src_dir + "/"
+            cmd_copy_kernel_result = os.popen(cmd_copy_share).read()
+            bb.note("cmd_copy_kernel_result = " + cmd_copy_kernel_result)
+
+            git_path = src_dir + "/.git"
+            if os.path.exists(git_path):
+                shutil.rmtree(git_path)
+
+        # Make sure gcc and kernel sources are patched only once
+        if not (d.getVar('SRC_URI') == "" or is_work_shared(d)):
+            bb.build.exec_func('do_patch', d)
+
+        # Some userland recipes have no source.
+        if not os.path.exists( spdx_workdir ):
+            bb.utils.mkdirhier(spdx_workdir)
+    finally:
+        d.setVar("WORKDIR", workdir)
+
+do_rootfs[recrdeptask] += "do_create_spdx do_create_runtime_spdx"
+
+ROOTFS_POSTUNINSTALL_COMMAND =+ "image_combine_spdx ; "
+python image_combine_spdx() {
+    import os
+    import oe.spdx
+    import oe.sbom
+    import io
+    import json
+    from oe.rootfs import image_list_installed_packages
+    from datetime import timezone, datetime
+    from pathlib import Path
+    import tarfile
+    import bb.compress.zstd
+
+    creation_time = datetime.now(tz=timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
+    image_name = d.getVar("IMAGE_NAME")
+    image_link_name = d.getVar("IMAGE_LINK_NAME")
+
+    deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
+    imgdeploydir = Path(d.getVar("IMGDEPLOYDIR"))
+    source_date_epoch = d.getVar("SOURCE_DATE_EPOCH")
+
+    doc = oe.spdx.SPDXDocument()
+    doc.name = image_name
+    doc.documentNamespace = get_doc_namespace(d, doc)
+    doc.creationInfo.created = creation_time
+    doc.creationInfo.comment = "This document was created by analyzing the source of the Yocto recipe during the build."
+    doc.creationInfo.licenseListVersion = d.getVar("SPDX_LICENSE_DATA")["licenseListVersion"]
+    doc.creationInfo.creators.append("Tool: OpenEmbedded Core create-spdx.bbclass")
+    doc.creationInfo.creators.append("Organization: OpenEmbedded ()")
+    doc.creationInfo.creators.append("Person: N/A ()")
+
+    image = oe.spdx.SPDXPackage()
+    image.name = d.getVar("PN")
+    image.versionInfo = d.getVar("PV")
+    image.SPDXID = oe.sbom.get_image_spdxid(image_name)
+
+    doc.packages.append(image)
+
+    spdx_package = oe.spdx.SPDXPackage()
+
+    packages = image_list_installed_packages(d)
+
+    for name in sorted(packages.keys()):
+        pkg_spdx_path = deploy_dir_spdx / "packages" / (name + ".spdx.json")
+        pkg_doc, pkg_doc_sha1 = oe.sbom.read_doc(pkg_spdx_path)
+
+        for p in pkg_doc.packages:
+            if p.name == name:
+                pkg_ref = oe.spdx.SPDXExternalDocumentRef()
+                pkg_ref.externalDocumentId = "DocumentRef-%s" % pkg_doc.name
+                pkg_ref.spdxDocument = pkg_doc.documentNamespace
+                pkg_ref.checksum.algorithm = "SHA1"
+                pkg_ref.checksum.checksumValue = pkg_doc_sha1
+
+                doc.externalDocumentRefs.append(pkg_ref)
+                doc.add_relationship(image, "CONTAINS", "%s:%s" % (pkg_ref.externalDocumentId, p.SPDXID))
+                break
+        else:
+            bb.fatal("Unable to find package with name '%s' in SPDX file %s" % (name, pkg_spdx_path))
+
+        runtime_spdx_path = deploy_dir_spdx / "runtime" / ("runtime-" + name + ".spdx.json")
+        runtime_doc, runtime_doc_sha1 = oe.sbom.read_doc(runtime_spdx_path)
+
+        runtime_ref = oe.spdx.SPDXExternalDocumentRef()
+        runtime_ref.externalDocumentId = "DocumentRef-%s" % runtime_doc.name
+        runtime_ref.spdxDocument = runtime_doc.documentNamespace
+        runtime_ref.checksum.algorithm = "SHA1"
+        runtime_ref.checksum.checksumValue = runtime_doc_sha1
+
+        # "OTHER" isn't ideal here, but I can't find a relationship that makes sense
+        doc.externalDocumentRefs.append(runtime_ref)
+        doc.add_relationship(
+            image,
+            "OTHER",
+            "%s:%s" % (runtime_ref.externalDocumentId, runtime_doc.SPDXID),
+            comment="Runtime dependencies for %s" % name
+        )
+
+    image_spdx_path = imgdeploydir / (image_name + ".spdx.json")
+
+    with image_spdx_path.open("wb") as f:
+        doc.to_json(f, sort_keys=True)
+
+    image_spdx_link = imgdeploydir / (image_link_name + ".spdx.json")
+    image_spdx_link.symlink_to(os.path.relpath(image_spdx_path, image_spdx_link.parent))
+
+    num_threads = int(d.getVar("BB_NUMBER_THREADS"))
+
+    visited_docs = set()
+
+    index = {"documents": []}
+
+    spdx_tar_path = imgdeploydir / (image_name + ".spdx.tar.zst")
+    with bb.compress.zstd.open(spdx_tar_path, "w", num_threads=num_threads) as f:
+        with tarfile.open(fileobj=f, mode="w|") as tar:
+            def collect_spdx_document(path):
+                nonlocal tar
+                nonlocal deploy_dir_spdx
+                nonlocal source_date_epoch
+                nonlocal index
+
+                if path in visited_docs:
+                    return
+
+                visited_docs.add(path)
+
+                with path.open("rb") as f:
+                    doc, sha1 = oe.sbom.read_doc(f)
+                    f.seek(0)
+
+                    if doc.documentNamespace in visited_docs:
+                        return
+
+                    bb.note("Adding SPDX document %s" % path)
+                    visited_docs.add(doc.documentNamespace)
+                    info = tar.gettarinfo(fileobj=f)
+
+                    info.name = doc.name + ".spdx.json"
+                    info.uid = 0
+                    info.gid = 0
+                    info.uname = "root"
+                    info.gname = "root"
+
+                    if source_date_epoch is not None and info.mtime > int(source_date_epoch):
+                        info.mtime = int(source_date_epoch)
+
+                    tar.addfile(info, f)
+
+                    index["documents"].append({
+                        "filename": info.name,
+                        "documentNamespace": doc.documentNamespace,
+                        "sha1": sha1,
+                    })
+
+                for ref in doc.externalDocumentRefs:
+                    ref_path = deploy_dir_spdx / "by-namespace" / ref.spdxDocument.replace("/", "_")
+                    collect_spdx_document(ref_path)
+
+            collect_spdx_document(image_spdx_path)
+
+            index["documents"].sort(key=lambda x: x["filename"])
+
+            index_str = io.BytesIO(json.dumps(index, sort_keys=True).encode("utf-8"))
+
+            info = tarfile.TarInfo()
+            info.name = "index.json"
+            info.size = len(index_str.getvalue())
+            info.uid = 0
+            info.gid = 0
+            info.uname = "root"
+            info.gname = "root"
+
+            tar.addfile(info, fileobj=index_str)
+
+    def make_image_link(target_path, suffix):
+        link = imgdeploydir / (image_link_name + suffix)
+        link.symlink_to(os.path.relpath(target_path, link.parent))
+
+    make_image_link(spdx_tar_path, ".spdx.tar.zst")
+
+    spdx_index_path = imgdeploydir / (image_name + ".spdx.index.json")
+    with spdx_index_path.open("w") as f:
+        json.dump(index, f, sort_keys=True)
+
+    make_image_link(spdx_index_path, ".spdx.index.json")
+}
+
diff --git a/poky/meta/classes/cross-canadian.bbclass b/poky/meta/classes/cross-canadian.bbclass
index 447a2bb15..ffbc2167e 100644
--- a/poky/meta/classes/cross-canadian.bbclass
+++ b/poky/meta/classes/cross-canadian.bbclass
@@ -40,6 +40,8 @@ python () {
     extralibcs = [""]
     if "musl" in d.getVar("BASECANADIANEXTRAOS"):
         extralibcs.append("musl")
+    if "android" in tos:
+        extralibcs.append("android")
     for variant in ["", "spe", "x32", "eabi", "n32", "_ilp32"]:
         for libc in extralibcs:
             entry = "linux"
diff --git a/poky/meta/classes/cross.bbclass b/poky/meta/classes/cross.bbclass
index 00e0de84f..3e6a2f60b 100644
--- a/poky/meta/classes/cross.bbclass
+++ b/poky/meta/classes/cross.bbclass
@@ -72,10 +72,6 @@ libexecdir = "${exec_prefix}/libexec/${CROSS_TARGET_SYS_DIR}"
 do_populate_sysroot[sstate-inputdirs] = "${SYSROOT_DESTDIR}/${STAGING_DIR_NATIVE}/"
 do_packagedata[stamp-extra-info] = ""
 
-do_install () {
-	oe_runmake 'DESTDIR=${D}' install
-}
-
 USE_NLS = "no"
 
 export CC = "${BUILD_CC}"
diff --git a/poky/meta/classes/externalsrc.bbclass b/poky/meta/classes/externalsrc.bbclass
index 54b08adf6..7f1a760ee 100644
--- a/poky/meta/classes/externalsrc.bbclass
+++ b/poky/meta/classes/externalsrc.bbclass
@@ -110,6 +110,16 @@ python () {
                 continue
             bb.build.deltask(task, d)
 
+        if bb.data.inherits_class('reproducible_build', d) and 'do_unpack' in d.getVar("SRCTREECOVEREDTASKS").split():
+            # The reproducible_build's create_source_date_epoch_stamp function must
+            # be run after the source is available and before the
+            # do_deploy_source_date_epoch task. In the normal case, it's attached
+            # to do_unpack as a postfunc, but since we removed do_unpack (above)
+            # we need to move the function elsewhere. The easiest thing to do is
+            # move it into the prefuncs of the do_deploy_source_date_epoch task.
+            # This is safe, as externalsrc runs with the source already unpacked.
+            d.prependVarFlag('do_deploy_source_date_epoch', 'prefuncs', 'create_source_date_epoch_stamp ')
+
         d.prependVarFlag('do_compile', 'prefuncs', "externalsrc_compile_prefunc ")
         d.prependVarFlag('do_configure', 'prefuncs', "externalsrc_configure_prefunc ")
diff --git a/poky/meta/classes/image.bbclass b/poky/meta/classes/image.bbclass
index d76895178..1d88ccd81 100644
--- a/poky/meta/classes/image.bbclass
+++ b/poky/meta/classes/image.bbclass
@@ -138,7 +138,10 @@ python () {
     def extraimage_getdepends(task):
         deps = ""
         for dep in (d.getVar('EXTRA_IMAGEDEPENDS') or "").split():
-            deps += " %s:%s" % (dep, task)
+            if ":" in dep:
+                deps += " %s " % (dep)
+            else:
+                deps += " %s:%s" % (dep, task)
         return deps
 
     d.appendVarFlag('do_image_complete', 'depends', extraimage_getdepends('do_populate_sysroot'))
diff --git a/poky/meta/classes/image_types_wic.bbclass b/poky/meta/classes/image_types_wic.bbclass
index 2f1a0b709..d561fb263 100644
--- a/poky/meta/classes/image_types_wic.bbclass
+++ b/poky/meta/classes/image_types_wic.bbclass
@@ -1,11 +1,35 @@
 # The WICVARS variable is used to define list of bitbake variables used in wic code
 # variables from this list is written to <image>.env file
 WICVARS ?= "\
-           BBLAYERS IMGDEPLOYDIR DEPLOY_DIR_IMAGE FAKEROOTCMD IMAGE_BASENAME IMAGE_EFI_BOOT_FILES IMAGE_BOOT_FILES \
-           IMAGE_LINK_NAME IMAGE_ROOTFS INITRAMFS_FSTYPES INITRD INITRD_LIVE ISODIR RECIPE_SYSROOT_NATIVE \
-           ROOTFS_SIZE STAGING_DATADIR STAGING_DIR STAGING_LIBDIR TARGET_SYS HOSTTOOLS_DIR \
-           KERNEL_IMAGETYPE MACHINE INITRAMFS_IMAGE INITRAMFS_IMAGE_BUNDLE INITRAMFS_LINK_NAME APPEND \
-           ASSUME_PROVIDED PSEUDO_IGNORE_PATHS"
+    APPEND \
+    ASSUME_PROVIDED \
+    BBLAYERS \
+    DEPLOY_DIR_IMAGE \
+    FAKEROOTCMD \
+    HOSTTOOLS_DIR \
+    IMAGE_BASENAME \
+    IMAGE_BOOT_FILES \
+    IMAGE_EFI_BOOT_FILES \
+    IMAGE_LINK_NAME \
+    IMAGE_ROOTFS \
+    IMGDEPLOYDIR \
+    INITRAMFS_FSTYPES \
+    INITRAMFS_IMAGE \
+    INITRAMFS_IMAGE_BUNDLE \
+    INITRAMFS_LINK_NAME \
+    INITRD \
+    INITRD_LIVE \
+    ISODIR \
+    KERNEL_IMAGETYPE \
+    MACHINE \
+    PSEUDO_IGNORE_PATHS \
+    RECIPE_SYSROOT_NATIVE \
+    ROOTFS_SIZE \
+    STAGING_DATADIR \
+    STAGING_DIR \
+    STAGING_LIBDIR \
+    TARGET_SYS \
+"
 
 inherit ${@bb.utils.contains('INITRAMFS_IMAGE_BUNDLE', '1', 'kernel-artifact-names', '', d)}
diff --git a/poky/meta/classes/insane.bbclass b/poky/meta/classes/insane.bbclass
index 20d4e4d74..f2d2ca368 100644
--- a/poky/meta/classes/insane.bbclass
+++ b/poky/meta/classes/insane.bbclass
@@ -780,7 +780,7 @@ def package_qa_check_rdepends(pkg, pkgdest, skip, taskdeps, packages, d):
             filerdepends = {}
             rdep_data = oe.packagedata.read_subpkgdata(pkg, d)
             for key in rdep_data:
-                if key.startswith("FILERDEPENDS_"):
+                if key.startswith("FILERDEPENDS:"):
                     for subkey in bb.utils.explode_deps(rdep_data[key]):
                         if subkey not in ignored_file_rdeps and \
                             not subkey.startswith('perl('):
diff --git a/poky/meta/classes/meta.bbclass b/poky/meta/classes/meta.bbclass
deleted file mode 100644
index 5e6890238..000000000
--- a/poky/meta/classes/meta.bbclass
+++ /dev/null
@@ -1,4 +0,0 @@
-
-PACKAGES = ""
-
-do_build[recrdeptask] = "do_build"
diff --git a/poky/meta/classes/mirrors.bbclass b/poky/meta/classes/mirrors.bbclass
index 87bba4147..ba325a658 100644
--- a/poky/meta/classes/mirrors.bbclass
+++ b/poky/meta/classes/mirrors.bbclass
@@ -70,7 +70,6 @@ ${CPAN_MIRROR} http://search.cpan.org/CPAN/ \n \
 MIRRORS += "\
 git://salsa.debian.org/.* git://salsa.debian.org/PATH;protocol=https \n \
 git://git.gnome.org/.* git://gitlab.gnome.org/GNOME/PATH;protocol=https \n \
-git://git.savannah.gnu.org/.* git://git.savannah.gnu.org/git/PATH;protocol=https \n \
-git://git.yoctoproject.org/.* git://git.yoctoproject.org/git/PATH;protocol=https \n \
 git://.*/.* git://HOST/PATH;protocol=https \n \
+git://.*/.* git://HOST/git/PATH;protocol=https \n \
 "
diff --git a/poky/meta/classes/multilib.bbclass b/poky/meta/classes/multilib.bbclass
index 3cbda5d80..b210c49c0 100644
--- a/poky/meta/classes/multilib.bbclass
+++ b/poky/meta/classes/multilib.bbclass
@@ -196,7 +196,7 @@ PACKAGEFUNCS:append = " do_package_qa_multilib"
 
 python do_package_qa_multilib() {
     def check_mlprefix(pkg, var, mlprefix):
-        values = bb.utils.explode_deps(d.getVar('%s_%s' % (var, pkg)) or d.getVar(var) or "")
+        values = bb.utils.explode_deps(d.getVar('%s:%s' % (var, pkg)) or d.getVar(var) or "")
         candidates = []
         for i in values:
             if i.startswith('virtual/'):
diff --git a/poky/meta/classes/package.bbclass b/poky/meta/classes/package.bbclass
index c4c5515d5..460997ad5 100644
--- a/poky/meta/classes/package.bbclass
+++ b/poky/meta/classes/package.bbclass
@@ -1224,6 +1224,14 @@ python split_and_strip_files () {
             # Modified the file so clear the cache
             cpath.updatecache(file)
 
+    def strip_pkgd_prefix(f):
+        nonlocal dvar
+
+        if f.startswith(dvar):
+            return f[len(dvar):]
+
+        return f
+
     #
     # First lets process debug splitting
     #
@@ -1237,6 +1245,8 @@ python split_and_strip_files () {
             for file in staticlibs:
                 results.append( (file,source_info(file, d)) )
 
+        d.setVar("PKGDEBUGSOURCES", {strip_pkgd_prefix(f): sorted(s) for f, s in results})
+
         sources = set()
         for r in results:
             sources.update(r[1])
@@ -1549,6 +1559,7 @@ PKGDATA_VARS = "PN PE PV PR PKGE PKGV PKGR LICENSE DESCRIPTION SUMMARY RDEPENDS
 python emit_pkgdata() {
     from glob import glob
     import json
+    import bb.compress.zstd
 
     def process_postinst_on_target(pkg, mlprefix):
         pkgval = d.getVar('PKG:%s' % pkg)
@@ -1621,6 +1632,8 @@ fi
     with open(data_file, 'w') as fd:
         fd.write("PACKAGES: %s\n" % packages)
 
+    pkgdebugsource = d.getVar("PKGDEBUGSOURCES") or []
+
     pn = d.getVar('PN')
     global_variants = (d.getVar('MULTILIB_GLOBAL_VARIANTS') or "").split()
     variants = (d.getVar('MULTILIB_VARIANTS') or "").split()
@@ -1640,17 +1653,32 @@ fi
         pkgval = pkg
         d.setVar('PKG:%s' % pkg, pkg)
 
+        extended_data = {
+            "files_info": {}
+        }
+
         pkgdestpkg = os.path.join(pkgdest, pkg)
         files = {}
+        files_extra = {}
         total_size = 0
         seen = set()
         for f in pkgfiles[pkg]:
-            relpth = os.path.relpath(f, pkgdestpkg)
+            fpath = os.sep + os.path.relpath(f, pkgdestpkg)
+
             fstat = os.lstat(f)
-            files[os.sep + relpth] = fstat.st_size
+            files[fpath] = fstat.st_size
+
+            extended_data["files_info"].setdefault(fpath, {})
+            extended_data["files_info"][fpath]['size'] = fstat.st_size
+
            if fstat.st_ino not in seen:
                seen.add(fstat.st_ino)
                total_size += fstat.st_size
+
+            if fpath in pkgdebugsource:
+                extended_data["files_info"][fpath]['debugsrc'] = pkgdebugsource[fpath]
+                del pkgdebugsource[fpath]
+
         d.setVar('FILES_INFO:' + pkg , json.dumps(files, sort_keys=True))
 
         process_postinst_on_target(pkg, d.getVar("MLPREFIX"))
@@ -1662,15 +1690,20 @@ fi
             val = write_if_exists(sf, pkg, var)
 
         write_if_exists(sf, pkg, 'FILERPROVIDESFLIST')
-        for dfile in (d.getVar('FILERPROVIDESFLIST:' + pkg) or "").split():
+        for dfile in sorted((d.getVar('FILERPROVIDESFLIST:' + pkg) or "").split()):
            write_if_exists(sf, pkg, 'FILERPROVIDES:' + dfile)
 
        write_if_exists(sf, pkg, 'FILERDEPENDSFLIST')
-        for dfile in (d.getVar('FILERDEPENDSFLIST:' + pkg) or "").split():
+        for dfile in sorted((d.getVar('FILERDEPENDSFLIST:' + pkg) or "").split()):
            write_if_exists(sf, pkg, 'FILERDEPENDS:' + dfile)
 
        sf.write('%s:%s: %d\n' % ('PKGSIZE', pkg, total_size))
 
+        subdata_extended_file = pkgdatadir + "/extended/%s.json.zstd" % pkg
+        num_threads = int(d.getVar("BB_NUMBER_THREADS"))
+        with bb.compress.zstd.open(subdata_extended_file, "wt", encoding="utf-8", num_threads=num_threads) as f:
+            json.dump(extended_data, f, sort_keys=True, separators=(",", ":"))
+
        # Symlinks needed for rprovides lookup
        rprov = d.getVar('RPROVIDES:%s' % pkg) or d.getVar('RPROVIDES')
        if rprov:
@@ -1701,7 +1734,8 @@ fi
        write_extra_runtime_pkgs(global_variants, packages, pkgdatadir)
 }
 
-emit_pkgdata[dirs] = "${PKGDESTWORK}/runtime ${PKGDESTWORK}/runtime-reverse ${PKGDESTWORK}/runtime-rprovides"
+emit_pkgdata[dirs] = "${PKGDESTWORK}/runtime ${PKGDESTWORK}/runtime-reverse ${PKGDESTWORK}/runtime-rprovides ${PKGDESTWORK}/extended"
+emit_pkgdata[vardepsexclude] = "BB_NUMBER_THREADS"
 
 ldconfig_postinst_fragment() {
 	if [ x"$D" = "x" ]; then
@@ -1763,9 +1797,9 @@ python package_do_filedeps() {
            d.appendVar(key, " " + " ".join(requires[file]))
 
    for pkg in requires_files:
-        d.setVar("FILERDEPENDSFLIST:" + pkg, " ".join(requires_files[pkg]))
+        d.setVar("FILERDEPENDSFLIST:" + pkg, " ".join(sorted(requires_files[pkg])))
    for pkg in provides_files:
-        d.setVar("FILERPROVIDESFLIST:" + pkg, " ".join(provides_files[pkg]))
+        d.setVar("FILERPROVIDESFLIST:" + pkg, " ".join(sorted(provides_files[pkg])))
 }
 
 SHLIBSDIRS = "${WORKDIR_PKGDATA}/${MLPREFIX}shlibs2"
diff --git a/poky/meta/classes/package_deb.bbclass b/poky/meta/classes/package_deb.bbclass
index eca43e178..65dbe6c39 100644
--- a/poky/meta/classes/package_deb.bbclass
+++ b/poky/meta/classes/package_deb.bbclass
@@ -315,10 +315,8 @@ python do_package_write_deb () {
 do_package_write_deb[dirs] = "${PKGWRITEDIRDEB}"
 do_package_write_deb[cleandirs] = "${PKGWRITEDIRDEB}"
 do_package_write_deb[depends] += "${@oe.utils.build_depends_string(d.getVar('PACKAGE_WRITE_DEPS'), 'do_populate_sysroot')}"
-addtask package_write_deb after do_packagedata do_package
-
+EPOCHTASK ??= ""
+addtask package_write_deb after do_packagedata do_package ${EPOCHTASK} before do_build
 
 PACKAGEINDEXDEPS += "dpkg-native:do_populate_sysroot"
 PACKAGEINDEXDEPS += "apt-native:do_populate_sysroot"
-
-do_build[recrdeptask] += "do_package_write_deb"
diff --git a/poky/meta/classes/package_ipk.bbclass b/poky/meta/classes/package_ipk.bbclass
index c3b53854e..4fcb25ca7 100644
--- a/poky/meta/classes/package_ipk.bbclass
+++ b/poky/meta/classes/package_ipk.bbclass
@@ -274,9 +274,8 @@ python do_package_write_ipk () {
 do_package_write_ipk[dirs] = "${PKGWRITEDIRIPK}"
 do_package_write_ipk[cleandirs] = "${PKGWRITEDIRIPK}"
 do_package_write_ipk[depends] += "${@oe.utils.build_depends_string(d.getVar('PACKAGE_WRITE_DEPS'), 'do_populate_sysroot')}"
-addtask package_write_ipk after do_packagedata do_package
+EPOCHTASK ??= ""
+addtask package_write_ipk after do_packagedata do_package ${EPOCHTASK} before do_build
 
 PACKAGEINDEXDEPS += "opkg-utils-native:do_populate_sysroot"
 PACKAGEINDEXDEPS += "opkg-native:do_populate_sysroot"
-
-do_build[recrdeptask] += "do_package_write_ipk"
diff --git a/poky/meta/classes/package_rpm.bbclass b/poky/meta/classes/package_rpm.bbclass
index 88d861c0e..1c62d8283 100644
--- a/poky/meta/classes/package_rpm.bbclass
+++ b/poky/meta/classes/package_rpm.bbclass
@@ -748,9 +748,8 @@ python do_package_write_rpm () {
 do_package_write_rpm[dirs] = "${PKGWRITEDIRRPM}"
 do_package_write_rpm[cleandirs] = "${PKGWRITEDIRRPM}"
 do_package_write_rpm[depends] += "${@oe.utils.build_depends_string(d.getVar('PACKAGE_WRITE_DEPS'), 'do_populate_sysroot')}"
-addtask package_write_rpm after do_packagedata do_package
+EPOCHTASK ??= ""
+addtask package_write_rpm after do_packagedata do_package ${EPOCHTASK} before do_build
 
 PACKAGEINDEXDEPS += "rpm-native:do_populate_sysroot"
 PACKAGEINDEXDEPS += "createrepo-c-native:do_populate_sysroot"
-
-do_build[recrdeptask] += "do_package_write_rpm"
diff --git a/poky/meta/classes/populate_sdk_base.bbclass b/poky/meta/classes/populate_sdk_base.bbclass
index ccfe22328..49e166e69 100644
--- a/poky/meta/classes/populate_sdk_base.bbclass
+++ b/poky/meta/classes/populate_sdk_base.bbclass
@@ -1,4 +1,6 @@
-inherit meta image-postinst-intercepts image-artifact-names
+PACKAGES = ""
+
+inherit image-postinst-intercepts image-artifact-names
 
 # Wildcards specifying complementary packages to install for every package that has been explicitly
 # installed into the rootfs
diff --git a/poky/meta/classes/reproducible_build.bbclass b/poky/meta/classes/reproducible_build.bbclass
index 378121903..89f645b85 100644
--- a/poky/meta/classes/reproducible_build.bbclass
+++ b/poky/meta/classes/reproducible_build.bbclass
@@ -1,17 +1,38 @@
 # reproducible_build.bbclass
 #
-# Sets SOURCE_DATE_EPOCH in each component's build environment.
+# Sets the default SOURCE_DATE_EPOCH in each component's build environment.
+# The format is number of seconds since the system epoch.
+#
 # Upstream components (generally) respect this environment variable,
 # using it in place of the "current" date and time.
 # See https://reproducible-builds.org/specs/source-date-epoch/
 #
-# After sources are unpacked but before they are patched, we set a reproducible value for SOURCE_DATE_EPOCH.
-# This value should be reproducible for anyone who builds the same revision from the same sources.
+# The default value of SOURCE_DATE_EPOCH comes from the function
+# get_source_date_epoch_value which reads from the SDE_FILE, or if the file
+# is not available (or set to 0) will use the fallback of
+# SOURCE_DATE_EPOCH_FALLBACK.
+#
+# The SDE_FILE is normally constructed from the function
+# create_source_date_epoch_stamp which is typically added as a postfunc to
+# the do_unpack task. If a recipe does NOT have do_unpack, it should be added
+# to a task that runs after the source is available and before the
+# do_deploy_source_date_epoch task is executed.
+#
+# If a recipe wishes to override the default behavior it should set its own
+# SOURCE_DATE_EPOCH or override the do_deploy_source_date_epoch_stamp task
+# with recipe-specific functionality to write the appropriate
+# SOURCE_DATE_EPOCH into the SDE_FILE.
 #
-# There are 4 ways we determine SOURCE_DATE_EPOCH:
+# SOURCE_DATE_EPOCH is intended to be a reproducible value. This value should
+# be reproducible for anyone who builds the same revision from the same
+# sources.
+#
+# There are 4 ways the create_source_date_epoch_stamp function determines what
+# becomes SOURCE_DATE_EPOCH:
 #
 # 1. Use the value from __source_date_epoch.txt file if this file exists.
-#    This file was most likely created in the previous build by one of the following methods 2,3,4.
+#    This file was most likely created in the previous build by one of the
+#    following methods 2,3,4.
 #    Alternatively, it can be provided by a recipe via SRC_URI.
 #
 # If the file does not exist:
@@ -22,20 +43,16 @@
 # 3. Use the mtime of "known" files such as NEWS, CHANGLELOG, ...
 #    This works for well-kept repositories distributed via tarball.
 #
-# 4. Use the modification time of the youngest file in the source tree, if there is one.
+# 4. Use the modification time of the youngest file in the source tree, if
+#    there is one.
 #    This will be the newest file from the distribution tarball, if any.
 #
-# 5. Fall back to a fixed timestamp.
-#
-# Once the value of SOURCE_DATE_EPOCH is determined, it is stored in the recipe's SDE_FILE.
-# If none of these mechanisms are suitable, replace the do_deploy_source_date_epoch task
-# with recipe-specific functionality to write the appropriate SOURCE_DATE_EPOCH into the SDE_FILE.
+# 5. Fall back to a fixed timestamp (SOURCE_DATE_EPOCH_FALLBACK).
 #
-# If this file is found by other tasks, the value is exported in the SOURCE_DATE_EPOCH variable.
-# SOURCE_DATE_EPOCH is set for all tasks that might use it (do_configure, do_compile, do_package, ...)
+# Once the value is determined, it is stored in the recipe's SDE_FILE.
 
 BUILD_REPRODUCIBLE_BINARIES ??= '1'
-inherit ${@oe.utils.ifelse(d.getVar('BUILD_REPRODUCIBLE_BINARIES') == '1', 'reproducible_build_simple', '')}
+inherit reproducible_build_simple
 
 SDE_DIR = "${WORKDIR}/source-date-epoch"
 SDE_FILE = "${SDE_DIR}/__source_date_epoch.txt"
@@ -92,6 +109,11 @@ python create_source_date_epoch_stamp() {
     os.rename(tmp_file, epochfile)
 }
 
+EPOCHTASK = "do_deploy_source_date_epoch"
+
+# Generate the stamp after do_unpack runs
+do_unpack[postfuncs] += "create_source_date_epoch_stamp"
+
 def get_source_date_epoch_value(d):
     cached = d.getVar('__CACHED_SOURCE_DATE_EPOCH')
     if cached:
@@ -120,8 +142,3 @@ def get_source_date_epoch_value(d):
 
 export SOURCE_DATE_EPOCH ?= "${@get_source_date_epoch_value(d)}"
 BB_HASHBASE_WHITELIST += "SOURCE_DATE_EPOCH"
-
-python () {
-    if d.getVar('BUILD_REPRODUCIBLE_BINARIES') == '1':
-        d.appendVarFlag("do_unpack", "postfuncs", " create_source_date_epoch_stamp")
-}
diff --git a/poky/meta/classes/rootfs-postcommands.bbclass b/poky/meta/classes/rootfs-postcommands.bbclass
index c5746eba1..7fe9e3d8c 100644
--- a/poky/meta/classes/rootfs-postcommands.bbclass
+++ b/poky/meta/classes/rootfs-postcommands.bbclass
@@ -218,8 +218,8 @@ postinst_enable_logging () {
 # Modify systemd default target
 #
 set_systemd_default_target () {
-	if [ -d ${IMAGE_ROOTFS}${sysconfdir}/systemd/system -a -e ${IMAGE_ROOTFS}${systemd_unitdir}/system/${SYSTEMD_DEFAULT_TARGET} ]; then
-		ln -sf ${systemd_unitdir}/system/${SYSTEMD_DEFAULT_TARGET} ${IMAGE_ROOTFS}${sysconfdir}/systemd/system/default.target
+	if [ -d ${IMAGE_ROOTFS}${sysconfdir}/systemd/system -a -e ${IMAGE_ROOTFS}${systemd_system_unitdir}/${SYSTEMD_DEFAULT_TARGET} ]; then
+		ln -sf ${systemd_system_unitdir}/${SYSTEMD_DEFAULT_TARGET} ${IMAGE_ROOTFS}${sysconfdir}/systemd/system/default.target
 	fi
 }
diff --git a/poky/meta/classes/rust-common.bbclass b/poky/meta/classes/rust-common.bbclass
index a8803d61b..f7f9cbbb2 100644
--- a/poky/meta/classes/rust-common.bbclass
+++ b/poky/meta/classes/rust-common.bbclass
@@ -176,5 +176,5 @@ do_rust_create_wrappers () {
 	${BUILD_CC} ${COREBASE}/meta/files/rust-ccld-wrapper.c -o ${RUST_TARGET_CCLD}
 }
 
-addtask rust_create_wrappers before do_configure after do_patch
+addtask rust_create_wrappers before do_configure after do_patch do_prepare_recipe_sysroot
 do_rust_create_wrappers[dirs] += "${WRAPPER_DIR}"
diff --git a/poky/meta/classes/siteinfo.bbclass b/poky/meta/classes/siteinfo.bbclass
index 0bd1f3680..c5f4dfda4 100644
--- a/poky/meta/classes/siteinfo.bbclass
+++ b/poky/meta/classes/siteinfo.bbclass
@@ -176,17 +176,39 @@ python () {
         bb.fatal("Please add your architecture to siteinfo.bbclass")
 }
 
-def siteinfo_get_files(d, sysrootcache = False):
+# Layers with siteconfig need to add a replacement path to this variable so the
+# sstate isn't path specific
+SITEINFO_PATHVARS = "COREBASE"
+
+def siteinfo_get_files(d, sysrootcache=False):
     sitedata = siteinfo_data(d)
-    sitefiles = ""
+    sitefiles = []
+    searched = []
     for path in d.getVar("BBPATH").split(":"):
         for element in sitedata:
             filename = os.path.join(path, "site", element)
             if os.path.exists(filename):
-                sitefiles += filename + " "
+                searched.append(filename + ":True")
+                sitefiles.append(filename)
+            else:
+                searched.append(filename + ":False")
+
+    # Have to parameterise out hardcoded paths such as COREBASE for the main site files
+    for var in d.getVar("SITEINFO_PATHVARS").split():
+        searched2 = []
+        replace = os.path.normpath(d.getVar(var))
+        for s in searched:
+            searched2.append(s.replace(replace, "${" + var + "}"))
+        searched = searched2
+
+    if bb.data.inherits_class('native', d) or bb.data.inherits_class('cross', d) or bb.data.inherits_class('crosssdk', d):
+        # We need sstate sigs for native/cross not to vary upon arch so we can't depend on the site files.
+        # In future we may want to depend upon all site files?
+        # This would show up as breaking sstatetests.SStateTests.test_sstate_32_64_same_hash for example
+        searched = []
 
     if not sysrootcache:
-        return sitefiles
+        return sitefiles, searched
 
     # Now check for siteconfig cache files in sysroots
     path_siteconfig = d.getVar('SITECONFIG_SYSROOTCACHE')
@@ -195,8 +217,8 @@ def siteinfo_get_files(d, sysrootcache = False):
         if not i.endswith("_config"):
             continue
         filename = os.path.join(path_siteconfig, i)
-        sitefiles += filename + " "
-    return sitefiles
+        sitefiles.append(filename)
+    return sitefiles, searched
 
 #
 # Make some information available via variables
diff --git a/poky/meta/classes/sstate.bbclass b/poky/meta/classes/sstate.bbclass
index d7f1b3f26..92a73114b 100644
--- a/poky/meta/classes/sstate.bbclass
+++ b/poky/meta/classes/sstate.bbclass
@@ -640,10 +640,21 @@ python sstate_hardcode_path () {
 
 def sstate_package(ss, d):
     import oe.path
+    import time
 
     tmpdir = d.getVar('TMPDIR')
 
+    fixtime = False
+    if ss['task'] == "package":
+        fixtime = True
+
+    def fixtimestamp(root, path):
+        f = os.path.join(root, path)
+        if os.lstat(f).st_mtime > sde:
+            os.utime(f, (sde, sde), follow_symlinks=False)
+
     sstatebuild = d.expand("${WORKDIR}/sstate-build-%s/" % ss['task'])
+    sde = int(d.getVar("SOURCE_DATE_EPOCH") or time.time())
     d.setVar("SSTATE_CURRTASK", ss['task'])
     bb.utils.remove(sstatebuild, recurse=True)
     bb.utils.mkdirhier(sstatebuild)
@@ -656,6 +667,8 @@ def sstate_package(ss, d):
         # to sstate tasks but there aren't many of these so better just avoid them entirely.
         for walkroot, dirs, files in os.walk(state[1]):
             for file in files + dirs:
+                if fixtime:
+                    fixtimestamp(walkroot, file)
                 srcpath = os.path.join(walkroot, file)
                 if not os.path.islink(srcpath):
                     continue
@@ -677,6 +690,11 @@ def sstate_package(ss, d):
             bb.utils.mkdirhier(plain)
             bb.utils.mkdirhier(pdir)
             bb.utils.rename(plain, pdir)
+            if fixtime:
+                fixtimestamp(pdir, "")
+                for walkroot, dirs, files in os.walk(pdir):
+                    for file in files + dirs:
+                        fixtimestamp(walkroot, file)
 
     d.setVar('SSTATE_BUILDDIR', sstatebuild)
     d.setVar('SSTATE_INSTDIR', sstatebuild)
@@ -731,6 +749,7 @@ def pstaging_fetch(sstatefetch, d):
     localdata.setVar('FILESPATH', dldir)
     localdata.setVar('DL_DIR', dldir)
     localdata.setVar('PREMIRRORS', mirrors)
+    localdata.setVar('SRCPV', d.getVar('SRCPV'))
 
     # if BB_NO_NETWORK is set but we also have SSTATE_MIRROR_ALLOW_NETWORK,
     # we'll want to allow network access for the current set of fetches.
@@ -755,6 +774,9 @@ def pstaging_fetch(sstatefetch, d):
         except bb.fetch2.BBFetchException:
             pass
 
+pstaging_fetch[vardepsexclude] += "SRCPV"
+
+
 def sstate_setscene(d):
     shared_state = sstate_state_fromvars(d)
     accelerate = sstate_installpkg(shared_state, d)
@@ -1037,15 +1059,13 @@ def setscene_depvalid(task, taskdependees, notneeded, d, log=None):
 
     logit("Considering setscene task: %s" % (str(taskdependees[task])), log)
 
+    directtasks = ["do_populate_lic", "do_deploy_source_date_epoch", "do_shared_workdir", "do_stash_locale", "do_gcc_stash_builddir"]
+
     def isNativeCross(x):
         return x.endswith("-native") or "-cross-" in x or "-crosssdk" in x or x.endswith("-cross")
 
-    # We only need to trigger populate_lic through direct dependencies
-    if taskdependees[task][1] == "do_populate_lic":
-        return True
-
-    # stash_locale and gcc_stash_builddir are never needed as a dependency for built objects
-    if taskdependees[task][1] == "do_stash_locale" or taskdependees[task][1] == "do_gcc_stash_builddir":
+    # We only need to trigger deploy_source_date_epoch through direct dependencies
+    if taskdependees[task][1] in directtasks:
         return True
 
     # We only need to trigger packagedata through direct dependencies
@@ -1068,8 +1088,8 @@ def setscene_depvalid(task, taskdependees, notneeded, d, log=None):
         # do_package_write_* need do_populate_sysroot as they're mainly postinstall dependencies
         if taskdependees[task][1] == "do_populate_sysroot" and taskdependees[dep][1] in ['do_package_write_deb', 'do_package_write_ipk', 'do_package_write_rpm']:
             return False
-        # do_package/packagedata/package_qa don't need do_populate_sysroot
-        if taskdependees[task][1] == "do_populate_sysroot" and taskdependees[dep][1] in ['do_package', 'do_packagedata', 'do_package_qa']:
+        # do_package/packagedata/package_qa/deploy don't need do_populate_sysroot
+        if taskdependees[task][1] == "do_populate_sysroot" and taskdependees[dep][1] in ['do_package', 'do_packagedata', 'do_package_qa', 'do_deploy']:
             continue
         # Native/Cross packages don't exist and are noexec anyway
         if isNativeCross(taskdependees[dep][0]) and taskdependees[dep][1] in ['do_package_write_deb', 'do_package_write_ipk', 'do_package_write_rpm', 'do_packagedata', 'do_package', 'do_package_qa']:
@@ -1117,13 +1137,9 @@ def setscene_depvalid(task, taskdependees, notneeded, d, log=None):
             # Target populate_sysroot need their dependencies
             return False
 
-        if taskdependees[task][1] == 'do_shared_workdir':
+        if taskdependees[dep][1] in directtasks:
             continue
 
-        if taskdependees[dep][1] == "do_populate_lic":
-            continue
-
-        # Safe fallthrough default
         logit(" Default setscene dependency fall through due to dependency: %s" % (str(taskdependees[dep])), log)
         return False
diff --git a/poky/meta/classes/staging.bbclass b/poky/meta/classes/staging.bbclass
index af3397bab..65a6cd512 100644
--- a/poky/meta/classes/staging.bbclass
+++ b/poky/meta/classes/staging.bbclass
@@ -306,6 +306,7 @@ python extend_recipe_sysroot() {
     sstatetasks = d.getVar("SSTATETASKS").split()
     # Add recipe specific tasks referenced by setscene_depvalid()
     sstatetasks.append("do_stash_locale")
+    sstatetasks.append("do_deploy")
 
     def print_dep_tree(deptree):
         data = ""
diff --git a/poky/meta/classes/testimage.bbclass b/poky/meta/classes/testimage.bbclass
index 3c689aec9..a76e77385 100644
--- a/poky/meta/classes/testimage.bbclass
+++ b/poky/meta/classes/testimage.bbclass
@@ -64,8 +64,6 @@ BASICTESTSUITE = "\
 
 DEFAULT_TEST_SUITES = "${BASICTESTSUITE}"
 
-# aarch64 has no graphics
-DEFAULT_TEST_SUITES:remove:aarch64 = "xorg"
 # musl doesn't support systemtap
 DEFAULT_TEST_SUITES:remove:libc-musl = "stap"
@@ -201,6 +199,7 @@ def testimage_main(d):
     import json
     import signal
     import logging
+    import shutil
 
     from bb.utils import export_proxies
     from oeqa.core.utils.misc import updateTestData
@@ -408,10 +407,17 @@ def testimage_main(d):
                 get_testimage_result_id(configuration),
                 dump_streams=d.getVar('TESTREPORT_FULLLOGS'))
         results.logSummary(pn)
+
+        # Copy additional logs to tmp/log/oeqa so it's easier to find them
+        targetdir = os.path.join(get_testimage_json_result_dir(d), d.getVar("PN"))
+        os.makedirs(targetdir, exist_ok=True)
+        os.symlink(bootlog, os.path.join(targetdir, os.path.basename(bootlog)))
+        os.symlink(d.getVar("BB_LOGFILE"), os.path.join(targetdir, os.path.basename(d.getVar("BB_LOGFILE") + "." + d.getVar('DATETIME'))))
+
     if not results or not complete:
-        bb.fatal('%s - FAILED - tests were interrupted during execution' % pn, forcelog=True)
+        bb.fatal('%s - FAILED - tests were interrupted during execution, check the logs in %s' % (pn, d.getVar("LOG_DIR")), forcelog=True)
     if not results.wasSuccessful():
-        bb.fatal('%s - FAILED - check the task log and the ssh log' % pn, forcelog=True)
+        bb.fatal('%s - FAILED - also check the logs in %s' % (pn, d.getVar("LOG_DIR")), forcelog=True)
 
 def get_runtime_paths(d):
     """
diff --git a/poky/meta/classes/toolchain-scripts.bbclass b/poky/meta/classes/toolchain-scripts.bbclass
index 479f3b706..fb6261c91 100644
--- a/poky/meta/classes/toolchain-scripts.bbclass
+++ b/poky/meta/classes/toolchain-scripts.bbclass
@@ -65,6 +65,7 @@ toolchain_create_sdk_env_script () {
 
 # This function creates an environment-setup-script in the TMPDIR which enables
 # a OE-core IDE to integrate with the build tree
+# Caller must ensure CONFIG_SITE is setup
 toolchain_create_tree_env_script () {
 	script=${TMPDIR}/environment-setup-${REAL_MULTIMACH_TARGET_SYS}
 	rm -f $script
@@ -73,7 +74,7 @@ toolchain_create_tree_env_script () {
 	echo 'export PATH=${STAGING_DIR_NATIVE}/usr/bin:${STAGING_BINDIR_TOOLCHAIN}:$PATH' >> $script
 	echo 'export PKG_CONFIG_SYSROOT_DIR=${PKG_CONFIG_SYSROOT_DIR}' >> $script
 	echo 'export PKG_CONFIG_PATH=${PKG_CONFIG_PATH}' >> $script
-	echo 'export CONFIG_SITE="${@siteinfo_get_files(d)}"' >> $script
+	echo 'export CONFIG_SITE="${CONFIG_SITE}"' >> $script
 	echo 'export SDKTARGETSYSROOT=${STAGING_DIR_TARGET}' >> $script
 	echo 'export OECORE_NATIVE_SYSROOT="${STAGING_DIR_NATIVE}"' >> $script
 	echo 'export OECORE_TARGET_SYSROOT="${STAGING_DIR_TARGET}"' >> $script
@@ -161,7 +162,7 @@ EOF
 }
 
 #we get the cached site config in the runtime
-TOOLCHAIN_CONFIGSITE_NOCACHE = "${@siteinfo_get_files(d)}"
+TOOLCHAIN_CONFIGSITE_NOCACHE = "${@' '.join(siteinfo_get_files(d)[0])}"
 TOOLCHAIN_CONFIGSITE_SYSROOTCACHE = "${STAGING_DIR}/${MLPREFIX}${MACHINE}/${target_datadir}/${TARGET_SYS}_config_site.d"
 TOOLCHAIN_NEED_CONFIGSITE_CACHE ??= "virtual/${MLPREFIX}libc ncurses"
 DEPENDS += "${TOOLCHAIN_NEED_CONFIGSITE_CACHE}"
diff --git a/poky/meta/classes/useradd.bbclass b/poky/meta/classes/useradd.bbclass
index 287ef8433..20771a0ce 100644
--- a/poky/meta/classes/useradd.bbclass
+++ b/poky/meta/classes/useradd.bbclass
@@ -230,6 +230,10 @@ fakeroot python populate_packages:prepend () {
         preinst += 'perform_useradd () {\n%s}\n' % d.getVar('perform_useradd')
         preinst += 'perform_groupmems () {\n%s}\n' % d.getVar('perform_groupmems')
         preinst += d.getVar('useradd_preinst')
+        # Expand out the *_PARAM variables to the package specific versions
+        for rep in ["GROUPADD_PARAM", "USERADD_PARAM", "GROUPMEMS_PARAM"]:
+            val = d.getVar(rep + ":" + pkg) or ""
+            preinst = preinst.replace("${" + rep + "}", val)
         d.setVar('pkg_preinst:%s' % pkg, preinst)
 
         # RDEPENDS setup
diff --git a/poky/meta/classes/utils.bbclass b/poky/meta/classes/utils.bbclass
index 072ea1f63..b4eb3d38a 100644
--- a/poky/meta/classes/utils.bbclass
+++ b/poky/meta/classes/utils.bbclass
@@ -30,7 +30,6 @@ oe_libinstall() {
 	silent=""
 	require_static=""
 	require_shared=""
-	staging_install=""
 	while [ "$#" -gt 0 ]; do
 		case "$1" in
 		-C)
@@ -62,10 +61,6 @@ oe_libinstall() {
 	if [ -z "$destpath" ]; then
 		bbfatal "oe_libinstall: no destination path specified"
 	fi
-	if echo "$destpath/" | egrep '^${STAGING_LIBDIR}/' >/dev/null
-	then
-		staging_install=1
-	fi
 
 	__runcmd () {
 		if [ -z "$silent" ]; then
@@ -159,36 +154,6 @@ oe_libinstall() {
 	__runcmd cd "$olddir"
 }
 
-oe_machinstall() {
-	# Purpose: Install machine dependent files, if available
-	#          If not available, check if there is a default
-	#          If no default, just touch the destination
-	# Example:
-	#                $1  $2   $3         $4
-	# oe_machinstall -m 0644 fstab ${D}/etc/fstab
-	#
-	# TODO: Check argument number?
-	#
-	filename=`basename $3`
-	dirname=`dirname $3`
-
-	for o in `echo ${OVERRIDES} | tr ':' ' '`; do
-		if [ -e $dirname/$o/$filename ]; then
-			bbnote $dirname/$o/$filename present, installing to $4
-			install $1 $2 $dirname/$o/$filename $4
-			return
-		fi
-	done
-#	bbnote overrides specific file NOT present, trying default=$3...
-	if [ -e $3 ]; then
-		bbnote $3 present, installing to $4
-		install $1 $2 $3 $4
-	else
-		bbnote $3 NOT present, touching empty $4
-		touch $4
-	fi
-}
-
 create_cmdline_wrapper () {
 	# Create a wrapper script where commandline options are needed
 	#
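Similarly, the extended pkgdata that emit_pkgdata now writes as ${PKGDESTWORK}/extended/<pkg>.json.zstd (via bb.compress.zstd, which produces ordinary zstd frames) can be inspected outside of BitBake. A minimal sketch, again assuming the third-party "zstandard" module and an illustrative path:

    import json
    import zstandard

    def read_extended_pkgdata(path):
        # The file is a zstd-compressed JSON object with a "files_info"
        # mapping of file path -> {"size": ..., "debugsrc": [...]}.
        with open(path, "rb") as f:
            data = zstandard.ZstdDecompressor().stream_reader(f).read()
        return json.loads(data)

    info = read_extended_pkgdata("pkgdata/extended/busybox.json.zstd")  # hypothetical path
    for filename, details in info["files_info"].items():
        print(filename, details.get("size"), details.get("debugsrc", []))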