author    Patrick Williams <patrick@stwcx.xyz>  2024-12-14 02:56:42 +0300
committer Patrick Williams <patrick@stwcx.xyz>  2024-12-14 04:38:25 +0300
commit    e73366c8bab752f44899222f9df7ce7ed080f2e9 (patch)
tree      57ae1423728ade061bb318ab6413a18e1afb9c20 /poky/meta/lib/oe
parent    1d19bb6db66dd40f999dbfcd25be489aa4ecd0b3 (diff)
download  openbmc-styhead.tar.xz

subtree updates (styhead)

poky: subtree update:5d88faa0f3..ecd195a3ef Aditya Tayade (1): e2fsprogs: removed 'sed -u' option Adrian Freihofer (12): oe-selftest: fitimage refactor u-boot-tools-native oe-selftest: fitimage drop test-mkimage-wrapper oe-selftest: fitimage cleanup asserts oe-selftest: fitimage fix test_initramfs_bundle kernel-fitimage: fix handling of empty default dtb pybootchartgui.py: python 3.12+ regexes kernel-fitimage: fix intentation kernel-fitimage: fix external dtb check uboot-config: fix devtool modify with kernel-fitimage devtool: modify kernel adds append twice devtool: remove obsolete SRCTREECOVEREDTASKS handling cml1: add do_savedefconfig Alban Bedel (2): bind: Fix build with the `httpstats` package config enabled util-linux: Add `findmnt` to the bash completion RDEPENDS Alejandro Hernandez Samaniego (1): tclibc-picolibc: Adds a new TCLIBC variant to build with picolibc as C library Aleksandar Nikolic (2): cve-check: Introduce CVE_CHECK_MANIFEST_JSON_SUFFIX scripts/install-buildtools: Update to 5.1 Alessandro Pecugi (1): runqemu: add sd card device Alexander Kanavin (100): perf: drop newt from tui build requirements libnewt: move to meta-oe python3: submit deterministic_imports.patch upstream as a ticket glib-networking: submit eagain.patch upstream psmisc: merge .inc into .bb psmisc: drop duplicate entries psmisc: remove 0001-Use-UINTPTR_MAX-instead-of-__WORDSIZE.patch openssh: drop add-test-support-for-busybox.patch libfm-extra: drop unneeded 0001-nls.m4-Take-it-from-gettext-0.15.patch glslang: mark 0001-generate-glslang-pkg-config.patch as Inappropriate tcp-wrappers: mark all patches as inactive-upstream automake: mark new_rt_path_for_test-driver.patch as Inappropriate settings-daemon: submit addsoundkeys.patch upstream and update to a revision that has it dpkg: mark patches adding custom non-debian architectures as inappropriate for upstream libacpi: mark patches as inactive-upstream python3: drop deterministic_imports.patch lib/oe/recipeutils: return a dict in get_recipe_upgrade_status() instead of a tuple lib/recipeutils: add a function to determine recipes with shared include files recipeutils/get_recipe_upgrade_status: group recipes when they need to be upgraded together devtool/upgrade: use PN instead of BPN for naming newly created upgraded recipes devtool/upgrade: rename RECIPE_UPDATE_EXTRA_TASKS -> RECIPE_UPGRADE_EXTRA_TASKS python3-jinja2: fix upstream version check ca-certificates: get sources from debian tarballs pulseaudio, desktop-file-utils: correct freedesktop.org -> www.freedesktop.org SRC_URI xf86-video-intel: correct SRC_URI as freedesktop anongit is down python3-cython: correct upstream version check python3-babel: drop custom PYPI settings python3-cython: fix upstream check again sysvinit: take release tarballs from github bash: upgrade 5.2.21 -> 5.2.32 boost: upgrade 1.85.0 -> 1.86.0 ccache: upgrade 4.10.1 -> 4.10.2 cmake: upgrade 3.30.1 -> 3.30.2 dpkg: upgrade 1.22.10 -> 1.22.11 e2fsprogs: upgrade 1.47.0 -> 1.47.1 epiphany: upgrade 46.0 -> 46.3 gstreamer1.0: upgrade 1.24.5 -> 1.24.6 kmod: upgrade 32 -> 33 kmscube: upgrade to latest revision libadwaita: upgrade 1.5.2 -> 1.5.3 libedit: upgrade 20240517-3.1 -> 20240808-3.1 libnl: upgrade 3.9.0 -> 3.10.0 librepo: upgrade 1.17.0 -> 1.18.1 libva: upgrade 2.20.0 -> 2.22.0 linux-firmware: upgrade 20240513 -> 20240811 lua: upgrade 5.4.6 -> 5.4.7 mpg123: upgrade 1.32.6 -> 1.32.7 mtools: upgrade 4.0.43 -> 4.0.44 nghttp2: upgrade 1.62.0 -> 1.62.1 puzzles: upgrade to latest revision python3-dtschema: upgrade 2024.4 -> 2024.5 
python3-uritools: upgrade 4.0.2 -> 4.0.3 python3-webcolors: upgrade 1.13 -> 24.8.0 sqlite3: upgrade 3.45.3 -> 3.46.1 stress-ng: upgrade 0.17.08 -> 0.18.02 webkitgtk: upgrade 2.44.1 -> 2.44.3 weston: upgrade 13.0.1 -> 13.0.3 xkeyboard-config: upgrade 2.41 -> 2.42 xz: upgrade 5.4.6 -> 5.6.2 mesa: set PV from the .inc file and not via filenames meta/lib/oe/sstatesig.py: do not error out if sstate files fail on os.stat() piglit: add a patch to address host contamination for wayland-scanner selftest: always tweak ERROR_QA/WARN_QA per package selftest: use INIT_MANAGER to enable systemd instead of custom settings xmlto: check upstream version tags, not new commits glib-2.0: update 2.80.2 -> 2.80.4 lttng-modules: update 2.13.13 -> 2.13.14 automake: update 1.16.5 -> 1.17 fmt: update 10.2.1 -> 11.0.2 git: 2.45.2 -> 2.46.0 perlcross: update 1.5.2 -> 1.6 perl: update 5.38.2 -> 5.40.0 gnu-config: update to latest revision python3-license-expression: update 30.3.0 -> 30.3.1 python3-pip: 24.0 -> 24.2 python3-pyopenssl: update 24.1.0 -> 24.2.1 python3-pyyaml: update 6.0.1 -> 6.0.2 python3-scons: update 4.7.0 -> 4.8.0 cargo-c-native: update 0.9.30 -> 0.10.3 go-helloworld: update to latest revision vulkan-samples: update to latest revision ffmpeg: update 6.1.1 -> 7.0.2 libksba: update 1.6.6 -> 1.6.7 p11-kit: update 0.25.3 -> 0.25.5 iproute2: upgrade 6.9.0 -> 6.10.0 ifupdown: upgrade 0.8.41 -> 0.8.43 libdnf: upgrade 0.73.2 -> 0.73.3 mmc-utils: upgrade to latest revision adwaita-icon-theme: upgrade 46.0 -> 46.2 hicolor-icon-theme: upgrade 0.17 -> 0.18 waffle: upgrade 1.8.0 -> 1.8.1 libtraceevent: upgrade 1.8.2 -> 1.8.3 alsa-utils: upgrade 1.2.11 -> 1.2.12 lz4: upgrade 1.9.4 -> 1.10.0 vte: upgrade 0.74.2 -> 0.76.3 cracklib: update 2.9.11 -> 2.10.2 selftest/sstatetests: run CDN mirror check only once package_rpm: use zstd's default compression level package_rpm: restrict rpm to 4 threads ref-manual: merge two separate descriptions of RECIPE_UPGRADE_EXTRA_TASKS Alexander Sverdlin (1): linux-firmware: Move Silabs wfx firmware to a separate package Alexandre Belloni (1): oeqa/selftest/oescripts: pinentry update to 1.3.1 Alexis Lothoré (4): oeqa/ssh: allow to retrieve raw, unformatted ouput oeqa/utils/postactions: transfer whole archive over ssh instead of doing individual copies oeqa/postactions: fix exception handling oeqa/postactions: do not uncompress retrieved archive on host Andrew Oppelt (1): testexport: support for executing tests over serial Andrey Zhizhikin (1): devicetree.bbclass: switch away from S = WORKDIR Antonin Godard (38): ref-manual: add missing CVE_CHECK manifest variables ref-manual: add new vex class ref-manual: add new retain class and variables ref-manual: add missing nospdx class ref-manual: add new RECIPE_UPGRADE_EXTRA_TASKS variable ref-manual: drop siteconfig class ref-manual: add missing TESTIMAGE_FAILED_QA_ARTIFACTS ref-manual: add missing image manifest variables ref-manual: add missing EXTERNAL_KERNEL_DEVICETREE variable ref-manual: drop TCLIBCAPPEND variable ref-manual: add missing OPKGBUILDCMD variable ref-manual: add missing variable PRSERV_UPSTREAM ref-manual: merge patch-status-* to patch-status ref-manual: add mission pep517-backend sanity check release-notes-5.1: update release note for styhead release-notes-5.1: fix spdx bullet point ref-manual: fix ordering of insane checks list release-notes-5.1: add beaglebone-yocto parselogs test oeqa failure ref-manual: structure.rst: document missing tmp/ dirs overview-manual: concepts: add details on package splitting ref-manual: faq: 
add q&a on class appends ref-manual: release-process: update releases.svg ref-manual: release-process: refresh the current LTS releases ref-manual: release-process: update releases.svg with month after "Current" ref-manual: release-process: add a reference to the doc's release ref-manual: devtool-reference: refresh example outputs ref-manual: devtool-reference: document missing commands conf.py: rename :cve: role to :cve_nist: doc: Makefile: remove inkscape, replace by rsvg-convert doc: Makefile: add support for xelatex doc: add a download page for epub and pdf sphinx-static/switchers.js.in: do not refer to URL_ROOT anymore migration-guides: 5.1: fix titles conf.py: add a bitbake_git extlink dev-manual: document how to provide confs from layer.conf dev-manual: bblock: use warning block instead of attention standards.md: add a section on admonitions ref-manual: classes: fix bin_package description Benjamin Szőke (1): mc: fix source URL Bruce Ashfield (40): linux-yocto/6.6: update to v6.6.34 linux-yocto/6.6: update to v6.6.35 linux-yocto/6.6: fix AMD boot trace linux-yocto/6.6: update to v6.6.36 linux-yocto/6.6: update to v6.6.38 linux-yocto/6.6: update to v6.6.40 linux-yocto/6.6: update to v6.6.43 linux-libc-headers: update to v6.10 kernel-devsrc: remove 64 bit vdso cmd files linux-yocto: introduce 6.10 reference kernel linux-yocto/6.10: update to v6.10 linux-yocto/6.10: update to v6.10.2 linux-yocto/6.10: update to v6.10.3 oeqa/runtime/parselogs: update pci BAR ignore for kernel 6.10 oeqa/runtime/parselogs: mips: skip sysctl warning yocto-bsp: set temporary preferred version for genericarm64 lttng-modules: backport patches for kernel v6.11 linux-yocto-dev: bump to v6.11 linux-yocto-rt/6.10: update to -rt14 linux-yocto/6.10: cfg: disable nfsd legacy client tracking linux-yocto/6.6: update to v6.6.44 poky/poky-tiny: bump preferred version to 6.10 linux-yocto/6.6: update to v6.6.45 linux-yocto/6.6: fix genericarm64 config warning linux-yocto/6.6: update to v6.6.47 linux-yocto/6.10: fix CONFIG_PWM_SYSFS config warning linux-yocto/6.10: update to v6.10.7 linux-yocto/6.10: update to v6.10.8 linux-yocto/6.6: update to v6.6.49 linux-yocto/6.6: update to v6.6.50 linux-yocto/6.10: cfg: arm64 configuration updates linux-yocto/6.6: update to v6.6.52 linux-yocto/6.6: update to v6.6.54 linux-yocto/6.10: update to v6.10.11 linux-yocto/6.10: update to v6.10.12 linux-yocto/6.10: update to v6.10.13 linux-yocto/6.10: update to v6.10.14 linux-yocto/6.10: genericarm64.cfg: enable CONFIG_DMA_CMA linux-yocto/6.10: cfg: gpio: allow to re-enable the deprecated GPIO sysfs interface linux-yocto/6.10: bsp/genericarm64: disable ARM64_SME Carlos Alberto Lopez Perez (1): icu: Backport patch to fix build issues with long paths (>512 chars) Changhyeok Bae (1): ethtool: upgrade 6.7 -> 6.9 Changqing Li (11): pixman: fix do_compile error vulkan-samples: fix do_compile error when -Og enabled multilib.conf: remove appending to PKG_CONFIG_PATH pixman: update patch for fixing inline failure with -Og rt-tests: rt_bmark.py: fix TypeError libcap-ng: update SRC_URI apt-native: don't let dpkg overwrite files by default webkitgtk: fix do_configure error on beaglebone-yocto bitbake.conf: drop VOLATILE_LOG_DIR, use FILESYSTEM_PERMS_TABLES instead bitbake.conf: drop VOLATILE_TMP_DIR, use FILESYSTEM_PERMS_TABLES instead rxvt-unicode.inc: disable the terminfo installation by setting TIC to : Chen Qi (13): pciutils: remove textrel INSANE_SKIP systemd: upgrade from 255.6 to 256 systemd-boot: upgrade from 255.6 to 256 
util-linux/util-linux-libuuid: upgrade from 2.39.3 to 2.40.1 libssh2: remove util-linux-col from ptest dependencies kexec-tools: avoid kernel warning json-c: use upstream texts for SUMMARY and DESCRIPTION util-linux/util-linux-libuuid: upgrade from 2.40.1 to 2.40.2 shadow: upgrade from 4.15.1 to 4.16.0 json-c: avoid ptest failure caused by valgrind toolchain-shar-extract.sh: exit when post-relocate-setup.sh fails libgfortran: fix buildpath QA issue shadow: use update-alternatives to handle groups.1 Chris Laplante (4): bitbake: ui/knotty: print log paths for failed tasks in summary bitbake: ui/knotty: respect NO_COLOR & check for tty; rename print_hyperlink => format_hyperlink bitbake: persist_data: close connection in SQLTable __exit__ bitbake: fetch2: use persist_data context managers Chris Spencer (1): cargo_common.bbclass: Support git repos with submodules Christian Lindeberg (3): bitbake: fetch2: Add gomod fetcher bitbake: fetch2: Add gomodgit fetcher bitbake: tests/fetch: Update GoModTest and GoModGitTest Christian Taedcke (1): iptables: fix memory corruption when parsing nft rules Clara Kowalsky (1): resulttool: Add support to create test report in JUnit XML format Claus Stovgaard (1): lib/oe/package-manager: skip processing installed-pkgs with empty globs Clayton Casciato (1): uboot-sign: fix concat_dtb arguments Clément Péron (1): openssl: Remove patch already upstreamed Colin McAllister (2): udev-extraconf: Add collect flag to mount busybox: Fix cut with "-s" flag Corentin Lévy (1): python3-libarchive-c: add ptest Dan McGregor (1): bitbake: prserv: increment 9 to 10 correctly Daniel McGregor (1): libpam: use libdir in conditional Daniel Semkowicz (1): os-release: Fix VERSION_CODENAME in case it is empty Daniil Batalov (1): spdx30_tasks.py: fix typo in call of is_file method Deepesh Varatharajan (1): rust: Rust Oe-Selftest Reduce the testcases in exclude list Deepthi Hemraj (5): llvm: Fix CVE-2024-0151 glibc: stable 2.39 branch updates. binutils: stable 2.42 branch updates glibc: stable 2.40 branch updates glibc: stable 2.40 branch updates. 
Denys Dmytriyenko (3): llvm: extend llvm-config reproducibility fix to nativesdk class nativesdk-libtool: sanitize the script, remove buildpaths gcc: unify cleanup of include-fixed, apply to cross-canadian Divya Chellam (1): python3: Upgrade 3.12.5 -> 3.12.6 Dmitry Baryshkov (12): mesa: fix QA warnings caused by freedreno tools xserver-xorg: fix CVE-2023-5574 status lib/spdx30_tasks: improve error message linux-firmware: make qcom-sc8280xp-lenovo-x13s-audio install Linaro licence linux-firmware: add packages with SM8550 and SM8650 audio topology files linux-firmware: move -qcom-qcm2290-wifi before -ath10k linux-firmware: use wildcards to grab all qcom-qcm2290/qrb4210 wifi files linux-firmware: package qcom-vpu firmware linux-firmware: restore qcom/vpu-1.0/venus.mdt compatibility symlink piglit: add missing dependency on wayland linux-firmware: add packages for Qualcomm XElite GPU firmware linux-firmware: split ath10k package Enguerrand de Ribaucourt (6): bitbake: fetch2/npmsw: fix fetching git revisions not on master bitbake: fetch2/npmsw: allow packages not declaring a registry version npm: accept unspecified versions in package.json recipetool: create_npm: resolve licenses defined in package.json recipetool: create: split guess_license function recipetool: create_npm: reuse license utils Enrico Jörns (2): bitbake: bitbake-diffsigs: fix handling when finding only a single sigfile archiver.bbclass: fix BB_GENERATE_MIRROR_TARBALLS checking Esben Haabendal (1): mesa: Fix build with etnaviv gallium driver Etienne Cordonnier (3): oeqa/runtime: fix race-condition in minidebuginfo test bitbake: gcp.py: remove slow calls to gsutil stat systemd: make home directory readable by systemd-coredump Fabio Estevam (1): u-boot: upgrade 2024.04 -> 2024.07 Florian Amstutz (1): u-boot: Fix potential index error issues in do_deploy with multiple u-boot configurations Gassner, Tobias.ext (1): rootfs: Ensure run-postinsts is not uninstalled for read-only-rootfs-delayed-postinsts Gauthier HADERER (1): populate_sdk_ext.bclass: make sure OECORE_NATIVE_SYSROOT is exported. 
Guðni Már Gilbert (7): python3-setuptools: drop python3-2to3 from RDEPENDS bluez5: drop modifications to Python shebangs bluez5: cleanup redundant backslashes python3-attrs: drop python3-ctypes from RDEPENDS gobject-introspection: split tools and giscanner into a separate package bluez5: upgrade 5.77 -> 5.78 bluez5: remove redundant patch for MAX_INPUT Harish Sadineni (4): gcc-runtime: enabling "network" task specific flag oeqa/selftest/gcc: Fix host key verfication failure oeqa/selftest/gcc: Fix kex exchange identification error binutils: Add missing perl modules to RDEPENDS for nativsdk variant Het Patel (1): zlib: Add CVE_PRODUCT to exclude false positives Hiago De Franco (1): weston: backport patch to allow neatvnc < v0.9.0 Hongxu Jia (1): gcc-source: Fix racing on building gcc-source-14.2.0 and lib32-gcc-source-14.2.0 Intaek Hwang (6): alsa-plugins: set CVE_PRODUCT mpfr: set CVE_PRODUCT libatomic-ops: set CVE_PRODUCT gstreamer1.0-plugins-bad: set CVE_PRODUCT python3-lxml: set CVE_PRODUCT python3-psutil: set CVE_PRODUCT Jaeyoon Jung (2): makedevs: Fix issue when rootdir of / is given makedevs: Fix matching uid/gid Jagadeesh Krishnanjanappa (1): tune-cortexa32: set tune feature as armv8a Jan Vermaete (2): python3-websockets: added python3-zipp as RDEPENDS ref-manual: added wic.zst to the IMAGE_TYPES Jinfeng Wang (2): glib-2.0: fix glib-2.0 ptest failure when upgrading tzdata2024b tzdata/tzcode-native: upgrade 2024a -> 2024b Johannes Schneider (3): systemd: add PACKAGECONFIG for bpf-framework systemd: bpf-framework: 'propagate' the '--sysroot=' for crosscompilation systemd: bpf-framework: pass 'recipe-sysroot' to BPF compiler John Ripple (1): packagegroup-core-tools-profile.bb: Enable aarch64 valgrind Jon Mason (6): oeqa/runtime/ssh: add retry logic and sleeps to allow for slower systems oeqa/runtime/ssh: check for all errors at the end docs: modify reference from python2.py to python.py kernel.bbclass: remove unused CMDLINE_CONSOLE oeqa/runtime/ssh: increase the number of attempts wpa-supplicant: add patch to check for kernel header version when compiling macsec Jonas Gorski (1): rootfs-postcommands.bbclass: make opkg status reproducible Jonas Munsin (1): bzip2: set CVE_PRODUCT Jonathan GUILLOT (1): cronie: add inotify PACKAGECONFIG option Jose Quaresma (14): go: upgrade 1.22.3 -> 1.22.4 go: drop the old 1.4 bootstrap C version openssh: fix CVE-2024-6387 go: upgrade 1.22.4 -> 1.22.5 openssh: drop rejected patch fixed in 8.6p1 release openssh: systemd sd-notify patch was rejected upstream oeqa/runtime/scp: requires openssh-sftp-server libssh2: fix ptest regression with openssh 9.8p1 openssh: systemd notification was implemented upstream openssh: upgrade 9.7p1 -> 9.8p1 libssh2: disable-DSA-by-default go: upgrade 1.22.5 -> 1.22.6 bitbake: bitbake: doc/user-manual: Update the BB_HASHSERVE_UPSTREAM oeqa/selftest: Update the BB_HASHSERVE_UPSTREAM Joshua Watt (22): binutils-cross-testsuite: Rename to binutils-testsuite classes/spdx-common: Move SPDX_SUPPLIER scripts/pull-spdx-licenses.py: Add script licenses: Update to SPDX license version 3.24.0 classes/create-spdx-2.2: Handle SemVer License List Versions classes-recipe/image: Add image file manifest classes-global/staging: Exclude do_create_spdx from automatic sysroot extension classes-recipe/image_types: Add SPDX_IMAGE_PURPOSE to images classes-recipe: nospdx: Add class classes-recipe/baremetal-image: Add image file manifest selftest: sstatetests: Exclude all SPDX tasks classes/create-spdx-2.2: Handle empty packages classes/create-spdx-3.0: 
Add classes selftest: spdx: Add SPDX 3.0 test cases classes/spdx-common: Move to library classes/create-spdx-3.0: Move tasks to library Switch default spdx version to 3.0 classes-recipe/multilib_script: Expand before splitting classes/create-spdx-image-3.0: Fix SSTATE_SKIP_CREATION lib/spdx30_tasks: Report all missing providers lib/oe/sbom30.py: Fix build parameters bitbake: Remove custom exception backtrace formatting Julien Stephan (5): README: add instruction to run Vale on a subset documentation: Makefile: add SPHINXLINTDOCS to specify subset to sphinx-lint styles: vocabularies: Yocto: add sstate ref-manual: variables: add SIGGEN_LOCKEDSIGS* variables dev-manual: add bblock documentation Jörg Sommer (7): classes/kernel: No symlink in postinst without KERNEL_IMAGETYPE_SYMLINK ref-manual: add DEFAULT_TIMEZONE variable ptest-runner: Update 2.4.4 -> 2.4.5 runqemu: Fix detection of -serial parameter buildcfg.py: add dirty status to get_metadata_git_describe doc/features: remove duplicate word in distribution feature ext2 doc/features: describe distribution feature pni-name Kai Kang (3): glibc: fix fortran header file conflict for arm systemd: fix VERSION_TAG related build error kexec-tools: update COMPATIBLE_HOST because of makedumpfile Katawann (1): cve-check: add field "modified" to JSON report Khem Raj (38): llvm: Update to 18.1.8 utils.bbclass: Use objdump instead of readelf to compute SONAME mesa: Including missing LLVM header mesa: Add packageconfig knob to control tegra gallium driver gdb: Upgrade to 15.1 release busybox: Fix tc applet build when using kernel 6.8+ busybox: CVE-2023-42364 and CVE-2023-42365 fixes busybox: Add fix for CVE-2023-42366 gcc-14: Mark CVE-2023-4039 as fixed in GCC14+ systemd: Replace deprecate udevadm command glibc: Upgrade to 2.40 glibc: Remove redundant configure option --disable-werror libyaml: Update status of CVE-2024-35328 libyaml: Change CVE status to wontfix binutils: Upgrade to 2.43 release binutils: Fix comment about major version gcc: Upgrade to GCC 14.2 gnupg: Document CVE-2022-3219 and mark wontfix systemd: Refresh patch to remove patch-fuzz quota: Apply a backport to fix basename API with musl bluez5: Fix build with musl musl: Update to 1.2.5 release musl: Upgrade to latest tip of trunk gdb: Fix build with latest clang fmt: Get rid of std::copy aspell: Backport a fix to build with gcc-15/clang-19 openssh: Mark CVE-2023-51767 as wont-fix python: Backport fixes for CVE-2024-7592 ffmpeg: Fix build on musl linux systems kea: Replace Name::NameString with vector of uint8_t webkitgtk: Fix build issues with clang-19 glibc: Fix the arm/arm64 worsize.h uniificationb patch gcc: Fix spurious '/' in GLIBC_DYNAMIC_LINKER on microblaze libpcre2: Update base uri PhilipHazel -> PCRE2Project linux-yocto: Enable l2tp drivers when ptest featuee is on bluez: Fix mesh builds on musl qemu: Fix build on musl/riscv64 ffmpeg: Disable asm optimizations on x86 Konrad Weihmann (6): testimage: fallback for empty IMAGE_LINK_NAME python3-docutils: fix interpreter lines testexport: fallback for empty IMAGE_LINK_NAME python_flit_core: remove python3 dependency runqemu: keep generating tap devices runqemu: remove unused uid variable Lee Chee Yang (10): migration-guides: add release notes for 4.0.19 migration-guides: add release notes for 5.0.2 migration-guide: add release notes for 4.0.20 migration-guides: add release notes for 5.0.3 migration-guide: add release notes for 4.0.21 release-notes-5.1: update for several section migration-guide: add release notes for 4.0.22 
migration-guides: add release notes for 5.0.4 migration-guides: add release notes for 5.0.5 migration-guides: add release notes for 4.0.23 Leon Anavi (1): u-boot.inc: WORKDIR -> UNPACKDIR transition Leonard Göhrs (1): bitbake: fetch2/npm: allow the '@' character in package names Louis Rannou (1): image_qa: fix error handling Marc Ferland (2): appstream: refresh patch appstream: add qt6 PACKAGECONFIG option Marcus Folkesson (1): bootimg-partition: break out code to a common library. Mark Hatle (7): create-sdpx-2.2.bbclass: Switch from exists to isfile checking debugsrc package.py: Fix static debuginfo split package.py: Fix static library processing selftest-hardlink: Add additional test cases spdx30_tasks.py: switch from exists to isfile checking debugsrc create-spdx-*: Support multilibs via SPDX_MULTILIB_SSTATE_ARCHS oeqa sdk cases: Skip SDK test cases when TCLIBC is newlib Markus Volk (4): libinput: update 1.25.0 -> 1.26.1 systemd: dont set polkit permissions manually gtk4: update 4.14.4 -> 4.14.5 gcc: add a backport patch to fix an issue with tzdata 2024b Marta Rybczynska (9): classes/kernel.bbclass: update CVE_PRODUCT cve-check: encode affected product/vendor in CVE_STATUS cve-extra-inclusions: encode CPEs of affected packages cve-check: annotate CVEs during analysis vex.bbclass: add a new class cve-check-map: add new statuses selftest: add test_product_match cve-json-to-text: add script cve-check: remove the TEXT format support Martin Hundeb?ll (1): ofono: upgrade 2.7 -> 2.8 Martin Jansa (10): libgfortran.inc: fix nativesdk-libgfortran dependencies hdparm: drop NO_GENERIC_LICENSE[hdparm] gstreamer1.0-plugins-bad: add PACKAGECONFIG for gtk3 kernel.bbclass: add original package name to RPROVIDES for -image and -base meta-world-pkgdata: Inherit nopackages populate_sdk_base: inherit nopackages mc: set ac_cv_path_ZIP to avoid buildpaths QA issues bitbake.conf: DEBUG_PREFIX_MAP: add -fmacro-prefix-map for STAGING_DIR_NATIVE bitbake: Revert "fetch2/gitsm: use configparser to parse .gitmodules" ffmpeg: fix packaging examples Mathieu Dubois-Briand (1): oeqa/postactions: Fix archive retrieval from target Matthew Bullock (1): openssh: allow configuration of hostkey type Matthias Pritschet (1): ref-manual: fix typo and move SYSROOT_DIRS example Michael Halstead (1): yocto-uninative: Update to 4.6 for glibc 2.40 Michael Opdenacker (1): doc: Makefile: publish pdf and epub versions too Michal Sieron (1): insane: remove obsolete QA errors Mikko Rapeli (2): systemd: update from 256 to 256.4 ovmf-native: remove .pyc files from install Mingli Yu (1): llvm: Enable libllvm for native build Niko Mauno (17): dnf/mesa: Fix missing leading whitespace with ':append' systemd: Mitigate /var/log type mismatch issue systemd: Mitigate /var/tmp type mismatch issue libyaml: Amend CVE status as 'upstream-wontfix' image_types.bbclass: Use --force also with lz4,lzop util-linux: Add PACKAGECONFIG option to mitigate rootfs remount error iw: Fix LICENSE dejagnu: Fix LICENSE unzip: Fix LICENSE zip: Fix LICENSE tiff: Fix LICENSE gcr: Fix LICENSE python3-maturin: Fix cross compilation issue for armv7l, mips64, ppc bitbake.conf: Mark VOLATILE_LOG_DIR as obsolete bitbake.conf: Mark VOLATILE_TMP_DIR as obsolete docs: Replace VOLATILE_LOG_DIR with FILESYSTEM_PERMS_TABLES docs: Replace VOLATILE_TMP_DIR with FILESYSTEM_PERMS_TABLES Ola x Nilsson (4): scons.bbclass: Add scons class prefix to do_configure insane: Remove redundant returns ffmpeg: Package example files in ffmpeg-examples glibc: Fix missing randomness in __gen_tempname 
Oleksandr Hnatiuk (2): icu: remove host references in nativesdk to fix reproducibility gcc: remove paths to sysroot from configargs.h and checksum-options for gcc-cross-canadian Otavio Salvador (1): u-boot: Ensure we use BFD as linker even if using GCC for it Patrick Wicki (1): gpgme: move gpgme-tool to own sub-package Paul Barker (1): meta-ide-support: Mark recipe as MACHINE-specific Paul Eggleton (1): classes: add new retain class for retaining build results Paul Gerber (1): uboot-sign: fix counters in do_uboot_assemble_fitimage Pavel Zhukov (1): package_rpm: Check if file exists before open() Pedro Ferreira (2): buildhistory: Fix intermittent package file list creation buildhistory: Restoring files from preserve list Peter Kjellerstedt (9): systemd: Correct the indentation in do_install() systemd: Move the MIME file to a separate package license_image.bbclass: Rename license-incompatible to license-exception test-manual: Add a missing dot systemd.bbclass: Clean up empty parent directories oeqa/selftest/bbclasses: Add tests for systemd and update-rc.d interaction systemd: Remove a leftover reference to ${datadir}/mime bitbake: fetch2/gomod: Support URIs with only a hostname image.bbclass: Drop support for ImageQAFailed exceptions in image_qa Peter Marko (17): cargo: remove True option to getVar calls poky-sanity: remove True option to getVar calls flac: fix buildpaths warnings bitbake: fetch/clearcase: remove True option to getVar calls in clearcase module busybox: Patch CVE-2021-42380 busybox: Patch CVE-2023-42363 libstd-rs,rust-cross-canadian: set CVE_PRODUCT to rust glibc: cleanup old cve status libmnl: explicitly disable doxygen libyaml: ignore CVE-2024-35326 libyaml: Ignore CVE-2024-35325 wpa-supplicant: Ignore CVE-2024-5290 cve-check: add support for cvss v4.0 go: upgrade 1.22.6 -> 1.22.7 go: upgrade 1.22.7 -> 1.22.8 cve-check: do not skip cve status description after : cve-check: fix malformed cve status description with : characters Philip Lorenz (1): curl: Reenable auth support for native and nativesdk Primoz Fiser (2): pulseaudio: Add PACKAGECONFIG for optional OSS support pulseaudio: Remove from time64.inc exception list Purushottam Choudhary (2): kmscube: Upgrade to latest revision virglrenderer: Add patch to fix -int-conversion build issue Quentin Schulz (4): bitbake: doc: releases: mark mickledore as outdated bitbake: doc: releases: add nanbield to the outdated manuals bitbake: doc: releases: add scarthgap weston-init: fix weston not starting when xwayland is enabled Rasmus Villemoes (3): iptables: remove /etc/ethertypes openssh: factor out sshd hostkey setup to separate function systemd: include sysvinit in default PACKAGECONFIG only if in DISTRO_FEATURES Regis Dargent (1): udev-extraconf: fix network.sh script did not configure hotplugged interfaces Ricardo Simoes (2): volatile-binds: Do not create workdir if OverlayFS is disabled volatile-binds: Remove workdir if OverlayFS fails Richard Purdie (116): maintainers: Drop go-native as recipe removed oeqa/runtime/parselogs: Add some kernel log trigger keywords bitbake: codeparser/data: Ensure module function contents changing is accounted for bitbake: codeparser: Skip non-local functions for module dependencies native/nativesdk: Stop overriding unprefixed *FLAGS variables qemu: Upgrade 9.0.0 -> 9.0.1 oeqa/runtime/ssh: In case of failure, show exit code and handle -15 (SIGTERM) oeqa/selftest/reproducibile: Explicitly list virtual targets abi_version/package: Bump hashequiv version and package class version 
testimage/postactions: Allow artifact collection to be skipped python3: Drop generating a static libpython bitbake.conf: Drop obsolete debug compiler options bitbake.conf: Further cleanup compiler optimization flags oeqa/selftest/incompatible_lic: Ensure tests work with ERROR_QA changes oeqa/selftest/locale: Ensure tests work with ERROR_QA changes meson: Fix native meson config busybox: reconfigure wget https support by default for security poky-tiny: Update FULL_OPTIMIZATION to match core changes icu/perf: Drop SPDX_S variable insane: Promote long standing warnings to errors selftest/fortran-helloworld: Fix buildpaths error build-appliance-image: Update to master head revision distro/include: Add yocto-space-optimize, disabling debugging for large components testimage: Fix TESTIMAGE_FAILED_QA_ARTIFACTS setting oeqa/postactions: Separate artefact collection from test result collection qemu: Drop mips workaround poky: Enable yocto-space-optimize.inc time64.inc: Add warnings exclusion for known toolchain problems for now pseudo: Fix to work with glibc 2.40 pseudo: Update to include open symlink handling bugfix create-spdx-3.0/populate_sdk_base: Add SDK_CLASSES inherit mechanism to fix tarball SPDX manifests libtool: Upgrade 2.5.0 -> 2.5.1 qemu: Upgrade 9.0.1 -> 9.0.2 populate_sdk_base: Ensure nativesdk targets have do_package_qa run cve_check: Use a local copy of the database during builds pixman: Backport fix for recent binutils musl: Show error when used with multilibs sdpx: Avoid loading of SPDX_LICENSE_DATA into global config perf: Drop perl buildpaths skip m4: Drop ptest INSANE_SKIPs gettext: Drop ptest INSANE_SKIPs glibc-y2038-tests: Fix debug split and drop INSANE_SKIPs glibc-y2038-tests: Don't force distro policy glib-initial: Inherit nopackages vim: Drop vim-tools INSANE_SKIP as not needed coreutils: Fix intermittent ptest issue coreutils: Update merged patch to backport status bitbake.conf: Add truncate to HOSTTOOLS bitbake.conf: Include cve-check-map earlier, before distro bitbake: BBHandler: Handle comments in addtask/deltask bitbake: cache: Drop unused function bitbake: cookerdata: Separate out data_hash and hook to tinfoil bitbake: BBHandler/ast: Improve addtask handling bitbake: build: Ensure addtask before/after tasknames have prefix applied bitbake: codeparser: Allow code visitor expressions to be declared in metadata lib/oe: Use new visitorcode functionality for qa.handle_error() insane: Optimise ERROR_QA/WARN_QA references in do_populate_sysroot insane: Drop oe.qa.add_message usage insane: Add missing vardepsexclude insane: Further simplify code insane: Allow ERROR_QA to use 'contains' hash optimisations for do_package_qa selftest/sstatetests: Extend to cover ERROR_QA/WARN_QA common issues lz4: Fix static library reproducibility issue lz4: Disable static libraries again abi-version/ssate: Bump to avoid systemd hash corruption issue buildhistory: Simplify intercept call sites and drop SSTATEPOSTINSTFUNC usage sstate: Drop SSTATEPOSTINSTFUNC support lttng-tools: 2.13.13 -> 2.13.14 libtool: 2.5.1 -> 2.5.2 gettext: Drop obsolete ptest conditional in do_install elfutils: Drop obsolete ptest conditional in do_install expat: 2.6.2 -> 2.6.3 license: Fix directory layout issues sstate: Make do_recipe_qa and do_populate_lic non-arch specific bitbake: siggen: Fix rare file-checksum hash issue insane: Remove dependency on TCLIBC from QA test conf/defaultsetup.conf: Drop TCLIBCAPPEND poky.conf: Drop TCLIBCAPPEND layer.conf: Drop scarthgap namespace from LAYERSERIES layer.conf: Update 
to styhead Revert "python3-setuptools: upgrade 72.1.0 -> 72.2.0" ruby: Make docs generation deterministic libedit: Make docs generation deterministic poky-tiny: Drop TCLIBCAPPEND libsdl2: Fix non-deterministic configure option for libsamplerate bitbake: toaster: Update fixtures for styhead scripts/install-buildtools: Update to 5.0.3 build-appliance-image: Update to master head revision poky.conf: Bump version for 5.1 styhead release build-appliance-image: Update to master head revision bitbake: fetch2/git: Use quote from shlex, not pipes efi-bootdisk.wks: Increase overhead-factor to avoid test failures binutils: Fix binutils mingw packaging bitbake: tests/fetch: Use our own mirror of sysprof to decouple from gnome gitlab bitbake: tests/fetch: Use our own mirror of mobile-broadband-provider to decouple from gnome gitlab pseudo: Fix envp bug and add posix_spawn wrapper oeqa/runtime/ssh: Rework ssh timeout oeqa/runtime/ssh: Fix incorrect timeout fix qemurunner: Clean up serial_lock handling bitbake: fetch/wget: Increase timeout to 100s from 30s openssl: Fix SDK environment script to avoid unbound variable bitbake: runqueue: Fix performance of multiconfigs with large overlap bitbake: runqueue: Optimise setscene loop processing bitbake: runqueue: Fix scenetask processing performance issue do_package/sstate/sstatesig: Change timestamp clamping to hash output only selftest/reproducible: Drop rawlogs selftest/reproducible: Clean up pathnames resulttool: Allow store to filter to specific revisions resulttool: Use single space indentation in json output oeqa/utils/gitarchive: Return tag name and improve exclude handling resulttool: Fix passthrough of --all files in store mode resulttool: Add --logfile-archive option to store mode resulttool: Handle ltp rawlogs as well as ptest resulttool: Clean up repoducible build logs resulttool: Trim the precision of duration information resulttool: Improve repo layout for oeselftest results Robert Joslyn (1): curl: Update to 8.9.1 Robert Yang (8): bitbake: cache: Remove invalid symlink for bb_cache.dat bitbake: fetch2/git: Use git shallow fetch to implement clone_shallow_local() bitbake: bitbake: tests/fetch: Update GitShallowTest for clone_shallow_local() bitbake: data_smart: Improve performance for VariableHistory release-notes-5.0.rst: NO_OUTPUT -> NO_COLOR bitbake: gitsm: Add call_process_submodules() to remove duplicated code bitbake: gitsm: Remove downloads/tmpdir when failed cml1.bbclass: do_diffconfig: Don't override .config with .config.orig Rohini Sangam (1): vim: Upgrade 9.1.0698 -> 9.1.0764 Ross Burton (92): expect: fix configure with GCC 14 expect: update code for Tcl channel implementation libxcrypt: correct the check for a working libucontext.h bash: fix configure checks that fail with GCC 14.1 gstreamer1.0: disable flaky baseparser tests librsvg: don't try to run target code at build time librsvg: upgrade to 2.57.3 linux-libc-headers: remove redundant install_headers patch glibc: add task to list exported symbols oeqa/sdk: add out-of-tree kernel module building test openssl: disable tests unless ptest is enabled openssl: strip the test suite openssl: rewrite ptest installation ell: upgrade 0.66 -> 0.67 ofono: upgrade 2.8 -> 2.9 ruby: upgrade 3.3.0 -> 3.3.4 gtk+3: upgrade 3.24.42 -> 3.24.43 pango: upgrade 1.52.2 -> 1.54.0 Revert "python3: drop deterministic_imports.patch" python3: add dependency on -compression to -core python3-jsonschema: rename nongpl PACKAGECONFIG python3-setuptools: RDEPEND on python3-unixadmin python3-poetry-core: remove 
python3-pathlib2 dependency pytest-runner: remove python3-py dependency python3-chardet: remove pytest-runner DEPENDS python3-websockets: remove unused imports python3-beartype: add missing RDEPENDS python3-jsonschema: remove obsolete RDEPENDS python3-pluggy: clean up RDEPENDS python3-scons: remove obsolete RDEPENDS gi-docgen: remove obsolete python3-toml dependency python3-jinja2: remove obsolete python3-toml dependency python3-setuptools-rust: remove obsolete python3-toml dependency python3-setuptools-scm: remove obsolete python3-tomli dependency python3-zipp: remove obsolete dependencies python3-importlib-metadata: remove obsolete dependencies python3-pathspec: use python_flit_core python3-pyasn1: merge bb/inc python3-pyasn1: use python_setuptools_build_meta build class python3-beartype: use python_setuptools_build_meta build class python3-cffi: use python_setuptools_build_meta build class python3-psutil: use python_setuptools_build_meta build class python3-pycryptodome(x): use python_setuptools_build_meta build class python3-pyelftools: use python_setuptools_build_meta build class python3-ruemel-yaml: use python_setuptools_build_meta build class python3-scons: use python_setuptools_build_meta build class python3-websockets: use python_setuptools_build_meta build class python3-setuptools-scm: remove python3-tomli dependency python3-spdx-tools: use python_setuptools_build_meta build class python3-subunit: use python_setuptools_build_meta build class python3-uritools: use python_setuptools_build_meta build class python3-yamllint: use python_setuptools_build_meta build class python3-mako: add dependency on python3-misc for timeit python3-uritools: enable ptest gi-docgen: upgrade to 2024.1 python3-pytest: clean up RDEPENDS libcap-ng: clean up recipe glib-networking: upgrade 2.78.1 -> 2.80.0 python3-unittest-automake-output: add dependency on unittest python3-idna: generalise RDEPENDS python3-jsonpointer: upgrade 2.4 -> 3.0.0 ptest-packagelists: sort entries python3-cffi: generalise RDEPENDS python3-cffi: enable ptest python3-packaging: enable ptest python3-idna: enable ptest setuptools3: check for a PEP517 build system selection insane: add pep517-backend to WARN_QA python3-numpy: ignore pep517-backend warnings bmaptool: temporarily silence the pep517-backend warning meson: upgrade 1.4.0 -> 1.5.1 python3-pathlib2: remove recipe (moved to meta-python) python3-rfc3986-validator: remove recipe (moved to meta-python) python3-py: remove recipe (moved to meta-python) pytest-runner: remove recipe (moved to meta-python) python3-importlib-metadata: remove recipe (moved to meta-python) python3-toml: remove recipe (moved to meta-python) python3-tomli: remove recipe (moved to meta-python) bblayers/machines: add bitbake-layers command to list machines ffmpeg: fix build with binutils 2.43 on arm with commerical codecs vulkan-samples: limit to aarch64/x86-64 bitbake: fetch2/gitsm: use configparser to parse .gitmodules systemd: add missing dependency on libkmod to udev sanity: check for working user namespaces bitbake.conf: mark TCLIBCAPPEND as deprecated bitbake: fetch2: don't try to preserve all attributes when unpacking files icu: update patch Upstream-Status ffmpeg: nasm is x86 only, so only DEPEND if x86 ffmpeg: no need for textrel INSANE_SKIP strace: download release tarballs from GitHub tcl: skip io-13.6 test case groff: fix rare build race in hdtbl Ryan Eatmon (3): u-boot.inc: Refactor do_* steps into functions that can be overridden oe-setup-build: Fix typo oe-setup-build: Change how we get the 
SHELL value Sabeeh Khan (1): linux-firmware: add new package for cc33xx firmware Sakib Sajal (1): blktrace: ask for python3 specifically Samantha Jalabert (1): cve_check: Update selftest with new status detail Sergei Zhmylev (1): lsb-release: fix Distro Codename shell escaping Shunsuke Tokumoto (1): python3-setuptools: Add "python:setuptools" to CVE_PRODUCT Siddharth Doshi (5): libxml2: Upgrade 2.12.7 -> 2.12.8 Tiff: Security fix for CVE-2024-7006 vim: Upgrade 9.1.0114 -> 9.1.0682 wpa-supplicant: Upgrade 2.10 -> 2.11 vim: Upgrade 9.1.0682 -> 9.1.0698 Simone Weiß (2): gnutls: upgrade 3.8.5 -> 3.8.6 curl: Ignore CVE-2024-32928 Sreejith Ravi (1): package.py: Add Requires.private field in process_pkgconfig Stefan Mueller-Klieser (1): icu: fix make-icudata package config Steve Sakoman (3): release-notes-4.0: update BB_HASHSERVE_UPSTREAM for new infrastructure poky.conf: bump version for 5.1.1 build-appliance-image: Update to styhead head revision Sundeep KOKKONDA (3): binutils: stable 2.42 branch updates oeqa/selftest/reproducibile: rename of reproducible directories rust: rustdoc reproducibility issue fix Talel BELHAJSALEM (1): contributor-guide: Remove duplicated words Teresa Remmet (1): recipes-bsp: usbutils: Fix usb-devices command using busybox Theodore A. Roth (2): ca-certificates: update 20211016 -> 20240203 ca-certificates: Add comment for provenance of SRCREV Thomas Perrot (2): opensbi: bump to 1.5 opensbi: bump to 1.5.1 Tim Orling (8): python3-rpds-py: upgrade 0.18.1 -> 0.20.0 python3-alabaster: upgrade 0.7.16 -> 1.0.0 python3-cffi: upgrade 1.16.0 -> 1.17.0 python3-more-itertools: upgrade 10.3.0 -> 10.4.0 python3-wheel: upgrade 0.43.0 -> 0.44.0 python3-zipp: upgrade 3.19.2 -> 3.20.0 python3-attrs: upgrade 23.2.0 -> 24.2.0 python3-setuptools-rust: upgrade 1.9.0 -> 1.10.1 Tom Hochstein (2): time64.inc: Simplify GLIBC_64BIT_TIME_FLAGS usage weston: Add missing runtime dependency on freerdp Trevor Gamblin (37): dhcpcd: upgrade 10.0.6 -> 10.0.8 python3-hypothesis: upgrade 6.103.0 -> 6.103.2 python3-psutil: upgrade 5.9.8 -> 6.0.0 python3-testtools: upgrade 2.7.1 -> 2.7.2 python3-urllib3: upgrade 2.2.1 -> 2.2.2 maintainers.inc: add self for unassigned python recipes MAINTAINERS.md: fix patchtest entry python3-pytest-subtests: upgrade 0.12.1 -> 0.13.0 python3-hypothesis: upgrade 6.103.2 -> 6.105.1 python3-setuptools: upgrade 69.5.1 -> 70.3.0 bind: upgrade 9.18.27 -> 9.20.0 cmake: upgrade 3.29.3 -> 3.30.1 dpkg: upgrade 1.22.6 -> 1.22.10 nettle: upgrade 3.9.1 -> 3.10 patchtest/patch.py: remove cruft scripts/patchtest.README: cleanup, add selftest notes kea: upgrade 2.4.1 -> 2.6.1 python3-sphinx: upgrade 7.4.7 -> 8.0.2 python3-hypothesis: upgrade 6.108.4 -> 6.108.10 python3-pytest: upgrade 8.3.1 -> 8.3.2 python3-sphinxcontrib-applehelp: upgrade 1.0.8 -> 2.0.0 python3-sphinxcontrib-devhelp: upgrade 1.0.6 -> 2.0.0 python3-sphinxcontrib-htmlhelp: upgrade 2.0.6 -> 2.1.0 python3-sphinxcontrib-qthelp: upgrade 1.0.8 -> 2.0.0 python3-sphinxcontrib-serializinghtml: upgrade 1.1.10 -> 2.0.0 libassuan: upgrade 2.5.7 -> 3.0.1 python3-setuptools: upgrade 71.1.0 -> 72.1.0 python3-hypothesis: upgrade 6.108.10 -> 6.110.1 python3-cython: upgrade 3.0.10 -> 3.0.11 python3: upgrade 3.12.4 -> 3.12.5 python3: skip readline limited history tests piglit: upgrade 22eaf6a91c -> c11c9374c1 python3-hypothesis: upgrade 6.111.1 -> 6.111.2 python3-pyparsing: upgrade 3.1.2 -> 3.1.4 patchtest: test_mbox: remove duplicate regex definition patchtest: test_shortlog_length: omit shortlog prefixes patchtest: test_non_auh_upgrade: 
improve parse logic Troels Dalsgaard Hoffmeyer (1): bitbake: build/exec_task: Log str() instead of repr() for exceptions in build Tronje Krabbe (1): rust-target-config: Update data layouts for 32-bit arm targets Ulrich Ölmann (2): initramfs-framework: fix typos buildhistory: fix typos Vijay Anusuri (4): wget: Fix for CVE-2024-38428 apr: upgrade 1.7.4 -> 1.7.5 xserver-xorg: upgrade 21.1.13 -> 21.1.14 xwayland: upgrade 24.1.3 -> 24.1.4 Vivek Puar (1): linux-firmware: upgrade 20240811 -> 20240909 Wadim Egorov (1): watchdog: Set watchdog_module in default config Wang Mingyu (125): alsa-lib: upgrade 1.2.11 -> 1.2.12 alsa-plugins: upgrade 1.2.7.1 -> 1.2.12 alsa-ucm-conf: upgrade 1.2.11 -> 1.2.12 git: upgrade 2.45.1 -> 2.45.2 createrepo-c: upgrade 1.1.1 -> 1.1.2 diffoscope: upgrade 267 -> 271 enchant2: upgrade 2.7.3 -> 2.8.1 fribidi: upgrade 1.0.14 -> 1.0.15 gstreamer: upgrade 1.24.3 -> 1.24.4 libevdev: upgrade 1.13.1 -> 1.13.2 libjitterentropy: upgrade 3.4.1 -> 3.5.0 libpcre2: upgrade 10.43 -> 10.44 pciutils: upgrade 3.12.0 -> 3.13.0 rng-tools: upgrade 6.16 -> 6.17 ttyrun: upgrade 2.32.0 -> 2.33.1 btrfs-tools: handle rename of inode_includes() from e2fsprogs 1.47.1 rt-tests: upgrade 2.6 -> 2.7 base-passwd: upgrade 3.6.3 -> 3.6.4 btrfs-tools: upgrade 6.8.1 -> 6.9.2 ccache: upgrade 4.10 -> 4.10.1 createrepo-c: upgrade 1.1.2 -> 1.1.3 cups: upgrade 2.4.9 -> 2.4.10 debianutils: upgrade 5.19 -> 5.20 diffoscope: upgrade 271 -> 272 dnf: upgrade 4.20.0 -> 4.21.0 gdbm: upgrade 1.23 -> 1.24 gstreamer: upgrade 1.24.4 -> 1.24.5 harfbuzz: upgrade 8.5.0 -> 9.0.0 libadwaita: upgrade 1.5.1 -> 1.5.2 libdnf: upgrade 0.73.1 -> 0.73.2 libdrm: upgrade 2.4.120 -> 2.4.122 libproxy: upgrade 0.5.6 -> 0.5.7 librsvg: upgrade 2.57.3 -> 2.58.1 libsdl2: upgrade 2.30.4 -> 2.30.5 opkg: upgrade 0.6.3 -> 0.7.0 opkg-utils: upgrade 0.6.3 -> 0.7.0 pinentry: upgrade 1.3.0 -> 1.3.1 python3-certifi: upgrade 2024.6.2 -> 2024.7.4 python3-hatchling: upgrade 1.24.2 -> 1.25.0 python3-importlib-metadata: upgrade 7.1.0 -> 8.0.0 python3-maturin: upgrade 1.6.0 -> 1.7.0 python3-pycairo: upgrade 1.26.0 -> 1.26.1 python3-trove-classifiers: upgrade 2024.5.22 -> 2024.7.2 repo: upgrade 2.45 -> 2.46 sysstat: upgrade 12.7.5 -> 12.7.6 wireless-regdb: upgrade 2024.05.08 -> 2024.07.04 cryptodev: upgrade 1.13 -> 1.14 asciidoc: upgrade 10.2.0 -> 10.2.1 glslang: upgrade 1.3.283.0 -> 1.3.290.0 gsettings-desktop-schemas: upgrade 46.0 -> 46.1 kexec-tools: upgrade 2.0.28 -> 2.0.29 libproxy: upgrade 0.5.7 -> 0.5.8 librsvg: upgrade 2.58.1 -> 2.58.2 libsolv: upgrade 0.7.29 -> 0.7.30 libtirpc: upgrade 1.3.4 -> 1.3.5 orc: upgrade 0.4.38 -> 0.4.39 python3-bcrypt: upgrade 4.1.3 -> 4.2.0 python3-dbusmock: upgrade 0.31.1 -> 0.32.1 python3-hypothesis: upgrade 6.105.1 -> 6.108.4 python3-importlib-metadata: upgrade 8.0.0 -> 8.2.0 python3-jsonschema: upgrade 4.22.0 -> 4.23.0 python3-pytest-subtests: upgrade 0.13.0 -> 0.13.1 python3-pytest: upgrade 8.2.2 -> 8.3.1 python3-setuptools: upgrade 70.3.0 -> 71.1.0 python3-sphinx: upgrade 7.3.7 -> 7.4.7 python3-sphinxcontrib-htmlhelp: upgrade 2.0.5 -> 2.0.6 python3-sphinxcontrib-qthelp: upgrade 1.0.7 -> 1.0.8 spirv-headers: upgrade 1.3.283.0 -> 1.3.290.0 spirv-tools: upgrade 1.3.283.0 -> 1.3.290.0 strace: upgrade 6.9 -> 6.10 sysklogd: upgrade 2.5.2 -> 2.6.0 vulkan-headers: upgrade 1.3.283.0 -> 1.3.290.0 vulkan-loader: upgrade 1.3.283.0 -> 1.3.290.0 vulkan-tools: upgrade 1.3.283.0 -> 1.3.290.0 vulkan-utility-libraries: upgrade 1.3.283.0 -> 1.3.290.0 vulkan-validation-layers: upgrade 1.3.283.0 -> 1.3.290.0 vulkan-volk: upgrade 
1.3.283.0 -> 1.3.290.0 xwayland: upgrade 24.1.0 -> 24.1.1 binutils: upgrade 2.43 -> 2.43.1 btrfs-tools: upgrade 6.9.2 -> 6.10.1 createrepo-c: upgrade 1.1.3 -> 1.1.4 diffoscope: upgrade 272 -> 276 dnf: upgrade 4.21.0 -> 4.21.1 enchant2: upgrade 2.8.1 -> 2.8.2 erofs-utils: upgrade 1.7.1 -> 1.8.1 ethtool: upgrade 6.9 -> 6.10 freetype: upgrade 2.13.2 -> 2.13.3 libx11: upgrade 1.8.9 -> 1.8.10 libxfont2: upgrade 2.0.6 -> 2.0.7 libxtst: upgrade 1.2.4 -> 1.2.5 pkgconf: upgrade 2.2.0 -> 2.3.0 python3-babel: upgrade 2.15.0 -> 2.16.0 python3-hypothesis: upgrade 6.110.1 -> 6.111.1 python3-lxml: upgrade 5.2.2 -> 5.3.0 python3-setuptools: upgrade 72.1.0 -> 72.2.0 rpcbind: upgrade 1.2.6 -> 1.2.7 sysklogd: upgrade 2.6.0 -> 2.6.1 ttyrun: upgrade 2.33.1 -> 2.34.0 xwayland: upgrade 24.1.1 -> 24.1.2 systemd: upgrade 256.4 -> 256.5 acpica: upgrade 20240322 -> 20240827 cairo: upgrade 1.18.0 -> 1.18.2 dhcpcd: upgrade 10.0.8 -> 10.0.10 diffoscope: upgrade 276 -> 277 ell: upgrade 0.67 -> 0.68 libdrm: upgrade 2.4.122 -> 2.4.123 libsoup: upgrade 3.4.4 -> 3.6.0 liburcu: upgrade 0.14.0 -> 0.14.1 mc: upgrade 4.8.31 -> 4.8.32 nghttp2: upgrade 1.62.1 -> 1.63.0 ofono: upgrade 2.9 -> 2.10 python3-certifi: upgrade 2024.7.4 -> 2024.8.30 python3-idna: upgrade 3.7 -> 3.8 python3-maturin: upgrade 1.7.0 -> 1.7.1 python3-pbr: upgrade 6.0.0 -> 6.1.0 python3-websockets: upgrade 12.0 -> 13.0.1 python3-zipp: upgrade 3.20.0 -> 3.20.1 taglib: upgrade 2.0.1 -> 2.0.2 wayland-protocols: upgrade 1.36 -> 1.37 wayland: upgrade 1.23.0 -> 1.23.1 git: upgrade 2.46.0 -> 2.46.1 libevdev: upgrade 1.13.2 -> 1.13.3 orc: upgrade 0.4.39 -> 0.4.40 wireless-regdb: upgrade 2024.07.04 -> 2024.10.07 xwayland: upgrade 24.1.2 -> 24.1.3 Weisser, Pascal.ext (1): qemuboot: Trigger write_qemuboot_conf task on changes of kernel image realpath Yash Shinde (12): rust: Oe-selftest fixes for rust v1.76 rust: Upgrade 1.75.0->1.76.0 rust: reproducibility issue fix with v1.76 rust: Oe-selftest changes for rust v1.77 rust: Upgrade 1.76.0->1.77.0 rust: Upgrade 1.77.0->1.77.1 rust: Upgrade 1.77.1->1.77.2 rust: Oe-selftest changes for rust v1.78 rust: Upgrade 1.77.2->1.78.0 zlib: Enable PIE for native builds rust: Oe-selftest changes for rust v1.79 rust: Upgrade 1.78.0->1.79.0 Yi Zhao (9): libsdl2: upgrade 2.30.3 -> 2.30.4 less: upgrade 643 -> 661 util-linux: install lastlog2 volatile file rpm: fix expansion of %_libdir in macros libsdl2: upgrade 2.30.5 -> 2.30.6 bind: upgrade 9.20.0 -> 9.20.1 libpcap: upgrade 1.10.4 -> 1.10.5 libsdl2: upgrade 2.30.6 -> 2.30.7 systemd: fix broken links for sysvinit-compatible commands Yoann Congal (10): Revert "insane: skip unimplemented-ptest on S=WORKDIR recipes" insane: skip unimplemented-ptest checks if disabled spirv-tools: Fix git-describe related reproducibility spirv-tools: Update merged patch to backport status oeqa/selftest: Only rewrite envvars paths that absolutely point to builddir migration/release-notes-5.1: document oeqa/selftest envvars change release-notes-5.1: document added python3-libarchive-c ptest release-notes-5.1: document fixed _test_devtool_add_git_url test release-notes-5.1: document spirv-tools reproducibility python3-maturin: sort external libs in wheel files Yuri D'Elia (1): bitbake: fetch2/git: Enforce default remote name to "origin" Zoltan Boszormenyi (1): rpcbind: Fix boot time start failure aszh07 (2): xz: Update LICENSE variable for xz packages ffmpeg: Add "libswresample libavcodec" to CVE_PRODUCT gudnimg (1): bluez5: upgrade 5.72 -> 5.77 hongxu (7): libgpg-error: 1.49 -> 1.50 man-pages: 6.8 -> 6.9.1 
libxml2: 2.12.8 -> 2.13.3 readline: 8.2 -> 8.2.13 libxslt: 1.1.39 -> 1.1.42 xmlto: 0.0.28 -> 0.0.29 gnupg: 2.4.5 -> 2.5.0 simit.ghane (2): libgcrypt: Fix building error with '-O2' in sysroot path libgcrypt: upgrade 1.10.3 -> 1.11.0 y75zhang (1): bitbake: fetch/wget: checkstatus: drop shared connecton when catch Timeout error meta-openembedded: 487a2d5695..5d54a52fbe: Adrian Freihofer (1): networkmanager: remove modemmanager rdepends Akash Hadke (1): python3-flatbuffers: provide nativesdk support Alba Herrerías (1): yelp: fix unterminated string Alexander Kanavin (1): libnewt: add from oe-core Alexander Stein (1): luajit: Fix host development package Alexandre Truong (99): ace: include UPSTREAM_CHECK_* to fix UNKNOWN_BROKEN status acpitool: include UPSTREAM_CHECK_URI to fix UNKNOWN_BROKEN status anthy: include UPSTREAM_CHECK_* to fix UNKNOWN_BROKEN status atop: include UPSTREAM_CHECK_* to fix UNKNOWN_BROKEN status bitwise: Include UPSTREAM_CHECK_REGEX to fix UNKNOWN_BROKEN status cfengine-masterfiles: Include UPSTREAM_CHECK_* to fix UNKNOWN_BROKEN status ckermit: Include UPSTREAM_CHECK_* to fix UNKNOWN_BROKEN status cloc: include UPSTREAM_CHECK_REGEX to fix UNKNOWN_BROKEN status cups-filters: include UPSTREAM_CHECK_URI to fix UNKNOWN_BROKEN status cxxtest: include UPSTREAM_CHECK_URI to fix UNKNOWN_BROKEN status czmq: include UPSTREAM_CHECK_REGEX to fix UNKNOWN_BROKEN status daemontools: include UPSTREAM_CHECK_URI to fix UNKNOWN_BROKEN status doxygen: include UPSTREAM_CHECK_* to fix UNKNOWN_BROKEN status duktape: include UPSTREAM_CHECK_URI to fix UNKNOWN_BROKEN status fftw: include UPSTREAM_CHECK_URI to fix UNKNOWN_BROKEN status fltk: include UPSTREAM_CHECK_URI to fix UNKNOWN_BROKEN status fltk-native: include UPSTREAM_CHECK_URI to fix UNKNOWN_BROKEN status fwupd: include UPSTREAM_CHECK_REGEX to fix UNKNOWN_BROKEN status gmime: include UPSTREAM_CHECK_* to fix UNKNOWN_BROKEN status gnome-themes-extra: include UPSTREAM_CHECK_* to fix UNKNOWN_BROKEN status gradm: include UPSTREAM_CHECK_URI to fix UNKNOWN_BROKEN status graphviz: include UPSTREAM_CHECK_* to fix UNKNOWN_BROKEN status gtkperf: include UPSTREAM_CHECK_* to fix UNKNOWN_BROKEN status hplip: include UPSTREAM_CHECK_URI to fix UNKNOWN_BROKEN status icewm: include UPSTREAM_CHECK_REGEX to fix UNKNOWN_BROKEN status irssi: include UPSTREAM_CHECK_REGEX to fix UNKNOWN_BROKEN status jansson: modify existing UPSTREAM_CHECK_REGEX lcov: include UPSTREAM_CHECK_* to fix UNKNOWN_BROKEN status leptonica: include UPSTREAM_CHECK_REGEX to fix UNKNOWN_BROKEN status libcdio-paranoia: include UPSTREAM_CHECK_* to fix UNKNOWN_BROKEN status libdbus-c++: include UPSTREAM_CHECK_* to fix UNKNOWN_BROKEN status libftdi: include UPSTREAM_CHECK_URI to fix UNKNOWN_BROKEN status libgnt: include UPSTREAM_CHECK_* to fix UNKNOWN_BROKEN status libiodbc: include UPSTREAM_CHECK_* to fix UNKNOWN_BROKEN status libjs-jquery: include UPSTREAM_CHECK_* to fix UNKNOWN_BROKEN status liblinebreak: include UPSTREAM_CHECK_* to fix UNKNOWN_BROKEN status libmng: include UPSTREAM_CHECK_* to fix UNKNOWN_BROKEN status libmtp: include UPSTREAM_CHECK_* to fix UNKNOWN_BROKEN status libnice: include UPSTREAM_CHECK_* to fix UNKNOWN_BROKEN status libopusenc: include UPSTREAM_CHECK_* to fix UNKNOWN_BROKEN status libpaper: include UPSTREAM_CHECK_* to fix UNKNOWN_BROKEN status libpcsc-perl: include UPSTREAM_CHECK_* to fix UNKNOWN_BROKEN status libsdl-gfx: include UPSTREAM_CHECK_URI to fix UNKNOWN_BROKEN status libsigc++-2.0: include UPSTREAM_CHECK_* to fix UNKNOWN_BROKEN status libsigc++-3: include 
UPSTREAM_CHECK_* to fix UNKNOWN_BROKEN status libsmi: include UPSTREAM_CHECK_* to fix UNKNOWN_BROKEN status libspiro: include UPSTREAM_CHECK_* to fix UNKNOWN_BROKEN status libstatgrab: include UPSTREAM_CHECK_REGEX to fix UNKNOWN_BROKEN status libwmf: include UPSTREAM_CHECK_* to fix UNKNOWN_BROKEN status libx86-1: include UPSTREAM_CHECK_URI to fix UNKNOWN_BROKEN status libxml++-5.0: include UPSTREAM_CHECK_REGEX to fix UNKNOWN_BROKEN status logwarn: include UPSTREAM_CHECK_* to fix UNKNOWN_BROKEN status lprng: include UPSTREAM_CHECK_* to fix UNKNOWN_BROKEN status mcpp: include UPSTREAM_CHECK_* to fix UNKNOWN_BROKEN status mozjs-115: include UPSTREAM_CHECK_* to fix UNKNOWN_BROKEN status mscgen: include UPSTREAM_CHECK_URI to fix UNKNOWN_BROKEN status msgpack-cpp: include UPSTREAM_CHECK_* to fix UNKNOWN_BROKEN status msktutil: include UPSTREAM_CHECK_* to fix UNKNOWN_BROKEN status nmon: include UPSTREAM_CHECK_* to fix UNKNOWN_BROKEN status nss: modify UPSTREAM_CHECK_* to fix UNKNOWN_BROKEN status obexftp: include UPSTREAM_CHECK_* to fix UNKNOWN_BROKEN status onig: include UPSTREAM_CHECK_* to fix UNKNOWN_BROKEN status openbox: include UPSTREAM_CHECK_* to fix UNKNOWN_BROKEN status openct: include UPSTREAM_CHECK_URI to fix UNKNOWN_BROKEN status openobex: include UPSTREAM_CHECK_* to fix UNKNOWN_BROKEN status p7zip: include UPSTREAM_CHECK_* to fix UNKNOWN_BROKEN status pngcheck: include UPSTREAM_CHECK_* to fix UNKNOWN_BROKEN status rsyslog: modify existing UPSTREAM_CHECK_URI to fix UNKNOWN_BROKEN status sblim-cmpi-devel: include UPSTREAM_CHECK_* to fix UNKNOWN_BROKEN status sblim-sfc-common: include UPSTREAM_CHECK_URI to fix UNKNOWN_BROKEN status ttf-ubuntu-font-family: include UPSTREAM_CHECK_* to fix UNKNOWN_BROKEN status ttf-wqy-zenhei: include UPSTREAM_CHECK_* to fix UNKNOWN_BROKEN status uml-utilities: include UPSTREAM_CHECK_URI to fix UNKNOWN_BROKEN status xrdp: include UPSTREAM_CHECK_* to fix UNKNOWN_BROKEN status xscreensaver: include UPSTREAM_CHECK_URI to fix UNKNOWN_BROKEN status can-isotp: include UPSTREAM_CHECK_COMMITS to fix UNKNOWN_BROKEN status con2fbmap: include UPSTREAM_CHECK_COMMITS to fix UNKNOWN_BROKEN status cpufrequtils: include UPSTREAM_CHECK_COMMITS to fix UNKNOWN_BROKEN status dbus-daemon-proxy: include UPSTREAM_CHECK_COMMITS to fix UNKNOWN_BROKEN status devmem2: include UPSTREAM_CHECK_COMMITS to fix UNKNOWN_BROKEN status edid-decode: include UPSTREAM_CHECK_COMMITS to fix UNKNOWN_BROKEN status fb-test: include UPSTREAM_CHECK_COMMITS to fix UNKNOWN_BROKEN status firmwared: include UPSTREAM_CHECK_COMMITS to fix UNKNOWN_BROKEN status funyahoo-plusplus: include UPSTREAM_CHECK_COMMITS to fix UNKNOWN_BROKEN status hunspell-dictionaries: switch branch from master to main hunspell-dictionaries: include UPSTREAM_CHECK_COMMITS to fix UNKNOWN_BROKEN status icyque: include UPSTREAM_CHECK_COMMITS to fix UNKNOWN_BROKEN status iksemel: include UPSTREAM_CHECK_COMMITS to fix UNKNOWN_BROKEN status kconfig-frontends: include UPSTREAM_CHECK_COMMITS to fix UNKNOWN_BROKEN status libbacktrace: include UPSTREAM_CHECK_COMMITS to fix UNKNOWN_BROKEN status libc-bench: include UPSTREAM_CHECK_COMMITS to fix UNKNOWN_BROKEN status libubox: include UPSTREAM_CHECK_COMMITS to fix UNKNOWN_BROKEN status linux-serial-test: include UPSTREAM_CHECK_COMMITS to fix UNKNOWN_BROKEN status musl-rpmatch: include UPSTREAM_CHECK_COMMITS to fix UNKNOWN_BROKEN status pam-plugin-ccreds: include UPSTREAM_CHECK_COMMITS to fix UNKNOWN_BROKEN status pcimem: include UPSTREAM_CHECK_COMMITS to fix UNKNOWN_BROKEN status pim435: include 
UPSTREAM_CHECK_COMMITS to fix UNKNOWN_BROKEN status properties-cpp: include UPSTREAM_CHECK_COMMITS to fix UNKNOWN_BROKEN status pegtl: add ptest support Alexandre Videgrain (1): openbox: fix crash on alt+tab with fullscreen app Anuj Mittal (1): tbb: pass TBB_STRICT=OFF to disable -Werror Archana Polampalli (1): apache2: Upgrade 2.4.60 -> 2.4.61 Armin Kuster (2): meta-openemnedded: Add myself as styhead maintainer audit: fix build when systemd is enabled. BINDU (1): flatbuffers: adapt for cross-compilation environments Barry Grussling (1): postgresql: Break perl RDEPENDS Bartosz Golaszewski (4): python3-gpiod: update to v2.2.0 python3-virtualenv: add missing run-time dependencies libgpiod: update v2.1.2 -> v2.1.3 python3-gpiod: update v2.2.0 -> v2.2.1 Benjamin Szőke (1): tree: fix broken links Carlos Alberto Lopez Perez (1): sysprof: upgrade 3.44.0 -> 3.48.0 Changqing Li (4): python3-h5py: remove unneeded CFLAGS pavucontrol: update SRC_URI libatasmart: Update SRC_URI libdbi-perl: upgrade 1.643 -> 1.644 Chen Qi (2): python3-protobuf: remove useless and problematic .pth file jansson: add JSON_INTEGER_IS_LONG_LONG for cmake Christian Eggers (2): lvgl: fix version in shared library file name lvgl: update upstream-status of all patches Christophe Vu-Brugier (2): nvme-cli: upgrade 2.9.1 -> 2.10.2 exfatprogs: upgrade 1.2.4 -> 1.2.5 Dimitri Bouras (1): python3-geomet: Switch to setuptools_build_meta build backend Dmitry Baryshkov (6): android-tools: make PN-adbd as a systemd package deqp-runner: improved version of parallel-deqp-runner packagegroup-meta-oe: include deqp-runner into packagegroup-meta-oe-graphics README.md: discourage use of GitHub pull request system android-tools: create flag flag file for adbd at a proper location gpsd: apply patch to fix gpsd building on Musl Einar Gunnarsson (2): yavta: Update to kernel 6.8 v4l-utils: Install media ctrl pkgconfig files Enrico Jörns (6): libconfuse: move to meta-oe libconfuse: provide native and nativesdk support libconfuse: replace DESCRIPTION by SUMMARY libconfuse: switch to release tar archive libconfuse: add backported patch to fix search path logic genimage: add new recipe Esben Haabendal (1): netplan: add missing runtime dependencies Etienne Cordonnier (3): uutils-coreutils: upgrade 0.0.26 -> 0.0.27 uutils-coreutils: disable buildpaths error perfetto: upgrade 31.0 -> 47.0 Fabio Estevam (1): imx-cst: Add recipe Faiz HAMMOUCHE (6): uim: update UPSTREAM_CHECK_* variables to fix devtool upgrades unixodbc: update UPSTREAM_CHECK_* variables to fix devtool upgrades xdotool: update UPSTREAM_CHECK_* variables to fix devtool upgrades xf86-input-tslib: update UPSTREAM_CHECK_* variables to fix devtool upgrades wvstrams: Unmaintained upstream, add UPSTREAM_VERSION_UNKNOWN wvdial: Unmaintained upstream, add UPSTREAM_VERSION_UNKNOWN Fathi Boudra (2): python3-django: upgrade 4.2.11 -> 4.2.16 python3-django: upgrade 5.0.6 -> 5.0.9 Frank de Brabander (1): python3-pydantic-core: fix incompatible version Fredrik Hugosson (1): lvm2: Remove the lvm2-udevrules package Ghislain Mangé (1): wireshark: fix typo in PACKAGECONFIG[zstd] Gianfranco Costamagna (1): vbxguestdrivers: upgrade 7.0.18 -> 7.0.20 Guocai He (1): mariadb: File conflicts for multilib Guðni Már Gilbert (5): python3-incremental: improve packaging python3-twisted: upgrade 24.3.0 -> 24.7.0 python3-incremental: drop python3-twisted-core from RDEPENDS python3-twisted: add python3-attrs to RDEPENDS python3-automat: upgrade 22.10.0 -> 24.8.1 Harish Sadineni (1): bpftool: Add support for riscv64 Hauke Lampe 
(1): postgresql: Use packageconfig flag for readline dependency Hitendra Prajapati (1): tcpdump: fix CVE-2024-2397 Hongxu Jia (1): nodejs: support cross compile without qemu user conditionally Hubert Wiśniewski (1): libcamera: Use multiple of sizeof as malloc size J. S. (8): znc: Fix buildpaths QA errors webmin: upgrade 2.111 -> 2.202 nodejs: upgrade 20.16.0 -> 20.17.0 syslog-ng: upgrade 4.6.0 -> 4.7.0 xfce4-panel: upgrade 4.18.3 -> 4.18.4 nodejs: upgrade 20.17.0 -> 20.18.0 xfce4-panel: upgrade 4.18.4 -> 4.18.5 nodejs: cleanup Jamin Lin (1): drgn: add new recipe Jan Luebbe (2): python3-grpcio-reflection: new recipe python3-grpcio-channelz: new recipe Jan Vermaete (3): python3-protobuf: added python3-ctypes as RDEPENDS protobuf: version bump 4.25.3 -> 4.25.4 netdata: version bump 1.47.0 -> 1.47.1 Jason Schonberg (1): nodejs: upgrade 20.13.0 -> 20.16.0 Jeremy A. Puhlman (1): net-snmp: Set ps flag value since it checks the host Jeroen Knoops (1): nng: Rename default branch of github.com:nanomsg/nng.git Jiaying Song (3): nftables: change ptest output format wireguard-tools: fix do_fetch error vlock: fix do_fetch error Jose Quaresma (6): composefs: the srcrev hash was the release tag ostree: Upgrade 2024.6 -> 2024.7 composefs: upgrade 1.0.4 -> 1.0.5 gpsd: make the meta-python dependency conditionally Revert "gpsd: make the meta-python dependency conditionally" gpsd: condition the runtime dependence of pyserial on the pygps Justin Bronder (1): python3-xmodem: replace hardcoded /usr with ${prefix} Jörg Sommer (5): dnsmasq: Install conf example from upstream instead of our version dnsmasq: set config dhcp6, broken-rtc by FEATURES gpsd: upgrade 3.24 -> 3.25; new gpsd-snmp bluealsa: upgrade 4.0.0+git -> 4.3.0 zsh: update 5.8 -> 5.9 Kai Kang (1): libosinfo: add runtime dependency osinfo-db Katariina Lounento (1): libtar: patch CVEs Keith McRae (1): ntp: Fix status call reporting incorrect value Khem Raj (142): python3-tornado: Switch to python_setuptools_build_meta rdma-core: Fix recvfrom override errors with glibc 2.40 and clang tipcutils: Replace WORKDIR with UNPACKDIR rdma-core: Do not use overloadable attribute with musl python3-pint: Upgrade to 24.1 flite: Fix buld with clang fortify enabled distros python3-inflate64: Fix build with clang fortified glibc headers renderdoc: Upgrade to 1.33 renderdoc: Fix build with clang fortify and glibc 2.40 overlayfs-tools: Fix build with musl webmin: Upgrade to 2.111 release opencv: Check GTK3DISTROFEATURES for enabling gtk support opencv: Add missing trailing slash sysprof: Fix build with llvm libunwind log4cpp: Fix buildpaths QA error ldns: Upgrade to 1.8.4 libwmf: Fix buildpaths QA Errors in libwmf-config Revert "libftdi: Fix missing ftdi_eeprom" vsomeip: Fix build with GCC-14 turbostat: Add band-aid to build from 6.10+ kernel python3-daemon: Fix build with PEP-575 build backend zfs: Upgrade to 2.2.5 release e2tools: Fix buildpaths QA warning in config.status in ptest glibmm: Upgrade to 2.66.7 release transmission: Upgrade to 4.0.6 release wolfssl: Add packageconfig for reproducible build lprng: Specify target paths for needed utilities sharutils: Let POSIX_SHELL be overridable from environment freediameter: Fix buildpaths QA error libforms: Remove buildpaths from fd2ps and fdesign scripts blueman: Fix buildpathe issue with cython generated code fvwm: Fix buildpaths QA Errors proftpd: Upgrade to 1.3.8b botan: Make it reproducible ndisc: Remove buildpaths from binaries python3-kivy: Remove buildpaths from comments in generated C sources keepalived: Make 
build reproducible fwknop: Upgrade to 2.6.11 fwknop: Specify target locations of gpg and wget ippool: Fix buildpaths QA error ot-br-posix: Define config files explicitly libyui: Upgrade to 4.6.2 fluentbit: Make it deprecated python3-pyproj: Fix buildpaths QA Error python3-pyproj: Remove absolute paths from cython generated .c files libyui-ncurses: Fix buildpaths QA Error ftgl: Upgrade to 2.4.0 ftgl: Switch to maintained fork frr: Upgrade to 10.1 release python3-pandas: Downgrade version check for numpy to 1.x python3-pycocotools: Use build pep517-backend python3-pycocotools: Downgrade numpy version needed to 1.x python3-pycocotools: Remove absolute paths from comments raptor2: Do not use curl-config to detect curl libgsf: Fix build with libxml2 2.13+ libspatialite: Upgrade to 5.1 libblockdev: Fix build with latest e2fsprogs bluealsa: Fix build on musl bluealsa: Update cython patch to latest upstream patch mariadb: Upgrade to 10.11.9 release gerbera: Upgrade to 2.2.0 e2tools: Fix build with automake 1.17 minidlna: Upgrade to 1.3.3 release vlc: Upgrade to 3.0.21 libplacebo: Add recipe mpv: Upgrade to 0.38.0 release libmpdclient,mpc: Upgrade to 2.22 and 0.35 respectively vlc: Disable recipe mpd: Upgrade to 0.23.15+git xdg-desktop-portal-wlr: Update to latest on master branch ltrace: Switch to gitlab SRC_URI webkitgtk3: Fix build with latest clang python3-grpcio: Upgrade to 1.66.1 release grpc: Upgrade to 1.66.1 release mozjs-115: fix build with clang and libc++ 19 nmap: Upgrade to 7.95 etcd-cpp-apiv3: Fix build with gprc 2.66+ paho-mqtt-cpp: Upgrade to 1.4.1 release poppler: Upgrade to 24.09.0 release nodejs: Fix build with libc++ 19 poco: Drop RISCV patch paho-mqtt-cpp: Move to tip of 1.4.x branch netdata: Upgrade to 1.47.0 freeipmi: Add recipe opentelemetry-cpp: Fix build with clang-19 opengl-es-cts,vulkan-cts: Upgrade recipes to 3.2.11.0 and 1.3.9.2 libcereal: Fix build with clang-19 libjxl: Upgrade to 0.10.3 release python3-serpent: Add missing rdeps for ptests to run python3-parse-type: Add missing rdep on six for ptests paho-mqtt-cpp: Use system paho-mqtt-c python3-serpent: Fix typo attr -> attrs python3-tzdata: Add missing attrs modules rdep for ptests python3-trustme: Add missing ptest rdeps on attrs and six modules python3-service-identity: Fix ptest rdeps python3-fsspec: Add recipe ptest-packagelists-meta-python: Add python3-fsspec to fast test list python3-pyyaml-include: Add missing dependencies for ptests python3-py-cpuinfo: Fix ptest runtime deps python3-flask: Add missing ptest deps yavta: Upgrade SRCREV to include 64bit time_t print format errors libjxl: Do not use -mrelax-all on RISCV with clang python3-wrapt: Add missing rdep on misc modules for ptests python3-pillow: Add missing rdep on py3-compile for ptests python-ujson: Use python_setuptools_build_meta python3-pylint: Add missing ptest rdep on python3-misc python3-fastjsonschema: Add missing rdeps for ptests python3-pytest-mock: Upgrade to 3.14.0 protobuf-c: Link with libatomic on riscv32 highway: Disable RVV on RISCV-32 dav1d: Disable asm code on rv32 mosh: Use libatomic on rv32 for atomics dlm: Disable fcf-protection on riscv32 usbguard: Link with libatomic on rv32 transmission: Link with libatomic on riscv32 ot-br-posix: Link with libatomic on rv32 opentelemetry-cpp: Link with libatomic on rv32 mozjs-115: Fix build on riscv32 netdata: Add checks for 64-bit atomic builtins liburing: Upgrade to 2.7 and fix build on riscv32 highway: Fix cmake to detect riscv32 libjxl: Disable sizeless-vectors on riscv32 kernel-selftest: 
Fix build on 32bit arches with 64bit time_t reptyr: Do not build for riscv32 python3-typer: Disable test_rich_markup_mode tests python3-pydbus: Add missing rdep on xml module for ptests python3-pdm: Upgrade to 2.19.1 python3-pdm-backend: Upgrade to 2.4.1 release python3-ujson: Add python misc modules to ptest rdeps python3-gunicorn: Add missing rdeps for ptests python3-eth-hash: Add packageconfigs and switch to pep517-backend python3-validators: Add missing rdeps for ptests python3-pint: Upgrade to 0.24.3 python3-pytest-mock: Fix ptests python3-sqlparse: Add missing rdep on mypy module for ptests libhugetlbfs: Use linker wrapper during build webkitgtk3: Always use -g1 for debug flags webkitgtk3: Fix build break with latest gir ndisc6: Fix reproducible build rsyslog: Enable 64bit atomics check xmlsec1: Switch SRC_URI to use github release python3-pdm-build-locked: Add recipe Kieran Bingham (1): libcamera: Add support for pycamera Leon Anavi (39): python3-eth-utils: Upgrade 3.0.0 -> 4.1.1 python3-requests-file: Upgrade 1.5.1 -> 2.1.0 python3-filelock: Upgrade 3.14.0 -> 3.15.3 python3-hexbytes: Upgrade 1.2.0 -> 1.2.1 python3-moteus: Upgrade 0.3.70 -> 0.3.71 python3-tornado: Upgrade 6.4 -> 6.4.1 python3-paho-mqtt: Upgrade 2.0.0 -> 2.1.0 python3-pyperclip: Upgrade 1.8.2 -> 1.9.0 python3-whitenoise: Upgrade 6.6.0 -> 6.7.0 python3-pycocotools: Upgrade 2.0.7 -> 2.0.8 python3-cbor2: Upgrade 5.6.3 -> 5.6.4 python3-gunicorn: Upgrade 21.2.0 -> 22.0.0 python3-aiohttp: Upgrade 3.9.5 -> 3.10.0 python3-aiosignal: switch to PEP-517 build backend python3-pycares: switch to PEP-517 build backend python3-multidict: switch to PEP-517 build backend python3-cachetools: Upgrade 5.3.3 -> 5.4.0 python3-coverage: switch to PEP-517 build backend coverage: Upgrade 7.6.0 -> 7.6.1 python3-aiohttp: Upgrade 3.10.0 -> 3.10.1 python3-hatch-requirements-txt: Add recipe python3-pymongo: Upgrade 4.7.3 -> 4.8.0 python3-itsdangerous: Upgrade 2.1.2 -> 2.2.0 python3-sniffio: witch to PEP-517 build backend python3-sniffio: Upgrade 1.3.0 -> 1.3.1 python3-qface: Upgrade 2.0.10 -> 2.0.11 python3-argcomplete: switch to PEP-517 build backend python3-argcomplete: Upgrade 3.4.0 -> 3.5.0 python3-prettytable: Upgrade 3.10.2 -> 3.11.0 python3-transitions: Upgrade 0.9.1 -> 0.9.2 python3-apispec: Upgrade 6.4.0 -> 6.6.1 python3-imageio: Upgrade 2.34.2 -> 2.35.0 python3-aiohttp: Upgrade 3.10.1 -> 3.10.3 python3-watchdog: Upgrade 4.0.1 -> 4.0.2 python3-soupsieve: Upgrade 2.5 -> 2.6 python3-fastjsonschema: Upgrade 2.18.0 -> 2.20.0 python3-dirty-equals: Upgrade 0.7.1 -> 0.8.0 python3-path: Upgrade 16.14.0 -> 17.0.0 python3-astroid: Upgrade 3.2.4 -> 3.3.2 Libo Chen (1): thin-provisioning-tools: install missed thin_shrink and era_repair Liyin Zhang (1): sound-theme-freedesktop: Update SRC_URI Luca Boccassi (4): dbus-broker: upgrade 32 -> 36 polkit: stop overriding DAC on /usr/share/polkit-1/rules.d polkit: update 124 -> 125 polkit: install group rules in /usr/share/ instead of /etc/ Marc Ferland (3): polkit: update SRC_URI polikt: add elogind packageconfig option polkit: add libs-only PACKAGECONFIG option Markus Volk (28): exiv2: update 0.28.0 -> 0.28.2 wireplumber: update 0.5.3 -> 0.5.5 pipewire: update 1.0.7 -> 1.2.0 flatpak: add PACKAGECONFIG for dconf lvm2: install all systemd service files nss: update 3.101 > 3.102 geary: update 44.1 -> 46.0 dav1d: update 1.4.2 -> 1.4.3 pipewire: update 1.2.0 -> 1.2.1 flatpak: update 1.15.8 -> 1.15.9 blueman: update 2.3.5 -> 2.4.3 pipewire: update 1.2.1 -> 1.2.2 webkitgtk3: update 2.44.2 -> 2.44.3 iwd: update 
2.18 -> 2.19 bubblewrap: update 0.9.0 -> 0.10.0 flatpak: update 1.15.9 -> 1.15.10 pipewire: update 1.2.2 -> 1.2.3 cleanup after polkit fix libspelling: add recipe wireplumber: update 0.5.5. -> 0.5.6 gnome-disk-utility: update 46.0 -> 46.1 rygel: update 0.42.5 -> 0.44.0 colord: add configuration to fix runtime iwd: update 2.19 -> 2.20 iwd: use internal ell gnome-shell: add gnome-control-center dependency gnome-desktop: update 44.0 -> 44.1 cryptsetup: fix udev PACKAGECONFIG Martin Jansa (15): lvgl: install lv_conf.h in ${includedir}/${BPN} giflib: fix build with gold and avoid imagemagick-native dependency recipes: ignore various issues fatal with gcc-14 (for 32bit MACHINEs) recipes: ignore various issues fatal with gcc-14 bolt: package systemd_system_unitdir correctly pkcs11-provider: backport a fix for build with gcc-14 blueman: fix installation paths polkit-group-rule: package polkit rules vdpauinfo: require x11 in DISTRO_FEATURES gpm: fix buildpaths QA issue xerces-c: fix buildpaths QA issue gcab: keep buildpaths QA issue as a warning gcab: fix buildpaths QA issue nmap: depend on libpcre2 not libpcre xmlrpc-c: update SRCREV Maxin John (1): nginx: add PACKAGECONFIG knobs for fastcgi, scgi and uwsgi Michael Trimarchi (1): cpuset: Add recipe for cpuset tool 1.6.2 Mikko Rapeli (3): fwupd: skip buildpaths errors gcab: ignore buildpaths error from sources libjcat: skip buildpaths check Neel Gandhi (1): v4l-utils: Install media ctrl header and library files Nikhil R (1): rocksdb: Add an option to set static library Niko Mauno (27): pkcs11-provider: Upgrade 0.3 -> 0.5 opensc: Amend FILES:${PN} declaration opensc: Add 'readline' PACKAGECONFIG option opensc: Drop virtual/libiconv from DEPENDS opensc: Fix LICENSE declaration opensc: Cosmetic fixes python3-xlsxwriter: Fix LICENSE python3-ansi2html: Fix HOMEPAGE and LICENSE python3-cbor2: Fix LICENSE and LIC_FILES_CHKSUM python3-cbor2: Sanitize recipe content python3-crc32c: Amend LICENSE declaration python3-email-validator: Fix LICENSE python3-lru-dict: Fix LICENSE and change SUMMARY to DESCRIPTION python3-mock: Fix LICENSE python3-parse-type: Fix LICENSE python3-parse-type: Cosmetic fixes python3-pillow: Fix LICENSE and change SUMMARY to DESCRIPTION python3-platformdirs: Fix LICENSE python3-colorama: Fix LICENSE python3-fann2: Fix LICENSE python3-nmap: Fix LICENSE and LIC_FILES_CHKSUM python3-pycurl: Fix LICENSE python3-googleapis-common-protos: Fix LIC_FILES_CHKSUM python3-haversine: Fix LIC_FILES_CHKSUM python3-libevdev: Fix LIC_FILES_CHKSUM python3-smbus2: Fix LIC_FILES_CHKSUM python3-xmodem: Fix LIC_FILES_CHKSUM Ninette Adhikari (15): imagemagick: Update status for CVE mercurial: Update CVE status for CVE-2022-43410 influxdb: Update CVE status for CVE-2019-10329 links: CVE status update for CVE-2008-3319 usrsctp: CVE status update for CVE-2019-20503 libraw: CVE status update for CVE-2020-22628 and CVE-2023-1729 xsp: CVE status update for CVE-2006-2658 apache2:apache2-native: CVE status update gimp: CVE status update php-native: CVE status update for CVE-2022-4900 xterm: CVE status update CVE-1999-0965 redis: Update status for CVE-2022-3734 monkey: Update status for CVE-2013-2183 apache2: Update CVE status imagemagick: Update status for CVE Peter Kjellerstedt (2): libdevmapper: Inherit nopackages poppler: Correct the configuration options Peter Marko (4): cjson: fix buildpath warnings squid: Upgrade to 6.10 nginx: Upgrade stable 1.26.0 -> 1.26.2 nginx: Upgrade mainline 1.25.3 -> 1.27.1 Poonam Jadhav (1): tcpreplay: Fix CVE-2023-4256 Przemyslaw 
Zegan (1): libftdi: Fix missing ftdi_eeprom Quentin Schulz (1): nftables: fix pep517-backend warning Randolph Sapp (2): vulkan-cts: add workaround for createMeshShaderMiscTestsEXT opencl-clhpp: add native and nativesdk Randy MacLeod (2): libee: remove recipe since libee is obsolete liblinebreak: remove obsolete library Ricardo Simoes (8): magic-enum: add recipe magic-enum: Disable unused-value warning in tests memtool: Add recipe directfb: Order PACKAGECONFIG alphabetically directfb: Add freetype PACKAGECONFIG directfb: Add zlib PACKAGECONFIG directfb: Fix C++17 build warning magic-enum: Upgrade v0.9.5 -> v0.9.6 Richard Tollerton (1): tmux: Upgrade to 3.4 Robert Middleton (1): Upgrade dbus-cxx to 2.5.2 Ross Burton (9): libabigail: add recipe for the ABI Generic Analysis and Instrumentation Library libabigail: refresh musl/fts patch python3-importlib-metadata: add from openembedded-core python3-pathlib2: add from openembedded-core python3-py: add from openembedded-core python3-pytest-runner: add from openembedded-core python3-rfc3986-validator: add from openembedded-core python3-toml: add from openembedded-core python3-tomli: add from openembedded-core Rouven Czerwinski (1): softhsm: add destroyed global access prevention patch Ryan Eatmon (2): mpv: Fix typo in x11 option kernel-selftest: Update to allow for turning on all tests Shinji Matsunaga (1): audit: Fix CVE_PRODUCT Siddharth Doshi (1): apache2: Upgrade 2.4.59 -> 2.4.60 Soumya Sambu (4): php: Upgrade to 8.2.20 python3-werkzeug: upgrade 3.0.1 -> 3.0.3 gtk+: Fix CVE-2024-6655 python3-flask-cors: Fix CVE-2024-6221 Thomas Perrot (1): vdpauinfo: add recipe Tim Orling (7): python3-configobj: switch to PEP-517 build backend python3-tzdata: add recipe for v2024.1 python3-tzdata: enable ptest python3-pydantic-core: upgrade 2.18.4 -> 2.21.0 python3-pydantic: upgrade 2.7.3 -> 2.8.2 python3-pydantic-core: backport patch python3-psycopg: add v3.2.1 Tom Geelen (4): python3-sqlparse 0.4.4 -> 0.5.0 python3-bleak 0.21.1 -> 0.22.2 python3-aiohue: 4.7.1 -> 4.7.2 python3-pyjwt 2.8.0 -> 2.9.0 Trevor Gamblin (1): python3-pandas: upgrade 2.0.3 -> 2.2.2 Trevor Woerner (2): apache2: use update-alternatives for httpd python3-matplotlib-inline: update 0.1.6 → 0.1.7 plus fixes Tymoteusz Burak (1): dediprog-flasher: Add recipe Valeria Petrov (1): apache2: do not depend on zlib header and libs from host Vijay Anusuri (3): tipcutils: Add systemd support krb5: upgrade 1.21.2 -> 1.21.3 wireshark: upgrade 4.2.6 -> 4.2.7 Vyacheslav Yurkov (1): overlayfs: Use explicit version Wang Mingyu (306): cryptsetup: upgrade 2.7.2 -> 2.7.3 ctags: upgrade 6.1.20240602.0 -> 6.1.20240623.0 dialog: upgrade 1.3-20240307 -> 1.3-20240619 editorconfig-core-c: upgrade 0.12.7 -> 0.12.9 exiftool: upgrade 12.85 -> 12.87 frr: upgrade 10.0 -> 10.0.1 gensio: upgrade 2.8.4 -> 2.8.5 gtkwave: upgrade 3.3.119 -> 3.3.120 iniparser: upgrade 4.2.2 -> 4.2.4 libbpf: upgrade 1.4.2 -> 1.4.3 libcgi-perl: upgrade 4.64 -> 4.66 libcrypt-openssl-random-perl: upgrade 0.16 -> 0.17 libdaq: upgrade 3.0.14 -> 3.0.15 libextutils-helpers-perl: upgrade 0.026 -> 0.027 libfido2: upgrade 1.14.0 -> 1.15.0 libimobiledevice-glue: upgrade 1.2.0 -> 1.3.0 mcelog: upgrade 199 -> 200 msgraph: upgrade 0.2.2 -> 0.2.3 networkmanager-openvpn: upgrade 1.11.0 -> 1.12.0 opentelemetry-cpp: upgrade 1.15.0 -> 1.16.0 openvpn: upgrade 2.6.10 -> 2.6.11 python3-ansi2html: upgrade 1.9.1 -> 1.9.2 python3-argcomplete: upgrade 3.3.0 -> 3.4.0 python3-bandit: upgrade 1.7.8 -> 1.7.9 python3-coverage: upgrade 7.5.3 -> 7.5.4 
python3-djangorestframework: upgrade 3.15.1 -> 3.15.2 python3-email-validator: upgrade 2.1.1 -> 2.2.0 python3-filelock: upgrade 3.15.3 -> 3.15.4 python3-flexparser: upgrade 0.3 -> 0.3.1 python3-google-api-python-client: upgrade 2.131.0 -> 2.134.0 python3-google-auth: upgrade 2.29.0 -> 2.30.0 python3-googleapis-common-protos: upgrade 1.63.0 -> 1.63.1 python3-huey: upgrade 2.5.0 -> 2.5.1 python3-langtable: upgrade 0.0.66 -> 0.0.67 python3-marshmallow: upgrade 3.21.2 -> 3.21.3 python3-meh: upgrade 0.51 -> 0.52 python3-openpyxl: upgrade 3.1.3 -> 3.1.4 python3-parse: upgrade 1.20.1 -> 1.20.2 python3-pdm-backend: upgrade 2.3.0 -> 2.3.1 python3-pint: upgrade 0.23 -> 0.24 python3-portalocker: upgrade 2.8.2 -> 2.10.0 python3-prompt-toolkit: upgrade 3.0.45 -> 3.0.47 python3-pycodestyle: upgrade 2.11.1 -> 2.12.0 python3-pymisp: upgrade 2.4.190 -> 2.4.194 python3-pymongo: upgrade 4.7.2 -> 4.7.3 python3-pyproject-api: upgrade 1.6.1 -> 1.7.1 python3-redis: upgrade 5.0.4 -> 5.0.6 python3-responses: upgrade 0.25.0 -> 0.25.3 python3-robotframework: upgrade 7.0 -> 7.0.1 python3-scikit-build: upgrade 0.17.6 -> 0.18.0 python3-sqlalchemy: upgrade 2.0.30 -> 2.0.31 python3-tox: upgrade 4.15.0 -> 4.15.1 python3-types-psutil: upgrade 5.9.5.20240516 -> 6.0.0.20240621 python3-virtualenv: upgrade 20.26.2 -> 20.26.3 qpdf: upgrade 11.9.0 -> 11.9.1 tesseract: upgrade 5.3.4 -> 5.4.1 thingsboard-gateway: upgrade 3.5 -> 3.5.1 openldap: upgrade 2.6.7 -> 2.6.8 openldap: fix lib32-openldap build failure with gcc-14 sblim-sfcc: fix build failure with gcc-14 openct: fix build failure with gcc-14 libcurses-perl: upgrade 1.41 -> 1.45 ctags: upgrade 6.1.20240623.0 -> 6.1.20240630.0 feh: upgrade 3.10.2 -> 3.10.3 gexiv2: upgrade 0.14.2 -> 0.14.3 isomd5sum: upgrade 1.2.4 -> 1.2.5 libndp: upgrade 1.8 -> 1.9 networkmanager: upgrade 1.48.0 -> 1.48.2 python3-a2wsgi: upgrade 1.10.4 -> 1.10.6 python3-aiofiles: upgrade 23.2.1 -> 24.1.0 python3-alembic: upgrade 1.13.1 -> 1.13.2 python3-awesomeversion: upgrade 24.2.0 -> 24.6.0 python3-dbus-fast: upgrade 2.21.3 -> 2.22.1 python3-gast: upgrade 0.5.4 -> 0.6.0 python3-google-api-core: upgrade 2.19.0 -> 2.19.1 python3-google-api-python-client: upgrade 2.134.0 -> 2.135.0 python3-googleapis-common-protos: upgrade 1.63.1 -> 1.63.2 python3-imageio: upgrade 2.34.1 -> 2.34.2 python3-ipython: upgrade 8.25.0 -> 8.26.0 python3-openpyxl: upgrade 3.1.4 -> 3.1.5 python3-pdm: upgrade 2.15.4 -> 2.16.1 python3-pymodbus: upgrade 3.6.8 -> 3.6.9 python3-rapidjson: upgrade 1.17 -> 1.18 python3-redis: upgrade 5.0.6 -> 5.0.7 python3-twine: upgrade 5.1.0 -> 5.1.1 python3-types-setuptools: upgrade 70.0.0.20240524 -> 70.1.0.20240627 python3-web3: upgrade 6.19.0 -> 6.20.0 fetchmail: disable rpath to fix buildpaths warning. 
procmail: fix build failure with gcc-14 botan: upgrade 3.4.0 -> 3.5.0 ctags: upgrade 6.1.20240630.0 -> 6.1.20240714.0 exiftool: upgrade 12.87 -> 12.89 gnome-keyring: upgrade 46.1 -> 46.2 hwdata: upgrade 0.383 -> 0.384 imlib2: upgrade 1.12.2 -> 1.12.3 ipset: upgrade 7.21 -> 7.22 libass: upgrade 0.17.2 -> 0.17.3 libbpf: upgrade 1.4.3 -> 1.4.5 lvm2: upgrade 2.03.24 -> 2.03.25 libio-socket-ssl-perl: upgrade 2.085 -> 2.088 mpich: upgrade 4.2.1 -> 4.2.2 nano: upgrade 8.0 -> 8.1 networkmanager: upgrade 1.48.2 -> 1.48.4 poke: upgrade 4.1 -> 4.2 python3-argh: upgrade 0.31.2 -> 0.31.3 python3-astroid: upgrade 3.2.2 -> 3.2.3 python3-coverage: upgrade 7.5.4 -> 7.6.0 python3-humanize: upgrade 4.9.0 -> 4.10.0 python3-moteus: upgrade 0.3.71 -> 0.3.72 python3-oletools: upgrade 0.60.1 -> 0.60.2 python3-pdm-backend: upgrade 2.3.1 -> 2.3.2 python3-pillow: upgrade 10.3.0 -> 10.4.0 python3-portalocker: upgrade 2.10.0 -> 2.10.1 python3-prettytable: upgrade 3.10.0 -> 3.10.2 python3-py7zr: upgrade 0.21.0 -> 0.21.1 python3-sympy: upgrade 1.12.1 -> 1.13.0 python3-tomlkit: upgrade 0.12.5 -> 0.13.0 python3-types-setuptools: upgrade 70.1.0.20240627 -> 70.3.0.20240710 python3-validators: upgrade 0.28.3 -> 0.32.0 qcbor: upgrade 1.3 -> 1.4 sngrep: upgrade 1.8.1 -> 1.8.2 thin-provisioning-tools: upgrade 1.0.12 -> 1.0.13 tree: upgrade 2.1.1 -> 2.1.3 wireshark: upgrade 4.2.5 -> 4.2.6 wolfssl: upgrade 5.7.0 -> 5.7.2 xterm: upgrade 392 -> 393 zenity: upgrade 4.0.1 -> 4.0.2 apache2: upgrade 2.4.61 -> 2.4.62 cfengine-masterfiles: upgrade 3.21.0 -> 3.21.5 cmark: upgrade 0.31.0 -> 0.31.1 cryptsetup: upgrade 2.7.3 -> 2.7.4 ctags: upgrade 6.1.20240714.0 -> 6.1.20240804.0 eog: upgrade 45.3 -> 45.4 fwupd: upgrade 1.9.18 -> 1.9.22 gmime: upgrade 3.2.13 -> 3.2.15 gnome-bluetooth: upgrade 46.0 -> 46.1 googletest: upgrade 1.14.0 -> 1.15.2 icewm: upgrade 3.4.5 -> 3.6.0 leptonica: upgrade 1.82.0 -> 1.84.1 libiodbc: upgrade 3.52.15 -> 3.52.16 liblinebreak: upgrade 1.2 -> 2.1 libnvme: upgrade 1.9 -> 1.10 libpaper: upgrade 2.1.2 -> 2.2.5 libpcsc-perl: upgrade 1.4.14 -> 1.4.15 libsdl-gfx: upgrade 2.0.25 -> 2.0.27 libtdb: upgrade 1.4.10 -> 1.4.11 libtracefs: upgrade 1.8.0 -> 1.8.1 logwarn: upgrade 1.0.14 -> 1.0.17 logwatch: upgrade 7.10 -> 7.11 msgpack-cpp: upgrade 6.1.0 -> 6.1.1 neatvnc: upgrade 0.8.0 -> 0.8.1 networkmanager: upgrade 1.48.4 -> 1.48.6 nss: upgrade 3.102 -> 3.103 openipmi: upgrade 2.0.35 -> 2.0.36 opentelemetry-cpp: upgrade 1.16.0 -> 1.16.1 openvpn: upgrade 2.6.11 -> 2.6.12 python3-a2wsgi: upgrade 1.10.6 -> 1.10.7 python3-aiohappyeyeballs: upgrade 2.3.2 -> 2.3.4 python3-astroid: upgrade 3.2.3 -> 3.2.4 python3-autobahn: upgrade 23.6.2 -> 24.4.2 python3-croniter: upgrade 2.0.5 -> 3.0.3 python3-langtable: upgrade 0.0.67 -> 0.0.68 python3-pdm-backend: upgrade 2.3.2 -> 2.3.3 python3-pure-eval: upgrade 0.2.2 -> 0.2.3 python3-pyfanotify: upgrade 0.2.2 -> 0.3.0 python3-pymisp: upgrade 2.4.194 -> 2.4.195 python3-pymodbus: upgrade 3.6.9 -> 3.7.0 python3-pytest-lazy-fixtures: upgrade 1.0.7 -> 1.1.1 python3-qface: upgrade 2.0.8 -> 2.0.10 python3-rapidjson: upgrade 1.18 -> 1.19 python3-redis: upgrade 5.0.7 -> 5.0.8 python3-regex: upgrade 2024.5.15 -> 2024.7.24 python3-sqlparse: upgrade 0.5.0 -> 0.5.1 python3-sympy: upgrade 1.13.0 -> 1.13.1 python3-tqdm: upgrade 4.66.4 -> 4.66.5 python3-types-setuptools: upgrade 70.3.0.20240710 -> 71.1.0.20240726 python3-validators: upgrade 0.32.0 -> 0.33.0 python3-web3: upgrade 6.20.0 -> 6.20.1 python3-xmlschema: upgrade 3.3.1 -> 3.3.2 qcbor: upgrade 1.4 -> 1.4.1 rsyslog: upgrade 8.2404.0 -> 8.2406.0 
ttf-abyssinica: upgrade 2.100 -> 2.201 wavemon: upgrade 0.9.5 -> 0.9.6 xmlsec1: upgrade 1.3.4 -> 1.3.5 picocom: upgrade 2023-04 -> 2024 hostapd: upgrade 2.10 -> 2.11 python3-incremental: upgrade 22.10.0 -> 24.7.2 colord-gtk: upgrade 0.3.0 -> 0.3.1 ctags: upgrade 6.1.20240804.0 -> 6.1.20240825.0 fwupd: upgrade 1.9.22 -> 1.9.24 hwdata: upgrade 0.384 -> 0.385 lastlog2: upgrade 1.2.0 -> 1.3.1 libbytesize: upgrade 2.10 -> 2.11 libei: upgrade 1.2.1 -> 1.3.0 libnet-dns-perl: upgrade 1.45 -> 1.46 libtdb: upgrade 1.4.11 -> 1.4.12 libtest-harness-perl: upgrade 3.48 -> 3.50 xdg-dbus-proxy: upgrade 0.1.5 -> 0.1.6 mdns: upgrade 2200.120.24 -> 2200.140.11 mutter: upgrade 46.2 -> 46.4 networkmanager: upgrade 1.48.6 -> 1.48.10 pamela: upgrade 1.1.0 -> 1.2.0 pcsc-tools: upgrade 1.7.1 -> 1.7.2 postgresql: upgrade 16.3 -> 16.4 python3-aiohappyeyeballs: upgrade 2.3.4 -> 2.4.0 python3-aiohttp: upgrade 3.10.3 -> 3.10.5 python3-aiohue: upgrade 4.7.2 -> 4.7.3 python3-cachetools: upgrade 5.4.0 -> 5.5.0 python3-dbus-fast: upgrade 2.22.1 -> 2.24.0 python3-eth-utils: upgrade 4.1.1 -> 5.0.0 python3-gunicorn: upgrade 22.0.0 -> 23.0.0 python3-imageio: upgrade 2.35.0 -> 2.35.1 python3-importlib-metadata: upgrade 8.2.0 -> 8.4.0 python3-marshmallow: upgrade 3.21.3 -> 3.22.0 python3-nocasedict: upgrade 2.0.3 -> 2.0.4 python3-nocaselist: upgrade 2.0.2 -> 2.0.3 python3-paramiko: upgrade 3.4.0 -> 3.4.1 python3-py7zr: upgrade 0.21.1 -> 0.22.0 python3-pycodestyle: upgrade 2.12.0 -> 2.12.1 python3-pymisp: upgrade 2.4.195 -> 2.4.196 python3-pyzstd: upgrade 0.16.0 -> 0.16.1 python3-simplejson: upgrade 3.19.2 -> 3.19.3 python3-sqlalchemy: upgrade 2.0.31 -> 2.0.32 python3-sympy: upgrade 1.13.1 -> 1.13.2 python3-tomlkit: upgrade 0.13.0 -> 0.13.2 python3-typer: upgrade 0.12.3 -> 0.12.5 python3-types-python-dateutil: upgrade 2.9.0.20240316 -> 2.9.0.20240821 python3-types-setuptools: upgrade 71.1.0.20240726 -> 73.0.0.20240822 python3-xxhash: upgrade 3.4.1 -> 3.5.0 rsyslog: upgrade 8.2406.0 -> 8.2408.0 samba: upgrade 4.19.7 -> 4.19.8 sanlock: upgrade 3.9.3 -> 3.9.4 unbound: upgrade 1.20.0 -> 1.21.0 lastlog2: remove recipe since it has been merged into util-linux ctags: upgrade 6.1.20240825.0 -> 6.1.20240908.0 eog: upgrade 45.4 -> 47.0 flatpak-xdg-utils: upgrade 1.0.5 -> 1.0.6 gensio: upgrade 2.8.5 -> 2.8.7 gnome-autoar: upgrade 0.4.4 -> 0.4.5 hwdata: upgrade 0.385 -> 0.387 libbpf: upgrade 1.4.5 -> 1.4.6 libcompress-raw-bzip2-perl: upgrade 2.212 -> 2.213 libcompress-raw-lzma-perl: upgrade 2.212 -> 2.213 libcompress-raw-zlib-perl: upgrade 2.212 -> 2.213 libextutils-helpers-perl: upgrade 0.027 -> 0.028 libio-compress-lzma-perl: upgrade 2.212 -> 2.213 libio-compress-perl: upgrade 2.212 -> 2.213 libio-socket-ssl-perl: upgrade 2.088 -> 2.089 libspiro: upgrade 20221101 -> 20240903 nano: upgrade 8.1 -> 8.2 python3-dbus-fast: upgrade 2.24.0 -> 2.24.2 python3-executing: upgrade 2.0.1 -> 2.1.0 python3-filelock: upgrade 3.15.4 -> 3.16.0 python3-httpx: upgrade 0.27.0 -> 0.27.2 python3-ipython: upgrade 8.26.0 -> 8.27.0 python3-kiwisolver: upgrade 1.4.5 -> 1.4.7 python3-parse-type: upgrade 0.6.2 -> 0.6.3 python3-pefile: upgrade 2023.2.7 -> 2024.8.26 python3-platformdirs: upgrade 4.2.2 -> 4.3.1 python3-pulsectl: upgrade 24.4.0 -> 24.8.0 python3-pymetno: upgrade 0.12.0 -> 0.13.0 python3-pymisp: upgrade 2.4.196 -> 2.4.197 python3-pymodbus: upgrade 3.7.0 -> 3.7.2 python3-rich: upgrade 13.7.1 -> 13.8.0 python3-scikit-build: upgrade 0.18.0 -> 0.18.1 python3-types-psutil: upgrade 6.0.0.20240621 -> 6.0.0.20240901 python3-types-python-dateutil: upgrade 
2.9.0.20240821 -> 2.9.0.20240906 python3-validators: upgrade 0.33.0 -> 0.34.0 python3-virtualenv: upgrade 20.26.3 -> 20.26.4 python3-watchdog: upgrade 4.0.2 -> 5.0.2 python3-yarl: upgrade 1.9.4 -> 1.10.0 python3-zeroconf: upgrade 0.132.2 -> 0.134.0 uhubctl: upgrade 2.5.0 -> 2.6.0 valijson: upgrade 1.0.2 -> 1.0.3 xfsdump: upgrade 3.1.12 -> 3.2.0 xterm: upgrade 393 -> 394 bdwgc: upgrade 8.2.6 -> 8.2.8 ctags: upgrade 6.1.20240908.0 -> 6.1.20240915.0 gnome-backgrounds: upgrade 46.0 -> 47.0 gnome-chess: upgrade 46.0 -> 47.0 gnome-font-viewer: upgrade 46.0 -> 47.0 libmanette: upgrade 0.2.7 -> 0.2.9 pegtl: upgrade 3.2.7 -> 3.2.8 python3-elementpath: upgrade 4.4.0 -> 4.5.0 python3-eventlet: upgrade 0.36.1 -> 0.37.0 python3-filelock: upgrade 3.16.0 -> 3.16.1 python3-greenlet: upgrade 3.0.3 -> 3.1.0 python3-nmap: upgrade 1.6.0 -> 1.9.1 python3-paramiko: upgrade 3.4.1 -> 3.5.0 python3-platformdirs: upgrade 4.3.1 -> 4.3.6 python3-psycopg: upgrade 3.2.1 -> 3.2.2 python3-pyasn1-modules: upgrade 0.4.0 -> 0.4.1 python3-pymisp: upgrade 2.4.197 -> 2.4.198 python3-pyproject-api: upgrade 1.7.1 -> 1.7.2 python3-pyunormalize: upgrade 15.1.0 -> 16.0.0 python3-regex: upgrade 2024.7.24 -> 2024.9.11 python3-rich: upgrade 13.8.0 -> 13.8.1 python3-robotframework: upgrade 7.0.1 -> 7.1 python3-virtualenv: upgrade 20.26.4 -> 20.26.5 python3-xmlschema: upgrade 3.3.2 -> 3.4.1 python3-yarl: upgrade 1.10.0 -> 1.11.1 stunnel: upgrade 5.72 -> 5.73 tecla: upgrade 46.0 -> 47.0 traceroute: upgrade 2.1.5 -> 2.1.6 nmap: Fix off-by-one overflow in the IP protocol table. python3-alembic: upgrade 1.13.2 -> 1.13.3 Yi Zhao (48): libldb: upgrade 2.8.0 -> 2.8.1 samba: upgrade 4.19.6 -> 4.19.7 devecot: set dovecot.conf file mode with chmod packagegroup-xfce-extended: fix typo of gobject-introspection-data feature lastlog2: specify correct pamlibdir wtmpdb: specify correct pamlibdir libnftnl: upgrade 1.2.6 -> 1.2.7 nftables: upgrade 1.0.9 -> 1.1.0 netplan: upgrade 1.0 -> 1.0.1 snort3: upgrade 3.1.84.0 -> 3.3.1.0 snort3: upgrade 3.3.1.0 -> 3.3.2.0 tcpreplay: upgrade 4.4.4 -> 4.5.1 libdaq: upgrade 3.0.15 -> 3.0.16 audit: upgrade 4.0.1 -> 4.0.2 snort3: upgrade 3.3.2.0 -> 3.3.3.0 snort3: upgrade 3.3.3.0 -> 3.3.4.0 tcpdump: upgrade 4.99.4 -> 4.99.5 cryptsetup: upgrade 2.7.4 -> 2.7.5 dracut: upgrade 102 -> 103 freeradius: upgrade 3.2.3 -> 3.2.5 autofs: upgrade 5.1.8 -> 5.1.9 mbedtls: upgrade 3.6.0 -> 3.6.1 mbedtls: upgrade 2.28.8 -> 2.28.9 drbd-utils: upgrade 9.27.0 -> 9.28.0 mm-common: upgrade 1.0.4 -> 1.0.6 lvm2: upgrade 2.03.25 -> 2.03.26 geoclue: upgrade 2.7.1 -> 2.7.2 s-nail: upgrade 14.9.24 -> 14.9.25 crash: upgrade 8.0.4 -> 8.0.5 mce-inject: upgrade to latest git rev mce-test: update to latest git rev fltk: upgrade 1.3.8 -> 1.3.9 openjpeg: upgrade 2.5.0 -> 2.5.2 netplan: upgrade 1.0.1 -> 1.1 libssh: upgrade 0.10.6 -> 0.11.1 jsoncpp: upgrade 1.9.5 -> 1.9.6 debootstrap: upgrade 1.0.132 -> 1.0.137 frr: upgrade 10.1 -> 10.1.1 open-vm-tools: upgrade 12.3.5 -> 12.4.5 v4l-utils: upgrade 1.26.1 -> 1.28.1 catch2: upgrade 3.6.0 -> 3.7.0 tbb: upgrade 2021.11.0 -> 2021.13.0 abseil-cpp: upgrade 20240116.2 -> 20240722.0 protobuf: add abseil-cpp to RDEPENDS protobuf: upgrade 4.25.4 -> 4.25.5 lksctp-tools: upgrade 1.0.19 -> 1.0.20 tcpslice: upgrade 1.7 -> 1.8 libhugetlbfs: upgrade 2.23 -> 2.24 Yoann Congal (39): python3-redis: add an archive prefix to avoid clashing with redis pidgin: Upgrade to 2.14.13 daq: fix SRC_URI to point to the real 2.0.7 release pidgin: Update Upstream-Status for gcc-14 compatibility patch pidgin: Remove gcc-14 compatibility 
workaround dbus-broker: update UPSTREAM_CHECK_* variables to fix devtool upgrades mariadb: update UPSTREAM_CHECK_* variables to fix devtool upgrades mbuffer: update UPSTREAM_CHECK_* variables to fix devtool upgrades microcom: update UPSTREAM_CHECK_* variables to fix devtool upgrades openbox-xdgmenu: update UPSTREAM_CHECK_* variables to fix devtool upgrades proxy-libintl: update UPSTREAM_CHECK_* variables to fix devtool upgrades pugixml: update UPSTREAM_CHECK_* variables to fix devtool upgrades pv: update UPSTREAM_CHECK_* variables to fix devtool upgrades sblim-sfcc: update UPSTREAM_CHECK_* variables to fix devtool upgrades source-code-pro-fonts: update UPSTREAM_CHECK_* variables to fix devtool upgrades stalonetray: update UPSTREAM_CHECK_* variables to fix devtool upgrades testfloat: update UPSTREAM_CHECK_* variables to fix devtool upgrades tk: update UPSTREAM_CHECK_* variables to fix devtool upgrades tmux: update UPSTREAM_CHECK_* variables to fix devtool upgrades ttf-abyssinica: update UPSTREAM_CHECK_* variables to fix devtool upgrades zeromq: update UPSTREAM_CHECK_* variables to fix devtool upgrades qad: Add UPSTREAM_CHECK_COMMITS reboot-mode: Add UPSTREAM_CHECK_COMMITS s-suite: Add UPSTREAM_CHECK_COMMITS syzkaller: Add UPSTREAM_CHECK_COMMITS yavta: Add UPSTREAM_CHECK_COMMITS zsync-curl: Add UPSTREAM_CHECK_COMMITS klibc: fix debug pkgs reproducibility polkit: Switch PAM files to common-* polkit: fix build on sysvinit grilo: fix buildpaths QA error non-repro-meta-python: exclude packages that failed previously README.md: Hint at "git request-pull" non-repro-meta-networking: exclude packages that failed previously non-repro-meta-filesystems: update known reproducible packages non-repro-meta-networking: update known non-reproducible list polkit: Update Upstream-Status of a merged patch wtmpdb: fix installed-vs-shipped build error minidlna: fix reproducibility Yogesh Tyagi (1): python3-pybind11 : upgrade 2.11.1 -> 2.12.0 Yogita Urade (3): hdf5: upgrade to 1.14.4 poppler: CVE-2024-6239 krb5: fix CVE-2024-26458 and CVE-2024-26461 Zhang Peng (1): hiredis: remove ANSI color from ptest result alba@thehoodiefirm.com (1): apache2:apache2-native: sort CVE status alperak (61): recipes: set S to fix the QA warning pcp: Fix contains reference to TMPDIR [buildpaths] warnings boinc-client: Fix contains reference to TMPDIR [buildpaths] warning rdist: Fix contains reference to TMPDIR [buildpaths] warning gphoto2: Fix contains reference to TMPDIR [buildpaths] warning hplip: Fix contains reference to TMPDIR [buildpaths] warning jsonrpc: Fix contains reference to TMPDIR [buildpaths] warning exiv2: Upgrade 0.28.2 to 0.28.3 for CVE fix tayga: Fix contains reference to TMPDIR [buildpaths] warning etcd-cpp-apiv3: Fix contains reference to TMPDIR [buildpaths] warning python3-lazy: switch to PEP-517 build backend python3-classes: switch to PEP-517 build backend python3-eventlet: switch to PEP-517 build backend python3-bitstruct: switch to PEP-517 build backend python3-dbus-fast: switch to PEP-517 build backend python3-brotli: switch to PEP-517 build backend python3-pymongo: switch to PEP-517 build backend python3-can: switch to PEP-517 build backend python3-pyaudio: switch to PEP-517 build backend python3-term: switch to PEP-517 build backend python3-screeninfo: switch to PEP-517 build backend python3-pykickstart: switch to PEP-517 build backend python3-click-repl: switch to PEP-517 build backend python3-evdev: switch to PEP-517 build backend python3-qrcode: switch to PEP-517 build backend python3-pyproj: switch to 
PEP-517 build backend python3-file-magic: switch to PEP-517 build backend python3-joblib: switch to PEP-517 build backend python3-dill: switch to PEP-517 build backend python3-luma-oled: switch to PEP-517 build backend python3-pyudev: switch to PEP-517 build backend python3-xmlschema: switch to PEP-517 build backend python3-lru-dict: switch to PEP-517 build backend python3-ipython: switch to PEP-517 build backend python3-portion: switch to PEP-517 build backend python3-lazy-object-proxy: switch to PEP-517 build backend python3-aioserial: switch to PEP-517 build backend perfetto: Fix contains reference to TMPDIR [buildpaths] warning python3-reedsolo: upgrade 2.0.13 -> 2.1.0b1 blueman: Fix do_package QA issue python3-service-identity: switch to PEP-517 build backend python3-parse-type: switch to PEP-517 build backend python3-regex: switch to PEP-517 build backend python3-pytest-timeout: switch to PEP-517 build backend python3-pytest-metadata: switch to PEP-517 build backend python3-pyroute: switch to PEP-517 build backend python3-pyjwt: switch to PEP-517 build backend python3-pyasn1-modules: switch to PEP-517 build backend python3-py-cpuinfo: switch to PEP-517 build backend python3-django: switch to PEP-517 build backend python3-greenlet: switch to PEP-517 build backend python3-gevent: switch to PEP-517 build backend python3-msgpack: upgrade 1.0.8 -> 1.1.0 python3-sqlalchemy: Upgrade 2.0.32 -> 2.0.35 and switch to PEP-517 build backend python3-alembic: switch to PEP-517 build backend python3-inflate64: switch to PEP-517 build backend python3-spidev: switch to PEP-517 build backend python3-pastedeploy: switch to PEP-517 build backend python3-reedsolo: switch to PEP-517 build backend curlpp: Fix build issue libhugetlbfs: Fix contains reference to TMPDIR [buildpaths] error ptak (1): opencv: upgrade 4.9.0 -> 4.10.0 quic-raghuvar (2): android-tools-adbd.service: Change /var to /etc in ConditionPathExists android-toold-adbd: Fix inconsistency between selinux configurations rajmohan r (1): unbound: Add ptest for unbound s-tokumoto (2): capnproto: Add "capnp" to CVE_PRODUCT fuse: Add "fuse:fuse" to CVE_PRODUCT meta-security: b4a8bc606f..e2c44c8b5d: Anusmita Dutta Mazumder (1): Add styhead LAYERSERIES_COMPAT Armin Kuster (18): recipes-*: convert WORKDIR->UNPACKDIR apparmor: fix QA Warnings python3-fail2ban: convert WORKDIR->UNPACKDIR krill: Fix QA warnings suricata: fix QA warnings isic: Fix config error arpwatch: Fix compile error chipsec: Fix QA Warnings tpm-tools: fix QA and compile errors. ima-policy: Fix S=UNPACKDIR harden/initscripts: UNPACKDIR fix harden-image-minima: Fix usermod aide: update to latest stable. 
python3-privacyidea: switch to PEP-517 build backend switch to PEP-517 build backend python3-tpm2-pyts: switch to PEP-517 build backend gitlab-ci: minor tweaks to try layer.conf: Update to styhead release name series Chen Qi (1): libgssglue: switch to use git source Hitendra Prajapati (2): sssd: Fix CVE-2023-3758 libhtp: fix CVE-2024-45797 Martin Jansa (4): {tcp,udp}-smack-test: fix few more implicit-function-declaration issues fatal with gcc-14 README.md: fix sendemail.to value suricata: run whole autotools_do_configure not just oe_runconf layer.conf: Update to styhead release name series Mikko Rapeli (9): python3-tpm2-pytss: update from 2.1.0 to 2.3.0 parsec-service: UNPACKDIR fixes bastille: UNPACKDIR fixes initramfs-framework-ima: UNPACKDIR fix ima-policy-appraise-all: UNPACKDIR fix ima-policy-simple: UNPACKDIR fix ima-policy-hashed: set S ima-policy-appraise-all: set S ima-policy-simple: set S Rasmus Villemoes (1): fail2ban: update to 1.1.0+ Ricardo Salveti (1): tpm2-tss: drop libgcrypt Siddharth Doshi (1): Suricata: Security Fix for CVE-2024-37151, CVE-2024-38534, CVE-2024-38535, CVE-2024-38536 Stefan Berger (3): meta-integrity: Remove stale variables and documentation meta-integrity: Add IMA_EVM_PRIVKEY_KEY_OPT to pass options to evmctl meta-integrity: Enable passing private key password Vijay Anusuri (1): tpm2-tools: Upgrade 5.5 -> 5.7 Wang Mingyu (3): ima-policy-hashed: Start WORKDIR -> UNPACKDIR transition suricata: Start WORKDIR -> UNPACKDIR transition trousers: Start WORKDIR -> UNPACKDIR transition Yi Zhao (3): openscap: fix PACKAGECONFIG[remediate_service] openscap: upgrade 1.3.10 -> 1.4.0 scap-security-guide: upgrade 0.1.73 -> 0.1.74 meta-raspberrypi: eb8ffc4e63..97d7a6b5ec: Andrew Lalaev (1): rpi-base.inc: add the disable-wifi-pi5 overlay Bastian Wanner (1): udev-rules-rpi.bb: Fix psplash systemd connection Garrett Brown (1): linux: Enable CONFIG_I2C_BRCMSTB for proper HDMI I2C support Jaeyoon Jung (1): linux-raspberrypi: Drop deprecated configs from android-driver.cfg Jan Vermaete (5): kas: updated the refspec syntax of the kas file README.md: pi3-disable-bt is renamed to disable-bt in kas example rpi-base.inc: added the disable-bt-pi5 device tree overlay raspi-utils: added new recipe extra-build-config.md: added a white line Khem Raj (6): linux-raspberrypi: Upgrade kernel to 6.6.36 weston-init.bbappend: Delete layer.conf: Update to walnascar (5.2) layer/release series linux-raspberrypi-6.6: Upgrade to 6.6.63 rpi-base: Remove bcm2712-rpi-5-b.dtb from RPI_KERNEL_DEVICETREE target SECURITY.md: Add instructions for reporting security issues Leon Anavi (2): rpi-u-boot-scr: WORKDIR -> UNPACKDIR transition conf/layer.conf: Remove meta-lts-mixins Luca Carlon (1): picamera-libs: removed unused libraries from python3-picamera Martin Jansa (1): mesa: rename bbappend to match new recipe name from oe-core Matthias Klein (1): linux-firmware-rpidistro: Upgrade to bookworm/20230625-2+rpt3 Pierrick Curt (1): rpi-base: build uart dts overlays by default Robert Yang (1): conf/layer.conf: Remove duplicated BBFILES Victor Löfgren (1): README.md: Update link to compatible layers Vincent Davis Jr (2): rpi-default-providers: remove vlc,ffmpeg PREFFERED_PROVIDER docs: include PREFERRED_PROVIDER_ffmpeg,vlc change meta-arm: 981425c54e..18bc3f9389: Ali Can Ozaslan (2): arm-bsp/trusted-firmware-m: corstone1000: Increase PS size arm-bsp/optee: corstone1000: Update upstream status Amr Mohamed (5): arm-systemready/README.md: add ARM_FVP_EULA_ACCEPT arm-systemready/linux-distros: new inc file for 
unattended installation arm-systemready/linux-distros: Add kickstart file for Fedora unattended arm-systemready/oeqa: Add new test for Fedora unattended installation kas: Add new yml file for Distros unattended installation Ben (3): arm-systemready/linux-distros: Implement unattended openSUSE arm-systemready/oeqa: Add unattended installation testcase kas: Include unattended openSUSE test Bence Balogh (18): arm-bsp/optee:corstone1000: Update optee to v4.2 arm-bsp/optee: Remove OP-TEE OS v4.1 recipe arm-bsp/trusted-firmware-a: Upgrade Corstone1000 to TF-A v2.11 arm-bsp/u-boot: corstone1000: use mdata v2 arm-bsp/trusted-firmware-a: corstone1000: update upstream statuses arm-bsp/trusted-firmware-m: corstone1000: upgrade to TF-M v2.1.x arm-bsp/trusted-services: corstone1000: align PSA crypto structs with TF-M arm-bsp/trusted-firmware-m: Remove TF-M v2.0 recipe arm-bsp/trusted-firmware-m: corstone1000: fix bank offset arm-bsp/trusted-firmware-m: corstone1000: add Secure Debug arm-bsp/documentation: corstone1000: add Secure Debug test CI: Add secure debug build for Corstone-1000 arm-bsp/linux-yocto: corstone1000: bump to v6.10 arm-bsp/documentation: corstone1000: remove TEE driver load arm-bsp/trusted-firmware-m: corstone1000: Fix MPU configuration arm-bsp/trusted-firmware-m: corstone1000: Update metadata handling arm-bsp/trusted-firmware-m: corstone1000: Update patches arm-bsp/trusted-firmware-m: corstone1000: Fix Secure Debug connection due to token version mismatch Delane Brandy (1): arm-bsp/corstone1000: Update Corstone-1000 user guide Emekcan Aras (1): arm-bsp/trusted-firmware-m: corstone1000: Switch to metadata v2 Harsimran Singh Tungal (7): arm-bsp/u-boot: corstone1000: fix U-Boot patch arm-bsp/trusted-services: corstone1000: fix compilation issues arm-bsp/trusted-services: fix compilation issues for ts-newlib arm-bsp/trusted-firmware-a: corstone1000: fix compilation issue for FVP multicore arm-bsp,kas: corstone1000: enable External System based on new yml file arm-bsp,documentation: corstone1000: update user documentation arm-bsp/trusted-services: corstone1000: Update Trusted-Services patches Hugues KAMBA MPIANA (4): arm-bsp/documentation: corstone1000: Mention PMOD module as prerequisite arm-bsp/documentation: corstone1000: Amend documentation for CORSTONE1000-2024.11 release kas: corstone-1000: Update the SHA of the Yocto layer dependencies for the CORSTONE1000-2024.11 release. 
kas: corstone-1000: Pin Yocto layer dependencies for CORSTONE1000-2024.11 release Hugues Kamba-Mpiana (2): arm-bsp/documentation: corstone1000: Deprecation of Sphinx context injection arm-bsp/documentation: corstone1000: Install Sphinx theme as recommended Javier Tia (3): arm/optee: Add optee udev rules arm: Enable Secure Boot in all required recipes arm/qemuarm64-secureboot: Enable UEFI Secure Boot Jon Mason (31): arm-bsp/fvp-base: update version to 11.26.11 arm/qemuarm64-secureboot: fix qemu parameter arm-toolchain: fix for WORKDIR changes arm-systemready: WORKDIR to UNPACKDIR changes CI: remove ts-smm-gateway for qemuarm64-secureboot-ts arm-toolchain: update to 13.3 CI: remove unnecessary clang settings CI: add poky-altcfg arm/opencsd: update to 1.5.3 arm/boot-wrapper-aarch64: update with latest patch arm/gn: update to the latest commit CI: remove xorg test removal from edk2 arm-bsp/fvp-base: add edk2 testimage support arm-bsp/fvp-base: u-boot patch clean-up arm: use devtool to clean-up patches arm-bsp: remove unreferenced patches and configs arm/trusted-firmware-a: remove workaround patch for qemuarm64-secureboot arm/qemu-efi-disk: add rootwait to bootargs arm/arm-tstee: pin kernel to 6.6 to workaround issue arm/trusted-firmware-a: update LICENSE entry arm/musl: work around trusted services error arm/libts: Patch to fix 6.10 kernel builds breaks arm-bsp/documentation: corstone1000: Improve user guide arm-toolchain: remove libmount-mountfd-support when using binary toolchain arm-bsp/fvp-base: support poky-altcfg arm-bsp/fvp-base: Get 6.10 kernel working arm-bsp/fvp: Re-enable parselogs arm/optee-os: Backport the clang fixes arm-bsp/fvp-base: use trusted-firmware-a v2.11 CI: Rework qemuarm64-secureboot matrix CI: remove branch name Luca Fancellu (2): arm/oeqa: Introduce retry mechanism for fvp_devices run_cmd arm/lib: Handle timeout for spawn object on stop() Mariam Elshakfy (1): arm/trusted-services: Move ts-newlib compilation fix to meta-arm Martin Jansa (1): layer.conf: Update to styhead release name series Mikko Rapeli (8): optee-os: asm debug prefix fixes optee-os: remove absolute paths optee-os-tadevkit: remove buildpaths INSANE_SKIP optee-os: remove buildpaths INSANE_SKIP optee-os: fix buildpaths QA failure on corstone1000 ts-newlib: setup git with check_git_config arm/optee-client: fix systemd service dependencies trusted-firmware-a: fix panic on kv260/zynqmp Peter Hoyes (1): arm/fvpboot: Revert "Disable timing annotation by default" Quentin Schulz (2): add basic b4 config file arm/trusted-firmware-a: add recipe for more-recent-but-not-yet-released source code Ross Burton (9): CI: update to Kas 4.4 image arm-systemready: explicitly disable SPDX in the fake image classes arm/edk2-firmware: set CVE_PRODUCT to the correct CPE arm-bsp/linux-yocto: update for linux 6.10 CI: switch to building against styhead branches where possible CI: add KAS_BUILD_DIR variable CI: remove duplicate arm-systemready-ir-acs CI: transform testimage reports into JUnit XML reports arm-base/linux-yocto: revert interim 6.10 patch for fvp-base Ziad Elhanafy (2): arm/oeqa: Enable pexpect profiling for testcase debugging arm-systemready/linux-distros: Follow WORKDIR -> UNPACKDIR transition Change-Id: I8c03dc8ed1822e0356c1d3dcf86b5c408aff3f78 Signed-off-by: Patrick Williams <patrick@stwcx.xyz>
Diffstat (limited to 'poky/meta/lib/oe')
-rw-r--r-- poky/meta/lib/oe/__init__.py | 6
-rw-r--r-- poky/meta/lib/oe/bootfiles.py | 57
-rw-r--r-- poky/meta/lib/oe/buildcfg.py | 2
-rw-r--r-- poky/meta/lib/oe/cve_check.py | 72
-rw-r--r-- poky/meta/lib/oe/package.py | 59
-rw-r--r-- poky/meta/lib/oe/package_manager/__init__.py | 76
-rw-r--r-- poky/meta/lib/oe/qa.py | 22
-rw-r--r-- poky/meta/lib/oe/recipeutils.py | 62
-rw-r--r-- poky/meta/lib/oe/rootfs.py | 4
-rw-r--r-- poky/meta/lib/oe/sbom30.py | 1121
-rw-r--r-- poky/meta/lib/oe/spdx30.py | 6020
-rw-r--r-- poky/meta/lib/oe/spdx30_tasks.py | 1243
-rw-r--r-- poky/meta/lib/oe/spdx_common.py | 227
-rw-r--r-- poky/meta/lib/oe/sstatesig.py | 27
-rw-r--r-- poky/meta/lib/oe/utils.py | 13
15 files changed, 8919 insertions, 92 deletions
diff --git a/poky/meta/lib/oe/__init__.py b/poky/meta/lib/oe/__init__.py
index 6eb536ad28..d760481283 100644
--- a/poky/meta/lib/oe/__init__.py
+++ b/poky/meta/lib/oe/__init__.py
@@ -7,6 +7,8 @@
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
-BBIMPORTS = ["data", "path", "utils", "types", "package", "packagedata", \
+# Modules with visitorcode need to go first else anything depending on them won't be
+# processed correctly (e.g. qa)
+BBIMPORTS = ["qa", "data", "path", "utils", "types", "package", "packagedata", \
"packagegroup", "sstatesig", "lsb", "cachedpath", "license", \
- "qa", "reproducible", "rust", "buildcfg", "go"]
+ "reproducible", "rust", "buildcfg", "go"]
diff --git a/poky/meta/lib/oe/bootfiles.py b/poky/meta/lib/oe/bootfiles.py
new file mode 100644
index 0000000000..155fe742db
--- /dev/null
+++ b/poky/meta/lib/oe/bootfiles.py
@@ -0,0 +1,57 @@
+#
+# SPDX-License-Identifier: MIT
+#
+# Copyright (C) 2024 Marcus Folkesson
+# Author: Marcus Folkesson <marcus.folkesson@gmail.com>
+#
+# Utility functions handling boot files
+#
+# Look into deploy_dir and search for boot_files.
+# Returns a list of tuples with (original filepath relative to
+# deploy_dir, desired filepath renaming)
+#
+# Heavily inspired of bootimg-partition.py
+#
+def get_boot_files(deploy_dir, boot_files):
+ import re
+ import os
+ from glob import glob
+
+ if boot_files is None:
+ return None
+
+ # list of tuples (src_name, dst_name)
+ deploy_files = []
+ for src_entry in re.findall(r'[\w;\-\./\*]+', boot_files):
+ if ';' in src_entry:
+ dst_entry = tuple(src_entry.split(';'))
+ if not dst_entry[0] or not dst_entry[1]:
+ raise ValueError('Malformed boot file entry: %s' % src_entry)
+ else:
+ dst_entry = (src_entry, src_entry)
+
+ deploy_files.append(dst_entry)
+
+ install_files = []
+ for deploy_entry in deploy_files:
+ src, dst = deploy_entry
+ if '*' in src:
+ # by default install files under their basename
+ entry_name_fn = os.path.basename
+ if dst != src:
+ # unless a target name was given, then treat name
+ # as a directory and append a basename
+ entry_name_fn = lambda name: \
+ os.path.join(dst,
+ os.path.basename(name))
+
+ srcs = glob(os.path.join(deploy_dir, src))
+
+ for entry in srcs:
+ src = os.path.relpath(entry, deploy_dir)
+ entry_dst_name = entry_name_fn(entry)
+ install_files.append((src, entry_dst_name))
+ else:
+ install_files.append((src, dst))
+
+ return install_files
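
The header comments above describe the (src, dst) expansion that get_boot_files() performs. A minimal usage sketch, assuming poky/meta/lib is on sys.path (e.g. inside a BitBake task) and that the deploy directory holds a kernel image, a boot script and two .dtb files; all file names and paths below are illustrative only:

    from oe.bootfiles import get_boot_files

    # Entries are whitespace-separated; "src;dst" renames a file, and a
    # wildcard src with a "dst/" suffix installs matches under that directory.
    boot_files = "zImage boot.scr;u-boot.scr *.dtb;devicetree/"
    print(get_boot_files("/path/to/deploy", boot_files))
    # Expected shape of the result (sources relative to the deploy directory):
    # [('zImage', 'zImage'),
    #  ('boot.scr', 'u-boot.scr'),
    #  ('board-a.dtb', 'devicetree/board-a.dtb'),
    #  ('board-b.dtb', 'devicetree/board-b.dtb')]
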
diff --git a/poky/meta/lib/oe/buildcfg.py b/poky/meta/lib/oe/buildcfg.py
index 27b059b834..4b22f18f36 100644
--- a/poky/meta/lib/oe/buildcfg.py
+++ b/poky/meta/lib/oe/buildcfg.py
@@ -52,7 +52,7 @@ def get_metadata_git_remote_url(path, remote):
def get_metadata_git_describe(path):
try:
- describe, _ = bb.process.run('git describe --tags', cwd=path)
+ describe, _ = bb.process.run('git describe --tags --dirty', cwd=path)
except bb.process.ExecutionError:
return ""
return describe.strip()
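
For reference, a standalone sketch of what the updated helper now reports; git's --dirty option appends a "-dirty" suffix to the describe output when the checkout has uncommitted changes (the tag names shown are made up):

    import subprocess

    def describe_layer(path):
        # Mirrors get_metadata_git_describe() above, without the bb.process wrapper.
        try:
            out = subprocess.check_output(
                ["git", "describe", "--tags", "--dirty"], cwd=path, text=True)
        except subprocess.CalledProcessError:
            return ""
        return out.strip()

    # Clean tree:    "yocto-5.1-12-gabc1234"
    # Modified tree: "yocto-5.1-12-gabc1234-dirty"
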
diff --git a/poky/meta/lib/oe/cve_check.py b/poky/meta/lib/oe/cve_check.py
index ed5c714cb8..647a94f5af 100644
--- a/poky/meta/lib/oe/cve_check.py
+++ b/poky/meta/lib/oe/cve_check.py
@@ -88,7 +88,7 @@ def get_patched_cves(d):
# (cve_match regular expression)
cve_file_name_match = re.compile(r".*(CVE-\d{4}-\d+)", re.IGNORECASE)
- patched_cves = set()
+ patched_cves = {}
patches = oe.patch.src_patches(d)
bb.debug(2, "Scanning %d patches for CVEs" % len(patches))
for url in patches:
@@ -98,7 +98,7 @@ def get_patched_cves(d):
fname_match = cve_file_name_match.search(patch_file)
if fname_match:
cve = fname_match.group(1).upper()
- patched_cves.add(cve)
+ patched_cves[cve] = {"abbrev-status": "Patched", "status": "fix-file-included", "resource": patch_file}
bb.debug(2, "Found %s from patch file name %s" % (cve, patch_file))
# Remote patches won't be present and compressed patches won't be
@@ -124,7 +124,7 @@ def get_patched_cves(d):
cves = patch_text[match.start()+5:match.end()]
for cve in cves.split():
bb.debug(2, "Patch %s solves %s" % (patch_file, cve))
- patched_cves.add(cve)
+ patched_cves[cve] = {"abbrev-status": "Patched", "status": "fix-file-included", "resource": patch_file}
text_match = True
if not fname_match and not text_match:
@@ -132,10 +132,16 @@ def get_patched_cves(d):
# Search for additional patched CVEs
for cve in (d.getVarFlags("CVE_STATUS") or {}):
- decoded_status, _, _ = decode_cve_status(d, cve)
- if decoded_status == "Patched":
- bb.debug(2, "CVE %s is additionally patched" % cve)
- patched_cves.add(cve)
+ decoded_status = decode_cve_status(d, cve)
+ products = d.getVar("CVE_PRODUCT")
+ if has_cve_product_match(decoded_status, products) == True:
+ patched_cves[cve] = {
+ "abbrev-status": decoded_status["mapping"],
+ "status": decoded_status["detail"],
+ "justification": decoded_status["description"],
+ "affected-vendor": decoded_status["vendor"],
+ "affected-product": decoded_status["product"]
+ }
return patched_cves
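
As this hunk shows, get_patched_cves() now returns a dictionary keyed by CVE ID rather than a plain set, so each entry can carry its status metadata. A small sketch of the file-name path through that code, using a hypothetical patch name:

    import re

    cve_file_name_match = re.compile(r".*(CVE-\d{4}-\d+)", re.IGNORECASE)

    patched_cves = {}
    patch_file = "0001-CVE-2024-12345-fix-buffer-overflow.patch"
    fname_match = cve_file_name_match.search(patch_file)
    if fname_match:
        cve = fname_match.group(1).upper()
        patched_cves[cve] = {"abbrev-status": "Patched",
                             "status": "fix-file-included",
                             "resource": patch_file}
    # patched_cves == {'CVE-2024-12345': {'abbrev-status': 'Patched',
    #                                     'status': 'fix-file-included',
    #                                     'resource': patch_file}}
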
@@ -227,19 +233,57 @@ def convert_cve_version(version):
def decode_cve_status(d, cve):
"""
- Convert CVE_STATUS into status, detail and description.
+ Convert CVE_STATUS into status, vendor, product, detail and description.
"""
status = d.getVarFlag("CVE_STATUS", cve)
if not status:
- return ("", "", "")
+ return {}
+
+ status_split = status.split(':', 4)
+ status_out = {}
+ status_out["detail"] = status_split[0]
+ product = "*"
+ vendor = "*"
+ description = ""
+ if len(status_split) >= 4 and status_split[1].strip() == "cpe":
+ # Both vendor and product are mandatory if cpe: present, the syntax is then:
+ # detail: cpe:vendor:product:description
+ vendor = status_split[2].strip()
+ product = status_split[3].strip()
+ description = status_split[4].strip()
+ elif len(status_split) >= 2 and status_split[1].strip() == "cpe":
+ # Malformed CPE
+ bb.warn('Invalid CPE information for CVE_STATUS[%s] = "%s", not setting CPE' % (cve, status))
+ else:
+ # Other case: no CPE, the syntax is then:
+ # detail: description
+ description = status.split(':', 1)[1].strip() if (len(status_split) > 1) else ""
- status_split = status.split(':', 1)
- detail = status_split[0]
- description = status_split[1].strip() if (len(status_split) > 1) else ""
+ status_out["vendor"] = vendor
+ status_out["product"] = product
+ status_out["description"] = description
- status_mapping = d.getVarFlag("CVE_CHECK_STATUSMAP", detail)
+ status_mapping = d.getVarFlag("CVE_CHECK_STATUSMAP", status_out['detail'])
if status_mapping is None:
bb.warn('Invalid detail "%s" for CVE_STATUS[%s] = "%s", fallback to Unpatched' % (detail, cve, status))
status_mapping = "Unpatched"
+ status_out["mapping"] = status_mapping
+
+ return status_out
+
+def has_cve_product_match(detailed_status, products):
+ """
+ Check product/vendor match between detailed_status from decode_cve_status and a string of
+ products (like from CVE_PRODUCT)
+ """
+ for product in products.split():
+ vendor = "*"
+ if ":" in product:
+ vendor, product = product.split(":", 1)
+
+ if (vendor == detailed_status["vendor"] or detailed_status["vendor"] == "*") and \
+ (product == detailed_status["product"] or detailed_status["product"] == "*"):
+ return True
- return (status_mapping, detail, description)
+ #if no match, return False
+ return False
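
    For reference, a hedged walk-through of the new dict shapes, using an invented
    CVE_STATUS entry in the "detail: cpe:vendor:product:description" syntax documented
    above, and assuming the default CVE_CHECK_STATUSMAP:

        # CVE_STATUS[CVE-2024-12345] = "not-applicable-config: cpe:openssl:openssl:only affects FIPS builds"
        decoded = {
            "detail": "not-applicable-config",
            "vendor": "openssl",
            "product": "openssl",
            "description": "only affects FIPS builds",
            "mapping": "Ignored",  # assuming CVE_CHECK_STATUSMAP maps not-applicable-config to Ignored
        }
        has_cve_product_match(decoded, "openssl:openssl curl")  # True: first entry matches vendor and product
        has_cve_product_match(decoded, "curl:libcurl")          # False: neither vendor nor product match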
diff --git a/poky/meta/lib/oe/package.py b/poky/meta/lib/oe/package.py
index d1303f32bf..c213a9a3ca 100644
--- a/poky/meta/lib/oe/package.py
+++ b/poky/meta/lib/oe/package.py
@@ -14,6 +14,7 @@ import glob
import stat
import mmap
import subprocess
+import shutil
import oe.cachedpath
@@ -1078,6 +1079,7 @@ def process_split_and_strip_files(d):
d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'):
checkelf = {}
checkelflinks = {}
+ checkstatic = {}
for root, dirs, files in cpath.walk(dvar):
for f in files:
file = os.path.join(root, f)
@@ -1091,10 +1093,6 @@ def process_split_and_strip_files(d):
if file in skipfiles:
continue
- if oe.package.is_static_lib(file):
- staticlibs.append(file)
- continue
-
try:
ltarget = cpath.realpath(file, dvar, False)
s = cpath.lstat(ltarget)
@@ -1106,6 +1104,13 @@ def process_split_and_strip_files(d):
continue
if not s:
continue
+
+ if oe.package.is_static_lib(file):
+ # Use a reference of device ID and inode number to identify files
+ file_reference = "%d_%d" % (s.st_dev, s.st_ino)
+ checkstatic[file] = (file, file_reference)
+ continue
+
# Check its an executable
if (s[stat.ST_MODE] & stat.S_IXUSR) or (s[stat.ST_MODE] & stat.S_IXGRP) \
or (s[stat.ST_MODE] & stat.S_IXOTH) \
@@ -1170,6 +1175,27 @@ def process_split_and_strip_files(d):
# Modified the file so clear the cache
cpath.updatecache(file)
+ # Do the same hardlink processing as above, but for static libraries
+ results = list(checkstatic.keys())
+
+ # As above, sort the results.
+ results.sort(key=lambda x: x[0])
+
+ for file in results:
+ # Use a reference of device ID and inode number to identify files
+ file_reference = checkstatic[file][1]
+ if file_reference in inodes:
+ os.unlink(file)
+ os.link(inodes[file_reference][0], file)
+ inodes[file_reference].append(file)
+ else:
+ inodes[file_reference] = [file]
+ # break hardlink
+ bb.utils.break_hardlinks(file)
+ staticlibs.append(file)
+ # Modified the file so clear the cache
+ cpath.updatecache(file)
+
def strip_pkgd_prefix(f):
nonlocal dvar
@@ -1208,11 +1234,24 @@ def process_split_and_strip_files(d):
dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(target) + dv["append"]
fpath = dvar + dest
ftarget = dvar + dv["libdir"] + os.path.dirname(target) + dv["dir"] + "/" + os.path.basename(target) + dv["append"]
- bb.utils.mkdirhier(os.path.dirname(fpath))
- # Only one hardlink of separated debug info file in each directory
- if not os.access(fpath, os.R_OK):
- #bb.note("Link %s -> %s" % (fpath, ftarget))
- os.link(ftarget, fpath)
+ if os.access(ftarget, os.R_OK):
+ bb.utils.mkdirhier(os.path.dirname(fpath))
+ # Only one hardlink of separated debug info file in each directory
+ if not os.access(fpath, os.R_OK):
+ #bb.note("Link %s -> %s" % (fpath, ftarget))
+ os.link(ftarget, fpath)
+ elif (d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'):
+ deststatic = dv["staticlibdir"] + os.path.dirname(src) + dv["staticdir"] + "/" + os.path.basename(file) + dv["staticappend"]
+ fpath = dvar + deststatic
+ ftarget = dvar + dv["staticlibdir"] + os.path.dirname(target) + dv["staticdir"] + "/" + os.path.basename(target) + dv["staticappend"]
+ if os.access(ftarget, os.R_OK):
+ bb.utils.mkdirhier(os.path.dirname(fpath))
+ # Only one hardlink of separated debug info file in each directory
+ if not os.access(fpath, os.R_OK):
+ #bb.note("Link %s -> %s" % (fpath, ftarget))
+ os.link(ftarget, fpath)
+ else:
+ bb.note("Unable to find inode link target %s" % (target))
# Create symlinks for all cases we were able to split symbols
for file in symlinks:
@@ -1853,7 +1892,7 @@ def process_pkgconfig(pkgfiles, d):
if m:
hdr = m.group(1)
exp = pd.expand(m.group(2))
- if hdr == 'Requires':
+ if hdr == 'Requires' or hdr == 'Requires.private':
pkgconfig_needed[pkg] += exp.replace(',', ' ').split()
continue
m = var_re.match(l)
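
    A simplified sketch of what the widened header check now picks up, with a
    hypothetical .pc fragment (the real code goes through the regex match and
    pd.expand shown above):

        pc_lines = [
            "Requires: glib-2.0 >= 2.70",
            "Requires.private: zlib , libffi",
        ]
        needed = []
        for l in pc_lines:
            hdr, exp = l.split(":", 1)
            if hdr == 'Requires' or hdr == 'Requires.private':
                needed += exp.replace(',', ' ').split()
        # needed -> ['glib-2.0', '>=', '2.70', 'zlib', 'libffi']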
diff --git a/poky/meta/lib/oe/package_manager/__init__.py b/poky/meta/lib/oe/package_manager/__init__.py
index d3b2317894..2100a97c12 100644
--- a/poky/meta/lib/oe/package_manager/__init__.py
+++ b/poky/meta/lib/oe/package_manager/__init__.py
@@ -365,45 +365,43 @@ class PackageManager(object, metaclass=ABCMeta):
for complementary_linguas in (self.d.getVar('IMAGE_LINGUAS_COMPLEMENTARY') or "").split():
globs += (" " + complementary_linguas) % lang
- if globs is None:
- return
-
- # we need to write the list of installed packages to a file because the
- # oe-pkgdata-util reads it from a file
- with tempfile.NamedTemporaryFile(mode="w+", prefix="installed-pkgs") as installed_pkgs:
- pkgs = self.list_installed()
-
- provided_pkgs = set()
- for pkg in pkgs.values():
- provided_pkgs |= set(pkg.get('provs', []))
-
- output = oe.utils.format_pkg_list(pkgs, "arch")
- installed_pkgs.write(output)
- installed_pkgs.flush()
-
- cmd = ["oe-pkgdata-util",
- "-p", self.d.getVar('PKGDATA_DIR'), "glob", installed_pkgs.name,
- globs]
- exclude = self.d.getVar('PACKAGE_EXCLUDE_COMPLEMENTARY')
- if exclude:
- cmd.extend(['--exclude=' + '|'.join(exclude.split())])
- try:
- bb.note('Running %s' % cmd)
- proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- stdout, stderr = proc.communicate()
- if stderr: bb.note(stderr.decode("utf-8"))
- complementary_pkgs = stdout.decode("utf-8")
- complementary_pkgs = set(complementary_pkgs.split())
- skip_pkgs = sorted(complementary_pkgs & provided_pkgs)
- install_pkgs = sorted(complementary_pkgs - provided_pkgs)
- bb.note("Installing complementary packages ... %s (skipped already provided packages %s)" % (
- ' '.join(install_pkgs),
- ' '.join(skip_pkgs)))
- self.install(install_pkgs, hard_depends_only=True)
- except subprocess.CalledProcessError as e:
- bb.fatal("Could not compute complementary packages list. Command "
- "'%s' returned %d:\n%s" %
- (' '.join(cmd), e.returncode, e.output.decode("utf-8")))
+ if globs:
+ # we need to write the list of installed packages to a file because the
+ # oe-pkgdata-util reads it from a file
+ with tempfile.NamedTemporaryFile(mode="w+", prefix="installed-pkgs") as installed_pkgs:
+ pkgs = self.list_installed()
+
+ provided_pkgs = set()
+ for pkg in pkgs.values():
+ provided_pkgs |= set(pkg.get('provs', []))
+
+ output = oe.utils.format_pkg_list(pkgs, "arch")
+ installed_pkgs.write(output)
+ installed_pkgs.flush()
+
+ cmd = ["oe-pkgdata-util",
+ "-p", self.d.getVar('PKGDATA_DIR'), "glob", installed_pkgs.name,
+ globs]
+ exclude = self.d.getVar('PACKAGE_EXCLUDE_COMPLEMENTARY')
+ if exclude:
+ cmd.extend(['--exclude=' + '|'.join(exclude.split())])
+ try:
+ bb.note('Running %s' % cmd)
+ proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ stdout, stderr = proc.communicate()
+ if stderr: bb.note(stderr.decode("utf-8"))
+ complementary_pkgs = stdout.decode("utf-8")
+ complementary_pkgs = set(complementary_pkgs.split())
+ skip_pkgs = sorted(complementary_pkgs & provided_pkgs)
+ install_pkgs = sorted(complementary_pkgs - provided_pkgs)
+ bb.note("Installing complementary packages ... %s (skipped already provided packages %s)" % (
+ ' '.join(install_pkgs),
+ ' '.join(skip_pkgs)))
+ self.install(install_pkgs, hard_depends_only=True)
+ except subprocess.CalledProcessError as e:
+ bb.fatal("Could not compute complementary packages list. Command "
+ "'%s' returned %d:\n%s" %
+ (' '.join(cmd), e.returncode, e.output.decode("utf-8")))
if self.d.getVar('IMAGE_LOCALES_ARCHIVE') == '1':
target_arch = self.d.getVar('TARGET_ARCH')
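
    The behaviour is unchanged by the reindentation; the complementary-glob command
    assembled above has roughly this shape (paths and globs are invented):

        cmd = ["oe-pkgdata-util",
               "-p", "/build/tmp/pkgdata/qemux86-64",
               "glob", "/tmp/installed-pkgsabc123",
               "*-dev *-dbg"]
        # PACKAGE_EXCLUDE_COMPLEMENTARY = "foo bar" would additionally append "--exclude=foo|bar"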
diff --git a/poky/meta/lib/oe/qa.py b/poky/meta/lib/oe/qa.py
index f8ae3c743f..cd36cb5070 100644
--- a/poky/meta/lib/oe/qa.py
+++ b/poky/meta/lib/oe/qa.py
@@ -4,6 +4,7 @@
# SPDX-License-Identifier: GPL-2.0-only
#
+import ast
import os, struct, mmap
class NotELFFileError(Exception):
@@ -186,6 +187,20 @@ def write_error(type, error, d):
with open(logfile, "a+") as f:
f.write("%s: %s [%s]\n" % (p, error, type))
+def handle_error_visitorcode(name, args):
+ execs = set()
+ contains = {}
+ warn = None
+ if isinstance(args[0], ast.Constant) and isinstance(args[0].value, str):
+ for i in ["ERROR_QA", "WARN_QA"]:
+ if i not in contains:
+ contains[i] = set()
+ contains[i].add(args[0].value)
+ else:
+ warn = args[0]
+ execs.add(name)
+ return contains, execs, warn
+
def handle_error(error_class, error_msg, d):
if error_class in (d.getVar("ERROR_QA") or "").split():
write_error(error_class, error_msg, d)
@@ -198,12 +213,7 @@ def handle_error(error_class, error_msg, d):
else:
bb.note("QA Issue: %s [%s]" % (error_msg, error_class))
return True
-
-def add_message(messages, section, new_msg):
- if section not in messages:
- messages[section] = new_msg
- else:
- messages[section] = messages[section] + "\n" + new_msg
+handle_error.visitorcode = handle_error_visitorcode
def exit_with_message_if_errors(message, d):
qa_fatal_errors = bb.utils.to_boolean(d.getVar("QA_ERRORS_FOUND"), False)
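
    The visitorcode hook lets BitBake's AST scan of python functions record QA classes
    without executing the code. A minimal sketch of feeding it the arguments of one
    parsed call (assuming the caller passes the ast argument nodes, as the isinstance
    check above expects):

        import ast

        call = ast.parse('oe.qa.handle_error("ldflags", "bad LDFLAGS", d)').body[0].value
        contains, execs, warn = handle_error_visitorcode("oe.qa.handle_error", call.args)
        # contains -> {'ERROR_QA': {'ldflags'}, 'WARN_QA': {'ldflags'}}
        # execs    -> {'oe.qa.handle_error'}
        # warn     -> None (the first argument is a literal string)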
diff --git a/poky/meta/lib/oe/recipeutils.py b/poky/meta/lib/oe/recipeutils.py
index 2d69a33113..56be75dc9c 100644
--- a/poky/meta/lib/oe/recipeutils.py
+++ b/poky/meta/lib/oe/recipeutils.py
@@ -1112,7 +1112,7 @@ def _get_recipe_upgrade_status(data):
maintainer = data.getVar('RECIPE_MAINTAINER')
no_upgrade_reason = data.getVar('RECIPE_NO_UPDATE_REASON')
- return (pn, status, cur_ver, next_ver, maintainer, revision, no_upgrade_reason)
+ return {'pn':pn, 'status':status, 'cur_ver':cur_ver, 'next_ver':next_ver, 'maintainer':maintainer, 'revision':revision, 'no_upgrade_reason':no_upgrade_reason}
def get_recipe_upgrade_status(recipes=None):
pkgs_list = []
@@ -1154,6 +1154,7 @@ def get_recipe_upgrade_status(recipes=None):
if not recipes:
recipes = tinfoil.all_recipe_files(variants=False)
+ recipeincludes = {}
for fn in recipes:
try:
if fn.startswith("/"):
@@ -1178,8 +1179,65 @@ def get_recipe_upgrade_status(recipes=None):
data_copy_list.append(data_copy)
+ recipeincludes[data.getVar('FILE')] = {'bbincluded':data.getVar('BBINCLUDED').split(),'pn':data.getVar('PN')}
+
from concurrent.futures import ProcessPoolExecutor
with ProcessPoolExecutor(max_workers=utils.cpu_count()) as executor:
pkgs_list = executor.map(_get_recipe_upgrade_status, data_copy_list)
- return pkgs_list
+ return _group_recipes(pkgs_list, _get_common_include_recipes(recipeincludes))
+
+def get_common_include_recipes():
+ with bb.tinfoil.Tinfoil() as tinfoil:
+ tinfoil.prepare(config_only=False)
+
+ recipes = tinfoil.all_recipe_files(variants=False)
+
+ recipeincludes = {}
+ for fn in recipes:
+ data = tinfoil.parse_recipe_file(fn)
+ recipeincludes[fn] = {'bbincluded':data.getVar('BBINCLUDED').split(),'pn':data.getVar('PN')}
+ return _get_common_include_recipes(recipeincludes)
+
+def _get_common_include_recipes(recipeincludes_all):
+ recipeincludes = {}
+ for fn,data in recipeincludes_all.items():
+ bbincluded_filtered = [i for i in data['bbincluded'] if os.path.dirname(i) == os.path.dirname(fn) and i != fn]
+ if bbincluded_filtered:
+ recipeincludes[data['pn']] = bbincluded_filtered
+
+ recipeincludes_inverted = {}
+ for k,v in recipeincludes.items():
+ for i in v:
+ recipeincludes_inverted.setdefault(i,set()).add(k)
+
+ recipeincludes_inverted_filtered = {k:v for k,v in recipeincludes_inverted.items() if len(v) > 1}
+
+ recipes_with_shared_includes = list()
+ for v in recipeincludes_inverted_filtered.values():
+ recipeset = v
+ for v1 in recipeincludes_inverted_filtered.values():
+ if recipeset.intersection(v1):
+ recipeset.update(v1)
+ if recipeset not in recipes_with_shared_includes:
+ recipes_with_shared_includes.append(recipeset)
+
+ return recipes_with_shared_includes
+
+def _group_recipes(recipes, groups):
+ recipedict = {}
+ for r in recipes:
+ recipedict[r['pn']] = r
+
+ recipegroups = []
+ for g in groups:
+ recipeset = []
+ for r in g:
+ if r in recipedict.keys():
+ recipeset.append(recipedict[r])
+ del recipedict[r]
+ recipegroups.append(recipeset)
+
+ for r in recipedict.values():
+ recipegroups.append([r])
+ return recipegroups
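
    get_recipe_upgrade_status() consequently returns a list of groups of dicts rather
    than a flat list of tuples; recipes that share an include file next to their .bb
    files land in one group. An invented example of the shape:

        groups = [
            [  # two recipes sharing a common .inc in the same directory
                {'pn': 'foo', 'status': 'UPDATE', 'cur_ver': '1.0', 'next_ver': '1.1',
                 'maintainer': 'Jane Doe', 'revision': 'N/A', 'no_upgrade_reason': ''},
                {'pn': 'foo-tools', 'status': 'UPDATE', 'cur_ver': '1.0', 'next_ver': '1.1',
                 'maintainer': 'Jane Doe', 'revision': 'N/A', 'no_upgrade_reason': ''},
            ],
            [  # a recipe with no shared include
                {'pn': 'bar', 'status': 'MATCH', 'cur_ver': '2.3', 'next_ver': '2.3',
                 'maintainer': 'Jane Doe', 'revision': 'N/A', 'no_upgrade_reason': ''},
            ],
        ]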
diff --git a/poky/meta/lib/oe/rootfs.py b/poky/meta/lib/oe/rootfs.py
index 8cd48f9450..5abce4ad7d 100644
--- a/poky/meta/lib/oe/rootfs.py
+++ b/poky/meta/lib/oe/rootfs.py
@@ -269,7 +269,11 @@ class Rootfs(object, metaclass=ABCMeta):
self.pm.remove(["run-postinsts"])
image_rorfs = bb.utils.contains("IMAGE_FEATURES", "read-only-rootfs",
+ True, False, self.d) and \
+ not bb.utils.contains("IMAGE_FEATURES",
+ "read-only-rootfs-delayed-postinsts",
True, False, self.d)
+
image_rorfs_force = self.d.getVar('FORCE_RO_REMOVE')
if image_rorfs or image_rorfs_force == "1":
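
    In effect, run-postinsts is only removed when the rootfs is read-only and the
    delayed-postinsts feature is absent; a simplified restatement of the condition,
    with bb.utils.contains reduced to a plain membership test:

        features = "read-only-rootfs read-only-rootfs-delayed-postinsts".split()
        image_rorfs = "read-only-rootfs" in features and \
                      "read-only-rootfs-delayed-postinsts" not in features
        # image_rorfs -> False here, so the delayed postinsts survive into the image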
diff --git a/poky/meta/lib/oe/sbom30.py b/poky/meta/lib/oe/sbom30.py
new file mode 100644
index 0000000000..76bfb752ef
--- /dev/null
+++ b/poky/meta/lib/oe/sbom30.py
@@ -0,0 +1,1121 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+from pathlib import Path
+
+import oe.spdx30
+import bb
+import re
+import hashlib
+import uuid
+import os
+import oe.spdx_common
+from datetime import datetime, timezone
+
+OE_SPDX_BASE = "https://rdf.openembedded.org/spdx/3.0/"
+
+VEX_VERSION = "1.0.0"
+
+SPDX_BUILD_TYPE = "http://openembedded.org/bitbake"
+
+
+@oe.spdx30.register(OE_SPDX_BASE + "link-extension")
+class OELinkExtension(oe.spdx30.extension_Extension):
+ """
+ This custom extension controls if an Element creates a symlink based on
+ its SPDX ID in the deploy directory. Some elements may not be able to be
+ linked because they are duplicated in multiple documents (e.g. the bitbake
+ Build Element). Those elements can add this extension and set link_spdx_id
+ to False
+
+ It is an internal extension that should be removed when writing out a final
+ SBoM
+ """
+
+ CLOSED = True
+ INTERNAL = True
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ cls._add_property(
+ "link_spdx_id",
+ oe.spdx30.BooleanProp(),
+ OE_SPDX_BASE + "link-spdx-id",
+ min_count=1,
+ max_count=1,
+ )
+
+ # The symlinks written to the deploy directory are based on the hash of
+ # the SPDX ID. While this makes it easy to look them up, it can be
+ # difficult to trace a Element to the hashed symlink name. As a
+ # debugging aid, this property is set to the basename of the symlink
+ # when the symlink is created to make it easier to trace
+ cls._add_property(
+ "link_name",
+ oe.spdx30.StringProp(),
+ OE_SPDX_BASE + "link-name",
+ max_count=1,
+ )
+
+
+@oe.spdx30.register(OE_SPDX_BASE + "id-alias")
+class OEIdAliasExtension(oe.spdx30.extension_Extension):
+ """
+ This extension allows an Element to provide an internal alias for the SPDX
+ ID. Since SPDX requires unique URIs for each SPDX ID, most of the objects
+ created have a unique UUID namespace and the unihash of the task encoded in
+ their SPDX ID. However, this causes a problem for referencing documents
+ across recipes, since the taskhash of a dependency may not factor into the
+ taskhash of the current task and thus the current task won't rebuild and
+ see the new SPDX ID when the dependency changes (e.g. ABI safe recipes and
+ tasks).
+
+ To help work around this, this extension provides a non-unique alias for an
+ Element by which it can be referenced from other tasks/recipes. When a
+ final SBoM is created, references to these aliases will be replaced with
+ the actual unique SPDX ID.
+
+ Most Elements will automatically get an alias created when they are written
+ out if they do not already have one. To suppress the creation of an alias,
+ add an extension with a blank `alias` property.
+
+
+ It is an internal extension that should be removed when writing out a final
+ SBoM
+ """
+
+ CLOSED = True
+ INTERNAL = True
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ cls._add_property(
+ "alias",
+ oe.spdx30.StringProp(),
+ OE_SPDX_BASE + "alias",
+ max_count=1,
+ )
+
+ cls._add_property(
+ "link_name",
+ oe.spdx30.StringProp(),
+ OE_SPDX_BASE + "link-name",
+ max_count=1,
+ )
+
+
+@oe.spdx30.register(OE_SPDX_BASE + "file-name-alias")
+class OEFileNameAliasExtension(oe.spdx30.extension_Extension):
+ CLOSED = True
+ INTERNAL = True
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ cls._add_property(
+ "aliases",
+ oe.spdx30.ListProp(oe.spdx30.StringProp()),
+ OE_SPDX_BASE + "filename-alias",
+ )
+
+
+@oe.spdx30.register(OE_SPDX_BASE + "license-scanned")
+class OELicenseScannedExtension(oe.spdx30.extension_Extension):
+ """
+ The presence of this extension means the file has already been scanned for
+ license information
+ """
+
+ CLOSED = True
+ INTERNAL = True
+
+
+@oe.spdx30.register(OE_SPDX_BASE + "document-extension")
+class OEDocumentExtension(oe.spdx30.extension_Extension):
+ """
+ This extension is added to a SpdxDocument to indicate various useful bits
+ of information about its contents
+ """
+
+ CLOSED = True
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ cls._add_property(
+ "is_native",
+ oe.spdx30.BooleanProp(),
+ OE_SPDX_BASE + "is-native",
+ max_count=1,
+ )
+
+
+def spdxid_hash(*items):
+ h = hashlib.md5()
+ for i in items:
+ if isinstance(i, oe.spdx30.Element):
+ h.update(i._id.encode("utf-8"))
+ else:
+ h.update(i.encode("utf-8"))
+ return h.hexdigest()
+
+
+def spdx_sde(d):
+ sde = d.getVar("SOURCE_DATE_EPOCH")
+ if not sde:
+ return datetime.now(timezone.utc)
+
+ return datetime.fromtimestamp(int(sde), timezone.utc)
+
+
+def get_element_link_id(e):
+ """
+ Get the string ID which should be used to link to an Element. If the
+ element has an alias, that will be preferred, otherwise its SPDX ID will be
+ used.
+ """
+ ext = get_alias(e)
+ if ext is not None and ext.alias:
+ return ext.alias
+ return e._id
+
+
+def set_alias(obj, alias):
+ for ext in obj.extension:
+ if not isinstance(ext, OEIdAliasExtension):
+ continue
+ ext.alias = alias
+ return ext
+
+ ext = OEIdAliasExtension(alias=alias)
+ obj.extension.append(ext)
+ return ext
+
+
+def get_alias(obj):
+ for ext in obj.extension:
+ if not isinstance(ext, OEIdAliasExtension):
+ continue
+ return ext
+
+ return None
+
+
+def to_list(l):
+ if isinstance(l, set):
+ l = sorted(list(l))
+
+ if not isinstance(l, (list, tuple)):
+ raise TypeError("Must be a list or tuple. Got %s" % type(l))
+
+ return l
+
+
+class ObjectSet(oe.spdx30.SHACLObjectSet):
+ def __init__(self, d):
+ super().__init__()
+ self.d = d
+
+ def create_index(self):
+ self.by_sha256_hash = {}
+ super().create_index()
+
+ def add_index(self, obj):
+ # Check that all elements are given an ID before being inserted
+ if isinstance(obj, oe.spdx30.Element):
+ if not obj._id:
+ raise ValueError("Element missing ID")
+ for ext in obj.extension:
+ if not isinstance(ext, OEIdAliasExtension):
+ continue
+ if ext.alias:
+ self.obj_by_id[ext.alias] = obj
+
+ for v in obj.verifiedUsing:
+ if not isinstance(v, oe.spdx30.Hash):
+ continue
+
+ if v.algorithm != oe.spdx30.HashAlgorithm.sha256:
+ continue
+
+ self.by_sha256_hash.setdefault(v.hashValue, set()).add(obj)
+
+ super().add_index(obj)
+ if isinstance(obj, oe.spdx30.SpdxDocument):
+ self.doc = obj
+
+ def __filter_obj(self, obj, attr_filter):
+ return all(getattr(obj, k) == v for k, v in attr_filter.items())
+
+ def foreach_filter(self, typ, *, match_subclass=True, **attr_filter):
+ for obj in self.foreach_type(typ, match_subclass=match_subclass):
+ if self.__filter_obj(obj, attr_filter):
+ yield obj
+
+ def find_filter(self, typ, *, match_subclass=True, **attr_filter):
+ for obj in self.foreach_filter(
+ typ, match_subclass=match_subclass, **attr_filter
+ ):
+ return obj
+ return None
+
+ def foreach_root(self, typ, **attr_filter):
+ for obj in self.doc.rootElement:
+ if not isinstance(obj, typ):
+ continue
+
+ if self.__filter_obj(obj, attr_filter):
+ yield obj
+
+ def find_root(self, typ, **attr_filter):
+ for obj in self.foreach_root(typ, **attr_filter):
+ return obj
+ return None
+
+ def add_root(self, obj):
+ self.add(obj)
+ self.doc.rootElement.append(obj)
+ return obj
+
+ def is_native(self):
+ for e in self.doc.extension:
+ if not isinstance(e, oe.sbom30.OEDocumentExtension):
+ continue
+
+ if e.is_native is not None:
+ return e.is_native
+
+ return False
+
+ def set_is_native(self, is_native):
+ for e in self.doc.extension:
+ if not isinstance(e, oe.sbom30.OEDocumentExtension):
+ continue
+
+ e.is_native = is_native
+ return
+
+ if is_native:
+ self.doc.extension.append(oe.sbom30.OEDocumentExtension(is_native=True))
+
+ def add_aliases(self):
+ for o in self.foreach_type(oe.spdx30.Element):
+ if not o._id or o._id.startswith("_:"):
+ continue
+
+ alias_ext = get_alias(o)
+ if alias_ext is None:
+ unihash = self.d.getVar("BB_UNIHASH")
+ namespace = self.get_namespace()
+ if unihash not in o._id:
+ bb.warn(f"Unihash {unihash} not found in {o._id}")
+ elif namespace not in o._id:
+ bb.warn(f"Namespace {namespace} not found in {o._id}")
+ else:
+ alias_ext = set_alias(
+ o,
+ o._id.replace(unihash, "UNIHASH").replace(
+ namespace, self.d.getVar("PN")
+ ),
+ )
+
+ def remove_internal_extensions(self):
+ def remove(o):
+ o.extension = [e for e in o.extension if not getattr(e, "INTERNAL", False)]
+
+ for o in self.foreach_type(oe.spdx30.Element):
+ remove(o)
+
+ if self.doc:
+ remove(self.doc)
+
+ def get_namespace(self):
+ namespace_uuid = uuid.uuid5(
+ uuid.NAMESPACE_DNS, self.d.getVar("SPDX_UUID_NAMESPACE")
+ )
+ pn = self.d.getVar("PN")
+ return "%s/%s-%s" % (
+ self.d.getVar("SPDX_NAMESPACE_PREFIX"),
+ pn,
+ str(uuid.uuid5(namespace_uuid, pn)),
+ )
+
+ def new_spdxid(self, *suffix, include_unihash=True):
+ items = [self.get_namespace()]
+ if include_unihash:
+ unihash = self.d.getVar("BB_UNIHASH")
+ items.append(unihash)
+ items.extend(re.sub(r"[^a-zA-Z0-9_-]", "_", s) for s in suffix)
+ return "/".join(items)
+
+ def new_import(self, key):
+ base = f"SPDX_IMPORTS_{key}"
+ spdxid = self.d.getVar(f"{base}_spdxid")
+ if not spdxid:
+ bb.fatal(f"{key} is not a valid SPDX_IMPORTS key")
+
+ for i in self.doc.imports:
+ if i.externalSpdxId == spdxid:
+ # Already imported
+ return spdxid
+
+ m = oe.spdx30.ExternalMap(externalSpdxId=spdxid)
+
+ uri = self.d.getVar(f"{base}_uri")
+ if uri:
+ m.locationHint = uri
+
+ for pyname, algorithm in oe.spdx30.HashAlgorithm.NAMED_INDIVIDUALS.items():
+ value = self.d.getVar(f"{base}_hash_{pyname}")
+ if value:
+ m.verifiedUsing.append(
+ oe.spdx30.Hash(
+ algorithm=algorithm,
+ hashValue=value,
+ )
+ )
+
+ self.doc.imports.append(m)
+ return spdxid
+
+ def new_agent(self, varname, *, creation_info=None, add=True):
+ ref_varname = self.d.getVar(f"{varname}_ref")
+ if ref_varname:
+ if ref_varname == varname:
+ bb.fatal(f"{varname} cannot reference itself")
+ return self.new_agent(ref_varname, creation_info=creation_info)
+
+ import_key = self.d.getVar(f"{varname}_import")
+ if import_key:
+ return self.new_import(import_key)
+
+ name = self.d.getVar(f"{varname}_name")
+ if not name:
+ return None
+
+ spdxid = self.new_spdxid("agent", name)
+ agent = self.find_by_id(spdxid)
+ if agent is not None:
+ return agent
+
+ agent_type = self.d.getVar("%s_type" % varname)
+ if agent_type == "person":
+ agent = oe.spdx30.Person()
+ elif agent_type == "software":
+ agent = oe.spdx30.SoftwareAgent()
+ elif agent_type == "organization":
+ agent = oe.spdx30.Organization()
+ elif not agent_type or agent_type == "agent":
+ agent = oe.spdx30.Agent()
+ else:
+ bb.fatal("Unknown agent type '%s' in %s_type" % (agent_type, varname))
+
+ agent._id = spdxid
+ agent.creationInfo = creation_info or self.doc.creationInfo
+ agent.name = name
+
+ comment = self.d.getVar("%s_comment" % varname)
+ if comment:
+ agent.comment = comment
+
+ for (
+ pyname,
+ idtype,
+ ) in oe.spdx30.ExternalIdentifierType.NAMED_INDIVIDUALS.items():
+ value = self.d.getVar("%s_id_%s" % (varname, pyname))
+ if value:
+ agent.externalIdentifier.append(
+ oe.spdx30.ExternalIdentifier(
+ externalIdentifierType=idtype,
+ identifier=value,
+ )
+ )
+
+ if add:
+ self.add(agent)
+
+ return agent
+
+ def new_creation_info(self):
+ creation_info = oe.spdx30.CreationInfo()
+
+ name = "%s %s" % (
+ self.d.getVar("SPDX_TOOL_NAME"),
+ self.d.getVar("SPDX_TOOL_VERSION"),
+ )
+ tool = self.add(
+ oe.spdx30.Tool(
+ _id=self.new_spdxid("tool", name),
+ creationInfo=creation_info,
+ name=name,
+ )
+ )
+
+ authors = []
+ for a in self.d.getVar("SPDX_AUTHORS").split():
+ varname = "SPDX_AUTHORS_%s" % a
+ author = self.new_agent(varname, creation_info=creation_info)
+
+ if not author:
+ bb.fatal("Unable to find or create author %s" % a)
+
+ authors.append(author)
+
+ creation_info.created = spdx_sde(self.d)
+ creation_info.specVersion = self.d.getVar("SPDX_VERSION")
+ creation_info.createdBy = authors
+ creation_info.createdUsing = [tool]
+
+ return creation_info
+
+ def copy_creation_info(self, copy):
+ c = oe.spdx30.CreationInfo(
+ created=spdx_sde(self.d),
+ specVersion=self.d.getVar("SPDX_VERSION"),
+ )
+
+ for author in copy.createdBy:
+ if isinstance(author, str):
+ c.createdBy.append(author)
+ else:
+ c.createdBy.append(author._id)
+
+ for tool in copy.createdUsing:
+ if isinstance(tool, str):
+ c.createdUsing.append(tool)
+ else:
+ c.createdUsing.append(tool._id)
+
+ return c
+
+ def new_annotation(self, subject, comment, typ):
+ return self.add(
+ oe.spdx30.Annotation(
+ _id=self.new_spdxid("annotation", spdxid_hash(comment, typ)),
+ creationInfo=self.doc.creationInfo,
+ annotationType=typ,
+ subject=subject,
+ statement=comment,
+ )
+ )
+
+ def _new_relationship(
+ self,
+ cls,
+ from_,
+ typ,
+ to,
+ *,
+ spdxid_name="relationship",
+ **props,
+ ):
+ from_ = to_list(from_)
+ to = to_list(to)
+
+ if not from_:
+ return []
+
+ if not to:
+ # TODO: Switch to the code constant once SPDX 3.0.1 is released
+ to = ["https://spdx.org/rdf/3.0.0/terms/Core/NoneElement"]
+
+ ret = []
+
+ for f in from_:
+ hash_args = [typ, f]
+ for k in sorted(props.keys()):
+ hash_args.append(props[k])
+ hash_args.extend(to)
+
+ relationship = self.add(
+ cls(
+ _id=self.new_spdxid(spdxid_name, spdxid_hash(*hash_args)),
+ creationInfo=self.doc.creationInfo,
+ from_=f,
+ relationshipType=typ,
+ to=to,
+ **props,
+ )
+ )
+ ret.append(relationship)
+
+ return ret
+
+ def new_relationship(self, from_, typ, to):
+ return self._new_relationship(oe.spdx30.Relationship, from_, typ, to)
+
+ def new_scoped_relationship(self, from_, typ, scope, to):
+ return self._new_relationship(
+ oe.spdx30.LifecycleScopedRelationship,
+ from_,
+ typ,
+ to,
+ scope=scope,
+ )
+
+ def new_license_expression(self, license_expression, license_data, license_text_map={}):
+ license_list_version = license_data["licenseListVersion"]
+ # SPDX 3 requires that the license list version be a semver
+ # MAJOR.MINOR.MICRO, but the actual license version might be
+ # MAJOR.MINOR on some older versions. As such, manually append a .0
+ # micro version if it's missing to keep SPDX happy
+ if license_list_version.count(".") < 2:
+ license_list_version += ".0"
+
+ spdxid = [
+ "license",
+ license_list_version,
+ re.sub(r"[^a-zA-Z0-9_-]", "_", license_expression),
+ ]
+
+ license_text = (
+ (k, license_text_map[k]) for k in sorted(license_text_map.keys())
+ )
+
+ if not license_text:
+ lic = self.find_filter(
+ oe.spdx30.simplelicensing_LicenseExpression,
+ simplelicensing_licenseExpression=license_expression,
+ simplelicensing_licenseListVersion=license_list_version,
+ )
+ if lic is not None:
+ return lic
+ else:
+ spdxid.append(spdxid_hash(*(v for _, v in license_text)))
+ lic = self.find_by_id(self.new_spdxid(*spdxid))
+ if lic is not None:
+ return lic
+
+ lic = self.add(
+ oe.spdx30.simplelicensing_LicenseExpression(
+ _id=self.new_spdxid(*spdxid),
+ creationInfo=self.doc.creationInfo,
+ simplelicensing_licenseExpression=license_expression,
+ simplelicensing_licenseListVersion=license_list_version,
+ )
+ )
+
+ for key, value in license_text:
+ lic.simplelicensing_customIdToUri.append(
+ oe.spdx30.DictionaryEntry(key=key, value=value)
+ )
+
+ return lic
+
+ def scan_declared_licenses(self, spdx_file, filepath, license_data):
+ for e in spdx_file.extension:
+ if isinstance(e, OELicenseScannedExtension):
+ return
+
+ file_licenses = set()
+ for extracted_lic in oe.spdx_common.extract_licenses(filepath):
+ file_licenses.add(self.new_license_expression(extracted_lic, license_data))
+
+ self.new_relationship(
+ [spdx_file],
+ oe.spdx30.RelationshipType.hasDeclaredLicense,
+ file_licenses,
+ )
+ spdx_file.extension.append(OELicenseScannedExtension())
+
+ def new_file(self, _id, name, path, *, purposes=[]):
+ sha256_hash = bb.utils.sha256_file(path)
+
+ for f in self.by_sha256_hash.get(sha256_hash, []):
+ if not isinstance(f, oe.spdx30.software_File):
+ continue
+
+ if purposes:
+ new_primary = purposes[0]
+ new_additional = []
+
+ if f.software_primaryPurpose:
+ new_additional.append(f.software_primaryPurpose)
+ new_additional.extend(f.software_additionalPurpose)
+
+ new_additional = sorted(
+ list(set(p for p in new_additional if p != new_primary))
+ )
+
+ f.software_primaryPurpose = new_primary
+ f.software_additionalPurpose = new_additional
+
+ if f.name != name:
+ for e in f.extension:
+ if isinstance(e, OEFileNameAliasExtension):
+ e.aliases.append(name)
+ break
+ else:
+ f.extension.append(OEFileNameAliasExtension(aliases=[name]))
+
+ return f
+
+ spdx_file = oe.spdx30.software_File(
+ _id=_id,
+ creationInfo=self.doc.creationInfo,
+ name=name,
+ )
+ if purposes:
+ spdx_file.software_primaryPurpose = purposes[0]
+ spdx_file.software_additionalPurpose = purposes[1:]
+
+ spdx_file.verifiedUsing.append(
+ oe.spdx30.Hash(
+ algorithm=oe.spdx30.HashAlgorithm.sha256,
+ hashValue=sha256_hash,
+ )
+ )
+
+ return self.add(spdx_file)
+
+ def new_cve_vuln(self, cve):
+ v = oe.spdx30.security_Vulnerability()
+ v._id = self.new_spdxid("vulnerability", cve)
+ v.creationInfo = self.doc.creationInfo
+
+ v.externalIdentifier.append(
+ oe.spdx30.ExternalIdentifier(
+ externalIdentifierType=oe.spdx30.ExternalIdentifierType.cve,
+ identifier=cve,
+ identifierLocator=[
+ f"https://cveawg.mitre.org/api/cve/{cve}",
+ f"https://www.cve.org/CVERecord?id={cve}",
+ ],
+ )
+ )
+ return self.add(v)
+
+ def new_vex_patched_relationship(self, from_, to):
+ return self._new_relationship(
+ oe.spdx30.security_VexFixedVulnAssessmentRelationship,
+ from_,
+ oe.spdx30.RelationshipType.fixedIn,
+ to,
+ spdxid_name="vex-fixed",
+ security_vexVersion=VEX_VERSION,
+ )
+
+ def new_vex_unpatched_relationship(self, from_, to):
+ return self._new_relationship(
+ oe.spdx30.security_VexAffectedVulnAssessmentRelationship,
+ from_,
+ oe.spdx30.RelationshipType.affects,
+ to,
+ spdxid_name="vex-affected",
+ security_vexVersion=VEX_VERSION,
+ )
+
+ def new_vex_ignored_relationship(self, from_, to, *, impact_statement):
+ return self._new_relationship(
+ oe.spdx30.security_VexNotAffectedVulnAssessmentRelationship,
+ from_,
+ oe.spdx30.RelationshipType.doesNotAffect,
+ to,
+ spdxid_name="vex-not-affected",
+ security_vexVersion=VEX_VERSION,
+ security_impactStatement=impact_statement,
+ )
+
+ def import_bitbake_build_objset(self):
+ deploy_dir_spdx = Path(self.d.getVar("DEPLOY_DIR_SPDX"))
+ bb_objset = load_jsonld(
+ self.d, deploy_dir_spdx / "bitbake.spdx.json", required=True
+ )
+ self.doc.imports.extend(bb_objset.doc.imports)
+ self.update(bb_objset.objects)
+
+ return bb_objset
+
+ def import_bitbake_build(self):
+ def find_bitbake_build(objset):
+ return objset.find_filter(
+ oe.spdx30.build_Build,
+ build_buildType=SPDX_BUILD_TYPE,
+ )
+
+ build = find_bitbake_build(self)
+ if build:
+ return build
+
+ bb_objset = self.import_bitbake_build_objset()
+ build = find_bitbake_build(bb_objset)
+ if build is None:
+ bb.fatal(f"No build found in {deploy_dir_spdx}")
+
+ return build
+
+ def new_task_build(self, name, typ):
+ current_task = self.d.getVar("BB_CURRENTTASK")
+ pn = self.d.getVar("PN")
+
+ build = self.add(
+ oe.spdx30.build_Build(
+ _id=self.new_spdxid("build", name),
+ creationInfo=self.doc.creationInfo,
+ name=f"{pn}:do_{current_task}:{name}",
+ build_buildType=f"{SPDX_BUILD_TYPE}/do_{current_task}/{typ}",
+ )
+ )
+
+ if self.d.getVar("SPDX_INCLUDE_BITBAKE_PARENT_BUILD") == "1":
+ bitbake_build = self.import_bitbake_build()
+
+ self.new_relationship(
+ [bitbake_build],
+ oe.spdx30.RelationshipType.ancestorOf,
+ [build],
+ )
+
+ if self.d.getVar("SPDX_INCLUDE_BUILD_VARIABLES") == "1":
+ for varname in sorted(self.d.keys()):
+ if varname.startswith("__"):
+ continue
+
+ value = self.d.getVar(varname, expand=False)
+
+ # TODO: Deal with non-string values
+ if not isinstance(value, str):
+ continue
+
+ build.build_parameters.append(
+ oe.spdx30.DictionaryEntry(key=varname, value=value)
+ )
+
+ return build
+
+ def new_archive(self, archive_name):
+ return self.add(
+ oe.spdx30.software_File(
+ _id=self.new_spdxid("archive", str(archive_name)),
+ creationInfo=self.doc.creationInfo,
+ name=str(archive_name),
+ software_primaryPurpose=oe.spdx30.software_SoftwarePurpose.archive,
+ )
+ )
+
+ @classmethod
+ def new_objset(cls, d, name, copy_from_bitbake_doc=True):
+ objset = cls(d)
+
+ document = oe.spdx30.SpdxDocument(
+ _id=objset.new_spdxid("document", name),
+ name=name,
+ )
+ document.extension.append(OEIdAliasExtension())
+ document.extension.append(OELinkExtension(link_spdx_id=False))
+ objset.doc = document
+
+ if copy_from_bitbake_doc:
+ bb_objset = objset.import_bitbake_build_objset()
+ document.creationInfo = objset.copy_creation_info(
+ bb_objset.doc.creationInfo
+ )
+ else:
+ document.creationInfo = objset.new_creation_info()
+
+ return objset
+
+ def expand_collection(self, *, add_objectsets=[]):
+ """
+ Expands a collection to pull in all missing elements
+
+ Returns the set of ids that could not be found to link into the document
+ """
+ missing_spdxids = set()
+ imports = {e.externalSpdxId: e for e in self.doc.imports}
+
+ def merge_doc(other):
+ nonlocal imports
+
+ for e in other.doc.imports:
+ if not e.externalSpdxId in imports:
+ imports[e.externalSpdxId] = e
+
+ self.objects |= other.objects
+
+ for o in add_objectsets:
+ merge_doc(o)
+
+ needed_spdxids = self.link()
+ provided_spdxids = set(self.obj_by_id.keys())
+
+ while True:
+ import_spdxids = set(imports.keys())
+ searching_spdxids = (
+ needed_spdxids - provided_spdxids - missing_spdxids - import_spdxids
+ )
+ if not searching_spdxids:
+ break
+
+ spdxid = searching_spdxids.pop()
+ bb.debug(
+ 1,
+ f"Searching for {spdxid}. Remaining: {len(searching_spdxids)}, Total: {len(provided_spdxids)}, Missing: {len(missing_spdxids)}, Imports: {len(import_spdxids)}",
+ )
+ dep_objset, dep_path = find_by_spdxid(self.d, spdxid)
+
+ if dep_objset:
+ dep_provided = set(dep_objset.obj_by_id.keys())
+ if spdxid not in dep_provided:
+ bb.fatal(f"{spdxid} not found in {dep_path}")
+ provided_spdxids |= dep_provided
+ needed_spdxids |= dep_objset.missing_ids
+ merge_doc(dep_objset)
+ else:
+ missing_spdxids.add(spdxid)
+
+ bb.debug(1, "Linking...")
+ missing = self.link()
+ if missing != missing_spdxids:
+ bb.fatal(
+ f"Linked document doesn't match missing SPDX ID list. Got: {missing}\nExpected: {missing_spdxids}"
+ )
+
+ self.doc.imports = sorted(imports.values(), key=lambda e: e.externalSpdxId)
+
+ return missing_spdxids
+
+
+def load_jsonld(d, path, required=False):
+ deserializer = oe.spdx30.JSONLDDeserializer()
+ objset = ObjectSet(d)
+ try:
+ with path.open("rb") as f:
+ deserializer.read(f, objset)
+ except FileNotFoundError:
+ if required:
+ bb.fatal("No SPDX document named %s found" % path)
+ return None
+
+ if not objset.doc:
+ bb.fatal("SPDX Document %s has no SPDXDocument element" % path)
+ return None
+
+ objset.objects.remove(objset.doc)
+ return objset
+
+
+def jsonld_arch_path(d, arch, subdir, name, deploydir=None):
+ if deploydir is None:
+ deploydir = Path(d.getVar("DEPLOY_DIR_SPDX"))
+ return deploydir / arch / subdir / (name + ".spdx.json")
+
+
+def jsonld_hash_path(_id):
+ h = hashlib.sha256(_id.encode("utf-8")).hexdigest()
+
+ return Path("by-spdxid-hash") / h[:2], h
+
+
+def load_jsonld_by_arch(d, arch, subdir, name, *, required=False):
+ path = jsonld_arch_path(d, arch, subdir, name)
+ objset = load_jsonld(d, path, required=required)
+ if objset is not None:
+ return (objset, path)
+ return (None, None)
+
+
+def find_jsonld(d, subdir, name, *, required=False):
+ package_archs = d.getVar("SPDX_MULTILIB_SSTATE_ARCHS").split()
+ package_archs.reverse()
+
+ for arch in package_archs:
+ objset, path = load_jsonld_by_arch(d, arch, subdir, name)
+ if objset is not None:
+ return (objset, path)
+
+ if required:
+ bb.fatal("Could not find a %s SPDX document named %s" % (subdir, name))
+
+ return (None, None)
+
+
+def write_jsonld_doc(d, objset, dest):
+ if not isinstance(objset, ObjectSet):
+ bb.fatal("Only an ObjsetSet can be serialized")
+ return
+
+ if not objset.doc:
+ bb.fatal("ObjectSet is missing a SpdxDocument")
+ return
+
+ objset.doc.rootElement = sorted(list(set(objset.doc.rootElement)))
+ objset.doc.profileConformance = sorted(
+ list(
+ getattr(oe.spdx30.ProfileIdentifierType, p)
+ for p in d.getVar("SPDX_PROFILES").split()
+ )
+ )
+
+ dest.parent.mkdir(exist_ok=True, parents=True)
+
+ if d.getVar("SPDX_PRETTY") == "1":
+ serializer = oe.spdx30.JSONLDSerializer(
+ indent=2,
+ )
+ else:
+ serializer = oe.spdx30.JSONLDInlineSerializer()
+
+ objset.objects.add(objset.doc)
+ with dest.open("wb") as f:
+ serializer.write(objset, f, force_at_graph=True)
+ objset.objects.remove(objset.doc)
+
+
+def write_recipe_jsonld_doc(
+ d,
+ objset,
+ subdir,
+ deploydir,
+ *,
+ create_spdx_id_links=True,
+):
+ pkg_arch = d.getVar("SSTATE_PKGARCH")
+
+ dest = jsonld_arch_path(d, pkg_arch, subdir, objset.doc.name, deploydir=deploydir)
+
+ def link_id(_id):
+ hash_path = jsonld_hash_path(_id)
+
+ link_name = jsonld_arch_path(
+ d,
+ pkg_arch,
+ *hash_path,
+ deploydir=deploydir,
+ )
+ try:
+ link_name.parent.mkdir(exist_ok=True, parents=True)
+ link_name.symlink_to(os.path.relpath(dest, link_name.parent))
+ except:
+ target = link_name.readlink()
+ bb.warn(
+ f"Unable to link {_id} in {dest} as {link_name}. Already points to {target}"
+ )
+ raise
+
+ return hash_path[-1]
+
+ objset.add_aliases()
+
+ try:
+ if create_spdx_id_links:
+ for o in objset.foreach_type(oe.spdx30.Element):
+ if not o._id or o._id.startswith("_:"):
+ continue
+
+ ext = None
+ for e in o.extension:
+ if not isinstance(e, OELinkExtension):
+ continue
+
+ ext = e
+ break
+
+ if ext is None:
+ ext = OELinkExtension(link_spdx_id=True)
+ o.extension.append(ext)
+
+ if ext.link_spdx_id:
+ ext.link_name = link_id(o._id)
+
+ alias_ext = get_alias(o)
+ if alias_ext is not None and alias_ext.alias:
+ alias_ext.link_name = link_id(alias_ext.alias)
+
+ finally:
+ # It is really helpful for debugging if the JSON document is written
+ # out, so always do that even if there is an error making the links
+ write_jsonld_doc(d, objset, dest)
+
+
+def find_root_obj_in_jsonld(d, subdir, fn_name, obj_type, **attr_filter):
+ objset, fn = find_jsonld(d, subdir, fn_name, required=True)
+
+ spdx_obj = objset.find_root(obj_type, **attr_filter)
+ if not spdx_obj:
+ bb.fatal("No root %s found in %s" % (obj_type.__name__, fn))
+
+ return spdx_obj, objset
+
+
+def load_obj_in_jsonld(d, arch, subdir, fn_name, obj_type, **attr_filter):
+ objset, fn = load_jsonld_by_arch(d, arch, subdir, fn_name, required=True)
+
+ spdx_obj = objset.find_filter(obj_type, **attr_filter)
+ if not spdx_obj:
+ bb.fatal("No %s found in %s" % (obj_type.__name__, fn))
+
+ return spdx_obj, objset
+
+
+def find_by_spdxid(d, spdxid, *, required=False):
+ return find_jsonld(d, *jsonld_hash_path(spdxid), required=required)
+
+
+def create_sbom(d, name, root_elements, add_objectsets=[]):
+ objset = ObjectSet.new_objset(d, name)
+
+ sbom = objset.add(
+ oe.spdx30.software_Sbom(
+ _id=objset.new_spdxid("sbom", name),
+ name=name,
+ creationInfo=objset.doc.creationInfo,
+ software_sbomType=[oe.spdx30.software_SbomType.build],
+ rootElement=root_elements,
+ )
+ )
+
+ missing_spdxids = objset.expand_collection(add_objectsets=add_objectsets)
+ if missing_spdxids:
+ bb.warn(
+ "The following SPDX IDs were unable to be resolved:\n "
+ + "\n ".join(sorted(list(missing_spdxids)))
+ )
+
+ # Filter out internal extensions from final SBoMs
+ objset.remove_internal_extensions()
+
+ # SBoM should be the only root element of the document
+ objset.doc.rootElement = [sbom]
+
+ # De-duplicate licenses
+ unique = set()
+ dedup = {}
+ for lic in objset.foreach_type(oe.spdx30.simplelicensing_LicenseExpression):
+ for u in unique:
+ if (
+ u.simplelicensing_licenseExpression
+ == lic.simplelicensing_licenseExpression
+ and u.simplelicensing_licenseListVersion
+ == lic.simplelicensing_licenseListVersion
+ ):
+ dedup[lic] = u
+ break
+ else:
+ unique.add(lic)
+
+ if dedup:
+ for rel in objset.foreach_filter(
+ oe.spdx30.Relationship,
+ relationshipType=oe.spdx30.RelationshipType.hasDeclaredLicense,
+ ):
+ rel.to = [dedup.get(to, to) for to in rel.to]
+
+ for rel in objset.foreach_filter(
+ oe.spdx30.Relationship,
+ relationshipType=oe.spdx30.RelationshipType.hasConcludedLicense,
+ ):
+ rel.to = [dedup.get(to, to) for to in rel.to]
+
+ for k, v in dedup.items():
+ bb.debug(1, f"Removing duplicate License {k._id} -> {v._id}")
+ objset.objects.remove(k)
+
+ objset.create_index()
+
+ return objset, sbom
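
    Going by the OEIdAliasExtension docstring above, cross-recipe references are meant
    to go through the stable alias rather than the raw unihash-bearing SPDX ID. A hedged
    sketch with invented identifiers (the "<unihash>" placeholder stands in for a real
    task hash):

        import oe.spdx30
        import oe.sbom30

        f = oe.spdx30.software_File(
            _id="http://spdx.example.org/recipe/<unihash>/file-usr-bin-foo",
            name="/usr/bin/foo",
        )
        oe.sbom30.set_alias(f, "http://spdx.example.org/recipe/UNIHASH/file-usr-bin-foo")

        # Other documents should link against the alias, not the unihash-specific ID:
        ref = oe.sbom30.get_element_link_id(f)   # -> the UNIHASH alias set above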
diff --git a/poky/meta/lib/oe/spdx30.py b/poky/meta/lib/oe/spdx30.py
new file mode 100644
index 0000000000..ae74ce36f4
--- /dev/null
+++ b/poky/meta/lib/oe/spdx30.py
@@ -0,0 +1,6020 @@
+#! /usr/bin/env python3
+#
+# Generated Python bindings from a SHACL model
+#
+# This file was automatically generated by shacl2code. DO NOT MANUALLY MODIFY IT
+#
+# SPDX-License-Identifier: MIT
+
+import functools
+import hashlib
+import json
+import re
+import sys
+import threading
+import time
+from contextlib import contextmanager
+from datetime import datetime, timezone, timedelta
+from enum import Enum
+from abc import ABC, abstractmethod
+
+
+def check_type(obj, types):
+ if not isinstance(obj, types):
+ if isinstance(types, (list, tuple)):
+ raise TypeError(
+ f"Value must be one of type: {', '.join(t.__name__ for t in types)}. Got {type(obj)}"
+ )
+ raise TypeError(f"Value must be of type {types.__name__}. Got {type(obj)}")
+
+
+class Property(ABC):
+ """
+ A generic SHACL object property. The different types will derive from this
+ class
+ """
+
+ def __init__(self, *, pattern=None):
+ self.pattern = pattern
+
+ def init(self):
+ return None
+
+ def validate(self, value):
+ check_type(value, self.VALID_TYPES)
+ if self.pattern is not None and not re.search(
+ self.pattern, self.to_string(value)
+ ):
+ raise ValueError(
+ f"Value is not correctly formatted. Got '{self.to_string(value)}'"
+ )
+
+ def set(self, value):
+ return value
+
+ def check_min_count(self, value, min_count):
+ return min_count == 1
+
+ def check_max_count(self, value, max_count):
+ return max_count == 1
+
+ def elide(self, value):
+ return value is None
+
+ def walk(self, value, callback, path):
+ callback(value, path)
+
+ def iter_objects(self, value, recursive, visited):
+ return []
+
+ def link_prop(self, value, objectset, missing, visited):
+ return value
+
+ def to_string(self, value):
+ return str(value)
+
+ @abstractmethod
+ def encode(self, encoder, value, state):
+ pass
+
+ @abstractmethod
+ def decode(self, decoder, *, objectset=None):
+ pass
+
+
+class StringProp(Property):
+ """
+ A scalar string property for an SHACL object
+ """
+
+ VALID_TYPES = str
+
+ def set(self, value):
+ return str(value)
+
+ def encode(self, encoder, value, state):
+ encoder.write_string(value)
+
+ def decode(self, decoder, *, objectset=None):
+ return decoder.read_string()
+
+
+class AnyURIProp(StringProp):
+ def encode(self, encoder, value, state):
+ encoder.write_iri(value)
+
+ def decode(self, decoder, *, objectset=None):
+ return decoder.read_iri()
+
+
+class DateTimeProp(Property):
+ """
+ A Date/Time Object with optional timezone
+ """
+
+ VALID_TYPES = datetime
+ UTC_FORMAT_STR = "%Y-%m-%dT%H:%M:%SZ"
+ REGEX = r"^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(Z|[+-]\d{2}:\d{2})?$"
+
+ def set(self, value):
+ return self._normalize(value)
+
+ def encode(self, encoder, value, state):
+ encoder.write_datetime(self.to_string(value))
+
+ def decode(self, decoder, *, objectset=None):
+ s = decoder.read_datetime()
+ if s is None:
+ return None
+ v = self.from_string(s)
+ return self._normalize(v)
+
+ def _normalize(self, value):
+ if value.utcoffset() is None:
+ value = value.astimezone()
+ offset = value.utcoffset()
+ if offset % timedelta(minutes=1):
+ offset = offset - (offset % timedelta(minutes=1))
+ value = value.replace(tzinfo=timezone(offset))
+ value = value.replace(microsecond=0)
+ return value
+
+ def to_string(self, value):
+ value = self._normalize(value)
+ if value.tzinfo == timezone.utc:
+ return value.strftime(self.UTC_FORMAT_STR)
+ return value.isoformat()
+
+ def from_string(self, value):
+ if not re.match(self.REGEX, value):
+ raise ValueError(f"'{value}' is not a correctly formatted datetime")
+ if "Z" in value:
+ d = datetime(
+ *(time.strptime(value, self.UTC_FORMAT_STR)[0:6]),
+ tzinfo=timezone.utc,
+ )
+ else:
+ d = datetime.fromisoformat(value)
+
+ return self._normalize(d)
+
+
+class DateTimeStampProp(DateTimeProp):
+ """
+ A Date/Time Object with required timestamp
+ """
+
+ REGEX = r"^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(Z|[+-]\d{2}:\d{2})$"
+
+
+class IntegerProp(Property):
+ VALID_TYPES = int
+
+ def set(self, value):
+ return int(value)
+
+ def encode(self, encoder, value, state):
+ encoder.write_integer(value)
+
+ def decode(self, decoder, *, objectset=None):
+ return decoder.read_integer()
+
+
+class PositiveIntegerProp(IntegerProp):
+ def validate(self, value):
+ super().validate(value)
+ if value < 1:
+ raise ValueError(f"Value must be >=1. Got {value}")
+
+
+class NonNegativeIntegerProp(IntegerProp):
+ def validate(self, value):
+ super().validate(value)
+ if value < 0:
+ raise ValueError(f"Value must be >= 0. Got {value}")
+
+
+class BooleanProp(Property):
+ VALID_TYPES = bool
+
+ def set(self, value):
+ return bool(value)
+
+ def encode(self, encoder, value, state):
+ encoder.write_bool(value)
+
+ def decode(self, decoder, *, objectset=None):
+ return decoder.read_bool()
+
+
+class FloatProp(Property):
+ VALID_TYPES = (float, int)
+
+ def set(self, value):
+ return float(value)
+
+ def encode(self, encoder, value, state):
+ encoder.write_float(value)
+
+ def decode(self, decoder, *, objectset=None):
+ return decoder.read_float()
+
+
+class ObjectProp(Property):
+ """
+ A scalar SHACL object property of a SHACL object
+ """
+
+ def __init__(self, cls, required):
+ super().__init__()
+ self.cls = cls
+ self.required = required
+
+ def init(self):
+ if self.required and not self.cls.IS_ABSTRACT:
+ return self.cls()
+ return None
+
+ def validate(self, value):
+ check_type(value, (self.cls, str))
+
+ def walk(self, value, callback, path):
+ if value is None:
+ return
+
+ if not isinstance(value, str):
+ value.walk(callback, path)
+ else:
+ callback(value, path)
+
+ def iter_objects(self, value, recursive, visited):
+ if value is None or isinstance(value, str):
+ return
+
+ if value not in visited:
+ visited.add(value)
+ yield value
+
+ if recursive:
+ for c in value.iter_objects(recursive=True, visited=visited):
+ yield c
+
+ def encode(self, encoder, value, state):
+ if value is None:
+ raise ValueError("Object cannot be None")
+
+ if isinstance(value, str):
+ value = _NI_ENCODE_CONTEXT.get(value, value)
+ encoder.write_iri(value)
+ return
+
+ return value.encode(encoder, state)
+
+ def decode(self, decoder, *, objectset=None):
+ iri = decoder.read_iri()
+ if iri is None:
+ return self.cls.decode(decoder, objectset=objectset)
+
+ iri = _NI_DECODE_CONTEXT.get(iri, iri)
+
+ if objectset is None:
+ return iri
+
+ obj = objectset.find_by_id(iri)
+ if obj is None:
+ return iri
+
+ self.validate(obj)
+ return obj
+
+ def link_prop(self, value, objectset, missing, visited):
+ if value is None:
+ return value
+
+ if isinstance(value, str):
+ o = objectset.find_by_id(value)
+ if o is not None:
+ self.validate(o)
+ return o
+
+ if missing is not None:
+ missing.add(value)
+
+ return value
+
+ # De-duplicate IDs
+ if value._id:
+ value = objectset.find_by_id(value._id, value)
+ self.validate(value)
+
+ value.link_helper(objectset, missing, visited)
+ return value
+
+
+class ListProxy(object):
+ def __init__(self, prop, data=None):
+ if data is None:
+ self.__data = []
+ else:
+ self.__data = data
+ self.__prop = prop
+
+ def append(self, value):
+ self.__prop.validate(value)
+ self.__data.append(self.__prop.set(value))
+
+ def insert(self, idx, value):
+ self.__prop.validate(value)
+ self.__data.insert(idx, self.__prop.set(value))
+
+ def extend(self, items):
+ for i in items:
+ self.append(i)
+
+ def sort(self, *args, **kwargs):
+ self.__data.sort(*args, **kwargs)
+
+ def __getitem__(self, key):
+ return self.__data[key]
+
+ def __setitem__(self, key, value):
+ if isinstance(key, slice):
+ for v in value:
+ self.__prop.validate(v)
+ self.__data[key] = [self.__prop.set(v) for v in value]
+ else:
+ self.__prop.validate(value)
+ self.__data[key] = self.__prop.set(value)
+
+ def __delitem__(self, key):
+ del self.__data[key]
+
+ def __contains__(self, item):
+ return item in self.__data
+
+ def __iter__(self):
+ return iter(self.__data)
+
+ def __len__(self):
+ return len(self.__data)
+
+ def __str__(self):
+ return str(self.__data)
+
+ def __repr__(self):
+ return repr(self.__data)
+
+ def __eq__(self, other):
+ if isinstance(other, ListProxy):
+ return self.__data == other.__data
+
+ return self.__data == other
+
+
+class ListProp(Property):
+ """
+ A list of SHACL properties
+ """
+
+ VALID_TYPES = (list, ListProxy)
+
+ def __init__(self, prop):
+ super().__init__()
+ self.prop = prop
+
+ def init(self):
+ return ListProxy(self.prop)
+
+ def validate(self, value):
+ super().validate(value)
+
+ for i in value:
+ self.prop.validate(i)
+
+ def set(self, value):
+ if isinstance(value, ListProxy):
+ return value
+
+ return ListProxy(self.prop, [self.prop.set(d) for d in value])
+
+ def check_min_count(self, value, min_count):
+ check_type(value, ListProxy)
+ return len(value) >= min_count
+
+ def check_max_count(self, value, max_count):
+ check_type(value, ListProxy)
+ return len(value) <= max_count
+
+ def elide(self, value):
+ check_type(value, ListProxy)
+ return len(value) == 0
+
+ def walk(self, value, callback, path):
+ callback(value, path)
+ for idx, v in enumerate(value):
+ self.prop.walk(v, callback, path + [f"[{idx}]"])
+
+ def iter_objects(self, value, recursive, visited):
+ for v in value:
+ for c in self.prop.iter_objects(v, recursive, visited):
+ yield c
+
+ def link_prop(self, value, objectset, missing, visited):
+ if isinstance(value, ListProxy):
+ data = [self.prop.link_prop(v, objectset, missing, visited) for v in value]
+ else:
+ data = [self.prop.link_prop(v, objectset, missing, visited) for v in value]
+
+ return ListProxy(self.prop, data=data)
+
+ def encode(self, encoder, value, state):
+ check_type(value, ListProxy)
+
+ with encoder.write_list() as list_s:
+ for v in value:
+ with list_s.write_list_item() as item_s:
+ self.prop.encode(item_s, v, state)
+
+ def decode(self, decoder, *, objectset=None):
+ data = []
+ for val_d in decoder.read_list():
+ v = self.prop.decode(val_d, objectset=objectset)
+ self.prop.validate(v)
+ data.append(v)
+
+ return ListProxy(self.prop, data=data)
+
+
+class EnumProp(Property):
+ VALID_TYPES = str
+
+ def __init__(self, values, *, pattern=None):
+ super().__init__(pattern=pattern)
+ self.values = values
+
+ def validate(self, value):
+ super().validate(value)
+
+ valid_values = (iri for iri, _ in self.values)
+ if value not in valid_values:
+ raise ValueError(
+ f"'{value}' is not a valid value. Choose one of {' '.join(valid_values)}"
+ )
+
+ def encode(self, encoder, value, state):
+ for iri, compact in self.values:
+ if iri == value:
+ encoder.write_enum(value, self, compact)
+ return
+
+ encoder.write_enum(value, self)
+
+ def decode(self, decoder, *, objectset=None):
+ v = decoder.read_enum(self)
+ for iri, compact in self.values:
+ if v == compact:
+ return iri
+ return v
+
+
+class NodeKind(Enum):
+ BlankNode = 1
+ IRI = 2
+ BlankNodeOrIRI = 3
+
+
+def is_IRI(s):
+ if not isinstance(s, str):
+ return False
+ if s.startswith("_:"):
+ return False
+ if ":" not in s:
+ return False
+ return True
+
+
+def is_blank_node(s):
+ if not isinstance(s, str):
+ return False
+ if not s.startswith("_:"):
+ return False
+ return True
+
+
+def register(type_iri, *, compact_type=None, abstract=False):
+ def add_class(key, c):
+ assert (
+ key not in SHACLObject.CLASSES
+ ), f"{key} already registered to {SHACLObject.CLASSES[key].__name__}"
+ SHACLObject.CLASSES[key] = c
+
+ def decorator(c):
+ global NAMED_INDIVIDUALS
+
+ assert issubclass(
+ c, SHACLObject
+ ), f"{c.__name__} is not derived from SHACLObject"
+
+ c._OBJ_TYPE = type_iri
+ c.IS_ABSTRACT = abstract
+ add_class(type_iri, c)
+
+ c._OBJ_COMPACT_TYPE = compact_type
+ if compact_type:
+ add_class(compact_type, c)
+
+ NAMED_INDIVIDUALS |= set(c.NAMED_INDIVIDUALS.values())
+
+ # Registration is deferred until the first instance of class is created
+ # so that it has access to any other defined class
+ c._NEEDS_REG = True
+ return c
+
+ return decorator
+
+
+register_lock = threading.Lock()
+NAMED_INDIVIDUALS = set()
+
+
+@functools.total_ordering
+class SHACLObject(object):
+ CLASSES = {}
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ ID_ALIAS = None
+ IS_ABSTRACT = True
+
+ def __init__(self, **kwargs):
+ if self._is_abstract():
+ raise NotImplementedError(
+ f"{self.__class__.__name__} is abstract and cannot be implemented"
+ )
+
+ with register_lock:
+ cls = self.__class__
+ if cls._NEEDS_REG:
+ cls._OBJ_PROPERTIES = {}
+ cls._OBJ_IRIS = {}
+ cls._register_props()
+ cls._NEEDS_REG = False
+
+ self.__dict__["_obj_data"] = {}
+ self.__dict__["_obj_metadata"] = {}
+
+ for iri, prop, _, _, _, _ in self.__iter_props():
+ self.__dict__["_obj_data"][iri] = prop.init()
+
+ for k, v in kwargs.items():
+ setattr(self, k, v)
+
+ def _is_abstract(self):
+ return self.__class__.IS_ABSTRACT
+
+ @classmethod
+ def _register_props(cls):
+ cls._add_property("_id", StringProp(), iri="@id")
+
+ @classmethod
+ def _add_property(
+ cls,
+ pyname,
+ prop,
+ iri,
+ min_count=None,
+ max_count=None,
+ compact=None,
+ ):
+ if pyname in cls._OBJ_IRIS:
+ raise KeyError(f"'{pyname}' is already defined for '{cls.__name__}'")
+ if iri in cls._OBJ_PROPERTIES:
+ raise KeyError(f"'{iri}' is already defined for '{cls.__name__}'")
+
+ while hasattr(cls, pyname):
+ pyname = pyname + "_"
+
+ pyname = sys.intern(pyname)
+ iri = sys.intern(iri)
+
+ cls._OBJ_IRIS[pyname] = iri
+ cls._OBJ_PROPERTIES[iri] = (prop, min_count, max_count, pyname, compact)
+
+ def __setattr__(self, name, value):
+ if name == self.ID_ALIAS:
+ self["@id"] = value
+ return
+
+ try:
+ iri = self._OBJ_IRIS[name]
+ self[iri] = value
+ except KeyError:
+ raise AttributeError(
+ f"'{name}' is not a valid property of {self.__class__.__name__}"
+ )
+
+ def __getattr__(self, name):
+ if name in self._OBJ_IRIS:
+ return self.__dict__["_obj_data"][self._OBJ_IRIS[name]]
+
+ if name == self.ID_ALIAS:
+ return self.__dict__["_obj_data"]["@id"]
+
+ if name == "_metadata":
+ return self.__dict__["_obj_metadata"]
+
+ if name == "_IRI":
+ return self._OBJ_IRIS
+
+ if name == "TYPE":
+ return self.__class__._OBJ_TYPE
+
+ if name == "COMPACT_TYPE":
+ return self.__class__._OBJ_COMPACT_TYPE
+
+ raise AttributeError(
+ f"'{name}' is not a valid property of {self.__class__.__name__}"
+ )
+
+ def __delattr__(self, name):
+ if name == self.ID_ALIAS:
+ del self["@id"]
+ return
+
+ try:
+ iri = self._OBJ_IRIS[name]
+ del self[iri]
+ except KeyError:
+ raise AttributeError(
+ f"'{name}' is not a valid property of {self.__class__.__name__}"
+ )
+
+ def __get_prop(self, iri):
+ if iri not in self._OBJ_PROPERTIES:
+ raise KeyError(
+ f"'{iri}' is not a valid property of {self.__class__.__name__}"
+ )
+
+ return self._OBJ_PROPERTIES[iri]
+
+ def __iter_props(self):
+ for iri, v in self._OBJ_PROPERTIES.items():
+ yield iri, *v
+
+ def __getitem__(self, iri):
+ return self.__dict__["_obj_data"][iri]
+
+ def __setitem__(self, iri, value):
+ if iri == "@id":
+ if self.NODE_KIND == NodeKind.BlankNode:
+ if not is_blank_node(value):
+ raise ValueError(
+ f"{self.__class__.__name__} ({id(self)}) can only have local reference. Property '{iri}' cannot be set to '{value}' and must start with '_:'"
+ )
+ elif self.NODE_KIND == NodeKind.IRI:
+ if not is_IRI(value):
+ raise ValueError(
+ f"{self.__class__.__name__} ({id(self)}) can only have an IRI value. Property '{iri}' cannot be set to '{value}'"
+ )
+ else:
+ if not is_blank_node(value) and not is_IRI(value):
+ raise ValueError(
+ f"{self.__class__.__name__} ({id(self)}) Has invalid Property '{iri}' '{value}'. Must be a blank node or IRI"
+ )
+
+ prop, _, _, _, _ = self.__get_prop(iri)
+ prop.validate(value)
+ self.__dict__["_obj_data"][iri] = prop.set(value)
+
+ def __delitem__(self, iri):
+ prop, _, _, _, _ = self.__get_prop(iri)
+ self.__dict__["_obj_data"][iri] = prop.init()
+
+ def __iter__(self):
+ return iter(self._OBJ_PROPERTIES.keys())
+
+ def walk(self, callback, path=None):
+ """
+ Walk object tree, invoking the callback for each item
+
+ Callback has the form:
+
+ def callback(object, path):
+
+ and should return True to recurse into the children of the current
+ value, or False to skip them
+ """
+ if path is None:
+ path = ["."]
+
+ if callback(self, path):
+ for iri, prop, _, _, _, _ in self.__iter_props():
+ prop.walk(self.__dict__["_obj_data"][iri], callback, path + [f".{iri}"])
+
+ def property_keys(self):
+ for iri, _, _, _, pyname, compact in self.__iter_props():
+ if iri == "@id":
+ compact = self.ID_ALIAS
+ yield pyname, iri, compact
+
+ def iter_objects(self, *, recursive=False, visited=None):
+ """
+ Iterate over all objects that are children of this one
+ """
+ if visited is None:
+ visited = set()
+
+ for iri, prop, _, _, _, _ in self.__iter_props():
+ for c in prop.iter_objects(
+ self.__dict__["_obj_data"][iri], recursive=recursive, visited=visited
+ ):
+ yield c
+
+ def encode(self, encoder, state):
+ idname = self.ID_ALIAS or self._OBJ_IRIS["_id"]
+ if not self._id and self.NODE_KIND == NodeKind.IRI:
+ raise ValueError(
+ f"{self.__class__.__name__} ({id(self)}) must have a IRI for property '{idname}'"
+ )
+
+ if state.is_written(self):
+ encoder.write_iri(state.get_object_id(self))
+ return
+
+ state.add_written(self)
+
+ with encoder.write_object(
+ self,
+ state.get_object_id(self),
+ bool(self._id) or state.is_refed(self),
+ ) as obj_s:
+ self._encode_properties(obj_s, state)
+
+ def _encode_properties(self, encoder, state):
+ for iri, prop, min_count, max_count, pyname, compact in self.__iter_props():
+ value = self.__dict__["_obj_data"][iri]
+ if prop.elide(value):
+ if min_count:
+ raise ValueError(
+ f"Property '{pyname}' in {self.__class__.__name__} ({id(self)}) is required (currently {value!r})"
+ )
+ continue
+
+ if min_count is not None:
+ if not prop.check_min_count(value, min_count):
+ raise ValueError(
+ f"Property '{pyname}' in {self.__class__.__name__} ({id(self)}) requires a minimum of {min_count} elements"
+ )
+
+ if max_count is not None:
+ if not prop.check_max_count(value, max_count):
+ raise ValueError(
+ f"Property '{pyname}' in {self.__class__.__name__} ({id(self)}) requires a maximum of {max_count} elements"
+ )
+
+ if iri == self._OBJ_IRIS["_id"]:
+ continue
+
+ with encoder.write_property(iri, compact) as prop_s:
+ prop.encode(prop_s, value, state)
+
+ @classmethod
+ def _make_object(cls, typ):
+ if typ not in cls.CLASSES:
+ raise TypeError(f"Unknown type {typ}")
+
+ return cls.CLASSES[typ]()
+
+ @classmethod
+ def decode(cls, decoder, *, objectset=None):
+ typ, obj_d = decoder.read_object()
+ if typ is None:
+ raise TypeError("Unable to determine type for object")
+
+ obj = cls._make_object(typ)
+ for key in (obj.ID_ALIAS, obj._OBJ_IRIS["_id"]):
+ with obj_d.read_property(key) as prop_d:
+ if prop_d is None:
+ continue
+
+ _id = prop_d.read_iri()
+ if _id is None:
+ raise TypeError(f"Object key '{key}' is the wrong type")
+
+ obj._id = _id
+ break
+
+ if obj.NODE_KIND == NodeKind.IRI and not obj._id:
+ raise ValueError("Object is missing required IRI")
+
+ if objectset is not None:
+ if obj._id:
+ v = objectset.find_by_id(_id)
+ if v is not None:
+ return v
+
+ obj._decode_properties(obj_d, objectset=objectset)
+
+ if objectset is not None:
+ objectset.add_index(obj)
+ return obj
+
+ def _decode_properties(self, decoder, objectset=None):
+ for key in decoder.object_keys():
+ if not self._decode_prop(decoder, key, objectset=objectset):
+ raise KeyError(f"Unknown property '{key}'")
+
+ def _decode_prop(self, decoder, key, objectset=None):
+ if key in (self._OBJ_IRIS["_id"], self.ID_ALIAS):
+ return True
+
+ for iri, prop, _, _, _, compact in self.__iter_props():
+ if compact == key:
+ read_key = compact
+ elif iri == key:
+ read_key = iri
+ else:
+ continue
+
+ with decoder.read_property(read_key) as prop_d:
+ v = prop.decode(prop_d, objectset=objectset)
+ prop.validate(v)
+ self.__dict__["_obj_data"][iri] = v
+ return True
+
+ return False
+
+ def link_helper(self, objectset, missing, visited):
+ if self in visited:
+ return
+
+ visited.add(self)
+
+ for iri, prop, _, _, _, _ in self.__iter_props():
+ self.__dict__["_obj_data"][iri] = prop.link_prop(
+ self.__dict__["_obj_data"][iri],
+ objectset,
+ missing,
+ visited,
+ )
+
+ def __str__(self):
+ parts = [
+ f"{self.__class__.__name__}(",
+ ]
+ if self._id:
+ parts.append(f"@id='{self._id}'")
+ parts.append(")")
+ return "".join(parts)
+
+ def __hash__(self):
+ return super().__hash__()
+
+ def __eq__(self, other):
+ return super().__eq__(other)
+
+ def __lt__(self, other):
+ def sort_key(obj):
+ if isinstance(obj, str):
+ return (obj, "", "", "")
+ return (
+ obj._id or "",
+ obj.TYPE,
+ getattr(obj, "name", None) or "",
+ id(obj),
+ )
+
+ return sort_key(self) < sort_key(other)
+
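+
+ # Editor's illustrative sketch (not part of the generated module): how a
+ # concrete class would be registered and given properties. The type IRI
+ # "http://example.org/Widget" and the "name" property are hypothetical;
+ # only register(), SHACLObject, NodeKind and StringProp from this module
+ # are assumed.
+ @register("http://example.org/Widget", compact_type="Widget", abstract=False)
+ class _ExampleWidget(SHACLObject):
+     NODE_KIND = NodeKind.BlankNodeOrIRI
+
+     @classmethod
+     def _register_props(cls):
+         super()._register_props()
+         cls._add_property("name", StringProp(), iri="http://example.org/name")
+
+
+ # Kept inside a function so nothing runs at import time
+ def _example_widget_usage():
+     w = _ExampleWidget(name="demo")
+     w._id = "http://example.org/widgets/1"
+     return w.name, w._id
+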
+
+class SHACLExtensibleObject(object):
+ CLOSED = False
+
+ def __init__(self, typ=None, **kwargs):
+ if typ:
+ self.__dict__["_obj_TYPE"] = (typ, None)
+ else:
+ self.__dict__["_obj_TYPE"] = (self._OBJ_TYPE, self._OBJ_COMPACT_TYPE)
+ super().__init__(**kwargs)
+
+ def _is_abstract(self):
+ # Unknown classes are assumed to not be abstract so that they can be
+ # deserialized
+ typ = self.__dict__["_obj_TYPE"][0]
+ if typ in self.__class__.CLASSES:
+ return self.__class__.CLASSES[typ].IS_ABSTRACT
+
+ return False
+
+ @classmethod
+ def _make_object(cls, typ):
+ # Check for a known type, and if so, deserialize as that instead
+ if typ in cls.CLASSES:
+ return cls.CLASSES[typ]()
+
+ obj = cls(typ)
+ return obj
+
+ def _decode_properties(self, decoder, objectset=None):
+ if self.CLOSED:
+ super()._decode_properties(decoder, objectset=objectset)
+ return
+
+ for key in decoder.object_keys():
+ if self._decode_prop(decoder, key, objectset=objectset):
+ continue
+
+ if not is_IRI(key):
+ raise KeyError(
+ f"Extensible object properties must be IRIs. Got '{key}'"
+ )
+
+ with decoder.read_property(key) as prop_d:
+ self.__dict__["_obj_data"][key] = prop_d.read_value()
+
+ def _encode_properties(self, encoder, state):
+ def encode_value(encoder, v):
+ if isinstance(v, bool):
+ encoder.write_bool(v)
+ elif isinstance(v, str):
+ encoder.write_string(v)
+ elif isinstance(v, int):
+ encoder.write_integer(v)
+ elif isinstance(v, float):
+ encoder.write_float(v)
+ else:
+ raise TypeError(
+ f"Unsupported serialized type {type(v)} with value '{v}'"
+ )
+
+ super()._encode_properties(encoder, state)
+ if self.CLOSED:
+ return
+
+ for iri, value in self.__dict__["_obj_data"].items():
+ if iri in self._OBJ_PROPERTIES:
+ continue
+
+ with encoder.write_property(iri) as prop_s:
+ encode_value(prop_s, value)
+
+ def __setitem__(self, iri, value):
+ try:
+ super().__setitem__(iri, value)
+ except KeyError:
+ if self.CLOSED:
+ raise
+
+ if not is_IRI(iri):
+ raise KeyError(f"Key '{iri}' must be an IRI")
+ self.__dict__["_obj_data"][iri] = value
+
+ def __delitem__(self, iri):
+ try:
+ super().__delitem__(iri)
+ except KeyError:
+ if self.CLOSED:
+ raise
+
+ if not is_IRI(iri):
+ raise KeyError(f"Key '{iri}' must be an IRI")
+ del self.__dict__["_obj_data"][iri]
+
+ def __getattr__(self, name):
+ if name == "TYPE":
+ return self.__dict__["_obj_TYPE"][0]
+ if name == "COMPACT_TYPE":
+ return self.__dict__["_obj_TYPE"][1]
+ return super().__getattr__(name)
+
+ def property_keys(self):
+ iris = set()
+ for pyname, iri, compact in super().property_keys():
+ iris.add(iri)
+ yield pyname, iri, compact
+
+ if self.CLOSED:
+ return
+
+ for iri in self.__dict__["_obj_data"].keys():
+ if iri not in iris:
+ yield None, iri, None
+
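+
+ # Editor's illustrative sketch (not part of the generated module): the
+ # extensible mixin combined with SHACLObject, as the generated extension
+ # classes do. The type IRI below is hypothetical. With CLOSED = False,
+ # unknown properties are accepted as long as their keys are IRIs.
+ @register("http://example.org/ExtensibleThing", abstract=False)
+ class _ExampleExtensible(SHACLExtensibleObject, SHACLObject):
+     CLOSED = False
+
+
+ def _example_extensible_usage():
+     e = _ExampleExtensible()
+     # Not a declared property, but allowed because the key is an IRI
+     e["http://example.org/customProperty"] = "some value"
+     return e.TYPE, e["http://example.org/customProperty"]
+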
+
+class SHACLObjectSet(object):
+ def __init__(self, objects=[], *, link=False):
+ self.objects = set()
+ self.missing_ids = set()
+ for o in objects:
+ self.objects.add(o)
+ self.create_index()
+ if link:
+ self._link()
+
+ def create_index(self):
+ """
+ (re)Create object index
+
+ Creates or recreates the indices for the object set to enable fast
+ lookup. All objects and their children are walked and indexed
+ """
+ self.obj_by_id = {}
+ self.obj_by_type = {}
+ for o in self.foreach():
+ self.add_index(o)
+
+ def add_index(self, obj):
+ """
+ Add object to index
+
+ Adds the object to all appropriate indices
+ """
+
+ def reg_type(typ, compact, o, exact):
+ self.obj_by_type.setdefault(typ, set()).add((exact, o))
+ if compact:
+ self.obj_by_type.setdefault(compact, set()).add((exact, o))
+
+ if not isinstance(obj, SHACLObject):
+ raise TypeError("Object is not of type SHACLObject")
+
+ for typ in SHACLObject.CLASSES.values():
+ if isinstance(obj, typ):
+ reg_type(
+ typ._OBJ_TYPE, typ._OBJ_COMPACT_TYPE, obj, obj.__class__ is typ
+ )
+
+ # This covers custom extensions
+ reg_type(obj.TYPE, obj.COMPACT_TYPE, obj, True)
+
+ if not obj._id:
+ return
+
+ self.missing_ids.discard(obj._id)
+
+ if obj._id in self.obj_by_id:
+ return
+
+ self.obj_by_id[obj._id] = obj
+
+ def add(self, obj):
+ """
+ Add object to object set
+
+ Adds a SHACLObject to the object set and indexes it.
+
+ NOTE: Child objects of the added object are not indexed
+ """
+ if not isinstance(obj, SHACLObject):
+ raise TypeError("Object is not of type SHACLObject")
+
+ if obj not in self.objects:
+ self.objects.add(obj)
+ self.add_index(obj)
+ return obj
+
+ def update(self, *others):
+ """
+ Update the object set, adding all objects from each of the other iterables
+ """
+ for o in others:
+ for obj in o:
+ self.add(obj)
+
+ def __contains__(self, item):
+ """
+ Returns True if the item is in the object set
+ """
+ return item in self.objects
+
+ def link(self):
+ """
+ Link object set
+
+ Links the objects in the object set by replacing string object
+ references with references to the objects themselves, e.g. a property
+ that references object "https://foo/bar" by a string reference is
+ replaced with an actual reference to the object with the same ID, if
+ it exists in the object set
+
+ If multiple objects with the same ID are found, the duplicates are
+ eliminated
+ """
+ self.create_index()
+ return self._link()
+
+ def _link(self):
+ global NAMED_INDIVIDUALS
+
+ self.missing_ids = set()
+ visited = set()
+
+ new_objects = set()
+
+ for o in self.objects:
+ if o._id:
+ o = self.find_by_id(o._id, o)
+ o.link_helper(self, self.missing_ids, visited)
+ new_objects.add(o)
+
+ self.objects = new_objects
+
+ # Remove blank nodes
+ obj_by_id = {}
+ for _id, obj in self.obj_by_id.items():
+ if _id.startswith("_:"):
+ del obj._id
+ else:
+ obj_by_id[_id] = obj
+ self.obj_by_id = obj_by_id
+
+ # Named individuals aren't considered missing
+ self.missing_ids -= NAMED_INDIVIDUALS
+
+ return self.missing_ids
+
+ def find_by_id(self, _id, default=None):
+ """
+ Find object by ID
+
+ Returns the object that matches the specified ID, or default if there
+ is no object with the specified ID
+ """
+ if _id not in self.obj_by_id:
+ return default
+ return self.obj_by_id[_id]
+
+ def foreach(self):
+ """
+ Iterate over every object in the object set, and all child objects
+ """
+ visited = set()
+ for o in self.objects:
+ if o not in visited:
+ yield o
+ visited.add(o)
+
+ for child in o.iter_objects(recursive=True, visited=visited):
+ yield child
+
+ def foreach_type(self, typ, *, match_subclass=True):
+ """
+ Iterate over each object of a specified type (or subclass thereof)
+
+ If match_subclass is True, any class derived from typ will also match
+ (similar to isinstance()). If False, only exact matches will be
+ returned
+ """
+ if not isinstance(typ, str):
+ if not issubclass(typ, SHACLObject):
+ raise TypeError(f"Type must be derived from SHACLObject, got {typ}")
+ typ = typ._OBJ_TYPE
+
+ if typ not in self.obj_by_type:
+ return
+
+ for exact, o in self.obj_by_type[typ]:
+ if match_subclass or exact:
+ yield o
+
+ def merge(self, *objectsets):
+ """
+ Merge object sets
+
+ Returns a new object set that is the combination of this object set and
+ all provided arguments
+ """
+ new_objects = set()
+ new_objects |= self.objects
+ for d in objectsets:
+ new_objects |= d.objects
+
+ return SHACLObjectSet(new_objects, link=True)
+
+ def encode(self, encoder, force_list=False):
+ """
+ Serialize a list of objects to a serialization encoder
+
+ If force_list is True, a list will always be written using the encoder.
+ """
+ ref_counts = {}
+ state = EncodeState()
+
+ def walk_callback(value, path):
+ nonlocal state
+ nonlocal ref_counts
+
+ if not isinstance(value, SHACLObject):
+ return True
+
+ # Remove blank node ID for re-assignment
+ if value._id and value._id.startswith("_:"):
+ del value._id
+
+ if value._id:
+ state.add_refed(value)
+
+ # If the object is referenced more than once, add it to the set of
+ # referenced objects
+ ref_counts.setdefault(value, 0)
+ ref_counts[value] += 1
+ if ref_counts[value] > 1:
+ state.add_refed(value)
+ return False
+
+ return True
+
+ for o in self.objects:
+ if o._id:
+ state.add_refed(o)
+ o.walk(walk_callback)
+
+ use_list = force_list or len(self.objects) > 1
+
+ if use_list:
+ # If we are making a list, add all the objects referred to by reference
+ # to the list
+ objects = list(self.objects | state.ref_objects)
+ else:
+ objects = list(self.objects)
+
+ objects.sort()
+
+ if use_list:
+ # Ensure top level objects are only written in the top level graph
+ # node, and referenced by ID everywhere else. This is done by setting
+ # the flag that indicates this object has been written for all the top
+ # level objects, then clearing it right before serializing the object.
+ #
+ # In this way, if an object is referenced before it is supposed to be
+ # serialized into the @graph, it will serialize as a string instead of
+ # the actual object
+ for o in objects:
+ state.written_objects.add(o)
+
+ with encoder.write_list() as list_s:
+ for o in objects:
+ # Allow this specific object to be written now
+ state.written_objects.remove(o)
+ with list_s.write_list_item() as item_s:
+ o.encode(item_s, state)
+
+ else:
+ objects[0].encode(encoder, state)
+
+ def decode(self, decoder):
+ self.create_index()
+
+ for obj_d in decoder.read_list():
+ o = SHACLObject.decode(obj_d, objectset=self)
+ self.objects.add(o)
+
+ self._link()
+
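+
+ # Editor's illustrative sketch (not part of the generated module), reusing
+ # the hypothetical _ExampleWidget class sketched after SHACLObject above.
+ def _example_objectset_usage():
+     a = _ExampleWidget(name="a")
+     a._id = "http://example.org/widgets/a"
+     b = _ExampleWidget(name="b")
+
+     # Index and link the objects in one step
+     objectset = SHACLObjectSet([a, b], link=True)
+
+     # Lookup by ID works for any object that has one
+     assert objectset.find_by_id("http://example.org/widgets/a") is a
+
+     # Iterate over every _ExampleWidget (subclasses would also match)
+     return list(objectset.foreach_type(_ExampleWidget))
+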
+
+class EncodeState(object):
+ def __init__(self):
+ self.ref_objects = set()
+ self.written_objects = set()
+ self.blank_objects = {}
+
+ def get_object_id(self, o):
+ if o._id:
+ return o._id
+
+ if o not in self.blank_objects:
+ _id = f"_:{o.__class__.__name__}{len(self.blank_objects)}"
+ self.blank_objects[o] = _id
+
+ return self.blank_objects[o]
+
+ def is_refed(self, o):
+ return o in self.ref_objects
+
+ def add_refed(self, o):
+ self.ref_objects.add(o)
+
+ def is_written(self, o):
+ return o in self.written_objects
+
+ def add_written(self, o):
+ self.written_objects.add(o)
+
+
+class Decoder(ABC):
+ @abstractmethod
+ def read_value(self):
+ """
+ Consume next item
+
+ Consumes the next item of any type
+ """
+ pass
+
+ @abstractmethod
+ def read_string(self):
+ """
+ Consume the next item as a string.
+
+ Returns the string value of the next item, or `None` if the next item
+ is not a string
+ """
+ pass
+
+ @abstractmethod
+ def read_datetime(self):
+ """
+ Consumes the next item as a date & time string
+
+ Returns the string value of the next item, if it is an ISO datetime,
+ or `None` if the next item is not an ISO datetime string.
+
+ Note that validation of the string is done by the caller, so a minimal
+ implementation can just check if the next item is a string without
+ worrying about the format
+ """
+ pass
+
+ @abstractmethod
+ def read_integer(self):
+ """
+ Consumes the next item as an integer
+
+ Returns the integer value of the next item, or `None` if the next item
+ is not an integer
+ """
+ pass
+
+ @abstractmethod
+ def read_iri(self):
+ """
+ Consumes the next item as an IRI string
+
+ Returns the string value of the next item as an IRI, or `None` if the
+ next item is not an IRI.
+
+ The returned string should be either a fully-qualified IRI, or a blank
+ node ID
+ """
+ pass
+
+ @abstractmethod
+ def read_enum(self, e):
+ """
+ Consumes the next item as an Enum value string
+
+ Returns the fully qualified IRI of the next enum item, or `None` if the
+ next item is not an enum value.
+
+ The caller is responsible for validating that the returned IRI is
+ actually a member of the specified Enum, so the `Decoder` does not need
+ to check that, but can if it wishes
+ """
+ pass
+
+ @abstractmethod
+ def read_bool(self):
+ """
+ Consume the next item as a boolean value
+
+ Returns the boolean value of the next item, or `None` if the next item
+ is not a boolean
+ """
+ pass
+
+ @abstractmethod
+ def read_float(self):
+ """
+ Consume the next item as a float value
+
+ Returns the float value of the next item, or `None` if the next item is
+ not a float
+ """
+ pass
+
+ @abstractmethod
+ def read_list(self):
+ """
+ Consume the next item as a list generator
+
+ This should generate a `Decoder` object for each item in the list. The
+ generated `Decoder` can be used to read the corresponding item from the
+ list
+ """
+ pass
+
+ @abstractmethod
+ def read_object(self):
+ """
+ Consume next item as an object
+
+ A context manager that "enters" the next item as a object and yields a
+ `Decoder` that can read properties from it. If the next item is not an
+ object, yields `None`
+
+ Properties will be read out of the object using `read_property` and
+ `read_object_id`
+ """
+ pass
+
+ @abstractmethod
+ @contextmanager
+ def read_property(self, key):
+ """
+ Read property from object
+
+ A context manager that yields a `Decoder` that can be used to read the
+ value of the property with the given key in the current object, or `None`
+ if the property does not exist in the current object.
+ """
+ pass
+
+ @abstractmethod
+ def object_keys(self):
+ """
+ Read property keys from an object
+
+ Iterates over all the serialized keys for the current object
+ """
+ pass
+
+ @abstractmethod
+ def read_object_id(self, alias=None):
+ """
+ Read current object ID property
+
+ Returns the ID of the current object if one is defined, or `None` if
+ the current object has no ID.
+
+ The ID must be a fully qualified IRI or a blank node
+
+ If `alias` is provided, it is a hint as to another name by which the
+ ID might be found, if the `Decoder` supports aliases for an ID
+ """
+ pass
+
+
+class JSONLDDecoder(Decoder):
+ def __init__(self, data, root=False):
+ self.data = data
+ self.root = root
+
+ def read_value(self):
+ if isinstance(self.data, str):
+ try:
+ return float(self.data)
+ except ValueError:
+ pass
+ return self.data
+
+ def read_string(self):
+ if isinstance(self.data, str):
+ return self.data
+ return None
+
+ def read_datetime(self):
+ return self.read_string()
+
+ def read_integer(self):
+ if isinstance(self.data, int):
+ return self.data
+ return None
+
+ def read_bool(self):
+ if isinstance(self.data, bool):
+ return self.data
+ return None
+
+ def read_float(self):
+ if isinstance(self.data, (int, float, str)):
+ return float(self.data)
+ return None
+
+ def read_iri(self):
+ if isinstance(self.data, str):
+ return self.data
+ return None
+
+ def read_enum(self, e):
+ if isinstance(self.data, str):
+ return self.data
+ return None
+
+ def read_list(self):
+ if isinstance(self.data, (list, tuple, set)):
+ for v in self.data:
+ yield self.__class__(v)
+ else:
+ yield self
+
+ def __get_value(self, *keys):
+ for k in keys:
+ if k and k in self.data:
+ return self.data[k]
+ return None
+
+ @contextmanager
+ def read_property(self, key):
+ v = self.__get_value(key)
+ if v is not None:
+ yield self.__class__(v)
+ else:
+ yield None
+
+ def object_keys(self):
+ for key in self.data.keys():
+ if key in ("@type", "type"):
+ continue
+ if self.root and key == "@context":
+ continue
+ yield key
+
+ def read_object(self):
+ typ = self.__get_value("@type", "type")
+ if typ is not None:
+ return typ, self
+
+ return None, self
+
+ def read_object_id(self, alias=None):
+ return self.__get_value(alias, "@id")
+
+
+class JSONLDDeserializer(object):
+ def deserialize_data(self, data, objectset: SHACLObjectSet):
+ if "@graph" in data:
+ h = JSONLDDecoder(data["@graph"], True)
+ else:
+ h = JSONLDDecoder(data, True)
+
+ objectset.decode(h)
+
+ def read(self, f, objectset: SHACLObjectSet):
+ data = json.load(f)
+ self.deserialize_data(data, objectset)
+
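+
+ # Editor's illustrative sketch (not part of the generated module): loading a
+ # JSON-LD document into an object set. The file name is hypothetical.
+ def _example_read(path="example.spdx.json"):
+     objectset = SHACLObjectSet()
+     with open(path, "r") as f:
+         JSONLDDeserializer().read(f, objectset)
+     return objectset
+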
+
+class Encoder(ABC):
+ @abstractmethod
+ def write_string(self, v):
+ """
+ Write a string value
+
+ Encodes the value as a string in the output
+ """
+ pass
+
+ @abstractmethod
+ def write_datetime(self, v):
+ """
+ Write a date & time string
+
+ Encodes the value as an ISO datetime string
+
+ Note: The provided string is already correctly encoded as an ISO datetime
+ """
+ pass
+
+ @abstractmethod
+ def write_integer(self, v):
+ """
+ Write an integer value
+
+ Encodes the value as an integer in the output
+ """
+ pass
+
+ @abstractmethod
+ def write_iri(self, v, compact=None):
+ """
+ Write IRI
+
+ Encodes the string as an IRI. Note that the string will be either a
+ fully qualified IRI or a blank node ID. If `compact` is provided and
+ the serialization supports compacted IRIs, it should be preferred to
+ the full IRI
+ """
+ pass
+
+ @abstractmethod
+ def write_enum(self, v, e, compact=None):
+ """
+ Write enum value IRI
+
+ Encodes the string enum value IRI. Note that the string will be a fully
+ qualified IRI. If `compact` is provided and the serialization supports
+ compacted IRIs, it should be preferred to the full IRI.
+ """
+ pass
+
+ @abstractmethod
+ def write_bool(self, v):
+ """
+ Write boolean
+
+ Encodes the value as a boolean in the output
+ """
+ pass
+
+ @abstractmethod
+ def write_float(self, v):
+ """
+ Write float
+
+ Encodes the value as a floating point number in the output
+ """
+ pass
+
+ @abstractmethod
+ @contextmanager
+ def write_object(self, o, _id, needs_id):
+ """
+ Write object
+
+ A context manager that yields an `Encoder` that can be used to encode
+ the given object properties.
+
+ The provided ID will always be a valid ID (even if o._id is `None`), in
+ case the `Encoder` _must_ have an ID. `needs_id` is a hint to indicate
+ to the `Encoder` if an ID must be written or not (if that is even an
+ option). If it is `True`, the `Encoder` must encode an ID for the
+ object. If `False`, the encoder is not required to encode an ID and may
+ omit it.
+
+ The ID will be either a fully qualified IRI, or a blank node IRI.
+
+ Properties will be written to the object using `write_property`
+ """
+ pass
+
+ @abstractmethod
+ @contextmanager
+ def write_property(self, iri, compact=None):
+ """
+ Write object property
+
+ A context manager that yields an `Encoder` that can be used to encode
+ the value for the property with the given IRI in the current object
+
+ Note that the IRI will be fully qualified. If `compact` is provided and
+ the serialization supports compacted IRIs, it should be preferred to
+ the full IRI.
+ """
+ pass
+
+ @abstractmethod
+ @contextmanager
+ def write_list(self):
+ """
+ Write list
+
+ A context manager that yields an `Encoder` that can be used to encode a
+ list.
+
+ Each item of the list will be added using `write_list_item`
+ """
+ pass
+
+ @abstractmethod
+ @contextmanager
+ def write_list_item(self):
+ """
+ Write list item
+
+ A context manager that yields an `Encoder` that can be used to encode
+ the value for a list item
+ """
+ pass
+
+
+class JSONLDEncoder(Encoder):
+ def __init__(self, data=None):
+ self.data = data
+
+ def write_string(self, v):
+ self.data = v
+
+ def write_datetime(self, v):
+ self.data = v
+
+ def write_integer(self, v):
+ self.data = v
+
+ def write_iri(self, v, compact=None):
+ self.write_string(compact or v)
+
+ def write_enum(self, v, e, compact=None):
+ self.write_string(compact or v)
+
+ def write_bool(self, v):
+ self.data = v
+
+ def write_float(self, v):
+ self.data = str(v)
+
+ @contextmanager
+ def write_property(self, iri, compact=None):
+ s = self.__class__(None)
+ yield s
+ if s.data is not None:
+ self.data[compact or iri] = s.data
+
+ @contextmanager
+ def write_object(self, o, _id, needs_id):
+ self.data = {
+ "type": o.COMPACT_TYPE or o.TYPE,
+ }
+ if needs_id:
+ self.data[o.ID_ALIAS or "@id"] = _id
+ yield self
+
+ @contextmanager
+ def write_list(self):
+ self.data = []
+ yield self
+ if not self.data:
+ self.data = None
+
+ @contextmanager
+ def write_list_item(self):
+ s = self.__class__(None)
+ yield s
+ if s.data is not None:
+ self.data.append(s.data)
+
+
+class JSONLDSerializer(object):
+ def __init__(self, **args):
+ self.args = args
+
+ def serialize_data(
+ self,
+ objectset: SHACLObjectSet,
+ force_at_graph=False,
+ ):
+ h = JSONLDEncoder()
+ objectset.encode(h, force_at_graph)
+ data = {}
+ if len(CONTEXT_URLS) == 1:
+ data["@context"] = CONTEXT_URLS[0]
+ elif CONTEXT_URLS:
+ data["@context"] = CONTEXT_URLS
+
+ if isinstance(h.data, list):
+ data["@graph"] = h.data
+ else:
+ for k, v in h.data.items():
+ data[k] = v
+
+ return data
+
+ def write(
+ self,
+ objectset: SHACLObjectSet,
+ f,
+ force_at_graph=False,
+ **kwargs,
+ ):
+ """
+ Write a SHACLObjectSet to a JSON-LD file
+
+ If force_at_graph is True, a @graph node will always be written
+ """
+ data = self.serialize_data(objectset, force_at_graph)
+
+ args = {**self.args, **kwargs}
+
+ sha1 = hashlib.sha1()
+ for chunk in json.JSONEncoder(**args).iterencode(data):
+ chunk = chunk.encode("utf-8")
+ f.write(chunk)
+ sha1.update(chunk)
+
+ return sha1.hexdigest()
+
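+
+ # Editor's illustrative sketch (not part of the generated module): writing an
+ # object set. The file is opened in binary mode because write() encodes the
+ # JSON chunks to UTF-8 itself; it returns the SHA-1 of the bytes written.
+ # The file name and indent value are hypothetical.
+ def _example_write(objectset, path="example.spdx.json"):
+     with open(path, "wb") as f:
+         return JSONLDSerializer(indent=2).write(objectset, f)
+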
+
+class JSONLDInlineEncoder(Encoder):
+ def __init__(self, f, sha1):
+ self.f = f
+ self.comma = False
+ self.sha1 = sha1
+
+ def write(self, s):
+ s = s.encode("utf-8")
+ self.f.write(s)
+ self.sha1.update(s)
+
+ def _write_comma(self):
+ if self.comma:
+ self.write(",")
+ self.comma = False
+
+ def write_string(self, v):
+ self.write(json.dumps(v))
+
+ def write_datetime(self, v):
+ self.write_string(v)
+
+ def write_integer(self, v):
+ self.write(f"{v}")
+
+ def write_iri(self, v, compact=None):
+ self.write_string(compact or v)
+
+ def write_enum(self, v, e, compact=None):
+ self.write_iri(v, compact)
+
+ def write_bool(self, v):
+ if v:
+ self.write("true")
+ else:
+ self.write("false")
+
+ def write_float(self, v):
+ self.write(json.dumps(str(v)))
+
+ @contextmanager
+ def write_property(self, iri, compact=None):
+ self._write_comma()
+ self.write_string(compact or iri)
+ self.write(":")
+ yield self
+ self.comma = True
+
+ @contextmanager
+ def write_object(self, o, _id, needs_id):
+ self._write_comma()
+
+ self.write("{")
+ self.write_string("type")
+ self.write(":")
+ self.write_string(o.COMPACT_TYPE or o.TYPE)
+ self.comma = True
+
+ if needs_id:
+ self._write_comma()
+ self.write_string(o.ID_ALIAS or "@id")
+ self.write(":")
+ self.write_string(_id)
+ self.comma = True
+
+ self.comma = True
+ yield self
+
+ self.write("}")
+ self.comma = True
+
+ @contextmanager
+ def write_list(self):
+ self._write_comma()
+ self.write("[")
+ yield self.__class__(self.f, self.sha1)
+ self.write("]")
+ self.comma = True
+
+ @contextmanager
+ def write_list_item(self):
+ self._write_comma()
+ yield self.__class__(self.f, self.sha1)
+ self.comma = True
+
+
+class JSONLDInlineSerializer(object):
+ def write(
+ self,
+ objectset: SHACLObjectSet,
+ f,
+ force_at_graph=False,
+ ):
+ """
+ Write a SHACLObjectSet to a JSON-LD file
+
+ Note: force_at_graph is included for compatibility, but ignored. This
+ serializer always writes out a graph
+ """
+ sha1 = hashlib.sha1()
+ h = JSONLDInlineEncoder(f, sha1)
+ h.write('{"@context":')
+ if len(CONTEXT_URLS) == 1:
+ h.write(f'"{CONTEXT_URLS[0]}"')
+ elif CONTEXT_URLS:
+ h.write('["')
+ h.write('","'.join(CONTEXT_URLS))
+ h.write('"]')
+ h.write(",")
+
+ h.write('"@graph":')
+
+ objectset.encode(h, True)
+ h.write("}")
+ return sha1.hexdigest()
+
+
+def print_tree(objects, all_fields=False):
+ """
+ Print object tree
+ """
+ seen = set()
+
+ def callback(value, path):
+ nonlocal seen
+
+ s = (" " * (len(path) - 1)) + f"{path[-1]}"
+ if isinstance(value, SHACLObject):
+ s += f" {value} ({id(value)})"
+ is_empty = False
+ elif isinstance(value, ListProxy):
+ is_empty = len(value) == 0
+ if is_empty:
+ s += " []"
+ else:
+ s += f" {value!r}"
+ is_empty = value is None
+
+ if all_fields or not is_empty:
+ print(s)
+
+ if isinstance(value, SHACLObject):
+ if value in seen:
+ return False
+ seen.add(value)
+ return True
+
+ return True
+
+ for o in objects:
+ o.walk(callback)
+
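+
+ # Editor's note: a typical call (sketch) is
+ #   print_tree(objectset.objects, all_fields=True)
+ # which dumps every object in a set, including empty fields.
+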
+
+# fmt: off
+"""Format Guard"""
+
+
+CONTEXT_URLS = [
+ "https://spdx.org/rdf/3.0.0/spdx-context.jsonld",
+]
+
+_NI_ENCODE_CONTEXT = {
+ "https://spdx.org/rdf/3.0.0/terms/AI/EnergyUnitType/kilowattHour": "ai_EnergyUnitType:kilowattHour",
+ "https://spdx.org/rdf/3.0.0/terms/AI/EnergyUnitType/megajoule": "ai_EnergyUnitType:megajoule",
+ "https://spdx.org/rdf/3.0.0/terms/AI/EnergyUnitType/other": "ai_EnergyUnitType:other",
+ "https://spdx.org/rdf/3.0.0/terms/AI/SafetyRiskAssessmentType/high": "ai_SafetyRiskAssessmentType:high",
+ "https://spdx.org/rdf/3.0.0/terms/AI/SafetyRiskAssessmentType/low": "ai_SafetyRiskAssessmentType:low",
+ "https://spdx.org/rdf/3.0.0/terms/AI/SafetyRiskAssessmentType/medium": "ai_SafetyRiskAssessmentType:medium",
+ "https://spdx.org/rdf/3.0.0/terms/AI/SafetyRiskAssessmentType/serious": "ai_SafetyRiskAssessmentType:serious",
+ "https://spdx.org/rdf/3.0.0/terms/Core/AnnotationType/other": "AnnotationType:other",
+ "https://spdx.org/rdf/3.0.0/terms/Core/AnnotationType/review": "AnnotationType:review",
+ "https://spdx.org/rdf/3.0.0/terms/Core/NoAssertionElement": "spdx:Core/NoAssertionElement",
+ "https://spdx.org/rdf/3.0.0/terms/Core/NoneElement": "spdx:Core/NoneElement",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/cpe22": "ExternalIdentifierType:cpe22",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/cpe23": "ExternalIdentifierType:cpe23",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/cve": "ExternalIdentifierType:cve",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/email": "ExternalIdentifierType:email",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/gitoid": "ExternalIdentifierType:gitoid",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/other": "ExternalIdentifierType:other",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/packageUrl": "ExternalIdentifierType:packageUrl",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/securityOther": "ExternalIdentifierType:securityOther",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/swhid": "ExternalIdentifierType:swhid",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/swid": "ExternalIdentifierType:swid",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/urlScheme": "ExternalIdentifierType:urlScheme",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/altDownloadLocation": "ExternalRefType:altDownloadLocation",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/altWebPage": "ExternalRefType:altWebPage",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/binaryArtifact": "ExternalRefType:binaryArtifact",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/bower": "ExternalRefType:bower",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/buildMeta": "ExternalRefType:buildMeta",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/buildSystem": "ExternalRefType:buildSystem",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/certificationReport": "ExternalRefType:certificationReport",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/chat": "ExternalRefType:chat",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/componentAnalysisReport": "ExternalRefType:componentAnalysisReport",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/cwe": "ExternalRefType:cwe",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/documentation": "ExternalRefType:documentation",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/dynamicAnalysisReport": "ExternalRefType:dynamicAnalysisReport",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/eolNotice": "ExternalRefType:eolNotice",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/exportControlAssessment": "ExternalRefType:exportControlAssessment",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/funding": "ExternalRefType:funding",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/issueTracker": "ExternalRefType:issueTracker",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/license": "ExternalRefType:license",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/mailingList": "ExternalRefType:mailingList",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/mavenCentral": "ExternalRefType:mavenCentral",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/metrics": "ExternalRefType:metrics",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/npm": "ExternalRefType:npm",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/nuget": "ExternalRefType:nuget",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/other": "ExternalRefType:other",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/privacyAssessment": "ExternalRefType:privacyAssessment",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/productMetadata": "ExternalRefType:productMetadata",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/purchaseOrder": "ExternalRefType:purchaseOrder",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/qualityAssessmentReport": "ExternalRefType:qualityAssessmentReport",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/releaseHistory": "ExternalRefType:releaseHistory",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/releaseNotes": "ExternalRefType:releaseNotes",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/riskAssessment": "ExternalRefType:riskAssessment",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/runtimeAnalysisReport": "ExternalRefType:runtimeAnalysisReport",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/secureSoftwareAttestation": "ExternalRefType:secureSoftwareAttestation",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityAdversaryModel": "ExternalRefType:securityAdversaryModel",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityAdvisory": "ExternalRefType:securityAdvisory",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityFix": "ExternalRefType:securityFix",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityOther": "ExternalRefType:securityOther",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityPenTestReport": "ExternalRefType:securityPenTestReport",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityPolicy": "ExternalRefType:securityPolicy",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityThreatModel": "ExternalRefType:securityThreatModel",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/socialMedia": "ExternalRefType:socialMedia",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/sourceArtifact": "ExternalRefType:sourceArtifact",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/staticAnalysisReport": "ExternalRefType:staticAnalysisReport",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/support": "ExternalRefType:support",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/vcs": "ExternalRefType:vcs",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/vulnerabilityDisclosureReport": "ExternalRefType:vulnerabilityDisclosureReport",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/vulnerabilityExploitabilityAssessment": "ExternalRefType:vulnerabilityExploitabilityAssessment",
+ "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/blake2b256": "HashAlgorithm:blake2b256",
+ "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/blake2b384": "HashAlgorithm:blake2b384",
+ "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/blake2b512": "HashAlgorithm:blake2b512",
+ "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/blake3": "HashAlgorithm:blake3",
+ "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/crystalsDilithium": "HashAlgorithm:crystalsDilithium",
+ "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/crystalsKyber": "HashAlgorithm:crystalsKyber",
+ "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/falcon": "HashAlgorithm:falcon",
+ "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/md2": "HashAlgorithm:md2",
+ "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/md4": "HashAlgorithm:md4",
+ "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/md5": "HashAlgorithm:md5",
+ "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/md6": "HashAlgorithm:md6",
+ "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/other": "HashAlgorithm:other",
+ "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha1": "HashAlgorithm:sha1",
+ "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha224": "HashAlgorithm:sha224",
+ "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha256": "HashAlgorithm:sha256",
+ "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha384": "HashAlgorithm:sha384",
+ "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha3_224": "HashAlgorithm:sha3_224",
+ "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha3_256": "HashAlgorithm:sha3_256",
+ "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha3_384": "HashAlgorithm:sha3_384",
+ "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha3_512": "HashAlgorithm:sha3_512",
+ "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha512": "HashAlgorithm:sha512",
+ "https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/build": "LifecycleScopeType:build",
+ "https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/design": "LifecycleScopeType:design",
+ "https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/development": "LifecycleScopeType:development",
+ "https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/other": "LifecycleScopeType:other",
+ "https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/runtime": "LifecycleScopeType:runtime",
+ "https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/test": "LifecycleScopeType:test",
+ "https://spdx.org/rdf/3.0.0/terms/Core/PresenceType/no": "PresenceType:no",
+ "https://spdx.org/rdf/3.0.0/terms/Core/PresenceType/noAssertion": "PresenceType:noAssertion",
+ "https://spdx.org/rdf/3.0.0/terms/Core/PresenceType/yes": "PresenceType:yes",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/ai": "ProfileIdentifierType:ai",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/build": "ProfileIdentifierType:build",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/core": "ProfileIdentifierType:core",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/dataset": "ProfileIdentifierType:dataset",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/expandedLicensing": "ProfileIdentifierType:expandedLicensing",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/extension": "ProfileIdentifierType:extension",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/lite": "ProfileIdentifierType:lite",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/security": "ProfileIdentifierType:security",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/simpleLicensing": "ProfileIdentifierType:simpleLicensing",
+ "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/software": "ProfileIdentifierType:software",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipCompleteness/complete": "RelationshipCompleteness:complete",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipCompleteness/incomplete": "RelationshipCompleteness:incomplete",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipCompleteness/noAssertion": "RelationshipCompleteness:noAssertion",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/affects": "RelationshipType:affects",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/amendedBy": "RelationshipType:amendedBy",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/ancestorOf": "RelationshipType:ancestorOf",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/availableFrom": "RelationshipType:availableFrom",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/configures": "RelationshipType:configures",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/contains": "RelationshipType:contains",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/coordinatedBy": "RelationshipType:coordinatedBy",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/copiedTo": "RelationshipType:copiedTo",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/delegatedTo": "RelationshipType:delegatedTo",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/dependsOn": "RelationshipType:dependsOn",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/descendantOf": "RelationshipType:descendantOf",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/describes": "RelationshipType:describes",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/doesNotAffect": "RelationshipType:doesNotAffect",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/expandsTo": "RelationshipType:expandsTo",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/exploitCreatedBy": "RelationshipType:exploitCreatedBy",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/fixedBy": "RelationshipType:fixedBy",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/fixedIn": "RelationshipType:fixedIn",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/foundBy": "RelationshipType:foundBy",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/generates": "RelationshipType:generates",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasAddedFile": "RelationshipType:hasAddedFile",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasAssessmentFor": "RelationshipType:hasAssessmentFor",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasAssociatedVulnerability": "RelationshipType:hasAssociatedVulnerability",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasConcludedLicense": "RelationshipType:hasConcludedLicense",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDataFile": "RelationshipType:hasDataFile",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDeclaredLicense": "RelationshipType:hasDeclaredLicense",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDeletedFile": "RelationshipType:hasDeletedFile",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDependencyManifest": "RelationshipType:hasDependencyManifest",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDistributionArtifact": "RelationshipType:hasDistributionArtifact",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDocumentation": "RelationshipType:hasDocumentation",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDynamicLink": "RelationshipType:hasDynamicLink",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasEvidence": "RelationshipType:hasEvidence",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasExample": "RelationshipType:hasExample",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasHost": "RelationshipType:hasHost",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasInputs": "RelationshipType:hasInputs",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasMetadata": "RelationshipType:hasMetadata",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasOptionalComponent": "RelationshipType:hasOptionalComponent",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasOptionalDependency": "RelationshipType:hasOptionalDependency",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasOutputs": "RelationshipType:hasOutputs",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasPrerequsite": "RelationshipType:hasPrerequsite",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasProvidedDependency": "RelationshipType:hasProvidedDependency",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasRequirement": "RelationshipType:hasRequirement",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasSpecification": "RelationshipType:hasSpecification",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasStaticLink": "RelationshipType:hasStaticLink",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasTest": "RelationshipType:hasTest",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasTestCase": "RelationshipType:hasTestCase",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasVariant": "RelationshipType:hasVariant",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/invokedBy": "RelationshipType:invokedBy",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/modifiedBy": "RelationshipType:modifiedBy",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/other": "RelationshipType:other",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/packagedBy": "RelationshipType:packagedBy",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/patchedBy": "RelationshipType:patchedBy",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/publishedBy": "RelationshipType:publishedBy",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/reportedBy": "RelationshipType:reportedBy",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/republishedBy": "RelationshipType:republishedBy",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/serializedInArtifact": "RelationshipType:serializedInArtifact",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/testedOn": "RelationshipType:testedOn",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/trainedOn": "RelationshipType:trainedOn",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/underInvestigationFor": "RelationshipType:underInvestigationFor",
+ "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/usesTool": "RelationshipType:usesTool",
+ "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/deployed": "SupportType:deployed",
+ "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/development": "SupportType:development",
+ "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/endOfSupport": "SupportType:endOfSupport",
+ "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/limitedSupport": "SupportType:limitedSupport",
+ "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/noAssertion": "SupportType:noAssertion",
+ "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/noSupport": "SupportType:noSupport",
+ "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/support": "SupportType:support",
+ "https://spdx.org/rdf/3.0.0/terms/Dataset/ConfidentialityLevelType/amber": "dataset_ConfidentialityLevelType:amber",
+ "https://spdx.org/rdf/3.0.0/terms/Dataset/ConfidentialityLevelType/clear": "dataset_ConfidentialityLevelType:clear",
+ "https://spdx.org/rdf/3.0.0/terms/Dataset/ConfidentialityLevelType/green": "dataset_ConfidentialityLevelType:green",
+ "https://spdx.org/rdf/3.0.0/terms/Dataset/ConfidentialityLevelType/red": "dataset_ConfidentialityLevelType:red",
+ "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetAvailabilityType/clickthrough": "dataset_DatasetAvailabilityType:clickthrough",
+ "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetAvailabilityType/directDownload": "dataset_DatasetAvailabilityType:directDownload",
+ "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetAvailabilityType/query": "dataset_DatasetAvailabilityType:query",
+ "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetAvailabilityType/registration": "dataset_DatasetAvailabilityType:registration",
+ "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetAvailabilityType/scrapingScript": "dataset_DatasetAvailabilityType:scrapingScript",
+ "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/audio": "dataset_DatasetType:audio",
+ "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/categorical": "dataset_DatasetType:categorical",
+ "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/graph": "dataset_DatasetType:graph",
+ "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/image": "dataset_DatasetType:image",
+ "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/noAssertion": "dataset_DatasetType:noAssertion",
+ "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/numeric": "dataset_DatasetType:numeric",
+ "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/other": "dataset_DatasetType:other",
+ "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/sensor": "dataset_DatasetType:sensor",
+ "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/structured": "dataset_DatasetType:structured",
+ "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/syntactic": "dataset_DatasetType:syntactic",
+ "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/text": "dataset_DatasetType:text",
+ "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/timeseries": "dataset_DatasetType:timeseries",
+ "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/timestamp": "dataset_DatasetType:timestamp",
+ "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/video": "dataset_DatasetType:video",
+ "https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/critical": "security_CvssSeverityType:critical",
+ "https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/high": "security_CvssSeverityType:high",
+ "https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/low": "security_CvssSeverityType:low",
+ "https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/medium": "security_CvssSeverityType:medium",
+ "https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/none": "security_CvssSeverityType:none",
+ "https://spdx.org/rdf/3.0.0/terms/Security/ExploitCatalogType/kev": "security_ExploitCatalogType:kev",
+ "https://spdx.org/rdf/3.0.0/terms/Security/ExploitCatalogType/other": "security_ExploitCatalogType:other",
+ "https://spdx.org/rdf/3.0.0/terms/Security/SsvcDecisionType/act": "security_SsvcDecisionType:act",
+ "https://spdx.org/rdf/3.0.0/terms/Security/SsvcDecisionType/attend": "security_SsvcDecisionType:attend",
+ "https://spdx.org/rdf/3.0.0/terms/Security/SsvcDecisionType/track": "security_SsvcDecisionType:track",
+ "https://spdx.org/rdf/3.0.0/terms/Security/SsvcDecisionType/trackStar": "security_SsvcDecisionType:trackStar",
+ "https://spdx.org/rdf/3.0.0/terms/Security/VexJustificationType/componentNotPresent": "security_VexJustificationType:componentNotPresent",
+ "https://spdx.org/rdf/3.0.0/terms/Security/VexJustificationType/inlineMitigationsAlreadyExist": "security_VexJustificationType:inlineMitigationsAlreadyExist",
+ "https://spdx.org/rdf/3.0.0/terms/Security/VexJustificationType/vulnerableCodeCannotBeControlledByAdversary": "security_VexJustificationType:vulnerableCodeCannotBeControlledByAdversary",
+ "https://spdx.org/rdf/3.0.0/terms/Security/VexJustificationType/vulnerableCodeNotInExecutePath": "security_VexJustificationType:vulnerableCodeNotInExecutePath",
+ "https://spdx.org/rdf/3.0.0/terms/Security/VexJustificationType/vulnerableCodeNotPresent": "security_VexJustificationType:vulnerableCodeNotPresent",
+ "https://spdx.org/rdf/3.0.0/terms/Software/ContentIdentifierType/gitoid": "software_ContentIdentifierType:gitoid",
+ "https://spdx.org/rdf/3.0.0/terms/Software/ContentIdentifierType/swhid": "software_ContentIdentifierType:swhid",
+ "https://spdx.org/rdf/3.0.0/terms/Software/FileKindType/directory": "software_FileKindType:directory",
+ "https://spdx.org/rdf/3.0.0/terms/Software/FileKindType/file": "software_FileKindType:file",
+ "https://spdx.org/rdf/3.0.0/terms/Software/SbomType/analyzed": "software_SbomType:analyzed",
+ "https://spdx.org/rdf/3.0.0/terms/Software/SbomType/build": "software_SbomType:build",
+ "https://spdx.org/rdf/3.0.0/terms/Software/SbomType/deployed": "software_SbomType:deployed",
+ "https://spdx.org/rdf/3.0.0/terms/Software/SbomType/design": "software_SbomType:design",
+ "https://spdx.org/rdf/3.0.0/terms/Software/SbomType/runtime": "software_SbomType:runtime",
+ "https://spdx.org/rdf/3.0.0/terms/Software/SbomType/source": "software_SbomType:source",
+ "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/application": "software_SoftwarePurpose:application",
+ "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/archive": "software_SoftwarePurpose:archive",
+ "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/bom": "software_SoftwarePurpose:bom",
+ "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/configuration": "software_SoftwarePurpose:configuration",
+ "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/container": "software_SoftwarePurpose:container",
+ "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/data": "software_SoftwarePurpose:data",
+ "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/device": "software_SoftwarePurpose:device",
+ "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/deviceDriver": "software_SoftwarePurpose:deviceDriver",
+ "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/diskImage": "software_SoftwarePurpose:diskImage",
+ "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/documentation": "software_SoftwarePurpose:documentation",
+ "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/evidence": "software_SoftwarePurpose:evidence",
+ "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/executable": "software_SoftwarePurpose:executable",
+ "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/file": "software_SoftwarePurpose:file",
+ "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/filesystemImage": "software_SoftwarePurpose:filesystemImage",
+ "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/firmware": "software_SoftwarePurpose:firmware",
+ "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/framework": "software_SoftwarePurpose:framework",
+ "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/install": "software_SoftwarePurpose:install",
+ "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/library": "software_SoftwarePurpose:library",
+ "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/manifest": "software_SoftwarePurpose:manifest",
+ "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/model": "software_SoftwarePurpose:model",
+ "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/module": "software_SoftwarePurpose:module",
+ "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/operatingSystem": "software_SoftwarePurpose:operatingSystem",
+ "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/other": "software_SoftwarePurpose:other",
+ "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/patch": "software_SoftwarePurpose:patch",
+ "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/platform": "software_SoftwarePurpose:platform",
+ "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/requirement": "software_SoftwarePurpose:requirement",
+ "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/source": "software_SoftwarePurpose:source",
+ "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/specification": "software_SoftwarePurpose:specification",
+ "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/test": "software_SoftwarePurpose:test",
+ "https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/NoAssertionLicense": "spdx:ExpandedLicensing/NoAssertionLicense",
+ "https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/NoneLicense": "spdx:ExpandedLicensing/NoneLicense",
+}
+
+_NI_DECODE_CONTEXT = {
+ "ai_EnergyUnitType:kilowattHour": "https://spdx.org/rdf/3.0.0/terms/AI/EnergyUnitType/kilowattHour",
+ "ai_EnergyUnitType:megajoule": "https://spdx.org/rdf/3.0.0/terms/AI/EnergyUnitType/megajoule",
+ "ai_EnergyUnitType:other": "https://spdx.org/rdf/3.0.0/terms/AI/EnergyUnitType/other",
+ "ai_SafetyRiskAssessmentType:high": "https://spdx.org/rdf/3.0.0/terms/AI/SafetyRiskAssessmentType/high",
+ "ai_SafetyRiskAssessmentType:low": "https://spdx.org/rdf/3.0.0/terms/AI/SafetyRiskAssessmentType/low",
+ "ai_SafetyRiskAssessmentType:medium": "https://spdx.org/rdf/3.0.0/terms/AI/SafetyRiskAssessmentType/medium",
+ "ai_SafetyRiskAssessmentType:serious": "https://spdx.org/rdf/3.0.0/terms/AI/SafetyRiskAssessmentType/serious",
+ "AnnotationType:other": "https://spdx.org/rdf/3.0.0/terms/Core/AnnotationType/other",
+ "AnnotationType:review": "https://spdx.org/rdf/3.0.0/terms/Core/AnnotationType/review",
+ "spdx:Core/NoAssertionElement": "https://spdx.org/rdf/3.0.0/terms/Core/NoAssertionElement",
+ "spdx:Core/NoneElement": "https://spdx.org/rdf/3.0.0/terms/Core/NoneElement",
+ "ExternalIdentifierType:cpe22": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/cpe22",
+ "ExternalIdentifierType:cpe23": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/cpe23",
+ "ExternalIdentifierType:cve": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/cve",
+ "ExternalIdentifierType:email": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/email",
+ "ExternalIdentifierType:gitoid": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/gitoid",
+ "ExternalIdentifierType:other": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/other",
+ "ExternalIdentifierType:packageUrl": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/packageUrl",
+ "ExternalIdentifierType:securityOther": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/securityOther",
+ "ExternalIdentifierType:swhid": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/swhid",
+ "ExternalIdentifierType:swid": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/swid",
+ "ExternalIdentifierType:urlScheme": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/urlScheme",
+ "ExternalRefType:altDownloadLocation": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/altDownloadLocation",
+ "ExternalRefType:altWebPage": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/altWebPage",
+ "ExternalRefType:binaryArtifact": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/binaryArtifact",
+ "ExternalRefType:bower": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/bower",
+ "ExternalRefType:buildMeta": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/buildMeta",
+ "ExternalRefType:buildSystem": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/buildSystem",
+ "ExternalRefType:certificationReport": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/certificationReport",
+ "ExternalRefType:chat": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/chat",
+ "ExternalRefType:componentAnalysisReport": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/componentAnalysisReport",
+ "ExternalRefType:cwe": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/cwe",
+ "ExternalRefType:documentation": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/documentation",
+ "ExternalRefType:dynamicAnalysisReport": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/dynamicAnalysisReport",
+ "ExternalRefType:eolNotice": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/eolNotice",
+ "ExternalRefType:exportControlAssessment": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/exportControlAssessment",
+ "ExternalRefType:funding": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/funding",
+ "ExternalRefType:issueTracker": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/issueTracker",
+ "ExternalRefType:license": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/license",
+ "ExternalRefType:mailingList": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/mailingList",
+ "ExternalRefType:mavenCentral": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/mavenCentral",
+ "ExternalRefType:metrics": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/metrics",
+ "ExternalRefType:npm": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/npm",
+ "ExternalRefType:nuget": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/nuget",
+ "ExternalRefType:other": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/other",
+ "ExternalRefType:privacyAssessment": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/privacyAssessment",
+ "ExternalRefType:productMetadata": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/productMetadata",
+ "ExternalRefType:purchaseOrder": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/purchaseOrder",
+ "ExternalRefType:qualityAssessmentReport": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/qualityAssessmentReport",
+ "ExternalRefType:releaseHistory": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/releaseHistory",
+ "ExternalRefType:releaseNotes": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/releaseNotes",
+ "ExternalRefType:riskAssessment": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/riskAssessment",
+ "ExternalRefType:runtimeAnalysisReport": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/runtimeAnalysisReport",
+ "ExternalRefType:secureSoftwareAttestation": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/secureSoftwareAttestation",
+ "ExternalRefType:securityAdversaryModel": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityAdversaryModel",
+ "ExternalRefType:securityAdvisory": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityAdvisory",
+ "ExternalRefType:securityFix": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityFix",
+ "ExternalRefType:securityOther": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityOther",
+ "ExternalRefType:securityPenTestReport": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityPenTestReport",
+ "ExternalRefType:securityPolicy": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityPolicy",
+ "ExternalRefType:securityThreatModel": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityThreatModel",
+ "ExternalRefType:socialMedia": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/socialMedia",
+ "ExternalRefType:sourceArtifact": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/sourceArtifact",
+ "ExternalRefType:staticAnalysisReport": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/staticAnalysisReport",
+ "ExternalRefType:support": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/support",
+ "ExternalRefType:vcs": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/vcs",
+ "ExternalRefType:vulnerabilityDisclosureReport": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/vulnerabilityDisclosureReport",
+ "ExternalRefType:vulnerabilityExploitabilityAssessment": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/vulnerabilityExploitabilityAssessment",
+ "HashAlgorithm:blake2b256": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/blake2b256",
+ "HashAlgorithm:blake2b384": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/blake2b384",
+ "HashAlgorithm:blake2b512": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/blake2b512",
+ "HashAlgorithm:blake3": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/blake3",
+ "HashAlgorithm:crystalsDilithium": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/crystalsDilithium",
+ "HashAlgorithm:crystalsKyber": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/crystalsKyber",
+ "HashAlgorithm:falcon": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/falcon",
+ "HashAlgorithm:md2": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/md2",
+ "HashAlgorithm:md4": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/md4",
+ "HashAlgorithm:md5": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/md5",
+ "HashAlgorithm:md6": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/md6",
+ "HashAlgorithm:other": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/other",
+ "HashAlgorithm:sha1": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha1",
+ "HashAlgorithm:sha224": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha224",
+ "HashAlgorithm:sha256": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha256",
+ "HashAlgorithm:sha384": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha384",
+ "HashAlgorithm:sha3_224": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha3_224",
+ "HashAlgorithm:sha3_256": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha3_256",
+ "HashAlgorithm:sha3_384": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha3_384",
+ "HashAlgorithm:sha3_512": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha3_512",
+ "HashAlgorithm:sha512": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha512",
+ "LifecycleScopeType:build": "https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/build",
+ "LifecycleScopeType:design": "https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/design",
+ "LifecycleScopeType:development": "https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/development",
+ "LifecycleScopeType:other": "https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/other",
+ "LifecycleScopeType:runtime": "https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/runtime",
+ "LifecycleScopeType:test": "https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/test",
+ "PresenceType:no": "https://spdx.org/rdf/3.0.0/terms/Core/PresenceType/no",
+ "PresenceType:noAssertion": "https://spdx.org/rdf/3.0.0/terms/Core/PresenceType/noAssertion",
+ "PresenceType:yes": "https://spdx.org/rdf/3.0.0/terms/Core/PresenceType/yes",
+ "ProfileIdentifierType:ai": "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/ai",
+ "ProfileIdentifierType:build": "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/build",
+ "ProfileIdentifierType:core": "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/core",
+ "ProfileIdentifierType:dataset": "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/dataset",
+ "ProfileIdentifierType:expandedLicensing": "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/expandedLicensing",
+ "ProfileIdentifierType:extension": "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/extension",
+ "ProfileIdentifierType:lite": "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/lite",
+ "ProfileIdentifierType:security": "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/security",
+ "ProfileIdentifierType:simpleLicensing": "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/simpleLicensing",
+ "ProfileIdentifierType:software": "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/software",
+ "RelationshipCompleteness:complete": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipCompleteness/complete",
+ "RelationshipCompleteness:incomplete": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipCompleteness/incomplete",
+ "RelationshipCompleteness:noAssertion": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipCompleteness/noAssertion",
+ "RelationshipType:affects": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/affects",
+ "RelationshipType:amendedBy": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/amendedBy",
+ "RelationshipType:ancestorOf": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/ancestorOf",
+ "RelationshipType:availableFrom": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/availableFrom",
+ "RelationshipType:configures": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/configures",
+ "RelationshipType:contains": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/contains",
+ "RelationshipType:coordinatedBy": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/coordinatedBy",
+ "RelationshipType:copiedTo": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/copiedTo",
+ "RelationshipType:delegatedTo": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/delegatedTo",
+ "RelationshipType:dependsOn": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/dependsOn",
+ "RelationshipType:descendantOf": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/descendantOf",
+ "RelationshipType:describes": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/describes",
+ "RelationshipType:doesNotAffect": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/doesNotAffect",
+ "RelationshipType:expandsTo": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/expandsTo",
+ "RelationshipType:exploitCreatedBy": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/exploitCreatedBy",
+ "RelationshipType:fixedBy": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/fixedBy",
+ "RelationshipType:fixedIn": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/fixedIn",
+ "RelationshipType:foundBy": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/foundBy",
+ "RelationshipType:generates": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/generates",
+ "RelationshipType:hasAddedFile": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasAddedFile",
+ "RelationshipType:hasAssessmentFor": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasAssessmentFor",
+ "RelationshipType:hasAssociatedVulnerability": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasAssociatedVulnerability",
+ "RelationshipType:hasConcludedLicense": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasConcludedLicense",
+ "RelationshipType:hasDataFile": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDataFile",
+ "RelationshipType:hasDeclaredLicense": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDeclaredLicense",
+ "RelationshipType:hasDeletedFile": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDeletedFile",
+ "RelationshipType:hasDependencyManifest": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDependencyManifest",
+ "RelationshipType:hasDistributionArtifact": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDistributionArtifact",
+ "RelationshipType:hasDocumentation": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDocumentation",
+ "RelationshipType:hasDynamicLink": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDynamicLink",
+ "RelationshipType:hasEvidence": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasEvidence",
+ "RelationshipType:hasExample": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasExample",
+ "RelationshipType:hasHost": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasHost",
+ "RelationshipType:hasInputs": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasInputs",
+ "RelationshipType:hasMetadata": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasMetadata",
+ "RelationshipType:hasOptionalComponent": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasOptionalComponent",
+ "RelationshipType:hasOptionalDependency": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasOptionalDependency",
+ "RelationshipType:hasOutputs": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasOutputs",
+ "RelationshipType:hasPrerequsite": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasPrerequsite",
+ "RelationshipType:hasProvidedDependency": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasProvidedDependency",
+ "RelationshipType:hasRequirement": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasRequirement",
+ "RelationshipType:hasSpecification": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasSpecification",
+ "RelationshipType:hasStaticLink": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasStaticLink",
+ "RelationshipType:hasTest": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasTest",
+ "RelationshipType:hasTestCase": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasTestCase",
+ "RelationshipType:hasVariant": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasVariant",
+ "RelationshipType:invokedBy": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/invokedBy",
+ "RelationshipType:modifiedBy": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/modifiedBy",
+ "RelationshipType:other": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/other",
+ "RelationshipType:packagedBy": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/packagedBy",
+ "RelationshipType:patchedBy": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/patchedBy",
+ "RelationshipType:publishedBy": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/publishedBy",
+ "RelationshipType:reportedBy": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/reportedBy",
+ "RelationshipType:republishedBy": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/republishedBy",
+ "RelationshipType:serializedInArtifact": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/serializedInArtifact",
+ "RelationshipType:testedOn": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/testedOn",
+ "RelationshipType:trainedOn": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/trainedOn",
+ "RelationshipType:underInvestigationFor": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/underInvestigationFor",
+ "RelationshipType:usesTool": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/usesTool",
+ "SupportType:deployed": "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/deployed",
+ "SupportType:development": "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/development",
+ "SupportType:endOfSupport": "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/endOfSupport",
+ "SupportType:limitedSupport": "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/limitedSupport",
+ "SupportType:noAssertion": "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/noAssertion",
+ "SupportType:noSupport": "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/noSupport",
+ "SupportType:support": "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/support",
+ "dataset_ConfidentialityLevelType:amber": "https://spdx.org/rdf/3.0.0/terms/Dataset/ConfidentialityLevelType/amber",
+ "dataset_ConfidentialityLevelType:clear": "https://spdx.org/rdf/3.0.0/terms/Dataset/ConfidentialityLevelType/clear",
+ "dataset_ConfidentialityLevelType:green": "https://spdx.org/rdf/3.0.0/terms/Dataset/ConfidentialityLevelType/green",
+ "dataset_ConfidentialityLevelType:red": "https://spdx.org/rdf/3.0.0/terms/Dataset/ConfidentialityLevelType/red",
+ "dataset_DatasetAvailabilityType:clickthrough": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetAvailabilityType/clickthrough",
+ "dataset_DatasetAvailabilityType:directDownload": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetAvailabilityType/directDownload",
+ "dataset_DatasetAvailabilityType:query": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetAvailabilityType/query",
+ "dataset_DatasetAvailabilityType:registration": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetAvailabilityType/registration",
+ "dataset_DatasetAvailabilityType:scrapingScript": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetAvailabilityType/scrapingScript",
+ "dataset_DatasetType:audio": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/audio",
+ "dataset_DatasetType:categorical": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/categorical",
+ "dataset_DatasetType:graph": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/graph",
+ "dataset_DatasetType:image": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/image",
+ "dataset_DatasetType:noAssertion": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/noAssertion",
+ "dataset_DatasetType:numeric": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/numeric",
+ "dataset_DatasetType:other": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/other",
+ "dataset_DatasetType:sensor": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/sensor",
+ "dataset_DatasetType:structured": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/structured",
+ "dataset_DatasetType:syntactic": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/syntactic",
+ "dataset_DatasetType:text": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/text",
+ "dataset_DatasetType:timeseries": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/timeseries",
+ "dataset_DatasetType:timestamp": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/timestamp",
+ "dataset_DatasetType:video": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/video",
+ "security_CvssSeverityType:critical": "https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/critical",
+ "security_CvssSeverityType:high": "https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/high",
+ "security_CvssSeverityType:low": "https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/low",
+ "security_CvssSeverityType:medium": "https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/medium",
+ "security_CvssSeverityType:none": "https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/none",
+ "security_ExploitCatalogType:kev": "https://spdx.org/rdf/3.0.0/terms/Security/ExploitCatalogType/kev",
+ "security_ExploitCatalogType:other": "https://spdx.org/rdf/3.0.0/terms/Security/ExploitCatalogType/other",
+ "security_SsvcDecisionType:act": "https://spdx.org/rdf/3.0.0/terms/Security/SsvcDecisionType/act",
+ "security_SsvcDecisionType:attend": "https://spdx.org/rdf/3.0.0/terms/Security/SsvcDecisionType/attend",
+ "security_SsvcDecisionType:track": "https://spdx.org/rdf/3.0.0/terms/Security/SsvcDecisionType/track",
+ "security_SsvcDecisionType:trackStar": "https://spdx.org/rdf/3.0.0/terms/Security/SsvcDecisionType/trackStar",
+ "security_VexJustificationType:componentNotPresent": "https://spdx.org/rdf/3.0.0/terms/Security/VexJustificationType/componentNotPresent",
+ "security_VexJustificationType:inlineMitigationsAlreadyExist": "https://spdx.org/rdf/3.0.0/terms/Security/VexJustificationType/inlineMitigationsAlreadyExist",
+ "security_VexJustificationType:vulnerableCodeCannotBeControlledByAdversary": "https://spdx.org/rdf/3.0.0/terms/Security/VexJustificationType/vulnerableCodeCannotBeControlledByAdversary",
+ "security_VexJustificationType:vulnerableCodeNotInExecutePath": "https://spdx.org/rdf/3.0.0/terms/Security/VexJustificationType/vulnerableCodeNotInExecutePath",
+ "security_VexJustificationType:vulnerableCodeNotPresent": "https://spdx.org/rdf/3.0.0/terms/Security/VexJustificationType/vulnerableCodeNotPresent",
+ "software_ContentIdentifierType:gitoid": "https://spdx.org/rdf/3.0.0/terms/Software/ContentIdentifierType/gitoid",
+ "software_ContentIdentifierType:swhid": "https://spdx.org/rdf/3.0.0/terms/Software/ContentIdentifierType/swhid",
+ "software_FileKindType:directory": "https://spdx.org/rdf/3.0.0/terms/Software/FileKindType/directory",
+ "software_FileKindType:file": "https://spdx.org/rdf/3.0.0/terms/Software/FileKindType/file",
+ "software_SbomType:analyzed": "https://spdx.org/rdf/3.0.0/terms/Software/SbomType/analyzed",
+ "software_SbomType:build": "https://spdx.org/rdf/3.0.0/terms/Software/SbomType/build",
+ "software_SbomType:deployed": "https://spdx.org/rdf/3.0.0/terms/Software/SbomType/deployed",
+ "software_SbomType:design": "https://spdx.org/rdf/3.0.0/terms/Software/SbomType/design",
+ "software_SbomType:runtime": "https://spdx.org/rdf/3.0.0/terms/Software/SbomType/runtime",
+ "software_SbomType:source": "https://spdx.org/rdf/3.0.0/terms/Software/SbomType/source",
+ "software_SoftwarePurpose:application": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/application",
+ "software_SoftwarePurpose:archive": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/archive",
+ "software_SoftwarePurpose:bom": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/bom",
+ "software_SoftwarePurpose:configuration": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/configuration",
+ "software_SoftwarePurpose:container": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/container",
+ "software_SoftwarePurpose:data": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/data",
+ "software_SoftwarePurpose:device": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/device",
+ "software_SoftwarePurpose:deviceDriver": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/deviceDriver",
+ "software_SoftwarePurpose:diskImage": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/diskImage",
+ "software_SoftwarePurpose:documentation": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/documentation",
+ "software_SoftwarePurpose:evidence": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/evidence",
+ "software_SoftwarePurpose:executable": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/executable",
+ "software_SoftwarePurpose:file": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/file",
+ "software_SoftwarePurpose:filesystemImage": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/filesystemImage",
+ "software_SoftwarePurpose:firmware": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/firmware",
+ "software_SoftwarePurpose:framework": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/framework",
+ "software_SoftwarePurpose:install": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/install",
+ "software_SoftwarePurpose:library": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/library",
+ "software_SoftwarePurpose:manifest": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/manifest",
+ "software_SoftwarePurpose:model": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/model",
+ "software_SoftwarePurpose:module": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/module",
+ "software_SoftwarePurpose:operatingSystem": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/operatingSystem",
+ "software_SoftwarePurpose:other": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/other",
+ "software_SoftwarePurpose:patch": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/patch",
+ "software_SoftwarePurpose:platform": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/platform",
+ "software_SoftwarePurpose:requirement": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/requirement",
+ "software_SoftwarePurpose:source": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/source",
+ "software_SoftwarePurpose:specification": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/specification",
+ "software_SoftwarePurpose:test": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/test",
+ "spdx:ExpandedLicensing/NoAssertionLicense": "https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/NoAssertionLicense",
+ "spdx:ExpandedLicensing/NoneLicense": "https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/NoneLicense",
+}
+
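+ # Illustrative note (not part of the generated output): the two tables above
+ # are mirror images of each other. The first maps full named-individual IRIs
+ # to compact "prefix:name" strings and _NI_DECODE_CONTEXT maps them back, so a
+ # round trip is just two dict lookups. A minimal sketch:
+ #
+ #   compact = "HashAlgorithm:sha256"
+ #   iri = _NI_DECODE_CONTEXT[compact]
+ #   # iri == "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha256"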
+
+# CLASSES
+# The class that contains properties to describe energy consumption incurred
+# by an AI model in different stages of its lifecycle.
+@register("https://spdx.org/rdf/3.0.0/terms/AI/EnergyConsumption", compact_type="ai_EnergyConsumption", abstract=False)
+class ai_EnergyConsumption(SHACLObject):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ NAMED_INDIVIDUALS = {
+ }
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ # Specifies the amount of energy consumed when finetuning the AI model that is
+ # being used in the AI system.
+ cls._add_property(
+ "ai_finetuningEnergyConsumption",
+ ListProp(ObjectProp(ai_EnergyConsumptionDescription, False)),
+ iri="https://spdx.org/rdf/3.0.0/terms/AI/finetuningEnergyConsumption",
+ compact="ai_finetuningEnergyConsumption",
+ )
+ # Specifies the amount of energy consumed during inference time by an AI model
+ # that is being used in the AI system.
+ cls._add_property(
+ "ai_inferenceEnergyConsumption",
+ ListProp(ObjectProp(ai_EnergyConsumptionDescription, False)),
+ iri="https://spdx.org/rdf/3.0.0/terms/AI/inferenceEnergyConsumption",
+ compact="ai_inferenceEnergyConsumption",
+ )
+ # Specifies the amount of energy consumed when training the AI model that is
+ # being used in the AI system.
+ cls._add_property(
+ "ai_trainingEnergyConsumption",
+ ListProp(ObjectProp(ai_EnergyConsumptionDescription, False)),
+ iri="https://spdx.org/rdf/3.0.0/terms/AI/trainingEnergyConsumption",
+ compact="ai_trainingEnergyConsumption",
+ )
+
+
+# The class that helps note down the quantity of energy consumption and the unit
+# used for measurement.
+@register("https://spdx.org/rdf/3.0.0/terms/AI/EnergyConsumptionDescription", compact_type="ai_EnergyConsumptionDescription", abstract=False)
+class ai_EnergyConsumptionDescription(SHACLObject):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ NAMED_INDIVIDUALS = {
+ }
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ # Represents the energy quantity.
+ cls._add_property(
+ "ai_energyQuantity",
+ FloatProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/AI/energyQuantity",
+ min_count=1,
+ compact="ai_energyQuantity",
+ )
+ # Specifies the unit in which energy is measured.
+ cls._add_property(
+ "ai_energyUnit",
+ EnumProp([
+ ("https://spdx.org/rdf/3.0.0/terms/AI/EnergyUnitType/kilowattHour", "kilowattHour"),
+ ("https://spdx.org/rdf/3.0.0/terms/AI/EnergyUnitType/megajoule", "megajoule"),
+ ("https://spdx.org/rdf/3.0.0/terms/AI/EnergyUnitType/other", "other"),
+ ]),
+ iri="https://spdx.org/rdf/3.0.0/terms/AI/energyUnit",
+ min_count=1,
+ compact="ai_energyUnit",
+ )
+
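+ # Usage sketch (illustrative only, not part of the generated bindings): an
+ # energy consumption record might be assembled as below, assuming the
+ # SHACLObject base class exposes registered properties as attributes and
+ # that list-valued properties start out as appendable lists. The
+ # ai_EnergyUnitType constants are defined just below.
+ #
+ #   desc = ai_EnergyConsumptionDescription()
+ #   desc.ai_energyQuantity = 13.5
+ #   desc.ai_energyUnit = ai_EnergyUnitType.kilowattHour
+ #   consumption = ai_EnergyConsumption()
+ #   consumption.ai_trainingEnergyConsumption.append(desc)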
+
+# Specifies the unit of energy consumption.
+@register("https://spdx.org/rdf/3.0.0/terms/AI/EnergyUnitType", compact_type="ai_EnergyUnitType", abstract=False)
+class ai_EnergyUnitType(SHACLObject):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ NAMED_INDIVIDUALS = {
+ "kilowattHour": "https://spdx.org/rdf/3.0.0/terms/AI/EnergyUnitType/kilowattHour",
+ "megajoule": "https://spdx.org/rdf/3.0.0/terms/AI/EnergyUnitType/megajoule",
+ "other": "https://spdx.org/rdf/3.0.0/terms/AI/EnergyUnitType/other",
+ }
+ # Kilowatt-hour.
+ kilowattHour = "https://spdx.org/rdf/3.0.0/terms/AI/EnergyUnitType/kilowattHour"
+ # Megajoule.
+ megajoule = "https://spdx.org/rdf/3.0.0/terms/AI/EnergyUnitType/megajoule"
+ # Any other units of energy measurement.
+ other = "https://spdx.org/rdf/3.0.0/terms/AI/EnergyUnitType/other"
+
+
+# Specifies the safety risk level.
+@register("https://spdx.org/rdf/3.0.0/terms/AI/SafetyRiskAssessmentType", compact_type="ai_SafetyRiskAssessmentType", abstract=False)
+class ai_SafetyRiskAssessmentType(SHACLObject):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ NAMED_INDIVIDUALS = {
+ "high": "https://spdx.org/rdf/3.0.0/terms/AI/SafetyRiskAssessmentType/high",
+ "low": "https://spdx.org/rdf/3.0.0/terms/AI/SafetyRiskAssessmentType/low",
+ "medium": "https://spdx.org/rdf/3.0.0/terms/AI/SafetyRiskAssessmentType/medium",
+ "serious": "https://spdx.org/rdf/3.0.0/terms/AI/SafetyRiskAssessmentType/serious",
+ }
+ # The second-highest level of risk posed by an AI system.
+ high = "https://spdx.org/rdf/3.0.0/terms/AI/SafetyRiskAssessmentType/high"
+ # Low/no risk is posed by an AI system.
+ low = "https://spdx.org/rdf/3.0.0/terms/AI/SafetyRiskAssessmentType/low"
+ # The third-highest level of risk posed by an AI system.
+ medium = "https://spdx.org/rdf/3.0.0/terms/AI/SafetyRiskAssessmentType/medium"
+ # The highest level of risk posed by an AI system.
+ serious = "https://spdx.org/rdf/3.0.0/terms/AI/SafetyRiskAssessmentType/serious"
+
+
+# Specifies the type of an annotation.
+@register("https://spdx.org/rdf/3.0.0/terms/Core/AnnotationType", compact_type="AnnotationType", abstract=False)
+class AnnotationType(SHACLObject):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ NAMED_INDIVIDUALS = {
+ "other": "https://spdx.org/rdf/3.0.0/terms/Core/AnnotationType/other",
+ "review": "https://spdx.org/rdf/3.0.0/terms/Core/AnnotationType/review",
+ }
+ # Used to store extra information about an Element which is not part of a Review (e.g. extra information provided during the creation of the Element).
+ other = "https://spdx.org/rdf/3.0.0/terms/Core/AnnotationType/other"
+ # Used when someone reviews the Element.
+ review = "https://spdx.org/rdf/3.0.0/terms/Core/AnnotationType/review"
+
+
+# Provides information about the creation of the Element.
+@register("https://spdx.org/rdf/3.0.0/terms/Core/CreationInfo", compact_type="CreationInfo", abstract=False)
+class CreationInfo(SHACLObject):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ NAMED_INDIVIDUALS = {
+ }
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ # Provide consumers with comments by the creator of the Element about the
+ # Element.
+ cls._add_property(
+ "comment",
+ StringProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/comment",
+ compact="comment",
+ )
+ # Identifies when the Element was originally created.
+ cls._add_property(
+ "created",
+ DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/created",
+ min_count=1,
+ compact="created",
+ )
+ # Identifies who or what created the Element.
+ cls._add_property(
+ "createdBy",
+ ListProp(ObjectProp(Agent, False)),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/createdBy",
+ min_count=1,
+ compact="createdBy",
+ )
+ # Identifies the tooling that was used during the creation of the Element.
+ cls._add_property(
+ "createdUsing",
+ ListProp(ObjectProp(Tool, False)),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/createdUsing",
+ compact="createdUsing",
+ )
+ # Provides a reference number that can be used to understand how to parse and interpret an Element.
+ cls._add_property(
+ "specVersion",
+ StringProp(pattern=r"^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$",),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/specVersion",
+ min_count=1,
+ compact="specVersion",
+ )
+
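+ # Usage sketch (illustrative only): a minimal CreationInfo could be filled in
+ # as below, assuming attribute-style access via SHACLObject, that
+ # DateTimeStampProp accepts datetime objects, and that "creator" is an Agent
+ # subclass instance built elsewhere:
+ #
+ #   from datetime import datetime, timezone
+ #   ci = CreationInfo()
+ #   ci.specVersion = "3.0.0"
+ #   ci.created = datetime.now(timezone.utc)
+ #   ci.createdBy.append(creator)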
+
+# A key with an associated value.
+@register("https://spdx.org/rdf/3.0.0/terms/Core/DictionaryEntry", compact_type="DictionaryEntry", abstract=False)
+class DictionaryEntry(SHACLObject):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ NAMED_INDIVIDUALS = {
+ }
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ # A key used in a generic key-value pair.
+ cls._add_property(
+ "key",
+ StringProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/key",
+ min_count=1,
+ compact="key",
+ )
+ # A value used in a generic key-value pair.
+ cls._add_property(
+ "value",
+ StringProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/value",
+ compact="value",
+ )
+
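+ # Usage sketch (illustrative only): a generic key/value pair, assuming
+ # attribute-style access on SHACLObject instances; the values are hypothetical:
+ #
+ #   entry = DictionaryEntry()
+ #   entry.key = "layer"
+ #   entry.value = "meta-example"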
+
+# Base domain class from which all other SPDX-3.0 domain classes derive.
+@register("https://spdx.org/rdf/3.0.0/terms/Core/Element", compact_type="Element", abstract=True)
+class Element(SHACLObject):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ ID_ALIAS = "spdxId"
+ NAMED_INDIVIDUALS = {
+ "NoAssertionElement": "https://spdx.org/rdf/3.0.0/terms/Core/NoAssertionElement",
+ "NoneElement": "https://spdx.org/rdf/3.0.0/terms/Core/NoneElement",
+ }
+ # An Individual Value for Element representing a set of Elements of unknown
+ # identity or cardinality (number).
+ NoAssertionElement = "https://spdx.org/rdf/3.0.0/terms/Core/NoAssertionElement"
+ # An Individual Value for Element representing a set of Elements with
+ # cardinality (number/count) of zero.
+ NoneElement = "https://spdx.org/rdf/3.0.0/terms/Core/NoneElement"
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ # Provide consumers with comments by the creator of the Element about the
+ # Element.
+ cls._add_property(
+ "comment",
+ StringProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/comment",
+ compact="comment",
+ )
+ # Provides information about the creation of the Element.
+ cls._add_property(
+ "creationInfo",
+ ObjectProp(CreationInfo, True),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/creationInfo",
+ min_count=1,
+ compact="creationInfo",
+ )
+ # Provides a detailed description of the Element.
+ cls._add_property(
+ "description",
+ StringProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/description",
+ compact="description",
+ )
+ # Specifies an Extension characterization of some aspect of an Element.
+ cls._add_property(
+ "extension",
+ ListProp(ObjectProp(extension_Extension, False)),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/extension",
+ compact="extension",
+ )
+ # Provides a reference to a resource outside the scope of SPDX-3.0 content
+ # that uniquely identifies an Element.
+ cls._add_property(
+ "externalIdentifier",
+ ListProp(ObjectProp(ExternalIdentifier, False)),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/externalIdentifier",
+ compact="externalIdentifier",
+ )
+ # Points to a resource outside the scope of the SPDX-3.0 content
+ # that provides additional characteristics of an Element.
+ cls._add_property(
+ "externalRef",
+ ListProp(ObjectProp(ExternalRef, False)),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/externalRef",
+ compact="externalRef",
+ )
+ # Identifies the name of an Element as designated by the creator.
+ cls._add_property(
+ "name",
+ StringProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/name",
+ compact="name",
+ )
+ # A short description of an Element.
+ cls._add_property(
+ "summary",
+ StringProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/summary",
+ compact="summary",
+ )
+ # Provides an IntegrityMethod with which the integrity of an Element can be
+ # asserted.
+ cls._add_property(
+ "verifiedUsing",
+ ListProp(ObjectProp(IntegrityMethod, False)),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/verifiedUsing",
+ compact="verifiedUsing",
+ )
+
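+ # Usage sketch (illustrative only): Element is abstract, so these properties
+ # are only ever set on concrete subclasses. Assuming Tool (referenced above in
+ # CreationInfo and defined later in this file) is one such subclass, and
+ # reusing the CreationInfo sketched earlier as "ci":
+ #
+ #   tool = Tool()
+ #   tool.name = "example-generator"   # hypothetical name
+ #   tool.creationInfo = ci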
+
+# A collection of Elements, not necessarily with unifying context.
+@register("https://spdx.org/rdf/3.0.0/terms/Core/ElementCollection", compact_type="ElementCollection", abstract=True)
+class ElementCollection(Element):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ ID_ALIAS = "spdxId"
+ NAMED_INDIVIDUALS = {
+ }
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ # Refers to one or more Elements that are part of an ElementCollection.
+ cls._add_property(
+ "element",
+ ListProp(ObjectProp(Element, False)),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/element",
+ compact="element",
+ )
+ # Describes a profile which the creator of this ElementCollection intends to
+ # conform to.
+ cls._add_property(
+ "profileConformance",
+ ListProp(EnumProp([
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/ai", "ai"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/build", "build"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/core", "core"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/dataset", "dataset"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/expandedLicensing", "expandedLicensing"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/extension", "extension"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/lite", "lite"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/security", "security"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/simpleLicensing", "simpleLicensing"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/software", "software"),
+ ])),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/profileConformance",
+ compact="profileConformance",
+ )
+ # This property is used to denote the root Element(s) of a tree of elements contained in a BOM.
+ cls._add_property(
+ "rootElement",
+ ListProp(ObjectProp(Element, False)),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/rootElement",
+ compact="rootElement",
+ )
+
+
+# A reference to a resource identifier defined outside the scope of SPDX-3.0 content that uniquely identifies an Element.
+@register("https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifier", compact_type="ExternalIdentifier", abstract=False)
+class ExternalIdentifier(SHACLObject):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ NAMED_INDIVIDUALS = {
+ }
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ # Provide consumers with comments by the creator of the Element about the
+ # Element.
+ cls._add_property(
+ "comment",
+ StringProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/comment",
+ compact="comment",
+ )
+ # Specifies the type of the external identifier.
+ cls._add_property(
+ "externalIdentifierType",
+ EnumProp([
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/cpe22", "cpe22"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/cpe23", "cpe23"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/cve", "cve"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/email", "email"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/gitoid", "gitoid"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/other", "other"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/packageUrl", "packageUrl"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/securityOther", "securityOther"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/swhid", "swhid"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/swid", "swid"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/urlScheme", "urlScheme"),
+ ]),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/externalIdentifierType",
+ min_count=1,
+ compact="externalIdentifierType",
+ )
+ # Uniquely identifies an external element.
+ cls._add_property(
+ "identifier",
+ StringProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/identifier",
+ min_count=1,
+ compact="identifier",
+ )
+ # Provides the location for more information regarding an external identifier.
+ cls._add_property(
+ "identifierLocator",
+ ListProp(AnyURIProp()),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/identifierLocator",
+ compact="identifierLocator",
+ )
+ # An entity that is authorized to issue identification credentials.
+ cls._add_property(
+ "issuingAuthority",
+ StringProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/issuingAuthority",
+ compact="issuingAuthority",
+ )
+
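+ # Usage sketch (illustrative only): recording a package URL as an external
+ # identifier, assuming attribute-style access and using the
+ # ExternalIdentifierType constants defined just below; the purl is hypothetical:
+ #
+ #   ext_id = ExternalIdentifier()
+ #   ext_id.externalIdentifierType = ExternalIdentifierType.packageUrl
+ #   ext_id.identifier = "pkg:generic/example@1.0"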
+
+# Specifies the type of an external identifier.
+@register("https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType", compact_type="ExternalIdentifierType", abstract=False)
+class ExternalIdentifierType(SHACLObject):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ NAMED_INDIVIDUALS = {
+ "cpe22": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/cpe22",
+ "cpe23": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/cpe23",
+ "cve": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/cve",
+ "email": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/email",
+ "gitoid": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/gitoid",
+ "other": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/other",
+ "packageUrl": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/packageUrl",
+ "securityOther": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/securityOther",
+ "swhid": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/swhid",
+ "swid": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/swid",
+ "urlScheme": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/urlScheme",
+ }
+ # https://cpe.mitre.org/files/cpe-specification_2.2.pdf
+ cpe22 = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/cpe22"
+ # https://nvlpubs.nist.gov/nistpubs/Legacy/IR/nistir7695.pdf
+ cpe23 = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/cpe23"
+ # An identifier for a specific software flaw defined within the official CVE Dictionary and that conforms to the CVE specification as defined by https://csrc.nist.gov/glossary/term/cve_id.
+ cve = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/cve"
+ # https://datatracker.ietf.org/doc/html/rfc3696#section-3
+ email = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/email"
+ # https://www.iana.org/assignments/uri-schemes/prov/gitoid Gitoid stands for [Git Object ID](https://git-scm.com/book/en/v2/Git-Internals-Git-Objects) and a gitoid of type blob is a unique hash of a binary artifact. A gitoid may represent the software [Artifact ID](https://github.com/omnibor/spec/blob/main/spec/SPEC.md#artifact-id) or the [OmniBOR Identifier](https://github.com/omnibor/spec/blob/main/spec/SPEC.md#omnibor-identifier) for the software artifact's associated [OmniBOR Document](https://github.com/omnibor/spec/blob/main/spec/SPEC.md#omnibor-document); this ambiguity exists because the OmniBOR Document is itself an artifact, and the gitoid of that artifact is its valid identifier. Omnibor is a minimalistic schema to describe software [Artifact Dependency Graphs](https://github.com/omnibor/spec/blob/main/spec/SPEC.md#artifact-dependency-graph-adg). Gitoids calculated on software artifacts (Snippet, File, or Package Elements) should be recorded in the SPDX 3.0 SoftwareArtifact's ContentIdentifier property. Gitoids calculated on the OmniBOR Document (OmniBOR Identifiers) should be recorded in the SPDX 3.0 Element's ExternalIdentifier property.
+ gitoid = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/gitoid"
+ # Used when the type doesn't match any of the other options.
+ other = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/other"
+ # https://github.com/package-url/purl-spec
+ packageUrl = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/packageUrl"
+ # Used when there is a security related identifier of unspecified type.
+ securityOther = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/securityOther"
+ # SoftWare Hash IDentifier, persistent intrinsic identifiers for digital artifacts, such as files, trees (also known as directories or folders), commits, and other objects typically found in version control systems. The syntax of the identifiers is defined in the [SWHID specification](https://www.swhid.org/specification/v1.1/4.Syntax) and they typically look like `swh:1:cnt:94a9ed024d3859793618152ea559a168bbcbb5e2`.
+ swhid = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/swhid"
+ # https://www.ietf.org/archive/id/draft-ietf-sacm-coswid-21.html#section-2.3
+ swid = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/swid"
+ # The scheme used in order to locate a resource: https://www.iana.org/assignments/uri-schemes/uri-schemes.xhtml
+ urlScheme = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalIdentifierType/urlScheme"
+
+
+# A map of Element identifiers that are used within a Document but defined external to that Document.
+@register("https://spdx.org/rdf/3.0.0/terms/Core/ExternalMap", compact_type="ExternalMap", abstract=False)
+class ExternalMap(SHACLObject):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ NAMED_INDIVIDUALS = {
+ }
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ # Artifact representing a serialization instance of SPDX data containing the
+ # definition of a particular Element.
+ cls._add_property(
+ "definingArtifact",
+ ObjectProp(Artifact, False),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/definingArtifact",
+ compact="definingArtifact",
+ )
+ # Identifies an external Element used within a Document but defined external to
+ # that Document.
+ cls._add_property(
+ "externalSpdxId",
+ AnyURIProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/externalSpdxId",
+ min_count=1,
+ compact="externalSpdxId",
+ )
+ # Provides an indication of where to retrieve an external Element.
+ cls._add_property(
+ "locationHint",
+ AnyURIProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/locationHint",
+ compact="locationHint",
+ )
+ # Provides an IntegrityMethod with which the integrity of an Element can be
+ # asserted.
+ cls._add_property(
+ "verifiedUsing",
+ ListProp(ObjectProp(IntegrityMethod, False)),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/verifiedUsing",
+ compact="verifiedUsing",
+ )
+
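+ # Usage sketch (illustrative only): declaring that an Element referenced in a
+ # Document is defined elsewhere, assuming attribute-style access; both URIs
+ # are hypothetical:
+ #
+ #   ext_map = ExternalMap()
+ #   ext_map.externalSpdxId = "https://example.com/spdxdocs/other#SPDXRef-pkg"
+ #   ext_map.locationHint = "https://example.com/spdxdocs/other.spdx.json"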
+
+# A reference to a resource outside the scope of SPDX-3.0 content related to an Element.
+@register("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRef", compact_type="ExternalRef", abstract=False)
+class ExternalRef(SHACLObject):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ NAMED_INDIVIDUALS = {
+ }
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ # Provide consumers with comments by the creator of the Element about the
+ # Element.
+ cls._add_property(
+ "comment",
+ StringProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/comment",
+ compact="comment",
+ )
+ # Specifies the media type of an Element or Property.
+ cls._add_property(
+ "contentType",
+ StringProp(pattern=r"^[^\/]+\/[^\/]+$",),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/contentType",
+ compact="contentType",
+ )
+ # Specifies the type of the external reference.
+ cls._add_property(
+ "externalRefType",
+ EnumProp([
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/altDownloadLocation", "altDownloadLocation"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/altWebPage", "altWebPage"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/binaryArtifact", "binaryArtifact"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/bower", "bower"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/buildMeta", "buildMeta"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/buildSystem", "buildSystem"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/certificationReport", "certificationReport"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/chat", "chat"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/componentAnalysisReport", "componentAnalysisReport"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/cwe", "cwe"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/documentation", "documentation"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/dynamicAnalysisReport", "dynamicAnalysisReport"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/eolNotice", "eolNotice"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/exportControlAssessment", "exportControlAssessment"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/funding", "funding"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/issueTracker", "issueTracker"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/license", "license"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/mailingList", "mailingList"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/mavenCentral", "mavenCentral"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/metrics", "metrics"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/npm", "npm"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/nuget", "nuget"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/other", "other"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/privacyAssessment", "privacyAssessment"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/productMetadata", "productMetadata"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/purchaseOrder", "purchaseOrder"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/qualityAssessmentReport", "qualityAssessmentReport"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/releaseHistory", "releaseHistory"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/releaseNotes", "releaseNotes"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/riskAssessment", "riskAssessment"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/runtimeAnalysisReport", "runtimeAnalysisReport"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/secureSoftwareAttestation", "secureSoftwareAttestation"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityAdversaryModel", "securityAdversaryModel"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityAdvisory", "securityAdvisory"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityFix", "securityFix"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityOther", "securityOther"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityPenTestReport", "securityPenTestReport"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityPolicy", "securityPolicy"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityThreatModel", "securityThreatModel"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/socialMedia", "socialMedia"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/sourceArtifact", "sourceArtifact"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/staticAnalysisReport", "staticAnalysisReport"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/support", "support"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/vcs", "vcs"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/vulnerabilityDisclosureReport", "vulnerabilityDisclosureReport"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/vulnerabilityExploitabilityAssessment", "vulnerabilityExploitabilityAssessment"),
+ ]),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/externalRefType",
+ compact="externalRefType",
+ )
+ # Provides the location of an external reference.
+ cls._add_property(
+ "locator",
+ ListProp(StringProp()),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/locator",
+ compact="locator",
+ )
+
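+ # Usage sketch (illustrative only): pointing at a source repository, assuming
+ # attribute-style access, appendable list properties, and the ExternalRefType
+ # constants defined just below; the repository URL is hypothetical:
+ #
+ #   ref = ExternalRef()
+ #   ref.externalRefType = ExternalRefType.vcs
+ #   ref.locator.append("https://git.example.com/project.git")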
+
+# Specifies the type of an external reference.
+@register("https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType", compact_type="ExternalRefType", abstract=False)
+class ExternalRefType(SHACLObject):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ NAMED_INDIVIDUALS = {
+ "altDownloadLocation": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/altDownloadLocation",
+ "altWebPage": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/altWebPage",
+ "binaryArtifact": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/binaryArtifact",
+ "bower": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/bower",
+ "buildMeta": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/buildMeta",
+ "buildSystem": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/buildSystem",
+ "certificationReport": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/certificationReport",
+ "chat": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/chat",
+ "componentAnalysisReport": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/componentAnalysisReport",
+ "cwe": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/cwe",
+ "documentation": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/documentation",
+ "dynamicAnalysisReport": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/dynamicAnalysisReport",
+ "eolNotice": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/eolNotice",
+ "exportControlAssessment": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/exportControlAssessment",
+ "funding": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/funding",
+ "issueTracker": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/issueTracker",
+ "license": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/license",
+ "mailingList": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/mailingList",
+ "mavenCentral": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/mavenCentral",
+ "metrics": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/metrics",
+ "npm": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/npm",
+ "nuget": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/nuget",
+ "other": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/other",
+ "privacyAssessment": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/privacyAssessment",
+ "productMetadata": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/productMetadata",
+ "purchaseOrder": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/purchaseOrder",
+ "qualityAssessmentReport": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/qualityAssessmentReport",
+ "releaseHistory": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/releaseHistory",
+ "releaseNotes": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/releaseNotes",
+ "riskAssessment": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/riskAssessment",
+ "runtimeAnalysisReport": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/runtimeAnalysisReport",
+ "secureSoftwareAttestation": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/secureSoftwareAttestation",
+ "securityAdversaryModel": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityAdversaryModel",
+ "securityAdvisory": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityAdvisory",
+ "securityFix": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityFix",
+ "securityOther": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityOther",
+ "securityPenTestReport": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityPenTestReport",
+ "securityPolicy": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityPolicy",
+ "securityThreatModel": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityThreatModel",
+ "socialMedia": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/socialMedia",
+ "sourceArtifact": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/sourceArtifact",
+ "staticAnalysisReport": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/staticAnalysisReport",
+ "support": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/support",
+ "vcs": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/vcs",
+ "vulnerabilityDisclosureReport": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/vulnerabilityDisclosureReport",
+ "vulnerabilityExploitabilityAssessment": "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/vulnerabilityExploitabilityAssessment",
+ }
+ # A reference to an alternative download location.
+ altDownloadLocation = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/altDownloadLocation"
+ # A reference to an alternative web page.
+ altWebPage = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/altWebPage"
+ # A reference to binary artifacts related to a package.
+ binaryArtifact = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/binaryArtifact"
+ # A reference to a bower package.
+ bower = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/bower"
+ # A reference to build metadata related to a published package.
+ buildMeta = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/buildMeta"
+ # A reference to the build system used to create or publish the package.
+ buildSystem = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/buildSystem"
+ # A reference to a certification report for a package from an accredited/independent body.
+ certificationReport = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/certificationReport"
+ # A reference to the instant messaging system used by the maintainer for a package.
+ chat = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/chat"
+ # A reference to a Software Composition Analysis (SCA) report.
+ componentAnalysisReport = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/componentAnalysisReport"
+ # A reference to a source of software flaw defined within the official CWE Dictionary that conforms to the CWE specification as defined by https://csrc.nist.gov/glossary/term/common_weakness_enumeration.
+ cwe = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/cwe"
+ # A reference to the documentation for a package.
+ documentation = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/documentation"
+ # A reference to a dynamic analysis report for a package.
+ dynamicAnalysisReport = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/dynamicAnalysisReport"
+ # A reference to the End Of Sale (EOS) and/or End Of Life (EOL) information related to a package.
+ eolNotice = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/eolNotice"
+ # A reference to an export control assessment for a package.
+ exportControlAssessment = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/exportControlAssessment"
+ # A reference to funding information related to a package.
+ funding = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/funding"
+ # A reference to the issue tracker for a package.
+ issueTracker = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/issueTracker"
+ # A reference to additional license information related to an artifact.
+ license = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/license"
+ # A reference to the mailing list used by the maintainer for a package.
+ mailingList = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/mailingList"
+ # A reference to a maven repository artifact.
+ mavenCentral = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/mavenCentral"
+ # A reference to metrics related to a package, such as OpenSSF scorecards.
+ metrics = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/metrics"
+ # A reference to an npm package.
+ npm = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/npm"
+ # A reference to a nuget package.
+ nuget = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/nuget"
+ # Used when the type doesn't match any of the other options.
+ other = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/other"
+ # A reference to a privacy assessment for a package.
+ privacyAssessment = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/privacyAssessment"
+ # A reference to additional product metadata, such as a reference within an organization's product catalog.
+ productMetadata = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/productMetadata"
+ # A reference to a purchase order for a package.
+ purchaseOrder = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/purchaseOrder"
+ # A reference to a quality assessment for a package.
+ qualityAssessmentReport = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/qualityAssessmentReport"
+ # A reference to a published list of releases for a package.
+ releaseHistory = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/releaseHistory"
+ # A reference to the release notes for a package.
+ releaseNotes = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/releaseNotes"
+ # A reference to a risk assessment for a package.
+ riskAssessment = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/riskAssessment"
+ # A reference to a runtime analysis report for a package.
+ runtimeAnalysisReport = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/runtimeAnalysisReport"
+ # A reference to information assuring that the software is developed using security practices as defined by [NIST SP 800-218 Secure Software Development Framework (SSDF) Version 1.1](https://csrc.nist.gov/pubs/sp/800/218/final) or [CISA Secure Software Development Attestation Form](https://www.cisa.gov/resources-tools/resources/secure-software-development-attestation-form).
+ secureSoftwareAttestation = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/secureSoftwareAttestation"
+ # A reference to the security adversary model for a package.
+ securityAdversaryModel = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityAdversaryModel"
+ # A reference to a published security advisory (where advisory as defined per ISO 29147:2018) that may affect one or more elements, e.g., vendor advisories or specific NVD entries.
+ securityAdvisory = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityAdvisory"
+ # A reference to the patch or source code that fixes a vulnerability.
+ securityFix = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityFix"
+ # A reference to related security information of unspecified type.
+ securityOther = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityOther"
+ # A reference to a [penetration test](https://en.wikipedia.org/wiki/Penetration_test) report for a package.
+ securityPenTestReport = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityPenTestReport"
+ # A reference to instructions for reporting newly discovered security vulnerabilities for a package.
+ securityPolicy = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityPolicy"
+ # A reference to the [security threat model](https://en.wikipedia.org/wiki/Threat_model) for a package.
+ securityThreatModel = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/securityThreatModel"
+ # A reference to a social media channel for a package.
+ socialMedia = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/socialMedia"
+ # A reference to an artifact containing the sources for a package.
+ sourceArtifact = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/sourceArtifact"
+ # A reference to a static analysis report for a package.
+ staticAnalysisReport = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/staticAnalysisReport"
+ # A reference to the software support channel or other support information for a package.
+ support = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/support"
+ # A reference to a version control system related to a software artifact.
+ vcs = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/vcs"
+ # A reference to a Vulnerability Disclosure Report (VDR) which provides the software supplier's analysis and findings describing the impact (or lack of impact) that reported vulnerabilities have on packages or products in the supplier's SBOM as defined in [NIST SP 800-161](https://csrc.nist.gov/pubs/sp/800/161/r1/final).
+ vulnerabilityDisclosureReport = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/vulnerabilityDisclosureReport"
+ # A reference to a Vulnerability Exploitability eXchange (VEX) statement which provides information on whether a product is impacted by a specific vulnerability in an included package and, if affected, whether there are actions recommended to remediate. See also [NTIA VEX one-page summary](https://ntia.gov/files/ntia/publications/vex_one-page_summary.pdf).
+ vulnerabilityExploitabilityAssessment = "https://spdx.org/rdf/3.0.0/terms/Core/ExternalRefType/vulnerabilityExploitabilityAssessment"
+
+
+# A mathematical algorithm that maps data of arbitrary size to a bit string.
+@register("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm", compact_type="HashAlgorithm", abstract=False)
+class HashAlgorithm(SHACLObject):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ NAMED_INDIVIDUALS = {
+ "blake2b256": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/blake2b256",
+ "blake2b384": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/blake2b384",
+ "blake2b512": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/blake2b512",
+ "blake3": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/blake3",
+ "crystalsDilithium": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/crystalsDilithium",
+ "crystalsKyber": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/crystalsKyber",
+ "falcon": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/falcon",
+ "md2": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/md2",
+ "md4": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/md4",
+ "md5": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/md5",
+ "md6": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/md6",
+ "other": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/other",
+ "sha1": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha1",
+ "sha224": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha224",
+ "sha256": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha256",
+ "sha384": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha384",
+ "sha3_224": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha3_224",
+ "sha3_256": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha3_256",
+ "sha3_384": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha3_384",
+ "sha3_512": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha3_512",
+ "sha512": "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha512",
+ }
+ # blake2b algorithm with a digest size of 256 https://datatracker.ietf.org/doc/html/rfc7693#section-4
+ blake2b256 = "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/blake2b256"
+ # blake2b algorithm with a digest size of 384 https://datatracker.ietf.org/doc/html/rfc7693#section-4
+ blake2b384 = "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/blake2b384"
+ # blake2b algorithm with a digest size of 512 https://datatracker.ietf.org/doc/html/rfc7693#section-4
+ blake2b512 = "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/blake2b512"
+ # https://github.com/BLAKE3-team/BLAKE3-specs/blob/master/blake3.pdf
+ blake3 = "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/blake3"
+ # https://pq-crystals.org/dilithium/index.shtml
+ crystalsDilithium = "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/crystalsDilithium"
+ # https://pq-crystals.org/kyber/index.shtml
+ crystalsKyber = "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/crystalsKyber"
+ # https://falcon-sign.info/falcon.pdf
+ falcon = "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/falcon"
+ # https://datatracker.ietf.org/doc/rfc1319/
+ md2 = "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/md2"
+ # https://datatracker.ietf.org/doc/html/rfc1186
+ md4 = "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/md4"
+ # https://datatracker.ietf.org/doc/html/rfc1321
+ md5 = "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/md5"
+ # https://people.csail.mit.edu/rivest/pubs/RABCx08.pdf
+ md6 = "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/md6"
+ # any hashing algorithm that does not exist in this list of entries
+ other = "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/other"
+ # https://datatracker.ietf.org/doc/html/rfc3174
+ sha1 = "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha1"
+ # secure hashing algorithm with a digest length of 224 https://datatracker.ietf.org/doc/html/draft-ietf-pkix-sha224-01
+ sha224 = "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha224"
+ # secure hashing algorithm with a digest length of 256 https://www.rfc-editor.org/rfc/rfc4634
+ sha256 = "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha256"
+ # secure hashing algorithm with a digest length of 384 https://www.rfc-editor.org/rfc/rfc4634
+ sha384 = "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha384"
+ # sha3 with a digest length of 224 https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf
+ sha3_224 = "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha3_224"
+ # sha3 with a digest length of 256 https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf
+ sha3_256 = "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha3_256"
+ # sha3 with a digest length of 384 https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf
+ sha3_384 = "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha3_384"
+ # sha3 with a digest length of 512 https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf
+ sha3_512 = "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha3_512"
+ # secure hashing algorithm with a digest length of 512 https://www.rfc-editor.org/rfc/rfc4634
+ sha512 = "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha512"
+
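+ # Illustrative note (not part of the generated output): the named individuals
+ # above are plain IRI strings, so they can be assigned and compared directly,
+ # e.g.:
+ #
+ #     algo = HashAlgorithm.sha256
+ #     assert algo == "https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha256"
+ #     assert algo in HashAlgorithm.NAMED_INDIVIDUALS.values()
+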
+
+# Provides an independently reproducible mechanism that permits verification of a specific Element.
+@register("https://spdx.org/rdf/3.0.0/terms/Core/IntegrityMethod", compact_type="IntegrityMethod", abstract=True)
+class IntegrityMethod(SHACLObject):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ NAMED_INDIVIDUALS = {
+ }
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ # Provide consumers with comments by the creator of the Element about the
+ # Element.
+ cls._add_property(
+ "comment",
+ StringProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/comment",
+ compact="comment",
+ )
+
+
+# Provide an enumerated set of lifecycle phases that can provide context to relationships.
+@register("https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType", compact_type="LifecycleScopeType", abstract=False)
+class LifecycleScopeType(SHACLObject):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ NAMED_INDIVIDUALS = {
+ "build": "https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/build",
+ "design": "https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/design",
+ "development": "https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/development",
+ "other": "https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/other",
+ "runtime": "https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/runtime",
+ "test": "https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/test",
+ }
+ # A relationship has specific context implications during an element's build phase, during development.
+ build = "https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/build"
+ # A relationship has specific context implications during an element's design.
+ design = "https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/design"
+ # A relationship has specific context implications during the development phase of an element.
+ development = "https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/development"
+ # A relationship has other specific context information necessary to capture that the above set of enumerations does not handle.
+ other = "https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/other"
+ # A relationship has specific context implications during the execution phase of an element.
+ runtime = "https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/runtime"
+ # A relationship has specific context implications during an element's testing phase, during development.
+ test = "https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/test"
+
+
+# A mapping between prefixes and namespace partial URIs.
+@register("https://spdx.org/rdf/3.0.0/terms/Core/NamespaceMap", compact_type="NamespaceMap", abstract=False)
+class NamespaceMap(SHACLObject):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ NAMED_INDIVIDUALS = {
+ }
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ # Provides an unambiguous mechanism for conveying a URI fragment portion of an
+ # ElementID.
+ cls._add_property(
+ "namespace",
+ AnyURIProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/namespace",
+ min_count=1,
+ compact="namespace",
+ )
+ # A substitute for a URI.
+ cls._add_property(
+ "prefix",
+ StringProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/prefix",
+ min_count=1,
+ compact="prefix",
+ )
+
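+ # Illustrative usage sketch (not part of the generated output; assumes the
+ # attribute-assignment conventions of these bindings, and the namespace URI
+ # below is a made-up placeholder):
+ #
+ #     nm = NamespaceMap()
+ #     nm.prefix = "example"
+ #     nm.namespace = "https://example.com/spdx-namespace/"
+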
+
+# An SPDX version 2.X compatible verification method for software packages.
+@register("https://spdx.org/rdf/3.0.0/terms/Core/PackageVerificationCode", compact_type="PackageVerificationCode", abstract=False)
+class PackageVerificationCode(IntegrityMethod):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ NAMED_INDIVIDUALS = {
+ }
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ # Specifies the algorithm used for calculating the hash value.
+ cls._add_property(
+ "algorithm",
+ EnumProp([
+ ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/blake2b256", "blake2b256"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/blake2b384", "blake2b384"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/blake2b512", "blake2b512"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/blake3", "blake3"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/crystalsDilithium", "crystalsDilithium"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/crystalsKyber", "crystalsKyber"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/falcon", "falcon"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/md2", "md2"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/md4", "md4"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/md5", "md5"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/md6", "md6"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/other", "other"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha1", "sha1"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha224", "sha224"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha256", "sha256"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha384", "sha384"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha3_224", "sha3_224"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha3_256", "sha3_256"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha3_384", "sha3_384"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha3_512", "sha3_512"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha512", "sha512"),
+ ]),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/algorithm",
+ min_count=1,
+ compact="algorithm",
+ )
+ # The result of applying a hash algorithm to an Element.
+ cls._add_property(
+ "hashValue",
+ StringProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/hashValue",
+ min_count=1,
+ compact="hashValue",
+ )
+ # The relative file name of a file to be excluded from the
+ # `PackageVerificationCode`.
+ cls._add_property(
+ "packageVerificationCodeExcludedFile",
+ ListProp(StringProp()),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/packageVerificationCodeExcludedFile",
+ compact="packageVerificationCodeExcludedFile",
+ )
+
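+ # Illustrative usage sketch (not part of the generated output; the digest
+ # below is a made-up placeholder): a PackageVerificationCode pairs a
+ # HashAlgorithm IRI with the computed value, mirroring the SPDX 2.x package
+ # verification code.
+ #
+ #     pvc = PackageVerificationCode()
+ #     pvc.algorithm = HashAlgorithm.sha1
+ #     pvc.hashValue = "d6a770ba38583ed4bb4525bd96e50461655d2759"
+ #     pvc.packageVerificationCodeExcludedFile = ["./package.spdx"]
+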
+
+# A tuple of two positive integers that define a range.
+@register("https://spdx.org/rdf/3.0.0/terms/Core/PositiveIntegerRange", compact_type="PositiveIntegerRange", abstract=False)
+class PositiveIntegerRange(SHACLObject):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ NAMED_INDIVIDUALS = {
+ }
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ # Defines the beginning of a range.
+ cls._add_property(
+ "beginIntegerRange",
+ PositiveIntegerProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/beginIntegerRange",
+ min_count=1,
+ compact="beginIntegerRange",
+ )
+ # Defines the end of a range.
+ cls._add_property(
+ "endIntegerRange",
+ PositiveIntegerProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/endIntegerRange",
+ min_count=1,
+ compact="endIntegerRange",
+ )
+
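+ # Illustrative usage sketch (not part of the generated output): both ends of
+ # the range are required positive integers, e.g. to describe a byte or line
+ # range within a file:
+ #
+ #     r = PositiveIntegerRange()
+ #     r.beginIntegerRange = 1
+ #     r.endIntegerRange = 420
+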
+
+# Categories of presence or absence.
+@register("https://spdx.org/rdf/3.0.0/terms/Core/PresenceType", compact_type="PresenceType", abstract=False)
+class PresenceType(SHACLObject):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ NAMED_INDIVIDUALS = {
+ "no": "https://spdx.org/rdf/3.0.0/terms/Core/PresenceType/no",
+ "noAssertion": "https://spdx.org/rdf/3.0.0/terms/Core/PresenceType/noAssertion",
+ "yes": "https://spdx.org/rdf/3.0.0/terms/Core/PresenceType/yes",
+ }
+ # Indicates absence of the field.
+ no = "https://spdx.org/rdf/3.0.0/terms/Core/PresenceType/no"
+ # Makes no assertion about the field.
+ noAssertion = "https://spdx.org/rdf/3.0.0/terms/Core/PresenceType/noAssertion"
+ # Indicates presence of the field.
+ yes = "https://spdx.org/rdf/3.0.0/terms/Core/PresenceType/yes"
+
+
+# Enumeration of the valid profiles.
+@register("https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType", compact_type="ProfileIdentifierType", abstract=False)
+class ProfileIdentifierType(SHACLObject):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ NAMED_INDIVIDUALS = {
+ "ai": "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/ai",
+ "build": "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/build",
+ "core": "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/core",
+ "dataset": "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/dataset",
+ "expandedLicensing": "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/expandedLicensing",
+ "extension": "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/extension",
+ "lite": "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/lite",
+ "security": "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/security",
+ "simpleLicensing": "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/simpleLicensing",
+ "software": "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/software",
+ }
+ # the element follows the AI profile specification
+ ai = "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/ai"
+ # the element follows the Build profile specification
+ build = "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/build"
+ # the element follows the Core profile specification
+ core = "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/core"
+ # the element follows the Dataset profile specification
+ dataset = "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/dataset"
+ # the element follows the expanded Licensing profile
+ expandedLicensing = "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/expandedLicensing"
+ # the element follows the Extension profile specification
+ extension = "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/extension"
+ # the element follows the Lite profile specification
+ lite = "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/lite"
+ # the element follows the Security profile specification
+ security = "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/security"
+ # the element follows the simple Licensing profile
+ simpleLicensing = "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/simpleLicensing"
+ # the element follows the Software profile specification
+ software = "https://spdx.org/rdf/3.0.0/terms/Core/ProfileIdentifierType/software"
+
+
+# Describes a relationship between one or more elements.
+@register("https://spdx.org/rdf/3.0.0/terms/Core/Relationship", compact_type="Relationship", abstract=False)
+class Relationship(Element):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ ID_ALIAS = "spdxId"
+ NAMED_INDIVIDUALS = {
+ }
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ # Provides information about the completeness of relationships.
+ cls._add_property(
+ "completeness",
+ EnumProp([
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipCompleteness/complete", "complete"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipCompleteness/incomplete", "incomplete"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipCompleteness/noAssertion", "noAssertion"),
+ ]),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/completeness",
+ compact="completeness",
+ )
+ # Specifies the time from which an element is no longer applicable / valid.
+ cls._add_property(
+ "endTime",
+ DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/endTime",
+ compact="endTime",
+ )
+ # References the Element on the left-hand side of a relationship.
+ cls._add_property(
+ "from_",
+ ObjectProp(Element, True),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/from",
+ min_count=1,
+ compact="from",
+ )
+ # Information about the relationship between two Elements.
+ cls._add_property(
+ "relationshipType",
+ EnumProp([
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/affects", "affects"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/amendedBy", "amendedBy"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/ancestorOf", "ancestorOf"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/availableFrom", "availableFrom"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/configures", "configures"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/contains", "contains"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/coordinatedBy", "coordinatedBy"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/copiedTo", "copiedTo"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/delegatedTo", "delegatedTo"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/dependsOn", "dependsOn"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/descendantOf", "descendantOf"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/describes", "describes"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/doesNotAffect", "doesNotAffect"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/expandsTo", "expandsTo"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/exploitCreatedBy", "exploitCreatedBy"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/fixedBy", "fixedBy"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/fixedIn", "fixedIn"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/foundBy", "foundBy"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/generates", "generates"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasAddedFile", "hasAddedFile"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasAssessmentFor", "hasAssessmentFor"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasAssociatedVulnerability", "hasAssociatedVulnerability"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasConcludedLicense", "hasConcludedLicense"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDataFile", "hasDataFile"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDeclaredLicense", "hasDeclaredLicense"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDeletedFile", "hasDeletedFile"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDependencyManifest", "hasDependencyManifest"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDistributionArtifact", "hasDistributionArtifact"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDocumentation", "hasDocumentation"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDynamicLink", "hasDynamicLink"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasEvidence", "hasEvidence"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasExample", "hasExample"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasHost", "hasHost"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasInputs", "hasInputs"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasMetadata", "hasMetadata"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasOptionalComponent", "hasOptionalComponent"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasOptionalDependency", "hasOptionalDependency"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasOutputs", "hasOutputs"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasPrerequsite", "hasPrerequsite"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasProvidedDependency", "hasProvidedDependency"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasRequirement", "hasRequirement"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasSpecification", "hasSpecification"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasStaticLink", "hasStaticLink"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasTest", "hasTest"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasTestCase", "hasTestCase"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasVariant", "hasVariant"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/invokedBy", "invokedBy"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/modifiedBy", "modifiedBy"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/other", "other"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/packagedBy", "packagedBy"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/patchedBy", "patchedBy"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/publishedBy", "publishedBy"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/reportedBy", "reportedBy"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/republishedBy", "republishedBy"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/serializedInArtifact", "serializedInArtifact"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/testedOn", "testedOn"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/trainedOn", "trainedOn"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/underInvestigationFor", "underInvestigationFor"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/usesTool", "usesTool"),
+ ]),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/relationshipType",
+ min_count=1,
+ compact="relationshipType",
+ )
+ # Specifies the time from which an element is applicable / valid.
+ cls._add_property(
+ "startTime",
+ DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/startTime",
+ compact="startTime",
+ )
+ # References an Element on the right-hand side of a relationship.
+ cls._add_property(
+ "to",
+ ListProp(ObjectProp(Element, False)),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/to",
+ min_count=1,
+ compact="to",
+ )
+
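+ # Illustrative usage sketch (not part of the generated output; `pkg` and
+ # `srcfile` stand for Elements created elsewhere): a Relationship links a
+ # single `from_` Element to one or more `to` Elements, typed by a
+ # RelationshipType IRI.
+ #
+ #     rel = Relationship()
+ #     rel.from_ = pkg
+ #     rel.to = [srcfile]
+ #     rel.relationshipType = RelationshipType.contains
+ #     rel.completeness = RelationshipCompleteness.complete
+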
+
+# Indicates whether a relationship is known to be complete, incomplete, or if no assertion is made with respect to relationship completeness.
+@register("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipCompleteness", compact_type="RelationshipCompleteness", abstract=False)
+class RelationshipCompleteness(SHACLObject):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ NAMED_INDIVIDUALS = {
+ "complete": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipCompleteness/complete",
+ "incomplete": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipCompleteness/incomplete",
+ "noAssertion": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipCompleteness/noAssertion",
+ }
+ # The relationship is known to be exhaustive.
+ complete = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipCompleteness/complete"
+ # The relationship is known not to be exhaustive.
+ incomplete = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipCompleteness/incomplete"
+ # No assertion can be made about the completeness of the relationship.
+ noAssertion = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipCompleteness/noAssertion"
+
+
+# Information about the relationship between two Elements.
+@register("https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType", compact_type="RelationshipType", abstract=False)
+class RelationshipType(SHACLObject):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ NAMED_INDIVIDUALS = {
+ "affects": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/affects",
+ "amendedBy": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/amendedBy",
+ "ancestorOf": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/ancestorOf",
+ "availableFrom": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/availableFrom",
+ "configures": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/configures",
+ "contains": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/contains",
+ "coordinatedBy": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/coordinatedBy",
+ "copiedTo": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/copiedTo",
+ "delegatedTo": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/delegatedTo",
+ "dependsOn": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/dependsOn",
+ "descendantOf": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/descendantOf",
+ "describes": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/describes",
+ "doesNotAffect": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/doesNotAffect",
+ "expandsTo": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/expandsTo",
+ "exploitCreatedBy": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/exploitCreatedBy",
+ "fixedBy": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/fixedBy",
+ "fixedIn": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/fixedIn",
+ "foundBy": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/foundBy",
+ "generates": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/generates",
+ "hasAddedFile": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasAddedFile",
+ "hasAssessmentFor": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasAssessmentFor",
+ "hasAssociatedVulnerability": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasAssociatedVulnerability",
+ "hasConcludedLicense": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasConcludedLicense",
+ "hasDataFile": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDataFile",
+ "hasDeclaredLicense": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDeclaredLicense",
+ "hasDeletedFile": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDeletedFile",
+ "hasDependencyManifest": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDependencyManifest",
+ "hasDistributionArtifact": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDistributionArtifact",
+ "hasDocumentation": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDocumentation",
+ "hasDynamicLink": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDynamicLink",
+ "hasEvidence": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasEvidence",
+ "hasExample": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasExample",
+ "hasHost": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasHost",
+ "hasInputs": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasInputs",
+ "hasMetadata": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasMetadata",
+ "hasOptionalComponent": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasOptionalComponent",
+ "hasOptionalDependency": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasOptionalDependency",
+ "hasOutputs": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasOutputs",
+ "hasPrerequsite": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasPrerequsite",
+ "hasProvidedDependency": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasProvidedDependency",
+ "hasRequirement": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasRequirement",
+ "hasSpecification": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasSpecification",
+ "hasStaticLink": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasStaticLink",
+ "hasTest": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasTest",
+ "hasTestCase": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasTestCase",
+ "hasVariant": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasVariant",
+ "invokedBy": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/invokedBy",
+ "modifiedBy": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/modifiedBy",
+ "other": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/other",
+ "packagedBy": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/packagedBy",
+ "patchedBy": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/patchedBy",
+ "publishedBy": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/publishedBy",
+ "reportedBy": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/reportedBy",
+ "republishedBy": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/republishedBy",
+ "serializedInArtifact": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/serializedInArtifact",
+ "testedOn": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/testedOn",
+ "trainedOn": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/trainedOn",
+ "underInvestigationFor": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/underInvestigationFor",
+ "usesTool": "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/usesTool",
+ }
+ # (Security/VEX) The `from` Vulnerability affects each `to` Element
+ affects = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/affects"
+ # The `from` Element is amended by each `to` Element
+ amendedBy = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/amendedBy"
+ # The `from` Element is an ancestor of each `to` Element
+ ancestorOf = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/ancestorOf"
+ # The `from` Element is available from the additional supplier described by each `to` Element
+ availableFrom = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/availableFrom"
+ # The `from` Element is a configuration applied to each `to` Element during a LifecycleScopeType period
+ configures = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/configures"
+ # The `from` Element contains each `to` Element
+ contains = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/contains"
+ # (Security) The `from` Vulnerability is coordinatedBy the `to` Agent(s) (vendor, researcher, or consumer agent)
+ coordinatedBy = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/coordinatedBy"
+ # The `from` Element has been copied to each `to` Element
+ copiedTo = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/copiedTo"
+ # The `from` Agent is delegating an action to the Agent of the `to` Relationship (which must be of type invokedBy) during a LifecycleScopeType. (e.g. the `to` invokedBy Relationship is being done on behalf of `from`)
+ delegatedTo = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/delegatedTo"
+ # The `from` Element depends on each `to` Element during a LifecycleScopeType period.
+ dependsOn = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/dependsOn"
+ # The `from` Element is a descendant of each `to` Element
+ descendantOf = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/descendantOf"
+ # The `from` Element describes each `to` Element. To denote the root(s) of a tree of elements in a collection, the rootElement property should be used.
+ describes = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/describes"
+ # (Security/VEX) The `from` Vulnerability has no impact on each `to` Element
+ doesNotAffect = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/doesNotAffect"
+ # The `from` archive expands out as an artifact described by each `to` Element
+ expandsTo = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/expandsTo"
+ # (Security) The `from` Vulnerability has had an exploit created against it by each `to` Agent
+ exploitCreatedBy = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/exploitCreatedBy"
+ # (Security) Designates a `from` Vulnerability has been fixed by the `to` Agent(s)
+ fixedBy = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/fixedBy"
+ # (Security/VEX) A `from` Vulnerability has been fixed in each of the `to` Element(s)
+ fixedIn = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/fixedIn"
+ # (Security) Designates a `from` Vulnerability was originally discovered by the `to` Agent(s)
+ foundBy = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/foundBy"
+ # The `from` Element generates each `to` Element
+ generates = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/generates"
+ # Every `to` Element is a file added to the `from` Element (`from` hasAddedFile `to`)
+ hasAddedFile = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasAddedFile"
+ # (Security) Relates a `from` Vulnerability and each `to` Element(s) with a security assessment. To be used with `VulnAssessmentRelationship` types
+ hasAssessmentFor = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasAssessmentFor"
+ # (Security) Used to associate a `from` Artifact with each `to` Vulnerability
+ hasAssociatedVulnerability = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasAssociatedVulnerability"
+ # The `from` Software Artifact is concluded by the SPDX data creator to be governed by each `to` license
+ hasConcludedLicense = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasConcludedLicense"
+ # The `from` Element treats each `to` Element as a data file
+ hasDataFile = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDataFile"
+ # The `from` Software Artifact was discovered to actually contain each `to` license, for example as detected by use of automated tooling.
+ hasDeclaredLicense = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDeclaredLicense"
+ # Every `to` Element is a file deleted from the `from` Element (`from` hasDeletedFile `to`)
+ hasDeletedFile = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDeletedFile"
+ # The `from` Element has manifest files that contain dependency information in each `to` Element
+ hasDependencyManifest = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDependencyManifest"
+ # The `from` Element is distributed as an artifact in each `to` Element (e.g. an RPM or archive file)
+ hasDistributionArtifact = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDistributionArtifact"
+ # The `from` Element is documented by each `to` Element
+ hasDocumentation = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDocumentation"
+ # The `from` Element dynamically links in each `to` Element, during a LifecycleScopeType period.
+ hasDynamicLink = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasDynamicLink"
+ # (Dataset) Every `to` Element is considered as evidence for the `from` Element (`from` hasEvidence `to`)
+ hasEvidence = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasEvidence"
+ # Every `to` Element is an example for the `from` Element (`from` hasExample `to`)
+ hasExample = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasExample"
+ # The `from` Build was run on the `to` Element during a LifecycleScopeType period (e.g. the host that the build runs on)
+ hasHost = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasHost"
+ # The `from` Build has each `to` Element as an input during a LifecycleScopeType period.
+ hasInputs = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasInputs"
+ # Every `to` Element is metadata about the `from` Element (`from` hasMetadata `to`)
+ hasMetadata = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasMetadata"
+ # Every `to` Element is an optional component of the `from` Element (`from` hasOptionalComponent `to`)
+ hasOptionalComponent = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasOptionalComponent"
+ # The `from` Element optionally depends on each `to` Element during a LifecycleScopeType period
+ hasOptionalDependency = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasOptionalDependency"
+ # The `from` Build element generates each `to` Element as an output during a LifecycleScopeType period.
+ hasOutputs = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasOutputs"
+ # The `from` Element has a prerequisite on each `to` Element, during a LifecycleScopeType period
+ hasPrerequsite = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasPrerequsite"
+ # The `from` Element has a dependency on each `to` Element; the dependency is not in the distributed artifact but is assumed to be provided, during a LifecycleScopeType period
+ hasProvidedDependency = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasProvidedDependency"
+ # The `from` Element has a requirement on each `to` Element, during a LifecycleScopeType period
+ hasRequirement = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasRequirement"
+ # Every `to` Element is a specification for the `from` Element (`from` hasSpecification `to`), during a LifecycleScopeType period
+ hasSpecification = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasSpecification"
+ # The `from` Element statically links in each `to` Element, during a LifecycleScopeType period
+ hasStaticLink = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasStaticLink"
+ # Every `to` Element is a test artifact for the `from` Element (`from` hasTest `to`), during a LifecycleScopeType period
+ hasTest = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasTest"
+ # Every `to` Element is a test case for the `from` Element (`from` hasTestCase `to`)
+ hasTestCase = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasTestCase"
+ # Every `to` Element is a variant of the `from` Element (`from` hasVariant `to`)
+ hasVariant = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/hasVariant"
+ # The `from` Element was invoked by the `to` Agent during a LifecycleScopeType period (for example, a Build element that describes a build step)
+ invokedBy = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/invokedBy"
+ # The `from` Element is modified by each `to` Element
+ modifiedBy = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/modifiedBy"
+ # Every `to` Element is related to the `from` Element where the relationship type is not described by any of the SPDX relationship types (this relationship is directionless)
+ other = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/other"
+ # Every `to` Element is a packaged instance of the `from` Element (`from` packagedBy `to`)
+ packagedBy = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/packagedBy"
+ # Every `to` Element is a patch for the `from` Element (`from` patchedBy `to`)
+ patchedBy = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/patchedBy"
+ # (Security) Designates a `from` Vulnerability was made available for public use or reference by each `to` Agent
+ publishedBy = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/publishedBy"
+ # (Security) Designates a `from` Vulnerability was first reported to a project, vendor, or tracking database for formal identification by each `to` Agent
+ reportedBy = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/reportedBy"
+ # (Security) Designates a `from` Vulnerability's details were tracked, aggregated, and/or enriched to improve context (i.e. NVD) by the `to` Agent(s)
+ republishedBy = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/republishedBy"
+ # The `from` SPDXDocument can be found in a serialized form in each `to` Artifact
+ serializedInArtifact = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/serializedInArtifact"
+ # (AI, Dataset) The `from` Element has been tested on the `to` Element
+ testedOn = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/testedOn"
+ # (AI, Dataset) The `from` Element has been trained by the `to` Element(s)
+ trainedOn = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/trainedOn"
+ # (Security/VEX) The `from` Vulnerability impact is being investigated for each `to` Element
+ underInvestigationFor = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/underInvestigationFor"
+ # The `from` Element uses each `to` Element as a tool during a LifecycleScopeType period.
+ usesTool = "https://spdx.org/rdf/3.0.0/terms/Core/RelationshipType/usesTool"
+
+
+# A collection of SPDX Elements that could potentially be serialized.
+@register("https://spdx.org/rdf/3.0.0/terms/Core/SpdxDocument", compact_type="SpdxDocument", abstract=False)
+class SpdxDocument(ElementCollection):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ ID_ALIAS = "spdxId"
+ NAMED_INDIVIDUALS = {
+ }
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ # Provides the license under which the SPDX documentation of the Element can be
+ # used.
+ cls._add_property(
+ "dataLicense",
+ ObjectProp(simplelicensing_AnyLicenseInfo, False),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/dataLicense",
+ compact="dataLicense",
+ )
+ # Provides an ExternalMap of Element identifiers.
+ cls._add_property(
+ "imports",
+ ListProp(ObjectProp(ExternalMap, False)),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/imports",
+ compact="imports",
+ )
+ # Provides a NamespaceMap of prefixes and associated namespace partial URIs applicable to an SpdxDocument and independent of any specific serialization format or instance.
+ cls._add_property(
+ "namespaceMap",
+ ListProp(ObjectProp(NamespaceMap, False)),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/namespaceMap",
+ compact="namespaceMap",
+ )
+
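+ # Illustrative usage sketch (not part of the generated output; `license_info`
+ # and `nm` stand for a simplelicensing_AnyLicenseInfo and a NamespaceMap
+ # created elsewhere):
+ #
+ #     doc = SpdxDocument()
+ #     doc.dataLicense = license_info
+ #     doc.namespaceMap = [nm]
+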
+
+# Indicates the type of support that is associated with an artifact.
+@register("https://spdx.org/rdf/3.0.0/terms/Core/SupportType", compact_type="SupportType", abstract=False)
+class SupportType(SHACLObject):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ NAMED_INDIVIDUALS = {
+ "deployed": "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/deployed",
+ "development": "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/development",
+ "endOfSupport": "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/endOfSupport",
+ "limitedSupport": "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/limitedSupport",
+ "noAssertion": "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/noAssertion",
+ "noSupport": "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/noSupport",
+ "support": "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/support",
+ }
+ # in addition to being supported by the supplier, the software is known to have been deployed and is in use. For a software as a service provider, this implies the software is now available as a service.
+ deployed = "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/deployed"
+ # the artifact is in active development and is not considered ready for formal support from the supplier.
+ development = "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/development"
+ # there is a defined end of support for the artifact from the supplier. This may also be referred to as end of life. There is a validUntilDate that can be used to signal when support ends for the artifact.
+ endOfSupport = "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/endOfSupport"
+ # the artifact has been released, and there is limited support available from the supplier. There is a validUntilDate that can provide additional information about the duration of support.
+ limitedSupport = "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/limitedSupport"
+ # no assertion about the type of support is made. This is considered the default if no other support type is used.
+ noAssertion = "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/noAssertion"
+ # there is no support for the artifact from the supplier; the consumer assumes any support obligations.
+ noSupport = "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/noSupport"
+ # the artifact has been released, and is supported from the supplier. There is a validUntilDate that can provide additional information about the duration of support.
+ support = "https://spdx.org/rdf/3.0.0/terms/Core/SupportType/support"
+
+
+# An element of hardware and/or software utilized to carry out a particular function.
+@register("https://spdx.org/rdf/3.0.0/terms/Core/Tool", compact_type="Tool", abstract=False)
+class Tool(Element):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ ID_ALIAS = "spdxId"
+ NAMED_INDIVIDUALS = {
+ }
+
+
+# Categories of confidentiality level.
+@register("https://spdx.org/rdf/3.0.0/terms/Dataset/ConfidentialityLevelType", compact_type="dataset_ConfidentialityLevelType", abstract=False)
+class dataset_ConfidentialityLevelType(SHACLObject):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ NAMED_INDIVIDUALS = {
+ "amber": "https://spdx.org/rdf/3.0.0/terms/Dataset/ConfidentialityLevelType/amber",
+ "clear": "https://spdx.org/rdf/3.0.0/terms/Dataset/ConfidentialityLevelType/clear",
+ "green": "https://spdx.org/rdf/3.0.0/terms/Dataset/ConfidentialityLevelType/green",
+ "red": "https://spdx.org/rdf/3.0.0/terms/Dataset/ConfidentialityLevelType/red",
+ }
+ # Data points in the dataset can be shared only with specific
+ # organizations and their clients on a need to know basis.
+ amber = "https://spdx.org/rdf/3.0.0/terms/Dataset/ConfidentialityLevelType/amber"
+ # Dataset may be distributed freely, without restriction.
+ clear = "https://spdx.org/rdf/3.0.0/terms/Dataset/ConfidentialityLevelType/clear"
+ # Dataset can be shared within a community of peers and partners.
+ green = "https://spdx.org/rdf/3.0.0/terms/Dataset/ConfidentialityLevelType/green"
+ # Data points in the dataset are highly confidential and can only be shared
+ # with named recipients.
+ red = "https://spdx.org/rdf/3.0.0/terms/Dataset/ConfidentialityLevelType/red"
+
+
+# Availability of dataset.
+@register("https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetAvailabilityType", compact_type="dataset_DatasetAvailabilityType", abstract=False)
+class dataset_DatasetAvailabilityType(SHACLObject):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ NAMED_INDIVIDUALS = {
+ "clickthrough": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetAvailabilityType/clickthrough",
+ "directDownload": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetAvailabilityType/directDownload",
+ "query": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetAvailabilityType/query",
+ "registration": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetAvailabilityType/registration",
+ "scrapingScript": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetAvailabilityType/scrapingScript",
+ }
+ # the dataset is not publicly available and can only be accessed
+ # after affirmatively accepting terms on a clickthrough web page.
+ clickthrough = "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetAvailabilityType/clickthrough"
+ # the dataset is publicly available and can be downloaded
+ # directly.
+ directDownload = "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetAvailabilityType/directDownload"
+ # the dataset is publicly available, but not all at once, and can only
+ # be accessed through queries which return parts of the dataset.
+ query = "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetAvailabilityType/query"
+ # the dataset is not publicly available and an email registration
+ # is required before accessing the dataset, although without an
+ # affirmative acceptance of terms.
+ registration = "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetAvailabilityType/registration"
+ # the dataset provider is not making available the underlying
+ # data and the dataset must be reassembled, typically using the
+ # provided script for scraping the data.
+ scrapingScript = "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetAvailabilityType/scrapingScript"
+
+
+# Enumeration of dataset types.
+@register("https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType", compact_type="dataset_DatasetType", abstract=False)
+class dataset_DatasetType(SHACLObject):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ NAMED_INDIVIDUALS = {
+ "audio": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/audio",
+ "categorical": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/categorical",
+ "graph": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/graph",
+ "image": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/image",
+ "noAssertion": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/noAssertion",
+ "numeric": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/numeric",
+ "other": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/other",
+ "sensor": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/sensor",
+ "structured": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/structured",
+ "syntactic": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/syntactic",
+ "text": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/text",
+ "timeseries": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/timeseries",
+ "timestamp": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/timestamp",
+ "video": "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/video",
+ }
+ # data is audio based, such as a collection of music from the 80s.
+ audio = "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/audio"
+ # data that is classified into a discrete number of categories, such as the eye color of a population of people.
+ categorical = "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/categorical"
+ # data is in the form of a graph where entries are somehow related to each other through edges, such as a social network of friends.
+ graph = "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/graph"
+ # data is a collection of images such as pictures of animals.
+ image = "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/image"
+ # data type is not known.
+ noAssertion = "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/noAssertion"
+ # data consists only of numeric entries.
+ numeric = "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/numeric"
+ # data is of a type not included in this list.
+ other = "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/other"
+ # data is recorded from a physical sensor, such as a thermometer reading or biometric device.
+ sensor = "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/sensor"
+ # data is stored in tabular format or retrieved from a relational database.
+ structured = "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/structured"
+ # data describes the syntax or semantics of a language or text, such as a parse tree used for natural language processing.
+ syntactic = "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/syntactic"
+ # data consists of unstructured text, such as a book, Wikipedia article (without images), or transcript.
+ text = "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/text"
+ # data is recorded in an ordered sequence of timestamped entries, such as the price of a stock over the course of a day.
+ timeseries = "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/timeseries"
+ # data is recorded with a timestamp for each entry, but not necessarily ordered or at specific intervals, such as when a taxi ride starts and when it ends.
+ timestamp = "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/timestamp"
+ # data is video based, such as a collection of movie clips featuring Tom Hanks.
+ video = "https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/video"
+
+
+# Abstract class for additional text intended to be added to a License, but
+# which is not itself a standalone License.
+@register("https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/LicenseAddition", compact_type="expandedlicensing_LicenseAddition", abstract=True)
+class expandedlicensing_LicenseAddition(Element):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ ID_ALIAS = "spdxId"
+ NAMED_INDIVIDUALS = {
+ }
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ # Identifies the full text of a LicenseAddition.
+ cls._add_property(
+ "expandedlicensing_additionText",
+ StringProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/additionText",
+ min_count=1,
+ compact="expandedlicensing_additionText",
+ )
+ # Specifies whether an additional text identifier has been marked as deprecated.
+ cls._add_property(
+ "expandedlicensing_isDeprecatedAdditionId",
+ BooleanProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/isDeprecatedAdditionId",
+ compact="expandedlicensing_isDeprecatedAdditionId",
+ )
+ # Identifies all the text and metadata associated with a license in the license
+ # XML format.
+ cls._add_property(
+ "expandedlicensing_licenseXml",
+ StringProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/licenseXml",
+ compact="expandedlicensing_licenseXml",
+ )
+ # Specifies the licenseId that is preferred to be used in place of a deprecated
+ # License or LicenseAddition.
+ cls._add_property(
+ "expandedlicensing_obsoletedBy",
+ StringProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/obsoletedBy",
+ compact="expandedlicensing_obsoletedBy",
+ )
+ # Contains a URL where the License or LicenseAddition can be found in use.
+ cls._add_property(
+ "expandedlicensing_seeAlso",
+ ListProp(AnyURIProp()),
+ iri="https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/seeAlso",
+ compact="expandedlicensing_seeAlso",
+ )
+ # Identifies the full text of a LicenseAddition, in SPDX templating format.
+ cls._add_property(
+ "expandedlicensing_standardAdditionTemplate",
+ StringProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/standardAdditionTemplate",
+ compact="expandedlicensing_standardAdditionTemplate",
+ )
+
+
+# A license exception that is listed on the SPDX Exceptions list.
+@register("https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/ListedLicenseException", compact_type="expandedlicensing_ListedLicenseException", abstract=False)
+class expandedlicensing_ListedLicenseException(expandedlicensing_LicenseAddition):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ ID_ALIAS = "spdxId"
+ NAMED_INDIVIDUALS = {
+ }
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ # Specifies the SPDX License List version in which this license or exception
+ # identifier was deprecated.
+ cls._add_property(
+ "expandedlicensing_deprecatedVersion",
+ StringProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/deprecatedVersion",
+ compact="expandedlicensing_deprecatedVersion",
+ )
+ # Specifies the SPDX License List version in which this ListedLicense or
+ # ListedLicenseException identifier was first added.
+ cls._add_property(
+ "expandedlicensing_listVersionAdded",
+ StringProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/listVersionAdded",
+ compact="expandedlicensing_listVersionAdded",
+ )
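+
+ # Hypothetical usage sketch (assumes the generated SHACLObject base class
+ # supports plain attribute assignment for registered properties; the values
+ # shown are examples only):
+ #
+ #   exc = expandedlicensing_ListedLicenseException()
+ #   exc.expandedlicensing_additionText = "Full text of the exception ..."
+ #   exc.expandedlicensing_listVersionAdded = "3.23"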
+
+
+# A property name with an associated value.
+@register("https://spdx.org/rdf/3.0.0/terms/Extension/CdxPropertyEntry", compact_type="extension_CdxPropertyEntry", abstract=False)
+class extension_CdxPropertyEntry(SHACLObject):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ NAMED_INDIVIDUALS = {
+ }
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ # A name used in a CdxExtension name-value pair.
+ cls._add_property(
+ "extension_cdxPropName",
+ StringProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/Extension/cdxPropName",
+ min_count=1,
+ compact="extension_cdxPropName",
+ )
+ # A value used in a CdxExtension name-value pair.
+ cls._add_property(
+ "extension_cdxPropValue",
+ StringProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/Extension/cdxPropValue",
+ compact="extension_cdxPropValue",
+ )
+
+
+# A characterization of some aspect of an Element that is associated with the Element in a generalized fashion.
+@register("https://spdx.org/rdf/3.0.0/terms/Extension/Extension", compact_type="extension_Extension", abstract=True)
+class extension_Extension(SHACLExtensibleObject, SHACLObject):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ NAMED_INDIVIDUALS = {
+ }
+
+
+# Specifies the CVSS base, temporal, threat, or environmental severity type.
+@register("https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType", compact_type="security_CvssSeverityType", abstract=False)
+class security_CvssSeverityType(SHACLObject):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ NAMED_INDIVIDUALS = {
+ "critical": "https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/critical",
+ "high": "https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/high",
+ "low": "https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/low",
+ "medium": "https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/medium",
+ "none": "https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/none",
+ }
+ # When a CVSS score is between 9.0 - 10.0
+ critical = "https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/critical"
+ # When a CVSS score is between 7.0 - 8.9
+ high = "https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/high"
+ # When a CVSS score is between 0 - 3.9
+ low = "https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/low"
+ # When a CVSS score is between 4 - 6.9
+ medium = "https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/medium"
+ # When a CVSS score is 0
+ none = "https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/none"
+
+
+# Specifies the exploit catalog type.
+@register("https://spdx.org/rdf/3.0.0/terms/Security/ExploitCatalogType", compact_type="security_ExploitCatalogType", abstract=False)
+class security_ExploitCatalogType(SHACLObject):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ NAMED_INDIVIDUALS = {
+ "kev": "https://spdx.org/rdf/3.0.0/terms/Security/ExploitCatalogType/kev",
+ "other": "https://spdx.org/rdf/3.0.0/terms/Security/ExploitCatalogType/other",
+ }
+ # CISA's Known Exploited Vulnerability (KEV) Catalog
+ kev = "https://spdx.org/rdf/3.0.0/terms/Security/ExploitCatalogType/kev"
+ # Other exploit catalogs
+ other = "https://spdx.org/rdf/3.0.0/terms/Security/ExploitCatalogType/other"
+
+
+# Specifies the SSVC decision type.
+@register("https://spdx.org/rdf/3.0.0/terms/Security/SsvcDecisionType", compact_type="security_SsvcDecisionType", abstract=False)
+class security_SsvcDecisionType(SHACLObject):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ NAMED_INDIVIDUALS = {
+ "act": "https://spdx.org/rdf/3.0.0/terms/Security/SsvcDecisionType/act",
+ "attend": "https://spdx.org/rdf/3.0.0/terms/Security/SsvcDecisionType/attend",
+ "track": "https://spdx.org/rdf/3.0.0/terms/Security/SsvcDecisionType/track",
+ "trackStar": "https://spdx.org/rdf/3.0.0/terms/Security/SsvcDecisionType/trackStar",
+ }
+ # The vulnerability requires attention from the organization's internal, supervisory-level and leadership-level individuals. Necessary actions include requesting assistance or information about the vulnerability, as well as publishing a notification either internally and/or externally. Typically, internal groups would meet to determine the overall response and then execute agreed upon actions. CISA recommends remediating Act vulnerabilities as soon as possible.
+ act = "https://spdx.org/rdf/3.0.0/terms/Security/SsvcDecisionType/act"
+ # The vulnerability requires attention from the organization's internal, supervisory-level individuals. Necessary actions include requesting assistance or information about the vulnerability, and may involve publishing a notification either internally and/or externally. CISA recommends remediating Attend vulnerabilities sooner than standard update timelines.
+ attend = "https://spdx.org/rdf/3.0.0/terms/Security/SsvcDecisionType/attend"
+ # The vulnerability does not require action at this time. The organization would continue to track the vulnerability and reassess it if new information becomes available. CISA recommends remediating Track vulnerabilities within standard update timelines.
+ track = "https://spdx.org/rdf/3.0.0/terms/Security/SsvcDecisionType/track"
+ # ("Track*" in the SSVC spec) The vulnerability contains specific characteristics that may require closer monitoring for changes. CISA recommends remediating Track* vulnerabilities within standard update timelines.
+ trackStar = "https://spdx.org/rdf/3.0.0/terms/Security/SsvcDecisionType/trackStar"
+
+
+# Specifies the VEX justification type.
+@register("https://spdx.org/rdf/3.0.0/terms/Security/VexJustificationType", compact_type="security_VexJustificationType", abstract=False)
+class security_VexJustificationType(SHACLObject):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ NAMED_INDIVIDUALS = {
+ "componentNotPresent": "https://spdx.org/rdf/3.0.0/terms/Security/VexJustificationType/componentNotPresent",
+ "inlineMitigationsAlreadyExist": "https://spdx.org/rdf/3.0.0/terms/Security/VexJustificationType/inlineMitigationsAlreadyExist",
+ "vulnerableCodeCannotBeControlledByAdversary": "https://spdx.org/rdf/3.0.0/terms/Security/VexJustificationType/vulnerableCodeCannotBeControlledByAdversary",
+ "vulnerableCodeNotInExecutePath": "https://spdx.org/rdf/3.0.0/terms/Security/VexJustificationType/vulnerableCodeNotInExecutePath",
+ "vulnerableCodeNotPresent": "https://spdx.org/rdf/3.0.0/terms/Security/VexJustificationType/vulnerableCodeNotPresent",
+ }
+ # The software is not affected because the vulnerable component is not in the product.
+ componentNotPresent = "https://spdx.org/rdf/3.0.0/terms/Security/VexJustificationType/componentNotPresent"
+ # Built-in inline controls or mitigations prevent an adversary from leveraging the vulnerability.
+ inlineMitigationsAlreadyExist = "https://spdx.org/rdf/3.0.0/terms/Security/VexJustificationType/inlineMitigationsAlreadyExist"
+ # The vulnerable component is present, and the component contains the vulnerable code. However, vulnerable code is used in such a way that an attacker cannot mount any anticipated attack.
+ vulnerableCodeCannotBeControlledByAdversary = "https://spdx.org/rdf/3.0.0/terms/Security/VexJustificationType/vulnerableCodeCannotBeControlledByAdversary"
+ # The affected code is not reachable through the execution of the code, including non-anticipated states of the product.
+ vulnerableCodeNotInExecutePath = "https://spdx.org/rdf/3.0.0/terms/Security/VexJustificationType/vulnerableCodeNotInExecutePath"
+ # The product is not affected because the code underlying the vulnerability is not present in the product.
+ vulnerableCodeNotPresent = "https://spdx.org/rdf/3.0.0/terms/Security/VexJustificationType/vulnerableCodeNotPresent"
+
+
+# Abstract ancestor class for all vulnerability assessments
+@register("https://spdx.org/rdf/3.0.0/terms/Security/VulnAssessmentRelationship", compact_type="security_VulnAssessmentRelationship", abstract=True)
+class security_VulnAssessmentRelationship(Relationship):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ ID_ALIAS = "spdxId"
+ NAMED_INDIVIDUALS = {
+ }
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ # Identifies who or what supplied the artifact or VulnAssessmentRelationship
+ # referenced by the Element.
+ cls._add_property(
+ "suppliedBy",
+ ObjectProp(Agent, False),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/suppliedBy",
+ compact="suppliedBy",
+ )
+ # Specifies an Element contained in a piece of software where a vulnerability was
+ # found.
+ cls._add_property(
+ "security_assessedElement",
+ ObjectProp(Element, False),
+ iri="https://spdx.org/rdf/3.0.0/terms/Security/assessedElement",
+ compact="security_assessedElement",
+ )
+ # Specifies a time when a vulnerability assessment was modified
+ cls._add_property(
+ "security_modifiedTime",
+ DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
+ iri="https://spdx.org/rdf/3.0.0/terms/Security/modifiedTime",
+ compact="security_modifiedTime",
+ )
+ # Specifies the time when a vulnerability was published.
+ cls._add_property(
+ "security_publishedTime",
+ DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
+ iri="https://spdx.org/rdf/3.0.0/terms/Security/publishedTime",
+ compact="security_publishedTime",
+ )
+ # Specifies the time and date when a vulnerability was withdrawn.
+ cls._add_property(
+ "security_withdrawnTime",
+ DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
+ iri="https://spdx.org/rdf/3.0.0/terms/Security/withdrawnTime",
+ compact="security_withdrawnTime",
+ )
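+
+ # Note on the *Time properties above: DateTimeStampProp is declared with the
+ # pattern ^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$, i.e. second-resolution UTC
+ # timestamps such as "2024-08-15T10:30:00Z"; values with a numeric offset
+ # like "+02:00" or with fractional seconds would not match that pattern.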
+
+
+# Abstract class representing a license combination consisting of one or more
+# licenses (optionally including additional text), which may be combined
+# according to the SPDX license expression syntax.
+@register("https://spdx.org/rdf/3.0.0/terms/SimpleLicensing/AnyLicenseInfo", compact_type="simplelicensing_AnyLicenseInfo", abstract=True)
+class simplelicensing_AnyLicenseInfo(Element):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ ID_ALIAS = "spdxId"
+ NAMED_INDIVIDUALS = {
+ }
+
+
+# An SPDX Element containing an SPDX license expression string.
+@register("https://spdx.org/rdf/3.0.0/terms/SimpleLicensing/LicenseExpression", compact_type="simplelicensing_LicenseExpression", abstract=False)
+class simplelicensing_LicenseExpression(simplelicensing_AnyLicenseInfo):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ ID_ALIAS = "spdxId"
+ NAMED_INDIVIDUALS = {
+ }
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ # Maps a LicenseRef or AdditionRef string for a Custom License or a Custom
+ # License Addition to its URI ID.
+ cls._add_property(
+ "simplelicensing_customIdToUri",
+ ListProp(ObjectProp(DictionaryEntry, False)),
+ iri="https://spdx.org/rdf/3.0.0/terms/SimpleLicensing/customIdToUri",
+ compact="simplelicensing_customIdToUri",
+ )
+ # A string in the license expression format.
+ cls._add_property(
+ "simplelicensing_licenseExpression",
+ StringProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/SimpleLicensing/licenseExpression",
+ min_count=1,
+ compact="simplelicensing_licenseExpression",
+ )
+ # The version of the SPDX License List used in the license expression.
+ cls._add_property(
+ "simplelicensing_licenseListVersion",
+ StringProp(pattern=r"^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$",),
+ iri="https://spdx.org/rdf/3.0.0/terms/SimpleLicensing/licenseListVersion",
+ compact="simplelicensing_licenseListVersion",
+ )
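+
+ # Hypothetical usage sketch (attribute assignment on the generated class is
+ # assumed; the expression and list version are example values):
+ #
+ #   lic = simplelicensing_LicenseExpression()
+ #   lic.simplelicensing_licenseExpression = "MIT AND Apache-2.0"
+ #   lic.simplelicensing_licenseListVersion = "3.23.0"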
+
+
+# A license or addition that is not listed on the SPDX License List.
+@register("https://spdx.org/rdf/3.0.0/terms/SimpleLicensing/SimpleLicensingText", compact_type="simplelicensing_SimpleLicensingText", abstract=False)
+class simplelicensing_SimpleLicensingText(Element):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ ID_ALIAS = "spdxId"
+ NAMED_INDIVIDUALS = {
+ }
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ # Identifies the full text of a License or Addition.
+ cls._add_property(
+ "simplelicensing_licenseText",
+ StringProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/SimpleLicensing/licenseText",
+ min_count=1,
+ compact="simplelicensing_licenseText",
+ )
+
+
+# A canonical, unique, immutable identifier
+@register("https://spdx.org/rdf/3.0.0/terms/Software/ContentIdentifier", compact_type="software_ContentIdentifier", abstract=False)
+class software_ContentIdentifier(IntegrityMethod):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ NAMED_INDIVIDUALS = {
+ }
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ # Specifies the type of the content identifier.
+ cls._add_property(
+ "software_contentIdentifierType",
+ EnumProp([
+ ("https://spdx.org/rdf/3.0.0/terms/Software/ContentIdentifierType/gitoid", "gitoid"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/ContentIdentifierType/swhid", "swhid"),
+ ]),
+ iri="https://spdx.org/rdf/3.0.0/terms/Software/contentIdentifierType",
+ min_count=1,
+ compact="software_contentIdentifierType",
+ )
+ # Specifies the value of the content identifier.
+ cls._add_property(
+ "software_contentIdentifierValue",
+ AnyURIProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/Software/contentIdentifierValue",
+ min_count=1,
+ compact="software_contentIdentifierValue",
+ )
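+
+ # Hypothetical usage sketch (attribute assignment assumed; the SWHID shown is
+ # the example value from the swhid description below):
+ #
+ #   cid = software_ContentIdentifier()
+ #   cid.software_contentIdentifierType = "https://spdx.org/rdf/3.0.0/terms/Software/ContentIdentifierType/swhid"
+ #   cid.software_contentIdentifierValue = "swh:1:cnt:94a9ed024d3859793618152ea559a168bbcbb5e2"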
+
+
+# Specifies the type of a content identifier.
+@register("https://spdx.org/rdf/3.0.0/terms/Software/ContentIdentifierType", compact_type="software_ContentIdentifierType", abstract=False)
+class software_ContentIdentifierType(SHACLObject):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ NAMED_INDIVIDUALS = {
+ "gitoid": "https://spdx.org/rdf/3.0.0/terms/Software/ContentIdentifierType/gitoid",
+ "swhid": "https://spdx.org/rdf/3.0.0/terms/Software/ContentIdentifierType/swhid",
+ }
+ # Gitoid stands for [Git Object ID](https://git-scm.com/book/en/v2/Git-Internals-Git-Objects) and a gitoid of type blob is a unique hash of a binary artifact. A gitoid may represent the software [Artifact ID](https://github.com/omnibor/spec/blob/main/spec/SPEC.md#artifact-id) or the [OmniBOR Identifier](https://github.com/omnibor/spec/blob/main/spec/SPEC.md#omnibor-identifier) for the software artifact's associated [OmniBOR Document](https://github.com/omnibor/spec/blob/main/spec/SPEC.md#omnibor-document).
+ gitoid = "https://spdx.org/rdf/3.0.0/terms/Software/ContentIdentifierType/gitoid"
+ # SoftWare Hash IDentifier, persistent intrinsic identifiers for digital artifacts. The syntax of the identifiers is defined in the [SWHID specification](https://www.swhid.org/specification/v1.1/4.Syntax) and in the case of files they typically look like `swh:1:cnt:94a9ed024d3859793618152ea559a168bbcbb5e2`.
+ swhid = "https://spdx.org/rdf/3.0.0/terms/Software/ContentIdentifierType/swhid"
+
+
+# Enumeration of the different kinds of SPDX file.
+@register("https://spdx.org/rdf/3.0.0/terms/Software/FileKindType", compact_type="software_FileKindType", abstract=False)
+class software_FileKindType(SHACLObject):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ NAMED_INDIVIDUALS = {
+ "directory": "https://spdx.org/rdf/3.0.0/terms/Software/FileKindType/directory",
+ "file": "https://spdx.org/rdf/3.0.0/terms/Software/FileKindType/file",
+ }
+ # The file represents a directory and all content stored in that directory.
+ directory = "https://spdx.org/rdf/3.0.0/terms/Software/FileKindType/directory"
+ # The file represents a single file (default).
+ file = "https://spdx.org/rdf/3.0.0/terms/Software/FileKindType/file"
+
+
+# Provides a set of values to be used to describe the common types of SBOMs that
+# tools may create.
+@register("https://spdx.org/rdf/3.0.0/terms/Software/SbomType", compact_type="software_SbomType", abstract=False)
+class software_SbomType(SHACLObject):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ NAMED_INDIVIDUALS = {
+ "analyzed": "https://spdx.org/rdf/3.0.0/terms/Software/SbomType/analyzed",
+ "build": "https://spdx.org/rdf/3.0.0/terms/Software/SbomType/build",
+ "deployed": "https://spdx.org/rdf/3.0.0/terms/Software/SbomType/deployed",
+ "design": "https://spdx.org/rdf/3.0.0/terms/Software/SbomType/design",
+ "runtime": "https://spdx.org/rdf/3.0.0/terms/Software/SbomType/runtime",
+ "source": "https://spdx.org/rdf/3.0.0/terms/Software/SbomType/source",
+ }
+ # SBOM generated through analysis of artifacts (e.g., executables, packages, containers, and virtual machine images) after its build. Such analysis generally requires a variety of heuristics. In some contexts, this may also be referred to as a "3rd party" SBOM.
+ analyzed = "https://spdx.org/rdf/3.0.0/terms/Software/SbomType/analyzed"
+ # SBOM generated as part of the process of building the software to create a releasable artifact (e.g., executable or package) from data such as source files, dependencies, built components, build process ephemeral data, and other SBOMs.
+ build = "https://spdx.org/rdf/3.0.0/terms/Software/SbomType/build"
+ # SBOM provides an inventory of software that is present on a system. This may be an assembly of other SBOMs that combines analysis of configuration options, and examination of execution behavior in a (potentially simulated) deployment environment.
+ deployed = "https://spdx.org/rdf/3.0.0/terms/Software/SbomType/deployed"
+ # SBOM of intended, planned software project or product with included components (some of which may not yet exist) for a new software artifact.
+ design = "https://spdx.org/rdf/3.0.0/terms/Software/SbomType/design"
+ # SBOM generated through instrumenting the system running the software, to capture only components present in the system, as well as external call-outs or dynamically loaded components. In some contexts, this may also be referred to as an "Instrumented" or "Dynamic" SBOM.
+ runtime = "https://spdx.org/rdf/3.0.0/terms/Software/SbomType/runtime"
+ # SBOM created directly from the development environment, source files, and included dependencies used to build a product artifact.
+ source = "https://spdx.org/rdf/3.0.0/terms/Software/SbomType/source"
+
+
+# Provides information about the primary purpose of an Element.
+@register("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose", compact_type="software_SoftwarePurpose", abstract=False)
+class software_SoftwarePurpose(SHACLObject):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ NAMED_INDIVIDUALS = {
+ "application": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/application",
+ "archive": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/archive",
+ "bom": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/bom",
+ "configuration": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/configuration",
+ "container": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/container",
+ "data": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/data",
+ "device": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/device",
+ "deviceDriver": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/deviceDriver",
+ "diskImage": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/diskImage",
+ "documentation": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/documentation",
+ "evidence": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/evidence",
+ "executable": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/executable",
+ "file": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/file",
+ "filesystemImage": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/filesystemImage",
+ "firmware": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/firmware",
+ "framework": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/framework",
+ "install": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/install",
+ "library": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/library",
+ "manifest": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/manifest",
+ "model": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/model",
+ "module": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/module",
+ "operatingSystem": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/operatingSystem",
+ "other": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/other",
+ "patch": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/patch",
+ "platform": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/platform",
+ "requirement": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/requirement",
+ "source": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/source",
+ "specification": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/specification",
+ "test": "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/test",
+ }
+ # the Element is a software application
+ application = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/application"
+ # the Element is an archived collection of one or more files (.tar, .zip, etc)
+ archive = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/archive"
+ # Element is a bill of materials
+ bom = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/bom"
+ # Element is configuration data
+ configuration = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/configuration"
+ # the Element is a container image which can be used by a container runtime application
+ container = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/container"
+ # Element is data
+ data = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/data"
+ # the Element refers to a chipset, processor, or electronic board
+ device = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/device"
+ # Element represents software that controls hardware devices
+ deviceDriver = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/deviceDriver"
+ # the Element refers to a disk image that can be written to a disk, booted in a VM, etc. A disk image typically contains most or all of the components necessary to boot, such as bootloaders, kernels, firmware, userspace, etc.
+ diskImage = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/diskImage"
+ # Element is documentation
+ documentation = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/documentation"
+ # the Element is the evidence that a specification or requirement has been fulfilled
+ evidence = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/evidence"
+ # Element is an Artifact that can be run on a computer
+ executable = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/executable"
+ # the Element is a single file which can be independently distributed (configuration file, statically linked binary, Kubernetes deployment, etc)
+ file = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/file"
+ # the Element is a file system image that can be written to a disk (or virtual) partition
+ filesystemImage = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/filesystemImage"
+ # the Element provides low level control over a device's hardware
+ firmware = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/firmware"
+ # the Element is a software framework
+ framework = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/framework"
+ # the Element is used to install software on disk
+ install = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/install"
+ # the Element is a software library
+ library = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/library"
+ # the Element is a software manifest
+ manifest = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/manifest"
+ # the Element is a machine learning or artificial intelligence model
+ model = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/model"
+ # the Element is a module of a piece of software
+ module = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/module"
+ # the Element is an operating system
+ operatingSystem = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/operatingSystem"
+ # the Element doesn't fit into any of the other categories
+ other = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/other"
+ # Element contains a set of changes to update, fix, or improve another Element
+ patch = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/patch"
+ # Element represents a runtime environment
+ platform = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/platform"
+ # the Element provides a requirement needed as input for another Element
+ requirement = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/requirement"
+ # the Element is a single or a collection of source files
+ source = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/source"
+ # the Element is a plan, guideline or strategy describing how to create, perform or analyse an application
+ specification = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/specification"
+ # The Element is a test used to verify functionality on a software element
+ test = "https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/test"
+
+
+# Class that describes a build instance of software/artifacts.
+@register("https://spdx.org/rdf/3.0.0/terms/Build/Build", compact_type="build_Build", abstract=False)
+class build_Build(Element):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ ID_ALIAS = "spdxId"
+ NAMED_INDIVIDUALS = {
+ }
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ # Property that describes the time at which a build stops.
+ cls._add_property(
+ "build_buildEndTime",
+ DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
+ iri="https://spdx.org/rdf/3.0.0/terms/Build/buildEndTime",
+ compact="build_buildEndTime",
+ )
+ # A buildId is a locally unique identifier used by a builder to identify a unique
+ # instance of a build produced by it.
+ cls._add_property(
+ "build_buildId",
+ StringProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/Build/buildId",
+ compact="build_buildId",
+ )
+ # Property describing the start time of a build.
+ cls._add_property(
+ "build_buildStartTime",
+ DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
+ iri="https://spdx.org/rdf/3.0.0/terms/Build/buildStartTime",
+ compact="build_buildStartTime",
+ )
+ # A buildType is a hint that is used to indicate the toolchain, platform, or
+ # infrastructure that the build was invoked on.
+ cls._add_property(
+ "build_buildType",
+ AnyURIProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/Build/buildType",
+ min_count=1,
+ compact="build_buildType",
+ )
+ # Property that describes the digest of the build configuration file used to
+ # invoke a build.
+ cls._add_property(
+ "build_configSourceDigest",
+ ListProp(ObjectProp(Hash, False)),
+ iri="https://spdx.org/rdf/3.0.0/terms/Build/configSourceDigest",
+ compact="build_configSourceDigest",
+ )
+ # Property describes the invocation entrypoint of a build.
+ cls._add_property(
+ "build_configSourceEntrypoint",
+ ListProp(StringProp()),
+ iri="https://spdx.org/rdf/3.0.0/terms/Build/configSourceEntrypoint",
+ compact="build_configSourceEntrypoint",
+ )
+ # Property that describes the URI of the build configuration source file.
+ cls._add_property(
+ "build_configSourceUri",
+ ListProp(AnyURIProp()),
+ iri="https://spdx.org/rdf/3.0.0/terms/Build/configSourceUri",
+ compact="build_configSourceUri",
+ )
+ # Property describing the session in which a build is invoked.
+ cls._add_property(
+ "build_environment",
+ ListProp(ObjectProp(DictionaryEntry, False)),
+ iri="https://spdx.org/rdf/3.0.0/terms/Build/environment",
+ compact="build_environment",
+ )
+ # Property describing the parameters used in an instance of a build.
+ cls._add_property(
+ "build_parameters",
+ ListProp(ObjectProp(DictionaryEntry, False)),
+ iri="https://spdx.org/rdf/3.0.0/terms/Build/parameters",
+ compact="build_parameters",
+ )
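+
+ # Hypothetical usage sketch (attribute assignment assumed; the buildType URI
+ # is an invented placeholder, not a value defined by SPDX):
+ #
+ #   b = build_Build()
+ #   b.build_buildType = "https://example.org/build-types/bitbake"
+ #   b.build_buildId = "build-2024-08-15-001"
+ #   b.build_buildStartTime = "2024-08-15T10:00:00Z"
+ #   b.build_buildEndTime = "2024-08-15T10:30:00Z"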
+
+
+# Agent represents anything with the potential to act on a system.
+@register("https://spdx.org/rdf/3.0.0/terms/Core/Agent", compact_type="Agent", abstract=False)
+class Agent(Element):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ ID_ALIAS = "spdxId"
+ NAMED_INDIVIDUALS = {
+ }
+
+
+# An assertion made in relation to one or more elements.
+@register("https://spdx.org/rdf/3.0.0/terms/Core/Annotation", compact_type="Annotation", abstract=False)
+class Annotation(Element):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ ID_ALIAS = "spdxId"
+ NAMED_INDIVIDUALS = {
+ }
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ # Describes the type of annotation.
+ cls._add_property(
+ "annotationType",
+ EnumProp([
+ ("https://spdx.org/rdf/3.0.0/terms/Core/AnnotationType/other", "other"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/AnnotationType/review", "review"),
+ ]),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/annotationType",
+ min_count=1,
+ compact="annotationType",
+ )
+ # Specifies the media type of an Element or Property.
+ cls._add_property(
+ "contentType",
+ StringProp(pattern=r"^[^\/]+\/[^\/]+$",),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/contentType",
+ compact="contentType",
+ )
+ # Commentary on an assertion that an annotator has made.
+ cls._add_property(
+ "statement",
+ StringProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/statement",
+ compact="statement",
+ )
+ # An Element an annotator has made an assertion about.
+ cls._add_property(
+ "subject",
+ ObjectProp(Element, True),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/subject",
+ min_count=1,
+ compact="subject",
+ )
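+
+ # Hypothetical usage sketch (attribute assignment assumed; "some_element"
+ # stands for any previously created Element instance):
+ #
+ #   ann = Annotation()
+ #   ann.annotationType = "https://spdx.org/rdf/3.0.0/terms/Core/AnnotationType/review"
+ #   ann.statement = "Reviewed and approved for release"
+ #   ann.subject = some_element  # required (min_count=1)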
+
+
+# A distinct article or unit within the digital domain.
+@register("https://spdx.org/rdf/3.0.0/terms/Core/Artifact", compact_type="Artifact", abstract=True)
+class Artifact(Element):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ ID_ALIAS = "spdxId"
+ NAMED_INDIVIDUALS = {
+ }
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ # Specifies the time an artifact was built.
+ cls._add_property(
+ "builtTime",
+ DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/builtTime",
+ compact="builtTime",
+ )
+ # Identifies from where or whom the Element originally came.
+ cls._add_property(
+ "originatedBy",
+ ListProp(ObjectProp(Agent, False)),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/originatedBy",
+ compact="originatedBy",
+ )
+ # Specifies the time an artifact was released.
+ cls._add_property(
+ "releaseTime",
+ DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/releaseTime",
+ compact="releaseTime",
+ )
+ # The name of a relevant standard that may apply to an artifact.
+ cls._add_property(
+ "standardName",
+ ListProp(StringProp()),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/standardName",
+ compact="standardName",
+ )
+ # Identifies who or what supplied the artifact or VulnAssessmentRelationship
+ # referenced by the Element.
+ cls._add_property(
+ "suppliedBy",
+ ObjectProp(Agent, False),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/suppliedBy",
+ compact="suppliedBy",
+ )
+ # Specifies the level of support associated with an artifact.
+ cls._add_property(
+ "supportLevel",
+ ListProp(EnumProp([
+ ("https://spdx.org/rdf/3.0.0/terms/Core/SupportType/deployed", "deployed"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/SupportType/development", "development"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/SupportType/endOfSupport", "endOfSupport"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/SupportType/limitedSupport", "limitedSupport"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/SupportType/noAssertion", "noAssertion"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/SupportType/noSupport", "noSupport"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/SupportType/support", "support"),
+ ])),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/supportLevel",
+ compact="supportLevel",
+ )
+ # Specifies until when the artifact can be used before its usage needs to be
+ # reassessed.
+ cls._add_property(
+ "validUntilTime",
+ DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/validUntilTime",
+ compact="validUntilTime",
+ )
+
+
+# A collection of Elements that have a shared context.
+@register("https://spdx.org/rdf/3.0.0/terms/Core/Bundle", compact_type="Bundle", abstract=False)
+class Bundle(ElementCollection):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ ID_ALIAS = "spdxId"
+ NAMED_INDIVIDUALS = {
+ }
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ # Gives information about the circumstances or unifying properties
+ # that Elements of the bundle have been assembled under.
+ cls._add_property(
+ "context",
+ StringProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/context",
+ compact="context",
+ )
+
+
+# A mathematically calculated representation of a grouping of data.
+@register("https://spdx.org/rdf/3.0.0/terms/Core/Hash", compact_type="Hash", abstract=False)
+class Hash(IntegrityMethod):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ NAMED_INDIVIDUALS = {
+ }
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ # Specifies the algorithm used for calculating the hash value.
+ cls._add_property(
+ "algorithm",
+ EnumProp([
+ ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/blake2b256", "blake2b256"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/blake2b384", "blake2b384"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/blake2b512", "blake2b512"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/blake3", "blake3"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/crystalsDilithium", "crystalsDilithium"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/crystalsKyber", "crystalsKyber"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/falcon", "falcon"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/md2", "md2"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/md4", "md4"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/md5", "md5"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/md6", "md6"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/other", "other"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha1", "sha1"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha224", "sha224"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha256", "sha256"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha384", "sha384"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha3_224", "sha3_224"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha3_256", "sha3_256"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha3_384", "sha3_384"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha3_512", "sha3_512"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/HashAlgorithm/sha512", "sha512"),
+ ]),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/algorithm",
+ min_count=1,
+ compact="algorithm",
+ )
+ # The result of applying a hash algorithm to an Element.
+ cls._add_property(
+ "hashValue",
+ StringProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/hashValue",
+ min_count=1,
+ compact="hashValue",
+ )
+
+
+# Provide context for a relationship that occurs in the lifecycle.
+@register("https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopedRelationship", compact_type="LifecycleScopedRelationship", abstract=False)
+class LifecycleScopedRelationship(Relationship):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ ID_ALIAS = "spdxId"
+ NAMED_INDIVIDUALS = {
+ }
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ # Capture the scope of information about a specific relationship between elements.
+ cls._add_property(
+ "scope",
+ EnumProp([
+ ("https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/build", "build"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/design", "design"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/development", "development"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/other", "other"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/runtime", "runtime"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/LifecycleScopeType/test", "test"),
+ ]),
+ iri="https://spdx.org/rdf/3.0.0/terms/Core/scope",
+ compact="scope",
+ )
+
+
+# A group of people who work together in an organized way for a shared purpose.
+@register("https://spdx.org/rdf/3.0.0/terms/Core/Organization", compact_type="Organization", abstract=False)
+class Organization(Agent):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ ID_ALIAS = "spdxId"
+ NAMED_INDIVIDUALS = {
+ }
+
+
+# An individual human being.
+@register("https://spdx.org/rdf/3.0.0/terms/Core/Person", compact_type="Person", abstract=False)
+class Person(Agent):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ ID_ALIAS = "spdxId"
+ NAMED_INDIVIDUALS = {
+ }
+
+
+# A software agent.
+@register("https://spdx.org/rdf/3.0.0/terms/Core/SoftwareAgent", compact_type="SoftwareAgent", abstract=False)
+class SoftwareAgent(Agent):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ ID_ALIAS = "spdxId"
+ NAMED_INDIVIDUALS = {
+ }
+
+
+# Portion of an AnyLicenseInfo representing a set of licensing information
+# where all elements apply.
+@register("https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/ConjunctiveLicenseSet", compact_type="expandedlicensing_ConjunctiveLicenseSet", abstract=False)
+class expandedlicensing_ConjunctiveLicenseSet(simplelicensing_AnyLicenseInfo):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ ID_ALIAS = "spdxId"
+ NAMED_INDIVIDUALS = {
+ }
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ # A license expression participating in a license set.
+ cls._add_property(
+ "expandedlicensing_member",
+ ListProp(ObjectProp(simplelicensing_AnyLicenseInfo, False)),
+ iri="https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/member",
+ min_count=2,
+ compact="expandedlicensing_member",
+ )
+
+
+# A license addition that is not listed on the SPDX Exceptions List.
+@register("https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/CustomLicenseAddition", compact_type="expandedlicensing_CustomLicenseAddition", abstract=False)
+class expandedlicensing_CustomLicenseAddition(expandedlicensing_LicenseAddition):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ ID_ALIAS = "spdxId"
+ NAMED_INDIVIDUALS = {
+ }
+
+
+# Portion of an AnyLicenseInfo representing a set of licensing information where
+# only one of the elements applies.
+@register("https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/DisjunctiveLicenseSet", compact_type="expandedlicensing_DisjunctiveLicenseSet", abstract=False)
+class expandedlicensing_DisjunctiveLicenseSet(simplelicensing_AnyLicenseInfo):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ ID_ALIAS = "spdxId"
+ NAMED_INDIVIDUALS = {
+ }
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ # A license expression participating in a license set.
+ cls._add_property(
+ "expandedlicensing_member",
+ ListProp(ObjectProp(simplelicensing_AnyLicenseInfo, False)),
+ iri="https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/member",
+ min_count=2,
+ compact="expandedlicensing_member",
+ )
+
+
+# Abstract class representing a License or an OrLaterOperator.
+@register("https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/ExtendableLicense", compact_type="expandedlicensing_ExtendableLicense", abstract=True)
+class expandedlicensing_ExtendableLicense(simplelicensing_AnyLicenseInfo):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ ID_ALIAS = "spdxId"
+ NAMED_INDIVIDUALS = {
+ }
+
+
+# A concrete subclass of AnyLicenseInfo used by Individuals in the
+# ExpandedLicensing profile.
+@register("https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/IndividualLicensingInfo", compact_type="expandedlicensing_IndividualLicensingInfo", abstract=False)
+class expandedlicensing_IndividualLicensingInfo(simplelicensing_AnyLicenseInfo):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ ID_ALIAS = "spdxId"
+ NAMED_INDIVIDUALS = {
+ "NoAssertionLicense": "https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/NoAssertionLicense",
+ "NoneLicense": "https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/NoneLicense",
+ }
+ # An Individual Value for License when no assertion can be made about its actual
+ # value.
+ NoAssertionLicense = "https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/NoAssertionLicense"
+ # An Individual Value for License where the SPDX data creator determines that no
+ # license is present.
+ NoneLicense = "https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/NoneLicense"
+
+
+# Abstract class for the portion of an AnyLicenseInfo representing a license.
+@register("https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/License", compact_type="expandedlicensing_License", abstract=True)
+class expandedlicensing_License(expandedlicensing_ExtendableLicense):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ ID_ALIAS = "spdxId"
+ NAMED_INDIVIDUALS = {
+ }
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ # Specifies whether a license or additional text identifier has been marked as
+ # deprecated.
+ cls._add_property(
+ "expandedlicensing_isDeprecatedLicenseId",
+ BooleanProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/isDeprecatedLicenseId",
+ compact="expandedlicensing_isDeprecatedLicenseId",
+ )
+ # Specifies whether the License is listed as free by the
+ # [Free Software Foundation (FSF)](https://fsf.org).
+ cls._add_property(
+ "expandedlicensing_isFsfLibre",
+ BooleanProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/isFsfLibre",
+ compact="expandedlicensing_isFsfLibre",
+ )
+ # Specifies whether the License is listed as approved by the
+ # [Open Source Initiative (OSI)](https://opensource.org).
+ cls._add_property(
+ "expandedlicensing_isOsiApproved",
+ BooleanProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/isOsiApproved",
+ compact="expandedlicensing_isOsiApproved",
+ )
+ # Identifies all the text and metadata associated with a license in the license
+ # XML format.
+ cls._add_property(
+ "expandedlicensing_licenseXml",
+ StringProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/licenseXml",
+ compact="expandedlicensing_licenseXml",
+ )
+ # Specifies the licenseId that is preferred to be used in place of a deprecated
+ # License or LicenseAddition.
+ cls._add_property(
+ "expandedlicensing_obsoletedBy",
+ StringProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/obsoletedBy",
+ compact="expandedlicensing_obsoletedBy",
+ )
+ # Contains a URL where the License or LicenseAddition can be found in use.
+ cls._add_property(
+ "expandedlicensing_seeAlso",
+ ListProp(AnyURIProp()),
+ iri="https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/seeAlso",
+ compact="expandedlicensing_seeAlso",
+ )
+ # Provides a License author's preferred text to indicate that a file is covered
+ # by the License.
+ cls._add_property(
+ "expandedlicensing_standardLicenseHeader",
+ StringProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/standardLicenseHeader",
+ compact="expandedlicensing_standardLicenseHeader",
+ )
+ # Identifies the full text of a License, in SPDX templating format.
+ cls._add_property(
+ "expandedlicensing_standardLicenseTemplate",
+ StringProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/standardLicenseTemplate",
+ compact="expandedlicensing_standardLicenseTemplate",
+ )
+ # Identifies the full text of a License or Addition.
+ cls._add_property(
+ "simplelicensing_licenseText",
+ StringProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/SimpleLicensing/licenseText",
+ min_count=1,
+ compact="simplelicensing_licenseText",
+ )
+
+
+# A license that is listed on the SPDX License List.
+@register("https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/ListedLicense", compact_type="expandedlicensing_ListedLicense", abstract=False)
+class expandedlicensing_ListedLicense(expandedlicensing_License):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ ID_ALIAS = "spdxId"
+ NAMED_INDIVIDUALS = {
+ }
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ # Specifies the SPDX License List version in which this license or exception
+ # identifier was deprecated.
+ cls._add_property(
+ "expandedlicensing_deprecatedVersion",
+ StringProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/deprecatedVersion",
+ compact="expandedlicensing_deprecatedVersion",
+ )
+ # Specifies the SPDX License List version in which this ListedLicense or
+ # ListedLicenseException identifier was first added.
+ cls._add_property(
+ "expandedlicensing_listVersionAdded",
+ StringProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/listVersionAdded",
+ compact="expandedlicensing_listVersionAdded",
+ )
+
+
+# Portion of an AnyLicenseInfo representing this version, or any later version,
+# of the indicated License.
+@register("https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/OrLaterOperator", compact_type="expandedlicensing_OrLaterOperator", abstract=False)
+class expandedlicensing_OrLaterOperator(expandedlicensing_ExtendableLicense):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ ID_ALIAS = "spdxId"
+ NAMED_INDIVIDUALS = {
+ }
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ # A License participating in an 'or later' model.
+ cls._add_property(
+ "expandedlicensing_subjectLicense",
+ ObjectProp(expandedlicensing_License, True),
+ iri="https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/subjectLicense",
+ min_count=1,
+ compact="expandedlicensing_subjectLicense",
+ )
+
+
+# Portion of an AnyLicenseInfo representing a License which has additional
+# text applied to it.
+@register("https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/WithAdditionOperator", compact_type="expandedlicensing_WithAdditionOperator", abstract=False)
+class expandedlicensing_WithAdditionOperator(simplelicensing_AnyLicenseInfo):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ ID_ALIAS = "spdxId"
+ NAMED_INDIVIDUALS = {
+ }
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ # A LicenseAddition participating in a 'with addition' model.
+ cls._add_property(
+ "expandedlicensing_subjectAddition",
+ ObjectProp(expandedlicensing_LicenseAddition, True),
+ iri="https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/subjectAddition",
+ min_count=1,
+ compact="expandedlicensing_subjectAddition",
+ )
+ # A License participating in a 'with addition' model.
+ cls._add_property(
+ "expandedlicensing_subjectExtendableLicense",
+ ObjectProp(expandedlicensing_ExtendableLicense, True),
+ iri="https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/subjectExtendableLicense",
+ min_count=1,
+ compact="expandedlicensing_subjectExtendableLicense",
+ )
+
+
+# A type of extension consisting of a list of name value pairs.
+@register("https://spdx.org/rdf/3.0.0/terms/Extension/CdxPropertiesExtension", compact_type="extension_CdxPropertiesExtension", abstract=False)
+class extension_CdxPropertiesExtension(extension_Extension):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ NAMED_INDIVIDUALS = {
+ }
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ # Provides a map of property names to values.
+ cls._add_property(
+ "extension_cdxProperty",
+ ListProp(ObjectProp(extension_CdxPropertyEntry, False)),
+ iri="https://spdx.org/rdf/3.0.0/terms/Extension/cdxProperty",
+ min_count=1,
+ compact="extension_cdxProperty",
+ )
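+
+ # Hypothetical usage sketch pairing CdxPropertyEntry with this extension
+ # (attribute and list assignment are assumed; names and values are examples):
+ #
+ #   entry = extension_CdxPropertyEntry()
+ #   entry.extension_cdxPropName = "example:channel"
+ #   entry.extension_cdxPropValue = "stable"
+ #   ext = extension_CdxPropertiesExtension()
+ #   ext.extension_cdxProperty = [entry]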
+
+
+# Provides a CVSS version 2.0 assessment for a vulnerability.
+@register("https://spdx.org/rdf/3.0.0/terms/Security/CvssV2VulnAssessmentRelationship", compact_type="security_CvssV2VulnAssessmentRelationship", abstract=False)
+class security_CvssV2VulnAssessmentRelationship(security_VulnAssessmentRelationship):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ ID_ALIAS = "spdxId"
+ NAMED_INDIVIDUALS = {
+ }
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ # Provides a numerical (0-10) representation of the severity of a vulnerability.
+ cls._add_property(
+ "security_score",
+ FloatProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/Security/score",
+ min_count=1,
+ compact="security_score",
+ )
+ # Specifies the CVSS vector string for a vulnerability.
+ cls._add_property(
+ "security_vectorString",
+ StringProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/Security/vectorString",
+ min_count=1,
+ compact="security_vectorString",
+ )
+
+
+# Provides a CVSS version 3 assessment for a vulnerability.
+@register("https://spdx.org/rdf/3.0.0/terms/Security/CvssV3VulnAssessmentRelationship", compact_type="security_CvssV3VulnAssessmentRelationship", abstract=False)
+class security_CvssV3VulnAssessmentRelationship(security_VulnAssessmentRelationship):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ ID_ALIAS = "spdxId"
+ NAMED_INDIVIDUALS = {
+ }
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ # Provides a numerical (0-10) representation of the severity of a vulnerability.
+ cls._add_property(
+ "security_score",
+ FloatProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/Security/score",
+ min_count=1,
+ compact="security_score",
+ )
+ # Specifies the CVSS qualitative severity rating of a vulnerability in relation to a piece of software.
+ cls._add_property(
+ "security_severity",
+ EnumProp([
+ ("https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/critical", "critical"),
+ ("https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/high", "high"),
+ ("https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/low", "low"),
+ ("https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/medium", "medium"),
+ ("https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/none", "none"),
+ ]),
+ iri="https://spdx.org/rdf/3.0.0/terms/Security/severity",
+ min_count=1,
+ compact="security_severity",
+ )
+ # Specifies the CVSS vector string for a vulnerability.
+ cls._add_property(
+ "security_vectorString",
+ StringProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/Security/vectorString",
+ min_count=1,
+ compact="security_vectorString",
+ )
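+
+ # Hypothetical usage sketch (attribute assignment assumed; the vector string
+ # is an example CVSS v3.1 vector, not tied to any real CVE):
+ #
+ #   a = security_CvssV3VulnAssessmentRelationship()
+ #   a.security_score = 7.5
+ #   a.security_severity = "https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/high"
+ #   a.security_vectorString = "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:N/A:N"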
+
+
+# Provides a CVSS version 4 assessment for a vulnerability.
+@register("https://spdx.org/rdf/3.0.0/terms/Security/CvssV4VulnAssessmentRelationship", compact_type="security_CvssV4VulnAssessmentRelationship", abstract=False)
+class security_CvssV4VulnAssessmentRelationship(security_VulnAssessmentRelationship):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ ID_ALIAS = "spdxId"
+ NAMED_INDIVIDUALS = {
+ }
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ # Provides a numerical (0-10) representation of the severity of a vulnerability.
+ cls._add_property(
+ "security_score",
+ FloatProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/Security/score",
+ min_count=1,
+ compact="security_score",
+ )
+ # Specifies the CVSS qualitative severity rating of a vulnerability in relation to a piece of software.
+ cls._add_property(
+ "security_severity",
+ EnumProp([
+ ("https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/critical", "critical"),
+ ("https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/high", "high"),
+ ("https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/low", "low"),
+ ("https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/medium", "medium"),
+ ("https://spdx.org/rdf/3.0.0/terms/Security/CvssSeverityType/none", "none"),
+ ]),
+ iri="https://spdx.org/rdf/3.0.0/terms/Security/severity",
+ min_count=1,
+ compact="security_severity",
+ )
+ # Specifies the CVSS vector string for a vulnerability.
+ cls._add_property(
+ "security_vectorString",
+ StringProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/Security/vectorString",
+ min_count=1,
+ compact="security_vectorString",
+ )
+
+
+# Provides an EPSS assessment for a vulnerability.
+@register("https://spdx.org/rdf/3.0.0/terms/Security/EpssVulnAssessmentRelationship", compact_type="security_EpssVulnAssessmentRelationship", abstract=False)
+class security_EpssVulnAssessmentRelationship(security_VulnAssessmentRelationship):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ ID_ALIAS = "spdxId"
+ NAMED_INDIVIDUALS = {
+ }
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ # The percentile of the current probability score.
+ cls._add_property(
+ "security_percentile",
+ FloatProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/Security/percentile",
+ min_count=1,
+ compact="security_percentile",
+ )
+ # A probability score between 0 and 1 of a vulnerability being exploited.
+ cls._add_property(
+ "security_probability",
+ FloatProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/Security/probability",
+ min_count=1,
+ compact="security_probability",
+ )
+ # Specifies the time when a vulnerability was published.
+ cls._add_property(
+ "security_publishedTime",
+ DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
+ iri="https://spdx.org/rdf/3.0.0/terms/Security/publishedTime",
+ min_count=1,
+ compact="security_publishedTime",
+ )
+
+
+# Provides an exploit assessment of a vulnerability.
+@register("https://spdx.org/rdf/3.0.0/terms/Security/ExploitCatalogVulnAssessmentRelationship", compact_type="security_ExploitCatalogVulnAssessmentRelationship", abstract=False)
+class security_ExploitCatalogVulnAssessmentRelationship(security_VulnAssessmentRelationship):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ ID_ALIAS = "spdxId"
+ NAMED_INDIVIDUALS = {
+ }
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ # Specifies the exploit catalog type.
+ cls._add_property(
+ "security_catalogType",
+ EnumProp([
+ ("https://spdx.org/rdf/3.0.0/terms/Security/ExploitCatalogType/kev", "kev"),
+ ("https://spdx.org/rdf/3.0.0/terms/Security/ExploitCatalogType/other", "other"),
+ ]),
+ iri="https://spdx.org/rdf/3.0.0/terms/Security/catalogType",
+ min_count=1,
+ compact="security_catalogType",
+ )
+ # Describes that a CVE is known to have an exploit because it has been listed in an exploit catalog.
+ cls._add_property(
+ "security_exploited",
+ BooleanProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/Security/exploited",
+ min_count=1,
+ compact="security_exploited",
+ )
+ # Provides the location of an exploit catalog.
+ cls._add_property(
+ "security_locator",
+ AnyURIProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/Security/locator",
+ min_count=1,
+ compact="security_locator",
+ )
+
+
+# Provides an SSVC assessment for a vulnerability.
+@register("https://spdx.org/rdf/3.0.0/terms/Security/SsvcVulnAssessmentRelationship", compact_type="security_SsvcVulnAssessmentRelationship", abstract=False)
+class security_SsvcVulnAssessmentRelationship(security_VulnAssessmentRelationship):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ ID_ALIAS = "spdxId"
+ NAMED_INDIVIDUALS = {
+ }
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ # Provide the enumeration of possible decisions in the Stakeholder-Specific Vulnerability Categorization (SSVC) decision tree [https://www.cisa.gov/sites/default/files/publications/cisa-ssvc-guide%20508c.pdf](https://www.cisa.gov/sites/default/files/publications/cisa-ssvc-guide%20508c.pdf)
+ cls._add_property(
+ "security_decisionType",
+ EnumProp([
+ ("https://spdx.org/rdf/3.0.0/terms/Security/SsvcDecisionType/act", "act"),
+ ("https://spdx.org/rdf/3.0.0/terms/Security/SsvcDecisionType/attend", "attend"),
+ ("https://spdx.org/rdf/3.0.0/terms/Security/SsvcDecisionType/track", "track"),
+ ("https://spdx.org/rdf/3.0.0/terms/Security/SsvcDecisionType/trackStar", "trackStar"),
+ ]),
+ iri="https://spdx.org/rdf/3.0.0/terms/Security/decisionType",
+ min_count=1,
+ compact="security_decisionType",
+ )
+
+
+# Abstract ancestor class for all VEX relationships
+@register("https://spdx.org/rdf/3.0.0/terms/Security/VexVulnAssessmentRelationship", compact_type="security_VexVulnAssessmentRelationship", abstract=True)
+class security_VexVulnAssessmentRelationship(security_VulnAssessmentRelationship):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ ID_ALIAS = "spdxId"
+ NAMED_INDIVIDUALS = {
+ }
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ # Conveys information about how VEX status was determined.
+ cls._add_property(
+ "security_statusNotes",
+ StringProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/Security/statusNotes",
+ compact="security_statusNotes",
+ )
+ # Specifies the version of a VEX statement.
+ cls._add_property(
+ "security_vexVersion",
+ StringProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/Security/vexVersion",
+ compact="security_vexVersion",
+ )
+
+
+# Specifies a vulnerability and its associated information.
+@register("https://spdx.org/rdf/3.0.0/terms/Security/Vulnerability", compact_type="security_Vulnerability", abstract=False)
+class security_Vulnerability(Artifact):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ ID_ALIAS = "spdxId"
+ NAMED_INDIVIDUALS = {
+ }
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ # Specifies a time when a vulnerability assessment was modified.
+ cls._add_property(
+ "security_modifiedTime",
+ DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
+ iri="https://spdx.org/rdf/3.0.0/terms/Security/modifiedTime",
+ compact="security_modifiedTime",
+ )
+ # Specifies the time when a vulnerability was published.
+ cls._add_property(
+ "security_publishedTime",
+ DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
+ iri="https://spdx.org/rdf/3.0.0/terms/Security/publishedTime",
+ compact="security_publishedTime",
+ )
+ # Specifies the time and date when a vulnerability was withdrawn.
+ cls._add_property(
+ "security_withdrawnTime",
+ DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
+ iri="https://spdx.org/rdf/3.0.0/terms/Security/withdrawnTime",
+ compact="security_withdrawnTime",
+ )
+
+
+# A distinct article or unit related to Software.
+@register("https://spdx.org/rdf/3.0.0/terms/Software/SoftwareArtifact", compact_type="software_SoftwareArtifact", abstract=True)
+class software_SoftwareArtifact(Artifact):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ ID_ALIAS = "spdxId"
+ NAMED_INDIVIDUALS = {
+ }
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ # Provides additional purpose information of the software artifact.
+ cls._add_property(
+ "software_additionalPurpose",
+ ListProp(EnumProp([
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/application", "application"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/archive", "archive"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/bom", "bom"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/configuration", "configuration"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/container", "container"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/data", "data"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/device", "device"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/deviceDriver", "deviceDriver"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/diskImage", "diskImage"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/documentation", "documentation"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/evidence", "evidence"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/executable", "executable"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/file", "file"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/filesystemImage", "filesystemImage"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/firmware", "firmware"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/framework", "framework"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/install", "install"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/library", "library"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/manifest", "manifest"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/model", "model"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/module", "module"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/operatingSystem", "operatingSystem"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/other", "other"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/patch", "patch"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/platform", "platform"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/requirement", "requirement"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/source", "source"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/specification", "specification"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/test", "test"),
+ ])),
+ iri="https://spdx.org/rdf/3.0.0/terms/Software/additionalPurpose",
+ compact="software_additionalPurpose",
+ )
+ # Provides a place for the SPDX data creator to record acknowledgement text for
+ # a software Package, File or Snippet.
+ cls._add_property(
+ "software_attributionText",
+ ListProp(StringProp()),
+ iri="https://spdx.org/rdf/3.0.0/terms/Software/attributionText",
+ compact="software_attributionText",
+ )
+ # A canonical, unique, immutable identifier of the artifact content, that may be
+ # used for verifying its identity and/or integrity.
+ cls._add_property(
+ "software_contentIdentifier",
+ ListProp(ObjectProp(software_ContentIdentifier, False)),
+ iri="https://spdx.org/rdf/3.0.0/terms/Software/contentIdentifier",
+ compact="software_contentIdentifier",
+ )
+ # Identifies the text of one or more copyright notices for a software Package,
+ # File or Snippet, if any.
+ cls._add_property(
+ "software_copyrightText",
+ StringProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/Software/copyrightText",
+ compact="software_copyrightText",
+ )
+ # Provides information about the primary purpose of the software artifact.
+ cls._add_property(
+ "software_primaryPurpose",
+ EnumProp([
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/application", "application"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/archive", "archive"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/bom", "bom"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/configuration", "configuration"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/container", "container"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/data", "data"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/device", "device"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/deviceDriver", "deviceDriver"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/diskImage", "diskImage"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/documentation", "documentation"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/evidence", "evidence"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/executable", "executable"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/file", "file"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/filesystemImage", "filesystemImage"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/firmware", "firmware"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/framework", "framework"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/install", "install"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/library", "library"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/manifest", "manifest"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/model", "model"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/module", "module"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/operatingSystem", "operatingSystem"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/other", "other"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/patch", "patch"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/platform", "platform"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/requirement", "requirement"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/source", "source"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/specification", "specification"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SoftwarePurpose/test", "test"),
+ ]),
+ iri="https://spdx.org/rdf/3.0.0/terms/Software/primaryPurpose",
+ compact="software_primaryPurpose",
+ )
+
+
+# A container for a grouping of SPDX-3.0 content characterizing details
+# (provenance, composition, licensing, etc.) about a product.
+@register("https://spdx.org/rdf/3.0.0/terms/Core/Bom", compact_type="Bom", abstract=False)
+class Bom(Bundle):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ ID_ALIAS = "spdxId"
+ NAMED_INDIVIDUALS = {
+ }
+
+
+# A license that is not listed on the SPDX License List.
+@register("https://spdx.org/rdf/3.0.0/terms/ExpandedLicensing/CustomLicense", compact_type="expandedlicensing_CustomLicense", abstract=False)
+class expandedlicensing_CustomLicense(expandedlicensing_License):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ ID_ALIAS = "spdxId"
+ NAMED_INDIVIDUALS = {
+ }
+
+
+# Connects a vulnerability and an element designating the element as a product
+# affected by the vulnerability.
+@register("https://spdx.org/rdf/3.0.0/terms/Security/VexAffectedVulnAssessmentRelationship", compact_type="security_VexAffectedVulnAssessmentRelationship", abstract=False)
+class security_VexAffectedVulnAssessmentRelationship(security_VexVulnAssessmentRelationship):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ ID_ALIAS = "spdxId"
+ NAMED_INDIVIDUALS = {
+ }
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ # Provides advice on how to mitigate or remediate a vulnerability when a VEX product
+ # is affected by it.
+ cls._add_property(
+ "security_actionStatement",
+ StringProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/Security/actionStatement",
+ compact="security_actionStatement",
+ )
+ # Records the time when a recommended action was communicated in a VEX statement
+ # to mitigate a vulnerability.
+ cls._add_property(
+ "security_actionStatementTime",
+ ListProp(DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",)),
+ iri="https://spdx.org/rdf/3.0.0/terms/Security/actionStatementTime",
+ compact="security_actionStatementTime",
+ )
+
+
+# Links a vulnerability and elements representing products (in the VEX sense) where
+# a fix has been applied and are no longer affected.
+@register("https://spdx.org/rdf/3.0.0/terms/Security/VexFixedVulnAssessmentRelationship", compact_type="security_VexFixedVulnAssessmentRelationship", abstract=False)
+class security_VexFixedVulnAssessmentRelationship(security_VexVulnAssessmentRelationship):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ ID_ALIAS = "spdxId"
+ NAMED_INDIVIDUALS = {
+ }
+
+
+# Links a vulnerability and one or more elements designating the latter as products
+# not affected by the vulnerability.
+@register("https://spdx.org/rdf/3.0.0/terms/Security/VexNotAffectedVulnAssessmentRelationship", compact_type="security_VexNotAffectedVulnAssessmentRelationship", abstract=False)
+class security_VexNotAffectedVulnAssessmentRelationship(security_VexVulnAssessmentRelationship):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ ID_ALIAS = "spdxId"
+ NAMED_INDIVIDUALS = {
+ }
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ # Explains why a VEX product is not affected by a vulnerability. It is an
+ # alternative in VexNotAffectedVulnAssessmentRelationship to the machine-readable
+ # justification label.
+ cls._add_property(
+ "security_impactStatement",
+ StringProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/Security/impactStatement",
+ compact="security_impactStatement",
+ )
+ # Timestamp of impact statement.
+ cls._add_property(
+ "security_impactStatementTime",
+ DateTimeStampProp(pattern=r"^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$",),
+ iri="https://spdx.org/rdf/3.0.0/terms/Security/impactStatementTime",
+ compact="security_impactStatementTime",
+ )
+ # Impact justification label to be used when linking a vulnerability to an element
+ # representing a VEX product with a VexNotAffectedVulnAssessmentRelationship
+ # relationship.
+ cls._add_property(
+ "security_justificationType",
+ EnumProp([
+ ("https://spdx.org/rdf/3.0.0/terms/Security/VexJustificationType/componentNotPresent", "componentNotPresent"),
+ ("https://spdx.org/rdf/3.0.0/terms/Security/VexJustificationType/inlineMitigationsAlreadyExist", "inlineMitigationsAlreadyExist"),
+ ("https://spdx.org/rdf/3.0.0/terms/Security/VexJustificationType/vulnerableCodeCannotBeControlledByAdversary", "vulnerableCodeCannotBeControlledByAdversary"),
+ ("https://spdx.org/rdf/3.0.0/terms/Security/VexJustificationType/vulnerableCodeNotInExecutePath", "vulnerableCodeNotInExecutePath"),
+ ("https://spdx.org/rdf/3.0.0/terms/Security/VexJustificationType/vulnerableCodeNotPresent", "vulnerableCodeNotPresent"),
+ ]),
+ iri="https://spdx.org/rdf/3.0.0/terms/Security/justificationType",
+ compact="security_justificationType",
+ )
+
+
+# Designates elements as products where the impact of a vulnerability is being
+# investigated.
+@register("https://spdx.org/rdf/3.0.0/terms/Security/VexUnderInvestigationVulnAssessmentRelationship", compact_type="security_VexUnderInvestigationVulnAssessmentRelationship", abstract=False)
+class security_VexUnderInvestigationVulnAssessmentRelationship(security_VexVulnAssessmentRelationship):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ ID_ALIAS = "spdxId"
+ NAMED_INDIVIDUALS = {
+ }
+
+
+# Refers to any object that stores content on a computer.
+@register("https://spdx.org/rdf/3.0.0/terms/Software/File", compact_type="software_File", abstract=False)
+class software_File(software_SoftwareArtifact):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ ID_ALIAS = "spdxId"
+ NAMED_INDIVIDUALS = {
+ }
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ # Provides information about the content type of an Element.
+ cls._add_property(
+ "software_contentType",
+ StringProp(pattern=r"^[^\/]+\/[^\/]+$",),
+ iri="https://spdx.org/rdf/3.0.0/terms/Software/contentType",
+ compact="software_contentType",
+ )
+ # Describes if a given file is a directory or non-directory kind of file.
+ cls._add_property(
+ "software_fileKind",
+ EnumProp([
+ ("https://spdx.org/rdf/3.0.0/terms/Software/FileKindType/directory", "directory"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/FileKindType/file", "file"),
+ ]),
+ iri="https://spdx.org/rdf/3.0.0/terms/Software/fileKind",
+ compact="software_fileKind",
+ )
+
+
+# Refers to any unit of content that can be associated with a distribution of
+# software.
+@register("https://spdx.org/rdf/3.0.0/terms/Software/Package", compact_type="software_Package", abstract=False)
+class software_Package(software_SoftwareArtifact):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ ID_ALIAS = "spdxId"
+ NAMED_INDIVIDUALS = {
+ }
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ # Identifies the download Uniform Resource Identifier for the package at the time
+ # that the document was created.
+ cls._add_property(
+ "software_downloadLocation",
+ AnyURIProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/Software/downloadLocation",
+ compact="software_downloadLocation",
+ )
+ # A place for the SPDX document creator to record a website that serves as the
+ # package's home page.
+ cls._add_property(
+ "software_homePage",
+ AnyURIProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/Software/homePage",
+ compact="software_homePage",
+ )
+ # Provides a place for the SPDX data creator to record the package URL string
+ # (in accordance with the
+ # [package URL spec](https://github.com/package-url/purl-spec/blob/master/PURL-SPECIFICATION.rst))
+ # for a software Package.
+ cls._add_property(
+ "software_packageUrl",
+ AnyURIProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/Software/packageUrl",
+ compact="software_packageUrl",
+ )
+ # Identifies the version of a package.
+ cls._add_property(
+ "software_packageVersion",
+ StringProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/Software/packageVersion",
+ compact="software_packageVersion",
+ )
+ # Records any relevant background information or additional comments
+ # about the origin of the package.
+ cls._add_property(
+ "software_sourceInfo",
+ StringProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/Software/sourceInfo",
+ compact="software_sourceInfo",
+ )
+
+
+# A collection of SPDX Elements describing a single package.
+@register("https://spdx.org/rdf/3.0.0/terms/Software/Sbom", compact_type="software_Sbom", abstract=False)
+class software_Sbom(Bom):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ ID_ALIAS = "spdxId"
+ NAMED_INDIVIDUALS = {
+ }
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ # Provides information about the type of an SBOM.
+ cls._add_property(
+ "software_sbomType",
+ ListProp(EnumProp([
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SbomType/analyzed", "analyzed"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SbomType/build", "build"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SbomType/deployed", "deployed"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SbomType/design", "design"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SbomType/runtime", "runtime"),
+ ("https://spdx.org/rdf/3.0.0/terms/Software/SbomType/source", "source"),
+ ])),
+ iri="https://spdx.org/rdf/3.0.0/terms/Software/sbomType",
+ compact="software_sbomType",
+ )
+
+
+# Describes a certain part of a file.
+@register("https://spdx.org/rdf/3.0.0/terms/Software/Snippet", compact_type="software_Snippet", abstract=False)
+class software_Snippet(software_SoftwareArtifact):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ ID_ALIAS = "spdxId"
+ NAMED_INDIVIDUALS = {
+ }
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ # Defines the byte range in the original host file that the snippet information
+ # applies to.
+ cls._add_property(
+ "software_byteRange",
+ ObjectProp(PositiveIntegerRange, False),
+ iri="https://spdx.org/rdf/3.0.0/terms/Software/byteRange",
+ compact="software_byteRange",
+ )
+ # Defines the line range in the original host file that the snippet information
+ # applies to.
+ cls._add_property(
+ "software_lineRange",
+ ObjectProp(PositiveIntegerRange, False),
+ iri="https://spdx.org/rdf/3.0.0/terms/Software/lineRange",
+ compact="software_lineRange",
+ )
+ # Defines the original host file that the snippet information applies to.
+ cls._add_property(
+ "software_snippetFromFile",
+ ObjectProp(software_File, True),
+ iri="https://spdx.org/rdf/3.0.0/terms/Software/snippetFromFile",
+ min_count=1,
+ compact="software_snippetFromFile",
+ )
+
+
+# Specifies an AI package and its associated information.
+@register("https://spdx.org/rdf/3.0.0/terms/AI/AIPackage", compact_type="ai_AIPackage", abstract=False)
+class ai_AIPackage(software_Package):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ ID_ALIAS = "spdxId"
+ NAMED_INDIVIDUALS = {
+ }
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ # States if a human is involved in the decisions of the AI software.
+ cls._add_property(
+ "ai_autonomyType",
+ EnumProp([
+ ("https://spdx.org/rdf/3.0.0/terms/Core/PresenceType/no", "no"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/PresenceType/noAssertion", "noAssertion"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/PresenceType/yes", "yes"),
+ ]),
+ iri="https://spdx.org/rdf/3.0.0/terms/AI/autonomyType",
+ compact="ai_autonomyType",
+ )
+ # Captures the domain in which the AI package can be used.
+ cls._add_property(
+ "ai_domain",
+ ListProp(StringProp()),
+ iri="https://spdx.org/rdf/3.0.0/terms/AI/domain",
+ compact="ai_domain",
+ )
+ # Indicates the amount of energy consumed to train the AI model.
+ cls._add_property(
+ "ai_energyConsumption",
+ ObjectProp(ai_EnergyConsumption, False),
+ iri="https://spdx.org/rdf/3.0.0/terms/AI/energyConsumption",
+ compact="ai_energyConsumption",
+ )
+ # Records a hyperparameter used to build the AI model contained in the AI
+ # package.
+ cls._add_property(
+ "ai_hyperparameter",
+ ListProp(ObjectProp(DictionaryEntry, False)),
+ iri="https://spdx.org/rdf/3.0.0/terms/AI/hyperparameter",
+ compact="ai_hyperparameter",
+ )
+ # Provides relevant information about the AI software, not including the model
+ # description.
+ cls._add_property(
+ "ai_informationAboutApplication",
+ StringProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/AI/informationAboutApplication",
+ compact="ai_informationAboutApplication",
+ )
+ # Describes relevant information about different steps of the training process.
+ cls._add_property(
+ "ai_informationAboutTraining",
+ StringProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/AI/informationAboutTraining",
+ compact="ai_informationAboutTraining",
+ )
+ # Captures a limitation of the AI software.
+ cls._add_property(
+ "ai_limitation",
+ StringProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/AI/limitation",
+ compact="ai_limitation",
+ )
+ # Records the measurement of prediction quality of the AI model.
+ cls._add_property(
+ "ai_metric",
+ ListProp(ObjectProp(DictionaryEntry, False)),
+ iri="https://spdx.org/rdf/3.0.0/terms/AI/metric",
+ compact="ai_metric",
+ )
+ # Captures the threshold that was used for computation of a metric described in
+ # the metric field.
+ cls._add_property(
+ "ai_metricDecisionThreshold",
+ ListProp(ObjectProp(DictionaryEntry, False)),
+ iri="https://spdx.org/rdf/3.0.0/terms/AI/metricDecisionThreshold",
+ compact="ai_metricDecisionThreshold",
+ )
+ # Describes all the preprocessing steps applied to the training data before the
+ # model training.
+ cls._add_property(
+ "ai_modelDataPreprocessing",
+ ListProp(StringProp()),
+ iri="https://spdx.org/rdf/3.0.0/terms/AI/modelDataPreprocessing",
+ compact="ai_modelDataPreprocessing",
+ )
+ # Describes methods that can be used to explain the model.
+ cls._add_property(
+ "ai_modelExplainability",
+ ListProp(StringProp()),
+ iri="https://spdx.org/rdf/3.0.0/terms/AI/modelExplainability",
+ compact="ai_modelExplainability",
+ )
+ # Records the results of general safety risk assessment of the AI system.
+ cls._add_property(
+ "ai_safetyRiskAssessment",
+ EnumProp([
+ ("https://spdx.org/rdf/3.0.0/terms/AI/SafetyRiskAssessmentType/high", "high"),
+ ("https://spdx.org/rdf/3.0.0/terms/AI/SafetyRiskAssessmentType/low", "low"),
+ ("https://spdx.org/rdf/3.0.0/terms/AI/SafetyRiskAssessmentType/medium", "medium"),
+ ("https://spdx.org/rdf/3.0.0/terms/AI/SafetyRiskAssessmentType/serious", "serious"),
+ ]),
+ iri="https://spdx.org/rdf/3.0.0/terms/AI/safetyRiskAssessment",
+ compact="ai_safetyRiskAssessment",
+ )
+ # Captures a standard that is being complied with.
+ cls._add_property(
+ "ai_standardCompliance",
+ ListProp(StringProp()),
+ iri="https://spdx.org/rdf/3.0.0/terms/AI/standardCompliance",
+ compact="ai_standardCompliance",
+ )
+ # Records the type of the model used in the AI software.
+ cls._add_property(
+ "ai_typeOfModel",
+ ListProp(StringProp()),
+ iri="https://spdx.org/rdf/3.0.0/terms/AI/typeOfModel",
+ compact="ai_typeOfModel",
+ )
+ # Records if sensitive personal information is used during model training or
+ # could be used during the inference.
+ cls._add_property(
+ "ai_useSensitivePersonalInformation",
+ EnumProp([
+ ("https://spdx.org/rdf/3.0.0/terms/Core/PresenceType/no", "no"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/PresenceType/noAssertion", "noAssertion"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/PresenceType/yes", "yes"),
+ ]),
+ iri="https://spdx.org/rdf/3.0.0/terms/AI/useSensitivePersonalInformation",
+ compact="ai_useSensitivePersonalInformation",
+ )
+
+
+# Specifies a data package and its associated information.
+@register("https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetPackage", compact_type="dataset_DatasetPackage", abstract=False)
+class dataset_DatasetPackage(software_Package):
+ NODE_KIND = NodeKind.BlankNodeOrIRI
+ ID_ALIAS = "spdxId"
+ NAMED_INDIVIDUALS = {
+ }
+
+ @classmethod
+ def _register_props(cls):
+ super()._register_props()
+ # Describes the anonymization methods used.
+ cls._add_property(
+ "dataset_anonymizationMethodUsed",
+ ListProp(StringProp()),
+ iri="https://spdx.org/rdf/3.0.0/terms/Dataset/anonymizationMethodUsed",
+ compact="dataset_anonymizationMethodUsed",
+ )
+ # Describes the confidentiality level of the data points contained in the dataset.
+ cls._add_property(
+ "dataset_confidentialityLevel",
+ EnumProp([
+ ("https://spdx.org/rdf/3.0.0/terms/Dataset/ConfidentialityLevelType/amber", "amber"),
+ ("https://spdx.org/rdf/3.0.0/terms/Dataset/ConfidentialityLevelType/clear", "clear"),
+ ("https://spdx.org/rdf/3.0.0/terms/Dataset/ConfidentialityLevelType/green", "green"),
+ ("https://spdx.org/rdf/3.0.0/terms/Dataset/ConfidentialityLevelType/red", "red"),
+ ]),
+ iri="https://spdx.org/rdf/3.0.0/terms/Dataset/confidentialityLevel",
+ compact="dataset_confidentialityLevel",
+ )
+ # Describes how the dataset was collected.
+ cls._add_property(
+ "dataset_dataCollectionProcess",
+ StringProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/Dataset/dataCollectionProcess",
+ compact="dataset_dataCollectionProcess",
+ )
+ # Describes the preprocessing steps that were applied to the raw data to create the given dataset.
+ cls._add_property(
+ "dataset_dataPreprocessing",
+ ListProp(StringProp()),
+ iri="https://spdx.org/rdf/3.0.0/terms/Dataset/dataPreprocessing",
+ compact="dataset_dataPreprocessing",
+ )
+ # The field describes the availability of a dataset.
+ cls._add_property(
+ "dataset_datasetAvailability",
+ EnumProp([
+ ("https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetAvailabilityType/clickthrough", "clickthrough"),
+ ("https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetAvailabilityType/directDownload", "directDownload"),
+ ("https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetAvailabilityType/query", "query"),
+ ("https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetAvailabilityType/registration", "registration"),
+ ("https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetAvailabilityType/scrapingScript", "scrapingScript"),
+ ]),
+ iri="https://spdx.org/rdf/3.0.0/terms/Dataset/datasetAvailability",
+ compact="dataset_datasetAvailability",
+ )
+ # Describes potentially noisy elements of the dataset.
+ cls._add_property(
+ "dataset_datasetNoise",
+ StringProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/Dataset/datasetNoise",
+ compact="dataset_datasetNoise",
+ )
+ # Captures the size of the dataset.
+ cls._add_property(
+ "dataset_datasetSize",
+ NonNegativeIntegerProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/Dataset/datasetSize",
+ compact="dataset_datasetSize",
+ )
+ # Describes the type of the given dataset.
+ cls._add_property(
+ "dataset_datasetType",
+ ListProp(EnumProp([
+ ("https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/audio", "audio"),
+ ("https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/categorical", "categorical"),
+ ("https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/graph", "graph"),
+ ("https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/image", "image"),
+ ("https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/noAssertion", "noAssertion"),
+ ("https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/numeric", "numeric"),
+ ("https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/other", "other"),
+ ("https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/sensor", "sensor"),
+ ("https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/structured", "structured"),
+ ("https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/syntactic", "syntactic"),
+ ("https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/text", "text"),
+ ("https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/timeseries", "timeseries"),
+ ("https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/timestamp", "timestamp"),
+ ("https://spdx.org/rdf/3.0.0/terms/Dataset/DatasetType/video", "video"),
+ ])),
+ iri="https://spdx.org/rdf/3.0.0/terms/Dataset/datasetType",
+ min_count=1,
+ compact="dataset_datasetType",
+ )
+ # Describes a mechanism to update the dataset.
+ cls._add_property(
+ "dataset_datasetUpdateMechanism",
+ StringProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/Dataset/datasetUpdateMechanism",
+ compact="dataset_datasetUpdateMechanism",
+ )
+ # Describes if any sensitive personal information is present in the dataset.
+ cls._add_property(
+ "dataset_hasSensitivePersonalInformation",
+ EnumProp([
+ ("https://spdx.org/rdf/3.0.0/terms/Core/PresenceType/no", "no"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/PresenceType/noAssertion", "noAssertion"),
+ ("https://spdx.org/rdf/3.0.0/terms/Core/PresenceType/yes", "yes"),
+ ]),
+ iri="https://spdx.org/rdf/3.0.0/terms/Dataset/hasSensitivePersonalInformation",
+ compact="dataset_hasSensitivePersonalInformation",
+ )
+ # Describes what the given dataset should be used for.
+ cls._add_property(
+ "dataset_intendedUse",
+ StringProp(),
+ iri="https://spdx.org/rdf/3.0.0/terms/Dataset/intendedUse",
+ compact="dataset_intendedUse",
+ )
+ # Records the biases that the dataset is known to encompass.
+ cls._add_property(
+ "dataset_knownBias",
+ ListProp(StringProp()),
+ iri="https://spdx.org/rdf/3.0.0/terms/Dataset/knownBias",
+ compact="dataset_knownBias",
+ )
+ # Describes a sensor used for collecting the data.
+ cls._add_property(
+ "dataset_sensor",
+ ListProp(ObjectProp(DictionaryEntry, False)),
+ iri="https://spdx.org/rdf/3.0.0/terms/Dataset/sensor",
+ compact="dataset_sensor",
+ )
+
+
+"""Format Guard"""
+# fmt: on
+
+
+def main():
+ import argparse
+ from pathlib import Path
+
+ parser = argparse.ArgumentParser(description="Python SHACL model test")
+ parser.add_argument("infile", type=Path, help="Input file")
+ parser.add_argument("--print", action="store_true", help="Print object tree")
+ parser.add_argument("--outfile", type=Path, help="Output file")
+
+ args = parser.parse_args()
+
+ objectset = SHACLObjectSet()
+ with args.infile.open("r") as f:
+ d = JSONLDDeserializer()
+ d.read(f, objectset)
+
+ if args.print:
+ print_tree(objectset.objects)
+
+ if args.outfile:
+ with args.outfile.open("wb") as f:
+ s = JSONLDSerializer()
+ s.write(objectset, f)
+
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/poky/meta/lib/oe/spdx30_tasks.py b/poky/meta/lib/oe/spdx30_tasks.py
new file mode 100644
index 0000000000..6a2858c665
--- /dev/null
+++ b/poky/meta/lib/oe/spdx30_tasks.py
@@ -0,0 +1,1243 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import json
+import oe.cve_check
+import oe.packagedata
+import oe.patch
+import oe.sbom30
+import oe.spdx30
+import oe.spdx_common
+import oe.sdk
+import os
+
+from contextlib import contextmanager
+from datetime import datetime, timezone
+from pathlib import Path
+
+
+def set_timestamp_now(d, o, prop):
+ if d.getVar("SPDX_INCLUDE_TIMESTAMPS") == "1":
+ setattr(o, prop, datetime.now(timezone.utc))
+ else:
+ # Doing this helps validate that the property actually exists, and
+ # also that it is not mandatory
+ delattr(o, prop)
+
+
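+# Converts a BitBake LICENSE expression into an SPDX license expression.
+# The operators & and | map to AND/OR and CLOSED maps to NONE; licenses not
+# found in the SPDX license list are emitted as LicenseRef- entries backed by
+# SimpleLicensingText objects whose text is read from COMMON_LICENSE_DIR,
+# LICENSE_PATH, or NO_GENERIC_LICENSE.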
+def add_license_expression(d, objset, license_expression, license_data):
+ simple_license_text = {}
+ license_text_map = {}
+ license_ref_idx = 0
+
+ def add_license_text(name):
+ nonlocal objset
+ nonlocal simple_license_text
+
+ if name in simple_license_text:
+ return simple_license_text[name]
+
+ lic = objset.find_filter(
+ oe.spdx30.simplelicensing_SimpleLicensingText,
+ name=name,
+ )
+
+ if lic is not None:
+ simple_license_text[name] = lic
+ return lic
+
+ lic = objset.add(
+ oe.spdx30.simplelicensing_SimpleLicensingText(
+ _id=objset.new_spdxid("license-text", name),
+ creationInfo=objset.doc.creationInfo,
+ name=name,
+ )
+ )
+ simple_license_text[name] = lic
+
+ if name == "PD":
+ lic.simplelicensing_licenseText = "Software released to the public domain"
+ return lic
+
+ # Search for the license in COMMON_LICENSE_DIR and LICENSE_PATH
+ for directory in [d.getVar("COMMON_LICENSE_DIR")] + (
+ d.getVar("LICENSE_PATH") or ""
+ ).split():
+ try:
+ with (Path(directory) / name).open(errors="replace") as f:
+ lic.simplelicensing_licenseText = f.read()
+ return lic
+
+ except FileNotFoundError:
+ pass
+
+ # If it's not SPDX or PD, then NO_GENERIC_LICENSE must be set
+ filename = d.getVarFlag("NO_GENERIC_LICENSE", name)
+ if filename:
+ filename = d.expand("${S}/" + filename)
+ with open(filename, errors="replace") as f:
+ lic.simplelicensing_licenseText = f.read()
+ return lic
+ else:
+ bb.fatal("Cannot find any text for license %s" % name)
+
+ def convert(l):
+ nonlocal license_text_map
+ nonlocal license_ref_idx
+
+ if l == "(" or l == ")":
+ return l
+
+ if l == "&":
+ return "AND"
+
+ if l == "|":
+ return "OR"
+
+ if l == "CLOSED":
+ return "NONE"
+
+ spdx_license = d.getVarFlag("SPDXLICENSEMAP", l) or l
+ if spdx_license in license_data["licenses"]:
+ return spdx_license
+
+ spdx_license = "LicenseRef-" + l
+ if spdx_license not in license_text_map:
+ license_text_map[spdx_license] = add_license_text(l)._id
+
+ return spdx_license
+
+ lic_split = (
+ license_expression.replace("(", " ( ")
+ .replace(")", " ) ")
+ .replace("|", " | ")
+ .replace("&", " & ")
+ .split()
+ )
+ spdx_license_expression = " ".join(convert(l) for l in lic_split)
+
+ return objset.new_license_expression(spdx_license_expression, license_data, license_text_map)
+
+
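+# Walks topdir and creates an SPDX File element for each regular file found,
+# skipping symlinks and any directories listed in ignore_dirs (or, at the top
+# level, ignore_top_level_dirs). Source files have their declared licenses
+# scanned, and files can optionally be appended to a tar archive with
+# normalized ownership and SOURCE_DATE_EPOCH-clamped timestamps.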
+def add_package_files(
+ d,
+ objset,
+ topdir,
+ get_spdxid,
+ get_purposes,
+ license_data,
+ *,
+ archive=None,
+ ignore_dirs=[],
+ ignore_top_level_dirs=[],
+):
+ source_date_epoch = d.getVar("SOURCE_DATE_EPOCH")
+ if source_date_epoch:
+ source_date_epoch = int(source_date_epoch)
+
+ spdx_files = set()
+
+ file_counter = 1
+ for subdir, dirs, files in os.walk(topdir):
+ dirs[:] = [d for d in dirs if d not in ignore_dirs]
+ if subdir == str(topdir):
+ dirs[:] = [d for d in dirs if d not in ignore_top_level_dirs]
+
+ for file in files:
+ filepath = Path(subdir) / file
+ if filepath.is_symlink() or not filepath.is_file():
+ continue
+
+ bb.debug(1, "Adding file %s to %s" % (filepath, objset.doc._id))
+
+ filename = str(filepath.relative_to(topdir))
+ file_purposes = get_purposes(filepath)
+
+ spdx_file = objset.new_file(
+ get_spdxid(file_counter),
+ filename,
+ filepath,
+ purposes=file_purposes,
+ )
+ spdx_files.add(spdx_file)
+
+ if oe.spdx30.software_SoftwarePurpose.source in file_purposes:
+ objset.scan_declared_licenses(spdx_file, filepath, license_data)
+
+ if archive is not None:
+ with filepath.open("rb") as f:
+ info = archive.gettarinfo(fileobj=f)
+ info.name = filename
+ info.uid = 0
+ info.gid = 0
+ info.uname = "root"
+ info.gname = "root"
+
+ if source_date_epoch is not None and info.mtime > source_date_epoch:
+ info.mtime = source_date_epoch
+
+ archive.addfile(info, f)
+
+ file_counter += 1
+
+ return spdx_files
+
+
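+# Uses the package's debugsrc metadata to locate the referenced source files
+# in the package and staging directories, hashes them (with caching), and
+# returns the corresponding source File elements from the dependency sources,
+# looked up by SHA256.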
+def get_package_sources_from_debug(
+ d, package, package_files, sources, source_hash_cache
+):
+ def file_path_match(file_path, pkg_file):
+ if file_path.lstrip("/") == pkg_file.name.lstrip("/"):
+ return True
+
+ for e in pkg_file.extension:
+ if isinstance(e, oe.sbom30.OEFileNameAliasExtension):
+ for a in e.aliases:
+ if file_path.lstrip("/") == a.lstrip("/"):
+ return True
+
+ return False
+
+ debug_search_paths = [
+ Path(d.getVar("PKGD")),
+ Path(d.getVar("STAGING_DIR_TARGET")),
+ Path(d.getVar("STAGING_DIR_NATIVE")),
+ Path(d.getVar("STAGING_KERNEL_DIR")),
+ ]
+
+ pkg_data = oe.packagedata.read_subpkgdata_extended(package, d)
+
+ if pkg_data is None:
+ return
+
+ dep_source_files = set()
+
+ for file_path, file_data in pkg_data["files_info"].items():
+ if not "debugsrc" in file_data:
+ continue
+
+ if not any(file_path_match(file_path, pkg_file) for pkg_file in package_files):
+ bb.fatal(
+ "No package file found for %s in %s; SPDX found: %s"
+ % (str(file_path), package, " ".join(p.name for p in package_files))
+ )
+ continue
+
+ for debugsrc in file_data["debugsrc"]:
+ for search in debug_search_paths:
+ if debugsrc.startswith("/usr/src/kernel"):
+ debugsrc_path = search / debugsrc.replace("/usr/src/kernel/", "")
+ else:
+ debugsrc_path = search / debugsrc.lstrip("/")
+
+ if debugsrc_path in source_hash_cache:
+ file_sha256 = source_hash_cache[debugsrc_path]
+ if file_sha256 is None:
+ continue
+ else:
+ # We can only hash regular files, so skip directories, links, etc.
+ if not debugsrc_path.is_file():
+ source_hash_cache[debugsrc_path] = None
+ continue
+
+ file_sha256 = bb.utils.sha256_file(debugsrc_path)
+ source_hash_cache[debugsrc_path] = file_sha256
+
+ if file_sha256 in sources:
+ source_file = sources[file_sha256]
+ dep_source_files.add(source_file)
+ else:
+ bb.debug(
+ 1,
+ "Debug source %s with SHA256 %s not found in any dependency"
+ % (str(debugsrc_path), file_sha256),
+ )
+ break
+ else:
+ bb.debug(1, "Debug source %s not found" % debugsrc)
+
+ return dep_source_files
+
+
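+# Loads the SPDX Build object for each SPDX dependency of this recipe and
+# returns the object sets of the dependencies that are part of the taskhash
+# (and so can be linked against) together with the set of all dependency
+# Build elements.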
+def collect_dep_objsets(d, build):
+ deps = oe.spdx_common.get_spdx_deps(d)
+
+ dep_objsets = []
+ dep_builds = set()
+
+ dep_build_spdxids = set()
+ for dep in deps:
+ bb.debug(1, "Fetching SPDX for dependency %s" % (dep.pn))
+ dep_build, dep_objset = oe.sbom30.find_root_obj_in_jsonld(
+ d, "recipes", dep.pn, oe.spdx30.build_Build
+ )
+ # If the dependency is part of the taskhash, return it to be linked
+ # against. Otherwise, it cannot be linked against because this recipe
+ # will not be rebuilt if the dependency changes
+ if dep.in_taskhash:
+ dep_objsets.append(dep_objset)
+
+ # The build _can_ be linked against (by alias)
+ dep_builds.add(dep_build)
+
+ return dep_objsets, dep_builds
+
+
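+# Gathers the source File elements recorded as build inputs in the dependency
+# object sets, indexed by their SHA256 hash value.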
+def collect_dep_sources(dep_objsets):
+ sources = {}
+ for objset in dep_objsets:
+ # Don't collect sources from native recipes, as they also
+ # match non-native sources.
+ if objset.is_native():
+ continue
+
+ bb.debug(1, "Fetching Sources for dependency %s" % (objset.doc.name))
+
+ dep_build = objset.find_root(oe.spdx30.build_Build)
+ if not dep_build:
+ bb.fatal("Unable to find a build")
+
+ for e in objset.foreach_type(oe.spdx30.Relationship):
+ if dep_build is not e.from_:
+ continue
+
+ if e.relationshipType != oe.spdx30.RelationshipType.hasInputs:
+ continue
+
+ for to in e.to:
+ if not isinstance(to, oe.spdx30.software_File):
+ continue
+
+ if (
+ to.software_primaryPurpose
+ != oe.spdx30.software_SoftwarePurpose.source
+ ):
+ continue
+
+ for v in to.verifiedUsing:
+ if v.algorithm == oe.spdx30.HashAlgorithm.sha256:
+ sources[v.hashValue] = to
+ break
+ else:
+ bb.fatal(
+ "No SHA256 found for %s in %s" % (to.name, objset.doc.name)
+ )
+
+ return sources
+
+
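+# Creates SPDX elements for everything fetched by SRC_URI: local file:// URIs
+# become File elements (walking directories as needed), while remote fetches
+# become software_Package elements carrying the download location and any
+# expected checksums.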
+def add_download_files(d, objset):
+ inputs = set()
+
+ urls = d.getVar("SRC_URI").split()
+ fetch = bb.fetch2.Fetch(urls, d)
+
+ for download_idx, src_uri in enumerate(urls):
+ fd = fetch.ud[src_uri]
+
+ for name in fd.names:
+ file_name = os.path.basename(fetch.localpath(src_uri))
+ if oe.patch.patch_path(src_uri, fetch, "", expand=False):
+ primary_purpose = oe.spdx30.software_SoftwarePurpose.patch
+ else:
+ primary_purpose = oe.spdx30.software_SoftwarePurpose.source
+
+ if fd.type == "file":
+ if os.path.isdir(fd.localpath):
+ walk_idx = 1
+ for root, dirs, files in os.walk(fd.localpath):
+ for f in files:
+ f_path = os.path.join(root, f)
+ if os.path.islink(f_path):
+ # TODO: SPDX doesn't support symlinks yet
+ continue
+
+ file = objset.new_file(
+ objset.new_spdxid(
+ "source", str(download_idx + 1), str(walk_idx)
+ ),
+ os.path.join(
+ file_name, os.path.relpath(f_path, fd.localpath)
+ ),
+ f_path,
+ purposes=[primary_purpose],
+ )
+
+ inputs.add(file)
+ walk_idx += 1
+
+ else:
+ file = objset.new_file(
+ objset.new_spdxid("source", str(download_idx + 1)),
+ file_name,
+ fd.localpath,
+ purposes=[primary_purpose],
+ )
+ inputs.add(file)
+
+ else:
+ uri = fd.type
+ proto = getattr(fd, "proto", None)
+ if proto is not None:
+ uri = uri + "+" + proto
+ uri = uri + "://" + fd.host + fd.path
+
+ if fd.method.supports_srcrev():
+ uri = uri + "@" + fd.revisions[name]
+
+ dl = objset.add(
+ oe.spdx30.software_Package(
+ _id=objset.new_spdxid("source", str(download_idx + 1)),
+ creationInfo=objset.doc.creationInfo,
+ name=file_name,
+ software_primaryPurpose=primary_purpose,
+ software_downloadLocation=uri,
+ )
+ )
+
+ if fd.method.supports_checksum(fd):
+ # TODO: Need something better than hard-coding this
+ for checksum_id in ["sha256", "sha1"]:
+ expected_checksum = getattr(
+ fd, "%s_expected" % checksum_id, None
+ )
+ if expected_checksum is None:
+ continue
+
+ dl.verifiedUsing.append(
+ oe.spdx30.Hash(
+ algorithm=getattr(oe.spdx30.HashAlgorithm, checksum_id),
+ hashValue=expected_checksum,
+ )
+ )
+
+ inputs.add(dl)
+
+ return inputs
+
+
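+# Sets software_primaryPurpose and software_additionalPurpose on the given
+# element from the first of var_names that is set, prefixed by force_purposes.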
+def set_purposes(d, element, *var_names, force_purposes=[]):
+ purposes = force_purposes[:]
+
+ for var_name in var_names:
+ val = d.getVar(var_name)
+ if val:
+ purposes.extend(val.split())
+ break
+
+ if not purposes:
+ bb.warn("No SPDX purposes found in %s" % " ".join(var_names))
+ return
+
+ element.software_primaryPurpose = getattr(
+ oe.spdx30.software_SoftwarePurpose, purposes[0]
+ )
+ element.software_additionalPurpose = [
+ getattr(oe.spdx30.software_SoftwarePurpose, p) for p in purposes[1:]
+ ]
+
+
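+# Generates the recipe-level SPDX document (downloads, licenses, CVE/VEX data,
+# patched sources, and build dependencies) along with a staged SPDX document
+# for each packaged output; the staged package documents are completed later
+# by create_package_spdx().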
+def create_spdx(d):
+ def set_var_field(var, obj, name, package=None):
+ val = None
+ if package:
+ val = d.getVar("%s:%s" % (var, package))
+
+ if not val:
+ val = d.getVar(var)
+
+ if val:
+ setattr(obj, name, val)
+
+ license_data = oe.spdx_common.load_spdx_license_data(d)
+
+ deploydir = Path(d.getVar("SPDXDEPLOY"))
+ deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
+ spdx_workdir = Path(d.getVar("SPDXWORK"))
+ include_sources = d.getVar("SPDX_INCLUDE_SOURCES") == "1"
+ pkg_arch = d.getVar("SSTATE_PKGARCH")
+ is_native = bb.data.inherits_class("native", d) or bb.data.inherits_class(
+ "cross", d
+ )
+ include_vex = d.getVar("SPDX_INCLUDE_VEX")
+ if include_vex not in ("none", "current", "all"):
+ bb.fatal("SPDX_INCLUDE_VEX must be one of 'none', 'current', 'all'")
+
+ build_objset = oe.sbom30.ObjectSet.new_objset(d, d.getVar("PN"))
+
+ build = build_objset.new_task_build("recipe", "recipe")
+ build_objset.doc.rootElement.append(build)
+
+ build_objset.set_is_native(is_native)
+
+ for var in (d.getVar("SPDX_CUSTOM_ANNOTATION_VARS") or "").split():
+ new_annotation(
+ d,
+ build_objset,
+ build,
+ "%s=%s" % (var, d.getVar(var)),
+ oe.spdx30.AnnotationType.other,
+ )
+
+ build_inputs = set()
+
+ # Add CVEs
+ cve_by_status = {}
+ if include_vex != "none":
+ for cve in d.getVarFlags("CVE_STATUS") or {}:
+ decoded_status = oe.cve_check.decode_cve_status(d, cve)
+
+ # If this CVE is fixed upstream, skip it unless all CVEs are
+ # specified.
+ if include_vex != "all" and 'detail' in decoded_status and \
+ decoded_status['detail'] in (
+ "fixed-version",
+ "cpe-stable-backport",
+ ):
+ bb.debug(1, "Skipping %s since it is already fixed upstream" % cve)
+ continue
+
+ cve_by_status.setdefault(decoded_status['mapping'], {})[cve] = (
+ build_objset.new_cve_vuln(cve),
+ decoded_status['detail'],
+ decoded_status['description'],
+ )
+
+ cpe_ids = oe.cve_check.get_cpe_ids(d.getVar("CVE_PRODUCT"), d.getVar("CVE_VERSION"))
+
+ source_files = add_download_files(d, build_objset)
+ build_inputs |= source_files
+
+ recipe_spdx_license = add_license_expression(d, build_objset, d.getVar("LICENSE"), license_data)
+ build_objset.new_relationship(
+ source_files,
+ oe.spdx30.RelationshipType.hasConcludedLicense,
+ [recipe_spdx_license],
+ )
+
+ if oe.spdx_common.process_sources(d) and include_sources:
+ bb.debug(1, "Adding source files to SPDX")
+ oe.spdx_common.get_patched_src(d)
+
+ build_inputs |= add_package_files(
+ d,
+ build_objset,
+ spdx_workdir,
+ lambda file_counter: build_objset.new_spdxid(
+ "sourcefile", str(file_counter)
+ ),
+ lambda filepath: [oe.spdx30.software_SoftwarePurpose.source],
+ license_data,
+ ignore_dirs=[".git"],
+ ignore_top_level_dirs=["temp"],
+ archive=None,
+ )
+
+ dep_objsets, dep_builds = collect_dep_objsets(d, build)
+ if dep_builds:
+ build_objset.new_scoped_relationship(
+ [build],
+ oe.spdx30.RelationshipType.dependsOn,
+ oe.spdx30.LifecycleScopeType.build,
+ sorted(oe.sbom30.get_element_link_id(b) for b in dep_builds),
+ )
+
+ debug_source_ids = set()
+ source_hash_cache = {}
+
+ # Write out the package SPDX data now. It is not complete, as we cannot
+ # write the runtime data yet, so write it to a staging area; a later task
+ # will write out the final collection
+
+ # TODO: Handle native recipe output
+ if not is_native:
+ bb.debug(1, "Collecting Dependency sources files")
+ sources = collect_dep_sources(dep_objsets)
+
+ bb.build.exec_func("read_subpackage_metadata", d)
+
+ pkgdest = Path(d.getVar("PKGDEST"))
+ for package in d.getVar("PACKAGES").split():
+ if not oe.packagedata.packaged(package, d):
+ continue
+
+ pkg_name = d.getVar("PKG:%s" % package) or package
+
+ bb.debug(1, "Creating SPDX for package %s" % pkg_name)
+
+ pkg_objset = oe.sbom30.ObjectSet.new_objset(d, pkg_name)
+
+ spdx_package = pkg_objset.add_root(
+ oe.spdx30.software_Package(
+ _id=pkg_objset.new_spdxid("package", pkg_name),
+ creationInfo=pkg_objset.doc.creationInfo,
+ name=pkg_name,
+ software_packageVersion=d.getVar("PV"),
+ )
+ )
+ set_timestamp_now(d, spdx_package, "builtTime")
+
+ set_purposes(
+ d,
+ spdx_package,
+ "SPDX_PACKAGE_ADDITIONAL_PURPOSE:%s" % package,
+ "SPDX_PACKAGE_ADDITIONAL_PURPOSE",
+ force_purposes=["install"],
+ )
+
+ supplier = build_objset.new_agent("SPDX_PACKAGE_SUPPLIER")
+ if supplier is not None:
+ spdx_package.supplier = (
+ supplier if isinstance(supplier, str) else supplier._id
+ )
+
+ set_var_field(
+ "HOMEPAGE", spdx_package, "software_homePage", package=package
+ )
+ set_var_field("SUMMARY", spdx_package, "summary", package=package)
+ set_var_field("DESCRIPTION", spdx_package, "description", package=package)
+
+ pkg_objset.new_scoped_relationship(
+ [build._id],
+ oe.spdx30.RelationshipType.hasOutputs,
+ oe.spdx30.LifecycleScopeType.build,
+ [spdx_package],
+ )
+
+ for cpe_id in cpe_ids:
+ spdx_package.externalIdentifier.append(
+ oe.spdx30.ExternalIdentifier(
+ externalIdentifierType=oe.spdx30.ExternalIdentifierType.cpe23,
+ identifier=cpe_id,
+ )
+ )
+
+ # TODO: Generate a file for each actual IPK/DEB/RPM/TGZ file
+ # generated and link it to the package
+ # spdx_package_file = pkg_objset.add(oe.spdx30.software_File(
+ # _id=pkg_objset.new_spdxid("distribution", pkg_name),
+ # creationInfo=pkg_objset.doc.creationInfo,
+ # name=pkg_name,
+ # software_primaryPurpose=spdx_package.software_primaryPurpose,
+ # software_additionalPurpose=spdx_package.software_additionalPurpose,
+ # ))
+ # set_timestamp_now(d, spdx_package_file, "builtTime")
+
+ ## TODO add hashes
+ # pkg_objset.new_relationship(
+ # [spdx_package],
+ # oe.spdx30.RelationshipType.hasDistributionArtifact,
+ # [spdx_package_file],
+ # )
+
+ # NOTE: licenses live in the recipe collection and are referenced
+ # by ID in the package collection(s). This helps reduce duplication
+ # (since a lot of packages will have the same license), and also
+ # prevents duplicate license SPDX IDs in the packages
+ package_license = d.getVar("LICENSE:%s" % package)
+ if package_license and package_license != d.getVar("LICENSE"):
+ package_spdx_license = add_license_expression(
+ d, build_objset, package_license, license_data
+ )
+ else:
+ package_spdx_license = recipe_spdx_license
+
+ pkg_objset.new_relationship(
+ [spdx_package],
+ oe.spdx30.RelationshipType.hasConcludedLicense,
+ [package_spdx_license._id],
+ )
+
+ # NOTE: CVE Elements live in the recipe collection
+ all_cves = set()
+ for status, cves in cve_by_status.items():
+ for cve, items in cves.items():
+ spdx_cve, detail, description = items
+
+ all_cves.add(spdx_cve._id)
+
+ if status == "Patched":
+ pkg_objset.new_vex_patched_relationship(
+ [spdx_cve._id], [spdx_package]
+ )
+ elif status == "Unpatched":
+ pkg_objset.new_vex_unpatched_relationship(
+ [spdx_cve._id], [spdx_package]
+ )
+ elif status == "Ignored":
+ spdx_vex = pkg_objset.new_vex_ignored_relationship(
+ [spdx_cve._id],
+ [spdx_package],
+ impact_statement=description,
+ )
+
+ if detail in (
+ "ignored",
+ "cpe-incorrect",
+ "disputed",
+ "upstream-wontfix",
+ ):
+ # VEX doesn't have justifications for this
+ pass
+ elif detail in (
+ "not-applicable-config",
+ "not-applicable-platform",
+ ):
+ for v in spdx_vex:
+ v.security_justificationType = (
+ oe.spdx30.security_VexJustificationType.vulnerableCodeNotPresent
+ )
+ else:
+ bb.fatal(f"Unknown detail '{detail}' for ignored {cve}")
+ else:
+ bb.fatal(f"Unknown {cve} status '{status}'")
+
+ if all_cves:
+ pkg_objset.new_relationship(
+ [spdx_package],
+ oe.spdx30.RelationshipType.hasAssociatedVulnerability,
+ sorted(list(all_cves)),
+ )
+
+ bb.debug(1, "Adding package files to SPDX for package %s" % pkg_name)
+ package_files = add_package_files(
+ d,
+ pkg_objset,
+ pkgdest / package,
+ lambda file_counter: pkg_objset.new_spdxid(
+ "package", pkg_name, "file", str(file_counter)
+ ),
+ # TODO: Can we know the purpose here?
+ lambda filepath: [],
+ license_data,
+ ignore_top_level_dirs=["CONTROL", "DEBIAN"],
+ archive=None,
+ )
+
+ if package_files:
+ pkg_objset.new_relationship(
+ [spdx_package],
+ oe.spdx30.RelationshipType.contains,
+ sorted(list(package_files)),
+ )
+
+ if include_sources:
+ debug_sources = get_package_sources_from_debug(
+ d, package, package_files, sources, source_hash_cache
+ )
+ debug_source_ids |= set(
+ oe.sbom30.get_element_link_id(d) for d in debug_sources
+ )
+
+ oe.sbom30.write_recipe_jsonld_doc(
+ d, pkg_objset, "packages-staging", deploydir, create_spdx_id_links=False
+ )
+
+ if include_sources:
+ bb.debug(1, "Adding sysroot files to SPDX")
+ sysroot_files = add_package_files(
+ d,
+ build_objset,
+ d.expand("${COMPONENTS_DIR}/${PACKAGE_ARCH}/${PN}"),
+ lambda file_counter: build_objset.new_spdxid("sysroot", str(file_counter)),
+ lambda filepath: [],
+ license_data,
+ archive=None,
+ )
+
+ if sysroot_files:
+ build_objset.new_scoped_relationship(
+ [build],
+ oe.spdx30.RelationshipType.hasOutputs,
+ oe.spdx30.LifecycleScopeType.build,
+ sorted(list(sysroot_files)),
+ )
+
+ if build_inputs or debug_source_ids:
+ build_objset.new_scoped_relationship(
+ [build],
+ oe.spdx30.RelationshipType.hasInputs,
+ oe.spdx30.LifecycleScopeType.build,
+ sorted(list(build_inputs)) + sorted(list(debug_source_ids)),
+ )
+
+ oe.sbom30.write_recipe_jsonld_doc(d, build_objset, "recipes", deploydir)
+
+
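+# Completes the staged per-package SPDX documents by adding runtime (RDEPENDS)
+# dependency relationships resolved through the package providers, then writes
+# the final "packages" and shared "common-package" documents.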
+def create_package_spdx(d):
+ deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
+ deploydir = Path(d.getVar("SPDXRUNTIMEDEPLOY"))
+ is_native = bb.data.inherits_class("native", d) or bb.data.inherits_class(
+ "cross", d
+ )
+
+ providers = oe.spdx_common.collect_package_providers(d)
+ pkg_arch = d.getVar("SSTATE_PKGARCH")
+
+ if is_native:
+ return
+
+ bb.build.exec_func("read_subpackage_metadata", d)
+
+ dep_package_cache = {}
+
+ # Any element common to all packages that needs to be referenced by ID
+ # should be written into this object set
+ common_objset = oe.sbom30.ObjectSet.new_objset(
+ d, "%s-package-common" % d.getVar("PN")
+ )
+
+ pkgdest = Path(d.getVar("PKGDEST"))
+ for package in d.getVar("PACKAGES").split():
+ localdata = bb.data.createCopy(d)
+ pkg_name = d.getVar("PKG:%s" % package) or package
+ localdata.setVar("PKG", pkg_name)
+ localdata.setVar("OVERRIDES", d.getVar("OVERRIDES", False) + ":" + package)
+
+ if not oe.packagedata.packaged(package, localdata):
+ continue
+
+ spdx_package, pkg_objset = oe.sbom30.load_obj_in_jsonld(
+ d,
+ pkg_arch,
+ "packages-staging",
+ pkg_name,
+ oe.spdx30.software_Package,
+ software_primaryPurpose=oe.spdx30.software_SoftwarePurpose.install,
+ )
+
+ # We will write out a new collection, so link it to the new
+ # creation info in the common package data. The old creation info
+ # should still exist and be referenced by all the existing elements
+ # in the package
+ pkg_objset.creationInfo = pkg_objset.copy_creation_info(
+ common_objset.doc.creationInfo
+ )
+
+ runtime_spdx_deps = set()
+
+ deps = bb.utils.explode_dep_versions2(localdata.getVar("RDEPENDS") or "")
+ seen_deps = set()
+ for dep, _ in deps.items():
+ if dep in seen_deps:
+ continue
+
+ if dep not in providers:
+ continue
+
+ (dep, _) = providers[dep]
+
+ if not oe.packagedata.packaged(dep, localdata):
+ continue
+
+ dep_pkg_data = oe.packagedata.read_subpkgdata_dict(dep, d)
+ dep_pkg = dep_pkg_data["PKG"]
+
+ if dep in dep_package_cache:
+ dep_spdx_package = dep_package_cache[dep]
+ else:
+ bb.debug(1, "Searching for %s" % dep_pkg)
+ dep_spdx_package, _ = oe.sbom30.find_root_obj_in_jsonld(
+ d,
+ "packages-staging",
+ dep_pkg,
+ oe.spdx30.software_Package,
+ software_primaryPurpose=oe.spdx30.software_SoftwarePurpose.install,
+ )
+ dep_package_cache[dep] = dep_spdx_package
+
+ runtime_spdx_deps.add(dep_spdx_package)
+ seen_deps.add(dep)
+
+ if runtime_spdx_deps:
+ pkg_objset.new_scoped_relationship(
+ [spdx_package],
+ oe.spdx30.RelationshipType.dependsOn,
+ oe.spdx30.LifecycleScopeType.runtime,
+ [oe.sbom30.get_element_link_id(dep) for dep in runtime_spdx_deps],
+ )
+
+ oe.sbom30.write_recipe_jsonld_doc(d, pkg_objset, "packages", deploydir)
+
+ oe.sbom30.write_recipe_jsonld_doc(d, common_objset, "common-package", deploydir)
+
+
+def write_bitbake_spdx(d):
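+ # Describe the bitbake invocation itself. When
+ # SPDX_INCLUDE_BITBAKE_PARENT_BUILD is enabled, a uniquely identified
+ # build_Build element is created and optionally linked to the build
+ # host and to the invoking/delegating agents.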
+ # Set PN to "bitbake" so that SPDX IDs can be generated
+ d.setVar("PN", "bitbake")
+ d.setVar("BB_TASKHASH", "bitbake")
+ oe.spdx_common.load_spdx_license_data(d)
+
+ deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
+
+ objset = oe.sbom30.ObjectSet.new_objset(d, "bitbake", False)
+
+ host_import_key = d.getVar("SPDX_BUILD_HOST")
+ invoked_by = objset.new_agent("SPDX_INVOKED_BY", add=False)
+ on_behalf_of = objset.new_agent("SPDX_ON_BEHALF_OF", add=False)
+
+ if d.getVar("SPDX_INCLUDE_BITBAKE_PARENT_BUILD") == "1":
+ # Since the Build objects are unique, we may as well set the creation
+ # time to the current time instead of the SOURCE_DATE_EPOCH fallback
+ objset.doc.creationInfo.created = datetime.now(timezone.utc)
+
+ # Each invocation of bitbake should have a unique ID since it is a
+ # unique build
+ nonce = os.urandom(16).hex()
+
+ build = objset.add_root(
+ oe.spdx30.build_Build(
+ _id=objset.new_spdxid(nonce, include_unihash=False),
+ creationInfo=objset.doc.creationInfo,
+ build_buildType=oe.sbom30.SPDX_BUILD_TYPE,
+ )
+ )
+ set_timestamp_now(d, build, "build_buildStartTime")
+
+ if host_import_key:
+ objset.new_scoped_relationship(
+ [build],
+ oe.spdx30.RelationshipType.hasHost,
+ oe.spdx30.LifecycleScopeType.build,
+ [objset.new_import("SPDX_BUILD_HOST")],
+ )
+
+ if invoked_by:
+ objset.add(invoked_by)
+ invoked_by_spdx = objset.new_scoped_relationship(
+ [build],
+ oe.spdx30.RelationshipType.invokedBy,
+ oe.spdx30.LifecycleScopeType.build,
+ [invoked_by],
+ )
+
+ if on_behalf_of:
+ objset.add(on_behalf_of)
+ objset.new_scoped_relationship(
+ [on_behalf_of],
+ oe.spdx30.RelationshipType.delegatedTo,
+ oe.spdx30.LifecycleScopeType.build,
+ invoked_by_spdx,
+ )
+
+ elif on_behalf_of:
+ bb.warn("SPDX_ON_BEHALF_OF has no effect if SPDX_INVOKED_BY is not set")
+
+ else:
+ if host_import_key:
+ bb.warn(
+ "SPDX_BUILD_HOST has no effect if SPDX_INCLUDE_BITBAKE_PARENT_BUILD is not set"
+ )
+
+ if invoked_by:
+ bb.warn(
+ "SPDX_INVOKED_BY has no effect if SPDX_INCLUDE_BITBAKE_PARENT_BUILD is not set"
+ )
+
+ if on_behalf_of:
+ bb.warn(
+ "SPDX_ON_BEHALF_OF has no effect if SPDX_INCLUDE_BITBAKE_PARENT_BUILD is not set"
+ )
+
+ for obj in objset.foreach_type(oe.spdx30.Element):
+ obj.extension.append(oe.sbom30.OELinkExtension(link_spdx_id=False))
+ obj.extension.append(oe.sbom30.OEIdAliasExtension())
+
+ oe.sbom30.write_jsonld_doc(d, objset, deploy_dir_spdx / "bitbake.spdx.json")
+
+
+def collect_build_package_inputs(d, objset, build, packages):
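+ # Resolve each listed runtime package to the SPDX package written by
+ # its provider and record all of them as build inputs of 'build';
+ # packages without a known provider are fatal.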
+ providers = oe.spdx_common.collect_package_providers(d)
+
+ build_deps = set()
+ missing_providers = set()
+
+ for name in sorted(packages.keys()):
+ if name not in providers:
+ missing_providers.add(name)
+ continue
+
+ pkg_name, pkg_hashfn = providers[name]
+
+ # Copy all of the package SPDX files into the Sbom elements
+ pkg_spdx, _ = oe.sbom30.find_root_obj_in_jsonld(
+ d,
+ "packages",
+ pkg_name,
+ oe.spdx30.software_Package,
+ software_primaryPurpose=oe.spdx30.software_SoftwarePurpose.install,
+ )
+ build_deps.add(pkg_spdx._id)
+
+ if missing_providers:
+ bb.fatal(
+ f"Unable to find SPDX provider(s) for: {', '.join(sorted(missing_providers))}"
+ )
+
+ if build_deps:
+ objset.new_scoped_relationship(
+ [build],
+ oe.spdx30.RelationshipType.hasInputs,
+ oe.spdx30.LifecycleScopeType.build,
+ sorted(list(build_deps)),
+ )
+
+
+def create_rootfs_spdx(d):
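+ # Describe the rootfs as an SPDX package produced by a "rootfs" build
+ # whose inputs are the packages listed in SPDX_ROOTFS_PACKAGES.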
+ deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
+ deploydir = Path(d.getVar("SPDXROOTFSDEPLOY"))
+ root_packages_file = Path(d.getVar("SPDX_ROOTFS_PACKAGES"))
+ image_basename = d.getVar("IMAGE_BASENAME")
+ machine = d.getVar("MACHINE")
+
+ with root_packages_file.open("r") as f:
+ packages = json.load(f)
+
+ objset = oe.sbom30.ObjectSet.new_objset(d, "%s-%s" % (image_basename, machine))
+
+ rootfs = objset.add_root(
+ oe.spdx30.software_Package(
+ _id=objset.new_spdxid("rootfs", image_basename),
+ creationInfo=objset.doc.creationInfo,
+ name=image_basename,
+ software_primaryPurpose=oe.spdx30.software_SoftwarePurpose.archive,
+ )
+ )
+ set_timestamp_now(d, rootfs, "builtTime")
+
+ rootfs_build = objset.add_root(objset.new_task_build("rootfs", "rootfs"))
+ set_timestamp_now(d, rootfs_build, "build_buildEndTime")
+
+ objset.new_scoped_relationship(
+ [rootfs_build],
+ oe.spdx30.RelationshipType.hasOutputs,
+ oe.spdx30.LifecycleScopeType.build,
+ [rootfs],
+ )
+
+ collect_build_package_inputs(d, objset, rootfs_build, packages)
+
+ oe.sbom30.write_recipe_jsonld_doc(d, objset, "rootfs", deploydir)
+
+
+def create_image_spdx(d):
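+ # Describe each image artifact listed in IMAGE_OUTPUT_MANIFEST as a
+ # software_File (with a sha256 hash and purposes from
+ # SPDX_IMAGE_PURPOSE) produced by a per-imagetype build that takes the
+ # rootfs SPDX package as input.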
+ image_deploy_dir = Path(d.getVar("IMGDEPLOYDIR"))
+ manifest_path = Path(d.getVar("IMAGE_OUTPUT_MANIFEST"))
+ spdx_work_dir = Path(d.getVar("SPDXIMAGEWORK"))
+
+ image_basename = d.getVar("IMAGE_BASENAME")
+ machine = d.getVar("MACHINE")
+
+ objset = oe.sbom30.ObjectSet.new_objset(d, "%s-%s" % (image_basename, machine))
+
+ with manifest_path.open("r") as f:
+ manifest = json.load(f)
+
+ builds = []
+ for task in manifest:
+ imagetype = task["imagetype"]
+ taskname = task["taskname"]
+
+ image_build = objset.add_root(
+ objset.new_task_build(taskname, "image/%s" % imagetype)
+ )
+ set_timestamp_now(d, image_build, "build_buildEndTime")
+ builds.append(image_build)
+
+ artifacts = []
+
+ for image in task["images"]:
+ image_filename = image["filename"]
+ image_path = image_deploy_dir / image_filename
+ a = objset.add_root(
+ oe.spdx30.software_File(
+ _id=objset.new_spdxid("image", image_filename),
+ creationInfo=objset.doc.creationInfo,
+ name=image_filename,
+ verifiedUsing=[
+ oe.spdx30.Hash(
+ algorithm=oe.spdx30.HashAlgorithm.sha256,
+ hashValue=bb.utils.sha256_file(image_path),
+ )
+ ],
+ )
+ )
+ set_purposes(
+ d, a, "SPDX_IMAGE_PURPOSE:%s" % imagetype, "SPDX_IMAGE_PURPOSE"
+ )
+ set_timestamp_now(d, a, "builtTime")
+
+ artifacts.append(a)
+
+ if artifacts:
+ objset.new_scoped_relationship(
+ [image_build],
+ oe.spdx30.RelationshipType.hasOutputs,
+ oe.spdx30.LifecycleScopeType.build,
+ artifacts,
+ )
+
+ if builds:
+ rootfs_image, _ = oe.sbom30.find_root_obj_in_jsonld(
+ d,
+ "rootfs",
+ "%s-%s" % (image_basename, machine),
+ oe.spdx30.software_Package,
+ # TODO: Should we use a purpose to filter here?
+ )
+ objset.new_scoped_relationship(
+ builds,
+ oe.spdx30.RelationshipType.hasInputs,
+ oe.spdx30.LifecycleScopeType.build,
+ [rootfs_image._id],
+ )
+
+ objset.add_aliases()
+ objset.link()
+ oe.sbom30.write_recipe_jsonld_doc(d, objset, "image", spdx_work_dir)
+
+
+def create_image_sbom_spdx(d):
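+ # Assemble the final image SBOM from the rootfs package and the image
+ # files recorded above, and create the IMAGE_LINK_NAME symlink (when
+ # set) next to it.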
+ image_name = d.getVar("IMAGE_NAME")
+ image_basename = d.getVar("IMAGE_BASENAME")
+ image_link_name = d.getVar("IMAGE_LINK_NAME")
+ imgdeploydir = Path(d.getVar("SPDXIMAGEDEPLOYDIR"))
+ machine = d.getVar("MACHINE")
+
+ spdx_path = imgdeploydir / (image_name + ".spdx.json")
+
+ root_elements = []
+
+ # TODO: Do we need to add the rootfs or are the image files sufficient?
+ rootfs_image, _ = oe.sbom30.find_root_obj_in_jsonld(
+ d,
+ "rootfs",
+ "%s-%s" % (image_basename, machine),
+ oe.spdx30.software_Package,
+ # TODO: Should we use a purpose here?
+ )
+ root_elements.append(rootfs_image._id)
+
+ image_objset, _ = oe.sbom30.find_jsonld(
+ d, "image", "%s-%s" % (image_basename, machine), required=True
+ )
+ for o in image_objset.foreach_root(oe.spdx30.software_File):
+ root_elements.append(o._id)
+
+ objset, sbom = oe.sbom30.create_sbom(d, image_name, root_elements)
+
+ oe.sbom30.write_jsonld_doc(d, objset, spdx_path)
+
+ def make_image_link(target_path, suffix):
+ if image_link_name:
+ link = imgdeploydir / (image_link_name + suffix)
+ if link != target_path:
+ link.symlink_to(os.path.relpath(target_path, link.parent))
+
+ make_image_link(spdx_path, ".spdx.json")
+
+
+def sdk_create_spdx(d, sdk_type, spdx_work_dir, toolchain_outputname):
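+ # Describe the SDK rootfs (host or target) as an SPDX package whose
+ # build inputs are the packages installed into the SDK.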
+ sdk_name = toolchain_outputname + "-" + sdk_type
+ sdk_packages = oe.sdk.sdk_list_installed_packages(d, sdk_type == "target")
+
+ objset = oe.sbom30.ObjectSet.new_objset(d, sdk_name)
+
+ sdk_rootfs = objset.add_root(
+ oe.spdx30.software_Package(
+ _id=objset.new_spdxid("sdk-rootfs", sdk_name),
+ creationInfo=objset.doc.creationInfo,
+ name=sdk_name,
+ software_primaryPurpose=oe.spdx30.software_SoftwarePurpose.archive,
+ )
+ )
+ set_timestamp_now(d, sdk_rootfs, "builtTime")
+
+ sdk_build = objset.add_root(objset.new_task_build("sdk-rootfs", "sdk-rootfs"))
+ set_timestamp_now(d, sdk_build, "build_buildEndTime")
+
+ objset.new_scoped_relationship(
+ [sdk_build],
+ oe.spdx30.RelationshipType.hasOutputs,
+ oe.spdx30.LifecycleScopeType.build,
+ [sdk_rootfs],
+ )
+
+ collect_build_package_inputs(d, objset, sdk_build, sdk_packages)
+
+ objset.add_aliases()
+ oe.sbom30.write_jsonld_doc(d, objset, spdx_work_dir / "sdk-rootfs.spdx.json")
+
+
+def create_sdk_sbom(d, sdk_deploydir, spdx_work_dir, toolchain_outputname):
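+ # Turn the sdk-rootfs document into the SDK SBOM: record an
+ # "sdk-populate" build with the rootfs as input and the files found
+ # under the SDK deploy directory as outputs.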
+ # Load the document written earlier
+ rootfs_objset = oe.sbom30.load_jsonld(
+ d, spdx_work_dir / "sdk-rootfs.spdx.json", required=True
+ )
+
+ # Create a new build for the SDK installer
+ sdk_build = rootfs_objset.new_task_build("sdk-populate", "sdk-populate")
+ set_timestamp_now(d, sdk_build, "build_buildEndTime")
+
+ rootfs = rootfs_objset.find_root(oe.spdx30.software_Package)
+ if rootfs is None:
+ bb.fatal("Unable to find rootfs artifact")
+
+ rootfs_objset.new_scoped_relationship(
+ [sdk_build],
+ oe.spdx30.RelationshipType.hasInputs,
+ oe.spdx30.LifecycleScopeType.build,
+ [rootfs],
+ )
+
+ files = set()
+ root_files = []
+
+ # NOTE: os.walk() does not follow directory symlinks by default; file
+ # symlinks are skipped explicitly below
+ for dirpath, dirnames, filenames in os.walk(sdk_deploydir):
+ for fn in filenames:
+ fpath = Path(dirpath) / fn
+ if not fpath.is_file() or fpath.is_symlink():
+ continue
+
+ relpath = str(fpath.relative_to(sdk_deploydir))
+
+ f = rootfs_objset.new_file(
+ rootfs_objset.new_spdxid("sdk-installer", relpath),
+ relpath,
+ fpath,
+ )
+ set_timestamp_now(d, f, "builtTime")
+
+ if fn.endswith(".manifest"):
+ f.software_primaryPurpose = oe.spdx30.software_SoftwarePurpose.manifest
+ elif fn.endswith(".testdata.json"):
+ f.software_primaryPurpose = (
+ oe.spdx30.software_SoftwarePurpose.configuration
+ )
+ else:
+ set_purposes(d, f, "SPDX_SDK_PURPOSE")
+ root_files.append(f)
+
+ files.add(f)
+
+ if files:
+ rootfs_objset.new_scoped_relationship(
+ [sdk_build],
+ oe.spdx30.RelationshipType.hasOutputs,
+ oe.spdx30.LifecycleScopeType.build,
+ files,
+ )
+ else:
+ bb.warn(f"No SDK output files found in {sdk_deploydir}")
+
+ objset, sbom = oe.sbom30.create_sbom(
+ d, toolchain_outputname, sorted(list(files)), [rootfs_objset]
+ )
+
+ oe.sbom30.write_jsonld_doc(
+ d, objset, sdk_deploydir / (toolchain_outputname + ".spdx.json")
+ )
diff --git a/poky/meta/lib/oe/spdx_common.py b/poky/meta/lib/oe/spdx_common.py
new file mode 100644
index 0000000000..dfe90f96cf
--- /dev/null
+++ b/poky/meta/lib/oe/spdx_common.py
@@ -0,0 +1,227 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import bb
+import collections
+import json
+import oe.packagedata
+import os
+import re
+import shutil
+
+from pathlib import Path
+
+
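+# Matches SPDX license identifier comments; for example, a line such as
+# "# SPDX-License-Identifier: GPL-2.0-only OR MIT" yields the identifier
+# string "GPL-2.0-only OR MIT".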
+LIC_REGEX = re.compile(
+ rb"^\W*SPDX-License-Identifier:\s*([ \w\d.()+-]+?)(?:\s+\W*)?$",
+ re.MULTILINE,
+)
+
+
+def extract_licenses(filename):
+ """
+ Extract SPDX License identifiers from a file
+ """
+ try:
+ with open(filename, "rb") as f:
+ size = min(15000, os.stat(filename).st_size)
+ txt = f.read(size)
+ licenses = re.findall(LIC_REGEX, txt)
+ if licenses:
+ ascii_licenses = [lic.decode("ascii") for lic in licenses]
+ return ascii_licenses
+ except Exception as e:
+ bb.warn(f"Exception reading {filename}: {e}")
+ return []
+
+
+def is_work_shared_spdx(d):
+ return bb.data.inherits_class("kernel", d) or ("work-shared" in d.getVar("WORKDIR"))
+
+
+def load_spdx_license_data(d):
+
+ with open(d.getVar("SPDX_LICENSES"), "r") as f:
+ data = json.load(f)
+ # Transform the license array to a dictionary
+ data["licenses"] = {l["licenseId"]: l for l in data["licenses"]}
+
+ return data
+
+
+def process_sources(d):
+ """
+ Returns True if the sources for this recipe should be included in the SPDX
+ or False if not
+ """
+ pn = d.getVar("PN")
+ assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
+ if pn in assume_provided:
+ for p in d.getVar("PROVIDES").split():
+ if p != pn:
+ pn = p
+ break
+
+ # glibc-locale: do_fetch, do_unpack and do_patch tasks have been deleted,
+ # so avoid archiving source here.
+ if pn.startswith("glibc-locale"):
+ return False
+ if d.getVar("PN") == "libtool-cross":
+ return False
+ if d.getVar("PN") == "libgcc-initial":
+ return False
+ if d.getVar("PN") == "shadow-sysroot":
+ return False
+
+ # We just archive gcc-source for all the gcc-related recipes
+ if d.getVar("BPN") in ["gcc", "libgcc"]:
+ bb.debug(1, "spdx: There is a bug in the scan of %s, do nothing" % pn)
+ return False
+
+ return True
+
+
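+# A direct task dependency: the providing recipe (pn), its hash filename as
+# reported in BB_TASKDEPDATA (hashfn), and whether it contributes to the
+# current task's hash (in_taskhash).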
+Dep = collections.namedtuple("Dep", ["pn", "hashfn", "in_taskhash"])
+
+
+def collect_direct_deps(d, dep_task):
+ """
+ Find direct dependencies of current task
+
+ Returns the list of recipes that have a dep_task that the current task
+ depends on
+ """
+ current_task = "do_" + d.getVar("BB_CURRENTTASK")
+ pn = d.getVar("PN")
+
+ taskdepdata = d.getVar("BB_TASKDEPDATA", False)
+
+ for this_dep in taskdepdata.values():
+ if this_dep[0] == pn and this_dep[1] == current_task:
+ break
+ else:
+ bb.fatal(f"Unable to find this {pn}:{current_task} in taskdepdata")
+
+ deps = set()
+
+ for dep_name in this_dep.deps:
+ dep_data = taskdepdata[dep_name]
+ if dep_data.taskname == dep_task and dep_data.pn != pn:
+ deps.add((dep_data.pn, dep_data.hashfn, dep_name in this_dep.taskhash_deps))
+
+ return sorted(deps)
+
+
+def get_spdx_deps(d):
+ """
+ Reads the SPDX dependencies JSON file and returns the data
+ """
+ spdx_deps_file = Path(d.getVar("SPDXDEPS"))
+
+ deps = []
+ with spdx_deps_file.open("r") as f:
+ for dep in json.load(f):
+ deps.append(Dep(*dep))
+ return deps
+
+
+def collect_package_providers(d):
+ """
+ Returns a dictionary where each RPROVIDES is mapped to the package that
+ provides it
+ """
+ deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
+
+ providers = {}
+
+ deps = collect_direct_deps(d, "do_create_spdx")
+ deps.append((d.getVar("PN"), d.getVar("BB_HASHFILENAME"), True))
+
+ for dep_pn, dep_hashfn, _ in deps:
+ localdata = d
+ recipe_data = oe.packagedata.read_pkgdata(dep_pn, localdata)
+ if not recipe_data:
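+ # If the package data is not found in the default PKGDATA_DIR,
+ # fall back to the SDK pkgdata location so that dependencies
+ # built for the SDK can still be resolved.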
+ localdata = bb.data.createCopy(d)
+ localdata.setVar("PKGDATA_DIR", "${PKGDATA_DIR_SDK}")
+ recipe_data = oe.packagedata.read_pkgdata(dep_pn, localdata)
+
+ for pkg in recipe_data.get("PACKAGES", "").split():
+ pkg_data = oe.packagedata.read_subpkgdata_dict(pkg, localdata)
+ rprovides = set(
+ n
+ for n, _ in bb.utils.explode_dep_versions2(
+ pkg_data.get("RPROVIDES", "")
+ ).items()
+ )
+ rprovides.add(pkg)
+
+ if "PKG" in pkg_data:
+ pkg = pkg_data["PKG"]
+ rprovides.add(pkg)
+
+ for r in rprovides:
+ providers[r] = (pkg, dep_hashfn)
+
+ return providers
+
+
+def get_patched_src(d):
+ """
+ Save patched source of the recipe in SPDX_WORKDIR.
+ """
+ spdx_workdir = d.getVar("SPDXWORK")
+ spdx_sysroot_native = d.getVar("STAGING_DIR_NATIVE")
+ pn = d.getVar("PN")
+
+ workdir = d.getVar("WORKDIR")
+
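+ # Run do_unpack/do_patch against a temporary WORKDIR (SPDXWORK) so the
+ # patched sources can be archived without disturbing the real build;
+ # work-shared sources such as the kernel are copied instead. The
+ # original WORKDIR is restored in the finally block below.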
+ try:
+ # The kernel class functions require the source to be in work-shared, so we don't change WORKDIR
+ if not is_work_shared_spdx(d):
+ # Change WORKDIR so that do_unpack and do_patch run in another dir.
+ d.setVar("WORKDIR", spdx_workdir)
+ # Restore the original path to the recipe's native sysroot (it's relative to WORKDIR).
+ d.setVar("STAGING_DIR_NATIVE", spdx_sysroot_native)
+
+ # Changing 'WORKDIR' also changes 'B', so create 'B' since some of the
+ # following tasks (such as a recipe's do_patch) may require it to exist.
+ bb.utils.mkdirhier(d.getVar("B"))
+
+ bb.build.exec_func("do_unpack", d)
+ # Copy source of kernel to spdx_workdir
+ if is_work_shared_spdx(d):
+ share_src = d.getVar("WORKDIR")
+ d.setVar("WORKDIR", spdx_workdir)
+ d.setVar("STAGING_DIR_NATIVE", spdx_sysroot_native)
+ src_dir = (
+ spdx_workdir
+ + "/"
+ + d.getVar("PN")
+ + "-"
+ + d.getVar("PV")
+ + "-"
+ + d.getVar("PR")
+ )
+ bb.utils.mkdirhier(src_dir)
+ if bb.data.inherits_class("kernel", d):
+ share_src = d.getVar("STAGING_KERNEL_DIR")
+ cmd_copy_share = "cp -rf " + share_src + "/* " + src_dir + "/"
+ cmd_copy_shared_res = os.popen(cmd_copy_share).read()
+ bb.note("cmd_copy_shared_result = " + cmd_copy_shared_res)
+
+ git_path = src_dir + "/.git"
+ if os.path.exists(git_path):
+ shutil.rmtree(git_path)
+
+ # Make sure gcc and kernel sources are patched only once
+ if not (d.getVar("SRC_URI") == "" or is_work_shared_spdx(d)):
+ bb.build.exec_func("do_patch", d)
+
+ # Some userland recipes have no source.
+ if not os.path.exists(spdx_workdir):
+ bb.utils.mkdirhier(spdx_workdir)
+ finally:
+ d.setVar("WORKDIR", workdir)
diff --git a/poky/meta/lib/oe/sstatesig.py b/poky/meta/lib/oe/sstatesig.py
index b6f8ab92cb..1f97606763 100644
--- a/poky/meta/lib/oe/sstatesig.py
+++ b/poky/meta/lib/oe/sstatesig.py
@@ -399,7 +399,13 @@ def find_siginfo(pn, taskname, taskhashlist, d):
return siginfo.rpartition('.')[2]
def get_time(fullpath):
- return os.stat(fullpath).st_mtime
+ # NFS can end up in a weird state where the file exists but has no stat info.
+ # If that happens, we assume it doesn't actually exist and show a warning
+ try:
+ return os.stat(fullpath).st_mtime
+ except FileNotFoundError:
+ bb.warn("Could not obtain mtime for {}".format(fullpath))
+ return None
# First search in stamps dir
localdata = d.createCopy()
@@ -422,13 +428,17 @@ def find_siginfo(pn, taskname, taskhashlist, d):
if taskhashlist:
for taskhash in taskhashlist:
if fullpath.endswith('.%s' % taskhash):
- hashfiles[taskhash] = {'path':fullpath, 'sstate':False, 'time':get_time(fullpath)}
+ mtime = get_time(fullpath)
+ if mtime:
+ hashfiles[taskhash] = {'path':fullpath, 'sstate':False, 'time':mtime}
if len(hashfiles) == len(taskhashlist):
foundall = True
break
else:
hashval = get_hashval(fullpath)
- hashfiles[hashval] = {'path':fullpath, 'sstate':False, 'time':get_time(fullpath)}
+ mtime = get_time(fullpath)
+ if mtime:
+ hashfiles[hashval] = {'path':fullpath, 'sstate':False, 'time':mtime}
if not taskhashlist or (len(hashfiles) < 2 and not foundall):
# That didn't work, look in sstate-cache
@@ -459,7 +469,9 @@ def find_siginfo(pn, taskname, taskhashlist, d):
actual_hashval = get_hashval(fullpath)
if actual_hashval in hashfiles:
continue
- hashfiles[actual_hashval] = {'path':fullpath, 'sstate':True, 'time':get_time(fullpath)}
+ mtime = get_time(fullpath)
+ if mtime:
+ hashfiles[actual_hashval] = {'path':fullpath, 'sstate':True, 'time':mtime}
return hashfiles
@@ -552,6 +564,7 @@ def OEOuthashBasic(path, sigfile, task, d):
if task == "package":
include_timestamps = True
include_root = False
+ source_date_epoch = float(d.getVar("SOURCE_DATE_EPOCH"))
hash_version = d.getVar('HASHEQUIV_HASH_VERSION')
extra_sigdata = d.getVar("HASHEQUIV_EXTRA_SIGDATA")
@@ -643,7 +656,11 @@ def OEOuthashBasic(path, sigfile, task, d):
raise Exception(msg).with_traceback(e.__traceback__)
if include_timestamps:
- update_hash(" %10d" % s.st_mtime)
+ # Need to clamp to SOURCE_DATE_EPOCH
+ if s.st_mtime > source_date_epoch:
+ update_hash(" %10d" % source_date_epoch)
+ else:
+ update_hash(" %10d" % s.st_mtime)
update_hash(" ")
if stat.S_ISBLK(s.st_mode) or stat.S_ISCHR(s.st_mode):
diff --git a/poky/meta/lib/oe/utils.py b/poky/meta/lib/oe/utils.py
index 14a7d07ef0..c9c7a47041 100644
--- a/poky/meta/lib/oe/utils.py
+++ b/poky/meta/lib/oe/utils.py
@@ -482,19 +482,6 @@ def get_multilib_datastore(variant, d):
localdata.setVar("MLPREFIX", "")
return localdata
-class ImageQAFailed(Exception):
- def __init__(self, description, name=None, logfile=None):
- self.description = description
- self.name = name
- self.logfile=logfile
-
- def __str__(self):
- msg = 'Function failed: %s' % self.name
- if self.description:
- msg = msg + ' (%s)' % self.description
-
- return msg
-
def sh_quote(string):
import shlex
return shlex.quote(string)