| field | value | date |
|---|---|---|
| author | Andrew Geissler <geissonator@yahoo.com> | 2021-02-13 00:35:20 +0300 |
| committer | Brad Bishop <bradleyb@fuzziesquirrel.com> | 2021-02-25 23:15:06 +0300 |
| commit | d1e894976442c78577f52fe7b169812d00289120 (patch) | |
| tree | b65864e37eca17e36157663e48e82e3745145379 /poky/bitbake/lib | |
| parent | ec0e87b37a04927ed4549b1b9d2e23a8c345cb7a (diff) | |
| download | openbmc-d1e894976442c78577f52fe7b169812d00289120.tar.xz | |
poky: subtree update:796be0593a..9294bc4bb4
This includes our temporary libpam revert until OpenBMC can bring in
support for the new libraries. See openbmc/openbmc#3750 for more
information.
Abdellatif El Khlifi (4):
kernel-fitimage: adding support for Initramfs bundle and u-boot script
kernel: skip installing fitImage when using Initramfs bundles
oeqa/selftest/imagefeatures: adding fitImage initramfs bundle testcase
ref-manual/ref-classes: update kernel-fitimage with Initramfs bundle and boot script
Adrian Herrera (2):
scripts: oe-run-native, fix *-native directories
common-licenses: add BSD-3-Clause-Clear license
Alan Perry (2):
binutils: add libopcodes package for perf
iproute2: Add subpackage for rdma command
Alejandro Hernandez Samaniego (2):
newlib: Upgrade 3.3.0 -> 4.1.0
newlib: Update licence
Alex Stewart (1):
opkg: upgrade to version 0.4.4
Alexander Kanavin (89):
selftest/reproducible: enable world reproducibility test
selftest/reproducible: add an exclusion list for items that are not yet reproducible
kea: upgrade 1.7.10 -> 1.8.1
valgrind: exclude bar_bad/bar_bad_xml from ptests
bzip2: run ptests without valgrind
lttng-tools: disable more failing ptests
glib-2.0: add a patch to increase a test timeout
acpica: upgrade 20201113 -> 20201217
bind: upgrade 9.16.9 -> 9.16.10
diffoscope: upgrade 161 -> 163
dnf: upgrade 4.4.0 -> 4.5.2
enchant2: upgrade 2.2.13 -> 2.2.14
epiphany: upgrade 3.38.1 -> 3.38.2
ethtool: upgrade 5.9 -> 5.10
gtk+3: upgrade 3.24.23 -> 3.24.24
init-system-helpers: upgrade 1.58 -> 1.60
kbd: upgrade 2.3.0 -> 2.4.0
kea: upgrade 1.8.1 -> 1.8.2
libmodulemd: upgrade 2.9.4 -> 2.11.1
libpcre2: upgrade 10.35 -> 10.36
libtirpc: upgrade 1.2.6 -> 1.3.1
libusb1: upgrade 1.0.23 -> 1.0.24
libva: upgrade 2.9.0 -> 2.10.0
libx11: upgrade 1.6.12 -> 1.7.0
lighttpd: upgrade 1.4.56 -> 1.4.57
ninja: upgrade 1.10.1 -> 1.10.2
puzzles: upgrade to latest revision
python3-hypothesis: upgrade 5.41.5 -> 5.43.3
python3-py: upgrade 1.9.0 -> 1.10.0
python3-setuptools-scm: upgrade 4.1.2 -> 5.0.1
sqlite3: upgrade 3.33.0 -> 3.34.0
stress-ng: upgrade 0.11.24 -> 0.12.00
sudo: upgrade 1.9.3p1 -> 1.9.4p1
sysvinit: upgrade 2.97 -> 2.98
vala: upgrade 0.50.1 -> 0.50.2
vulkan-headers: upgrade 1.2.154.0 -> 1.2.162.0
webkitgtk: upgrade 2.30.2 -> 2.30.4
xprop: upgrade 1.2.4 -> 1.2.5
xserver-xorg: upgrade 1.20.9 -> 1.20.10
glib-2.0: update 2.66.2 -> 2.66.4
rpm: update 4.16.0 -> 4.16.1.2
piglit: update to latest revision
sbc: update 1.4 -> 1.5
libdnf: update 0.55.0 -> 0.55.2
libva-utils: update 2.9.1 -> 2.10.0
python3-importlib-metadata: update 3.1.1 -> 3.3.0
python3: update 3.9.0 -> 3.9.1
vulkan-loader: upgrade 1.2.154.1 -> 1.2.162.0
vulkan-tools: upgrade 1.2.154.0 -> 1.2.162.0
systemd-bootchart: update 233 -> 234
zstd: add recipe from meta-oe
zstd: update 1.4.5 -> 1.4.8
devtool: gitsm:// should be handled same as git:// in upgrades
ovmf: upgrade 202008 -> 202011
libksba: update 1.4.0 -> 1.5.0
libjitterentropy: update 2.2.0 -> 3.0.0
icu: update 68.1 -> 68.2
gnutls: update 3.6.15 -> 3.7.0
gnupg: update 2.2.23 -> 2.2.26
boost: update 1.74.0 -> 1.75.0
kexec-tools: update 2.0.20 -> 2.0.21
vulkan-samples: update to latest revision
libpam: update 1.3.1 -> 1.5.1
strace: update 5.9 -> 5.10
python3-pytest: update 6.1.2 -> 6.2.1
mtools: update 4.0.25 -> 4.0.26
gnu-config: update to latest revision
cmake: update 3.18.4 -> 3.19.2
ccache: upgrade 3.7.11 -> 4.1
ccache.bbclass: use ccache from host distribution
gawk: add missing ptest dependency
util-linux: upgrade 2.36 -> 2.36.1
ell: upgrade 0.33 -> 0.35
net-tools: correct version check
oeqa/ptest: print a warning if ptests failed
bash: update 5.0 -> 5.1
runtime_test.py: correct output check for bash 5.1
distcc: update 3.3.3 -> 3.3.5
gptfdisk: update 1.0.5 -> 1.0.6
python3-setuptools: update 51.0.0 -> 52.0.0
ruby: update 2.7.2 -> 3.0.0
vulkan-samples: update to latest revision
dpkg: update 1.20.5 -> 1.20.7.1
libhandy: upgrade 1.0.2 -> 1.0.3
tar: update 1.32 -> 1.33
at: correct upstream version check
shaderc: correct version check
spirv-tools: correct version check
u-boot: upgrade 2020.10 -> 2021.01
Alistair Francis (1):
opensbi: Bump from 0.8 to 0.9
Anatol Belski (1):
iproute2: Make it easier to manipulate SUBDIRS list from bbappend
Andreas Müller (1):
openssl: re-enable whirlpool
Andrey Mozzhuhin (1):
toolchain-shar-extract.sh: Handle special characters in script path
Anton Kachalov (1):
rootfs: add option to allow delayed postinsts on read-only rootfs
Anuj Mittal (45):
mesa: add more details to elf-tls patch
mesa: remove patch disabling asm
linux-yocto: update genericx86 to v5.4.87
enchant2: upgrade 2.2.14 -> 2.2.15
gstreamer1.0: upgrade 1.18.2 -> 1.18.3
gstreamer1.0-plugins-base: upgrade 1.18.2 -> 1.18.3
gstreamer1.0-plugins-good: upgrade 1.18.2 -> 1.18.3
gstreamer1.0-plugins-bad: upgrade 1.18.2 -> 1.18.3
gstreamer1.0-libav: upgrade 1.18.2 -> 1.18.3
gstreamer1.0-omx: upgrade 1.18.2 -> 1.18.3
gstreamer1.0-rtsp-server: upgrade 1.18.2 -> 1.18.3
gstreamer1.0-python: upgrade 1.18.2 -> 1.18.3
gstreamer1.0-vaapi: upgrade 1.18.2 -> 1.18.3
gst-examples: upgrade 1.18.2 -> 1.18.3
gst-devtools: upgrade 1.18.2 -> 1.18.3
gstreamer1.0-plugins-ugly: upgrade 1.18.2 -> 1.18.3
libepoxy: upgrade 1.5.4 -> 1.5.5
libproxy: upgrade 0.4.15 -> 0.4.17
stress-ng: upgrade 0.12.00 -> 0.12.01
vulkan-tools: upgrade 1.2.162.0 -> 1.2.162.1
harfbuzz: upgrade 2.7.2 -> 2.7.4
mpg123: upgrade 1.26.3 -> 1.26.4
piglit: upgrade to latest revision
vala: upgrade 0.50.2 -> 0.50.3
gcr: upgrade 3.38.0 -> 3.38.1
python3-pygments: upgrade 2.7.3 -> 2.7.4
logrotate: upgrade 3.17.0 -> 3.18.0
lzip: upgrade 1.21 -> 1.22
python3-mako: upgrade 1.1.3 -> 1.1.4
wget: upgrade 1.20.3 -> 1.21.1
lighttpd: upgrade 1.4.57 -> 1.4.58
python3-importlib-metadata: upgrade 3.3.0 -> 3.4.0
python3-git: upgrade 3.1.11 -> 3.1.12
acpica: upgrade 20201217 -> 20210105
diffstat: upgrade 1.63 -> 1.64
python3-dbusmock: upgrade 0.19 -> 0.22.0
python3-hypothesis: upgrade 5.43.3 -> 6.0.2
python3-numpy: upgrade 1.19.4 -> 1.19.5
resolvconf: upgrade 1.83 -> 1.87
sudo: upgrade 1.9.4p1 -> 1.9.5p1
git: upgrade 2.29.2 -> 2.30.0
meson: upgrade 0.56.0 -> 0.56.2
rt-tests/hwlatdetect: upgrade 1.9 -> 1.10
gstreamer1.0: fix failing ptest
python3: fix CVE-2021-3177
Awais Belal (1):
kernel.bbclass: fix deployment for initramfs images
Bruce Ashfield (38):
linux-yocto-rt/5.4: update to -rt44
linux-yocto/5.4: update to v5.4.80
lttng-modules: fix build against v5.10+
kern-tools: non-gcc config support and option re-classification
linux-yocto/cfg: qemuppc: set CONFIG_SCSI to '=y'
linux-yocto/5.4: update to v5.4.82
linux-yocto/cfg: qemuarm64-gfx.cfg: add CONFIG_INPUT_UINPUT
linux-yocto/5.4: update to v5.4.83
linux-yocto/5.8/cfg: fix -tiny warnings
linux-yocto/5.4/cfg: fix -tiny warnings
systemtap: fix on target build for 4.4 and 5.10+
linux-yocto/5.4/cfg: fix FIRMWARE_LOADER warnings
kernel-devsrc: fix 32bit ARM devsrc builds
linux-yocto/5.4: update to v5.4.85
linux-yocto-dev: bump to v5.11-rc
libc-headers: update to v5.10
machine/qemuarm*: add vmalloc kernel parameter
linux-yocto: introduce v5.10 reference kernel recipes
linux-yocto/5.10: update to v5.10.2
conf/machine: bump qemu preferred versions to 5.10
poky/poky-tiny: set preferred kernel to 5.10
yocto-bsp: explicitly set preferred version for reference boards
poky-alt: don't use conditional assignment for preferred kernel version
linux-yocto/5.10: update to v5.10.4
linux-yocto/5.10: update to v5.10.5
linux-yocto/5.4: update to v5.4.87
linux-yocto/5.10/cfg: x86 and beaglebone config fixes
linux-yocto: remove 5.8 recipes
yocto-bsp: drop 5.8 bbappend
linux-yocto/5.10: update to v5.10.8
linux-yocto/5.4: update to v5.4.90
linux-yocto-rt/5.10: fix 5.10-rt build breakage
linux-yocto-rt/5.4: fix 5.4-stable caused build breakage
linux-yocto/5.10: update to v5.10.10
linux-yocto/5.10: update to v5.10.12
linux-yocto/5.4: update to v5.4.94
linux-yocto/5.10: binutils 2.36 fixes
yocto-bsp: linux-yocto: update to v5.10.12
Changhyeok Bae (1):
python3-importlib-metadata: Add toml dependency
Changqing Li (4):
libexif: fix CVE-2020-0198; CVE-2020-0452
libpam: support usrmerge
libpam: remove unused code
qemu: fix do_compile error
Chee Yang Lee (1):
initrdscripts: init-install-efi.sh install extra files for ESP
Chen Qi (1):
systemd: change /bin/nologin to /sbin/nologin
Chris Laplante (2):
contrib/git-hooks: add a sendemail-validate example hook that adds FROM: lines to outgoing patch emails
systemd.bbclass: improve error message when a service unit specified in SYSTEMD_SERVICE is not found
Christophe Priouzeau (1):
bitbake: fetch2/wget: Update user-agent
Christopher Larson (2):
grub-efi-cfg: exclude OVERRIDES from build_efi_cfg vardeps
uboot-extlinux-config: exclude OVERRIDES from do_create_extlinux_config vardeps
Deepak Rawat (1):
openssl: add support for mingw64 as target
Denys Dmytriyenko (2):
maintainers: update own email address
wayland: upgrade 1.18.0 -> 1.19.0
Diego Sueiro (4):
wic: Introduce empty plugin to create unformatted empty partitions
modutils-initscripts: Use depmod -a when modules.dep is empty
staging: Introduce /sysroot-only to SYSROOT_DIRS
dev-manual: Add usage of /sysroot-only in SYSROOT_DIRS
Dmitry Baryshkov (4):
perl: fix installation failure because of shell issue
linux-firmware: upgrade 20201118 -> 20201218
linux-firmware: package firmware for Lontium lt9611uxc bridge
mesa,mesa-gl: upgrade to 20.3.2
Dorinda (8):
sanity: Verify that user isn't building in PSEUDO_IGNORE_PATHS
sanity.bbclass: sanity check for if bitbake is present in PATH
sanity.bbclass: check if PSEUDO_IGNORE_PATHS and ${S} overlap
elfutils: split libdebuginfod into its own package
elfutils: add PACKAGECONFIG for debuginfod
elfutils: add support for ipk
sanity.bbclass: Check if PSEUDO_IGNORE_PATHS and paths under pseudo control overlap
oe-pkgdata-util: Check if environment script is initialized
Easwar Hariharan (1):
classes/kernel-fitimage: make fitimage_emit_section_config more readable
Elvis Stansvik (1):
ref-manual: terms: Fix poky tarball root folder
Hongxu Jia (1):
deb: do not insert feed uris if apt not installed
Jack Mitchell (1):
distutils3: allow setup.py to be run from a different directory to ${S}
Joey Degges (4):
bitbake: tests/fetch: Organize usehead tests by net requirements
bitbake: tests/fetch: Document behavior of test_gitfetch_usehead
bitbake: tests/fetch: Test usehead with a non-default name
bitbake: fetch/git: Fix usehead for non-default names
Jonathan Richardson (1):
core-image-tiny-initramfs: Add compatibility for aarch64
Jose Quaresma (22):
gstreamer1.0: upgrade 1.18.1 -> 1.18.2
gstreamer1.0-plugins-bad: v4l2codecs fix typo
gstreamer1.0-plugins-bad: add support for aom plugin
gstreamer1.0-plugins-bad: add support for x265 plugin
gstreamer1.0-plugins-bad: sctp plugin uses the internal usrsctp static lib
gstreamer1.0-plugins-bad: remove unsupported plugins comment
gstreamer1.0-plugins-bad: netsim plugin don't have external deps
gstreamer1.0-plugins-bad: transcode plugin external deps is always present
gstreamer1.0: use the correct meson option for the capabilities
shaderc: upgrade 2020.3 -> 2020.4
spirv-tools: upgrade 2020.5 -> 2020.6
common-licenses: Add GPL-3.0-with-bison-exception
glslang: upgrade 8.13.3743 -> 11.1.0
glslang: enable shared libs
glslang: disable precompiled header
shaderc: avoid reproducible issues
shaderc: fix the build with glslang 11.1.0
spirv-headers: Add recipe
spirv-tools: cleanup
shaderc: add spirv-headers as dependency
spirv-tools: fix reproducible
selftest/reproducible: remove spirv-tools-dev from exclusion list
Joshua Watt (4):
diffoscope: upgrade 163 -> 164
ref-manual: Clarify recommended operator for PROVIDES
bash: Disable bracketed input by default
bitbake: logging: Make bitbake logger compatible with python logger
Kai Kang (1):
adwaita-icon-theme: add version 3.34.3 back
Kamel Bouhara (2):
npm.bbclass: make shrinkwrap file optional
recipetool: create: only add npmsw url if required
Kevin Hao (2):
Revert "yocto-bsp: explicitly set preferred version for reference boards"
meta-yocto-bsp: Bump the kernel to v5.10
Khairul Rohaizzat Jamaluddin (4):
openssl: Update 1.1.1h -> 1.1.1i
go: Update 1.15.5 -> 1.15.6
curl: Update 7.73.0 -> 7.74.0
ffmpeg: Fix CVE-2020-35964, CVE-2020-35965
Khem Raj (37):
musl: Update to latest master
systemd: Fix reallocarray check
go.bbclass: Use external linker for native packages
qemuriscv: check serial consoles w.r.t. /proc/consoles
busybox-inittab: Implement SYSVINIT_ENABLED_GETTYS and USE_VT
initscripts: use quotes for shell variable comparison
busybox: Install /etc/default/rcS when used as init system
busybox: Run mdev as daemon
rcS: Define identifier for init system used
initscripts: Use initctl on sysvinit only
busybox: Sync rcS.default with sysvinit
ltp: Fix ltp-pan crash on 32bit arches using 64bit time_t
pulseaudio: Fix build with clang for non-x86 target
util-linux: Build fixes for 32bit arches with 64bit time_t
libpam: Drop musl patches
ccache: Build fixes for clang and riscv32
shadow: Remove lastlog pam plugin on musl system
rxvt-unicode: Disable lastlog on musl systems
openssh: Disable lastlog on musl
dropbear: Disable lastlog and wtmp on musl
ccache: Fix build on aarch64/clang
openssl: Enable rc4/rc2/bf/md4 algorithms
openssl: Enable psk for qtbase
libyaml: Enable static lib on native/nativesdk
musl/glibc: Document assembly file directive fix
musl: Update to 1.2.2 release
binutils: Upgrade to 2.36 release
binutils: Package libdep linker plugins
binutils: Disable parallel install for target/nativesdk binutils
musl: Drop adding .file directive in asm files
glibc: Drop adding .file directive in asm files
glibc: Upgrade to 2.33
glibc: Enable cet
glibc: Require full ISA support for x86-64 level marker
security_flags.inc: Use -O with -D_FORTIFY_SOURCE
systemd: Fix build on musl
autoconf: Fix typo for prefuncs
Lee Chee Yang (8):
gdk-pixbuf: fix CVE-2020-29385
wic/direct/kparser: ensure fsuuid for vfat and msdos align with format
p11-kit: upgrade 0.23.21 -> 0.23.22
cve-check: replace Looseversion with custom version class
cve_check: add CVE_VERSION_SUFFIX to indicate suffix in versioning
openssl: set CVE_VERSION_SUFFIX
wic/selftest: test_permissions also test bitbake image
wic: debug mode to keep tmp directory
Leon Anavi (1):
common-tasks.rst: Fix GNU_HASH in hello.bb
Li Wang (2):
qemu: CVE-2020-25723
qemu: CVE-2020-28916
Luca Boccassi (7):
classes/kernel-fitimage: add ability to sign individual images
systemd: update 246 -> 247
systemd: add package config for systemd-oomd
systemd: ship new systemd-dissect in -extra-utils
systemd: set -Dmode=release as recommended by NEWS
systemd: add RRECOMMENDS for weak dependencies, if enabled
systemd: update to v247.3
Mans Rullgard (1):
boost: drop arm-intrinsics.patch
Marek Vasut (2):
meta: toolchain-shar-relocate.sh: Do not use $target_sdk_dir as regex
meta: toolchain-shar-relocate.sh: Filter out post-relocate-setup script
Mark Jonas (1):
parted: Make readline dependency optional
Martin Jansa (3):
license.bbclass: Add COMMON_LICENSE_DIR and LICENSE_PATH dirs to PSEUDO_IGNORE_PATHS
busybox.inc: install rcS, rcK and rcS.default only with busybox in VIRTUAL-RUNTIME_init_manager
image_types.bbclass: tar: use posix format instead of gnu
Matt Hoosier (1):
bitbake: fetch/git: download LFS content too during do_fetch
Maxime Roussin-Bélanger (1):
meta: add missing descriptions in some support recipes
Michael Halstead (4):
releases: conf: add link to 3.2.1, update to include 3.2.1
releases: conf: add link to 3.1.5, update to include 3.2.1 & 3.1.5
uninative: Upgrade to 2.10
yocto-uninative.inc: version 2.11 updates glibc to 2.33
Michael Ho (2):
rootfs_ipk: allow do_populate_sdk in parallel to do_rootfs
license_image.bbclass: fix missing recipeinfo on self
Mike Looijmans (1):
license_image.bbclass: Don't attempt to symlink to the same file
Mikko Rapeli (1):
zip: whitelist CVE-2018-13410 and CVE-2018-13684
Milan Shah (2):
oe-pkgdata-util: Added a test to verify oe-pkgdata-util without parameters
bitbake: utils: add docstrings to functions
Mingli Yu (4):
kbd: fix transaction conflict
systemd: resolve executable path if it is relative
libpam: add ptest support
qemu: make ptest rework
Nathan Rossi (8):
gcc: Add patch to resolve i*86 tune configuration overrides
qemu.inc: Add seccomp PACKAGECONFIG option
ncurses: Prevent LDFLAGS being emitted in .pc files
which: add nativesdk to BBCLASSEXTEND
sed: add nativesdk to BBCLASSEXTEND
grep: add nativesdk to BBCLASSEXTEND
coreutils: enable xattrs by default for nativesdk
gcc: Backport patch to resolve i*86 tune configuration overrides
Naveen Saini (1):
gstreamer1.0-plugins-bad: fix msdk pkgconfig build failure
Oleksandr Kravchuk (4):
python3-smmap: update to 4.0.0
python3-numpy: update to 0.20.0
inetutils: update to 2.0
ell: update to 0.37
Oleksiy Obitotskyy (2):
flex: Fix --noline option behavior
dtc: improve reproducibility
Oleksiy Obitotskyy (1):
toolchain-shar-relocate.sh: Fix handling files with colons
Ovidiu Panait (5):
timezone: upgrade to 2020e
timezone: upgrade to 2020f
variables: Add documentation for KERNEL_DTC_FLAGS
kernel-devicetree: Introduce KERNEL_DTC_FLAGS to pass dtc flags
timezone: upgrade to 2021a
Paul Barker (22):
bitbake.conf: Prevent pyc file generation in pseudo context
documentation: Simplify oe_wiki and oe_home links
documentation: Simplify layerindex and layer links
documentation: Simplify remaining yocto_home links
profile-manual: Simplify yocto_bugs link
ref-manual: Simplify oe_lists link
documentation: Use https links where possible
selftest: Add argument to keep build dir
wic: Add workdir argument
wic: Allow exec_native_cmd to run HOSTTOOLS
wic: Ensure internal workdir is not reused
image_types_wic: Move wic working directory
wic: Update pseudo db when excluding content from rootfs
wic: Copy rootfs dir if fstab needs updating
wic: Optimise fstab modification for ext2/3/4 and msdos partitions
bitbake: bitbake-hashclient: Remove obsolete call to client.connect
bitbake: hashserv: client: Fix handling of null responses
bitbake: hashserv: Support read-only server
bitbake: hashserv: Support upstream command line argument
bitbake: hashserv: Add short forms of remaining command line arguments
bitbake: hashserv: server: Support searching upstream for outhash
bitbake: hashserv: Add get-outhash message
Paul Eggleton (11):
classes/kernel-fitimage: add variable for description
classes/kernel-fitimage: allow substituting mkimage command
classes/kernel-fitimage: add ability to add additional signing options
oe-selftest: move FIT image tests to their own module
oe-selftest: fitimage: Test for FIT_DESC
oe-selftest: fitimage: add test for signing FIT images
classes: minor corrections to kernel-fitimage section
variables: clarify KERNEL_ALT_IMAGETYPE reference
variables: explicitly state that UBOOT_MKIMAGE_DTCOPTS is optional
variables: Add documentation for new kernel-fitimage vars
ref-manual: use consistent capitalisation of U-Boot
Paul Gortmaker (1):
systemd: dont spew hidepid mount errors for kernels < v5.8
Peter Bergin (1):
buildhistory.bbclass: avoid exception for empty BUILDHISTORY_FEATURES variable
Peter Kjellerstedt (7):
lib/oe/path: Add canonicalize()
bitbake.conf: Canonicalize paths in PSEUDO_IGNORE_PATHS
wic: Pass canonicalized paths in PSEUDO_IGNORE_PATHS
glibc: Make adjtime() for 32 bit support being called with delta == NULL
bitbake: cache: Make CoreRecipeInfo include rprovides_pkg for skipped recipes
bitbake: cooker: Include all packages a recipe provides in SkippedPackage.rprovides
apr-util: Only specify --with-dbm=gdbm if gdbm support is enabled
Quentin Schulz (1):
docs: fix missing & and ; surrounding references from poky.yaml
Randy Li (2):
meson: Add sysroot property to nativesdk-meson
meson: Don't turn string into a list in nativesdk
Richard Purdie (69):
pseudo: Drop patches merged into upstream branch
bitbake: data_smart: Ensure hash reflects vardepvalue flags correctly
linuxloader: Avoid confusing string concat errors
systemd: Ensure uid/gid ranges are set deterministically
grub: Fix build reproducibility issue
u-boot-tools: Fix reproducibility issue
grub: Add second fix for determinism issue
oeqa/commands: Ensure sync can be found regardless of PATH
cups: Mark CVE-2009-0032 as a non-issue
cups: Mark CVE-2008-1033 as a non-issue
groff: Fix reproducibility issue
man-db: Avoid reproducibility failures after fixing groff-native
meta-selftest/staticids: Add ids for other recipes
selftest/reproducible: Add useradd-staticids to reproducible builds tests
grub: Further reproducibility fix
man-db: Fix reproducibility issue
bitbake.conf: Add mkfifo to HOSTTOOLS
bitbake.conf: Add /run/ to PSEUDO_IGNORE_PATHS
ppp: Update 2.4.8 -> 2.4.9
ppp: Fix reproducibility issue
sanity: Bump min python version to 3.6
pseudo: Add lchmod wrapper
qemu: Upgrade 5.1.0->5.2.0
qemu: Drop vm reservation changes to resolve build issues
qemu: Fix mingw builds
qemu: Add some user space mmap tweaks to address musl 32 bit build issues
ppp: Fix patch typo
pseudo: Update for arm host and memleak fixes/cleanup
vulkan-samples: Fix reproducibility issue
vulkan-samples: Disable PCH for reproducibility
lttng-modules: Upgrade 2.12.3->2.12.4
lttng-modules: Drop gcc7 related patch
bash: Set HEREDOC_PIPESIZE deterministically
bash: Add makefile race workaround
build-appliance-image: Update to master head revision
bitbake: fetch2/perforce: Fix localfile to include ud.module
ncurses: Don't put terminfo into the sysroot
python3: Avoid installing test data into recipe-sysroot
staging: Clean up files installed into the sysroot
gobject-introspection: Fix variable override order
nativesdk-buildtools-perl-dummy: Add missing entries for nativesdk-automake
package_rpm: Clean up unset runtime package variable handling
bitbake.conf/python: Drop setting RDEPENDS/RPROVIDES default
native: Stop clearing PACKAGES
meta: Clean up various class-native* RDEPENDS overrides
gtk-doc: Disable dependencies in native case
pseudo: Update to include passwd and file renaming fixes
at: Upgrade 3.1.23 -> 3.2.1
msmtp: Fix to work with autoconf 2.70
ruby: Fix to work with autoconf 2.70
lrzsz: Fix to work with autoconf 2.70
Revert "sanity.bbclass: check if PSEUDO_IGNORE_PATHS and ${S} overlap"
image_types: Ensure tar archives are reproducible
qemu.inc: Should depend on qemu-system-native, not qemu-native
python3-setuptools: Add back accidentally dropped RDEPENDS
opkg: Fix build reproducibility issue
Revert "msmtp: Fix to work with autoconf 2.70"
grub: Backport fix to work with new binutils
package: Ensure do_packagedata is cleaned correctly
openssh: Backport a fix to fix with glibc 2.33 on some platforms
pseudo: Update to work with glibc 2.33
bitbake: bitbake-worker: Try and avoid potential short write events issues
apr: Fix to work with autoconf 2.70
bitbake: cooker: Ensure reparsing is handled correctly
bitbake: bblayers/action: When adding layers, catch BBHandledException
bitbake: bitbake: Bump release to 1.49.1
sanity.conf: Increase minimum bitbake version due to logging function change
Fix up bitbake logging compatibility
opkg: Fix patch glitches
Robert Rosengren (1):
mpg123: Add support for FPU-less targets
Robert Yang (10):
buildtools-tarball.bb: Fix PATH for environment setup script
ncurses: Make ncurses-tools depend on ncurses-terminfo-base
minicom: RDEPENDS on ncurses-terminfo-base
archiver.bbclass: Fix --runall=deploy_archives for images
ccache: Extend to nativesdk
ccache.bbclass: Set CCACHE_TEMPDIR
Revert "ccache.bbclass: use ccache from host distribution"
ccache.bbclass: Use ccache-native and disable ccache for native recipes
apt: Fix do_compile error when enable ccache
oeqa/selftest: binutils-cross-x86_64 -> libgcc-initial
Ross Burton (28):
wic-image-minimal: only depend on syslinux on x86 targets
syslinux: rewrite recipe so only target code is x86-specific
wic-tools: don't build syslinux-native for targets without syslinux
image-uefi.conf: add EFI arch variable
systemd-boot: build the EFI stub
systemd-boot: allow building for Arm targets
wic-tools: add grub-efi and systemd-boot on arm64
lib/oe/qa: handle the 'no specific instruction set' ELF e_machine value
local.conf: add aarch64 to the SDKMACHINE example values
kernel: set COMPATIBLE_HOST to *-linux
bitbake.conf: default SDKMACHINE to the build host architecture
diffstat: point the license checksum at the license
ruby: remove tcl DEPENDS
base: use URI instead of decodeurl when detecting unpack dependencies
lib/oe/package_manager: ensure repodata is wiped
core-image-sato-sdk-ptest: these images need ptest
ovmf-shell-image: image is only buildable on x86-64
bitbake: fetch2: handle empty elements in _param_str_split
bitbake: tests/fetch: add test for empty query parameters
Revert "lrzsz: Fix to work with autoconf 2.70"
unfs3: fix build with new autoconf
gnu-config: update to latest commit
autoconf: merge .bb and .inc files
autotools: don't warn about obsolete usage
autoconf: upgrade to 2.71
autotools: disable gtkdocize for now
autotools: remove intltoolize logic
autotools: no need to depend on gnu-config
Sakib Sajal (2):
buildstats.bbclass: add functionality to collect build system stats
linux-yocto*: add features/gpio/mockup.scc to KERNEL_FEATURES
Scott Branden (1):
kmod: update 27 -> 28
Scott Murray (3):
grub: fix "CVE:" line in one of the patches
patch: fix CVE-2019-20633
glibc: CVE-2019-25013
Shachar Menashe (1):
openssl: drop support for deprecated algorithms
Sinan Kaya (8):
gcsections: add more suppressions for SDK builds
sudo: split sudo binary into its own package
iproute2: split ip to individual package
procps: split ps and sysctl into individual packages
net-tools: split mii-tool into its own package
runqemu: Add support for VHD/VHDX rootfs
meta/classes: Add support for WIC<>VHD/VHDX conversion
appliance: Add VHD/VHDX generation
Steve Sakoman (2):
oeqa/selftest/cases/devtool.py: fix typo in ignore_patterns call
glibc: update to latest release/2.32/master branch
Tanu Kaskinen (6):
maintainers.inc: remove myself from maintainers
pulseaudio: Remove OE_LT_RPATH_ALLOW
pulseaudio: disable EsounD support
pulseaudio: disable GConf support
pulseaudio: switch build system from Autotools to Meson
pulseaudio: fix client.conf location
Teoh Jay Shen (4):
oeqa/terminal : improve the test case
oeqa/suspend : add test for suspend state
oeqa/ethernet_ip_connman : add test for network connections
oeqa/usb_hid.py : add test to check the usb/human interface device status after suspend state
Thomas Perrot (1):
go.bbclass: don't stage test data with sources of dependencies
Tim Orling (6):
python3-hypothesis: upgrade 5.41.4 -> 5.41.5
python3-importlib-metadata: upgrade 3.1.0 -> 3.1.1
python3-pygments: upgrade v2.7.2 -> v2.7.3
python3-setuptools: upgrade 50.3.2 -> 51.0.0
python3-setuptools-scm: add python3-toml dep
python3-packaging: upgrade 20.4 -> 20.8
Tomasz Dziendzielski (18):
populate_sdk_base: Fix condition syntax if SDK_RELOCATE_AFTER_INSTALL is disabled
lib/oe/utils: Return empty string in parallel_make
devtool: Fix source extraction for gcc shared source
externalsrc: Fix parsing error with devtool non-git sources
devtool: Fix file:// fetcher symlink directory structure
selftest/devtool: Add modify_localfiles_only test checking symlink path
meta: Fix native inheritance order in recipes
insane: Add test for native/nativesdk inherit order
lib/oe/package_manager: Do not pass stderr to package manager as an argument
externalsrc: Detect code changes in submodules
insane: Add missing INSANE_SKIP mechanism for native-last QA check
insane: native-last: Only print classes inherited after native/nativesdk
lib/oe/patch.py: Don't return command stderr from runcmd function
python3: Use addtask statement instead of task dependencies
lib/oe/patch.py: Ignore scissors line on applying patch
sstatesig: Add descriptive error message to getpwuid/getgrgid "uid/gid not found" KeyError
bitbake: lib/bb: Don't treat mc recipe (Midnight Commander) as a multiconfig target
bitbake: BBHandler: Don't classify shell functions that names start with "python*" as python function
Trevor Woerner (7):
mesa.inc: switch true/enabled false/disabled
mesa: update 20.2.4 -> 20.3.1
insane.bbclass: allow fifos
selftest-chown: add test for fifos
PSPLASH_FIFO_DIR: refactor
psplash: fix working on first boot (sysvinit)
psplash (sysvinit): add textual updates
Vinícius Ossanes Aquino (1):
cmake: Upgrade 3.19.2 -> 3.19.3
Vivien Didelot (4):
README.hardware: prettify headline
README.hardware: fix the dd command
meta-yocto-bsp: use provided variables
meta-yocto-bsp: use mmcblk0 for root partition
Vyacheslav Yurkov (1):
npm.bbclass: use python3 for npm config
Wang Mingyu (33):
libaio: upgrade 0.3.111 -> 0.3.112
readline: upgrade 8.0 -> 8.1
man-pages: upgrade 5.09 ->5.10
mobile-broadband-provider-info: upgrade 20190618 ->20201225
shared-mime-info: upgrade 2.0 -> 2.1
tiff: upgrade 4.1.0 -> 4.2.0
tcl: upgrade 8.6.10 -> 8.6.11
sysstat: upgrade 12.4.1 -> 12.4.2
nettle: upgrade 3.6 ->3.7
binutils: upgrade 2.35 -> 2.35.1
ed: upgrade 1.16 -> 1.17
ell: upgrade 0.35 -> 0.36
findutils: upgrade 4.7.0 -> 4.8.0
iproute2: upgrade 5.9.0 -> 5.10.0
gnupg: upgrade 2.2.26 -> 2.2.27
libpcap: upgrade 1.9.1 -> 1.10.0
libmodulemd: upgrade 2.11.1 -> 2.11.2
pulseaudio: upgrade 14.0 -> 14.2
btrfs-tools: upgrade 5.9 -> 5.10
gpgme: upgrade 1.15.0 -> 1.15.1
iptables: upgrade 1.8.6 -> 1.8.7
socat: upgrade 1.7.3.4 ->1.7.4.1
libcap: upgrade 2.46 -> 2.47
libjitterentropy: upgrade 3.0.0 -> 3.0.1
libsolv: upgrade 0.7.16 -> 0.7.17
ltp: upgrade 20200930 -> 20210121
stress-ng: upgrade 0.12.01 -> 0.12.02
util-macros: upgrade 1.19.2 -> 1.19.3
gtk-doc: upgrade 1.33.1 -> 1.33.2
e2fsprogs: upgrade 1.45.6 -> 1.45.7
bind: upgrade 9.16.10 -> 9.16.11
libdrm: upgrade 2.4.103 -> 2.4.104
parted: upgrade 3.3 -> 3.4
Yann Dirson (1):
libsdl2: upgrade to 2.0.14
Yi Fan Yu (6):
binutils: Fix CVE-2020-35448
oeqa/selftest/cases/tinfoil.py: increase timeout 10->60s test_wait_event
strace: increase ptest timeout duration 120->240s
sudo: upgrade 1.9.5p1 -> 1.9.5p2
glibc: fix CVE-2020-27618
glib-2.0: add workaround to fix codegen.py.test failing
Yi Zhao (7):
dhcpcd: upgrade 9.3.2 -> 9.3.4
dhcpcd: fix SECCOMP for i386
inetutils: add dnsdomainname to ALTERNATIVE
libcap: update 2.45 -> 2.46
libcap-ng: upgrade 0.8.1 -> 0.8.2
dhcpcd: upgrade 9.3.4 -> 9.4.0
rng-tools: upgrade 6.10 -> 6.11
Yoann Congal (2):
documentation: Fix a Concpets -> Concepts typo
documentation: Prevent building documentation with an outdated version of sphinx
Zhixiong Chi (1):
glibc: CVE-2020-29562 and CVE-2020-29573
akuster (4):
openssl: Enable srp algorithm
cve-check.bbclass: add layer to cve log
cve-check: add include/exclude layers
documentation.conf: add both CVE_CHECK_LAYER_*
hongxu (2):
apt: add nativesdk support
dpkg: add nativesdk support
saloni (2):
libgcrypt: Whitelisted CVEs
libcroco: Added CVE
zangrc (3):
bash: Rename patch name
systemtap: upgrade 4.3 -> 4.4
msmtp: upgrade 1.8.13 -> 1.8.14
zhengruoqin (11):
cantarell-fonts: upgrade 0.201 -> 0.301
gdbm: upgrade 1.18.1 -> 1.19
libarchive: upgrade 3.4.3 -> 3.5.1
libevdev: upgrade 1.10.0 -> 1.10.1
libgpg-error: upgrade 1.39 -> 1.41
libmodulemd: upgrade 2.11.2 -> 2.12.0
bison: upgrade 3.7.4 -> 3.7.5
ca-certificates: upgrade 20200601 -> 20210119
mc: upgrade 4.8.25 -> 4.8.26
sqlite3: upgrade 3.34.0 -> 3.34.1
python3-packaging: upgrade 20.8 -> 20.9
Revert "libpam: update 1.3.1 -> 1.5.1"
This reverts commit b0384720a46fb25c4ad180e3f256ffdeb53dc8a6.
OpenBMC is not ready for the removal of pam_cracklib and pam_tally2.
Until code is ready to move to new libs in libpam_1.5, carry a revert
in OpenBMC to stay at libpam_1.3.
openbmc/openbmc#3750 tracks this work
Signed-off-by: Andrew Geissler <geissonator@yahoo.com>
Change-Id: I69357e370d7cf5c5d6dfedde11b88a4f797f7e95
Diffstat (limited to 'poky/bitbake/lib')
41 files changed, 575 insertions, 318 deletions
diff --git a/poky/bitbake/lib/bb/__init__.py b/poky/bitbake/lib/bb/__init__.py index b21773734..e3f40a329 100644 --- a/poky/bitbake/lib/bb/__init__.py +++ b/poky/bitbake/lib/bb/__init__.py @@ -9,7 +9,7 @@ # SPDX-License-Identifier: GPL-2.0-only # -__version__ = "1.49.0" +__version__ = "1.49.1" import sys if sys.version_info < (3, 5, 0): @@ -21,8 +21,8 @@ class BBHandledException(Exception): The big dilemma for generic bitbake code is what information to give the user when an exception occurs. Any exception inheriting this base exception class has already provided information to the user via some 'fired' message type such as - an explicitly fired event using bb.fire, or a bb.error message. If bitbake - encounters an exception derived from this class, no backtrace or other information + an explicitly fired event using bb.fire, or a bb.error message. If bitbake + encounters an exception derived from this class, no backtrace or other information will be given to the user, its assumed the earlier event provided the relevant information. """ pass @@ -42,7 +42,16 @@ class BBLoggerMixin(object): def setup_bblogger(self, name): if name.split(".")[0] == "BitBake": - self.debug = self.bbdebug + self.debug = self._debug_helper + + def _debug_helper(self, *args, **kwargs): + return self.bbdebug(1, *args, **kwargs) + + def debug2(self, *args, **kwargs): + return self.bbdebug(2, *args, **kwargs) + + def debug3(self, *args, **kwargs): + return self.bbdebug(3, *args, **kwargs) def bbdebug(self, level, msg, *args, **kwargs): loglevel = logging.DEBUG - level + 1 @@ -128,7 +137,7 @@ def debug(lvl, *args): mainlogger.warning("Passed invalid debug level '%s' to bb.debug", lvl) args = (lvl,) + args lvl = 1 - mainlogger.debug(lvl, ''.join(args)) + mainlogger.bbdebug(lvl, ''.join(args)) def note(*args): mainlogger.info(''.join(args)) diff --git a/poky/bitbake/lib/bb/build.py b/poky/bitbake/lib/bb/build.py index 974d2ff06..f4f897e41 100644 --- a/poky/bitbake/lib/bb/build.py +++ b/poky/bitbake/lib/bb/build.py @@ -583,7 +583,7 @@ def _exec_task(fn, task, d, quieterr): logger.error("No such task: %s" % task) return 1 - logger.debug(1, "Executing task %s", task) + logger.debug("Executing task %s", task) localdata = _task_data(fn, task, d) tempdir = localdata.getVar('T') @@ -596,7 +596,7 @@ def _exec_task(fn, task, d, quieterr): curnice = os.nice(0) nice = int(nice) - curnice newnice = os.nice(nice) - logger.debug(1, "Renice to %s " % newnice) + logger.debug("Renice to %s " % newnice) ionice = localdata.getVar("BB_TASK_IONICE_LEVEL") if ionice: try: @@ -720,7 +720,7 @@ def _exec_task(fn, task, d, quieterr): logfile.close() if os.path.exists(logfn) and os.path.getsize(logfn) == 0: - logger.debug(2, "Zero size logfn %s, removing", logfn) + logger.debug2("Zero size logfn %s, removing", logfn) bb.utils.remove(logfn) bb.utils.remove(loglink) event.fire(TaskSucceeded(task, fn, logfn, localdata), localdata) diff --git a/poky/bitbake/lib/bb/cache.py b/poky/bitbake/lib/bb/cache.py index b8054e028..aea2b8bc1 100644 --- a/poky/bitbake/lib/bb/cache.py +++ b/poky/bitbake/lib/bb/cache.py @@ -26,7 +26,7 @@ import re logger = logging.getLogger("BitBake.Cache") -__cache_version__ = "153" +__cache_version__ = "154" def getCacheFile(path, filename, mc, data_hash): mcspec = '' @@ -94,6 +94,7 @@ class CoreRecipeInfo(RecipeInfoCommon): if not self.packages: self.packages.append(self.pn) self.packages_dynamic = self.listvar('PACKAGES_DYNAMIC', metadata) + self.rprovides_pkg = self.pkgvar('RPROVIDES', self.packages, metadata) self.skipreason = 
self.getvar('__SKIPPED', metadata) if self.skipreason: @@ -120,7 +121,6 @@ class CoreRecipeInfo(RecipeInfoCommon): self.depends = self.depvar('DEPENDS', metadata) self.rdepends = self.depvar('RDEPENDS', metadata) self.rrecommends = self.depvar('RRECOMMENDS', metadata) - self.rprovides_pkg = self.pkgvar('RPROVIDES', self.packages, metadata) self.rdepends_pkg = self.pkgvar('RDEPENDS', self.packages, metadata) self.rrecommends_pkg = self.pkgvar('RRECOMMENDS', self.packages, metadata) self.inherits = self.getvar('__inherit_cache', metadata, expand=False) @@ -215,7 +215,7 @@ class CoreRecipeInfo(RecipeInfoCommon): if not self.not_world: cachedata.possible_world.append(fn) #else: - # logger.debug(2, "EXCLUDE FROM WORLD: %s", fn) + # logger.debug2("EXCLUDE FROM WORLD: %s", fn) # create a collection of all targets for sanity checking # tasks, such as upstream versions, license, and tools for @@ -238,7 +238,7 @@ def virtualfn2realfn(virtualfn): Convert a virtual file name to a real one + the associated subclass keyword """ mc = "" - if virtualfn.startswith('mc:'): + if virtualfn.startswith('mc:') and virtualfn.count(':') >= 2: elems = virtualfn.split(':') mc = elems[1] virtualfn = ":".join(elems[2:]) @@ -268,7 +268,7 @@ def variant2virtual(realfn, variant): """ if variant == "": return realfn - if variant.startswith("mc:"): + if variant.startswith("mc:") and variant.count(':') >= 2: elems = variant.split(":") if elems[2]: return "mc:" + elems[1] + ":virtual:" + ":".join(elems[2:]) + ":" + realfn @@ -323,7 +323,7 @@ class NoCache(object): Return a complete set of data for fn. To do this, we need to parse the file. """ - logger.debug(1, "Parsing %s (full)" % virtualfn) + logger.debug("Parsing %s (full)" % virtualfn) (fn, virtual, mc) = virtualfn2realfn(virtualfn) bb_data = self.load_bbfile(virtualfn, appends, virtonly=True) return bb_data[virtual] @@ -400,7 +400,7 @@ class Cache(NoCache): self.cachefile = self.getCacheFile("bb_cache.dat") - self.logger.debug(1, "Cache dir: %s", self.cachedir) + self.logger.debug("Cache dir: %s", self.cachedir) bb.utils.mkdirhier(self.cachedir) cache_ok = True @@ -408,7 +408,7 @@ class Cache(NoCache): for cache_class in self.caches_array: cachefile = self.getCacheFile(cache_class.cachefile) cache_exists = os.path.exists(cachefile) - self.logger.debug(2, "Checking if %s exists: %r", cachefile, cache_exists) + self.logger.debug2("Checking if %s exists: %r", cachefile, cache_exists) cache_ok = cache_ok and cache_exists cache_class.init_cacheData(self) if cache_ok: @@ -416,7 +416,7 @@ class Cache(NoCache): elif os.path.isfile(self.cachefile): self.logger.info("Out of date cache found, rebuilding...") else: - self.logger.debug(1, "Cache file %s not found, building..." % self.cachefile) + self.logger.debug("Cache file %s not found, building..." 
% self.cachefile) # We don't use the symlink, its just for debugging convinience if self.mc: @@ -453,7 +453,7 @@ class Cache(NoCache): for cache_class in self.caches_array: cachefile = self.getCacheFile(cache_class.cachefile) - self.logger.debug(1, 'Loading cache file: %s' % cachefile) + self.logger.debug('Loading cache file: %s' % cachefile) with open(cachefile, "rb") as cachefile: pickled = pickle.Unpickler(cachefile) # Check cache version information @@ -500,7 +500,7 @@ class Cache(NoCache): def parse(self, filename, appends): """Parse the specified filename, returning the recipe information""" - self.logger.debug(1, "Parsing %s", filename) + self.logger.debug("Parsing %s", filename) infos = [] datastores = self.load_bbfile(filename, appends, mc=self.mc) depends = [] @@ -554,7 +554,7 @@ class Cache(NoCache): cached, infos = self.load(fn, appends) for virtualfn, info_array in infos: if info_array[0].skipped: - self.logger.debug(1, "Skipping %s: %s", virtualfn, info_array[0].skipreason) + self.logger.debug("Skipping %s: %s", virtualfn, info_array[0].skipreason) skipped += 1 else: self.add_info(virtualfn, info_array, cacheData, not cached) @@ -590,21 +590,21 @@ class Cache(NoCache): # File isn't in depends_cache if not fn in self.depends_cache: - self.logger.debug(2, "%s is not cached", fn) + self.logger.debug2("%s is not cached", fn) return False mtime = bb.parse.cached_mtime_noerror(fn) # Check file still exists if mtime == 0: - self.logger.debug(2, "%s no longer exists", fn) + self.logger.debug2("%s no longer exists", fn) self.remove(fn) return False info_array = self.depends_cache[fn] # Check the file's timestamp if mtime != info_array[0].timestamp: - self.logger.debug(2, "%s changed", fn) + self.logger.debug2("%s changed", fn) self.remove(fn) return False @@ -615,13 +615,13 @@ class Cache(NoCache): fmtime = bb.parse.cached_mtime_noerror(f) # Check if file still exists if old_mtime != 0 and fmtime == 0: - self.logger.debug(2, "%s's dependency %s was removed", + self.logger.debug2("%s's dependency %s was removed", fn, f) self.remove(fn) return False if (fmtime != old_mtime): - self.logger.debug(2, "%s's dependency %s changed", + self.logger.debug2("%s's dependency %s changed", fn, f) self.remove(fn) return False @@ -638,14 +638,14 @@ class Cache(NoCache): continue f, exist = f.split(":") if (exist == "True" and not os.path.exists(f)) or (exist == "False" and os.path.exists(f)): - self.logger.debug(2, "%s's file checksum list file %s changed", + self.logger.debug2("%s's file checksum list file %s changed", fn, f) self.remove(fn) return False if tuple(appends) != tuple(info_array[0].appends): - self.logger.debug(2, "appends for %s changed", fn) - self.logger.debug(2, "%s to %s" % (str(appends), str(info_array[0].appends))) + self.logger.debug2("appends for %s changed", fn) + self.logger.debug2("%s to %s" % (str(appends), str(info_array[0].appends))) self.remove(fn) return False @@ -654,10 +654,10 @@ class Cache(NoCache): virtualfn = variant2virtual(fn, cls) self.clean.add(virtualfn) if virtualfn not in self.depends_cache: - self.logger.debug(2, "%s is not cached", virtualfn) + self.logger.debug2("%s is not cached", virtualfn) invalid = True elif len(self.depends_cache[virtualfn]) != len(self.caches_array): - self.logger.debug(2, "Extra caches missing for %s?" % virtualfn) + self.logger.debug2("Extra caches missing for %s?" 
% virtualfn) invalid = True # If any one of the variants is not present, mark as invalid for all @@ -665,10 +665,10 @@ class Cache(NoCache): for cls in info_array[0].variants: virtualfn = variant2virtual(fn, cls) if virtualfn in self.clean: - self.logger.debug(2, "Removing %s from cache", virtualfn) + self.logger.debug2("Removing %s from cache", virtualfn) self.clean.remove(virtualfn) if fn in self.clean: - self.logger.debug(2, "Marking %s as not clean", fn) + self.logger.debug2("Marking %s as not clean", fn) self.clean.remove(fn) return False @@ -681,10 +681,10 @@ class Cache(NoCache): Called from the parser in error cases """ if fn in self.depends_cache: - self.logger.debug(1, "Removing %s from cache", fn) + self.logger.debug("Removing %s from cache", fn) del self.depends_cache[fn] if fn in self.clean: - self.logger.debug(1, "Marking %s as unclean", fn) + self.logger.debug("Marking %s as unclean", fn) self.clean.remove(fn) def sync(self): @@ -697,13 +697,13 @@ class Cache(NoCache): return if self.cacheclean: - self.logger.debug(2, "Cache is clean, not saving.") + self.logger.debug2("Cache is clean, not saving.") return for cache_class in self.caches_array: cache_class_name = cache_class.__name__ cachefile = self.getCacheFile(cache_class.cachefile) - self.logger.debug(2, "Writing %s", cachefile) + self.logger.debug2("Writing %s", cachefile) with open(cachefile, "wb") as f: p = pickle.Pickler(f, pickle.HIGHEST_PROTOCOL) p.dump(__cache_version__) @@ -879,7 +879,7 @@ class MultiProcessCache(object): bb.utils.mkdirhier(cachedir) self.cachefile = os.path.join(cachedir, cache_file_name or self.__class__.cache_file_name) - logger.debug(1, "Using cache in '%s'", self.cachefile) + logger.debug("Using cache in '%s'", self.cachefile) glf = bb.utils.lockfile(self.cachefile + ".lock") @@ -985,7 +985,7 @@ class SimpleCache(object): bb.utils.mkdirhier(cachedir) self.cachefile = os.path.join(cachedir, cache_file_name or self.__class__.cache_file_name) - logger.debug(1, "Using cache in '%s'", self.cachefile) + logger.debug("Using cache in '%s'", self.cachefile) glf = bb.utils.lockfile(self.cachefile + ".lock") diff --git a/poky/bitbake/lib/bb/cooker.py b/poky/bitbake/lib/bb/cooker.py index 1f4cc1e96..0e492b9be 100644 --- a/poky/bitbake/lib/bb/cooker.py +++ b/poky/bitbake/lib/bb/cooker.py @@ -73,7 +73,9 @@ class SkippedPackage: self.pn = info.pn self.skipreason = info.skipreason self.provides = info.provides - self.rprovides = info.rprovides + self.rprovides = info.packages + info.rprovides + for package in info.packages: + self.rprovides += info.rprovides_pkg[package] elif reason: self.skipreason = reason @@ -409,6 +411,8 @@ class BBCooker: self.data.disableTracking() def parseConfiguration(self): + self.updateCacheSync() + # Change nice level if we're asked to nice = self.data.getVar("BB_NICE_LEVEL") if nice: @@ -439,7 +443,7 @@ class BBCooker: continue except AttributeError: pass - logger.debug(1, "Marking as dirty due to '%s' option change to '%s'" % (o, options[o])) + logger.debug("Marking as dirty due to '%s' option change to '%s'" % (o, options[o])) print("Marking as dirty due to '%s' option change to '%s'" % (o, options[o])) clean = False if hasattr(self.configuration, o): @@ -466,17 +470,17 @@ class BBCooker: for k in bb.utils.approved_variables(): if k in environment and k not in self.configuration.env: - logger.debug(1, "Updating new environment variable %s to %s" % (k, environment[k])) + logger.debug("Updating new environment variable %s to %s" % (k, environment[k])) self.configuration.env[k] = 
environment[k] clean = False if k in self.configuration.env and k not in environment: - logger.debug(1, "Updating environment variable %s (deleted)" % (k)) + logger.debug("Updating environment variable %s (deleted)" % (k)) del self.configuration.env[k] clean = False if k not in self.configuration.env and k not in environment: continue if environment[k] != self.configuration.env[k]: - logger.debug(1, "Updating environment variable %s from %s to %s" % (k, self.configuration.env[k], environment[k])) + logger.debug("Updating environment variable %s from %s to %s" % (k, self.configuration.env[k], environment[k])) self.configuration.env[k] = environment[k] clean = False @@ -484,7 +488,7 @@ class BBCooker: self.configuration.env = environment if not clean: - logger.debug(1, "Base environment change, triggering reparse") + logger.debug("Base environment change, triggering reparse") self.reset() def runCommands(self, server, data, abort): @@ -612,7 +616,7 @@ class BBCooker: # Replace string such as "mc:*:bash" # into "mc:A:bash mc:B:bash bash" for k in targetlist: - if k.startswith("mc:"): + if k.startswith("mc:") and k.count(':') >= 2: if wildcard: bb.fatal('multiconfig conflict') if k.split(":")[1] == "*": @@ -646,7 +650,7 @@ class BBCooker: for k in fulltargetlist: origk = k mc = "" - if k.startswith("mc:"): + if k.startswith("mc:") and k.count(':') >= 2: mc = k.split(":")[1] k = ":".join(k.split(":")[2:]) ktask = task @@ -695,7 +699,7 @@ class BBCooker: if depmc not in self.multiconfigs: bb.fatal("Multiconfig dependency %s depends on nonexistent multiconfig configuration named configuration %s" % (k,depmc)) else: - logger.debug(1, "Adding providers for multiconfig dependency %s" % l[3]) + logger.debug("Adding providers for multiconfig dependency %s" % l[3]) taskdata[depmc].add_provider(localdata[depmc], self.recipecaches[depmc], l[3]) seen.add(k) new = True @@ -1551,7 +1555,7 @@ class BBCooker: self.inotify_modified_files = [] if not self.baseconfig_valid: - logger.debug(1, "Reloading base configuration data") + logger.debug("Reloading base configuration data") self.initConfigurationData() self.handlePRServ() diff --git a/poky/bitbake/lib/bb/data_smart.py b/poky/bitbake/lib/bb/data_smart.py index c559102cf..2328c334a 100644 --- a/poky/bitbake/lib/bb/data_smart.py +++ b/poky/bitbake/lib/bb/data_smart.py @@ -1005,7 +1005,7 @@ class DataSmart(MutableMapping): else: data.update({key:value}) - varflags = d.getVarFlags(key, internalflags = True) + varflags = d.getVarFlags(key, internalflags = True, expand=["vardepvalue"]) if not varflags: continue for f in varflags: diff --git a/poky/bitbake/lib/bb/fetch2/__init__.py b/poky/bitbake/lib/bb/fetch2/__init__.py index 07b7ae41b..19169d780 100644 --- a/poky/bitbake/lib/bb/fetch2/__init__.py +++ b/poky/bitbake/lib/bb/fetch2/__init__.py @@ -290,7 +290,7 @@ class URI(object): def _param_str_split(self, string, elmdelim, kvdelim="="): ret = collections.OrderedDict() - for k, v in [x.split(kvdelim, 1) for x in string.split(elmdelim)]: + for k, v in [x.split(kvdelim, 1) for x in string.split(elmdelim) if x]: ret[k] = v return ret @@ -428,7 +428,7 @@ def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None): uri_decoded = list(decodeurl(ud.url)) uri_find_decoded = list(decodeurl(uri_find)) uri_replace_decoded = list(decodeurl(uri_replace)) - logger.debug(2, "For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded)) + logger.debug2("For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, 
uri_replace_decoded)) result_decoded = ['', '', '', '', '', {}] for loc, i in enumerate(uri_find_decoded): result_decoded[loc] = uri_decoded[loc] @@ -474,7 +474,7 @@ def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None): result = encodeurl(result_decoded) if result == ud.url: return None - logger.debug(2, "For url %s returning %s" % (ud.url, result)) + logger.debug2("For url %s returning %s" % (ud.url, result)) return result methods = [] @@ -499,9 +499,9 @@ def fetcher_init(d): # When to drop SCM head revisions controlled by user policy srcrev_policy = d.getVar('BB_SRCREV_POLICY') or "clear" if srcrev_policy == "cache": - logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy) + logger.debug("Keeping SRCREV cache due to cache policy of: %s", srcrev_policy) elif srcrev_policy == "clear": - logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s", srcrev_policy) + logger.debug("Clearing SRCREV cache due to cache policy of: %s", srcrev_policy) revs.clear() else: raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy) @@ -857,9 +857,9 @@ def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None): cmd = 'export PSEUDO_DISABLED=1; ' + cmd if workdir: - logger.debug(1, "Running '%s' in %s" % (cmd, workdir)) + logger.debug("Running '%s' in %s" % (cmd, workdir)) else: - logger.debug(1, "Running %s", cmd) + logger.debug("Running %s", cmd) success = False error_message = "" @@ -900,7 +900,7 @@ def check_network_access(d, info, url): elif not trusted_network(d, url): raise UntrustedUrl(url, info) else: - logger.debug(1, "Fetcher accessed the network with the command %s" % info) + logger.debug("Fetcher accessed the network with the command %s" % info) def build_mirroruris(origud, mirrors, ld): uris = [] @@ -926,7 +926,7 @@ def build_mirroruris(origud, mirrors, ld): continue if not trusted_network(ld, newuri): - logger.debug(1, "Mirror %s not in the list of trusted networks, skipping" % (newuri)) + logger.debug("Mirror %s not in the list of trusted networks, skipping" % (newuri)) continue # Create a local copy of the mirrors minus the current line @@ -939,8 +939,8 @@ def build_mirroruris(origud, mirrors, ld): newud = FetchData(newuri, ld) newud.setup_localpath(ld) except bb.fetch2.BBFetchException as e: - logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url)) - logger.debug(1, str(e)) + logger.debug("Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url)) + logger.debug(str(e)) try: # setup_localpath of file:// urls may fail, we should still see # if mirrors of the url exist @@ -1043,8 +1043,8 @@ def try_mirror_url(fetch, origud, ud, ld, check = False): elif isinstance(e, NoChecksumError): raise else: - logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (ud.url, origud.url)) - logger.debug(1, str(e)) + logger.debug("Mirror fetch failure for url %s (original url: %s)" % (ud.url, origud.url)) + logger.debug(str(e)) try: ud.method.clean(ud, ld) except UnboundLocalError: @@ -1688,7 +1688,7 @@ class Fetch(object): if m.verify_donestamp(ud, self.d) and not m.need_update(ud, self.d): done = True elif m.try_premirror(ud, self.d): - logger.debug(1, "Trying PREMIRRORS") + logger.debug("Trying PREMIRRORS") mirrors = mirror_from_string(self.d.getVar('PREMIRRORS')) done = m.try_mirrors(self, ud, self.d, mirrors) if done: @@ -1698,7 +1698,7 @@ class Fetch(object): m.update_donestamp(ud, self.d) except ChecksumError as e: logger.warning("Checksum 
failure encountered with premirror download of %s - will attempt other sources." % u) - logger.debug(1, str(e)) + logger.debug(str(e)) done = False if premirroronly: @@ -1710,7 +1710,7 @@ class Fetch(object): try: if not trusted_network(self.d, ud.url): raise UntrustedUrl(ud.url) - logger.debug(1, "Trying Upstream") + logger.debug("Trying Upstream") m.download(ud, self.d) if hasattr(m, "build_mirror_data"): m.build_mirror_data(ud, self.d) @@ -1725,19 +1725,19 @@ class Fetch(object): except BBFetchException as e: if isinstance(e, ChecksumError): logger.warning("Checksum failure encountered with download of %s - will attempt other sources if available" % u) - logger.debug(1, str(e)) + logger.debug(str(e)) if os.path.exists(ud.localpath): rename_bad_checksum(ud, e.checksum) elif isinstance(e, NoChecksumError): raise else: logger.warning('Failed to fetch URL %s, attempting MIRRORS if available' % u) - logger.debug(1, str(e)) + logger.debug(str(e)) firsterr = e # Remove any incomplete fetch if not verified_stamp: m.clean(ud, self.d) - logger.debug(1, "Trying MIRRORS") + logger.debug("Trying MIRRORS") mirrors = mirror_from_string(self.d.getVar('MIRRORS')) done = m.try_mirrors(self, ud, self.d, mirrors) @@ -1774,7 +1774,7 @@ class Fetch(object): ud = self.ud[u] ud.setup_localpath(self.d) m = ud.method - logger.debug(1, "Testing URL %s", u) + logger.debug("Testing URL %s", u) # First try checking uri, u, from PREMIRRORS mirrors = mirror_from_string(self.d.getVar('PREMIRRORS')) ret = m.try_mirrors(self, ud, self.d, mirrors, True) diff --git a/poky/bitbake/lib/bb/fetch2/bzr.py b/poky/bitbake/lib/bb/fetch2/bzr.py index 566ace9f0..fc558f50b 100644 --- a/poky/bitbake/lib/bb/fetch2/bzr.py +++ b/poky/bitbake/lib/bb/fetch2/bzr.py @@ -74,16 +74,16 @@ class Bzr(FetchMethod): if os.access(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK): bzrcmd = self._buildbzrcommand(ud, d, "update") - logger.debug(1, "BZR Update %s", ud.url) + logger.debug("BZR Update %s", ud.url) bb.fetch2.check_network_access(d, bzrcmd, ud.url) runfetchcmd(bzrcmd, d, workdir=os.path.join(ud.pkgdir, os.path.basename(ud.path))) else: bb.utils.remove(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)), True) bzrcmd = self._buildbzrcommand(ud, d, "fetch") bb.fetch2.check_network_access(d, bzrcmd, ud.url) - logger.debug(1, "BZR Checkout %s", ud.url) + logger.debug("BZR Checkout %s", ud.url) bb.utils.mkdirhier(ud.pkgdir) - logger.debug(1, "Running %s", bzrcmd) + logger.debug("Running %s", bzrcmd) runfetchcmd(bzrcmd, d, workdir=ud.pkgdir) scmdata = ud.parm.get("scmdata", "") @@ -109,7 +109,7 @@ class Bzr(FetchMethod): """ Return the latest upstream revision number """ - logger.debug(2, "BZR fetcher hitting network for %s", ud.url) + logger.debug2("BZR fetcher hitting network for %s", ud.url) bb.fetch2.check_network_access(d, self._buildbzrcommand(ud, d, "revno"), ud.url) diff --git a/poky/bitbake/lib/bb/fetch2/clearcase.py b/poky/bitbake/lib/bb/fetch2/clearcase.py index 49d7ae1b0..1a9c86376 100644 --- a/poky/bitbake/lib/bb/fetch2/clearcase.py +++ b/poky/bitbake/lib/bb/fetch2/clearcase.py @@ -70,7 +70,7 @@ class ClearCase(FetchMethod): return ud.type in ['ccrc'] def debug(self, msg): - logger.debug(1, "ClearCase: %s", msg) + logger.debug("ClearCase: %s", msg) def urldata_init(self, ud, d): """ diff --git a/poky/bitbake/lib/bb/fetch2/cvs.py b/poky/bitbake/lib/bb/fetch2/cvs.py index 22abdef79..01de5ff4c 100644 --- a/poky/bitbake/lib/bb/fetch2/cvs.py +++ b/poky/bitbake/lib/bb/fetch2/cvs.py @@ -109,7 +109,7 @@ class 
Cvs(FetchMethod): cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd) # create module directory - logger.debug(2, "Fetch: checking for module directory") + logger.debug2("Fetch: checking for module directory") moddir = os.path.join(ud.pkgdir, localdir) workdir = None if os.access(os.path.join(moddir, 'CVS'), os.R_OK): @@ -123,7 +123,7 @@ class Cvs(FetchMethod): # check out sources there bb.utils.mkdirhier(ud.pkgdir) workdir = ud.pkgdir - logger.debug(1, "Running %s", cvscmd) + logger.debug("Running %s", cvscmd) bb.fetch2.check_network_access(d, cvscmd, ud.url) cmd = cvscmd diff --git a/poky/bitbake/lib/bb/fetch2/git.py b/poky/bitbake/lib/bb/fetch2/git.py index 490d57fbb..e3ba80a3f 100644 --- a/poky/bitbake/lib/bb/fetch2/git.py +++ b/poky/bitbake/lib/bb/fetch2/git.py @@ -220,7 +220,12 @@ class Git(FetchMethod): ud.shallow = False if ud.usehead: - ud.unresolvedrev['default'] = 'HEAD' + # When usehead is set let's associate 'HEAD' with the unresolved + # rev of this repository. This will get resolved into a revision + # later. If an actual revision happens to have also been provided + # then this setting will be overridden. + for name in ud.names: + ud.unresolvedrev[name] = 'HEAD' ud.basecmd = d.getVar("FETCHCMD_git") or "git -c core.fsyncobjectfiles=0" @@ -379,6 +384,35 @@ class Git(FetchMethod): if missing_rev: raise bb.fetch2.FetchError("Unable to find revision %s even from upstream" % missing_rev) + if self._contains_lfs(ud, d, ud.clonedir) and self._need_lfs(ud): + # Unpack temporary working copy, use it to run 'git checkout' to force pre-fetching + # of all LFS blobs needed at the the srcrev. + # + # It would be nice to just do this inline here by running 'git-lfs fetch' + # on the bare clonedir, but that operation requires a working copy on some + # releases of Git LFS. + tmpdir = tempfile.mkdtemp(dir=d.getVar('DL_DIR')) + try: + # Do the checkout. This implicitly involves a Git LFS fetch. + self.unpack(ud, tmpdir, d) + + # Scoop up a copy of any stuff that Git LFS downloaded. Merge them into + # the bare clonedir. + # + # As this procedure is invoked repeatedly on incremental fetches as + # a recipe's SRCREV is bumped throughout its lifetime, this will + # result in a gradual accumulation of LFS blobs in <ud.clonedir>/lfs + # corresponding to all the blobs reachable from the different revs + # fetched across time. + # + # Only do this if the unpack resulted in a .git/lfs directory being + # created; this only happens if at least one blob needed to be + # downloaded. 
@@ -379,6 +384,35 @@ class Git(FetchMethod):
             if missing_rev:
                 raise bb.fetch2.FetchError("Unable to find revision %s even from upstream" % missing_rev)
 
+        if self._contains_lfs(ud, d, ud.clonedir) and self._need_lfs(ud):
+            # Unpack temporary working copy, use it to run 'git checkout' to force pre-fetching
+            # of all LFS blobs needed at the srcrev.
+            #
+            # It would be nice to just do this inline here by running 'git-lfs fetch'
+            # on the bare clonedir, but that operation requires a working copy on some
+            # releases of Git LFS.
+            tmpdir = tempfile.mkdtemp(dir=d.getVar('DL_DIR'))
+            try:
+                # Do the checkout. This implicitly involves a Git LFS fetch.
+                self.unpack(ud, tmpdir, d)
+
+                # Scoop up a copy of any stuff that Git LFS downloaded. Merge them into
+                # the bare clonedir.
+                #
+                # As this procedure is invoked repeatedly on incremental fetches as
+                # a recipe's SRCREV is bumped throughout its lifetime, this will
+                # result in a gradual accumulation of LFS blobs in <ud.clonedir>/lfs
+                # corresponding to all the blobs reachable from the different revs
+                # fetched across time.
+                #
+                # Only do this if the unpack resulted in a .git/lfs directory being
+                # created; this only happens if at least one blob needed to be
+                # downloaded.
+                if os.path.exists(os.path.join(tmpdir, "git", ".git", "lfs")):
+                    runfetchcmd("tar -cf - lfs | tar -xf - -C %s" % ud.clonedir, d, workdir="%s/git/.git" % tmpdir)
+            finally:
+                bb.utils.remove(tmpdir, recurse=True)
+
     def build_mirror_data(self, ud, d):
         if ud.shallow and ud.write_shallow_tarballs:
             if not os.path.exists(ud.fullshallow):
@@ -474,7 +508,7 @@ class Git(FetchMethod):
         if os.path.exists(destdir):
             bb.utils.prunedir(destdir)
 
-        need_lfs = ud.parm.get("lfs", "1") == "1"
+        need_lfs = self._need_lfs(ud)
 
         if not need_lfs:
             ud.basecmd = "GIT_LFS_SKIP_SMUDGE=1 " + ud.basecmd
@@ -563,6 +597,9 @@ class Git(FetchMethod):
             raise bb.fetch2.FetchError("The command '%s' gave output with more than 1 line unexpectedly, output: '%s'" % (cmd, output))
         return output.split()[0] != "0"
 
+    def _need_lfs(self, ud):
+        return ud.parm.get("lfs", "1") == "1"
+
     def _contains_lfs(self, ud, d, wd):
         """
         Check if the repository has 'lfs' (large file) content
@@ -573,8 +610,14 @@ class Git(FetchMethod):
         else:
             branchname = "master"
 
-        cmd = "%s grep lfs origin/%s:.gitattributes | wc -l" % (
-            ud.basecmd, ud.branches[ud.names[0]])
+        # The bare clonedir doesn't use the remote names; it has the branch immediately.
+        if wd == ud.clonedir:
+            refname = ud.branches[ud.names[0]]
+        else:
+            refname = "origin/%s" % ud.branches[ud.names[0]]
+
+        cmd = "%s grep lfs %s:.gitattributes | wc -l" % (
+            ud.basecmd, refname)
 
         try:
             output = runfetchcmd(cmd, d, quiet=True, workdir=wd)
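_contains_lfs() now inspects .gitattributes on the correct ref for both the bare mirror clone (plain branch name) and an unpacked working copy (origin/<branch>). The check boils down to grepping the committed .gitattributes blob, roughly like this standalone sketch (assumes git is installed; paths and refs are illustrative):

    import subprocess

    def repo_uses_lfs(repo_dir, refname):
        """Return True if <refname>:.gitattributes mentions LFS."""
        # Mirrors the fetcher's 'git grep lfs <ref>:.gitattributes' check;
        # git grep exits non-zero when there is no match.
        result = subprocess.run(
            ["git", "grep", "lfs", "%s:.gitattributes" % refname],
            cwd=repo_dir,
            stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
        return result.returncode == 0

    # Bare clonedir: the branch exists directly, e.g. 'master'.
    # Working copy: the same branch lives under a remote, e.g. 'origin/master'.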
Skipping", m) continue submodules.append(m) @@ -179,7 +179,7 @@ class GitSM(Git): (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=ud.clonedir) if len(need_update_list) > 0: - logger.debug(1, 'gitsm: Submodules requiring update: %s' % (' '.join(need_update_list))) + logger.debug('gitsm: Submodules requiring update: %s' % (' '.join(need_update_list))) return True return False diff --git a/poky/bitbake/lib/bb/fetch2/hg.py b/poky/bitbake/lib/bb/fetch2/hg.py index 8f503701e..063e13008 100644 --- a/poky/bitbake/lib/bb/fetch2/hg.py +++ b/poky/bitbake/lib/bb/fetch2/hg.py @@ -150,7 +150,7 @@ class Hg(FetchMethod): def download(self, ud, d): """Fetch url""" - logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'") + logger.debug2("Fetch: checking for module directory '" + ud.moddir + "'") # If the checkout doesn't exist and the mirror tarball does, extract it if not os.path.exists(ud.pkgdir) and os.path.exists(ud.fullmirror): @@ -160,7 +160,7 @@ class Hg(FetchMethod): if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK): # Found the source, check whether need pull updatecmd = self._buildhgcommand(ud, d, "update") - logger.debug(1, "Running %s", updatecmd) + logger.debug("Running %s", updatecmd) try: runfetchcmd(updatecmd, d, workdir=ud.moddir) except bb.fetch2.FetchError: @@ -168,7 +168,7 @@ class Hg(FetchMethod): pullcmd = self._buildhgcommand(ud, d, "pull") logger.info("Pulling " + ud.url) # update sources there - logger.debug(1, "Running %s", pullcmd) + logger.debug("Running %s", pullcmd) bb.fetch2.check_network_access(d, pullcmd, ud.url) runfetchcmd(pullcmd, d, workdir=ud.moddir) try: @@ -183,14 +183,14 @@ class Hg(FetchMethod): logger.info("Fetch " + ud.url) # check out sources there bb.utils.mkdirhier(ud.pkgdir) - logger.debug(1, "Running %s", fetchcmd) + logger.debug("Running %s", fetchcmd) bb.fetch2.check_network_access(d, fetchcmd, ud.url) runfetchcmd(fetchcmd, d, workdir=ud.pkgdir) # Even when we clone (fetch), we still need to update as hg's clone # won't checkout the specified revision if its on a branch updatecmd = self._buildhgcommand(ud, d, "update") - logger.debug(1, "Running %s", updatecmd) + logger.debug("Running %s", updatecmd) runfetchcmd(updatecmd, d, workdir=ud.moddir) def clean(self, ud, d): @@ -247,9 +247,9 @@ class Hg(FetchMethod): if scmdata != "nokeep": proto = ud.parm.get('protocol', 'http') if not os.access(os.path.join(codir, '.hg'), os.R_OK): - logger.debug(2, "Unpack: creating new hg repository in '" + codir + "'") + logger.debug2("Unpack: creating new hg repository in '" + codir + "'") runfetchcmd("%s init %s" % (ud.basecmd, codir), d) - logger.debug(2, "Unpack: updating source in '" + codir + "'") + logger.debug2("Unpack: updating source in '" + codir + "'") if ud.user and ud.pswd: runfetchcmd("%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" pull %s" % (ud.basecmd, ud.user, ud.pswd, proto, ud.moddir), d, workdir=codir) else: @@ -259,5 +259,5 @@ class Hg(FetchMethod): else: runfetchcmd("%s up -C %s" % (ud.basecmd, revflag), d, workdir=codir) else: - logger.debug(2, "Unpack: extracting source to '" + codir + "'") + logger.debug2("Unpack: extracting source to '" + codir + "'") runfetchcmd("%s archive -t files %s %s" % (ud.basecmd, revflag, codir), d, workdir=ud.moddir) diff --git a/poky/bitbake/lib/bb/fetch2/local.py b/poky/bitbake/lib/bb/fetch2/local.py index 25d4557db..e7d1c8c58 100644 --- a/poky/bitbake/lib/bb/fetch2/local.py +++ 
diff --git a/poky/bitbake/lib/bb/fetch2/local.py b/poky/bitbake/lib/bb/fetch2/local.py
index 25d4557db..e7d1c8c58 100644
--- a/poky/bitbake/lib/bb/fetch2/local.py
+++ b/poky/bitbake/lib/bb/fetch2/local.py
@@ -54,12 +54,12 @@ class Local(FetchMethod):
             return [path]
         filespath = d.getVar('FILESPATH')
         if filespath:
-            logger.debug(2, "Searching for %s in paths:\n    %s" % (path, "\n    ".join(filespath.split(":"))))
+            logger.debug2("Searching for %s in paths:\n    %s" % (path, "\n    ".join(filespath.split(":"))))
             newpath, hist = bb.utils.which(filespath, path, history=True)
             searched.extend(hist)
         if not os.path.exists(newpath):
             dldirfile = os.path.join(d.getVar("DL_DIR"), path)
-            logger.debug(2, "Defaulting to %s for %s" % (dldirfile, path))
+            logger.debug2("Defaulting to %s for %s" % (dldirfile, path))
             bb.utils.mkdirhier(os.path.dirname(dldirfile))
             searched.append(dldirfile)
             return searched
diff --git a/poky/bitbake/lib/bb/fetch2/osc.py b/poky/bitbake/lib/bb/fetch2/osc.py
index 3a6cd2951..d9ce44390 100644
--- a/poky/bitbake/lib/bb/fetch2/osc.py
+++ b/poky/bitbake/lib/bb/fetch2/osc.py
@@ -84,13 +84,13 @@ class Osc(FetchMethod):
         Fetch url
         """
 
-        logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
+        logger.debug2("Fetch: checking for module directory '" + ud.moddir + "'")
 
         if os.access(os.path.join(d.getVar('OSCDIR'), ud.path, ud.module), os.R_OK):
             oscupdatecmd = self._buildosccommand(ud, d, "update")
             logger.info("Update "+ ud.url)
             # update sources there
-            logger.debug(1, "Running %s", oscupdatecmd)
+            logger.debug("Running %s", oscupdatecmd)
             bb.fetch2.check_network_access(d, oscupdatecmd, ud.url)
             runfetchcmd(oscupdatecmd, d, workdir=ud.moddir)
         else:
@@ -98,7 +98,7 @@ class Osc(FetchMethod):
             logger.info("Fetch " + ud.url)
             # check out sources there
             bb.utils.mkdirhier(ud.pkgdir)
-            logger.debug(1, "Running %s", oscfetchcmd)
+            logger.debug("Running %s", oscfetchcmd)
             bb.fetch2.check_network_access(d, oscfetchcmd, ud.url)
             runfetchcmd(oscfetchcmd, d, workdir=ud.pkgdir)
 
diff --git a/poky/bitbake/lib/bb/fetch2/perforce.py b/poky/bitbake/lib/bb/fetch2/perforce.py
index 6f3c95b6c..e2a41a4a1 100644
--- a/poky/bitbake/lib/bb/fetch2/perforce.py
+++ b/poky/bitbake/lib/bb/fetch2/perforce.py
@@ -90,16 +90,16 @@ class Perforce(FetchMethod):
             p4port = d.getVar('P4PORT')
 
         if p4port:
-            logger.debug(1, 'Using recipe provided P4PORT: %s' % p4port)
+            logger.debug('Using recipe provided P4PORT: %s' % p4port)
             ud.host = p4port
         else:
-            logger.debug(1, 'Trying to use P4CONFIG to automatically set P4PORT...')
+            logger.debug('Trying to use P4CONFIG to automatically set P4PORT...')
             ud.usingp4config = True
             p4cmd = '%s info | grep "Server address"' % ud.basecmd
             bb.fetch2.check_network_access(d, p4cmd, ud.url)
             ud.host = runfetchcmd(p4cmd, d, True)
             ud.host = ud.host.split(': ')[1].strip()
-            logger.debug(1, 'Determined P4PORT to be: %s' % ud.host)
+            logger.debug('Determined P4PORT to be: %s' % ud.host)
             if not ud.host:
                 raise FetchError('Could not determine P4PORT from P4CONFIG')
 
@@ -119,6 +119,7 @@ class Perforce(FetchMethod):
 
         cleanedpath = ud.path.replace('/...', '').replace('/', '.')
         cleanedhost = ud.host.replace(':', '.')
+        cleanedmodule = ""
         # Merge the path and module into the final depot location
         if ud.module:
             if ud.module.find('/') == 0:
@@ -133,7 +134,7 @@ class Perforce(FetchMethod):
 
         ud.setup_revisions(d)
 
-        ud.localfile = d.expand('%s_%s_%s.tar.gz' % (cleanedhost, cleanedpath, ud.revision))
+        ud.localfile = d.expand('%s_%s_%s_%s.tar.gz' % (cleanedhost, cleanedpath, cleanedmodule, ud.revision))
 
     def _buildp4command(self, ud, d, command, depot_filename=None):
         """
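Folding the cleaned module name into the download file name keeps two URLs that differ only in their module= parameter from mapping to the same tarball in DL_DIR. A sketch with illustrative values only:

    cleanedhost = 'p4.example.com.1666'   # hypothetical
    cleanedpath = '.depot.project'        # hypothetical
    revision = '42'

    # cleanedmodule defaults to "" and is filled in when module= is present,
    # so the extra field is harmless for module-less URLs:
    for cleanedmodule in ('', '.mod-a', '.mod-b'):
        print('%s_%s_%s_%s.tar.gz' % (cleanedhost, cleanedpath, cleanedmodule, revision))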
@@ -207,7 +208,7 @@ class Perforce(FetchMethod):
         for filename in p4fileslist:
             item = filename.split(' - ')
             lastaction = item[1].split()
-            logger.debug(1, 'File: %s Last Action: %s' % (item[0], lastaction[0]))
+            logger.debug('File: %s Last Action: %s' % (item[0], lastaction[0]))
             if lastaction[0] == 'delete':
                 continue
             filelist.append(item[0])
@@ -254,7 +255,7 @@ class Perforce(FetchMethod):
             raise FetchError('Could not determine the latest perforce changelist')
 
         tipcset = tip.split(' ')[1]
-        logger.debug(1, 'p4 tip found to be changelist %s' % tipcset)
+        logger.debug('p4 tip found to be changelist %s' % tipcset)
         return tipcset
 
     def sortable_revision(self, ud, d, name):
diff --git a/poky/bitbake/lib/bb/fetch2/repo.py b/poky/bitbake/lib/bb/fetch2/repo.py
index 2bdbbd409..fa4cb8149 100644
--- a/poky/bitbake/lib/bb/fetch2/repo.py
+++ b/poky/bitbake/lib/bb/fetch2/repo.py
@@ -47,7 +47,7 @@ class Repo(FetchMethod):
         """Fetch url"""
 
         if os.access(os.path.join(d.getVar("DL_DIR"), ud.localfile), os.R_OK):
-            logger.debug(1, "%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath)
+            logger.debug("%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath)
             return
 
         repodir = d.getVar("REPODIR") or (d.getVar("DL_DIR") + "/repo")
diff --git a/poky/bitbake/lib/bb/fetch2/svn.py b/poky/bitbake/lib/bb/fetch2/svn.py
index 971a5add4..8856ef1c6 100644
--- a/poky/bitbake/lib/bb/fetch2/svn.py
+++ b/poky/bitbake/lib/bb/fetch2/svn.py
@@ -116,7 +116,7 @@ class Svn(FetchMethod):
     def download(self, ud, d):
         """Fetch url"""
 
-        logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
+        logger.debug2("Fetch: checking for module directory '" + ud.moddir + "'")
 
         lf = bb.utils.lockfile(ud.svnlock)
 
@@ -129,7 +129,7 @@ class Svn(FetchMethod):
                     runfetchcmd(ud.basecmd + " upgrade", d, workdir=ud.moddir)
                 except FetchError:
                     pass
-                logger.debug(1, "Running %s", svncmd)
+                logger.debug("Running %s", svncmd)
                 bb.fetch2.check_network_access(d, svncmd, ud.url)
                 runfetchcmd(svncmd, d, workdir=ud.moddir)
             else:
@@ -137,7 +137,7 @@ class Svn(FetchMethod):
                 logger.info("Fetch " + ud.url)
                 # check out sources there
                 bb.utils.mkdirhier(ud.pkgdir)
-                logger.debug(1, "Running %s", svncmd)
+                logger.debug("Running %s", svncmd)
                 bb.fetch2.check_network_access(d, svncmd, ud.url)
                 runfetchcmd(svncmd, d, workdir=ud.pkgdir)
 
diff --git a/poky/bitbake/lib/bb/fetch2/wget.py b/poky/bitbake/lib/bb/fetch2/wget.py
index e6d9f528d..78a49676f 100644
--- a/poky/bitbake/lib/bb/fetch2/wget.py
+++ b/poky/bitbake/lib/bb/fetch2/wget.py
@@ -52,6 +52,12 @@ class WgetProgressHandler(bb.progress.LineFilterProgressHandler):
 
 
 class Wget(FetchMethod):
+
+    # CDNs like CloudFlare may do a 'browser integrity test' which can fail
+    # with the standard wget/urllib User-Agent, so pretend to be a modern
+    # browser.
+    user_agent = "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:84.0) Gecko/20100101 Firefox/84.0"
+
     """Class to fetch urls via 'wget'"""
     def supports(self, ud, d):
         """
@@ -82,7 +88,7 @@ class Wget(FetchMethod):
 
         progresshandler = WgetProgressHandler(d)
 
-        logger.debug(2, "Fetching %s using command '%s'" % (ud.url, command))
+        logger.debug2("Fetching %s using command '%s'" % (ud.url, command))
         bb.fetch2.check_network_access(d, command, ud.url)
         runfetchcmd(command + ' --progress=dot -v', d, quiet, log=progresshandler, workdir=workdir)
 
@@ -297,7 +303,7 @@ class Wget(FetchMethod):
             # Some servers (FusionForge, as used on Alioth) require that the
             # optional Accept header is set.
             r.add_header("Accept", "*/*")
-            r.add_header("User-Agent", "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.12) Gecko/20101027 Ubuntu/9.10 (karmic) Firefox/3.6.12")
+            r.add_header("User-Agent", self.user_agent)
             def add_basic_auth(login_str, request):
                 '''Adds Basic auth to http request, pass in login:password as string'''
                 import base64
@@ -320,11 +326,11 @@ class Wget(FetchMethod):
                 pass
         except urllib.error.URLError as e:
             if try_again:
-                logger.debug(2, "checkstatus: trying again")
+                logger.debug2("checkstatus: trying again")
                 return self.checkstatus(fetch, ud, d, False)
             else:
                 # debug for now to avoid spamming the logs in e.g. remote sstate searches
-                logger.debug(2, "checkstatus() urlopen failed: %s" % e)
+                logger.debug2("checkstatus() urlopen failed: %s" % e)
                 return False
         return True
 
@@ -401,9 +407,8 @@ class Wget(FetchMethod):
         f = tempfile.NamedTemporaryFile()
         with tempfile.TemporaryDirectory(prefix="wget-index-") as workdir, tempfile.NamedTemporaryFile(dir=workdir, prefix="wget-listing-") as f:
-            agent = "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.12) Gecko/20101027 Ubuntu/9.10 (karmic) Firefox/3.6.12"
             fetchcmd = self.basecmd
-            fetchcmd += " -O " + f.name + " --user-agent='" + agent + "' '" + uri + "'"
+            fetchcmd += " -O " + f.name + " --user-agent='" + self.user_agent + "' '" + uri + "'"
             try:
                 self._runwget(ud, d, fetchcmd, True, workdir=workdir)
                 fetchresult = f.read()
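Centralising the User-Agent on the Wget class means the urllib-based checkstatus() path and the wget-driven index fetch advertise the same modern browser string. Outside bitbake the same idea looks roughly like this (the URL is a placeholder):

    import urllib.request

    USER_AGENT = ("Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:84.0) "
                  "Gecko/20100101 Firefox/84.0")

    req = urllib.request.Request("https://example.com/index.html")
    req.add_header("Accept", "*/*")
    # Some CDNs reject the default urllib/wget agent string outright.
    req.add_header("User-Agent", USER_AGENT)
    with urllib.request.urlopen(req, timeout=30) as resp:
        print(resp.status)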
r.add_header("Accept", "*/*") - r.add_header("User-Agent", "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.12) Gecko/20101027 Ubuntu/9.10 (karmic) Firefox/3.6.12") + r.add_header("User-Agent", self.user_agent) def add_basic_auth(login_str, request): '''Adds Basic auth to http request, pass in login:password as string''' import base64 @@ -320,11 +326,11 @@ class Wget(FetchMethod): pass except urllib.error.URLError as e: if try_again: - logger.debug(2, "checkstatus: trying again") + logger.debug2("checkstatus: trying again") return self.checkstatus(fetch, ud, d, False) else: # debug for now to avoid spamming the logs in e.g. remote sstate searches - logger.debug(2, "checkstatus() urlopen failed: %s" % e) + logger.debug2("checkstatus() urlopen failed: %s" % e) return False return True @@ -401,9 +407,8 @@ class Wget(FetchMethod): """ f = tempfile.NamedTemporaryFile() with tempfile.TemporaryDirectory(prefix="wget-index-") as workdir, tempfile.NamedTemporaryFile(dir=workdir, prefix="wget-listing-") as f: - agent = "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.12) Gecko/20101027 Ubuntu/9.10 (karmic) Firefox/3.6.12" fetchcmd = self.basecmd - fetchcmd += " -O " + f.name + " --user-agent='" + agent + "' '" + uri + "'" + fetchcmd += " -O " + f.name + " --user-agent='" + self.user_agent + "' '" + uri + "'" try: self._runwget(ud, d, fetchcmd, True, workdir=workdir) fetchresult = f.read() diff --git a/poky/bitbake/lib/bb/parse/__init__.py b/poky/bitbake/lib/bb/parse/__init__.py index 76e180b41..c01807ba8 100644 --- a/poky/bitbake/lib/bb/parse/__init__.py +++ b/poky/bitbake/lib/bb/parse/__init__.py @@ -71,7 +71,7 @@ def update_mtime(f): def update_cache(f): if f in __mtime_cache: - logger.debug(1, "Updating mtime cache for %s" % f) + logger.debug("Updating mtime cache for %s" % f) update_mtime(f) def clear_cache(): diff --git a/poky/bitbake/lib/bb/parse/ast.py b/poky/bitbake/lib/bb/parse/ast.py index 0714296af..0596993d0 100644 --- a/poky/bitbake/lib/bb/parse/ast.py +++ b/poky/bitbake/lib/bb/parse/ast.py @@ -34,7 +34,7 @@ class IncludeNode(AstNode): Include the file and evaluate the statements """ s = data.expand(self.what_file) - logger.debug(2, "CONF %s:%s: including %s", self.filename, self.lineno, s) + logger.debug2("CONF %s:%s: including %s", self.filename, self.lineno, s) # TODO: Cache those includes... 
diff --git a/poky/bitbake/lib/bb/parse/parse_py/BBHandler.py b/poky/bitbake/lib/bb/parse/parse_py/BBHandler.py
index 8a520e307..f8988b863 100644
--- a/poky/bitbake/lib/bb/parse/parse_py/BBHandler.py
+++ b/poky/bitbake/lib/bb/parse/parse_py/BBHandler.py
@@ -22,7 +22,7 @@ from .ConfHandler import include, init
 # For compatibility
 bb.deprecate_import(__name__, "bb.parse", ["vars_from_file"])
 
-__func_start_regexp__    = re.compile(r"(((?P<py>python)|(?P<fr>fakeroot))\s*)*(?P<func>[\w\.\-\+\{\}\$]+)?\s*\(\s*\)\s*{$" )
+__func_start_regexp__    = re.compile(r"(((?P<py>python(?=(\s|\()))|(?P<fr>fakeroot(?=\s)))\s*)*(?P<func>[\w\.\-\+\{\}\$]+)?\s*\(\s*\)\s*{$" )
 __inherit_regexp__       = re.compile(r"inherit\s+(.+)" )
 __export_func_regexp__   = re.compile(r"EXPORT_FUNCTIONS\s+(.+)" )
 __addtask_regexp__       = re.compile(r"addtask\s+(?P<func>\w+)\s*((before\s*(?P<before>((.*(?=after))|(.*))))|(after\s*(?P<after>((.*(?=before))|(.*)))))*")
@@ -60,7 +60,7 @@ def inherit(files, fn, lineno, d):
         file = abs_fn
 
         if not file in __inherit_cache:
-            logger.debug(1, "Inheriting %s (from %s:%d)" % (file, fn, lineno))
+            logger.debug("Inheriting %s (from %s:%d)" % (file, fn, lineno))
             __inherit_cache.append( file )
             d.setVar('__inherit_cache', __inherit_cache)
         include(fn, file, lineno, d, "inherit")
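The lookaheads added to __func_start_regexp__ mean 'python' and 'fakeroot' are only treated as keywords when they stand alone before a function name, so a function whose name merely begins with 'python' is no longer misclassified. A quick standalone check, with the two patterns copied from the hunk above:

    import re

    old = re.compile(r"(((?P<py>python)|(?P<fr>fakeroot))\s*)*(?P<func>[\w\.\-\+\{\}\$]+)?\s*\(\s*\)\s*{$")
    new = re.compile(r"(((?P<py>python(?=(\s|\()))|(?P<fr>fakeroot(?=\s)))\s*)*(?P<func>[\w\.\-\+\{\}\$]+)?\s*\(\s*\)\s*{$")

    line = "python_do_foo() {"
    print(old.match(line).group('py', 'func'))  # ('python', '_do_foo') -- wrong split
    print(new.match(line).group('py', 'func'))  # (None, 'python_do_foo') -- whole name kept

    line = "python do_foo() {"
    print(new.match(line).group('py', 'func'))  # ('python', 'do_foo') -- still detected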
diff --git a/poky/bitbake/lib/bb/parse/parse_py/ConfHandler.py b/poky/bitbake/lib/bb/parse/parse_py/ConfHandler.py
index af64d3446..f171c5c93 100644
--- a/poky/bitbake/lib/bb/parse/parse_py/ConfHandler.py
+++ b/poky/bitbake/lib/bb/parse/parse_py/ConfHandler.py
@@ -95,7 +95,7 @@ def include_single_file(parentfn, fn, lineno, data, error_out):
         if exc.errno == errno.ENOENT:
             if error_out:
                 raise ParseError("Could not %s file %s" % (error_out, fn), parentfn, lineno)
-            logger.debug(2, "CONF file '%s' not found", fn)
+            logger.debug2("CONF file '%s' not found", fn)
         else:
             if error_out:
                 raise ParseError("Could not %s file %s: %s" % (error_out, fn, exc.strerror), parentfn, lineno)
diff --git a/poky/bitbake/lib/bb/persist_data.py b/poky/bitbake/lib/bb/persist_data.py
index 5f4fbe350..c6a209fb3 100644
--- a/poky/bitbake/lib/bb/persist_data.py
+++ b/poky/bitbake/lib/bb/persist_data.py
@@ -248,7 +248,7 @@ class PersistData(object):
                       stacklevel=2)
 
         self.data = persist(d)
-        logger.debug(1, "Using '%s' as the persistent data cache",
+        logger.debug("Using '%s' as the persistent data cache",
                      self.data.filename)
 
     def addDomain(self, domain):
diff --git a/poky/bitbake/lib/bb/providers.py b/poky/bitbake/lib/bb/providers.py
index 3f66a3d99..0c87dfd4b 100644
--- a/poky/bitbake/lib/bb/providers.py
+++ b/poky/bitbake/lib/bb/providers.py
@@ -165,7 +165,7 @@ def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
             available_vers.sort()
             logger.warn("versions of %s available: %s", pn, ' '.join(available_vers))
         else:
-            logger.debug(1, "selecting %s as PREFERRED_VERSION %s of package %s%s", preferred_file, pv_str, pn, itemstr)
+            logger.debug("selecting %s as PREFERRED_VERSION %s of package %s%s", preferred_file, pv_str, pn, itemstr)
 
     return (preferred_ver, preferred_file)
 
@@ -232,7 +232,7 @@ def _filterProviders(providers, item, cfgData, dataCache):
             pkg_pn[pn] = []
         pkg_pn[pn].append(p)
 
-    logger.debug(1, "providers for %s are: %s", item, list(sorted(pkg_pn.keys())))
+    logger.debug("providers for %s are: %s", item, list(sorted(pkg_pn.keys())))
 
     # First add PREFERRED_VERSIONS
     for pn in sorted(pkg_pn):
@@ -291,7 +291,7 @@ def filterProviders(providers, item, cfgData, dataCache):
             foundUnique = True
             break
 
-    logger.debug(1, "sorted providers for %s are: %s", item, eligible)
+    logger.debug("sorted providers for %s are: %s", item, eligible)
 
     return eligible, foundUnique
 
@@ -333,7 +333,7 @@ def filterProvidersRunTime(providers, item, cfgData, dataCache):
             provides = dataCache.pn_provides[pn]
             for provide in provides:
                 prefervar = cfgData.getVar('PREFERRED_PROVIDER_%s' % provide)
-                #logger.debug(1, "checking PREFERRED_PROVIDER_%s (value %s) against %s", provide, prefervar, pns.keys())
+                #logger.debug("checking PREFERRED_PROVIDER_%s (value %s) against %s", provide, prefervar, pns.keys())
                 if prefervar in pns and pns[prefervar] not in preferred:
                     var = "PREFERRED_PROVIDER_%s = %s" % (provide, prefervar)
                     logger.verbose("selecting %s to satisfy runtime %s due to %s", prefervar, item, var)
@@ -349,7 +349,7 @@ def filterProvidersRunTime(providers, item, cfgData, dataCache):
     if numberPreferred > 1:
         logger.error("Trying to resolve runtime dependency %s resulted in conflicting PREFERRED_PROVIDER entries being found.\nThe providers found were: %s\nThe PREFERRED_PROVIDER entries resulting in this conflict were: %s. You could set PREFERRED_RPROVIDER_%s" % (item, preferred, preferred_vars, item))
 
-    logger.debug(1, "sorted runtime providers for %s are: %s", item, eligible)
+    logger.debug("sorted runtime providers for %s are: %s", item, eligible)
 
     return eligible, numberPreferred
 
@@ -384,7 +384,7 @@ def getRuntimeProviders(dataCache, rdepend):
             regexp_cache[pattern] = regexp
         if regexp.match(rdepend):
             rproviders += dataCache.packages_dynamic[pattern]
-            logger.debug(1, "Assuming %s is a dynamic package, but it may not exist" % rdepend)
+            logger.debug("Assuming %s is a dynamic package, but it may not exist" % rdepend)
 
     return rproviders
 
@@ -396,22 +396,22 @@ def buildWorldTargetList(dataCache, task=None):
     if dataCache.world_target:
         return
 
-    logger.debug(1, "collating packages for \"world\"")
+    logger.debug("collating packages for \"world\"")
     for f in dataCache.possible_world:
         terminal = True
         pn = dataCache.pkg_fn[f]
         if task and task not in dataCache.task_deps[f]['tasks']:
-            logger.debug(2, "World build skipping %s as task %s doesn't exist", f, task)
+            logger.debug2("World build skipping %s as task %s doesn't exist", f, task)
             terminal = False
 
         for p in dataCache.pn_provides[pn]:
             if p.startswith('virtual/'):
-                logger.debug(2, "World build skipping %s due to %s provider starting with virtual/", f, p)
+                logger.debug2("World build skipping %s due to %s provider starting with virtual/", f, p)
                 terminal = False
                 break
             for pf in dataCache.providers[p]:
                 if dataCache.pkg_fn[pf] != pn:
-                    logger.debug(2, "World build skipping %s due to both us and %s providing %s", f, pf, p)
+                    logger.debug2("World build skipping %s due to both us and %s providing %s", f, pf, p)
                     terminal = False
                     break
         if terminal:
diff --git a/poky/bitbake/lib/bb/runqueue.py b/poky/bitbake/lib/bb/runqueue.py
index 28bdadb45..2f521e5a1 100644
--- a/poky/bitbake/lib/bb/runqueue.py
+++ b/poky/bitbake/lib/bb/runqueue.py
@@ -38,7 +38,7 @@ def taskname_from_tid(tid):
     return tid.rsplit(":", 1)[1]
 
 def mc_from_tid(tid):
-    if tid.startswith('mc:'):
+    if tid.startswith('mc:') and tid.count(':') >= 2:
         return tid.split(':')[1]
     return ""
 
@@ -47,13 +47,13 @@ def split_tid(tid):
     return (mc, fn, taskname)
 
 def split_mc(n):
-    if n.startswith("mc:"):
+    if n.startswith("mc:") and n.count(':') >= 2:
         _, mc, n = n.split(":", 2)
         return (mc, n)
     return ('', n)
 
 def split_tid_mcfn(tid):
-    if tid.startswith('mc:'):
+    if tid.startswith('mc:') and tid.count(':') >= 2:
         elems = tid.split(':')
         mc = elems[1]
         fn = ":".join(elems[2:-1])
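All three tid helpers above now insist on a second colon before treating a string as a multiconfig identifier, so a target that merely happens to start with 'mc:' falls through to the plain-target path instead of crashing. The guarded helper in isolation:

    def split_mc(n):
        # A real multiconfig spec has the form 'mc:<config>:<target>'.
        if n.startswith("mc:") and n.count(':') >= 2:
            _, mc, n = n.split(":", 2)
            return (mc, n)
        return ('', n)

    print(split_mc("mc:qemux86:core-image-minimal"))  # ('qemux86', 'core-image-minimal')
    # Previously this raised ValueError from the 3-way unpack:
    print(split_mc("mc:oddly-named-target"))          # ('', 'mc:oddly-named-target')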
@@ -544,8 +544,8 @@ class RunQueueData:
         for tid in self.runtaskentries:
             if task_done[tid] is False or deps_left[tid] != 0:
                 problem_tasks.append(tid)
-                logger.debug(2, "Task %s is not buildable", tid)
-                logger.debug(2, "(Complete marker was %s and the remaining dependency count was %s)\n", task_done[tid], deps_left[tid])
+                logger.debug2("Task %s is not buildable", tid)
+                logger.debug2("(Complete marker was %s and the remaining dependency count was %s)\n", task_done[tid], deps_left[tid])
 
             self.runtaskentries[tid].weight = weight[tid]
 
         if problem_tasks:
@@ -643,7 +643,7 @@ class RunQueueData:
             (mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
             #runtid = build_tid(mc, fn, taskname)
 
-            #logger.debug(2, "Processing %s,%s:%s", mc, fn, taskname)
+            #logger.debug2("Processing %s,%s:%s", mc, fn, taskname)
 
             depends = set()
             task_deps = self.dataCaches[mc].task_deps[taskfn]
@@ -1199,9 +1199,9 @@ class RunQueueData:
         """
         Dump some debug information on the internal data structures
         """
-        logger.debug(3, "run_tasks:")
+        logger.debug3("run_tasks:")
         for tid in self.runtaskentries:
-            logger.debug(3, " %s: %s   Deps %s RevDeps %s", tid,
+            logger.debug3(" %s: %s   Deps %s RevDeps %s", tid,
                          self.runtaskentries[tid].weight,
                          self.runtaskentries[tid].depends,
                          self.runtaskentries[tid].revdeps)
@@ -1238,7 +1238,7 @@ class RunQueue:
         self.fakeworker = {}
 
     def _start_worker(self, mc, fakeroot = False, rqexec = None):
-        logger.debug(1, "Starting bitbake-worker")
+        logger.debug("Starting bitbake-worker")
         magic = "decafbad"
         if self.cooker.configuration.profile:
             magic = "decafbadbad"
@@ -1283,7 +1283,7 @@ class RunQueue:
     def _teardown_worker(self, worker):
         if not worker:
             return
-        logger.debug(1, "Teardown for bitbake-worker")
+        logger.debug("Teardown for bitbake-worker")
         try:
             worker.process.stdin.write(b"<quit></quit>")
            worker.process.stdin.flush()
@@ -1356,12 +1356,12 @@ class RunQueue:
 
         # If the stamp is missing, it's not current
         if not os.access(stampfile, os.F_OK):
-            logger.debug(2, "Stampfile %s not available", stampfile)
+            logger.debug2("Stampfile %s not available", stampfile)
            return False
         # If it's a 'nostamp' task, it's not current
         taskdep = self.rqdata.dataCaches[mc].task_deps[taskfn]
         if 'nostamp' in taskdep and taskname in taskdep['nostamp']:
-            logger.debug(2, "%s.%s is nostamp\n", fn, taskname)
+            logger.debug2("%s.%s is nostamp\n", fn, taskname)
             return False
 
         if taskname != "do_setscene" and taskname.endswith("_setscene"):
@@ -1385,18 +1385,18 @@ class RunQueue:
                 continue
             if fn == fn2 or (fulldeptree and fn2 not in stampwhitelist):
                 if not t2:
-                    logger.debug(2, 'Stampfile %s does not exist', stampfile2)
+                    logger.debug2('Stampfile %s does not exist', stampfile2)
                     iscurrent = False
                     break
                 if t1 < t2:
-                    logger.debug(2, 'Stampfile %s < %s', stampfile, stampfile2)
+                    logger.debug2('Stampfile %s < %s', stampfile, stampfile2)
                     iscurrent = False
                     break
             if recurse and iscurrent:
                 if dep in cache:
                     iscurrent = cache[dep]
                     if not iscurrent:
-                        logger.debug(2, 'Stampfile for dependency %s:%s invalid (cached)' % (fn2, taskname2))
+                        logger.debug2('Stampfile for dependency %s:%s invalid (cached)' % (fn2, taskname2))
                 else:
                     iscurrent = self.check_stamp_task(dep, recurse=True, cache=cache)
                     cache[dep] = iscurrent
@@ -1761,7 +1761,7 @@ class RunQueueExecute:
         for scheduler in schedulers:
             if self.scheduler == scheduler.name:
                 self.sched = scheduler(self, self.rqdata)
-                logger.debug(1, "Using runqueue scheduler '%s'", scheduler.name)
+                logger.debug("Using runqueue scheduler '%s'", scheduler.name)
                 break
         else:
             bb.fatal("Invalid scheduler '%s'. Available schedulers: %s" %
@@ -1899,7 +1899,7 @@ class RunQueueExecute:
                     break
             if alldeps:
                 self.setbuildable(revdep)
-                logger.debug(1, "Marking task %s as buildable", revdep)
+                logger.debug("Marking task %s as buildable", revdep)
 
     def task_complete(self, task):
         self.stats.taskCompleted()
@@ -1929,7 +1929,7 @@ class RunQueueExecute:
     def summarise_scenequeue_errors(self):
         err = False
         if not self.sqdone:
-            logger.debug(1, 'We could skip tasks %s', "\n".join(sorted(self.scenequeue_covered)))
+            logger.debug('We could skip tasks %s', "\n".join(sorted(self.scenequeue_covered)))
             completeevent = sceneQueueComplete(self.sq_stats, self.rq)
             bb.event.fire(completeevent, self.cfgData)
         if self.sq_deferred:
@@ -1986,7 +1986,7 @@ class RunQueueExecute:
             if nexttask in self.sq_buildable and nexttask not in self.sq_running and self.sqdata.stamps[nexttask] not in self.build_stamps.values():
                 if nexttask not in self.sqdata.unskippable and len(self.sqdata.sq_revdeps[nexttask]) > 0 and self.sqdata.sq_revdeps[nexttask].issubset(self.scenequeue_covered) and self.check_dependencies(nexttask, self.sqdata.sq_revdeps[nexttask]):
                     if nexttask not in self.rqdata.target_tids:
-                        logger.debug(2, "Skipping setscene for task %s" % nexttask)
+                        logger.debug2("Skipping setscene for task %s" % nexttask)
                         self.sq_task_skip(nexttask)
                         self.scenequeue_notneeded.add(nexttask)
                         if nexttask in self.sq_deferred:
@@ -1999,28 +1999,28 @@ class RunQueueExecute:
                 if nexttask in self.sq_deferred:
                     if self.sq_deferred[nexttask] not in self.runq_complete:
                         continue
-                    logger.debug(1, "Task %s no longer deferred" % nexttask)
+                    logger.debug("Task %s no longer deferred" % nexttask)
                     del self.sq_deferred[nexttask]
                     valid = self.rq.validate_hashes(set([nexttask]), self.cooker.data, 0, False, summary=False)
                     if not valid:
-                        logger.debug(1, "%s didn't become valid, skipping setscene" % nexttask)
+                        logger.debug("%s didn't become valid, skipping setscene" % nexttask)
                         self.sq_task_failoutright(nexttask)
                         return True
                     else:
                         self.sqdata.outrightfail.remove(nexttask)
                 if nexttask in self.sqdata.outrightfail:
-                    logger.debug(2, 'No package found, so skipping setscene task %s', nexttask)
+                    logger.debug2('No package found, so skipping setscene task %s', nexttask)
                     self.sq_task_failoutright(nexttask)
                     return True
                 if nexttask in self.sqdata.unskippable:
-                    logger.debug(2, "Setscene task %s is unskippable" % nexttask)
+                    logger.debug2("Setscene task %s is unskippable" % nexttask)
                 task = nexttask
                 break
         if task is not None:
             (mc, fn, taskname, taskfn) = split_tid_mcfn(task)
             taskname = taskname + "_setscene"
             if self.rq.check_stamp_task(task, taskname_from_tid(task), recurse = True, cache=self.stampcache):
-                logger.debug(2, 'Stamp for underlying task %s is current, so skipping setscene variant', task)
+                logger.debug2('Stamp for underlying task %s is current, so skipping setscene variant', task)
                 self.sq_task_failoutright(task)
                 return True
 
@@ -2030,12 +2030,12 @@ class RunQueueExecute:
                 return True
 
             if self.rq.check_stamp_task(task, taskname, cache=self.stampcache):
-                logger.debug(2, 'Setscene stamp current task %s, so skip it and its dependencies', task)
+                logger.debug2('Setscene stamp current task %s, so skip it and its dependencies', task)
                 self.sq_task_skip(task)
                 return True
 
             if self.cooker.configuration.skipsetscene:
-                logger.debug(2, 'No setscene tasks should be executed. Skipping %s', task)
+                logger.debug2('No setscene tasks should be executed. Skipping %s', task)
                 self.sq_task_failoutright(task)
                 return True
 
@@ -2097,12 +2097,12 @@ class RunQueueExecute:
             return True
 
         if task in self.tasks_covered:
-            logger.debug(2, "Setscene covered task %s", task)
+            logger.debug2("Setscene covered task %s", task)
             self.task_skip(task, "covered")
             return True
 
         if self.rq.check_stamp_task(task, taskname, cache=self.stampcache):
-            logger.debug(2, "Stamp current task %s", task)
+            logger.debug2("Stamp current task %s", task)
 
             self.task_skip(task, "existing")
             self.runq_tasksrun.add(task)
@@ -2322,7 +2322,7 @@ class RunQueueExecute:
                     remapped = True
 
                 if not remapped:
-                    #logger.debug(1, "Task %s hash changes: %s->%s %s->%s" % (tid, orighash, newhash, origuni, newuni))
+                    #logger.debug("Task %s hash changes: %s->%s %s->%s" % (tid, orighash, newhash, origuni, newuni))
                     self.rqdata.runtaskentries[tid].hash = newhash
                     self.rqdata.runtaskentries[tid].unihash = newuni
                     changed.add(tid)
@@ -2337,7 +2337,7 @@ class RunQueueExecute:
             for mc in self.rq.fakeworker:
                 self.rq.fakeworker[mc].process.stdin.write(b"<newtaskhashes>" + pickle.dumps(bb.parse.siggen.get_taskhashes()) + b"</newtaskhashes>")
 
-        hashequiv_logger.debug(1, pprint.pformat("Tasks changed:\n%s" % (changed)))
+        hashequiv_logger.debug(pprint.pformat("Tasks changed:\n%s" % (changed)))
 
         for tid in changed:
             if tid not in self.rqdata.runq_setscene_tids:
@@ -2356,7 +2356,7 @@ class RunQueueExecute:
             # Check no tasks this covers are running
             for dep in self.sqdata.sq_covered_tasks[tid]:
                 if dep in self.runq_running and dep not in self.runq_complete:
-                    hashequiv_logger.debug(2, "Task %s is running which blocks setscene for %s from running" % (dep, tid))
+                    hashequiv_logger.debug2("Task %s is running which blocks setscene for %s from running" % (dep, tid))
                     valid = False
                     break
             if not valid:
@@ -2430,7 +2430,7 @@ class RunQueueExecute:
 
         for dep in sorted(self.sqdata.sq_deps[task]):
             if fail and task in self.sqdata.sq_harddeps and dep in self.sqdata.sq_harddeps[task]:
-                logger.debug(2, "%s was unavailable and is a hard dependency of %s so skipping" % (task, dep))
+                logger.debug2("%s was unavailable and is a hard dependency of %s so skipping" % (task, dep))
                 self.sq_task_failoutright(dep)
                 continue
             if self.sqdata.sq_revdeps[dep].issubset(self.scenequeue_covered | self.scenequeue_notcovered):
@@ -2460,7 +2460,7 @@ class RunQueueExecute:
         completed dependencies as buildable
         """
-        logger.debug(1, 'Found task %s which could be accelerated', task)
+        logger.debug('Found task %s which could be accelerated', task)
         self.scenequeue_covered.add(task)
         self.scenequeue_updatecounters(task)
@@ -2775,13 +2775,13 @@ def update_scenequeue_data(tids, sqdata, rqdata, rq, cooker, stampcache, sqrq, s
             continue
 
         if rq.check_stamp_task(tid, taskname + "_setscene", cache=stampcache):
-            logger.debug(2, 'Setscene stamp current for task %s', tid)
+            logger.debug2('Setscene stamp current for task %s', tid)
             sqdata.stamppresent.add(tid)
             sqrq.sq_task_skip(tid)
             continue
 
         if rq.check_stamp_task(tid, taskname, recurse = True, cache=stampcache):
-            logger.debug(2, 'Normal stamp current for task %s', tid)
+            logger.debug2('Normal stamp current for task %s', tid)
             sqdata.stamppresent.add(tid)
             sqrq.sq_task_skip(tid)
             continue
diff --git a/poky/bitbake/lib/bb/siggen.py b/poky/bitbake/lib/bb/siggen.py
index 0ac395246..0d88c6ec6 100644
--- a/poky/bitbake/lib/bb/siggen.py
+++ b/poky/bitbake/lib/bb/siggen.py
@@ -541,7 +541,7 @@ class SignatureGeneratorUniHashMixIn(object):
                     # is much more interesting, so it is reported at debug level 1
                     hashequiv_logger.debug((1, 2)[unihash == taskhash], 'Found unihash %s in place of %s for %s from %s' % (unihash, taskhash, tid, self.server))
                 else:
-                    hashequiv_logger.debug(2, 'No reported unihash for %s:%s from %s' % (tid, taskhash, self.server))
+                    hashequiv_logger.debug2('No reported unihash for %s:%s from %s' % (tid, taskhash, self.server))
             except hashserv.client.HashConnectionError as e:
                 bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))
 
@@ -615,12 +615,12 @@ class SignatureGeneratorUniHashMixIn(object):
                 new_unihash = data['unihash']
 
                 if new_unihash != unihash:
-                    hashequiv_logger.debug(1, 'Task %s unihash changed %s -> %s by server %s' % (taskhash, unihash, new_unihash, self.server))
+                    hashequiv_logger.debug('Task %s unihash changed %s -> %s by server %s' % (taskhash, unihash, new_unihash, self.server))
                     bb.event.fire(bb.runqueue.taskUniHashUpdate(fn + ':do_' + task, new_unihash), d)
                     self.set_unihash(tid, new_unihash)
                     d.setVar('BB_UNIHASH', new_unihash)
                 else:
-                    hashequiv_logger.debug(1, 'Reported task %s as unihash %s to %s' % (taskhash, unihash, self.server))
+                    hashequiv_logger.debug('Reported task %s as unihash %s to %s' % (taskhash, unihash, self.server))
             except hashserv.client.HashConnectionError as e:
                 bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))
             finally:
@@ -748,7 +748,7 @@ def clean_basepath(basepath):
     if basepath[0] == '/':
         return cleaned
 
-    if basepath.startswith("mc:"):
+    if basepath.startswith("mc:") and basepath.count(':') >= 2:
         mc, mc_name, basepath = basepath.split(":", 2)
         mc_suffix = ':mc:' + mc_name
     else:
logger.debug(1, "File '%s' is unbuildable, removing...", fn) + logger.debug("File '%s' is unbuildable, removing...", fn) self.failed_fns.append(fn) for target in self.build_targets: if fn in self.build_targets[target]: @@ -526,7 +526,7 @@ class TaskData: added = added + 1 except (bb.providers.NoRProvider, bb.providers.MultipleRProvider): self.remove_runtarget(target) - logger.debug(1, "Resolved " + str(added) + " extra dependencies") + logger.debug("Resolved " + str(added) + " extra dependencies") if added == 0: break # self.dump_data() @@ -549,38 +549,38 @@ class TaskData: """ Dump some debug information on the internal data structures """ - logger.debug(3, "build_names:") - logger.debug(3, ", ".join(self.build_targets)) + logger.debug3("build_names:") + logger.debug3(", ".join(self.build_targets)) - logger.debug(3, "run_names:") - logger.debug(3, ", ".join(self.run_targets)) + logger.debug3("run_names:") + logger.debug3(", ".join(self.run_targets)) - logger.debug(3, "build_targets:") + logger.debug3("build_targets:") for target in self.build_targets: targets = "None" if target in self.build_targets: targets = self.build_targets[target] - logger.debug(3, " %s: %s", target, targets) + logger.debug3(" %s: %s", target, targets) - logger.debug(3, "run_targets:") + logger.debug3("run_targets:") for target in self.run_targets: targets = "None" if target in self.run_targets: targets = self.run_targets[target] - logger.debug(3, " %s: %s", target, targets) + logger.debug3(" %s: %s", target, targets) - logger.debug(3, "tasks:") + logger.debug3("tasks:") for tid in self.taskentries: - logger.debug(3, " %s: %s %s %s", + logger.debug3(" %s: %s %s %s", tid, self.taskentries[tid].idepends, self.taskentries[tid].irdepends, self.taskentries[tid].tdepends) - logger.debug(3, "dependency ids (per fn):") + logger.debug3("dependency ids (per fn):") for fn in self.depids: - logger.debug(3, " %s: %s", fn, self.depids[fn]) + logger.debug3(" %s: %s", fn, self.depids[fn]) - logger.debug(3, "runtime dependency ids (per fn):") + logger.debug3("runtime dependency ids (per fn):") for fn in self.rdepids: - logger.debug(3, " %s: %s", fn, self.rdepids[fn]) + logger.debug3(" %s: %s", fn, self.rdepids[fn]) diff --git a/poky/bitbake/lib/bb/tests/fetch.py b/poky/bitbake/lib/bb/tests/fetch.py index da17d7f28..7b2dac7b8 100644 --- a/poky/bitbake/lib/bb/tests/fetch.py +++ b/poky/bitbake/lib/bb/tests/fetch.py @@ -87,6 +87,25 @@ class URITest(unittest.TestCase): }, 'relative': False }, + # Check that trailing semicolons are handled correctly + "http://www.example.org/index.html?qparam1=qvalue1;param2=value2;" : { + 'uri': 'http://www.example.org/index.html?qparam1=qvalue1;param2=value2', + 'scheme': 'http', + 'hostname': 'www.example.org', + 'port': None, + 'hostport': 'www.example.org', + 'path': '/index.html', + 'userinfo': '', + 'username': '', + 'password': '', + 'params': { + 'param2': 'value2' + }, + 'query': { + 'qparam1': 'qvalue1' + }, + 'relative': False + }, "http://www.example.com:8080/index.html" : { 'uri': 'http://www.example.com:8080/index.html', 'scheme': 'http', @@ -654,6 +673,58 @@ class FetcherLocalTest(FetcherTest): with self.assertRaises(bb.fetch2.UnpackError): self.fetchUnpack(['file://a;subdir=/bin/sh']) + def test_local_gitfetch_usehead(self): + # Create dummy local Git repo + src_dir = tempfile.mkdtemp(dir=self.tempdir, + prefix='gitfetch_localusehead_') + src_dir = os.path.abspath(src_dir) + bb.process.run("git init", cwd=src_dir) + bb.process.run("git commit --allow-empty -m'Dummy commit'", + 
+                       cwd=src_dir)
+        # Use other branch than master
+        bb.process.run("git checkout -b my-devel", cwd=src_dir)
+        bb.process.run("git commit --allow-empty -m'Dummy commit 2'",
+                       cwd=src_dir)
+        stdout = bb.process.run("git rev-parse HEAD", cwd=src_dir)
+        orig_rev = stdout[0].strip()
+
+        # Fetch and check revision
+        self.d.setVar("SRCREV", "AUTOINC")
+        url = "git://" + src_dir + ";protocol=file;usehead=1"
+        fetcher = bb.fetch.Fetch([url], self.d)
+        fetcher.download()
+        fetcher.unpack(self.unpackdir)
+        stdout = bb.process.run("git rev-parse HEAD",
+                                cwd=os.path.join(self.unpackdir, 'git'))
+        unpack_rev = stdout[0].strip()
+        self.assertEqual(orig_rev, unpack_rev)
+
+    def test_local_gitfetch_usehead_withname(self):
+        # Create dummy local Git repo
+        src_dir = tempfile.mkdtemp(dir=self.tempdir,
+                                   prefix='gitfetch_localusehead_')
+        src_dir = os.path.abspath(src_dir)
+        bb.process.run("git init", cwd=src_dir)
+        bb.process.run("git commit --allow-empty -m'Dummy commit'",
+                       cwd=src_dir)
+        # Use other branch than master
+        bb.process.run("git checkout -b my-devel", cwd=src_dir)
+        bb.process.run("git commit --allow-empty -m'Dummy commit 2'",
+                       cwd=src_dir)
+        stdout = bb.process.run("git rev-parse HEAD", cwd=src_dir)
+        orig_rev = stdout[0].strip()
+
+        # Fetch and check revision
+        self.d.setVar("SRCREV", "AUTOINC")
+        url = "git://" + src_dir + ";protocol=file;usehead=1;name=newName"
+        fetcher = bb.fetch.Fetch([url], self.d)
+        fetcher.download()
+        fetcher.unpack(self.unpackdir)
+        stdout = bb.process.run("git rev-parse HEAD",
+                                cwd=os.path.join(self.unpackdir, 'git'))
+        unpack_rev = stdout[0].strip()
+        self.assertEqual(orig_rev, unpack_rev)
+
 class FetcherNoNetworkTest(FetcherTest):
     def setUp(self):
         super().setUp()
@@ -844,35 +915,21 @@ class FetcherNetworkTest(FetcherTest):
             self.assertRaises(bb.fetch.FetchError, self.gitfetcher, url1, url2)
 
     @skipIfNoNetwork()
-    def test_gitfetch_localusehead(self):
-        # Create dummy local Git repo
-        src_dir = tempfile.mkdtemp(dir=self.tempdir,
-                                   prefix='gitfetch_localusehead_')
-        src_dir = os.path.abspath(src_dir)
-        bb.process.run("git init", cwd=src_dir)
-        bb.process.run("git commit --allow-empty -m'Dummy commit'",
-                       cwd=src_dir)
-        # Use other branch than master
-        bb.process.run("git checkout -b my-devel", cwd=src_dir)
-        bb.process.run("git commit --allow-empty -m'Dummy commit 2'",
-                       cwd=src_dir)
-        stdout = bb.process.run("git rev-parse HEAD", cwd=src_dir)
-        orig_rev = stdout[0].strip()
-
-        # Fetch and check revision
-        self.d.setVar("SRCREV", "AUTOINC")
-        url = "git://" + src_dir + ";protocol=file;usehead=1"
-        fetcher = bb.fetch.Fetch([url], self.d)
-        fetcher.download()
-        fetcher.unpack(self.unpackdir)
-        stdout = bb.process.run("git rev-parse HEAD",
-                                cwd=os.path.join(self.unpackdir, 'git'))
-        unpack_rev = stdout[0].strip()
-        self.assertEqual(orig_rev, unpack_rev)
+    def test_gitfetch_usehead(self):
+        # Since self.gitfetcher() sets SRCREV we expect this to override
+        # `usehead=1' and instead fetch the specified SRCREV. See
+        # test_local_gitfetch_usehead() for a positive use of the usehead
+        # feature.
+        url = "git://git.openembedded.org/bitbake;usehead=1"
+        self.assertRaises(bb.fetch.ParameterError, self.gitfetcher, url, url)
 
     @skipIfNoNetwork()
-    def test_gitfetch_remoteusehead(self):
-        url = "git://git.openembedded.org/bitbake;usehead=1"
+    def test_gitfetch_usehead_withname(self):
+        # Since self.gitfetcher() sets SRCREV we expect this to override
+        # `usehead=1' and instead fetch the specified SRCREV. See
+        # test_local_gitfetch_usehead() for a positive use of the usehead
+        # feature.
+        url = "git://git.openembedded.org/bitbake;usehead=1;name=newName"
         self.assertRaises(bb.fetch.ParameterError, self.gitfetcher, url, url)
 
     @skipIfNoNetwork()
@@ -2051,13 +2108,14 @@ class GitLfsTest(FetcherTest):
             cwd = self.gitdir
         return bb.process.run(cmd, cwd=cwd)[0]
 
-    def fetch(self, uri=None):
+    def fetch(self, uri=None, download=True):
         uris = self.d.getVar('SRC_URI').split()
         uri = uris[0]
         d = self.d
 
         fetcher = bb.fetch2.Fetch(uris, d)
-        fetcher.download()
+        if download:
+            fetcher.download()
         ud = fetcher.ud[uri]
         return fetcher, ud
 
@@ -2067,16 +2125,21 @@ class GitLfsTest(FetcherTest):
         uri = 'git://%s;protocol=file;subdir=${S};lfs=1' % self.srcdir
         self.d.setVar('SRC_URI', uri)
 
-        fetcher, ud = self.fetch()
+        # Careful: suppress initial attempt at downloading until
+        # we know whether git-lfs is installed.
+        fetcher, ud = self.fetch(uri=None, download=False)
         self.assertIsNotNone(ud.method._find_git_lfs)
 
-        # If git-lfs can be found, the unpack should be successful
-        ud.method._find_git_lfs = lambda d: True
-        shutil.rmtree(self.gitdir, ignore_errors=True)
-        fetcher.unpack(self.d.getVar('WORKDIR'))
+        # If git-lfs can be found, the unpack should be successful. Only
+        # attempt this with the real live copy of git-lfs installed.
+        if ud.method._find_git_lfs(self.d):
+            fetcher.download()
+            shutil.rmtree(self.gitdir, ignore_errors=True)
+            fetcher.unpack(self.d.getVar('WORKDIR'))
 
         # If git-lfs cannot be found, the unpack should throw an error
         with self.assertRaises(bb.fetch2.FetchError):
+            fetcher.download()
             ud.method._find_git_lfs = lambda d: False
             shutil.rmtree(self.gitdir, ignore_errors=True)
             fetcher.unpack(self.d.getVar('WORKDIR'))
@@ -2087,10 +2150,16 @@ class GitLfsTest(FetcherTest):
         uri = 'git://%s;protocol=file;subdir=${S};lfs=0' % self.srcdir
         self.d.setVar('SRC_URI', uri)
 
+        # In contrast to test_lfs_enabled(), allow the implicit download
+        # done by self.fetch() to occur here. The point of this test case
+        # is to verify that the fetcher can survive even if the source
+        # repository has Git LFS usage configured.
         fetcher, ud = self.fetch()
         self.assertIsNotNone(ud.method._find_git_lfs)
 
-        # If git-lfs can be found, the unpack should be successful
+        # If git-lfs can be found, the unpack should be successful. A
+        # live copy of git-lfs is not required for this case, so
+        # unconditionally forge its presence.
         ud.method._find_git_lfs = lambda d: True
         shutil.rmtree(self.gitdir, ignore_errors=True)
         fetcher.unpack(self.d.getVar('WORKDIR'))
diff --git a/poky/bitbake/lib/bb/ui/buildinfohelper.py b/poky/bitbake/lib/bb/ui/buildinfohelper.py
index 82c62e332..43aa59284 100644
--- a/poky/bitbake/lib/bb/ui/buildinfohelper.py
+++ b/poky/bitbake/lib/bb/ui/buildinfohelper.py
@@ -148,14 +148,14 @@ class ORMWrapper(object):
         buildrequest = None
         if brbe is not None:
             # Toaster-triggered build
-            logger.debug(1, "buildinfohelper: brbe is %s" % brbe)
+            logger.debug("buildinfohelper: brbe is %s" % brbe)
             br, _ = brbe.split(":")
             buildrequest = BuildRequest.objects.get(pk=br)
             prj = buildrequest.project
         else:
             # CLI build
             prj = Project.objects.get_or_create_default_project()
-            logger.debug(1, "buildinfohelper: project is not specified, defaulting to %s" % prj)
+            logger.debug("buildinfohelper: project is not specified, defaulting to %s" % prj)
 
         if buildrequest is not None:
             # reuse existing Build object
@@ -171,7 +171,7 @@ class ORMWrapper(object):
                 completed_on=now,
                 build_name='')
 
-            logger.debug(1, "buildinfohelper: build is created %s" % build)
+            logger.debug("buildinfohelper: build is created %s" % build)
 
         if buildrequest is not None:
             buildrequest.build = build
@@ -906,7 +906,7 @@ class BuildInfoHelper(object):
 
         self.project = None
 
-        logger.debug(1, "buildinfohelper: Build info helper inited %s" % vars(self))
+        logger.debug("buildinfohelper: Build info helper inited %s" % vars(self))
 
     ###################
 
@@ -1620,7 +1620,7 @@ class BuildInfoHelper(object):
         # if we have a backlog of events, do our best to save them here
         if len(self.internal_state['backlog']):
             tempevent = self.internal_state['backlog'].pop()
-            logger.debug(1, "buildinfohelper: Saving stored event %s "
+            logger.debug("buildinfohelper: Saving stored event %s "
                          % tempevent)
             self.store_log_event(tempevent,cli_backlog)
         else:
diff --git a/poky/bitbake/lib/bb/utils.py b/poky/bitbake/lib/bb/utils.py
index f73d31fb7..b282d09ab 100644
--- a/poky/bitbake/lib/bb/utils.py
+++ b/poky/bitbake/lib/bb/utils.py
@@ -129,6 +129,7 @@ def vercmp(ta, tb):
     return r
 
 def vercmp_string(a, b):
+    """ Split version strings and compare them """
     ta = split_version(a)
     tb = split_version(b)
     return vercmp(ta, tb)
@@ -247,6 +248,12 @@ def explode_dep_versions2(s, *, sort=True):
     return r
 
 def explode_dep_versions(s):
+    """
+    Take an RDEPENDS style string of format:
+    "DEPEND1 (optional version) DEPEND2 (optional version) ..."
+    skip null values and items that appear in the dependency string
+    multiple times, and return a dictionary of dependencies and versions.
+    """
     r = explode_dep_versions2(s)
     for d in r:
         if not r[d]:
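For reference, the newly documented helpers behave roughly as follows when run inside a bitbake environment (the commented output is approximate):

    import bb.utils

    deps = "glibc (>= 2.28) busybox foo"

    # explode_dep_versions2() keeps every constraint in a list per item:
    print(bb.utils.explode_dep_versions2(deps))
    # {'glibc': ['>= 2.28'], 'busybox': [], 'foo': []}

    # explode_dep_versions() flattens that to one version string (or None):
    print(bb.utils.explode_dep_versions(deps))
    # {'glibc': '>= 2.28', 'busybox': None, 'foo': None}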
@@ -602,7 +609,7 @@ def filter_environment(good_vars):
     os.environ["LC_ALL"] = "en_US.UTF-8"
 
     if removed_vars:
-        logger.debug(1, "Removed the following variables from the environment: %s", ", ".join(removed_vars.keys()))
+        logger.debug("Removed the following variables from the environment: %s", ", ".join(removed_vars.keys()))
 
     return removed_vars
 
@@ -692,7 +699,7 @@ def remove(path, recurse=False, ionice=False):
         raise
 
 def prunedir(topdir, ionice=False):
-    # Delete everything reachable from the directory named in 'topdir'.
+    """ Delete everything reachable from the directory named in 'topdir'. """
     # CAUTION: This is dangerous!
     if _check_unsafe_delete_path(topdir):
         raise Exception('bb.utils.prunedir: called with dangerous path "%s", refusing to delete!' % topdir)
@@ -703,8 +710,10 @@ def prunedir(topdir, ionice=False):
 # but that's possibly insane and suffixes is probably going to be small
 #
 def prune_suffix(var, suffixes, d):
-    # See if var ends with any of the suffixes listed and
-    # remove it if found
+    """
+    See if var ends with any of the suffixes listed and
+    remove it if found
+    """
     for suffix in suffixes:
         if suffix and var.endswith(suffix):
             return var[:-len(suffix)]
@@ -956,6 +965,10 @@ def umask(new_mask):
         os.umask(current_mask)
 
 def to_boolean(string, default=None):
+    """
+    Check the input string and return a boolean value of True, False
+    or the given default, depending on the checks.
+    """
     if not string:
         return default
 
@@ -999,6 +1012,23 @@ def contains(variable, checkvalues, truevalue, falsevalue, d):
     return falsevalue
 
 def contains_any(variable, checkvalues, truevalue, falsevalue, d):
+    """Check if a variable contains any values specified.
+
+    Arguments:
+
+    variable -- the variable name. This will be fetched and expanded (using
+    d.getVar(variable)) and then split into a set().
+
+    checkvalues -- if this is a string it is split on whitespace into a set(),
+    otherwise coerced directly into a set().
+
+    truevalue -- the value to return if any of the checkvalues is found in
+    variable.
+
+    falsevalue -- the value to return if variable is empty or if none of the
+    checkvalues is found in variable.
+
+    d -- the data store.
+    """
     val = d.getVar(variable)
     if not val:
         return falsevalue
@@ -1560,8 +1590,8 @@ def set_process_name(name):
     except:
         pass
 
-# export common proxies variables from datastore to environment
 def export_proxies(d):
+    """ export common proxies variables from datastore to environment """
     import os
 
     variables = ['http_proxy', 'HTTP_PROXY', 'https_proxy', 'HTTPS_PROXY',
@@ -1583,12 +1613,12 @@ def load_plugins(logger, plugins, pluginpath):
     def load_plugin(name):
-        logger.debug(1, 'Loading plugin %s' % name)
+        logger.debug('Loading plugin %s' % name)
         spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath] )
         if spec:
             return spec.loader.load_module()
 
-    logger.debug(1, 'Loading plugins from %s...' % pluginpath)
+    logger.debug('Loading plugins from %s...' % pluginpath)
 
     expanded = (glob.glob(os.path.join(pluginpath, '*' + ext))
                 for ext in python_extensions)
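A short usage sketch for the newly documented contains_any(), assuming a bitbake datastore is available (variable values are illustrative):

    import bb.utils
    import bb.data_smart

    d = bb.data_smart.DataSmart()
    d.setVar('DISTRO_FEATURES', 'systemd opengl usrmerge')

    # The truevalue is returned as soon as any one checkvalue is present...
    print(bb.utils.contains_any('DISTRO_FEATURES', 'wayland opengl', True, False, d))  # True
    # ...and the falsevalue when none of them are.
    print(bb.utils.contains_any('DISTRO_FEATURES', 'wayland x11', True, False, d))     # False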
diff --git a/poky/bitbake/lib/bblayers/action.py b/poky/bitbake/lib/bblayers/action.py
index 5b78195ad..f05f5d330 100644
--- a/poky/bitbake/lib/bblayers/action.py
+++ b/poky/bitbake/lib/bblayers/action.py
@@ -50,10 +50,10 @@ class ActionPlugin(LayerPlugin):
             if not (args.force or notadded):
                 try:
                     self.tinfoil.run_command('parseConfiguration')
-                except bb.tinfoil.TinfoilUIException:
+                except (bb.tinfoil.TinfoilUIException, bb.BBHandledException):
                     # Restore the back up copy of bblayers.conf
                     shutil.copy2(backup, bblayers_conf)
-                    bb.fatal("Parse failure with the specified layer added")
+                    bb.fatal("Parse failure with the specified layer added, aborting.")
                 else:
                     for item in notadded:
                         sys.stderr.write("Specified layer %s is already in BBLAYERS\n" % item)
diff --git a/poky/bitbake/lib/bblayers/layerindex.py b/poky/bitbake/lib/bblayers/layerindex.py
index 95b67a662..b2f27b21e 100644
--- a/poky/bitbake/lib/bblayers/layerindex.py
+++ b/poky/bitbake/lib/bblayers/layerindex.py
@@ -79,7 +79,7 @@ class LayerIndexPlugin(ActionPlugin):
             branches = [args.branch]
         else:
             branches = (self.tinfoil.config_data.getVar('LAYERSERIES_CORENAMES') or 'master').split()
-        logger.debug(1, 'Trying branches: %s' % branches)
+        logger.debug('Trying branches: %s' % branches)
 
         ignore_layers = []
         if args.ignore:
diff --git a/poky/bitbake/lib/hashserv/__init__.py b/poky/bitbake/lib/hashserv/__init__.py
index 55f48410d..5f2e101e5 100644
--- a/poky/bitbake/lib/hashserv/__init__.py
+++ b/poky/bitbake/lib/hashserv/__init__.py
@@ -94,10 +94,10 @@ def chunkify(msg, max_chunk):
         yield "\n"
 
 
-def create_server(addr, dbname, *, sync=True, upstream=None):
+def create_server(addr, dbname, *, sync=True, upstream=None, read_only=False):
     from . import server
     db = setup_database(dbname, sync=sync)
-    s = server.Server(db, upstream=upstream)
+    s = server.Server(db, upstream=upstream, read_only=read_only)
 
     (typ, a) = parse_address(addr)
     if typ == ADDR_TYPE_UNIX:
diff --git a/poky/bitbake/lib/hashserv/client.py b/poky/bitbake/lib/hashserv/client.py
index 0ffd0c2ae..e05c1eb56 100644
--- a/poky/bitbake/lib/hashserv/client.py
+++ b/poky/bitbake/lib/hashserv/client.py
@@ -99,7 +99,7 @@ class AsyncClient(object):
             l = await get_line()
 
             m = json.loads(l)
-            if "chunk-stream" in m:
+            if m and "chunk-stream" in m:
                 lines = []
                 while True:
                     l = (await get_line()).rstrip("\n")
@@ -170,6 +170,12 @@ class AsyncClient(object):
             {"get": {"taskhash": taskhash, "method": method, "all": all_properties}}
         )
 
+    async def get_outhash(self, method, outhash, taskhash):
+        await self._set_mode(self.MODE_NORMAL)
+        return await self.send_message(
+            {"get-outhash": {"outhash": outhash, "taskhash": taskhash, "method": method}}
+        )
+
     async def get_stats(self):
         await self._set_mode(self.MODE_NORMAL)
         return await self.send_message({"get-stats": None})
diff --git a/poky/bitbake/lib/hashserv/server.py b/poky/bitbake/lib/hashserv/server.py
index 3ff4c51cc..a0dc0c170 100644
--- a/poky/bitbake/lib/hashserv/server.py
+++ b/poky/bitbake/lib/hashserv/server.py
@@ -112,6 +112,9 @@ class Stats(object):
 class ClientError(Exception):
     pass
 
+class ServerError(Exception):
+    pass
+
 def insert_task(cursor, data, ignore=False):
     keys = sorted(data.keys())
     query = '''INSERT%s INTO tasks_v2 (%s) VALUES (%s)''' % (
@@ -127,6 +130,18 @@ async def copy_from_upstream(client, db, method, taskhash):
         d = {k: v for k, v in d.items() if k in TABLE_COLUMNS}
         keys = sorted(d.keys())
 
+        with closing(db.cursor()) as cursor:
+            insert_task(cursor, d)
+            db.commit()
+
+    return d
+
+async def copy_outhash_from_upstream(client, db, method, outhash, taskhash):
+    d = await client.get_outhash(method, outhash, taskhash)
+    if d is not None:
+        # Filter out unknown columns
+        d = {k: v for k, v in d.items() if k in TABLE_COLUMNS}
+        keys = sorted(d.keys())
+
         with closing(db.cursor()) as cursor:
             insert_task(cursor, d)
             db.commit()
@@ -137,8 +152,22 @@ async def copy_from_upstream(client, db, method, taskhash):
 class ServerClient(object):
     FAST_QUERY = 'SELECT taskhash, method, unihash FROM tasks_v2 WHERE method=:method AND taskhash=:taskhash ORDER BY created ASC LIMIT 1'
     ALL_QUERY =  'SELECT * FROM tasks_v2 WHERE method=:method AND taskhash=:taskhash ORDER BY created ASC LIMIT 1'
-
-    def __init__(self, reader, writer, db, request_stats, backfill_queue, upstream):
+    OUTHASH_QUERY = '''
+        -- Find tasks with a matching outhash (that is, tasks that
+        -- are equivalent)
+        SELECT * FROM tasks_v2 WHERE method=:method AND outhash=:outhash
+
+        -- If there is an exact match on the taskhash, return it.
+        -- Otherwise return the oldest matching outhash of any
+        -- taskhash
+        ORDER BY CASE WHEN taskhash=:taskhash THEN 1 ELSE 2 END,
+            created ASC
+
+        -- Only return one row
+        LIMIT 1
+        '''
+
+    def __init__(self, reader, writer, db, request_stats, backfill_queue, upstream, read_only):
         self.reader = reader
         self.writer = writer
         self.db = db
@@ -149,15 +178,20 @@ class ServerClient(object):
 
         self.handlers = {
             'get': self.handle_get,
-            'report': self.handle_report,
-            'report-equiv': self.handle_equivreport,
+            'get-outhash': self.handle_get_outhash,
             'get-stream': self.handle_get_stream,
             'get-stats': self.handle_get_stats,
-            'reset-stats': self.handle_reset_stats,
             'chunk-stream': self.handle_chunk,
-            'backfill-wait': self.handle_backfill_wait,
         }
 
+        if not read_only:
+            self.handlers.update({
+                'report': self.handle_report,
+                'report-equiv': self.handle_equivreport,
+                'reset-stats': self.handle_reset_stats,
+                'backfill-wait': self.handle_backfill_wait,
+            })
+
     async def process_requests(self):
         if self.upstream is not None:
             self.upstream_client = await create_async_client(self.upstream)
@@ -282,6 +316,21 @@ class ServerClient(object):
 
         self.write_message(d)
 
+    async def handle_get_outhash(self, request):
+        with closing(self.db.cursor()) as cursor:
+            cursor.execute(self.OUTHASH_QUERY,
+                           {k: request[k] for k in ('method', 'outhash', 'taskhash')})
+
+            row = cursor.fetchone()
+
+        if row is not None:
+            logger.debug('Found equivalent outhash %s -> %s', row['outhash'], row['unihash'])
+            d = {k: row[k] for k in row.keys()}
+        else:
+            d = None
+
+        self.write_message(d)
+
     async def handle_get_stream(self, request):
         self.write_message('ok')
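The ORDER BY CASE clause in OUTHASH_QUERY is what prefers an exact taskhash match and otherwise falls back to the oldest equivalent row. The trick is easy to verify against a throwaway sqlite database with the schema reduced to the relevant columns:

    import sqlite3

    db = sqlite3.connect(":memory:")
    db.execute("CREATE TABLE tasks_v2 "
               "(taskhash TEXT, method TEXT, outhash TEXT, unihash TEXT, created INTEGER)")
    db.executemany("INSERT INTO tasks_v2 VALUES (?, ?, ?, ?, ?)", [
        ("aaa", "m", "out1", "uni-oldest", 1),   # oldest row for this outhash
        ("bbb", "m", "out1", "uni-exact", 2),    # exact taskhash match
    ])

    query = """SELECT unihash FROM tasks_v2 WHERE method=:method AND outhash=:outhash
               ORDER BY CASE WHEN taskhash=:taskhash THEN 1 ELSE 2 END, created ASC
               LIMIT 1"""

    # Exact taskhash match wins:
    print(db.execute(query, {"method": "m", "outhash": "out1", "taskhash": "bbb"}).fetchone())
    # ('uni-exact',)
    # No exact match: the oldest equivalent row is returned:
    print(db.execute(query, {"method": "m", "outhash": "out1", "taskhash": "zzz"}).fetchone())
    # ('uni-oldest',)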
@@ -282,6 +316,21 @@ class ServerClient(object):
 
         self.write_message(d)
 
+    async def handle_get_outhash(self, request):
+        with closing(self.db.cursor()) as cursor:
+            cursor.execute(self.OUTHASH_QUERY,
+                           {k: request[k] for k in ('method', 'outhash', 'taskhash')})
+
+            row = cursor.fetchone()
+
+        if row is not None:
+            logger.debug('Found equivalent outhash %s -> %s', (row['outhash'], row['unihash']))
+            d = {k: row[k] for k in row.keys()}
+        else:
+            d = None
+
+        self.write_message(d)
+
     async def handle_get_stream(self, request):
         self.write_message('ok')
 
@@ -335,23 +384,19 @@ class ServerClient(object):
 
     async def handle_report(self, data):
         with closing(self.db.cursor()) as cursor:
-            cursor.execute('''
-                -- Find tasks with a matching outhash (that is, tasks that
-                -- are equivalent)
-                SELECT taskhash, method, unihash FROM tasks_v2 WHERE method=:method AND outhash=:outhash
-
-                -- If there is an exact match on the taskhash, return it.
-                -- Otherwise return the oldest matching outhash of any
-                -- taskhash
-                ORDER BY CASE WHEN taskhash=:taskhash THEN 1 ELSE 2 END,
-                    created ASC
-
-                -- Only return one row
-                LIMIT 1
-                ''', {k: data[k] for k in ('method', 'outhash', 'taskhash')})
+            cursor.execute(self.OUTHASH_QUERY,
+                           {k: data[k] for k in ('method', 'outhash', 'taskhash')})
 
             row = cursor.fetchone()
 
+            if row is None and self.upstream_client:
+                # Try upstream
+                row = await copy_outhash_from_upstream(self.upstream_client,
+                                                       self.db,
+                                                       data['method'],
+                                                       data['outhash'],
+                                                       data['taskhash'])
+
             # If no matching outhash was found, or one *was* found but it
             # wasn't an exact match on the taskhash, a new entry for this
             # taskhash should be added
@@ -455,7 +500,10 @@ class ServerClient(object):
 
 
 class Server(object):
-    def __init__(self, db, loop=None, upstream=None):
+    def __init__(self, db, loop=None, upstream=None, read_only=False):
+        if upstream and read_only:
+            raise ServerError("Read-only hashserv cannot pull from an upstream server")
+
         self.request_stats = Stats()
         self.db = db
 
@@ -467,6 +515,7 @@ class Server(object):
             self.close_loop = False
 
         self.upstream = upstream
+        self.read_only = read_only
 
         self._cleanup_socket = None
 
@@ -510,7 +559,7 @@ class Server(object):
     async def handle_client(self, reader, writer):
         # writer.transport.set_write_buffer_limits(0)
         try:
-            client = ServerClient(reader, writer, self.db, self.request_stats, self.backfill_queue, self.upstream)
+            client = ServerClient(reader, writer, self.db, self.request_stats, self.backfill_queue, self.upstream, self.read_only)
             await client.process_requests()
         except Exception as e:
             import traceback
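A quick sketch of how these pieces compose: a writable server owns the database, a second read-only Server can serve the same SQLite file, and combining read_only with upstream now fails fast. The socket paths here are made up; create_server and ServerError are the entry points touched by this patch (starting/serving the instances is omitted):

    from hashserv import create_server
    from hashserv.server import ServerError

    # Hypothetical addresses and database path for illustration.
    rw = create_server("unix://./hashserve.sock", "./hashes.sqlite")
    ro = create_server("unix://./hashserve-ro.sock", "./hashes.sqlite", read_only=True)

    # A read-only mirror cannot also backfill from an upstream:
    try:
        create_server("unix://./bad.sock", "./hashes.sqlite",
                      upstream="unix://./hashserve.sock", read_only=True)
    except ServerError as e:
        print(e)  # Read-only hashserv cannot pull from an upstream server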
diff --git a/poky/bitbake/lib/hashserv/tests.py b/poky/bitbake/lib/hashserv/tests.py
index 77a19b807..1a696481e 100644
--- a/poky/bitbake/lib/hashserv/tests.py
+++ b/poky/bitbake/lib/hashserv/tests.py
@@ -6,6 +6,7 @@
 #
 
 from . import create_server, create_client
+from .client import HashConnectionError
 import hashlib
 import logging
 import multiprocessing
@@ -29,7 +30,7 @@ class HashEquivalenceTestSetup(object):
     server_index = 0
 
-    def start_server(self, dbpath=None, upstream=None):
+    def start_server(self, dbpath=None, upstream=None, read_only=False):
         self.server_index += 1
         if dbpath is None:
             dbpath = os.path.join(self.temp_dir.name, "db%d.sqlite" % self.server_index)
@@ -38,7 +39,10 @@ class HashEquivalenceTestSetup(object):
             thread.terminate()
             thread.join()
 
-        server = create_server(self.get_server_addr(self.server_index), dbpath, upstream=upstream)
+        server = create_server(self.get_server_addr(self.server_index),
+                               dbpath,
+                               upstream=upstream,
+                               read_only=read_only)
         server.dbpath = dbpath
 
         server.thread = multiprocessing.Process(target=_run_server, args=(server, self.server_index))
@@ -242,6 +246,43 @@ class HashEquivalenceCommonTests(object):
         self.assertClientGetHash(side_client, taskhash4, unihash4)
         self.assertClientGetHash(self.client, taskhash4, None)
 
+        # Test that reporting a unihash in the downstream is able to find a
+        # match which was previously reported to the upstream server
+        taskhash5 = '35788efcb8dfb0a02659d81cf2bfd695fb30faf9'
+        outhash5 = '2765d4a5884be49b28601445c2760c5f21e7e5c0ee2b7e3fce98fd7e5970796f'
+        unihash5 = 'f46d3fbb439bd9b921095da657a4de906510d2cd'
+        result = self.client.report_unihash(taskhash5, self.METHOD, outhash5, unihash5)
+
+        taskhash6 = '35788efcb8dfb0a02659d81cf2bfd695fb30fafa'
+        unihash6 = 'f46d3fbb439bd9b921095da657a4de906510d2ce'
+        result = down_client.report_unihash(taskhash6, self.METHOD, outhash5, unihash6)
+        self.assertEqual(result['unihash'], unihash5, 'Server failed to copy unihash from upstream')
+
+    def test_ro_server(self):
+        (ro_client, ro_server) = self.start_server(dbpath=self.server.dbpath, read_only=True)
+
+        # Report a hash via the read-write server
+        taskhash = '35788efcb8dfb0a02659d81cf2bfd695fb30faf9'
+        outhash = '2765d4a5884be49b28601445c2760c5f21e7e5c0ee2b7e3fce98fd7e5970796f'
+        unihash = 'f46d3fbb439bd9b921095da657a4de906510d2cd'
+
+        result = self.client.report_unihash(taskhash, self.METHOD, outhash, unihash)
+        self.assertEqual(result['unihash'], unihash, 'Server returned bad unihash')
+
+        # Check the hash via the read-only server
+        self.assertClientGetHash(ro_client, taskhash, unihash)
+
+        # Ensure that reporting via the read-only server fails
+        taskhash2 = 'c665584ee6817aa99edfc77a44dd853828279370'
+        outhash2 = '3c979c3db45c569f51ab7626a4651074be3a9d11a84b1db076f5b14f7d39db44'
+        unihash2 = '90e9bc1d1f094c51824adca7f8ea79a048d68824'
+
+        with self.assertRaises(HashConnectionError):
+            ro_client.report_unihash(taskhash2, self.METHOD, outhash2, unihash2)
+
+        # Ensure that the database was not modified
+        self.assertClientGetHash(self.client, taskhash2, None)
+
 
 class TestHashEquivalenceUnixServer(HashEquivalenceTestSetup, HashEquivalenceCommonTests, unittest.TestCase):
     def get_server_addr(self, server_idx):
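The remaining hunks are mechanical: bitbake's logger used to be a custom subclass whose debug() took a numeric verbosity level as its first argument, and these files are converted to the standard logging.Logger.debug() call, which takes only the message. A minimal before/after sketch (logger name and URL invented for illustration):

    import logging

    logger = logging.getLogger("layerindexlib.example")  # name invented

    url = "https://layers.openembedded.org/layerindex/api/"

    # Old bitbake-style call, with a leading debug level:
    #   logger.debug(1, "Loading: %s" % url)
    # Standard library logging has no level parameter, so the calls become:
    logger.debug("Loading: %s" % url)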
diff --git a/poky/bitbake/lib/layerindexlib/__init__.py b/poky/bitbake/lib/layerindexlib/__init__.py
index 45157b668..9ca127b9d 100644
--- a/poky/bitbake/lib/layerindexlib/__init__.py
+++ b/poky/bitbake/lib/layerindexlib/__init__.py
@@ -94,7 +94,7 @@ class LayerIndex():
             if not param:
                 continue
             item = param.split('=', 1)
-            logger.debug(1, item)
+            logger.debug(item)
             param_dict[item[0]] = item[1]
 
         return param_dict
@@ -123,7 +123,7 @@ class LayerIndex():
         up = urlparse(url)
 
         if username:
-            logger.debug(1, "Configuring authentication for %s..." % url)
+            logger.debug("Configuring authentication for %s..." % url)
             password_mgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
             password_mgr.add_password(None, "%s://%s" % (up.scheme, up.netloc), username, password)
             handler = urllib.request.HTTPBasicAuthHandler(password_mgr)
@@ -133,20 +133,20 @@ class LayerIndex():
 
         urllib.request.install_opener(opener)
 
-        logger.debug(1, "Fetching %s (%s)..." % (url, ["without authentication", "with authentication"][bool(username)]))
+        logger.debug("Fetching %s (%s)..." % (url, ["without authentication", "with authentication"][bool(username)]))
 
         try:
             res = urlopen(Request(url, headers={'User-Agent': 'Mozilla/5.0 (bitbake/lib/layerindex)'}, unverifiable=True))
         except urllib.error.HTTPError as e:
-            logger.debug(1, "HTTP Error: %s: %s" % (e.code, e.reason))
-            logger.debug(1, " Requested: %s" % (url))
-            logger.debug(1, " Actual: %s" % (e.geturl()))
+            logger.debug("HTTP Error: %s: %s" % (e.code, e.reason))
+            logger.debug(" Requested: %s" % (url))
+            logger.debug(" Actual: %s" % (e.geturl()))
 
             if e.code == 404:
-                logger.debug(1, "Request not found.")
+                logger.debug("Request not found.")
                 raise LayerIndexFetchError(url, e)
             else:
-                logger.debug(1, "Headers:\n%s" % (e.headers))
+                logger.debug("Headers:\n%s" % (e.headers))
                 raise LayerIndexFetchError(url, e)
         except OSError as e:
             error = 0
@@ -170,7 +170,7 @@ class LayerIndex():
                 raise LayerIndexFetchError(url, "Unable to fetch OSError exception: %s" % e)
 
         finally:
-            logger.debug(1, "...fetching %s (%s), done." % (url, ["without authentication", "with authentication"][bool(username)]))
+            logger.debug("...fetching %s (%s), done." % (url, ["without authentication", "with authentication"][bool(username)]))
 
         return res
 
@@ -205,14 +205,14 @@ The format of the indexURI:
         if reload:
             self.indexes = []
 
-        logger.debug(1, 'Loading: %s' % indexURI)
+        logger.debug('Loading: %s' % indexURI)
 
         if not self.plugins:
             raise LayerIndexException("No LayerIndex Plugins available")
 
         for plugin in self.plugins:
             # Check if the plugin was initialized
-            logger.debug(1, 'Trying %s' % plugin.__class__)
+            logger.debug('Trying %s' % plugin.__class__)
             if not hasattr(plugin, 'type') or not plugin.type:
                 continue
             try:
@@ -220,11 +220,11 @@ The format of the indexURI:
                 indexEnt = plugin.load_index(indexURI, load)
                 break
             except LayerIndexPluginUrlError as e:
-                logger.debug(1, "%s doesn't support %s" % (plugin.type, e.url))
+                logger.debug("%s doesn't support %s" % (plugin.type, e.url))
             except NotImplementedError:
                 pass
         else:
-            logger.debug(1, "No plugins support %s" % indexURI)
+            logger.debug("No plugins support %s" % indexURI)
             raise LayerIndexException("No plugins support %s" % indexURI)
 
         # Mark CONFIG data as something we've added...
@@ -255,19 +255,19 @@ will write out the individual elements split by layer and related components.
 
         for plugin in self.plugins:
             # Check if the plugin was initialized
-            logger.debug(1, 'Trying %s' % plugin.__class__)
+            logger.debug('Trying %s' % plugin.__class__)
             if not hasattr(plugin, 'type') or not plugin.type:
                 continue
             try:
                 plugin.store_index(indexURI, index)
                 break
             except LayerIndexPluginUrlError as e:
-                logger.debug(1, "%s doesn't support %s" % (plugin.type, e.url))
+                logger.debug("%s doesn't support %s" % (plugin.type, e.url))
             except NotImplementedError:
-                logger.debug(1, "Store not implemented in %s" % plugin.type)
+                logger.debug("Store not implemented in %s" % plugin.type)
                 pass
         else:
-            logger.debug(1, "No plugins support %s" % indexURI)
+            logger.debug("No plugins support %s" % indexURI)
             raise LayerIndexException("No plugins support %s" % indexURI)
 
@@ -292,7 +292,7 @@ layerBranches set. If not, they are effectively blank.'''
 the default configuration until the first vcs_url/branch match.'''
 
         for index in self.indexes:
-            logger.debug(1, ' searching %s' % index.config['DESCRIPTION'])
+            logger.debug(' searching %s' % index.config['DESCRIPTION'])
             layerBranch = index.find_vcs_url(vcs_url, [branch])
             if layerBranch:
                 return layerBranch
@@ -304,7 +304,7 @@ layerBranches set. If not, they are effectively blank.'''
 
            If a branch has not been specified, we will iterate over the branches in
 the default configuration until the first collection/branch match.'''
-        logger.debug(1, 'find_collection: %s (%s) %s' % (collection, version, branch))
+        logger.debug('find_collection: %s (%s) %s' % (collection, version, branch))
 
         if branch:
             branches = [branch]
@@ -312,12 +312,12 @@ layerBranches set. If not, they are effectively blank.'''
             branches = None
 
         for index in self.indexes:
-            logger.debug(1, ' searching %s' % index.config['DESCRIPTION'])
+            logger.debug(' searching %s' % index.config['DESCRIPTION'])
             layerBranch = index.find_collection(collection, version, branches)
             if layerBranch:
                 return layerBranch
         else:
-            logger.debug(1, 'Collection %s (%s) not found for branch (%s)' % (collection, version, branch))
+            logger.debug('Collection %s (%s) not found for branch (%s)' % (collection, version, branch))
 
         return None
 
     def find_layerbranch(self, name, branch=None):
@@ -408,7 +408,7 @@ layerBranches set. If not, they are effectively blank.'''
                                     version=deplayerbranch.version
                                 )
                 if rdeplayerbranch != deplayerbranch:
-                    logger.debug(1, 'Replaced %s:%s:%s with %s:%s:%s' % \
+                    logger.debug('Replaced %s:%s:%s with %s:%s:%s' % \
                           (deplayerbranch.index.config['DESCRIPTION'],
                            deplayerbranch.branch.name,
                            deplayerbranch.layer.name,
@@ -1121,7 +1121,7 @@ class LayerBranch(LayerIndexItemObj):
     @property
     def branch(self):
         try:
-            logger.debug(1, "Get branch object from branches[%s]" % (self.branch_id))
+            logger.debug("Get branch object from branches[%s]" % (self.branch_id))
             return self.index.branches[self.branch_id]
         except KeyError:
             raise AttributeError('Unable to find branches in index to map branch_id %s' % self.branch_id)
@@ -1149,7 +1149,7 @@ class LayerBranch(LayerIndexItemObj):
 
     @actual_branch.setter
     def actual_branch(self, value):
-        logger.debug(1, "Set actual_branch to %s .. name is %s" % (value, self.branch.name))
+        logger.debug("Set actual_branch to %s .. name is %s" % (value, self.branch.name))
         if value != self.branch.name:
             self._setattr('actual_branch', value, prop=False)
         else:
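One stylistic note on these conversions: the patch keeps eager %-formatting, which builds the message string even when DEBUG output is disabled. Standard logging can also defer the formatting until a handler actually emits the record; a small sketch (logger name and URL invented):

    import logging

    logger = logging.getLogger("layerindexlib.example")  # name invented
    url = "https://layers.openembedded.org/layerindex/api/"

    # Eager: the string is built before debug() is called, as in the patch.
    logger.debug("Fetching %s..." % url)
    # Deferred: formatting happens only if a DEBUG handler is active.
    logger.debug("Fetching %s...", url)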
name is %s" % (value, self.branch.name)) if value != self.branch.name: self._setattr('actual_branch', value, prop=False) else: diff --git a/poky/bitbake/lib/layerindexlib/cooker.py b/poky/bitbake/lib/layerindexlib/cooker.py index 21ec438a2..2de6e5faa 100644 --- a/poky/bitbake/lib/layerindexlib/cooker.py +++ b/poky/bitbake/lib/layerindexlib/cooker.py @@ -173,7 +173,7 @@ class CookerPlugin(layerindexlib.plugin.IndexPlugin): else: branches = ['HEAD'] - logger.debug(1, "Loading cooker data branches %s" % branches) + logger.debug("Loading cooker data branches %s" % branches) index = self._load_bblayers(branches=branches) @@ -220,7 +220,7 @@ class CookerPlugin(layerindexlib.plugin.IndexPlugin): required=required, layerbranch=layerBranchId, dependency=depLayerBranch.layer_id) - logger.debug(1, '%s requires %s' % (layerDependency.layer.name, layerDependency.dependency.name)) + logger.debug('%s requires %s' % (layerDependency.layer.name, layerDependency.dependency.name)) index.add_element("layerDependencies", [layerDependency]) return layerDependencyId diff --git a/poky/bitbake/lib/layerindexlib/restapi.py b/poky/bitbake/lib/layerindexlib/restapi.py index 7023f42f2..26a1c9674 100644 --- a/poky/bitbake/lib/layerindexlib/restapi.py +++ b/poky/bitbake/lib/layerindexlib/restapi.py @@ -82,7 +82,7 @@ class RestApiPlugin(layerindexlib.plugin.IndexPlugin): def load_cache(path, index, branches=[]): - logger.debug(1, 'Loading json file %s' % path) + logger.debug('Loading json file %s' % path) with open(path, 'rt', encoding='utf-8') as f: pindex = json.load(f) @@ -102,7 +102,7 @@ class RestApiPlugin(layerindexlib.plugin.IndexPlugin): if newpBranch: index.add_raw_element('branches', layerindexlib.Branch, newpBranch) else: - logger.debug(1, 'No matching branches (%s) in index file(s)' % branches) + logger.debug('No matching branches (%s) in index file(s)' % branches) # No matching branches.. return nothing... return @@ -120,7 +120,7 @@ class RestApiPlugin(layerindexlib.plugin.IndexPlugin): load_cache(up.path, index, branches) return index - logger.debug(1, 'Loading from dir %s...' % (up.path)) + logger.debug('Loading from dir %s...' % (up.path)) for (dirpath, _, filenames) in os.walk(up.path): for filename in filenames: if not filename.endswith('.json'): @@ -144,7 +144,7 @@ class RestApiPlugin(layerindexlib.plugin.IndexPlugin): def _get_json_response(apiurl=None, username=None, password=None, retry=True): assert apiurl is not None - logger.debug(1, "fetching %s" % apiurl) + logger.debug("fetching %s" % apiurl) up = urlparse(apiurl) @@ -163,9 +163,9 @@ class RestApiPlugin(layerindexlib.plugin.IndexPlugin): parsed = json.loads(res.read().decode('utf-8')) except ConnectionResetError: if retry: - logger.debug(1, "%s: Connection reset by peer. Retrying..." % url) + logger.debug("%s: Connection reset by peer. Retrying..." % url) parsed = _get_json_response(apiurl=up_stripped.geturl(), username=username, password=password, retry=False) - logger.debug(1, "%s: retry successful.") + logger.debug("%s: retry successful.") else: raise layerindexlib.LayerIndexFetchError('%s: Connection reset by peer. Is there a firewall blocking your connection?' 
@@ -207,25 +207,25 @@ class RestApiPlugin(layerindexlib.plugin.IndexPlugin):
             if "*" not in branches:
                 filter = "?filter=name:%s" % "OR".join(branches)
 
-            logger.debug(1, "Loading %s from %s" % (branches, index.apilinks['branches']))
+            logger.debug("Loading %s from %s" % (branches, index.apilinks['branches']))
 
             # The link won't include username/password, so pull it from the original url
             pindex['branches'] = _get_json_response(index.apilinks['branches'] + filter,
                                                     username=up.username, password=up.password)
             if not pindex['branches']:
-                logger.debug(1, "No valid branches (%s) found at url %s." % (branch, url))
+                logger.debug("No valid branches (%s) found at url %s." % (branch, url))
                 return index
             index.add_raw_element("branches", layerindexlib.Branch, pindex['branches'])
 
             # Load all of the layerItems (these can not be easily filtered)
-            logger.debug(1, "Loading %s from %s" % ('layerItems', index.apilinks['layerItems']))
+            logger.debug("Loading %s from %s" % ('layerItems', index.apilinks['layerItems']))
 
             # The link won't include username/password, so pull it from the original url
             pindex['layerItems'] = _get_json_response(index.apilinks['layerItems'],
                                                       username=up.username, password=up.password)
             if not pindex['layerItems']:
-                logger.debug(1, "No layers were found at url %s." % (url))
+                logger.debug("No layers were found at url %s." % (url))
                 return index
             index.add_raw_element("layerItems", layerindexlib.LayerItem, pindex['layerItems'])
 
@@ -235,13 +235,13 @@ class RestApiPlugin(layerindexlib.plugin.IndexPlugin):
             for branch in index.branches:
                 filter = "?filter=branch__name:%s" % index.branches[branch].name
 
-                logger.debug(1, "Loading %s from %s" % ('layerBranches', index.apilinks['layerBranches']))
+                logger.debug("Loading %s from %s" % ('layerBranches', index.apilinks['layerBranches']))
 
                 # The link won't include username/password, so pull it from the original url
                 pindex['layerBranches'] = _get_json_response(index.apilinks['layerBranches'] + filter,
                                                              username=up.username, password=up.password)
                 if not pindex['layerBranches']:
-                    logger.debug(1, "No valid layer branches (%s) found at url %s." % (branches or "*", url))
+                    logger.debug("No valid layer branches (%s) found at url %s." % (branches or "*", url))
                     return index
                 index.add_raw_element("layerBranches", layerindexlib.LayerBranch, pindex['layerBranches'])
 
@@ -256,7 +256,7 @@ class RestApiPlugin(layerindexlib.plugin.IndexPlugin):
                                              ("distros", layerindexlib.Distro)]:
                     if lName not in load:
                         continue
-                    logger.debug(1, "Loading %s from %s" % (lName, index.apilinks[lName]))
+                    logger.debug("Loading %s from %s" % (lName, index.apilinks[lName]))
 
                     # The link won't include username/password, so pull it from the original url
                     pindex[lName] = _get_json_response(index.apilinks[lName] + filter,
@@ -283,7 +283,7 @@ class RestApiPlugin(layerindexlib.plugin.IndexPlugin):
         if up.scheme != 'file':
             raise layerindexlib.plugin.LayerIndexPluginUrlError(self.type, url)
 
-        logger.debug(1, "Storing to %s..." % up.path)
+        logger.debug("Storing to %s..." % up.path)
 
         try:
             layerbranches = index.layerBranches
@@ -299,12 +299,12 @@ class RestApiPlugin(layerindexlib.plugin.IndexPlugin):
                             if getattr(index, objects)[obj].layerbranch_id == layerbranchid:
                                 filtered.append(getattr(index, objects)[obj]._data)
                         except AttributeError:
-                            logger.debug(1, 'No obj.layerbranch_id: %s' % objects)
+                            logger.debug('No obj.layerbranch_id: %s' % objects)
                             # No simple filter method, just include it...
                            try:
                                 filtered.append(getattr(index, objects)[obj]._data)
                             except AttributeError:
-                                logger.debug(1, 'No obj._data: %s %s' % (objects, type(obj)))
+                                logger.debug('No obj._data: %s %s' % (objects, type(obj)))
                                 filtered.append(obj)
 
                 return filtered
diff --git a/poky/bitbake/lib/layerindexlib/tests/cooker.py b/poky/bitbake/lib/layerindexlib/tests/cooker.py
index 1d0685e09..5ddf89aa2 100644
--- a/poky/bitbake/lib/layerindexlib/tests/cooker.py
+++ b/poky/bitbake/lib/layerindexlib/tests/cooker.py
@@ -72,7 +72,7 @@ class LayerIndexCookerTest(LayersTest):
 
     def test_find_collection(self):
         def _check(collection, expected):
-            self.logger.debug(1, "Looking for collection %s..." % collection)
+            self.logger.debug("Looking for collection %s..." % collection)
             result = self.layerindex.find_collection(collection)
             if expected:
                 self.assertIsNotNone(result, msg="Did not find %s when it shouldn't be there" % collection)
@@ -91,7 +91,7 @@ class LayerIndexCookerTest(LayersTest):
 
     def test_find_layerbranch(self):
         def _check(name, expected):
-            self.logger.debug(1, "Looking for layerbranch %s..." % name)
+            self.logger.debug("Looking for layerbranch %s..." % name)
             result = self.layerindex.find_layerbranch(name)
             if expected:
                 self.assertIsNotNone(result, msg="Did not find %s when it shouldn't be there" % collection)
diff --git a/poky/bitbake/lib/layerindexlib/tests/restapi.py b/poky/bitbake/lib/layerindexlib/tests/restapi.py
index 4646d01f9..33b5c1c4c 100644
--- a/poky/bitbake/lib/layerindexlib/tests/restapi.py
+++ b/poky/bitbake/lib/layerindexlib/tests/restapi.py
@@ -57,11 +57,11 @@ class LayerIndexWebRestApiTest(LayersTest):
                type in self.layerindex.indexes[0].config['local']:
                 continue
             for id in getattr(self.layerindex.indexes[0], type):
-                self.logger.debug(1, "type %s" % (type))
+                self.logger.debug("type %s" % (type))
                 self.assertTrue(id in getattr(reload.indexes[0], type), msg="Id number not in reloaded index")
-                self.logger.debug(1, "%s ? %s" % (getattr(self.layerindex.indexes[0], type)[id], getattr(reload.indexes[0], type)[id]))
+                self.logger.debug("%s ? %s" % (getattr(self.layerindex.indexes[0], type)[id], getattr(reload.indexes[0], type)[id]))
                 self.assertEqual(getattr(self.layerindex.indexes[0], type)[id], getattr(reload.indexes[0], type)[id], msg="Reloaded contents different")
 
@@ -80,11 +80,11 @@ class LayerIndexWebRestApiTest(LayersTest):
                type in self.layerindex.indexes[0].config['local']:
                 continue
             for id in getattr(self.layerindex.indexes[0] ,type):
-                self.logger.debug(1, "type %s" % (type))
+                self.logger.debug("type %s" % (type))
                 self.assertTrue(id in getattr(reload.indexes[0], type), msg="Id number missing from reloaded data")
-                self.logger.debug(1, "%s ? %s" % (getattr(self.layerindex.indexes[0] ,type)[id], getattr(reload.indexes[0], type)[id]))
+                self.logger.debug("%s ? %s" % (getattr(self.layerindex.indexes[0] ,type)[id], getattr(reload.indexes[0], type)[id]))
                 self.assertEqual(getattr(self.layerindex.indexes[0] ,type)[id], getattr(reload.indexes[0], type)[id], msg="reloaded data does not match original")
 
@@ -111,14 +111,14 @@ class LayerIndexWebRestApiTest(LayersTest):
                     if dep.layer.name == 'meta-python':
                         break
                 else:
-                    self.logger.debug(1, "meta-python was not found")
+                    self.logger.debug("meta-python was not found")
                     raise self.failureException
 
                 # Only check the first element...
                 break
             else:
                 # Empty list, this is bad.
- self.logger.debug(1, "Empty list of dependencies") + self.logger.debug("Empty list of dependencies") self.assertIsNotNone(first, msg="Empty list of dependencies") # Last dep should be the requested item @@ -128,7 +128,7 @@ class LayerIndexWebRestApiTest(LayersTest): @skipIfNoNetwork() def test_find_collection(self): def _check(collection, expected): - self.logger.debug(1, "Looking for collection %s..." % collection) + self.logger.debug("Looking for collection %s..." % collection) result = self.layerindex.find_collection(collection) if expected: self.assertIsNotNone(result, msg="Did not find %s when it should be there" % collection) @@ -148,11 +148,11 @@ class LayerIndexWebRestApiTest(LayersTest): @skipIfNoNetwork() def test_find_layerbranch(self): def _check(name, expected): - self.logger.debug(1, "Looking for layerbranch %s..." % name) + self.logger.debug("Looking for layerbranch %s..." % name) for index in self.layerindex.indexes: for layerbranchid in index.layerBranches: - self.logger.debug(1, "Present: %s" % index.layerBranches[layerbranchid].layer.name) + self.logger.debug("Present: %s" % index.layerBranches[layerbranchid].layer.name) result = self.layerindex.find_layerbranch(name) if expected: self.assertIsNotNone(result, msg="Did not find %s when it should be there" % collection) |