summaryrefslogtreecommitdiff
path: root/poky/scripts/lib
diff options
context:
space:
mode:
Diffstat (limited to 'poky/scripts/lib')
-rw-r--r--poky/scripts/lib/devtool/standard.py7
-rw-r--r--poky/scripts/lib/devtool/upgrade.py56
-rw-r--r--poky/scripts/lib/recipetool/create.py42
-rw-r--r--poky/scripts/lib/recipetool/create_go.py4
-rw-r--r--poky/scripts/lib/recipetool/create_npm.py95
-rw-r--r--poky/scripts/lib/resulttool/junit.py77
-rwxr-xr-xpoky/scripts/lib/resulttool/manualexecution.py2
-rw-r--r--poky/scripts/lib/resulttool/report.py2
-rw-r--r--poky/scripts/lib/resulttool/resultutils.py76
-rw-r--r--poky/scripts/lib/resulttool/store.py26
-rw-r--r--poky/scripts/lib/wic/canned-wks/efi-bootdisk.wks.in2
-rw-r--r--poky/scripts/lib/wic/plugins/source/bootimg-partition.py39
12 files changed, 273 insertions, 155 deletions
diff --git a/poky/scripts/lib/devtool/standard.py b/poky/scripts/lib/devtool/standard.py
index 1d0fe13788..b2e1a6ca3a 100644
--- a/poky/scripts/lib/devtool/standard.py
+++ b/poky/scripts/lib/devtool/standard.py
@@ -952,13 +952,6 @@ def modify(args, config, basepath, workspace):
f.write('EXTERNALSRC_BUILD:pn-%s = "%s"\n' % (pn, srctree))
if bb.data.inherits_class('kernel', rd):
- f.write('SRCTREECOVEREDTASKS = "do_validate_branches do_kernel_checkout '
- 'do_fetch do_unpack do_kernel_configcheck"\n')
- f.write('\ndo_patch[noexec] = "1"\n')
- f.write('\ndo_configure:append() {\n'
- ' cp ${B}/.config ${S}/.config.baseline\n'
- ' ln -sfT ${B}/.config ${S}/.config.new\n'
- '}\n')
f.write('\ndo_kernel_configme:prepend() {\n'
' if [ -e ${S}/.config ]; then\n'
' mv ${S}/.config ${S}/.config.old\n'
diff --git a/poky/scripts/lib/devtool/upgrade.py b/poky/scripts/lib/devtool/upgrade.py
index 8e13833b51..eed3a49e4b 100644
--- a/poky/scripts/lib/devtool/upgrade.py
+++ b/poky/scripts/lib/devtool/upgrade.py
@@ -76,19 +76,19 @@ def _rename_recipe_dirs(oldpv, newpv, path):
bb.utils.rename(os.path.join(path, oldfile),
os.path.join(path, newfile))
-def _rename_recipe_file(oldrecipe, bpn, oldpv, newpv, path):
+def _rename_recipe_file(oldrecipe, pn, oldpv, newpv, path):
oldrecipe = os.path.basename(oldrecipe)
if oldrecipe.endswith('_%s.bb' % oldpv):
- newrecipe = '%s_%s.bb' % (bpn, newpv)
+ newrecipe = '%s_%s.bb' % (pn, newpv)
if oldrecipe != newrecipe:
shutil.move(os.path.join(path, oldrecipe), os.path.join(path, newrecipe))
else:
newrecipe = oldrecipe
return os.path.join(path, newrecipe)
-def _rename_recipe_files(oldrecipe, bpn, oldpv, newpv, path):
+def _rename_recipe_files(oldrecipe, pn, oldpv, newpv, path):
_rename_recipe_dirs(oldpv, newpv, path)
- return _rename_recipe_file(oldrecipe, bpn, oldpv, newpv, path)
+ return _rename_recipe_file(oldrecipe, pn, oldpv, newpv, path)
def _write_append(rc, srctreebase, srctree, same_dir, no_same_dir, revs, copied, workspace, d):
"""Writes an append file"""
@@ -335,19 +335,19 @@ def _add_license_diff_to_recipe(path, diff):
def _create_new_recipe(newpv, checksums, srcrev, srcbranch, srcsubdir_old, srcsubdir_new, workspace, tinfoil, rd, license_diff, new_licenses, srctree, keep_failure):
"""Creates the new recipe under workspace"""
- bpn = rd.getVar('BPN')
- path = os.path.join(workspace, 'recipes', bpn)
+ pn = rd.getVar('PN')
+ path = os.path.join(workspace, 'recipes', pn)
bb.utils.mkdirhier(path)
copied, _ = oe.recipeutils.copy_recipe_files(rd, path, all_variants=True)
if not copied:
- raise DevtoolError('Internal error - no files were copied for recipe %s' % bpn)
+ raise DevtoolError('Internal error - no files were copied for recipe %s' % pn)
logger.debug('Copied %s to %s' % (copied, path))
oldpv = rd.getVar('PV')
if not newpv:
newpv = oldpv
origpath = rd.getVar('FILE')
- fullpath = _rename_recipe_files(origpath, bpn, oldpv, newpv, path)
+ fullpath = _rename_recipe_files(origpath, pn, oldpv, newpv, path)
logger.debug('Upgraded %s => %s' % (origpath, fullpath))
newvalues = {}
@@ -534,14 +534,14 @@ def _generate_license_diff(old_licenses, new_licenses):
diff = diff + line
return diff
-def _run_recipe_update_extra_tasks(pn, rd, tinfoil):
+def _run_recipe_upgrade_extra_tasks(pn, rd, tinfoil):
tasks = []
- for task in (rd.getVar('RECIPE_UPDATE_EXTRA_TASKS') or '').split():
- logger.info('Running extra recipe update task: %s' % task)
+ for task in (rd.getVar('RECIPE_UPGRADE_EXTRA_TASKS') or '').split():
+ logger.info('Running extra recipe upgrade task: %s' % task)
res = tinfoil.build_targets(pn, task, handle_events=True)
if not res:
- raise DevtoolError('Running extra recipe update task %s for %s failed' % (task, pn))
+ raise DevtoolError('Running extra recipe upgrade task %s for %s failed' % (task, pn))
def upgrade(args, config, basepath, workspace):
"""Entry point for the devtool 'upgrade' subcommand"""
@@ -610,7 +610,7 @@ def upgrade(args, config, basepath, workspace):
license_diff = _generate_license_diff(old_licenses, new_licenses)
rf, copied = _create_new_recipe(args.version, checksums, args.srcrev, srcbranch, srcsubdir1, srcsubdir2, config.workspace_path, tinfoil, rd, license_diff, new_licenses, srctree, args.keep_failure)
except (bb.process.CmdError, DevtoolError) as e:
- recipedir = os.path.join(config.workspace_path, 'recipes', rd.getVar('BPN'))
+ recipedir = os.path.join(config.workspace_path, 'recipes', rd.getVar('PN'))
_upgrade_error(e, recipedir, srctree, args.keep_failure)
standard._add_md5(config, pn, os.path.dirname(rf))
@@ -618,7 +618,7 @@ def upgrade(args, config, basepath, workspace):
copied, config.workspace_path, rd)
standard._add_md5(config, pn, af)
- _run_recipe_update_extra_tasks(pn, rd, tinfoil)
+ _run_recipe_upgrade_extra_tasks(pn, rd, tinfoil)
update_unlockedsigs(basepath, workspace, args.fixed_setup, [pn])
@@ -654,18 +654,28 @@ def latest_version(args, config, basepath, workspace):
return 0
def check_upgrade_status(args, config, basepath, workspace):
+ def _print_status(recipe):
+ print("{:25} {:15} {:15} {} {} {}".format( recipe['pn'],
+ recipe['cur_ver'],
+ recipe['status'] if recipe['status'] != 'UPDATE' else (recipe['next_ver'] if not recipe['next_ver'].endswith("new-commits-available") else "new commits"),
+ recipe['maintainer'],
+ recipe['revision'] if recipe['revision'] != 'N/A' else "",
+ "cannot be updated due to: %s" %(recipe['no_upgrade_reason']) if recipe['no_upgrade_reason'] else ""))
if not args.recipe:
logger.info("Checking the upstream status for all recipes may take a few minutes")
results = oe.recipeutils.get_recipe_upgrade_status(args.recipe)
- for result in results:
- # pn, update_status, current, latest, maintainer, latest_commit, no_update_reason
- if args.all or result[1] != 'MATCH':
- print("{:25} {:15} {:15} {} {} {}".format( result[0],
- result[2],
- result[1] if result[1] != 'UPDATE' else (result[3] if not result[3].endswith("new-commits-available") else "new commits"),
- result[4],
- result[5] if result[5] != 'N/A' else "",
- "cannot be updated due to: %s" %(result[6]) if result[6] else ""))
+ for recipegroup in results:
+ upgrades = [r for r in recipegroup if r['status'] != 'MATCH']
+ currents = [r for r in recipegroup if r['status'] == 'MATCH']
+ if len(upgrades) > 1:
+ print("These recipes need to be upgraded together {")
+ for r in upgrades:
+ _print_status(r)
+ if len(upgrades) > 1:
+ print("}")
+ for r in currents:
+ if args.all:
+ _print_status(r)
def register_commands(subparsers, context):
"""Register devtool subcommands from this plugin"""
diff --git a/poky/scripts/lib/recipetool/create.py b/poky/scripts/lib/recipetool/create.py
index 066366e34f..ea2ef5be63 100644
--- a/poky/scripts/lib/recipetool/create.py
+++ b/poky/scripts/lib/recipetool/create.py
@@ -960,7 +960,7 @@ def handle_license_vars(srctree, lines_before, handled, extravalues, d):
# Someone else has already handled the license vars, just return their value
return lichandled[0][1]
- licvalues = guess_license(srctree, d)
+ licvalues = find_licenses(srctree, d)
licenses = []
lic_files_chksum = []
lic_unknown = []
@@ -1216,13 +1216,7 @@ def crunch_license(licfile):
lictext = ''
return md5val, lictext
-def guess_license(srctree, d):
- import bb
- md5sums = get_license_md5sums(d)
-
- crunched_md5sums = crunch_known_licenses(d)
-
- licenses = []
+def find_license_files(srctree):
licspecs = ['*LICEN[CS]E*', 'COPYING*', '*[Ll]icense*', 'LEGAL*', '[Ll]egal*', '*GPL*', 'README.lic*', 'COPYRIGHT*', '[Cc]opyright*', 'e[dp]l-v10']
skip_extensions = (".html", ".js", ".json", ".svg", ".ts", ".go")
licfiles = []
@@ -1235,11 +1229,22 @@ def guess_license(srctree, d):
fullpath = os.path.join(root, fn)
if not fullpath in licfiles:
licfiles.append(fullpath)
+
+ return licfiles
+
+def match_licenses(licfiles, srctree, d):
+ import bb
+ md5sums = get_license_md5sums(d)
+
+ crunched_md5sums = crunch_known_licenses(d)
+
+ licenses = []
for licfile in sorted(licfiles):
- md5value = bb.utils.md5_file(licfile)
+ resolved_licfile = d.expand(licfile)
+ md5value = bb.utils.md5_file(resolved_licfile)
license = md5sums.get(md5value, None)
if not license:
- crunched_md5, lictext = crunch_license(licfile)
+ crunched_md5, lictext = crunch_license(resolved_licfile)
license = crunched_md5sums.get(crunched_md5, None)
if lictext and not license:
license = 'Unknown'
@@ -1249,13 +1254,19 @@ def guess_license(srctree, d):
if license:
licenses.append((license, os.path.relpath(licfile, srctree), md5value))
+ return licenses
+
+def find_licenses(srctree, d):
+ licfiles = find_license_files(srctree)
+ licenses = match_licenses(licfiles, srctree, d)
+
# FIXME should we grab at least one source file with a license header and add that too?
return licenses
def split_pkg_licenses(licvalues, packages, outlines, fallback_licenses=None, pn='${PN}'):
"""
- Given a list of (license, path, md5sum) as returned by guess_license(),
+ Given a list of (license, path, md5sum) as returned by match_licenses(),
a dict of package name to path mappings, write out a set of
package-specific LICENSE values.
"""
@@ -1284,6 +1295,14 @@ def split_pkg_licenses(licvalues, packages, outlines, fallback_licenses=None, pn
outlicenses[pkgname] = licenses
return outlicenses
+def generate_common_licenses_chksums(common_licenses, d):
+ lic_files_chksums = []
+ for license in tidy_licenses(common_licenses):
+ licfile = '${COMMON_LICENSE_DIR}/' + license
+ md5value = bb.utils.md5_file(d.expand(licfile))
+ lic_files_chksums.append('file://%s;md5=%s' % (licfile, md5value))
+ return lic_files_chksums
+
def read_pkgconfig_provides(d):
pkgdatadir = d.getVar('PKGDATA_DIR')
pkgmap = {}
@@ -1418,4 +1437,3 @@ def register_commands(subparsers):
parser_create.add_argument('--devtool', action="store_true", help=argparse.SUPPRESS)
parser_create.add_argument('--mirrors', action="store_true", help='Enable PREMIRRORS and MIRRORS for source tree fetching (disabled by default).')
parser_create.set_defaults(func=create_recipe)
-
diff --git a/poky/scripts/lib/recipetool/create_go.py b/poky/scripts/lib/recipetool/create_go.py
index a85a2f2786..5cc53931f0 100644
--- a/poky/scripts/lib/recipetool/create_go.py
+++ b/poky/scripts/lib/recipetool/create_go.py
@@ -14,7 +14,7 @@ from collections import namedtuple
from enum import Enum
from html.parser import HTMLParser
from recipetool.create import RecipeHandler, handle_license_vars
-from recipetool.create import guess_license, tidy_licenses, fixup_license
+from recipetool.create import find_licenses, tidy_licenses, fixup_license
from recipetool.create import determine_from_url
from urllib.error import URLError, HTTPError
@@ -624,7 +624,7 @@ class GoRecipeHandler(RecipeHandler):
licenses = []
lic_files_chksum = []
- licvalues = guess_license(tmp_vendor_dir, d)
+ licvalues = find_licenses(tmp_vendor_dir, d)
shutil.rmtree(tmp_vendor_dir)
if licvalues:
diff --git a/poky/scripts/lib/recipetool/create_npm.py b/poky/scripts/lib/recipetool/create_npm.py
index 113a89f6a6..3363a0e7ee 100644
--- a/poky/scripts/lib/recipetool/create_npm.py
+++ b/poky/scripts/lib/recipetool/create_npm.py
@@ -16,8 +16,7 @@ from bb.fetch2.npm import NpmEnvironment
from bb.fetch2.npm import npm_package
from bb.fetch2.npmsw import foreach_dependencies
from recipetool.create import RecipeHandler
-from recipetool.create import get_license_md5sums
-from recipetool.create import guess_license
+from recipetool.create import match_licenses, find_license_files, generate_common_licenses_chksums
from recipetool.create import split_pkg_licenses
logger = logging.getLogger('recipetool')
@@ -112,40 +111,54 @@ class NpmRecipeHandler(RecipeHandler):
"""Return the extra license files and the list of packages"""
licfiles = []
packages = {}
+ # Licenses from package.json will point to COMMON_LICENSE_DIR so we need
+ # to associate them explicitly to packages for split_pkg_licenses()
+ fallback_licenses = dict()
+
+ def _find_package_licenses(destdir):
+ """Either find license files, or use package.json metadata"""
+ def _get_licenses_from_package_json(package_json):
+ with open(os.path.join(srctree, package_json), "r") as f:
+ data = json.load(f)
+ if "license" in data:
+ licenses = data["license"].split(" ")
+ licenses = [license.strip("()") for license in licenses if license != "OR" and license != "AND"]
+ return [], licenses
+ else:
+ return [package_json], None
- # Handle the parent package
- packages["${PN}"] = ""
-
- def _licfiles_append_fallback_readme_files(destdir):
- """Append README files as fallback to license files if a license files is missing"""
-
- fallback = True
- readmes = []
basedir = os.path.join(srctree, destdir)
- for fn in os.listdir(basedir):
- upper = fn.upper()
- if upper.startswith("README"):
- fullpath = os.path.join(basedir, fn)
- readmes.append(fullpath)
- if upper.startswith("COPYING") or "LICENCE" in upper or "LICENSE" in upper:
- fallback = False
- if fallback:
- for readme in readmes:
- licfiles.append(os.path.relpath(readme, srctree))
+ licfiles = find_license_files(basedir)
+ if len(licfiles) > 0:
+ return licfiles, None
+ else:
+ # A license wasn't found in the package directory, so we'll use the package.json metadata
+ pkg_json = os.path.join(basedir, "package.json")
+ return _get_licenses_from_package_json(pkg_json)
+
+ def _get_package_licenses(destdir, package):
+ (package_licfiles, package_licenses) = _find_package_licenses(destdir)
+ if package_licfiles:
+ licfiles.extend(package_licfiles)
+ else:
+ fallback_licenses[package] = package_licenses
# Handle the dependencies
def _handle_dependency(name, params, destdir):
deptree = destdir.split('node_modules/')
suffix = "-".join([npm_package(dep) for dep in deptree])
packages["${PN}" + suffix] = destdir
- _licfiles_append_fallback_readme_files(destdir)
+ _get_package_licenses(destdir, "${PN}" + suffix)
with open(shrinkwrap_file, "r") as f:
shrinkwrap = json.load(f)
-
foreach_dependencies(shrinkwrap, _handle_dependency, dev)
- return licfiles, packages
+ # Handle the parent package
+ packages["${PN}"] = ""
+ _get_package_licenses(srctree, "${PN}")
+
+ return licfiles, packages, fallback_licenses
# Handle the peer dependencies
def _handle_peer_dependency(self, shrinkwrap_file):
@@ -266,36 +279,12 @@ class NpmRecipeHandler(RecipeHandler):
fetcher.unpack(srctree)
bb.note("Handling licences ...")
- (licfiles, packages) = self._handle_licenses(srctree, shrinkwrap_file, dev)
-
- def _guess_odd_license(licfiles):
- import bb
-
- md5sums = get_license_md5sums(d, linenumbers=True)
-
- chksums = []
- licenses = []
- for licfile in licfiles:
- f = os.path.join(srctree, licfile)
- md5value = bb.utils.md5_file(f)
- (license, beginline, endline, md5) = md5sums.get(md5value,
- (None, "", "", ""))
- if not license:
- license = "Unknown"
- logger.info("Please add the following line for '%s' to a "
- "'lib/recipetool/licenses.csv' and replace `Unknown`, "
- "`X`, `Y` and `MD5` with the license, begin line, "
- "end line and partial MD5 checksum:\n" \
- "%s,Unknown,X,Y,MD5" % (licfile, md5value))
- chksums.append("file://%s%s%s;md5=%s" % (licfile,
- ";beginline=%s" % (beginline) if beginline else "",
- ";endline=%s" % (endline) if endline else "",
- md5 if md5 else md5value))
- licenses.append((license, licfile, md5value))
- return (licenses, chksums)
-
- (licenses, extravalues["LIC_FILES_CHKSUM"]) = _guess_odd_license(licfiles)
- split_pkg_licenses([*licenses, *guess_license(srctree, d)], packages, lines_after)
+ (licfiles, packages, fallback_licenses) = self._handle_licenses(srctree, shrinkwrap_file, dev)
+ licvalues = match_licenses(licfiles, srctree, d)
+ split_pkg_licenses(licvalues, packages, lines_after, fallback_licenses)
+ fallback_licenses_flat = [license for sublist in fallback_licenses.values() for license in sublist]
+ extravalues["LIC_FILES_CHKSUM"] = generate_common_licenses_chksums(fallback_licenses_flat, d)
+ extravalues["LICENSE"] = fallback_licenses_flat
classes.append("npm")
handled.append("buildsystem")
diff --git a/poky/scripts/lib/resulttool/junit.py b/poky/scripts/lib/resulttool/junit.py
new file mode 100644
index 0000000000..c7a53dc550
--- /dev/null
+++ b/poky/scripts/lib/resulttool/junit.py
@@ -0,0 +1,77 @@
+# resulttool - report test results in JUnit XML format
+#
+# Copyright (c) 2024, Siemens AG.
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import os
+import re
+import xml.etree.ElementTree as ET
+import resulttool.resultutils as resultutils
+
+def junit(args, logger):
+ testresults = resultutils.load_resultsdata(args.json_file, configmap=resultutils.store_map)
+
+ total_time = 0
+ skipped = 0
+ failures = 0
+ errors = 0
+
+ for tests in testresults.values():
+ results = tests[next(reversed(tests))].get("result", {})
+
+ for result_id, result in results.items():
+ # filter out ptestresult.rawlogs and ptestresult.sections
+ if re.search(r'\.test_', result_id):
+ total_time += result.get("duration", 0)
+
+ if result['status'] == "FAILED":
+ failures += 1
+ elif result['status'] == "ERROR":
+ errors += 1
+ elif result['status'] == "SKIPPED":
+ skipped += 1
+
+ testsuites_node = ET.Element("testsuites")
+ testsuites_node.set("time", "%s" % total_time)
+ testsuite_node = ET.SubElement(testsuites_node, "testsuite")
+ testsuite_node.set("name", "Testimage")
+ testsuite_node.set("time", "%s" % total_time)
+ testsuite_node.set("tests", "%s" % len(results))
+ testsuite_node.set("failures", "%s" % failures)
+ testsuite_node.set("errors", "%s" % errors)
+ testsuite_node.set("skipped", "%s" % skipped)
+
+ for result_id, result in results.items():
+ if re.search(r'\.test_', result_id):
+ testcase_node = ET.SubElement(testsuite_node, "testcase", {
+ "name": result_id,
+ "classname": "Testimage",
+ "time": str(result['duration'])
+ })
+ if result['status'] == "SKIPPED":
+ ET.SubElement(testcase_node, "skipped", message=result['log'])
+ elif result['status'] == "FAILED":
+ ET.SubElement(testcase_node, "failure", message=result['log'])
+ elif result['status'] == "ERROR":
+ ET.SubElement(testcase_node, "error", message=result['log'])
+
+ tree = ET.ElementTree(testsuites_node)
+
+ if args.junit_xml_path is None:
+ args.junit_xml_path = os.environ['BUILDDIR'] + '/tmp/log/oeqa/junit.xml'
+ tree.write(args.junit_xml_path, encoding='UTF-8', xml_declaration=True)
+
+ logger.info('Saved JUnit XML report as %s' % args.junit_xml_path)
+
+def register_commands(subparsers):
+ """Register subcommands from this plugin"""
+ parser_build = subparsers.add_parser('junit', help='create test report in JUnit XML format',
+ description='generate unit test report in JUnit XML format based on the latest test results in the testresults.json.',
+ group='analysis')
+ parser_build.set_defaults(func=junit)
+ parser_build.add_argument('json_file',
+ help='json file should point to the testresults.json')
+ parser_build.add_argument('-j', '--junit_xml_path',
+ help='junit xml path allows setting the path of the generated test report. The default location is <build_dir>/tmp/log/oeqa/junit.xml')
diff --git a/poky/scripts/lib/resulttool/manualexecution.py b/poky/scripts/lib/resulttool/manualexecution.py
index ecb27c5933..ae0861ac6b 100755
--- a/poky/scripts/lib/resulttool/manualexecution.py
+++ b/poky/scripts/lib/resulttool/manualexecution.py
@@ -22,7 +22,7 @@ def load_json_file(f):
def write_json_file(f, json_data):
os.makedirs(os.path.dirname(f), exist_ok=True)
with open(f, 'w') as filedata:
- filedata.write(json.dumps(json_data, sort_keys=True, indent=4))
+ filedata.write(json.dumps(json_data, sort_keys=True, indent=1))
class ManualTestRunner(object):
diff --git a/poky/scripts/lib/resulttool/report.py b/poky/scripts/lib/resulttool/report.py
index a349510ab8..1c100b00ab 100644
--- a/poky/scripts/lib/resulttool/report.py
+++ b/poky/scripts/lib/resulttool/report.py
@@ -256,7 +256,7 @@ class ResultsTextReport(object):
if selected_test_case_only:
print_selected_testcase_result(raw_results, selected_test_case_only)
else:
- print(json.dumps(raw_results, sort_keys=True, indent=4))
+ print(json.dumps(raw_results, sort_keys=True, indent=1))
else:
print('Could not find raw test result for %s' % raw_test)
return 0
diff --git a/poky/scripts/lib/resulttool/resultutils.py b/poky/scripts/lib/resulttool/resultutils.py
index c5521d81bd..b8fc79a6ac 100644
--- a/poky/scripts/lib/resulttool/resultutils.py
+++ b/poky/scripts/lib/resulttool/resultutils.py
@@ -14,8 +14,11 @@ import scriptpath
import copy
import urllib.request
import posixpath
+import logging
scriptpath.add_oe_lib_path()
+logger = logging.getLogger('resulttool')
+
flatten_map = {
"oeselftest": [],
"runtime": [],
@@ -31,13 +34,19 @@ regression_map = {
"manual": ['TEST_TYPE', 'TEST_MODULE', 'IMAGE_BASENAME', 'MACHINE']
}
store_map = {
- "oeselftest": ['TEST_TYPE'],
+ "oeselftest": ['TEST_TYPE', 'TESTSERIES', 'MACHINE'],
"runtime": ['TEST_TYPE', 'DISTRO', 'MACHINE', 'IMAGE_BASENAME'],
"sdk": ['TEST_TYPE', 'MACHINE', 'SDKMACHINE', 'IMAGE_BASENAME'],
"sdkext": ['TEST_TYPE', 'MACHINE', 'SDKMACHINE', 'IMAGE_BASENAME'],
"manual": ['TEST_TYPE', 'TEST_MODULE', 'MACHINE', 'IMAGE_BASENAME']
}
+rawlog_sections = {
+ "ptestresult.rawlogs": "ptest",
+ "ltpresult.rawlogs": "ltp",
+ "ltpposixresult.rawlogs": "ltpposix"
+}
+
def is_url(p):
"""
Helper for determining if the given path is a URL
@@ -108,21 +117,57 @@ def filter_resultsdata(results, resultid):
newresults[r][i] = results[r][i]
return newresults
-def strip_ptestresults(results):
+def strip_logs(results):
newresults = copy.deepcopy(results)
- #for a in newresults2:
- # newresults = newresults2[a]
for res in newresults:
if 'result' not in newresults[res]:
continue
- if 'ptestresult.rawlogs' in newresults[res]['result']:
- del newresults[res]['result']['ptestresult.rawlogs']
+ for logtype in rawlog_sections:
+ if logtype in newresults[res]['result']:
+ del newresults[res]['result'][logtype]
if 'ptestresult.sections' in newresults[res]['result']:
for i in newresults[res]['result']['ptestresult.sections']:
if 'log' in newresults[res]['result']['ptestresult.sections'][i]:
del newresults[res]['result']['ptestresult.sections'][i]['log']
return newresults
+# For timing numbers, crazy amounts of precision don't make sense and just confuse
+# the logs. For numbers over 1, trim to 3 decimal places, for numbers less than 1,
+# trim to 4 significant digits
+def trim_durations(results):
+ for res in results:
+ if 'result' not in results[res]:
+ continue
+ for entry in results[res]['result']:
+ if 'duration' in results[res]['result'][entry]:
+ duration = results[res]['result'][entry]['duration']
+ if duration > 1:
+ results[res]['result'][entry]['duration'] = float("%.3f" % duration)
+ elif duration < 1:
+ results[res]['result'][entry]['duration'] = float("%.4g" % duration)
+ return results
+
+def handle_cleanups(results):
+ # Remove pointless path duplication from old format reproducibility results
+ for res2 in results:
+ try:
+ section = results[res2]['result']['reproducible']['files']
+ for pkgtype in section:
+ for filelist in section[pkgtype].copy():
+ if section[pkgtype][filelist] and type(section[pkgtype][filelist][0]) == dict:
+ newlist = []
+ for entry in section[pkgtype][filelist]:
+ newlist.append(entry["reference"].split("/./")[1])
+ section[pkgtype][filelist] = newlist
+
+ except KeyError:
+ pass
+ # Remove pointless duplicate rawlogs data
+ try:
+ del results[res2]['result']['reproducible.rawlogs']
+ except KeyError:
+ pass
+
def decode_log(logdata):
if isinstance(logdata, str):
return logdata
@@ -155,9 +200,6 @@ def generic_get_rawlogs(sectname, results):
return None
return decode_log(results[sectname]['log'])
-def ptestresult_get_rawlogs(results):
- return generic_get_rawlogs('ptestresult.rawlogs', results)
-
def save_resultsdata(results, destdir, fn="testresults.json", ptestjson=False, ptestlogs=False):
for res in results:
if res:
@@ -167,16 +209,20 @@ def save_resultsdata(results, destdir, fn="testresults.json", ptestjson=False, p
os.makedirs(os.path.dirname(dst), exist_ok=True)
resultsout = results[res]
if not ptestjson:
- resultsout = strip_ptestresults(results[res])
+ resultsout = strip_logs(results[res])
+ trim_durations(resultsout)
+ handle_cleanups(resultsout)
with open(dst, 'w') as f:
- f.write(json.dumps(resultsout, sort_keys=True, indent=4))
+ f.write(json.dumps(resultsout, sort_keys=True, indent=1))
for res2 in results[res]:
if ptestlogs and 'result' in results[res][res2]:
seriesresults = results[res][res2]['result']
- rawlogs = ptestresult_get_rawlogs(seriesresults)
- if rawlogs is not None:
- with open(dst.replace(fn, "ptest-raw.log"), "w+") as f:
- f.write(rawlogs)
+ for logtype in rawlog_sections:
+ logdata = generic_get_rawlogs(logtype, seriesresults)
+ if logdata is not None:
+ logger.info("Extracting " + rawlog_sections[logtype] + "-raw.log")
+ with open(dst.replace(fn, rawlog_sections[logtype] + "-raw.log"), "w+") as f:
+ f.write(logdata)
if 'ptestresult.sections' in seriesresults:
for i in seriesresults['ptestresult.sections']:
sectionlog = ptestresult_get_log(seriesresults, i)
diff --git a/poky/scripts/lib/resulttool/store.py b/poky/scripts/lib/resulttool/store.py
index e0951f0a8f..578910d234 100644
--- a/poky/scripts/lib/resulttool/store.py
+++ b/poky/scripts/lib/resulttool/store.py
@@ -65,18 +65,34 @@ def store(args, logger):
for r in revisions:
results = revisions[r]
+ if args.revision and r[0] != args.revision:
+ logger.info('skipping %s as non-matching' % r[0])
+ continue
keywords = {'commit': r[0], 'branch': r[1], "commit_count": r[2]}
- subprocess.check_call(["find", tempdir, "!", "-path", "./.git/*", "-delete"])
+ subprocess.check_call(["find", tempdir, "-name", "testresults.json", "!", "-path", "./.git/*", "-delete"])
resultutils.save_resultsdata(results, tempdir, ptestlogs=True)
logger.info('Storing test result into git repository %s' % args.git_dir)
- gitarchive.gitarchive(tempdir, args.git_dir, False, False,
+ excludes = []
+ if args.logfile_archive:
+ excludes = ['*.log', "*.log.zst"]
+
+ tagname = gitarchive.gitarchive(tempdir, args.git_dir, False, False,
"Results of {branch}:{commit}", "branch: {branch}\ncommit: {commit}", "{branch}",
False, "{branch}/{commit_count}-g{commit}/{tag_number}",
'Test run #{tag_number} of {branch}:{commit}', '',
- [], [], False, keywords, logger)
+ excludes, [], False, keywords, logger)
+ if args.logfile_archive:
+ logdir = args.logfile_archive + "/" + tagname
+ shutil.copytree(tempdir, logdir)
+ for root, dirs, files in os.walk(logdir):
+ for name in files:
+ if not name.endswith(".log"):
+ continue
+ f = os.path.join(root, name)
+ subprocess.run(["zstd", f, "--rm"], check=True, capture_output=True)
finally:
subprocess.check_call(["rm", "-rf", tempdir])
@@ -102,3 +118,7 @@ def register_commands(subparsers):
help='add executed-by configuration to each result file')
parser_build.add_argument('-t', '--extra-test-env', default='',
help='add extra test environment data to each result file configuration')
+ parser_build.add_argument('-r', '--revision', default='',
+ help='only store data for the specified revision')
+ parser_build.add_argument('-l', '--logfile-archive', default='',
+ help='directory to separately archive log files along with a copy of the results')
diff --git a/poky/scripts/lib/wic/canned-wks/efi-bootdisk.wks.in b/poky/scripts/lib/wic/canned-wks/efi-bootdisk.wks.in
index 2fd286ff98..5211972955 100644
--- a/poky/scripts/lib/wic/canned-wks/efi-bootdisk.wks.in
+++ b/poky/scripts/lib/wic/canned-wks/efi-bootdisk.wks.in
@@ -1,3 +1,3 @@
bootloader --ptable gpt
-part /boot --source rootfs --rootfs-dir=${IMAGE_ROOTFS}/boot --fstype=vfat --label boot --active --align 1024 --use-uuid --overhead-factor 1.1
+part /boot --source rootfs --rootfs-dir=${IMAGE_ROOTFS}/boot --fstype=vfat --label boot --active --align 1024 --use-uuid --overhead-factor 1.2
part / --source rootfs --fstype=ext4 --label root --align 1024 --exclude-path boot/
diff --git a/poky/scripts/lib/wic/plugins/source/bootimg-partition.py b/poky/scripts/lib/wic/plugins/source/bootimg-partition.py
index 1071d1af3f..589853a439 100644
--- a/poky/scripts/lib/wic/plugins/source/bootimg-partition.py
+++ b/poky/scripts/lib/wic/plugins/source/bootimg-partition.py
@@ -16,7 +16,7 @@ import logging
import os
import re
-from glob import glob
+from oe.bootfiles import get_boot_files
from wic import WicError
from wic.engine import get_custom_config
@@ -66,42 +66,7 @@ class BootimgPartitionPlugin(SourcePlugin):
logger.debug('Boot files: %s', boot_files)
- # list of tuples (src_name, dst_name)
- deploy_files = []
- for src_entry in re.findall(r'[\w;\-\./\*]+', boot_files):
- if ';' in src_entry:
- dst_entry = tuple(src_entry.split(';'))
- if not dst_entry[0] or not dst_entry[1]:
- raise WicError('Malformed boot file entry: %s' % src_entry)
- else:
- dst_entry = (src_entry, src_entry)
-
- logger.debug('Destination entry: %r', dst_entry)
- deploy_files.append(dst_entry)
-
- cls.install_task = [];
- for deploy_entry in deploy_files:
- src, dst = deploy_entry
- if '*' in src:
- # by default install files under their basename
- entry_name_fn = os.path.basename
- if dst != src:
- # unless a target name was given, then treat name
- # as a directory and append a basename
- entry_name_fn = lambda name: \
- os.path.join(dst,
- os.path.basename(name))
-
- srcs = glob(os.path.join(kernel_dir, src))
-
- logger.debug('Globbed sources: %s', ', '.join(srcs))
- for entry in srcs:
- src = os.path.relpath(entry, kernel_dir)
- entry_dst_name = entry_name_fn(entry)
- cls.install_task.append((src, entry_dst_name))
- else:
- cls.install_task.append((src, dst))
-
+ cls.install_task = get_boot_files(kernel_dir, boot_files)
if source_params.get('loader') != "u-boot":
return