Diffstat (limited to 'poky/bitbake/lib/bb')
-rw-r--r--  poky/bitbake/lib/bb/__init__.py        | 14
-rw-r--r--  poky/bitbake/lib/bb/cooker.py          |  9
-rw-r--r--  poky/bitbake/lib/bb/fetch2/gcp.py      |  1
-rw-r--r--  poky/bitbake/lib/bb/fetch2/wget.py     | 25
-rw-r--r--  poky/bitbake/lib/bb/parse/__init__.py  | 12
-rw-r--r--  poky/bitbake/lib/bb/runqueue.py        | 99
-rw-r--r--  poky/bitbake/lib/bb/siggen.py          | 11
-rw-r--r--  poky/bitbake/lib/bb/tests/fetch.py     |  8
8 files changed, 122 insertions, 57 deletions
diff --git a/poky/bitbake/lib/bb/__init__.py b/poky/bitbake/lib/bb/__init__.py
index 15013540c2..cdec9e4d6c 100644
--- a/poky/bitbake/lib/bb/__init__.py
+++ b/poky/bitbake/lib/bb/__init__.py
@@ -9,7 +9,7 @@
 # SPDX-License-Identifier: GPL-2.0-only
 #
 
-__version__ = "2.9.0"
+__version__ = "2.8.0"
 
 import sys
 if sys.version_info < (3, 8, 0):
@@ -36,6 +36,7 @@ class BBHandledException(Exception):
 
 import os
 import logging
+from collections import namedtuple
 
 
 class NullHandler(logging.Handler):
@@ -227,3 +228,14 @@ def deprecate_import(current, modulename, fromlist, renames = None):
 
         setattr(sys.modules[current], newname, newobj)
 
+TaskData = namedtuple("TaskData", [
+    "pn",
+    "taskname",
+    "fn",
+    "deps",
+    "provides",
+    "taskhash",
+    "unihash",
+    "hashfn",
+    "taskhash_deps",
+])
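The new bb.TaskData namedtuple gives taskdepdata entries named fields in place of the bare lists indexed as entry[3] or entry[6] in the runqueue.py hunks below. A minimal standalone sketch of the difference, using only the stdlib and invented sample values:

    from collections import namedtuple

    TaskData = namedtuple("TaskData", [
        "pn", "taskname", "fn", "deps", "provides",
        "taskhash", "unihash", "hashfn", "taskhash_deps",
    ])

    # Hypothetical entry standing in for what runqueue.py builds per task
    entry = TaskData(pn="zlib", taskname="do_compile", fn="zlib_1.3.bb",
                     deps=set(), provides=["zlib"], taskhash="abc",
                     unihash="abc", hashfn="...", taskhash_deps=set())

    print(entry.unihash)            # named access replaces entry[6]

    # namedtuples are immutable, so the unihash update in runqueue.py
    # goes through _replace() and rebinds the entry
    entry = entry._replace(unihash="def")
    assert entry.unihash == "def"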
diff --git a/poky/bitbake/lib/bb/cooker.py b/poky/bitbake/lib/bb/cooker.py
index c5bfef55d6..6318ef4a8f 100644
--- a/poky/bitbake/lib/bb/cooker.py
+++ b/poky/bitbake/lib/bb/cooker.py
@@ -315,13 +315,13 @@ class BBCooker:
         dbfile = (self.data.getVar("PERSISTENT_DIR") or self.data.getVar("CACHE")) + "/hashserv.db"
         upstream = self.data.getVar("BB_HASHSERVE_UPSTREAM") or None
         if upstream:
-            import socket
             try:
-                sock = socket.create_connection(upstream.split(":"), 5)
-                sock.close()
-            except socket.error as e:
+                with hashserv.create_client(upstream) as client:
+                    client.ping()
+            except (ConnectionError, ImportError) as e:
                 bb.warn("BB_HASHSERVE_UPSTREAM is not valid, unable to connect hash equivalence server at '%s': %s"
                         % (upstream, repr(e)))
+                upstream = None
 
         self.hashservaddr = "unix://%s/hashserve.sock" % self.data.getVar("TOPDIR")
         self.hashserv = hashserv.create_server(
@@ -1459,7 +1459,6 @@ class BBCooker:
                 if t in task or getAllTaskSignatures:
                     try:
-                        rq.rqdata.prepare_task_hash(tid)
                         sig.append([pn, t, rq.rqdata.get_task_unihash(tid)])
                     except KeyError:
                         sig.append(self.getTaskSignatures(target, [t])[0])
diff --git a/poky/bitbake/lib/bb/fetch2/gcp.py b/poky/bitbake/lib/bb/fetch2/gcp.py
index f40ce2eaa5..eb3e0c6a6b 100644
--- a/poky/bitbake/lib/bb/fetch2/gcp.py
+++ b/poky/bitbake/lib/bb/fetch2/gcp.py
@@ -23,6 +23,7 @@ import urllib.parse, urllib.error
 from bb.fetch2 import FetchMethod
 from bb.fetch2 import FetchError
 from bb.fetch2 import logger
+from bb.fetch2 import runfetchcmd
 
 class GCP(FetchMethod):
     """
diff --git a/poky/bitbake/lib/bb/fetch2/wget.py b/poky/bitbake/lib/bb/fetch2/wget.py
index fbfa6938ac..2e92117634 100644
--- a/poky/bitbake/lib/bb/fetch2/wget.py
+++ b/poky/bitbake/lib/bb/fetch2/wget.py
@@ -108,7 +108,8 @@ class Wget(FetchMethod):
 
         fetchcmd = self.basecmd
 
-        localpath = os.path.join(d.getVar("DL_DIR"), ud.localfile) + ".tmp"
+        dldir = os.path.realpath(d.getVar("DL_DIR"))
+        localpath = os.path.join(dldir, ud.localfile) + ".tmp"
         bb.utils.mkdirhier(os.path.dirname(localpath))
         fetchcmd += " -O %s" % shlex.quote(localpath)
 
@@ -128,12 +129,21 @@ class Wget(FetchMethod):
             uri = ud.url.split(";")[0]
             if os.path.exists(ud.localpath):
                 # file exists, but we didnt complete it.. trying again..
-                fetchcmd += d.expand(" -c -P ${DL_DIR} '%s'" % uri)
+                fetchcmd += " -c -P " + dldir + " '" + uri + "'"
             else:
-                fetchcmd += d.expand(" -P ${DL_DIR} '%s'" % uri)
+                fetchcmd += " -P " + dldir + " '" + uri + "'"
 
         self._runwget(ud, d, fetchcmd, False)
 
+        # Sanity check since wget can pretend it succeed when it didn't
+        # Also, this used to happen if sourceforge sent us to the mirror page
+        if not os.path.exists(localpath):
+            raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, localpath), uri)
+
+        if os.path.getsize(localpath) == 0:
+            os.remove(localpath)
+            raise FetchError("The fetch of %s resulted in a zero size file?! Deleting and failing since this isn't right." % (uri), uri)
+
         # Try and verify any checksum now, meaning if it isn't correct, we don't remove the
         # original file, which might be a race (imagine two recipes referencing the same
         # source, one with an incorrect checksum)
@@ -143,15 +153,6 @@ class Wget(FetchMethod):
         # Our lock prevents multiple writers but mirroring code may grab incomplete files
         os.rename(localpath, localpath[:-4])
 
-        # Sanity check since wget can pretend it succeed when it didn't
-        # Also, this used to happen if sourceforge sent us to the mirror page
-        if not os.path.exists(ud.localpath):
-            raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, ud.localpath), uri)
-
-        if os.path.getsize(ud.localpath) == 0:
-            os.remove(ud.localpath)
-            raise FetchError("The fetch of %s resulted in a zero size file?! Deleting and failing since this isn't right." % (uri), uri)
-
         return True
 
     def checkstatus(self, fetch, ud, d, try_again=True):
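The wget.py change downloads to a ".tmp" name and runs the existence and zero-size checks against that temporary path, before the rename into DL_DIR, so a bogus wget "success" can never leave a bad file at the final location. A minimal sketch of the pattern with generic names (not the bitbake fetcher API):

    import os

    def finalize_download(tmppath):
        # Validate the temporary file first; only a sane result may
        # claim the final name that other readers (e.g. mirroring
        # code) might pick up.
        if not os.path.exists(tmppath):
            raise RuntimeError("fetch reported success but %s doesn't exist" % tmppath)
        if os.path.getsize(tmppath) == 0:
            os.remove(tmppath)
            raise RuntimeError("fetch produced a zero size file at %s" % tmppath)
        os.rename(tmppath, tmppath[:-4])   # strip the ".tmp" suffix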
diff --git a/poky/bitbake/lib/bb/parse/__init__.py b/poky/bitbake/lib/bb/parse/__init__.py
index a4358f1374..7ffdaa6fd7 100644
--- a/poky/bitbake/lib/bb/parse/__init__.py
+++ b/poky/bitbake/lib/bb/parse/__init__.py
@@ -49,20 +49,23 @@ class SkipPackage(SkipRecipe):
 __mtime_cache = {}
 def cached_mtime(f):
     if f not in __mtime_cache:
-        __mtime_cache[f] = os.stat(f)[stat.ST_MTIME]
+        res = os.stat(f)
+        __mtime_cache[f] = (res.st_mtime_ns, res.st_size, res.st_ino)
     return __mtime_cache[f]
 
 def cached_mtime_noerror(f):
     if f not in __mtime_cache:
         try:
-            __mtime_cache[f] = os.stat(f)[stat.ST_MTIME]
+            res = os.stat(f)
+            __mtime_cache[f] = (res.st_mtime_ns, res.st_size, res.st_ino)
         except OSError:
             return 0
     return __mtime_cache[f]
 
 def check_mtime(f, mtime):
     try:
-        current_mtime = os.stat(f)[stat.ST_MTIME]
+        res = os.stat(f)
+        current_mtime = (res.st_mtime_ns, res.st_size, res.st_ino)
         __mtime_cache[f] = current_mtime
     except OSError:
         current_mtime = 0
@@ -70,7 +73,8 @@ def check_mtime(f, mtime):
 
 def update_mtime(f):
     try:
-        __mtime_cache[f] = os.stat(f)[stat.ST_MTIME]
+        res = os.stat(f)
+        __mtime_cache[f] = (res.st_mtime_ns, res.st_size, res.st_ino)
     except OSError:
         if f in __mtime_cache:
             del __mtime_cache[f]
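parse/__init__.py replaces the whole-second ST_MTIME value with a (st_mtime_ns, st_size, st_ino) triple, so the cache also notices edits that land within the same second and files swapped for a different inode. A small stdlib-only demonstration of why the triple is the stronger change detector:

    import os, tempfile

    def stat_id(path):
        # The same triple the parse cache now stores
        res = os.stat(path)
        return (res.st_mtime_ns, res.st_size, res.st_ino)

    with tempfile.NamedTemporaryFile("w", delete=False) as f:
        f.write("one")
    path = f.name
    before = stat_id(path)
    with open(path, "w") as f:       # rewrite within the same second
        f.write("two!")
    print(before != stat_id(path))   # True: size and ns-mtime change even
                                     # though the whole-second mtime may not
    os.remove(path)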
diff --git a/poky/bitbake/lib/bb/runqueue.py b/poky/bitbake/lib/bb/runqueue.py
index bc7e18175d..93079a9776 100644
--- a/poky/bitbake/lib/bb/runqueue.py
+++ b/poky/bitbake/lib/bb/runqueue.py
@@ -1273,27 +1273,41 @@ class RunQueueData:
 
         bb.parse.siggen.set_setscene_tasks(self.runq_setscene_tids)
 
+        starttime = time.time()
+        lasttime = starttime
+
         # Iterate over the task list and call into the siggen code
         dealtwith = set()
         todeal = set(self.runtaskentries)
         while todeal:
+            ready = set()
             for tid in todeal.copy():
                 if not (self.runtaskentries[tid].depends - dealtwith):
-                    dealtwith.add(tid)
-                    todeal.remove(tid)
-                    self.prepare_task_hash(tid)
-                    bb.event.check_for_interrupts(self.cooker.data)
+                    self.runtaskentries[tid].taskhash_deps = bb.parse.siggen.prep_taskhash(tid, self.runtaskentries[tid].depends, self.dataCaches)
+                    # get_taskhash for a given tid *must* be called before get_unihash* below
+                    self.runtaskentries[tid].hash = bb.parse.siggen.get_taskhash(tid, self.runtaskentries[tid].depends, self.dataCaches)
+                    ready.add(tid)
+            unihashes = bb.parse.siggen.get_unihashes(ready)
+            for tid in ready:
+                dealtwith.add(tid)
+                todeal.remove(tid)
+                self.runtaskentries[tid].unihash = unihashes[tid]
+
+            bb.event.check_for_interrupts(self.cooker.data)
+
+            if time.time() > (lasttime + 30):
+                lasttime = time.time()
+                hashequiv_logger.verbose("Initial setup loop progress: %s of %s in %s" % (len(todeal), len(self.runtaskentries), lasttime - starttime))
+
+        endtime = time.time()
+        if (endtime-starttime > 60):
+            hashequiv_logger.verbose("Initial setup loop took: %s" % (endtime-starttime))
 
         bb.parse.siggen.writeout_file_checksum_cache()
 
         #self.dump_data()
         return len(self.runtaskentries)
 
-    def prepare_task_hash(self, tid):
-        bb.parse.siggen.prep_taskhash(tid, self.runtaskentries[tid].depends, self.dataCaches)
-        self.runtaskentries[tid].hash = bb.parse.siggen.get_taskhash(tid, self.runtaskentries[tid].depends, self.dataCaches)
-        self.runtaskentries[tid].unihash = bb.parse.siggen.get_unihash(tid)
-
     def dump_data(self):
         """
         Dump some debug information on the internal data structures
@@ -2438,14 +2452,17 @@ class RunQueueExecute:
             taskdepdata_cache = {}
             for task in self.rqdata.runtaskentries:
                 (mc, fn, taskname, taskfn) = split_tid_mcfn(task)
-                pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn]
-                deps = self.rqdata.runtaskentries[task].depends
-                provides = self.rqdata.dataCaches[mc].fn_provides[taskfn]
-                taskhash = self.rqdata.runtaskentries[task].hash
-                unihash = self.rqdata.runtaskentries[task].unihash
-                deps = self.filtermcdeps(task, mc, deps)
-                hashfn = self.rqdata.dataCaches[mc].hashfn[taskfn]
-                taskdepdata_cache[task] = [pn, taskname, fn, deps, provides, taskhash, unihash, hashfn]
+                taskdepdata_cache[task] = bb.TaskData(
+                    pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn],
+                    taskname = taskname,
+                    fn = fn,
+                    deps = self.filtermcdeps(task, mc, self.rqdata.runtaskentries[task].depends),
+                    provides = self.rqdata.dataCaches[mc].fn_provides[taskfn],
+                    taskhash = self.rqdata.runtaskentries[task].hash,
+                    unihash = self.rqdata.runtaskentries[task].unihash,
+                    hashfn = self.rqdata.dataCaches[mc].hashfn[taskfn],
+                    taskhash_deps = self.rqdata.runtaskentries[task].taskhash_deps,
+                )
 
             self.taskdepdata_cache = taskdepdata_cache
 
@@ -2460,9 +2477,11 @@ class RunQueueExecute:
         while next:
             additional = []
             for revdep in next:
-                self.taskdepdata_cache[revdep][6] = self.rqdata.runtaskentries[revdep].unihash
+                self.taskdepdata_cache[revdep] = self.taskdepdata_cache[revdep]._replace(
+                    unihash=self.rqdata.runtaskentries[revdep].unihash
+                )
                 taskdepdata[revdep] = self.taskdepdata_cache[revdep]
-                for revdep2 in self.taskdepdata_cache[revdep][3]:
+                for revdep2 in self.taskdepdata_cache[revdep].deps:
                     if revdep2 not in taskdepdata:
                         additional.append(revdep2)
             next = additional
@@ -2556,17 +2575,28 @@ class RunQueueExecute:
             elif self.rqdata.runtaskentries[p].depends.isdisjoint(total):
                 next.add(p)
 
+        starttime = time.time()
+        lasttime = starttime
+
         # When an item doesn't have dependencies in total, we can process it. Drop items from total when handled
         while next:
             current = next.copy()
             next = set()
+            ready = {}
             for tid in current:
                 if self.rqdata.runtaskentries[p].depends and not self.rqdata.runtaskentries[tid].depends.isdisjoint(total):
                     continue
+                # get_taskhash for a given tid *must* be called before get_unihash* below
+                ready[tid] = bb.parse.siggen.get_taskhash(tid, self.rqdata.runtaskentries[tid].depends, self.rqdata.dataCaches)
+
+            unihashes = bb.parse.siggen.get_unihashes(ready.keys())
+
+            for tid in ready:
                 orighash = self.rqdata.runtaskentries[tid].hash
-                newhash = bb.parse.siggen.get_taskhash(tid, self.rqdata.runtaskentries[tid].depends, self.rqdata.dataCaches)
+                newhash = ready[tid]
                 origuni = self.rqdata.runtaskentries[tid].unihash
-                newuni = bb.parse.siggen.get_unihash(tid)
+                newuni = unihashes[tid]
+
                 # FIXME, need to check it can come from sstate at all for determinism?
                 remapped = False
@@ -2587,6 +2617,15 @@ class RunQueueExecute:
                 next |= self.rqdata.runtaskentries[tid].revdeps
                 total.remove(tid)
                 next.intersection_update(total)
+            bb.event.check_for_interrupts(self.cooker.data)
+
+            if time.time() > (lasttime + 30):
+                lasttime = time.time()
+                hashequiv_logger.verbose("Rehash loop slow progress: %s in %s" % (len(total), lasttime - starttime))
+
+        endtime = time.time()
+        if (endtime-starttime > 60):
+            hashequiv_logger.verbose("Rehash loop took more than 60s: %s" % (endtime-starttime))
 
         if changed:
             for mc in self.rq.worker:
@@ -2806,13 +2845,19 @@ class RunQueueExecute:
             additional = []
             for revdep in next:
                 (mc, fn, taskname, taskfn) = split_tid_mcfn(revdep)
-                pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn]
                 deps = getsetscenedeps(revdep)
-                provides = self.rqdata.dataCaches[mc].fn_provides[taskfn]
-                taskhash = self.rqdata.runtaskentries[revdep].hash
-                unihash = self.rqdata.runtaskentries[revdep].unihash
-                hashfn = self.rqdata.dataCaches[mc].hashfn[taskfn]
-                taskdepdata[revdep] = [pn, taskname, fn, deps, provides, taskhash, unihash, hashfn]
+
+                taskdepdata[revdep] = bb.TaskData(
+                    pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn],
+                    taskname = taskname,
+                    fn = fn,
+                    deps = deps,
+                    provides = self.rqdata.dataCaches[mc].fn_provides[taskfn],
+                    taskhash = self.rqdata.runtaskentries[revdep].hash,
+                    unihash = self.rqdata.runtaskentries[revdep].unihash,
+                    hashfn = self.rqdata.dataCaches[mc].hashfn[taskfn],
+                    taskhash_deps = self.rqdata.runtaskentries[revdep].taskhash_deps,
+                )
                 for revdep2 in deps:
                     if revdep2 not in taskdepdata:
                         additional.append(revdep2)
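Both runqueue loops now gather every task whose dependencies are resolved into a ready set, compute their taskhashes, and then resolve unihashes with a single get_unihashes() call per iteration: one server round trip per dependency wave instead of one per task. A toy sketch of that wave pattern (invented graph and lookup, not the bitbake API):

    def process_in_waves(deps, batch_lookup):
        # deps: {task: set of prerequisite tasks}
        # batch_lookup: takes an iterable of ready tasks and returns
        # {task: result} in one round trip (stand-in for get_unihashes)
        done = {}
        todeal = set(deps)
        while todeal:
            ready = {t for t in todeal if deps[t] <= set(done)}
            if not ready:
                raise ValueError("dependency cycle")
            done.update(batch_lookup(ready))   # one query per wave
            todeal -= ready
        return done

    graph = {"a": set(), "b": {"a"}, "c": {"a"}, "d": {"b", "c"}}
    print(process_in_waves(graph, lambda ts: {t: "hash-" + t for t in ts}))
    # waves: {a}, then {b, c} queried together, then {d}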
diff --git a/poky/bitbake/lib/bb/siggen.py b/poky/bitbake/lib/bb/siggen.py
index 8ab08ec961..65ca0811d5 100644
--- a/poky/bitbake/lib/bb/siggen.py
+++ b/poky/bitbake/lib/bb/siggen.py
@@ -381,7 +381,7 @@ class SignatureGeneratorBasic(SignatureGenerator):
                 self.taints[tid] = taint
                 logger.warning("%s is tainted from a forced run" % tid)
 
-        return
+        return set(dep for _, dep in self.runtaskdeps[tid])
 
     def get_taskhash(self, tid, deps, dataCaches):
 
@@ -726,10 +726,13 @@ class SignatureGeneratorUniHashMixIn(object):
             return result
 
         if self.max_parallel <= 1 or len(queries) <= 1:
-            # No parallelism required. Make the query serially with the single client
+            # No parallelism required. Make the query using a single client
             with self.client() as client:
-                for tid, args in queries.items():
-                    query_result[tid] = client.get_unihash(*args)
+                keys = list(queries.keys())
+                unihashes = client.get_unihash_batch(queries[k] for k in keys)
+
+                for idx, k in enumerate(keys):
+                    query_result[k] = unihashes[idx]
         else:
             with self.client_pool() as client_pool:
                 query_result = client_pool.get_unihashes(queries)
diff --git a/poky/bitbake/lib/bb/tests/fetch.py b/poky/bitbake/lib/bb/tests/fetch.py
index 85c1f79ff3..33cc9bcac6 100644
--- a/poky/bitbake/lib/bb/tests/fetch.py
+++ b/poky/bitbake/lib/bb/tests/fetch.py
@@ -1421,7 +1421,7 @@ class FetchLatestVersionTest(FetcherTest):
         # combination version pattern
         ("sysprof", "git://gitlab.gnome.org/GNOME/sysprof.git;protocol=https;branch=master", "cd44ee6644c3641507fb53b8a2a69137f2971219", "", "")
             : "1.2.0",
-        ("u-boot-mkimage", "git://git.denx.de/u-boot.git;branch=master;protocol=git", "62c175fbb8a0f9a926c88294ea9f7e88eb898f6c", "", "")
+        ("u-boot-mkimage", "git://source.denx.de/u-boot/u-boot.git;branch=master;protocol=https", "62c175fbb8a0f9a926c88294ea9f7e88eb898f6c", "", "")
            : "2014.01",
         # version pattern "yyyymmdd"
         ("mobile-broadband-provider-info", "git://gitlab.gnome.org/GNOME/mobile-broadband-provider-info.git;protocol=https;branch=master", "4ed19e11c2975105b71b956440acdb25d46a347d", "", "")
@@ -1511,7 +1511,7 @@ class FetchLatestVersionTest(FetcherTest):
 
     def test_wget_latest_versionstring(self):
         testdata = os.path.dirname(os.path.abspath(__file__)) + "/fetch-testdata"
-        server = HTTPService(testdata)
+        server = HTTPService(testdata, host="127.0.0.1")
         server.start()
         port = server.port
         try:
@@ -1519,10 +1519,10 @@ class FetchLatestVersionTest(FetcherTest):
                 self.d.setVar("PN", k[0])
                 checkuri = ""
                 if k[2]:
-                    checkuri = "http://localhost:%s/" % port + k[2]
+                    checkuri = "http://127.0.0.1:%s/" % port + k[2]
                 self.d.setVar("UPSTREAM_CHECK_URI", checkuri)
                 self.d.setVar("UPSTREAM_CHECK_REGEX", k[3])
-                url = "http://localhost:%s/" % port + k[1]
+                url = "http://127.0.0.1:%s/" % port + k[1]
                 ud = bb.fetch2.FetchData(url, self.d)
                 pupver = ud.method.latest_versionstring(ud, self.d)
                 verstring = pupver[0]
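The siggen.py hunk shows the marshalling a batched query needs: freeze an ordering of the query keys, issue one batch call, then zip the positional results back to their tids. A generic sketch of the pattern (batch_call stands in for client.get_unihash_batch, which returns one result per request, in request order):

    def batched_query(queries, batch_call):
        # queries: {tid: args}; batch_call returns one result per args,
        # in the same order the args were supplied
        keys = list(queries.keys())
        results = batch_call([queries[k] for k in keys])
        return {k: r for k, r in zip(keys, results)}

    fake_server = lambda batch: ["unihash-for-" + str(args) for args in batch]
    print(batched_query({"t1": ("method", "h1"), "t2": ("method", "h2")}, fake_server))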