Diffstat (limited to 'poky/bitbake/lib/bb')
-rw-r--r--  poky/bitbake/lib/bb/build.py                     |  17
-rw-r--r--  poky/bitbake/lib/bb/cache.py                     |  23
-rw-r--r--  poky/bitbake/lib/bb/codeparser.py                |  46
-rw-r--r--  poky/bitbake/lib/bb/cooker.py                    |  42
-rw-r--r--  poky/bitbake/lib/bb/cookerdata.py                |  13
-rw-r--r--  poky/bitbake/lib/bb/data.py                      |   2
-rw-r--r--  poky/bitbake/lib/bb/data_smart.py                |   8
-rw-r--r--  poky/bitbake/lib/bb/event.py                     |   9
-rw-r--r--  poky/bitbake/lib/bb/exceptions.py                |  96
-rw-r--r--  poky/bitbake/lib/bb/fetch2/__init__.py           |  69
-rw-r--r--  poky/bitbake/lib/bb/fetch2/clearcase.py          |   4
-rw-r--r--  poky/bitbake/lib/bb/fetch2/gcp.py                |  14
-rw-r--r--  poky/bitbake/lib/bb/fetch2/git.py                |  81
-rw-r--r--  poky/bitbake/lib/bb/fetch2/gitsm.py              |  44
-rw-r--r--  poky/bitbake/lib/bb/fetch2/gomod.py              | 268
-rw-r--r--  poky/bitbake/lib/bb/fetch2/npm.py                |   7
-rw-r--r--  poky/bitbake/lib/bb/fetch2/npmsw.py              |   3
-rw-r--r--  poky/bitbake/lib/bb/fetch2/wget.py               |  11
-rw-r--r--  poky/bitbake/lib/bb/msg.py                       |   4
-rw-r--r--  poky/bitbake/lib/bb/parse/ast.py                 |  26
-rw-r--r--  poky/bitbake/lib/bb/parse/parse_py/BBHandler.py  |  43
-rw-r--r--  poky/bitbake/lib/bb/persist_data.py              |   1
-rw-r--r--  poky/bitbake/lib/bb/runqueue.py                  |  22
-rw-r--r--  poky/bitbake/lib/bb/siggen.py                    |   2
-rw-r--r--  poky/bitbake/lib/bb/tests/fetch.py               | 226
-rw-r--r--  poky/bitbake/lib/bb/tests/parse.py               |  27
-rw-r--r--  poky/bitbake/lib/bb/ui/knotty.py                 |  20
-rw-r--r--  poky/bitbake/lib/bb/ui/teamcity.py               |   5
28 files changed, 795 insertions, 338 deletions
diff --git a/poky/bitbake/lib/bb/build.py b/poky/bitbake/lib/bb/build.py
index ab8bce3d57..9f9285de3d 100644
--- a/poky/bitbake/lib/bb/build.py
+++ b/poky/bitbake/lib/bb/build.py
@@ -743,7 +743,7 @@ def _exec_task(fn, task, d, quieterr):
if quieterr:
if not handled:
- logger.warning(repr(exc))
+ logger.warning(str(exc))
event.fire(TaskFailedSilent(task, fn, logfn, localdata), localdata)
else:
errprinted = errchk.triggered
@@ -752,7 +752,7 @@ def _exec_task(fn, task, d, quieterr):
if verboseStdoutLogging or handled:
errprinted = True
if not handled:
- logger.error(repr(exc))
+ logger.error(str(exc))
event.fire(TaskFailed(task, fn, logfn, localdata, errprinted), localdata)
return 1
@@ -932,9 +932,13 @@ def add_tasks(tasklist, d):
# don't assume holding a reference
d.setVar('_task_deps', task_deps)
+def ensure_task_prefix(name):
+ if name[:3] != "do_":
+ name = "do_" + name
+ return name
+
def addtask(task, before, after, d):
- if task[:3] != "do_":
- task = "do_" + task
+ task = ensure_task_prefix(task)
d.setVarFlag(task, "task", 1)
bbtasks = d.getVar('__BBTASKS', False) or []
@@ -946,19 +950,20 @@ def addtask(task, before, after, d):
if after is not None:
# set up deps for function
for entry in after.split():
+ entry = ensure_task_prefix(entry)
if entry not in existing:
existing.append(entry)
d.setVarFlag(task, "deps", existing)
if before is not None:
# set up things that depend on this func
for entry in before.split():
+ entry = ensure_task_prefix(entry)
existing = d.getVarFlag(entry, "deps", False) or []
if task not in existing:
d.setVarFlag(entry, "deps", [task] + existing)
def deltask(task, d):
- if task[:3] != "do_":
- task = "do_" + task
+ task = ensure_task_prefix(task)
bbtasks = d.getVar('__BBTASKS', False) or []
if task in bbtasks:
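
The hunk above factors the "do_" normalization into ensure_task_prefix() and, crucially, also applies it to the entries of the before/after lists, so dependency edges resolve identically whichever spelling a recipe uses. A minimal illustration of the helper:

    def ensure_task_prefix(name):
        """Normalize a task name to its canonical "do_" form."""
        if name[:3] != "do_":
            name = "do_" + name
        return name

    # "addtask compile after configure" and "addtask do_compile after
    # do_configure" now produce the same dependency edge:
    assert ensure_task_prefix("compile") == "do_compile"
    assert ensure_task_prefix("do_compile") == "do_compile"
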
diff --git a/poky/bitbake/lib/bb/cache.py b/poky/bitbake/lib/bb/cache.py
index 18d5574a31..958652e0e3 100644
--- a/poky/bitbake/lib/bb/cache.py
+++ b/poky/bitbake/lib/bb/cache.py
@@ -28,7 +28,7 @@ import shutil
logger = logging.getLogger("BitBake.Cache")
-__cache_version__ = "155"
+__cache_version__ = "156"
def getCacheFile(path, filename, mc, data_hash):
mcspec = ''
@@ -441,7 +441,7 @@ class Cache(object):
else:
symlink = os.path.join(self.cachedir, "bb_cache.dat")
- if os.path.exists(symlink):
+ if os.path.exists(symlink) or os.path.islink(symlink):
bb.utils.remove(symlink)
try:
os.symlink(os.path.basename(self.cachefile), symlink)
@@ -779,25 +779,6 @@ class MulticonfigCache(Mapping):
for k in self.__caches:
yield k
-def init(cooker):
- """
- The Objective: Cache the minimum amount of data possible yet get to the
- stage of building packages (i.e. tryBuild) without reparsing any .bb files.
-
- To do this, we intercept getVar calls and only cache the variables we see
- being accessed. We rely on the cache getVar calls being made for all
- variables bitbake might need to use to reach this stage. For each cached
- file we need to track:
-
- * Its mtime
- * The mtimes of all its dependencies
- * Whether it caused a parse.SkipRecipe exception
-
- Files causing parsing errors are evicted from the cache.
-
- """
- return Cache(cooker.configuration.data, cooker.configuration.data_hash)
-
class CacheData(object):
"""
diff --git a/poky/bitbake/lib/bb/codeparser.py b/poky/bitbake/lib/bb/codeparser.py
index 691bdff75e..d249af326e 100644
--- a/poky/bitbake/lib/bb/codeparser.py
+++ b/poky/bitbake/lib/bb/codeparser.py
@@ -72,6 +72,11 @@ def add_module_functions(fn, functions, namespace):
parser.parse_python(None, filename=fn, lineno=1, fixedhash=fixedhash+f)
#bb.warn("Cached %s" % f)
except KeyError:
+ targetfn = inspect.getsourcefile(functions[f])
+ if fn != targetfn:
+ # Skip references to other modules outside this file
+ #bb.warn("Skipping %s" % name)
+ continue
lines, lineno = inspect.getsourcelines(functions[f])
src = "".join(lines)
parser.parse_python(src, filename=fn, lineno=lineno, fixedhash=fixedhash+f)
@@ -82,14 +87,17 @@ def add_module_functions(fn, functions, namespace):
if e in functions:
execs.remove(e)
execs.add(namespace + "." + e)
- modulecode_deps[name] = [parser.references.copy(), execs, parser.var_execs.copy(), parser.contains.copy()]
+ visitorcode = None
+ if hasattr(functions[f], 'visitorcode'):
+ visitorcode = getattr(functions[f], "visitorcode")
+ modulecode_deps[name] = [parser.references.copy(), execs, parser.var_execs.copy(), parser.contains.copy(), parser.extra, visitorcode]
#bb.warn("%s: %s\nRefs:%s Execs: %s %s %s" % (name, fn, parser.references, parser.execs, parser.var_execs, parser.contains))
def update_module_dependencies(d):
for mod in modulecode_deps:
excludes = set((d.getVarFlag(mod, "vardepsexclude") or "").split())
if excludes:
- modulecode_deps[mod] = [modulecode_deps[mod][0] - excludes, modulecode_deps[mod][1] - excludes, modulecode_deps[mod][2] - excludes, modulecode_deps[mod][3]]
+ modulecode_deps[mod] = [modulecode_deps[mod][0] - excludes, modulecode_deps[mod][1] - excludes, modulecode_deps[mod][2] - excludes, modulecode_deps[mod][3], modulecode_deps[mod][4], modulecode_deps[mod][5]]
# A custom getstate/setstate using tuples is actually worth 15% cachesize by
# avoiding duplication of the attribute names!
@@ -112,21 +120,22 @@ class SetCache(object):
codecache = SetCache()
class pythonCacheLine(object):
- def __init__(self, refs, execs, contains):
+ def __init__(self, refs, execs, contains, extra):
self.refs = codecache.internSet(refs)
self.execs = codecache.internSet(execs)
self.contains = {}
for c in contains:
self.contains[c] = codecache.internSet(contains[c])
+ self.extra = extra
def __getstate__(self):
- return (self.refs, self.execs, self.contains)
+ return (self.refs, self.execs, self.contains, self.extra)
def __setstate__(self, state):
- (refs, execs, contains) = state
- self.__init__(refs, execs, contains)
+ (refs, execs, contains, extra) = state
+ self.__init__(refs, execs, contains, extra)
def __hash__(self):
- l = (hash(self.refs), hash(self.execs))
+ l = (hash(self.refs), hash(self.execs), hash(self.extra))
for c in sorted(self.contains.keys()):
l = l + (c, hash(self.contains[c]))
return hash(l)
@@ -155,7 +164,7 @@ class CodeParserCache(MultiProcessCache):
# so that an existing cache gets invalidated. Additionally you'll need
# to increment __cache_version__ in cache.py in order to ensure that old
# recipe caches don't trigger "Taskhash mismatch" errors.
- CACHE_VERSION = 11
+ CACHE_VERSION = 14
def __init__(self):
MultiProcessCache.__init__(self)
@@ -169,8 +178,8 @@ class CodeParserCache(MultiProcessCache):
self.pythoncachelines = {}
self.shellcachelines = {}
- def newPythonCacheLine(self, refs, execs, contains):
- cacheline = pythonCacheLine(refs, execs, contains)
+ def newPythonCacheLine(self, refs, execs, contains, extra):
+ cacheline = pythonCacheLine(refs, execs, contains, extra)
h = hash(cacheline)
if h in self.pythoncachelines:
return self.pythoncachelines[h]
@@ -255,7 +264,15 @@ class PythonParser():
def visit_Call(self, node):
name = self.called_node_name(node.func)
- if name and (name.endswith(self.getvars) or name.endswith(self.getvarflags) or name in self.containsfuncs or name in self.containsanyfuncs):
+ if name and name in modulecode_deps and modulecode_deps[name][5]:
+ visitorcode = modulecode_deps[name][5]
+ contains, execs, warn = visitorcode(name, node.args)
+ for i in contains:
+ self.contains[i] = contains[i]
+ self.execs |= execs
+ if warn:
+ self.warn(node.func, warn)
+ elif name and (name.endswith(self.getvars) or name.endswith(self.getvarflags) or name in self.containsfuncs or name in self.containsanyfuncs):
if isinstance(node.args[0], ast.Constant) and isinstance(node.args[0].value, str):
varname = node.args[0].value
if name in self.containsfuncs and isinstance(node.args[1], ast.Constant):
@@ -338,6 +355,7 @@ class PythonParser():
self.contains = {}
for i in codeparsercache.pythoncache[h].contains:
self.contains[i] = set(codeparsercache.pythoncache[h].contains[i])
+ self.extra = codeparsercache.pythoncache[h].extra
return
if h in codeparsercache.pythoncacheextras:
@@ -346,6 +364,7 @@ class PythonParser():
self.contains = {}
for i in codeparsercache.pythoncacheextras[h].contains:
self.contains[i] = set(codeparsercache.pythoncacheextras[h].contains[i])
+ self.extra = codeparsercache.pythoncacheextras[h].extra
return
if fixedhash and not node:
@@ -364,8 +383,11 @@ class PythonParser():
self.visit_Call(n)
self.execs.update(self.var_execs)
+ self.extra = None
+ if fixedhash:
+ self.extra = bbhash(str(node))
- codeparsercache.pythoncacheextras[h] = codeparsercache.newPythonCacheLine(self.references, self.execs, self.contains)
+ codeparsercache.pythoncacheextras[h] = codeparsercache.newPythonCacheLine(self.references, self.execs, self.contains, self.extra)
class ShellParser():
def __init__(self, name, log):
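
With this change each modulecode_deps entry carries six fields (references, execs, var_execs, contains, extra, visitorcode), and visit_Call() consults the visitorcode hook first, letting a library function compute its own dependency data from the AST of each call site. A hedged sketch of such a hook (the function and variable names are illustrative, not from the source):

    import ast

    def get_feature(d, feature):
        return feature in (d.getVar("MYFEATURES") or "").split()

    def _get_feature_visitorcode(name, args):
        # Must return (contains, execs, warn) as visit_Call() expects
        contains, execs, warn = {}, set(), None
        if args and isinstance(args[-1], ast.Constant):
            # Record a contains-style dependency on MYFEATURES
            contains["MYFEATURES"] = {args[-1].value}
        else:
            warn = "get_feature called with a non-literal argument"
        return contains, execs, warn

    get_feature.visitorcode = _get_feature_visitorcode
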
diff --git a/poky/bitbake/lib/bb/cooker.py b/poky/bitbake/lib/bb/cooker.py
index 6754f986bf..582fc35f24 100644
--- a/poky/bitbake/lib/bb/cooker.py
+++ b/poky/bitbake/lib/bb/cooker.py
@@ -17,7 +17,7 @@ import threading
from io import StringIO, UnsupportedOperation
from contextlib import closing
from collections import defaultdict, namedtuple
-import bb, bb.exceptions, bb.command
+import bb, bb.command
from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build
import queue
import signal
@@ -281,7 +281,6 @@ class BBCooker:
self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False)
self.databuilder.parseBaseConfiguration()
self.data = self.databuilder.data
- self.data_hash = self.databuilder.data_hash
self.extraconfigdata = {}
eventlog = self.data.getVar("BB_DEFAULT_EVENTLOG")
@@ -370,6 +369,11 @@ class BBCooker:
if not clean:
bb.parse.BBHandler.cached_statements = {}
+ # If writes were made to any of the data stores, we need to recalculate the data
+ # store cache
+ if hasattr(self, "databuilder"):
+ self.databuilder.calc_datastore_hashes()
+
def parseConfiguration(self):
self.updateCacheSync()
@@ -1338,7 +1342,7 @@ class BBCooker:
self.buildSetVars()
self.reset_mtime_caches()
- bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array)
+ bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.databuilder.data_hash, self.caches_array)
layername = self.collections[mc].calc_bbfile_priority(fn)[2]
infos = bb_caches[mc].parse(fn, self.collections[mc].get_file_appends(fn), layername)
@@ -2097,7 +2101,6 @@ class Parser(multiprocessing.Process):
except Exception as exc:
tb = sys.exc_info()[2]
exc.recipe = filename
- exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3))
return True, None, exc
# Need to turn BaseExceptions into Exceptions here so we gracefully shutdown
# and for example a worker thread doesn't just exit on its own in response to
@@ -2112,7 +2115,7 @@ class CookerParser(object):
self.mcfilelist = mcfilelist
self.cooker = cooker
self.cfgdata = cooker.data
- self.cfghash = cooker.data_hash
+ self.cfghash = cooker.databuilder.data_hash
self.cfgbuilder = cooker.databuilder
# Accounting statistics
@@ -2298,8 +2301,12 @@ class CookerParser(object):
return False
except ParsingFailure as exc:
self.error += 1
- logger.error('Unable to parse %s: %s' %
- (exc.recipe, bb.exceptions.to_string(exc.realexception)))
+
+ exc_desc = str(exc)
+ if isinstance(exc, SystemExit) and not isinstance(exc.code, str):
+ exc_desc = 'Exited with "%d"' % exc.code
+
+ logger.error('Unable to parse %s: %s' % (exc.recipe, exc_desc))
self.shutdown(clean=False)
return False
except bb.parse.ParseError as exc:
@@ -2308,20 +2315,33 @@ class CookerParser(object):
self.shutdown(clean=False, eventmsg=str(exc))
return False
except bb.data_smart.ExpansionError as exc:
+ def skip_frames(f, fn_prefix):
+ while f and f.tb_frame.f_code.co_filename.startswith(fn_prefix):
+ f = f.tb_next
+ return f
+
self.error += 1
bbdir = os.path.dirname(__file__) + os.sep
- etype, value, _ = sys.exc_info()
- tb = list(itertools.dropwhile(lambda e: e.filename.startswith(bbdir), exc.traceback))
+ etype, value, tb = sys.exc_info()
+
+ # Remove any frames where the code comes from bitbake. This
+ # prevents deep (and pretty useless) backtraces for expansion error
+ tb = skip_frames(tb, bbdir)
+ cur = tb
+ while cur:
+ cur.tb_next = skip_frames(cur.tb_next, bbdir)
+ cur = cur.tb_next
+
logger.error('ExpansionError during parsing %s', value.recipe,
exc_info=(etype, value, tb))
self.shutdown(clean=False)
return False
except Exception as exc:
self.error += 1
- etype, value, tb = sys.exc_info()
+ _, value, _ = sys.exc_info()
if hasattr(value, "recipe"):
logger.error('Unable to parse %s' % value.recipe,
- exc_info=(etype, value, exc.traceback))
+ exc_info=sys.exc_info())
else:
# Most likely, an exception occurred during raising an exception
import traceback
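
skip_frames() drops every leading traceback frame whose source file lives under the bitbake directory, and the while loop repeats that pruning at each surviving link, so an ExpansionError report shows only the metadata frames. The pattern in isolation (tb_next is writable since Python 3.7):

    def prune_traceback(tb, fn_prefix):
        def skip_frames(f):
            # Drop frames whose code comes from under fn_prefix
            while f and f.tb_frame.f_code.co_filename.startswith(fn_prefix):
                f = f.tb_next
            return f

        tb = skip_frames(tb)
        cur = tb
        while cur:
            cur.tb_next = skip_frames(cur.tb_next)
            cur = cur.tb_next
        return tb
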
diff --git a/poky/bitbake/lib/bb/cookerdata.py b/poky/bitbake/lib/bb/cookerdata.py
index 0649e40995..3ad5cf3dd0 100644
--- a/poky/bitbake/lib/bb/cookerdata.py
+++ b/poky/bitbake/lib/bb/cookerdata.py
@@ -254,9 +254,16 @@ class CookerDataBuilder(object):
self.data = self.basedata
self.mcdata = {}
+ def calc_datastore_hashes(self):
+ data_hash = hashlib.sha256()
+ data_hash.update(self.data.get_hash().encode('utf-8'))
+ multiconfig = (self.data.getVar("BBMULTICONFIG") or "").split()
+ for config in multiconfig:
+ data_hash.update(self.mcdata[config].get_hash().encode('utf-8'))
+ self.data_hash = data_hash.hexdigest()
+
def parseBaseConfiguration(self, worker=False):
mcdata = {}
- data_hash = hashlib.sha256()
try:
self.data = self.parseConfigurationFiles(self.prefiles, self.postfiles)
@@ -279,7 +286,6 @@ class CookerDataBuilder(object):
bb.event.fire(bb.event.ConfigParsed(), self.data)
bb.parse.init_parser(self.data)
- data_hash.update(self.data.get_hash().encode('utf-8'))
mcdata[''] = self.data
multiconfig = (self.data.getVar("BBMULTICONFIG") or "").split()
@@ -289,11 +295,9 @@ class CookerDataBuilder(object):
parsed_mcdata = self.parseConfigurationFiles(self.prefiles, self.postfiles, config)
bb.event.fire(bb.event.ConfigParsed(), parsed_mcdata)
mcdata[config] = parsed_mcdata
- data_hash.update(parsed_mcdata.get_hash().encode('utf-8'))
if multiconfig:
bb.event.fire(bb.event.MultiConfigParsed(mcdata), self.data)
- self.data_hash = data_hash.hexdigest()
except bb.data_smart.ExpansionError as e:
logger.error(str(e))
raise bb.BBHandledException()
@@ -328,6 +332,7 @@ class CookerDataBuilder(object):
for mc in mcdata:
self.mcdata[mc] = bb.data.createCopy(mcdata[mc])
self.data = self.mcdata['']
+ self.calc_datastore_hashes()
def reset(self):
# We may not have run parseBaseConfiguration() yet
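
calc_datastore_hashes() folds the hash of the base datastore and of every multiconfig datastore into one digest, and updateCacheSync() (see the cooker.py hunk above) recomputes it whenever the datastores may have been written to. The combining step, reduced to its essentials:

    import hashlib

    def combined_hash(base_hash, mc_hashes):
        """Fold per-datastore hashes into one cache key (order must be stable)."""
        h = hashlib.sha256()
        h.update(base_hash.encode('utf-8'))
        for mc_hash in mc_hashes:
            h.update(mc_hash.encode('utf-8'))
        return h.hexdigest()
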
diff --git a/poky/bitbake/lib/bb/data.py b/poky/bitbake/lib/bb/data.py
index 505f42950f..f672a84451 100644
--- a/poky/bitbake/lib/bb/data.py
+++ b/poky/bitbake/lib/bb/data.py
@@ -293,7 +293,7 @@ def build_dependencies(key, keys, mod_funcs, shelldeps, varflagsexcl, ignored_va
if key in mod_funcs:
exclusions = set()
moddep = bb.codeparser.modulecode_deps[key]
- value = handle_contains("", moddep[3], exclusions, d)
+ value = handle_contains(moddep[4], moddep[3], exclusions, d)
return frozenset((moddep[0] | keys & moddep[1]) - ignored_vars), value
if key[-1] == ']':
diff --git a/poky/bitbake/lib/bb/data_smart.py b/poky/bitbake/lib/bb/data_smart.py
index 0128a5bb17..c6049d578e 100644
--- a/poky/bitbake/lib/bb/data_smart.py
+++ b/poky/bitbake/lib/bb/data_smart.py
@@ -272,12 +272,9 @@ class VariableHistory(object):
return
if 'op' not in loginfo or not loginfo['op']:
loginfo['op'] = 'set'
- if 'detail' in loginfo:
- loginfo['detail'] = str(loginfo['detail'])
if 'variable' not in loginfo or 'file' not in loginfo:
raise ValueError("record() missing variable or file.")
var = loginfo['variable']
-
if var not in self.variables:
self.variables[var] = []
if not isinstance(self.variables[var], list):
@@ -336,7 +333,8 @@ class VariableHistory(object):
flag = '[%s] ' % (event['flag'])
else:
flag = ''
- o.write("# %s %s:%s%s\n# %s\"%s\"\n" % (event['op'], event['file'], event['line'], display_func, flag, re.sub('\n', '\n# ', event['detail'])))
+ o.write("# %s %s:%s%s\n# %s\"%s\"\n" % \
+ (event['op'], event['file'], event['line'], display_func, flag, re.sub('\n', '\n# ', str(event['detail']))))
if len(history) > 1:
o.write("# pre-expansion value:\n")
o.write('# "%s"\n' % (commentVal))
@@ -390,7 +388,7 @@ class VariableHistory(object):
if isset and event['op'] == 'set?':
continue
isset = True
- items = d.expand(event['detail']).split()
+ items = d.expand(str(event['detail'])).split()
for item in items:
# This is a little crude but is belt-and-braces to avoid us
# having to handle every possible operation type specifically
diff --git a/poky/bitbake/lib/bb/event.py b/poky/bitbake/lib/bb/event.py
index 4761c86880..952c85c0bd 100644
--- a/poky/bitbake/lib/bb/event.py
+++ b/poky/bitbake/lib/bb/event.py
@@ -19,7 +19,6 @@ import sys
import threading
import traceback
-import bb.exceptions
import bb.utils
# This is the pid for which we should generate the event. This is set when
@@ -759,13 +758,7 @@ class LogHandler(logging.Handler):
def emit(self, record):
if record.exc_info:
- etype, value, tb = record.exc_info
- if hasattr(tb, 'tb_next'):
- tb = list(bb.exceptions.extract_traceback(tb, context=3))
- # Need to turn the value into something the logging system can pickle
- record.bb_exc_info = (etype, value, tb)
- record.bb_exc_formatted = bb.exceptions.format_exception(etype, value, tb, limit=5)
- value = str(value)
+ record.bb_exc_formatted = traceback.format_exception(*record.exc_info)
record.exc_info = None
fire(record, None)
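
Traceback objects cannot be pickled across the event socket, so emit() now formats the exception with the standard library and clears exc_info before firing. For reference, traceback.format_exception(*exc_info) returns a list of strings ready for ''.join():

    import sys
    import traceback

    try:
        1 / 0
    except ZeroDivisionError:
        formatted = traceback.format_exception(*sys.exc_info())

    print(''.join(formatted), end='')  # "Traceback (most recent call last): ..."
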
diff --git a/poky/bitbake/lib/bb/exceptions.py b/poky/bitbake/lib/bb/exceptions.py
deleted file mode 100644
index 801db9c82f..0000000000
--- a/poky/bitbake/lib/bb/exceptions.py
+++ /dev/null
@@ -1,96 +0,0 @@
-#
-# Copyright BitBake Contributors
-#
-# SPDX-License-Identifier: GPL-2.0-only
-#
-
-import inspect
-import traceback
-import bb.namedtuple_with_abc
-from collections import namedtuple
-
-
-class TracebackEntry(namedtuple.abc):
- """Pickleable representation of a traceback entry"""
- _fields = 'filename lineno function args code_context index'
- _header = ' File "{0.filename}", line {0.lineno}, in {0.function}{0.args}'
-
- def format(self, formatter=None):
- if not self.code_context:
- return self._header.format(self) + '\n'
-
- formatted = [self._header.format(self) + ':\n']
-
- for lineindex, line in enumerate(self.code_context):
- if formatter:
- line = formatter(line)
-
- if lineindex == self.index:
- formatted.append(' >%s' % line)
- else:
- formatted.append(' %s' % line)
- return formatted
-
- def __str__(self):
- return ''.join(self.format())
-
-def _get_frame_args(frame):
- """Get the formatted arguments and class (if available) for a frame"""
- arginfo = inspect.getargvalues(frame)
-
- try:
- if not arginfo.args:
- return '', None
- # There have been reports from the field of python 2.6 which doesn't
- # return a namedtuple here but simply a tuple so fallback gracefully if
- # args isn't present.
- except AttributeError:
- return '', None
-
- firstarg = arginfo.args[0]
- if firstarg == 'self':
- self = arginfo.locals['self']
- cls = self.__class__.__name__
-
- arginfo.args.pop(0)
- del arginfo.locals['self']
- else:
- cls = None
-
- formatted = inspect.formatargvalues(*arginfo)
- return formatted, cls
-
-def extract_traceback(tb, context=1):
- frames = inspect.getinnerframes(tb, context)
- for frame, filename, lineno, function, code_context, index in frames:
- formatted_args, cls = _get_frame_args(frame)
- if cls:
- function = '%s.%s' % (cls, function)
- yield TracebackEntry(filename, lineno, function, formatted_args,
- code_context, index)
-
-def format_extracted(extracted, formatter=None, limit=None):
- if limit:
- extracted = extracted[-limit:]
-
- formatted = []
- for tracebackinfo in extracted:
- formatted.extend(tracebackinfo.format(formatter))
- return formatted
-
-
-def format_exception(etype, value, tb, context=1, limit=None, formatter=None):
- formatted = ['Traceback (most recent call last):\n']
-
- if hasattr(tb, 'tb_next'):
- tb = extract_traceback(tb, context)
-
- formatted.extend(format_extracted(tb, formatter, limit))
- formatted.extend(traceback.format_exception_only(etype, value))
- return formatted
-
-def to_string(exc):
- if isinstance(exc, SystemExit):
- if not isinstance(exc.code, str):
- return 'Exited with "%d"' % exc.code
- return str(exc)
diff --git a/poky/bitbake/lib/bb/fetch2/__init__.py b/poky/bitbake/lib/bb/fetch2/__init__.py
index 5bf2c4b8cf..8f0ed2b9e2 100644
--- a/poky/bitbake/lib/bb/fetch2/__init__.py
+++ b/poky/bitbake/lib/bb/fetch2/__init__.py
@@ -499,30 +499,30 @@ def fetcher_init(d):
Calls before this must not hit the cache.
"""
- revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
- try:
- # fetcher_init is called multiple times, so make sure we only save the
- # revs the first time it is called.
- if not bb.fetch2.saved_headrevs:
- bb.fetch2.saved_headrevs = dict(revs)
- except:
- pass
-
- # When to drop SCM head revisions controlled by user policy
- srcrev_policy = d.getVar('BB_SRCREV_POLICY') or "clear"
- if srcrev_policy == "cache":
- logger.debug("Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
- elif srcrev_policy == "clear":
- logger.debug("Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
- revs.clear()
- else:
- raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)
+ with bb.persist_data.persist('BB_URI_HEADREVS', d) as revs:
+ try:
+ # fetcher_init is called multiple times, so make sure we only save the
+ # revs the first time it is called.
+ if not bb.fetch2.saved_headrevs:
+ bb.fetch2.saved_headrevs = dict(revs)
+ except:
+ pass
- _checksum_cache.init_cache(d.getVar("BB_CACHEDIR"))
+ # When to drop SCM head revisions controlled by user policy
+ srcrev_policy = d.getVar('BB_SRCREV_POLICY') or "clear"
+ if srcrev_policy == "cache":
+ logger.debug("Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
+ elif srcrev_policy == "clear":
+ logger.debug("Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
+ revs.clear()
+ else:
+ raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)
+
+ _checksum_cache.init_cache(d.getVar("BB_CACHEDIR"))
- for m in methods:
- if hasattr(m, "init"):
- m.init(d)
+ for m in methods:
+ if hasattr(m, "init"):
+ m.init(d)
def fetcher_parse_save():
_checksum_cache.save_extras()
@@ -536,8 +536,8 @@ def fetcher_compare_revisions(d):
when bitbake was started and return true if they have changed.
"""
- headrevs = dict(bb.persist_data.persist('BB_URI_HEADREVS', d))
- return headrevs != bb.fetch2.saved_headrevs
+    with bb.persist_data.persist('BB_URI_HEADREVS', d) as revs:
+        headrevs = dict(revs)
+    return headrevs != bb.fetch2.saved_headrevs
def mirror_from_string(data):
mirrors = (data or "").replace('\\n',' ').split()
@@ -1317,7 +1317,7 @@ class FetchData(object):
if checksum_name in self.parm:
checksum_expected = self.parm[checksum_name]
- elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3", "az", "crate", "gs"]:
+ elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3", "az", "crate", "gs", "gomod"]:
checksum_expected = None
else:
checksum_expected = d.getVarFlag("SRC_URI", checksum_name)
@@ -1606,7 +1606,7 @@ class FetchMethod(object):
if urlpath.find("/") != -1:
destdir = urlpath.rsplit("/", 1)[0] + '/'
bb.utils.mkdirhier("%s/%s" % (unpackdir, destdir))
- cmd = 'cp -fpPRH "%s" "%s"' % (file, destdir)
+ cmd = 'cp --force --preserve=timestamps --no-dereference --recursive -H "%s" "%s"' % (file, destdir)
else:
urldata.unpack_tracer.unpack("archive-extract", unpackdir)
@@ -1662,13 +1662,13 @@ class FetchMethod(object):
if not hasattr(self, "_latest_revision"):
raise ParameterError("The fetcher for this URL does not support _latest_revision", ud.url)
- revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
- key = self.generate_revision_key(ud, d, name)
- try:
- return revs[key]
- except KeyError:
- revs[key] = rev = self._latest_revision(ud, d, name)
- return rev
+ with bb.persist_data.persist('BB_URI_HEADREVS', d) as revs:
+ key = self.generate_revision_key(ud, d, name)
+ try:
+ return revs[key]
+ except KeyError:
+ revs[key] = rev = self._latest_revision(ud, d, name)
+ return rev
def sortable_revision(self, ud, d, name):
latest_rev = self._build_revision(ud, d, name)
@@ -2088,6 +2088,7 @@ from . import npmsw
from . import az
from . import crate
from . import gcp
+from . import gomod
methods.append(local.Local())
methods.append(wget.Wget())
@@ -2110,3 +2111,5 @@ methods.append(npmsw.NpmShrinkWrap())
methods.append(az.Az())
methods.append(crate.Crate())
methods.append(gcp.GCP())
+methods.append(gomod.GoMod())
+methods.append(gomod.GoModGit())
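
The persist_data stores are now scoped with context managers so the SQLite connection behind them is closed deterministically (the matching __exit__ change is in the persist_data.py hunk below). The access pattern, as a sketch that assumes a BitBake datastore d:

    import bb.persist_data

    def head_revisions(d):
        with bb.persist_data.persist('BB_URI_HEADREVS', d) as revs:
            return dict(revs)   # snapshot the table before the connection closes
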
diff --git a/poky/bitbake/lib/bb/fetch2/clearcase.py b/poky/bitbake/lib/bb/fetch2/clearcase.py
index 1a9c863769..2b3bd70693 100644
--- a/poky/bitbake/lib/bb/fetch2/clearcase.py
+++ b/poky/bitbake/lib/bb/fetch2/clearcase.py
@@ -108,7 +108,7 @@ class ClearCase(FetchMethod):
ud.module.replace("/", "."),
ud.label.replace("/", "."))
- ud.viewname = "%s-view%s" % (ud.identifier, d.getVar("DATETIME", d, True))
+ ud.viewname = "%s-view%s" % (ud.identifier, d.getVar("DATETIME"))
ud.csname = "%s-config-spec" % (ud.identifier)
ud.ccasedir = os.path.join(d.getVar("DL_DIR"), ud.type)
ud.viewdir = os.path.join(ud.ccasedir, ud.viewname)
@@ -196,7 +196,7 @@ class ClearCase(FetchMethod):
def need_update(self, ud, d):
if ("LATEST" in ud.label) or (ud.customspec and "LATEST" in ud.customspec):
- ud.identifier += "-%s" % d.getVar("DATETIME",d, True)
+ ud.identifier += "-%s" % d.getVar("DATETIME")
return True
if os.path.exists(ud.localpath):
return False
diff --git a/poky/bitbake/lib/bb/fetch2/gcp.py b/poky/bitbake/lib/bb/fetch2/gcp.py
index eb3e0c6a6b..2ee9ed2194 100644
--- a/poky/bitbake/lib/bb/fetch2/gcp.py
+++ b/poky/bitbake/lib/bb/fetch2/gcp.py
@@ -23,7 +23,6 @@ import urllib.parse, urllib.error
from bb.fetch2 import FetchMethod
from bb.fetch2 import FetchError
from bb.fetch2 import logger
-from bb.fetch2 import runfetchcmd
class GCP(FetchMethod):
"""
@@ -48,7 +47,6 @@ class GCP(FetchMethod):
ud.basename = os.path.basename(ud.path)
ud.localfile = d.expand(urllib.parse.unquote(ud.basename))
- ud.basecmd = "gsutil stat"
def get_gcp_client(self):
from google.cloud import storage
@@ -59,17 +57,20 @@ class GCP(FetchMethod):
Fetch urls using the GCP API.
Assumes localpath was called first.
"""
+ from google.api_core.exceptions import NotFound
logger.debug2(f"Trying to download gs://{ud.host}{ud.path} to {ud.localpath}")
if self.gcp_client is None:
self.get_gcp_client()
- bb.fetch2.check_network_access(d, ud.basecmd, f"gs://{ud.host}{ud.path}")
- runfetchcmd("%s %s" % (ud.basecmd, f"gs://{ud.host}{ud.path}"), d)
+ bb.fetch2.check_network_access(d, "blob.download_to_filename", f"gs://{ud.host}{ud.path}")
# Path sometimes has leading slash, so strip it
path = ud.path.lstrip("/")
blob = self.gcp_client.bucket(ud.host).blob(path)
- blob.download_to_filename(ud.localpath)
+ try:
+ blob.download_to_filename(ud.localpath)
+ except NotFound:
+ raise FetchError("The GCP API threw a NotFound exception")
# Additional sanity checks copied from the wget class (although there
# are no known issues which mean these are required, treat the GCP API
@@ -91,8 +92,7 @@ class GCP(FetchMethod):
if self.gcp_client is None:
self.get_gcp_client()
- bb.fetch2.check_network_access(d, ud.basecmd, f"gs://{ud.host}{ud.path}")
- runfetchcmd("%s %s" % (ud.basecmd, f"gs://{ud.host}{ud.path}"), d)
+ bb.fetch2.check_network_access(d, "gcp_client.bucket(ud.host).blob(path).exists()", f"gs://{ud.host}{ud.path}")
# Path sometimes has leading slash, so strip it
path = ud.path.lstrip("/")
diff --git a/poky/bitbake/lib/bb/fetch2/git.py b/poky/bitbake/lib/bb/fetch2/git.py
index c7ff769fdf..5b678827ed 100644
--- a/poky/bitbake/lib/bb/fetch2/git.py
+++ b/poky/bitbake/lib/bb/fetch2/git.py
@@ -262,7 +262,7 @@ class Git(FetchMethod):
for name in ud.names:
ud.unresolvedrev[name] = 'HEAD'
- ud.basecmd = d.getVar("FETCHCMD_git") or "git -c gc.autoDetach=false -c core.pager=cat -c safe.bareRepository=all"
+ ud.basecmd = d.getVar("FETCHCMD_git") or "git -c gc.autoDetach=false -c core.pager=cat -c safe.bareRepository=all -c clone.defaultRemoteName=origin"
write_tarballs = d.getVar("BB_GENERATE_MIRROR_TARBALLS") or "0"
ud.write_tarballs = write_tarballs != "0" or ud.rebaseable
@@ -551,18 +551,31 @@ class Git(FetchMethod):
runfetchcmd("touch %s.done" % ud.fullmirror, d)
def clone_shallow_local(self, ud, dest, d):
- """Clone the repo and make it shallow.
+ """
+ Shallow fetch from ud.clonedir (${DL_DIR}/git2/<gitrepo> by default):
+ - For BB_GIT_SHALLOW_DEPTH: git fetch --depth <depth> rev
+ - For BB_GIT_SHALLOW_REVS: git fetch --shallow-exclude=<revs> rev
+ """
+
+ bb.utils.mkdirhier(dest)
+ init_cmd = "%s init -q" % ud.basecmd
+ if ud.bareclone:
+ init_cmd += " --bare"
+ runfetchcmd(init_cmd, d, workdir=dest)
+ runfetchcmd("%s remote add origin %s" % (ud.basecmd, ud.clonedir), d, workdir=dest)
- The upstream url of the new clone isn't set at this time, as it'll be
- set correctly when unpacked."""
- runfetchcmd("%s clone %s %s %s" % (ud.basecmd, ud.cloneflags, ud.clonedir, dest), d)
+ # Check the histories which should be excluded
+ shallow_exclude = ''
+ for revision in ud.shallow_revs:
+ shallow_exclude += " --shallow-exclude=%s" % revision
- to_parse, shallow_branches = [], []
for name in ud.names:
revision = ud.revisions[name]
depth = ud.shallow_depths[name]
- if depth:
- to_parse.append('%s~%d^{}' % (revision, depth - 1))
+
+ # The --depth and --shallow-exclude can't be used together
+ if depth and shallow_exclude:
+ raise bb.fetch2.FetchError("BB_GIT_SHALLOW_REVS is set, but BB_GIT_SHALLOW_DEPTH is not 0.")
# For nobranch, we need a ref, otherwise the commits will be
# removed, and for non-nobranch, we truncate the branch to our
@@ -575,36 +588,49 @@ class Git(FetchMethod):
else:
ref = "refs/remotes/origin/%s" % branch
- shallow_branches.append(ref)
- runfetchcmd("%s update-ref %s %s" % (ud.basecmd, ref, revision), d, workdir=dest)
+ fetch_cmd = "%s fetch origin %s" % (ud.basecmd, revision)
+ if depth:
+ fetch_cmd += " --depth %s" % depth
+
+ if shallow_exclude:
+ fetch_cmd += shallow_exclude
- # Map srcrev+depths to revisions
- parsed_depths = runfetchcmd("%s rev-parse %s" % (ud.basecmd, " ".join(to_parse)), d, workdir=dest)
+            # Advertise the revision so that older git versions (e.g. 2.25.1)
+            # don't fail with:
+            #   error: Server does not allow request for unadvertised object.
+            # ud.clonedir is a local temporary dir and is removed once the
+            # fetch is done, so we are free to modify it.
+ adv_cmd = 'git branch -f advertise-%s %s' % (revision, revision)
+ runfetchcmd(adv_cmd, d, workdir=ud.clonedir)
- # Resolve specified revisions
- parsed_revs = runfetchcmd("%s rev-parse %s" % (ud.basecmd, " ".join('"%s^{}"' % r for r in ud.shallow_revs)), d, workdir=dest)
- shallow_revisions = parsed_depths.splitlines() + parsed_revs.splitlines()
+ runfetchcmd(fetch_cmd, d, workdir=dest)
+ runfetchcmd("%s update-ref %s %s" % (ud.basecmd, ref, revision), d, workdir=dest)
# Apply extra ref wildcards
- all_refs = runfetchcmd('%s for-each-ref "--format=%%(refname)"' % ud.basecmd,
- d, workdir=dest).splitlines()
+ all_refs_remote = runfetchcmd("%s ls-remote origin 'refs/*'" % ud.basecmd, \
+ d, workdir=dest).splitlines()
+ all_refs = []
+ for line in all_refs_remote:
+ all_refs.append(line.split()[-1])
+ extra_refs = []
for r in ud.shallow_extra_refs:
if not ud.bareclone:
r = r.replace('refs/heads/', 'refs/remotes/origin/')
if '*' in r:
matches = filter(lambda a: fnmatch.fnmatchcase(a, r), all_refs)
- shallow_branches.extend(matches)
+ extra_refs.extend(matches)
else:
- shallow_branches.append(r)
+ extra_refs.append(r)
- # Make the repository shallow
- shallow_cmd = [self.make_shallow_path, '-s']
- for b in shallow_branches:
- shallow_cmd.append('-r')
- shallow_cmd.append(b)
- shallow_cmd.extend(shallow_revisions)
- runfetchcmd(subprocess.list2cmdline(shallow_cmd), d, workdir=dest)
+ for ref in extra_refs:
+ ref_fetch = os.path.basename(ref)
+ runfetchcmd("%s fetch origin --depth 1 %s" % (ud.basecmd, ref_fetch), d, workdir=dest)
+ revision = runfetchcmd("%s rev-parse FETCH_HEAD" % ud.basecmd, d, workdir=dest)
+ runfetchcmd("%s update-ref %s %s" % (ud.basecmd, ref, revision), d, workdir=dest)
+
+ # The url is local ud.clonedir, set it to upstream one
+ repourl = self._get_repo_url(ud)
+ runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, shlex.quote(repourl)), d, workdir=dest)
def unpack(self, ud, destdir, d):
""" unpack the downloaded src to destdir"""
@@ -926,9 +952,8 @@ class Git(FetchMethod):
commits = None
else:
if not os.path.exists(rev_file) or not os.path.getsize(rev_file):
- from pipes import quote
commits = bb.fetch2.runfetchcmd(
- "git rev-list %s -- | wc -l" % quote(rev),
+ "git rev-list %s -- | wc -l" % shlex.quote(rev),
d, quiet=True).strip().lstrip('0')
if commits:
open(rev_file, "w").write("%d\n" % int(commits))
diff --git a/poky/bitbake/lib/bb/fetch2/gitsm.py b/poky/bitbake/lib/bb/fetch2/gitsm.py
index f7f3af7212..fab4b1164c 100644
--- a/poky/bitbake/lib/bb/fetch2/gitsm.py
+++ b/poky/bitbake/lib/bb/fetch2/gitsm.py
@@ -147,6 +147,19 @@ class GitSM(Git):
return submodules != []
+ def call_process_submodules(self, ud, d, extra_check, subfunc):
+ # If we're using a shallow mirror tarball it needs to be
+ # unpacked temporarily so that we can examine the .gitmodules file
+ if ud.shallow and os.path.exists(ud.fullshallow) and extra_check:
+ tmpdir = tempfile.mkdtemp(dir=d.getVar("DL_DIR"))
+ try:
+ runfetchcmd("tar -xzf %s" % ud.fullshallow, d, workdir=tmpdir)
+ self.process_submodules(ud, tmpdir, subfunc, d)
+ finally:
+ shutil.rmtree(tmpdir)
+ else:
+ self.process_submodules(ud, ud.clonedir, subfunc, d)
+
def need_update(self, ud, d):
if Git.need_update(self, ud, d):
return True
@@ -164,15 +177,7 @@ class GitSM(Git):
logger.error('gitsm: submodule update check failed: %s %s' % (type(e).__name__, str(e)))
need_update_result = True
- # If we're using a shallow mirror tarball it needs to be unpacked
- # temporarily so that we can examine the .gitmodules file
- if ud.shallow and os.path.exists(ud.fullshallow) and not os.path.exists(ud.clonedir):
- tmpdir = tempfile.mkdtemp(dir=d.getVar("DL_DIR"))
- runfetchcmd("tar -xzf %s" % ud.fullshallow, d, workdir=tmpdir)
- self.process_submodules(ud, tmpdir, need_update_submodule, d)
- shutil.rmtree(tmpdir)
- else:
- self.process_submodules(ud, ud.clonedir, need_update_submodule, d)
+ self.call_process_submodules(ud, d, not os.path.exists(ud.clonedir), need_update_submodule)
if need_update_list:
logger.debug('gitsm: Submodules requiring update: %s' % (' '.join(need_update_list)))
@@ -195,16 +200,7 @@ class GitSM(Git):
raise
Git.download(self, ud, d)
-
- # If we're using a shallow mirror tarball it needs to be unpacked
- # temporarily so that we can examine the .gitmodules file
- if ud.shallow and os.path.exists(ud.fullshallow) and self.need_update(ud, d):
- tmpdir = tempfile.mkdtemp(dir=d.getVar("DL_DIR"))
- runfetchcmd("tar -xzf %s" % ud.fullshallow, d, workdir=tmpdir)
- self.process_submodules(ud, tmpdir, download_submodule, d)
- shutil.rmtree(tmpdir)
- else:
- self.process_submodules(ud, ud.clonedir, download_submodule, d)
+ self.call_process_submodules(ud, d, self.need_update(ud, d), download_submodule)
def unpack(self, ud, destdir, d):
def unpack_submodules(ud, url, module, modpath, workdir, d):
@@ -263,14 +259,6 @@ class GitSM(Git):
newfetch = Fetch([url], d, cache=False)
urldata.extend(newfetch.expanded_urldata())
- # If we're using a shallow mirror tarball it needs to be unpacked
- # temporarily so that we can examine the .gitmodules file
- if ud.shallow and os.path.exists(ud.fullshallow) and ud.method.need_update(ud, d):
- tmpdir = tempfile.mkdtemp(dir=d.getVar("DL_DIR"))
- subprocess.check_call("tar -xzf %s" % ud.fullshallow, cwd=tmpdir, shell=True)
- self.process_submodules(ud, tmpdir, add_submodule, d)
- shutil.rmtree(tmpdir)
- else:
- self.process_submodules(ud, ud.clonedir, add_submodule, d)
+ self.call_process_submodules(ud, d, ud.method.need_update(ud, d), add_submodule)
return urldata
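
call_process_submodules() consolidates three copies of the "unpack the shallow tarball somewhere temporary so .gitmodules can be examined" block, and its try/finally now removes the tempdir even when processing raises. The resource-safety pattern in isolation:

    import shutil
    import tempfile

    def with_tempdir(parent, func):
        tmpdir = tempfile.mkdtemp(dir=parent)
        try:
            return func(tmpdir)
        finally:
            shutil.rmtree(tmpdir)   # runs even if func() raises
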
diff --git a/poky/bitbake/lib/bb/fetch2/gomod.py b/poky/bitbake/lib/bb/fetch2/gomod.py
new file mode 100644
index 0000000000..21fbe80f56
--- /dev/null
+++ b/poky/bitbake/lib/bb/fetch2/gomod.py
@@ -0,0 +1,268 @@
+"""
+BitBake 'Fetch' implementation for Go modules
+
+The gomod/gomodgit fetchers are used to download Go modules to the module cache
+from a module proxy or directly from a version control repository.
+
+Example SRC_URI:
+
+SRC_URI += "gomod://golang.org/x/net;version=v0.9.0;sha256sum=..."
+SRC_URI += "gomodgit://golang.org/x/net;version=v0.9.0;repo=go.googlesource.com/net;srcrev=..."
+
+Required SRC_URI parameters:
+
+- version
+ The version of the module.
+
+Optional SRC_URI parameters:
+
+- mod
+ Fetch and unpack the go.mod file only instead of the complete module.
+ The go command may need to download go.mod files for many different modules
+ when computing the build list, and go.mod files are much smaller than
+ module zip files.
+ The default is "0", set mod=1 for the go.mod file only.
+
+- sha256sum
+ The checksum of the module zip file, or the go.mod file in case of fetching
+  only the go.mod file. Alternatively, set the SRC_URI variable flag for
+ "module@version.sha256sum".
+
+- protocol
+ The method used when fetching directly from a version control repository.
+ The default is "https" for git.
+
+- repo
+ The URL when fetching directly from a version control repository. Required
+ when the URL is different from the module path.
+
+- srcrev
+ The revision identifier used when fetching directly from a version control
+  repository. Alternatively, set the SRCREV variable for "module@version".
+
+- subdir
+ The module subdirectory when fetching directly from a version control
+ repository. Required when the module is not located in the root of the
+ repository.
+
+Related variables:
+
+- GO_MOD_PROXY
+ The module proxy used by the fetcher.
+
+- GO_MOD_CACHE_DIR
+ The directory where the module cache is located.
+ This must match the exported GOMODCACHE variable for the go command to find
+ the downloaded modules.
+
+See the Go modules reference, https://go.dev/ref/mod, for more information
+about the module cache, module proxies and version control systems.
+"""
+
+import hashlib
+import os
+import re
+import shutil
+import subprocess
+import zipfile
+
+import bb
+from bb.fetch2 import FetchError
+from bb.fetch2 import MissingParameterError
+from bb.fetch2 import runfetchcmd
+from bb.fetch2 import subprocess_setup
+from bb.fetch2.git import Git
+from bb.fetch2.wget import Wget
+
+
+def escape(path):
+ """Escape capital letters using exclamation points."""
+ return re.sub(r'([A-Z])', lambda m: '!' + m.group(1).lower(), path)
+
+
+class GoMod(Wget):
+ """Class to fetch Go modules from a Go module proxy via wget"""
+
+ def supports(self, ud, d):
+ """Check to see if a given URL is for this fetcher."""
+ return ud.type == 'gomod'
+
+ def urldata_init(self, ud, d):
+ """Set up to download the module from the module proxy.
+
+ Set up to download the module zip file to the module cache directory
+ and unpack the go.mod file (unless downloading only the go.mod file):
+
+ cache/download/<module>/@v/<version>.zip: The module zip file.
+ cache/download/<module>/@v/<version>.mod: The go.mod file.
+ """
+
+ proxy = d.getVar('GO_MOD_PROXY') or 'proxy.golang.org'
+ moddir = d.getVar('GO_MOD_CACHE_DIR') or 'pkg/mod'
+
+ if 'version' not in ud.parm:
+ raise MissingParameterError('version', ud.url)
+
+ module = ud.host
+ if ud.path != '/':
+ module += ud.path
+ ud.parm['module'] = module
+
+ # Set URL and filename for wget download
+ path = escape(module + '/@v/' + ud.parm['version'])
+ if ud.parm.get('mod', '0') == '1':
+ path += '.mod'
+ else:
+ path += '.zip'
+ ud.parm['unpack'] = '0'
+ ud.url = bb.fetch2.encodeurl(
+ ('https', proxy, '/' + path, None, None, None))
+ ud.parm['downloadfilename'] = path
+
+ # Set name parameter if sha256sum is set in recipe
+ name = f"{module}@{ud.parm['version']}"
+ if d.getVarFlag('SRC_URI', name + '.sha256sum'):
+ ud.parm['name'] = name
+
+ # Set subdir for unpack
+ ud.parm['subdir'] = os.path.join(moddir, 'cache/download',
+ os.path.dirname(path))
+
+ super().urldata_init(ud, d)
+
+ def unpack(self, ud, rootdir, d):
+ """Unpack the module in the module cache."""
+
+ # Unpack the module zip file or go.mod file
+ super().unpack(ud, rootdir, d)
+
+ if ud.localpath.endswith('.zip'):
+ # Unpack the go.mod file from the zip file
+ module = ud.parm['module']
+ unpackdir = os.path.join(rootdir, ud.parm['subdir'])
+ name = os.path.basename(ud.localpath).rsplit('.', 1)[0] + '.mod'
+ bb.note(f"Unpacking {name} to {unpackdir}/")
+ with zipfile.ZipFile(ud.localpath) as zf:
+ with open(os.path.join(unpackdir, name), mode='wb') as mf:
+ try:
+ f = module + '@' + ud.parm['version'] + '/go.mod'
+ shutil.copyfileobj(zf.open(f), mf)
+ except KeyError:
+ # If the module does not have a go.mod file, synthesize
+ # one containing only a module statement.
+ mf.write(f'module {module}\n'.encode())
+
+
+class GoModGit(Git):
+ """Class to fetch Go modules directly from a git repository"""
+
+ def supports(self, ud, d):
+ """Check to see if a given URL is for this fetcher."""
+ return ud.type == 'gomodgit'
+
+ def urldata_init(self, ud, d):
+ """Set up to download the module from the git repository.
+
+ Set up to download the git repository to the module cache directory and
+ unpack the module zip file and the go.mod file:
+
+ cache/vcs/<hash>: The bare git repository.
+ cache/download/<module>/@v/<version>.zip: The module zip file.
+ cache/download/<module>/@v/<version>.mod: The go.mod file.
+ """
+
+ moddir = d.getVar('GO_MOD_CACHE_DIR') or 'pkg/mod'
+
+ if 'version' not in ud.parm:
+ raise MissingParameterError('version', ud.url)
+
+ module = ud.host
+ if ud.path != '/':
+ module += ud.path
+ ud.parm['module'] = module
+
+ # Set host, path and srcrev for git download
+ if 'repo' in ud.parm:
+ repo = ud.parm['repo']
+ idx = repo.find('/')
+ if idx != -1:
+ ud.host = repo[:idx]
+ ud.path = repo[idx:]
+ else:
+ ud.host = repo
+ ud.path = ''
+ if 'protocol' not in ud.parm:
+ ud.parm['protocol'] = 'https'
+ name = f"{module}@{ud.parm['version']}"
+ ud.names = [name]
+ srcrev = d.getVar('SRCREV_' + name)
+ if srcrev:
+ if 'srcrev' not in ud.parm:
+ ud.parm['srcrev'] = srcrev
+ else:
+ if 'srcrev' in ud.parm:
+ d.setVar('SRCREV_' + name, ud.parm['srcrev'])
+ if 'branch' not in ud.parm:
+ ud.parm['nobranch'] = '1'
+
+ # Set subpath, subdir and bareclone for git unpack
+ if 'subdir' in ud.parm:
+ ud.parm['subpath'] = ud.parm['subdir']
+ key = f"git3:{ud.parm['protocol']}://{ud.host}{ud.path}".encode()
+ ud.parm['key'] = key
+ ud.parm['subdir'] = os.path.join(moddir, 'cache/vcs',
+ hashlib.sha256(key).hexdigest())
+ ud.parm['bareclone'] = '1'
+
+ super().urldata_init(ud, d)
+
+ def unpack(self, ud, rootdir, d):
+ """Unpack the module in the module cache."""
+
+ # Unpack the bare git repository
+ super().unpack(ud, rootdir, d)
+
+ moddir = d.getVar('GO_MOD_CACHE_DIR') or 'pkg/mod'
+
+ # Create the info file
+ module = ud.parm['module']
+ repodir = os.path.join(rootdir, ud.parm['subdir'])
+ with open(repodir + '.info', 'wb') as f:
+ f.write(ud.parm['key'])
+
+ # Unpack the go.mod file from the repository
+ unpackdir = os.path.join(rootdir, moddir, 'cache/download',
+ escape(module), '@v')
+ bb.utils.mkdirhier(unpackdir)
+ srcrev = ud.parm['srcrev']
+ version = ud.parm['version']
+ escaped_version = escape(version)
+ cmd = f"git ls-tree -r --name-only '{srcrev}'"
+ if 'subpath' in ud.parm:
+ cmd += f" '{ud.parm['subpath']}'"
+ files = runfetchcmd(cmd, d, workdir=repodir).split()
+ name = escaped_version + '.mod'
+ bb.note(f"Unpacking {name} to {unpackdir}/")
+ with open(os.path.join(unpackdir, name), mode='wb') as mf:
+ f = 'go.mod'
+ if 'subpath' in ud.parm:
+ f = os.path.join(ud.parm['subpath'], f)
+ if f in files:
+ cmd = ['git', 'cat-file', 'blob', srcrev + ':' + f]
+ subprocess.check_call(cmd, stdout=mf, cwd=repodir,
+ preexec_fn=subprocess_setup)
+ else:
+ # If the module does not have a go.mod file, synthesize one
+ # containing only a module statement.
+ mf.write(f'module {module}\n'.encode())
+
+ # Synthesize the module zip file from the repository
+ name = escaped_version + '.zip'
+ bb.note(f"Unpacking {name} to {unpackdir}/")
+ with zipfile.ZipFile(os.path.join(unpackdir, name), mode='w') as zf:
+ prefix = module + '@' + version + '/'
+ for f in files:
+ cmd = ['git', 'cat-file', 'blob', srcrev + ':' + f]
+ data = subprocess.check_output(cmd, cwd=repodir,
+ preexec_fn=subprocess_setup)
+ zf.writestr(prefix + f, data)
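
escape() implements the Go module proxy's case encoding: each capital letter is replaced by '!' plus its lowercase form, so module paths remain unambiguous on case-insensitive filesystems. For example:

    import re

    def escape(path):
        """Escape capital letters using exclamation points."""
        return re.sub(r'([A-Z])', lambda m: '!' + m.group(1).lower(), path)

    assert escape('github.com/Azure/azure-sdk-for-go') == \
           'github.com/!azure/azure-sdk-for-go'
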
diff --git a/poky/bitbake/lib/bb/fetch2/npm.py b/poky/bitbake/lib/bb/fetch2/npm.py
index 15f3f19bc8..ac76d64cdb 100644
--- a/poky/bitbake/lib/bb/fetch2/npm.py
+++ b/poky/bitbake/lib/bb/fetch2/npm.py
@@ -42,11 +42,12 @@ from bb.utils import is_semver
def npm_package(package):
"""Convert the npm package name to remove unsupported character"""
- # Scoped package names (with the @) use the same naming convention
- # as the 'npm pack' command.
+ # For scoped package names ('@user/package') the '/' is replaced by a '-'.
+ # This is similar to what 'npm pack' does, but 'npm pack' also strips the
+ # leading '@', which can lead to ambiguous package names.
name = re.sub("/", "-", package)
name = name.lower()
- name = re.sub(r"[^\-a-z0-9]", "", name)
+ name = re.sub(r"[^\-a-z0-9@]", "", name)
name = name.strip("-")
return name
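
Keeping '@' in npm_package() preserves the scope marker, so a scoped and an unscoped package can no longer collapse to the same cache name. Behaviour of the new mapping:

    import re

    def npm_package(package):
        name = re.sub("/", "-", package)
        name = name.lower()
        name = re.sub(r"[^\-a-z0-9@]", "", name)
        return name.strip("-")

    assert npm_package("@angular/core") == "@angular-core"
    assert npm_package("angular-core") == "angular-core"   # distinct from above
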
diff --git a/poky/bitbake/lib/bb/fetch2/npmsw.py b/poky/bitbake/lib/bb/fetch2/npmsw.py
index b55e885d7b..a5fa598deb 100644
--- a/poky/bitbake/lib/bb/fetch2/npmsw.py
+++ b/poky/bitbake/lib/bb/fetch2/npmsw.py
@@ -97,7 +97,7 @@ class NpmShrinkWrap(FetchMethod):
integrity = params.get("integrity", None)
resolved = params.get("resolved", None)
- version = params.get("version", None)
+ version = params.get("version", resolved)
# Handle registry sources
if is_semver(version) and integrity:
@@ -184,6 +184,7 @@ class NpmShrinkWrap(FetchMethod):
uri = URI("git://" + str(groups["url"]))
uri.params["protocol"] = str(groups["protocol"])
uri.params["rev"] = str(groups["rev"])
+ uri.params["nobranch"] = "1"
uri.params["destsuffix"] = destsuffix
url = str(uri)
diff --git a/poky/bitbake/lib/bb/fetch2/wget.py b/poky/bitbake/lib/bb/fetch2/wget.py
index 2e92117634..80f5ea46c7 100644
--- a/poky/bitbake/lib/bb/fetch2/wget.py
+++ b/poky/bitbake/lib/bb/fetch2/wget.py
@@ -87,7 +87,7 @@ class Wget(FetchMethod):
if not ud.localfile:
ud.localfile = d.expand(urllib.parse.unquote(ud.host + ud.path).replace("/", "."))
- self.basecmd = d.getVar("FETCHCMD_wget") or "/usr/bin/env wget -t 2 -T 30"
+ self.basecmd = d.getVar("FETCHCMD_wget") or "/usr/bin/env wget -t 2 -T 100"
if ud.type == 'ftp' or ud.type == 'ftps':
self.basecmd += " --passive-ftp"
@@ -244,7 +244,12 @@ class Wget(FetchMethod):
fetch.connection_cache.remove_connection(h.host, h.port)
raise urllib.error.URLError(err)
else:
- r = h.getresponse()
+ try:
+ r = h.getresponse()
+ except TimeoutError as e:
+ if fetch.connection_cache:
+ fetch.connection_cache.remove_connection(h.host, h.port)
+ raise TimeoutError(e)
# Pick apart the HTTPResponse object to get the addinfourl
# object initialized properly.
@@ -371,7 +376,7 @@ class Wget(FetchMethod):
except (FileNotFoundError, netrc.NetrcParseError):
pass
- with opener.open(r, timeout=30) as response:
+ with opener.open(r, timeout=100) as response:
pass
except (urllib.error.URLError, ConnectionResetError, TimeoutError) as e:
if try_again:
diff --git a/poky/bitbake/lib/bb/msg.py b/poky/bitbake/lib/bb/msg.py
index 3e18596faa..4f616ff42e 100644
--- a/poky/bitbake/lib/bb/msg.py
+++ b/poky/bitbake/lib/bb/msg.py
@@ -89,10 +89,6 @@ class BBLogFormatter(logging.Formatter):
msg = logging.Formatter.format(self, record)
if hasattr(record, 'bb_exc_formatted'):
msg += '\n' + ''.join(record.bb_exc_formatted)
- elif hasattr(record, 'bb_exc_info'):
- etype, value, tb = record.bb_exc_info
- formatted = bb.exceptions.format_exception(etype, value, tb, limit=5)
- msg += '\n' + ''.join(formatted)
return msg
def colorize(self, record):
diff --git a/poky/bitbake/lib/bb/parse/ast.py b/poky/bitbake/lib/bb/parse/ast.py
index 7581d003fd..001ba8d289 100644
--- a/poky/bitbake/lib/bb/parse/ast.py
+++ b/poky/bitbake/lib/bb/parse/ast.py
@@ -240,14 +240,16 @@ class ExportFuncsNode(AstNode):
data.setVar(func, sentinel + " " + calledfunc + "\n", parsing=True)
class AddTaskNode(AstNode):
- def __init__(self, filename, lineno, func, before, after):
+ def __init__(self, filename, lineno, tasks, before, after):
AstNode.__init__(self, filename, lineno)
- self.func = func
+ self.tasks = tasks
self.before = before
self.after = after
def eval(self, data):
- bb.build.addtask(self.func, self.before, self.after, data)
+ tasks = self.tasks.split()
+ for task in tasks:
+ bb.build.addtask(task, self.before, self.after, data)
class DelTaskNode(AstNode):
def __init__(self, filename, lineno, tasks):
@@ -348,21 +350,11 @@ def handlePythonMethod(statements, filename, lineno, funcname, modulename, body)
def handleExportFuncs(statements, filename, lineno, m, classname):
statements.append(ExportFuncsNode(filename, lineno, m.group(1), classname))
-def handleAddTask(statements, filename, lineno, m):
- func = m.group("func")
- before = m.group("before")
- after = m.group("after")
- if func is None:
- return
-
- statements.append(AddTaskNode(filename, lineno, func, before, after))
-
-def handleDelTask(statements, filename, lineno, m):
- func = m.group(1)
- if func is None:
- return
+def handleAddTask(statements, filename, lineno, tasks, before, after):
+ statements.append(AddTaskNode(filename, lineno, tasks, before, after))
- statements.append(DelTaskNode(filename, lineno, func))
+def handleDelTask(statements, filename, lineno, tasks):
+ statements.append(DelTaskNode(filename, lineno, tasks))
def handleBBHandlers(statements, filename, lineno, m):
statements.append(BBHandlerNode(filename, lineno, m.group(1)))
diff --git a/poky/bitbake/lib/bb/parse/parse_py/BBHandler.py b/poky/bitbake/lib/bb/parse/parse_py/BBHandler.py
index c13e4b9755..4bdb11994f 100644
--- a/poky/bitbake/lib/bb/parse/parse_py/BBHandler.py
+++ b/poky/bitbake/lib/bb/parse/parse_py/BBHandler.py
@@ -23,8 +23,8 @@ __func_start_regexp__ = re.compile(r"(((?P<py>python(?=(\s|\()))|(?P<fr>faker
__inherit_regexp__ = re.compile(r"inherit\s+(.+)" )
__inherit_def_regexp__ = re.compile(r"inherit_defer\s+(.+)" )
__export_func_regexp__ = re.compile(r"EXPORT_FUNCTIONS\s+(.+)" )
-__addtask_regexp__ = re.compile(r"addtask\s+(?P<func>\w+)\s*((before\s*(?P<before>((.*(?=after))|(.*))))|(after\s*(?P<after>((.*(?=before))|(.*)))))*")
-__deltask_regexp__ = re.compile(r"deltask\s+(.+)")
+__addtask_regexp__ = re.compile(r"addtask\s+([^#\n]+)(?P<comment>#.*|.*?)")
+__deltask_regexp__ = re.compile(r"deltask\s+([^#\n]+)(?P<comment>#.*|.*?)")
__addhandler_regexp__ = re.compile(r"addhandler\s+(.+)" )
__def_regexp__ = re.compile(r"def\s+(\w+).*:" )
__python_func_regexp__ = re.compile(r"(\s+.*)|(^$)|(^#)" )
@@ -239,29 +239,38 @@ def feeder(lineno, s, fn, root, statements, eof=False):
m = __addtask_regexp__.match(s)
if m:
- if len(m.group().split()) == 2:
- # Check and warn for "addtask task1 task2"
- m2 = re.match(r"addtask\s+(?P<func>\w+)(?P<ignores>.*)", s)
- if m2 and m2.group('ignores'):
- logger.warning('addtask ignored: "%s"' % m2.group('ignores'))
-
- # Check and warn for "addtask task1 before task2 before task3", the
- # similar to "after"
- taskexpression = s.split()
- for word in ('before', 'after'):
- if taskexpression.count(word) > 1:
- logger.warning("addtask contained multiple '%s' keywords, only one is supported" % word)
+ after = ""
+ before = ""
+
+ # This code splits on 'before' and 'after' instead of on whitespace so we can defer
+ # evaluation to as late as possible.
+ tasks = m.group(1).split(" before ")[0].split(" after ")[0]
+
+ for exp in m.group(1).split(" before "):
+ exp2 = exp.split(" after ")
+ if len(exp2) > 1:
+ after = after + " ".join(exp2[1:])
- # Check and warn for having task with exprssion as part of task name
+ for exp in m.group(1).split(" after "):
+ exp2 = exp.split(" before ")
+ if len(exp2) > 1:
+ before = before + " ".join(exp2[1:])
+
+ # Check and warn for having task with a keyword as part of task name
+ taskexpression = s.split()
for te in taskexpression:
if any( ( "%s_" % keyword ) in te for keyword in bb.data_smart.__setvar_keyword__ ):
raise ParseError("Task name '%s' contains a keyword which is not recommended/supported.\nPlease rename the task not to include the keyword.\n%s" % (te, ("\n".join(map(str, bb.data_smart.__setvar_keyword__)))), fn)
- ast.handleAddTask(statements, fn, lineno, m)
+
+ if tasks is not None:
+ ast.handleAddTask(statements, fn, lineno, tasks, before, after)
return
m = __deltask_regexp__.match(s)
if m:
- ast.handleDelTask(statements, fn, lineno, m)
+ task = m.group(1)
+ if task is not None:
+ ast.handleDelTask(statements, fn, lineno, task)
return
m = __addhandler_regexp__.match(s)
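
The replacement parsing splits the addtask expression on the literal keywords " before " and " after " instead of matching everything with one regex, so any number of task names and clauses in any order are handled. The splitting logic in isolation:

    def split_addtask(expr):
        """expr is everything after 'addtask', with comments stripped."""
        after = ""
        before = ""
        tasks = expr.split(" before ")[0].split(" after ")[0]
        for exp in expr.split(" before "):
            exp2 = exp.split(" after ")
            if len(exp2) > 1:
                after = after + " ".join(exp2[1:])
        for exp in expr.split(" after "):
            exp2 = exp.split(" before ")
            if len(exp2) > 1:
                before = before + " ".join(exp2[1:])
        return tasks, before, after

    # split_addtask("mytask before do_build after do_compile")
    # -> ("mytask", "do_build", "do_compile")
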
diff --git a/poky/bitbake/lib/bb/persist_data.py b/poky/bitbake/lib/bb/persist_data.py
index bcca791edf..c4454b153a 100644
--- a/poky/bitbake/lib/bb/persist_data.py
+++ b/poky/bitbake/lib/bb/persist_data.py
@@ -154,6 +154,7 @@ class SQLTable(collections.abc.MutableMapping):
def __exit__(self, *excinfo):
self.connection.__exit__(*excinfo)
+ self.connection.close()
@_Decorators.retry()
@_Decorators.transaction
diff --git a/poky/bitbake/lib/bb/runqueue.py b/poky/bitbake/lib/bb/runqueue.py
index 3462ed4457..7f95140c49 100644
--- a/poky/bitbake/lib/bb/runqueue.py
+++ b/poky/bitbake/lib/bb/runqueue.py
@@ -14,6 +14,7 @@ import os
import sys
import stat
import errno
+import itertools
import logging
import re
import bb
@@ -2189,12 +2190,20 @@ class RunQueueExecute:
if not hasattr(self, "sorted_setscene_tids"):
# Don't want to sort this set every execution
self.sorted_setscene_tids = sorted(self.rqdata.runq_setscene_tids)
+ # Resume looping where we left off when we returned to feed the mainloop
+ self.setscene_tids_generator = itertools.cycle(self.rqdata.runq_setscene_tids)
task = None
if not self.sqdone and self.can_start_task():
- # Find the next setscene to run
- for nexttask in self.sorted_setscene_tids:
+ loopcount = 0
+ # Find the next setscene to run, exit the loop when we've processed all tids or found something to execute
+ while loopcount < len(self.rqdata.runq_setscene_tids):
+ loopcount += 1
+ nexttask = next(self.setscene_tids_generator)
if nexttask in self.sq_buildable and nexttask not in self.sq_running and self.sqdata.stamps[nexttask] not in self.build_stamps.values() and nexttask not in self.sq_harddep_deferred:
+ if nexttask in self.sq_deferred and self.sq_deferred[nexttask] not in self.runq_complete:
+ # Skip deferred tasks quickly before the 'expensive' tests below - this is key to performant multiconfig builds
+ continue
if nexttask not in self.sqdata.unskippable and self.sqdata.sq_revdeps[nexttask] and \
nexttask not in self.sq_needed_harddeps and \
self.sqdata.sq_revdeps[nexttask].issubset(self.scenequeue_covered) and \
@@ -2224,8 +2233,7 @@ class RunQueueExecute:
if t in self.runq_running and t not in self.runq_complete:
continue
if nexttask in self.sq_deferred:
- if self.sq_deferred[nexttask] not in self.runq_complete:
- continue
+                        # Tasks that are still deferred were skipped above, so we can now process this one
logger.debug("Task %s no longer deferred" % nexttask)
del self.sq_deferred[nexttask]
valid = self.rq.validate_hashes(set([nexttask]), self.cooker.data, 0, False, summary=False)
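
The effect of the itertools.cycle change is that each call into this code resumes scanning after the last tid it handed out, instead of rescanning the sorted list from the start, while loopcount bounds the scan to one full lap. A standalone sketch of the pattern (tid names are illustrative):

    import itertools

    tids = ["mc1:a", "mc1:b", "mc2:a", "mc2:b"]
    gen = itertools.cycle(tids)  # persists across calls, unlike a fresh for-loop

    def next_runnable(runnable):
        # Scan at most one full lap, resuming after the last tid returned.
        for _ in range(len(tids)):
            tid = next(gen)
            if tid in runnable:
                return tid
        return None

    print(next_runnable({"mc1:b", "mc2:b"}))  # mc1:b
    print(next_runnable({"mc1:b", "mc2:b"}))  # mc2:b: resumed, not rescanned
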
@@ -2748,8 +2756,12 @@ class RunQueueExecute:
logger.debug2("%s was unavailable and is a hard dependency of %s so skipping" % (task, dep))
self.sq_task_failoutright(dep)
continue
+
+ # For performance, compute allcovered only once, and only when there are dependencies to check
+ if self.sqdata.sq_deps[task]:
+ allcovered = self.scenequeue_covered | self.scenequeue_notcovered
for dep in sorted(self.sqdata.sq_deps[task]):
- if self.sqdata.sq_revdeps[dep].issubset(self.scenequeue_covered | self.scenequeue_notcovered):
+ if self.sqdata.sq_revdeps[dep].issubset(allcovered):
if dep not in self.sq_buildable:
self.sq_buildable.add(dep)
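
The second hunk is a loop-invariant hoist: the union of the covered and notcovered sets cannot change while the dependency loop runs, so it is now built once instead of once per dependency. A toy illustration:

    covered = {"t1", "t2"}
    notcovered = {"t3"}
    deps = [{"t1"}, {"t1", "t3"}, {"t2", "t4"}]

    allcovered = covered | notcovered  # built once, not once per dependency
    print([d.issubset(allcovered) for d in deps])  # [True, True, False]
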
diff --git a/poky/bitbake/lib/bb/siggen.py b/poky/bitbake/lib/bb/siggen.py
index 8f24535528..a6163b55ea 100644
--- a/poky/bitbake/lib/bb/siggen.py
+++ b/poky/bitbake/lib/bb/siggen.py
@@ -878,7 +878,7 @@ def clean_checksum_file_path(file_checksum_tuple):
f, cs = file_checksum_tuple
if "/./" in f:
return "./" + f.split("/./")[1]
- return f
+ return os.path.basename(f)
def dump_this_task(outfile, d):
import bb.parse
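
With this change, any checksum path lacking a "/./" marker is reduced to its basename, presumably so host-specific absolute paths no longer leak into signature data. The resulting behaviour, shown with illustrative paths:

    import os

    def clean_checksum_file_path(file_checksum_tuple):
        f, cs = file_checksum_tuple
        if "/./" in f:
            return "./" + f.split("/./")[1]
        return os.path.basename(f)  # previously: return f

    print(clean_checksum_file_path(("/work/recipe/./src/main.c", "abc")))  # ./src/main.c
    print(clean_checksum_file_path(("/home/user/file.patch", "def")))      # file.patch
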
diff --git a/poky/bitbake/lib/bb/tests/fetch.py b/poky/bitbake/lib/bb/tests/fetch.py
index 701129d138..363f3a2185 100644
--- a/poky/bitbake/lib/bb/tests/fetch.py
+++ b/poky/bitbake/lib/bb/tests/fetch.py
@@ -1427,12 +1427,12 @@ class FetchLatestVersionTest(FetcherTest):
("dtc", "git://git.yoctoproject.org/bbfetchtests-dtc.git;branch=master;protocol=https", "65cc4d2748a2c2e6f27f1cf39e07a5dbabd80ebf", "", "")
: "1.4.0",
# combination version pattern
- ("sysprof", "git://gitlab.gnome.org/GNOME/sysprof.git;protocol=https;branch=master", "cd44ee6644c3641507fb53b8a2a69137f2971219", "", "")
+ ("sysprof", "git://git.yoctoproject.org/sysprof.git;protocol=https;branch=master", "cd44ee6644c3641507fb53b8a2a69137f2971219", "", "")
: "1.2.0",
("u-boot-mkimage", "git://source.denx.de/u-boot/u-boot.git;branch=master;protocol=https", "62c175fbb8a0f9a926c88294ea9f7e88eb898f6c", "", "")
: "2014.01",
# version pattern "yyyymmdd"
- ("mobile-broadband-provider-info", "git://gitlab.gnome.org/GNOME/mobile-broadband-provider-info.git;protocol=https;branch=master", "4ed19e11c2975105b71b956440acdb25d46a347d", "", "")
+ ("mobile-broadband-provider-info", "git://git.yoctoproject.org/mobile-broadband-provider-info.git;protocol=https;branch=master", "4ed19e11c2975105b71b956440acdb25d46a347d", "", "")
: "20120614",
# packages with a valid UPSTREAM_CHECK_GITTAGREGEX
# mirror of git://anongit.freedesktop.org/xorg/driver/xf86-video-omap since network issues interfered with testing
@@ -2034,9 +2034,9 @@ class GitShallowTest(FetcherTest):
self.add_empty_file('b')
self.git('checkout -b a_branch', cwd=self.srcdir)
self.add_empty_file('c')
+ self.git('tag v0.0 HEAD', cwd=self.srcdir)
self.add_empty_file('d')
self.git('checkout master', cwd=self.srcdir)
- self.git('tag v0.0 a_branch', cwd=self.srcdir)
self.add_empty_file('e')
self.git('merge --no-ff --no-edit a_branch', cwd=self.srcdir)
self.add_empty_file('f')
@@ -2052,7 +2052,7 @@ class GitShallowTest(FetcherTest):
self.fetch_shallow(uri)
- self.assertRevCount(5)
+ self.assertRevCount(4)
self.assertRefs(['master', 'origin/master', 'origin/a_branch'])
def test_shallow_multi_one_uri_depths(self):
@@ -2199,7 +2199,7 @@ class GitShallowTest(FetcherTest):
self.fetch_shallow()
- self.assertRevCount(5)
+ self.assertRevCount(2)
def test_shallow_invalid_revs(self):
self.add_empty_file('a')
@@ -2218,7 +2218,10 @@ class GitShallowTest(FetcherTest):
self.git('tag v0.0 master', cwd=self.srcdir)
self.d.setVar('BB_GIT_SHALLOW_DEPTH', '0')
self.d.setVar('BB_GIT_SHALLOW_REVS', 'v0.0')
- self.fetch_shallow()
+
+ with self.assertRaises(bb.fetch2.FetchError), self.assertLogs("BitBake.Fetcher", level="ERROR") as cm:
+ self.fetch_shallow()
+ self.assertIn("fatal: no commits selected for shallow requests", cm.output[0])
def test_shallow_fetch_missing_revs_fails(self):
self.add_empty_file('a')
@@ -2249,7 +2252,7 @@ class GitShallowTest(FetcherTest):
revs = len(self.git('rev-list master').splitlines())
self.assertNotEqual(orig_revs, revs)
self.assertRefs(['master', 'origin/master'])
- self.assertRevCount(orig_revs - 1758)
+ self.assertRevCount(orig_revs - 1760)
def test_that_unpack_throws_an_error_when_the_git_clone_nor_shallow_tarball_exist(self):
self.add_empty_file('a')
@@ -3387,3 +3390,212 @@ class FetchPremirroronlyBrokenTarball(FetcherTest):
fetcher.download()
output = "".join(logs.output)
self.assertFalse(" not a git repository (or any parent up to mount point /)" in output)
+
+class GoModTest(FetcherTest):
+
+ @skipIfNoNetwork()
+ def test_gomod_url(self):
+ urls = ['gomod://github.com/Azure/azure-sdk-for-go/sdk/storage/azblob;version=v1.0.0;'
+ 'sha256sum=9bb69aea32f1d59711701f9562d66432c9c0374205e5009d1d1a62f03fb4fdad']
+
+ fetcher = bb.fetch2.Fetch(urls, self.d)
+ ud = fetcher.ud[urls[0]]
+ self.assertEqual(ud.url, 'https://proxy.golang.org/github.com/%21azure/azure-sdk-for-go/sdk/storage/azblob/%40v/v1.0.0.zip')
+ self.assertNotIn('name', ud.parm)
+
+ fetcher.download()
+ fetcher.unpack(self.unpackdir)
+ downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download')
+ self.assertTrue(os.path.exists(os.path.join(downloaddir, 'github.com/!azure/azure-sdk-for-go/sdk/storage/azblob/@v/v1.0.0.zip')))
+ self.assertTrue(os.path.exists(os.path.join(downloaddir, 'github.com/!azure/azure-sdk-for-go/sdk/storage/azblob/@v/v1.0.0.mod')))
+ self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'github.com/!azure/azure-sdk-for-go/sdk/storage/azblob/@v/v1.0.0.mod')),
+ '7873b8544842329b4f385a3aa6cf82cc2bc8defb41a04fa5291c35fd5900e873')
+
+ @skipIfNoNetwork()
+ def test_gomod_url_go_mod_only(self):
+ urls = ['gomod://github.com/Azure/azure-sdk-for-go/sdk/storage/azblob;version=v1.0.0;mod=1;'
+ 'sha256sum=7873b8544842329b4f385a3aa6cf82cc2bc8defb41a04fa5291c35fd5900e873']
+
+ fetcher = bb.fetch2.Fetch(urls, self.d)
+ ud = fetcher.ud[urls[0]]
+ self.assertEqual(ud.url, 'https://proxy.golang.org/github.com/%21azure/azure-sdk-for-go/sdk/storage/azblob/%40v/v1.0.0.mod')
+ self.assertNotIn('name', ud.parm)
+
+ fetcher.download()
+ fetcher.unpack(self.unpackdir)
+ downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download')
+ self.assertTrue(os.path.exists(os.path.join(downloaddir, 'github.com/!azure/azure-sdk-for-go/sdk/storage/azblob/@v/v1.0.0.mod')))
+
+ @skipIfNoNetwork()
+ def test_gomod_url_sha256sum_varflag(self):
+ urls = ['gomod://gopkg.in/ini.v1;version=v1.67.0']
+ self.d.setVarFlag('SRC_URI', 'gopkg.in/ini.v1@v1.67.0.sha256sum', 'bd845dfc762a87a56e5a32a07770dc83e86976db7705d7f89c5dbafdc60b06c6')
+
+ fetcher = bb.fetch2.Fetch(urls, self.d)
+ ud = fetcher.ud[urls[0]]
+ self.assertEqual(ud.url, 'https://proxy.golang.org/gopkg.in/ini.v1/%40v/v1.67.0.zip')
+ self.assertEqual(ud.parm['name'], 'gopkg.in/ini.v1@v1.67.0')
+
+ fetcher.download()
+ fetcher.unpack(self.unpackdir)
+ downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download')
+ self.assertTrue(os.path.exists(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.zip')))
+ self.assertTrue(os.path.exists(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.mod')))
+ self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.mod')),
+ '13aedd85db8e555104108e0e613bb7e4d1242af7f27c15423dd9ab63b60b72a1')
+
+ @skipIfNoNetwork()
+ def test_gomod_url_no_go_mod_in_module(self):
+ urls = ['gomod://gopkg.in/ini.v1;version=v1.67.0;'
+ 'sha256sum=bd845dfc762a87a56e5a32a07770dc83e86976db7705d7f89c5dbafdc60b06c6']
+
+ fetcher = bb.fetch2.Fetch(urls, self.d)
+ ud = fetcher.ud[urls[0]]
+ self.assertEqual(ud.url, 'https://proxy.golang.org/gopkg.in/ini.v1/%40v/v1.67.0.zip')
+ self.assertNotIn('name', ud.parm)
+
+ fetcher.download()
+ fetcher.unpack(self.unpackdir)
+ downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download')
+ self.assertTrue(os.path.exists(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.zip')))
+ self.assertTrue(os.path.exists(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.mod')))
+ self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.mod')),
+ '13aedd85db8e555104108e0e613bb7e4d1242af7f27c15423dd9ab63b60b72a1')
+
+ @skipIfNoNetwork()
+ def test_gomod_url_host_only(self):
+ urls = ['gomod://go.opencensus.io;version=v0.24.0;'
+ 'sha256sum=203a767d7f8e7c1ebe5588220ad168d1e15b14ae70a636de7ca9a4a88a7e0d0c']
+
+ fetcher = bb.fetch2.Fetch(urls, self.d)
+ ud = fetcher.ud[urls[0]]
+ self.assertEqual(ud.url, 'https://proxy.golang.org/go.opencensus.io/%40v/v0.24.0.zip')
+ self.assertNotIn('name', ud.parm)
+
+ fetcher.download()
+ fetcher.unpack(self.unpackdir)
+ downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download')
+ self.assertTrue(os.path.exists(os.path.join(downloaddir, 'go.opencensus.io/@v/v0.24.0.zip')))
+ self.assertTrue(os.path.exists(os.path.join(downloaddir, 'go.opencensus.io/@v/v0.24.0.mod')))
+ self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'go.opencensus.io/@v/v0.24.0.mod')),
+ '0dc9ccc660ad21cebaffd548f2cc6efa27891c68b4fbc1f8a3893b00f1acec96')
+
+class GoModGitTest(FetcherTest):
+
+ @skipIfNoNetwork()
+ def test_gomodgit_url_repo(self):
+ urls = ['gomodgit://golang.org/x/net;version=v0.9.0;'
+ 'repo=go.googlesource.com/net;'
+ 'srcrev=694cff8668bac64e0864b552bffc280cd27f21b1']
+
+ fetcher = bb.fetch2.Fetch(urls, self.d)
+ ud = fetcher.ud[urls[0]]
+ self.assertEqual(ud.host, 'go.googlesource.com')
+ self.assertEqual(ud.path, '/net')
+ self.assertEqual(ud.names, ['golang.org/x/net@v0.9.0'])
+ self.assertEqual(self.d.getVar('SRCREV_golang.org/x/net@v0.9.0'), '694cff8668bac64e0864b552bffc280cd27f21b1')
+
+ fetcher.download()
+ self.assertTrue(os.path.exists(ud.localpath))
+
+ fetcher.unpack(self.unpackdir)
+ vcsdir = os.path.join(self.unpackdir, 'pkg/mod/cache/vcs')
+ self.assertTrue(os.path.exists(os.path.join(vcsdir, 'ed42bd05533fd84ae290a5d33ebd3695a0a2b06131beebd5450825bee8603aca')))
+ downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download')
+ self.assertTrue(os.path.exists(os.path.join(downloaddir, 'golang.org/x/net/@v/v0.9.0.zip')))
+ self.assertTrue(os.path.exists(os.path.join(downloaddir, 'golang.org/x/net/@v/v0.9.0.mod')))
+ self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'golang.org/x/net/@v/v0.9.0.mod')),
+ 'c5d6851ede50ec1c001afb763040194b68961bf06997e2605e8bf06dcd2aeb2e')
+
+ @skipIfNoNetwork()
+ def test_gomodgit_url_subdir(self):
+ urls = ['gomodgit://github.com/Azure/azure-sdk-for-go/sdk/storage/azblob;version=v1.0.0;'
+ 'repo=github.com/Azure/azure-sdk-for-go;subdir=sdk/storage/azblob;'
+ 'srcrev=ec928e0ed34db682b3f783d3739d1c538142e0c3']
+
+ fetcher = bb.fetch2.Fetch(urls, self.d)
+ ud = fetcher.ud[urls[0]]
+ self.assertEqual(ud.host, 'github.com')
+ self.assertEqual(ud.path, '/Azure/azure-sdk-for-go')
+ self.assertEqual(ud.parm['subpath'], 'sdk/storage/azblob')
+ self.assertEqual(ud.names, ['github.com/Azure/azure-sdk-for-go/sdk/storage/azblob@v1.0.0'])
+ self.assertEqual(self.d.getVar('SRCREV_github.com/Azure/azure-sdk-for-go/sdk/storage/azblob@v1.0.0'), 'ec928e0ed34db682b3f783d3739d1c538142e0c3')
+
+ fetcher.download()
+ self.assertTrue(os.path.exists(ud.localpath))
+
+ fetcher.unpack(self.unpackdir)
+ vcsdir = os.path.join(self.unpackdir, 'pkg/mod/cache/vcs')
+ self.assertTrue(os.path.exists(os.path.join(vcsdir, 'd31d6145676ed3066ce573a8198f326dea5be45a43b3d8f41ce7787fd71d66b3')))
+ downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download')
+ self.assertTrue(os.path.exists(os.path.join(downloaddir, 'github.com/!azure/azure-sdk-for-go/sdk/storage/azblob/@v/v1.0.0.zip')))
+ self.assertTrue(os.path.exists(os.path.join(downloaddir, 'github.com/!azure/azure-sdk-for-go/sdk/storage/azblob/@v/v1.0.0.mod')))
+ self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'github.com/!azure/azure-sdk-for-go/sdk/storage/azblob/@v/v1.0.0.mod')),
+ '7873b8544842329b4f385a3aa6cf82cc2bc8defb41a04fa5291c35fd5900e873')
+
+ @skipIfNoNetwork()
+ def test_gomodgit_url_srcrev_var(self):
+ urls = ['gomodgit://gopkg.in/ini.v1;version=v1.67.0']
+ self.d.setVar('SRCREV_gopkg.in/ini.v1@v1.67.0', 'b2f570e5b5b844226bbefe6fb521d891f529a951')
+
+ fetcher = bb.fetch2.Fetch(urls, self.d)
+ ud = fetcher.ud[urls[0]]
+ self.assertEqual(ud.host, 'gopkg.in')
+ self.assertEqual(ud.path, '/ini.v1')
+ self.assertEqual(ud.names, ['gopkg.in/ini.v1@v1.67.0'])
+ self.assertEqual(ud.parm['srcrev'], 'b2f570e5b5b844226bbefe6fb521d891f529a951')
+
+ fetcher.download()
+ fetcher.unpack(self.unpackdir)
+ vcsdir = os.path.join(self.unpackdir, 'pkg/mod/cache/vcs')
+ self.assertTrue(os.path.exists(os.path.join(vcsdir, 'b7879a4be9ba8598851b8278b14c4f71a8316be64913298d1639cce6bde59bc3')))
+ downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download')
+ self.assertTrue(os.path.exists(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.zip')))
+ self.assertTrue(os.path.exists(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.mod')))
+ self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.mod')),
+ '13aedd85db8e555104108e0e613bb7e4d1242af7f27c15423dd9ab63b60b72a1')
+
+ @skipIfNoNetwork()
+ def test_gomodgit_url_no_go_mod_in_module(self):
+ urls = ['gomodgit://gopkg.in/ini.v1;version=v1.67.0;'
+ 'srcrev=b2f570e5b5b844226bbefe6fb521d891f529a951']
+
+ fetcher = bb.fetch2.Fetch(urls, self.d)
+ ud = fetcher.ud[urls[0]]
+ self.assertEqual(ud.host, 'gopkg.in')
+ self.assertEqual(ud.path, '/ini.v1')
+ self.assertEqual(ud.names, ['gopkg.in/ini.v1@v1.67.0'])
+ self.assertEqual(self.d.getVar('SRCREV_gopkg.in/ini.v1@v1.67.0'), 'b2f570e5b5b844226bbefe6fb521d891f529a951')
+
+ fetcher.download()
+ fetcher.unpack(self.unpackdir)
+ vcsdir = os.path.join(self.unpackdir, 'pkg/mod/cache/vcs')
+ self.assertTrue(os.path.exists(os.path.join(vcsdir, 'b7879a4be9ba8598851b8278b14c4f71a8316be64913298d1639cce6bde59bc3')))
+ downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download')
+ self.assertTrue(os.path.exists(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.zip')))
+ self.assertTrue(os.path.exists(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.mod')))
+ self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.mod')),
+ '13aedd85db8e555104108e0e613bb7e4d1242af7f27c15423dd9ab63b60b72a1')
+
+ @skipIfNoNetwork()
+ def test_gomodgit_url_host_only(self):
+ urls = ['gomodgit://go.opencensus.io;version=v0.24.0;'
+ 'repo=github.com/census-instrumentation/opencensus-go;'
+ 'srcrev=b1a01ee95db0e690d91d7193d037447816fae4c5']
+
+ fetcher = bb.fetch2.Fetch(urls, self.d)
+ ud = fetcher.ud[urls[0]]
+ self.assertEqual(ud.host, 'github.com')
+ self.assertEqual(ud.path, '/census-instrumentation/opencensus-go')
+ self.assertEqual(ud.names, ['go.opencensus.io@v0.24.0'])
+ self.assertEqual(self.d.getVar('SRCREV_go.opencensus.io@v0.24.0'), 'b1a01ee95db0e690d91d7193d037447816fae4c5')
+
+ fetcher.download()
+ fetcher.unpack(self.unpackdir)
+ vcsdir = os.path.join(self.unpackdir, 'pkg/mod/cache/vcs')
+ self.assertTrue(os.path.exists(os.path.join(vcsdir, 'aae3ac7b2122ed3345654e6327855e9682f4a5350d63e93dbcfc51c4419df0e1')))
+ downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download')
+ self.assertTrue(os.path.exists(os.path.join(downloaddir, 'go.opencensus.io/@v/v0.24.0.zip')))
+ self.assertTrue(os.path.exists(os.path.join(downloaddir, 'go.opencensus.io/@v/v0.24.0.mod')))
+ self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'go.opencensus.io/@v/v0.24.0.mod')),
+ '0dc9ccc660ad21cebaffd548f2cc6efa27891c68b4fbc1f8a3893b00f1acec96')
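
A note on the expected URLs and paths in these tests: the Go module proxy protocol escapes each uppercase letter in a module path as '!' followed by its lowercase form, and URL quoting then renders the '!' as %21. A standalone sketch of that escaping rule (not the fetcher implementation):

    def escape_module_path(path):
        # Go module escaped-path rule: uppercase -> '!' + lowercase.
        return "".join("!" + c.lower() if c.isupper() else c for c in path)

    print(escape_module_path("github.com/Azure/azure-sdk-for-go"))
    # github.com/!azure/azure-sdk-for-go
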
diff --git a/poky/bitbake/lib/bb/tests/parse.py b/poky/bitbake/lib/bb/tests/parse.py
index 72d1962e7e..410679d5a1 100644
--- a/poky/bitbake/lib/bb/tests/parse.py
+++ b/poky/bitbake/lib/bb/tests/parse.py
@@ -177,7 +177,19 @@ python () {
addtask_deltask = """
addtask do_patch after do_foo after do_unpack before do_configure before do_compile
-addtask do_fetch do_patch
+addtask do_fetch2 do_patch2
+
+addtask do_myplaintask
+addtask do_myplaintask2
+deltask do_myplaintask2
+addtask do_mytask# comment
+addtask do_mytask2 # comment2
+addtask do_mytask3
+deltask do_mytask3# comment
+deltask do_mytask4 # comment2
+
+# Ensure that a task name missing the "do_" prefix in an "after" clause works
+addtask do_mytask5 after mytask
MYVAR = "do_patch"
EMPTYVAR = ""
@@ -185,17 +197,12 @@ deltask do_fetch ${MYVAR} ${EMPTYVAR}
deltask ${EMPTYVAR}
"""
def test_parse_addtask_deltask(self):
- import sys
- with self.assertLogs() as logs:
- f = self.parsehelper(self.addtask_deltask)
- d = bb.parse.handle(f.name, self.d)['']
+ f = self.parsehelper(self.addtask_deltask)
+ d = bb.parse.handle(f.name, self.d)['']
- output = "".join(logs.output)
- self.assertTrue("addtask contained multiple 'before' keywords" in output)
- self.assertTrue("addtask contained multiple 'after' keywords" in output)
- self.assertTrue('addtask ignored: " do_patch"' in output)
- #self.assertTrue('dependent task do_foo for do_patch does not exist' in output)
+ self.assertEqual(['do_fetch2', 'do_patch2', 'do_myplaintask', 'do_mytask', 'do_mytask2', 'do_mytask5'], d.getVar("__BBTASKS"))
+ self.assertEqual(['do_mytask'], d.getVarFlag("do_mytask5", "deps"))
broken_multiline_comment = """
# First line of comment \\
diff --git a/poky/bitbake/lib/bb/ui/knotty.py b/poky/bitbake/lib/bb/ui/knotty.py
index f86999bb09..3784c93ad8 100644
--- a/poky/bitbake/lib/bb/ui/knotty.py
+++ b/poky/bitbake/lib/bb/ui/knotty.py
@@ -577,6 +577,8 @@ def main(server, eventHandler, params, tf = TerminalFilter):
else:
log_exec_tty = False
+ should_print_hyperlinks = sys.stdout.isatty() and os.environ.get('NO_COLOR', '') == ''
+
helper = uihelper.BBUIHelper()
# Look for the specially designated handlers which need to be passed to the
@@ -640,7 +642,7 @@ def main(server, eventHandler, params, tf = TerminalFilter):
return_value = 0
errors = 0
warnings = 0
- taskfailures = []
+ taskfailures = {}
printintervaldelta = 10 * 60 # 10 minutes
printinterval = printintervaldelta
@@ -726,6 +728,8 @@ def main(server, eventHandler, params, tf = TerminalFilter):
if isinstance(event, bb.build.TaskFailed):
return_value = 1
print_event_log(event, includelogs, loglines, termfilter)
+ k = "{}:{}".format(event._fn, event._task)
+ taskfailures[k] = event.logfile
if isinstance(event, bb.build.TaskBase):
logger.info(event._message)
continue
@@ -821,7 +825,7 @@ def main(server, eventHandler, params, tf = TerminalFilter):
if isinstance(event, bb.runqueue.runQueueTaskFailed):
return_value = 1
- taskfailures.append(event.taskstring)
+ taskfailures.setdefault(event.taskstring)
logger.error(str(event))
continue
@@ -942,11 +946,21 @@ def main(server, eventHandler, params, tf = TerminalFilter):
try:
termfilter.clearFooter()
summary = ""
+ def format_hyperlink(url, link_text):
+ if should_print_hyperlinks:
+ start = f'\033]8;;{url}\033\\'
+ end = '\033]8;;\033\\'
+ return f'{start}{link_text}{end}'
+ return link_text
+
if taskfailures:
summary += pluralise("\nSummary: %s task failed:",
"\nSummary: %s tasks failed:", len(taskfailures))
- for failure in taskfailures:
+ for (failure, log_file) in taskfailures.items():
summary += "\n %s" % failure
+ if log_file:
+ hyperlink = format_hyperlink(f"file://{log_file}", log_file)
+ summary += "\n log: {}".format(hyperlink)
if warnings:
summary += pluralise("\nSummary: There was %s WARNING message.",
"\nSummary: There were %s WARNING messages.", warnings)
diff --git a/poky/bitbake/lib/bb/ui/teamcity.py b/poky/bitbake/lib/bb/ui/teamcity.py
index fca46c2874..7eeaab8d63 100644
--- a/poky/bitbake/lib/bb/ui/teamcity.py
+++ b/poky/bitbake/lib/bb/ui/teamcity.py
@@ -30,7 +30,6 @@ import bb.build
import bb.command
import bb.cooker
import bb.event
-import bb.exceptions
import bb.runqueue
from bb.ui import uihelper
@@ -102,10 +101,6 @@ class TeamcityLogFormatter(logging.Formatter):
details = ""
if hasattr(record, 'bb_exc_formatted'):
details = ''.join(record.bb_exc_formatted)
- elif hasattr(record, 'bb_exc_info'):
- etype, value, tb = record.bb_exc_info
- formatted = bb.exceptions.format_exception(etype, value, tb, limit=5)
- details = ''.join(formatted)
if record.levelno in [bb.msg.BBLogFormatter.ERROR, bb.msg.BBLogFormatter.CRITICAL]:
# ERROR gets a separate errorDetails field