summaryrefslogtreecommitdiff
path: root/poky/bitbake/lib/bb/fetch2
diff options
context:
space:
mode:
Diffstat (limited to 'poky/bitbake/lib/bb/fetch2')
-rw-r--r--poky/bitbake/lib/bb/fetch2/__init__.py69
-rw-r--r--poky/bitbake/lib/bb/fetch2/clearcase.py4
-rw-r--r--poky/bitbake/lib/bb/fetch2/gcp.py14
-rw-r--r--poky/bitbake/lib/bb/fetch2/git.py81
-rw-r--r--poky/bitbake/lib/bb/fetch2/gitsm.py44
-rw-r--r--poky/bitbake/lib/bb/fetch2/gomod.py268
-rw-r--r--poky/bitbake/lib/bb/fetch2/npm.py7
-rw-r--r--poky/bitbake/lib/bb/fetch2/npmsw.py3
-rw-r--r--poky/bitbake/lib/bb/fetch2/wget.py11
9 files changed, 396 insertions, 105 deletions
diff --git a/poky/bitbake/lib/bb/fetch2/__init__.py b/poky/bitbake/lib/bb/fetch2/__init__.py
index 5bf2c4b8cf..8f0ed2b9e2 100644
--- a/poky/bitbake/lib/bb/fetch2/__init__.py
+++ b/poky/bitbake/lib/bb/fetch2/__init__.py
@@ -499,30 +499,30 @@ def fetcher_init(d):
Calls before this must not hit the cache.
"""
- revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
- try:
- # fetcher_init is called multiple times, so make sure we only save the
- # revs the first time it is called.
- if not bb.fetch2.saved_headrevs:
- bb.fetch2.saved_headrevs = dict(revs)
- except:
- pass
-
- # When to drop SCM head revisions controlled by user policy
- srcrev_policy = d.getVar('BB_SRCREV_POLICY') or "clear"
- if srcrev_policy == "cache":
- logger.debug("Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
- elif srcrev_policy == "clear":
- logger.debug("Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
- revs.clear()
- else:
- raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)
+ with bb.persist_data.persist('BB_URI_HEADREVS', d) as revs:
+ try:
+ # fetcher_init is called multiple times, so make sure we only save the
+ # revs the first time it is called.
+ if not bb.fetch2.saved_headrevs:
+ bb.fetch2.saved_headrevs = dict(revs)
+ except:
+ pass
- _checksum_cache.init_cache(d.getVar("BB_CACHEDIR"))
+ # When to drop SCM head revisions controlled by user policy
+ srcrev_policy = d.getVar('BB_SRCREV_POLICY') or "clear"
+ if srcrev_policy == "cache":
+ logger.debug("Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
+ elif srcrev_policy == "clear":
+ logger.debug("Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
+ revs.clear()
+ else:
+ raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)
+
+ _checksum_cache.init_cache(d.getVar("BB_CACHEDIR"))
- for m in methods:
- if hasattr(m, "init"):
- m.init(d)
+ for m in methods:
+ if hasattr(m, "init"):
+ m.init(d)
def fetcher_parse_save():
_checksum_cache.save_extras()
@@ -536,8 +536,8 @@ def fetcher_compare_revisions(d):
when bitbake was started and return true if they have changed.
"""
- headrevs = dict(bb.persist_data.persist('BB_URI_HEADREVS', d))
- return headrevs != bb.fetch2.saved_headrevs
+ with dict(bb.persist_data.persist('BB_URI_HEADREVS', d)) as headrevs:
+ return headrevs != bb.fetch2.saved_headrevs
def mirror_from_string(data):
mirrors = (data or "").replace('\\n',' ').split()
@@ -1317,7 +1317,7 @@ class FetchData(object):
if checksum_name in self.parm:
checksum_expected = self.parm[checksum_name]
- elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3", "az", "crate", "gs"]:
+ elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3", "az", "crate", "gs", "gomod"]:
checksum_expected = None
else:
checksum_expected = d.getVarFlag("SRC_URI", checksum_name)
@@ -1606,7 +1606,7 @@ class FetchMethod(object):
if urlpath.find("/") != -1:
destdir = urlpath.rsplit("/", 1)[0] + '/'
bb.utils.mkdirhier("%s/%s" % (unpackdir, destdir))
- cmd = 'cp -fpPRH "%s" "%s"' % (file, destdir)
+ cmd = 'cp --force --preserve=timestamps --no-dereference --recursive -H "%s" "%s"' % (file, destdir)
else:
urldata.unpack_tracer.unpack("archive-extract", unpackdir)
@@ -1662,13 +1662,13 @@ class FetchMethod(object):
if not hasattr(self, "_latest_revision"):
raise ParameterError("The fetcher for this URL does not support _latest_revision", ud.url)
- revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
- key = self.generate_revision_key(ud, d, name)
- try:
- return revs[key]
- except KeyError:
- revs[key] = rev = self._latest_revision(ud, d, name)
- return rev
+ with bb.persist_data.persist('BB_URI_HEADREVS', d) as revs:
+ key = self.generate_revision_key(ud, d, name)
+ try:
+ return revs[key]
+ except KeyError:
+ revs[key] = rev = self._latest_revision(ud, d, name)
+ return rev
def sortable_revision(self, ud, d, name):
latest_rev = self._build_revision(ud, d, name)
@@ -2088,6 +2088,7 @@ from . import npmsw
from . import az
from . import crate
from . import gcp
+from . import gomod
methods.append(local.Local())
methods.append(wget.Wget())
@@ -2110,3 +2111,5 @@ methods.append(npmsw.NpmShrinkWrap())
methods.append(az.Az())
methods.append(crate.Crate())
methods.append(gcp.GCP())
+methods.append(gomod.GoMod())
+methods.append(gomod.GoModGit())
diff --git a/poky/bitbake/lib/bb/fetch2/clearcase.py b/poky/bitbake/lib/bb/fetch2/clearcase.py
index 1a9c863769..2b3bd70693 100644
--- a/poky/bitbake/lib/bb/fetch2/clearcase.py
+++ b/poky/bitbake/lib/bb/fetch2/clearcase.py
@@ -108,7 +108,7 @@ class ClearCase(FetchMethod):
ud.module.replace("/", "."),
ud.label.replace("/", "."))
- ud.viewname = "%s-view%s" % (ud.identifier, d.getVar("DATETIME", d, True))
+ ud.viewname = "%s-view%s" % (ud.identifier, d.getVar("DATETIME"))
ud.csname = "%s-config-spec" % (ud.identifier)
ud.ccasedir = os.path.join(d.getVar("DL_DIR"), ud.type)
ud.viewdir = os.path.join(ud.ccasedir, ud.viewname)
@@ -196,7 +196,7 @@ class ClearCase(FetchMethod):
def need_update(self, ud, d):
if ("LATEST" in ud.label) or (ud.customspec and "LATEST" in ud.customspec):
- ud.identifier += "-%s" % d.getVar("DATETIME",d, True)
+ ud.identifier += "-%s" % d.getVar("DATETIME")
return True
if os.path.exists(ud.localpath):
return False
diff --git a/poky/bitbake/lib/bb/fetch2/gcp.py b/poky/bitbake/lib/bb/fetch2/gcp.py
index eb3e0c6a6b..2ee9ed2194 100644
--- a/poky/bitbake/lib/bb/fetch2/gcp.py
+++ b/poky/bitbake/lib/bb/fetch2/gcp.py
@@ -23,7 +23,6 @@ import urllib.parse, urllib.error
from bb.fetch2 import FetchMethod
from bb.fetch2 import FetchError
from bb.fetch2 import logger
-from bb.fetch2 import runfetchcmd
class GCP(FetchMethod):
"""
@@ -48,7 +47,6 @@ class GCP(FetchMethod):
ud.basename = os.path.basename(ud.path)
ud.localfile = d.expand(urllib.parse.unquote(ud.basename))
- ud.basecmd = "gsutil stat"
def get_gcp_client(self):
from google.cloud import storage
@@ -59,17 +57,20 @@ class GCP(FetchMethod):
Fetch urls using the GCP API.
Assumes localpath was called first.
"""
+ from google.api_core.exceptions import NotFound
logger.debug2(f"Trying to download gs://{ud.host}{ud.path} to {ud.localpath}")
if self.gcp_client is None:
self.get_gcp_client()
- bb.fetch2.check_network_access(d, ud.basecmd, f"gs://{ud.host}{ud.path}")
- runfetchcmd("%s %s" % (ud.basecmd, f"gs://{ud.host}{ud.path}"), d)
+ bb.fetch2.check_network_access(d, "blob.download_to_filename", f"gs://{ud.host}{ud.path}")
# Path sometimes has leading slash, so strip it
path = ud.path.lstrip("/")
blob = self.gcp_client.bucket(ud.host).blob(path)
- blob.download_to_filename(ud.localpath)
+ try:
+ blob.download_to_filename(ud.localpath)
+ except NotFound:
+ raise FetchError("The GCP API threw a NotFound exception")
# Additional sanity checks copied from the wget class (although there
# are no known issues which mean these are required, treat the GCP API
@@ -91,8 +92,7 @@ class GCP(FetchMethod):
if self.gcp_client is None:
self.get_gcp_client()
- bb.fetch2.check_network_access(d, ud.basecmd, f"gs://{ud.host}{ud.path}")
- runfetchcmd("%s %s" % (ud.basecmd, f"gs://{ud.host}{ud.path}"), d)
+ bb.fetch2.check_network_access(d, "gcp_client.bucket(ud.host).blob(path).exists()", f"gs://{ud.host}{ud.path}")
# Path sometimes has leading slash, so strip it
path = ud.path.lstrip("/")
diff --git a/poky/bitbake/lib/bb/fetch2/git.py b/poky/bitbake/lib/bb/fetch2/git.py
index c7ff769fdf..5b678827ed 100644
--- a/poky/bitbake/lib/bb/fetch2/git.py
+++ b/poky/bitbake/lib/bb/fetch2/git.py
@@ -262,7 +262,7 @@ class Git(FetchMethod):
for name in ud.names:
ud.unresolvedrev[name] = 'HEAD'
- ud.basecmd = d.getVar("FETCHCMD_git") or "git -c gc.autoDetach=false -c core.pager=cat -c safe.bareRepository=all"
+ ud.basecmd = d.getVar("FETCHCMD_git") or "git -c gc.autoDetach=false -c core.pager=cat -c safe.bareRepository=all -c clone.defaultRemoteName=origin"
write_tarballs = d.getVar("BB_GENERATE_MIRROR_TARBALLS") or "0"
ud.write_tarballs = write_tarballs != "0" or ud.rebaseable
@@ -551,18 +551,31 @@ class Git(FetchMethod):
runfetchcmd("touch %s.done" % ud.fullmirror, d)
def clone_shallow_local(self, ud, dest, d):
- """Clone the repo and make it shallow.
+ """
+ Shallow fetch from ud.clonedir (${DL_DIR}/git2/<gitrepo> by default):
+ - For BB_GIT_SHALLOW_DEPTH: git fetch --depth <depth> rev
+ - For BB_GIT_SHALLOW_REVS: git fetch --shallow-exclude=<revs> rev
+ """
+
+ bb.utils.mkdirhier(dest)
+ init_cmd = "%s init -q" % ud.basecmd
+ if ud.bareclone:
+ init_cmd += " --bare"
+ runfetchcmd(init_cmd, d, workdir=dest)
+ runfetchcmd("%s remote add origin %s" % (ud.basecmd, ud.clonedir), d, workdir=dest)
- The upstream url of the new clone isn't set at this time, as it'll be
- set correctly when unpacked."""
- runfetchcmd("%s clone %s %s %s" % (ud.basecmd, ud.cloneflags, ud.clonedir, dest), d)
+ # Check the histories which should be excluded
+ shallow_exclude = ''
+ for revision in ud.shallow_revs:
+ shallow_exclude += " --shallow-exclude=%s" % revision
- to_parse, shallow_branches = [], []
for name in ud.names:
revision = ud.revisions[name]
depth = ud.shallow_depths[name]
- if depth:
- to_parse.append('%s~%d^{}' % (revision, depth - 1))
+
+ # The --depth and --shallow-exclude can't be used together
+ if depth and shallow_exclude:
+ raise bb.fetch2.FetchError("BB_GIT_SHALLOW_REVS is set, but BB_GIT_SHALLOW_DEPTH is not 0.")
# For nobranch, we need a ref, otherwise the commits will be
# removed, and for non-nobranch, we truncate the branch to our
@@ -575,36 +588,49 @@ class Git(FetchMethod):
else:
ref = "refs/remotes/origin/%s" % branch
- shallow_branches.append(ref)
- runfetchcmd("%s update-ref %s %s" % (ud.basecmd, ref, revision), d, workdir=dest)
+ fetch_cmd = "%s fetch origin %s" % (ud.basecmd, revision)
+ if depth:
+ fetch_cmd += " --depth %s" % depth
+
+ if shallow_exclude:
+ fetch_cmd += shallow_exclude
- # Map srcrev+depths to revisions
- parsed_depths = runfetchcmd("%s rev-parse %s" % (ud.basecmd, " ".join(to_parse)), d, workdir=dest)
+            # Advertise the revision for older git versions such as 2.25.1:
+ # error: Server does not allow request for unadvertised object.
+ # The ud.clonedir is a local temporary dir, will be removed when
+ # fetch is done, so we can do anything on it.
+ adv_cmd = 'git branch -f advertise-%s %s' % (revision, revision)
+ runfetchcmd(adv_cmd, d, workdir=ud.clonedir)
- # Resolve specified revisions
- parsed_revs = runfetchcmd("%s rev-parse %s" % (ud.basecmd, " ".join('"%s^{}"' % r for r in ud.shallow_revs)), d, workdir=dest)
- shallow_revisions = parsed_depths.splitlines() + parsed_revs.splitlines()
+ runfetchcmd(fetch_cmd, d, workdir=dest)
+ runfetchcmd("%s update-ref %s %s" % (ud.basecmd, ref, revision), d, workdir=dest)
# Apply extra ref wildcards
- all_refs = runfetchcmd('%s for-each-ref "--format=%%(refname)"' % ud.basecmd,
- d, workdir=dest).splitlines()
+ all_refs_remote = runfetchcmd("%s ls-remote origin 'refs/*'" % ud.basecmd, \
+ d, workdir=dest).splitlines()
+ all_refs = []
+ for line in all_refs_remote:
+ all_refs.append(line.split()[-1])
+ extra_refs = []
for r in ud.shallow_extra_refs:
if not ud.bareclone:
r = r.replace('refs/heads/', 'refs/remotes/origin/')
if '*' in r:
matches = filter(lambda a: fnmatch.fnmatchcase(a, r), all_refs)
- shallow_branches.extend(matches)
+ extra_refs.extend(matches)
else:
- shallow_branches.append(r)
+ extra_refs.append(r)
- # Make the repository shallow
- shallow_cmd = [self.make_shallow_path, '-s']
- for b in shallow_branches:
- shallow_cmd.append('-r')
- shallow_cmd.append(b)
- shallow_cmd.extend(shallow_revisions)
- runfetchcmd(subprocess.list2cmdline(shallow_cmd), d, workdir=dest)
+ for ref in extra_refs:
+ ref_fetch = os.path.basename(ref)
+ runfetchcmd("%s fetch origin --depth 1 %s" % (ud.basecmd, ref_fetch), d, workdir=dest)
+ revision = runfetchcmd("%s rev-parse FETCH_HEAD" % ud.basecmd, d, workdir=dest)
+ runfetchcmd("%s update-ref %s %s" % (ud.basecmd, ref, revision), d, workdir=dest)
+
+ # The url is local ud.clonedir, set it to upstream one
+ repourl = self._get_repo_url(ud)
+ runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, shlex.quote(repourl)), d, workdir=dest)
def unpack(self, ud, destdir, d):
""" unpack the downloaded src to destdir"""
@@ -926,9 +952,8 @@ class Git(FetchMethod):
commits = None
else:
if not os.path.exists(rev_file) or not os.path.getsize(rev_file):
- from pipes import quote
commits = bb.fetch2.runfetchcmd(
- "git rev-list %s -- | wc -l" % quote(rev),
+ "git rev-list %s -- | wc -l" % shlex.quote(rev),
d, quiet=True).strip().lstrip('0')
if commits:
open(rev_file, "w").write("%d\n" % int(commits))
diff --git a/poky/bitbake/lib/bb/fetch2/gitsm.py b/poky/bitbake/lib/bb/fetch2/gitsm.py
index f7f3af7212..fab4b1164c 100644
--- a/poky/bitbake/lib/bb/fetch2/gitsm.py
+++ b/poky/bitbake/lib/bb/fetch2/gitsm.py
@@ -147,6 +147,19 @@ class GitSM(Git):
return submodules != []
+ def call_process_submodules(self, ud, d, extra_check, subfunc):
+ # If we're using a shallow mirror tarball it needs to be
+ # unpacked temporarily so that we can examine the .gitmodules file
+ if ud.shallow and os.path.exists(ud.fullshallow) and extra_check:
+ tmpdir = tempfile.mkdtemp(dir=d.getVar("DL_DIR"))
+ try:
+ runfetchcmd("tar -xzf %s" % ud.fullshallow, d, workdir=tmpdir)
+ self.process_submodules(ud, tmpdir, subfunc, d)
+ finally:
+ shutil.rmtree(tmpdir)
+ else:
+ self.process_submodules(ud, ud.clonedir, subfunc, d)
+
def need_update(self, ud, d):
if Git.need_update(self, ud, d):
return True
@@ -164,15 +177,7 @@ class GitSM(Git):
logger.error('gitsm: submodule update check failed: %s %s' % (type(e).__name__, str(e)))
need_update_result = True
- # If we're using a shallow mirror tarball it needs to be unpacked
- # temporarily so that we can examine the .gitmodules file
- if ud.shallow and os.path.exists(ud.fullshallow) and not os.path.exists(ud.clonedir):
- tmpdir = tempfile.mkdtemp(dir=d.getVar("DL_DIR"))
- runfetchcmd("tar -xzf %s" % ud.fullshallow, d, workdir=tmpdir)
- self.process_submodules(ud, tmpdir, need_update_submodule, d)
- shutil.rmtree(tmpdir)
- else:
- self.process_submodules(ud, ud.clonedir, need_update_submodule, d)
+ self.call_process_submodules(ud, d, not os.path.exists(ud.clonedir), need_update_submodule)
if need_update_list:
logger.debug('gitsm: Submodules requiring update: %s' % (' '.join(need_update_list)))
@@ -195,16 +200,7 @@ class GitSM(Git):
raise
Git.download(self, ud, d)
-
- # If we're using a shallow mirror tarball it needs to be unpacked
- # temporarily so that we can examine the .gitmodules file
- if ud.shallow and os.path.exists(ud.fullshallow) and self.need_update(ud, d):
- tmpdir = tempfile.mkdtemp(dir=d.getVar("DL_DIR"))
- runfetchcmd("tar -xzf %s" % ud.fullshallow, d, workdir=tmpdir)
- self.process_submodules(ud, tmpdir, download_submodule, d)
- shutil.rmtree(tmpdir)
- else:
- self.process_submodules(ud, ud.clonedir, download_submodule, d)
+ self.call_process_submodules(ud, d, self.need_update(ud, d), download_submodule)
def unpack(self, ud, destdir, d):
def unpack_submodules(ud, url, module, modpath, workdir, d):
@@ -263,14 +259,6 @@ class GitSM(Git):
newfetch = Fetch([url], d, cache=False)
urldata.extend(newfetch.expanded_urldata())
- # If we're using a shallow mirror tarball it needs to be unpacked
- # temporarily so that we can examine the .gitmodules file
- if ud.shallow and os.path.exists(ud.fullshallow) and ud.method.need_update(ud, d):
- tmpdir = tempfile.mkdtemp(dir=d.getVar("DL_DIR"))
- subprocess.check_call("tar -xzf %s" % ud.fullshallow, cwd=tmpdir, shell=True)
- self.process_submodules(ud, tmpdir, add_submodule, d)
- shutil.rmtree(tmpdir)
- else:
- self.process_submodules(ud, ud.clonedir, add_submodule, d)
+ self.call_process_submodules(ud, d, ud.method.need_update(ud, d), add_submodule)
return urldata
diff --git a/poky/bitbake/lib/bb/fetch2/gomod.py b/poky/bitbake/lib/bb/fetch2/gomod.py
new file mode 100644
index 0000000000..21fbe80f56
--- /dev/null
+++ b/poky/bitbake/lib/bb/fetch2/gomod.py
@@ -0,0 +1,268 @@
+"""
+BitBake 'Fetch' implementation for Go modules
+
+The gomod/gomodgit fetchers are used to download Go modules to the module cache
+from a module proxy or directly from a version control repository.
+
+Example SRC_URI:
+
+SRC_URI += "gomod://golang.org/x/net;version=v0.9.0;sha256sum=..."
+SRC_URI += "gomodgit://golang.org/x/net;version=v0.9.0;repo=go.googlesource.com/net;srcrev=..."
+
+Required SRC_URI parameters:
+
+- version
+ The version of the module.
+
+Optional SRC_URI parameters:
+
+- mod
+ Fetch and unpack the go.mod file only instead of the complete module.
+ The go command may need to download go.mod files for many different modules
+ when computing the build list, and go.mod files are much smaller than
+ module zip files.
+ The default is "0", set mod=1 for the go.mod file only.
+
+- sha256sum
+ The checksum of the module zip file, or the go.mod file in case of fetching
+  only the go.mod file. Alternatively, set the SRC_URI variable flag for
+  "module@version.sha256sum".
+
+- protocol
+ The method used when fetching directly from a version control repository.
+ The default is "https" for git.
+
+- repo
+ The URL when fetching directly from a version control repository. Required
+ when the URL is different from the module path.
+
+- srcrev
+ The revision identifier used when fetching directly from a version control
+  repository. Alternatively, set the SRCREV variable for "module@version".
+
+- subdir
+ The module subdirectory when fetching directly from a version control
+ repository. Required when the module is not located in the root of the
+ repository.
+
+Related variables:
+
+- GO_MOD_PROXY
+ The module proxy used by the fetcher.
+
+- GO_MOD_CACHE_DIR
+ The directory where the module cache is located.
+ This must match the exported GOMODCACHE variable for the go command to find
+ the downloaded modules.
+
+See the Go modules reference, https://go.dev/ref/mod, for more information
+about the module cache, module proxies and version control systems.
+"""
+
+import hashlib
+import os
+import re
+import shutil
+import subprocess
+import zipfile
+
+import bb
+from bb.fetch2 import FetchError
+from bb.fetch2 import MissingParameterError
+from bb.fetch2 import runfetchcmd
+from bb.fetch2 import subprocess_setup
+from bb.fetch2.git import Git
+from bb.fetch2.wget import Wget
+
+
+def escape(path):
+ """Escape capital letters using exclamation points."""
+ return re.sub(r'([A-Z])', lambda m: '!' + m.group(1).lower(), path)
+
+
+class GoMod(Wget):
+ """Class to fetch Go modules from a Go module proxy via wget"""
+
+ def supports(self, ud, d):
+ """Check to see if a given URL is for this fetcher."""
+ return ud.type == 'gomod'
+
+ def urldata_init(self, ud, d):
+ """Set up to download the module from the module proxy.
+
+ Set up to download the module zip file to the module cache directory
+ and unpack the go.mod file (unless downloading only the go.mod file):
+
+ cache/download/<module>/@v/<version>.zip: The module zip file.
+ cache/download/<module>/@v/<version>.mod: The go.mod file.
+ """
+
+ proxy = d.getVar('GO_MOD_PROXY') or 'proxy.golang.org'
+ moddir = d.getVar('GO_MOD_CACHE_DIR') or 'pkg/mod'
+
+ if 'version' not in ud.parm:
+ raise MissingParameterError('version', ud.url)
+
+ module = ud.host
+ if ud.path != '/':
+ module += ud.path
+ ud.parm['module'] = module
+
+ # Set URL and filename for wget download
+ path = escape(module + '/@v/' + ud.parm['version'])
+ if ud.parm.get('mod', '0') == '1':
+ path += '.mod'
+ else:
+ path += '.zip'
+ ud.parm['unpack'] = '0'
+ ud.url = bb.fetch2.encodeurl(
+ ('https', proxy, '/' + path, None, None, None))
+ ud.parm['downloadfilename'] = path
+
+ # Set name parameter if sha256sum is set in recipe
+ name = f"{module}@{ud.parm['version']}"
+ if d.getVarFlag('SRC_URI', name + '.sha256sum'):
+ ud.parm['name'] = name
+
+ # Set subdir for unpack
+ ud.parm['subdir'] = os.path.join(moddir, 'cache/download',
+ os.path.dirname(path))
+
+ super().urldata_init(ud, d)
+
+ def unpack(self, ud, rootdir, d):
+ """Unpack the module in the module cache."""
+
+ # Unpack the module zip file or go.mod file
+ super().unpack(ud, rootdir, d)
+
+ if ud.localpath.endswith('.zip'):
+ # Unpack the go.mod file from the zip file
+ module = ud.parm['module']
+ unpackdir = os.path.join(rootdir, ud.parm['subdir'])
+ name = os.path.basename(ud.localpath).rsplit('.', 1)[0] + '.mod'
+ bb.note(f"Unpacking {name} to {unpackdir}/")
+ with zipfile.ZipFile(ud.localpath) as zf:
+ with open(os.path.join(unpackdir, name), mode='wb') as mf:
+ try:
+ f = module + '@' + ud.parm['version'] + '/go.mod'
+ shutil.copyfileobj(zf.open(f), mf)
+ except KeyError:
+ # If the module does not have a go.mod file, synthesize
+ # one containing only a module statement.
+ mf.write(f'module {module}\n'.encode())
+
+
+class GoModGit(Git):
+ """Class to fetch Go modules directly from a git repository"""
+
+ def supports(self, ud, d):
+ """Check to see if a given URL is for this fetcher."""
+ return ud.type == 'gomodgit'
+
+ def urldata_init(self, ud, d):
+ """Set up to download the module from the git repository.
+
+ Set up to download the git repository to the module cache directory and
+ unpack the module zip file and the go.mod file:
+
+ cache/vcs/<hash>: The bare git repository.
+ cache/download/<module>/@v/<version>.zip: The module zip file.
+ cache/download/<module>/@v/<version>.mod: The go.mod file.
+ """
+
+ moddir = d.getVar('GO_MOD_CACHE_DIR') or 'pkg/mod'
+
+ if 'version' not in ud.parm:
+ raise MissingParameterError('version', ud.url)
+
+ module = ud.host
+ if ud.path != '/':
+ module += ud.path
+ ud.parm['module'] = module
+
+ # Set host, path and srcrev for git download
+ if 'repo' in ud.parm:
+ repo = ud.parm['repo']
+ idx = repo.find('/')
+ if idx != -1:
+ ud.host = repo[:idx]
+ ud.path = repo[idx:]
+ else:
+ ud.host = repo
+ ud.path = ''
+ if 'protocol' not in ud.parm:
+ ud.parm['protocol'] = 'https'
+ name = f"{module}@{ud.parm['version']}"
+ ud.names = [name]
+ srcrev = d.getVar('SRCREV_' + name)
+ if srcrev:
+ if 'srcrev' not in ud.parm:
+ ud.parm['srcrev'] = srcrev
+ else:
+ if 'srcrev' in ud.parm:
+ d.setVar('SRCREV_' + name, ud.parm['srcrev'])
+ if 'branch' not in ud.parm:
+ ud.parm['nobranch'] = '1'
+
+ # Set subpath, subdir and bareclone for git unpack
+ if 'subdir' in ud.parm:
+ ud.parm['subpath'] = ud.parm['subdir']
+ key = f"git3:{ud.parm['protocol']}://{ud.host}{ud.path}".encode()
+ ud.parm['key'] = key
+ ud.parm['subdir'] = os.path.join(moddir, 'cache/vcs',
+ hashlib.sha256(key).hexdigest())
+ ud.parm['bareclone'] = '1'
+
+ super().urldata_init(ud, d)
+
+ def unpack(self, ud, rootdir, d):
+ """Unpack the module in the module cache."""
+
+ # Unpack the bare git repository
+ super().unpack(ud, rootdir, d)
+
+ moddir = d.getVar('GO_MOD_CACHE_DIR') or 'pkg/mod'
+
+ # Create the info file
+ module = ud.parm['module']
+ repodir = os.path.join(rootdir, ud.parm['subdir'])
+ with open(repodir + '.info', 'wb') as f:
+ f.write(ud.parm['key'])
+
+ # Unpack the go.mod file from the repository
+ unpackdir = os.path.join(rootdir, moddir, 'cache/download',
+ escape(module), '@v')
+ bb.utils.mkdirhier(unpackdir)
+ srcrev = ud.parm['srcrev']
+ version = ud.parm['version']
+ escaped_version = escape(version)
+ cmd = f"git ls-tree -r --name-only '{srcrev}'"
+ if 'subpath' in ud.parm:
+ cmd += f" '{ud.parm['subpath']}'"
+ files = runfetchcmd(cmd, d, workdir=repodir).split()
+ name = escaped_version + '.mod'
+ bb.note(f"Unpacking {name} to {unpackdir}/")
+ with open(os.path.join(unpackdir, name), mode='wb') as mf:
+ f = 'go.mod'
+ if 'subpath' in ud.parm:
+ f = os.path.join(ud.parm['subpath'], f)
+ if f in files:
+ cmd = ['git', 'cat-file', 'blob', srcrev + ':' + f]
+ subprocess.check_call(cmd, stdout=mf, cwd=repodir,
+ preexec_fn=subprocess_setup)
+ else:
+ # If the module does not have a go.mod file, synthesize one
+ # containing only a module statement.
+ mf.write(f'module {module}\n'.encode())
+
+ # Synthesize the module zip file from the repository
+ name = escaped_version + '.zip'
+ bb.note(f"Unpacking {name} to {unpackdir}/")
+ with zipfile.ZipFile(os.path.join(unpackdir, name), mode='w') as zf:
+ prefix = module + '@' + version + '/'
+ for f in files:
+ cmd = ['git', 'cat-file', 'blob', srcrev + ':' + f]
+ data = subprocess.check_output(cmd, cwd=repodir,
+ preexec_fn=subprocess_setup)
+ zf.writestr(prefix + f, data)
diff --git a/poky/bitbake/lib/bb/fetch2/npm.py b/poky/bitbake/lib/bb/fetch2/npm.py
index 15f3f19bc8..ac76d64cdb 100644
--- a/poky/bitbake/lib/bb/fetch2/npm.py
+++ b/poky/bitbake/lib/bb/fetch2/npm.py
@@ -42,11 +42,12 @@ from bb.utils import is_semver
def npm_package(package):
"""Convert the npm package name to remove unsupported character"""
- # Scoped package names (with the @) use the same naming convention
- # as the 'npm pack' command.
+ # For scoped package names ('@user/package') the '/' is replaced by a '-'.
+ # This is similar to what 'npm pack' does, but 'npm pack' also strips the
+ # leading '@', which can lead to ambiguous package names.
name = re.sub("/", "-", package)
name = name.lower()
- name = re.sub(r"[^\-a-z0-9]", "", name)
+ name = re.sub(r"[^\-a-z0-9@]", "", name)
name = name.strip("-")
return name
diff --git a/poky/bitbake/lib/bb/fetch2/npmsw.py b/poky/bitbake/lib/bb/fetch2/npmsw.py
index b55e885d7b..a5fa598deb 100644
--- a/poky/bitbake/lib/bb/fetch2/npmsw.py
+++ b/poky/bitbake/lib/bb/fetch2/npmsw.py
@@ -97,7 +97,7 @@ class NpmShrinkWrap(FetchMethod):
integrity = params.get("integrity", None)
resolved = params.get("resolved", None)
- version = params.get("version", None)
+ version = params.get("version", resolved)
# Handle registry sources
if is_semver(version) and integrity:
@@ -184,6 +184,7 @@ class NpmShrinkWrap(FetchMethod):
uri = URI("git://" + str(groups["url"]))
uri.params["protocol"] = str(groups["protocol"])
uri.params["rev"] = str(groups["rev"])
+ uri.params["nobranch"] = "1"
uri.params["destsuffix"] = destsuffix
url = str(uri)
diff --git a/poky/bitbake/lib/bb/fetch2/wget.py b/poky/bitbake/lib/bb/fetch2/wget.py
index 2e92117634..80f5ea46c7 100644
--- a/poky/bitbake/lib/bb/fetch2/wget.py
+++ b/poky/bitbake/lib/bb/fetch2/wget.py
@@ -87,7 +87,7 @@ class Wget(FetchMethod):
if not ud.localfile:
ud.localfile = d.expand(urllib.parse.unquote(ud.host + ud.path).replace("/", "."))
- self.basecmd = d.getVar("FETCHCMD_wget") or "/usr/bin/env wget -t 2 -T 30"
+ self.basecmd = d.getVar("FETCHCMD_wget") or "/usr/bin/env wget -t 2 -T 100"
if ud.type == 'ftp' or ud.type == 'ftps':
self.basecmd += " --passive-ftp"
@@ -244,7 +244,12 @@ class Wget(FetchMethod):
fetch.connection_cache.remove_connection(h.host, h.port)
raise urllib.error.URLError(err)
else:
- r = h.getresponse()
+ try:
+ r = h.getresponse()
+ except TimeoutError as e:
+ if fetch.connection_cache:
+ fetch.connection_cache.remove_connection(h.host, h.port)
+ raise TimeoutError(e)
# Pick apart the HTTPResponse object to get the addinfourl
# object initialized properly.
@@ -371,7 +376,7 @@ class Wget(FetchMethod):
except (FileNotFoundError, netrc.NetrcParseError):
pass
- with opener.open(r, timeout=30) as response:
+ with opener.open(r, timeout=100) as response:
pass
except (urllib.error.URLError, ConnectionResetError, TimeoutError) as e:
if try_again: