  1. """
  2. BitBake 'Fetch' git implementation
  3. git fetcher support the SRC_URI with format of:
  4. SRC_URI = "git://some.host/somepath;OptionA=xxx;OptionB=xxx;..."
  5. Supported SRC_URI options are:
  6. - branch
  7. The git branch to retrieve from. The default is "master"
  8. - tag
  9. The git tag to retrieve. The default is "master"
  10. - protocol
  11. The method to use to access the repository. Common options are "git",
  12. "http", "https", "file", "ssh" and "rsync". The default is "git".
  13. - rebaseable
  14. rebaseable indicates that the upstream git repo may rebase in the future,
  15. and current revision may disappear from upstream repo. This option will
  16. remind fetcher to preserve local cache carefully for future use.
  17. The default value is "0", set rebaseable=1 for rebaseable git repo.
  18. - nocheckout
  19. Don't checkout source code when unpacking. set this option for the recipe
  20. who has its own routine to checkout code.
  21. The default is "0", set nocheckout=1 if needed.
  22. - bareclone
  23. Create a bare clone of the source code and don't checkout the source code
  24. when unpacking. Set this option for the recipe who has its own routine to
  25. checkout code and tracking branch requirements.
  26. The default is "0", set bareclone=1 if needed.
  27. - nobranch
  28. Don't check the SHA validation for branch. set this option for the recipe
  29. referring to commit which is valid in any namespace (branch, tag, ...)
  30. instead of branch.
  31. The default is "0", set nobranch=1 if needed.
  32. - subpath
  33. Limit the checkout to a specific subpath of the tree.
  34. By default, checkout the whole tree, set subpath=<path> if needed
  35. - destsuffix
  36. The name of the path in which to place the checkout.
  37. By default, the path is git/, set destsuffix=<suffix> if needed
  38. - usehead
  39. For local git:// urls to use the current branch HEAD as the revision for use with
  40. AUTOREV. Implies nobranch.
  41. - lfs
  42. Enable the checkout to use LFS for large files. This will download all LFS files
  43. in the download step, as the unpack step does not have network access.
  44. The default is "1", set lfs=0 to skip.
  45. """
# Copyright (C) 2005 Richard Purdie
#
# SPDX-License-Identifier: GPL-2.0-only
#

import collections
import errno
import fnmatch
import os
import re
import shlex
import shutil
import subprocess
import tempfile
import urllib.parse

import bb
import bb.progress
from contextlib import contextmanager
from bb.fetch2 import FetchMethod
from bb.fetch2 import runfetchcmd
from bb.fetch2 import logger
from bb.fetch2 import trusted_network


sha1_re = re.compile(r'^[0-9a-f]{40}$')
slash_re = re.compile(r"/+")

class GitProgressHandler(bb.progress.LineFilterProgressHandler):
    """Extract progress information from git output"""
    def __init__(self, d):
        self._buffer = ''
        self._count = 0

        super(GitProgressHandler, self).__init__(d)
        # Send an initial progress event so the bar gets shown
        self._fire_progress(-1)

    def write(self, string):
        self._buffer += string
        stages = ['Counting objects', 'Compressing objects', 'Receiving objects', 'Resolving deltas']
        stage_weights = [0.2, 0.05, 0.5, 0.25]
        stagenum = 0
        for i, stage in reversed(list(enumerate(stages))):
            if stage in self._buffer:
                stagenum = i
                self._buffer = ''
                break

        self._status = stages[stagenum]

        percs = re.findall(r'(\d+)%', string)
        if percs:
            progress = int(round((int(percs[-1]) * stage_weights[stagenum]) + (sum(stage_weights[:stagenum]) * 100)))
            rates = re.findall(r'([\d.]+ [a-zA-Z]*/s+)', string)
            if rates:
                rate = rates[-1]
            else:
                rate = None
            self.update(progress, rate)
        else:
            if stagenum == 0:
                percs = re.findall(r': (\d+)', string)
                if percs:
                    count = int(percs[-1])
                    if count > self._count:
                        self._count = count
                        self._fire_progress(-count)

        super(GitProgressHandler, self).write(string)

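# Worked example (illustrative, based on the stage weights above): a git line
# such as "Receiving objects:  40% (4000/10000)" selects stage index 2, so the
# overall progress reported is int(round(40 * 0.5 + (0.2 + 0.05) * 100)) == 45.
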
class Git(FetchMethod):
    bitbake_dir = os.path.abspath(os.path.join(os.path.dirname(os.path.join(os.path.abspath(__file__))), '..', '..', '..'))
    make_shallow_path = os.path.join(bitbake_dir, 'bin', 'git-make-shallow')

    """Class to fetch a module or modules from git repositories"""
    def init(self, d):
        pass

    def supports(self, ud, d):
        """
        Check to see if a given url can be fetched with git.
        """
        return ud.type in ['git']

    def supports_checksum(self, urldata):
        return False

    def cleanup_upon_failure(self):
        return False

    def urldata_init(self, ud, d):
        """
        init git specific variable within url data
        so that the git method like latest_revision() can work
        """
        if 'protocol' in ud.parm:
            ud.proto = ud.parm['protocol']
        elif not ud.host:
            ud.proto = 'file'
        else:
            ud.proto = "git"
        if ud.host == "github.com" and ud.proto == "git":
            # github stopped supporting git protocol
            # https://github.blog/2021-09-01-improving-git-protocol-security-github/#no-more-unauthenticated-git
            ud.proto = "https"
            bb.warn("URL: %s uses git protocol which is no longer supported by github. Please change to ;protocol=https in the url." % ud.url)

        if not ud.proto in ('git', 'file', 'ssh', 'http', 'https', 'rsync'):
            raise bb.fetch2.ParameterError("Invalid protocol type", ud.url)

        ud.nocheckout = ud.parm.get("nocheckout","0") == "1"

        ud.rebaseable = ud.parm.get("rebaseable","0") == "1"

        ud.nobranch = ud.parm.get("nobranch","0") == "1"

        # usehead implies nobranch
        ud.usehead = ud.parm.get("usehead","0") == "1"
        if ud.usehead:
            if ud.proto != "file":
                raise bb.fetch2.ParameterError("The usehead option is only for use with local ('protocol=file') git repositories", ud.url)
            ud.nobranch = 1

        # bareclone implies nocheckout
        ud.bareclone = ud.parm.get("bareclone","0") == "1"
        if ud.bareclone:
            ud.nocheckout = 1

        ud.unresolvedrev = ""
        ud.branch = ud.parm.get("branch", "")
        if not ud.branch and not ud.nobranch:
            raise bb.fetch2.ParameterError("The url does not set any branch parameter or set nobranch=1.", ud.url)

        ud.noshared = d.getVar("BB_GIT_NOSHARED") == "1"

        ud.cloneflags = "-n"
        if not ud.noshared:
            ud.cloneflags += " -s"
        if ud.bareclone:
            ud.cloneflags += " --mirror"

        ud.shallow_skip_fast = False
        ud.shallow = d.getVar("BB_GIT_SHALLOW") == "1"
        ud.shallow_extra_refs = (d.getVar("BB_GIT_SHALLOW_EXTRA_REFS") or "").split()

        depth_default = d.getVar("BB_GIT_SHALLOW_DEPTH")
        if depth_default is not None:
            try:
                depth_default = int(depth_default or 0)
            except ValueError:
                raise bb.fetch2.FetchError("Invalid depth for BB_GIT_SHALLOW_DEPTH: %s" % depth_default)
            else:
                if depth_default < 0:
                    raise bb.fetch2.FetchError("Invalid depth for BB_GIT_SHALLOW_DEPTH: %s" % depth_default)
        else:
            depth_default = 1
        ud.shallow_depths = collections.defaultdict(lambda: depth_default)

        revs_default = d.getVar("BB_GIT_SHALLOW_REVS")
        ud.shallow_revs = []

        ud.unresolvedrev = ud.branch

        shallow_depth = d.getVar("BB_GIT_SHALLOW_DEPTH_%s" % ud.name)
        if shallow_depth is not None:
            try:
                shallow_depth = int(shallow_depth or 0)
            except ValueError:
                raise bb.fetch2.FetchError("Invalid depth for BB_GIT_SHALLOW_DEPTH_%s: %s" % (ud.name, shallow_depth))
            else:
                if shallow_depth < 0:
                    raise bb.fetch2.FetchError("Invalid depth for BB_GIT_SHALLOW_DEPTH_%s: %s" % (ud.name, shallow_depth))
                ud.shallow_depths[ud.name] = shallow_depth

        revs = d.getVar("BB_GIT_SHALLOW_REVS_%s" % ud.name)
        if revs is not None:
            ud.shallow_revs.extend(revs.split())
        elif revs_default is not None:
            ud.shallow_revs.extend(revs_default.split())

        if ud.shallow and not ud.shallow_revs and ud.shallow_depths[ud.name] == 0:
            # Shallow disabled for this URL
            ud.shallow = False

        if ud.usehead:
            # When usehead is set let's associate 'HEAD' with the unresolved
            # rev of this repository. This will get resolved into a revision
            # later. If an actual revision happens to have also been provided
            # then this setting will be overridden.
            ud.unresolvedrev = 'HEAD'

        ud.basecmd = d.getVar("FETCHCMD_git") or "git -c gc.autoDetach=false -c core.pager=cat -c safe.bareRepository=all -c clone.defaultRemoteName=origin"

        write_tarballs = d.getVar("BB_GENERATE_MIRROR_TARBALLS") or "0"
        ud.write_tarballs = write_tarballs != "0" or ud.rebaseable
        ud.write_shallow_tarballs = (d.getVar("BB_GENERATE_SHALLOW_TARBALLS") or write_tarballs) != "0"

        ud.setup_revisions(d)

        # Ensure any revision that doesn't look like a SHA-1 is translated into one
        if not sha1_re.match(ud.revision or ''):
            if ud.revision:
                ud.unresolvedrev = ud.revision
            ud.revision = self.latest_revision(ud, d, ud.name)

        gitsrcname = '%s%s' % (ud.host.replace(':', '.'), ud.path.replace('/', '.').replace('*', '.').replace(' ','_').replace('(', '_').replace(')', '_'))
        if gitsrcname.startswith('.'):
            gitsrcname = gitsrcname[1:]

        # For a rebaseable git repo, it is necessary to keep a mirror tar ball
        # per revision, so that even if the revision disappears from the
        # upstream repo in the future, the mirror will remain intact and still
        # contain the revision
        if ud.rebaseable:
            gitsrcname = gitsrcname + '_' + ud.revision

        dl_dir = d.getVar("DL_DIR")
        gitdir = d.getVar("GITDIR") or (dl_dir + "/git2")
        ud.clonedir = os.path.join(gitdir, gitsrcname)
        ud.localfile = ud.clonedir

        mirrortarball = 'git2_%s.tar.gz' % gitsrcname
        ud.fullmirror = os.path.join(dl_dir, mirrortarball)
        ud.mirrortarballs = [mirrortarball]
        if ud.shallow:
            tarballname = gitsrcname
            if ud.bareclone:
                tarballname = "%s_bare" % tarballname

            if ud.shallow_revs:
                tarballname = "%s_%s" % (tarballname, "_".join(sorted(ud.shallow_revs)))

            tarballname = "%s_%s" % (tarballname, ud.revision[:7])

            depth = ud.shallow_depths[ud.name]
            if depth:
                tarballname = "%s-%s" % (tarballname, depth)

            shallow_refs = []
            if not ud.nobranch:
                shallow_refs.append(ud.branch)
            if ud.shallow_extra_refs:
                shallow_refs.extend(r.replace('refs/heads/', '').replace('*', 'ALL') for r in ud.shallow_extra_refs)
            if shallow_refs:
                tarballname = "%s_%s" % (tarballname, "_".join(sorted(shallow_refs)).replace('/', '.'))

            fetcher = self.__class__.__name__.lower()
            ud.shallowtarball = '%sshallow_%s.tar.gz' % (fetcher, tarballname)
            ud.fullshallow = os.path.join(dl_dir, ud.shallowtarball)
            ud.mirrortarballs.insert(0, ud.shallowtarball)

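    # Example configuration (illustrative values, not defaults set by this
    # module): the shallow/tarball behaviour handled in urldata_init() above is
    # driven by variables such as these in a bitbake configuration file:
    #
    #   BB_GIT_SHALLOW = "1"
    #   BB_GIT_SHALLOW_DEPTH = "1"
    #   BB_GENERATE_MIRROR_TARBALLS = "1"
    #   BB_GENERATE_SHALLOW_TARBALLS = "1"
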
    def localpath(self, ud, d):
        return ud.clonedir

    def need_update(self, ud, d):
        return self.clonedir_need_update(ud, d) \
                or self.shallow_tarball_need_update(ud) \
                or self.tarball_need_update(ud) \
                or self.lfs_need_update(ud, d)

    def clonedir_need_update(self, ud, d):
        if not os.path.exists(ud.clonedir):
            return True
        if ud.shallow and ud.write_shallow_tarballs and self.clonedir_need_shallow_revs(ud, d):
            return True
        if not self._contains_ref(ud, d, ud.name, ud.clonedir):
            return True
        return False

    def lfs_need_update(self, ud, d):
        if not self._need_lfs(ud):
            return False

        if self.clonedir_need_update(ud, d):
            return True

        if not self._lfs_objects_downloaded(ud, d, ud.clonedir):
            return True
        return False

    def clonedir_need_shallow_revs(self, ud, d):
        for rev in ud.shallow_revs:
            try:
                runfetchcmd('%s rev-parse -q --verify %s' % (ud.basecmd, rev), d, quiet=True, workdir=ud.clonedir)
            except bb.fetch2.FetchError:
                return rev
        return None

    def shallow_tarball_need_update(self, ud):
        return ud.shallow and ud.write_shallow_tarballs and not os.path.exists(ud.fullshallow)

    def tarball_need_update(self, ud):
        return ud.write_tarballs and not os.path.exists(ud.fullmirror)

    def update_mirror_links(self, ud, origud):
        super().update_mirror_links(ud, origud)
        # When using shallow mode, add a symlink to the original fullshallow
        # path to ensure a valid symlink even in the `PREMIRRORS` case
        if ud.shallow and not os.path.exists(origud.fullshallow):
            self.ensure_symlink(ud.localpath, origud.fullshallow)

    def try_premirror(self, ud, d):
        # If we don't do this, updating an existing checkout with only premirrors
        # is not possible
        if bb.utils.to_boolean(d.getVar("BB_FETCH_PREMIRRORONLY")):
            return True
        # If the url is not in trusted network, that is, BB_NO_NETWORK is set to 0
        # and BB_ALLOWED_NETWORKS does not contain the host that ud.url uses, then
        # we need to try premirrors first as using upstream is destined to fail.
        if not trusted_network(d, ud.url):
            return True
        # the following check is to ensure incremental fetch in downloads, this is
        # because the premirror might be old and does not contain the new rev required,
        # and this will cause a total removal and new clone. So if we can reach to
        # network, we prefer upstream over premirror, though the premirror might contain
        # the new rev.
        if os.path.exists(ud.clonedir):
            return False
        return True

    def download(self, ud, d):
        """Fetch url"""
        # A current clone is preferred to either tarball, a shallow tarball is
        # preferred to an out of date clone, and a missing clone will use
        # either tarball.
        if ud.shallow and os.path.exists(ud.fullshallow) and self.need_update(ud, d):
            ud.localpath = ud.fullshallow
            return
        elif os.path.exists(ud.fullmirror) and self.need_update(ud, d):
            if not os.path.exists(ud.clonedir):
                bb.utils.mkdirhier(ud.clonedir)
                runfetchcmd("tar -xzf %s" % ud.fullmirror, d, workdir=ud.clonedir)
            else:
                tmpdir = tempfile.mkdtemp(dir=d.getVar('DL_DIR'))
                runfetchcmd("tar -xzf %s" % ud.fullmirror, d, workdir=tmpdir)
                output = runfetchcmd("%s remote" % ud.basecmd, d, quiet=True, workdir=ud.clonedir)
                if 'mirror' in output:
                    runfetchcmd("%s remote rm mirror" % ud.basecmd, d, workdir=ud.clonedir)
                runfetchcmd("%s remote add --mirror=fetch mirror %s" % (ud.basecmd, tmpdir), d, workdir=ud.clonedir)
                fetch_cmd = "LANG=C %s fetch -f --update-head-ok --progress mirror " % (ud.basecmd)
                runfetchcmd(fetch_cmd, d, workdir=ud.clonedir)
        repourl = self._get_repo_url(ud)

        needs_clone = False
        if os.path.exists(ud.clonedir):
            # The directory may exist, but not be the top level of a bare git
            # repository in which case it needs to be deleted and re-cloned.
            try:
                # Since clones can be bare, use --absolute-git-dir instead of --show-toplevel
                output = runfetchcmd("LANG=C %s rev-parse --absolute-git-dir" % ud.basecmd, d, workdir=ud.clonedir)
                toplevel = output.rstrip()

                if not bb.utils.path_is_descendant(toplevel, ud.clonedir):
                    logger.warning("Top level directory '%s' is not a descendant of '%s'. Re-cloning", toplevel, ud.clonedir)
                    needs_clone = True
            except bb.fetch2.FetchError as e:
                logger.warning("Unable to get top level for %s (not a git directory?): %s", ud.clonedir, e)
                needs_clone = True
            except FileNotFoundError as e:
                logger.warning("%s", e)
                needs_clone = True

            if needs_clone:
                shutil.rmtree(ud.clonedir)
        else:
            needs_clone = True

        # If the repo still doesn't exist, fallback to cloning it
        if needs_clone:
            # We do this since git will use a "-l" option automatically for local urls where possible,
            # but it doesn't work when git/objects is a symlink, only works when it is a directory.
            if repourl.startswith("file://"):
                repourl_path = repourl[7:]
                objects = os.path.join(repourl_path, 'objects')
                if os.path.isdir(objects) and not os.path.islink(objects):
                    repourl = repourl_path
            clone_cmd = "LANG=C %s clone --bare --mirror %s %s --progress" % (ud.basecmd, shlex.quote(repourl), ud.clonedir)
            if ud.proto.lower() != 'file':
                bb.fetch2.check_network_access(d, clone_cmd, ud.url)
            progresshandler = GitProgressHandler(d)

            # Try creating a fast initial shallow clone
            # Enabling ud.shallow_skip_fast will skip this
            # If the Git error "Server does not allow request for unadvertised object"
            # occurs, shallow_skip_fast is enabled automatically.
            # This may happen if the Git server does not allow the request
            # or if the Git client has issues with this functionality.
            if ud.shallow and not ud.shallow_skip_fast:
                try:
                    self.clone_shallow_with_tarball(ud, d)
                    # When the shallow clone has succeeded, use the shallow tarball
                    ud.localpath = ud.fullshallow
                    return
                except:
                    logger.warning("Creating fast initial shallow clone failed, try initial regular clone now.")

            # When skipping fast initial shallow or the fast initial shallow clone failed:
            # Try again with an initial regular clone
            runfetchcmd(clone_cmd, d, log=progresshandler)

        # Update the checkout if needed
        if self.clonedir_need_update(ud, d):
            output = runfetchcmd("%s remote" % ud.basecmd, d, quiet=True, workdir=ud.clonedir)
            if "origin" in output:
                runfetchcmd("%s remote rm origin" % ud.basecmd, d, workdir=ud.clonedir)

            runfetchcmd("%s remote add --mirror=fetch origin %s" % (ud.basecmd, shlex.quote(repourl)), d, workdir=ud.clonedir)

            if ud.nobranch:
                fetch_cmd = "LANG=C %s fetch -f --progress %s refs/*:refs/*" % (ud.basecmd, shlex.quote(repourl))
            else:
                fetch_cmd = "LANG=C %s fetch -f --progress %s refs/heads/*:refs/heads/* refs/tags/*:refs/tags/*" % (ud.basecmd, shlex.quote(repourl))
            if ud.proto.lower() != 'file':
                bb.fetch2.check_network_access(d, fetch_cmd, ud.url)
            progresshandler = GitProgressHandler(d)
            runfetchcmd(fetch_cmd, d, log=progresshandler, workdir=ud.clonedir)
            runfetchcmd("%s prune-packed" % ud.basecmd, d, workdir=ud.clonedir)
            runfetchcmd("%s pack-refs --all" % ud.basecmd, d, workdir=ud.clonedir)
            runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d, workdir=ud.clonedir)
            try:
                os.unlink(ud.fullmirror)
            except OSError as exc:
                if exc.errno != errno.ENOENT:
                    raise

        if not self._contains_ref(ud, d, ud.name, ud.clonedir):
            raise bb.fetch2.FetchError("Unable to find revision %s in branch %s even from upstream" % (ud.revision, ud.branch))

        if ud.shallow and ud.write_shallow_tarballs:
            missing_rev = self.clonedir_need_shallow_revs(ud, d)
            if missing_rev:
                raise bb.fetch2.FetchError("Unable to find revision %s even from upstream" % missing_rev)

        if self.lfs_need_update(ud, d):
            self.lfs_fetch(ud, d, ud.clonedir, ud.revision)

    def lfs_fetch(self, ud, d, clonedir, revision, fetchall=False, progresshandler=None):
        """Helper method for fetching Git LFS data"""
        try:
            if self._need_lfs(ud) and self._contains_lfs(ud, d, clonedir) and len(revision):
                self._ensure_git_lfs(d, ud)

                # Using a worktree with the revision because .lfsconfig may exist
                worktree_add_cmd = "%s worktree add wt %s" % (ud.basecmd, revision)
                runfetchcmd(worktree_add_cmd, d, log=progresshandler, workdir=clonedir)
                lfs_fetch_cmd = "%s lfs fetch %s" % (ud.basecmd, "--all" if fetchall else "")
                runfetchcmd(lfs_fetch_cmd, d, log=progresshandler, workdir=(clonedir + "/wt"))
                worktree_rem_cmd = "%s worktree remove -f wt" % ud.basecmd
                runfetchcmd(worktree_rem_cmd, d, log=progresshandler, workdir=clonedir)
        except:
            logger.warning("Fetching LFS did not succeed.")

    @contextmanager
    def create_atomic(self, filename):
        """Create as a temp file and move atomically into position to avoid races"""
        fd, tfile = tempfile.mkstemp(dir=os.path.dirname(filename))
        try:
            yield tfile
            umask = os.umask(0o666)
            os.umask(umask)
            os.chmod(tfile, (0o666 & ~umask))
            os.rename(tfile, filename)
        finally:
            os.close(fd)

    def build_mirror_data(self, ud, d):
        if ud.shallow and ud.write_shallow_tarballs:
            if not os.path.exists(ud.fullshallow):
                if os.path.islink(ud.fullshallow):
                    os.unlink(ud.fullshallow)
                self.clone_shallow_with_tarball(ud, d)
        elif ud.write_tarballs and not os.path.exists(ud.fullmirror):
            if os.path.islink(ud.fullmirror):
                os.unlink(ud.fullmirror)

            logger.info("Creating tarball of git repository")
            with self.create_atomic(ud.fullmirror) as tfile:
                mtime = runfetchcmd("{} log --all -1 --format=%cD".format(ud.basecmd), d,
                        quiet=True, workdir=ud.clonedir)
                runfetchcmd("tar -czf %s --owner oe:0 --group oe:0 --mtime \"%s\" ."
                        % (tfile, mtime), d, workdir=ud.clonedir)
            runfetchcmd("touch %s.done" % ud.fullmirror, d)

    def clone_shallow_with_tarball(self, ud, d):
        ret = False
        tempdir = tempfile.mkdtemp(dir=d.getVar('DL_DIR'))
        shallowclone = os.path.join(tempdir, 'git')
        try:
            try:
                self.clone_shallow_local(ud, shallowclone, d)
            except:
                logger.warning("Fast shallow clone failed, try to skip fast mode now.")
                bb.utils.remove(tempdir, recurse=True)
                os.mkdir(tempdir)
                ud.shallow_skip_fast = True
                self.clone_shallow_local(ud, shallowclone, d)
            logger.info("Creating tarball of git repository")
            with self.create_atomic(ud.fullshallow) as tfile:
                runfetchcmd("tar -czf %s ." % tfile, d, workdir=shallowclone)
            runfetchcmd("touch %s.done" % ud.fullshallow, d)
            ret = True
        finally:
            bb.utils.remove(tempdir, recurse=True)

        return ret

    def clone_shallow_local(self, ud, dest, d):
        """
        Shallow fetch from ud.clonedir (${DL_DIR}/git2/<gitrepo> by default):
        - For BB_GIT_SHALLOW_DEPTH: git fetch --depth <depth> rev
        - For BB_GIT_SHALLOW_REVS: git fetch --shallow-exclude=<revs> rev
        """
        progresshandler = GitProgressHandler(d)
        repourl = self._get_repo_url(ud)
        bb.utils.mkdirhier(dest)
        init_cmd = "%s init -q" % ud.basecmd
        if ud.bareclone:
            init_cmd += " --bare"
        runfetchcmd(init_cmd, d, workdir=dest)
        # Use repourl when creating a fast initial shallow clone
        # Prefer already existing full bare clones if available
        if not ud.shallow_skip_fast and not os.path.exists(ud.clonedir):
            remote = shlex.quote(repourl)
        else:
            remote = ud.clonedir
        runfetchcmd("%s remote add origin %s" % (ud.basecmd, remote), d, workdir=dest)

        # Check the histories which should be excluded
        shallow_exclude = ''
        for revision in ud.shallow_revs:
            shallow_exclude += " --shallow-exclude=%s" % revision

        revision = ud.revision
        depth = ud.shallow_depths[ud.name]

        # The --depth and --shallow-exclude can't be used together
        if depth and shallow_exclude:
            raise bb.fetch2.FetchError("BB_GIT_SHALLOW_REVS is set, but BB_GIT_SHALLOW_DEPTH is not 0.")

        # For nobranch, we need a ref, otherwise the commits will be
        # removed, and for non-nobranch, we truncate the branch to our
        # srcrev, to avoid keeping unnecessary history beyond that.
        branch = ud.branch
        if ud.nobranch:
            ref = "refs/shallow/%s" % ud.name
        elif ud.bareclone:
            ref = "refs/heads/%s" % branch
        else:
            ref = "refs/remotes/origin/%s" % branch

        fetch_cmd = "%s fetch origin %s" % (ud.basecmd, revision)
        if depth:
            fetch_cmd += " --depth %s" % depth

        if shallow_exclude:
            fetch_cmd += shallow_exclude

        # Advertise the revision for lower version git such as 2.25.1:
        # error: Server does not allow request for unadvertised object.
        # The ud.clonedir is a local temporary dir, will be removed when
        # fetch is done, so we can do anything on it.
        adv_cmd = 'git branch -f advertise-%s %s' % (revision, revision)
        if ud.shallow_skip_fast:
            runfetchcmd(adv_cmd, d, workdir=ud.clonedir)

        runfetchcmd(fetch_cmd, d, workdir=dest)
        runfetchcmd("%s update-ref %s %s" % (ud.basecmd, ref, revision), d, workdir=dest)

        # Fetch Git LFS data
        self.lfs_fetch(ud, d, dest, ud.revision)

        # Apply extra ref wildcards
        all_refs_remote = runfetchcmd("%s ls-remote origin 'refs/*'" % ud.basecmd, \
                d, workdir=dest).splitlines()
        all_refs = []
        for line in all_refs_remote:
            all_refs.append(line.split()[-1])
        extra_refs = []
        if 'tag' in ud.parm:
            extra_refs.append(ud.parm['tag'])

        for r in ud.shallow_extra_refs:
            if not ud.bareclone:
                r = r.replace('refs/heads/', 'refs/remotes/origin/')

            if '*' in r:
                matches = filter(lambda a: fnmatch.fnmatchcase(a, r), all_refs)
                extra_refs.extend(matches)
            else:
                extra_refs.append(r)

        for ref in extra_refs:
            ref_fetch = ref.replace('refs/heads/', '').replace('refs/remotes/origin/', '').replace('refs/tags/', '')
            runfetchcmd("%s fetch origin --depth 1 %s" % (ud.basecmd, ref_fetch), d, workdir=dest)
            revision = runfetchcmd("%s rev-parse FETCH_HEAD" % ud.basecmd, d, workdir=dest)
            runfetchcmd("%s update-ref %s %s" % (ud.basecmd, ref, revision), d, workdir=dest)

        # The url is local ud.clonedir, set it to upstream one
        runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, shlex.quote(repourl)), d, workdir=dest)

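    # Illustrative commands (assumed revision/depth values): with
    # BB_GIT_SHALLOW_DEPTH = "1", clone_shallow_local() above ends up running
    # roughly "git fetch origin <revision> --depth 1", while with
    # BB_GIT_SHALLOW_REVS set it instead appends "--shallow-exclude=<rev>" for
    # each listed revision (the two cannot be combined).
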
    def unpack(self, ud, destdir, d):
        """ unpack the downloaded src to destdir"""

        subdir = ud.parm.get("subdir")
        subpath = ud.parm.get("subpath")
        readpathspec = ""
        def_destsuffix = "git/"

        if subpath:
            readpathspec = ":%s" % subpath
            def_destsuffix = "%s/" % os.path.basename(subpath.rstrip('/'))

        if subdir:
            # If 'subdir' param exists, create a dir and use it as destination for unpack cmd
            if os.path.isabs(subdir):
                if not os.path.realpath(subdir).startswith(os.path.realpath(destdir)):
                    raise bb.fetch2.UnpackError("subdir argument isn't a subdirectory of unpack root %s" % destdir, ud.url)
                destdir = subdir
            else:
                destdir = os.path.join(destdir, subdir)
            def_destsuffix = ""

        destsuffix = ud.parm.get("destsuffix", def_destsuffix)
        destdir = ud.destdir = os.path.join(destdir, destsuffix)
        if os.path.exists(destdir):
            bb.utils.prunedir(destdir)
        if not ud.bareclone:
            ud.unpack_tracer.unpack("git", destdir)

        need_lfs = self._need_lfs(ud)

        if not need_lfs:
            ud.basecmd = "GIT_LFS_SKIP_SMUDGE=1 " + ud.basecmd

        source_found = False
        source_error = []

        clonedir_is_up_to_date = not self.clonedir_need_update(ud, d)
        if clonedir_is_up_to_date:
            runfetchcmd("%s clone %s %s/ %s" % (ud.basecmd, ud.cloneflags, ud.clonedir, destdir), d)
            source_found = True
        else:
            source_error.append("clone directory not available or not up to date: " + ud.clonedir)

        if not source_found:
            if ud.shallow:
                if os.path.exists(ud.fullshallow):
                    bb.utils.mkdirhier(destdir)
                    runfetchcmd("tar -xzf %s" % ud.fullshallow, d, workdir=destdir)
                    source_found = True
                else:
                    source_error.append("shallow clone not available: " + ud.fullshallow)
            else:
                source_error.append("shallow clone not enabled")

        if not source_found:
            raise bb.fetch2.UnpackError("No up to date source found: " + "; ".join(source_error), ud.url)

        # If there is a tag parameter in the url and we also have a fixed srcrev, check the tag
        # matches the revision
        if 'tag' in ud.parm and sha1_re.match(ud.revision):
            output = runfetchcmd("%s rev-list -n 1 %s" % (ud.basecmd, ud.parm['tag']), d, workdir=destdir)
            output = output.strip()
            if output != ud.revision:
                # It is possible ud.revision is the revision on an annotated tag which won't match the output of rev-list
                # If it resolves to the same thing there isn't a problem.
                output2 = runfetchcmd("%s rev-list -n 1 %s" % (ud.basecmd, ud.revision), d, workdir=destdir)
                output2 = output2.strip()
                if output != output2:
                    raise bb.fetch2.FetchError("The revision the git tag '%s' resolved to didn't match the SRCREV in use (%s vs %s)" % (ud.parm['tag'], output, ud.revision), ud.url)

        repourl = self._get_repo_url(ud)
        runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, shlex.quote(repourl)), d, workdir=destdir)

        if self._contains_lfs(ud, d, destdir):
            if not need_lfs:
                bb.note("Repository %s has LFS content but it is not being fetched" % (repourl))
            else:
                self._ensure_git_lfs(d, ud)

                runfetchcmd("%s lfs install --local" % ud.basecmd, d, workdir=destdir)

        if not ud.nocheckout:
            if subpath:
                runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revision, readpathspec), d,
                            workdir=destdir)
                runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d, workdir=destdir)
            elif not ud.nobranch:
                branchname = ud.branch
                runfetchcmd("%s checkout -B %s %s" % (ud.basecmd, branchname, \
                            ud.revision), d, workdir=destdir)
                runfetchcmd("%s branch %s --set-upstream-to origin/%s" % (ud.basecmd, branchname, \
                            branchname), d, workdir=destdir)
            else:
                runfetchcmd("%s checkout %s" % (ud.basecmd, ud.revision), d, workdir=destdir)

        return True

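    # Example of the unpack destination logic above (illustrative parameters):
    # with no options the checkout lands in <destdir>/git/; subpath=docs changes
    # the default suffix to docs/ and restricts the checkout to that path, and
    # destsuffix=source/ overrides the directory name explicitly.
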
    def clean(self, ud, d):
        """ clean the git directory """
        to_remove = [ud.localpath, ud.fullmirror, ud.fullmirror + ".done"]
        # The localpath is a symlink to clonedir when it is cloned from a
        # mirror, so remove both of them.
        if os.path.islink(ud.localpath):
            clonedir = os.path.realpath(ud.localpath)
            to_remove.append(clonedir)

        # Remove shallow mirror tarball
        if ud.shallow:
            to_remove.append(ud.fullshallow)
            to_remove.append(ud.fullshallow + ".done")

        for r in to_remove:
            if os.path.exists(r) or os.path.islink(r):
                bb.note('Removing %s' % r)
                bb.utils.remove(r, True)

    def supports_srcrev(self):
        return True

    def _contains_ref(self, ud, d, name, wd):
        cmd = ""
        if ud.nobranch:
            cmd = "%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % (
                ud.basecmd, ud.revision)
        else:
            cmd = "%s branch --contains %s --list %s 2> /dev/null | wc -l" % (
                ud.basecmd, ud.revision, ud.branch)
        try:
            output = runfetchcmd(cmd, d, quiet=True, workdir=wd)
        except bb.fetch2.FetchError:
            return False
        if len(output.split()) > 1:
            raise bb.fetch2.FetchError("The command '%s' gave output with more than one line unexpectedly, output: '%s'" % (cmd, output))
        return output.split()[0] != "0"

    def _lfs_objects_downloaded(self, ud, d, wd):
        """
        Verifies whether the LFS objects for requested revisions have already been downloaded
        """
        # Bail out early if this repository doesn't use LFS
        if not self._contains_lfs(ud, d, wd):
            return True

        self._ensure_git_lfs(d, ud)

        # The Git LFS specification specifies ([1]) the LFS folder layout so it should be safe to check for file
        # existence.
        # [1] https://github.com/git-lfs/git-lfs/blob/main/docs/spec.md#intercepting-git
        cmd = "%s lfs ls-files -l %s" \
                % (ud.basecmd, ud.revision)
        output = runfetchcmd(cmd, d, quiet=True, workdir=wd).rstrip()
        # Do not do any further matching if no objects are managed by LFS
        if not output:
            return True

        # Match all lines beginning with the hexadecimal OID
        oid_regex = re.compile("^(([a-fA-F0-9]{2})([a-fA-F0-9]{2})[A-Fa-f0-9]+)")
        for line in output.split("\n"):
            oid = re.search(oid_regex, line)
            if not oid:
                bb.warn("git lfs ls-files output '%s' did not match expected format." % line)
                # Skip unparsable lines rather than failing on them
                continue
            if not os.path.exists(os.path.join(wd, "lfs", "objects", oid.group(2), oid.group(3), oid.group(1))):
                return False

        return True

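    # Illustrative layout checked above (hypothetical OID): an LFS object with
    # OID "aabbcc..." is expected at <clonedir>/lfs/objects/aa/bb/aabbcc..., as
    # described in the Git LFS specification referenced in the method.
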
    def _need_lfs(self, ud):
        return ud.parm.get("lfs", "1") == "1"

    def _contains_lfs(self, ud, d, wd):
        """
        Check if the repository has 'lfs' (large file) content
        """
        cmd = "%s grep lfs %s:.gitattributes | wc -l" % (
            ud.basecmd, ud.revision)

        try:
            output = runfetchcmd(cmd, d, quiet=True, workdir=wd)
            if int(output) > 0:
                return True
        except (bb.fetch2.FetchError, ValueError):
            pass
        return False

    def _ensure_git_lfs(self, d, ud):
        """
        Ensures that git-lfs is available, raising a FetchError if it isn't.
        """
        if shutil.which("git-lfs", path=d.getVar('PATH')) is None:
            raise bb.fetch2.FetchError(
                "Repository %s has LFS content, install git-lfs on host to download (or set lfs=0 "
                "to ignore it)" % self._get_repo_url(ud))

    def _get_repo_url(self, ud):
        """
        Return the repository URL
        """
        # Note that we do not support passwords directly in the git urls. There are several
        # reasons. SRC_URI can be written out to things like buildhistory and people don't
        # want to leak passwords like that. It's also all too easy to share metadata without
        # removing the password. ssh keys, ~/.netrc and ~/.ssh/config files can be used as
        # alternatives so we will not take patches adding password support here.
        if ud.user:
            username = ud.user + '@'
        else:
            username = ""
        return "%s://%s%s%s" % (ud.proto, username, ud.host, urllib.parse.quote(ud.path))

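    # Examples of URLs built above (hypothetical hosts/paths): a SRC_URI of
    # "git://git.example.com/project.git;protocol=https" yields
    # "https://git.example.com/project.git", while with user "joe" and
    # protocol=ssh it becomes "ssh://joe@git.example.com/project.git".
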
    def _revision_key(self, ud, d, name):
        """
        Return a unique key for the url
        """
        # Collapse adjacent slashes
        return "git:" + ud.host + slash_re.sub(".", ud.path) + ud.unresolvedrev

    def _lsremote(self, ud, d, search):
        """
        Run git ls-remote with the specified search string
        """
        # Prevent recursion e.g. in OE if SRCPV is in PV, PV is in WORKDIR,
        # and WORKDIR is in PATH (as a result of RSS), our call to
        # runfetchcmd() exports PATH so this function will get called again (!)
        # In this scenario the return call of the function isn't actually
        # important - WORKDIR isn't needed in PATH to call git ls-remote
        # anyway.
        if d.getVar('_BB_GIT_IN_LSREMOTE', False):
            return ''
        d.setVar('_BB_GIT_IN_LSREMOTE', '1')
        try:
            repourl = self._get_repo_url(ud)
            cmd = "%s ls-remote %s %s" % \
                (ud.basecmd, shlex.quote(repourl), search)
            if ud.proto.lower() != 'file':
                bb.fetch2.check_network_access(d, cmd, repourl)
            output = runfetchcmd(cmd, d, True)
            if not output:
                raise bb.fetch2.FetchError("The command %s gave empty output unexpectedly" % cmd, ud.url)
        finally:
            d.delVar('_BB_GIT_IN_LSREMOTE')
        return output

    def _latest_revision(self, ud, d, name):
        """
        Compute the HEAD revision for the url
        """
        if not d.getVar("__BBSRCREV_SEEN"):
            raise bb.fetch2.FetchError("Recipe uses a floating tag/branch '%s' for repo '%s' without a fixed SRCREV yet doesn't call bb.fetch2.get_srcrev() (use SRCPV in PV for OE)." % (ud.unresolvedrev, ud.host+ud.path))

        # Ensure we mark as not cached
        bb.fetch2.mark_recipe_nocache(d)

        output = self._lsremote(ud, d, "")
        # Tags of the form ^{} may not work, need to fallback to other form
        if ud.unresolvedrev[:5] == "refs/" or ud.usehead:
            head = ud.unresolvedrev
            tag = ud.unresolvedrev
        else:
            head = "refs/heads/%s" % ud.unresolvedrev
            tag = "refs/tags/%s" % ud.unresolvedrev
        for s in [head, tag + "^{}", tag]:
            for l in output.strip().split('\n'):
                sha1, ref = l.split()
                if s == ref:
                    return sha1
        raise bb.fetch2.FetchError("Unable to resolve '%s' in upstream git repository in git ls-remote output for %s" % \
            (ud.unresolvedrev, ud.host+ud.path))

    def latest_versionstring(self, ud, d):
        """
        Compute the latest release name like "x.y.z" in "x.y.z+gitHASH"
        by searching through the tags output of ls-remote, comparing
        versions and returning the highest match.
        """
        pupver = ('', '')

        try:
            output = self._lsremote(ud, d, "refs/tags/*")
        except (bb.fetch2.FetchError, bb.fetch2.NetworkAccess) as e:
            bb.note("Could not list remote: %s" % str(e))
            return pupver

        rev_tag_re = re.compile(r"([0-9a-f]{40})\s+refs/tags/(.*)")
        pver_re = re.compile(d.getVar('UPSTREAM_CHECK_GITTAGREGEX') or r"(?P<pver>([0-9][\.|_]?)+)")
        nonrel_re = re.compile(r"(alpha|beta|rc|final)+")

        verstring = ""
        for line in output.split("\n"):
            if not line:
                break

            m = rev_tag_re.match(line)
            if not m:
                continue

            (revision, tag) = m.groups()

            # Ignore non-released branches
            if nonrel_re.search(tag):
                continue

            # search for version in the line
            m = pver_re.search(tag)
            if not m:
                continue

            pver = m.group('pver').replace("_", ".")

            if verstring and bb.utils.vercmp(("0", pver, ""), ("0", verstring, "")) < 0:
                continue

            verstring = pver
            pupver = (verstring, revision)

        return pupver

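    # Illustrative tag handling for latest_versionstring() above (hypothetical
    # tags): "refs/tags/v1.2.3" matches the default version pattern and yields
    # pupver ("1.2.3", <revision>), while tags containing "alpha", "beta", "rc"
    # or "final" are skipped as non-releases.
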
    def _build_revision(self, ud, d, name):
        return ud.revision

    def gitpkgv_revision(self, ud, d, name):
        """
        Return a sortable revision number by counting commits in the history
        Based on gitpkgv.bbclass in meta-openembedded
        """
        rev = ud.revision
        localpath = ud.localpath
        rev_file = os.path.join(localpath, "oe-gitpkgv_" + rev)
        if not os.path.exists(localpath):
            commits = None
        else:
            if not os.path.exists(rev_file) or not os.path.getsize(rev_file):
                commits = bb.fetch2.runfetchcmd(
                        "git rev-list %s -- | wc -l" % shlex.quote(rev),
                        d, quiet=True).strip().lstrip('0')
                if commits:
                    open(rev_file, "w").write("%d\n" % int(commits))
            else:
                commits = open(rev_file, "r").readline(128).strip()
        if commits:
            return False, "%s+%s" % (commits, rev[:7])
        else:
            return True, str(rev)

    def checkstatus(self, fetch, ud, d):
        try:
            self._lsremote(ud, d, "")
            return True
        except bb.fetch2.FetchError:
            return False